mirror of https://github.com/openclaw/openclaw.git
Merge remote-tracking branch 'origin/main' into fix/session-index-reset-deleted
# Conflicts: # src/agents/memory-search.test.ts # src/agents/memory-search.ts # src/config/schema.help.ts # src/config/schema.labels.ts # src/config/types.tools.ts # src/config/zod-schema.agent-runtime.ts # src/memory/manager-sync-ops.ts
This commit is contained in:
commit
d3abd97d92
|
|
@ -123,3 +123,11 @@ dist/protocol.schema.json
|
|||
# Synthing
|
||||
**/.stfolder/
|
||||
.dev-state
|
||||
docs/superpowers/plans/2026-03-10-collapsed-side-nav.md
|
||||
docs/superpowers/specs/2026-03-10-collapsed-side-nav-design.md
|
||||
.gitignore
|
||||
test/config-form.analyze.telegram.test.ts
|
||||
ui/src/ui/theme-variants.browser.test.ts
|
||||
ui/src/ui/__screenshots__/navigation.browser.test.ts/control-UI-routing-auto-scrolls-chat-history-to-the-latest-message-1.png
|
||||
ui/src/ui/__screenshots__/navigation.browser.test.ts/control-UI-routing-auto-scrolls-chat-history-to-the-latest-message-1.png
|
||||
ui/src/ui/__screenshots__/navigation.browser.test.ts/control-UI-routing-auto-scrolls-chat-history-to-the-latest-message-1.png
|
||||
|
|
|
|||
|
|
@ -118,6 +118,7 @@
|
|||
- Keep files concise; extract helpers instead of “V2” copies. Use existing patterns for CLI options and dependency injection via `createDefaultDeps`.
|
||||
- Aim to keep files under ~700 LOC; guideline only (not a hard guardrail). Split/refactor when it improves clarity or testability.
|
||||
- Naming: use **OpenClaw** for product/app/docs headings; use `openclaw` for CLI command, package/binary, paths, and config keys.
|
||||
- Written English: use American spelling and grammar in code, comments, docs, and UI strings (e.g. "color" not "colour", "behavior" not "behaviour", "analyze" not "analyse").
|
||||
|
||||
## Release Channels (Naming)
|
||||
|
||||
|
|
|
|||
19
CHANGELOG.md
19
CHANGELOG.md
|
|
@ -8,9 +8,12 @@ Docs: https://docs.openclaw.ai
|
|||
|
||||
- Agents/subagents: add `sessions_yield` so orchestrators can end the current turn immediately, skip queued tool work, and carry a hidden follow-up payload into the next session turn. (#36537) thanks @jriff
|
||||
- Docs/Kubernetes: Add a starter K8s install path with raw manifests, Kind setup, and deployment docs. Thanks @sallyom @dzianisv @egkristi
|
||||
- Control UI/dashboard-v2: refresh the gateway dashboard with modular overview, chat, config, agent, and session views, plus a command palette, mobile bottom tabs, and richer chat tools like slash commands, search, export, and pinned messages. (#41503) Thanks @BunsDev.
|
||||
|
||||
### Fixes
|
||||
|
||||
- Ollama/Kimi Cloud: apply the Moonshot Kimi payload compatibility wrapper to Ollama-hosted Kimi models like `kimi-k2.5:cloud`, so tool routing no longer breaks when thinking is enabled. (#41519) Thanks @vincentkoc.
|
||||
- Models/Kimi Coding: send the built-in `User-Agent: claude-code/0.1.0` header by default for `kimi-coding` while still allowing explicit provider headers to override it, so Kimi Code subscription auth can work without a local header-injection proxy. (#30099) Thanks @Amineelfarssi and @vincentkoc.
|
||||
- Security/plugins: disable implicit workspace plugin auto-load so cloned repositories cannot execute workspace plugin code without an explicit trust decision. (`GHSA-99qw-6mr3-36qr`)(#44174) Thanks @lintsinghua and @vincentkoc.
|
||||
- Moonshot CN API: respect explicit `baseUrl` (api.moonshot.cn) in implicit provider resolution so platform.moonshot.cn API keys authenticate correctly instead of returning HTTP 401. (#33637) Thanks @chengzhichao-xydt.
|
||||
- Kimi Coding/provider config: respect explicit `models.providers["kimi-coding"].baseUrl` when resolving the implicit provider so custom Kimi Coding endpoints no longer get overwritten by the built-in default. (#36353) Thanks @2233admin.
|
||||
|
|
@ -32,6 +35,8 @@ Docs: https://docs.openclaw.ai
|
|||
- Security/exec approvals: escape invisible Unicode format characters in approval prompts so zero-width command text renders as visible `\u{...}` escapes instead of spoofing the reviewed command. (`GHSA-pcqg-f7rg-xfvv`)(#43687) Thanks @EkiXu and @vincentkoc.
|
||||
- Security/exec detection: normalize compatibility Unicode and strip invisible formatting code points before obfuscation checks so zero-width and fullwidth command tricks no longer suppress heuristic detection. (`GHSA-9r3v-37xh-2cf6`)(#44091) Thanks @wooluo and @vincentkoc.
|
||||
- Security/exec allowlist: preserve POSIX case sensitivity and keep `?` within a single path segment so exact-looking allowlist patterns no longer overmatch executables across case or directory boundaries. (`GHSA-f8r2-vg7x-gh8m`)(#43798) Thanks @zpbrent and @vincentkoc.
|
||||
- Security/commands: require sender ownership for `/config` and `/debug` so authorized non-owner senders can no longer reach owner-only config and runtime debug surfaces. (`GHSA-r7vr-gr74-94p8`)(#44305) Thanks @tdjackey and @vincentkoc.
|
||||
- Security/gateway auth: clear unbound client-declared scopes on shared-token WebSocket connects so device-less shared-token operators cannot self-declare elevated scopes. (`GHSA-rqpp-rjj8-7wv8`)(#44306) Thanks @LUOYEcode and @vincentkoc.
|
||||
- Security/browser.request: block persistent browser profile create/delete routes from write-scoped `browser.request` so callers can no longer persist admin-only browser profile changes through the browser control surface. (`GHSA-vmhq-cqm9-6p7q`)(#43800) Thanks @tdjackey and @vincentkoc.
|
||||
- Security/agent: reject public spawned-run lineage fields and keep workspace inheritance on the internal spawned-session path so external `agent` callers can no longer override the gateway workspace boundary. (`GHSA-2rqg-gjgv-84jm`)(#43801) Thanks @tdjackey and @vincentkoc.
|
||||
- Security/session_status: enforce sandbox session-tree visibility and shared agent-to-agent access guards before reading or mutating target session state, so sandboxed subagents can no longer inspect parent session metadata or write parent model overrides via `session_status`. (`GHSA-wcxr-59v9-rxr8`)(#43754) Thanks @tdjackey and @vincentkoc.
|
||||
|
|
@ -44,8 +49,19 @@ Docs: https://docs.openclaw.ai
|
|||
- Security/Feishu reactions: preserve looked-up group chat typing and fail closed on ambiguous reaction context so group authorization and mention gating cannot be bypassed through synthetic `p2p` reactions. (`GHSA-m69h-jm2f-2pv8`)(#44088) Thanks @zpbrent and @vincentkoc.
|
||||
- Security/LINE webhook: require signatures for empty-event POST probes too so unsigned requests no longer confirm webhook reachability with a `200` response. (`GHSA-mhxh-9pjm-w7q5`)(#44090) Thanks @TerminalsandCoffee and @vincentkoc.
|
||||
- Security/Zalo webhook: rate limit invalid secret guesses before auth so weak webhook secrets cannot be brute-forced through unauthenticated churned requests without pre-auth `429` responses. (`GHSA-5m9r-p9g7-679c`)(#44173) Thanks @zpbrent and @vincentkoc.
|
||||
- Security/exec approvals: fail closed for ambiguous inline loader and shell-payload script execution, bind the real script after POSIX shell value-taking flags, and unwrap `pnpm`/`npm exec`/`npx` script runners before approval binding. (`GHSA-57jw-9722-6rf2`)(`GHSA-jvqh-rfmh-jh27`)(`GHSA-x7pp-23xv-mmr4`)(`GHSA-jc5j-vg4r-j5jx`)(#44247) Thanks @tdjackey and @vincentkoc.
|
||||
- Doctor/gateway service audit: canonicalize service entrypoint paths before comparing them so symlink-vs-realpath installs no longer trigger false "entrypoint does not match the current install" repair prompts. (#43882) Thanks @ngutman.
|
||||
- Doctor/gateway service audit: earlier groundwork for this fix landed in the superseded #28338 branch. Thanks @realriphub.
|
||||
- Gateway/session stores: regenerate the Swift push-test protocol models and align Windows native session-store realpath handling so protocol checks and sync session discovery stop drifting on Windows. (#44266) thanks @jalehman.
|
||||
- Context engine/session routing: forward optional `sessionKey` through context-engine lifecycle calls so plugins can see structured routing metadata during bootstrap, assembly, post-turn ingestion, and compaction. (#44157) thanks @jalehman.
|
||||
- Agents/failover: classify z.ai `network_error` stop reasons as retryable timeouts so provider connectivity failures trigger fallback instead of surfacing raw unhandled-stop-reason errors. (#43884) Thanks @hougangdev.
|
||||
- Memory/session sync: add mode-aware post-compaction session reindexing with `agents.defaults.compaction.postIndexSync` plus `agents.defaults.memorySearch.sync.sessions.postCompactionForce`, so compacted session memory can refresh immediately without forcing every deployment into synchronous reindexing. (#25561) thanks @rodrigouroz.
|
||||
- Telegram/model picker: make inline model button selections persist the chosen session model correctly, clear overrides when selecting the configured default, and include effective fallback models in `/models` button validation. (#40105) Thanks @avirweb.
|
||||
- Mattermost/reply media delivery: pass agent-scoped `mediaLocalRoots` through shared reply delivery so allowed local files upload correctly from button, slash-command, and model-picker replies. (#44021) Thanks @LyleLiu666.
|
||||
- Plugins/env-scoped roots: fix plugin discovery/load caches and provenance tracking so same-process `HOME`/`OPENCLAW_HOME` changes no longer reuse stale plugin state or misreport `~/...` plugins as untracked. (#44046) thanks @gumadeiras.
|
||||
- Gateway/session discovery: discover disk-only and retired ACP session stores under custom templated `session.store` roots so ACP reconciliation, session-id/session-label targeting, and run-id fallback keep working after restart. (#44176) thanks @gumadeiras.
|
||||
- Models/OpenRouter native ids: canonicalize native OpenRouter model keys across config writes, runtime lookups, fallback management, and `models list --plain`, and migrate legacy duplicated `openrouter/openrouter/...` config entries forward on write.
|
||||
- Gateway/hooks: bucket hook auth failures by forwarded client IP behind trusted proxies and warn when `hooks.allowedAgentIds` leaves hook routing unrestricted.
|
||||
|
||||
## 2026.3.11
|
||||
|
||||
|
|
@ -137,6 +153,7 @@ Docs: https://docs.openclaw.ai
|
|||
- Gateway/session reset auth: split conversation `/new` and `/reset` handling away from the admin-only `sessions.reset` control-plane RPC so write-scoped gateway callers can no longer reach the privileged reset path through `agent`. Thanks @tdjackey for reporting.
|
||||
- Security/plugin runtime: stop unauthenticated plugin HTTP routes from inheriting synthetic admin gateway scopes when they call `runtime.subagent.*`, so admin-only methods like `sessions.delete` stay blocked without gateway auth.
|
||||
- Security/nodes: treat the `nodes` agent tool as owner-only fallback policy so non-owner senders cannot reach paired-node approval or invoke paths through the shared tool set.
|
||||
- Sandbox/sessions_spawn: restore real workspace handoff for read-only sandboxed sessions so spawned subagents mount the configured workspace at `/agent` instead of inheriting the sandbox copy. Related #40582.
|
||||
- Security/external content: treat whitespace-delimited `EXTERNAL UNTRUSTED CONTENT` boundary markers like underscore-delimited variants so prompt wrappers cannot bypass marker sanitization. (#35983) Thanks @urianpaul94.
|
||||
- Telegram/exec approvals: reject `/approve` commands aimed at other bots, keep deterministic approval prompts visible when tool-result delivery fails, and stop resolved exact IDs from matching other pending approvals by prefix. (#37233) Thanks @huntharo.
|
||||
- Subagents/authority: persist leaf vs orchestrator control scope at spawn time and route tool plus slash-command control through shared ownership checks, so leaf sessions cannot regain orchestration privileges after restore or flat-key lookups. Thanks @tdjackey.
|
||||
|
|
@ -178,6 +195,8 @@ Docs: https://docs.openclaw.ai
|
|||
- Status/context windows: normalize provider-qualified override cache keys so `/status` resolves the active provider's configured context window even when `models.providers` keys use mixed case or surrounding whitespace. (#36389) Thanks @haoruilee.
|
||||
- ACP/main session aliases: canonicalize `main` before ACP session lookup so restarted ACP main sessions rehydrate instead of failing closed with `Session is not ACP-enabled: main`. (#43285, fixes #25692)
|
||||
- Agents/embedded runner: recover canonical allowlisted tool names from malformed `toolCallId` and malformed non-blank tool-name variants before dispatch, while failing closed on ambiguous matches. (#34485) thanks @yuweuii.
|
||||
- Agents/failover: classify ZenMux quota-refresh `402` responses as `rate_limit` so model fallback retries continue instead of stopping on a temporary subscription window. (#43917) thanks @bwjoke.
|
||||
- Agents/failover: classify HTTP 422 malformed-request responses as `format` and recognize OpenRouter "requires more credits" billing errors so provider fallback triggers instead of surfacing raw errors. (#43823) thanks @jnMetaCode.
|
||||
|
||||
## 2026.3.8
|
||||
|
||||
|
|
|
|||
|
|
@ -92,6 +92,7 @@ Welcome to the lobster tank! 🦞
|
|||
- Describe what & why
|
||||
- Reply to or resolve bot review conversations you addressed before asking for review again
|
||||
- **Include screenshots** — one showing the problem/before, one showing the fix/after (for UI or visual changes)
|
||||
- Use American English spelling and grammar in code, comments, docs, and UI strings
|
||||
|
||||
## Review Conversations Are Author-Owned
|
||||
|
||||
|
|
|
|||
|
|
@ -2198,7 +2198,7 @@ Anthropic-compatible, built-in provider. Shortcut: `openclaw onboard --auth-choi
|
|||
{
|
||||
id: "hf:MiniMaxAI/MiniMax-M2.5",
|
||||
name: "MiniMax M2.5",
|
||||
reasoning: false,
|
||||
reasoning: true,
|
||||
input: ["text"],
|
||||
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
|
||||
contextWindow: 192000,
|
||||
|
|
@ -2238,7 +2238,7 @@ Base URL should omit `/v1` (Anthropic client appends it). Shortcut: `openclaw on
|
|||
{
|
||||
id: "MiniMax-M2.5",
|
||||
name: "MiniMax M2.5",
|
||||
reasoning: false,
|
||||
reasoning: true,
|
||||
input: ["text"],
|
||||
cost: { input: 15, output: 60, cacheRead: 2, cacheWrite: 10 },
|
||||
contextWindow: 200000,
|
||||
|
|
|
|||
|
|
@ -151,7 +151,7 @@ Configure manually via `openclaw.json`:
|
|||
{
|
||||
id: "minimax-m2.5-gs32",
|
||||
name: "MiniMax M2.5 GS32",
|
||||
reasoning: false,
|
||||
reasoning: true,
|
||||
input: ["text"],
|
||||
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
|
||||
contextWindow: 196608,
|
||||
|
|
|
|||
|
|
@ -69,6 +69,7 @@ describe("failover-error", () => {
|
|||
expect(resolveFailoverReasonFromError({ status: 408 })).toBe("timeout");
|
||||
expect(resolveFailoverReasonFromError({ status: 499 })).toBe("timeout");
|
||||
expect(resolveFailoverReasonFromError({ status: 400 })).toBe("format");
|
||||
expect(resolveFailoverReasonFromError({ status: 422 })).toBe("format");
|
||||
// Keep the status-only path behavior-preserving and conservative.
|
||||
expect(resolveFailoverReasonFromError({ status: 500 })).toBeNull();
|
||||
expect(resolveFailoverReasonFromError({ status: 502 })).toBe("timeout");
|
||||
|
|
@ -162,6 +163,44 @@ describe("failover-error", () => {
|
|||
).toBe("billing");
|
||||
});
|
||||
|
||||
it("treats HTTP 422 as format error", () => {
|
||||
expect(
|
||||
resolveFailoverReasonFromError({
|
||||
status: 422,
|
||||
message: "check open ai req parameter error",
|
||||
}),
|
||||
).toBe("format");
|
||||
expect(
|
||||
resolveFailoverReasonFromError({
|
||||
status: 422,
|
||||
message: "Unprocessable Entity",
|
||||
}),
|
||||
).toBe("format");
|
||||
});
|
||||
|
||||
it("treats 422 with billing message as billing instead of format", () => {
|
||||
expect(
|
||||
resolveFailoverReasonFromError({
|
||||
status: 422,
|
||||
message: "insufficient credits",
|
||||
}),
|
||||
).toBe("billing");
|
||||
});
|
||||
|
||||
it("classifies OpenRouter 'requires more credits' text as billing", () => {
|
||||
expect(
|
||||
resolveFailoverReasonFromError({
|
||||
message: "This model requires more credits to use",
|
||||
}),
|
||||
).toBe("billing");
|
||||
expect(
|
||||
resolveFailoverReasonFromError({
|
||||
status: 402,
|
||||
message: "This model require more credits",
|
||||
}),
|
||||
).toBe("billing");
|
||||
});
|
||||
|
||||
it("treats zhipuai weekly/monthly limit exhausted as rate_limit", () => {
|
||||
expect(
|
||||
resolveFailoverReasonFromError({
|
||||
|
|
@ -204,6 +243,13 @@ describe("failover-error", () => {
|
|||
message: "Workspace spend limit reached. Contact your admin.",
|
||||
}),
|
||||
).toBe("rate_limit");
|
||||
expect(
|
||||
resolveFailoverReasonFromError({
|
||||
status: 402,
|
||||
message:
|
||||
"You have reached your subscription quota limit. Please wait for automatic quota refresh in the rolling time window, upgrade to a higher plan, or use a Pay-As-You-Go API Key for unlimited access. Learn more: https://zenmux.ai/docs/guide/subscription.html",
|
||||
}),
|
||||
).toBe("rate_limit");
|
||||
expect(
|
||||
resolveFailoverReasonFromError({
|
||||
status: 402,
|
||||
|
|
@ -289,6 +335,9 @@ describe("failover-error", () => {
|
|||
expect(resolveFailoverReasonFromError({ message: "stop reason: error" })).toBe("timeout");
|
||||
expect(resolveFailoverReasonFromError({ message: "reason: abort" })).toBe("timeout");
|
||||
expect(resolveFailoverReasonFromError({ message: "reason: error" })).toBe("timeout");
|
||||
expect(
|
||||
resolveFailoverReasonFromError({ message: "Unhandled stop reason: network_error" }),
|
||||
).toBe("timeout");
|
||||
});
|
||||
|
||||
it("infers timeout from connection/network error messages", () => {
|
||||
|
|
|
|||
|
|
@ -285,6 +285,7 @@ describe("memory search config", () => {
|
|||
deltaBytes: 100000,
|
||||
deltaMessages: 50,
|
||||
includeResetArchives: false,
|
||||
postCompactionForce: true,
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
|||
|
|
@ -62,6 +62,7 @@ export type ResolvedMemorySearchConfig = {
|
|||
deltaBytes: number;
|
||||
deltaMessages: number;
|
||||
includeResetArchives: boolean;
|
||||
postCompactionForce: boolean;
|
||||
};
|
||||
};
|
||||
query: {
|
||||
|
|
@ -254,6 +255,10 @@ function mergeConfig(
|
|||
overrides?.sync?.sessions?.includeResetArchives ??
|
||||
defaults?.sync?.sessions?.includeResetArchives ??
|
||||
DEFAULT_SESSION_INCLUDE_RESET_ARCHIVES,
|
||||
postCompactionForce:
|
||||
overrides?.sync?.sessions?.postCompactionForce ??
|
||||
defaults?.sync?.sessions?.postCompactionForce ??
|
||||
true,
|
||||
},
|
||||
};
|
||||
const query = {
|
||||
|
|
@ -321,6 +326,7 @@ function mergeConfig(
|
|||
);
|
||||
const deltaBytes = clampInt(sync.sessions.deltaBytes, 0, Number.MAX_SAFE_INTEGER);
|
||||
const deltaMessages = clampInt(sync.sessions.deltaMessages, 0, Number.MAX_SAFE_INTEGER);
|
||||
const postCompactionForce = sync.sessions.postCompactionForce;
|
||||
return {
|
||||
enabled,
|
||||
sources,
|
||||
|
|
@ -343,6 +349,7 @@ function mergeConfig(
|
|||
deltaBytes,
|
||||
deltaMessages,
|
||||
includeResetArchives: Boolean(sync.sessions.includeResetArchives),
|
||||
postCompactionForce,
|
||||
},
|
||||
},
|
||||
query: {
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
import { resolveThinkingDefaultForModel } from "../auto-reply/thinking.js";
|
||||
import type { OpenClawConfig } from "../config/config.js";
|
||||
import {
|
||||
resolveAgentModelFallbackValues,
|
||||
|
|
@ -36,7 +37,6 @@ const ANTHROPIC_MODEL_ALIASES: Record<string, string> = {
|
|||
"sonnet-4.6": "claude-sonnet-4-6",
|
||||
"sonnet-4.5": "claude-sonnet-4-5",
|
||||
};
|
||||
const CLAUDE_46_MODEL_RE = /claude-(?:opus|sonnet)-4(?:\.|-)6(?:$|[-.])/i;
|
||||
|
||||
function normalizeAliasKey(value: string): string {
|
||||
return value.trim().toLowerCase();
|
||||
|
|
@ -629,8 +629,8 @@ export function resolveThinkingDefault(params: {
|
|||
model: string;
|
||||
catalog?: ModelCatalogEntry[];
|
||||
}): ThinkLevel {
|
||||
const normalizedProvider = normalizeProviderId(params.provider);
|
||||
const modelLower = params.model.toLowerCase();
|
||||
const _normalizedProvider = normalizeProviderId(params.provider);
|
||||
const _modelLower = params.model.toLowerCase();
|
||||
const configuredModels = params.cfg.agents?.defaults?.models;
|
||||
const canonicalKey = modelKey(params.provider, params.model);
|
||||
const legacyKey = legacyModelKey(params.provider, params.model);
|
||||
|
|
@ -652,21 +652,11 @@ export function resolveThinkingDefault(params: {
|
|||
if (configured) {
|
||||
return configured;
|
||||
}
|
||||
const isAnthropicFamilyModel =
|
||||
normalizedProvider === "anthropic" ||
|
||||
normalizedProvider === "amazon-bedrock" ||
|
||||
modelLower.includes("anthropic/") ||
|
||||
modelLower.includes(".anthropic.");
|
||||
if (isAnthropicFamilyModel && CLAUDE_46_MODEL_RE.test(modelLower)) {
|
||||
return "adaptive";
|
||||
}
|
||||
const candidate = params.catalog?.find(
|
||||
(entry) => entry.provider === params.provider && entry.id === params.model,
|
||||
);
|
||||
if (candidate?.reasoning) {
|
||||
return "low";
|
||||
}
|
||||
return "off";
|
||||
return resolveThinkingDefaultForModel({
|
||||
provider: params.provider,
|
||||
model: params.model,
|
||||
catalog: params.catalog,
|
||||
});
|
||||
}
|
||||
|
||||
/** Default reasoning level when session/directive do not set it: "on" if model supports reasoning, else "off". */
|
||||
|
|
|
|||
|
|
@ -66,6 +66,42 @@ describe("models-config merge helpers", () => {
|
|||
});
|
||||
});
|
||||
|
||||
it("preserves implicit provider headers when explicit config adds extra headers", () => {
|
||||
const merged = mergeProviderModels(
|
||||
{
|
||||
baseUrl: "https://api.example.com",
|
||||
api: "anthropic-messages",
|
||||
headers: { "User-Agent": "claude-code/0.1.0" },
|
||||
models: [
|
||||
{
|
||||
id: "k2p5",
|
||||
name: "Kimi for Coding",
|
||||
input: ["text", "image"],
|
||||
reasoning: true,
|
||||
},
|
||||
],
|
||||
} as unknown as ProviderConfig,
|
||||
{
|
||||
baseUrl: "https://api.example.com",
|
||||
api: "anthropic-messages",
|
||||
headers: { "X-Kimi-Tenant": "tenant-a" },
|
||||
models: [
|
||||
{
|
||||
id: "k2p5",
|
||||
name: "Kimi for Coding",
|
||||
input: ["text", "image"],
|
||||
reasoning: true,
|
||||
},
|
||||
],
|
||||
} as unknown as ProviderConfig,
|
||||
);
|
||||
|
||||
expect(merged.headers).toEqual({
|
||||
"User-Agent": "claude-code/0.1.0",
|
||||
"X-Kimi-Tenant": "tenant-a",
|
||||
});
|
||||
});
|
||||
|
||||
it("replaces stale baseUrl when model api surface changes", () => {
|
||||
const merged = mergeWithExistingProviderSecrets({
|
||||
nextProviders: {
|
||||
|
|
|
|||
|
|
@ -39,8 +39,27 @@ export function mergeProviderModels(
|
|||
): ProviderConfig {
|
||||
const implicitModels = Array.isArray(implicit.models) ? implicit.models : [];
|
||||
const explicitModels = Array.isArray(explicit.models) ? explicit.models : [];
|
||||
const implicitHeaders =
|
||||
implicit.headers && typeof implicit.headers === "object" && !Array.isArray(implicit.headers)
|
||||
? implicit.headers
|
||||
: undefined;
|
||||
const explicitHeaders =
|
||||
explicit.headers && typeof explicit.headers === "object" && !Array.isArray(explicit.headers)
|
||||
? explicit.headers
|
||||
: undefined;
|
||||
if (implicitModels.length === 0) {
|
||||
return { ...implicit, ...explicit };
|
||||
return {
|
||||
...implicit,
|
||||
...explicit,
|
||||
...(implicitHeaders || explicitHeaders
|
||||
? {
|
||||
headers: {
|
||||
...implicitHeaders,
|
||||
...explicitHeaders,
|
||||
},
|
||||
}
|
||||
: {}),
|
||||
};
|
||||
}
|
||||
|
||||
const implicitById = new Map(
|
||||
|
|
@ -93,6 +112,14 @@ export function mergeProviderModels(
|
|||
return {
|
||||
...implicit,
|
||||
...explicit,
|
||||
...(implicitHeaders || explicitHeaders
|
||||
? {
|
||||
headers: {
|
||||
...implicitHeaders,
|
||||
...explicitHeaders,
|
||||
},
|
||||
}
|
||||
: {}),
|
||||
models: mergedModels,
|
||||
};
|
||||
}
|
||||
|
|
|
|||
|
|
@ -26,6 +26,7 @@ describe("kimi-coding implicit provider (#22409)", () => {
|
|||
const provider = buildKimiCodingProvider();
|
||||
expect(provider.api).toBe("anthropic-messages");
|
||||
expect(provider.baseUrl).toBe("https://api.kimi.com/coding/");
|
||||
expect(provider.headers).toEqual({ "User-Agent": "claude-code/0.1.0" });
|
||||
expect(provider.models).toBeDefined();
|
||||
expect(provider.models.length).toBeGreaterThan(0);
|
||||
expect(provider.models[0].id).toBe("k2p5");
|
||||
|
|
@ -65,4 +66,33 @@ describe("kimi-coding implicit provider (#22409)", () => {
|
|||
envSnapshot.restore();
|
||||
}
|
||||
});
|
||||
|
||||
it("merges explicit kimi-coding headers on top of the built-in user agent", async () => {
|
||||
const agentDir = mkdtempSync(join(tmpdir(), "openclaw-test-"));
|
||||
const envSnapshot = captureEnv(["KIMI_API_KEY"]);
|
||||
process.env.KIMI_API_KEY = "test-key";
|
||||
|
||||
try {
|
||||
const providers = await resolveImplicitProvidersForTest({
|
||||
agentDir,
|
||||
explicitProviders: {
|
||||
"kimi-coding": {
|
||||
baseUrl: "https://api.kimi.com/coding/",
|
||||
api: "anthropic-messages",
|
||||
headers: {
|
||||
"User-Agent": "custom-kimi-client/1.0",
|
||||
"X-Kimi-Tenant": "tenant-a",
|
||||
},
|
||||
models: buildKimiCodingProvider().models,
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(providers?.["kimi-coding"]?.headers).toEqual({
|
||||
"User-Agent": "custom-kimi-client/1.0",
|
||||
"X-Kimi-Tenant": "tenant-a",
|
||||
});
|
||||
} finally {
|
||||
envSnapshot.restore();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -95,6 +95,7 @@ const MOONSHOT_DEFAULT_COST = {
|
|||
};
|
||||
|
||||
const KIMI_CODING_BASE_URL = "https://api.kimi.com/coding/";
|
||||
const KIMI_CODING_USER_AGENT = "claude-code/0.1.0";
|
||||
const KIMI_CODING_DEFAULT_MODEL_ID = "k2p5";
|
||||
const KIMI_CODING_DEFAULT_CONTEXT_WINDOW = 262144;
|
||||
const KIMI_CODING_DEFAULT_MAX_TOKENS = 32768;
|
||||
|
|
@ -186,7 +187,7 @@ const MODELSTUDIO_MODEL_CATALOG: ReadonlyArray<ProviderModelConfig> = [
|
|||
{
|
||||
id: "MiniMax-M2.5",
|
||||
name: "MiniMax-M2.5",
|
||||
reasoning: false,
|
||||
reasoning: true,
|
||||
input: ["text"],
|
||||
cost: MODELSTUDIO_DEFAULT_COST,
|
||||
contextWindow: 1_000_000,
|
||||
|
|
@ -308,6 +309,9 @@ export function buildKimiCodingProvider(): ProviderConfig {
|
|||
return {
|
||||
baseUrl: KIMI_CODING_BASE_URL,
|
||||
api: "anthropic-messages",
|
||||
headers: {
|
||||
"User-Agent": KIMI_CODING_USER_AGENT,
|
||||
},
|
||||
models: [
|
||||
{
|
||||
id: KIMI_CODING_DEFAULT_MODEL_ID,
|
||||
|
|
|
|||
|
|
@ -667,12 +667,24 @@ const SIMPLE_IMPLICIT_PROVIDER_LOADERS: ImplicitProviderLoader[] = [
|
|||
};
|
||||
}),
|
||||
withApiKey("kimi-coding", async ({ apiKey, explicitProvider }) => {
|
||||
const builtInProvider = buildKimiCodingProvider();
|
||||
const explicitBaseUrl = explicitProvider?.baseUrl;
|
||||
const explicitHeaders = isRecord(explicitProvider?.headers)
|
||||
? (explicitProvider.headers as ProviderConfig["headers"])
|
||||
: undefined;
|
||||
return {
|
||||
...buildKimiCodingProvider(),
|
||||
...builtInProvider,
|
||||
...(typeof explicitBaseUrl === "string" && explicitBaseUrl.trim()
|
||||
? { baseUrl: explicitBaseUrl.trim() }
|
||||
: {}),
|
||||
...(explicitHeaders
|
||||
? {
|
||||
headers: {
|
||||
...builtInProvider.headers,
|
||||
...explicitHeaders,
|
||||
},
|
||||
}
|
||||
: {}),
|
||||
apiKey,
|
||||
};
|
||||
}),
|
||||
|
|
|
|||
|
|
@ -110,6 +110,9 @@ describe("isBillingErrorMessage", () => {
|
|||
// Venice returns "Insufficient USD or Diem balance" which has extra words
|
||||
// between "insufficient" and "balance"
|
||||
"Insufficient USD or Diem balance to complete request. Visit https://venice.ai/settings/api to add credits.",
|
||||
// OpenRouter returns "requires more credits" for underfunded accounts
|
||||
"This model requires more credits to use",
|
||||
"This endpoint require more credits",
|
||||
];
|
||||
for (const sample of samples) {
|
||||
expect(isBillingErrorMessage(sample)).toBe(true);
|
||||
|
|
@ -503,6 +506,18 @@ describe("isTransientHttpError", () => {
|
|||
});
|
||||
|
||||
describe("classifyFailoverReasonFromHttpStatus", () => {
|
||||
it("treats HTTP 422 as format error", () => {
|
||||
expect(classifyFailoverReasonFromHttpStatus(422)).toBe("format");
|
||||
expect(classifyFailoverReasonFromHttpStatus(422, "check open ai req parameter error")).toBe(
|
||||
"format",
|
||||
);
|
||||
expect(classifyFailoverReasonFromHttpStatus(422, "Unprocessable Entity")).toBe("format");
|
||||
});
|
||||
|
||||
it("treats 422 with billing message as billing instead of format", () => {
|
||||
expect(classifyFailoverReasonFromHttpStatus(422, "insufficient credits")).toBe("billing");
|
||||
});
|
||||
|
||||
it("treats HTTP 499 as transient for structured errors", () => {
|
||||
expect(classifyFailoverReasonFromHttpStatus(499)).toBe("timeout");
|
||||
expect(classifyFailoverReasonFromHttpStatus(499, "499 Client Closed Request")).toBe("timeout");
|
||||
|
|
@ -576,6 +591,19 @@ describe("isFailoverErrorMessage", () => {
|
|||
}
|
||||
});
|
||||
|
||||
it("matches z.ai network_error stop reason as timeout", () => {
|
||||
const samples = [
|
||||
"Unhandled stop reason: network_error",
|
||||
"stop reason: network_error",
|
||||
"reason: network_error",
|
||||
];
|
||||
for (const sample of samples) {
|
||||
expect(isTimeoutErrorMessage(sample)).toBe(true);
|
||||
expect(classifyFailoverReason(sample)).toBe("timeout");
|
||||
expect(isFailoverErrorMessage(sample)).toBe(true);
|
||||
}
|
||||
});
|
||||
|
||||
it("does not classify MALFORMED_FUNCTION_CALL as timeout", () => {
|
||||
const sample = "Unhandled stop reason: MALFORMED_FUNCTION_CALL";
|
||||
expect(isTimeoutErrorMessage(sample)).toBe(false);
|
||||
|
|
@ -705,6 +733,8 @@ describe("classifyFailoverReason", () => {
|
|||
"Insufficient USD or Diem balance to complete request. Visit https://venice.ai/settings/api to add credits.",
|
||||
),
|
||||
).toBe("billing");
|
||||
// OpenRouter "requires more credits" billing text
|
||||
expect(classifyFailoverReason("This model requires more credits to use")).toBe("billing");
|
||||
});
|
||||
|
||||
it("classifies internal and compatibility error messages", () => {
|
||||
|
|
|
|||
|
|
@ -288,6 +288,13 @@ function hasExplicit402BillingSignal(text: string): boolean {
|
|||
);
|
||||
}
|
||||
|
||||
function hasQuotaRefreshWindowSignal(text: string): boolean {
|
||||
return (
|
||||
text.includes("subscription quota limit") &&
|
||||
(text.includes("automatic quota refresh") || text.includes("rolling time window"))
|
||||
);
|
||||
}
|
||||
|
||||
function hasRetryable402TransientSignal(text: string): boolean {
|
||||
const hasPeriodicHint = includesAnyHint(text, PERIODIC_402_HINTS);
|
||||
const hasSpendLimit = text.includes("spend limit") || text.includes("spending limit");
|
||||
|
|
@ -313,6 +320,10 @@ function classify402Message(message: string): PaymentRequiredFailoverReason {
|
|||
return "billing";
|
||||
}
|
||||
|
||||
if (hasQuotaRefreshWindowSignal(normalized)) {
|
||||
return "rate_limit";
|
||||
}
|
||||
|
||||
if (hasExplicit402BillingSignal(normalized)) {
|
||||
return "billing";
|
||||
}
|
||||
|
|
@ -420,7 +431,7 @@ export function classifyFailoverReasonFromHttpStatus(
|
|||
if (status === 529) {
|
||||
return "overloaded";
|
||||
}
|
||||
if (status === 400) {
|
||||
if (status === 400 || status === 422) {
|
||||
// Some providers return quota/balance errors under HTTP 400, so do not
|
||||
// let the generic format fallback mask an explicit billing signal.
|
||||
if (message && isBillingErrorMessage(message)) {
|
||||
|
|
|
|||
|
|
@ -47,9 +47,9 @@ const ERROR_PATTERNS = {
|
|||
/\benotfound\b/i,
|
||||
/\beai_again\b/i,
|
||||
/without sending (?:any )?chunks?/i,
|
||||
/\bstop reason:\s*(?:abort|error|malformed_response)\b/i,
|
||||
/\breason:\s*(?:abort|error|malformed_response)\b/i,
|
||||
/\bunhandled stop reason:\s*(?:abort|error|malformed_response)\b/i,
|
||||
/\bstop reason:\s*(?:abort|error|malformed_response|network_error)\b/i,
|
||||
/\breason:\s*(?:abort|error|malformed_response|network_error)\b/i,
|
||||
/\bunhandled stop reason:\s*(?:abort|error|malformed_response|network_error)\b/i,
|
||||
],
|
||||
billing: [
|
||||
/["']?(?:status|code)["']?\s*[:=]\s*402\b|\bhttp\s*402\b|\berror(?:\s+code)?\s*[:=]?\s*402\b|\b(?:got|returned|received)\s+(?:a\s+)?402\b|^\s*402\s+payment/i,
|
||||
|
|
@ -60,6 +60,7 @@ const ERROR_PATTERNS = {
|
|||
"plans & billing",
|
||||
"insufficient balance",
|
||||
"insufficient usd or diem balance",
|
||||
/requires?\s+more\s+credits/i,
|
||||
],
|
||||
authPermanent: [
|
||||
/api[_ ]?key[_ ]?(?:revoked|invalid|deactivated|deleted)/i,
|
||||
|
|
|
|||
|
|
@ -695,6 +695,33 @@ describe("applyExtraParamsToAgent", () => {
|
|||
expect(payloads[0]?.tool_choice).toBe("auto");
|
||||
});
|
||||
|
||||
it("disables thinking instead of broadening pinned Moonshot tool_choice", () => {
|
||||
const payloads: Record<string, unknown>[] = [];
|
||||
const baseStreamFn: StreamFn = (_model, _context, options) => {
|
||||
const payload: Record<string, unknown> = {
|
||||
tool_choice: { type: "tool", name: "read" },
|
||||
};
|
||||
options?.onPayload?.(payload, _model);
|
||||
payloads.push(payload);
|
||||
return {} as ReturnType<StreamFn>;
|
||||
};
|
||||
const agent = { streamFn: baseStreamFn };
|
||||
|
||||
applyExtraParamsToAgent(agent, undefined, "moonshot", "kimi-k2.5", undefined, "low");
|
||||
|
||||
const model = {
|
||||
api: "openai-completions",
|
||||
provider: "moonshot",
|
||||
id: "kimi-k2.5",
|
||||
} as Model<"openai-completions">;
|
||||
const context: Context = { messages: [] };
|
||||
void agent.streamFn?.(model, context, {});
|
||||
|
||||
expect(payloads).toHaveLength(1);
|
||||
expect(payloads[0]?.thinking).toEqual({ type: "disabled" });
|
||||
expect(payloads[0]?.tool_choice).toEqual({ type: "tool", name: "read" });
|
||||
});
|
||||
|
||||
it("respects explicit Moonshot thinking param from model config", () => {
|
||||
const payloads: Record<string, unknown>[] = [];
|
||||
const baseStreamFn: StreamFn = (_model, _context, options) => {
|
||||
|
|
@ -732,6 +759,85 @@ describe("applyExtraParamsToAgent", () => {
|
|||
expect(payloads[0]?.thinking).toEqual({ type: "disabled" });
|
||||
});
|
||||
|
||||
it("applies Moonshot payload compatibility to Ollama Kimi cloud models", () => {
|
||||
const payloads: Record<string, unknown>[] = [];
|
||||
const baseStreamFn: StreamFn = (_model, _context, options) => {
|
||||
const payload: Record<string, unknown> = { tool_choice: "required" };
|
||||
options?.onPayload?.(payload, _model);
|
||||
payloads.push(payload);
|
||||
return {} as ReturnType<StreamFn>;
|
||||
};
|
||||
const agent = { streamFn: baseStreamFn };
|
||||
|
||||
applyExtraParamsToAgent(agent, undefined, "ollama", "kimi-k2.5:cloud", undefined, "low");
|
||||
|
||||
const model = {
|
||||
api: "openai-completions",
|
||||
provider: "ollama",
|
||||
id: "kimi-k2.5:cloud",
|
||||
} as Model<"openai-completions">;
|
||||
const context: Context = { messages: [] };
|
||||
void agent.streamFn?.(model, context, {});
|
||||
|
||||
expect(payloads).toHaveLength(1);
|
||||
expect(payloads[0]?.thinking).toEqual({ type: "enabled" });
|
||||
expect(payloads[0]?.tool_choice).toBe("auto");
|
||||
});
|
||||
|
||||
it("maps thinkingLevel=off for Ollama Kimi cloud models through Moonshot compatibility", () => {
|
||||
const payloads: Record<string, unknown>[] = [];
|
||||
const baseStreamFn: StreamFn = (_model, _context, options) => {
|
||||
const payload: Record<string, unknown> = {};
|
||||
options?.onPayload?.(payload, _model);
|
||||
payloads.push(payload);
|
||||
return {} as ReturnType<StreamFn>;
|
||||
};
|
||||
const agent = { streamFn: baseStreamFn };
|
||||
|
||||
applyExtraParamsToAgent(agent, undefined, "ollama", "kimi-k2.5:cloud", undefined, "off");
|
||||
|
||||
const model = {
|
||||
api: "openai-completions",
|
||||
provider: "ollama",
|
||||
id: "kimi-k2.5:cloud",
|
||||
} as Model<"openai-completions">;
|
||||
const context: Context = { messages: [] };
|
||||
void agent.streamFn?.(model, context, {});
|
||||
|
||||
expect(payloads).toHaveLength(1);
|
||||
expect(payloads[0]?.thinking).toEqual({ type: "disabled" });
|
||||
});
|
||||
|
||||
it("disables thinking instead of broadening pinned Ollama Kimi cloud tool_choice", () => {
|
||||
const payloads: Record<string, unknown>[] = [];
|
||||
const baseStreamFn: StreamFn = (_model, _context, options) => {
|
||||
const payload: Record<string, unknown> = {
|
||||
tool_choice: { type: "function", function: { name: "read" } },
|
||||
};
|
||||
options?.onPayload?.(payload, _model);
|
||||
payloads.push(payload);
|
||||
return {} as ReturnType<StreamFn>;
|
||||
};
|
||||
const agent = { streamFn: baseStreamFn };
|
||||
|
||||
applyExtraParamsToAgent(agent, undefined, "ollama", "kimi-k2.5:cloud", undefined, "low");
|
||||
|
||||
const model = {
|
||||
api: "openai-completions",
|
||||
provider: "ollama",
|
||||
id: "kimi-k2.5:cloud",
|
||||
} as Model<"openai-completions">;
|
||||
const context: Context = { messages: [] };
|
||||
void agent.streamFn?.(model, context, {});
|
||||
|
||||
expect(payloads).toHaveLength(1);
|
||||
expect(payloads[0]?.thinking).toEqual({ type: "disabled" });
|
||||
expect(payloads[0]?.tool_choice).toEqual({
|
||||
type: "function",
|
||||
function: { name: "read" },
|
||||
});
|
||||
});
|
||||
|
||||
it("does not rewrite tool schema for kimi-coding (native Anthropic format)", () => {
|
||||
const payloads: Record<string, unknown>[] = [];
|
||||
const baseStreamFn: StreamFn = (_model, _context, options) => {
|
||||
|
|
|
|||
|
|
@ -4,41 +4,67 @@ import { onSessionTranscriptUpdate } from "../../sessions/transcript-events.js";
|
|||
const {
|
||||
hookRunner,
|
||||
ensureRuntimePluginsLoaded,
|
||||
resolveContextEngineMock,
|
||||
resolveModelMock,
|
||||
sessionCompactImpl,
|
||||
triggerInternalHook,
|
||||
sanitizeSessionHistoryMock,
|
||||
contextEngineCompactMock,
|
||||
} = vi.hoisted(() => ({
|
||||
hookRunner: {
|
||||
hasHooks: vi.fn(),
|
||||
runBeforeCompaction: vi.fn(),
|
||||
runAfterCompaction: vi.fn(),
|
||||
},
|
||||
ensureRuntimePluginsLoaded: vi.fn(),
|
||||
resolveModelMock: vi.fn(() => ({
|
||||
model: { provider: "openai", api: "responses", id: "fake", input: [] },
|
||||
error: null,
|
||||
authStorage: { setRuntimeApiKey: vi.fn() },
|
||||
modelRegistry: {},
|
||||
})),
|
||||
sessionCompactImpl: vi.fn(async () => ({
|
||||
summary: "summary",
|
||||
firstKeptEntryId: "entry-1",
|
||||
tokensBefore: 120,
|
||||
details: { ok: true },
|
||||
})),
|
||||
triggerInternalHook: vi.fn(),
|
||||
sanitizeSessionHistoryMock: vi.fn(async (params: { messages: unknown[] }) => params.messages),
|
||||
contextEngineCompactMock: vi.fn(async () => ({
|
||||
getMemorySearchManagerMock,
|
||||
resolveMemorySearchConfigMock,
|
||||
resolveSessionAgentIdMock,
|
||||
} = vi.hoisted(() => {
|
||||
const contextEngineCompactMock = vi.fn(async () => ({
|
||||
ok: true as boolean,
|
||||
compacted: true as boolean,
|
||||
reason: undefined as string | undefined,
|
||||
result: { summary: "engine-summary", tokensAfter: 50 } as
|
||||
| { summary: string; tokensAfter: number }
|
||||
| undefined,
|
||||
})),
|
||||
}));
|
||||
}));
|
||||
|
||||
return {
|
||||
hookRunner: {
|
||||
hasHooks: vi.fn(),
|
||||
runBeforeCompaction: vi.fn(),
|
||||
runAfterCompaction: vi.fn(),
|
||||
},
|
||||
ensureRuntimePluginsLoaded: vi.fn(),
|
||||
resolveContextEngineMock: vi.fn(async () => ({
|
||||
info: { ownsCompaction: true },
|
||||
compact: contextEngineCompactMock,
|
||||
})),
|
||||
resolveModelMock: vi.fn(() => ({
|
||||
model: { provider: "openai", api: "responses", id: "fake", input: [] },
|
||||
error: null,
|
||||
authStorage: { setRuntimeApiKey: vi.fn() },
|
||||
modelRegistry: {},
|
||||
})),
|
||||
sessionCompactImpl: vi.fn(async () => ({
|
||||
summary: "summary",
|
||||
firstKeptEntryId: "entry-1",
|
||||
tokensBefore: 120,
|
||||
details: { ok: true },
|
||||
})),
|
||||
triggerInternalHook: vi.fn(),
|
||||
sanitizeSessionHistoryMock: vi.fn(async (params: { messages: unknown[] }) => params.messages),
|
||||
contextEngineCompactMock,
|
||||
getMemorySearchManagerMock: vi.fn(async () => ({
|
||||
manager: {
|
||||
sync: vi.fn(async () => {}),
|
||||
},
|
||||
})),
|
||||
resolveMemorySearchConfigMock: vi.fn(() => ({
|
||||
sources: ["sessions"],
|
||||
sync: {
|
||||
sessions: {
|
||||
postCompactionForce: true,
|
||||
},
|
||||
},
|
||||
})),
|
||||
resolveSessionAgentIdMock: vi.fn(() => "main"),
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock("../../plugins/hook-runner-global.js", () => ({
|
||||
getGlobalHookRunner: () => hookRunner,
|
||||
|
|
@ -135,10 +161,7 @@ vi.mock("../session-write-lock.js", () => ({
|
|||
|
||||
vi.mock("../../context-engine/index.js", () => ({
|
||||
ensureContextEnginesInitialized: vi.fn(),
|
||||
resolveContextEngine: vi.fn(async () => ({
|
||||
info: { ownsCompaction: true },
|
||||
compact: contextEngineCompactMock,
|
||||
})),
|
||||
resolveContextEngine: resolveContextEngineMock,
|
||||
}));
|
||||
|
||||
vi.mock("../../process/command-queue.js", () => ({
|
||||
|
|
@ -211,9 +234,18 @@ vi.mock("../agent-paths.js", () => ({
|
|||
}));
|
||||
|
||||
vi.mock("../agent-scope.js", () => ({
|
||||
resolveSessionAgentId: resolveSessionAgentIdMock,
|
||||
resolveSessionAgentIds: vi.fn(() => ({ defaultAgentId: "main", sessionAgentId: "main" })),
|
||||
}));
|
||||
|
||||
vi.mock("../memory-search.js", () => ({
|
||||
resolveMemorySearchConfig: resolveMemorySearchConfigMock,
|
||||
}));
|
||||
|
||||
vi.mock("../../memory/index.js", () => ({
|
||||
getMemorySearchManager: getMemorySearchManagerMock,
|
||||
}));
|
||||
|
||||
vi.mock("../date-time.js", () => ({
|
||||
formatUserTime: vi.fn(() => ""),
|
||||
resolveUserTimeFormat: vi.fn(() => ""),
|
||||
|
|
@ -314,6 +346,23 @@ describe("compactEmbeddedPiSessionDirect hooks", () => {
|
|||
sanitizeSessionHistoryMock.mockImplementation(async (params: { messages: unknown[] }) => {
|
||||
return params.messages;
|
||||
});
|
||||
getMemorySearchManagerMock.mockReset();
|
||||
getMemorySearchManagerMock.mockResolvedValue({
|
||||
manager: {
|
||||
sync: vi.fn(async () => {}),
|
||||
},
|
||||
});
|
||||
resolveMemorySearchConfigMock.mockReset();
|
||||
resolveMemorySearchConfigMock.mockReturnValue({
|
||||
sources: ["sessions"],
|
||||
sync: {
|
||||
sessions: {
|
||||
postCompactionForce: true,
|
||||
},
|
||||
},
|
||||
});
|
||||
resolveSessionAgentIdMock.mockReset();
|
||||
resolveSessionAgentIdMock.mockReturnValue("main");
|
||||
unregisterApiProviders(getCustomApiRegistrySourceId("ollama"));
|
||||
});
|
||||
|
||||
|
|
@ -452,6 +501,161 @@ describe("compactEmbeddedPiSessionDirect hooks", () => {
|
|||
}
|
||||
});
|
||||
|
||||
it("skips sync in await mode when postCompactionForce is false", async () => {
|
||||
const sync = vi.fn(async () => {});
|
||||
getMemorySearchManagerMock.mockResolvedValue({ manager: { sync } });
|
||||
resolveMemorySearchConfigMock.mockReturnValue({
|
||||
sources: ["sessions"],
|
||||
sync: {
|
||||
sessions: {
|
||||
postCompactionForce: false,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const result = await compactEmbeddedPiSessionDirect({
|
||||
sessionId: "session-1",
|
||||
sessionKey: "agent:main:session-1",
|
||||
sessionFile: "/tmp/session.jsonl",
|
||||
workspaceDir: "/tmp",
|
||||
customInstructions: "focus on decisions",
|
||||
config: {
|
||||
agents: {
|
||||
defaults: {
|
||||
compaction: {
|
||||
postIndexSync: "await",
|
||||
},
|
||||
},
|
||||
},
|
||||
} as never,
|
||||
});
|
||||
|
||||
expect(result.ok).toBe(true);
|
||||
expect(resolveSessionAgentIdMock).toHaveBeenCalledWith({
|
||||
sessionKey: "agent:main:session-1",
|
||||
config: expect.any(Object),
|
||||
});
|
||||
expect(getMemorySearchManagerMock).not.toHaveBeenCalled();
|
||||
expect(sync).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("awaits post-compaction memory sync in await mode when postCompactionForce is true", async () => {
|
||||
let releaseSync: (() => void) | undefined;
|
||||
const syncGate = new Promise<void>((resolve) => {
|
||||
releaseSync = resolve;
|
||||
});
|
||||
const sync = vi.fn(() => syncGate);
|
||||
getMemorySearchManagerMock.mockResolvedValue({ manager: { sync } });
|
||||
let settled = false;
|
||||
|
||||
const resultPromise = compactEmbeddedPiSessionDirect({
|
||||
sessionId: "session-1",
|
||||
sessionKey: "agent:main:session-1",
|
||||
sessionFile: "/tmp/session.jsonl",
|
||||
workspaceDir: "/tmp",
|
||||
customInstructions: "focus on decisions",
|
||||
config: {
|
||||
agents: {
|
||||
defaults: {
|
||||
compaction: {
|
||||
postIndexSync: "await",
|
||||
},
|
||||
},
|
||||
},
|
||||
} as never,
|
||||
});
|
||||
|
||||
void resultPromise.then(() => {
|
||||
settled = true;
|
||||
});
|
||||
await vi.waitFor(() => {
|
||||
expect(sync).toHaveBeenCalledWith({
|
||||
reason: "post-compaction",
|
||||
sessionFiles: ["/tmp/session.jsonl"],
|
||||
});
|
||||
});
|
||||
expect(settled).toBe(false);
|
||||
releaseSync?.();
|
||||
const result = await resultPromise;
|
||||
expect(result.ok).toBe(true);
|
||||
expect(settled).toBe(true);
|
||||
});
|
||||
|
||||
it("skips post-compaction memory sync when the mode is off", async () => {
|
||||
const sync = vi.fn(async () => {});
|
||||
getMemorySearchManagerMock.mockResolvedValue({ manager: { sync } });
|
||||
|
||||
const result = await compactEmbeddedPiSessionDirect({
|
||||
sessionId: "session-1",
|
||||
sessionKey: "agent:main:session-1",
|
||||
sessionFile: "/tmp/session.jsonl",
|
||||
workspaceDir: "/tmp",
|
||||
customInstructions: "focus on decisions",
|
||||
config: {
|
||||
agents: {
|
||||
defaults: {
|
||||
compaction: {
|
||||
postIndexSync: "off",
|
||||
},
|
||||
},
|
||||
},
|
||||
} as never,
|
||||
});
|
||||
|
||||
expect(result.ok).toBe(true);
|
||||
expect(resolveSessionAgentIdMock).not.toHaveBeenCalled();
|
||||
expect(getMemorySearchManagerMock).not.toHaveBeenCalled();
|
||||
expect(sync).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("fires post-compaction memory sync without awaiting it in async mode", async () => {
|
||||
const sync = vi.fn(async () => {});
|
||||
let resolveManager: ((value: { manager: { sync: typeof sync } }) => void) | undefined;
|
||||
const managerGate = new Promise<{ manager: { sync: typeof sync } }>((resolve) => {
|
||||
resolveManager = resolve;
|
||||
});
|
||||
getMemorySearchManagerMock.mockImplementation(() => managerGate);
|
||||
let settled = false;
|
||||
|
||||
const resultPromise = compactEmbeddedPiSessionDirect({
|
||||
sessionId: "session-1",
|
||||
sessionKey: "agent:main:session-1",
|
||||
sessionFile: "/tmp/session.jsonl",
|
||||
workspaceDir: "/tmp",
|
||||
customInstructions: "focus on decisions",
|
||||
config: {
|
||||
agents: {
|
||||
defaults: {
|
||||
compaction: {
|
||||
postIndexSync: "async",
|
||||
},
|
||||
},
|
||||
},
|
||||
} as never,
|
||||
});
|
||||
|
||||
await vi.waitFor(() => {
|
||||
expect(getMemorySearchManagerMock).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
void resultPromise.then(() => {
|
||||
settled = true;
|
||||
});
|
||||
await vi.waitFor(() => {
|
||||
expect(settled).toBe(true);
|
||||
});
|
||||
expect(sync).not.toHaveBeenCalled();
|
||||
resolveManager?.({ manager: { sync } });
|
||||
await managerGate;
|
||||
await vi.waitFor(() => {
|
||||
expect(sync).toHaveBeenCalledWith({
|
||||
reason: "post-compaction",
|
||||
sessionFiles: ["/tmp/session.jsonl"],
|
||||
});
|
||||
});
|
||||
const result = await resultPromise;
|
||||
expect(result.ok).toBe(true);
|
||||
});
|
||||
|
||||
it("registers the Ollama api provider before compaction", async () => {
|
||||
resolveModelMock.mockReturnValue({
|
||||
model: {
|
||||
|
|
@ -493,6 +697,11 @@ describe("compactEmbeddedPiSession hooks (ownsCompaction engine)", () => {
|
|||
hookRunner.hasHooks.mockReset();
|
||||
hookRunner.runBeforeCompaction.mockReset();
|
||||
hookRunner.runAfterCompaction.mockReset();
|
||||
resolveContextEngineMock.mockReset();
|
||||
resolveContextEngineMock.mockResolvedValue({
|
||||
info: { ownsCompaction: true },
|
||||
compact: contextEngineCompactMock,
|
||||
});
|
||||
contextEngineCompactMock.mockReset();
|
||||
contextEngineCompactMock.mockResolvedValue({
|
||||
ok: true,
|
||||
|
|
@ -546,8 +755,47 @@ describe("compactEmbeddedPiSession hooks (ownsCompaction engine)", () => {
|
|||
);
|
||||
});
|
||||
|
||||
it("emits a transcript update and post-compaction memory sync on the engine-owned path", async () => {
|
||||
const listener = vi.fn();
|
||||
const cleanup = onSessionTranscriptUpdate(listener);
|
||||
const sync = vi.fn(async () => {});
|
||||
getMemorySearchManagerMock.mockResolvedValue({ manager: { sync } });
|
||||
|
||||
try {
|
||||
const result = await compactEmbeddedPiSession({
|
||||
sessionId: "session-1",
|
||||
sessionKey: "agent:main:session-1",
|
||||
sessionFile: " /tmp/session.jsonl ",
|
||||
workspaceDir: "/tmp",
|
||||
customInstructions: "focus on decisions",
|
||||
enqueue: (task) => task(),
|
||||
config: {
|
||||
agents: {
|
||||
defaults: {
|
||||
compaction: {
|
||||
postIndexSync: "await",
|
||||
},
|
||||
},
|
||||
},
|
||||
} as never,
|
||||
});
|
||||
|
||||
expect(result.ok).toBe(true);
|
||||
expect(listener).toHaveBeenCalledTimes(1);
|
||||
expect(listener).toHaveBeenCalledWith({ sessionFile: "/tmp/session.jsonl" });
|
||||
expect(sync).toHaveBeenCalledWith({
|
||||
reason: "post-compaction",
|
||||
sessionFiles: ["/tmp/session.jsonl"],
|
||||
});
|
||||
} finally {
|
||||
cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
it("does not fire after_compaction when compaction fails", async () => {
|
||||
hookRunner.hasHooks.mockReturnValue(true);
|
||||
const sync = vi.fn(async () => {});
|
||||
getMemorySearchManagerMock.mockResolvedValue({ manager: { sync } });
|
||||
contextEngineCompactMock.mockResolvedValue({
|
||||
ok: false,
|
||||
compacted: false,
|
||||
|
|
@ -567,6 +815,44 @@ describe("compactEmbeddedPiSession hooks (ownsCompaction engine)", () => {
|
|||
expect(result.ok).toBe(false);
|
||||
expect(hookRunner.runBeforeCompaction).toHaveBeenCalled();
|
||||
expect(hookRunner.runAfterCompaction).not.toHaveBeenCalled();
|
||||
expect(sync).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("does not duplicate transcript updates or sync in the wrapper when the engine delegates compaction", async () => {
|
||||
const listener = vi.fn();
|
||||
const cleanup = onSessionTranscriptUpdate(listener);
|
||||
const sync = vi.fn(async () => {});
|
||||
getMemorySearchManagerMock.mockResolvedValue({ manager: { sync } });
|
||||
resolveContextEngineMock.mockResolvedValue({
|
||||
info: { ownsCompaction: false },
|
||||
compact: contextEngineCompactMock,
|
||||
});
|
||||
|
||||
try {
|
||||
const result = await compactEmbeddedPiSession({
|
||||
sessionId: "session-1",
|
||||
sessionKey: "agent:main:session-1",
|
||||
sessionFile: "/tmp/session.jsonl",
|
||||
workspaceDir: "/tmp",
|
||||
customInstructions: "focus on decisions",
|
||||
enqueue: (task) => task(),
|
||||
config: {
|
||||
agents: {
|
||||
defaults: {
|
||||
compaction: {
|
||||
postIndexSync: "await",
|
||||
},
|
||||
},
|
||||
},
|
||||
} as never,
|
||||
});
|
||||
|
||||
expect(result.ok).toBe(true);
|
||||
expect(listener).not.toHaveBeenCalled();
|
||||
expect(sync).not.toHaveBeenCalled();
|
||||
} finally {
|
||||
cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
it("catches and logs hook exceptions without aborting compaction", async () => {
|
||||
|
|
|
|||
|
|
@ -18,6 +18,7 @@ import {
|
|||
import { createInternalHookEvent, triggerInternalHook } from "../../hooks/internal-hooks.js";
|
||||
import { getMachineDisplayName } from "../../infra/machine-name.js";
|
||||
import { generateSecureToken } from "../../infra/secure-random.js";
|
||||
import { getMemorySearchManager } from "../../memory/index.js";
|
||||
import { getGlobalHookRunner } from "../../plugins/hook-runner-global.js";
|
||||
import { type enqueueCommand, enqueueCommandInLane } from "../../process/command-queue.js";
|
||||
import { isCronSessionKey, isSubagentSessionKey } from "../../routing/session-key.js";
|
||||
|
|
@ -30,7 +31,7 @@ import { resolveUserPath } from "../../utils.js";
|
|||
import { normalizeMessageChannel } from "../../utils/message-channel.js";
|
||||
import { isReasoningTagProvider } from "../../utils/provider-utils.js";
|
||||
import { resolveOpenClawAgentDir } from "../agent-paths.js";
|
||||
import { resolveSessionAgentIds } from "../agent-scope.js";
|
||||
import { resolveSessionAgentId, resolveSessionAgentIds } from "../agent-scope.js";
|
||||
import type { ExecElevatedDefaults } from "../bash-tools.js";
|
||||
import { makeBootstrapWarn, resolveBootstrapContextForRun } from "../bootstrap-files.js";
|
||||
import { listChannelSupportedActions, resolveChannelMessageToolHints } from "../channel-tools.js";
|
||||
|
|
@ -39,6 +40,7 @@ import { ensureCustomApiRegistered } from "../custom-api-registry.js";
|
|||
import { formatUserTime, resolveUserTimeFormat, resolveUserTimezone } from "../date-time.js";
|
||||
import { DEFAULT_CONTEXT_TOKENS, DEFAULT_MODEL, DEFAULT_PROVIDER } from "../defaults.js";
|
||||
import { resolveOpenClawDocsPath } from "../docs-path.js";
|
||||
import { resolveMemorySearchConfig } from "../memory-search.js";
|
||||
import { getApiKeyForModel, resolveModelAuthMode } from "../model-auth.js";
|
||||
import { supportsModelTools } from "../model-tool-support.js";
|
||||
import { ensureOpenClawModelsJson } from "../models-config.js";
|
||||
|
|
@ -268,6 +270,95 @@ function classifyCompactionReason(reason?: string): string {
|
|||
return "unknown";
|
||||
}
|
||||
|
||||
function resolvePostCompactionIndexSyncMode(config?: OpenClawConfig): "off" | "async" | "await" {
|
||||
const mode = config?.agents?.defaults?.compaction?.postIndexSync;
|
||||
if (mode === "off" || mode === "async" || mode === "await") {
|
||||
return mode;
|
||||
}
|
||||
return "async";
|
||||
}
|
||||
|
||||
async function runPostCompactionSessionMemorySync(params: {
|
||||
config?: OpenClawConfig;
|
||||
sessionKey?: string;
|
||||
sessionFile: string;
|
||||
}): Promise<void> {
|
||||
if (!params.config) {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const sessionFile = params.sessionFile.trim();
|
||||
if (!sessionFile) {
|
||||
return;
|
||||
}
|
||||
const agentId = resolveSessionAgentId({
|
||||
sessionKey: params.sessionKey,
|
||||
config: params.config,
|
||||
});
|
||||
const resolvedMemory = resolveMemorySearchConfig(params.config, agentId);
|
||||
if (!resolvedMemory || !resolvedMemory.sources.includes("sessions")) {
|
||||
return;
|
||||
}
|
||||
if (!resolvedMemory.sync.sessions.postCompactionForce) {
|
||||
return;
|
||||
}
|
||||
const { manager } = await getMemorySearchManager({
|
||||
cfg: params.config,
|
||||
agentId,
|
||||
});
|
||||
if (!manager?.sync) {
|
||||
return;
|
||||
}
|
||||
const syncTask = manager.sync({
|
||||
reason: "post-compaction",
|
||||
sessionFiles: [sessionFile],
|
||||
});
|
||||
await syncTask;
|
||||
} catch (err) {
|
||||
log.warn(`memory sync skipped (post-compaction): ${String(err)}`);
|
||||
}
|
||||
}
|
||||
|
||||
function syncPostCompactionSessionMemory(params: {
|
||||
config?: OpenClawConfig;
|
||||
sessionKey?: string;
|
||||
sessionFile: string;
|
||||
mode: "off" | "async" | "await";
|
||||
}): Promise<void> {
|
||||
if (params.mode === "off" || !params.config) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
const syncTask = runPostCompactionSessionMemorySync({
|
||||
config: params.config,
|
||||
sessionKey: params.sessionKey,
|
||||
sessionFile: params.sessionFile,
|
||||
});
|
||||
if (params.mode === "await") {
|
||||
return syncTask;
|
||||
}
|
||||
void syncTask;
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
async function runPostCompactionSideEffects(params: {
|
||||
config?: OpenClawConfig;
|
||||
sessionKey?: string;
|
||||
sessionFile: string;
|
||||
}): Promise<void> {
|
||||
const sessionFile = params.sessionFile.trim();
|
||||
if (!sessionFile) {
|
||||
return;
|
||||
}
|
||||
emitSessionTranscriptUpdate(sessionFile);
|
||||
await syncPostCompactionSessionMemory({
|
||||
config: params.config,
|
||||
sessionKey: params.sessionKey,
|
||||
sessionFile,
|
||||
mode: resolvePostCompactionIndexSyncMode(params.config),
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Core compaction logic without lane queueing.
|
||||
* Use this when already inside a session/global lane to avoid deadlocks.
|
||||
|
|
@ -809,7 +900,11 @@ export async function compactEmbeddedPiSessionDirect(
|
|||
const result = await compactWithSafetyTimeout(() =>
|
||||
session.compact(params.customInstructions),
|
||||
);
|
||||
emitSessionTranscriptUpdate(params.sessionFile);
|
||||
await runPostCompactionSideEffects({
|
||||
config: params.config,
|
||||
sessionKey: params.sessionKey,
|
||||
sessionFile: params.sessionFile,
|
||||
});
|
||||
// Estimate tokens after compaction by summing token estimates for remaining messages
|
||||
let tokensAfter: number | undefined;
|
||||
try {
|
||||
|
|
@ -991,6 +1086,7 @@ export async function compactEmbeddedPiSession(
|
|||
}
|
||||
const result = await contextEngine.compact({
|
||||
sessionId: params.sessionId,
|
||||
sessionKey: params.sessionKey,
|
||||
sessionFile: params.sessionFile,
|
||||
tokenBudget: ceCtxInfo.tokens,
|
||||
currentTokenCount: params.currentTokenCount,
|
||||
|
|
@ -998,6 +1094,13 @@ export async function compactEmbeddedPiSession(
|
|||
force: params.trigger === "manual",
|
||||
runtimeContext: params as Record<string, unknown>,
|
||||
});
|
||||
if (engineOwnsCompaction && result.ok && result.compacted) {
|
||||
await runPostCompactionSideEffects({
|
||||
config: params.config,
|
||||
sessionKey: params.sessionKey,
|
||||
sessionFile: params.sessionFile,
|
||||
});
|
||||
}
|
||||
if (result.ok && result.compacted && hookRunner?.hasHooks("after_compaction")) {
|
||||
try {
|
||||
await hookRunner.runAfterCompaction(
|
||||
|
|
|
|||
|
|
@ -16,6 +16,7 @@ import {
|
|||
createMoonshotThinkingWrapper,
|
||||
createSiliconFlowThinkingWrapper,
|
||||
resolveMoonshotThinkingType,
|
||||
shouldApplyMoonshotPayloadCompat,
|
||||
shouldApplySiliconFlowThinkingOffCompat,
|
||||
} from "./moonshot-stream-wrappers.js";
|
||||
import {
|
||||
|
|
@ -373,7 +374,7 @@ export function applyExtraParamsToAgent(
|
|||
agent.streamFn = createSiliconFlowThinkingWrapper(agent.streamFn);
|
||||
}
|
||||
|
||||
if (provider === "moonshot") {
|
||||
if (shouldApplyMoonshotPayloadCompat({ provider, modelId })) {
|
||||
const moonshotThinkingType = resolveMoonshotThinkingType({
|
||||
configuredThinking: merged?.thinking,
|
||||
thinkingLevel,
|
||||
|
|
|
|||
|
|
@ -915,6 +915,43 @@ describe("resolveModel", () => {
|
|||
});
|
||||
});
|
||||
|
||||
it("lets provider config override registry-found kimi user agent headers", () => {
|
||||
mockDiscoveredModel({
|
||||
provider: "kimi-coding",
|
||||
modelId: "k2p5",
|
||||
templateModel: {
|
||||
...buildForwardCompatTemplate({
|
||||
id: "k2p5",
|
||||
name: "Kimi for Coding",
|
||||
provider: "kimi-coding",
|
||||
api: "anthropic-messages",
|
||||
baseUrl: "https://api.kimi.com/coding/",
|
||||
}),
|
||||
headers: { "User-Agent": "claude-code/0.1.0" },
|
||||
},
|
||||
});
|
||||
|
||||
const cfg = {
|
||||
models: {
|
||||
providers: {
|
||||
"kimi-coding": {
|
||||
headers: {
|
||||
"User-Agent": "custom-kimi-client/1.0",
|
||||
"X-Kimi-Tenant": "tenant-a",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
} as unknown as OpenClawConfig;
|
||||
|
||||
const result = resolveModel("kimi-coding", "k2p5", "/tmp/agent", cfg);
|
||||
expect(result.error).toBeUndefined();
|
||||
expect((result.model as unknown as { headers?: Record<string, string> }).headers).toEqual({
|
||||
"User-Agent": "custom-kimi-client/1.0",
|
||||
"X-Kimi-Tenant": "tenant-a",
|
||||
});
|
||||
});
|
||||
|
||||
it("does not override when no provider config exists", () => {
|
||||
mockDiscoveredModel({
|
||||
provider: "anthropic",
|
||||
|
|
|
|||
|
|
@ -35,6 +35,14 @@ function isMoonshotToolChoiceCompatible(toolChoice: unknown): boolean {
|
|||
return false;
|
||||
}
|
||||
|
||||
function isPinnedToolChoice(toolChoice: unknown): boolean {
|
||||
if (!toolChoice || typeof toolChoice !== "object" || Array.isArray(toolChoice)) {
|
||||
return false;
|
||||
}
|
||||
const typeValue = (toolChoice as Record<string, unknown>).type;
|
||||
return typeValue === "tool" || typeValue === "function";
|
||||
}
|
||||
|
||||
export function shouldApplySiliconFlowThinkingOffCompat(params: {
|
||||
provider: string;
|
||||
modelId: string;
|
||||
|
|
@ -47,6 +55,27 @@ export function shouldApplySiliconFlowThinkingOffCompat(params: {
|
|||
);
|
||||
}
|
||||
|
||||
export function shouldApplyMoonshotPayloadCompat(params: {
|
||||
provider: string;
|
||||
modelId: string;
|
||||
}): boolean {
|
||||
const normalizedProvider = params.provider.trim().toLowerCase();
|
||||
const normalizedModelId = params.modelId.trim().toLowerCase();
|
||||
|
||||
if (normalizedProvider === "moonshot") {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Ollama Cloud exposes Kimi variants through OpenAI-compatible model IDs such
|
||||
// as `kimi-k2.5:cloud`, but they still need the same payload normalization as
|
||||
// native Moonshot endpoints when thinking/tool_choice are enabled together.
|
||||
return (
|
||||
normalizedProvider === "ollama" &&
|
||||
normalizedModelId.startsWith("kimi-k") &&
|
||||
normalizedModelId.includes(":cloud")
|
||||
);
|
||||
}
|
||||
|
||||
export function createSiliconFlowThinkingWrapper(baseStreamFn: StreamFn | undefined): StreamFn {
|
||||
const underlying = baseStreamFn ?? streamSimple;
|
||||
return (model, context, options) => {
|
||||
|
|
@ -103,7 +132,11 @@ export function createMoonshotThinkingWrapper(
|
|||
effectiveThinkingType === "enabled" &&
|
||||
!isMoonshotToolChoiceCompatible(payloadObj.tool_choice)
|
||||
) {
|
||||
payloadObj.tool_choice = "auto";
|
||||
if (payloadObj.tool_choice === "required") {
|
||||
payloadObj.tool_choice = "auto";
|
||||
} else if (isPinnedToolChoice(payloadObj.tool_choice)) {
|
||||
payloadObj.thinking = { type: "disabled" };
|
||||
}
|
||||
}
|
||||
}
|
||||
return originalOnPayload?.(payload, model);
|
||||
|
|
|
|||
|
|
@ -1053,6 +1053,7 @@ export async function runEmbeddedPiAgent(
|
|||
try {
|
||||
compactResult = await contextEngine.compact({
|
||||
sessionId: params.sessionId,
|
||||
sessionKey: params.sessionKey,
|
||||
sessionFile: params.sessionFile,
|
||||
tokenBudget: ctxInfo.tokens,
|
||||
...(observedOverflowTokens !== undefined
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import type { AgentMessage } from "@mariozechner/pi-agent-core";
|
||||
import type { Api, Model } from "@mariozechner/pi-ai";
|
||||
import type {
|
||||
AuthStorage,
|
||||
|
|
@ -9,6 +10,14 @@ import type {
|
|||
ToolDefinition,
|
||||
} from "@mariozechner/pi-coding-agent";
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import type {
|
||||
AssembleResult,
|
||||
BootstrapResult,
|
||||
CompactResult,
|
||||
ContextEngineInfo,
|
||||
IngestBatchResult,
|
||||
IngestResult,
|
||||
} from "../../../context-engine/types.js";
|
||||
import { createHostSandboxFsBridge } from "../../test-helpers/host-sandbox-fs-bridge.js";
|
||||
import { createPiToolsSandboxContext } from "../../test-helpers/pi-tools-sandbox-context.js";
|
||||
|
||||
|
|
@ -23,7 +32,7 @@ const hoisted = vi.hoisted(() => {
|
|||
getLeafEntry: vi.fn(() => null),
|
||||
branch: vi.fn(),
|
||||
resetLeaf: vi.fn(),
|
||||
buildSessionContext: vi.fn(() => ({ messages: [] })),
|
||||
buildSessionContext: vi.fn<() => { messages: AgentMessage[] }>(() => ({ messages: [] })),
|
||||
appendCustomEntry: vi.fn(),
|
||||
};
|
||||
return {
|
||||
|
|
@ -240,6 +249,14 @@ function createSubscriptionMock() {
|
|||
};
|
||||
}
|
||||
|
||||
const testModel = {
|
||||
api: "openai-completions",
|
||||
provider: "openai",
|
||||
compat: {},
|
||||
contextWindow: 8192,
|
||||
input: ["text"],
|
||||
} as unknown as Model<Api>;
|
||||
|
||||
describe("runEmbeddedAttempt sessions_spawn workspace inheritance", () => {
|
||||
const tempPaths: string[] = [];
|
||||
|
||||
|
|
@ -326,14 +343,6 @@ describe("runEmbeddedAttempt sessions_spawn workspace inheritance", () => {
|
|||
},
|
||||
);
|
||||
|
||||
const model = {
|
||||
api: "openai-completions",
|
||||
provider: "openai",
|
||||
compat: {},
|
||||
contextWindow: 8192,
|
||||
input: ["text"],
|
||||
} as unknown as Model<Api>;
|
||||
|
||||
const result = await runEmbeddedAttempt({
|
||||
sessionId: "embedded-session",
|
||||
sessionKey: "agent:main:main",
|
||||
|
|
@ -346,7 +355,7 @@ describe("runEmbeddedAttempt sessions_spawn workspace inheritance", () => {
|
|||
runId: "run-1",
|
||||
provider: "openai",
|
||||
modelId: "gpt-test",
|
||||
model,
|
||||
model: testModel,
|
||||
authStorage: {} as AuthStorage,
|
||||
modelRegistry: {} as ModelRegistry,
|
||||
thinkLevel: "off",
|
||||
|
|
@ -372,3 +381,243 @@ describe("runEmbeddedAttempt sessions_spawn workspace inheritance", () => {
|
|||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("runEmbeddedAttempt context engine sessionKey forwarding", () => {
|
||||
const tempPaths: string[] = [];
|
||||
const sessionKey = "agent:main:discord:channel:test-ctx-engine";
|
||||
|
||||
beforeEach(() => {
|
||||
hoisted.createAgentSessionMock.mockReset();
|
||||
hoisted.sessionManagerOpenMock.mockReset().mockReturnValue(hoisted.sessionManager);
|
||||
hoisted.resolveSandboxContextMock.mockReset();
|
||||
hoisted.subscribeEmbeddedPiSessionMock.mockReset().mockImplementation(createSubscriptionMock);
|
||||
hoisted.acquireSessionWriteLockMock.mockReset().mockResolvedValue({
|
||||
release: async () => {},
|
||||
});
|
||||
hoisted.sessionManager.getLeafEntry.mockReset().mockReturnValue(null);
|
||||
hoisted.sessionManager.branch.mockReset();
|
||||
hoisted.sessionManager.resetLeaf.mockReset();
|
||||
hoisted.sessionManager.appendCustomEntry.mockReset();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
while (tempPaths.length > 0) {
|
||||
const target = tempPaths.pop();
|
||||
if (target) {
|
||||
await fs.rm(target, { recursive: true, force: true });
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Build a minimal real attempt harness so lifecycle hooks run against
|
||||
// the actual runner flow instead of a hand-written wrapper.
|
||||
async function runAttemptWithContextEngine(contextEngine: {
|
||||
bootstrap?: (params: {
|
||||
sessionId: string;
|
||||
sessionKey?: string;
|
||||
sessionFile: string;
|
||||
}) => Promise<BootstrapResult>;
|
||||
assemble: (params: {
|
||||
sessionId: string;
|
||||
sessionKey?: string;
|
||||
messages: AgentMessage[];
|
||||
tokenBudget?: number;
|
||||
}) => Promise<AssembleResult>;
|
||||
afterTurn?: (params: {
|
||||
sessionId: string;
|
||||
sessionKey?: string;
|
||||
sessionFile: string;
|
||||
messages: AgentMessage[];
|
||||
prePromptMessageCount: number;
|
||||
tokenBudget?: number;
|
||||
runtimeContext?: Record<string, unknown>;
|
||||
}) => Promise<void>;
|
||||
ingestBatch?: (params: {
|
||||
sessionId: string;
|
||||
sessionKey?: string;
|
||||
messages: AgentMessage[];
|
||||
}) => Promise<IngestBatchResult>;
|
||||
ingest?: (params: {
|
||||
sessionId: string;
|
||||
sessionKey?: string;
|
||||
message: AgentMessage;
|
||||
}) => Promise<IngestResult>;
|
||||
compact?: (params: {
|
||||
sessionId: string;
|
||||
sessionKey?: string;
|
||||
sessionFile: string;
|
||||
tokenBudget?: number;
|
||||
}) => Promise<CompactResult>;
|
||||
info?: Partial<ContextEngineInfo>;
|
||||
}) {
|
||||
const workspaceDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-ctx-engine-workspace-"));
|
||||
const agentDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-ctx-engine-agent-"));
|
||||
const sessionFile = path.join(workspaceDir, "session.jsonl");
|
||||
tempPaths.push(workspaceDir, agentDir);
|
||||
await fs.writeFile(sessionFile, "", "utf8");
|
||||
const seedMessages: AgentMessage[] = [
|
||||
{ role: "user", content: "seed", timestamp: 1 } as AgentMessage,
|
||||
];
|
||||
const infoId = contextEngine.info?.id ?? "test-context-engine";
|
||||
const infoName = contextEngine.info?.name ?? "Test Context Engine";
|
||||
const infoVersion = contextEngine.info?.version ?? "0.0.1";
|
||||
|
||||
hoisted.sessionManager.buildSessionContext
|
||||
.mockReset()
|
||||
.mockReturnValue({ messages: seedMessages });
|
||||
|
||||
hoisted.createAgentSessionMock.mockImplementation(async () => {
|
||||
const session: MutableSession = {
|
||||
sessionId: "embedded-session",
|
||||
messages: [],
|
||||
isCompacting: false,
|
||||
isStreaming: false,
|
||||
agent: {
|
||||
replaceMessages: (messages: unknown[]) => {
|
||||
session.messages = [...messages];
|
||||
},
|
||||
},
|
||||
prompt: async () => {
|
||||
session.messages = [
|
||||
...session.messages,
|
||||
{ role: "assistant", content: "done", timestamp: 2 },
|
||||
];
|
||||
},
|
||||
abort: async () => {},
|
||||
dispose: () => {},
|
||||
steer: async () => {},
|
||||
};
|
||||
|
||||
return { session };
|
||||
});
|
||||
|
||||
return await runEmbeddedAttempt({
|
||||
sessionId: "embedded-session",
|
||||
sessionKey,
|
||||
sessionFile,
|
||||
workspaceDir,
|
||||
agentDir,
|
||||
config: {},
|
||||
prompt: "hello",
|
||||
timeoutMs: 10_000,
|
||||
runId: "run-context-engine-forwarding",
|
||||
provider: "openai",
|
||||
modelId: "gpt-test",
|
||||
model: testModel,
|
||||
authStorage: {} as AuthStorage,
|
||||
modelRegistry: {} as ModelRegistry,
|
||||
thinkLevel: "off",
|
||||
senderIsOwner: true,
|
||||
disableMessageTool: true,
|
||||
contextTokenBudget: 2048,
|
||||
contextEngine: {
|
||||
...contextEngine,
|
||||
ingest:
|
||||
contextEngine.ingest ??
|
||||
(async () => ({
|
||||
ingested: true,
|
||||
})),
|
||||
compact:
|
||||
contextEngine.compact ??
|
||||
(async () => ({
|
||||
ok: false,
|
||||
compacted: false,
|
||||
reason: "not used in this test",
|
||||
})),
|
||||
info: {
|
||||
id: infoId,
|
||||
name: infoName,
|
||||
version: infoVersion,
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
it("forwards sessionKey to bootstrap, assemble, and afterTurn", async () => {
|
||||
const bootstrap = vi.fn(async (_params: { sessionKey?: string }) => ({ bootstrapped: true }));
|
||||
const assemble = vi.fn(
|
||||
async ({ messages }: { messages: AgentMessage[]; sessionKey?: string }) => ({
|
||||
messages,
|
||||
estimatedTokens: 1,
|
||||
}),
|
||||
);
|
||||
const afterTurn = vi.fn(async (_params: { sessionKey?: string }) => {});
|
||||
|
||||
const result = await runAttemptWithContextEngine({
|
||||
bootstrap,
|
||||
assemble,
|
||||
afterTurn,
|
||||
});
|
||||
|
||||
expect(result.promptError).toBeNull();
|
||||
expect(bootstrap).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
sessionKey,
|
||||
}),
|
||||
);
|
||||
expect(assemble).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
sessionKey,
|
||||
}),
|
||||
);
|
||||
expect(afterTurn).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
sessionKey,
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it("forwards sessionKey to ingestBatch when afterTurn is absent", async () => {
|
||||
const bootstrap = vi.fn(async (_params: { sessionKey?: string }) => ({ bootstrapped: true }));
|
||||
const assemble = vi.fn(
|
||||
async ({ messages }: { messages: AgentMessage[]; sessionKey?: string }) => ({
|
||||
messages,
|
||||
estimatedTokens: 1,
|
||||
}),
|
||||
);
|
||||
const ingestBatch = vi.fn(
|
||||
async (_params: { sessionKey?: string; messages: AgentMessage[] }) => ({ ingestedCount: 1 }),
|
||||
);
|
||||
|
||||
const result = await runAttemptWithContextEngine({
|
||||
bootstrap,
|
||||
assemble,
|
||||
ingestBatch,
|
||||
});
|
||||
|
||||
expect(result.promptError).toBeNull();
|
||||
expect(ingestBatch).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
sessionKey,
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it("forwards sessionKey to per-message ingest when ingestBatch is absent", async () => {
|
||||
const bootstrap = vi.fn(async (_params: { sessionKey?: string }) => ({ bootstrapped: true }));
|
||||
const assemble = vi.fn(
|
||||
async ({ messages }: { messages: AgentMessage[]; sessionKey?: string }) => ({
|
||||
messages,
|
||||
estimatedTokens: 1,
|
||||
}),
|
||||
);
|
||||
const ingest = vi.fn(async (_params: { sessionKey?: string; message: AgentMessage }) => ({
|
||||
ingested: true,
|
||||
}));
|
||||
|
||||
const result = await runAttemptWithContextEngine({
|
||||
bootstrap,
|
||||
assemble,
|
||||
ingest,
|
||||
});
|
||||
|
||||
expect(result.promptError).toBeNull();
|
||||
expect(ingest).toHaveBeenCalled();
|
||||
expect(
|
||||
ingest.mock.calls.every((call) => {
|
||||
const params = call[0];
|
||||
return params.sessionKey === sessionKey;
|
||||
}),
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -1502,6 +1502,10 @@ export async function runEmbeddedAttempt(
|
|||
runId: params.runId,
|
||||
agentDir,
|
||||
workspaceDir: effectiveWorkspace,
|
||||
// When sandboxing uses a copied workspace (`ro` or `none`), effectiveWorkspace points
|
||||
// at the sandbox copy. Spawned subagents should inherit the real workspace instead.
|
||||
spawnWorkspaceDir:
|
||||
sandbox?.enabled && sandbox.workspaceAccess !== "rw" ? resolvedWorkspace : undefined,
|
||||
config: params.config,
|
||||
abortSignal: runAbortController.signal,
|
||||
modelProvider: params.model.provider,
|
||||
|
|
@ -1737,6 +1741,7 @@ export async function runEmbeddedAttempt(
|
|||
try {
|
||||
await params.contextEngine.bootstrap({
|
||||
sessionId: params.sessionId,
|
||||
sessionKey: params.sessionKey,
|
||||
sessionFile: params.sessionFile,
|
||||
});
|
||||
} catch (bootstrapErr) {
|
||||
|
|
@ -2089,6 +2094,7 @@ export async function runEmbeddedAttempt(
|
|||
try {
|
||||
const assembled = await params.contextEngine.assemble({
|
||||
sessionId: params.sessionId,
|
||||
sessionKey: params.sessionKey,
|
||||
messages: activeSession.messages,
|
||||
tokenBudget: params.contextTokenBudget,
|
||||
});
|
||||
|
|
@ -2604,6 +2610,7 @@ export async function runEmbeddedAttempt(
|
|||
try {
|
||||
await params.contextEngine.afterTurn({
|
||||
sessionId: sessionIdUsed,
|
||||
sessionKey: params.sessionKey,
|
||||
sessionFile: params.sessionFile,
|
||||
messages: messagesSnapshot,
|
||||
prePromptMessageCount,
|
||||
|
|
@ -2621,6 +2628,7 @@ export async function runEmbeddedAttempt(
|
|||
try {
|
||||
await params.contextEngine.ingestBatch({
|
||||
sessionId: sessionIdUsed,
|
||||
sessionKey: params.sessionKey,
|
||||
messages: newMessages,
|
||||
});
|
||||
} catch (ingestErr) {
|
||||
|
|
@ -2631,6 +2639,7 @@ export async function runEmbeddedAttempt(
|
|||
try {
|
||||
await params.contextEngine.ingest({
|
||||
sessionId: sessionIdUsed,
|
||||
sessionKey: params.sessionKey,
|
||||
message: msg,
|
||||
});
|
||||
} catch (ingestErr) {
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
import { importFreshModule } from "../../../test/helpers/import-fresh.js";
|
||||
import {
|
||||
__testing,
|
||||
abortEmbeddedPiRun,
|
||||
|
|
@ -105,4 +106,35 @@ describe("pi-embedded runner run registry", () => {
|
|||
vi.useRealTimers();
|
||||
}
|
||||
});
|
||||
|
||||
it("shares active run state across distinct module instances", async () => {
|
||||
const runsA = await importFreshModule<typeof import("./runs.js")>(
|
||||
import.meta.url,
|
||||
"./runs.js?scope=shared-a",
|
||||
);
|
||||
const runsB = await importFreshModule<typeof import("./runs.js")>(
|
||||
import.meta.url,
|
||||
"./runs.js?scope=shared-b",
|
||||
);
|
||||
const handle = {
|
||||
queueMessage: async () => {},
|
||||
isStreaming: () => true,
|
||||
isCompacting: () => false,
|
||||
abort: vi.fn(),
|
||||
};
|
||||
|
||||
runsA.__testing.resetActiveEmbeddedRuns();
|
||||
runsB.__testing.resetActiveEmbeddedRuns();
|
||||
|
||||
try {
|
||||
runsA.setActiveEmbeddedRun("session-shared", handle);
|
||||
expect(runsB.isEmbeddedPiRunActive("session-shared")).toBe(true);
|
||||
|
||||
runsB.clearActiveEmbeddedRun("session-shared", handle);
|
||||
expect(runsA.isEmbeddedPiRunActive("session-shared")).toBe(false);
|
||||
} finally {
|
||||
runsA.__testing.resetActiveEmbeddedRuns();
|
||||
runsB.__testing.resetActiveEmbeddedRuns();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -3,6 +3,7 @@ import {
|
|||
logMessageQueued,
|
||||
logSessionStateChange,
|
||||
} from "../../logging/diagnostic.js";
|
||||
import { resolveGlobalSingleton } from "../../shared/global-singleton.js";
|
||||
|
||||
type EmbeddedPiQueueHandle = {
|
||||
queueMessage: (text: string) => Promise<void>;
|
||||
|
|
@ -11,12 +12,23 @@ type EmbeddedPiQueueHandle = {
|
|||
abort: () => void;
|
||||
};
|
||||
|
||||
const ACTIVE_EMBEDDED_RUNS = new Map<string, EmbeddedPiQueueHandle>();
|
||||
type EmbeddedRunWaiter = {
|
||||
resolve: (ended: boolean) => void;
|
||||
timer: NodeJS.Timeout;
|
||||
};
|
||||
const EMBEDDED_RUN_WAITERS = new Map<string, Set<EmbeddedRunWaiter>>();
|
||||
|
||||
/**
|
||||
* Use global singleton state so busy/streaming checks stay consistent even
|
||||
* when the bundler emits multiple copies of this module into separate chunks.
|
||||
*/
|
||||
const EMBEDDED_RUN_STATE_KEY = Symbol.for("openclaw.embeddedRunState");
|
||||
|
||||
const embeddedRunState = resolveGlobalSingleton(EMBEDDED_RUN_STATE_KEY, () => ({
|
||||
activeRuns: new Map<string, EmbeddedPiQueueHandle>(),
|
||||
waiters: new Map<string, Set<EmbeddedRunWaiter>>(),
|
||||
}));
|
||||
const ACTIVE_EMBEDDED_RUNS = embeddedRunState.activeRuns;
|
||||
const EMBEDDED_RUN_WAITERS = embeddedRunState.waiters;
|
||||
|
||||
export function queueEmbeddedPiMessage(sessionId: string, text: string): boolean {
|
||||
const handle = ACTIVE_EMBEDDED_RUNS.get(sessionId);
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
import type { CommandFlagKey } from "../../config/commands.js";
|
||||
import { isCommandFlagEnabled } from "../../config/commands.js";
|
||||
import { logVerbose } from "../../globals.js";
|
||||
import { redactIdentifier } from "../../logging/redact-identifier.js";
|
||||
import { isInternalMessageChannel } from "../../utils/message-channel.js";
|
||||
import type { ReplyPayload } from "../types.js";
|
||||
import type { CommandHandlerResult, HandleCommandsParams } from "./commands-types.js";
|
||||
|
|
@ -13,7 +14,20 @@ export function rejectUnauthorizedCommand(
|
|||
return null;
|
||||
}
|
||||
logVerbose(
|
||||
`Ignoring ${commandLabel} from unauthorized sender: ${params.command.senderId || "<unknown>"}`,
|
||||
`Ignoring ${commandLabel} from unauthorized sender: ${redactIdentifier(params.command.senderId)}`,
|
||||
);
|
||||
return { shouldContinue: false };
|
||||
}
|
||||
|
||||
export function rejectNonOwnerCommand(
|
||||
params: HandleCommandsParams,
|
||||
commandLabel: string,
|
||||
): CommandHandlerResult | null {
|
||||
if (params.command.senderIsOwner) {
|
||||
return null;
|
||||
}
|
||||
logVerbose(
|
||||
`Ignoring ${commandLabel} from non-owner sender: ${redactIdentifier(params.command.senderId)}`,
|
||||
);
|
||||
return { shouldContinue: false };
|
||||
}
|
||||
|
|
|
|||
|
|
@ -22,7 +22,9 @@ import {
|
|||
setConfigOverride,
|
||||
unsetConfigOverride,
|
||||
} from "../../config/runtime-overrides.js";
|
||||
import { isInternalMessageChannel } from "../../utils/message-channel.js";
|
||||
import {
|
||||
rejectNonOwnerCommand,
|
||||
rejectUnauthorizedCommand,
|
||||
requireCommandFlagEnabled,
|
||||
requireGatewayClientScopeForInternalChannel,
|
||||
|
|
@ -43,6 +45,12 @@ export const handleConfigCommand: CommandHandler = async (params, allowTextComma
|
|||
if (unauthorized) {
|
||||
return unauthorized;
|
||||
}
|
||||
const allowInternalReadOnlyShow =
|
||||
configCommand.action === "show" && isInternalMessageChannel(params.command.channel);
|
||||
const nonOwner = allowInternalReadOnlyShow ? null : rejectNonOwnerCommand(params, "/config");
|
||||
if (nonOwner) {
|
||||
return nonOwner;
|
||||
}
|
||||
const disabled = requireCommandFlagEnabled(params.cfg, {
|
||||
label: "/config",
|
||||
configKey: "config",
|
||||
|
|
@ -197,6 +205,10 @@ export const handleDebugCommand: CommandHandler = async (params, allowTextComman
|
|||
if (unauthorized) {
|
||||
return unauthorized;
|
||||
}
|
||||
const nonOwner = rejectNonOwnerCommand(params, "/debug");
|
||||
if (nonOwner) {
|
||||
return nonOwner;
|
||||
}
|
||||
const disabled = requireCommandFlagEnabled(params.cfg, {
|
||||
label: "/debug",
|
||||
configKey: "debug",
|
||||
|
|
|
|||
|
|
@ -181,6 +181,9 @@ describe("handleCommands gating", () => {
|
|||
commands: { config: false, debug: false, text: true },
|
||||
channels: { whatsapp: { allowFrom: ["*"] } },
|
||||
}) as OpenClawConfig,
|
||||
applyParams: (params: ReturnType<typeof buildParams>) => {
|
||||
params.command.senderIsOwner = true;
|
||||
},
|
||||
expectedText: "/config is disabled",
|
||||
},
|
||||
{
|
||||
|
|
@ -191,6 +194,9 @@ describe("handleCommands gating", () => {
|
|||
commands: { config: false, debug: false, text: true },
|
||||
channels: { whatsapp: { allowFrom: ["*"] } },
|
||||
}) as OpenClawConfig,
|
||||
applyParams: (params: ReturnType<typeof buildParams>) => {
|
||||
params.command.senderIsOwner = true;
|
||||
},
|
||||
expectedText: "/debug is disabled",
|
||||
},
|
||||
{
|
||||
|
|
@ -223,6 +229,9 @@ describe("handleCommands gating", () => {
|
|||
channels: { whatsapp: { allowFrom: ["*"] } },
|
||||
} as OpenClawConfig;
|
||||
},
|
||||
applyParams: (params: ReturnType<typeof buildParams>) => {
|
||||
params.command.senderIsOwner = true;
|
||||
},
|
||||
expectedText: "/config is disabled",
|
||||
},
|
||||
{
|
||||
|
|
@ -239,6 +248,9 @@ describe("handleCommands gating", () => {
|
|||
channels: { whatsapp: { allowFrom: ["*"] } },
|
||||
} as OpenClawConfig;
|
||||
},
|
||||
applyParams: (params: ReturnType<typeof buildParams>) => {
|
||||
params.command.senderIsOwner = true;
|
||||
},
|
||||
expectedText: "/debug is disabled",
|
||||
},
|
||||
]);
|
||||
|
|
@ -670,6 +682,36 @@ describe("extractMessageText", () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe("handleCommands /config owner gating", () => {
|
||||
it("blocks /config show from authorized non-owner senders", async () => {
|
||||
const cfg = {
|
||||
commands: { config: true, text: true },
|
||||
channels: { whatsapp: { allowFrom: ["*"] } },
|
||||
} as OpenClawConfig;
|
||||
const params = buildParams("/config show", cfg);
|
||||
params.command.senderIsOwner = false;
|
||||
const result = await handleCommands(params);
|
||||
expect(result.shouldContinue).toBe(false);
|
||||
expect(result.reply).toBeUndefined();
|
||||
});
|
||||
|
||||
it("keeps /config show working for owners", async () => {
|
||||
const cfg = {
|
||||
commands: { config: true, text: true },
|
||||
channels: { whatsapp: { allowFrom: ["*"] } },
|
||||
} as OpenClawConfig;
|
||||
readConfigFileSnapshotMock.mockResolvedValueOnce({
|
||||
valid: true,
|
||||
parsed: { messages: { ackreaction: ":)" } },
|
||||
});
|
||||
const params = buildParams("/config show messages.ackReaction", cfg);
|
||||
params.command.senderIsOwner = true;
|
||||
const result = await handleCommands(params);
|
||||
expect(result.shouldContinue).toBe(false);
|
||||
expect(result.reply?.text).toContain("Config messages.ackreaction");
|
||||
});
|
||||
});
|
||||
|
||||
describe("handleCommands /config configWrites gating", () => {
|
||||
it("blocks /config set when channel config writes are disabled", async () => {
|
||||
const cfg = {
|
||||
|
|
@ -677,6 +719,7 @@ describe("handleCommands /config configWrites gating", () => {
|
|||
channels: { whatsapp: { allowFrom: ["*"], configWrites: false } },
|
||||
} as OpenClawConfig;
|
||||
const params = buildParams('/config set messages.ackReaction=":)"', cfg);
|
||||
params.command.senderIsOwner = true;
|
||||
const result = await handleCommands(params);
|
||||
expect(result.shouldContinue).toBe(false);
|
||||
expect(result.reply?.text).toContain("Config writes are disabled");
|
||||
|
|
@ -704,6 +747,7 @@ describe("handleCommands /config configWrites gating", () => {
|
|||
Surface: "telegram",
|
||||
},
|
||||
);
|
||||
params.command.senderIsOwner = true;
|
||||
const result = await handleCommands(params);
|
||||
expect(result.shouldContinue).toBe(false);
|
||||
expect(result.reply?.text).toContain("channels.telegram.accounts.work.configWrites=true");
|
||||
|
|
@ -720,6 +764,7 @@ describe("handleCommands /config configWrites gating", () => {
|
|||
Provider: "telegram",
|
||||
Surface: "telegram",
|
||||
});
|
||||
params.command.senderIsOwner = true;
|
||||
const result = await handleCommands(params);
|
||||
expect(result.shouldContinue).toBe(false);
|
||||
expect(result.reply?.text).toContain(
|
||||
|
|
@ -738,6 +783,7 @@ describe("handleCommands /config configWrites gating", () => {
|
|||
GatewayClientScopes: ["operator.write"],
|
||||
});
|
||||
params.command.channel = INTERNAL_MESSAGE_CHANNEL;
|
||||
params.command.senderIsOwner = true;
|
||||
const result = await handleCommands(params);
|
||||
expect(result.shouldContinue).toBe(false);
|
||||
expect(result.reply?.text).toContain("requires operator.admin");
|
||||
|
|
@ -757,6 +803,7 @@ describe("handleCommands /config configWrites gating", () => {
|
|||
GatewayClientScopes: ["operator.write"],
|
||||
});
|
||||
params.command.channel = INTERNAL_MESSAGE_CHANNEL;
|
||||
params.command.senderIsOwner = false;
|
||||
const result = await handleCommands(params);
|
||||
expect(result.shouldContinue).toBe(false);
|
||||
expect(result.reply?.text).toContain("Config messages.ackreaction");
|
||||
|
|
@ -780,6 +827,7 @@ describe("handleCommands /config configWrites gating", () => {
|
|||
GatewayClientScopes: ["operator.write", "operator.admin"],
|
||||
});
|
||||
params.command.channel = INTERNAL_MESSAGE_CHANNEL;
|
||||
params.command.senderIsOwner = true;
|
||||
const result = await handleCommands(params);
|
||||
expect(result.shouldContinue).toBe(false);
|
||||
expect(writeConfigFileMock).toHaveBeenCalledOnce();
|
||||
|
|
@ -822,6 +870,7 @@ describe("handleCommands /config configWrites gating", () => {
|
|||
},
|
||||
);
|
||||
params.command.channel = INTERNAL_MESSAGE_CHANNEL;
|
||||
params.command.senderIsOwner = true;
|
||||
const result = await handleCommands(params);
|
||||
expect(result.shouldContinue).toBe(false);
|
||||
expect(result.reply?.text).toContain("Config updated");
|
||||
|
|
@ -830,6 +879,32 @@ describe("handleCommands /config configWrites gating", () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe("handleCommands /debug owner gating", () => {
|
||||
it("blocks /debug show from authorized non-owner senders", async () => {
|
||||
const cfg = {
|
||||
commands: { debug: true, text: true },
|
||||
channels: { whatsapp: { allowFrom: ["*"] } },
|
||||
} as OpenClawConfig;
|
||||
const params = buildParams("/debug show", cfg);
|
||||
params.command.senderIsOwner = false;
|
||||
const result = await handleCommands(params);
|
||||
expect(result.shouldContinue).toBe(false);
|
||||
expect(result.reply).toBeUndefined();
|
||||
});
|
||||
|
||||
it("keeps /debug show working for owners", async () => {
|
||||
const cfg = {
|
||||
commands: { debug: true, text: true },
|
||||
channels: { whatsapp: { allowFrom: ["*"] } },
|
||||
} as OpenClawConfig;
|
||||
const params = buildParams("/debug show", cfg);
|
||||
params.command.senderIsOwner = true;
|
||||
const result = await handleCommands(params);
|
||||
expect(result.shouldContinue).toBe(false);
|
||||
expect(result.reply?.text).toContain("Debug overrides");
|
||||
});
|
||||
});
|
||||
|
||||
describe("handleCommands bash alias", () => {
|
||||
it("routes !poll and !stop through the /bash handler", async () => {
|
||||
const cfg = {
|
||||
|
|
|
|||
|
|
@ -0,0 +1,43 @@
|
|||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import { importFreshModule } from "../../../test/helpers/import-fresh.js";
|
||||
import type { MsgContext } from "../templating.js";
|
||||
import { resetInboundDedupe } from "./inbound-dedupe.js";
|
||||
|
||||
const sharedInboundContext: MsgContext = {
|
||||
Provider: "discord",
|
||||
Surface: "discord",
|
||||
From: "discord:user-1",
|
||||
To: "channel:c1",
|
||||
OriginatingChannel: "discord",
|
||||
OriginatingTo: "channel:c1",
|
||||
SessionKey: "agent:main:discord:channel:c1",
|
||||
MessageSid: "msg-1",
|
||||
};
|
||||
|
||||
describe("inbound dedupe", () => {
|
||||
afterEach(() => {
|
||||
resetInboundDedupe();
|
||||
});
|
||||
|
||||
it("shares dedupe state across distinct module instances", async () => {
|
||||
const inboundA = await importFreshModule<typeof import("./inbound-dedupe.js")>(
|
||||
import.meta.url,
|
||||
"./inbound-dedupe.js?scope=shared-a",
|
||||
);
|
||||
const inboundB = await importFreshModule<typeof import("./inbound-dedupe.js")>(
|
||||
import.meta.url,
|
||||
"./inbound-dedupe.js?scope=shared-b",
|
||||
);
|
||||
|
||||
inboundA.resetInboundDedupe();
|
||||
inboundB.resetInboundDedupe();
|
||||
|
||||
try {
|
||||
expect(inboundA.shouldSkipDuplicateInbound(sharedInboundContext)).toBe(false);
|
||||
expect(inboundB.shouldSkipDuplicateInbound(sharedInboundContext)).toBe(true);
|
||||
} finally {
|
||||
inboundA.resetInboundDedupe();
|
||||
inboundB.resetInboundDedupe();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
|
@ -1,15 +1,24 @@
|
|||
import { logVerbose, shouldLogVerbose } from "../../globals.js";
|
||||
import { createDedupeCache, type DedupeCache } from "../../infra/dedupe.js";
|
||||
import { parseAgentSessionKey } from "../../sessions/session-key-utils.js";
|
||||
import { resolveGlobalSingleton } from "../../shared/global-singleton.js";
|
||||
import type { MsgContext } from "../templating.js";
|
||||
|
||||
const DEFAULT_INBOUND_DEDUPE_TTL_MS = 20 * 60_000;
|
||||
const DEFAULT_INBOUND_DEDUPE_MAX = 5000;
|
||||
|
||||
const inboundDedupeCache = createDedupeCache({
|
||||
ttlMs: DEFAULT_INBOUND_DEDUPE_TTL_MS,
|
||||
maxSize: DEFAULT_INBOUND_DEDUPE_MAX,
|
||||
});
|
||||
/**
|
||||
* Keep inbound dedupe shared across bundled chunks so the same provider
|
||||
* message cannot bypass dedupe by entering through a different chunk copy.
|
||||
*/
|
||||
const INBOUND_DEDUPE_CACHE_KEY = Symbol.for("openclaw.inboundDedupeCache");
|
||||
|
||||
const inboundDedupeCache = resolveGlobalSingleton<DedupeCache>(INBOUND_DEDUPE_CACHE_KEY, () =>
|
||||
createDedupeCache({
|
||||
ttlMs: DEFAULT_INBOUND_DEDUPE_TTL_MS,
|
||||
maxSize: DEFAULT_INBOUND_DEDUPE_MAX,
|
||||
}),
|
||||
);
|
||||
|
||||
const normalizeProvider = (value?: string | null) => value?.trim().toLowerCase() || "";
|
||||
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import { defaultRuntime } from "../../../runtime.js";
|
||||
import { resolveGlobalMap } from "../../../shared/global-singleton.js";
|
||||
import {
|
||||
buildCollectPrompt,
|
||||
beginQueueDrain,
|
||||
|
|
@ -15,7 +16,11 @@ import type { FollowupRun } from "./types.js";
|
|||
|
||||
// Persists the most recent runFollowup callback per queue key so that
|
||||
// enqueueFollowupRun can restart a drain that finished and deleted the queue.
|
||||
const FOLLOWUP_RUN_CALLBACKS = new Map<string, (run: FollowupRun) => Promise<void>>();
|
||||
const FOLLOWUP_DRAIN_CALLBACKS_KEY = Symbol.for("openclaw.followupDrainCallbacks");
|
||||
|
||||
const FOLLOWUP_RUN_CALLBACKS = resolveGlobalMap<string, (run: FollowupRun) => Promise<void>>(
|
||||
FOLLOWUP_DRAIN_CALLBACKS_KEY,
|
||||
);
|
||||
|
||||
export function clearFollowupDrainCallback(key: string): void {
|
||||
FOLLOWUP_RUN_CALLBACKS.delete(key);
|
||||
|
|
|
|||
|
|
@ -1,13 +1,22 @@
|
|||
import { createDedupeCache } from "../../../infra/dedupe.js";
|
||||
import { resolveGlobalSingleton } from "../../../shared/global-singleton.js";
|
||||
import { applyQueueDropPolicy, shouldSkipQueueItem } from "../../../utils/queue-helpers.js";
|
||||
import { kickFollowupDrainIfIdle } from "./drain.js";
|
||||
import { getExistingFollowupQueue, getFollowupQueue } from "./state.js";
|
||||
import type { FollowupRun, QueueDedupeMode, QueueSettings } from "./types.js";
|
||||
|
||||
const RECENT_QUEUE_MESSAGE_IDS = createDedupeCache({
|
||||
ttlMs: 5 * 60 * 1000,
|
||||
maxSize: 10_000,
|
||||
});
|
||||
/**
|
||||
* Keep queued message-id dedupe shared across bundled chunks so redeliveries
|
||||
* are rejected no matter which chunk receives the enqueue call.
|
||||
*/
|
||||
const RECENT_QUEUE_MESSAGE_IDS_KEY = Symbol.for("openclaw.recentQueueMessageIds");
|
||||
|
||||
const RECENT_QUEUE_MESSAGE_IDS = resolveGlobalSingleton(RECENT_QUEUE_MESSAGE_IDS_KEY, () =>
|
||||
createDedupeCache({
|
||||
ttlMs: 5 * 60 * 1000,
|
||||
maxSize: 10_000,
|
||||
}),
|
||||
);
|
||||
|
||||
function buildRecentMessageIdKey(run: FollowupRun, queueKey: string): string | undefined {
|
||||
const messageId = run.messageId?.trim();
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
import { resolveGlobalMap } from "../../../shared/global-singleton.js";
|
||||
import { applyQueueRuntimeSettings } from "../../../utils/queue-helpers.js";
|
||||
import type { FollowupRun, QueueDropPolicy, QueueMode, QueueSettings } from "./types.js";
|
||||
|
||||
|
|
@ -18,7 +19,13 @@ export const DEFAULT_QUEUE_DEBOUNCE_MS = 1000;
|
|||
export const DEFAULT_QUEUE_CAP = 20;
|
||||
export const DEFAULT_QUEUE_DROP: QueueDropPolicy = "summarize";
|
||||
|
||||
export const FOLLOWUP_QUEUES = new Map<string, FollowupQueueState>();
|
||||
/**
|
||||
* Share followup queues across bundled chunks so busy-session enqueue/drain
|
||||
* logic observes one queue registry per process.
|
||||
*/
|
||||
const FOLLOWUP_QUEUES_KEY = Symbol.for("openclaw.followupQueues");
|
||||
|
||||
export const FOLLOWUP_QUEUES = resolveGlobalMap<string, FollowupQueueState>(FOLLOWUP_QUEUES_KEY);
|
||||
|
||||
export function getExistingFollowupQueue(key: string): FollowupQueueState | undefined {
|
||||
const cleaned = key.trim();
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import { afterAll, beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { importFreshModule } from "../../../test/helpers/import-fresh.js";
|
||||
import { expectInboundContextContract } from "../../../test/helpers/inbound-contract.js";
|
||||
import type { OpenClawConfig } from "../../config/config.js";
|
||||
import { defaultRuntime } from "../../runtime.js";
|
||||
|
|
@ -743,6 +744,71 @@ describe("followup queue deduplication", () => {
|
|||
expect(calls).toHaveLength(1);
|
||||
});
|
||||
|
||||
it("deduplicates same message_id across distinct enqueue module instances", async () => {
|
||||
const enqueueA = await importFreshModule<typeof import("./queue/enqueue.js")>(
|
||||
import.meta.url,
|
||||
"./queue/enqueue.js?scope=dedupe-a",
|
||||
);
|
||||
const enqueueB = await importFreshModule<typeof import("./queue/enqueue.js")>(
|
||||
import.meta.url,
|
||||
"./queue/enqueue.js?scope=dedupe-b",
|
||||
);
|
||||
const { clearSessionQueues } = await import("./queue.js");
|
||||
const key = `test-dedup-cross-module-${Date.now()}`;
|
||||
const calls: FollowupRun[] = [];
|
||||
const done = createDeferred<void>();
|
||||
const runFollowup = async (run: FollowupRun) => {
|
||||
calls.push(run);
|
||||
done.resolve();
|
||||
};
|
||||
const settings: QueueSettings = {
|
||||
mode: "collect",
|
||||
debounceMs: 0,
|
||||
cap: 50,
|
||||
dropPolicy: "summarize",
|
||||
};
|
||||
|
||||
enqueueA.resetRecentQueuedMessageIdDedupe();
|
||||
enqueueB.resetRecentQueuedMessageIdDedupe();
|
||||
|
||||
try {
|
||||
expect(
|
||||
enqueueA.enqueueFollowupRun(
|
||||
key,
|
||||
createRun({
|
||||
prompt: "first",
|
||||
messageId: "same-id",
|
||||
originatingChannel: "signal",
|
||||
originatingTo: "+10000000000",
|
||||
}),
|
||||
settings,
|
||||
),
|
||||
).toBe(true);
|
||||
|
||||
scheduleFollowupDrain(key, runFollowup);
|
||||
await done.promise;
|
||||
await new Promise<void>((resolve) => setImmediate(resolve));
|
||||
|
||||
expect(
|
||||
enqueueB.enqueueFollowupRun(
|
||||
key,
|
||||
createRun({
|
||||
prompt: "first-redelivery",
|
||||
messageId: "same-id",
|
||||
originatingChannel: "signal",
|
||||
originatingTo: "+10000000000",
|
||||
}),
|
||||
settings,
|
||||
),
|
||||
).toBe(false);
|
||||
expect(calls).toHaveLength(1);
|
||||
} finally {
|
||||
clearSessionQueues([key]);
|
||||
enqueueA.resetRecentQueuedMessageIdDedupe();
|
||||
enqueueB.resetRecentQueuedMessageIdDedupe();
|
||||
}
|
||||
});
|
||||
|
||||
it("does not collide recent message-id keys when routing contains delimiters", async () => {
|
||||
const key = `test-dedup-key-collision-${Date.now()}`;
|
||||
const calls: FollowupRun[] = [];
|
||||
|
|
@ -1264,6 +1330,55 @@ describe("followup queue drain restart after idle window", () => {
|
|||
expect(calls[1]?.prompt).toBe("after-idle");
|
||||
});
|
||||
|
||||
it("restarts an idle drain across distinct enqueue and drain module instances", async () => {
|
||||
const drainA = await importFreshModule<typeof import("./queue/drain.js")>(
|
||||
import.meta.url,
|
||||
"./queue/drain.js?scope=restart-a",
|
||||
);
|
||||
const enqueueB = await importFreshModule<typeof import("./queue/enqueue.js")>(
|
||||
import.meta.url,
|
||||
"./queue/enqueue.js?scope=restart-b",
|
||||
);
|
||||
const { clearSessionQueues } = await import("./queue.js");
|
||||
const key = `test-idle-window-cross-module-${Date.now()}`;
|
||||
const calls: FollowupRun[] = [];
|
||||
const settings: QueueSettings = { mode: "followup", debounceMs: 0, cap: 50 };
|
||||
const firstProcessed = createDeferred<void>();
|
||||
|
||||
enqueueB.resetRecentQueuedMessageIdDedupe();
|
||||
|
||||
try {
|
||||
const runFollowup = async (run: FollowupRun) => {
|
||||
calls.push(run);
|
||||
if (calls.length === 1) {
|
||||
firstProcessed.resolve();
|
||||
}
|
||||
};
|
||||
|
||||
enqueueB.enqueueFollowupRun(key, createRun({ prompt: "before-idle" }), settings);
|
||||
drainA.scheduleFollowupDrain(key, runFollowup);
|
||||
await firstProcessed.promise;
|
||||
|
||||
await new Promise<void>((resolve) => setImmediate(resolve));
|
||||
|
||||
enqueueB.enqueueFollowupRun(key, createRun({ prompt: "after-idle" }), settings);
|
||||
|
||||
await vi.waitFor(
|
||||
() => {
|
||||
expect(calls).toHaveLength(2);
|
||||
},
|
||||
{ timeout: 1_000 },
|
||||
);
|
||||
|
||||
expect(calls[0]?.prompt).toBe("before-idle");
|
||||
expect(calls[1]?.prompt).toBe("after-idle");
|
||||
} finally {
|
||||
clearSessionQueues([key]);
|
||||
drainA.clearFollowupDrainCallback(key);
|
||||
enqueueB.resetRecentQueuedMessageIdDedupe();
|
||||
}
|
||||
});
|
||||
|
||||
it("does not double-drain when a message arrives while drain is still running", async () => {
|
||||
const key = `test-no-double-drain-${Date.now()}`;
|
||||
const calls: FollowupRun[] = [];
|
||||
|
|
|
|||
|
|
@ -4,6 +4,7 @@ import {
|
|||
listThinkingLevels,
|
||||
normalizeReasoningLevel,
|
||||
normalizeThinkLevel,
|
||||
resolveThinkingDefaultForModel,
|
||||
} from "./thinking.js";
|
||||
|
||||
describe("normalizeThinkLevel", () => {
|
||||
|
|
@ -84,6 +85,40 @@ describe("listThinkingLevelLabels", () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe("resolveThinkingDefaultForModel", () => {
|
||||
it("defaults Claude 4.6 models to adaptive", () => {
|
||||
expect(
|
||||
resolveThinkingDefaultForModel({ provider: "anthropic", model: "claude-opus-4-6" }),
|
||||
).toBe("adaptive");
|
||||
});
|
||||
|
||||
it("treats Bedrock Anthropic aliases as adaptive", () => {
|
||||
expect(
|
||||
resolveThinkingDefaultForModel({ provider: "aws-bedrock", model: "claude-sonnet-4-6" }),
|
||||
).toBe("adaptive");
|
||||
});
|
||||
|
||||
it("defaults reasoning-capable catalog models to low", () => {
|
||||
expect(
|
||||
resolveThinkingDefaultForModel({
|
||||
provider: "openai",
|
||||
model: "gpt-5.4",
|
||||
catalog: [{ provider: "openai", id: "gpt-5.4", reasoning: true }],
|
||||
}),
|
||||
).toBe("low");
|
||||
});
|
||||
|
||||
it("defaults to off when no adaptive or reasoning hint is present", () => {
|
||||
expect(
|
||||
resolveThinkingDefaultForModel({
|
||||
provider: "openai",
|
||||
model: "gpt-4.1-mini",
|
||||
catalog: [{ provider: "openai", id: "gpt-4.1-mini", reasoning: false }],
|
||||
}),
|
||||
).toBe("off");
|
||||
});
|
||||
});
|
||||
|
||||
describe("normalizeReasoningLevel", () => {
|
||||
it("accepts on/off", () => {
|
||||
expect(normalizeReasoningLevel("on")).toBe("on");
|
||||
|
|
|
|||
|
|
@ -5,6 +5,13 @@ export type ElevatedLevel = "off" | "on" | "ask" | "full";
|
|||
export type ElevatedMode = "off" | "ask" | "full";
|
||||
export type ReasoningLevel = "off" | "on" | "stream";
|
||||
export type UsageDisplayLevel = "off" | "tokens" | "full";
|
||||
export type ThinkingCatalogEntry = {
|
||||
provider: string;
|
||||
id: string;
|
||||
reasoning?: boolean;
|
||||
};
|
||||
|
||||
const CLAUDE_46_MODEL_RE = /claude-(?:opus|sonnet)-4(?:\.|-)6(?:$|[-.])/i;
|
||||
|
||||
function normalizeProviderId(provider?: string | null): string {
|
||||
if (!provider) {
|
||||
|
|
@ -14,6 +21,9 @@ function normalizeProviderId(provider?: string | null): string {
|
|||
if (normalized === "z.ai" || normalized === "z-ai") {
|
||||
return "zai";
|
||||
}
|
||||
if (normalized === "bedrock" || normalized === "aws-bedrock") {
|
||||
return "amazon-bedrock";
|
||||
}
|
||||
return normalized;
|
||||
}
|
||||
|
||||
|
|
@ -130,6 +140,30 @@ export function formatXHighModelHint(): string {
|
|||
return `${refs.slice(0, -1).join(", ")} or ${refs[refs.length - 1]}`;
|
||||
}
|
||||
|
||||
export function resolveThinkingDefaultForModel(params: {
|
||||
provider: string;
|
||||
model: string;
|
||||
catalog?: ThinkingCatalogEntry[];
|
||||
}): ThinkLevel {
|
||||
const normalizedProvider = normalizeProviderId(params.provider);
|
||||
const modelLower = params.model.trim().toLowerCase();
|
||||
const isAnthropicFamilyModel =
|
||||
normalizedProvider === "anthropic" ||
|
||||
normalizedProvider === "amazon-bedrock" ||
|
||||
modelLower.includes("anthropic/") ||
|
||||
modelLower.includes(".anthropic.");
|
||||
if (isAnthropicFamilyModel && CLAUDE_46_MODEL_RE.test(modelLower)) {
|
||||
return "adaptive";
|
||||
}
|
||||
const candidate = params.catalog?.find(
|
||||
(entry) => entry.provider === params.provider && entry.id === params.model,
|
||||
);
|
||||
if (candidate?.reasoning) {
|
||||
return "low";
|
||||
}
|
||||
return "off";
|
||||
}
|
||||
|
||||
type OnOffFullLevel = "off" | "on" | "full";
|
||||
|
||||
function normalizeOnOffFullLevel(raw?: string | null): OnOffFullLevel | undefined {
|
||||
|
|
|
|||
|
|
@ -5,8 +5,6 @@ export const AUTH_CHOICE_LEGACY_ALIASES_FOR_CLI: ReadonlyArray<AuthChoice> = [
|
|||
"oauth",
|
||||
"claude-cli",
|
||||
"codex-cli",
|
||||
"minimax-cloud",
|
||||
"minimax",
|
||||
];
|
||||
|
||||
export function normalizeLegacyOnboardAuthChoice(
|
||||
|
|
|
|||
|
|
@ -57,7 +57,7 @@ const AUTH_CHOICE_GROUP_DEFS: {
|
|||
value: "minimax",
|
||||
label: "MiniMax",
|
||||
hint: "M2.5 (recommended)",
|
||||
choices: ["minimax-portal", "minimax-api", "minimax-api-key-cn", "minimax-api-lightning"],
|
||||
choices: ["minimax-global-oauth", "minimax-global-api", "minimax-cn-oauth", "minimax-cn-api"],
|
||||
},
|
||||
{
|
||||
value: "moonshot",
|
||||
|
|
@ -291,9 +291,24 @@ const BASE_AUTH_CHOICE_OPTIONS: ReadonlyArray<AuthChoiceOption> = [
|
|||
label: "Xiaomi API key",
|
||||
},
|
||||
{
|
||||
value: "minimax-portal",
|
||||
label: "MiniMax OAuth",
|
||||
hint: "Oauth plugin for MiniMax",
|
||||
value: "minimax-global-oauth",
|
||||
label: "MiniMax Global — OAuth (minimax.io)",
|
||||
hint: "Only supports OAuth for the coding plan",
|
||||
},
|
||||
{
|
||||
value: "minimax-global-api",
|
||||
label: "MiniMax Global — API Key (minimax.io)",
|
||||
hint: "sk-api- or sk-cp- keys supported",
|
||||
},
|
||||
{
|
||||
value: "minimax-cn-oauth",
|
||||
label: "MiniMax CN — OAuth (minimaxi.com)",
|
||||
hint: "Only supports OAuth for the coding plan",
|
||||
},
|
||||
{
|
||||
value: "minimax-cn-api",
|
||||
label: "MiniMax CN — API Key (minimaxi.com)",
|
||||
hint: "sk-api- or sk-cp- keys supported",
|
||||
},
|
||||
{ value: "qwen-portal", label: "Qwen OAuth" },
|
||||
{
|
||||
|
|
@ -307,17 +322,6 @@ const BASE_AUTH_CHOICE_OPTIONS: ReadonlyArray<AuthChoiceOption> = [
|
|||
label: "OpenCode Zen catalog",
|
||||
hint: "Claude, GPT, Gemini via opencode.ai/zen",
|
||||
},
|
||||
{ value: "minimax-api", label: "MiniMax M2.5" },
|
||||
{
|
||||
value: "minimax-api-key-cn",
|
||||
label: "MiniMax M2.5 (CN)",
|
||||
hint: "China endpoint (api.minimaxi.com)",
|
||||
},
|
||||
{
|
||||
value: "minimax-api-lightning",
|
||||
label: "MiniMax M2.5 Highspeed",
|
||||
hint: "Official fast tier (legacy: Lightning)",
|
||||
},
|
||||
{ value: "qianfan-api-key", label: "Qianfan API key" },
|
||||
{
|
||||
value: "modelstudio-api-key-cn",
|
||||
|
|
|
|||
|
|
@ -1,6 +1,5 @@
|
|||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
import { resolveAgentModelPrimaryValue } from "../config/model-input.js";
|
||||
import type { WizardPrompter } from "../wizard/prompts.js";
|
||||
import { applyAuthChoiceMiniMax } from "./auth-choice.apply.minimax.js";
|
||||
import {
|
||||
createAuthTestLifecycle,
|
||||
|
|
@ -10,23 +9,6 @@ import {
|
|||
setupAuthTestEnv,
|
||||
} from "./test-wizard-helpers.js";
|
||||
|
||||
function createMinimaxPrompter(
|
||||
params: {
|
||||
text?: WizardPrompter["text"];
|
||||
confirm?: WizardPrompter["confirm"];
|
||||
select?: WizardPrompter["select"];
|
||||
} = {},
|
||||
): WizardPrompter {
|
||||
return createWizardPrompter(
|
||||
{
|
||||
text: params.text,
|
||||
confirm: params.confirm,
|
||||
select: params.select,
|
||||
},
|
||||
{ defaultSelect: "oauth" },
|
||||
);
|
||||
}
|
||||
|
||||
describe("applyAuthChoiceMiniMax", () => {
|
||||
const lifecycle = createAuthTestLifecycle([
|
||||
"OPENCLAW_STATE_DIR",
|
||||
|
|
@ -56,27 +38,25 @@ describe("applyAuthChoiceMiniMax", () => {
|
|||
async function runMiniMaxChoice(params: {
|
||||
authChoice: Parameters<typeof applyAuthChoiceMiniMax>[0]["authChoice"];
|
||||
opts?: Parameters<typeof applyAuthChoiceMiniMax>[0]["opts"];
|
||||
env?: { apiKey?: string; oauthToken?: string };
|
||||
prompter?: Parameters<typeof createMinimaxPrompter>[0];
|
||||
env?: { apiKey?: string };
|
||||
prompterText?: () => Promise<string>;
|
||||
}) {
|
||||
const agentDir = await setupTempState();
|
||||
resetMiniMaxEnv();
|
||||
if (params.env?.apiKey !== undefined) {
|
||||
process.env.MINIMAX_API_KEY = params.env.apiKey;
|
||||
}
|
||||
if (params.env?.oauthToken !== undefined) {
|
||||
process.env.MINIMAX_OAUTH_TOKEN = params.env.oauthToken;
|
||||
}
|
||||
|
||||
const text = vi.fn(async () => "should-not-be-used");
|
||||
const confirm = vi.fn(async () => true);
|
||||
const result = await applyAuthChoiceMiniMax({
|
||||
authChoice: params.authChoice,
|
||||
config: {},
|
||||
prompter: createMinimaxPrompter({
|
||||
text,
|
||||
// Pass select: undefined so ref-mode uses the non-interactive fallback (same as old test behavior).
|
||||
prompter: createWizardPrompter({
|
||||
text: params.prompterText ?? text,
|
||||
confirm,
|
||||
...params.prompter,
|
||||
select: undefined,
|
||||
}),
|
||||
runtime: createExitThrowingRuntime(),
|
||||
setDefaultModel: true,
|
||||
|
|
@ -94,7 +74,7 @@ describe("applyAuthChoiceMiniMax", () => {
|
|||
const result = await applyAuthChoiceMiniMax({
|
||||
authChoice: "openrouter-api-key",
|
||||
config: {},
|
||||
prompter: createMinimaxPrompter(),
|
||||
prompter: createWizardPrompter({}),
|
||||
runtime: createExitThrowingRuntime(),
|
||||
setDefaultModel: true,
|
||||
});
|
||||
|
|
@ -104,61 +84,52 @@ describe("applyAuthChoiceMiniMax", () => {
|
|||
|
||||
it.each([
|
||||
{
|
||||
caseName: "uses opts token for minimax-api without prompt",
|
||||
authChoice: "minimax-api" as const,
|
||||
caseName: "uses opts token for minimax-global-api without prompt",
|
||||
authChoice: "minimax-global-api" as const,
|
||||
tokenProvider: "minimax",
|
||||
token: "mm-opts-token",
|
||||
profileId: "minimax:default",
|
||||
provider: "minimax",
|
||||
profileId: "minimax:global",
|
||||
expectedModel: "minimax/MiniMax-M2.5",
|
||||
},
|
||||
{
|
||||
caseName:
|
||||
"uses opts token for minimax-api-key-cn with trimmed/case-insensitive tokenProvider",
|
||||
authChoice: "minimax-api-key-cn" as const,
|
||||
tokenProvider: " MINIMAX-CN ",
|
||||
caseName: "uses opts token for minimax-cn-api with trimmed/case-insensitive tokenProvider",
|
||||
authChoice: "minimax-cn-api" as const,
|
||||
tokenProvider: " MINIMAX ",
|
||||
token: "mm-cn-opts-token",
|
||||
profileId: "minimax-cn:default",
|
||||
provider: "minimax-cn",
|
||||
expectedModel: "minimax-cn/MiniMax-M2.5",
|
||||
profileId: "minimax:cn",
|
||||
expectedModel: "minimax/MiniMax-M2.5",
|
||||
},
|
||||
])(
|
||||
"$caseName",
|
||||
async ({ authChoice, tokenProvider, token, profileId, provider, expectedModel }) => {
|
||||
const { agentDir, result, text, confirm } = await runMiniMaxChoice({
|
||||
authChoice,
|
||||
opts: {
|
||||
tokenProvider,
|
||||
token,
|
||||
},
|
||||
});
|
||||
])("$caseName", async ({ authChoice, tokenProvider, token, profileId, expectedModel }) => {
|
||||
const { agentDir, result, text, confirm } = await runMiniMaxChoice({
|
||||
authChoice,
|
||||
opts: { tokenProvider, token },
|
||||
});
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.config.auth?.profiles?.[profileId]).toMatchObject({
|
||||
provider,
|
||||
mode: "api_key",
|
||||
});
|
||||
expect(resolveAgentModelPrimaryValue(result?.config.agents?.defaults?.model)).toBe(
|
||||
expectedModel,
|
||||
);
|
||||
expect(text).not.toHaveBeenCalled();
|
||||
expect(confirm).not.toHaveBeenCalled();
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.config.auth?.profiles?.[profileId]).toMatchObject({
|
||||
provider: "minimax",
|
||||
mode: "api_key",
|
||||
});
|
||||
expect(resolveAgentModelPrimaryValue(result?.config.agents?.defaults?.model)).toBe(
|
||||
expectedModel,
|
||||
);
|
||||
expect(text).not.toHaveBeenCalled();
|
||||
expect(confirm).not.toHaveBeenCalled();
|
||||
|
||||
const parsed = await readAuthProfiles(agentDir);
|
||||
expect(parsed.profiles?.[profileId]?.key).toBe(token);
|
||||
},
|
||||
);
|
||||
const parsed = await readAuthProfiles(agentDir);
|
||||
expect(parsed.profiles?.[profileId]?.key).toBe(token);
|
||||
});
|
||||
|
||||
it.each([
|
||||
{
|
||||
name: "uses env token for minimax-api-key-cn as plaintext by default",
|
||||
name: "uses env token for minimax-cn-api as plaintext by default",
|
||||
opts: undefined,
|
||||
expectKey: "mm-env-token",
|
||||
expectKeyRef: undefined,
|
||||
expectConfirmCalls: 1,
|
||||
},
|
||||
{
|
||||
name: "uses env token for minimax-api-key-cn as keyRef in ref mode",
|
||||
name: "uses env token for minimax-cn-api as keyRef in ref mode",
|
||||
opts: { secretInputMode: "ref" as const }, // pragma: allowlist secret
|
||||
expectKey: undefined,
|
||||
expectKeyRef: {
|
||||
|
|
@ -170,54 +141,68 @@ describe("applyAuthChoiceMiniMax", () => {
|
|||
},
|
||||
])("$name", async ({ opts, expectKey, expectKeyRef, expectConfirmCalls }) => {
|
||||
const { agentDir, result, text, confirm } = await runMiniMaxChoice({
|
||||
authChoice: "minimax-api-key-cn",
|
||||
authChoice: "minimax-cn-api",
|
||||
opts,
|
||||
env: { apiKey: "mm-env-token" }, // pragma: allowlist secret
|
||||
});
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
if (!opts) {
|
||||
expect(result?.config.auth?.profiles?.["minimax-cn:default"]).toMatchObject({
|
||||
provider: "minimax-cn",
|
||||
expect(result?.config.auth?.profiles?.["minimax:cn"]).toMatchObject({
|
||||
provider: "minimax",
|
||||
mode: "api_key",
|
||||
});
|
||||
expect(resolveAgentModelPrimaryValue(result?.config.agents?.defaults?.model)).toBe(
|
||||
"minimax-cn/MiniMax-M2.5",
|
||||
"minimax/MiniMax-M2.5",
|
||||
);
|
||||
}
|
||||
expect(text).not.toHaveBeenCalled();
|
||||
expect(confirm).toHaveBeenCalledTimes(expectConfirmCalls);
|
||||
|
||||
const parsed = await readAuthProfiles(agentDir);
|
||||
expect(parsed.profiles?.["minimax-cn:default"]?.key).toBe(expectKey);
|
||||
expect(parsed.profiles?.["minimax:cn"]?.key).toBe(expectKey);
|
||||
if (expectKeyRef) {
|
||||
expect(parsed.profiles?.["minimax-cn:default"]?.keyRef).toEqual(expectKeyRef);
|
||||
expect(parsed.profiles?.["minimax:cn"]?.keyRef).toEqual(expectKeyRef);
|
||||
} else {
|
||||
expect(parsed.profiles?.["minimax-cn:default"]?.keyRef).toBeUndefined();
|
||||
expect(parsed.profiles?.["minimax:cn"]?.keyRef).toBeUndefined();
|
||||
}
|
||||
});
|
||||
|
||||
it("uses minimax-api-lightning default model", async () => {
|
||||
it("minimax-global-api uses minimax:global profile and minimax/MiniMax-M2.5 model", async () => {
|
||||
const { agentDir, result, text, confirm } = await runMiniMaxChoice({
|
||||
authChoice: "minimax-api-lightning",
|
||||
authChoice: "minimax-global-api",
|
||||
opts: {
|
||||
tokenProvider: "minimax",
|
||||
token: "mm-lightning-token",
|
||||
token: "mm-global-token",
|
||||
},
|
||||
});
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.config.auth?.profiles?.["minimax:default"]).toMatchObject({
|
||||
expect(result?.config.auth?.profiles?.["minimax:global"]).toMatchObject({
|
||||
provider: "minimax",
|
||||
mode: "api_key",
|
||||
});
|
||||
expect(resolveAgentModelPrimaryValue(result?.config.agents?.defaults?.model)).toBe(
|
||||
"minimax/MiniMax-M2.5-highspeed",
|
||||
"minimax/MiniMax-M2.5",
|
||||
);
|
||||
expect(result?.config.models?.providers?.minimax?.baseUrl).toContain("minimax.io");
|
||||
expect(text).not.toHaveBeenCalled();
|
||||
expect(confirm).not.toHaveBeenCalled();
|
||||
|
||||
const parsed = await readAuthProfiles(agentDir);
|
||||
expect(parsed.profiles?.["minimax:default"]?.key).toBe("mm-lightning-token");
|
||||
expect(parsed.profiles?.["minimax:global"]?.key).toBe("mm-global-token");
|
||||
});
|
||||
|
||||
it("minimax-cn-api sets CN baseUrl", async () => {
|
||||
const { result } = await runMiniMaxChoice({
|
||||
authChoice: "minimax-cn-api",
|
||||
opts: {
|
||||
tokenProvider: "minimax",
|
||||
token: "mm-cn-token",
|
||||
},
|
||||
});
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.config.models?.providers?.minimax?.baseUrl).toContain("minimaxi.com");
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -12,130 +12,93 @@ import {
|
|||
applyMinimaxApiConfigCn,
|
||||
applyMinimaxApiProviderConfig,
|
||||
applyMinimaxApiProviderConfigCn,
|
||||
applyMinimaxConfig,
|
||||
applyMinimaxProviderConfig,
|
||||
setMinimaxApiKey,
|
||||
} from "./onboard-auth.js";
|
||||
|
||||
export async function applyAuthChoiceMiniMax(
|
||||
params: ApplyAuthChoiceParams,
|
||||
): Promise<ApplyAuthChoiceResult | null> {
|
||||
let nextConfig = params.config;
|
||||
let agentModelOverride: string | undefined;
|
||||
const applyProviderDefaultModel = createAuthChoiceDefaultModelApplierForMutableState(
|
||||
params,
|
||||
() => nextConfig,
|
||||
(config) => (nextConfig = config),
|
||||
() => agentModelOverride,
|
||||
(model) => (agentModelOverride = model),
|
||||
);
|
||||
const requestedSecretInputMode = normalizeSecretInputModeInput(params.opts?.secretInputMode);
|
||||
const ensureMinimaxApiKey = async (opts: {
|
||||
profileId: string;
|
||||
promptMessage: string;
|
||||
}): Promise<void> => {
|
||||
// OAuth paths — delegate to plugin, no API key needed
|
||||
if (params.authChoice === "minimax-global-oauth") {
|
||||
return await applyAuthChoicePluginProvider(params, {
|
||||
authChoice: "minimax-global-oauth",
|
||||
pluginId: "minimax-portal-auth",
|
||||
providerId: "minimax-portal",
|
||||
methodId: "oauth",
|
||||
label: "MiniMax",
|
||||
});
|
||||
}
|
||||
|
||||
if (params.authChoice === "minimax-cn-oauth") {
|
||||
return await applyAuthChoicePluginProvider(params, {
|
||||
authChoice: "minimax-cn-oauth",
|
||||
pluginId: "minimax-portal-auth",
|
||||
providerId: "minimax-portal",
|
||||
methodId: "oauth-cn",
|
||||
label: "MiniMax CN",
|
||||
});
|
||||
}
|
||||
|
||||
// API key paths
|
||||
if (params.authChoice === "minimax-global-api" || params.authChoice === "minimax-cn-api") {
|
||||
const isCn = params.authChoice === "minimax-cn-api";
|
||||
const profileId = isCn ? "minimax:cn" : "minimax:global";
|
||||
const keyLink = isCn
|
||||
? "https://platform.minimaxi.com/user-center/basic-information/interface-key"
|
||||
: "https://platform.minimax.io/user-center/basic-information/interface-key";
|
||||
const promptMessage = `Enter MiniMax ${isCn ? "CN " : ""}API key (sk-api- or sk-cp-)\n${keyLink}`;
|
||||
|
||||
let nextConfig = params.config;
|
||||
let agentModelOverride: string | undefined;
|
||||
const applyProviderDefaultModel = createAuthChoiceDefaultModelApplierForMutableState(
|
||||
params,
|
||||
() => nextConfig,
|
||||
(config) => (nextConfig = config),
|
||||
() => agentModelOverride,
|
||||
(model) => (agentModelOverride = model),
|
||||
);
|
||||
const requestedSecretInputMode = normalizeSecretInputModeInput(params.opts?.secretInputMode);
|
||||
|
||||
// Warn when both Global and CN share the same `minimax` provider entry — configuring one
|
||||
// overwrites the other's baseUrl. Only show when the other profile is already present.
|
||||
const otherProfileId = isCn ? "minimax:global" : "minimax:cn";
|
||||
const hasOtherProfile = Boolean(nextConfig.auth?.profiles?.[otherProfileId]);
|
||||
const noteMessage = hasOtherProfile
|
||||
? `Note: Global and CN both use the "minimax" provider entry. Saving this key will overwrite the existing ${isCn ? "Global" : "CN"} endpoint (${otherProfileId}).`
|
||||
: undefined;
|
||||
|
||||
await ensureApiKeyFromOptionEnvOrPrompt({
|
||||
token: params.opts?.token,
|
||||
tokenProvider: params.opts?.tokenProvider,
|
||||
secretInputMode: requestedSecretInputMode,
|
||||
config: nextConfig,
|
||||
expectedProviders: ["minimax", "minimax-cn"],
|
||||
// Accept "minimax-cn" as a legacy tokenProvider alias for the CN path.
|
||||
expectedProviders: isCn ? ["minimax", "minimax-cn"] : ["minimax"],
|
||||
provider: "minimax",
|
||||
envLabel: "MINIMAX_API_KEY",
|
||||
promptMessage: opts.promptMessage,
|
||||
promptMessage,
|
||||
normalize: normalizeApiKeyInput,
|
||||
validate: validateApiKeyInput,
|
||||
prompter: params.prompter,
|
||||
noteMessage,
|
||||
setCredential: async (apiKey, mode) =>
|
||||
setMinimaxApiKey(apiKey, params.agentDir, opts.profileId, { secretInputMode: mode }),
|
||||
});
|
||||
};
|
||||
const applyMinimaxApiVariant = async (opts: {
|
||||
profileId: string;
|
||||
provider: "minimax" | "minimax-cn";
|
||||
promptMessage: string;
|
||||
modelRefPrefix: "minimax" | "minimax-cn";
|
||||
modelId: string;
|
||||
applyDefaultConfig: (
|
||||
config: ApplyAuthChoiceParams["config"],
|
||||
modelId: string,
|
||||
) => ApplyAuthChoiceParams["config"];
|
||||
applyProviderConfig: (
|
||||
config: ApplyAuthChoiceParams["config"],
|
||||
modelId: string,
|
||||
) => ApplyAuthChoiceParams["config"];
|
||||
}): Promise<ApplyAuthChoiceResult> => {
|
||||
await ensureMinimaxApiKey({
|
||||
profileId: opts.profileId,
|
||||
promptMessage: opts.promptMessage,
|
||||
setMinimaxApiKey(apiKey, params.agentDir, profileId, { secretInputMode: mode }),
|
||||
});
|
||||
|
||||
nextConfig = applyAuthProfileConfig(nextConfig, {
|
||||
profileId: opts.profileId,
|
||||
provider: opts.provider,
|
||||
profileId,
|
||||
provider: "minimax",
|
||||
mode: "api_key",
|
||||
});
|
||||
const modelRef = `${opts.modelRefPrefix}/${opts.modelId}`;
|
||||
|
||||
await applyProviderDefaultModel({
|
||||
defaultModel: modelRef,
|
||||
applyDefaultConfig: (config) => opts.applyDefaultConfig(config, opts.modelId),
|
||||
applyProviderConfig: (config) => opts.applyProviderConfig(config, opts.modelId),
|
||||
});
|
||||
return { config: nextConfig, agentModelOverride };
|
||||
};
|
||||
if (params.authChoice === "minimax-portal") {
|
||||
// Let user choose between Global/CN endpoints
|
||||
const endpoint = await params.prompter.select({
|
||||
message: "Select MiniMax endpoint",
|
||||
options: [
|
||||
{ value: "oauth", label: "Global", hint: "OAuth for international users" },
|
||||
{ value: "oauth-cn", label: "CN", hint: "OAuth for users in China" },
|
||||
],
|
||||
defaultModel: "minimax/MiniMax-M2.5",
|
||||
applyDefaultConfig: (config) =>
|
||||
isCn ? applyMinimaxApiConfigCn(config) : applyMinimaxApiConfig(config),
|
||||
applyProviderConfig: (config) =>
|
||||
isCn ? applyMinimaxApiProviderConfigCn(config) : applyMinimaxApiProviderConfig(config),
|
||||
});
|
||||
|
||||
return await applyAuthChoicePluginProvider(params, {
|
||||
authChoice: "minimax-portal",
|
||||
pluginId: "minimax-portal-auth",
|
||||
providerId: "minimax-portal",
|
||||
methodId: endpoint,
|
||||
label: "MiniMax",
|
||||
});
|
||||
}
|
||||
|
||||
if (
|
||||
params.authChoice === "minimax-cloud" ||
|
||||
params.authChoice === "minimax-api" ||
|
||||
params.authChoice === "minimax-api-lightning"
|
||||
) {
|
||||
return await applyMinimaxApiVariant({
|
||||
profileId: "minimax:default",
|
||||
provider: "minimax",
|
||||
promptMessage: "Enter MiniMax API key",
|
||||
modelRefPrefix: "minimax",
|
||||
modelId:
|
||||
params.authChoice === "minimax-api-lightning" ? "MiniMax-M2.5-highspeed" : "MiniMax-M2.5",
|
||||
applyDefaultConfig: applyMinimaxApiConfig,
|
||||
applyProviderConfig: applyMinimaxApiProviderConfig,
|
||||
});
|
||||
}
|
||||
|
||||
if (params.authChoice === "minimax-api-key-cn") {
|
||||
return await applyMinimaxApiVariant({
|
||||
profileId: "minimax-cn:default",
|
||||
provider: "minimax-cn",
|
||||
promptMessage: "Enter MiniMax China API key",
|
||||
modelRefPrefix: "minimax-cn",
|
||||
modelId: "MiniMax-M2.5",
|
||||
applyDefaultConfig: applyMinimaxApiConfigCn,
|
||||
applyProviderConfig: applyMinimaxApiProviderConfigCn,
|
||||
});
|
||||
}
|
||||
|
||||
if (params.authChoice === "minimax") {
|
||||
await applyProviderDefaultModel({
|
||||
defaultModel: "lmstudio/minimax-m2.5-gs32",
|
||||
applyDefaultConfig: applyMinimaxConfig,
|
||||
applyProviderConfig: applyMinimaxProviderConfig,
|
||||
});
|
||||
return { config: nextConfig, agentModelOverride };
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -34,11 +34,10 @@ const PREFERRED_PROVIDER_BY_AUTH_CHOICE: Partial<Record<AuthChoice, string>> = {
|
|||
"huggingface-api-key": "huggingface",
|
||||
"github-copilot": "github-copilot",
|
||||
"copilot-proxy": "copilot-proxy",
|
||||
"minimax-cloud": "minimax",
|
||||
"minimax-api": "minimax",
|
||||
"minimax-api-key-cn": "minimax-cn",
|
||||
"minimax-api-lightning": "minimax",
|
||||
minimax: "lmstudio",
|
||||
"minimax-global-oauth": "minimax-portal",
|
||||
"minimax-global-api": "minimax",
|
||||
"minimax-cn-oauth": "minimax-portal",
|
||||
"minimax-cn-api": "minimax",
|
||||
"opencode-zen": "opencode",
|
||||
"opencode-go": "opencode-go",
|
||||
"xai-api-key": "xai",
|
||||
|
|
@ -46,7 +45,6 @@ const PREFERRED_PROVIDER_BY_AUTH_CHOICE: Partial<Record<AuthChoice, string>> = {
|
|||
"qwen-portal": "qwen-portal",
|
||||
"volcengine-api-key": "volcengine",
|
||||
"byteplus-api-key": "byteplus",
|
||||
"minimax-portal": "minimax-portal",
|
||||
"qianfan-api-key": "qianfan",
|
||||
"custom-api-key": "custom",
|
||||
};
|
||||
|
|
|
|||
|
|
@ -208,8 +208,8 @@ describe("applyAuthChoice", () => {
|
|||
it("prompts and writes provider API key for common providers", async () => {
|
||||
const scenarios: Array<{
|
||||
authChoice:
|
||||
| "minimax-api"
|
||||
| "minimax-api-key-cn"
|
||||
| "minimax-global-api"
|
||||
| "minimax-cn-api"
|
||||
| "synthetic-api-key"
|
||||
| "huggingface-api-key";
|
||||
promptContains: string;
|
||||
|
|
@ -220,17 +220,17 @@ describe("applyAuthChoice", () => {
|
|||
expectedModelPrefix?: string;
|
||||
}> = [
|
||||
{
|
||||
authChoice: "minimax-api" as const,
|
||||
authChoice: "minimax-global-api" as const,
|
||||
promptContains: "Enter MiniMax API key",
|
||||
profileId: "minimax:default",
|
||||
profileId: "minimax:global",
|
||||
provider: "minimax",
|
||||
token: "sk-minimax-test",
|
||||
},
|
||||
{
|
||||
authChoice: "minimax-api-key-cn" as const,
|
||||
promptContains: "Enter MiniMax China API key",
|
||||
profileId: "minimax-cn:default",
|
||||
provider: "minimax-cn",
|
||||
authChoice: "minimax-cn-api" as const,
|
||||
promptContains: "Enter MiniMax CN API key",
|
||||
profileId: "minimax:cn",
|
||||
provider: "minimax",
|
||||
token: "sk-minimax-test",
|
||||
expectedBaseUrl: MINIMAX_CN_API_BASE_URL,
|
||||
},
|
||||
|
|
@ -1243,7 +1243,7 @@ describe("applyAuthChoice", () => {
|
|||
|
||||
it("writes portal OAuth credentials for plugin providers", async () => {
|
||||
const scenarios: Array<{
|
||||
authChoice: "qwen-portal" | "minimax-portal";
|
||||
authChoice: "qwen-portal" | "minimax-global-oauth";
|
||||
label: string;
|
||||
authId: string;
|
||||
authLabel: string;
|
||||
|
|
@ -1268,7 +1268,7 @@ describe("applyAuthChoice", () => {
|
|||
apiKey: "qwen-oauth", // pragma: allowlist secret
|
||||
},
|
||||
{
|
||||
authChoice: "minimax-portal",
|
||||
authChoice: "minimax-global-oauth",
|
||||
label: "MiniMax",
|
||||
authId: "oauth",
|
||||
authLabel: "MiniMax OAuth (Global)",
|
||||
|
|
@ -1278,7 +1278,6 @@ describe("applyAuthChoice", () => {
|
|||
api: "anthropic-messages",
|
||||
defaultModel: "minimax-portal/MiniMax-M2.5",
|
||||
apiKey: "minimax-oauth", // pragma: allowlist secret
|
||||
selectValue: "oauth",
|
||||
},
|
||||
];
|
||||
for (const scenario of scenarios) {
|
||||
|
|
|
|||
|
|
@ -1,5 +1,4 @@
|
|||
import type { OpenClawConfig } from "../config/config.js";
|
||||
import { toAgentModelListLike } from "../config/model-input.js";
|
||||
import type { ModelProviderConfig } from "../config/types.models.js";
|
||||
import {
|
||||
applyAgentDefaultModelPrimary,
|
||||
|
|
@ -7,154 +6,10 @@ import {
|
|||
} from "./onboard-auth.config-shared.js";
|
||||
import {
|
||||
buildMinimaxApiModelDefinition,
|
||||
buildMinimaxModelDefinition,
|
||||
DEFAULT_MINIMAX_BASE_URL,
|
||||
DEFAULT_MINIMAX_CONTEXT_WINDOW,
|
||||
DEFAULT_MINIMAX_MAX_TOKENS,
|
||||
MINIMAX_API_BASE_URL,
|
||||
MINIMAX_CN_API_BASE_URL,
|
||||
MINIMAX_HOSTED_COST,
|
||||
MINIMAX_HOSTED_MODEL_ID,
|
||||
MINIMAX_HOSTED_MODEL_REF,
|
||||
MINIMAX_LM_STUDIO_COST,
|
||||
} from "./onboard-auth.models.js";
|
||||
|
||||
export function applyMinimaxProviderConfig(cfg: OpenClawConfig): OpenClawConfig {
|
||||
const models = { ...cfg.agents?.defaults?.models };
|
||||
models["anthropic/claude-opus-4-6"] = {
|
||||
...models["anthropic/claude-opus-4-6"],
|
||||
alias: models["anthropic/claude-opus-4-6"]?.alias ?? "Opus",
|
||||
};
|
||||
models["lmstudio/minimax-m2.5-gs32"] = {
|
||||
...models["lmstudio/minimax-m2.5-gs32"],
|
||||
alias: models["lmstudio/minimax-m2.5-gs32"]?.alias ?? "Minimax",
|
||||
};
|
||||
|
||||
const providers = { ...cfg.models?.providers };
|
||||
if (!providers.lmstudio) {
|
||||
providers.lmstudio = {
|
||||
baseUrl: "http://127.0.0.1:1234/v1",
|
||||
apiKey: "lmstudio",
|
||||
api: "openai-responses",
|
||||
models: [
|
||||
buildMinimaxModelDefinition({
|
||||
id: "minimax-m2.5-gs32",
|
||||
name: "MiniMax M2.5 GS32",
|
||||
reasoning: false,
|
||||
cost: MINIMAX_LM_STUDIO_COST,
|
||||
contextWindow: 196608,
|
||||
maxTokens: 8192,
|
||||
}),
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
return applyOnboardAuthAgentModelsAndProviders(cfg, { agentModels: models, providers });
|
||||
}
|
||||
|
||||
export function applyMinimaxHostedProviderConfig(
|
||||
cfg: OpenClawConfig,
|
||||
params?: { baseUrl?: string },
|
||||
): OpenClawConfig {
|
||||
const models = { ...cfg.agents?.defaults?.models };
|
||||
models[MINIMAX_HOSTED_MODEL_REF] = {
|
||||
...models[MINIMAX_HOSTED_MODEL_REF],
|
||||
alias: models[MINIMAX_HOSTED_MODEL_REF]?.alias ?? "Minimax",
|
||||
};
|
||||
|
||||
const providers = { ...cfg.models?.providers };
|
||||
const hostedModel = buildMinimaxModelDefinition({
|
||||
id: MINIMAX_HOSTED_MODEL_ID,
|
||||
cost: MINIMAX_HOSTED_COST,
|
||||
contextWindow: DEFAULT_MINIMAX_CONTEXT_WINDOW,
|
||||
maxTokens: DEFAULT_MINIMAX_MAX_TOKENS,
|
||||
});
|
||||
const existingProvider = providers.minimax;
|
||||
const existingModels = Array.isArray(existingProvider?.models) ? existingProvider.models : [];
|
||||
const hasHostedModel = existingModels.some((model) => model.id === MINIMAX_HOSTED_MODEL_ID);
|
||||
const mergedModels = hasHostedModel ? existingModels : [...existingModels, hostedModel];
|
||||
providers.minimax = {
|
||||
...existingProvider,
|
||||
baseUrl: params?.baseUrl?.trim() || DEFAULT_MINIMAX_BASE_URL,
|
||||
apiKey: "minimax",
|
||||
api: "openai-completions",
|
||||
models: mergedModels.length > 0 ? mergedModels : [hostedModel],
|
||||
};
|
||||
|
||||
return applyOnboardAuthAgentModelsAndProviders(cfg, { agentModels: models, providers });
|
||||
}
|
||||
|
||||
export function applyMinimaxConfig(cfg: OpenClawConfig): OpenClawConfig {
|
||||
const next = applyMinimaxProviderConfig(cfg);
|
||||
return applyAgentDefaultModelPrimary(next, "lmstudio/minimax-m2.5-gs32");
|
||||
}
|
||||
|
||||
export function applyMinimaxHostedConfig(
|
||||
cfg: OpenClawConfig,
|
||||
params?: { baseUrl?: string },
|
||||
): OpenClawConfig {
|
||||
const next = applyMinimaxHostedProviderConfig(cfg, params);
|
||||
return {
|
||||
...next,
|
||||
agents: {
|
||||
...next.agents,
|
||||
defaults: {
|
||||
...next.agents?.defaults,
|
||||
model: {
|
||||
...toAgentModelListLike(next.agents?.defaults?.model),
|
||||
primary: MINIMAX_HOSTED_MODEL_REF,
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// MiniMax Anthropic-compatible API (platform.minimax.io/anthropic)
|
||||
export function applyMinimaxApiProviderConfig(
|
||||
cfg: OpenClawConfig,
|
||||
modelId: string = "MiniMax-M2.5",
|
||||
): OpenClawConfig {
|
||||
return applyMinimaxApiProviderConfigWithBaseUrl(cfg, {
|
||||
providerId: "minimax",
|
||||
modelId,
|
||||
baseUrl: MINIMAX_API_BASE_URL,
|
||||
});
|
||||
}
|
||||
|
||||
export function applyMinimaxApiConfig(
|
||||
cfg: OpenClawConfig,
|
||||
modelId: string = "MiniMax-M2.5",
|
||||
): OpenClawConfig {
|
||||
return applyMinimaxApiConfigWithBaseUrl(cfg, {
|
||||
providerId: "minimax",
|
||||
modelId,
|
||||
baseUrl: MINIMAX_API_BASE_URL,
|
||||
});
|
||||
}
|
||||
|
||||
// MiniMax China API (api.minimaxi.com)
|
||||
export function applyMinimaxApiProviderConfigCn(
|
||||
cfg: OpenClawConfig,
|
||||
modelId: string = "MiniMax-M2.5",
|
||||
): OpenClawConfig {
|
||||
return applyMinimaxApiProviderConfigWithBaseUrl(cfg, {
|
||||
providerId: "minimax-cn",
|
||||
modelId,
|
||||
baseUrl: MINIMAX_CN_API_BASE_URL,
|
||||
});
|
||||
}
|
||||
|
||||
export function applyMinimaxApiConfigCn(
|
||||
cfg: OpenClawConfig,
|
||||
modelId: string = "MiniMax-M2.5",
|
||||
): OpenClawConfig {
|
||||
return applyMinimaxApiConfigWithBaseUrl(cfg, {
|
||||
providerId: "minimax-cn",
|
||||
modelId,
|
||||
baseUrl: MINIMAX_CN_API_BASE_URL,
|
||||
});
|
||||
}
|
||||
|
||||
type MinimaxApiProviderConfigParams = {
|
||||
providerId: string;
|
||||
modelId: string;
|
||||
|
|
@ -193,17 +48,7 @@ function applyMinimaxApiProviderConfigWithBaseUrl(
|
|||
alias: "Minimax",
|
||||
};
|
||||
|
||||
return {
|
||||
...cfg,
|
||||
agents: {
|
||||
...cfg.agents,
|
||||
defaults: {
|
||||
...cfg.agents?.defaults,
|
||||
models,
|
||||
},
|
||||
},
|
||||
models: { mode: cfg.models?.mode ?? "merge", providers },
|
||||
};
|
||||
return applyOnboardAuthAgentModelsAndProviders(cfg, { agentModels: models, providers });
|
||||
}
|
||||
|
||||
function applyMinimaxApiConfigWithBaseUrl(
|
||||
|
|
@ -213,3 +58,49 @@ function applyMinimaxApiConfigWithBaseUrl(
|
|||
const next = applyMinimaxApiProviderConfigWithBaseUrl(cfg, params);
|
||||
return applyAgentDefaultModelPrimary(next, `${params.providerId}/${params.modelId}`);
|
||||
}
|
||||
|
||||
// MiniMax Global API (platform.minimax.io/anthropic)
|
||||
export function applyMinimaxApiProviderConfig(
|
||||
cfg: OpenClawConfig,
|
||||
modelId: string = "MiniMax-M2.5",
|
||||
): OpenClawConfig {
|
||||
return applyMinimaxApiProviderConfigWithBaseUrl(cfg, {
|
||||
providerId: "minimax",
|
||||
modelId,
|
||||
baseUrl: MINIMAX_API_BASE_URL,
|
||||
});
|
||||
}
|
||||
|
||||
export function applyMinimaxApiConfig(
|
||||
cfg: OpenClawConfig,
|
||||
modelId: string = "MiniMax-M2.5",
|
||||
): OpenClawConfig {
|
||||
return applyMinimaxApiConfigWithBaseUrl(cfg, {
|
||||
providerId: "minimax",
|
||||
modelId,
|
||||
baseUrl: MINIMAX_API_BASE_URL,
|
||||
});
|
||||
}
|
||||
|
||||
// MiniMax CN API (api.minimaxi.com/anthropic) — same provider id, different baseUrl
|
||||
export function applyMinimaxApiProviderConfigCn(
|
||||
cfg: OpenClawConfig,
|
||||
modelId: string = "MiniMax-M2.5",
|
||||
): OpenClawConfig {
|
||||
return applyMinimaxApiProviderConfigWithBaseUrl(cfg, {
|
||||
providerId: "minimax",
|
||||
modelId,
|
||||
baseUrl: MINIMAX_CN_API_BASE_URL,
|
||||
});
|
||||
}
|
||||
|
||||
export function applyMinimaxApiConfigCn(
|
||||
cfg: OpenClawConfig,
|
||||
modelId: string = "MiniMax-M2.5",
|
||||
): OpenClawConfig {
|
||||
return applyMinimaxApiConfigWithBaseUrl(cfg, {
|
||||
providerId: "minimax",
|
||||
modelId,
|
||||
baseUrl: MINIMAX_CN_API_BASE_URL,
|
||||
});
|
||||
}
|
||||
|
|
|
|||
|
|
@ -50,10 +50,6 @@ export {
|
|||
applyMinimaxApiConfigCn,
|
||||
applyMinimaxApiProviderConfig,
|
||||
applyMinimaxApiProviderConfigCn,
|
||||
applyMinimaxConfig,
|
||||
applyMinimaxHostedConfig,
|
||||
applyMinimaxHostedProviderConfig,
|
||||
applyMinimaxProviderConfig,
|
||||
} from "./onboard-auth.config-minimax.js";
|
||||
|
||||
export {
|
||||
|
|
|
|||
|
|
@ -183,16 +183,16 @@ describe("onboard (non-interactive): provider auth", () => {
|
|||
it("stores MiniMax API key and uses global baseUrl by default", async () => {
|
||||
await withOnboardEnv("openclaw-onboard-minimax-", async (env) => {
|
||||
const cfg = await runOnboardingAndReadConfig(env, {
|
||||
authChoice: "minimax-api",
|
||||
authChoice: "minimax-global-api",
|
||||
minimaxApiKey: "sk-minimax-test", // pragma: allowlist secret
|
||||
});
|
||||
|
||||
expect(cfg.auth?.profiles?.["minimax:default"]?.provider).toBe("minimax");
|
||||
expect(cfg.auth?.profiles?.["minimax:default"]?.mode).toBe("api_key");
|
||||
expect(cfg.auth?.profiles?.["minimax:global"]?.provider).toBe("minimax");
|
||||
expect(cfg.auth?.profiles?.["minimax:global"]?.mode).toBe("api_key");
|
||||
expect(cfg.models?.providers?.minimax?.baseUrl).toBe(MINIMAX_API_BASE_URL);
|
||||
expect(cfg.agents?.defaults?.model?.primary).toBe("minimax/MiniMax-M2.5");
|
||||
await expectApiKeyProfile({
|
||||
profileId: "minimax:default",
|
||||
profileId: "minimax:global",
|
||||
provider: "minimax",
|
||||
key: "sk-minimax-test",
|
||||
});
|
||||
|
|
@ -202,17 +202,17 @@ describe("onboard (non-interactive): provider auth", () => {
|
|||
it("supports MiniMax CN API endpoint auth choice", async () => {
|
||||
await withOnboardEnv("openclaw-onboard-minimax-cn-", async (env) => {
|
||||
const cfg = await runOnboardingAndReadConfig(env, {
|
||||
authChoice: "minimax-api-key-cn",
|
||||
authChoice: "minimax-cn-api",
|
||||
minimaxApiKey: "sk-minimax-test", // pragma: allowlist secret
|
||||
});
|
||||
|
||||
expect(cfg.auth?.profiles?.["minimax-cn:default"]?.provider).toBe("minimax-cn");
|
||||
expect(cfg.auth?.profiles?.["minimax-cn:default"]?.mode).toBe("api_key");
|
||||
expect(cfg.models?.providers?.["minimax-cn"]?.baseUrl).toBe(MINIMAX_CN_API_BASE_URL);
|
||||
expect(cfg.agents?.defaults?.model?.primary).toBe("minimax-cn/MiniMax-M2.5");
|
||||
expect(cfg.auth?.profiles?.["minimax:cn"]?.provider).toBe("minimax");
|
||||
expect(cfg.auth?.profiles?.["minimax:cn"]?.mode).toBe("api_key");
|
||||
expect(cfg.models?.providers?.minimax?.baseUrl).toBe(MINIMAX_CN_API_BASE_URL);
|
||||
expect(cfg.agents?.defaults?.model?.primary).toBe("minimax/MiniMax-M2.5");
|
||||
await expectApiKeyProfile({
|
||||
profileId: "minimax-cn:default",
|
||||
provider: "minimax-cn",
|
||||
profileId: "minimax:cn",
|
||||
provider: "minimax",
|
||||
key: "sk-minimax-test",
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -21,7 +21,6 @@ import {
|
|||
applyKimiCodeConfig,
|
||||
applyMinimaxApiConfig,
|
||||
applyMinimaxApiConfigCn,
|
||||
applyMinimaxConfig,
|
||||
applyMoonshotConfig,
|
||||
applyMoonshotConfigCn,
|
||||
applyOpencodeGoConfig,
|
||||
|
|
@ -863,22 +862,37 @@ export async function applyNonInteractiveAuthChoice(params: {
|
|||
return applyVeniceConfig(nextConfig);
|
||||
}
|
||||
|
||||
if (
|
||||
authChoice === "minimax-cloud" ||
|
||||
authChoice === "minimax-api" ||
|
||||
authChoice === "minimax-api-key-cn" ||
|
||||
authChoice === "minimax-api-lightning"
|
||||
) {
|
||||
const isCn = authChoice === "minimax-api-key-cn";
|
||||
const providerId = isCn ? "minimax-cn" : "minimax";
|
||||
const profileId = `${providerId}:default`;
|
||||
// Legacy aliases: these choice values were removed; fail with an actionable message so
|
||||
// existing CI automation gets a clear error instead of silently exiting 0 with no auth.
|
||||
const REMOVED_MINIMAX_CHOICES: Record<string, string> = {
|
||||
minimax: "minimax-global-api",
|
||||
"minimax-api": "minimax-global-api",
|
||||
"minimax-cloud": "minimax-global-api",
|
||||
"minimax-api-lightning": "minimax-global-api",
|
||||
"minimax-api-key-cn": "minimax-cn-api",
|
||||
};
|
||||
if (Object.prototype.hasOwnProperty.call(REMOVED_MINIMAX_CHOICES, authChoice as string)) {
|
||||
const replacement = REMOVED_MINIMAX_CHOICES[authChoice as string];
|
||||
runtime.error(
|
||||
`"${authChoice as string}" is no longer supported. Use --auth-choice ${replacement} instead.`,
|
||||
);
|
||||
runtime.exit(1);
|
||||
return null;
|
||||
}
|
||||
|
||||
if (authChoice === "minimax-global-api" || authChoice === "minimax-cn-api") {
|
||||
const isCn = authChoice === "minimax-cn-api";
|
||||
const profileId = isCn ? "minimax:cn" : "minimax:global";
|
||||
const resolved = await resolveApiKey({
|
||||
provider: providerId,
|
||||
provider: "minimax",
|
||||
cfg: baseConfig,
|
||||
flagValue: opts.minimaxApiKey,
|
||||
flagName: "--minimax-api-key",
|
||||
envVar: "MINIMAX_API_KEY",
|
||||
runtime,
|
||||
// Disable profile fallback: both regions share provider "minimax", so an existing
|
||||
// Global profile key must not be silently reused when configuring CN (and vice versa).
|
||||
allowProfile: false,
|
||||
});
|
||||
if (!resolved) {
|
||||
return null;
|
||||
|
|
@ -892,18 +906,10 @@ export async function applyNonInteractiveAuthChoice(params: {
|
|||
}
|
||||
nextConfig = applyAuthProfileConfig(nextConfig, {
|
||||
profileId,
|
||||
provider: providerId,
|
||||
provider: "minimax",
|
||||
mode: "api_key",
|
||||
});
|
||||
const modelId =
|
||||
authChoice === "minimax-api-lightning" ? "MiniMax-M2.5-highspeed" : "MiniMax-M2.5";
|
||||
return isCn
|
||||
? applyMinimaxApiConfigCn(nextConfig, modelId)
|
||||
: applyMinimaxApiConfig(nextConfig, modelId);
|
||||
}
|
||||
|
||||
if (authChoice === "minimax") {
|
||||
return applyMinimaxConfig(nextConfig);
|
||||
return isCn ? applyMinimaxApiConfigCn(nextConfig) : applyMinimaxApiConfig(nextConfig);
|
||||
}
|
||||
|
||||
if (authChoice === "opencode-zen") {
|
||||
|
|
@ -1091,7 +1097,8 @@ export async function applyNonInteractiveAuthChoice(params: {
|
|||
authChoice === "chutes" ||
|
||||
authChoice === "openai-codex" ||
|
||||
authChoice === "qwen-portal" ||
|
||||
authChoice === "minimax-portal"
|
||||
authChoice === "minimax-global-oauth" ||
|
||||
authChoice === "minimax-cn-oauth"
|
||||
) {
|
||||
runtime.error("OAuth requires interactive mode.");
|
||||
runtime.exit(1);
|
||||
|
|
|
|||
|
|
@ -126,7 +126,7 @@ export const ONBOARD_PROVIDER_AUTH_FLAGS: ReadonlyArray<OnboardProviderAuthFlag>
|
|||
},
|
||||
{
|
||||
optionKey: "minimaxApiKey",
|
||||
authChoice: "minimax-api",
|
||||
authChoice: "minimax-global-api",
|
||||
cliFlag: "--minimax-api-key",
|
||||
cliOption: "--minimax-api-key <key>",
|
||||
description: "MiniMax API key",
|
||||
|
|
|
|||
|
|
@ -35,12 +35,10 @@ export type AuthChoice =
|
|||
| "zai-global"
|
||||
| "zai-cn"
|
||||
| "xiaomi-api-key"
|
||||
| "minimax-cloud"
|
||||
| "minimax"
|
||||
| "minimax-api"
|
||||
| "minimax-api-key-cn"
|
||||
| "minimax-api-lightning"
|
||||
| "minimax-portal"
|
||||
| "minimax-global-oauth"
|
||||
| "minimax-global-api"
|
||||
| "minimax-cn-oauth"
|
||||
| "minimax-cn-api"
|
||||
| "opencode-zen"
|
||||
| "opencode-go"
|
||||
| "github-copilot"
|
||||
|
|
|
|||
|
|
@ -934,6 +934,8 @@ export const FIELD_HELP: Record<string, string> = {
|
|||
"Requires at least this many appended transcript messages before reindex is triggered (default: 50). Lower this for near-real-time transcript recall, or raise it to reduce indexing churn.",
|
||||
"agents.defaults.memorySearch.sync.sessions.includeResetArchives":
|
||||
"Includes reset transcript archives (`*.jsonl.reset.<timestamp>`) in builtin session-memory indexing (default: false). Enable only when reset snapshots should remain searchable.",
|
||||
"agents.defaults.memorySearch.sync.sessions.postCompactionForce":
|
||||
"Forces a session memory-search reindex after compaction-triggered transcript updates (default: true). Keep enabled when compacted summaries must be immediately searchable, or disable to reduce write-time indexing pressure.",
|
||||
ui: "UI presentation settings for accenting and assistant identity shown in control surfaces. Use this for branding and readability customization without changing runtime behavior.",
|
||||
"ui.seamColor":
|
||||
"Primary accent/seam color used by UI surfaces for emphasis, badges, and visual identity cues. Use high-contrast values that remain readable across light/dark themes.",
|
||||
|
|
@ -1037,6 +1039,8 @@ export const FIELD_HELP: Record<string, string> = {
|
|||
"Enables summary quality audits and regeneration retries for safeguard compaction. Default: false, so safeguard mode alone does not turn on retry behavior.",
|
||||
"agents.defaults.compaction.qualityGuard.maxRetries":
|
||||
"Maximum number of regeneration retries after a failed safeguard summary quality audit. Use small values to bound extra latency and token cost.",
|
||||
"agents.defaults.compaction.postIndexSync":
|
||||
'Controls post-compaction session memory reindex mode: "off", "async", or "await" (default: "async"). Use "await" for strongest freshness, "async" for lower compaction latency, and "off" only when session-memory sync is handled elsewhere.',
|
||||
"agents.defaults.compaction.postCompactionSections":
|
||||
'AGENTS.md H2/H3 section names re-injected after compaction so the agent reruns critical startup guidance. Leave unset to use "Session Startup"/"Red Lines" with legacy fallback to "Every Session"/"Safety"; set to [] to disable reinjection entirely.',
|
||||
"agents.defaults.compaction.model":
|
||||
|
|
|
|||
|
|
@ -356,6 +356,8 @@ export const FIELD_LABELS: Record<string, string> = {
|
|||
"agents.defaults.memorySearch.sync.sessions.deltaMessages": "Session Delta Messages",
|
||||
"agents.defaults.memorySearch.sync.sessions.includeResetArchives":
|
||||
"Include Reset Session Archives",
|
||||
"agents.defaults.memorySearch.sync.sessions.postCompactionForce":
|
||||
"Force Reindex After Compaction",
|
||||
"agents.defaults.memorySearch.query.maxResults": "Memory Search Max Results",
|
||||
"agents.defaults.memorySearch.query.minScore": "Memory Search Min Score",
|
||||
"agents.defaults.memorySearch.query.hybrid.enabled": "Memory Search Hybrid",
|
||||
|
|
@ -471,6 +473,7 @@ export const FIELD_LABELS: Record<string, string> = {
|
|||
"agents.defaults.compaction.qualityGuard": "Compaction Quality Guard",
|
||||
"agents.defaults.compaction.qualityGuard.enabled": "Compaction Quality Guard Enabled",
|
||||
"agents.defaults.compaction.qualityGuard.maxRetries": "Compaction Quality Guard Max Retries",
|
||||
"agents.defaults.compaction.postIndexSync": "Compaction Post-Index Sync",
|
||||
"agents.defaults.compaction.postCompactionSections": "Post-Compaction Context Sections",
|
||||
"agents.defaults.compaction.model": "Compaction Model Override",
|
||||
"agents.defaults.compaction.memoryFlush": "Compaction Memory Flush",
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
import fsSync from "node:fs";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { describe, expect, it } from "vitest";
|
||||
|
|
@ -10,7 +11,8 @@ import {
|
|||
} from "./targets.js";
|
||||
|
||||
async function resolveRealStorePath(sessionsDir: string): Promise<string> {
|
||||
return await fs.realpath(path.join(sessionsDir, "sessions.json"));
|
||||
// Match the native realpath behavior used by both discovery paths.
|
||||
return fsSync.realpathSync.native(path.join(sessionsDir, "sessions.json"));
|
||||
}
|
||||
|
||||
describe("resolveSessionStoreTargets", () => {
|
||||
|
|
|
|||
|
|
@ -68,8 +68,8 @@ function resolveValidatedDiscoveredStorePathSync(params: {
|
|||
if (stat.isSymbolicLink() || !stat.isFile()) {
|
||||
return undefined;
|
||||
}
|
||||
const realStorePath = fsSync.realpathSync(storePath);
|
||||
const realAgentsRoot = params.realAgentsRoot ?? fsSync.realpathSync(params.agentsRoot);
|
||||
const realStorePath = fsSync.realpathSync.native(storePath);
|
||||
const realAgentsRoot = params.realAgentsRoot ?? fsSync.realpathSync.native(params.agentsRoot);
|
||||
return isWithinRoot(realStorePath, realAgentsRoot) ? realStorePath : undefined;
|
||||
} catch (err) {
|
||||
if (shouldSkipDiscoveryError(err)) {
|
||||
|
|
@ -153,7 +153,7 @@ export function resolveAllAgentSessionStoreTargetsSync(
|
|||
return cached;
|
||||
}
|
||||
try {
|
||||
const realAgentsRoot = fsSync.realpathSync(agentsRoot);
|
||||
const realAgentsRoot = fsSync.realpathSync.native(agentsRoot);
|
||||
realAgentsRoots.set(agentsRoot, realAgentsRoot);
|
||||
return realAgentsRoot;
|
||||
} catch (err) {
|
||||
|
|
|
|||
|
|
@ -287,6 +287,7 @@ export type AgentDefaultsConfig = {
|
|||
};
|
||||
|
||||
export type AgentCompactionMode = "default" | "safeguard";
|
||||
export type AgentCompactionPostIndexSyncMode = "off" | "async" | "await";
|
||||
export type AgentCompactionIdentifierPolicy = "strict" | "off" | "custom";
|
||||
export type AgentCompactionQualityGuardConfig = {
|
||||
/** Enable compaction summary quality audits and regeneration retries. Default: false. */
|
||||
|
|
@ -314,6 +315,8 @@ export type AgentCompactionConfig = {
|
|||
identifierInstructions?: string;
|
||||
/** Optional quality-audit retries for safeguard compaction summaries. */
|
||||
qualityGuard?: AgentCompactionQualityGuardConfig;
|
||||
/** Post-compaction session memory index sync mode. */
|
||||
postIndexSync?: AgentCompactionPostIndexSyncMode;
|
||||
/** Pre-compaction memory flush (agentic turn). Default: enabled. */
|
||||
memoryFlush?: AgentCompactionMemoryFlushConfig;
|
||||
/**
|
||||
|
|
|
|||
|
|
@ -404,6 +404,8 @@ export type MemorySearchConfig = {
|
|||
deltaMessages?: number;
|
||||
/** Include reset transcript archives (`*.jsonl.reset.<timestamp>`) in session indexing. */
|
||||
includeResetArchives?: boolean;
|
||||
/** Force session reindex after compaction-triggered transcript updates (default: true). */
|
||||
postCompactionForce?: boolean;
|
||||
};
|
||||
};
|
||||
/** Query behavior. */
|
||||
|
|
|
|||
|
|
@ -103,6 +103,7 @@ export const AgentDefaultsSchema = z
|
|||
})
|
||||
.strict()
|
||||
.optional(),
|
||||
postIndexSync: z.enum(["off", "async", "await"]).optional(),
|
||||
postCompactionSections: z.array(z.string()).optional(),
|
||||
model: z.string().optional(),
|
||||
memoryFlush: z
|
||||
|
|
|
|||
|
|
@ -650,6 +650,7 @@ export const MemorySearchSchema = z
|
|||
deltaBytes: z.number().int().nonnegative().optional(),
|
||||
deltaMessages: z.number().int().nonnegative().optional(),
|
||||
includeResetArchives: z.boolean().optional(),
|
||||
postCompactionForce: z.boolean().optional(),
|
||||
})
|
||||
.strict()
|
||||
.optional(),
|
||||
|
|
|
|||
|
|
@ -104,8 +104,8 @@ export const TelegramDirectSchema = z
|
|||
|
||||
const TelegramCustomCommandSchema = z
|
||||
.object({
|
||||
command: z.string().transform(normalizeTelegramCommandName),
|
||||
description: z.string().transform(normalizeTelegramCommandDescription),
|
||||
command: z.string().overwrite(normalizeTelegramCommandName),
|
||||
description: z.string().overwrite(normalizeTelegramCommandDescription),
|
||||
})
|
||||
.strict();
|
||||
|
||||
|
|
|
|||
|
|
@ -61,6 +61,7 @@ class MockContextEngine implements ContextEngine {
|
|||
|
||||
async ingest(_params: {
|
||||
sessionId: string;
|
||||
sessionKey?: string;
|
||||
message: AgentMessage;
|
||||
isHeartbeat?: boolean;
|
||||
}): Promise<IngestResult> {
|
||||
|
|
@ -69,6 +70,7 @@ class MockContextEngine implements ContextEngine {
|
|||
|
||||
async assemble(params: {
|
||||
sessionId: string;
|
||||
sessionKey?: string;
|
||||
messages: AgentMessage[];
|
||||
tokenBudget?: number;
|
||||
}): Promise<AssembleResult> {
|
||||
|
|
@ -81,6 +83,7 @@ class MockContextEngine implements ContextEngine {
|
|||
|
||||
async compact(_params: {
|
||||
sessionId: string;
|
||||
sessionKey?: string;
|
||||
sessionFile: string;
|
||||
tokenBudget?: number;
|
||||
compactionTarget?: "budget" | "threshold";
|
||||
|
|
|
|||
|
|
@ -26,6 +26,7 @@ export class LegacyContextEngine implements ContextEngine {
|
|||
|
||||
async ingest(_params: {
|
||||
sessionId: string;
|
||||
sessionKey?: string;
|
||||
message: AgentMessage;
|
||||
isHeartbeat?: boolean;
|
||||
}): Promise<IngestResult> {
|
||||
|
|
@ -35,6 +36,7 @@ export class LegacyContextEngine implements ContextEngine {
|
|||
|
||||
async assemble(params: {
|
||||
sessionId: string;
|
||||
sessionKey?: string;
|
||||
messages: AgentMessage[];
|
||||
tokenBudget?: number;
|
||||
}): Promise<AssembleResult> {
|
||||
|
|
@ -49,6 +51,7 @@ export class LegacyContextEngine implements ContextEngine {
|
|||
|
||||
async afterTurn(_params: {
|
||||
sessionId: string;
|
||||
sessionKey?: string;
|
||||
sessionFile: string;
|
||||
messages: AgentMessage[];
|
||||
prePromptMessageCount: number;
|
||||
|
|
@ -62,6 +65,7 @@ export class LegacyContextEngine implements ContextEngine {
|
|||
|
||||
async compact(params: {
|
||||
sessionId: string;
|
||||
sessionKey?: string;
|
||||
sessionFile: string;
|
||||
tokenBudget?: number;
|
||||
force?: boolean;
|
||||
|
|
|
|||
|
|
@ -72,13 +72,18 @@ export interface ContextEngine {
|
|||
/**
|
||||
* Initialize engine state for a session, optionally importing historical context.
|
||||
*/
|
||||
bootstrap?(params: { sessionId: string; sessionFile: string }): Promise<BootstrapResult>;
|
||||
bootstrap?(params: {
|
||||
sessionId: string;
|
||||
sessionKey?: string;
|
||||
sessionFile: string;
|
||||
}): Promise<BootstrapResult>;
|
||||
|
||||
/**
|
||||
* Ingest a single message into the engine's store.
|
||||
*/
|
||||
ingest(params: {
|
||||
sessionId: string;
|
||||
sessionKey?: string;
|
||||
message: AgentMessage;
|
||||
/** True when the message belongs to a heartbeat run. */
|
||||
isHeartbeat?: boolean;
|
||||
|
|
@ -89,6 +94,7 @@ export interface ContextEngine {
|
|||
*/
|
||||
ingestBatch?(params: {
|
||||
sessionId: string;
|
||||
sessionKey?: string;
|
||||
messages: AgentMessage[];
|
||||
/** True when the batch belongs to a heartbeat run. */
|
||||
isHeartbeat?: boolean;
|
||||
|
|
@ -101,6 +107,7 @@ export interface ContextEngine {
|
|||
*/
|
||||
afterTurn?(params: {
|
||||
sessionId: string;
|
||||
sessionKey?: string;
|
||||
sessionFile: string;
|
||||
messages: AgentMessage[];
|
||||
/** Number of messages that existed before the prompt was sent. */
|
||||
|
|
@ -121,6 +128,7 @@ export interface ContextEngine {
|
|||
*/
|
||||
assemble(params: {
|
||||
sessionId: string;
|
||||
sessionKey?: string;
|
||||
messages: AgentMessage[];
|
||||
tokenBudget?: number;
|
||||
}): Promise<AssembleResult>;
|
||||
|
|
@ -131,6 +139,7 @@ export interface ContextEngine {
|
|||
*/
|
||||
compact(params: {
|
||||
sessionId: string;
|
||||
sessionKey?: string;
|
||||
sessionFile: string;
|
||||
tokenBudget?: number;
|
||||
/** Force compaction even below the default trigger threshold. */
|
||||
|
|
|
|||
|
|
@ -16,6 +16,7 @@ import { resolveGatewayCredentialsFromValues } from "./credentials.js";
|
|||
import {
|
||||
isLocalishHost,
|
||||
isLoopbackAddress,
|
||||
resolveRequestClientIp,
|
||||
isTrustedProxyAddress,
|
||||
resolveClientIp,
|
||||
} from "./net.js";
|
||||
|
|
@ -105,23 +106,6 @@ function resolveTailscaleClientIp(req?: IncomingMessage): string | undefined {
|
|||
});
|
||||
}
|
||||
|
||||
function resolveRequestClientIp(
|
||||
req?: IncomingMessage,
|
||||
trustedProxies?: string[],
|
||||
allowRealIpFallback = false,
|
||||
): string | undefined {
|
||||
if (!req) {
|
||||
return undefined;
|
||||
}
|
||||
return resolveClientIp({
|
||||
remoteAddr: req.socket?.remoteAddress ?? "",
|
||||
forwardedFor: headerValue(req.headers?.["x-forwarded-for"]),
|
||||
realIp: headerValue(req.headers?.["x-real-ip"]),
|
||||
trustedProxies,
|
||||
allowRealIpFallback,
|
||||
});
|
||||
}
|
||||
|
||||
export function isLocalDirectRequest(
|
||||
req?: IncomingMessage,
|
||||
trustedProxies?: string[],
|
||||
|
|
|
|||
|
|
@ -26,9 +26,11 @@ export function createGatewayRequest(params: {
|
|||
method?: string;
|
||||
remoteAddress?: string;
|
||||
host?: string;
|
||||
headers?: Record<string, string>;
|
||||
}): IncomingMessage {
|
||||
const headers: Record<string, string> = {
|
||||
host: params.host ?? "localhost:18789",
|
||||
...params.headers,
|
||||
};
|
||||
if (params.authorization) {
|
||||
headers.authorization = params.authorization;
|
||||
|
|
|
|||
|
|
@ -99,7 +99,7 @@ function resolveKnownAgentIds(cfg: OpenClawConfig, defaultAgentId: string): Set<
|
|||
return known;
|
||||
}
|
||||
|
||||
function resolveAllowedAgentIds(raw: string[] | undefined): Set<string> | undefined {
|
||||
export function resolveAllowedAgentIds(raw: string[] | undefined): Set<string> | undefined {
|
||||
if (!Array.isArray(raw)) {
|
||||
return undefined;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
import type { IncomingMessage } from "node:http";
|
||||
import net from "node:net";
|
||||
import os from "node:os";
|
||||
import { pickPrimaryTailnetIPv4, pickPrimaryTailnetIPv6 } from "../infra/tailnet.js";
|
||||
|
|
@ -184,6 +185,27 @@ export function resolveClientIp(params: {
|
|||
return undefined;
|
||||
}
|
||||
|
||||
function headerValue(value: string | string[] | undefined): string | undefined {
|
||||
return Array.isArray(value) ? value[0] : value;
|
||||
}
|
||||
|
||||
export function resolveRequestClientIp(
|
||||
req?: IncomingMessage,
|
||||
trustedProxies?: string[],
|
||||
allowRealIpFallback = false,
|
||||
): string | undefined {
|
||||
if (!req) {
|
||||
return undefined;
|
||||
}
|
||||
return resolveClientIp({
|
||||
remoteAddr: req.socket?.remoteAddress ?? "",
|
||||
forwardedFor: headerValue(req.headers?.["x-forwarded-for"]),
|
||||
realIp: headerValue(req.headers?.["x-real-ip"]),
|
||||
trustedProxies,
|
||||
allowRealIpFallback,
|
||||
});
|
||||
}
|
||||
|
||||
export function isLocalGatewayAddress(ip: string | undefined): boolean {
|
||||
if (isLoopbackAddress(ip)) {
|
||||
return true;
|
||||
|
|
|
|||
|
|
@ -1,7 +1,9 @@
|
|||
import type { IncomingMessage, ServerResponse } from "node:http";
|
||||
import { beforeEach, describe, expect, test, vi } from "vitest";
|
||||
import type { createSubsystemLogger } from "../logging/subsystem.js";
|
||||
import { createGatewayRequest, createHooksConfig } from "./hooks-test-helpers.js";
|
||||
import {
|
||||
createHookRequest,
|
||||
createHooksHandler,
|
||||
createResponse,
|
||||
} from "./server-http.test-harness.js";
|
||||
|
||||
const { readJsonBodyMock } = vi.hoisted(() => ({
|
||||
readJsonBodyMock: vi.fn(),
|
||||
|
|
@ -15,64 +17,6 @@ vi.mock("./hooks.js", async (importOriginal) => {
|
|||
};
|
||||
});
|
||||
|
||||
import { createHooksRequestHandler } from "./server-http.js";
|
||||
|
||||
type HooksHandlerDeps = Parameters<typeof createHooksRequestHandler>[0];
|
||||
|
||||
function createRequest(params?: {
|
||||
authorization?: string;
|
||||
remoteAddress?: string;
|
||||
url?: string;
|
||||
}): IncomingMessage {
|
||||
return createGatewayRequest({
|
||||
method: "POST",
|
||||
path: params?.url ?? "/hooks/wake",
|
||||
host: "127.0.0.1:18789",
|
||||
authorization: params?.authorization ?? "Bearer hook-secret",
|
||||
remoteAddress: params?.remoteAddress,
|
||||
});
|
||||
}
|
||||
|
||||
function createResponse(): {
|
||||
res: ServerResponse;
|
||||
end: ReturnType<typeof vi.fn>;
|
||||
setHeader: ReturnType<typeof vi.fn>;
|
||||
} {
|
||||
const setHeader = vi.fn();
|
||||
const end = vi.fn();
|
||||
const res = {
|
||||
statusCode: 200,
|
||||
setHeader,
|
||||
end,
|
||||
} as unknown as ServerResponse;
|
||||
return { res, end, setHeader };
|
||||
}
|
||||
|
||||
function createHandler(params?: {
|
||||
dispatchWakeHook?: HooksHandlerDeps["dispatchWakeHook"];
|
||||
dispatchAgentHook?: HooksHandlerDeps["dispatchAgentHook"];
|
||||
bindHost?: string;
|
||||
}) {
|
||||
return createHooksRequestHandler({
|
||||
getHooksConfig: () => createHooksConfig(),
|
||||
bindHost: params?.bindHost ?? "127.0.0.1",
|
||||
port: 18789,
|
||||
logHooks: {
|
||||
warn: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
info: vi.fn(),
|
||||
error: vi.fn(),
|
||||
} as unknown as ReturnType<typeof createSubsystemLogger>,
|
||||
dispatchWakeHook:
|
||||
params?.dispatchWakeHook ??
|
||||
((() => {
|
||||
return;
|
||||
}) as HooksHandlerDeps["dispatchWakeHook"]),
|
||||
dispatchAgentHook:
|
||||
params?.dispatchAgentHook ?? ((() => "run-1") as HooksHandlerDeps["dispatchAgentHook"]),
|
||||
});
|
||||
}
|
||||
|
||||
describe("createHooksRequestHandler timeout status mapping", () => {
|
||||
beforeEach(() => {
|
||||
readJsonBodyMock.mockClear();
|
||||
|
|
@ -82,8 +26,8 @@ describe("createHooksRequestHandler timeout status mapping", () => {
|
|||
readJsonBodyMock.mockResolvedValue({ ok: false, error: "request body timeout" });
|
||||
const dispatchWakeHook = vi.fn();
|
||||
const dispatchAgentHook = vi.fn(() => "run-1");
|
||||
const handler = createHandler({ dispatchWakeHook, dispatchAgentHook });
|
||||
const req = createRequest();
|
||||
const handler = createHooksHandler({ dispatchWakeHook, dispatchAgentHook });
|
||||
const req = createHookRequest();
|
||||
const { res, end } = createResponse();
|
||||
|
||||
const handled = await handler(req, res);
|
||||
|
|
@ -96,10 +40,10 @@ describe("createHooksRequestHandler timeout status mapping", () => {
|
|||
});
|
||||
|
||||
test("shares hook auth rate-limit bucket across ipv4 and ipv4-mapped ipv6 forms", async () => {
|
||||
const handler = createHandler();
|
||||
const handler = createHooksHandler({ bindHost: "127.0.0.1" });
|
||||
|
||||
for (let i = 0; i < 20; i++) {
|
||||
const req = createRequest({
|
||||
const req = createHookRequest({
|
||||
authorization: "Bearer wrong",
|
||||
remoteAddress: "1.2.3.4",
|
||||
});
|
||||
|
|
@ -109,7 +53,7 @@ describe("createHooksRequestHandler timeout status mapping", () => {
|
|||
expect(res.statusCode).toBe(401);
|
||||
}
|
||||
|
||||
const mappedReq = createRequest({
|
||||
const mappedReq = createHookRequest({
|
||||
authorization: "Bearer wrong",
|
||||
remoteAddress: "::ffff:1.2.3.4",
|
||||
});
|
||||
|
|
@ -121,11 +65,41 @@ describe("createHooksRequestHandler timeout status mapping", () => {
|
|||
expect(setHeader).toHaveBeenCalledWith("Retry-After", expect.any(String));
|
||||
});
|
||||
|
||||
test("uses trusted proxy forwarded client ip for hook auth throttling", async () => {
|
||||
const handler = createHooksHandler({
|
||||
getClientIpConfig: () => ({ trustedProxies: ["10.0.0.1"] }),
|
||||
});
|
||||
|
||||
for (let i = 0; i < 20; i++) {
|
||||
const req = createHookRequest({
|
||||
authorization: "Bearer wrong",
|
||||
remoteAddress: "10.0.0.1",
|
||||
headers: { "x-forwarded-for": "1.2.3.4" },
|
||||
});
|
||||
const { res } = createResponse();
|
||||
const handled = await handler(req, res);
|
||||
expect(handled).toBe(true);
|
||||
expect(res.statusCode).toBe(401);
|
||||
}
|
||||
|
||||
const forwardedReq = createHookRequest({
|
||||
authorization: "Bearer wrong",
|
||||
remoteAddress: "10.0.0.1",
|
||||
headers: { "x-forwarded-for": "1.2.3.4, 10.0.0.1" },
|
||||
});
|
||||
const { res: forwardedRes, setHeader } = createResponse();
|
||||
const handled = await handler(forwardedReq, forwardedRes);
|
||||
|
||||
expect(handled).toBe(true);
|
||||
expect(forwardedRes.statusCode).toBe(429);
|
||||
expect(setHeader).toHaveBeenCalledWith("Retry-After", expect.any(String));
|
||||
});
|
||||
|
||||
test.each(["0.0.0.0", "::"])(
|
||||
"does not throw when bindHost=%s while parsing non-hook request URL",
|
||||
async (bindHost) => {
|
||||
const handler = createHandler({ bindHost });
|
||||
const req = createRequest({ url: "/" });
|
||||
const handler = createHooksHandler({ bindHost });
|
||||
const req = createHookRequest({ url: "/" });
|
||||
const { res, end } = createResponse();
|
||||
|
||||
const handled = await handler(req, res);
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ import { withTempConfig } from "./test-temp-config.js";
|
|||
|
||||
export type GatewayHttpServer = ReturnType<typeof createGatewayHttpServer>;
|
||||
export type GatewayServerOptions = Partial<Parameters<typeof createGatewayHttpServer>[0]>;
|
||||
type HooksHandlerDeps = Parameters<typeof createHooksRequestHandler>[0];
|
||||
|
||||
export const AUTH_NONE: ResolvedGatewayAuth = {
|
||||
mode: "none",
|
||||
|
|
@ -30,6 +31,7 @@ export function createRequest(params: {
|
|||
method?: string;
|
||||
remoteAddress?: string;
|
||||
host?: string;
|
||||
headers?: Record<string, string>;
|
||||
}): IncomingMessage {
|
||||
return createGatewayRequest({
|
||||
path: params.path,
|
||||
|
|
@ -37,6 +39,23 @@ export function createRequest(params: {
|
|||
method: params.method,
|
||||
remoteAddress: params.remoteAddress,
|
||||
host: params.host,
|
||||
headers: params.headers,
|
||||
});
|
||||
}
|
||||
|
||||
export function createHookRequest(params?: {
|
||||
authorization?: string;
|
||||
remoteAddress?: string;
|
||||
url?: string;
|
||||
headers?: Record<string, string>;
|
||||
}): IncomingMessage {
|
||||
return createRequest({
|
||||
method: "POST",
|
||||
path: params?.url ?? "/hooks/wake",
|
||||
host: "127.0.0.1:18789",
|
||||
authorization: params?.authorization ?? "Bearer hook-secret",
|
||||
remoteAddress: params?.remoteAddress,
|
||||
headers: params?.headers,
|
||||
});
|
||||
}
|
||||
|
||||
|
|
@ -162,10 +181,20 @@ export function createCanonicalizedChannelPluginHandler() {
|
|||
});
|
||||
}
|
||||
|
||||
export function createHooksHandler(bindHost: string) {
|
||||
export function createHooksHandler(
|
||||
params:
|
||||
| string
|
||||
| {
|
||||
dispatchWakeHook?: HooksHandlerDeps["dispatchWakeHook"];
|
||||
dispatchAgentHook?: HooksHandlerDeps["dispatchAgentHook"];
|
||||
bindHost?: string;
|
||||
getClientIpConfig?: HooksHandlerDeps["getClientIpConfig"];
|
||||
},
|
||||
) {
|
||||
const options = typeof params === "string" ? { bindHost: params } : params;
|
||||
return createHooksRequestHandler({
|
||||
getHooksConfig: () => createHooksConfig(),
|
||||
bindHost,
|
||||
bindHost: options.bindHost ?? "127.0.0.1",
|
||||
port: 18789,
|
||||
logHooks: {
|
||||
warn: vi.fn(),
|
||||
|
|
@ -173,8 +202,9 @@ export function createHooksHandler(bindHost: string) {
|
|||
info: vi.fn(),
|
||||
error: vi.fn(),
|
||||
} as unknown as ReturnType<typeof createSubsystemLogger>,
|
||||
dispatchWakeHook: () => {},
|
||||
dispatchAgentHook: () => "run-1",
|
||||
getClientIpConfig: options.getClientIpConfig,
|
||||
dispatchWakeHook: options.dispatchWakeHook ?? (() => {}),
|
||||
dispatchAgentHook: options.dispatchAgentHook ?? (() => "run-1"),
|
||||
});
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -52,6 +52,7 @@ import {
|
|||
} from "./hooks.js";
|
||||
import { sendGatewayAuthFailure, setDefaultSecurityHeaders } from "./http-common.js";
|
||||
import { getBearerToken } from "./http-utils.js";
|
||||
import { resolveRequestClientIp } from "./net.js";
|
||||
import { handleOpenAiHttpRequest } from "./openai-http.js";
|
||||
import { handleOpenResponsesHttpRequest } from "./openresponses-http.js";
|
||||
import {
|
||||
|
|
@ -79,6 +80,11 @@ type HookDispatchers = {
|
|||
dispatchAgentHook: (value: HookAgentDispatchPayload) => string;
|
||||
};
|
||||
|
||||
export type HookClientIpConfig = Readonly<{
|
||||
trustedProxies?: string[];
|
||||
allowRealIpFallback?: boolean;
|
||||
}>;
|
||||
|
||||
function sendJson(res: ServerResponse, status: number, body: unknown) {
|
||||
res.statusCode = status;
|
||||
res.setHeader("Content-Type", "application/json; charset=utf-8");
|
||||
|
|
@ -351,9 +357,10 @@ export function createHooksRequestHandler(
|
|||
bindHost: string;
|
||||
port: number;
|
||||
logHooks: SubsystemLogger;
|
||||
getClientIpConfig?: () => HookClientIpConfig;
|
||||
} & HookDispatchers,
|
||||
): HooksRequestHandler {
|
||||
const { getHooksConfig, logHooks, dispatchAgentHook, dispatchWakeHook } = opts;
|
||||
const { getHooksConfig, logHooks, dispatchAgentHook, dispatchWakeHook, getClientIpConfig } = opts;
|
||||
const hookAuthLimiter = createAuthRateLimiter({
|
||||
maxAttempts: HOOK_AUTH_FAILURE_LIMIT,
|
||||
windowMs: HOOK_AUTH_FAILURE_WINDOW_MS,
|
||||
|
|
@ -364,7 +371,14 @@ export function createHooksRequestHandler(
|
|||
});
|
||||
|
||||
const resolveHookClientKey = (req: IncomingMessage): string => {
|
||||
return normalizeRateLimitClientIp(req.socket?.remoteAddress);
|
||||
const clientIpConfig = getClientIpConfig?.();
|
||||
const clientIp =
|
||||
resolveRequestClientIp(
|
||||
req,
|
||||
clientIpConfig?.trustedProxies,
|
||||
clientIpConfig?.allowRealIpFallback === true,
|
||||
) ?? req.socket?.remoteAddress;
|
||||
return normalizeRateLimitClientIp(clientIp);
|
||||
};
|
||||
|
||||
return async (req, res) => {
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
import { exec } from "node:child_process";
|
||||
import { resolveAgentWorkspaceDir, resolveDefaultAgentId } from "../../agents/agent-scope.js";
|
||||
import { listChannelPlugins } from "../../channels/plugins/index.js";
|
||||
import {
|
||||
|
|
@ -529,4 +530,19 @@ export const configHandlers: GatewayRequestHandlers = {
|
|||
undefined,
|
||||
);
|
||||
},
|
||||
"config.openFile": ({ params, respond }) => {
|
||||
if (!assertValidParams(params, validateConfigGetParams, "config.openFile", respond)) {
|
||||
return;
|
||||
}
|
||||
const configPath = createConfigIO().configPath;
|
||||
const platform = process.platform;
|
||||
const cmd = platform === "darwin" ? "open" : platform === "win32" ? "start" : "xdg-open";
|
||||
exec(`${cmd} ${JSON.stringify(configPath)}`, (err) => {
|
||||
if (err) {
|
||||
respond(true, { ok: false, path: configPath, error: err.message }, undefined);
|
||||
return;
|
||||
}
|
||||
respond(true, { ok: true, path: configPath }, undefined);
|
||||
});
|
||||
},
|
||||
};
|
||||
|
|
|
|||
|
|
@ -22,9 +22,12 @@ import type { GatewayReloadPlan } from "./config-reload.js";
|
|||
import { resolveHooksConfig } from "./hooks.js";
|
||||
import { startBrowserControlServerIfEnabled } from "./server-browser.js";
|
||||
import { buildGatewayCronService, type GatewayCronState } from "./server-cron.js";
|
||||
import type { HookClientIpConfig } from "./server-http.js";
|
||||
import { resolveHookClientIpConfig } from "./server/hooks.js";
|
||||
|
||||
type GatewayHotReloadState = {
|
||||
hooksConfig: ReturnType<typeof resolveHooksConfig>;
|
||||
hookClientIpConfig: HookClientIpConfig;
|
||||
heartbeatRunner: HeartbeatRunner;
|
||||
cronState: GatewayCronState;
|
||||
browserControl: Awaited<ReturnType<typeof startBrowserControlServerIfEnabled>> | null;
|
||||
|
|
@ -64,6 +67,7 @@ export function createGatewayReloadHandlers(params: {
|
|||
params.logHooks.warn(`hooks config reload failed: ${String(err)}`);
|
||||
}
|
||||
}
|
||||
nextState.hookClientIpConfig = resolveHookClientIpConfig(nextConfig);
|
||||
|
||||
if (plan.restartHeartbeat) {
|
||||
nextState.heartbeatRunner.updateConfig(nextConfig);
|
||||
|
|
|
|||
|
|
@ -23,7 +23,11 @@ import {
|
|||
createToolEventRecipientRegistry,
|
||||
} from "./server-chat.js";
|
||||
import { MAX_PREAUTH_PAYLOAD_BYTES } from "./server-constants.js";
|
||||
import { attachGatewayUpgradeHandler, createGatewayHttpServer } from "./server-http.js";
|
||||
import {
|
||||
attachGatewayUpgradeHandler,
|
||||
createGatewayHttpServer,
|
||||
type HookClientIpConfig,
|
||||
} from "./server-http.js";
|
||||
import type { DedupeEntry } from "./server-shared.js";
|
||||
import { createGatewayHooksRequestHandler } from "./server/hooks.js";
|
||||
import { listenGatewayHttpServer } from "./server/http-listen.js";
|
||||
|
|
@ -53,6 +57,7 @@ export async function createGatewayRuntimeState(params: {
|
|||
rateLimiter?: AuthRateLimiter;
|
||||
gatewayTls?: GatewayTlsRuntime;
|
||||
hooksConfig: () => HooksConfigResolved | null;
|
||||
getHookClientIpConfig: () => HookClientIpConfig;
|
||||
pluginRegistry: PluginRegistry;
|
||||
deps: CliDeps;
|
||||
canvasRuntime: RuntimeEnv;
|
||||
|
|
@ -113,6 +118,7 @@ export async function createGatewayRuntimeState(params: {
|
|||
const handleHooksRequest = createGatewayHooksRequestHandler({
|
||||
deps: params.deps,
|
||||
getHooksConfig: params.hooksConfig,
|
||||
getClientIpConfig: params.getHookClientIpConfig,
|
||||
bindHost: params.bindHost,
|
||||
port: params.port,
|
||||
logHooks: params.logHooks,
|
||||
|
|
|
|||
|
|
@ -6,6 +6,7 @@ import {
|
|||
getFreePort,
|
||||
openWs,
|
||||
originForPort,
|
||||
rpcReq,
|
||||
restoreGatewayToken,
|
||||
startGatewayServer,
|
||||
testState,
|
||||
|
|
@ -62,6 +63,24 @@ describe("gateway auth compatibility baseline", () => {
|
|||
}
|
||||
});
|
||||
|
||||
test("clears client-declared scopes for shared-token operator connects", async () => {
|
||||
const ws = await openWs(port);
|
||||
try {
|
||||
const res = await connectReq(ws, {
|
||||
token: "secret",
|
||||
scopes: ["operator.admin"],
|
||||
device: null,
|
||||
});
|
||||
expect(res.ok).toBe(true);
|
||||
|
||||
const adminRes = await rpcReq(ws, "set-heartbeats", { enabled: false });
|
||||
expect(adminRes.ok).toBe(false);
|
||||
expect(adminRes.error?.message).toBe("missing scope: operator.admin");
|
||||
} finally {
|
||||
ws.close();
|
||||
}
|
||||
});
|
||||
|
||||
test("returns stable token-missing details for control ui without token", async () => {
|
||||
const ws = await openWs(port, { origin: originForPort(port) });
|
||||
try {
|
||||
|
|
@ -163,6 +182,24 @@ describe("gateway auth compatibility baseline", () => {
|
|||
ws.close();
|
||||
}
|
||||
});
|
||||
|
||||
test("clears client-declared scopes for shared-password operator connects", async () => {
|
||||
const ws = await openWs(port);
|
||||
try {
|
||||
const res = await connectReq(ws, {
|
||||
password: "secret",
|
||||
scopes: ["operator.admin"],
|
||||
device: null,
|
||||
});
|
||||
expect(res.ok).toBe(true);
|
||||
|
||||
const adminRes = await rpcReq(ws, "set-heartbeats", { enabled: false });
|
||||
expect(adminRes.ok).toBe(false);
|
||||
expect(adminRes.error?.message).toBe("missing scope: operator.admin");
|
||||
} finally {
|
||||
ws.close();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("none mode", () => {
|
||||
|
|
|
|||
|
|
@ -91,6 +91,11 @@ export function registerControlUiAndPairingSuite(): void {
|
|||
expect(health.ok).toBe(true);
|
||||
};
|
||||
|
||||
const expectAdminRpcOk = async (ws: WebSocket) => {
|
||||
const admin = await rpcReq(ws, "set-heartbeats", { enabled: false });
|
||||
expect(admin.ok).toBe(true);
|
||||
};
|
||||
|
||||
const connectControlUiWithoutDeviceAndExpectOk = async (params: {
|
||||
ws: WebSocket;
|
||||
token?: string;
|
||||
|
|
@ -104,6 +109,7 @@ export function registerControlUiAndPairingSuite(): void {
|
|||
});
|
||||
expect(res.ok).toBe(true);
|
||||
await expectStatusAndHealthOk(params.ws);
|
||||
await expectAdminRpcOk(params.ws);
|
||||
};
|
||||
|
||||
const createOperatorIdentityFixture = async (identityPrefix: string) => {
|
||||
|
|
@ -217,6 +223,9 @@ export function registerControlUiAndPairingSuite(): void {
|
|||
}
|
||||
if (tc.expectStatusChecks) {
|
||||
await expectStatusAndHealthOk(ws);
|
||||
if (tc.role === "operator") {
|
||||
await expectAdminRpcOk(ws);
|
||||
}
|
||||
}
|
||||
ws.close();
|
||||
});
|
||||
|
|
|
|||
|
|
@ -107,6 +107,7 @@ import {
|
|||
incrementPresenceVersion,
|
||||
refreshGatewayHealthSnapshot,
|
||||
} from "./server/health-state.js";
|
||||
import { resolveHookClientIpConfig } from "./server/hooks.js";
|
||||
import { createReadinessChecker } from "./server/readiness.js";
|
||||
import { loadGatewayTlsRuntime } from "./server/tls.js";
|
||||
import {
|
||||
|
|
@ -511,6 +512,7 @@ export async function startGatewayServer(
|
|||
tailscaleMode,
|
||||
} = runtimeConfig;
|
||||
let hooksConfig = runtimeConfig.hooksConfig;
|
||||
let hookClientIpConfig = resolveHookClientIpConfig(cfgAtStart);
|
||||
const canvasHostEnabled = runtimeConfig.canvasHostEnabled;
|
||||
|
||||
// Create auth rate limiters used by connect/auth flows.
|
||||
|
|
@ -613,6 +615,7 @@ export async function startGatewayServer(
|
|||
rateLimiter: authRateLimiter,
|
||||
gatewayTls,
|
||||
hooksConfig: () => hooksConfig,
|
||||
getHookClientIpConfig: () => hookClientIpConfig,
|
||||
pluginRegistry,
|
||||
deps,
|
||||
canvasRuntime,
|
||||
|
|
@ -954,6 +957,7 @@ export async function startGatewayServer(
|
|||
broadcast,
|
||||
getState: () => ({
|
||||
hooksConfig,
|
||||
hookClientIpConfig,
|
||||
heartbeatRunner,
|
||||
cronState,
|
||||
browserControl,
|
||||
|
|
@ -961,6 +965,7 @@ export async function startGatewayServer(
|
|||
}),
|
||||
setState: (nextState) => {
|
||||
hooksConfig = nextState.hooksConfig;
|
||||
hookClientIpConfig = nextState.hookClientIpConfig;
|
||||
heartbeatRunner = nextState.heartbeatRunner;
|
||||
cronState = nextState.cronState;
|
||||
cron = cronState.cron;
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
import { randomUUID } from "node:crypto";
|
||||
import type { CliDeps } from "../../cli/deps.js";
|
||||
import { loadConfig } from "../../config/config.js";
|
||||
import { loadConfig, type OpenClawConfig } from "../../config/config.js";
|
||||
import { resolveMainSessionKeyFromConfig } from "../../config/sessions.js";
|
||||
import { runCronIsolatedAgentTurn } from "../../cron/isolated-agent.js";
|
||||
import type { CronJob } from "../../cron/types.js";
|
||||
|
|
@ -12,18 +12,26 @@ import {
|
|||
type HookAgentDispatchPayload,
|
||||
type HooksConfigResolved,
|
||||
} from "../hooks.js";
|
||||
import { createHooksRequestHandler } from "../server-http.js";
|
||||
import { createHooksRequestHandler, type HookClientIpConfig } from "../server-http.js";
|
||||
|
||||
type SubsystemLogger = ReturnType<typeof createSubsystemLogger>;
|
||||
|
||||
export function resolveHookClientIpConfig(cfg: OpenClawConfig): HookClientIpConfig {
|
||||
return {
|
||||
trustedProxies: cfg.gateway?.trustedProxies,
|
||||
allowRealIpFallback: cfg.gateway?.allowRealIpFallback === true,
|
||||
};
|
||||
}
|
||||
|
||||
export function createGatewayHooksRequestHandler(params: {
|
||||
deps: CliDeps;
|
||||
getHooksConfig: () => HooksConfigResolved | null;
|
||||
getClientIpConfig: () => HookClientIpConfig;
|
||||
bindHost: string;
|
||||
port: number;
|
||||
logHooks: SubsystemLogger;
|
||||
}) {
|
||||
const { deps, getHooksConfig, bindHost, port, logHooks } = params;
|
||||
const { deps, getHooksConfig, getClientIpConfig, bindHost, port, logHooks } = params;
|
||||
|
||||
const dispatchWakeHook = (value: { text: string; mode: "now" | "next-heartbeat" }) => {
|
||||
const sessionKey = resolveMainSessionKeyFromConfig();
|
||||
|
|
@ -108,6 +116,7 @@ export function createGatewayHooksRequestHandler(params: {
|
|||
bindHost,
|
||||
port,
|
||||
logHooks,
|
||||
getClientIpConfig,
|
||||
dispatchAgentHook,
|
||||
dispatchWakeHook,
|
||||
});
|
||||
|
|
|
|||
|
|
@ -643,15 +643,12 @@ export function attachGatewayWsMessageHandler(params: {
|
|||
close(1008, truncateCloseReason(authMessage));
|
||||
};
|
||||
const clearUnboundScopes = () => {
|
||||
if (scopes.length > 0 && !controlUiAuthPolicy.allowBypass && !sharedAuthOk) {
|
||||
if (scopes.length > 0) {
|
||||
scopes = [];
|
||||
connectParams.scopes = scopes;
|
||||
}
|
||||
};
|
||||
const handleMissingDeviceIdentity = (): boolean => {
|
||||
if (!device) {
|
||||
clearUnboundScopes();
|
||||
}
|
||||
const trustedProxyAuthOk = isTrustedProxyControlUiOperatorAuth({
|
||||
isControlUi,
|
||||
role,
|
||||
|
|
@ -670,6 +667,9 @@ export function attachGatewayWsMessageHandler(params: {
|
|||
hasSharedAuth,
|
||||
isLocalClient,
|
||||
});
|
||||
if (!device && (!isControlUi || decision.kind !== "allow")) {
|
||||
clearUnboundScopes();
|
||||
}
|
||||
if (decision.kind === "allow") {
|
||||
return true;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -22,6 +22,10 @@ import {
|
|||
resolveSessionStoreKey,
|
||||
} from "./session-utils.js";
|
||||
|
||||
function resolveSyncRealpath(filePath: string): string {
|
||||
return fs.realpathSync.native(filePath);
|
||||
}
|
||||
|
||||
function createSymlinkOrSkip(targetPath: string, linkPath: string): boolean {
|
||||
try {
|
||||
fs.symlinkSync(targetPath, linkPath);
|
||||
|
|
@ -287,7 +291,7 @@ describe("gateway session utils", () => {
|
|||
|
||||
const target = resolveGatewaySessionStoreTarget({ cfg, key: "agent:retired-agent:main" });
|
||||
|
||||
expect(target.storePath).toBe(fs.realpathSync(retiredStorePath));
|
||||
expect(target.storePath).toBe(resolveSyncRealpath(retiredStorePath));
|
||||
});
|
||||
});
|
||||
|
||||
|
|
@ -316,7 +320,7 @@ describe("gateway session utils", () => {
|
|||
|
||||
const loaded = loadSessionEntry("agent:retired-agent:main");
|
||||
|
||||
expect(loaded.storePath).toBe(fs.realpathSync(retiredStorePath));
|
||||
expect(loaded.storePath).toBe(resolveSyncRealpath(retiredStorePath));
|
||||
expect(loaded.entry?.sessionId).toBe("sess-retired");
|
||||
});
|
||||
} finally {
|
||||
|
|
|
|||
|
|
@ -461,6 +461,391 @@ describe("memory index", () => {
|
|||
}
|
||||
});
|
||||
|
||||
it("targets explicit session files during post-compaction sync", async () => {
|
||||
const stateDir = path.join(fixtureRoot, `state-targeted-${randomUUID()}`);
|
||||
const sessionDir = path.join(stateDir, "agents", "main", "sessions");
|
||||
const firstSessionPath = path.join(sessionDir, "targeted-first.jsonl");
|
||||
const secondSessionPath = path.join(sessionDir, "targeted-second.jsonl");
|
||||
const storePath = path.join(workspaceDir, `index-targeted-${randomUUID()}.sqlite`);
|
||||
const previousStateDir = process.env.OPENCLAW_STATE_DIR;
|
||||
process.env.OPENCLAW_STATE_DIR = stateDir;
|
||||
|
||||
await fs.mkdir(sessionDir, { recursive: true });
|
||||
await fs.writeFile(
|
||||
firstSessionPath,
|
||||
`${JSON.stringify({
|
||||
type: "message",
|
||||
message: { role: "user", content: [{ type: "text", text: "first transcript v1" }] },
|
||||
})}\n`,
|
||||
);
|
||||
await fs.writeFile(
|
||||
secondSessionPath,
|
||||
`${JSON.stringify({
|
||||
type: "message",
|
||||
message: { role: "user", content: [{ type: "text", text: "second transcript v1" }] },
|
||||
})}\n`,
|
||||
);
|
||||
|
||||
try {
|
||||
const result = await getMemorySearchManager({
|
||||
cfg: createCfg({
|
||||
storePath,
|
||||
sources: ["sessions"],
|
||||
sessionMemory: true,
|
||||
}),
|
||||
agentId: "main",
|
||||
});
|
||||
const manager = requireManager(result);
|
||||
await manager.sync?.({ reason: "test" });
|
||||
|
||||
const db = (
|
||||
manager as unknown as {
|
||||
db: {
|
||||
prepare: (sql: string) => {
|
||||
get: (path: string, source: string) => { hash: string } | undefined;
|
||||
};
|
||||
};
|
||||
}
|
||||
).db;
|
||||
const getSessionHash = (sessionPath: string) =>
|
||||
db
|
||||
.prepare(`SELECT hash FROM files WHERE path = ? AND source = ?`)
|
||||
.get(sessionPath, "sessions")?.hash;
|
||||
|
||||
const firstOriginalHash = getSessionHash("sessions/targeted-first.jsonl");
|
||||
const secondOriginalHash = getSessionHash("sessions/targeted-second.jsonl");
|
||||
|
||||
await fs.writeFile(
|
||||
firstSessionPath,
|
||||
`${JSON.stringify({
|
||||
type: "message",
|
||||
message: {
|
||||
role: "user",
|
||||
content: [{ type: "text", text: "first transcript v2 after compaction" }],
|
||||
},
|
||||
})}\n`,
|
||||
);
|
||||
await fs.writeFile(
|
||||
secondSessionPath,
|
||||
`${JSON.stringify({
|
||||
type: "message",
|
||||
message: {
|
||||
role: "user",
|
||||
content: [{ type: "text", text: "second transcript v2 should stay untouched" }],
|
||||
},
|
||||
})}\n`,
|
||||
);
|
||||
|
||||
await manager.sync?.({
|
||||
reason: "post-compaction",
|
||||
sessionFiles: [firstSessionPath],
|
||||
});
|
||||
|
||||
expect(getSessionHash("sessions/targeted-first.jsonl")).not.toBe(firstOriginalHash);
|
||||
expect(getSessionHash("sessions/targeted-second.jsonl")).toBe(secondOriginalHash);
|
||||
await manager.close?.();
|
||||
} finally {
|
||||
if (previousStateDir === undefined) {
|
||||
delete process.env.OPENCLAW_STATE_DIR;
|
||||
} else {
|
||||
process.env.OPENCLAW_STATE_DIR = previousStateDir;
|
||||
}
|
||||
await fs.rm(stateDir, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it("preserves unrelated dirty sessions after targeted post-compaction sync", async () => {
|
||||
const stateDir = path.join(fixtureRoot, `state-targeted-dirty-${randomUUID()}`);
|
||||
const sessionDir = path.join(stateDir, "agents", "main", "sessions");
|
||||
const firstSessionPath = path.join(sessionDir, "targeted-dirty-first.jsonl");
|
||||
const secondSessionPath = path.join(sessionDir, "targeted-dirty-second.jsonl");
|
||||
const storePath = path.join(workspaceDir, `index-targeted-dirty-${randomUUID()}.sqlite`);
|
||||
const previousStateDir = process.env.OPENCLAW_STATE_DIR;
|
||||
process.env.OPENCLAW_STATE_DIR = stateDir;
|
||||
|
||||
await fs.mkdir(sessionDir, { recursive: true });
|
||||
await fs.writeFile(
|
||||
firstSessionPath,
|
||||
`${JSON.stringify({
|
||||
type: "message",
|
||||
message: { role: "user", content: [{ type: "text", text: "first transcript v1" }] },
|
||||
})}\n`,
|
||||
);
|
||||
await fs.writeFile(
|
||||
secondSessionPath,
|
||||
`${JSON.stringify({
|
||||
type: "message",
|
||||
message: { role: "user", content: [{ type: "text", text: "second transcript v1" }] },
|
||||
})}\n`,
|
||||
);
|
||||
|
||||
try {
|
||||
const manager = requireManager(
|
||||
await getMemorySearchManager({
|
||||
cfg: createCfg({
|
||||
storePath,
|
||||
sources: ["sessions"],
|
||||
sessionMemory: true,
|
||||
}),
|
||||
agentId: "main",
|
||||
}),
|
||||
);
|
||||
await manager.sync({ reason: "test" });
|
||||
|
||||
const db = (
|
||||
manager as unknown as {
|
||||
db: {
|
||||
prepare: (sql: string) => {
|
||||
get: (path: string, source: string) => { hash: string } | undefined;
|
||||
};
|
||||
};
|
||||
}
|
||||
).db;
|
||||
const getSessionHash = (sessionPath: string) =>
|
||||
db
|
||||
.prepare(`SELECT hash FROM files WHERE path = ? AND source = ?`)
|
||||
.get(sessionPath, "sessions")?.hash;
|
||||
|
||||
const firstOriginalHash = getSessionHash("sessions/targeted-dirty-first.jsonl");
|
||||
const secondOriginalHash = getSessionHash("sessions/targeted-dirty-second.jsonl");
|
||||
|
||||
await fs.writeFile(
|
||||
firstSessionPath,
|
||||
`${JSON.stringify({
|
||||
type: "message",
|
||||
message: {
|
||||
role: "user",
|
||||
content: [{ type: "text", text: "first transcript v2 after compaction" }],
|
||||
},
|
||||
})}\n`,
|
||||
);
|
||||
await fs.writeFile(
|
||||
secondSessionPath,
|
||||
`${JSON.stringify({
|
||||
type: "message",
|
||||
message: {
|
||||
role: "user",
|
||||
content: [{ type: "text", text: "second transcript v2 still pending" }],
|
||||
},
|
||||
})}\n`,
|
||||
);
|
||||
|
||||
const internal = manager as unknown as {
|
||||
sessionsDirty: boolean;
|
||||
sessionsDirtyFiles: Set<string>;
|
||||
};
|
||||
internal.sessionsDirty = true;
|
||||
internal.sessionsDirtyFiles.add(secondSessionPath);
|
||||
|
||||
await manager.sync({
|
||||
reason: "post-compaction",
|
||||
sessionFiles: [firstSessionPath],
|
||||
});
|
||||
|
||||
expect(getSessionHash("sessions/targeted-dirty-first.jsonl")).not.toBe(firstOriginalHash);
|
||||
expect(getSessionHash("sessions/targeted-dirty-second.jsonl")).toBe(secondOriginalHash);
|
||||
expect(internal.sessionsDirtyFiles.has(secondSessionPath)).toBe(true);
|
||||
expect(internal.sessionsDirty).toBe(true);
|
||||
|
||||
await manager.sync({ reason: "test" });
|
||||
|
||||
expect(getSessionHash("sessions/targeted-dirty-second.jsonl")).not.toBe(secondOriginalHash);
|
||||
await manager.close?.();
|
||||
} finally {
|
||||
if (previousStateDir === undefined) {
|
||||
delete process.env.OPENCLAW_STATE_DIR;
|
||||
} else {
|
||||
process.env.OPENCLAW_STATE_DIR = previousStateDir;
|
||||
}
|
||||
await fs.rm(stateDir, { recursive: true, force: true });
|
||||
await fs.rm(storePath, { force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it("queues targeted session sync when another sync is already in progress", async () => {
|
||||
const stateDir = path.join(fixtureRoot, `state-targeted-queued-${randomUUID()}`);
|
||||
const sessionDir = path.join(stateDir, "agents", "main", "sessions");
|
||||
const sessionPath = path.join(sessionDir, "targeted-queued.jsonl");
|
||||
const storePath = path.join(workspaceDir, `index-targeted-queued-${randomUUID()}.sqlite`);
|
||||
const previousStateDir = process.env.OPENCLAW_STATE_DIR;
|
||||
process.env.OPENCLAW_STATE_DIR = stateDir;
|
||||
|
||||
await fs.mkdir(sessionDir, { recursive: true });
|
||||
await fs.writeFile(
|
||||
sessionPath,
|
||||
`${JSON.stringify({
|
||||
type: "message",
|
||||
message: { role: "user", content: [{ type: "text", text: "queued transcript v1" }] },
|
||||
})}\n`,
|
||||
);
|
||||
|
||||
try {
|
||||
const manager = requireManager(
|
||||
await getMemorySearchManager({
|
||||
cfg: createCfg({
|
||||
storePath,
|
||||
sources: ["sessions"],
|
||||
sessionMemory: true,
|
||||
}),
|
||||
agentId: "main",
|
||||
}),
|
||||
);
|
||||
await manager.sync({ reason: "test" });
|
||||
|
||||
const db = (
|
||||
manager as unknown as {
|
||||
db: {
|
||||
prepare: (sql: string) => {
|
||||
get: (path: string, source: string) => { hash: string } | undefined;
|
||||
};
|
||||
};
|
||||
}
|
||||
).db;
|
||||
const getSessionHash = (sessionRelPath: string) =>
|
||||
db
|
||||
.prepare(`SELECT hash FROM files WHERE path = ? AND source = ?`)
|
||||
.get(sessionRelPath, "sessions")?.hash;
|
||||
const originalHash = getSessionHash("sessions/targeted-queued.jsonl");
|
||||
|
||||
const internal = manager as unknown as {
|
||||
runSyncWithReadonlyRecovery: (params?: {
|
||||
reason?: string;
|
||||
sessionFiles?: string[];
|
||||
}) => Promise<void>;
|
||||
};
|
||||
const originalRunSync = internal.runSyncWithReadonlyRecovery.bind(manager);
|
||||
let releaseBusySync: (() => void) | undefined;
|
||||
const busyGate = new Promise<void>((resolve) => {
|
||||
releaseBusySync = resolve;
|
||||
});
|
||||
internal.runSyncWithReadonlyRecovery = async (params) => {
|
||||
if (params?.reason === "busy-sync") {
|
||||
await busyGate;
|
||||
}
|
||||
return await originalRunSync(params);
|
||||
};
|
||||
|
||||
const busySyncPromise = manager.sync({ reason: "busy-sync" });
|
||||
await fs.writeFile(
|
||||
sessionPath,
|
||||
`${JSON.stringify({
|
||||
type: "message",
|
||||
message: {
|
||||
role: "user",
|
||||
content: [{ type: "text", text: "queued transcript v2 after compaction" }],
|
||||
},
|
||||
})}\n`,
|
||||
);
|
||||
|
||||
const targetedSyncPromise = manager.sync({
|
||||
reason: "post-compaction",
|
||||
sessionFiles: [sessionPath],
|
||||
});
|
||||
|
||||
releaseBusySync?.();
|
||||
await Promise.all([busySyncPromise, targetedSyncPromise]);
|
||||
|
||||
expect(getSessionHash("sessions/targeted-queued.jsonl")).not.toBe(originalHash);
|
||||
await manager.close?.();
|
||||
} finally {
|
||||
if (previousStateDir === undefined) {
|
||||
delete process.env.OPENCLAW_STATE_DIR;
|
||||
} else {
|
||||
process.env.OPENCLAW_STATE_DIR = previousStateDir;
|
||||
}
|
||||
await fs.rm(stateDir, { recursive: true, force: true });
|
||||
await fs.rm(storePath, { force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it("runs a full reindex after fallback activates during targeted sync", async () => {
|
||||
const stateDir = path.join(fixtureRoot, `state-targeted-fallback-${randomUUID()}`);
|
||||
const sessionDir = path.join(stateDir, "agents", "main", "sessions");
|
||||
const sessionPath = path.join(sessionDir, "targeted-fallback.jsonl");
|
||||
const storePath = path.join(workspaceDir, `index-targeted-fallback-${randomUUID()}.sqlite`);
|
||||
const previousStateDir = process.env.OPENCLAW_STATE_DIR;
|
||||
process.env.OPENCLAW_STATE_DIR = stateDir;
|
||||
|
||||
await fs.mkdir(sessionDir, { recursive: true });
|
||||
await fs.writeFile(
|
||||
sessionPath,
|
||||
`${JSON.stringify({
|
||||
type: "message",
|
||||
message: { role: "user", content: [{ type: "text", text: "fallback transcript v1" }] },
|
||||
})}\n`,
|
||||
);
|
||||
|
||||
try {
|
||||
const manager = requireManager(
|
||||
await getMemorySearchManager({
|
||||
cfg: createCfg({
|
||||
storePath,
|
||||
sources: ["sessions"],
|
||||
sessionMemory: true,
|
||||
}),
|
||||
agentId: "main",
|
||||
}),
|
||||
);
|
||||
await manager.sync({ reason: "test" });
|
||||
|
||||
const internal = manager as unknown as {
|
||||
syncSessionFiles: (params: {
|
||||
targetSessionFiles?: string[];
|
||||
needsFullReindex: boolean;
|
||||
}) => Promise<void>;
|
||||
shouldFallbackOnError: (message: string) => boolean;
|
||||
activateFallbackProvider: (reason: string) => Promise<boolean>;
|
||||
runUnsafeReindex: (params: {
|
||||
reason?: string;
|
||||
force?: boolean;
|
||||
progress?: unknown;
|
||||
}) => Promise<void>;
|
||||
};
|
||||
const originalSyncSessionFiles = internal.syncSessionFiles.bind(manager);
|
||||
const originalShouldFallbackOnError = internal.shouldFallbackOnError.bind(manager);
|
||||
const originalActivateFallbackProvider = internal.activateFallbackProvider.bind(manager);
|
||||
const originalRunUnsafeReindex = internal.runUnsafeReindex.bind(manager);
|
||||
|
||||
internal.syncSessionFiles = async (params) => {
|
||||
if (params.targetSessionFiles?.length) {
|
||||
throw new Error("embedding backend failed");
|
||||
}
|
||||
return await originalSyncSessionFiles(params);
|
||||
};
|
||||
internal.shouldFallbackOnError = () => true;
|
||||
const activateFallbackProvider = vi.fn(async () => true);
|
||||
internal.activateFallbackProvider = activateFallbackProvider;
|
||||
const runUnsafeReindex = vi.fn(async () => {});
|
||||
internal.runUnsafeReindex = runUnsafeReindex;
|
||||
|
||||
await manager.sync({
|
||||
reason: "post-compaction",
|
||||
sessionFiles: [sessionPath],
|
||||
});
|
||||
|
||||
expect(activateFallbackProvider).toHaveBeenCalledWith("embedding backend failed");
|
||||
expect(runUnsafeReindex).toHaveBeenCalledWith({
|
||||
reason: "post-compaction",
|
||||
force: true,
|
||||
progress: undefined,
|
||||
});
|
||||
|
||||
internal.syncSessionFiles = originalSyncSessionFiles;
|
||||
internal.shouldFallbackOnError = originalShouldFallbackOnError;
|
||||
internal.activateFallbackProvider = originalActivateFallbackProvider;
|
||||
internal.runUnsafeReindex = originalRunUnsafeReindex;
|
||||
await manager.close?.();
|
||||
} finally {
|
||||
if (previousStateDir === undefined) {
|
||||
delete process.env.OPENCLAW_STATE_DIR;
|
||||
} else {
|
||||
process.env.OPENCLAW_STATE_DIR = previousStateDir;
|
||||
}
|
||||
await fs.rm(stateDir, { recursive: true, force: true });
|
||||
await fs.rm(storePath, { force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it("reindexes when the embedding model changes", async () => {
|
||||
const base = createCfg({ storePath: indexModelPath });
|
||||
const baseAgents = base.agents!;
|
||||
|
|
|
|||
|
|
@ -153,6 +153,8 @@ export abstract class MemoryManagerSyncOps {
|
|||
protected abstract sync(params?: {
|
||||
reason?: string;
|
||||
force?: boolean;
|
||||
forceSessions?: boolean;
|
||||
sessionFile?: string;
|
||||
progress?: (update: MemorySyncProgressUpdate) => void;
|
||||
}): Promise<void>;
|
||||
protected abstract withTimeout<T>(
|
||||
|
|
@ -628,6 +630,35 @@ export abstract class MemoryManagerSyncOps {
|
|||
return resolvedFile.startsWith(`${resolvedDir}${path.sep}`);
|
||||
}
|
||||
|
||||
/**
 * Normalizes a caller-provided list of session transcript paths into a set of
 * absolute paths, keeping only entries accepted by isSessionFileForAgent.
 * Returns null when nothing usable remains so callers can treat "no targets"
 * and "no valid targets" the same way.
 */
private normalizeTargetSessionFiles(sessionFiles?: string[]): Set<string> | null {
  if (!sessionFiles?.length) {
    return null;
  }
  const accepted = new Set<string>();
  for (const candidate of sessionFiles) {
    const trimmedPath = candidate.trim();
    if (!trimmedPath) {
      continue;
    }
    const absolutePath = path.resolve(trimmedPath);
    if (this.isSessionFileForAgent(absolutePath)) {
      accepted.add(absolutePath);
    }
  }
  return accepted.size === 0 ? null : accepted;
}
|
||||
|
||||
/**
 * Drops synced transcripts from the dirty-file bookkeeping. With explicit
 * targets only those entries are removed; without targets the whole dirty set
 * is cleared. The aggregate sessionsDirty flag is recomputed either way.
 */
private clearSyncedSessionFiles(targetSessionFiles?: Iterable<string> | null) {
  if (targetSessionFiles) {
    for (const syncedFile of targetSessionFiles) {
      this.sessionsDirtyFiles.delete(syncedFile);
    }
  } else {
    this.sessionsDirtyFiles.clear();
  }
  this.sessionsDirty = this.sessionsDirtyFiles.size > 0;
}
|
||||
|
||||
protected ensureIntervalSync() {
|
||||
const minutes = this.settings.sync.intervalMinutes;
|
||||
if (!minutes || minutes <= 0 || this.intervalTimer) {
|
||||
|
|
@ -657,12 +688,15 @@ export abstract class MemoryManagerSyncOps {
|
|||
}
|
||||
|
||||
private shouldSyncSessions(
|
||||
params?: { reason?: string; force?: boolean },
|
||||
params?: { reason?: string; force?: boolean; sessionFiles?: string[] },
|
||||
needsFullReindex = false,
|
||||
) {
|
||||
if (!this.sources.has("sessions")) {
|
||||
return false;
|
||||
}
|
||||
if (params?.sessionFiles?.some((sessionFile) => sessionFile.trim().length > 0)) {
|
||||
return true;
|
||||
}
|
||||
if (params?.force) {
|
||||
return true;
|
||||
}
|
||||
|
|
@ -769,6 +803,7 @@ export abstract class MemoryManagerSyncOps {
|
|||
|
||||
private async syncSessionFiles(params: {
|
||||
needsFullReindex: boolean;
|
||||
targetSessionFiles?: string[];
|
||||
progress?: MemorySyncProgressState;
|
||||
}) {
|
||||
// FTS-only mode: skip embedding sync (no provider)
|
||||
|
|
@ -777,21 +812,31 @@ export abstract class MemoryManagerSyncOps {
|
|||
return;
|
||||
}
|
||||
|
||||
const files = await listSessionFilesForAgent(this.agentId, {
|
||||
includeResetArchives: this.settings.sync.sessions.includeResetArchives,
|
||||
});
|
||||
const activePaths = new Set(files.map((file) => sessionPathForFile(file)));
|
||||
const sessionRowsBefore = params.needsFullReindex
|
||||
? []
|
||||
: (this.db.prepare(`SELECT path FROM files WHERE source = ?`).all("sessions") as Array<{
|
||||
path: string;
|
||||
}>);
|
||||
const targetSessionFiles = params.needsFullReindex
|
||||
? null
|
||||
: this.normalizeTargetSessionFiles(params.targetSessionFiles);
|
||||
const files = targetSessionFiles
|
||||
? Array.from(targetSessionFiles)
|
||||
: await listSessionFilesForAgent(this.agentId, {
|
||||
includeResetArchives: this.settings.sync.sessions.includeResetArchives,
|
||||
});
|
||||
const activePaths = targetSessionFiles
|
||||
? null
|
||||
: new Set(files.map((file) => sessionPathForFile(file)));
|
||||
const sessionRowsBefore =
|
||||
activePaths === null || params.needsFullReindex
|
||||
? []
|
||||
: (this.db.prepare(`SELECT path FROM files WHERE source = ?`).all("sessions") as Array<{
|
||||
path: string;
|
||||
}>);
|
||||
const knownPaths = new Set(sessionRowsBefore.map((row) => row.path));
|
||||
const indexAll = params.needsFullReindex || this.sessionsDirtyFiles.size === 0;
|
||||
const indexAll =
|
||||
params.needsFullReindex || Boolean(targetSessionFiles) || this.sessionsDirtyFiles.size === 0;
|
||||
log.debug("memory sync: indexing session files", {
|
||||
files: files.length,
|
||||
indexAll,
|
||||
dirtyFiles: this.sessionsDirtyFiles.size,
|
||||
targetedFiles: targetSessionFiles?.size ?? 0,
|
||||
batch: this.batch.enabled,
|
||||
concurrency: this.getIndexConcurrency(),
|
||||
});
|
||||
|
|
@ -854,6 +899,11 @@ export abstract class MemoryManagerSyncOps {
|
|||
});
|
||||
await runWithConcurrency(tasks, this.getIndexConcurrency());
|
||||
|
||||
if (activePaths === null) {
|
||||
// Targeted syncs only refresh the requested transcripts and should not
|
||||
// prune unrelated session rows without a full directory enumeration.
|
||||
return;
|
||||
}
|
||||
for (const stale of sessionRowsBefore) {
|
||||
if (activePaths.has(stale.path)) {
|
||||
continue;
|
||||
|
|
@ -909,6 +959,7 @@ export abstract class MemoryManagerSyncOps {
|
|||
protected async runSync(params?: {
|
||||
reason?: string;
|
||||
force?: boolean;
|
||||
sessionFiles?: string[];
|
||||
progress?: (update: MemorySyncProgressUpdate) => void;
|
||||
}) {
|
||||
const progress = params?.progress ? this.createSyncProgress(params.progress) : undefined;
|
||||
|
|
@ -924,8 +975,47 @@ export abstract class MemoryManagerSyncOps {
|
|||
const configuredSources = this.resolveConfiguredSourcesForMeta();
|
||||
const sessionsSourceEnabled = configuredSources.includes("sessions");
|
||||
const configuredScopeHash = this.resolveConfiguredScopeHash();
|
||||
const targetSessionFiles = this.normalizeTargetSessionFiles(params?.sessionFiles);
|
||||
const hasTargetSessionFiles = targetSessionFiles !== null;
|
||||
if (hasTargetSessionFiles && targetSessionFiles && this.sources.has("sessions")) {
|
||||
// Post-compaction refreshes should only update the explicit transcript files and
|
||||
// leave broader reindex/dirty-work decisions to the regular sync path.
|
||||
try {
|
||||
await this.syncSessionFiles({
|
||||
needsFullReindex: false,
|
||||
targetSessionFiles: Array.from(targetSessionFiles),
|
||||
progress: progress ?? undefined,
|
||||
});
|
||||
this.clearSyncedSessionFiles(targetSessionFiles);
|
||||
} catch (err) {
|
||||
const reason = err instanceof Error ? err.message : String(err);
|
||||
const activated =
|
||||
this.shouldFallbackOnError(reason) && (await this.activateFallbackProvider(reason));
|
||||
if (activated) {
|
||||
if (
|
||||
process.env.OPENCLAW_TEST_FAST === "1" &&
|
||||
process.env.OPENCLAW_TEST_MEMORY_UNSAFE_REINDEX === "1"
|
||||
) {
|
||||
await this.runUnsafeReindex({
|
||||
reason: params?.reason,
|
||||
force: true,
|
||||
progress: progress ?? undefined,
|
||||
});
|
||||
} else {
|
||||
await this.runSafeReindex({
|
||||
reason: params?.reason,
|
||||
force: true,
|
||||
progress: progress ?? undefined,
|
||||
});
|
||||
}
|
||||
return;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
return;
|
||||
}
|
||||
const needsFullReindex =
|
||||
params?.force ||
|
||||
(params?.force && !hasTargetSessionFiles) ||
|
||||
!meta ||
|
||||
(this.provider && meta.model !== this.provider.model) ||
|
||||
(this.provider && meta.provider !== this.provider.id) ||
|
||||
|
|
@ -962,7 +1052,8 @@ export abstract class MemoryManagerSyncOps {
|
|||
}
|
||||
|
||||
const shouldSyncMemory =
|
||||
this.sources.has("memory") && (params?.force || needsFullReindex || this.dirty);
|
||||
this.sources.has("memory") &&
|
||||
((!hasTargetSessionFiles && params?.force) || needsFullReindex || this.dirty);
|
||||
const shouldSyncSessions = this.shouldSyncSessions(params, needsFullReindex);
|
||||
|
||||
if (shouldSyncMemory) {
|
||||
|
|
@ -971,7 +1062,11 @@ export abstract class MemoryManagerSyncOps {
|
|||
}
|
||||
|
||||
if (shouldSyncSessions) {
|
||||
await this.syncSessionFiles({ needsFullReindex, progress: progress ?? undefined });
|
||||
await this.syncSessionFiles({
|
||||
needsFullReindex,
|
||||
targetSessionFiles: targetSessionFiles ? Array.from(targetSessionFiles) : undefined,
|
||||
progress: progress ?? undefined,
|
||||
});
|
||||
this.sessionsDirty = false;
|
||||
this.sessionsDirtyFiles.clear();
|
||||
} else if (this.sessionsDirtyFiles.size > 0) {
|
||||
|
|
|
|||
|
|
@ -125,6 +125,8 @@ export class MemoryIndexManager extends MemoryManagerEmbeddingOps implements Mem
|
|||
>();
|
||||
private sessionWarm = new Set<string>();
|
||||
private syncing: Promise<void> | null = null;
|
||||
private queuedSessionFiles = new Set<string>();
|
||||
private queuedSessionSync: Promise<void> | null = null;
|
||||
private readonlyRecoveryAttempts = 0;
|
||||
private readonlyRecoverySuccesses = 0;
|
||||
private readonlyRecoveryFailures = 0;
|
||||
|
|
@ -452,12 +454,16 @@ export class MemoryIndexManager extends MemoryManagerEmbeddingOps implements Mem
|
|||
async sync(params?: {
|
||||
reason?: string;
|
||||
force?: boolean;
|
||||
sessionFiles?: string[];
|
||||
progress?: (update: MemorySyncProgressUpdate) => void;
|
||||
}): Promise<void> {
|
||||
if (this.closed) {
|
||||
return;
|
||||
}
|
||||
if (this.syncing) {
|
||||
if (params?.sessionFiles?.some((sessionFile) => sessionFile.trim().length > 0)) {
|
||||
return this.enqueueTargetedSessionSync(params.sessionFiles);
|
||||
}
|
||||
return this.syncing;
|
||||
}
|
||||
this.syncing = this.runSyncWithReadonlyRecovery(params).finally(() => {
|
||||
|
|
@ -466,6 +472,36 @@ export class MemoryIndexManager extends MemoryManagerEmbeddingOps implements Mem
|
|||
return this.syncing ?? Promise.resolve();
|
||||
}
|
||||
|
||||
/**
 * Queues a targeted session-file sync to run after the in-flight sync
 * finishes. Used when sync() is called with explicit sessionFiles while
 * another sync is already running, so the targeted refresh is not dropped.
 *
 * Returns a promise that settles once the queued targeted work has drained
 * (or the existing sync promise when nothing was queued).
 */
private enqueueTargetedSessionSync(sessionFiles?: string[]): Promise<void> {
  // Accumulate non-empty paths; duplicate requests for the same file collapse
  // because queuedSessionFiles is a Set.
  for (const sessionFile of sessionFiles ?? []) {
    const trimmed = sessionFile.trim();
    if (trimmed) {
      this.queuedSessionFiles.add(trimmed);
    }
  }
  if (this.queuedSessionFiles.size === 0) {
    // Nothing usable was queued; let the caller await the active sync, if any.
    return this.syncing ?? Promise.resolve();
  }
  if (!this.queuedSessionSync) {
    this.queuedSessionSync = (async () => {
      try {
        // Wait for the active sync to finish; its failure is surfaced to its
        // own caller, so swallow it here to keep the queue draining.
        await this.syncing?.catch(() => undefined);
        // New files can be queued while a batch is syncing, so loop until the
        // set stays empty (or the manager is closed).
        while (!this.closed && this.queuedSessionFiles.size > 0) {
          const queuedSessionFiles = Array.from(this.queuedSessionFiles);
          this.queuedSessionFiles.clear();
          await this.sync({
            reason: "queued-session-files",
            sessionFiles: queuedSessionFiles,
          });
        }
      } finally {
        // Allow a fresh drainer to be created for future targeted requests.
        this.queuedSessionSync = null;
      }
    })();
  }
  return this.queuedSessionSync;
}
|
||||
|
||||
private isReadonlyDbError(err: unknown): boolean {
|
||||
const readonlyPattern =
|
||||
/attempt to write a readonly database|database is read-only|SQLITE_READONLY/i;
|
||||
|
|
@ -518,6 +554,7 @@ export class MemoryIndexManager extends MemoryManagerEmbeddingOps implements Mem
|
|||
private async runSyncWithReadonlyRecovery(params?: {
|
||||
reason?: string;
|
||||
force?: boolean;
|
||||
sessionFiles?: string[];
|
||||
progress?: (update: MemorySyncProgressUpdate) => void;
|
||||
}): Promise<void> {
|
||||
try {
|
||||
|
|
|
|||
|
|
@ -867,8 +867,12 @@ export class QmdMemoryManager implements MemorySearchManager {
|
|||
async sync(params?: {
|
||||
reason?: string;
|
||||
force?: boolean;
|
||||
sessionFiles?: string[];
|
||||
progress?: (update: MemorySyncProgressUpdate) => void;
|
||||
}): Promise<void> {
|
||||
if (params?.sessionFiles?.some((sessionFile) => sessionFile.trim().length > 0)) {
|
||||
log.debug("qmd sync ignoring targeted sessionFiles hint; running regular update");
|
||||
}
|
||||
if (params?.progress) {
|
||||
params.progress({ completed: 0, total: 1, label: "Updating QMD index…" });
|
||||
}
|
||||
|
|
|
|||
|
|
@ -181,6 +181,7 @@ class FallbackMemoryManager implements MemorySearchManager {
|
|||
async sync(params?: {
|
||||
reason?: string;
|
||||
force?: boolean;
|
||||
sessionFiles?: string[];
|
||||
progress?: (update: MemorySyncProgressUpdate) => void;
|
||||
}) {
|
||||
if (!this.primaryFailed) {
|
||||
|
|
|
|||
|
|
@ -72,6 +72,7 @@ export interface MemorySearchManager {
|
|||
sync?(params?: {
|
||||
reason?: string;
|
||||
force?: boolean;
|
||||
sessionFiles?: string[];
|
||||
progress?: (update: MemorySyncProgressUpdate) => void;
|
||||
}): Promise<void>;
|
||||
probeEmbeddingAvailability(): Promise<MemoryEmbeddingProbeResult>;
|
||||
|
|
|
|||
|
|
@ -6,6 +6,7 @@ import { formatExecCommand } from "../infra/system-run-command.js";
|
|||
import {
|
||||
buildSystemRunApprovalPlan,
|
||||
hardenApprovedExecutionPaths,
|
||||
resolveMutableFileOperandSnapshotSync,
|
||||
} from "./invoke-system-run-plan.js";
|
||||
|
||||
type PathTokenSetup = {
|
||||
|
|
@ -94,6 +95,36 @@ function withFakeRuntimeBin<T>(params: { binName: string; run: () => T }): T {
|
|||
}
|
||||
}
|
||||
|
||||
function withFakeRuntimeBins<T>(params: { binNames: string[]; run: () => T }): T {
|
||||
const tmp = fs.mkdtempSync(path.join(os.tmpdir(), "openclaw-runtime-bins-"));
|
||||
const binDir = path.join(tmp, "bin");
|
||||
fs.mkdirSync(binDir, { recursive: true });
|
||||
for (const binName of params.binNames) {
|
||||
const runtimePath =
|
||||
process.platform === "win32"
|
||||
? path.join(binDir, `${binName}.cmd`)
|
||||
: path.join(binDir, binName);
|
||||
const runtimeBody =
|
||||
process.platform === "win32" ? "@echo off\r\nexit /b 0\r\n" : "#!/bin/sh\nexit 0\n";
|
||||
fs.writeFileSync(runtimePath, runtimeBody, { mode: 0o755 });
|
||||
if (process.platform !== "win32") {
|
||||
fs.chmodSync(runtimePath, 0o755);
|
||||
}
|
||||
}
|
||||
const oldPath = process.env.PATH;
|
||||
process.env.PATH = `${binDir}${path.delimiter}${oldPath ?? ""}`;
|
||||
try {
|
||||
return params.run();
|
||||
} finally {
|
||||
if (oldPath === undefined) {
|
||||
delete process.env.PATH;
|
||||
} else {
|
||||
process.env.PATH = oldPath;
|
||||
}
|
||||
fs.rmSync(tmp, { recursive: true, force: true });
|
||||
}
|
||||
}
|
||||
|
||||
describe("hardenApprovedExecutionPaths", () => {
|
||||
const cases: HardeningCase[] = [
|
||||
{
|
||||
|
|
@ -318,16 +349,67 @@ describe("hardenApprovedExecutionPaths", () => {
|
|||
initialBody: 'console.log("SAFE");\n',
|
||||
expectedArgvIndex: 2,
|
||||
},
|
||||
{
|
||||
name: "pnpm exec tsx file",
|
||||
argv: ["pnpm", "exec", "tsx", "./run.ts"],
|
||||
scriptName: "run.ts",
|
||||
initialBody: 'console.log("SAFE");\n',
|
||||
expectedArgvIndex: 3,
|
||||
},
|
||||
{
|
||||
name: "pnpm js shim exec tsx file",
|
||||
argv: ["./pnpm.js", "exec", "tsx", "./run.ts"],
|
||||
scriptName: "run.ts",
|
||||
initialBody: 'console.log("SAFE");\n',
|
||||
expectedArgvIndex: 3,
|
||||
},
|
||||
{
|
||||
name: "pnpm exec double-dash tsx file",
|
||||
argv: ["pnpm", "exec", "--", "tsx", "./run.ts"],
|
||||
scriptName: "run.ts",
|
||||
initialBody: 'console.log("SAFE");\n',
|
||||
expectedArgvIndex: 4,
|
||||
},
|
||||
{
|
||||
name: "npx tsx file",
|
||||
argv: ["npx", "tsx", "./run.ts"],
|
||||
scriptName: "run.ts",
|
||||
initialBody: 'console.log("SAFE");\n',
|
||||
expectedArgvIndex: 2,
|
||||
},
|
||||
{
|
||||
name: "bunx tsx file",
|
||||
argv: ["bunx", "tsx", "./run.ts"],
|
||||
scriptName: "run.ts",
|
||||
initialBody: 'console.log("SAFE");\n',
|
||||
expectedArgvIndex: 2,
|
||||
},
|
||||
{
|
||||
name: "npm exec tsx file",
|
||||
argv: ["npm", "exec", "--", "tsx", "./run.ts"],
|
||||
scriptName: "run.ts",
|
||||
initialBody: 'console.log("SAFE");\n',
|
||||
expectedArgvIndex: 4,
|
||||
},
|
||||
];
|
||||
|
||||
for (const runtimeCase of mutableOperandCases) {
|
||||
it(`captures mutable ${runtimeCase.name} operands in approval plans`, () => {
|
||||
withFakeRuntimeBin({
|
||||
binName: runtimeCase.binName!,
|
||||
const binNames = runtimeCase.binName
|
||||
? [runtimeCase.binName]
|
||||
: ["bunx", "pnpm", "npm", "npx", "tsx"];
|
||||
withFakeRuntimeBins({
|
||||
binNames,
|
||||
run: () => {
|
||||
const tmp = fs.mkdtempSync(path.join(os.tmpdir(), "openclaw-approval-script-plan-"));
|
||||
const fixture = createScriptOperandFixture(tmp, runtimeCase);
|
||||
fs.writeFileSync(fixture.scriptPath, fixture.initialBody);
|
||||
const executablePath = fixture.command[0];
|
||||
if (executablePath?.endsWith("pnpm.js")) {
|
||||
const shimPath = path.join(tmp, "pnpm.js");
|
||||
fs.writeFileSync(shimPath, "#!/usr/bin/env node\nconsole.log('shim')\n");
|
||||
fs.chmodSync(shimPath, 0o755);
|
||||
}
|
||||
try {
|
||||
const prepared = buildSystemRunApprovalPlan({
|
||||
command: fixture.command,
|
||||
|
|
@ -441,4 +523,75 @@ describe("hardenApprovedExecutionPaths", () => {
|
|||
},
|
||||
});
|
||||
});
|
||||
|
||||
it("rejects node inline import operands that cannot be bound to one stable file", () => {
|
||||
withFakeRuntimeBin({
|
||||
binName: "node",
|
||||
run: () => {
|
||||
const tmp = fs.mkdtempSync(path.join(os.tmpdir(), "openclaw-node-import-inline-"));
|
||||
try {
|
||||
fs.writeFileSync(path.join(tmp, "main.mjs"), 'console.log("SAFE")\n');
|
||||
fs.writeFileSync(path.join(tmp, "preload.mjs"), 'console.log("SAFE")\n');
|
||||
const prepared = buildSystemRunApprovalPlan({
|
||||
command: ["node", "--import=./preload.mjs", "./main.mjs"],
|
||||
cwd: tmp,
|
||||
});
|
||||
expect(prepared).toEqual({
|
||||
ok: false,
|
||||
message:
|
||||
"SYSTEM_RUN_DENIED: approval cannot safely bind this interpreter/runtime command",
|
||||
});
|
||||
} finally {
|
||||
fs.rmSync(tmp, { recursive: true, force: true });
|
||||
}
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it("rejects shell payloads that hide mutable interpreter scripts", () => {
|
||||
withFakeRuntimeBin({
|
||||
binName: "node",
|
||||
run: () => {
|
||||
const tmp = fs.mkdtempSync(path.join(os.tmpdir(), "openclaw-inline-shell-node-"));
|
||||
try {
|
||||
fs.writeFileSync(path.join(tmp, "run.js"), 'console.log("SAFE")\n');
|
||||
const prepared = buildSystemRunApprovalPlan({
|
||||
command: ["sh", "-lc", "node ./run.js"],
|
||||
cwd: tmp,
|
||||
});
|
||||
expect(prepared).toEqual({
|
||||
ok: false,
|
||||
message:
|
||||
"SYSTEM_RUN_DENIED: approval cannot safely bind this interpreter/runtime command",
|
||||
});
|
||||
} finally {
|
||||
fs.rmSync(tmp, { recursive: true, force: true });
|
||||
}
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it("captures the real shell script operand after value-taking shell flags", () => {
|
||||
const tmp = fs.mkdtempSync(path.join(os.tmpdir(), "openclaw-shell-option-value-"));
|
||||
try {
|
||||
const scriptPath = path.join(tmp, "run.sh");
|
||||
fs.writeFileSync(scriptPath, "#!/bin/sh\necho SAFE\n");
|
||||
fs.writeFileSync(path.join(tmp, "errexit"), "decoy\n");
|
||||
const snapshot = resolveMutableFileOperandSnapshotSync({
|
||||
argv: ["/bin/bash", "-o", "errexit", "./run.sh"],
|
||||
cwd: tmp,
|
||||
shellCommand: null,
|
||||
});
|
||||
expect(snapshot).toEqual({
|
||||
ok: true,
|
||||
snapshot: {
|
||||
argvIndex: 3,
|
||||
path: fs.realpathSync(scriptPath),
|
||||
sha256: expect.any(String),
|
||||
},
|
||||
});
|
||||
} finally {
|
||||
fs.rmSync(tmp, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -19,6 +19,7 @@ import {
|
|||
resolveInlineCommandMatch,
|
||||
} from "../infra/shell-inline-command.js";
|
||||
import { formatExecCommand, resolveSystemRunCommandRequest } from "../infra/system-run-command.js";
|
||||
import { splitShellArgs } from "../utils/shell-argv.js";
|
||||
|
||||
export type ApprovedCwdSnapshot = {
|
||||
cwd: string;
|
||||
|
|
@ -125,6 +126,47 @@ const DENO_RUN_OPTIONS_WITH_VALUE = new Set([
|
|||
"-L",
|
||||
]);
|
||||
|
||||
// Node flags whose value names a file that Node loads before/alongside the
// script (e.g. `node --require ./setup.js main.js`). Passed as
// optionsWithFileValue when scanning node/nodejs argv so such files are not
// mistaken for (or allowed to shadow) the single script operand.
const NODE_OPTIONS_WITH_FILE_VALUE = new Set([
  "-r",
  "--experimental-loader",
  "--import",
  "--loader",
  "--require",
]);

// POSIX shell flags that consume the next argv token as a value, so that
// token must be skipped when looking for the shell script operand
// (e.g. `bash -o errexit ./run.sh` — "errexit" is not the script).
const POSIX_SHELL_OPTIONS_WITH_VALUE = new Set([
  "--init-file",
  "--rcfile",
  "--startup-script",
  "-o",
]);

// npm/npx/bunx exec-style flags that consume a following value token when not
// written in `--flag=value` form.
const NPM_EXEC_OPTIONS_WITH_VALUE = new Set([
  "--cache",
  "--package",
  "--prefix",
  "--script-shell",
  "--userconfig",
  "--workspace",
  "-p",
  "-w",
]);

// npm/npx/bunx exec-style boolean flags (no value token follows).
const NPM_EXEC_FLAG_OPTIONS = new Set([
  "--no",
  "--quiet",
  "--ws",
  "--workspaces",
  "--yes",
  "-q",
  "-y",
]);

// Result of scanning argv for operands that resolve to existing files:
// `hits` holds the matching argv indexes; `sawOptionValueFile` is set when a
// file was bound through an option value, which rules out treating any single
// operand as the one stable script to bind the approval to.
type FileOperandCollection = {
  hits: number[];
  sawOptionValueFile: boolean;
};
|
||||
|
||||
function normalizeString(value: unknown): string | null {
|
||||
if (typeof value !== "string") {
|
||||
return null;
|
||||
|
|
@ -225,10 +267,129 @@ function unwrapArgvForMutableOperand(argv: string[]): { argv: string[]; baseInde
|
|||
current = shellMultiplexerUnwrap.argv;
|
||||
continue;
|
||||
}
|
||||
const packageManagerUnwrap = unwrapKnownPackageManagerExecInvocation(current);
|
||||
if (packageManagerUnwrap) {
|
||||
baseIndex += current.length - packageManagerUnwrap.length;
|
||||
current = packageManagerUnwrap;
|
||||
continue;
|
||||
}
|
||||
return { argv: current, baseIndex };
|
||||
}
|
||||
}
|
||||
|
||||
function unwrapKnownPackageManagerExecInvocation(argv: string[]): string[] | null {
|
||||
const executable = normalizePackageManagerExecToken(argv[0] ?? "");
|
||||
switch (executable) {
|
||||
case "npm":
|
||||
return unwrapNpmExecInvocation(argv);
|
||||
case "npx":
|
||||
case "bunx":
|
||||
return unwrapDirectPackageExecInvocation(argv);
|
||||
case "pnpm":
|
||||
return unwrapPnpmExecInvocation(argv);
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function normalizePackageManagerExecToken(token: string): string {
|
||||
const normalized = normalizeExecutableToken(token);
|
||||
if (!normalized) {
|
||||
return normalized;
|
||||
}
|
||||
return normalized.replace(/\.(?:c|m)?js$/i, "");
|
||||
}
|
||||
|
||||
function unwrapPnpmExecInvocation(argv: string[]): string[] | null {
|
||||
let idx = 1;
|
||||
while (idx < argv.length) {
|
||||
const token = argv[idx]?.trim() ?? "";
|
||||
if (!token) {
|
||||
idx += 1;
|
||||
continue;
|
||||
}
|
||||
if (token === "--") {
|
||||
idx += 1;
|
||||
continue;
|
||||
}
|
||||
if (!token.startsWith("-")) {
|
||||
if (token !== "exec" || idx + 1 >= argv.length) {
|
||||
return null;
|
||||
}
|
||||
const tail = argv.slice(idx + 1);
|
||||
return tail[0] === "--" ? (tail.length > 1 ? tail.slice(1) : null) : tail;
|
||||
}
|
||||
if ((token === "-C" || token === "--dir" || token === "--filter") && !token.includes("=")) {
|
||||
idx += 2;
|
||||
continue;
|
||||
}
|
||||
idx += 1;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function unwrapDirectPackageExecInvocation(argv: string[]): string[] | null {
|
||||
let idx = 1;
|
||||
while (idx < argv.length) {
|
||||
const token = argv[idx]?.trim() ?? "";
|
||||
if (!token) {
|
||||
idx += 1;
|
||||
continue;
|
||||
}
|
||||
if (!token.startsWith("-")) {
|
||||
return argv.slice(idx);
|
||||
}
|
||||
const [flag] = token.toLowerCase().split("=", 2);
|
||||
if (flag === "-c" || flag === "--call") {
|
||||
return null;
|
||||
}
|
||||
if (NPM_EXEC_OPTIONS_WITH_VALUE.has(flag)) {
|
||||
idx += token.includes("=") ? 1 : 2;
|
||||
continue;
|
||||
}
|
||||
if (NPM_EXEC_FLAG_OPTIONS.has(flag)) {
|
||||
idx += 1;
|
||||
continue;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function unwrapNpmExecInvocation(argv: string[]): string[] | null {
|
||||
let idx = 1;
|
||||
while (idx < argv.length) {
|
||||
const token = argv[idx]?.trim() ?? "";
|
||||
if (!token) {
|
||||
idx += 1;
|
||||
continue;
|
||||
}
|
||||
if (!token.startsWith("-")) {
|
||||
if (token !== "exec") {
|
||||
return null;
|
||||
}
|
||||
idx += 1;
|
||||
break;
|
||||
}
|
||||
if (
|
||||
(token === "-C" || token === "--prefix" || token === "--userconfig") &&
|
||||
!token.includes("=")
|
||||
) {
|
||||
idx += 2;
|
||||
continue;
|
||||
}
|
||||
idx += 1;
|
||||
}
|
||||
if (idx >= argv.length) {
|
||||
return null;
|
||||
}
|
||||
const tail = argv.slice(idx);
|
||||
if (tail[0] === "--") {
|
||||
return tail.length > 1 ? tail.slice(1) : null;
|
||||
}
|
||||
return unwrapDirectPackageExecInvocation(["npx", ...tail]);
|
||||
}
|
||||
|
||||
function resolvePosixShellScriptOperandIndex(argv: string[]): number | null {
|
||||
if (
|
||||
resolveInlineCommandMatch(argv, POSIX_INLINE_COMMAND_FLAGS, {
|
||||
|
|
@ -254,6 +415,13 @@ function resolvePosixShellScriptOperandIndex(argv: string[]): number | null {
|
|||
return null;
|
||||
}
|
||||
if (!afterDoubleDash && token.startsWith("-")) {
|
||||
const [flag] = token.toLowerCase().split("=", 2);
|
||||
if (POSIX_SHELL_OPTIONS_WITH_VALUE.has(flag)) {
|
||||
if (!token.includes("=")) {
|
||||
i += 1;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
return i;
|
||||
|
|
@ -330,7 +498,8 @@ function collectExistingFileOperandIndexes(params: {
|
|||
argv: string[];
|
||||
startIndex: number;
|
||||
cwd: string | undefined;
|
||||
}): number[] {
|
||||
optionsWithFileValue?: ReadonlySet<string>;
|
||||
}): FileOperandCollection {
|
||||
let afterDoubleDash = false;
|
||||
const hits: number[] = [];
|
||||
for (let i = params.startIndex; i < params.argv.length; i += 1) {
|
||||
|
|
@ -349,28 +518,45 @@ function collectExistingFileOperandIndexes(params: {
|
|||
continue;
|
||||
}
|
||||
if (token === "-") {
|
||||
return [];
|
||||
return { hits: [], sawOptionValueFile: false };
|
||||
}
|
||||
if (token.startsWith("-")) {
|
||||
const [flag, inlineValue] = token.split("=", 2);
|
||||
if (params.optionsWithFileValue?.has(flag.toLowerCase())) {
|
||||
if (inlineValue && resolvesToExistingFileSync(inlineValue, params.cwd)) {
|
||||
hits.push(i);
|
||||
return { hits, sawOptionValueFile: true };
|
||||
}
|
||||
const nextToken = params.argv[i + 1]?.trim() ?? "";
|
||||
if (!inlineValue && nextToken && resolvesToExistingFileSync(nextToken, params.cwd)) {
|
||||
hits.push(i + 1);
|
||||
return { hits, sawOptionValueFile: true };
|
||||
}
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if (resolvesToExistingFileSync(token, params.cwd)) {
|
||||
hits.push(i);
|
||||
}
|
||||
}
|
||||
return hits;
|
||||
return { hits, sawOptionValueFile: false };
|
||||
}
|
||||
|
||||
function resolveGenericInterpreterScriptOperandIndex(params: {
|
||||
argv: string[];
|
||||
cwd: string | undefined;
|
||||
optionsWithFileValue?: ReadonlySet<string>;
|
||||
}): number | null {
|
||||
const hits = collectExistingFileOperandIndexes({
|
||||
const collection = collectExistingFileOperandIndexes({
|
||||
argv: params.argv,
|
||||
startIndex: 1,
|
||||
cwd: params.cwd,
|
||||
optionsWithFileValue: params.optionsWithFileValue,
|
||||
});
|
||||
return hits.length === 1 ? hits[0] : null;
|
||||
if (collection.sawOptionValueFile) {
|
||||
return null;
|
||||
}
|
||||
return collection.hits.length === 1 ? collection.hits[0] : null;
|
||||
}
|
||||
|
||||
function resolveBunScriptOperandIndex(params: {
|
||||
|
|
@ -462,16 +648,39 @@ function resolveMutableFileOperandIndex(argv: string[], cwd: string | undefined)
|
|||
const genericIndex = resolveGenericInterpreterScriptOperandIndex({
|
||||
argv: unwrapped.argv,
|
||||
cwd,
|
||||
optionsWithFileValue:
|
||||
executable === "node" || executable === "nodejs" ? NODE_OPTIONS_WITH_FILE_VALUE : undefined,
|
||||
});
|
||||
return genericIndex === null ? null : unwrapped.baseIndex + genericIndex;
|
||||
}
|
||||
|
||||
function shellPayloadNeedsStableBinding(shellCommand: string, cwd: string | undefined): boolean {
|
||||
const argv = splitShellArgs(shellCommand);
|
||||
if (!argv || argv.length === 0) {
|
||||
return false;
|
||||
}
|
||||
const snapshot = resolveMutableFileOperandSnapshotSync({
|
||||
argv,
|
||||
cwd,
|
||||
shellCommand: null,
|
||||
});
|
||||
if (!snapshot.ok) {
|
||||
return true;
|
||||
}
|
||||
if (snapshot.snapshot) {
|
||||
return true;
|
||||
}
|
||||
const firstToken = argv[0]?.trim() ?? "";
|
||||
return resolvesToExistingFileSync(firstToken, cwd);
|
||||
}
|
||||
|
||||
function requiresStableInterpreterApprovalBindingWithShellCommand(params: {
|
||||
argv: string[];
|
||||
shellCommand: string | null;
|
||||
cwd: string | undefined;
|
||||
}): boolean {
|
||||
if (params.shellCommand !== null) {
|
||||
return false;
|
||||
return shellPayloadNeedsStableBinding(params.shellCommand, params.cwd);
|
||||
}
|
||||
const unwrapped = unwrapArgvForMutableOperand(params.argv);
|
||||
const executable = normalizeExecutableToken(unwrapped.argv[0] ?? "");
|
||||
|
|
@ -495,6 +704,7 @@ export function resolveMutableFileOperandSnapshotSync(params: {
|
|||
requiresStableInterpreterApprovalBindingWithShellCommand({
|
||||
argv: params.argv,
|
||||
shellCommand: params.shellCommand,
|
||||
cwd: params.cwd,
|
||||
})
|
||||
) {
|
||||
return {
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { importFreshModule } from "../../test/helpers/import-fresh.js";
|
||||
|
||||
const diagnosticMocks = vi.hoisted(() => ({
|
||||
logLaneEnqueue: vi.fn(),
|
||||
|
|
@ -334,4 +335,42 @@ describe("command queue", () => {
|
|||
resetAllLanes();
|
||||
await expect(enqueueCommand(async () => "ok")).resolves.toBe("ok");
|
||||
});
|
||||
|
||||
it("shares lane state across distinct module instances", async () => {
|
||||
const commandQueueA = await importFreshModule<typeof import("./command-queue.js")>(
|
||||
import.meta.url,
|
||||
"./command-queue.js?scope=shared-a",
|
||||
);
|
||||
const commandQueueB = await importFreshModule<typeof import("./command-queue.js")>(
|
||||
import.meta.url,
|
||||
"./command-queue.js?scope=shared-b",
|
||||
);
|
||||
const lane = `shared-state-${Date.now()}-${Math.random().toString(16).slice(2)}`;
|
||||
|
||||
let release!: () => void;
|
||||
const blocker = new Promise<void>((resolve) => {
|
||||
release = resolve;
|
||||
});
|
||||
|
||||
commandQueueA.resetAllLanes();
|
||||
|
||||
try {
|
||||
const task = commandQueueA.enqueueCommandInLane(lane, async () => {
|
||||
await blocker;
|
||||
return "done";
|
||||
});
|
||||
|
||||
await vi.waitFor(() => {
|
||||
expect(commandQueueB.getQueueSize(lane)).toBe(1);
|
||||
expect(commandQueueB.getActiveTaskCount()).toBe(1);
|
||||
});
|
||||
|
||||
release();
|
||||
await expect(task).resolves.toBe("done");
|
||||
expect(commandQueueB.getQueueSize(lane)).toBe(0);
|
||||
} finally {
|
||||
release();
|
||||
commandQueueA.resetAllLanes();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import { diagnosticLogger as diag, logLaneDequeue, logLaneEnqueue } from "../logging/diagnostic.js";
|
||||
import { resolveGlobalSingleton } from "../shared/global-singleton.js";
|
||||
import { CommandLane } from "./lanes.js";
|
||||
/**
|
||||
* Dedicated error type thrown when a queued command is rejected because
|
||||
|
|
@ -23,9 +24,6 @@ export class GatewayDrainingError extends Error {
|
|||
}
|
||||
}
|
||||
|
||||
// Set while gateway is draining for restart; new enqueues are rejected.
|
||||
let gatewayDraining = false;
|
||||
|
||||
// Minimal in-process queue to serialize command executions.
|
||||
// Default lane ("main") preserves the existing behavior. Additional lanes allow
|
||||
// low-risk parallelism (e.g. cron jobs) without interleaving stdin / logs for
|
||||
|
|
@ -49,11 +47,20 @@ type LaneState = {
|
|||
generation: number;
|
||||
};
|
||||
|
||||
const lanes = new Map<string, LaneState>();
|
||||
let nextTaskId = 1;
|
||||
/**
|
||||
* Keep queue runtime state on globalThis so every bundled entry/chunk shares
|
||||
* the same lanes, counters, and draining flag in production builds.
|
||||
*/
|
||||
const COMMAND_QUEUE_STATE_KEY = Symbol.for("openclaw.commandQueueState");
|
||||
|
||||
const queueState = resolveGlobalSingleton(COMMAND_QUEUE_STATE_KEY, () => ({
|
||||
gatewayDraining: false,
|
||||
lanes: new Map<string, LaneState>(),
|
||||
nextTaskId: 1,
|
||||
}));
|
||||
|
||||
function getLaneState(lane: string): LaneState {
|
||||
const existing = lanes.get(lane);
|
||||
const existing = queueState.lanes.get(lane);
|
||||
if (existing) {
|
||||
return existing;
|
||||
}
|
||||
|
|
@ -65,7 +72,7 @@ function getLaneState(lane: string): LaneState {
|
|||
draining: false,
|
||||
generation: 0,
|
||||
};
|
||||
lanes.set(lane, created);
|
||||
queueState.lanes.set(lane, created);
|
||||
return created;
|
||||
}
|
||||
|
||||
|
|
@ -105,7 +112,7 @@ function drainLane(lane: string) {
|
|||
);
|
||||
}
|
||||
logLaneDequeue(lane, waitedMs, state.queue.length);
|
||||
const taskId = nextTaskId++;
|
||||
const taskId = queueState.nextTaskId++;
|
||||
const taskGeneration = state.generation;
|
||||
state.activeTaskIds.add(taskId);
|
||||
void (async () => {
|
||||
|
|
@ -148,7 +155,7 @@ function drainLane(lane: string) {
|
|||
* `GatewayDrainingError` instead of being silently killed on shutdown.
|
||||
*/
|
||||
export function markGatewayDraining(): void {
|
||||
gatewayDraining = true;
|
||||
queueState.gatewayDraining = true;
|
||||
}
|
||||
|
||||
export function setCommandLaneConcurrency(lane: string, maxConcurrent: number) {
|
||||
|
|
@ -166,7 +173,7 @@ export function enqueueCommandInLane<T>(
|
|||
onWait?: (waitMs: number, queuedAhead: number) => void;
|
||||
},
|
||||
): Promise<T> {
|
||||
if (gatewayDraining) {
|
||||
if (queueState.gatewayDraining) {
|
||||
return Promise.reject(new GatewayDrainingError());
|
||||
}
|
||||
const cleaned = lane.trim() || CommandLane.Main;
|
||||
|
|
@ -198,7 +205,7 @@ export function enqueueCommand<T>(
|
|||
|
||||
export function getQueueSize(lane: string = CommandLane.Main) {
|
||||
const resolved = lane.trim() || CommandLane.Main;
|
||||
const state = lanes.get(resolved);
|
||||
const state = queueState.lanes.get(resolved);
|
||||
if (!state) {
|
||||
return 0;
|
||||
}
|
||||
|
|
@ -207,7 +214,7 @@ export function getQueueSize(lane: string = CommandLane.Main) {
|
|||
|
||||
export function getTotalQueueSize() {
|
||||
let total = 0;
|
||||
for (const s of lanes.values()) {
|
||||
for (const s of queueState.lanes.values()) {
|
||||
total += s.queue.length + s.activeTaskIds.size;
|
||||
}
|
||||
return total;
|
||||
|
|
@ -215,7 +222,7 @@ export function getTotalQueueSize() {
|
|||
|
||||
export function clearCommandLane(lane: string = CommandLane.Main) {
|
||||
const cleaned = lane.trim() || CommandLane.Main;
|
||||
const state = lanes.get(cleaned);
|
||||
const state = queueState.lanes.get(cleaned);
|
||||
if (!state) {
|
||||
return 0;
|
||||
}
|
||||
|
|
@ -242,9 +249,9 @@ export function clearCommandLane(lane: string = CommandLane.Main) {
|
|||
* `enqueueCommandInLane()` call (which may never come).
|
||||
*/
|
||||
export function resetAllLanes(): void {
|
||||
gatewayDraining = false;
|
||||
queueState.gatewayDraining = false;
|
||||
const lanesToDrain: string[] = [];
|
||||
for (const state of lanes.values()) {
|
||||
for (const state of queueState.lanes.values()) {
|
||||
state.generation += 1;
|
||||
state.activeTaskIds.clear();
|
||||
state.draining = false;
|
||||
|
|
@ -264,7 +271,7 @@ export function resetAllLanes(): void {
|
|||
*/
|
||||
export function getActiveTaskCount(): number {
|
||||
let total = 0;
|
||||
for (const s of lanes.values()) {
|
||||
for (const s of queueState.lanes.values()) {
|
||||
total += s.activeTaskIds.size;
|
||||
}
|
||||
return total;
|
||||
|
|
@ -283,7 +290,7 @@ export function waitForActiveTasks(timeoutMs: number): Promise<{ drained: boolea
|
|||
const POLL_INTERVAL_MS = 50;
|
||||
const deadline = Date.now() + timeoutMs;
|
||||
const activeAtStart = new Set<number>();
|
||||
for (const state of lanes.values()) {
|
||||
for (const state of queueState.lanes.values()) {
|
||||
for (const taskId of state.activeTaskIds) {
|
||||
activeAtStart.add(taskId);
|
||||
}
|
||||
|
|
@ -297,7 +304,7 @@ export function waitForActiveTasks(timeoutMs: number): Promise<{ drained: boolea
|
|||
}
|
||||
|
||||
let hasPending = false;
|
||||
for (const state of lanes.values()) {
|
||||
for (const state of queueState.lanes.values()) {
|
||||
for (const taskId of state.activeTaskIds) {
|
||||
if (activeAtStart.has(taskId)) {
|
||||
hasPending = true;
|
||||
|
|
|
|||
|
|
@ -21,6 +21,7 @@ import {
|
|||
} from "../config/model-input.js";
|
||||
import type { AgentToolsConfig } from "../config/types.tools.js";
|
||||
import { resolveGatewayAuth } from "../gateway/auth.js";
|
||||
import { resolveAllowedAgentIds } from "../gateway/hooks.js";
|
||||
import {
|
||||
DEFAULT_DANGEROUS_NODE_COMMANDS,
|
||||
resolveNodeCommandAllowlist,
|
||||
|
|
@ -663,6 +664,7 @@ export function collectHooksHardeningFindings(
|
|||
const allowRequestSessionKey = cfg.hooks?.allowRequestSessionKey === true;
|
||||
const defaultSessionKey =
|
||||
typeof cfg.hooks?.defaultSessionKey === "string" ? cfg.hooks.defaultSessionKey.trim() : "";
|
||||
const allowedAgentIds = resolveAllowedAgentIds(cfg.hooks?.allowedAgentIds);
|
||||
const allowedPrefixes = Array.isArray(cfg.hooks?.allowedSessionKeyPrefixes)
|
||||
? cfg.hooks.allowedSessionKeyPrefixes
|
||||
.map((prefix) => prefix.trim())
|
||||
|
|
@ -681,6 +683,18 @@ export function collectHooksHardeningFindings(
|
|||
});
|
||||
}
|
||||
|
||||
if (allowedAgentIds === undefined) {
|
||||
findings.push({
|
||||
checkId: "hooks.allowed_agent_ids_unrestricted",
|
||||
severity: remoteExposure ? "critical" : "warn",
|
||||
title: "Hook agent routing allows any configured agent",
|
||||
detail:
|
||||
"hooks.allowedAgentIds is unset or includes '*', so authenticated hook callers may route to any configured agent id.",
|
||||
remediation:
|
||||
'Set hooks.allowedAgentIds to an explicit allowlist (for example, ["hooks", "main"]) or [] to deny explicit agent routing.',
|
||||
});
|
||||
}
|
||||
|
||||
if (allowRequestSessionKey) {
|
||||
findings.push({
|
||||
checkId: "hooks.request_session_key_enabled",
|
||||
|
|
|
|||
|
|
@ -2656,6 +2656,52 @@ description: test skill
|
|||
expectFinding(res, "hooks.default_session_key_unset", "warn");
|
||||
});
|
||||
|
||||
it("scores unrestricted hooks.allowedAgentIds by gateway exposure", async () => {
|
||||
const baseHooks = {
|
||||
enabled: true,
|
||||
token: "shared-gateway-token-1234567890",
|
||||
defaultSessionKey: "hook:ingress",
|
||||
} satisfies NonNullable<OpenClawConfig["hooks"]>;
|
||||
const cases: Array<{
|
||||
name: string;
|
||||
cfg: OpenClawConfig;
|
||||
expectedSeverity: "warn" | "critical";
|
||||
}> = [
|
||||
{
|
||||
name: "local exposure",
|
||||
cfg: { hooks: baseHooks },
|
||||
expectedSeverity: "warn",
|
||||
},
|
||||
{
|
||||
name: "remote exposure",
|
||||
cfg: { gateway: { bind: "lan" }, hooks: baseHooks },
|
||||
expectedSeverity: "critical",
|
||||
},
|
||||
];
|
||||
await Promise.all(
|
||||
cases.map(async (testCase) => {
|
||||
const res = await audit(testCase.cfg);
|
||||
expect(
|
||||
hasFinding(res, "hooks.allowed_agent_ids_unrestricted", testCase.expectedSeverity),
|
||||
testCase.name,
|
||||
).toBe(true);
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it("treats wildcard hooks.allowedAgentIds as unrestricted routing", async () => {
|
||||
const res = await audit({
|
||||
hooks: {
|
||||
enabled: true,
|
||||
token: "shared-gateway-token-1234567890",
|
||||
defaultSessionKey: "hook:ingress",
|
||||
allowedAgentIds: ["*"],
|
||||
},
|
||||
});
|
||||
|
||||
expectFinding(res, "hooks.allowed_agent_ids_unrestricted", "warn");
|
||||
});
|
||||
|
||||
it("scores hooks request sessionKey override by gateway exposure", async () => {
|
||||
const baseHooks = {
|
||||
enabled: true,
|
||||
|
|
|
|||
|
|
@ -0,0 +1,39 @@
|
|||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
import { resolveGlobalMap, resolveGlobalSingleton } from "./global-singleton.js";
|
||||
|
||||
const TEST_KEY = Symbol("global-singleton:test");
|
||||
const TEST_MAP_KEY = Symbol("global-singleton:test-map");
|
||||
|
||||
afterEach(() => {
|
||||
delete (globalThis as Record<PropertyKey, unknown>)[TEST_KEY];
|
||||
delete (globalThis as Record<PropertyKey, unknown>)[TEST_MAP_KEY];
|
||||
});
|
||||
|
||||
describe("resolveGlobalSingleton", () => {
|
||||
it("reuses an initialized singleton", () => {
|
||||
const create = vi.fn(() => ({ value: 1 }));
|
||||
|
||||
const first = resolveGlobalSingleton(TEST_KEY, create);
|
||||
const second = resolveGlobalSingleton(TEST_KEY, create);
|
||||
|
||||
expect(first).toBe(second);
|
||||
expect(create).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it("does not re-run the factory when undefined was already stored", () => {
|
||||
const create = vi.fn(() => undefined);
|
||||
|
||||
expect(resolveGlobalSingleton(TEST_KEY, create)).toBeUndefined();
|
||||
expect(resolveGlobalSingleton(TEST_KEY, create)).toBeUndefined();
|
||||
expect(create).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe("resolveGlobalMap", () => {
|
||||
it("reuses the same map instance", () => {
|
||||
const first = resolveGlobalMap<string, number>(TEST_MAP_KEY);
|
||||
const second = resolveGlobalMap<string, number>(TEST_MAP_KEY);
|
||||
|
||||
expect(first).toBe(second);
|
||||
});
|
||||
});
|
||||
|
|
@ -0,0 +1,13 @@
|
|||
export function resolveGlobalSingleton<T>(key: symbol, create: () => T): T {
|
||||
const globalStore = globalThis as Record<PropertyKey, unknown>;
|
||||
if (Object.prototype.hasOwnProperty.call(globalStore, key)) {
|
||||
return globalStore[key] as T;
|
||||
}
|
||||
const created = create();
|
||||
globalStore[key] = created;
|
||||
return created;
|
||||
}
|
||||
|
||||
export function resolveGlobalMap<TKey, TValue>(key: symbol): Map<TKey, TValue> {
|
||||
return resolveGlobalSingleton(key, () => new Map<TKey, TValue>());
|
||||
}
|
||||
|
|
@ -1,4 +1,5 @@
|
|||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
import { importFreshModule } from "../../test/helpers/import-fresh.js";
|
||||
import {
|
||||
clearSlackThreadParticipationCache,
|
||||
hasSlackThreadParticipation,
|
||||
|
|
@ -49,6 +50,29 @@ describe("slack sent-thread-cache", () => {
|
|||
expect(hasSlackThreadParticipation("A1", "C456", "1700000000.000002")).toBe(false);
|
||||
});
|
||||
|
||||
it("shares thread participation across distinct module instances", async () => {
|
||||
const cacheA = await importFreshModule<typeof import("./sent-thread-cache.js")>(
|
||||
import.meta.url,
|
||||
"./sent-thread-cache.js?scope=shared-a",
|
||||
);
|
||||
const cacheB = await importFreshModule<typeof import("./sent-thread-cache.js")>(
|
||||
import.meta.url,
|
||||
"./sent-thread-cache.js?scope=shared-b",
|
||||
);
|
||||
|
||||
cacheA.clearSlackThreadParticipationCache();
|
||||
|
||||
try {
|
||||
cacheA.recordSlackThreadParticipation("A1", "C123", "1700000000.000001");
|
||||
expect(cacheB.hasSlackThreadParticipation("A1", "C123", "1700000000.000001")).toBe(true);
|
||||
|
||||
cacheB.clearSlackThreadParticipationCache();
|
||||
expect(cacheA.hasSlackThreadParticipation("A1", "C123", "1700000000.000001")).toBe(false);
|
||||
} finally {
|
||||
cacheA.clearSlackThreadParticipationCache();
|
||||
}
|
||||
});
|
||||
|
||||
it("expired entries return false and are cleaned up on read", () => {
|
||||
recordSlackThreadParticipation("A1", "C123", "1700000000.000001");
|
||||
// Advance time past the 24-hour TTL
|
||||
|
|
|
|||
|
|
@ -1,3 +1,5 @@
|
|||
import { resolveGlobalMap } from "../shared/global-singleton.js";
|
||||
|
||||
/**
|
||||
* In-memory cache of Slack threads the bot has participated in.
|
||||
* Used to auto-respond in threads without requiring @mention after the first reply.
|
||||
|
|
@ -7,7 +9,13 @@
|
|||
const TTL_MS = 24 * 60 * 60 * 1000; // 24 hours
|
||||
const MAX_ENTRIES = 5000;
|
||||
|
||||
const threadParticipation = new Map<string, number>();
|
||||
/**
|
||||
* Keep Slack thread participation shared across bundled chunks so thread
|
||||
* auto-reply gating does not diverge between prepare/dispatch call paths.
|
||||
*/
|
||||
const SLACK_THREAD_PARTICIPATION_KEY = Symbol.for("openclaw.slackThreadParticipation");
|
||||
|
||||
const threadParticipation = resolveGlobalMap<string, number>(SLACK_THREAD_PARTICIPATION_KEY);
|
||||
|
||||
function makeKey(accountId: string, channelId: string, threadTs: string): string {
|
||||
return `${accountId}:${channelId}:${threadTs}`;
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
import type { Bot } from "grammy";
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { createTelegramDraftStream } from "./draft-stream.js";
|
||||
import { importFreshModule } from "../../test/helpers/import-fresh.js";
|
||||
import { __testing, createTelegramDraftStream } from "./draft-stream.js";
|
||||
|
||||
type TelegramDraftStreamParams = Parameters<typeof createTelegramDraftStream>[0];
|
||||
|
||||
|
|
@ -65,6 +66,10 @@ function createForceNewMessageHarness(params: { throttleMs?: number } = {}) {
|
|||
}
|
||||
|
||||
describe("createTelegramDraftStream", () => {
|
||||
afterEach(() => {
|
||||
__testing.resetTelegramDraftStreamForTests();
|
||||
});
|
||||
|
||||
it("sends stream preview message with message_thread_id when provided", async () => {
|
||||
const api = createMockDraftApi();
|
||||
const stream = createForumDraftStream(api);
|
||||
|
|
@ -355,6 +360,46 @@ describe("createTelegramDraftStream", () => {
|
|||
expect(api.editMessageText).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("shares draft-id allocation across distinct module instances", async () => {
|
||||
const draftA = await importFreshModule<typeof import("./draft-stream.js")>(
|
||||
import.meta.url,
|
||||
"./draft-stream.js?scope=shared-a",
|
||||
);
|
||||
const draftB = await importFreshModule<typeof import("./draft-stream.js")>(
|
||||
import.meta.url,
|
||||
"./draft-stream.js?scope=shared-b",
|
||||
);
|
||||
const apiA = createMockDraftApi();
|
||||
const apiB = createMockDraftApi();
|
||||
|
||||
draftA.__testing.resetTelegramDraftStreamForTests();
|
||||
|
||||
try {
|
||||
const streamA = draftA.createTelegramDraftStream({
|
||||
api: apiA as unknown as Bot["api"],
|
||||
chatId: 123,
|
||||
thread: { id: 42, scope: "dm" },
|
||||
previewTransport: "draft",
|
||||
});
|
||||
const streamB = draftB.createTelegramDraftStream({
|
||||
api: apiB as unknown as Bot["api"],
|
||||
chatId: 123,
|
||||
thread: { id: 42, scope: "dm" },
|
||||
previewTransport: "draft",
|
||||
});
|
||||
|
||||
streamA.update("Message A");
|
||||
await streamA.flush();
|
||||
streamB.update("Message B");
|
||||
await streamB.flush();
|
||||
|
||||
expect(apiA.sendMessageDraft.mock.calls[0]?.[1]).toBe(1);
|
||||
expect(apiB.sendMessageDraft.mock.calls[0]?.[1]).toBe(2);
|
||||
} finally {
|
||||
draftA.__testing.resetTelegramDraftStreamForTests();
|
||||
}
|
||||
});
|
||||
|
||||
it("creates new message after forceNewMessage is called", async () => {
|
||||
const { api, stream } = createForceNewMessageHarness();
|
||||
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
import type { Bot } from "grammy";
|
||||
import { createFinalizableDraftLifecycle } from "../channels/draft-stream-controls.js";
|
||||
import { resolveGlobalSingleton } from "../shared/global-singleton.js";
|
||||
import { buildTelegramThreadParams, type TelegramThreadSpec } from "./bot/helpers.js";
|
||||
import { isSafeToRetrySendError, isTelegramClientRejection } from "./network-errors.js";
|
||||
|
||||
|
|
@ -21,11 +22,20 @@ type TelegramSendMessageDraft = (
|
|||
},
|
||||
) => Promise<unknown>;
|
||||
|
||||
let nextDraftId = 0;
|
||||
/**
|
||||
* Keep draft-id allocation shared across bundled chunks so concurrent preview
|
||||
* lanes do not accidentally reuse draft ids when code-split entries coexist.
|
||||
*/
|
||||
const TELEGRAM_DRAFT_STREAM_STATE_KEY = Symbol.for("openclaw.telegramDraftStreamState");
|
||||
|
||||
const draftStreamState = resolveGlobalSingleton(TELEGRAM_DRAFT_STREAM_STATE_KEY, () => ({
|
||||
nextDraftId: 0,
|
||||
}));
|
||||
|
||||
function allocateTelegramDraftId(): number {
|
||||
nextDraftId = nextDraftId >= TELEGRAM_DRAFT_ID_MAX ? 1 : nextDraftId + 1;
|
||||
return nextDraftId;
|
||||
draftStreamState.nextDraftId =
|
||||
draftStreamState.nextDraftId >= TELEGRAM_DRAFT_ID_MAX ? 1 : draftStreamState.nextDraftId + 1;
|
||||
return draftStreamState.nextDraftId;
|
||||
}
|
||||
|
||||
function resolveSendMessageDraftApi(api: Bot["api"]): TelegramSendMessageDraft | undefined {
|
||||
|
|
@ -441,3 +451,9 @@ export function createTelegramDraftStream(params: {
|
|||
sendMayHaveLanded: () => messageSendAttempted && typeof streamMessageId !== "number",
|
||||
};
|
||||
}
|
||||
|
||||
export const __testing = {
|
||||
resetTelegramDraftStreamForTests() {
|
||||
draftStreamState.nextDraftId = 0;
|
||||
},
|
||||
};
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
import type { Bot } from "grammy";
|
||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
import { importFreshModule } from "../../test/helpers/import-fresh.js";
|
||||
import {
|
||||
getTelegramSendTestMocks,
|
||||
importTelegramSendModule,
|
||||
|
|
@ -88,6 +89,29 @@ describe("sent-message-cache", () => {
|
|||
clearSentMessageCache();
|
||||
expect(wasSentByBot(123, 1)).toBe(false);
|
||||
});
|
||||
|
||||
it("shares sent-message state across distinct module instances", async () => {
|
||||
const cacheA = await importFreshModule<typeof import("./sent-message-cache.js")>(
|
||||
import.meta.url,
|
||||
"./sent-message-cache.js?scope=shared-a",
|
||||
);
|
||||
const cacheB = await importFreshModule<typeof import("./sent-message-cache.js")>(
|
||||
import.meta.url,
|
||||
"./sent-message-cache.js?scope=shared-b",
|
||||
);
|
||||
|
||||
cacheA.clearSentMessageCache();
|
||||
|
||||
try {
|
||||
cacheA.recordSentMessage(123, 1);
|
||||
expect(cacheB.wasSentByBot(123, 1)).toBe(true);
|
||||
|
||||
cacheB.clearSentMessageCache();
|
||||
expect(cacheA.wasSentByBot(123, 1)).toBe(false);
|
||||
} finally {
|
||||
cacheA.clearSentMessageCache();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("buildInlineKeyboard", () => {
|
||||
|
|
|
|||
|
|
@ -1,3 +1,5 @@
|
|||
import { resolveGlobalMap } from "../shared/global-singleton.js";
|
||||
|
||||
/**
|
||||
* In-memory cache of sent message IDs per chat.
|
||||
* Used to identify bot's own messages for reaction filtering ("own" mode).
|
||||
|
|
@ -9,7 +11,13 @@ type CacheEntry = {
|
|||
timestamps: Map<number, number>;
|
||||
};
|
||||
|
||||
const sentMessages = new Map<string, CacheEntry>();
|
||||
/**
|
||||
* Keep sent-message tracking shared across bundled chunks so Telegram reaction
|
||||
* filters see the same sent-message history regardless of which chunk recorded it.
|
||||
*/
|
||||
const TELEGRAM_SENT_MESSAGES_KEY = Symbol.for("openclaw.telegramSentMessages");
|
||||
|
||||
const sentMessages = resolveGlobalMap<string, CacheEntry>(TELEGRAM_SENT_MESSAGES_KEY);
|
||||
|
||||
function getChatKey(chatId: number | string): string {
|
||||
return String(chatId);
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue