fix: align models status provider auth reporting

This commit is contained in:
Peter Steinberger 2026-04-05 22:44:40 +01:00
parent 94256ea1a0
commit dea3ab0aa9
No known key found for this signature in database
13 changed files with 132 additions and 32 deletions

View File

@ -31,6 +31,8 @@ Current usage-window providers: Anthropic, GitHub Copilot, Gemini CLI, OpenAI
Codex, MiniMax, Xiaomi, and z.ai. Usage auth comes from provider-specific hooks
when available; otherwise OpenClaw falls back to matching OAuth/API-key
credentials from auth profiles, env, or config.
In `--json` output, `auth.providers` is the env/config/store-aware provider
overview, while `auth.oauth` is auth-store profile health only.
Add `--probe` to run live auth probes against each configured provider profile.
Probes are real requests (may consume tokens and trigger rate limits).
Use `--agent <id>` to inspect a configured agent's model/auth state. When omitted,

View File

@ -175,7 +175,8 @@ resolved primary model.
OAuth status is always shown (and included in `--json` output). If a configured
provider has no credentials, `models status` prints a **Missing auth** section.
JSON includes `auth.oauth` (warn window + profiles) and `auth.providers`
(effective auth per provider).
(effective auth per provider, including env-backed credentials). `auth.oauth`
is auth-store profile health only; env-only providers do not appear there.
Use `--check` for automation (exit `1` when missing/expired, `2` when expiring).
Use `--probe` for live auth checks; probe rows can come from auth profiles, env
credentials, or `models.json`.

View File

@ -6,6 +6,10 @@ import {
buildFalVideoGenerationProvider,
} from "./video-generation-provider.js";
function createMockRequestConfig() {
  // Shape of the `requestConfig` field produced by the real resolver.
  type RequestConfig = ReturnType<
    typeof providerHttp.resolveProviderHttpRequestConfig
  >["requestConfig"];
  // Empty stub cast to that shape — these tests never read its fields,
  // they only need the property to be present on the mocked return value.
  const stub = {} as RequestConfig;
  return stub;
}
describe("fal video generation provider", () => {
const fetchGuardMock = vi.fn();
@ -19,6 +23,7 @@ describe("fal video generation provider", () => {
vi.spyOn(providerAuth, "resolveApiKeyForProvider").mockResolvedValue({
apiKey: "fal-key",
source: "env",
mode: "api-key",
});
vi.spyOn(providerHttp, "resolveProviderHttpRequestConfig").mockReturnValue({
baseUrl: "https://fal.run",
@ -28,6 +33,7 @@ describe("fal video generation provider", () => {
"Content-Type": "application/json",
}),
dispatcherPolicy: undefined,
requestConfig: createMockRequestConfig(),
});
vi.spyOn(providerHttp, "assertOkOrThrowHttpError").mockResolvedValue(undefined);
_setFalVideoFetchGuardForTesting(fetchGuardMock as never);

View File

@ -197,7 +197,9 @@ describe("firecrawl tools", () => {
async () => "ok",
);
const init = fetchSpy.mock.calls[0]?.[1];
const init = (
fetchSpy.mock.calls as unknown as Array<[RequestInfo | URL, RequestInit | undefined]>
)[0]?.[1];
const authHeader = new Headers(init?.headers).get("Authorization");
expect(authHeader).toBe("Bearer firecrawl-test-key");
});

View File

@ -7,7 +7,6 @@ describePluginRegistrationContract({
imageGenerationProviderIds: ["google"],
videoGenerationProviderIds: ["google"],
webSearchProviderIds: ["gemini"],
cliBackendIds: ["google-gemini-cli"],
requireDescribeImages: true,
requireGenerateImage: true,
requireGenerateVideo: true,

View File

@ -38,6 +38,7 @@ describe("google video generation provider", () => {
vi.spyOn(providerAuthRuntime, "resolveApiKeyForProvider").mockResolvedValue({
apiKey: "google-key",
source: "env",
mode: "api-key",
});
generateVideosMock.mockResolvedValue({
done: false,
@ -100,6 +101,7 @@ describe("google video generation provider", () => {
vi.spyOn(providerAuthRuntime, "resolveApiKeyForProvider").mockResolvedValue({
apiKey: "google-key",
source: "env",
mode: "api-key",
});
const provider = buildGoogleVideoGenerationProvider();

View File

@ -9,7 +9,6 @@ describePluginRegistrationContract({
mediaUnderstandingProviderIds: ["openai", "openai-codex"],
imageGenerationProviderIds: ["openai"],
videoGenerationProviderIds: ["openai"],
cliBackendIds: ["codex-cli"],
requireGenerateImage: true,
requireGenerateVideo: true,
});

View File

@ -26,7 +26,7 @@ const providerFilter = parseCsvFilter(process.env.OPENCLAW_LIVE_VIDEO_GENERATION
const envModelMap = parseProviderModelMap(process.env.OPENCLAW_LIVE_VIDEO_GENERATION_MODELS);
type LiveProviderCase = {
plugin: { register: (api: unknown) => void | Promise<void> };
plugin: Parameters<typeof registerProviderPlugin>[0]["plugin"];
pluginId: string;
pluginName: string;
providerId: string;

View File

@ -96,7 +96,7 @@ function resolveProviderRuntimeHooks(): ProviderRuntimeHooks | null {
try {
const loaded = requireProviderRuntime(
"../../plugins/provider-runtime.js",
) as ProviderRuntimeHooks;
) as unknown as ProviderRuntimeHooks;
cachedProviderRuntimeHooks = {
classifyProviderFailoverReasonWithPlugin: loaded.classifyProviderFailoverReasonWithPlugin,
matchesProviderContextOverflowWithPlugin: loaded.matchesProviderContextOverflowWithPlugin,

View File

@ -540,11 +540,12 @@ export function createSubscriptionMock(): SubscriptionMock {
let runEmbeddedAttemptPromise:
| Promise<typeof import("./attempt.js").runEmbeddedAttempt>
| undefined;
const ATTEMPT_SPAWN_WORKSPACE_TEST_SPECIFIER = "./attempt.ts?spawn-workspace-test";
async function loadRunEmbeddedAttempt() {
runEmbeddedAttemptPromise ??= import("./attempt.ts?spawn-workspace-test").then(
(mod) => mod.runEmbeddedAttempt,
);
runEmbeddedAttemptPromise ??= (
import(ATTEMPT_SPAWN_WORKSPACE_TEST_SPECIFIER) as Promise<typeof import("./attempt.js")>
).then((mod) => mod.runEmbeddedAttempt);
return await runEmbeddedAttemptPromise;
}

View File

@ -1,5 +1,14 @@
import type { SubscribeEmbeddedPiSessionParams } from "../../pi-embedded-subscribe.types.js";
// Minimal structural view of an agent: only the optional idle-wait hook is
// needed by the flush helper below.
type IdleAwareAgent = {
  waitForIdle?: (() => Promise<void>) | undefined;
};
// Minimal structural view of a session manager that can flush or discard
// queued tool results; both hooks are optional.
type ToolResultFlushManager = {
  flushPendingToolResults?: (() => void) | undefined;
  clearPendingToolResults?: (() => void) | undefined;
};
export function buildEmbeddedSubscriptionParams(
params: SubscribeEmbeddedPiSessionParams,
): SubscribeEmbeddedPiSessionParams {
@ -9,9 +18,10 @@ export function buildEmbeddedSubscriptionParams(
export async function cleanupEmbeddedAttemptResources(params: {
removeToolResultContextGuard?: () => void;
flushPendingToolResultsAfterIdle: (params: {
agent: unknown;
sessionManager: unknown;
clearPendingOnTimeout: boolean;
agent: IdleAwareAgent | null | undefined;
sessionManager: ToolResultFlushManager | null | undefined;
timeoutMs?: number;
clearPendingOnTimeout?: boolean;
}) => Promise<void>;
session?: { agent?: unknown; dispose(): void };
sessionManager: unknown;
@ -28,8 +38,8 @@ export async function cleanupEmbeddedAttemptResources(params: {
}
try {
await params.flushPendingToolResultsAfterIdle({
agent: params.session?.agent,
sessionManager: params.sessionManager,
agent: params.session?.agent as IdleAwareAgent | null | undefined,
sessionManager: params.sessionManager as ToolResultFlushManager | null | undefined,
clearPendingOnTimeout: true,
});
} catch {

View File

@ -15,6 +15,7 @@ import {
resolveAuthStorePathForDisplay,
resolveProfileUnusableUntilForDisplay,
} from "../../agents/auth-profiles.js";
import { resolveProviderEnvApiKeyCandidates } from "../../agents/model-auth-env-vars.js";
import { resolveEnvApiKey } from "../../agents/model-auth.js";
import {
buildModelAliasIndex,
@ -143,23 +144,9 @@ export async function modelsStatusCommand(
}
const providersFromEnv = new Set<string>();
// Keep in sync with resolveEnvApiKey() mappings (we want visibility even when
// a provider isn't currently selected in config/models).
const envProbeProviders = [
"anthropic",
"github-copilot",
"google-vertex",
"openai",
"google",
"groq",
"cerebras",
"xai",
"openrouter",
"zai",
"mistral",
"synthetic",
];
for (const provider of envProbeProviders) {
// Use the shared provider-env registry so `models status` stays aligned with
// env-backed providers beyond the text-model defaults (for example image-gen).
for (const provider of Object.keys(resolveProviderEnvApiKeyCandidates()).toSorted()) {
if (resolveEnvApiKey(provider)) {
providersFromEnv.add(provider);
}
@ -272,7 +259,6 @@ export async function modelsStatusCommand(
store,
cfg,
warnAfterMs: DEFAULT_OAUTH_WARN_MS,
providers,
});
const oauthProfiles = authHealth.profiles.filter(
(profile) => profile.type === "oauth" || profile.type === "token",

View File

@ -63,6 +63,27 @@ const mocks = vi.hoisted(() => {
}
return null;
}),
resolveProviderEnvApiKeyCandidates: vi.fn().mockReturnValue({
anthropic: ["ANTHROPIC_API_KEY"],
google: ["GEMINI_API_KEY", "GOOGLE_API_KEY"],
minimax: ["MINIMAX_API_KEY"],
"minimax-portal": ["MINIMAX_OAUTH_TOKEN", "MINIMAX_API_KEY"],
openai: ["OPENAI_API_KEY"],
"openai-codex": ["OPENAI_OAUTH_TOKEN"],
fal: ["FAL_KEY"],
}),
listKnownProviderEnvApiKeyNames: vi
.fn()
.mockReturnValue([
"ANTHROPIC_API_KEY",
"GEMINI_API_KEY",
"GOOGLE_API_KEY",
"MINIMAX_API_KEY",
"MINIMAX_OAUTH_TOKEN",
"OPENAI_API_KEY",
"OPENAI_OAUTH_TOKEN",
"FAL_KEY",
]),
hasUsableCustomProviderApiKey: vi.fn().mockReturnValue(false),
resolveUsableCustomProviderApiKey: vi.fn().mockReturnValue(null),
getCustomProviderApiKey: vi.fn().mockReturnValue(undefined),
@ -113,6 +134,10 @@ async function loadFreshModelsStatusCommandModuleForTest() {
resolveUsableCustomProviderApiKey: mocks.resolveUsableCustomProviderApiKey,
getCustomProviderApiKey: mocks.getCustomProviderApiKey,
}));
vi.doMock("../../agents/model-auth-env-vars.js", () => ({
resolveProviderEnvApiKeyCandidates: mocks.resolveProviderEnvApiKeyCandidates,
listKnownProviderEnvApiKeyNames: mocks.listKnownProviderEnvApiKeyNames,
}));
vi.doMock("../../infra/shell-env.js", () => ({
getShellEnvAppliedKeys: mocks.getShellEnvAppliedKeys,
shouldEnableShellEnvFallback: mocks.shouldEnableShellEnvFallback,
@ -233,6 +258,11 @@ describe("modelsStatusCommand auth overview", () => {
const openai = providers.find((p) => p.provider === "openai");
expect(openai?.env?.source).toContain("OPENAI_API_KEY");
expect(openai?.env?.value).toContain("...");
expect(
(payload.auth.oauth.providers as Array<{ provider: string }>).some(
(provider) => provider.provider === "openai",
),
).toBe(false);
expect(
(payload.auth.providersWithOAuth as string[]).some((e) => e.startsWith("anthropic")),
@ -271,6 +301,68 @@ describe("modelsStatusCommand auth overview", () => {
}
});
it("includes env-backed image-generation providers in effective auth output", async () => {
const localRuntime = createRuntime();
const originalEnvImpl = mocks.resolveEnvApiKey.getMockImplementation();
mocks.resolveEnvApiKey.mockImplementation((provider: string) => {
if (provider === "openai") {
return {
apiKey: "sk-openai-0123456789abcdefghijklmnopqrstuvwxyz", // pragma: allowlist secret
source: "shell env: OPENAI_API_KEY",
};
}
if (provider === "anthropic") {
return {
apiKey: "sk-ant-oat01-ACCESS-TOKEN-1234567890", // pragma: allowlist secret
source: "env: ANTHROPIC_OAUTH_TOKEN",
};
}
if (provider === "minimax") {
return {
apiKey: "sk-minimax-0123456789abcdefghijklmnopqrstuvwxyz", // pragma: allowlist secret
source: "env: MINIMAX_API_KEY",
};
}
if (provider === "fal") {
return {
apiKey: "fal_test_0123456789abcdefghijklmnopqrstuvwxyz", // pragma: allowlist secret
source: "env: FAL_KEY",
};
}
return null;
});
try {
await modelsStatusCommand({ json: true }, localRuntime as never);
const payload = JSON.parse(String((localRuntime.log as Mock).mock.calls[0]?.[0]));
const providers = payload.auth.providers as Array<{
provider: string;
effective: { kind: string };
}>;
expect(providers).toEqual(
expect.arrayContaining([
expect.objectContaining({
provider: "minimax",
effective: expect.objectContaining({ kind: "env" }),
}),
expect.objectContaining({
provider: "fal",
effective: expect.objectContaining({ kind: "env" }),
}),
]),
);
} finally {
if (originalEnvImpl) {
mocks.resolveEnvApiKey.mockImplementation(originalEnvImpl);
} else if (defaultResolveEnvApiKeyImpl) {
mocks.resolveEnvApiKey.mockImplementation(defaultResolveEnvApiKeyImpl);
} else {
mocks.resolveEnvApiKey.mockImplementation(() => null);
}
}
});
it("uses agent overrides and reports sources", async () => {
const localRuntime = createRuntime();
await withAgentScopeOverrides(