fix(agents): preserve blank local custom-provider API keys after onboarding

Co-authored-by: Xinhua Gu <xinhua.gu@gmail.com>
This commit is contained in:
Frank Yang 2026-03-14 11:08:19 +08:00 committed by GitHub
parent bed661609e
commit 01674c575e
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
6 changed files with 442 additions and 17 deletions

View File

@ -60,6 +60,7 @@ Docs: https://docs.openclaw.ai
- macOS/voice wake: stop crashing wake-word command extraction when speech segment ranges come from a different transcript instance.
- Discord/allowlists: honor raw `guild_id` when hydrated guild objects are missing so allowlisted channels and threads like `#maintainers` no longer get false-dropped before channel allowlist checks.
- macOS/runtime locator: require Node >=22.16.0 during macOS runtime discovery so the app no longer accepts Node versions that the main runtime guard rejects later. Thanks @sumleo.
- Agents/custom providers: preserve blank API keys for loopback OpenAI-compatible custom providers by clearing the synthetic Authorization header at runtime, while keeping explicit apiKey and oauth/token config from silently downgrading into fake bearer auth. (#45631) Thanks @xinhuagu.
## 2026.3.12

View File

@ -4,6 +4,7 @@ import { listKnownProviderEnvApiKeyNames } from "./model-auth-env-vars.js";
// Sentinel "API key" values marking non-secret auth strategies. They occupy the
// slot where a real key would live and are special-cased by the auth resolver
// (see isNonSecretApiKeyMarker) instead of being sent as bearer credentials.
export const MINIMAX_OAUTH_MARKER = "minimax-oauth";
export const QWEN_OAUTH_MARKER = "qwen-oauth";
export const OLLAMA_LOCAL_AUTH_MARKER = "ollama-local";
// Synthetic key for loopback OpenAI-compatible custom providers that
// intentionally run without authentication (blank apiKey after onboarding).
export const CUSTOM_LOCAL_AUTH_MARKER = "custom-local";
export const NON_ENV_SECRETREF_MARKER = "secretref-managed"; // pragma: allowlist secret
export const SECRETREF_ENV_HEADER_MARKER_PREFIX = "secretref-env:"; // pragma: allowlist secret
@ -71,6 +72,7 @@ export function isNonSecretApiKeyMarker(
trimmed === MINIMAX_OAUTH_MARKER ||
trimmed === QWEN_OAUTH_MARKER ||
trimmed === OLLAMA_LOCAL_AUTH_MARKER ||
trimmed === CUSTOM_LOCAL_AUTH_MARKER ||
trimmed === NON_ENV_SECRETREF_MARKER ||
isAwsSdkAuthMarker(trimmed);
if (isKnownMarker) {

View File

@ -1,9 +1,12 @@
import { describe, expect, it } from "vitest";
import { streamSimpleOpenAICompletions, type Model } from "@mariozechner/pi-ai";
import { afterEach, describe, expect, it, vi } from "vitest";
import type { AuthProfileStore } from "./auth-profiles.js";
import { NON_ENV_SECRETREF_MARKER } from "./model-auth-markers.js";
import { CUSTOM_LOCAL_AUTH_MARKER, NON_ENV_SECRETREF_MARKER } from "./model-auth-markers.js";
import {
applyLocalNoAuthHeaderOverride,
hasUsableCustomProviderApiKey,
requireApiKey,
resolveApiKeyForProvider,
resolveAwsSdkEnvVarName,
resolveModelAuthMode,
resolveUsableCustomProviderApiKey,
@ -223,3 +226,334 @@ describe("resolveUsableCustomProviderApiKey", () => {
}
});
});
// Regression coverage for blank API keys on loopback custom providers:
// resolveApiKeyForProvider should synthesize CUSTOM_LOCAL_AUTH_MARKER only for
// local, auth-less, fully-described OpenAI-compatible provider configs, and
// must NOT mask explicit apiKey / oauth configuration or remote endpoints.
describe("resolveApiKeyForProvider synthetic local auth for custom providers", () => {
  // IPv4 loopback + no apiKey → synthetic marker with a descriptive source.
  it("synthesizes a local auth marker for custom providers with a local baseUrl and no apiKey", async () => {
    const auth = await resolveApiKeyForProvider({
      provider: "custom-127-0-0-1-8080",
      cfg: {
        models: {
          providers: {
            "custom-127-0-0-1-8080": {
              baseUrl: "http://127.0.0.1:8080/v1",
              api: "openai-completions",
              models: [
                {
                  id: "qwen-3.5",
                  name: "Qwen 3.5",
                  reasoning: false,
                  input: ["text"],
                  cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
                  contextWindow: 8192,
                  maxTokens: 4096,
                },
              ],
            },
          },
        },
      },
    });
    expect(auth.apiKey).toBe(CUSTOM_LOCAL_AUTH_MARKER);
    expect(auth.source).toContain("synthetic local key");
  });
  // "localhost" hostname is treated the same as the loopback IP.
  it("synthesizes a local auth marker for localhost custom providers", async () => {
    const auth = await resolveApiKeyForProvider({
      provider: "my-local",
      cfg: {
        models: {
          providers: {
            "my-local": {
              baseUrl: "http://localhost:11434/v1",
              api: "openai-completions",
              models: [
                {
                  id: "llama3",
                  name: "Llama 3",
                  reasoning: false,
                  input: ["text"],
                  cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
                  contextWindow: 8192,
                  maxTokens: 4096,
                },
              ],
            },
          },
        },
      },
    });
    expect(auth.apiKey).toBe(CUSTOM_LOCAL_AUTH_MARKER);
  });
  // IPv6 loopback, bracketed as the WHATWG URL parser serializes it.
  it("synthesizes a local auth marker for IPv6 loopback (::1)", async () => {
    const auth = await resolveApiKeyForProvider({
      provider: "my-ipv6",
      cfg: {
        models: {
          providers: {
            "my-ipv6": {
              baseUrl: "http://[::1]:8080/v1",
              api: "openai-completions",
              models: [
                {
                  id: "llama3",
                  name: "Llama 3",
                  reasoning: false,
                  input: ["text"],
                  cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
                  contextWindow: 8192,
                  maxTokens: 4096,
                },
              ],
            },
          },
        },
      },
    });
    expect(auth.apiKey).toBe(CUSTOM_LOCAL_AUTH_MARKER);
  });
  // Wildcard bind address: common in local-server configs, treated as local.
  it("synthesizes a local auth marker for 0.0.0.0", async () => {
    const auth = await resolveApiKeyForProvider({
      provider: "my-wildcard",
      cfg: {
        models: {
          providers: {
            "my-wildcard": {
              baseUrl: "http://0.0.0.0:11434/v1",
              api: "openai-completions",
              models: [
                {
                  id: "qwen",
                  name: "Qwen",
                  reasoning: false,
                  input: ["text"],
                  cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
                  contextWindow: 8192,
                  maxTokens: 4096,
                },
              ],
            },
          },
        },
      },
    });
    expect(auth.apiKey).toBe(CUSTOM_LOCAL_AUTH_MARKER);
  });
  // IPv4-mapped IPv6 loopback in its dotted spelling.
  it("synthesizes a local auth marker for IPv4-mapped IPv6 (::ffff:127.0.0.1)", async () => {
    const auth = await resolveApiKeyForProvider({
      provider: "my-mapped",
      cfg: {
        models: {
          providers: {
            "my-mapped": {
              baseUrl: "http://[::ffff:127.0.0.1]:8080/v1",
              api: "openai-completions",
              models: [
                {
                  id: "llama3",
                  name: "Llama 3",
                  reasoning: false,
                  input: ["text"],
                  cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
                  contextWindow: 8192,
                  maxTokens: 4096,
                },
              ],
            },
          },
        },
      },
    });
    expect(auth.apiKey).toBe(CUSTOM_LOCAL_AUTH_MARKER);
  });
  // Negative case: remote hosts must still fail auth resolution without a key.
  it("does not synthesize auth for remote custom providers without apiKey", async () => {
    await expect(
      resolveApiKeyForProvider({
        provider: "my-remote",
        cfg: {
          models: {
            providers: {
              "my-remote": {
                baseUrl: "https://api.example.com/v1",
                api: "openai-completions",
                models: [
                  {
                    id: "gpt-5",
                    name: "GPT-5",
                    reasoning: false,
                    input: ["text"],
                    cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
                    contextWindow: 8192,
                    maxTokens: 4096,
                  },
                ],
              },
            },
          },
        },
      }),
    ).rejects.toThrow("No API key found");
  });
  // Negative case: an explicit apiKey reference (env-style name) that does not
  // resolve must surface as an error, not be silently downgraded to local auth.
  // The env var is removed for the test and restored afterwards.
  it("does not synthesize local auth when apiKey is explicitly configured but unresolved", async () => {
    const previous = process.env.OPENAI_API_KEY;
    delete process.env.OPENAI_API_KEY;
    try {
      await expect(
        resolveApiKeyForProvider({
          provider: "custom",
          cfg: {
            models: {
              providers: {
                custom: {
                  baseUrl: "http://127.0.0.1:8080/v1",
                  api: "openai-completions",
                  apiKey: "OPENAI_API_KEY",
                  models: [
                    {
                      id: "llama3",
                      name: "Llama 3",
                      reasoning: false,
                      input: ["text"],
                      cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
                      contextWindow: 8192,
                      maxTokens: 4096,
                    },
                  ],
                },
              },
            },
          },
        }),
      ).rejects.toThrow('No API key found for provider "custom"');
    } finally {
      if (previous === undefined) {
        delete process.env.OPENAI_API_KEY;
      } else {
        process.env.OPENAI_API_KEY = previous;
      }
    }
  });
  // Negative case: an explicit auth mode other than api-key opts out entirely.
  it("does not synthesize local auth when auth mode explicitly requires oauth", async () => {
    await expect(
      resolveApiKeyForProvider({
        provider: "custom",
        cfg: {
          models: {
            providers: {
              custom: {
                baseUrl: "http://127.0.0.1:8080/v1",
                api: "openai-completions",
                auth: "oauth",
                models: [
                  {
                    id: "llama3",
                    name: "Llama 3",
                    reasoning: false,
                    input: ["text"],
                    cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
                    contextWindow: 8192,
                    maxTokens: 4096,
                  },
                ],
              },
            },
          },
        },
      }),
    ).rejects.toThrow('No API key found for provider "custom"');
  });
  // Built-in providers keep their own auth strategy even behind a local
  // baseUrl override: bedrock must still resolve to aws-sdk mode, no key.
  it("keeps built-in aws-sdk fallback for local baseUrl overrides", async () => {
    const auth = await resolveApiKeyForProvider({
      provider: "amazon-bedrock",
      cfg: {
        models: {
          providers: {
            "amazon-bedrock": {
              baseUrl: "http://127.0.0.1:8080/v1",
              models: [],
            },
          },
        },
      },
    });
    expect(auth.mode).toBe("aws-sdk");
    expect(auth.apiKey).toBeUndefined();
  });
});
// Verifies the runtime header override end-to-end: with a synthetic local auth
// marker, the Authorization header must be absent from the outgoing HTTP
// request while user-configured headers (X-Test) pass through untouched.
describe("applyLocalNoAuthHeaderOverride", () => {
  const originalFetch = globalThis.fetch;
  afterEach(() => {
    // Undo the fetch stub and clear vi.fn state between tests.
    globalThis.fetch = originalFetch;
    vi.restoreAllMocks();
  });
  it("clears Authorization for synthetic local OpenAI-compatible auth markers", async () => {
    let capturedAuthorization: string | null | undefined;
    let capturedXTest: string | null | undefined;
    let resolveRequest: (() => void) | undefined;
    // Resolved once the stubbed fetch has observed a request; the stream call
    // below is intentionally not awaited (its 401 outcome is irrelevant here).
    const requestSeen = new Promise<void>((resolve) => {
      resolveRequest = resolve;
    });
    // Stub fetch to capture the headers the OpenAI-compatible client sends.
    globalThis.fetch = vi.fn(async (_input, init) => {
      const headers = new Headers(init?.headers);
      capturedAuthorization = headers.get("Authorization");
      capturedXTest = headers.get("X-Test");
      resolveRequest?.();
      return new Response(JSON.stringify({ error: { message: "unauthorized" } }), {
        status: 401,
        headers: { "content-type": "application/json" },
      });
    }) as typeof fetch;
    const model = applyLocalNoAuthHeaderOverride(
      {
        id: "local-llm",
        name: "local-llm",
        api: "openai-completions",
        provider: "custom",
        baseUrl: "http://127.0.0.1:8080/v1",
        reasoning: false,
        input: ["text"],
        cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
        contextWindow: 8192,
        maxTokens: 4096,
        headers: { "X-Test": "1" },
      } as Model<"openai-completions">,
      {
        apiKey: CUSTOM_LOCAL_AUTH_MARKER,
        source: "models.providers.custom (synthetic local key)",
        mode: "api-key",
      },
    );
    streamSimpleOpenAICompletions(
      model,
      {
        messages: [
          {
            role: "user",
            content: "hello",
            timestamp: Date.now(),
          },
        ],
      },
      {
        apiKey: CUSTOM_LOCAL_AUTH_MARKER,
      },
    );
    await requestSeen;
    // Authorization was cleared; the custom header survived the override.
    expect(capturedAuthorization).toBeNull();
    expect(capturedXTest).toBe("1");
  });
});

View File

@ -3,6 +3,7 @@ import { type Api, getEnvApiKey, type Model } from "@mariozechner/pi-ai";
import { formatCliCommand } from "../cli/command-format.js";
import type { OpenClawConfig } from "../config/config.js";
import type { ModelProviderAuthMode, ModelProviderConfig } from "../config/types.js";
import { coerceSecretRef } from "../config/types.secrets.js";
import { getShellEnvAppliedKeys } from "../infra/shell-env.js";
import { createSubsystemLogger } from "../logging/subsystem.js";
import {
@ -19,6 +20,7 @@ import {
} from "./auth-profiles.js";
import { PROVIDER_ENV_API_KEY_CANDIDATES } from "./model-auth-env-vars.js";
import {
CUSTOM_LOCAL_AUTH_MARKER,
isKnownEnvApiKeyMarker,
isNonSecretApiKeyMarker,
OLLAMA_LOCAL_AUTH_MARKER,
@ -119,15 +121,44 @@ function resolveProviderAuthOverride(
return undefined;
}
/**
 * Returns true when `baseUrl` points at this machine, i.e. a server that
 * typically runs without authentication.
 *
 * Matches hostnames as serialized by the WHATWG URL parser: `localhost`,
 * any IPv4 loopback address (the whole 127.0.0.0/8 block, not just
 * 127.0.0.1), the wildcard 0.0.0.0, the IPv6 loopback `[::1]`, and the
 * IPv4-mapped IPv6 loopback in both its normalized (`[::ffff:7f00:1]`) and
 * dotted (`[::ffff:127.0.0.1]`) serializations. Unparseable URLs are
 * treated as non-local.
 */
function isLocalBaseUrl(baseUrl: string): boolean {
  try {
    const host = new URL(baseUrl).hostname.toLowerCase();
    if (
      host === "localhost" ||
      host === "0.0.0.0" ||
      host === "[::1]" ||
      host === "[::ffff:7f00:1]" ||
      host === "[::ffff:127.0.0.1]"
    ) {
      return true;
    }
    // All of 127.0.0.0/8 is loopback. The URL parser has already
    // canonicalized and range-checked numeric IPv4 hosts, so this dotted-quad
    // pattern cannot match a DNS name such as "127.evil.example".
    return /^127\.\d{1,3}\.\d{1,3}\.\d{1,3}$/.test(host);
  } catch {
    return false;
  }
}
/**
 * Returns true when the provider config carries an explicit apiKey: either a
 * value that normalizes to a non-empty secret input, or a managed secret
 * reference. Used to avoid synthesizing local auth over user intent.
 */
function hasExplicitProviderApiKeyConfig(providerConfig: ModelProviderConfig): boolean {
  const rawApiKey = providerConfig.apiKey;
  if (normalizeOptionalSecretInput(rawApiKey) !== undefined) {
    return true;
  }
  return coerceSecretRef(rawApiKey) !== null;
}
/**
 * Returns true when a custom provider entry fully describes an endpoint:
 * a non-blank baseUrl, a non-blank api kind, and at least one model.
 * Incomplete entries never qualify for synthetic local auth.
 */
function isCustomLocalProviderConfig(providerConfig: ModelProviderConfig): boolean {
  const hasText = (value: unknown): boolean =>
    typeof value === "string" && value.trim().length > 0;
  if (!hasText(providerConfig.baseUrl) || !hasText(providerConfig.api)) {
    return false;
  }
  const models = providerConfig.models;
  return Array.isArray(models) && models.length > 0;
}
function resolveSyntheticLocalProviderAuth(params: {
cfg: OpenClawConfig | undefined;
provider: string;
}): ResolvedProviderAuth | null {
const normalizedProvider = normalizeProviderId(params.provider);
if (normalizedProvider !== "ollama") {
return null;
}
const providerConfig = resolveProviderConfig(params.cfg, params.provider);
if (!providerConfig) {
return null;
@ -141,11 +172,38 @@ function resolveSyntheticLocalProviderAuth(params: {
return null;
}
return {
apiKey: OLLAMA_LOCAL_AUTH_MARKER,
source: "models.providers.ollama (synthetic local key)",
mode: "api-key",
};
const normalizedProvider = normalizeProviderId(params.provider);
if (normalizedProvider === "ollama") {
return {
apiKey: OLLAMA_LOCAL_AUTH_MARKER,
source: "models.providers.ollama (synthetic local key)",
mode: "api-key",
};
}
const authOverride = resolveProviderAuthOverride(params.cfg, params.provider);
if (authOverride && authOverride !== "api-key") {
return null;
}
if (!isCustomLocalProviderConfig(providerConfig)) {
return null;
}
if (hasExplicitProviderApiKeyConfig(providerConfig)) {
return null;
}
// Custom providers pointing at a local server (e.g. llama.cpp, vLLM, LocalAI)
// typically don't require auth. Synthesize a local key so the auth resolver
// doesn't reject them when the user left the API key blank during onboarding.
if (providerConfig.baseUrl && isLocalBaseUrl(providerConfig.baseUrl)) {
return {
apiKey: CUSTOM_LOCAL_AUTH_MARKER,
source: `models.providers.${params.provider} (synthetic local key)`,
mode: "api-key",
};
}
return null;
}
function resolveEnvSourceLabel(params: {
@ -439,3 +497,25 @@ export function requireApiKey(auth: ResolvedProviderAuth, provider: string): str
}
throw new Error(`No API key resolved for provider "${provider}" (auth mode: ${auth.mode}).`);
}
/**
 * For models authenticated with the synthetic local marker, returns a copy of
 * the model whose Authorization header is forced to null; all other models
 * (different marker, or non openai-completions API) are returned unchanged.
 *
 * Rationale: the OpenAI SDK always derives Authorization from apiKey. We keep
 * the non-secret placeholder key so client construction succeeds, then clear
 * the header at request-build time for local servers that need no auth.
 */
export function applyLocalNoAuthHeaderOverride<T extends Model<Api>>(
  model: T,
  auth: ResolvedProviderAuth | null | undefined,
): T {
  const isSyntheticLocal = auth?.apiKey === CUSTOM_LOCAL_AUTH_MARKER;
  if (!isSyntheticLocal || model.api !== "openai-completions") {
    return model;
  }
  // A null value signals "remove this header"; the cast is needed because the
  // Model header type only admits strings.
  const overriddenHeaders = {
    ...model.headers,
    Authorization: null,
  } as unknown as Record<string, string>;
  return {
    ...model,
    headers: overriddenHeaders,
  };
}

View File

@ -41,7 +41,11 @@ import { formatUserTime, resolveUserTimeFormat, resolveUserTimezone } from "../d
import { DEFAULT_CONTEXT_TOKENS, DEFAULT_MODEL, DEFAULT_PROVIDER } from "../defaults.js";
import { resolveOpenClawDocsPath } from "../docs-path.js";
import { resolveMemorySearchConfig } from "../memory-search.js";
import { getApiKeyForModel, resolveModelAuthMode } from "../model-auth.js";
import {
applyLocalNoAuthHeaderOverride,
getApiKeyForModel,
resolveModelAuthMode,
} from "../model-auth.js";
import { supportsModelTools } from "../model-tool-support.js";
import { ensureOpenClawModelsJson } from "../models-config.js";
import { createConfiguredOllamaStreamFn } from "../ollama-stream.js";
@ -429,8 +433,9 @@ export async function compactEmbeddedPiSessionDirect(
const reason = error ?? `Unknown model: ${provider}/${modelId}`;
return fail(reason);
}
let apiKeyInfo: Awaited<ReturnType<typeof getApiKeyForModel>> | null = null;
try {
const apiKeyInfo = await getApiKeyForModel({
apiKeyInfo = await getApiKeyForModel({
model,
cfg: params.config,
profileId: authProfileId,
@ -518,10 +523,12 @@ export async function compactEmbeddedPiSessionDirect(
modelContextWindow: model.contextWindow,
defaultTokens: DEFAULT_CONTEXT_TOKENS,
});
const effectiveModel =
const effectiveModel = applyLocalNoAuthHeaderOverride(
ctxInfo.tokens < (model.contextWindow ?? Infinity)
? { ...model, contextWindow: ctxInfo.tokens }
: model;
: model,
apiKeyInfo,
);
const runAbortController = new AbortController();
const toolsRaw = createOpenClawCodingTools({

View File

@ -30,6 +30,7 @@ import {
import { DEFAULT_CONTEXT_TOKENS, DEFAULT_MODEL, DEFAULT_PROVIDER } from "../defaults.js";
import { FailoverError, resolveFailoverStatus } from "../failover-error.js";
import {
applyLocalNoAuthHeaderOverride,
ensureAuthProfileStore,
getApiKeyForModel,
resolveAuthProfileOrder,
@ -884,7 +885,7 @@ export async function runEmbeddedPiAgent(
disableTools: params.disableTools,
provider,
modelId,
model: effectiveModel,
model: applyLocalNoAuthHeaderOverride(effectiveModel, apiKeyInfo),
authProfileId: lastProfileId,
authProfileIdSource: lockedProfileId ? "user" : "auto",
authStorage,