mirror of https://github.com/openclaw/openclaw.git
fix(ollama): pass provider headers to Ollama stream function (#24285)
createOllamaStreamFn() only accepted baseUrl, ignoring custom headers configured in models.providers.<provider>.headers. This caused 403 errors when Ollama endpoints sit behind reverse proxies that require auth headers (e.g. X-OLLAMA-KEY via HAProxy).

Add an optional defaultHeaders parameter to createOllamaStreamFn() and merge those headers into every fetch request. Provider headers from config are now passed through at the call site in the embedded runner.

Fixes #24285
This commit is contained in:
parent 76bfd9b5e6
commit 7597fc556c
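For context on the merge order the patch introduces: provider defaults are spread in before options?.headers, so a per-request header with the same name still wins. A minimal sketch of the resolution (header names and values here are illustrative placeholders, not taken from the repo):

    // Sketch only: X-OLLAMA-KEY stands in for a header configured under
    // models.providers.<provider>.headers; options is a per-request override.
    const defaultHeaders: Record<string, string> = { "X-OLLAMA-KEY": "proxy-secret" };
    const options = { headers: { "X-Request-Id": "abc123" } };

    const headers: Record<string, string> = {
      "Content-Type": "application/json",
      ...defaultHeaders,   // provider-level headers from config
      ...options?.headers, // per-request headers win on key collisions
    };
    // => { "Content-Type": "application/json",
    //      "X-OLLAMA-KEY": "proxy-secret", "X-Request-Id": "abc123" }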
@@ -405,7 +405,10 @@ function resolveOllamaChatUrl(baseUrl: string): string {
   return `${apiBase}/api/chat`;
 }
 
-export function createOllamaStreamFn(baseUrl: string): StreamFn {
+export function createOllamaStreamFn(
+  baseUrl: string,
+  defaultHeaders?: Record<string, string>,
+): StreamFn {
   const chatUrl = resolveOllamaChatUrl(baseUrl);
 
   return (model, context, options) => {
@@ -440,6 +443,7 @@ export function createOllamaStreamFn(baseUrl: string): StreamFn {
 
     const headers: Record<string, string> = {
       "Content-Type": "application/json",
+      ...defaultHeaders,
       ...options?.headers,
     };
     if (options?.apiKey) {
@@ -1022,7 +1022,7 @@ export async function runEmbeddedAttempt(
       modelBaseUrl,
       providerBaseUrl,
     });
-    activeSession.agent.streamFn = createOllamaStreamFn(ollamaBaseUrl);
+    activeSession.agent.streamFn = createOllamaStreamFn(ollamaBaseUrl, params.model.headers);
   } else if (params.model.api === "openai-responses" && params.provider === "openai") {
     const wsApiKey = await params.authStorage.getApiKey(params.provider);
     if (wsApiKey) {
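At the call site, the embedded runner now forwards params.model.headers as the new second argument. A hedged usage sketch, assuming a reverse-proxied endpoint (the URL and key value are placeholders; only the createOllamaStreamFn signature comes from the diff):

    // Placeholder values for illustration.
    const streamFn = createOllamaStreamFn(
      "http://ollama.internal:11434",     // baseUrl behind the reverse proxy
      { "X-OLLAMA-KEY": "proxy-secret" }, // defaultHeaders from provider config
    );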