Memory: extract embedding runtime surface

This commit is contained in:
Gustavo Madeira Santana 2026-03-15 20:01:19 +00:00
parent e298f5b1b8
commit edae8761b3
7 changed files with 125 additions and 90 deletions

View File

@ -4,8 +4,8 @@ import { resolveMemorySearchConfig } from "../agents/memory-search.js";
import { resolveApiKeyForProvider } from "../agents/model-auth.js";
import { formatCliCommand } from "../cli/command-format.js";
import type { OpenClawConfig } from "../config/config.js";
import { DEFAULT_LOCAL_EMBEDDING_MODEL } from "../extension-host/embedding-runtime.js";
import { resolveMemoryBackendConfig } from "../memory/backend-config.js";
import { DEFAULT_LOCAL_MODEL } from "../memory/embeddings.js";
import { hasConfiguredMemorySecretInput } from "../memory/secret-input.js";
import { note } from "../terminal/note.js";
import { resolveUserPath } from "../utils.js";
@ -160,7 +160,7 @@ export async function noteMemorySearchHealth(
*
* When `useDefaultFallback` is true (explicit `provider: "local"`), an empty
* modelPath is treated as available because the runtime falls back to
* DEFAULT_LOCAL_MODEL (an auto-downloaded HuggingFace model).
* DEFAULT_LOCAL_EMBEDDING_MODEL (an auto-downloaded HuggingFace model).
*
* When false (provider: "auto"), we only consider local available if the user
* explicitly configured a local file path matching `canAutoSelectLocal()`
@ -168,7 +168,7 @@ export async function noteMemorySearchHealth(
*/
function hasLocalEmbeddings(local: { modelPath?: string }, useDefaultFallback = false): boolean {
const modelPath =
local.modelPath?.trim() || (useDefaultFallback ? DEFAULT_LOCAL_MODEL : undefined);
local.modelPath?.trim() || (useDefaultFallback ? DEFAULT_LOCAL_EMBEDDING_MODEL : undefined);
if (!modelPath) {
return false;
}

View File

@ -23,14 +23,14 @@ import {
createVoyageEmbeddingProvider,
type VoyageEmbeddingClient,
} from "../memory/embeddings-voyage.js";
import { importNodeLlamaCpp } from "../memory/node-llama.js";
import { resolveUserPath } from "../utils.js";
import type {
EmbeddingProvider,
EmbeddingProviderId,
EmbeddingProviderOptions,
EmbeddingProviderResult,
} from "../memory/embeddings.js";
import { importNodeLlamaCpp } from "../memory/node-llama.js";
import { resolveUserPath } from "../utils.js";
} from "./embedding-runtime-types.js";
export type {
GeminiEmbeddingClient,

View File

@ -0,0 +1,61 @@
import type { OpenClawConfig } from "../config/config.js";
import type { SecretInput } from "../config/types.secrets.js";
import type { EmbeddingInput } from "../memory/embedding-inputs.js";
import type { GeminiEmbeddingClient, GeminiTaskType } from "../memory/embeddings-gemini.js";
import type { MistralEmbeddingClient } from "../memory/embeddings-mistral.js";
import type { OllamaEmbeddingClient } from "../memory/embeddings-ollama.js";
import type { OpenAiEmbeddingClient } from "../memory/embeddings-openai.js";
import type { VoyageEmbeddingClient } from "../memory/embeddings-voyage.js";
export type { GeminiEmbeddingClient } from "../memory/embeddings-gemini.js";
export type { MistralEmbeddingClient } from "../memory/embeddings-mistral.js";
export type { OpenAiEmbeddingClient } from "../memory/embeddings-openai.js";
export type { VoyageEmbeddingClient } from "../memory/embeddings-voyage.js";
export type { OllamaEmbeddingClient } from "../memory/embeddings-ollama.js";
/** Minimal contract every embedding backend (OpenAI, local, Gemini, Voyage, Mistral, Ollama) implements. */
export type EmbeddingProvider = {
/** Backend identifier; values line up with EmbeddingProviderId (e.g. "openai", "local"). */
id: string;
/** Model name or path this provider instance embeds with. */
model: string;
/** Per-input token limit, when the backend advertises one. */
maxInputTokens?: number;
/** Embed a single query string into one vector. */
embedQuery: (text: string) => Promise<number[]>;
/** Embed a batch of plain strings, one vector per input. */
embedBatch: (texts: string[]) => Promise<number[][]>;
/** Optional batch entry point for structured EmbeddingInput values. */
embedBatchInputs?: (inputs: EmbeddingInput[]) => Promise<number[][]>;
};
/** Concrete embedding backends selectable by configuration. */
export type EmbeddingProviderId = "openai" | "local" | "gemini" | "voyage" | "mistral" | "ollama";
/** A concrete backend, or "auto" to let the runtime choose one. */
export type EmbeddingProviderRequest = EmbeddingProviderId | "auto";
/** Backend to fall back to when the requested one is unavailable, or "none" to disable fallback. */
export type EmbeddingProviderFallback = EmbeddingProviderId | "none";
/** Outcome of provider resolution: the active provider plus fallback diagnostics. */
export type EmbeddingProviderResult = {
/** Resolved provider, or null when none could be constructed. */
provider: EmbeddingProvider | null;
/** What the caller originally requested (a concrete backend id or "auto"). */
requestedProvider: EmbeddingProviderRequest;
/** Backend that was requested but abandoned, when a fallback occurred. */
fallbackFrom?: EmbeddingProviderId;
/** Reason the fallback occurred, when one did. */
fallbackReason?: string;
/** Why the requested provider was unavailable — presumably set when provider is null; confirm against the registry. */
providerUnavailableReason?: string;
// Backend-specific clients; NOTE(review): presumably only the constructed backend's client is populated — verify.
openAi?: OpenAiEmbeddingClient;
gemini?: GeminiEmbeddingClient;
voyage?: VoyageEmbeddingClient;
mistral?: MistralEmbeddingClient;
ollama?: OllamaEmbeddingClient;
};
/** Inputs accepted by createEmbeddingProvider when resolving a backend. */
export type EmbeddingProviderOptions = {
/** Full application configuration the resolver reads provider settings from. */
config: OpenClawConfig;
/** Agent directory used to scope provider state — TODO confirm exact use at call sites. */
agentDir?: string;
/** Requested backend, or "auto" for runtime selection. */
provider: EmbeddingProviderRequest;
/** Overrides for remote HTTP backends. */
remote?: {
/** Custom endpoint base URL. */
baseUrl?: string;
/** API key as a secret input (resolved elsewhere). */
apiKey?: SecretInput;
/** Extra headers sent with embedding requests. */
headers?: Record<string, string>;
};
/** Model name (remote backends) or identifier passed through to the provider. */
model: string;
/** Backend to fall back to, or "none". */
fallback: EmbeddingProviderFallback;
/** Local (on-disk) model settings. */
local?: {
/** Path to a local embedding model file; empty may trigger the default-model fallback. */
modelPath?: string;
/** Directory used to cache downloaded local models. */
modelCacheDir?: string;
};
/** Gemini embedding-2: output vector dimensions (768, 1536, or 3072). */
outputDimensionality?: number;
/** Gemini: override the default task type sent with embedding requests. */
taskType?: GeminiTaskType;
};

View File

@ -0,0 +1,30 @@
import {
DEFAULT_EXTENSION_HOST_LOCAL_EMBEDDING_MODEL,
createExtensionHostEmbeddingProvider,
} from "./embedding-runtime-registry.js";
import type {
EmbeddingProviderOptions,
EmbeddingProviderResult,
} from "./embedding-runtime-types.js";
export type {
EmbeddingProvider,
EmbeddingProviderFallback,
EmbeddingProviderId,
EmbeddingProviderOptions,
EmbeddingProviderRequest,
EmbeddingProviderResult,
GeminiEmbeddingClient,
MistralEmbeddingClient,
OllamaEmbeddingClient,
OpenAiEmbeddingClient,
VoyageEmbeddingClient,
} from "./embedding-runtime-types.js";
/** Public name for the default local embedding model; re-exports the registry's constant. */
export const DEFAULT_LOCAL_EMBEDDING_MODEL = DEFAULT_EXTENSION_HOST_LOCAL_EMBEDDING_MODEL;
/**
 * Create an embedding provider for the given options.
 *
 * Thin facade over the extension-host registry so callers depend only on
 * this module's surface rather than the registry module directly.
 *
 * @param options Backend selection, model, fallback, and credential inputs.
 * @returns The resolution result, including fallback diagnostics.
 */
export async function createEmbeddingProvider(
options: EmbeddingProviderOptions,
): Promise<EmbeddingProviderResult> {
return createExtensionHostEmbeddingProvider(options);
}

View File

@ -1,73 +1,17 @@
import type { OpenClawConfig } from "../config/config.js";
import type { SecretInput } from "../config/types.secrets.js";
import {
DEFAULT_EXTENSION_HOST_LOCAL_EMBEDDING_MODEL,
createExtensionHostEmbeddingProvider,
} from "../extension-host/embedding-runtime-registry.js";
import type { EmbeddingInput } from "./embedding-inputs.js";
import { type GeminiEmbeddingClient, type GeminiTaskType } from "./embeddings-gemini.js";
import { type MistralEmbeddingClient } from "./embeddings-mistral.js";
import type { OllamaEmbeddingClient } from "./embeddings-ollama.js";
import type { OpenAiEmbeddingClient } from "./embeddings-openai.js";
import type { VoyageEmbeddingClient } from "./embeddings-voyage.js";
export type { GeminiEmbeddingClient } from "./embeddings-gemini.js";
export type { MistralEmbeddingClient } from "./embeddings-mistral.js";
export type { OpenAiEmbeddingClient } from "./embeddings-openai.js";
export type { VoyageEmbeddingClient } from "./embeddings-voyage.js";
export type { OllamaEmbeddingClient } from "./embeddings-ollama.js";
/** Contract implemented by every embedding backend (see EmbeddingProviderId for the set). */
export type EmbeddingProvider = {
/** Backend identifier (e.g. "openai", "local"). */
id: string;
/** Model name or path this provider embeds with. */
model: string;
/** Per-input token limit, when the backend advertises one. */
maxInputTokens?: number;
/** Embed one query string into a single vector. */
embedQuery: (text: string) => Promise<number[]>;
/** Embed a batch of strings, one vector per input. */
embedBatch: (texts: string[]) => Promise<number[][]>;
/** Optional batch entry point for structured EmbeddingInput values. */
embedBatchInputs?: (inputs: EmbeddingInput[]) => Promise<number[][]>;
};
/** Concrete embedding backends selectable by configuration. */
export type EmbeddingProviderId = "openai" | "local" | "gemini" | "voyage" | "mistral" | "ollama";
/** A concrete backend, or "auto" for runtime selection. */
export type EmbeddingProviderRequest = EmbeddingProviderId | "auto";
/** Fallback backend, or "none" to disable fallback. */
export type EmbeddingProviderFallback = EmbeddingProviderId | "none";
/** Result of resolving an embedding provider, with fallback diagnostics. */
export type EmbeddingProviderResult = {
/** Resolved provider, or null when none could be constructed. */
provider: EmbeddingProvider | null;
/** The backend the caller originally asked for. */
requestedProvider: EmbeddingProviderRequest;
/** Backend abandoned in favor of a fallback, when that happened. */
fallbackFrom?: EmbeddingProviderId;
/** Reason for the fallback, when one occurred. */
fallbackReason?: string;
/** Why the requested provider was unavailable — presumably set when provider is null; confirm at the registry. */
providerUnavailableReason?: string;
// Backend-specific clients for whichever backend(s) were constructed.
openAi?: OpenAiEmbeddingClient;
gemini?: GeminiEmbeddingClient;
voyage?: VoyageEmbeddingClient;
mistral?: MistralEmbeddingClient;
ollama?: OllamaEmbeddingClient;
};
/** Inputs accepted when creating an embedding provider. */
export type EmbeddingProviderOptions = {
/** Application configuration the resolver reads provider settings from. */
config: OpenClawConfig;
/** Agent directory used to scope provider state — TODO confirm exact use at call sites. */
agentDir?: string;
/** Requested backend, or "auto". */
provider: EmbeddingProviderRequest;
/** Overrides for remote HTTP backends. */
remote?: {
/** Custom endpoint base URL. */
baseUrl?: string;
/** API key as a secret input. */
apiKey?: SecretInput;
/** Extra headers sent with embedding requests. */
headers?: Record<string, string>;
};
/** Model name or identifier passed through to the backend. */
model: string;
/** Fallback backend, or "none". */
fallback: EmbeddingProviderFallback;
/** Local (on-disk) model settings. */
local?: {
/** Path to a local model file; empty may trigger the default-model fallback. */
modelPath?: string;
/** Directory used to cache downloaded local models. */
modelCacheDir?: string;
};
/** Gemini embedding-2: output vector dimensions (768, 1536, or 3072). */
outputDimensionality?: number;
/** Gemini: override the default task type sent with embedding requests. */
taskType?: GeminiTaskType;
};
/** Legacy name kept for existing importers; aliases the extension-host default local model. */
export const DEFAULT_LOCAL_MODEL = DEFAULT_EXTENSION_HOST_LOCAL_EMBEDDING_MODEL;
/**
 * Create an embedding provider for the given options.
 * Delegates construction to the extension-host registry implementation.
 */
export async function createEmbeddingProvider(
options: EmbeddingProviderOptions,
): Promise<EmbeddingProviderResult> {
return createExtensionHostEmbeddingProvider(options);
}
export {
createEmbeddingProvider,
DEFAULT_LOCAL_EMBEDDING_MODEL as DEFAULT_LOCAL_MODEL,
} from "../extension-host/embedding-runtime.js";
export type {
EmbeddingProvider,
EmbeddingProviderFallback,
EmbeddingProviderId,
EmbeddingProviderOptions,
EmbeddingProviderRequest,
EmbeddingProviderResult,
GeminiEmbeddingClient,
MistralEmbeddingClient,
OllamaEmbeddingClient,
OpenAiEmbeddingClient,
VoyageEmbeddingClient,
} from "../extension-host/embedding-runtime.js";

View File

@ -8,14 +8,6 @@ import { resolveAgentDir } from "../agents/agent-scope.js";
import { ResolvedMemorySearchConfig } from "../agents/memory-search.js";
import { type OpenClawConfig } from "../config/config.js";
import { resolveSessionTranscriptsDirForAgent } from "../config/sessions/paths.js";
import { createSubsystemLogger } from "../logging/subsystem.js";
import { onSessionTranscriptUpdate } from "../sessions/transcript-events.js";
import { resolveUserPath } from "../utils.js";
import { DEFAULT_GEMINI_EMBEDDING_MODEL } from "./embeddings-gemini.js";
import { DEFAULT_MISTRAL_EMBEDDING_MODEL } from "./embeddings-mistral.js";
import { DEFAULT_OLLAMA_EMBEDDING_MODEL } from "./embeddings-ollama.js";
import { DEFAULT_OPENAI_EMBEDDING_MODEL } from "./embeddings-openai.js";
import { DEFAULT_VOYAGE_EMBEDDING_MODEL } from "./embeddings-voyage.js";
import {
createEmbeddingProvider,
type EmbeddingProvider,
@ -24,7 +16,15 @@ import {
type OllamaEmbeddingClient,
type OpenAiEmbeddingClient,
type VoyageEmbeddingClient,
} from "./embeddings.js";
} from "../extension-host/embedding-runtime.js";
import { createSubsystemLogger } from "../logging/subsystem.js";
import { onSessionTranscriptUpdate } from "../sessions/transcript-events.js";
import { resolveUserPath } from "../utils.js";
import { DEFAULT_GEMINI_EMBEDDING_MODEL } from "./embeddings-gemini.js";
import { DEFAULT_MISTRAL_EMBEDDING_MODEL } from "./embeddings-mistral.js";
import { DEFAULT_OLLAMA_EMBEDDING_MODEL } from "./embeddings-ollama.js";
import { DEFAULT_OPENAI_EMBEDDING_MODEL } from "./embeddings-openai.js";
import { DEFAULT_VOYAGE_EMBEDDING_MODEL } from "./embeddings-voyage.js";
import { isFileMissingError } from "./fs-utils.js";
import {
buildFileEntry,

View File

@ -6,7 +6,6 @@ import { resolveAgentDir, resolveAgentWorkspaceDir } from "../agents/agent-scope
import type { ResolvedMemorySearchConfig } from "../agents/memory-search.js";
import { resolveMemorySearchConfig } from "../agents/memory-search.js";
import type { OpenClawConfig } from "../config/config.js";
import { createSubsystemLogger } from "../logging/subsystem.js";
import {
createEmbeddingProvider,
type EmbeddingProvider,
@ -16,7 +15,8 @@ import {
type OllamaEmbeddingClient,
type OpenAiEmbeddingClient,
type VoyageEmbeddingClient,
} from "./embeddings.js";
} from "../extension-host/embedding-runtime.js";
import { createSubsystemLogger } from "../logging/subsystem.js";
import { isFileMissingError, statRegularFile } from "./fs-utils.js";
import { bm25RankToScore, buildFtsQuery, mergeHybridResults } from "./hybrid.js";
import { isMemoryPath, normalizeExtraMemoryPaths } from "./internal.js";