mirror of https://github.com/openclaw/openclaw.git
refactor: remove ollama sdk facades

parent 155f4300ba
commit 2aafa8fb7d

@@ -325,7 +325,7 @@ The same rule applies to other generated bundled-helper families such as:
 `plugin-sdk/chutes`, `plugin-sdk/deepseek`, `plugin-sdk/google`,
 `plugin-sdk/huggingface`, `plugin-sdk/kimi-coding`,
 `plugin-sdk/kilocode`, `plugin-sdk/minimax`, `plugin-sdk/mistral`,
-`plugin-sdk/nvidia`, `plugin-sdk/ollama*`, `plugin-sdk/opencode`,
+`plugin-sdk/nvidia`, `plugin-sdk/opencode`,
 `plugin-sdk/opencode-go`, `plugin-sdk/qianfan`, `plugin-sdk/sglang`,
 `plugin-sdk/synthetic`, `plugin-sdk/venice`, `plugin-sdk/vllm`,
 `plugin-sdk/xai`, `plugin-sdk/volcengine`,

@@ -272,7 +272,7 @@ explicitly promotes one as public.
 | Line | `plugin-sdk/line`, `plugin-sdk/line-core`, `plugin-sdk/line-runtime`, `plugin-sdk/line-surface` | Bundled LINE helper/runtime surface |
 | IRC | `plugin-sdk/irc`, `plugin-sdk/irc-surface` | Bundled IRC helper surface |
 | Channel-specific helpers | `plugin-sdk/googlechat`, `plugin-sdk/whatsapp-surface`, `plugin-sdk/zalouser`, `plugin-sdk/bluebubbles`, `plugin-sdk/bluebubbles-policy`, `plugin-sdk/mattermost`, `plugin-sdk/mattermost-policy`, `plugin-sdk/feishu-conversation`, `plugin-sdk/msteams`, `plugin-sdk/nextcloud-talk`, `plugin-sdk/nostr`, `plugin-sdk/tlon`, `plugin-sdk/twitch` | Bundled channel compatibility/helper seams. `plugin-sdk/whatsapp-surface` currently exports `DEFAULT_WEB_MEDIA_BYTES`, WhatsApp auth/account helpers, directory-config helpers, group-policy helpers, outbound-target resolution, and the narrow `WebChannelStatus` / `WebInboundMessage` / `WebListenerCloseReason` / `WebMonitorTuning` types. |
-| Provider-specific helpers | `plugin-sdk/openai`, `plugin-sdk/moonshot`, `plugin-sdk/qwen`, `plugin-sdk/qwen-definitions`, `plugin-sdk/modelstudio`, `plugin-sdk/modelstudio-definitions`, `plugin-sdk/provider-moonshot`, `plugin-sdk/together`, `plugin-sdk/amazon-bedrock`, `plugin-sdk/anthropic-vertex`, `plugin-sdk/cloudflare-ai-gateway`, `plugin-sdk/byteplus`, `plugin-sdk/chutes`, `plugin-sdk/deepseek`, `plugin-sdk/google`, `plugin-sdk/huggingface`, `plugin-sdk/kimi-coding`, `plugin-sdk/kilocode`, `plugin-sdk/minimax`, `plugin-sdk/mistral`, `plugin-sdk/nvidia`, `plugin-sdk/ollama`, `plugin-sdk/ollama-surface`, `plugin-sdk/opencode`, `plugin-sdk/opencode-go`, `plugin-sdk/qianfan`, `plugin-sdk/sglang`, `plugin-sdk/synthetic`, `plugin-sdk/venice`, `plugin-sdk/vllm`, `plugin-sdk/xai`, `plugin-sdk/volcengine` | Bundled provider-specific helper seams; prefer canonical `qwen*`, keep `modelstudio*` as compatibility aliases |
+| Provider-specific helpers | `plugin-sdk/openai`, `plugin-sdk/moonshot`, `plugin-sdk/qwen`, `plugin-sdk/qwen-definitions`, `plugin-sdk/modelstudio`, `plugin-sdk/modelstudio-definitions`, `plugin-sdk/provider-moonshot`, `plugin-sdk/together`, `plugin-sdk/amazon-bedrock`, `plugin-sdk/anthropic-vertex`, `plugin-sdk/cloudflare-ai-gateway`, `plugin-sdk/byteplus`, `plugin-sdk/chutes`, `plugin-sdk/deepseek`, `plugin-sdk/google`, `plugin-sdk/huggingface`, `plugin-sdk/kimi-coding`, `plugin-sdk/kilocode`, `plugin-sdk/minimax`, `plugin-sdk/mistral`, `plugin-sdk/nvidia`, `plugin-sdk/opencode`, `plugin-sdk/opencode-go`, `plugin-sdk/qianfan`, `plugin-sdk/sglang`, `plugin-sdk/synthetic`, `plugin-sdk/venice`, `plugin-sdk/vllm`, `plugin-sdk/xai`, `plugin-sdk/volcengine` | Bundled provider-specific helper seams; prefer canonical `qwen*`, keep `modelstudio*` as compatibility aliases |
 | Auth/plugin-specific helpers | `plugin-sdk/github-copilot-login`, `plugin-sdk/github-copilot-token`, `plugin-sdk/diagnostics-otel`, `plugin-sdk/diffs`, `plugin-sdk/llm-task`, `plugin-sdk/thread-ownership`, `plugin-sdk/voice-call` | Bundled feature/plugin helper seams; `plugin-sdk/github-copilot-token` currently exports `DEFAULT_COPILOT_API_BASE_URL`, `deriveCopilotApiBaseUrlFromToken`, and `resolveCopilotApiToken` |
 </Accordion>
 </AccordionGroup>

@@ -783,14 +783,6 @@
       "types": "./dist/plugin-sdk/nostr.d.ts",
       "default": "./dist/plugin-sdk/nostr.js"
     },
-    "./plugin-sdk/ollama": {
-      "types": "./dist/plugin-sdk/ollama.d.ts",
-      "default": "./dist/plugin-sdk/ollama.js"
-    },
-    "./plugin-sdk/ollama-surface": {
-      "types": "./dist/plugin-sdk/ollama-surface.d.ts",
-      "default": "./dist/plugin-sdk/ollama-surface.js"
-    },
     "./plugin-sdk/openai": {
       "types": "./dist/plugin-sdk/openai.d.ts",
       "default": "./dist/plugin-sdk/openai.js"
@@ -1,5 +1,5 @@
-export type { OllamaEmbeddingClient } from "../../../../src/plugin-sdk/ollama.js";
+export type { OllamaEmbeddingClient } from "../../../../extensions/ollama/runtime-api.js";
 export {
   createOllamaEmbeddingProvider,
   DEFAULT_OLLAMA_EMBEDDING_MODEL,
-} from "../../../../src/plugin-sdk/ollama.js";
+} from "../../../../extensions/ollama/runtime-api.js";

@@ -185,8 +185,6 @@
   "nextcloud-talk",
   "nvidia",
   "nostr",
-  "ollama",
-  "ollama-surface",
   "openai",
   "opencode",
   "opencode-go",

@@ -651,53 +651,6 @@ export const GENERATED_PLUGIN_SDK_FACADES = [
     source: pluginSource("nvidia", "api.js"),
     exports: ["buildNvidiaProvider"],
   },
-  {
-    subpath: "ollama",
-    source: pluginSource("ollama", "runtime-api.js"),
-    exports: [
-      "buildAssistantMessage",
-      "buildOllamaChatRequest",
-      "convertToOllamaMessages",
-      "createOllamaEmbeddingProvider",
-      "createConfiguredOllamaCompatNumCtxWrapper",
-      "createConfiguredOllamaCompatStreamWrapper",
-      "createConfiguredOllamaStreamFn",
-      "createOllamaStreamFn",
-      "DEFAULT_OLLAMA_EMBEDDING_MODEL",
-      "isOllamaCompatProvider",
-      "OLLAMA_NATIVE_BASE_URL",
-      "parseNdjsonStream",
-      "resolveOllamaBaseUrlForRun",
-      "resolveOllamaCompatNumCtxEnabled",
-      "shouldInjectOllamaCompatNumCtx",
-      "wrapOllamaCompatNumCtx",
-    ],
-    typeExports: ["OllamaEmbeddingClient", "OllamaEmbeddingProvider"],
-  },
-  {
-    subpath: "ollama-surface",
-    source: pluginSource("ollama", "api.js"),
-    exports: [
-      "buildOllamaModelDefinition",
-      "buildOllamaProvider",
-      "configureOllamaNonInteractive",
-      "ensureOllamaModelPulled",
-      "enrichOllamaModelsWithContext",
-      "fetchOllamaModels",
-      "OLLAMA_DEFAULT_BASE_URL",
-      "OLLAMA_DEFAULT_CONTEXT_WINDOW",
-      "OLLAMA_DEFAULT_COST",
-      "OLLAMA_DEFAULT_MAX_TOKENS",
-      "OLLAMA_DEFAULT_MODEL",
-      "OllamaModelWithContext",
-      "OllamaTagModel",
-      "OllamaTagsResponse",
-      "promptAndConfigureOllama",
-      "queryOllamaContextWindow",
-      "resolveOllamaApiBase",
-    ],
-    typeExports: ["OllamaModelWithContext", "OllamaTagModel", "OllamaTagsResponse"],
-  },
   {
     subpath: "openai",
     source: pluginSource("openai", "api.js"),
@@ -412,31 +412,6 @@ export interface PluginSdkFacadeTypeMap {
     };
     types: {};
   };
-  ollama: {
-    module: typeof import("@openclaw/ollama/runtime-api.js");
-    sourceModules: {
-      source1: {
-        module: typeof import("@openclaw/ollama/runtime-api.js");
-      };
-    };
-    types: {
-      OllamaEmbeddingClient: import("@openclaw/ollama/runtime-api.js").OllamaEmbeddingClient;
-      OllamaEmbeddingProvider: import("@openclaw/ollama/runtime-api.js").OllamaEmbeddingProvider;
-    };
-  };
-  "ollama-surface": {
-    module: typeof import("@openclaw/ollama/api.js");
-    sourceModules: {
-      source1: {
-        module: typeof import("@openclaw/ollama/api.js");
-      };
-    };
-    types: {
-      OllamaModelWithContext: import("@openclaw/ollama/api.js").OllamaModelWithContext;
-      OllamaTagModel: import("@openclaw/ollama/api.js").OllamaTagModel;
-      OllamaTagsResponse: import("@openclaw/ollama/api.js").OllamaTagsResponse;
-    };
-  };
   openai: {
     module: typeof import("@openclaw/openai/api.js");
     sourceModules: {

@@ -1,5 +1,5 @@
-export type { OllamaEmbeddingClient } from "../../plugin-sdk/ollama.js";
+export type { OllamaEmbeddingClient } from "../../../extensions/ollama/runtime-api.js";
 export {
   createOllamaEmbeddingProvider,
   DEFAULT_OLLAMA_EMBEDDING_MODEL,
-} from "../../plugin-sdk/ollama.js";
+} from "../../../extensions/ollama/runtime-api.js";

@ -1,63 +0,0 @@
|
|||
// Generated by scripts/generate-plugin-sdk-facades.mjs. Do not edit manually.
|
||||
import type { PluginSdkFacadeTypeMap } from "../generated/plugin-sdk-facade-type-map.generated.js";
|
||||
type FacadeEntry = PluginSdkFacadeTypeMap["ollama-surface"];
|
||||
type FacadeModule = FacadeEntry["module"];
|
||||
import {
|
||||
createLazyFacadeObjectValue,
|
||||
loadBundledPluginPublicSurfaceModuleSync,
|
||||
} from "./facade-runtime.js";
|
||||
|
||||
function loadFacadeModule(): FacadeModule {
|
||||
return loadBundledPluginPublicSurfaceModuleSync<FacadeModule>({
|
||||
dirName: "ollama",
|
||||
artifactBasename: "api.js",
|
||||
});
|
||||
}
|
||||
export const buildOllamaModelDefinition: FacadeModule["buildOllamaModelDefinition"] = ((...args) =>
|
||||
loadFacadeModule()["buildOllamaModelDefinition"](
|
||||
...args,
|
||||
)) as FacadeModule["buildOllamaModelDefinition"];
|
||||
export const buildOllamaProvider: FacadeModule["buildOllamaProvider"] = ((...args) =>
|
||||
loadFacadeModule()["buildOllamaProvider"](...args)) as FacadeModule["buildOllamaProvider"];
|
||||
export const configureOllamaNonInteractive: FacadeModule["configureOllamaNonInteractive"] = ((
|
||||
...args
|
||||
) =>
|
||||
loadFacadeModule()["configureOllamaNonInteractive"](
|
||||
...args,
|
||||
)) as FacadeModule["configureOllamaNonInteractive"];
|
||||
export const ensureOllamaModelPulled: FacadeModule["ensureOllamaModelPulled"] = ((...args) =>
|
||||
loadFacadeModule()["ensureOllamaModelPulled"](
|
||||
...args,
|
||||
)) as FacadeModule["ensureOllamaModelPulled"];
|
||||
export const enrichOllamaModelsWithContext: FacadeModule["enrichOllamaModelsWithContext"] = ((
|
||||
...args
|
||||
) =>
|
||||
loadFacadeModule()["enrichOllamaModelsWithContext"](
|
||||
...args,
|
||||
)) as FacadeModule["enrichOllamaModelsWithContext"];
|
||||
export const fetchOllamaModels: FacadeModule["fetchOllamaModels"] = ((...args) =>
|
||||
loadFacadeModule()["fetchOllamaModels"](...args)) as FacadeModule["fetchOllamaModels"];
|
||||
export const OLLAMA_DEFAULT_BASE_URL: FacadeModule["OLLAMA_DEFAULT_BASE_URL"] =
|
||||
loadFacadeModule()["OLLAMA_DEFAULT_BASE_URL"];
|
||||
export const OLLAMA_DEFAULT_CONTEXT_WINDOW: FacadeModule["OLLAMA_DEFAULT_CONTEXT_WINDOW"] =
|
||||
loadFacadeModule()["OLLAMA_DEFAULT_CONTEXT_WINDOW"];
|
||||
export const OLLAMA_DEFAULT_COST: FacadeModule["OLLAMA_DEFAULT_COST"] = createLazyFacadeObjectValue(
|
||||
() => loadFacadeModule()["OLLAMA_DEFAULT_COST"] as object,
|
||||
) as FacadeModule["OLLAMA_DEFAULT_COST"];
|
||||
export const OLLAMA_DEFAULT_MAX_TOKENS: FacadeModule["OLLAMA_DEFAULT_MAX_TOKENS"] =
|
||||
loadFacadeModule()["OLLAMA_DEFAULT_MAX_TOKENS"];
|
||||
export const OLLAMA_DEFAULT_MODEL: FacadeModule["OLLAMA_DEFAULT_MODEL"] =
|
||||
loadFacadeModule()["OLLAMA_DEFAULT_MODEL"];
|
||||
export const promptAndConfigureOllama: FacadeModule["promptAndConfigureOllama"] = ((...args) =>
|
||||
loadFacadeModule()["promptAndConfigureOllama"](
|
||||
...args,
|
||||
)) as FacadeModule["promptAndConfigureOllama"];
|
||||
export const queryOllamaContextWindow: FacadeModule["queryOllamaContextWindow"] = ((...args) =>
|
||||
loadFacadeModule()["queryOllamaContextWindow"](
|
||||
...args,
|
||||
)) as FacadeModule["queryOllamaContextWindow"];
|
||||
export const resolveOllamaApiBase: FacadeModule["resolveOllamaApiBase"] = ((...args) =>
|
||||
loadFacadeModule()["resolveOllamaApiBase"](...args)) as FacadeModule["resolveOllamaApiBase"];
|
||||
export type OllamaModelWithContext = FacadeEntry["types"]["OllamaModelWithContext"];
|
||||
export type OllamaTagModel = FacadeEntry["types"]["OllamaTagModel"];
|
||||
export type OllamaTagsResponse = FacadeEntry["types"]["OllamaTagsResponse"];
|
||||
|
|
@ -1,72 +0,0 @@
|
|||
// Generated by scripts/generate-plugin-sdk-facades.mjs. Do not edit manually.
|
||||
import type { PluginSdkFacadeTypeMap } from "../generated/plugin-sdk-facade-type-map.generated.js";
|
||||
type FacadeEntry = PluginSdkFacadeTypeMap["ollama"];
|
||||
type FacadeModule = FacadeEntry["module"];
|
||||
import { loadBundledPluginPublicSurfaceModuleSync } from "./facade-runtime.js";
|
||||
|
||||
function loadFacadeModule(): FacadeModule {
|
||||
return loadBundledPluginPublicSurfaceModuleSync<FacadeModule>({
|
||||
dirName: "ollama",
|
||||
artifactBasename: "runtime-api.js",
|
||||
});
|
||||
}
|
||||
export const buildAssistantMessage: FacadeModule["buildAssistantMessage"] = ((...args) =>
|
||||
loadFacadeModule()["buildAssistantMessage"](...args)) as FacadeModule["buildAssistantMessage"];
|
||||
export const buildOllamaChatRequest: FacadeModule["buildOllamaChatRequest"] = ((...args) =>
|
||||
loadFacadeModule()["buildOllamaChatRequest"](...args)) as FacadeModule["buildOllamaChatRequest"];
|
||||
export const convertToOllamaMessages: FacadeModule["convertToOllamaMessages"] = ((...args) =>
|
||||
loadFacadeModule()["convertToOllamaMessages"](
|
||||
...args,
|
||||
)) as FacadeModule["convertToOllamaMessages"];
|
||||
export const createOllamaEmbeddingProvider: FacadeModule["createOllamaEmbeddingProvider"] = ((
|
||||
...args
|
||||
) =>
|
||||
loadFacadeModule()["createOllamaEmbeddingProvider"](
|
||||
...args,
|
||||
)) as FacadeModule["createOllamaEmbeddingProvider"];
|
||||
export const createConfiguredOllamaCompatNumCtxWrapper: FacadeModule["createConfiguredOllamaCompatNumCtxWrapper"] =
|
||||
((...args) =>
|
||||
loadFacadeModule()["createConfiguredOllamaCompatNumCtxWrapper"](
|
||||
...args,
|
||||
)) as FacadeModule["createConfiguredOllamaCompatNumCtxWrapper"];
|
||||
export const createConfiguredOllamaCompatStreamWrapper: FacadeModule["createConfiguredOllamaCompatStreamWrapper"] =
|
||||
((...args) =>
|
||||
loadFacadeModule()["createConfiguredOllamaCompatStreamWrapper"](
|
||||
...args,
|
||||
)) as FacadeModule["createConfiguredOllamaCompatStreamWrapper"];
|
||||
export const createConfiguredOllamaStreamFn: FacadeModule["createConfiguredOllamaStreamFn"] = ((
|
||||
...args
|
||||
) =>
|
||||
loadFacadeModule()["createConfiguredOllamaStreamFn"](
|
||||
...args,
|
||||
)) as FacadeModule["createConfiguredOllamaStreamFn"];
|
||||
export const createOllamaStreamFn: FacadeModule["createOllamaStreamFn"] = ((...args) =>
|
||||
loadFacadeModule()["createOllamaStreamFn"](...args)) as FacadeModule["createOllamaStreamFn"];
|
||||
export const DEFAULT_OLLAMA_EMBEDDING_MODEL: FacadeModule["DEFAULT_OLLAMA_EMBEDDING_MODEL"] =
|
||||
loadFacadeModule()["DEFAULT_OLLAMA_EMBEDDING_MODEL"];
|
||||
export const isOllamaCompatProvider: FacadeModule["isOllamaCompatProvider"] = ((...args) =>
|
||||
loadFacadeModule()["isOllamaCompatProvider"](...args)) as FacadeModule["isOllamaCompatProvider"];
|
||||
export const OLLAMA_NATIVE_BASE_URL: FacadeModule["OLLAMA_NATIVE_BASE_URL"] =
|
||||
loadFacadeModule()["OLLAMA_NATIVE_BASE_URL"];
|
||||
export const parseNdjsonStream: FacadeModule["parseNdjsonStream"] = ((...args) =>
|
||||
loadFacadeModule()["parseNdjsonStream"](...args)) as FacadeModule["parseNdjsonStream"];
|
||||
export const resolveOllamaBaseUrlForRun: FacadeModule["resolveOllamaBaseUrlForRun"] = ((...args) =>
|
||||
loadFacadeModule()["resolveOllamaBaseUrlForRun"](
|
||||
...args,
|
||||
)) as FacadeModule["resolveOllamaBaseUrlForRun"];
|
||||
export const resolveOllamaCompatNumCtxEnabled: FacadeModule["resolveOllamaCompatNumCtxEnabled"] = ((
|
||||
...args
|
||||
) =>
|
||||
loadFacadeModule()["resolveOllamaCompatNumCtxEnabled"](
|
||||
...args,
|
||||
)) as FacadeModule["resolveOllamaCompatNumCtxEnabled"];
|
||||
export const shouldInjectOllamaCompatNumCtx: FacadeModule["shouldInjectOllamaCompatNumCtx"] = ((
|
||||
...args
|
||||
) =>
|
||||
loadFacadeModule()["shouldInjectOllamaCompatNumCtx"](
|
||||
...args,
|
||||
)) as FacadeModule["shouldInjectOllamaCompatNumCtx"];
|
||||
export const wrapOllamaCompatNumCtx: FacadeModule["wrapOllamaCompatNumCtx"] = ((...args) =>
|
||||
loadFacadeModule()["wrapOllamaCompatNumCtx"](...args)) as FacadeModule["wrapOllamaCompatNumCtx"];
|
||||
export type OllamaEmbeddingClient = FacadeEntry["types"]["OllamaEmbeddingClient"];
|
||||
export type OllamaEmbeddingProvider = FacadeEntry["types"]["OllamaEmbeddingProvider"];
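
Both deleted files follow the same lazy-facade pattern: each runtime export is a thin wrapper that loads the bundled module on first call via `loadBundledPluginPublicSurfaceModuleSync`. A stripped-down sketch of that pattern with generic names (only the loader's name appears in the source; everything else here is illustrative):

```ts
// Generic sketch of the lazy-facade wrapper pattern used by the deleted files.
type AnyFn = (...args: unknown[]) => unknown;

// Defer loading the underlying module until the wrapped export is first called.
function lazyExport<M extends Record<string, unknown>, K extends keyof M>(
  loadModule: () => M,
  key: K,
): M[K] {
  const wrapper = (...args: unknown[]) => (loadModule()[key] as AnyFn)(...args);
  return wrapper as unknown as M[K];
}

// Hypothetical wiring: lazily expose one export of a bundled module.
// const fetchOllamaModels = lazyExport(loadFacadeModule, "fetchOllamaModels");
```

Constants have no call site to defer to, which is presumably why the deleted files resolve them eagerly (for example `OLLAMA_NATIVE_BASE_URL`) or through `createLazyFacadeObjectValue` (for example `OLLAMA_DEFAULT_COST`).
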
@@ -691,7 +691,6 @@ describe("plugin-sdk subpath exports", () => {
         "VLLM_DEFAULT_BASE_URL",
       ],
     });
-    expectSourceOmitsSnippet("provider-setup", "./ollama-surface.js");
     expectSourceOmitsImportPattern("provider-setup", "./vllm.js");
     expectSourceOmitsImportPattern("provider-setup", "./sglang.js");
     expectSourceMentions("provider-auth", [