Plugin SDK: split setup and sandbox subpaths

This commit is contained in:
Gustavo Madeira Santana 2026-03-16 12:04:32 +00:00
parent e78b51baea
commit 9fc6c1929a
No known key found for this signature in database
17 changed files with 214 additions and 147 deletions

View File

@ -1,10 +1,5 @@
import {
buildOllamaProvider,
emptyPluginConfigSchema,
ensureOllamaModelPulled,
OLLAMA_DEFAULT_BASE_URL,
promptAndConfigureOllama,
configureOllamaNonInteractive,
type OpenClawPluginApi,
type ProviderAuthContext,
type ProviderAuthMethodNonInteractiveContext,
@ -12,10 +7,15 @@ import {
type ProviderDiscoveryContext,
} from "openclaw/plugin-sdk/core";
import { resolveOllamaApiBase } from "../../src/agents/models-config.providers.discovery.js";
import { OLLAMA_DEFAULT_BASE_URL } from "../../src/agents/ollama-defaults.js";
const PROVIDER_ID = "ollama";
const DEFAULT_API_KEY = "ollama-local";
async function loadProviderSetup() {
return await import("openclaw/plugin-sdk/provider-setup");
}
const ollamaPlugin = {
id: "ollama",
name: "Ollama Provider",
@ -34,7 +34,8 @@ const ollamaPlugin = {
hint: "Cloud and local open models",
kind: "custom",
run: async (ctx: ProviderAuthContext): Promise<ProviderAuthResult> => {
const result = await promptAndConfigureOllama({
const providerSetup = await loadProviderSetup();
const result = await providerSetup.promptAndConfigureOllama({
cfg: ctx.config,
prompter: ctx.prompter,
});
@ -53,12 +54,14 @@ const ollamaPlugin = {
defaultModel: `ollama/${result.defaultModelId}`,
};
},
runNonInteractive: async (ctx: ProviderAuthMethodNonInteractiveContext) =>
configureOllamaNonInteractive({
runNonInteractive: async (ctx: ProviderAuthMethodNonInteractiveContext) => {
const providerSetup = await loadProviderSetup();
return await providerSetup.configureOllamaNonInteractive({
nextConfig: ctx.config,
opts: ctx.opts,
runtime: ctx.runtime,
}),
});
},
},
],
discovery: {
@ -81,7 +84,8 @@ const ollamaPlugin = {
};
}
const provider = await buildOllamaProvider(explicit?.baseUrl, {
const providerSetup = await loadProviderSetup();
const provider = await providerSetup.buildOllamaProvider(explicit?.baseUrl, {
quiet: !ollamaKey && !explicit,
});
if (provider.models.length === 0 && !ollamaKey && !explicit?.apiKey) {
@ -115,7 +119,8 @@ const ollamaPlugin = {
if (!model.startsWith("ollama/")) {
return;
}
await ensureOllamaModelPulled({ config, prompter });
const providerSetup = await loadProviderSetup();
await providerSetup.ensureOllamaModelPulled({ config, prompter });
},
});
},

View File

@ -1,5 +1,5 @@
import type { OpenClawPluginApi } from "openclaw/plugin-sdk/core";
import { registerSandboxBackend } from "openclaw/plugin-sdk/core";
import { registerSandboxBackend } from "openclaw/plugin-sdk/sandbox";
import {
createOpenShellSandboxBackendFactory,
createOpenShellSandboxBackendManager,

View File

@ -11,13 +11,13 @@ import type {
SandboxBackendHandle,
SandboxBackendManager,
SshSandboxSession,
} from "openclaw/plugin-sdk/core";
} from "openclaw/plugin-sdk/sandbox";
import {
createRemoteShellSandboxFsBridge,
disposeSshSandboxSession,
resolvePreferredOpenClawTmpDir,
runSshSandboxCommand,
} from "openclaw/plugin-sdk/core";
} from "openclaw/plugin-sdk/sandbox";
import {
buildExecRemoteCommand,
buildRemoteCommand,

View File

@ -4,10 +4,10 @@ import {
runPluginCommandWithTimeout,
shellEscape,
type SshSandboxSession,
} from "openclaw/plugin-sdk/core";
} from "openclaw/plugin-sdk/sandbox";
import type { ResolvedOpenShellPluginConfig } from "./config.js";
export { buildExecRemoteCommand, shellEscape } from "openclaw/plugin-sdk/core";
export { buildExecRemoteCommand, shellEscape } from "openclaw/plugin-sdk/sandbox";
export type OpenShellExecContext = {
config: ResolvedOpenShellPluginConfig;

View File

@ -5,7 +5,7 @@ import type {
SandboxFsBridge,
SandboxFsStat,
SandboxResolvedPath,
} from "openclaw/plugin-sdk/core";
} from "openclaw/plugin-sdk/sandbox";
import type { OpenShellSandboxBackend } from "./backend.js";
import { movePathWithCopyFallback } from "./mirror.js";

View File

@ -3,7 +3,7 @@ import {
type RemoteShellSandboxHandle,
type SandboxContext,
type SandboxFsBridge,
} from "openclaw/plugin-sdk/core";
} from "openclaw/plugin-sdk/sandbox";
export function createOpenShellRemoteFsBridge(params: {
sandbox: SandboxContext;

View File

@ -1,15 +1,20 @@
import {
buildSglangProvider,
configureOpenAICompatibleSelfHostedProviderNonInteractive,
discoverOpenAICompatibleSelfHostedProvider,
emptyPluginConfigSchema,
promptAndConfigureOpenAICompatibleSelfHostedProviderAuth,
type OpenClawPluginApi,
type ProviderAuthMethodNonInteractiveContext,
} from "openclaw/plugin-sdk/core";
import {
SGLANG_DEFAULT_API_KEY_ENV_VAR,
SGLANG_DEFAULT_BASE_URL,
SGLANG_MODEL_PLACEHOLDER,
SGLANG_PROVIDER_LABEL,
} from "../../src/agents/sglang-defaults.js";
const PROVIDER_ID = "sglang";
const DEFAULT_BASE_URL = "http://127.0.0.1:30000/v1";
async function loadProviderSetup() {
return await import("openclaw/plugin-sdk/provider-setup");
}
const sglangPlugin = {
id: "sglang",
@ -25,38 +30,44 @@ const sglangPlugin = {
auth: [
{
id: "custom",
label: "SGLang",
label: SGLANG_PROVIDER_LABEL,
hint: "Fast self-hosted OpenAI-compatible server",
kind: "custom",
run: async (ctx) =>
promptAndConfigureOpenAICompatibleSelfHostedProviderAuth({
run: async (ctx) => {
const providerSetup = await loadProviderSetup();
return await providerSetup.promptAndConfigureOpenAICompatibleSelfHostedProviderAuth({
cfg: ctx.config,
prompter: ctx.prompter,
providerId: PROVIDER_ID,
providerLabel: "SGLang",
defaultBaseUrl: DEFAULT_BASE_URL,
defaultApiKeyEnvVar: "SGLANG_API_KEY",
modelPlaceholder: "Qwen/Qwen3-8B",
}),
runNonInteractive: async (ctx: ProviderAuthMethodNonInteractiveContext) =>
configureOpenAICompatibleSelfHostedProviderNonInteractive({
providerLabel: SGLANG_PROVIDER_LABEL,
defaultBaseUrl: SGLANG_DEFAULT_BASE_URL,
defaultApiKeyEnvVar: SGLANG_DEFAULT_API_KEY_ENV_VAR,
modelPlaceholder: SGLANG_MODEL_PLACEHOLDER,
});
},
runNonInteractive: async (ctx: ProviderAuthMethodNonInteractiveContext) => {
const providerSetup = await loadProviderSetup();
return await providerSetup.configureOpenAICompatibleSelfHostedProviderNonInteractive({
ctx,
providerId: PROVIDER_ID,
providerLabel: "SGLang",
defaultBaseUrl: DEFAULT_BASE_URL,
defaultApiKeyEnvVar: "SGLANG_API_KEY",
modelPlaceholder: "Qwen/Qwen3-8B",
}),
providerLabel: SGLANG_PROVIDER_LABEL,
defaultBaseUrl: SGLANG_DEFAULT_BASE_URL,
defaultApiKeyEnvVar: SGLANG_DEFAULT_API_KEY_ENV_VAR,
modelPlaceholder: SGLANG_MODEL_PLACEHOLDER,
});
},
},
],
discovery: {
order: "late",
run: async (ctx) =>
discoverOpenAICompatibleSelfHostedProvider({
run: async (ctx) => {
const providerSetup = await loadProviderSetup();
return await providerSetup.discoverOpenAICompatibleSelfHostedProvider({
ctx,
providerId: PROVIDER_ID,
buildProvider: buildSglangProvider,
}),
buildProvider: providerSetup.buildSglangProvider,
});
},
},
wizard: {
setup: {

View File

@ -1,15 +1,20 @@
import {
buildVllmProvider,
configureOpenAICompatibleSelfHostedProviderNonInteractive,
discoverOpenAICompatibleSelfHostedProvider,
emptyPluginConfigSchema,
promptAndConfigureOpenAICompatibleSelfHostedProviderAuth,
type OpenClawPluginApi,
type ProviderAuthMethodNonInteractiveContext,
} from "openclaw/plugin-sdk/core";
import {
VLLM_DEFAULT_API_KEY_ENV_VAR,
VLLM_DEFAULT_BASE_URL,
VLLM_MODEL_PLACEHOLDER,
VLLM_PROVIDER_LABEL,
} from "../../src/agents/vllm-defaults.js";
const PROVIDER_ID = "vllm";
const DEFAULT_BASE_URL = "http://127.0.0.1:8000/v1";
async function loadProviderSetup() {
return await import("openclaw/plugin-sdk/provider-setup");
}
const vllmPlugin = {
id: "vllm",
@ -25,38 +30,44 @@ const vllmPlugin = {
auth: [
{
id: "custom",
label: "vLLM",
label: VLLM_PROVIDER_LABEL,
hint: "Local/self-hosted OpenAI-compatible server",
kind: "custom",
run: async (ctx) =>
promptAndConfigureOpenAICompatibleSelfHostedProviderAuth({
run: async (ctx) => {
const providerSetup = await loadProviderSetup();
return await providerSetup.promptAndConfigureOpenAICompatibleSelfHostedProviderAuth({
cfg: ctx.config,
prompter: ctx.prompter,
providerId: PROVIDER_ID,
providerLabel: "vLLM",
defaultBaseUrl: DEFAULT_BASE_URL,
defaultApiKeyEnvVar: "VLLM_API_KEY",
modelPlaceholder: "meta-llama/Meta-Llama-3-8B-Instruct",
}),
runNonInteractive: async (ctx: ProviderAuthMethodNonInteractiveContext) =>
configureOpenAICompatibleSelfHostedProviderNonInteractive({
providerLabel: VLLM_PROVIDER_LABEL,
defaultBaseUrl: VLLM_DEFAULT_BASE_URL,
defaultApiKeyEnvVar: VLLM_DEFAULT_API_KEY_ENV_VAR,
modelPlaceholder: VLLM_MODEL_PLACEHOLDER,
});
},
runNonInteractive: async (ctx: ProviderAuthMethodNonInteractiveContext) => {
const providerSetup = await loadProviderSetup();
return await providerSetup.configureOpenAICompatibleSelfHostedProviderNonInteractive({
ctx,
providerId: PROVIDER_ID,
providerLabel: "vLLM",
defaultBaseUrl: DEFAULT_BASE_URL,
defaultApiKeyEnvVar: "VLLM_API_KEY",
modelPlaceholder: "meta-llama/Meta-Llama-3-8B-Instruct",
}),
providerLabel: VLLM_PROVIDER_LABEL,
defaultBaseUrl: VLLM_DEFAULT_BASE_URL,
defaultApiKeyEnvVar: VLLM_DEFAULT_API_KEY_ENV_VAR,
modelPlaceholder: VLLM_MODEL_PLACEHOLDER,
});
},
},
],
discovery: {
order: "late",
run: async (ctx) =>
discoverOpenAICompatibleSelfHostedProvider({
run: async (ctx) => {
const providerSetup = await loadProviderSetup();
return await providerSetup.discoverOpenAICompatibleSelfHostedProvider({
ctx,
providerId: PROVIDER_ID,
buildProvider: buildVllmProvider,
}),
buildProvider: providerSetup.buildVllmProvider,
});
},
},
wizard: {
setup: {

View File

@ -50,6 +50,14 @@
"types": "./dist/plugin-sdk/compat.d.ts",
"default": "./dist/plugin-sdk/compat.js"
},
"./plugin-sdk/provider-setup": {
"types": "./dist/plugin-sdk/provider-setup.d.ts",
"default": "./dist/plugin-sdk/provider-setup.js"
},
"./plugin-sdk/sandbox": {
"types": "./dist/plugin-sdk/sandbox.d.ts",
"default": "./dist/plugin-sdk/sandbox.js"
},
"./plugin-sdk/routing": {
"types": "./dist/plugin-sdk/routing.d.ts",
"default": "./dist/plugin-sdk/routing.js"

View File

@ -0,0 +1 @@
// Base URL assumed for a locally running Ollama server when none is configured
// (11434 is Ollama's documented default port).
export const OLLAMA_DEFAULT_BASE_URL = "http://127.0.0.1:11434";

View File

@ -0,0 +1,4 @@
// Default settings for the SGLang self-hosted, OpenAI-compatible provider.
// These are plain constants so plugin entry points can import them eagerly
// without loading the heavier provider-setup machinery.
export const SGLANG_DEFAULT_BASE_URL = "http://127.0.0.1:30000/v1"; // local server; /v1 is the OpenAI-compatible route
export const SGLANG_PROVIDER_LABEL = "SGLang"; // human-readable label shown in auth/setup prompts
export const SGLANG_DEFAULT_API_KEY_ENV_VAR = "SGLANG_API_KEY"; // env var consulted for the API key by default
export const SGLANG_MODEL_PLACEHOLDER = "Qwen/Qwen3-8B"; // example model id used as an input placeholder

View File

@ -0,0 +1,4 @@
// Default settings for the vLLM self-hosted, OpenAI-compatible provider.
// These are plain constants so plugin entry points can import them eagerly
// without loading the heavier provider-setup machinery.
export const VLLM_DEFAULT_BASE_URL = "http://127.0.0.1:8000/v1"; // local server; /v1 is the OpenAI-compatible route
export const VLLM_PROVIDER_LABEL = "vLLM"; // human-readable label shown in auth/setup prompts
export const VLLM_DEFAULT_API_KEY_ENV_VAR = "VLLM_API_KEY"; // env var consulted for the API key by default
export const VLLM_MODEL_PLACEHOLDER = "meta-llama/Meta-Llama-3-8B-Instruct"; // example model id used as an input placeholder

View File

@ -30,27 +30,6 @@ export type {
ProviderAuthMethod,
ProviderAuthResult,
} from "../plugins/types.js";
export type {
CreateSandboxBackendParams,
RemoteShellSandboxHandle,
RunSshSandboxCommandParams,
SandboxBackendCommandParams,
SandboxBackendCommandResult,
SandboxBackendExecSpec,
SandboxBackendFactory,
SandboxFsBridge,
SandboxFsStat,
SandboxBackendHandle,
SandboxBackendId,
SandboxBackendManager,
SandboxBackendRegistration,
SandboxBackendRuntimeInfo,
SandboxContext,
SandboxResolvedPath,
SandboxSshConfig,
SshSandboxSession,
SshSandboxSettings,
} from "../agents/sandbox.js";
export type { OpenClawConfig } from "../config/config.js";
export type { GatewayRequestHandlerOptions } from "../gateway/server-methods/types.js";
export type {
@ -66,58 +45,7 @@ export type {
} from "./channel-plugin-common.js";
export { emptyPluginConfigSchema } from "./channel-plugin-common.js";
export {
buildExecRemoteCommand,
buildRemoteCommand,
buildSshSandboxArgv,
createRemoteShellSandboxFsBridge,
createSshSandboxSessionFromConfigText,
createSshSandboxSessionFromSettings,
disposeSshSandboxSession,
getSandboxBackendFactory,
getSandboxBackendManager,
registerSandboxBackend,
runSshSandboxCommand,
shellEscape,
uploadDirectoryToSshTarget,
requireSandboxBackendFactory,
} from "../agents/sandbox.js";
export { buildOauthProviderAuthResult } from "./provider-auth-result.js";
export {
applyProviderDefaultModel,
configureOpenAICompatibleSelfHostedProviderNonInteractive,
discoverOpenAICompatibleSelfHostedProvider,
promptAndConfigureOpenAICompatibleSelfHostedProvider,
promptAndConfigureOpenAICompatibleSelfHostedProviderAuth,
SELF_HOSTED_DEFAULT_CONTEXT_WINDOW,
SELF_HOSTED_DEFAULT_COST,
SELF_HOSTED_DEFAULT_MAX_TOKENS,
} from "../commands/self-hosted-provider-setup.js";
export {
OLLAMA_DEFAULT_BASE_URL,
OLLAMA_DEFAULT_MODEL,
configureOllamaNonInteractive,
ensureOllamaModelPulled,
promptAndConfigureOllama,
} from "../commands/ollama-setup.js";
export {
VLLM_DEFAULT_BASE_URL,
VLLM_DEFAULT_CONTEXT_WINDOW,
VLLM_DEFAULT_COST,
VLLM_DEFAULT_MAX_TOKENS,
promptAndConfigureVllm,
} from "../commands/vllm-setup.js";
export {
buildOllamaProvider,
buildSglangProvider,
buildVllmProvider,
} from "../agents/models-config.providers.discovery.js";
export {
approveDevicePairing,
listDevicePairing,
rejectDevicePairing,
} from "../infra/device-pairing.js";
export {
DEFAULT_SECRET_FILE_MAX_BYTES,
loadSecretFileSync,
@ -126,13 +54,6 @@ export {
} from "../infra/secret-file.js";
export type { SecretFileReadOptions, SecretFileReadResult } from "../infra/secret-file.js";
export {
runPluginCommandWithTimeout,
type PluginCommandRunOptions,
type PluginCommandRunResult,
} from "./run-command.js";
export { resolvePreferredOpenClawTmpDir } from "../infra/tmp-openclaw-dir.js";
export { resolveGatewayBindUrl } from "../shared/gateway-bind-url.js";
export type { GatewayBindUrlResult } from "../shared/gateway-bind-url.js";

View File

@ -0,0 +1,37 @@
// Plugin SDK subpath: "openclaw/plugin-sdk/provider-setup".
// Barrel module re-exporting the provider auth/discovery/setup helpers that
// were split out of the core subpath. Provider plugins are expected to load
// this lazily (dynamic import) so the setup machinery is only pulled in when
// an auth or discovery flow actually runs.

// Contract types shared with provider plugin implementations.
export type {
  OpenClawPluginApi,
  ProviderAuthContext,
  ProviderAuthMethodNonInteractiveContext,
  ProviderAuthResult,
  ProviderDiscoveryContext,
} from "../plugins/types.js";
// Generic helpers for self-hosted, OpenAI-compatible providers (vLLM, SGLang, ...).
export {
  applyProviderDefaultModel,
  configureOpenAICompatibleSelfHostedProviderNonInteractive,
  discoverOpenAICompatibleSelfHostedProvider,
  promptAndConfigureOpenAICompatibleSelfHostedProvider,
  promptAndConfigureOpenAICompatibleSelfHostedProviderAuth,
  SELF_HOSTED_DEFAULT_CONTEXT_WINDOW,
  SELF_HOSTED_DEFAULT_COST,
  SELF_HOSTED_DEFAULT_MAX_TOKENS,
} from "../commands/self-hosted-provider-setup.js";
// Ollama-specific setup flows and defaults.
export {
  OLLAMA_DEFAULT_BASE_URL,
  OLLAMA_DEFAULT_MODEL,
  configureOllamaNonInteractive,
  ensureOllamaModelPulled,
  promptAndConfigureOllama,
} from "../commands/ollama-setup.js";
// vLLM-specific setup flow and defaults.
export {
  VLLM_DEFAULT_BASE_URL,
  VLLM_DEFAULT_CONTEXT_WINDOW,
  VLLM_DEFAULT_COST,
  VLLM_DEFAULT_MAX_TOKENS,
  promptAndConfigureVllm,
} from "../commands/vllm-setup.js";
// Provider builders used by discovery to materialize provider entries.
export {
  buildOllamaProvider,
  buildSglangProvider,
  buildVllmProvider,
} from "../agents/models-config.providers.discovery.js";

45
src/plugin-sdk/sandbox.ts Normal file
View File

@ -0,0 +1,45 @@
// Plugin SDK subpath: "openclaw/plugin-sdk/sandbox".
// Barrel module re-exporting the sandbox-backend surface (types, backend
// registry, SSH session helpers, command runner) that was split out of the
// core subpath so sandbox plugins no longer pull it from "plugin-sdk/core".

// Sandbox backend contract and SSH session types.
export type {
  CreateSandboxBackendParams,
  RemoteShellSandboxHandle,
  RunSshSandboxCommandParams,
  SandboxBackendCommandParams,
  SandboxBackendCommandResult,
  SandboxBackendExecSpec,
  SandboxBackendFactory,
  SandboxFsBridge,
  SandboxFsStat,
  SandboxBackendHandle,
  SandboxBackendId,
  SandboxBackendManager,
  SandboxBackendRegistration,
  SandboxBackendRuntimeInfo,
  SandboxContext,
  SandboxResolvedPath,
  SandboxSshConfig,
  SshSandboxSession,
  SshSandboxSettings,
} from "../agents/sandbox.js";
// Runtime helpers: backend registry access, SSH session lifecycle, remote
// command construction/escaping, and the remote-shell fs bridge.
export {
  buildExecRemoteCommand,
  buildRemoteCommand,
  buildSshSandboxArgv,
  createRemoteShellSandboxFsBridge,
  createSshSandboxSessionFromConfigText,
  createSshSandboxSessionFromSettings,
  disposeSshSandboxSession,
  getSandboxBackendFactory,
  getSandboxBackendManager,
  registerSandboxBackend,
  requireSandboxBackendFactory,
  runSshSandboxCommand,
  shellEscape,
  uploadDirectoryToSshTarget,
} from "../agents/sandbox.js";
// Timeout-bounded command execution for plugin-issued commands.
export {
  runPluginCommandWithTimeout,
  type PluginCommandRunOptions,
  type PluginCommandRunResult,
} from "./run-command.js";
// Preferred temp-dir resolution used by sandbox backends.
export { resolvePreferredOpenClawTmpDir } from "../infra/tmp-openclaw-dir.js";

View File

@ -11,6 +11,8 @@ import * as imessageSdk from "openclaw/plugin-sdk/imessage";
import * as lineSdk from "openclaw/plugin-sdk/line";
import * as msteamsSdk from "openclaw/plugin-sdk/msteams";
import * as nostrSdk from "openclaw/plugin-sdk/nostr";
import * as providerSetupSdk from "openclaw/plugin-sdk/provider-setup";
import * as sandboxSdk from "openclaw/plugin-sdk/sandbox";
import * as signalSdk from "openclaw/plugin-sdk/signal";
import * as slackSdk from "openclaw/plugin-sdk/slack";
import * as telegramSdk from "openclaw/plugin-sdk/telegram";
@ -46,6 +48,24 @@ describe("plugin-sdk subpath exports", () => {
expect(typeof coreSdk.resolveThreadSessionKeys).toBe("function");
expect(typeof coreSdk.runPassiveAccountLifecycle).toBe("function");
expect(typeof coreSdk.createLoggerBackedRuntime).toBe("function");
expect("registerSandboxBackend" in asExports(coreSdk)).toBe(false);
expect("promptAndConfigureOpenAICompatibleSelfHostedProviderAuth" in asExports(coreSdk)).toBe(
false,
);
});
it("exports provider setup helpers from the dedicated subpath", () => {
expect(typeof providerSetupSdk.buildVllmProvider).toBe("function");
expect(typeof providerSetupSdk.discoverOpenAICompatibleSelfHostedProvider).toBe("function");
expect(typeof providerSetupSdk.promptAndConfigureOpenAICompatibleSelfHostedProviderAuth).toBe(
"function",
);
});
it("exports sandbox helpers from the dedicated subpath", () => {
expect(typeof sandboxSdk.registerSandboxBackend).toBe("function");
expect(typeof sandboxSdk.runPluginCommandWithTimeout).toBe("function");
expect(typeof sandboxSdk.createRemoteShellSandboxFsBridge).toBe("function");
});
it("exports shared core types used by bundled channels", () => {

View File

@ -22,8 +22,8 @@ vi.mock("../../../extensions/github-copilot/token.js", async () => {
};
});
vi.mock("openclaw/plugin-sdk/core", async () => {
const actual = await vi.importActual<object>("openclaw/plugin-sdk/core");
vi.mock("openclaw/plugin-sdk/provider-setup", async () => {
const actual = await vi.importActual<object>("openclaw/plugin-sdk/provider-setup");
return {
...actual,
buildOllamaProvider: (...args: unknown[]) => buildOllamaProviderMock(...args),