mirror of https://github.com/openclaw/openclaw.git
Core: move web media seam out of plugin sdk
This commit is contained in:
parent
947dac48f2
commit
73539ac787
|
|
@ -1,298 +1,295 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
import fs from "node:fs";
|
||||
import { builtinModules } from "node:module";
|
||||
import { promises as fs } from "node:fs";
|
||||
import path from "node:path";
|
||||
import process from "node:process";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import ts from "typescript";
|
||||
|
||||
const REPO_ROOT = process.cwd();
|
||||
const SCAN_ROOTS = ["src", "extensions", "scripts", "ui", "test"];
|
||||
const CODE_EXTENSIONS = new Set([".ts", ".tsx", ".mts", ".cts", ".js", ".jsx", ".mjs", ".cjs"]);
|
||||
const SKIP_DIRS = new Set([".git", "node_modules", "dist", "coverage", ".turbo", ".next", "build"]);
|
||||
const BUILTIN_PREFIXES = new Set(["node:"]);
|
||||
const BUILTIN_MODULES = new Set(
|
||||
builtinModules.flatMap((name) => [name, name.replace(/^node:/, "")]),
|
||||
);
|
||||
const INTERNAL_PREFIXES = ["openclaw/plugin-sdk", "openclaw/", "@/", "~/", "#"];
|
||||
const compareStrings = (a, b) => a.localeCompare(b);
|
||||
const repoRoot = path.resolve(path.dirname(fileURLToPath(import.meta.url)), "..");
|
||||
const srcRoot = path.join(repoRoot, "src");
|
||||
const workspacePackagePaths = ["ui/package.json"];
|
||||
const compareStrings = (left, right) => left.localeCompare(right);
|
||||
|
||||
function readJson(filePath) {
|
||||
return JSON.parse(fs.readFileSync(filePath, "utf8"));
|
||||
}
|
||||
|
||||
function normalizeSlashes(input) {
|
||||
return input.split(path.sep).join("/");
|
||||
}
|
||||
|
||||
function listFiles(rootRel) {
|
||||
const rootAbs = path.join(REPO_ROOT, rootRel);
|
||||
if (!fs.existsSync(rootAbs)) {
|
||||
return [];
|
||||
}
|
||||
const out = [];
|
||||
const stack = [rootAbs];
|
||||
while (stack.length > 0) {
|
||||
const current = stack.pop();
|
||||
if (!current) {
|
||||
continue;
|
||||
}
|
||||
const entries = fs.readdirSync(current, { withFileTypes: true });
|
||||
for (const entry of entries) {
|
||||
const abs = path.join(current, entry.name);
|
||||
if (entry.isDirectory()) {
|
||||
if (!SKIP_DIRS.has(entry.name)) {
|
||||
stack.push(abs);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if (!entry.isFile()) {
|
||||
continue;
|
||||
}
|
||||
if (!CODE_EXTENSIONS.has(path.extname(entry.name))) {
|
||||
continue;
|
||||
}
|
||||
out.push(abs);
|
||||
async function collectWorkspacePackagePaths() {
|
||||
const extensionsRoot = path.join(repoRoot, "extensions");
|
||||
const entries = await fs.readdir(extensionsRoot, { withFileTypes: true });
|
||||
for (const entry of entries) {
|
||||
if (entry.isDirectory()) {
|
||||
workspacePackagePaths.push(path.join("extensions", entry.name, "package.json"));
|
||||
}
|
||||
}
|
||||
out.sort((a, b) =>
|
||||
normalizeSlashes(path.relative(REPO_ROOT, a)).localeCompare(
|
||||
normalizeSlashes(path.relative(REPO_ROOT, b)),
|
||||
),
|
||||
}
|
||||
|
||||
function normalizePath(filePath) {
|
||||
return path.relative(repoRoot, filePath).split(path.sep).join("/");
|
||||
}
|
||||
|
||||
function isCodeFile(fileName) {
|
||||
return /\.(ts|tsx|mts|cts|js|jsx|mjs|cjs)$/.test(fileName);
|
||||
}
|
||||
|
||||
function isProductionLikeFile(relativePath) {
|
||||
return (
|
||||
!/(^|\/)(__tests__|fixtures)\//.test(relativePath) &&
|
||||
!/\.(test|spec)\.(ts|tsx|mts|cts|js|jsx|mjs|cjs)$/.test(relativePath)
|
||||
);
|
||||
return out;
|
||||
}
|
||||
|
||||
function extractSpecifiers(sourceText) {
|
||||
const specifiers = [];
|
||||
const patterns = [
|
||||
/\bimport\s+type\s+[^"'`]*?\sfrom\s+["'`]([^"'`]+)["'`]/g,
|
||||
/\bimport\s+[^"'`]*?\sfrom\s+["'`]([^"'`]+)["'`]/g,
|
||||
/\bexport\s+[^"'`]*?\sfrom\s+["'`]([^"'`]+)["'`]/g,
|
||||
/\bimport\s*\(\s*["'`]([^"'`]+)["'`]\s*\)/g,
|
||||
];
|
||||
for (const pattern of patterns) {
|
||||
for (const match of sourceText.matchAll(pattern)) {
|
||||
const specifier = match[1]?.trim();
|
||||
if (specifier) {
|
||||
specifiers.push(specifier);
|
||||
async function walkCodeFiles(rootDir) {
|
||||
const out = [];
|
||||
async function walk(dir) {
|
||||
const entries = await fs.readdir(dir, { withFileTypes: true });
|
||||
for (const entry of entries) {
|
||||
if (entry.name === "dist" || entry.name === "node_modules") {
|
||||
continue;
|
||||
}
|
||||
const fullPath = path.join(dir, entry.name);
|
||||
if (entry.isDirectory()) {
|
||||
await walk(fullPath);
|
||||
continue;
|
||||
}
|
||||
if (!entry.isFile() || !isCodeFile(entry.name)) {
|
||||
continue;
|
||||
}
|
||||
const relativePath = normalizePath(fullPath);
|
||||
if (!isProductionLikeFile(relativePath)) {
|
||||
continue;
|
||||
}
|
||||
out.push(fullPath);
|
||||
}
|
||||
}
|
||||
return specifiers;
|
||||
await walk(rootDir);
|
||||
return out.toSorted((left, right) => normalizePath(left).localeCompare(normalizePath(right)));
|
||||
}
|
||||
|
||||
function toRepoRelative(absPath) {
|
||||
return normalizeSlashes(path.relative(REPO_ROOT, absPath));
|
||||
function toLine(sourceFile, node) {
|
||||
return sourceFile.getLineAndCharacterOfPosition(node.getStart(sourceFile)).line + 1;
|
||||
}
|
||||
|
||||
function resolveRelativeImport(fileAbs, specifier) {
|
||||
if (!specifier.startsWith(".") && !specifier.startsWith("/")) {
|
||||
function resolveRelativeSpecifier(specifier, importerFile) {
|
||||
if (!specifier.startsWith(".")) {
|
||||
return null;
|
||||
}
|
||||
const fromDir = path.dirname(fileAbs);
|
||||
const baseAbs = specifier.startsWith("/")
|
||||
? path.join(REPO_ROOT, specifier)
|
||||
: path.resolve(fromDir, specifier);
|
||||
const candidatePaths = [
|
||||
baseAbs,
|
||||
`${baseAbs}.ts`,
|
||||
`${baseAbs}.tsx`,
|
||||
`${baseAbs}.mts`,
|
||||
`${baseAbs}.cts`,
|
||||
`${baseAbs}.js`,
|
||||
`${baseAbs}.jsx`,
|
||||
`${baseAbs}.mjs`,
|
||||
`${baseAbs}.cjs`,
|
||||
path.join(baseAbs, "index.ts"),
|
||||
path.join(baseAbs, "index.tsx"),
|
||||
path.join(baseAbs, "index.mts"),
|
||||
path.join(baseAbs, "index.cts"),
|
||||
path.join(baseAbs, "index.js"),
|
||||
path.join(baseAbs, "index.jsx"),
|
||||
path.join(baseAbs, "index.mjs"),
|
||||
path.join(baseAbs, "index.cjs"),
|
||||
];
|
||||
for (const candidate of candidatePaths) {
|
||||
if (fs.existsSync(candidate) && fs.statSync(candidate).isFile()) {
|
||||
return toRepoRelative(candidate);
|
||||
return normalizePath(path.resolve(path.dirname(importerFile), specifier));
|
||||
}
|
||||
|
||||
function normalizePluginSdkFamily(resolvedPath) {
|
||||
const relative = resolvedPath.replace(/^src\/plugin-sdk\//, "");
|
||||
return relative.replace(/\.(m|c)?[jt]sx?$/, "");
|
||||
}
|
||||
|
||||
function compareImports(left, right) {
|
||||
return (
|
||||
left.family.localeCompare(right.family) ||
|
||||
left.file.localeCompare(right.file) ||
|
||||
left.line - right.line ||
|
||||
left.kind.localeCompare(right.kind) ||
|
||||
left.specifier.localeCompare(right.specifier)
|
||||
);
|
||||
}
|
||||
|
||||
function collectPluginSdkImports(filePath, sourceFile) {
|
||||
const entries = [];
|
||||
|
||||
function push(kind, specifierNode, specifier) {
|
||||
const resolvedPath = resolveRelativeSpecifier(specifier, filePath);
|
||||
if (!resolvedPath?.startsWith("src/plugin-sdk/")) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
return normalizeSlashes(path.relative(REPO_ROOT, baseAbs));
|
||||
}
|
||||
|
||||
function getExternalPackageRoot(specifier) {
|
||||
if (!specifier) {
|
||||
return null;
|
||||
}
|
||||
if (!/^[a-zA-Z0-9@][a-zA-Z0-9@._/+:-]*$/.test(specifier)) {
|
||||
return null;
|
||||
}
|
||||
if (specifier.startsWith(".") || specifier.startsWith("/")) {
|
||||
return null;
|
||||
}
|
||||
if (Array.from(BUILTIN_PREFIXES).some((prefix) => specifier.startsWith(prefix))) {
|
||||
return null;
|
||||
}
|
||||
if (
|
||||
INTERNAL_PREFIXES.some((prefix) => specifier === prefix || specifier.startsWith(`${prefix}/`))
|
||||
) {
|
||||
return null;
|
||||
}
|
||||
if (BUILTIN_MODULES.has(specifier)) {
|
||||
return null;
|
||||
}
|
||||
if (specifier.startsWith("@")) {
|
||||
const [scope, name] = specifier.split("/");
|
||||
return scope && name ? `${scope}/${name}` : specifier;
|
||||
}
|
||||
const root = specifier.split("/")[0] ?? specifier;
|
||||
if (BUILTIN_MODULES.has(root)) {
|
||||
return null;
|
||||
}
|
||||
return root;
|
||||
}
|
||||
|
||||
function ensureArrayMap(map, key) {
|
||||
if (!map.has(key)) {
|
||||
map.set(key, []);
|
||||
}
|
||||
return map.get(key);
|
||||
}
|
||||
|
||||
const packageJson = readJson(path.join(REPO_ROOT, "package.json"));
|
||||
const declaredPackages = new Set([
|
||||
...Object.keys(packageJson.dependencies ?? {}),
|
||||
...Object.keys(packageJson.devDependencies ?? {}),
|
||||
...Object.keys(packageJson.peerDependencies ?? {}),
|
||||
...Object.keys(packageJson.optionalDependencies ?? {}),
|
||||
]);
|
||||
|
||||
const fileRecords = [];
|
||||
const publicSeamUsage = new Map();
|
||||
const sourceSeamUsage = new Map();
|
||||
const missingExternalUsage = new Map();
|
||||
|
||||
for (const root of SCAN_ROOTS) {
|
||||
for (const fileAbs of listFiles(root)) {
|
||||
const fileRel = toRepoRelative(fileAbs);
|
||||
const sourceText = fs.readFileSync(fileAbs, "utf8");
|
||||
const specifiers = extractSpecifiers(sourceText);
|
||||
const publicSeams = new Set();
|
||||
const sourceSeams = new Set();
|
||||
const externalPackages = new Set();
|
||||
|
||||
for (const specifier of specifiers) {
|
||||
if (specifier === "openclaw/plugin-sdk") {
|
||||
publicSeams.add("index");
|
||||
ensureArrayMap(publicSeamUsage, "index").push(fileRel);
|
||||
continue;
|
||||
}
|
||||
if (specifier.startsWith("openclaw/plugin-sdk/")) {
|
||||
const seam = specifier.slice("openclaw/plugin-sdk/".length);
|
||||
publicSeams.add(seam);
|
||||
ensureArrayMap(publicSeamUsage, seam).push(fileRel);
|
||||
continue;
|
||||
}
|
||||
|
||||
const resolvedRel = resolveRelativeImport(fileAbs, specifier);
|
||||
if (resolvedRel?.startsWith("src/plugin-sdk/")) {
|
||||
const seam = resolvedRel
|
||||
.slice("src/plugin-sdk/".length)
|
||||
.replace(/\.(tsx?|mts|cts|jsx?|mjs|cjs)$/, "")
|
||||
.replace(/\/index$/, "");
|
||||
sourceSeams.add(seam);
|
||||
ensureArrayMap(sourceSeamUsage, seam).push(fileRel);
|
||||
continue;
|
||||
}
|
||||
|
||||
const externalRoot = getExternalPackageRoot(specifier);
|
||||
if (!externalRoot) {
|
||||
continue;
|
||||
}
|
||||
externalPackages.add(externalRoot);
|
||||
if (!declaredPackages.has(externalRoot)) {
|
||||
ensureArrayMap(missingExternalUsage, externalRoot).push(fileRel);
|
||||
}
|
||||
}
|
||||
|
||||
fileRecords.push({
|
||||
file: fileRel,
|
||||
publicSeams: [...publicSeams].toSorted(compareStrings),
|
||||
sourceSeams: [...sourceSeams].toSorted(compareStrings),
|
||||
externalPackages: [...externalPackages].toSorted(compareStrings),
|
||||
entries.push({
|
||||
family: normalizePluginSdkFamily(resolvedPath),
|
||||
file: normalizePath(filePath),
|
||||
kind,
|
||||
line: toLine(sourceFile, specifierNode),
|
||||
resolvedPath,
|
||||
specifier,
|
||||
});
|
||||
}
|
||||
|
||||
function visit(node) {
|
||||
if (ts.isImportDeclaration(node) && ts.isStringLiteral(node.moduleSpecifier)) {
|
||||
push("import", node.moduleSpecifier, node.moduleSpecifier.text);
|
||||
} else if (
|
||||
ts.isExportDeclaration(node) &&
|
||||
node.moduleSpecifier &&
|
||||
ts.isStringLiteral(node.moduleSpecifier)
|
||||
) {
|
||||
push("export", node.moduleSpecifier, node.moduleSpecifier.text);
|
||||
} else if (
|
||||
ts.isCallExpression(node) &&
|
||||
node.expression.kind === ts.SyntaxKind.ImportKeyword &&
|
||||
node.arguments.length === 1 &&
|
||||
ts.isStringLiteral(node.arguments[0])
|
||||
) {
|
||||
push("dynamic-import", node.arguments[0], node.arguments[0].text);
|
||||
}
|
||||
ts.forEachChild(node, visit);
|
||||
}
|
||||
|
||||
visit(sourceFile);
|
||||
return entries;
|
||||
}
|
||||
|
||||
fileRecords.sort((a, b) => a.file.localeCompare(b.file));
|
||||
|
||||
const overlapFiles = fileRecords
|
||||
.filter((record) => record.publicSeams.length > 0 && record.sourceSeams.length > 0)
|
||||
.map((record) => ({
|
||||
file: record.file,
|
||||
publicSeams: record.publicSeams,
|
||||
sourceSeams: record.sourceSeams,
|
||||
overlappingSeams: record.publicSeams.filter((seam) => record.sourceSeams.includes(seam)),
|
||||
}))
|
||||
.toSorted((a, b) => a.file.localeCompare(b.file));
|
||||
|
||||
const seamFamilies = [...new Set([...publicSeamUsage.keys(), ...sourceSeamUsage.keys()])]
|
||||
.toSorted((a, b) => a.localeCompare(b))
|
||||
.map((seam) => ({
|
||||
seam,
|
||||
publicImporterCount: new Set(publicSeamUsage.get(seam) ?? []).size,
|
||||
sourceImporterCount: new Set(sourceSeamUsage.get(seam) ?? []).size,
|
||||
publicImporters: [...new Set(publicSeamUsage.get(seam) ?? [])].toSorted(compareStrings),
|
||||
sourceImporters: [...new Set(sourceSeamUsage.get(seam) ?? [])].toSorted(compareStrings),
|
||||
}))
|
||||
.filter((entry) => entry.publicImporterCount > 0 || entry.sourceImporterCount > 0);
|
||||
|
||||
const duplicatedSeamFamilies = seamFamilies.filter(
|
||||
(entry) => entry.publicImporterCount > 0 && entry.sourceImporterCount > 0,
|
||||
);
|
||||
|
||||
const missingPackages = [...missingExternalUsage.entries()]
|
||||
.map(([packageName, files]) => {
|
||||
const uniqueFiles = [...new Set(files)].toSorted(compareStrings);
|
||||
const byTopLevel = {};
|
||||
for (const file of uniqueFiles) {
|
||||
const topLevel = file.split("/")[0] ?? file;
|
||||
byTopLevel[topLevel] ??= [];
|
||||
byTopLevel[topLevel].push(file);
|
||||
async function collectCorePluginSdkImports() {
|
||||
const files = await walkCodeFiles(srcRoot);
|
||||
const inventory = [];
|
||||
for (const filePath of files) {
|
||||
if (normalizePath(filePath).startsWith("src/plugin-sdk/")) {
|
||||
continue;
|
||||
}
|
||||
const topLevelCounts = Object.entries(byTopLevel)
|
||||
.map(([scope, scopeFiles]) => ({
|
||||
scope,
|
||||
fileCount: scopeFiles.length,
|
||||
}))
|
||||
.toSorted((a, b) => b.fileCount - a.fileCount || a.scope.localeCompare(b.scope));
|
||||
return {
|
||||
packageName,
|
||||
importerCount: uniqueFiles.length,
|
||||
importers: uniqueFiles,
|
||||
topLevelCounts,
|
||||
};
|
||||
})
|
||||
.toSorted(
|
||||
(a, b) => b.importerCount - a.importerCount || a.packageName.localeCompare(b.packageName),
|
||||
const source = await fs.readFile(filePath, "utf8");
|
||||
const scriptKind =
|
||||
filePath.endsWith(".tsx") || filePath.endsWith(".jsx") ? ts.ScriptKind.TSX : ts.ScriptKind.TS;
|
||||
const sourceFile = ts.createSourceFile(
|
||||
filePath,
|
||||
source,
|
||||
ts.ScriptTarget.Latest,
|
||||
true,
|
||||
scriptKind,
|
||||
);
|
||||
inventory.push(...collectPluginSdkImports(filePath, sourceFile));
|
||||
}
|
||||
return inventory.toSorted(compareImports);
|
||||
}
|
||||
|
||||
function buildDuplicatedSeamFamilies(inventory) {
|
||||
const grouped = new Map();
|
||||
for (const entry of inventory) {
|
||||
const bucket = grouped.get(entry.family) ?? [];
|
||||
bucket.push(entry);
|
||||
grouped.set(entry.family, bucket);
|
||||
}
|
||||
|
||||
const duplicated = Object.fromEntries(
|
||||
[...grouped.entries()]
|
||||
.map(([family, entries]) => {
|
||||
const files = [...new Set(entries.map((entry) => entry.file))].toSorted(compareStrings);
|
||||
return [
|
||||
family,
|
||||
{
|
||||
count: entries.length,
|
||||
files,
|
||||
imports: entries,
|
||||
},
|
||||
];
|
||||
})
|
||||
.filter(([, value]) => value.files.length > 1)
|
||||
.toSorted((left, right) => right[1].count - left[1].count || left[0].localeCompare(right[0])),
|
||||
);
|
||||
|
||||
const summary = {
|
||||
scannedFileCount: fileRecords.length,
|
||||
filesUsingPublicPluginSdk: fileRecords.filter((record) => record.publicSeams.length > 0).length,
|
||||
filesUsingSourcePluginSdk: fileRecords.filter((record) => record.sourceSeams.length > 0).length,
|
||||
filesUsingBothPublicAndSourcePluginSdk: overlapFiles.length,
|
||||
duplicatedSeamFamilyCount: duplicatedSeamFamilies.length,
|
||||
missingExternalPackageCount: missingPackages.length,
|
||||
return duplicated;
|
||||
}
|
||||
|
||||
function buildOverlapFiles(inventory) {
|
||||
const byFile = new Map();
|
||||
for (const entry of inventory) {
|
||||
const bucket = byFile.get(entry.file) ?? [];
|
||||
bucket.push(entry);
|
||||
byFile.set(entry.file, bucket);
|
||||
}
|
||||
|
||||
return [...byFile.entries()]
|
||||
.map(([file, entries]) => {
|
||||
const families = [...new Set(entries.map((entry) => entry.family))].toSorted(compareStrings);
|
||||
return {
|
||||
file,
|
||||
families,
|
||||
imports: entries,
|
||||
};
|
||||
})
|
||||
.filter((entry) => entry.families.length > 1)
|
||||
.toSorted((left, right) => {
|
||||
return (
|
||||
right.families.length - left.families.length ||
|
||||
right.imports.length - left.imports.length ||
|
||||
left.file.localeCompare(right.file)
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
function packageClusterMeta(relativePackagePath) {
|
||||
if (relativePackagePath === "ui/package.json") {
|
||||
return {
|
||||
cluster: "ui",
|
||||
packageName: "openclaw-control-ui",
|
||||
packagePath: relativePackagePath,
|
||||
reachability: "workspace-ui",
|
||||
};
|
||||
}
|
||||
const cluster = relativePackagePath.split("/")[1];
|
||||
return {
|
||||
cluster,
|
||||
packageName: null,
|
||||
packagePath: relativePackagePath,
|
||||
reachability: relativePackagePath.startsWith("extensions/")
|
||||
? "extension-workspace"
|
||||
: "workspace",
|
||||
};
|
||||
}
|
||||
|
||||
async function buildMissingPackages() {
|
||||
const rootPackage = JSON.parse(await fs.readFile(path.join(repoRoot, "package.json"), "utf8"));
|
||||
const rootDeps = new Set([
|
||||
...Object.keys(rootPackage.dependencies ?? {}),
|
||||
...Object.keys(rootPackage.optionalDependencies ?? {}),
|
||||
...Object.keys(rootPackage.devDependencies ?? {}),
|
||||
]);
|
||||
|
||||
const pluginSdkEntrySources = await walkCodeFiles(path.join(repoRoot, "src", "plugin-sdk"));
|
||||
const pluginSdkReachability = new Map();
|
||||
for (const filePath of pluginSdkEntrySources) {
|
||||
const source = await fs.readFile(filePath, "utf8");
|
||||
const matches = [...source.matchAll(/from\s+"(\.\.\/\.\.\/extensions\/([^/]+)\/[^"]+)"/g)];
|
||||
for (const match of matches) {
|
||||
const cluster = match[2];
|
||||
const bucket = pluginSdkReachability.get(cluster) ?? new Set();
|
||||
bucket.add(normalizePath(filePath));
|
||||
pluginSdkReachability.set(cluster, bucket);
|
||||
}
|
||||
}
|
||||
|
||||
const output = [];
|
||||
for (const relativePackagePath of workspacePackagePaths.toSorted(compareStrings)) {
|
||||
const packagePath = path.join(repoRoot, relativePackagePath);
|
||||
let pkg;
|
||||
try {
|
||||
pkg = JSON.parse(await fs.readFile(packagePath, "utf8"));
|
||||
} catch {
|
||||
continue;
|
||||
}
|
||||
const missing = Object.keys(pkg.dependencies ?? {})
|
||||
.filter((dep) => dep !== "openclaw" && !rootDeps.has(dep))
|
||||
.toSorted(compareStrings);
|
||||
if (missing.length === 0) {
|
||||
continue;
|
||||
}
|
||||
const meta = packageClusterMeta(relativePackagePath);
|
||||
const pluginSdkEntries = [...(pluginSdkReachability.get(meta.cluster) ?? new Set())].toSorted(
|
||||
compareStrings,
|
||||
);
|
||||
output.push({
|
||||
cluster: meta.cluster,
|
||||
packageName: pkg.name ?? meta.packageName,
|
||||
packagePath: relativePackagePath,
|
||||
npmSpec: pkg.openclaw?.install?.npmSpec ?? null,
|
||||
private: pkg.private === true,
|
||||
pluginSdkReachability:
|
||||
pluginSdkEntries.length > 0 ? { staticEntryPoints: pluginSdkEntries } : undefined,
|
||||
missing,
|
||||
});
|
||||
}
|
||||
|
||||
return output.toSorted((left, right) => {
|
||||
return right.missing.length - left.missing.length || left.cluster.localeCompare(right.cluster);
|
||||
});
|
||||
}
|
||||
|
||||
await collectWorkspacePackagePaths();
|
||||
const inventory = await collectCorePluginSdkImports();
|
||||
const result = {
|
||||
duplicatedSeamFamilies: buildDuplicatedSeamFamilies(inventory),
|
||||
overlapFiles: buildOverlapFiles(inventory),
|
||||
missingPackages: await buildMissingPackages(),
|
||||
};
|
||||
|
||||
const report = {
|
||||
generatedAtUtc: new Date().toISOString(),
|
||||
repoRoot: REPO_ROOT,
|
||||
summary,
|
||||
duplicatedSeamFamilies,
|
||||
overlapFiles,
|
||||
missingPackages,
|
||||
};
|
||||
|
||||
process.stdout.write(`${JSON.stringify(report, null, 2)}\n`);
|
||||
process.stdout.write(`${JSON.stringify(result, null, 2)}\n`);
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
import path from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import type { ImageContent } from "@mariozechner/pi-ai";
|
||||
import { loadWebMedia } from "../../../plugin-sdk/web-media.js";
|
||||
import { loadWebMedia } from "../../../media/web-media.js";
|
||||
import { resolveUserPath } from "../../../utils.js";
|
||||
import type { ImageSanitizationLimits } from "../../image-sanitization.js";
|
||||
import {
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
|||
import * as imageGenerationRuntime from "../../image-generation/runtime.js";
|
||||
import * as imageOps from "../../media/image-ops.js";
|
||||
import * as mediaStore from "../../media/store.js";
|
||||
import * as webMedia from "../../plugin-sdk/web-media.js";
|
||||
import * as webMedia from "../../media/web-media.js";
|
||||
import {
|
||||
createImageGenerateTool,
|
||||
resolveImageGenerationModelConfigForTool,
|
||||
|
|
|
|||
|
|
@ -12,7 +12,7 @@ import type {
|
|||
} from "../../image-generation/types.js";
|
||||
import { getImageMetadata } from "../../media/image-ops.js";
|
||||
import { saveMediaBuffer } from "../../media/store.js";
|
||||
import { loadWebMedia } from "../../plugin-sdk/web-media.js";
|
||||
import { loadWebMedia } from "../../media/web-media.js";
|
||||
import { resolveUserPath } from "../../utils.js";
|
||||
import { ToolInputError, readNumberParam, readStringParam } from "./common.js";
|
||||
import { decodeDataUrl } from "./image-tool.helpers.js";
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ import { Type } from "@sinclair/typebox";
|
|||
import type { OpenClawConfig } from "../../config/config.js";
|
||||
import { getMediaUnderstandingProvider } from "../../media-understanding/providers/index.js";
|
||||
import { buildProviderRegistry } from "../../media-understanding/runner.js";
|
||||
import { loadWebMedia } from "../../plugin-sdk/web-media.js";
|
||||
import { loadWebMedia } from "../../media/web-media.js";
|
||||
import { resolveUserPath } from "../../utils.js";
|
||||
import { isMinimaxVlmProvider } from "../minimax-vlm.js";
|
||||
import {
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
import { type Api, type Model } from "@mariozechner/pi-ai";
|
||||
import type { OpenClawConfig } from "../../config/config.js";
|
||||
import { getDefaultLocalRoots } from "../../plugin-sdk/web-media.js";
|
||||
import { getDefaultLocalRoots } from "../../media/web-media.js";
|
||||
import type { ImageModelConfig } from "./image-tool.helpers.js";
|
||||
import type { ToolModelConfig } from "./model-config.helpers.js";
|
||||
import { getApiKeyForModel, normalizeWorkspaceDir, requireApiKey } from "./tool-runtime.helpers.js";
|
||||
|
|
|
|||
|
|
@ -140,7 +140,7 @@ async function stubPdfToolInfra(
|
|||
modelFound?: boolean;
|
||||
},
|
||||
) {
|
||||
const webMedia = await import("../../../extensions/whatsapp/src/media.js");
|
||||
const webMedia = await import("../../media/web-media.js");
|
||||
const loadSpy = vi.spyOn(webMedia, "loadWebMediaRaw").mockResolvedValue(FAKE_PDF_MEDIA as never);
|
||||
|
||||
const modelDiscovery = await import("../pi-model-discovery.js");
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ import { type Context, complete } from "@mariozechner/pi-ai";
|
|||
import { Type } from "@sinclair/typebox";
|
||||
import type { OpenClawConfig } from "../../config/config.js";
|
||||
import { extractPdfContent, type PdfExtractedContent } from "../../media/pdf-extract.js";
|
||||
import { loadWebMediaRaw } from "../../plugin-sdk/web-media.js";
|
||||
import { loadWebMediaRaw } from "../../media/web-media.js";
|
||||
import { resolveUserPath } from "../../utils.js";
|
||||
import {
|
||||
coerceImageModelConfig,
|
||||
|
|
|
|||
|
|
@ -10,7 +10,7 @@ export {
|
|||
} from "./plugin-sdk/whatsapp.js";
|
||||
export { extractMediaPlaceholder, extractText, monitorWebInbox } from "./plugin-sdk/whatsapp.js";
|
||||
export { loginWeb } from "./plugin-sdk/whatsapp.js";
|
||||
export { loadWebMedia, optimizeImageToJpeg } from "./plugin-sdk/whatsapp.js";
|
||||
export { loadWebMedia, optimizeImageToJpeg } from "./media/web-media.js";
|
||||
export { sendMessageWhatsApp } from "./plugin-sdk/whatsapp.js";
|
||||
export {
|
||||
createWaSocket,
|
||||
|
|
|
|||
|
|
@ -6,8 +6,8 @@ import type { ChannelId, ChannelMessageActionName } from "../../channels/plugins
|
|||
import type { OpenClawConfig } from "../../config/config.js";
|
||||
import { createRootScopedReadFile } from "../../infra/fs-safe.js";
|
||||
import { extensionForMime } from "../../media/mime.js";
|
||||
import { loadWebMedia } from "../../media/web-media.js";
|
||||
import { readBooleanParam as readBooleanParamShared } from "../../plugin-sdk/boolean-param.js";
|
||||
import { loadWebMedia } from "../../plugin-sdk/web-media.js";
|
||||
|
||||
export const readBooleanParam = readBooleanParamShared;
|
||||
|
||||
|
|
|
|||
|
|
@ -9,9 +9,9 @@ import { setActivePluginRegistry } from "../../plugins/runtime.js";
|
|||
import { createTestRegistry } from "../../test-utils/channel-plugins.js";
|
||||
import { resolvePreferredOpenClawTmpDir } from "../tmp-openclaw-dir.js";
|
||||
|
||||
vi.mock("../../../extensions/whatsapp/src/media.js", async () => {
|
||||
const actual = await vi.importActual<typeof import("../../../extensions/whatsapp/src/media.js")>(
|
||||
"../../../extensions/whatsapp/src/media.js",
|
||||
vi.mock("../../media/web-media.js", async () => {
|
||||
const actual = await vi.importActual<typeof import("../../media/web-media.js")>(
|
||||
"../../media/web-media.js",
|
||||
);
|
||||
return {
|
||||
...actual,
|
||||
|
|
@ -77,13 +77,13 @@ async function expectSandboxMediaRewrite(params: {
|
|||
}
|
||||
|
||||
type MessageActionRunnerModule = typeof import("./message-action-runner.js");
|
||||
type WhatsAppMediaModule = typeof import("../../../extensions/whatsapp/src/media.js");
|
||||
type WebMediaModule = typeof import("../../media/web-media.js");
|
||||
type SlackChannelModule = typeof import("../../../extensions/slack/src/channel.js");
|
||||
type RuntimeIndexModule = typeof import("../../plugins/runtime/index.js");
|
||||
type SlackRuntimeModule = typeof import("../../../extensions/slack/src/runtime.js");
|
||||
|
||||
let runMessageAction: MessageActionRunnerModule["runMessageAction"];
|
||||
let loadWebMedia: WhatsAppMediaModule["loadWebMedia"];
|
||||
let loadWebMedia: WebMediaModule["loadWebMedia"];
|
||||
let slackPlugin: SlackChannelModule["slackPlugin"];
|
||||
let createPluginRuntime: RuntimeIndexModule["createPluginRuntime"];
|
||||
let setSlackRuntime: SlackRuntimeModule["setSlackRuntime"];
|
||||
|
|
@ -96,7 +96,7 @@ function installSlackRuntime() {
|
|||
describe("runMessageAction media behavior", () => {
|
||||
beforeAll(async () => {
|
||||
({ runMessageAction } = await import("./message-action-runner.js"));
|
||||
({ loadWebMedia } = await import("../../../extensions/whatsapp/src/media.js"));
|
||||
({ loadWebMedia } = await import("../../media/web-media.js"));
|
||||
({ slackPlugin } = await import("../../../extensions/slack/src/channel.js"));
|
||||
({ createPluginRuntime } = await import("../../plugins/runtime/index.js"));
|
||||
({ setSlackRuntime } = await import("../../../extensions/slack/src/runtime.js"));
|
||||
|
|
@ -169,9 +169,9 @@ describe("runMessageAction media behavior", () => {
|
|||
});
|
||||
|
||||
async function restoreRealMediaLoader() {
|
||||
const actual = await vi.importActual<
|
||||
typeof import("../../../extensions/whatsapp/src/media.js")
|
||||
>("../../../extensions/whatsapp/src/media.js");
|
||||
const actual = await vi.importActual<typeof import("../../media/web-media.js")>(
|
||||
"../../media/web-media.js",
|
||||
);
|
||||
vi.mocked(loadWebMedia).mockImplementation(actual.loadWebMedia);
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
import { loadWebMedia } from "../plugin-sdk/web-media.js";
|
||||
import { buildOutboundMediaLoadOptions } from "./load-options.js";
|
||||
import { saveMediaBuffer } from "./store.js";
|
||||
import { loadWebMedia } from "./web-media.js";
|
||||
|
||||
export async function resolveOutboundAttachmentFromUrl(
|
||||
mediaUrl: string,
|
||||
|
|
|
|||
|
|
@ -0,0 +1,493 @@
|
|||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import { logVerbose, shouldLogVerbose } from "../globals.js";
|
||||
import { SafeOpenError, readLocalFileSafely } from "../infra/fs-safe.js";
|
||||
import type { SsrFPolicy } from "../infra/net/ssrf.js";
|
||||
import { resolveUserPath } from "../utils.js";
|
||||
import { maxBytesForKind, type MediaKind } from "./constants.js";
|
||||
import { fetchRemoteMedia } from "./fetch.js";
|
||||
import {
|
||||
convertHeicToJpeg,
|
||||
hasAlphaChannel,
|
||||
optimizeImageToPng,
|
||||
resizeToJpeg,
|
||||
} from "./image-ops.js";
|
||||
import { getDefaultMediaLocalRoots } from "./local-roots.js";
|
||||
import { detectMime, extensionForMime, kindFromMime } from "./mime.js";
|
||||
|
||||
export type WebMediaResult = {
|
||||
buffer: Buffer;
|
||||
contentType?: string;
|
||||
kind: MediaKind | undefined;
|
||||
fileName?: string;
|
||||
};
|
||||
|
||||
type WebMediaOptions = {
|
||||
maxBytes?: number;
|
||||
optimizeImages?: boolean;
|
||||
ssrfPolicy?: SsrFPolicy;
|
||||
/** Allowed root directories for local path reads. "any" is deprecated; prefer sandboxValidated + readFile. */
|
||||
localRoots?: readonly string[] | "any";
|
||||
/** Caller already validated the local path (sandbox/other guards); requires readFile override. */
|
||||
sandboxValidated?: boolean;
|
||||
readFile?: (filePath: string) => Promise<Buffer>;
|
||||
};
|
||||
|
||||
function resolveWebMediaOptions(params: {
|
||||
maxBytesOrOptions?: number | WebMediaOptions;
|
||||
options?: { ssrfPolicy?: SsrFPolicy; localRoots?: readonly string[] | "any" };
|
||||
optimizeImages: boolean;
|
||||
}): WebMediaOptions {
|
||||
if (typeof params.maxBytesOrOptions === "number" || params.maxBytesOrOptions === undefined) {
|
||||
return {
|
||||
maxBytes: params.maxBytesOrOptions,
|
||||
optimizeImages: params.optimizeImages,
|
||||
ssrfPolicy: params.options?.ssrfPolicy,
|
||||
localRoots: params.options?.localRoots,
|
||||
};
|
||||
}
|
||||
return {
|
||||
...params.maxBytesOrOptions,
|
||||
optimizeImages: params.optimizeImages
|
||||
? (params.maxBytesOrOptions.optimizeImages ?? true)
|
||||
: false,
|
||||
};
|
||||
}
|
||||
|
||||
export type LocalMediaAccessErrorCode =
|
||||
| "path-not-allowed"
|
||||
| "invalid-root"
|
||||
| "invalid-file-url"
|
||||
| "unsafe-bypass"
|
||||
| "not-found"
|
||||
| "invalid-path"
|
||||
| "not-file";
|
||||
|
||||
export class LocalMediaAccessError extends Error {
|
||||
code: LocalMediaAccessErrorCode;
|
||||
|
||||
constructor(code: LocalMediaAccessErrorCode, message: string, options?: ErrorOptions) {
|
||||
super(message, options);
|
||||
this.code = code;
|
||||
this.name = "LocalMediaAccessError";
|
||||
}
|
||||
}
|
||||
|
||||
export function getDefaultLocalRoots(): readonly string[] {
|
||||
return getDefaultMediaLocalRoots();
|
||||
}
|
||||
|
||||
/**
 * Throw unless `mediaPath` resolves inside one of the allowed root
 * directories.
 *
 * @param mediaPath - Local filesystem path requested by the caller.
 * @param localRoots - Allowed roots; "any" disables the check entirely, and
 *   undefined falls back to getDefaultLocalRoots() plus extra hardening
 *   against per-agent `workspace-*` state dirs (see below).
 * @throws LocalMediaAccessError "invalid-root" when an allowlisted root is
 *   the filesystem root, or "path-not-allowed" when the resolved path is
 *   outside every root.
 */
async function assertLocalMediaAllowed(
  mediaPath: string,
  localRoots: readonly string[] | "any" | undefined,
): Promise<void> {
  // "any" means the caller takes full responsibility (callers pair this with
  // a readFile override elsewhere).
  if (localRoots === "any") {
    return;
  }
  const roots = localRoots ?? getDefaultLocalRoots();
  // Resolve symlinks so a symlink under /tmp pointing to /etc/passwd is caught.
  let resolved: string;
  try {
    resolved = await fs.realpath(mediaPath);
  } catch {
    // realpath fails for non-existent paths; fall back to lexical resolution.
    resolved = path.resolve(mediaPath);
  }

  // Hardening: the default allowlist includes the OpenClaw temp dir, and tests/CI may
  // override the state dir into tmp. Avoid accidentally allowing per-agent
  // `workspace-*` state roots via the temp-root prefix match; require explicit
  // localRoots for those.
  if (localRoots === undefined) {
    const workspaceRoot = roots.find((root) => path.basename(root) === "workspace");
    if (workspaceRoot) {
      const stateDir = path.dirname(workspaceRoot);
      const rel = path.relative(stateDir, resolved);
      // `resolved` lies inside stateDir iff rel is non-empty, does not climb
      // upward ("..") and is not absolute (path.relative can return an
      // absolute path across drives on Windows).
      if (rel && !rel.startsWith("..") && !path.isAbsolute(rel)) {
        const firstSegment = rel.split(path.sep)[0] ?? "";
        if (firstSegment.startsWith("workspace-")) {
          throw new LocalMediaAccessError(
            "path-not-allowed",
            `Local media path is not under an allowed directory: ${mediaPath}`,
          );
        }
      }
    }
  }
  for (const root of roots) {
    // Resolve each root the same way as the target so the prefix comparison
    // below is symlink-consistent.
    let resolvedRoot: string;
    try {
      resolvedRoot = await fs.realpath(root);
    } catch {
      resolvedRoot = path.resolve(root);
    }
    // Refuse "/" (or a drive root) as an allowlist entry — it would permit
    // reading any file on the machine.
    if (resolvedRoot === path.parse(resolvedRoot).root) {
      throw new LocalMediaAccessError(
        "invalid-root",
        `Invalid localRoots entry (refuses filesystem root): ${root}. Pass a narrower directory.`,
      );
    }
    // Exact match, or strict prefix followed by a separator so that e.g.
    // "/tmp-evil" does not match a "/tmp" root.
    if (resolved === resolvedRoot || resolved.startsWith(resolvedRoot + path.sep)) {
      return;
    }
  }
  throw new LocalMediaAccessError(
    "path-not-allowed",
    `Local media path is not under an allowed directory: ${mediaPath}`,
  );
}
|
||||
|
||||
// Matches HEIC/HEIF mime types ("image/heic", "image/heif"), case-insensitive.
const HEIC_MIME_RE = /^image\/hei[cf]$/i;
// Matches ".heic"/".heif" file extensions, case-insensitive.
const HEIC_EXT_RE = /\.(heic|heif)$/i;
// Bytes per mebibyte, used by the size formatters below.
const MB = 1024 * 1024;
|
||||
|
||||
function formatMb(bytes: number, digits = 2): string {
|
||||
return (bytes / MB).toFixed(digits);
|
||||
}
|
||||
|
||||
function formatCapLimit(label: string, cap: number, size: number): string {
|
||||
return `${label} exceeds ${formatMb(cap, 0)}MB limit (got ${formatMb(size)}MB)`;
|
||||
}
|
||||
|
||||
function formatCapReduce(label: string, cap: number, size: number): string {
|
||||
return `${label} could not be reduced below ${formatMb(cap, 0)}MB (got ${formatMb(size)}MB)`;
|
||||
}
|
||||
|
||||
function isHeicSource(opts: { contentType?: string; fileName?: string }): boolean {
|
||||
if (opts.contentType && HEIC_MIME_RE.test(opts.contentType.trim())) {
|
||||
return true;
|
||||
}
|
||||
if (opts.fileName && HEIC_EXT_RE.test(opts.fileName.trim())) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
function toJpegFileName(fileName?: string): string | undefined {
|
||||
if (!fileName) {
|
||||
return undefined;
|
||||
}
|
||||
const trimmed = fileName.trim();
|
||||
if (!trimmed) {
|
||||
return fileName;
|
||||
}
|
||||
const parsed = path.parse(trimmed);
|
||||
if (!parsed.ext || HEIC_EXT_RE.test(parsed.ext)) {
|
||||
return path.format({ dir: parsed.dir, name: parsed.name || trimmed, ext: ".jpg" });
|
||||
}
|
||||
return path.format({ dir: parsed.dir, name: parsed.name, ext: ".jpg" });
|
||||
}
|
||||
|
||||
/** Result of one image optimization pass. */
type OptimizedImage = {
  /** The optimized image bytes. */
  buffer: Buffer;
  /** Byte length of `buffer`. */
  optimizedSize: number;
  /** Longest-side pixel bound used for the resize. */
  resizeSide: number;
  /** Output encoding chosen by the optimizer. */
  format: "jpeg" | "png";
  /** JPEG quality used; set by the JPEG path. */
  quality?: number;
  /** Compression level — presumably set by the PNG path; verify against optimizeImageToPng. */
  compressionLevel?: number;
};
|
||||
|
||||
function logOptimizedImage(params: { originalSize: number; optimized: OptimizedImage }): void {
|
||||
if (!shouldLogVerbose()) {
|
||||
return;
|
||||
}
|
||||
if (params.optimized.optimizedSize >= params.originalSize) {
|
||||
return;
|
||||
}
|
||||
if (params.optimized.format === "png") {
|
||||
logVerbose(
|
||||
`Optimized PNG (preserving alpha) from ${formatMb(params.originalSize)}MB to ${formatMb(params.optimized.optimizedSize)}MB (side<=${params.optimized.resizeSide}px)`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
logVerbose(
|
||||
`Optimized media from ${formatMb(params.originalSize)}MB to ${formatMb(params.optimized.optimizedSize)}MB (side<=${params.optimized.resizeSide}px, q=${params.optimized.quality})`,
|
||||
);
|
||||
}
|
||||
|
||||
async function optimizeImageWithFallback(params: {
|
||||
buffer: Buffer;
|
||||
cap: number;
|
||||
meta?: { contentType?: string; fileName?: string };
|
||||
}): Promise<OptimizedImage> {
|
||||
const { buffer, cap, meta } = params;
|
||||
const isPng = meta?.contentType === "image/png" || meta?.fileName?.toLowerCase().endsWith(".png");
|
||||
const hasAlpha = isPng && (await hasAlphaChannel(buffer));
|
||||
|
||||
if (hasAlpha) {
|
||||
const optimized = await optimizeImageToPng(buffer, cap);
|
||||
if (optimized.buffer.length <= cap) {
|
||||
return { ...optimized, format: "png" };
|
||||
}
|
||||
if (shouldLogVerbose()) {
|
||||
logVerbose(
|
||||
`PNG with alpha still exceeds ${formatMb(cap, 0)}MB after optimization; falling back to JPEG`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const optimized = await optimizeImageToJpeg(buffer, cap, meta);
|
||||
return { ...optimized, format: "jpeg" };
|
||||
}
|
||||
|
||||
/**
 * Shared implementation behind loadWebMedia/loadWebMediaRaw: load media from
 * an http(s) URL or a local filesystem path, enforce byte caps, and
 * (optionally) optimize images to fit under the cap.
 *
 * @param mediaUrl - http(s) URL, file:// URL, "MEDIA:"-prefixed path, "~"
 *   path, or plain local path.
 * @param options - See WebMediaOptions.
 * @throws LocalMediaAccessError for invalid or disallowed local paths.
 * @throws Error when the payload exceeds its cap (or cannot be reduced
 *   below it by optimization).
 */
async function loadWebMediaInternal(
  mediaUrl: string,
  options: WebMediaOptions = {},
): Promise<WebMediaResult> {
  const {
    maxBytes,
    optimizeImages = true,
    ssrfPolicy,
    localRoots,
    sandboxValidated = false,
    readFile: readFileOverride,
  } = options;
  // Strip MEDIA: prefix used by agent tools (e.g. TTS) to tag media paths.
  // Be lenient: LLM output may add extra whitespace (e.g. " MEDIA : /tmp/x.png").
  mediaUrl = mediaUrl.replace(/^\s*MEDIA\s*:\s*/i, "");
  // Use fileURLToPath for proper handling of file:// URLs (handles file://localhost/path, etc.)
  if (mediaUrl.startsWith("file://")) {
    try {
      mediaUrl = fileURLToPath(mediaUrl);
    } catch {
      throw new LocalMediaAccessError("invalid-file-url", `Invalid file:// URL: ${mediaUrl}`);
    }
  }

  // Optimize an image and enforce the cap. Also fixes up the reported
  // content type, and — for HEIC sources re-encoded as JPEG — the file name.
  const optimizeAndClampImage = async (
    buffer: Buffer,
    cap: number,
    meta?: { contentType?: string; fileName?: string },
  ) => {
    const originalSize = buffer.length;
    const optimized = await optimizeImageWithFallback({ buffer, cap, meta });
    logOptimizedImage({ originalSize, optimized });

    if (optimized.buffer.length > cap) {
      throw new Error(formatCapReduce("Media", cap, optimized.buffer.length));
    }

    const contentType = optimized.format === "png" ? "image/png" : "image/jpeg";
    const fileName =
      optimized.format === "jpeg" && meta && isHeicSource(meta)
        ? toJpegFileName(meta.fileName)
        : meta?.fileName;

    return {
      buffer: optimized.buffer,
      contentType,
      kind: "image" as const,
      fileName,
    };
  };

  // Apply the byte cap to any payload. Images may be optimized to fit; GIFs
  // and non-image media are rejected outright when over the cap.
  const clampAndFinalize = async (params: {
    buffer: Buffer;
    contentType?: string;
    kind: MediaKind | undefined;
    fileName?: string;
  }): Promise<WebMediaResult> => {
    // If caller explicitly provides maxBytes, trust it (for channels that handle large files).
    // Otherwise fall back to per-kind defaults.
    const cap = maxBytes !== undefined ? maxBytes : maxBytesForKind(params.kind ?? "document");
    if (params.kind === "image") {
      const isGif = params.contentType === "image/gif";
      if (isGif || !optimizeImages) {
        // GIFs are never re-encoded (presumably to preserve animation — verify);
        // raw mode skips optimization by request.
        if (params.buffer.length > cap) {
          throw new Error(formatCapLimit(isGif ? "GIF" : "Media", cap, params.buffer.length));
        }
        return {
          buffer: params.buffer,
          contentType: params.contentType,
          kind: params.kind,
          fileName: params.fileName,
        };
      }
      return {
        ...(await optimizeAndClampImage(params.buffer, cap, {
          contentType: params.contentType,
          fileName: params.fileName,
        })),
      };
    }
    if (params.buffer.length > cap) {
      throw new Error(formatCapLimit("Media", cap, params.buffer.length));
    }
    return {
      buffer: params.buffer,
      contentType: params.contentType ?? undefined,
      kind: params.kind,
      fileName: params.fileName,
    };
  };

  if (/^https?:\/\//i.test(mediaUrl)) {
    // Enforce a download cap during fetch to avoid unbounded memory usage.
    // For optimized images, allow fetching larger payloads before compression.
    const defaultFetchCap = maxBytesForKind("document");
    const fetchCap =
      maxBytes === undefined
        ? defaultFetchCap
        : optimizeImages
          ? Math.max(maxBytes, defaultFetchCap)
          : maxBytes;
    const fetched = await fetchRemoteMedia({ url: mediaUrl, maxBytes: fetchCap, ssrfPolicy });
    const { buffer, contentType, fileName } = fetched;
    const kind = kindFromMime(contentType);
    return await clampAndFinalize({ buffer, contentType, kind, fileName });
  }

  // Expand tilde paths to absolute paths (e.g., ~/Downloads/photo.jpg)
  if (mediaUrl.startsWith("~")) {
    mediaUrl = resolveUserPath(mediaUrl);
  }

  // A localRoots bypass without a caller-supplied reader would silently fall
  // through to unrestricted local reads below; refuse that combination.
  if ((sandboxValidated || localRoots === "any") && !readFileOverride) {
    throw new LocalMediaAccessError(
      "unsafe-bypass",
      "Refusing localRoots bypass without readFile override. Use sandboxValidated with readFile, or pass explicit localRoots.",
    );
  }

  // Guard local reads against allowed directory roots to prevent file exfiltration.
  if (!(sandboxValidated || localRoots === "any")) {
    await assertLocalMediaAllowed(mediaUrl, localRoots);
  }

  // Local path
  let data: Buffer;
  if (readFileOverride) {
    data = await readFileOverride(mediaUrl);
  } else {
    try {
      data = (await readLocalFileSafely({ filePath: mediaUrl })).buffer;
    } catch (err) {
      // Translate SafeOpenError codes into LocalMediaAccessError so callers
      // see a single error type for local-media failures; rethrow the rest.
      if (err instanceof SafeOpenError) {
        if (err.code === "not-found") {
          throw new LocalMediaAccessError("not-found", `Local media file not found: ${mediaUrl}`, {
            cause: err,
          });
        }
        if (err.code === "not-file") {
          throw new LocalMediaAccessError(
            "not-file",
            `Local media path is not a file: ${mediaUrl}`,
            { cause: err },
          );
        }
        throw new LocalMediaAccessError(
          "invalid-path",
          `Local media path is not safe to read: ${mediaUrl}`,
          { cause: err },
        );
      }
      throw err;
    }
  }
  const mime = await detectMime({ buffer: data, filePath: mediaUrl });
  const kind = kindFromMime(mime);
  let fileName = path.basename(mediaUrl) || undefined;
  // When the local name lacks an extension, derive one from the detected
  // mime type so downstream consumers get a usable file name.
  if (fileName && !path.extname(fileName) && mime) {
    const ext = extensionForMime(mime);
    if (ext) {
      fileName = `${fileName}${ext}`;
    }
  }
  return await clampAndFinalize({
    buffer: data,
    contentType: mime,
    kind,
    fileName,
  });
}
|
||||
|
||||
export async function loadWebMedia(
|
||||
mediaUrl: string,
|
||||
maxBytesOrOptions?: number | WebMediaOptions,
|
||||
options?: { ssrfPolicy?: SsrFPolicy; localRoots?: readonly string[] | "any" },
|
||||
): Promise<WebMediaResult> {
|
||||
return await loadWebMediaInternal(
|
||||
mediaUrl,
|
||||
resolveWebMediaOptions({ maxBytesOrOptions, options, optimizeImages: true }),
|
||||
);
|
||||
}
|
||||
|
||||
export async function loadWebMediaRaw(
|
||||
mediaUrl: string,
|
||||
maxBytesOrOptions?: number | WebMediaOptions,
|
||||
options?: { ssrfPolicy?: SsrFPolicy; localRoots?: readonly string[] | "any" },
|
||||
): Promise<WebMediaResult> {
|
||||
return await loadWebMediaInternal(
|
||||
mediaUrl,
|
||||
resolveWebMediaOptions({ maxBytesOrOptions, options, optimizeImages: false }),
|
||||
);
|
||||
}
|
||||
|
||||
export async function optimizeImageToJpeg(
|
||||
buffer: Buffer,
|
||||
maxBytes: number,
|
||||
opts: { contentType?: string; fileName?: string } = {},
|
||||
): Promise<{
|
||||
buffer: Buffer;
|
||||
optimizedSize: number;
|
||||
resizeSide: number;
|
||||
quality: number;
|
||||
}> {
|
||||
// Try a grid of sizes/qualities until under the limit.
|
||||
let source = buffer;
|
||||
if (isHeicSource(opts)) {
|
||||
try {
|
||||
source = await convertHeicToJpeg(buffer);
|
||||
} catch (err) {
|
||||
throw new Error(`HEIC image conversion failed: ${String(err)}`, { cause: err });
|
||||
}
|
||||
}
|
||||
const sides = [2048, 1536, 1280, 1024, 800];
|
||||
const qualities = [80, 70, 60, 50, 40];
|
||||
let smallest: {
|
||||
buffer: Buffer;
|
||||
size: number;
|
||||
resizeSide: number;
|
||||
quality: number;
|
||||
} | null = null;
|
||||
|
||||
for (const side of sides) {
|
||||
for (const quality of qualities) {
|
||||
try {
|
||||
const out = await resizeToJpeg({
|
||||
buffer: source,
|
||||
maxSide: side,
|
||||
quality,
|
||||
withoutEnlargement: true,
|
||||
});
|
||||
const size = out.length;
|
||||
if (!smallest || size < smallest.size) {
|
||||
smallest = { buffer: out, size, resizeSide: side, quality };
|
||||
}
|
||||
if (size <= maxBytes) {
|
||||
return {
|
||||
buffer: out,
|
||||
optimizedSize: size,
|
||||
resizeSide: side,
|
||||
quality,
|
||||
};
|
||||
}
|
||||
} catch {
|
||||
// Continue trying other size/quality combinations
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (smallest) {
|
||||
return {
|
||||
buffer: smallest.buffer,
|
||||
optimizedSize: smallest.size,
|
||||
resizeSide: smallest.resizeSide,
|
||||
quality: smallest.quality,
|
||||
};
|
||||
}
|
||||
|
||||
throw new Error("Failed to optimize image");
|
||||
}
|
||||
|
||||
export { optimizeImageToPng };
|
||||
|
|
@ -2,7 +2,7 @@ import { beforeEach, describe, expect, it, vi } from "vitest";
|
|||
|
||||
const loadWebMediaMock = vi.hoisted(() => vi.fn());
|
||||
|
||||
vi.mock("../../extensions/whatsapp/src/media.js", () => ({
|
||||
vi.mock("../media/web-media.js", () => ({
|
||||
loadWebMedia: loadWebMediaMock,
|
||||
}));
|
||||
|
||||
|
|
|
|||
|
|
@ -1,8 +1,8 @@
|
|||
import { loadWebMedia } from "../../../extensions/whatsapp/runtime-api.js";
|
||||
import { isVoiceCompatibleAudio } from "../../media/audio.js";
|
||||
import { mediaKindFromMime } from "../../media/constants.js";
|
||||
import { getImageMetadata, resizeToJpeg } from "../../media/image-ops.js";
|
||||
import { detectMime } from "../../media/mime.js";
|
||||
import { loadWebMedia } from "../../media/web-media.js";
|
||||
import type { PluginRuntime } from "./types.js";
|
||||
|
||||
export function createRuntimeMedia(): PluginRuntime["media"] {
|
||||
|
|
|
|||
Loading…
Reference in New Issue