mirror of https://github.com/openclaw/openclaw.git
Docs: add config drift baseline statefile (#45891)
* Docs: add config drift statefile generator * Docs: generate config drift baseline * CI: move config docs drift runner into workflow sanity * Docs: emit config drift baseline json * Docs: commit config drift baseline json * Docs: wire config baseline into release checks * Config: fix baseline drift walker coverage * Docs: regenerate config drift baselines
This commit is contained in:
parent
432ea11248
commit
cbec476b6b
|
|
@ -4,6 +4,7 @@ on:
|
|||
pull_request:
|
||||
push:
|
||||
branches: [main]
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: workflow-sanity-${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||
|
|
@ -14,6 +15,7 @@ env:
|
|||
|
||||
jobs:
|
||||
no-tabs:
|
||||
if: github.event_name != 'workflow_dispatch'
|
||||
runs-on: blacksmith-16vcpu-ubuntu-2404
|
||||
steps:
|
||||
- name: Checkout
|
||||
|
|
@ -45,6 +47,7 @@ jobs:
|
|||
PY
|
||||
|
||||
actionlint:
|
||||
if: github.event_name != 'workflow_dispatch'
|
||||
runs-on: blacksmith-16vcpu-ubuntu-2404
|
||||
steps:
|
||||
- name: Checkout
|
||||
|
|
@ -68,3 +71,19 @@ jobs:
|
|||
|
||||
- name: Disallow direct inputs interpolation in composite run blocks
|
||||
run: python3 scripts/check-composite-action-input-interpolation.py
|
||||
|
||||
config-docs-drift:
|
||||
if: github.event_name == 'workflow_dispatch'
|
||||
runs-on: blacksmith-16vcpu-ubuntu-2404
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Setup Node environment
|
||||
uses: ./.github/actions/setup-node-env
|
||||
with:
|
||||
install-bun: "false"
|
||||
use-sticky-disk: "false"
|
||||
|
||||
- name: Check config docs drift statefile
|
||||
run: pnpm config:docs:check
|
||||
|
|
|
|||
|
|
@ -0,0 +1,8 @@
|
|||
# Generated Docs Artifacts
|
||||
|
||||
These baseline artifacts are generated from the repo-owned OpenClaw config schema and bundled channel/plugin metadata.
|
||||
|
||||
- Do not edit `config-baseline.json` by hand.
|
||||
- Do not edit `config-baseline.jsonl` by hand.
|
||||
- Regenerate them with `pnpm config:docs:gen`.
|
||||
- Validate them in CI or locally with `pnpm config:docs:check`.
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
|
|
@ -76,6 +76,7 @@ Historical note:
|
|||
- [ ] `pnpm check`
|
||||
- [ ] `pnpm test` (or `pnpm test:coverage` if you need coverage output)
|
||||
- [ ] `pnpm release:check` (verifies npm pack contents)
|
||||
- [ ] If `pnpm config:docs:check` fails as part of release validation and the config-surface change is intentional, run `pnpm config:docs:gen`, review `docs/.generated/config-baseline.json` and `docs/.generated/config-baseline.jsonl`, commit the updated baselines, then rerun `pnpm release:check`.
|
||||
- [ ] `OPENCLAW_INSTALL_SMOKE_SKIP_NONROOT=1 pnpm test:install:smoke` (Docker install smoke test, fast path; required before release)
|
||||
- If the immediate previous npm release is known broken, set `OPENCLAW_INSTALL_SMOKE_PREVIOUS=<last-good-version>` or `OPENCLAW_INSTALL_SMOKE_SKIP_PREVIOUS=1` for the preinstall step.
|
||||
- [ ] (Optional) Full installer smoke (adds non-root + CLI coverage): `pnpm test:install:smoke`
|
||||
|
|
|
|||
|
|
@ -233,6 +233,8 @@
|
|||
"check:docs": "pnpm format:docs:check && pnpm lint:docs && pnpm docs:check-links",
|
||||
"check:host-env-policy:swift": "node scripts/generate-host-env-security-policy-swift.mjs --check",
|
||||
"check:loc": "node --import tsx scripts/check-ts-max-loc.ts --max 500",
|
||||
"config:docs:check": "node --import tsx scripts/generate-config-doc-baseline.ts --check",
|
||||
"config:docs:gen": "node --import tsx scripts/generate-config-doc-baseline.ts --write",
|
||||
"deadcode:ci": "pnpm deadcode:report:ci:knip",
|
||||
"deadcode:knip": "pnpm dlx knip --config knip.config.ts --isolate-workspaces --production --no-progress --reporter compact --files --dependencies",
|
||||
"deadcode:report": "pnpm deadcode:knip; pnpm deadcode:ts-prune; pnpm deadcode:ts-unused",
|
||||
|
|
@ -298,7 +300,7 @@
|
|||
"protocol:check": "pnpm protocol:gen && pnpm protocol:gen:swift && git diff --exit-code -- dist/protocol.schema.json apps/macos/Sources/OpenClawProtocol/GatewayModels.swift apps/shared/OpenClawKit/Sources/OpenClawProtocol/GatewayModels.swift",
|
||||
"protocol:gen": "node --import tsx scripts/protocol-gen.ts",
|
||||
"protocol:gen:swift": "node --import tsx scripts/protocol-gen-swift.ts",
|
||||
"release:check": "node --import tsx scripts/release-check.ts",
|
||||
"release:check": "pnpm config:docs:check && node --import tsx scripts/release-check.ts",
|
||||
"release:openclaw:npm:check": "node --import tsx scripts/openclaw-npm-release-check.ts",
|
||||
"start": "node scripts/run-node.mjs",
|
||||
"test": "node scripts/test-parallel.mjs",
|
||||
|
|
|
|||
|
|
@ -0,0 +1,44 @@
|
|||
#!/usr/bin/env node
|
||||
import path from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import { writeConfigDocBaselineStatefile } from "../src/config/doc-baseline.js";
|
||||
|
||||
const args = new Set(process.argv.slice(2));
|
||||
const checkOnly = args.has("--check");
|
||||
|
||||
if (checkOnly && args.has("--write")) {
|
||||
console.error("Use either --check or --write, not both.");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const repoRoot = path.resolve(path.dirname(fileURLToPath(import.meta.url)), "..");
|
||||
const result = await writeConfigDocBaselineStatefile({
|
||||
repoRoot,
|
||||
check: checkOnly,
|
||||
});
|
||||
|
||||
if (checkOnly) {
|
||||
if (!result.changed) {
|
||||
console.log(
|
||||
`OK ${path.relative(repoRoot, result.jsonPath)} ${path.relative(repoRoot, result.statefilePath)}`,
|
||||
);
|
||||
process.exit(0);
|
||||
}
|
||||
console.error(
|
||||
[
|
||||
"Config baseline drift detected.",
|
||||
`Expected current: ${path.relative(repoRoot, result.jsonPath)}`,
|
||||
`Expected current: ${path.relative(repoRoot, result.statefilePath)}`,
|
||||
"If this config-surface change is intentional, run `pnpm config:docs:gen` and commit the updated baseline files.",
|
||||
"If not intentional, treat this as docs drift or a possible breaking config change and fix the schema/help changes first.",
|
||||
].join("\n"),
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
console.log(
|
||||
[
|
||||
`Wrote ${path.relative(repoRoot, result.jsonPath)}`,
|
||||
`Wrote ${path.relative(repoRoot, result.statefilePath)}`,
|
||||
].join("\n"),
|
||||
);
|
||||
|
|
@ -0,0 +1,160 @@
|
|||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import {
|
||||
buildConfigDocBaseline,
|
||||
collectConfigDocBaselineEntries,
|
||||
dedupeConfigDocBaselineEntries,
|
||||
normalizeConfigDocBaselineHelpPath,
|
||||
renderConfigDocBaselineStatefile,
|
||||
writeConfigDocBaselineStatefile,
|
||||
} from "./doc-baseline.js";
|
||||
|
||||
// Unit tests for the config-docs baseline generator: determinism, path
// normalization, entry coverage/metadata, union/tuple walking, and the
// check-mode drift detection of the statefile writer.
describe("config doc baseline", () => {
// Temp dirs created by individual tests; cleaned after each test.
const tempRoots: string[] = [];

afterEach(async () => {
await Promise.all(
tempRoots.splice(0).map(async (tempRoot) => {
await fs.rm(tempRoot, { recursive: true, force: true });
}),
);
});

// Two renders from the same repo state must be byte-identical, otherwise the
// drift check would flap in CI.
it("is deterministic across repeated runs", async () => {
const first = await renderConfigDocBaselineStatefile();
const second = await renderConfigDocBaselineStatefile();

expect(second.json).toBe(first.json);
expect(second.jsonl).toBe(first.jsonl);
});

// Array/record paths surface as `.*` wildcard segments in the baseline.
it("normalizes array and record paths to wildcard form", async () => {
const baseline = await buildConfigDocBaseline();
const paths = new Set(baseline.entries.map((entry) => entry.path));

expect(paths.has("session.sendPolicy.rules.*.match.keyPrefix")).toBe(true);
expect(paths.has("env.*")).toBe(true);
expect(normalizeConfigDocBaselineHelpPath("agents.list[].skills")).toBe("agents.list.*.skills");
});

// Each config surface (core/channel/plugin) is represented and carries its
// sensitivity metadata.
it("includes core, channel, and plugin config metadata", async () => {
const baseline = await buildConfigDocBaseline();
const byPath = new Map(baseline.entries.map((entry) => [entry.path, entry]));

expect(byPath.get("gateway.auth.token")).toMatchObject({
kind: "core",
sensitive: true,
});
expect(byPath.get("channels.telegram.botToken")).toMatchObject({
kind: "channel",
sensitive: true,
});
expect(byPath.get("plugins.entries.voice-call.config.twilio.authToken")).toMatchObject({
kind: "plugin",
sensitive: true,
});
});

// UI-hint help text and tags flow through to the baseline entries.
it("preserves help text and tags from merged schema hints", async () => {
const baseline = await buildConfigDocBaseline();
const byPath = new Map(baseline.entries.map((entry) => [entry.path, entry]));
const tokenEntry = byPath.get("gateway.auth.token");

expect(tokenEntry?.help).toContain("gateway access");
expect(tokenEntry?.tags).toContain("auth");
expect(tokenEntry?.tags).toContain("security");
});

// Hints written with legacy `[]` notation must still match wildcard paths.
it("matches array help hints that still use [] notation", async () => {
const baseline = await buildConfigDocBaseline();
const byPath = new Map(baseline.entries.map((entry) => [entry.path, entry]));

expect(byPath.get("session.sendPolicy.rules.*.match.keyPrefix")).toMatchObject({
help: expect.stringContaining("prefer rawKeyPrefix when exact full-key matching is required"),
sensitive: false,
});
});

// Keys nested inside oneOf/anyOf branches must still be collected.
it("walks union branches for nested config keys", async () => {
const baseline = await buildConfigDocBaseline();
const byPath = new Map(baseline.entries.map((entry) => [entry.path, entry]));

expect(byPath.get("bindings.*")).toMatchObject({
hasChildren: true,
});
expect(byPath.get("bindings.*.type")).toBeDefined();
expect(byPath.get("bindings.*.match.channel")).toBeDefined();
expect(byPath.get("bindings.*.match.peer.id")).toBeDefined();
});

// Tuple items share one `.*` path; dedupe must union their metadata rather
// than keep only the last item.
it("merges tuple item metadata instead of dropping earlier entries", () => {
const entries = dedupeConfigDocBaselineEntries(
collectConfigDocBaselineEntries(
{
type: "array",
items: [
{
type: "string",
enum: ["alpha"],
},
{
type: "number",
enum: [42],
},
],
},
{},
"tupleValues",
),
);
const tupleEntry = new Map(entries.map((entry) => [entry.path, entry])).get("tupleValues.*");

expect(tupleEntry).toMatchObject({
type: ["number", "string"],
});
expect(tupleEntry?.enumValues).toEqual(expect.arrayContaining([42, "alpha"]));
expect(tupleEntry?.enumValues).toHaveLength(2);
});

// End-to-end: write artifacts to a temp root, verify check-mode passes, then
// corrupt them and verify check-mode reports drift without rewriting.
it("supports check mode for stale generated artifacts", async () => {
const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-config-doc-baseline-"));
tempRoots.push(tempRoot);

const initial = await writeConfigDocBaselineStatefile({
repoRoot: tempRoot,
jsonPath: "docs/.generated/config-baseline.json",
statefilePath: "docs/.generated/config-baseline.jsonl",
});
expect(initial.wrote).toBe(true);

const current = await writeConfigDocBaselineStatefile({
repoRoot: tempRoot,
jsonPath: "docs/.generated/config-baseline.json",
statefilePath: "docs/.generated/config-baseline.jsonl",
check: true,
});
expect(current.changed).toBe(false);

// Overwrite both artifacts with stale content to force drift.
await fs.writeFile(
path.join(tempRoot, "docs/.generated/config-baseline.json"),
'{"generatedBy":"broken","entries":[]}\n',
"utf8",
);
await fs.writeFile(
path.join(tempRoot, "docs/.generated/config-baseline.jsonl"),
'{"recordType":"meta","generatedBy":"broken","totalPaths":0}\n',
"utf8",
);

const stale = await writeConfigDocBaselineStatefile({
repoRoot: tempRoot,
jsonPath: "docs/.generated/config-baseline.json",
statefilePath: "docs/.generated/config-baseline.jsonl",
check: true,
});
expect(stale.changed).toBe(true);
expect(stale.wrote).toBe(false);
});
});
|
||||
|
|
@ -0,0 +1,578 @@
|
|||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { fileURLToPath, pathToFileURL } from "node:url";
|
||||
import type { ChannelPlugin } from "../channels/plugins/index.js";
|
||||
import { resolveOpenClawPackageRootSync } from "../infra/openclaw-root.js";
|
||||
import { loadPluginManifestRegistry } from "../plugins/manifest-registry.js";
|
||||
import { FIELD_HELP } from "./schema.help.js";
|
||||
import { buildConfigSchema, type ConfigSchemaResponse } from "./schema.js";
|
||||
|
||||
// Recursive JSON-compatible value type used for normalized defaults/enum values.
type JsonValue = null | boolean | number | string | JsonValue[] | { [key: string]: JsonValue };

// Loose view of an arbitrary JSON-schema node before any shape checks.
type JsonSchemaNode = Record<string, unknown>;

// The subset of JSON-schema keywords this walker understands; everything else
// stays accessible through the Record index signature.
type JsonSchemaObject = JsonSchemaNode & {
type?: string | string[];
properties?: Record<string, JsonSchemaObject>;
required?: string[];
additionalProperties?: JsonSchemaObject | boolean;
items?: JsonSchemaObject | JsonSchemaObject[];
enum?: unknown[];
default?: unknown;
deprecated?: boolean;
anyOf?: JsonSchemaObject[];
allOf?: JsonSchemaObject[];
oneOf?: JsonSchemaObject[];
};

// Which part of the config surface an entry belongs to (see resolveEntryKind).
export type ConfigDocBaselineKind = "core" | "channel" | "plugin";

// One normalized config path in the baseline (wildcards use `.*` segments).
export type ConfigDocBaselineEntry = {
path: string;
kind: ConfigDocBaselineKind;
type?: string | string[];
required: boolean;
enumValues?: JsonValue[];
defaultValue?: JsonValue;
deprecated: boolean;
sensitive: boolean;
tags: string[];
label?: string;
help?: string;
hasChildren: boolean;
};

// Top-level shape of the generated JSON artifact.
export type ConfigDocBaseline = {
generatedBy: "scripts/generate-config-doc-baseline.ts";
entries: ConfigDocBaselineEntry[];
};

// In-memory render of both artifacts before they hit disk.
export type ConfigDocBaselineStatefileRender = {
json: string;
jsonl: string;
baseline: ConfigDocBaseline;
};

// Result of writing/checking the artifacts: `changed` is drift vs. disk,
// `wrote` is whether any file was actually (re)written.
export type ConfigDocBaselineStatefileWriteResult = {
changed: boolean;
wrote: boolean;
jsonPath: string;
statefilePath: string;
};

// Marker recorded in both artifacts so readers know how to regenerate them.
const GENERATED_BY = "scripts/generate-config-doc-baseline.ts" as const;
// Default artifact locations, relative to the repo root.
const DEFAULT_JSON_OUTPUT = "docs/.generated/config-baseline.json";
const DEFAULT_STATEFILE_OUTPUT = "docs/.generated/config-baseline.jsonl";
|
||||
// Locates the repository root. Prefers the project helper (which resolves the
// OpenClaw package root from this module's location); falls back to walking
// two directories up from this file (src/config/ -> repo root).
function resolveRepoRoot(): string {
const fromPackage = resolveOpenClawPackageRootSync({
cwd: path.dirname(fileURLToPath(import.meta.url)),
moduleUrl: import.meta.url,
});
if (fromPackage) {
return fromPackage;
}
// Fallback: assume this file sits at <root>/src/config/doc-baseline.ts.
return path.resolve(path.dirname(fileURLToPath(import.meta.url)), "../..");
}
|
||||
|
||||
function normalizeBaselinePath(rawPath: string): string {
|
||||
return rawPath
|
||||
.trim()
|
||||
.replace(/\[\]/g, ".*")
|
||||
.replace(/\[(\*|\d+)\]/g, ".*")
|
||||
.replace(/^\.+|\.+$/g, "")
|
||||
.replace(/\.+/g, ".");
|
||||
}
|
||||
|
||||
// Recursively converts an arbitrary value into a canonical JsonValue:
// non-finite numbers and non-JSON values become `undefined` (dropped),
// array entries that normalize to `undefined` are filtered out, and object
// keys are sorted so repeated renders are byte-identical.
function normalizeJsonValue(value: unknown): JsonValue | undefined {
if (value === null) {
return null;
}
if (typeof value === "string" || typeof value === "boolean") {
return value;
}
if (typeof value === "number") {
// NaN/Infinity are not representable in JSON — drop them.
return Number.isFinite(value) ? value : undefined;
}
if (Array.isArray(value)) {
const normalized = value
.map((entry) => normalizeJsonValue(entry))
.filter((entry): entry is JsonValue => entry !== undefined);
return normalized;
}
// Anything that is not a plain object at this point (function, symbol, …)
// has no JSON representation.
if (!value || typeof value !== "object") {
return undefined;
}

// Sort keys for determinism, recurse per value, and drop entries whose
// value normalizes to `undefined`.
const entries = Object.entries(value as Record<string, unknown>)
.toSorted(([left], [right]) => left.localeCompare(right))
.map(([key, entry]) => {
const normalized = normalizeJsonValue(entry);
return normalized === undefined ? null : ([key, normalized] as const);
})
.filter((entry): entry is readonly [string, JsonValue] => entry !== null);

return Object.fromEntries(entries);
}
|
||||
|
||||
function normalizeEnumValues(values: unknown[] | undefined): JsonValue[] | undefined {
|
||||
if (!values) {
|
||||
return undefined;
|
||||
}
|
||||
const normalized = values
|
||||
.map((entry) => normalizeJsonValue(entry))
|
||||
.filter((entry): entry is JsonValue => entry !== undefined);
|
||||
return normalized.length > 0 ? normalized : undefined;
|
||||
}
|
||||
|
||||
function asSchemaObject(value: unknown): JsonSchemaObject | null {
|
||||
if (!value || typeof value !== "object" || Array.isArray(value)) {
|
||||
return null;
|
||||
}
|
||||
return value as JsonSchemaObject;
|
||||
}
|
||||
|
||||
function schemaHasChildren(schema: JsonSchemaObject): boolean {
|
||||
if (schema.properties && Object.keys(schema.properties).length > 0) {
|
||||
return true;
|
||||
}
|
||||
if (schema.additionalProperties && typeof schema.additionalProperties === "object") {
|
||||
return true;
|
||||
}
|
||||
if (Array.isArray(schema.items)) {
|
||||
return schema.items.some((entry) => typeof entry === "object" && entry !== null);
|
||||
}
|
||||
for (const branch of [schema.oneOf, schema.anyOf, schema.allOf]) {
|
||||
if (branch?.some((entry) => entry && typeof entry === "object" && schemaHasChildren(entry))) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return Boolean(schema.items && typeof schema.items === "object");
|
||||
}
|
||||
|
||||
function splitHintLookupPath(path: string): string[] {
|
||||
const normalized = normalizeBaselinePath(path);
|
||||
return normalized ? normalized.split(".").filter(Boolean) : [];
|
||||
}
|
||||
|
||||
// Finds the UI hint whose path best matches `path`. Hint paths may contain `*`
// wildcard segments; a hint matches only when it has the same segment count and
// every segment is either equal or a wildcard. Among matches, the hint with the
// FEWEST wildcards wins (most specific); ties keep the first hint encountered
// in Object.entries order.
function resolveUiHintMatch(
uiHints: ConfigSchemaResponse["uiHints"],
path: string,
): ConfigSchemaResponse["uiHints"][string] | undefined {
const targetParts = splitHintLookupPath(path);
let bestMatch:
| {
hint: ConfigSchemaResponse["uiHints"][string];
wildcardCount: number;
}
| undefined;

for (const [hintPath, hint] of Object.entries(uiHints)) {
const hintParts = splitHintLookupPath(hintPath);
// Wildcards match exactly one segment, so lengths must agree.
if (hintParts.length !== targetParts.length) {
continue;
}

let wildcardCount = 0;
let matches = true;
for (let index = 0; index < hintParts.length; index += 1) {
const hintPart = hintParts[index];
const targetPart = targetParts[index];
if (hintPart === targetPart) {
continue;
}
if (hintPart === "*") {
wildcardCount += 1;
continue;
}
// Literal segment mismatch — this hint cannot apply.
matches = false;
break;
}

if (!matches) {
continue;
}
// Strict `<` keeps the earlier hint on wildcard-count ties.
if (!bestMatch || wildcardCount < bestMatch.wildcardCount) {
bestMatch = { hint, wildcardCount };
}
}

return bestMatch?.hint;
}
|
||||
|
||||
function normalizeTypeValue(value: string | string[] | undefined): string | string[] | undefined {
|
||||
if (!value) {
|
||||
return undefined;
|
||||
}
|
||||
if (Array.isArray(value)) {
|
||||
const normalized = [...new Set(value)].toSorted((left, right) => left.localeCompare(right));
|
||||
return normalized.length === 1 ? normalized[0] : normalized;
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
function mergeTypeValues(
|
||||
left: string | string[] | undefined,
|
||||
right: string | string[] | undefined,
|
||||
): string | string[] | undefined {
|
||||
const merged = new Set<string>();
|
||||
for (const value of [left, right]) {
|
||||
if (!value) {
|
||||
continue;
|
||||
}
|
||||
if (Array.isArray(value)) {
|
||||
for (const entry of value) {
|
||||
merged.add(entry);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
merged.add(value);
|
||||
}
|
||||
return normalizeTypeValue([...merged]);
|
||||
}
|
||||
|
||||
function areJsonValuesEqual(left: JsonValue | undefined, right: JsonValue | undefined): boolean {
|
||||
return JSON.stringify(left) === JSON.stringify(right);
|
||||
}
|
||||
|
||||
function mergeJsonValueArrays(
|
||||
left: JsonValue[] | undefined,
|
||||
right: JsonValue[] | undefined,
|
||||
): JsonValue[] | undefined {
|
||||
if (!left?.length) {
|
||||
return right ? [...right] : undefined;
|
||||
}
|
||||
if (!right?.length) {
|
||||
return [...left];
|
||||
}
|
||||
|
||||
const merged = new Map<string, JsonValue>();
|
||||
for (const value of [...left, ...right]) {
|
||||
merged.set(JSON.stringify(value), value);
|
||||
}
|
||||
return [...merged.entries()]
|
||||
.toSorted(([leftKey], [rightKey]) => leftKey.localeCompare(rightKey))
|
||||
.map(([, value]) => value);
|
||||
}
|
||||
|
||||
// Merges two entries for the same path (e.g. from different union branches).
// Policy: types/enums/tags union; `required` only if required in every branch;
// deprecated/sensitive/hasChildren if true in any branch; label/help keep the
// first non-null value; defaultValue survives only when both sides agree.
function mergeConfigDocBaselineEntry(
current: ConfigDocBaselineEntry,
next: ConfigDocBaselineEntry,
): ConfigDocBaselineEntry {
const label = current.label === next.label ? current.label : (current.label ?? next.label);
const help = current.help === next.help ? current.help : (current.help ?? next.help);
// Conflicting defaults are ambiguous, so they are dropped entirely.
const defaultValue = areJsonValuesEqual(current.defaultValue, next.defaultValue)
? (current.defaultValue ?? next.defaultValue)
: undefined;

return {
path: current.path,
kind: current.kind,
type: mergeTypeValues(current.type, next.type),
required: current.required && next.required,
enumValues: mergeJsonValueArrays(current.enumValues, next.enumValues),
defaultValue,
deprecated: current.deprecated || next.deprecated,
sensitive: current.sensitive || next.sensitive,
// Dedupe and sort tags so merge order never changes the output.
tags: [...new Set([...current.tags, ...next.tags])].toSorted((left, right) =>
left.localeCompare(right),
),
label,
help,
hasChildren: current.hasChildren || next.hasChildren,
};
}
|
||||
|
||||
function resolveEntryKind(configPath: string): ConfigDocBaselineKind {
|
||||
if (configPath.startsWith("channels.")) {
|
||||
return "channel";
|
||||
}
|
||||
if (configPath.startsWith("plugins.entries.")) {
|
||||
return "plugin";
|
||||
}
|
||||
return "core";
|
||||
}
|
||||
|
||||
async function resolveFirstExistingPath(candidates: string[]): Promise<string | null> {
|
||||
for (const candidate of candidates) {
|
||||
try {
|
||||
await fs.access(candidate);
|
||||
return candidate;
|
||||
} catch {
|
||||
// Keep scanning for other source file variants.
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function isChannelPlugin(value: unknown): value is ChannelPlugin {
|
||||
if (!value || typeof value !== "object") {
|
||||
return false;
|
||||
}
|
||||
const candidate = value as { id?: unknown; meta?: unknown; capabilities?: unknown };
|
||||
return typeof candidate.id === "string" && typeof candidate.meta === "object";
|
||||
}
|
||||
|
||||
// Dynamically imports a bundled channel plugin from its source tree. Probes a
// fixed list of conventional entry files (channel/plugin/index in .ts/.js/.mts/
// .mjs), imports the first that exists, and returns the first export that
// satisfies isChannelPlugin — either directly, or as the result of calling a
// zero-argument factory export.
async function importChannelPluginModule(rootDir: string): Promise<ChannelPlugin> {
const modulePath = await resolveFirstExistingPath([
path.join(rootDir, "src", "channel.ts"),
path.join(rootDir, "src", "channel.js"),
path.join(rootDir, "src", "plugin.ts"),
path.join(rootDir, "src", "plugin.js"),
path.join(rootDir, "src", "index.ts"),
path.join(rootDir, "src", "index.js"),
path.join(rootDir, "src", "channel.mts"),
path.join(rootDir, "src", "channel.mjs"),
path.join(rootDir, "src", "plugin.mts"),
path.join(rootDir, "src", "plugin.mjs"),
]);
if (!modulePath) {
throw new Error(`channel source not found under ${rootDir}`);
}

const imported = (await import(pathToFileURL(modulePath).href)) as Record<string, unknown>;
for (const value of Object.values(imported)) {
if (isChannelPlugin(value)) {
return value;
}
// Some plugins export a zero-arg factory instead of the plugin object.
// NOTE(review): an async factory would return a Promise here and fail the
// guard silently — confirm all bundled factories are synchronous.
if (typeof value === "function" && value.length === 0) {
const resolved = value();
if (isChannelPlugin(resolved)) {
return resolved;
}
}
}

throw new Error(`channel plugin export not found in ${modulePath}`);
}
|
||||
|
||||
// Builds the full config schema response from repo-owned sources only: loads
// the bundled plugin manifest registry (with HOME/state redirected to temp
// dirs so no user config leaks into the baseline), imports each bundled
// channel plugin, and feeds both plugin and channel metadata into
// buildConfigSchema.
async function loadBundledConfigSchemaResponse(): Promise<ConfigSchemaResponse> {
const repoRoot = resolveRepoRoot();
// Isolate the registry from the developer's real environment so baseline
// generation is reproducible across machines.
const env = {
...process.env,
HOME: os.tmpdir(),
OPENCLAW_STATE_DIR: path.join(os.tmpdir(), "openclaw-config-doc-baseline-state"),
OPENCLAW_BUNDLED_PLUGINS_DIR: path.join(repoRoot, "extensions"),
};

const manifestRegistry = loadPluginManifestRegistry({
cache: false,
env,
config: {},
});
// Import every bundled plugin that declares at least one channel, in parallel.
const channelPlugins = await Promise.all(
manifestRegistry.plugins
.filter((plugin) => plugin.origin === "bundled" && plugin.channels.length > 0)
.map(async (plugin) => ({
id: plugin.id,
channel: await importChannelPluginModule(plugin.rootDir),
})),
);

return buildConfigSchema({
// Plugin config surface: all bundled plugins, channel-bearing or not.
plugins: manifestRegistry.plugins
.filter((plugin) => plugin.origin === "bundled")
.map((plugin) => ({
id: plugin.id,
name: plugin.name,
description: plugin.description,
configUiHints: plugin.configUiHints,
configSchema: plugin.configSchema,
})),
// Channel config surface comes from the imported plugin objects themselves.
channels: channelPlugins.map((entry) => ({
id: entry.channel.id,
label: entry.channel.meta.label,
description: entry.channel.meta.blurb,
configSchema: entry.channel.configSchema?.schema,
configUiHints: entry.channel.configSchema?.uiHints,
})),
});
}
|
||||
|
||||
// Recursively walks a JSON-schema tree and appends one baseline entry per
// config path into `entries` (which is also returned). Coverage: named
// properties (sorted for determinism), object-valued additionalProperties
// (as a `.*` wildcard), array/tuple items (also `.*`), and all oneOf/anyOf/
// allOf branches, which are walked at the SAME path so duplicates can later
// be merged by dedupeConfigDocBaselineEntries.
export function collectConfigDocBaselineEntries(
schema: JsonSchemaObject,
uiHints: ConfigSchemaResponse["uiHints"],
pathPrefix = "",
required = false,
entries: ConfigDocBaselineEntry[] = [],
): ConfigDocBaselineEntry[] {
const normalizedPath = normalizeBaselinePath(pathPrefix);
// The schema root itself ("" path) gets no entry — only named paths do.
if (normalizedPath) {
const hint = resolveUiHintMatch(uiHints, normalizedPath);
entries.push({
path: normalizedPath,
kind: resolveEntryKind(normalizedPath),
type: normalizeTypeValue(schema.type),
required,
enumValues: normalizeEnumValues(schema.enum),
defaultValue: normalizeJsonValue(schema.default),
deprecated: schema.deprecated === true,
sensitive: hint?.sensitive === true,
// Copy before sorting so the hint's own tag array is never mutated.
tags: [...(hint?.tags ?? [])].toSorted((left, right) => left.localeCompare(right)),
label: hint?.label,
help: hint?.help,
hasChildren: schemaHasChildren(schema),
});
}

const requiredKeys = new Set(schema.required ?? []);
// Named properties, visited in sorted key order for deterministic output.
for (const key of Object.keys(schema.properties ?? {}).toSorted((left, right) =>
left.localeCompare(right),
)) {
const child = asSchemaObject(schema.properties?.[key]);
if (!child) {
continue;
}
const childPath = normalizedPath ? `${normalizedPath}.${key}` : key;
collectConfigDocBaselineEntries(child, uiHints, childPath, requiredKeys.has(key), entries);
}

// Record-style maps: additionalProperties schema becomes a `.*` wildcard child.
if (schema.additionalProperties && typeof schema.additionalProperties === "object") {
const wildcard = asSchemaObject(schema.additionalProperties);
if (wildcard) {
const wildcardPath = normalizedPath ? `${normalizedPath}.*` : "*";
collectConfigDocBaselineEntries(wildcard, uiHints, wildcardPath, false, entries);
}
}

if (Array.isArray(schema.items)) {
// Tuple form: every item schema contributes to the same `.*` path; the
// resulting duplicates are merged later by dedupe.
for (const item of schema.items) {
const child = asSchemaObject(item);
if (!child) {
continue;
}
const itemPath = normalizedPath ? `${normalizedPath}.*` : "*";
collectConfigDocBaselineEntries(child, uiHints, itemPath, false, entries);
}
} else if (schema.items && typeof schema.items === "object") {
// Homogeneous array form.
const itemSchema = asSchemaObject(schema.items);
if (itemSchema) {
const itemPath = normalizedPath ? `${normalizedPath}.*` : "*";
collectConfigDocBaselineEntries(itemSchema, uiHints, itemPath, false, entries);
}
}

// Union branches are walked at the current path (not a child path) so their
// keys surface as siblings and same-path entries get merged downstream.
for (const branchSchema of [schema.oneOf, schema.anyOf, schema.allOf]) {
for (const branch of branchSchema ?? []) {
const child = asSchemaObject(branch);
if (!child) {
continue;
}
collectConfigDocBaselineEntries(child, uiHints, normalizedPath, required, entries);
}
}

return entries;
}
|
||||
|
||||
export function dedupeConfigDocBaselineEntries(
|
||||
entries: ConfigDocBaselineEntry[],
|
||||
): ConfigDocBaselineEntry[] {
|
||||
const byPath = new Map<string, ConfigDocBaselineEntry>();
|
||||
for (const entry of entries) {
|
||||
const current = byPath.get(entry.path);
|
||||
byPath.set(entry.path, current ? mergeConfigDocBaselineEntry(current, entry) : entry);
|
||||
}
|
||||
return [...byPath.values()].toSorted((left, right) => left.path.localeCompare(right.path));
|
||||
}
|
||||
|
||||
// Builds the in-memory baseline: loads the bundled config schema, walks it
// into entries, and dedupes/sorts them. Throws if the schema root is not an
// object (nothing sensible could be walked).
export async function buildConfigDocBaseline(): Promise<ConfigDocBaseline> {
const response = await loadBundledConfigSchemaResponse();
const schemaRoot = asSchemaObject(response.schema);
if (!schemaRoot) {
throw new Error("config schema root is not an object");
}
const entries = dedupeConfigDocBaselineEntries(
collectConfigDocBaselineEntries(schemaRoot, response.uiHints),
);
return {
generatedBy: GENERATED_BY,
entries,
};
}
|
||||
|
||||
// Renders both artifact bodies from a baseline (building one if not given):
// `json` is the pretty-printed full document, `jsonl` is one "meta" record
// (generator + path count) followed by one "path" record per entry. Both end
// with a trailing newline.
export async function renderConfigDocBaselineStatefile(
baseline?: ConfigDocBaseline,
): Promise<ConfigDocBaselineStatefileRender> {
const resolvedBaseline = baseline ?? (await buildConfigDocBaseline());
const json = `${JSON.stringify(resolvedBaseline, null, 2)}\n`;
const metadataLine = JSON.stringify({
generatedBy: GENERATED_BY,
recordType: "meta",
totalPaths: resolvedBaseline.entries.length,
});
// Each entry becomes a single JSONL record, tagged so readers can filter.
const entryLines = resolvedBaseline.entries.map((entry) =>
JSON.stringify({
recordType: "path",
...entry,
}),
);
return {
json,
jsonl: `${[metadataLine, ...entryLines].join("\n")}\n`,
baseline: resolvedBaseline,
};
}
|
||||
|
||||
async function readIfExists(filePath: string): Promise<string | null> {
|
||||
try {
|
||||
return await fs.readFile(filePath, "utf8");
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
async function writeIfChanged(filePath: string, next: string): Promise<boolean> {
|
||||
const current = await readIfExists(filePath);
|
||||
if (current === next) {
|
||||
return false;
|
||||
}
|
||||
await fs.mkdir(path.dirname(filePath), { recursive: true });
|
||||
await fs.writeFile(filePath, next, "utf8");
|
||||
return true;
|
||||
}
|
||||
|
||||
// Renders the baseline artifacts and either reports drift (`check: true`,
// never touches disk) or writes both files when they changed. `changed` always
// reflects disk-vs-render comparison; `wrote` is true only if at least one
// file was actually rewritten (always false in check mode).
export async function writeConfigDocBaselineStatefile(params?: {
repoRoot?: string;
check?: boolean;
jsonPath?: string;
statefilePath?: string;
}): Promise<ConfigDocBaselineStatefileWriteResult> {
const repoRoot = params?.repoRoot ?? resolveRepoRoot();
// Custom paths are resolved relative to the repo root.
const jsonPath = path.resolve(repoRoot, params?.jsonPath ?? DEFAULT_JSON_OUTPUT);
const statefilePath = path.resolve(repoRoot, params?.statefilePath ?? DEFAULT_STATEFILE_OUTPUT);
const rendered = await renderConfigDocBaselineStatefile();
const currentJson = await readIfExists(jsonPath);
const currentStatefile = await readIfExists(statefilePath);
// Drift if either artifact is missing or differs from the fresh render.
const changed = currentJson !== rendered.json || currentStatefile !== rendered.jsonl;

if (params?.check) {
return {
changed,
wrote: false,
jsonPath,
statefilePath,
};
}

const wroteJson = await writeIfChanged(jsonPath, rendered.json);
const wroteStatefile = await writeIfChanged(statefilePath, rendered.jsonl);
return {
changed,
wrote: wroteJson || wroteStatefile,
jsonPath,
statefilePath,
};
}
|
||||
|
||||
// Public alias for the internal path canonicalizer, so external callers
// (tests, doc tooling) can map help-style paths like `a.list[].b` onto the
// baseline's `a.list.*.b` form without exporting the internal helper.
export function normalizeConfigDocBaselineHelpPath(pathValue: string): string {
return normalizeBaselinePath(pathValue);
}
|
||||
|
||||
// Returns FIELD_HELP re-keyed with normalized (wildcard-form) paths, sorted by
// key. Note: if two raw keys normalize to the same path, the later one wins
// via Object.fromEntries.
export function getNormalizedFieldHelp(): Record<string, string> {
return Object.fromEntries(
Object.entries(FIELD_HELP)
.map(([configPath, help]) => [normalizeBaselinePath(configPath), help] as const)
.toSorted(([left], [right]) => left.localeCompare(right)),
);
}
|
||||
|
|
@ -2,6 +2,7 @@ import fs from "node:fs";
|
|||
import path from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { normalizeConfigDocBaselineHelpPath } from "./doc-baseline.js";
|
||||
import { FIELD_HELP } from "./schema.help.js";
|
||||
import {
|
||||
describeTalkSilenceTimeoutDefaults,
|
||||
|
|
@ -17,8 +18,18 @@ function readRepoFile(relativePath: string): string {
|
|||
describe("talk silence timeout defaults", () => {
|
||||
it("keeps help text and docs aligned with the policy", () => {
|
||||
const defaultsDescription = describeTalkSilenceTimeoutDefaults();
|
||||
const baselineLines = readRepoFile("docs/.generated/config-baseline.jsonl")
|
||||
.trim()
|
||||
.split("\n")
|
||||
.map((line) => JSON.parse(line) as { recordType: string; path?: string; help?: string });
|
||||
const talkEntry = baselineLines.find(
|
||||
(entry) =>
|
||||
entry.recordType === "path" &&
|
||||
entry.path === normalizeConfigDocBaselineHelpPath("talk.silenceTimeoutMs"),
|
||||
);
|
||||
|
||||
expect(FIELD_HELP["talk.silenceTimeoutMs"]).toContain(defaultsDescription);
|
||||
expect(talkEntry?.help).toContain(defaultsDescription);
|
||||
expect(readRepoFile("docs/gateway/configuration-reference.md")).toContain(defaultsDescription);
|
||||
expect(readRepoFile("docs/nodes/talk.md")).toContain(defaultsDescription);
|
||||
});
|
||||
|
|
|
|||
Loading…
Reference in New Issue