Skip to content
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
53 changes: 48 additions & 5 deletions scripts/verify.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,44 @@ function readJson(rel) {
return JSON.parse(fs.readFileSync(p, "utf8"));
}

/**
 * Recursively collect absolute paths of all regular files under a directory.
 *
 * Entries that are neither regular files nor directories (e.g. symlinks, which
 * report false for both Dirent.isFile() and Dirent.isDirectory() when
 * withFileTypes is used) are skipped rather than followed.
 *
 * @param {string} absDir - Absolute path of the directory to walk.
 * @param {Set<string>} [ignoreNames] - Optional basenames to skip entirely at
 *   any depth (e.g. new Set([".git", "node_modules"])). Defaults to an empty
 *   set, which preserves the original walk-everything behavior.
 * @returns {string[]} Absolute file paths, in fs.readdirSync order (unsorted).
 */
function listFilesRecursive(absDir, ignoreNames = new Set()) {
  const out = [];
  for (const entry of fs.readdirSync(absDir, { withFileTypes: true })) {
    if (ignoreNames.has(entry.name)) {
      continue; // pruned subtree / file — never descended into or listed
    }
    const abs = path.join(absDir, entry.name);
    if (entry.isDirectory()) {
      out.push(...listFilesRecursive(abs, ignoreNames));
    } else if (entry.isFile()) {
      out.push(abs);
    }
  }
  return out;
}

/**
 * Fail-fast assertion for the verify script.
 *
 * Does nothing when `condition` is truthy; otherwise prints `message` to
 * stderr and terminates the process with `exitCode`.
 *
 * @param {*} condition - Value tested for truthiness.
 * @param {string} message - Error text written to stderr on failure.
 * @param {number} exitCode - Process exit status used on failure.
 */
function assert(condition, message, exitCode) {
  if (condition) {
    return; // assertion holds — nothing to report
  }
  console.error(message);
  process.exit(exitCode);
}

/**
 * Structural sanity checks for a parsed JSON Schema document.
 *
 * Verifies (in order): the schema is a plain JSON object, declares a
 * non-empty `$schema` string, has top-level `type: "object"`, carries a
 * `properties` object, and — when `required` is present — that it is an
 * array of unique, non-empty strings each of which names an entry in
 * `properties`. Any failed check prints an error and exits with code 5
 * via the script-level assert helper.
 *
 * @param {string} rel - Repo-relative path of the schema file (for messages).
 * @param {*} schema - The parsed JSON content to validate.
 */
function validateSchema(rel, schema) {
  const fail = 5; // single exit code shared by all schema-validation failures
  const isPlainObject = (v) => Boolean(v) && typeof v === "object" && !Array.isArray(v);

  assert(isPlainObject(schema), `${rel}: schema must be a JSON object`, fail);
  assert(typeof schema.$schema === "string" && schema.$schema.length > 0, `${rel}: missing $schema`, fail);
  assert(schema.type === "object", `${rel}: top-level type must be object`, fail);
  assert(isPlainObject(schema.properties), `${rel}: missing properties object`, fail);

  if (schema.required === undefined) {
    return; // `required` is optional; nothing more to check
  }
  assert(Array.isArray(schema.required), `${rel}: required must be an array`, fail);
  const seenKeys = new Set();
  for (const key of schema.required) {
    assert(typeof key === "string" && key.length > 0, `${rel}: required entries must be non-empty strings`, fail);
    assert(!seenKeys.has(key), `${rel}: duplicate required entry '${key}'`, fail);
    seenKeys.add(key);
    assert(Object.hasOwn(schema.properties, key), `${rel}: required key '${key}' missing from properties`, fail);
  }
}

// Core files
// NOTE(review): requireFile (defined earlier in this file, outside this view)
// presumably fails the verify run when the repo-relative path is absent —
// confirm against its definition.
requireFile("README.md");
requireFile("mechanisms/m010-reputation-signal/SPEC.md");
requireFile("mechanisms/m010-reputation-signal/datasets/fixtures/v0_sample.json");
// Mechanism index check
// NOTE(review): run (defined earlier in this file, outside this view) is
// presumably expected to propagate a non-zero exit from the child process —
// confirm against its definition.
run("node", ["scripts/build-mechanism-index.mjs", "--check"]);

// Schema sanity for all canonical schema artifacts.
// Replaces the previous single hard-coded KPI-schema check: discover every
// *.schema.json under the repo root and run the structural validator on each.
// Sorting keeps failure output deterministic across filesystems.
const allFiles = listFilesRecursive(repoRoot);
const schemaFiles = allFiles
  .map((abs) => path.relative(repoRoot, abs))
  .filter((rel) => rel.endsWith(".schema.json"))
  .sort();

// An empty result means the discovery itself is broken — fail loudly (code 4).
assert(schemaFiles.length > 0, "No .schema.json files found.", 4);
for (const rel of schemaFiles) {
  validateSchema(rel, readJson(rel));
}

console.log("agentic-tokenomics verify: PASS");