Initial commit with translated description

This commit is contained in:
2026-03-29 08:33:08 +08:00
commit a9c97c9562
98 changed files with 23152 additions and 0 deletions

63
scripts/a2a_export.js Normal file
View File

@@ -0,0 +1,63 @@
const { loadGenes, loadCapsules, readAllEvents } = require('../src/gep/assetStore');
const { exportEligibleCapsules, exportEligibleGenes, isAllowedA2AAsset } = require('../src/gep/a2a');
const { buildPublish, buildHello, getTransport } = require('../src/gep/a2aProtocol');
const { computeAssetId, SCHEMA_VERSION } = require('../src/gep/contentHash');
// CLI: export A2A-eligible assets (capsules, genes, and optionally events)
// to stdout as JSONL, pretty JSON (--json), or protocol messages (--protocol).
// --hello emits a hello handshake, --persist also sends via the transport,
// --include-events opts evolution events into the export set.
function main() {
  const args = process.argv.slice(2);
  const asJson = args.includes('--json');
  const asProtocol = args.includes('--protocol');
  const withHello = args.includes('--hello');
  const persist = args.includes('--persist');
  const includeEvents = args.includes('--include-events');
  const capsules = loadCapsules();
  const genes = loadGenes();
  const events = readAllEvents();
  // Eligible set = filtered capsules + filtered genes (+ events when opted in).
  let eligible = exportEligibleCapsules({ capsules: capsules, events: events })
    .concat(exportEligibleGenes({ genes: genes }));
  if (includeEvents) {
    const eligibleEvents = (Array.isArray(events) ? events : []).filter(function (e) {
      return isAllowedA2AAsset(e) && e.type === 'EvolutionEvent';
    });
    // Stamp missing schema/id metadata in place before export.
    for (const ev of eligibleEvents) {
      if (!ev.schema_version) ev.schema_version = SCHEMA_VERSION;
      if (!ev.asset_id) { try { ev.asset_id = computeAssetId(ev); } catch (e) {} }
    }
    eligible = eligible.concat(eligibleEvents);
  }
  if (withHello || asProtocol) {
    const hello = buildHello({ geneCount: genes.length, capsuleCount: capsules.length });
    process.stdout.write(JSON.stringify(hello) + '\n');
    if (persist) { try { getTransport().send(hello); } catch (e) {} }
  }
  if (asProtocol) {
    for (const asset of eligible) {
      const msg = buildPublish({ asset: asset });
      process.stdout.write(JSON.stringify(msg) + '\n');
      if (persist) { try { getTransport().send(msg); } catch (e) {} }
    }
    return;
  }
  if (asJson) {
    process.stdout.write(JSON.stringify(eligible, null, 2) + '\n');
    return;
  }
  // Default output: one JSON object per line.
  for (const asset of eligible) {
    process.stdout.write(JSON.stringify(asset) + '\n');
  }
}
// Entry point: report a readable message and exit non-zero on failure.
try { main(); } catch (err) {
  process.stderr.write((err && err.message ? err.message : String(err)) + '\n');
  process.exit(1);
}

79
scripts/a2a_ingest.js Normal file
View File

@@ -0,0 +1,79 @@
var fs = require('fs');
var assetStore = require('../src/gep/assetStore');
var a2a = require('../src/gep/a2a');
var memGraph = require('../src/gep/memoryGraphAdapter');
var contentHash = require('../src/gep/contentHash');
var a2aProto = require('../src/gep/a2aProtocol');
// Read all of stdin (fd 0) as UTF-8; an unreadable stream degrades to ''.
function readStdin() {
  let text = '';
  try {
    text = fs.readFileSync(0, 'utf8');
  } catch (e) {
    text = '';
  }
  return text;
}
// Parse A2A_SIGNALS into a string array. Accepts either a JSON array
// (e.g. '["a","b"]') or a comma-separated list; blanks are dropped.
function parseSignalsFromEnv() {
  const raw = process.env.A2A_SIGNALS || '';
  if (!raw) return [];
  try {
    const parsed = JSON.parse(raw);
    if (Array.isArray(parsed)) return parsed.map(String).filter(Boolean);
  } catch (e) {
    // Not JSON — fall through to the comma-separated form.
  }
  return String(raw)
    .split(',')
    .map((s) => s.trim())
    .filter(Boolean);
}
// CLI: ingest external A2A assets (from a file argument or stdin), verify
// content hashes, lower confidence, and stage accepted items as external
// candidates. Prints 'accepted=N rejected=M'.
function main() {
  const args = process.argv.slice(2);
  // First non-flag argument is an optional input file; otherwise read stdin.
  let inputPath = '';
  for (const arg of args) {
    if (arg && !arg.startsWith('--')) { inputPath = arg; break; }
  }
  const source = process.env.A2A_SOURCE || 'external';
  const factorRaw = Number(process.env.A2A_EXTERNAL_CONFIDENCE_FACTOR);
  const factor = Number.isFinite(factorRaw) ? factorRaw : 0.6;
  const text = inputPath ? a2a.readTextIfExists(inputPath) : readStdin();
  const parsed = a2a.parseA2AInput(text);
  const signals = parseSignalsFromEnv();
  const emitDecisions = process.env.A2A_EMIT_DECISIONS === 'true';
  let accepted = 0;
  let rejected = 0;
  for (const obj of parsed) {
    if (!a2a.isAllowedA2AAsset(obj)) continue;
    // Reject assets whose declared asset_id does not match their content.
    if (obj.asset_id && typeof obj.asset_id === 'string' && !contentHash.verifyAssetId(obj)) {
      rejected += 1;
      if (emitDecisions) {
        try {
          const decision = a2aProto.buildDecision({ assetId: obj.asset_id, localId: obj.id, decision: 'reject', reason: 'asset_id integrity check failed' });
          a2aProto.getTransport().send(decision);
        } catch (e) {}
      }
      continue;
    }
    const staged = a2a.lowerConfidence(obj, { source: source, factor: factor });
    if (!staged) continue;
    assetStore.appendExternalCandidateJsonl(staged);
    // Memory-graph recording is best-effort.
    try { memGraph.recordExternalCandidate({ asset: staged, source: source, signals: signals }); } catch (e) {}
    if (emitDecisions) {
      try {
        const decision = a2aProto.buildDecision({ assetId: staged.asset_id, localId: staged.id, decision: 'quarantine', reason: 'staged as external candidate' });
        a2aProto.getTransport().send(decision);
      } catch (e) {}
    }
    accepted += 1;
  }
  process.stdout.write('accepted=' + accepted + ' rejected=' + rejected + '\n');
}
// Entry point: report a readable message and exit non-zero on failure.
try { main(); } catch (err) {
  process.stderr.write((err && err.message ? err.message : String(err)) + '\n');
  process.exit(1);
}

118
scripts/a2a_promote.js Normal file
View File

@@ -0,0 +1,118 @@
var assetStore = require('../src/gep/assetStore');
var solidifyMod = require('../src/gep/solidify');
var contentHash = require('../src/gep/contentHash');
var a2aProto = require('../src/gep/a2aProtocol');
// Tiny argv parser. '--k=v' and '--k v' populate kv; a bare '--k' (with no
// following value) becomes a flag; anything else is a positional.
function parseArgs(argv) {
  const out = { flags: new Set(), kv: new Map(), positionals: [] };
  let i = 0;
  while (i < argv.length) {
    const token = argv[i];
    i += 1;
    if (!token) continue;
    if (!token.startsWith('--')) {
      out.positionals.push(token);
      continue;
    }
    const eq = token.indexOf('=');
    if (eq > -1) {
      out.kv.set(token.slice(2, eq), token.slice(eq + 1));
      continue;
    }
    const key = token.slice(2);
    const next = argv[i];
    if (next && !String(next).startsWith('--')) {
      // Greedy: the next non-flag token is consumed as this key's value.
      out.kv.set(key, next);
      i += 1;
    } else {
      out.flags.add(key);
    }
  }
  return out;
}
// CLI: promote a staged external A2A asset (capsule, gene, or event) from the
// external candidate zone into the local asset store. --validated is a
// mandatory operator attestation that local verification was performed.
function main() {
  var args = parseArgs(process.argv.slice(2));
  var id = String(args.kv.get('id') || '').trim();
  var typeRaw = String(args.kv.get('type') || '').trim().toLowerCase();
  // Accept --validated as a bare flag or as --validated=true / --validated true.
  var validated = args.flags.has('validated') || String(args.kv.get('validated') || '') === 'true';
  // How many recent external candidates to scan when looking up the asset.
  var limit = Number.isFinite(Number(args.kv.get('limit'))) ? Number(args.kv.get('limit')) : 500;
  if (!id || !typeRaw) throw new Error('Usage: node scripts/a2a_promote.js --type capsule|gene|event --id <id> --validated');
  if (!validated) throw new Error('Refusing to promote without --validated (local verification must be done first).');
  var type = typeRaw === 'capsule' ? 'Capsule' : typeRaw === 'gene' ? 'Gene' : typeRaw === 'event' ? 'EvolutionEvent' : '';
  if (!type) throw new Error('Invalid --type. Use capsule, gene, or event.');
  var external = assetStore.readRecentExternalCandidates(limit);
  var candidate = null;
  for (var i = 0; i < external.length; i++) {
    if (external[i] && external[i].type === type && String(external[i].id) === id) { candidate = external[i]; break; }
  }
  if (!candidate) throw new Error('Candidate not found in external zone: type=' + type + ' id=' + id);
  // Genes carry executable validation commands — audit each one before promotion.
  if (type === 'Gene') {
    var validation = Array.isArray(candidate.validation) ? candidate.validation : [];
    for (var j = 0; j < validation.length; j++) {
      var c = String(validation[j] || '').trim();
      if (!c) continue;
      if (!solidifyMod.isValidationCommandAllowed(c)) {
        throw new Error('Refusing to promote Gene ' + id + ': validation command rejected by safety check: "' + c + '". Only node/npm/npx commands without shell operators are allowed.');
      }
    }
  }
  // Deep-copy so the staged candidate record is never mutated in place.
  var promoted = JSON.parse(JSON.stringify(candidate));
  if (!promoted.a2a || typeof promoted.a2a !== 'object') promoted.a2a = {};
  promoted.a2a.status = 'promoted';
  promoted.a2a.promoted_at = new Date().toISOString();
  if (!promoted.schema_version) promoted.schema_version = contentHash.SCHEMA_VERSION;
  // Recompute the content hash after the a2a metadata mutation above.
  promoted.asset_id = contentHash.computeAssetId(promoted);
  var emitDecisions = process.env.A2A_EMIT_DECISIONS === 'true';
  if (type === 'EvolutionEvent') {
    assetStore.appendEventJsonl(promoted);
    if (emitDecisions) {
      try {
        var dmEv = a2aProto.buildDecision({ assetId: promoted.asset_id, localId: id, decision: 'accept', reason: 'event promoted for provenance tracking' });
        a2aProto.getTransport().send(dmEv);
      } catch (e) {}
    }
    process.stdout.write('promoted_event=' + id + '\n');
    return;
  }
  if (type === 'Capsule') {
    assetStore.appendCapsule(promoted);
    if (emitDecisions) {
      try {
        var dm = a2aProto.buildDecision({ assetId: promoted.asset_id, localId: id, decision: 'accept', reason: 'capsule promoted after validation' });
        a2aProto.getTransport().send(dm);
      } catch (e) {}
    }
    process.stdout.write('promoted_capsule=' + id + '\n');
    return;
  }
  // Remaining case is Gene. ID conflicts keep the local gene (keep-local policy).
  var localGenes = assetStore.loadGenes();
  var exists = false;
  for (var k = 0; k < localGenes.length; k++) {
    if (localGenes[k] && localGenes[k].type === 'Gene' && String(localGenes[k].id) === id) { exists = true; break; }
  }
  if (exists) {
    if (emitDecisions) {
      try {
        var dm2 = a2aProto.buildDecision({ assetId: promoted.asset_id, localId: id, decision: 'reject', reason: 'local gene with same ID already exists' });
        a2aProto.getTransport().send(dm2);
      } catch (e) {}
    }
    process.stdout.write('conflict_keep_local_gene=' + id + '\n');
    return;
  }
  assetStore.upsertGene(promoted);
  if (emitDecisions) {
    try {
      var dm3 = a2aProto.buildDecision({ assetId: promoted.asset_id, localId: id, decision: 'accept', reason: 'gene promoted after safety audit' });
      a2aProto.getTransport().send(dm3);
    } catch (e) {}
  }
  process.stdout.write('promoted_gene=' + id + '\n');
}
// Entry point: print a readable error and signal failure to the shell.
try { main(); } catch (e) {
  process.stderr.write((e && e.message ? e.message : String(e)) + '\n');
  process.exit(1);
}

121
scripts/analyze_by_skill.js Normal file
View File

@@ -0,0 +1,121 @@
const fs = require('fs');
const path = require('path');
const REPO_ROOT = path.resolve(__dirname, '..');
const LOG_FILE = path.join(REPO_ROOT, 'evolution_history_full.md');
const OUT_FILE = path.join(REPO_ROOT, 'evolution_detailed_report.md');
// Build evolution_detailed_report.md by scanning evolution_history_full.md
// for "interesting" entries (changes, not routine scans) and grouping them
// under the skill they touched. Heuristic text mining — see NOTE comments.
function analyzeEvolution() {
  if (!fs.existsSync(LOG_FILE)) {
    console.error("Source file missing.");
    return;
  }
  const content = fs.readFileSync(LOG_FILE, 'utf8');
  // Split by divider
  const entries = content.split('---').map(e => e.trim()).filter(e => e.length > 0);
  const skillUpdates = {}; // Map<SkillName, Array<Changes>>
  const generalUpdates = []; // Array<Changes> — NOTE(review): never populated or read below.
  // Regex to detect skills/paths
  // e.g. `skills/feishu-card/send.js` or **Target**: `skills/git-sync`
  const skillRegex = /skills\/([a-zA-Z0-9\-_]+)/;
  const actionRegex = /Action:\s*([\s\S]*?)(?=\n\n|\n[A-Z]|$)/i; // Capture Action text
  const statusRegex = /Status:\s*\[?([A-Z\s_]+)\]?/i;
  entries.forEach(entry => {
    // Extract basic info
    const statusMatch = entry.match(statusRegex);
    const status = statusMatch ? statusMatch[1].trim().toUpperCase() : 'UNKNOWN';
    // Skip routine checks if we want a *detailed evolution* report (focus on changes)
    // But user asked for "what happened", so routine scans might be boring unless they found something.
    // Let's filter out "STABILITY" or "RUNNING" unless there is a clear "Mutated" or "Fixed" keyword.
    const isInteresting =
      entry.includes('Fixed') ||
      entry.includes('Hardened') ||
      entry.includes('Optimized') ||
      entry.includes('Patched') ||
      entry.includes('Created') ||
      entry.includes('Added') ||
      status === 'SUCCESS' ||
      status === 'COMPLETED';
    if (!isInteresting) return;
    // Find associated skill
    const skillMatch = entry.match(skillRegex);
    let skillName = 'General / System';
    if (skillMatch) {
      skillName = skillMatch[1];
    } else {
      // Try heuristics
      if (entry.toLowerCase().includes('feishu card')) skillName = 'feishu-card';
      else if (entry.toLowerCase().includes('git sync')) skillName = 'git-sync';
      else if (entry.toLowerCase().includes('logger')) skillName = 'interaction-logger';
      else if (entry.toLowerCase().includes('evolve')) skillName = 'capability-evolver';
    }
    // Extract description
    let description = "";
    const actionMatch = entry.match(actionRegex);
    if (actionMatch) {
      description = actionMatch[1].trim();
    } else {
      // Fallback: take lines that look like bullet points or text after header
      // NOTE(review): the \w alternative matches any line containing a word
      // character anywhere, so this filter passes almost every line.
      const lines = entry.split('\n');
      description = lines.filter(l => l.match(/^[•\-\*]|\w/)).slice(1).join('\n').trim();
    }
    // Clean up description (remove duplicate "Action:" prefix if captured)
    description = description.replace(/^Action:\s*/i, '');
    if (!skillUpdates[skillName]) skillUpdates[skillName] = [];
    // Dedup descriptions slightly (simple check)
    // NOTE(review): an empty description yields substring '' which every
    // string includes, so later empty descriptions are always treated as dups.
    const isDuplicate = skillUpdates[skillName].some(u => u.desc.includes(description.substring(0, 20)));
    if (!isDuplicate) {
      // Extract Date if possible
      const dateMatch = entry.match(/\((\d{4}\/\d{1,2}\/\d{1,2}.*?)\)/);
      const date = dateMatch ? dateMatch[1] : 'Unknown';
      skillUpdates[skillName].push({
        date,
        status,
        desc: description
      });
    }
  });
  // Generate Markdown
  let md = "# Detailed Evolution Report (By Skill)\n\n> Comprehensive breakdown of system changes.\n\n";
  // Sort skills alphabetically
  const sortedSkills = Object.keys(skillUpdates).sort();
  sortedSkills.forEach(skill => {
    md += `## ${skill}\n`;
    const updates = skillUpdates[skill];
    updates.forEach(u => {
      // Icon based on content — later matches override earlier ones.
      let icon = '*';
      const lowerDesc = u.desc.toLowerCase();
      if (lowerDesc.includes('optimiz')) icon = '[optimize]';
      if (lowerDesc.includes('secur') || lowerDesc.includes('harden') || lowerDesc.includes('permission')) icon = '[security]';
      if (lowerDesc.includes('fix') || lowerDesc.includes('patch')) icon = '[repair]';
      if (lowerDesc.includes('creat') || lowerDesc.includes('add')) icon = '[add]';
      md += `### ${icon} ${u.date}\n`;
      md += `${u.desc}\n\n`;
    });
    md += `---\n`;
  });
  fs.writeFileSync(OUT_FILE, md);
  console.log(`Generated report for ${sortedSkills.length} skills.`);
}
analyzeEvolution();

355
scripts/build_public.js Normal file
View File

@@ -0,0 +1,355 @@
const fs = require('fs');
const path = require('path');
const { execSync } = require('child_process');
const REPO_ROOT = path.resolve(__dirname, '..');
// Read and parse a JSON file; throws on missing file or malformed JSON.
function readJson(filePath) {
  const raw = fs.readFileSync(filePath, 'utf8');
  return JSON.parse(raw);
}
// Create dir (and parents) if it does not already exist.
function ensureDir(dir) {
  if (fs.existsSync(dir)) return;
  fs.mkdirSync(dir, { recursive: true });
}
// Recursively delete dir; silently does nothing if it is absent.
function rmDir(dir) {
  if (fs.existsSync(dir)) {
    fs.rmSync(dir, { recursive: true, force: true });
  }
}
// Rewrite platform path separators to forward slashes.
function normalizePosix(p) {
  return p.replaceAll(path.sep, '/');
}
// True when child lies strictly inside parent (equal paths return false).
function isUnder(child, parent) {
  const rel = path.relative(parent, child);
  if (!rel) return false;
  return !rel.startsWith('..') && !path.isAbsolute(rel);
}
// Depth-first listing of all regular files under dir (absolute paths).
// Symlinks and other non-file, non-directory entries are skipped.
function listFilesRec(dir) {
  const files = [];
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    const full = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      files.push(...listFilesRec(full));
    } else if (entry.isFile()) {
      files.push(full);
    }
  }
  return files;
}
// Convert a manifest glob into an anchored RegExp.
// Supports "*" within a single path segment and "**" for any number of
// segments — including zero, so "a/**/b" matches "a/b" as well as "a/x/b".
// (The previous compilation joined every segment with "\/", which forced
// "**" to consume at least one segment and silently missed "a/b".)
function globToRegex(glob) {
  // Normalize platform separators to "/" and drop empty segments.
  const parts = String(glob).split(path.sep).join('/').split('/').filter(p => p.length > 0);
  let re = '';
  for (let i = 0; i < parts.length; i++) {
    const part = parts[i];
    const isLast = i === parts.length - 1;
    if (part === '**') {
      // Trailing "**" swallows the rest of the path; an inner "**" optionally
      // matches any run of segments together with its following separator.
      re += isLast ? '.*' : '(?:.*\\/)?';
      continue;
    }
    // Escape regex special chars, then expand "*" wildcards within the segment.
    const esc = part.replace(/[.+^${}()|[\]\\]/g, '\\$&').replace(/\*/g, '[^/]*');
    re += esc + (isLast ? '' : '\\/');
  }
  return new RegExp(`^${re}$`);
}
// True when relPath matches at least one of the given globs.
function matchesAnyGlobs(relPath, globs) {
  const target = normalizePosix(relPath);
  return (globs || []).some((g) => globToRegex(g).test(target));
}
// Copy a single file, creating the destination directory tree first.
function copyFile(srcAbs, destAbs) {
  const destDir = path.dirname(destAbs);
  ensureDir(destDir);
  fs.copyFileSync(srcAbs, destAbs);
}
// Copy one manifest include entry (glob, file, or directory) from the repo
// root into outDirAbs, preserving relative layout. Returns the list of
// repo-relative (POSIX) paths that were copied.
function copyEntry(spec, outDirAbs) {
  const copied = [];
  // Wildcard specs are matched against every file in the repo.
  if (spec.includes('*')) {
    const includeRe = globToRegex(spec);
    for (const abs of listFilesRec(REPO_ROOT)) {
      const rel = normalizePosix(path.relative(REPO_ROOT, abs));
      if (!includeRe.test(rel)) continue;
      copyFile(abs, path.join(outDirAbs, rel));
      copied.push(rel);
    }
    return copied;
  }
  const srcAbs = path.join(REPO_ROOT, spec);
  if (!fs.existsSync(srcAbs)) return copied;
  const st = fs.statSync(srcAbs);
  if (st.isFile()) {
    const rel = normalizePosix(spec);
    copyFile(srcAbs, path.join(outDirAbs, rel));
    copied.push(rel);
  } else if (st.isDirectory()) {
    for (const abs of listFilesRec(srcAbs)) {
      const rel = normalizePosix(path.relative(REPO_ROOT, abs));
      copyFile(abs, path.join(outDirAbs, rel));
      copied.push(rel);
    }
  }
  return copied;
}
// Apply the manifest's rewrite rules: for each listed file under outDirAbs,
// perform literal global string replacements. Missing files are skipped.
function applyRewrite(outDirAbs, rewrite) {
  for (const [relFile, cfg] of Object.entries(rewrite || {})) {
    const target = path.join(outDirAbs, relFile);
    if (!fs.existsSync(target)) continue;
    let content = fs.readFileSync(target, 'utf8');
    for (const rule of (cfg && cfg.replace) || []) {
      const from = String(rule.from || '');
      if (!from) continue;
      const to = String(rule.to || '');
      // split/join performs a literal (non-regex) replace-all.
      content = content.split(from).join(to);
    }
    fs.writeFileSync(target, content, 'utf8');
  }
}
// Replace the scripts table of outDirAbs/package.json wholesale with the
// public entry points. Missing or malformed package.json is left untouched.
function rewritePackageJson(outDirAbs) {
  const pkgPath = path.join(outDirAbs, 'package.json');
  if (!fs.existsSync(pkgPath)) return;
  try {
    const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf8'));
    pkg.scripts = {
      start: 'node index.js',
      run: 'node index.js run',
      solidify: 'node index.js solidify',
      review: 'node index.js review',
      'a2a:export': 'node scripts/a2a_export.js',
      'a2a:ingest': 'node scripts/a2a_ingest.js',
      'a2a:promote': 'node scripts/a2a_promote.js',
    };
    fs.writeFileSync(pkgPath, JSON.stringify(pkg, null, 2) + '\n', 'utf8');
  } catch (e) {
    // ignore — invalid JSON in the output is not fatal to the build
  }
}
// Parse a strict "X.Y.Z" version string; returns null on any other shape.
function parseSemver(v) {
  const match = /^(\d+)\.(\d+)\.(\d+)$/.exec(String(v || '').trim());
  if (!match) return null;
  const [, major, minor, patch] = match;
  return { major: Number(major), minor: Number(minor), patch: Number(patch) };
}
// Render a parsed semver object back to "X.Y.Z".
function formatSemver(x) {
  return [x.major, x.minor, x.patch].join('.');
}
// Bump a base "X.Y.Z" version. Unknown bump kinds return the base version
// unchanged; an unparseable base returns null.
function bumpSemver(base, bump) {
  const v = parseSemver(base);
  if (!v) return null;
  switch (bump) {
    case 'major': return `${v.major + 1}.0.0`;
    case 'minor': return `${v.major}.${v.minor + 1}.0`;
    case 'patch': return `${v.major}.${v.minor}.${v.patch + 1}`;
    default: return formatSemver(v);
  }
}
// Run a git command in the repo root and return its trimmed stdout.
// stderr is discarded; a non-zero exit throws.
function git(cmd) {
  const out = execSync(cmd, { cwd: REPO_ROOT, encoding: 'utf8', stdio: ['ignore', 'pipe', 'ignore'] });
  return out.trim();
}
// Find the most recent release-prep commit hash, or null when none exists
// (callers then fall back to a bounded recent-history range).
function getBaseReleaseCommit() {
  try {
    const hash = git('git log -n 1 --pretty=%H --grep="chore(release): prepare v"');
    return hash ? hash : null;
  } catch (e) {
    return null;
  }
}
// List commit subjects since baseCommit (exclusive). Without a base commit,
// the 30 most recent subjects are returned. Errors yield an empty list.
function getCommitSubjectsSince(baseCommit) {
  const cmd = baseCommit
    ? `git log ${baseCommit}..HEAD --pretty=%s`
    : 'git log -n 30 --pretty=%s';
  try {
    const out = git(cmd);
    if (!out) return [];
    return out.split('\n').filter(Boolean);
  } catch (e) {
    return [];
  }
}
// Infer a SemVer bump from conventional-commit subjects:
// breaking marker -> major, feat -> minor, fix/perf -> patch,
// no commits -> none, anything else -> patch.
function inferBumpFromSubjects(subjects) {
  const list = (subjects || []).map((s) => String(s));
  if (list.some((s) => /\bBREAKING CHANGE\b/i.test(s) || /^[a-z]+(\(.+\))?!:/.test(s))) {
    return { bump: 'major', reason: 'breaking change marker in commit subject' };
  }
  if (list.some((s) => /^feat(\(.+\))?:/i.test(s))) {
    return { bump: 'minor', reason: 'feature commit detected (feat:)' };
  }
  if (list.some((s) => /^(fix|perf)(\(.+\))?:/i.test(s))) {
    return { bump: 'patch', reason: 'fix/perf commit detected' };
  }
  if (list.length === 0) {
    return { bump: 'none', reason: 'no commits since base release commit' };
  }
  return { bump: 'patch', reason: 'default to patch for non-breaking changes' };
}
// Produce a version suggestion: read the current package.json version,
// inspect commits since the last release-prep commit, and infer the bump.
function suggestVersion() {
  let baseVersion = null;
  try {
    const pkgRaw = fs.readFileSync(path.join(REPO_ROOT, 'package.json'), 'utf8');
    baseVersion = JSON.parse(pkgRaw).version;
  } catch (e) {
    // Missing/invalid package.json: suggestion proceeds without a base version.
  }
  const baseCommit = getBaseReleaseCommit();
  const subjects = getCommitSubjectsSince(baseCommit);
  const decision = inferBumpFromSubjects(subjects);
  const suggestedVersion = decision.bump === 'none'
    ? baseVersion
    : bumpSemver(baseVersion, decision.bump);
  return { baseVersion, baseCommit, subjects, decision, suggestedVersion };
}
// Persist the version suggestion to the private memory/ directory.
function writePrivateSemverNote(note) {
  const privateDir = path.join(REPO_ROOT, 'memory');
  ensureDir(privateDir);
  const body = JSON.stringify(note, null, 2) + '\n';
  fs.writeFileSync(path.join(privateDir, 'semver_suggestion.json'), body, 'utf8');
}
// Write memory/semver_prompt.md: an LLM-ready prompt asking for the next
// SemVer bump given the base version and recent commit subjects.
function writePrivateSemverPrompt(note) {
  const privateDir = path.join(REPO_ROOT, 'memory');
  ensureDir(privateDir);
  const subjects = Array.isArray(note.subjects) ? note.subjects : [];
  const lines = [
    'You are a release versioning assistant.',
    'Decide the next version bump using SemVer rules below.',
    '',
    'MAJOR.MINOR.PATCH',
    '- MAJOR: incompatible changes',
    '- MINOR: backward-compatible features',
    '- PATCH: backward-compatible bug fixes',
    '',
    `Base version: ${note.baseVersion || '(unknown)'}`,
    `Base commit: ${note.baseCommit || '(unknown)'}`,
    '',
    'Recent commit subjects (newest first):',
  ];
  for (const s of subjects) {
    lines.push(`- ${s}`);
  }
  lines.push('', 'Output JSON only:', '{ "bump": "major|minor|patch|none", "suggestedVersion": "x.y.z", "reason": ["..."] }');
  fs.writeFileSync(path.join(privateDir, 'semver_prompt.md'), lines.join('\n') + '\n', 'utf8');
}
// Stamp the given version into outDirAbs/package.json. A falsy version,
// a missing file, or invalid JSON leaves everything untouched.
function writeDistVersion(outDirAbs, version) {
  if (!version) return;
  const pkgPath = path.join(outDirAbs, 'package.json');
  if (!fs.existsSync(pkgPath)) return;
  try {
    const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf8'));
    pkg.version = version;
    fs.writeFileSync(pkgPath, JSON.stringify(pkg, null, 2) + '\n', 'utf8');
  } catch (e) {
    // invalid JSON: leave the file as-is
  }
}
// Delete every file under outDirAbs whose relative path matches an exclude glob.
function pruneExcluded(outDirAbs, excludeGlobs) {
  for (const abs of listFilesRec(outDirAbs)) {
    const rel = normalizePosix(path.relative(outDirAbs, abs));
    if (matchesAnyGlobs(rel, excludeGlobs)) {
      fs.rmSync(abs, { force: true });
    }
  }
}
// Safety net: the public build output must never contain docs/ or memory/.
// Throws on the first offending file.
function validateNoPrivatePaths(outDirAbs) {
  const forbiddenPrefixes = ['docs/', 'memory/'];
  for (const abs of listFilesRec(outDirAbs)) {
    const rel = normalizePosix(path.relative(outDirAbs, abs));
    const hit = forbiddenPrefixes.find((pref) => rel.startsWith(pref));
    if (hit) {
      throw new Error(`Build validation failed: forbidden path in output: ${rel}`);
    }
  }
}
// Orchestrate the public build: read public.manifest.json, copy the include
// set into a clean outDir, prune excludes, apply text rewrites, stamp the
// release version, and validate that no private paths leaked into the output.
function main() {
  const manifestPath = path.join(REPO_ROOT, 'public.manifest.json');
  const manifest = readJson(manifestPath);
  const outDir = String(manifest.outDir || 'dist-public');
  const outDirAbs = path.join(REPO_ROOT, outDir);
  // SemVer suggestion (private). This does not modify the source repo version.
  const semver = suggestVersion();
  writePrivateSemverNote(semver);
  writePrivateSemverPrompt(semver);
  // Always start from an empty output directory.
  rmDir(outDirAbs);
  ensureDir(outDirAbs);
  const include = manifest.include || [];
  const exclude = manifest.exclude || [];
  const copied = [];
  for (const spec of include) {
    copied.push(...copyEntry(spec, outDirAbs));
  }
  pruneExcluded(outDirAbs, exclude);
  applyRewrite(outDirAbs, manifest.rewrite);
  rewritePackageJson(outDirAbs);
  // Prefer explicit version; otherwise use suggested version.
  const releaseVersion = process.env.RELEASE_VERSION || semver.suggestedVersion;
  if (releaseVersion) writeDistVersion(outDirAbs, releaseVersion);
  validateNoPrivatePaths(outDirAbs);
  // Write build manifest for private verification (do not include in dist-public/).
  const buildInfo = {
    built_at: new Date().toISOString(),
    outDir,
    files: copied.sort(),
  };
  const privateDir = path.join(REPO_ROOT, 'memory');
  ensureDir(privateDir);
  fs.writeFileSync(path.join(privateDir, 'public_build_info.json'), JSON.stringify(buildInfo, null, 2) + '\n', 'utf8');
  process.stdout.write(`Built public output at ${outDir}\n`);
  if (semver && semver.suggestedVersion) {
    process.stdout.write(`Suggested version: ${semver.suggestedVersion}\n`);
    process.stdout.write(`SemVer decision: ${semver.decision ? semver.decision.bump : 'unknown'}\n`);
  }
}
// Entry point. Guard e.message against non-Error throws (e.g. strings),
// matching the error handlers used by the sibling scripts.
try {
  main();
} catch (e) {
  process.stderr.write(`${e && e.message ? e.message : String(e)}\n`);
  process.exit(1);
}

85
scripts/extract_log.js Normal file
View File

@@ -0,0 +1,85 @@
const fs = require('fs');
const path = require('path');
const REPO_ROOT = path.resolve(__dirname, '..');
const LOG_FILE = path.join(REPO_ROOT, 'memory', 'mad_dog_evolution.log');
const OUT_FILE = path.join(REPO_ROOT, 'evolution_history.md');
// Parse memory/mad_dog_evolution.log: extract the Feishu report commands,
// deduplicate by report title (last occurrence wins), sort by cycle time,
// and write evolution_history.md.
function parseLog() {
  if (!fs.existsSync(LOG_FILE)) {
    console.log("Log file not found.");
    return;
  }
  const content = fs.readFileSync(LOG_FILE, 'utf8');
  const lines = content.split('\n');
  const reports = [];
  let currentTimestamp = null;
  // Regex for Feishu command:
  // node skills/feishu-card/send.js --title "..." --color ... --text "..."
  const cmdRegex = /node skills\/feishu-card\/send\.js --title "(.*?)" --color \w+ --text "(.*?)"/;
  for (let i = 0; i < lines.length; i++) {
    const line = lines[i];
    // 1. Capture Timestamp
    if (line.includes('Cycle Start:')) {
      // Format: Cycle Start: Sun Feb 1 19:17:44 UTC 2026
      // new Date() never throws on bad input — it yields an Invalid Date — so
      // the previous try/catch was dead code and invalid dates leaked through
      // (NaN sort keys, "Invalid Date" in output). Validate with getTime().
      const parts = line.split('Cycle Start: ');
      const dateStr = parts.length > 1 ? parts[1].trim() : '';
      const parsed = new Date(dateStr);
      currentTimestamp = Number.isNaN(parsed.getTime()) ? null : parsed;
    }
    const match = line.match(cmdRegex);
    if (match) {
      const title = match[1];
      let text = match[2];
      // Clean up text (unescape newlines and quotes)
      text = text.replace(/\\n/g, '\n').replace(/\\"/g, '"');
      // Reports without a valid cycle timestamp cannot be ordered; skip them.
      if (currentTimestamp) {
        reports.push({
          ts: currentTimestamp,
          title: title,
          text: text,
          id: title // Cycle ID is in title
        });
      }
    }
  }
  // Deduplicate by ID; the last occurrence in the log wins.
  const uniqueReports = {};
  reports.forEach(r => {
    uniqueReports[r.id] = r;
  });
  const sortedReports = Object.values(uniqueReports).sort((a, b) => a.ts - b.ts);
  let md = "# Evolution History (Extracted)\n\n";
  sortedReports.forEach(r => {
    // Convert to CST (UTC+8)
    const cstDate = r.ts.toLocaleString("zh-CN", {
      timeZone: "Asia/Shanghai",
      hour12: false,
      year: 'numeric', month: '2-digit', day: '2-digit',
      hour: '2-digit', minute: '2-digit', second: '2-digit'
    });
    md += `### ${r.title} (${cstDate})\n`;
    md += `${r.text}\n\n`;
    md += `---\n\n`;
  });
  fs.writeFileSync(OUT_FILE, md);
  console.log(`Extracted ${sortedReports.length} reports to ${OUT_FILE}`);
}
parseLog();

View File

@@ -0,0 +1,75 @@
const { execSync } = require('child_process');
const fs = require('fs');
const path = require('path');
// Separator for git log parsing (something unlikely to be in commit messages)
const SEP = '|||';
const REPO_ROOT = path.resolve(__dirname, '..');
// Generate memory/evolution_history.md from commits mentioning "Evolution",
// oldest first, with timestamps rendered as CST (UTC+8).
try {
  // Git command:
  //  --reverse: Oldest to Newest (Time Sequence)
  //  --grep:    Filter by keyword
  //  --format:  Hash, Date (ISO), Author, Subject, Body
  // %x1e appends an ASCII Record Separator (0x1E) after each commit. The
  // previous newline split broke multi-line bodies: their extra lines failed
  // the parts-count check and were dropped, and bodies were truncated.
  const cmd = `git log --reverse --grep="Evolution" --format="%H${SEP}%ai${SEP}%an${SEP}%s${SEP}%b%x1e"`;
  console.log('Executing git log...');
  const output = execSync(cmd, {
    encoding: 'utf8',
    cwd: REPO_ROOT,
    maxBuffer: 1024 * 1024 * 10 // 10MB buffer just in case
  });
  // Split on the record separator so commit bodies keep their newlines.
  const entries = output.split('\x1e').map(e => e.trim()).filter(e => e.length > 0);
  let markdown = '# Evolution History (Time Sequence)\n\n';
  markdown += '> Filter: "Evolution"\n';
  markdown += '> Timezone: CST (UTC+8)\n\n';
  let count = 0;
  entries.forEach(entry => {
    const parts = entry.split(SEP);
    if (parts.length < 4) return;
    const [hash, dateStr, author, subject] = parts;
    const body = parts[4] || '';
    // Parse the date and shift by +8h so the ISO rendering below shows CST
    // wall-clock values. Skip entries whose date cannot be parsed —
    // toISOString() throws on an Invalid Date.
    const date = new Date(dateStr);
    if (Number.isNaN(date.getTime())) return;
    const cstDate = new Date(date.getTime() + 8 * 60 * 60 * 1000);
    // Format: YYYY-MM-DD HH:mm:ss
    const timeStr = cstDate.toISOString().replace('T', ' ').substring(0, 19);
    markdown += `## ${timeStr}\n`;
    markdown += `- Commit: \`${hash.substring(0, 7)}\`\n`;
    markdown += `- Subject: ${subject}\n`;
    if (body.trim()) {
      // Indent body for better readability
      const formattedBody = body.trim().split('\n').map(l => `> ${l}`).join('\n');
      markdown += `- Details:\n${formattedBody}\n`;
    }
    markdown += '\n';
    count++;
  });
  const outDir = path.join(REPO_ROOT, 'memory');
  if (!fs.existsSync(outDir)) fs.mkdirSync(outDir, { recursive: true });
  const outPath = path.join(outDir, 'evolution_history.md');
  fs.writeFileSync(outPath, markdown);
  console.log(`Successfully generated report with ${count} entries.`);
  console.log(`Saved to: ${outPath}`);
} catch (e) {
  console.error('Error generating history:', e.message);
  process.exit(1);
}

View File

@@ -0,0 +1,96 @@
const fs = require('fs');
const { appendEventJsonl } = require('../src/gep/assetStore');
// Read all of stdin (fd 0) as UTF-8; unreadable input degrades to ''.
function readStdin() {
  let text;
  try {
    text = fs.readFileSync(0, 'utf8');
  } catch {
    text = '';
  }
  return text;
}
// Read a text file, returning '' for an empty path, a missing file,
// or any filesystem error.
function readTextIfExists(p) {
  if (!p) return '';
  try {
    return fs.existsSync(p) ? fs.readFileSync(p, 'utf8') : '';
  } catch {
    return '';
  }
}
// Parse raw input into an array of values. Accepts a single JSON document
// (object or array) first; anything else is treated as JSONL, skipping
// malformed lines.
function parseInput(text) {
  const raw = String(text || '').trim();
  if (!raw) return [];
  try {
    const parsed = JSON.parse(raw);
    if (Array.isArray(parsed)) return parsed;
    if (parsed && typeof parsed === 'object') return [parsed];
  } catch (e) {
    // Not a single JSON document — fall through to JSONL handling.
  }
  const out = [];
  for (const line of raw.split('\n')) {
    const trimmed = line.trim();
    if (!trimmed) continue;
    try {
      out.push(JSON.parse(trimmed));
    } catch (e) {
      // Skip malformed lines.
    }
  }
  return out;
}
// Validate the GEP v1.4 EvolutionEvent shape: required string/array fields,
// a PersonalityState with five traits in [0,1], numeric blast_radius,
// an outcome with status and a score in [0,1]; capsule_id is optional
// (string or null when present).
function isValidEvolutionEvent(ev) {
  if (!ev || ev.type !== 'EvolutionEvent') return false;
  if (typeof ev.id !== 'string' || !ev.id) return false;
  // parent may be null or string
  if (ev.parent !== null && typeof ev.parent !== 'string') return false;
  if (typeof ev.intent !== 'string' || !ev.intent) return false;
  if (!Array.isArray(ev.signals) || !Array.isArray(ev.genes_used)) return false;
  // GEP v1.4: mutation + personality are mandatory evolution dimensions
  if (typeof ev.mutation_id !== 'string' || !ev.mutation_id) return false;
  const ps = ev.personality_state;
  if (!ps || typeof ps !== 'object' || ps.type !== 'PersonalityState') return false;
  const inUnit = (x) => Number.isFinite(x) && x >= 0 && x <= 1;
  const traits = ['rigor', 'creativity', 'verbosity', 'risk_tolerance', 'obedience'];
  if (!traits.every((k) => inUnit(Number(ps[k])))) return false;
  const br = ev.blast_radius;
  if (!br || typeof br !== 'object') return false;
  if (!Number.isFinite(Number(br.files)) || !Number.isFinite(Number(br.lines))) return false;
  const outcome = ev.outcome;
  if (!outcome || typeof outcome !== 'object') return false;
  if (typeof outcome.status !== 'string' || !outcome.status) return false;
  if (!inUnit(Number(outcome.score))) return false;
  // capsule_id is optional, but if present must be string or null.
  if (!('capsule_id' in ev)) return true;
  return ev.capsule_id === null || typeof ev.capsule_id === 'string';
}
// CLI: read events from a file argument or stdin, validate each one, and
// append valid EvolutionEvents to the event log. Prints 'appended=N'.
function main() {
  const args = process.argv.slice(2);
  const inputPath = args.find((a) => a && !a.startsWith('--')) || '';
  const text = inputPath ? readTextIfExists(inputPath) : readStdin();
  let appended = 0;
  for (const item of parseInput(text)) {
    if (!isValidEvolutionEvent(item)) continue;
    appendEventJsonl(item);
    appended += 1;
  }
  process.stdout.write(`appended=${appended}\n`);
}
// Entry point: report a readable message and exit non-zero on failure.
try {
  main();
} catch (e) {
  process.stderr.write(`${e && e.message ? e.message : String(e)}\n`);
  process.exit(1);
}

View File

@@ -0,0 +1,234 @@
const fs = require('fs');
const path = require('path');
const { getRepoRoot, getMemoryDir, getGepAssetsDir } = require('../src/gep/paths');
const { normalizePersonalityState, personalityKey, defaultPersonalityState } = require('../src/gep/personality');
// Read and parse a JSON file; the fallback is returned for a missing file,
// blank content, or any read/parse error.
function readJsonIfExists(p, fallback) {
  try {
    if (!fs.existsSync(p)) return fallback;
    const raw = fs.readFileSync(p, 'utf8');
    return raw.trim() ? JSON.parse(raw) : fallback;
  } catch {
    return fallback;
  }
}
// Read the most recent limitLines entries of a JSONL file, skipping blank
// and malformed lines. Returns [] for a missing file or any read error.
function readJsonlIfExists(p, limitLines = 5000) {
  try {
    if (!fs.existsSync(p)) return [];
    const lines = fs
      .readFileSync(p, 'utf8')
      .split('\n')
      .map((l) => l.trim())
      .filter(Boolean);
    const start = Math.max(0, lines.length - limitLines);
    const out = [];
    for (const line of lines.slice(start)) {
      try {
        out.push(JSON.parse(line));
      } catch {
        // Skip malformed lines.
      }
    }
    return out;
  } catch {
    return [];
  }
}
// Coerce to a number and clamp into [0, 1]; non-finite input maps to 0.
function clamp01(x) {
  const value = Number(x);
  if (!Number.isFinite(value)) return 0;
  if (value < 0) return 0;
  if (value > 1) return 1;
  return value;
}
// Render a ratio as a percentage string with one decimal; '0.0%' on bad input.
function pct(x) {
  const value = Number(x);
  return Number.isFinite(value) ? `${(value * 100).toFixed(1)}%` : '0.0%';
}
// Fit a value into exactly n characters: truncate when too long,
// right-pad with spaces when too short. null/undefined become ''.
function pad(s, n) {
  const text = String(s == null ? '' : s);
  return text.length >= n ? text.slice(0, n) : text.padEnd(n, ' ');
}
/**
 * Composite score from success/fail counts and an optional average score:
 * 0.75 * Laplace-smoothed success probability, plus 0.25 * avg score
 * weighted by sample size (full weight at >= 8 samples).
 */
function scoreFromCounts(success, fail, avgScore) {
  const wins = Number(success) || 0;
  const losses = Number(fail) || 0;
  const total = wins + losses;
  const prob = (wins + 1) / (total + 2); // Laplace smoothing
  const sampleWeight = Math.min(1, total / 8);
  const quality = avgScore == null ? 0.5 : clamp01(avgScore);
  return prob * 0.75 + quality * 0.25 * sampleWeight;
}
/**
 * Aggregate EvolutionEvents into per-personality stat rows keyed by
 * personalityKey(normalized state). Each row tracks success/fail counts,
 * a running average score, last event id/timestamp, and per-intent
 * (repair/optimize/innovate) attempt and success counts.
 */
function aggregateFromEvents(events) {
  const byKey = new Map();
  const list = Array.isArray(events) ? events : [];
  for (const ev of list) {
    if (!ev || ev.type !== 'EvolutionEvent') continue;
    const rawState = ev.personality_state && typeof ev.personality_state === 'object' ? ev.personality_state : null;
    if (!rawState) continue;
    const key = personalityKey(normalizePersonalityState(rawState));
    let row = byKey.get(key);
    if (!row) {
      row = {
        key,
        success: 0,
        fail: 0,
        n: 0,
        avg_score: 0.5,
        last_event_id: null,
        last_at: null,
        mutation: { repair: 0, optimize: 0, innovate: 0 },
        mutation_success: { repair: 0, optimize: 0, innovate: 0 },
      };
    }
    const status = ev.outcome && ev.outcome.status ? String(ev.outcome.status) : 'unknown';
    if (status === 'success') row.success += 1;
    else if (status === 'failed') row.fail += 1;
    const score = ev.outcome && Number.isFinite(Number(ev.outcome.score)) ? clamp01(Number(ev.outcome.score)) : null;
    if (score != null) {
      row.n += 1;
      row.avg_score += (score - row.avg_score) / row.n; // incremental mean
    }
    const intent = ev.intent ? String(ev.intent) : null;
    if (intent && row.mutation[intent] != null) {
      row.mutation[intent] += 1;
      if (status === 'success') row.mutation_success[intent] += 1;
    }
    if (ev.id) row.last_event_id = ev.id;
    const at = ev.meta && ev.meta.at ? String(ev.meta.at) : null;
    if (at) row.last_at = at;
    byKey.set(key, row);
  }
  return Array.from(byKey.values());
}
/**
 * Entry point: print a personality report to stdout.
 * Data sources:
 *  - memory/personality_state.json (current state + model-maintained stats)
 *  - <assetsDir>/events.jsonl (ground-truth EvolutionEvents, last 10k lines)
 * Rows from both sources are merged by personality key (events win for
 * counts/avg), ranked by composite score, and rendered as a fixed-width
 * table (top 25) with per-intent success breakdowns.
 */
function main() {
  const repoRoot = getRepoRoot();
  const memoryDir = getMemoryDir();
  const assetsDir = getGepAssetsDir();
  const personalityPath = path.join(memoryDir, 'personality_state.json');
  const model = readJsonIfExists(personalityPath, null);
  // Fall back to the default personality when no saved model exists.
  const current = model && model.current ? normalizePersonalityState(model.current) : defaultPersonalityState();
  const currentKey = personalityKey(current);
  const eventsPath = path.join(assetsDir, 'events.jsonl');
  const events = readJsonlIfExists(eventsPath, 10000);
  const evs = events.filter(e => e && e.type === 'EvolutionEvent');
  const agg = aggregateFromEvents(evs);
  // Prefer model.stats if present, but still show event-derived aggregation (ground truth).
  const stats = model && model.stats && typeof model.stats === 'object' ? model.stats : {};
  const statRows = Object.entries(stats).map(([key, e]) => {
    const entry = e && typeof e === 'object' ? e : {};
    const success = Number(entry.success) || 0;
    const fail = Number(entry.fail) || 0;
    const total = success + fail;
    const avg = Number.isFinite(Number(entry.avg_score)) ? clamp01(Number(entry.avg_score)) : null;
    const score = scoreFromCounts(success, fail, avg);
    return { key, success, fail, total, avg_score: avg, score, updated_at: entry.updated_at || null, source: 'model' };
  });
  const evRows = agg.map(e => {
    const success = Number(e.success) || 0;
    const fail = Number(e.fail) || 0;
    const total = success + fail;
    const avg = Number.isFinite(Number(e.avg_score)) ? clamp01(Number(e.avg_score)) : null;
    const score = scoreFromCounts(success, fail, avg);
    return { key: e.key, success, fail, total, avg_score: avg, score, updated_at: e.last_at || null, source: 'events', _ev: e };
  });
  // Merge rows by key (events take precedence for total/success/fail; model provides updated_at if events missing).
  const byKey = new Map();
  for (const r of [...statRows, ...evRows]) {
    const prev = byKey.get(r.key);
    if (!prev) {
      byKey.set(r.key, r);
      continue;
    }
    // Prefer events for counts and avg_score
    if (r.source === 'events') byKey.set(r.key, { ...prev, ...r });
    else byKey.set(r.key, { ...r, ...prev });
  }
  // Highest score first.
  const merged = Array.from(byKey.values()).sort((a, b) => b.score - a.score);
  process.stdout.write(`Repo: ${repoRoot}\n`);
  process.stdout.write(`MemoryDir: ${memoryDir}\n`);
  process.stdout.write(`AssetsDir: ${assetsDir}\n\n`);
  process.stdout.write(`[Current Personality]\n`);
  process.stdout.write(`${currentKey}\n`);
  process.stdout.write(`${JSON.stringify(current, null, 2)}\n\n`);
  process.stdout.write(`[Personality Stats] (ranked by score)\n`);
  if (merged.length === 0) {
    process.stdout.write('(no stats yet; run a few cycles and solidify)\n');
    return;
  }
  // Fixed-width table header; `key` is left untruncated as the last column.
  const header =
    pad('rank', 5) +
    pad('total', 8) +
    pad('succ', 8) +
    pad('fail', 8) +
    pad('succ_rate', 11) +
    pad('avg', 7) +
    pad('score', 8) +
    'key';
  process.stdout.write(header + '\n');
  process.stdout.write('-'.repeat(Math.min(140, header.length + 40)) + '\n');
  const topN = Math.min(25, merged.length);
  for (let i = 0; i < topN; i++) {
    const r = merged[i];
    const succ = Number(r.success) || 0;
    const fail = Number(r.fail) || 0;
    const total = Number(r.total) || succ + fail;
    const succRate = total > 0 ? succ / total : 0;
    const avg = r.avg_score == null ? '-' : Number(r.avg_score).toFixed(2);
    const line =
      pad(String(i + 1), 5) +
      pad(String(total), 8) +
      pad(String(succ), 8) +
      pad(String(fail), 8) +
      pad(pct(succRate), 11) +
      pad(String(avg), 7) +
      pad(Number(r.score).toFixed(3), 8) +
      String(r.key);
    process.stdout.write(line + '\n');
    // Event-backed rows get a per-intent success/attempt breakdown line.
    if (r._ev) {
      const ev = r._ev;
      const ms = ev.mutation || {};
      const mSucc = ev.mutation_success || {};
      const parts = [];
      for (const cat of ['repair', 'optimize', 'innovate']) {
        const n = Number(ms[cat]) || 0;
        if (n <= 0) continue;
        const s = Number(mSucc[cat]) || 0;
        parts.push(`${cat}:${s}/${n}`);
      }
      if (parts.length) process.stdout.write(` mutation_success: ${parts.join(' | ')}\n`);
    }
  }
  process.stdout.write('\n');
  process.stdout.write(`[Notes]\n`);
  process.stdout.write(`- score is a smoothed composite of success_rate + avg_score (sample-weighted)\n`);
  process.stdout.write(`- current_key appears in the ranking once enough data accumulates\n`);
}
// Entry-point guard: print the failure message on stderr and exit non-zero.
try {
  main();
} catch (err) {
  const message = (err && err.message) || String(err);
  process.stderr.write(`${message}\n`);
  process.exit(1);
}

147
scripts/human_report.js Normal file
View File

@@ -0,0 +1,147 @@
const fs = require('fs');
const path = require('path');
// Repository root (this script lives in scripts/).
const REPO_ROOT = path.resolve(__dirname, '..');
// Input: full evolution history markdown. Output: grouped human-readable summary.
const IN_FILE = path.join(REPO_ROOT, 'evolution_history_full.md');
const OUT_FILE = path.join(REPO_ROOT, 'evolution_human_summary.md');
/**
 * Generate a human-readable evolution summary (evolution_human_summary.md)
 * from the full history markdown (evolution_history_full.md).
 * Entries are split on '---', classified by component and theme keywords,
 * de-duplicated, and rendered in three sections: by theme, by timeline
 * (critical events only), and per-component update counts.
 * Fix: backtick stripping now uses a global regex — the old `/`/` (no /g)
 * removed only the FIRST backtick, leaving unbalanced backticks in summaries.
 */
function generateHumanReport() {
  if (!fs.existsSync(IN_FILE)) {
    console.error("No input file");
    return;
  }
  const content = fs.readFileSync(IN_FILE, 'utf8');
  const entries = content.split('---').map(e => e.trim()).filter(e => e.length > 0);
  const categories = {
    'Security & Stability': [],
    'Performance & Optimization': [],
    'Tooling & Features': [],
    'Documentation & Process': []
  };
  const componentMap = {}; // Component -> Change List
  entries.forEach(entry => {
    // Extract basic info
    const lines = entry.split('\n');
    const header = lines[0]; // ### Title (Date)
    const body = lines.slice(1).join('\n');
    const dateMatch = header.match(/\((.*?)\)/);
    const dateStr = dateMatch ? dateMatch[1] : '';
    const time = dateStr.split(' ')[1] || ''; // HH:mm:ss
    // Classify: defaults apply when no keyword matches.
    let category = 'Tooling & Features';
    let component = 'System';
    let summary = '';
    const lowerBody = body.toLowerCase();
    // Detect Component (first match wins)
    if (lowerBody.includes('feishu-card')) component = 'feishu-card';
    else if (lowerBody.includes('feishu-sticker')) component = 'feishu-sticker';
    else if (lowerBody.includes('git-sync')) component = 'git-sync';
    else if (lowerBody.includes('capability-evolver') || lowerBody.includes('evolve.js')) component = 'capability-evolver';
    else if (lowerBody.includes('interaction-logger')) component = 'interaction-logger';
    else if (lowerBody.includes('chat-to-image')) component = 'chat-to-image';
    else if (lowerBody.includes('safe_publish')) component = 'capability-evolver';
    // Detect Category
    if (lowerBody.includes('security') || lowerBody.includes('permission') || lowerBody.includes('auth') || lowerBody.includes('harden')) {
      category = 'Security & Stability';
    } else if (lowerBody.includes('optimiz') || lowerBody.includes('performance') || lowerBody.includes('memory') || lowerBody.includes('fast')) {
      category = 'Performance & Optimization';
    } else if (lowerBody.includes('doc') || lowerBody.includes('readme')) {
      category = 'Documentation & Process';
    }
    // Extract Human Summary (First meaningful line that isn't Status/Action/Date)
    const summaryLines = lines.filter(l =>
      !l.startsWith('###') &&
      !l.startsWith('Status:') &&
      !l.startsWith('Action:') &&
      l.trim().length > 10
    );
    if (summaryLines.length > 0) {
      // Clean up the line: strip bullets, bold markers, and ALL backticks.
      summary = summaryLines[0]
        .replace(/^-\s*/, '') // Remove bullets
        .replace(/\*\*/g, '') // Remove bold
        .replace(/`/g, '')    // Remove backticks (fixed: was missing /g)
        .trim();
      // Deduplicate on component + summary prefix; drop routine noise entries.
      const key = `${component}:${summary.substring(0, 20)}`;
      const exists = categories[category].some(i => i.key === key);
      if (!exists && !summary.includes("Stability Scan OK") && !summary.includes("Workspace Sync")) {
        categories[category].push({ time, component, summary, key });
        if (!componentMap[component]) componentMap[component] = [];
        componentMap[component].push(summary);
      }
    }
  });
  // --- Generate Markdown ---
  const today = new Date().toISOString().slice(0, 10);
  let md = `# Evolution Summary: The Day in Review (${today})\n\n`;
  md += `> Overview: Grouped summary of changes extracted from evolution history.\n\n`;
  // Section 1: By Theme (Evolution Direction)
  md += `## 1. Evolution Direction\n`;
  for (const [cat, items] of Object.entries(categories)) {
    if (items.length === 0) continue;
    md += `### ${cat}\n`;
    // Group by component within theme
    const compGroup = {};
    items.forEach(i => {
      if (!compGroup[i.component]) compGroup[i.component] = [];
      compGroup[i.component].push(i.summary);
    });
    for (const [comp, sums] of Object.entries(compGroup)) {
      // Unique summaries only
      const uniqueSums = [...new Set(sums)];
      uniqueSums.forEach(s => {
        md += `- **${comp}**: ${s}\n`;
      });
    }
    md += `\n`;
  }
  // Section 2: By Timeline (High Level)
  md += `## 2. Timeline of Critical Events\n`;
  // Flatten and sort all items by time
  const allItems = [];
  Object.values(categories).forEach(list => allItems.push(...list));
  allItems.sort((a, b) => a.time.localeCompare(b.time));
  // Filter for "Critical" keywords
  const criticalItems = allItems.filter(i =>
    i.summary.toLowerCase().includes('fix') ||
    i.summary.toLowerCase().includes('patch') ||
    i.summary.toLowerCase().includes('create') ||
    i.summary.toLowerCase().includes('optimiz')
  );
  criticalItems.forEach(i => {
    md += `- \`${i.time}\` (${i.component}): ${i.summary}\n`;
  });
  // Section 3: Package Adjustments
  md += `\n## 3. Package & Documentation Adjustments\n`;
  const comps = Object.keys(componentMap).sort();
  comps.forEach(comp => {
    const count = new Set(componentMap[comp]).size;
    md += `- **${comp}**: Received ${count} significant updates.\n`;
  });
  fs.writeFileSync(OUT_FILE, md);
  console.log("Human report generated.");
}
generateHumanReport();

614
scripts/publish_public.js Normal file
View File

@@ -0,0 +1,614 @@
const { execSync, spawnSync } = require('child_process');
const fs = require('fs');
const https = require('https');
const os = require('os');
const path = require('path');
/**
 * Execute a shell command and return its trimmed stdout.
 * With opts.dryRun the command is only echoed and '' is returned.
 */
function run(cmd, opts = {}) {
  const { dryRun = false } = opts;
  if (dryRun) {
    process.stdout.write(`[dry-run] ${cmd}\n`);
    return '';
  }
  const out = execSync(cmd, { encoding: 'utf8', stdio: ['ignore', 'pipe', 'pipe'] });
  return out.trim();
}
/** True when `cmd` resolves on PATH (via `where` on Windows, `which` elsewhere). */
function hasCommand(cmd) {
  const finder = process.platform === 'win32' ? 'where' : 'which';
  try {
    return spawnSync(finder, [cmd], { stdio: 'ignore' }).status === 0;
  } catch (e) {
    return false;
  }
}
/**
 * Locate the GitHub CLI: prefer PATH lookup, then well-known Windows install
 * directories. Returns the executable name/path, or null when not found.
 */
function resolveGhExecutable() {
  if (hasCommand('gh')) return 'gh';
  const windowsInstallPaths = [
    'C:\\Program Files\\GitHub CLI\\gh.exe',
    'C:\\Program Files (x86)\\GitHub CLI\\gh.exe',
  ];
  for (const candidate of windowsInstallPaths) {
    try {
      if (fs.existsSync(candidate)) return candidate;
    } catch (e) {
      // ignore fs probe failures
    }
  }
  return null;
}
/**
 * Locate the clawhub CLI. On Windows prefer the explicit .cmd shim, because
 * Node's spawn does not resolve PATHEXT the way shells do (which can cause
 * false "not logged in" detection). Falls back to common npm global bin
 * locations on Windows. Returns the executable name/path, or null.
 */
function resolveClawhubExecutable() {
  if (process.platform === 'win32' && hasCommand('clawhub.cmd')) return 'clawhub.cmd';
  if (hasCommand('clawhub')) return 'clawhub';
  // Common npm global bin location on Windows.
  const npmGlobalCandidates = [
    'C:\\Users\\Administrator\\AppData\\Roaming\\npm\\clawhub.cmd',
    'C:\\Users\\Administrator\\AppData\\Roaming\\npm\\clawhub.exe',
    'C:\\Users\\Administrator\\AppData\\Roaming\\npm\\clawhub.ps1',
  ];
  for (const candidate of npmGlobalCandidates) {
    try {
      if (fs.existsSync(candidate)) return candidate;
    } catch (e) {
      // ignore fs probe failures
    }
  }
  return null;
}
/** Availability check for the clawhub CLI; includes the resolved exe on success. */
function canUseClawhub() {
  const exe = resolveClawhubExecutable();
  return exe
    ? { ok: true, exe }
    : { ok: false, reason: 'clawhub CLI not found (install: npm i -g clawhub)' };
}
/** True when `clawhub whoami` succeeds, i.e. an auth session exists. */
function isClawhubLoggedIn() {
  const exe = resolveClawhubExecutable();
  if (!exe) return false;
  try {
    return spawnClawhub(exe, ['whoami'], { stdio: 'ignore' }).status === 0;
  } catch (e) {
    return false;
  }
}
/**
 * Spawn the clawhub CLI. On Windows a .cmd shim is routed through cmd.exe:
 * spawning .cmd files directly is flaky, and shell:true can break
 * clap/commander-style option parsing for some CLIs.
 */
function spawnClawhub(exe, args, options) {
  const spawnOpts = options || {};
  const argv = args || [];
  const isWinCmdShim =
    process.platform === 'win32' && typeof exe === 'string' && exe.toLowerCase().endsWith('.cmd');
  if (isWinCmdShim) {
    return spawnSync('cmd.exe', ['/d', '/s', '/c', exe, ...argv], spawnOpts);
  }
  return spawnSync(exe, argv, spawnOpts);
}
/**
 * Publish a skill directory to ClawHub, idempotently: skips when the version
 * already exists (detected via `inspect` up front, or via the publish error
 * message afterwards). Throws when the CLI is missing, not logged in, or
 * publishing fails for any other reason.
 */
function publishToClawhub({ skillDir, slug, name, version, changelog, tags, dryRun }) {
  const avail = canUseClawhub();
  if (!avail.ok) throw new Error(avail.reason);
  // Idempotency pre-check: a successful inspect means this version exists.
  try {
    const probe = spawnClawhub(avail.exe, ['inspect', slug, '--version', version], { stdio: 'ignore' });
    if (probe.status === 0) {
      process.stdout.write(`ClawHub already has ${slug}@${version}. Skipping.\n`);
      return;
    }
  } catch (e) {
    // inspect is best-effort; real errors surface during publish
  }
  if (!dryRun && !isClawhubLoggedIn()) {
    throw new Error('Not logged in to ClawHub. Run: clawhub login');
  }
  const cliArgs = ['publish', skillDir, '--slug', slug, '--name', name, '--version', version];
  if (changelog) cliArgs.push('--changelog', changelog);
  if (tags) cliArgs.push('--tags', tags);
  if (dryRun) {
    const rendered = cliArgs.map(a => (/\s/.test(a) ? `"${a}"` : a)).join(' ');
    process.stdout.write(`[dry-run] ${avail.exe} ${rendered}\n`);
    return;
  }
  // Capture output so "version already exists" can be treated as success —
  // some clawhub deployments do not support reliable inspect-by-slug.
  const res = spawnClawhub(avail.exe, cliArgs, { encoding: 'utf8' });
  const combined = `${res.stdout || ''}\n${res.stderr || ''}`.trim();
  if (res.status === 0) {
    if (combined) process.stdout.write(combined + '\n');
    return;
  }
  if (/version already exists/i.test(combined)) {
    process.stdout.write(`ClawHub already has ${slug}@${version}. Skipping.\n`);
    return;
  }
  if (combined) process.stderr.write(combined + '\n');
  throw new Error(`clawhub publish failed for slug ${slug}`);
}
/** Throw a descriptive error when a required env var value is falsy. */
function requireEnv(name, value) {
  if (value) return;
  throw new Error(`Missing required env var: ${name}`);
}
/** Abort unless `git status --porcelain` reports a clean tree (skipped under dry-run). */
function ensureClean(dryRun) {
  const pending = run('git status --porcelain', { dryRun });
  if (dryRun) return;
  if (pending) {
    throw new Error('Working tree is not clean. Commit or stash before publishing.');
  }
}
/** Abort unless the current git branch matches `expected` (skipped under dry-run). */
function ensureBranch(expected, dryRun) {
  const current = run('git rev-parse --abbrev-ref HEAD', { dryRun }) || expected;
  if (dryRun || current === expected) return;
  throw new Error(`Current branch is ${current}. Expected ${expected}.`);
}
/** Verify the git remote exists; rethrows with an actionable message when missing. */
function ensureRemote(remote, dryRun) {
  try {
    run(`git remote get-url ${remote}`, { dryRun });
  } catch (e) {
    throw new Error(`Remote "${remote}" not found. Add it manually before running this script.`);
  }
}
/** Abort when the tag already exists locally; no-op for an empty tag or dry-run. */
function ensureTagAvailable(tag, dryRun) {
  if (!tag) return;
  const found = run(`git tag --list ${tag}`, { dryRun });
  if (dryRun) return;
  if (found) throw new Error(`Tag ${tag} already exists.`);
}
/** Create `dir` (recursively) unless it already exists; no-op under dry-run. */
function ensureDir(dir, dryRun) {
  if (dryRun || fs.existsSync(dir)) return;
  fs.mkdirSync(dir, { recursive: true });
}
/** Remove `dir` recursively when present; no-op under dry-run or when missing. */
function rmDir(dir, dryRun) {
  if (dryRun) return;
  if (fs.existsSync(dir)) fs.rmSync(dir, { recursive: true, force: true });
}
/**
 * Recursively copy `src` into `dest`; only files and directories are copied
 * (symlinks/other entry types are skipped). Throws when `src` is missing.
 * No-op under dry-run.
 */
function copyDir(src, dest, dryRun) {
  if (dryRun) return;
  if (!fs.existsSync(src)) throw new Error(`Missing build output dir: ${src}`);
  ensureDir(dest, dryRun);
  for (const entry of fs.readdirSync(src, { withFileTypes: true })) {
    const from = path.join(src, entry.name);
    const to = path.join(dest, entry.name);
    if (entry.isDirectory()) {
      copyDir(from, to, dryRun);
    } else if (entry.isFile()) {
      ensureDir(path.dirname(to), dryRun);
      fs.copyFileSync(from, to);
    }
  }
}
/**
 * Create a GitHub Release via the gh CLI. Notes precedence: notesFile > notes
 * > generic default. No-op when repo or tag is missing; throws when gh is
 * unavailable or exits non-zero.
 */
function createReleaseWithGh({ repo, tag, title, notes, notesFile, dryRun }) {
  if (!repo || !tag) return;
  const ghExe = resolveGhExecutable();
  if (!ghExe) {
    throw new Error('gh CLI not found. Install GitHub CLI or provide a GitHub token for API-based release creation.');
  }
  const ghArgs = ['release', 'create', tag, '--repo', repo];
  if (title) ghArgs.push('-t', title);
  if (notesFile) {
    ghArgs.push('-F', notesFile);
  } else {
    ghArgs.push('-n', notes || 'Release created by publish script.');
  }
  if (dryRun) {
    process.stdout.write(`[dry-run] ${ghExe} ${ghArgs.join(' ')}\n`);
    return;
  }
  const result = spawnSync(ghExe, ghArgs, { stdio: 'inherit' });
  if (result.status !== 0) {
    throw new Error('gh release create failed');
  }
}
/**
 * Check whether the gh CLI is installed AND authenticated against github.com.
 * Returns { ok: true } on success, { ok: false, reason } otherwise. Never throws.
 */
function canUseGhForRelease() {
  const ghExe = resolveGhExecutable();
  if (!ghExe) return { ok: false, reason: 'gh CLI not found' };
  try {
    // Non-interactive auth probe: exit code 0 means authenticated.
    const probe = spawnSync(ghExe, ['auth', 'status', '-h', 'github.com'], { stdio: 'ignore' });
    return probe.status === 0
      ? { ok: true }
      : { ok: false, reason: 'gh not authenticated (run: gh auth login)' };
  } catch (e) {
    return { ok: false, reason: 'failed to check gh auth status' };
  }
}
/** First available GitHub token among GITHUB_TOKEN, GH_TOKEN, GITHUB_PAT ('' if none). */
function getGithubToken() {
  const { GITHUB_TOKEN, GH_TOKEN, GITHUB_PAT } = process.env;
  return GITHUB_TOKEN || GH_TOKEN || GITHUB_PAT || '';
}
/**
 * Resolve release notes text: file contents when `notesFile` is given (throws
 * a clear error on read failure), else the `notes` string, else ''.
 */
function readReleaseNotes(notes, notesFile) {
  if (notesFile) {
    try {
      return fs.readFileSync(notesFile, 'utf8');
    } catch (e) {
      throw new Error(`Failed to read RELEASE_NOTES_FILE: ${notesFile}`);
    }
  }
  return notes ? String(notes) : '';
}
/**
 * Minimal GitHub REST helper. Resolves { status, json, raw }; response bodies
 * that fail to parse yield json: null. Under dry-run, prints the request and
 * resolves a { status: 200, json: null } stub without any network I/O.
 */
function githubRequestJson({ method, repo, apiPath, token, body, dryRun }) {
  if (dryRun) {
    process.stdout.write(`[dry-run] GitHub API ${method} ${repo} ${apiPath}\n`);
    return Promise.resolve({ status: 200, json: null });
  }
  const payload = body ? Buffer.from(JSON.stringify(body)) : null;
  const headers = {
    'User-Agent': 'evolver-publish-script',
    Accept: 'application/vnd.github+json',
  };
  if (token) headers.Authorization = `token ${token}`;
  if (payload) {
    headers['Content-Type'] = 'application/json';
    headers['Content-Length'] = String(payload.length);
  }
  const requestOpts = {
    method,
    hostname: 'api.github.com',
    path: `/repos/${repo}${apiPath}`,
    headers,
  };
  return new Promise((resolve, reject) => {
    const req = https.request(requestOpts, res => {
      let raw = '';
      res.setEncoding('utf8');
      res.on('data', chunk => {
        raw += chunk;
      });
      res.on('end', () => {
        let json = null;
        try {
          json = raw ? JSON.parse(raw) : null;
        } catch (e) {
          json = null;
        }
        resolve({ status: res.statusCode || 0, json, raw });
      });
    });
    req.on('error', reject);
    if (payload) req.write(payload);
    req.end();
  });
}
/**
 * Ensure a GitHub Release exists for `tag` via the REST API.
 * No-op when repo/tag are missing; skips when a release for the tag already
 * exists. Requires a token (GITHUB_TOKEN / GH_TOKEN / GITHUB_PAT) unless
 * dry-run. Throws when release creation returns a non-2xx status.
 */
async function ensureReleaseWithApi({ repo, tag, title, notes, notesFile, dryRun }) {
  if (!repo || !tag) return;
  const token = getGithubToken();
  if (!dryRun) {
    requireEnv('GITHUB_TOKEN (or GH_TOKEN/GITHUB_PAT)', token);
  }
  // If release already exists, skip.
  const existing = await githubRequestJson({
    method: 'GET',
    repo,
    apiPath: `/releases/tags/${encodeURIComponent(tag)}`,
    token,
    dryRun,
  });
  if (!dryRun && existing.status === 200) {
    process.stdout.write(`Release already exists for tag ${tag}. Skipping.\n`);
    return;
  }
  // Fall back to a generic body when neither notes nor notesFile yield text.
  const bodyText = readReleaseNotes(notes, notesFile) || 'Release created by publish script.';
  const payload = {
    tag_name: tag,
    name: title || tag,
    body: bodyText,
    draft: false,
    prerelease: false,
  };
  const created = await githubRequestJson({
    method: 'POST',
    repo,
    apiPath: '/releases',
    token,
    body: payload,
    dryRun,
  });
  if (!dryRun && (created.status < 200 || created.status >= 300)) {
    const msg = (created.json && created.json.message) || created.raw || 'Unknown error';
    throw new Error(`Failed to create GitHub Release (${created.status}): ${msg}`);
  }
  process.stdout.write(`Created GitHub Release for tag ${tag}\n`);
}
// Collect unique contributors (as "Name <email>" strings) from commits since
// the last release-prep commit, excluding known bot identities. Used for
// Co-authored-by trailers; GitHub counts those toward the Contributors graph.
// Best-effort: returns [] on any git failure.
function getContributorsSinceLastRelease() {
  const EXCLUDED = new Set([
    'evolver-publish@local',
    'evolver@local',
    'openclaw@users.noreply.github.com',
  ]);
  const gitLog = cmd =>
    execSync(cmd, { encoding: 'utf8', cwd: process.cwd(), stdio: ['ignore', 'pipe', 'ignore'] }).trim();
  try {
    let baseCommit = '';
    try {
      baseCommit = gitLog('git log -n 1 --pretty=%H --grep="chore(release): prepare v"');
    } catch (_) {}
    // Without a base commit, fall back to the 30 most recent commits.
    const range = baseCommit ? `${baseCommit}..HEAD` : '-30';
    const raw = gitLog(`git log ${range} --pretty="%aN <%aE>"`);
    if (!raw) return [];
    const seenEmails = new Set();
    const contributors = [];
    for (const entry of raw.split('\n')) {
      const line = entry.trim();
      if (!line) continue;
      const emailMatch = line.match(/<([^>]+)>/);
      const email = emailMatch ? emailMatch[1].toLowerCase() : '';
      if (EXCLUDED.has(email) || seenEmails.has(email)) continue;
      seenEmails.add(email);
      contributors.push(line);
    }
    return contributors;
  } catch (_) {
    return [];
  }
}
/**
 * Publish pipeline entry point, driven entirely by environment variables:
 *  - DRY_RUN, SOURCE_BRANCH, PUBLIC_REMOTE, PUBLIC_BRANCH, PUBLIC_REPO,
 *    PUBLIC_OUT_DIR, PUBLIC_USE_BUILD_OUTPUT, PUBLIC_RELEASE_ONLY
 *  - RELEASE_TAG / RELEASE_TITLE / RELEASE_NOTES / RELEASE_NOTES_FILE /
 *    RELEASE_SKIP / RELEASE_CREATE / RELEASE_USE_GH
 *  - CLAWHUB_SKIP, CLAWHUB_PUBLISH, CLAWHUB_REGISTRY
 * Steps: validate git state -> push source (or publish build output to the
 * public repo via a temp clone) -> tag -> create a GitHub Release (gh CLI or
 * REST API) -> publish to ClawHub. May return a Promise (API release path);
 * the top-level runner handles rejection.
 */
function main() {
  const dryRun = String(process.env.DRY_RUN || '').toLowerCase() === 'true';
  const sourceBranch = process.env.SOURCE_BRANCH || 'main';
  const publicRemote = process.env.PUBLIC_REMOTE || 'public';
  const publicBranch = process.env.PUBLIC_BRANCH || 'main';
  const publicRepo = process.env.PUBLIC_REPO || '';
  const outDir = process.env.PUBLIC_OUT_DIR || 'dist-public';
  const useBuildOutput = String(process.env.PUBLIC_USE_BUILD_OUTPUT || 'true').toLowerCase() === 'true';
  const releaseOnly = String(process.env.PUBLIC_RELEASE_ONLY || '').toLowerCase() === 'true';
  const clawhubSkip = String(process.env.CLAWHUB_SKIP || '').toLowerCase() === 'true';
  const clawhubPublish = String(process.env.CLAWHUB_PUBLISH || '').toLowerCase() === 'false' ? false : !clawhubSkip;
  // Workaround for registry redirect/auth issues: default to the www endpoint.
  const clawhubRegistry = process.env.CLAWHUB_REGISTRY || 'https://www.clawhub.ai';
  // If publishing build output, require a repo URL or GH repo slug for cloning.
  if (useBuildOutput) {
    requireEnv('PUBLIC_REPO', publicRepo);
  }
  let releaseTag = process.env.RELEASE_TAG || '';
  let releaseTitle = process.env.RELEASE_TITLE || '';
  const releaseNotes = process.env.RELEASE_NOTES || '';
  const releaseNotesFile = process.env.RELEASE_NOTES_FILE || '';
  const releaseSkip = String(process.env.RELEASE_SKIP || '').toLowerCase() === 'true';
  // Default behavior: create release unless explicitly skipped.
  // Backward compatibility: RELEASE_CREATE=true forces creation.
  // Note: RELEASE_CREATE=false is ignored; use RELEASE_SKIP=true instead.
  const releaseCreate = String(process.env.RELEASE_CREATE || '').toLowerCase() === 'true' ? true : !releaseSkip;
  const releaseUseGh = String(process.env.RELEASE_USE_GH || '').toLowerCase() === 'true';
  // If not provided, infer from build output package.json version.
  if (!releaseTag && useBuildOutput) {
    try {
      const builtPkg = JSON.parse(fs.readFileSync(path.resolve(process.cwd(), outDir, 'package.json'), 'utf8'));
      if (builtPkg && builtPkg.version) releaseTag = `v${builtPkg.version}`;
      if (!releaseTitle && releaseTag) releaseTitle = releaseTag;
    } catch (e) {}
  }
  const releaseVersion = String(releaseTag || '').startsWith('v') ? String(releaseTag).slice(1) : '';
  // Fail fast on missing release prerequisites to avoid half-publishing.
  // Strategy:
  // - If RELEASE_USE_GH=true: require gh + auth
  // - Else: prefer gh+auth; fallback to API token; else fail
  let releaseMode = 'none';
  if (releaseCreate && releaseTag) {
    if (releaseUseGh) {
      const ghOk = canUseGhForRelease();
      if (!dryRun && !ghOk.ok) {
        throw new Error(`Cannot create release via gh: ${ghOk.reason}`);
      }
      releaseMode = 'gh';
    } else {
      const ghOk = canUseGhForRelease();
      if (ghOk.ok) {
        releaseMode = 'gh';
      } else {
        const token = getGithubToken();
        if (!dryRun && !token) {
          throw new Error(
            'Cannot create GitHub Release: neither gh (installed+authenticated) nor GITHUB_TOKEN (or GH_TOKEN/GITHUB_PAT) is available.'
          );
        }
        releaseMode = 'api';
      }
    }
  }
  // In release-only mode we do not push code or tags, only create a GitHub Release for an existing tag.
  if (!releaseOnly) {
    ensureClean(dryRun);
    ensureBranch(sourceBranch, dryRun);
    ensureTagAvailable(releaseTag, dryRun);
  } else {
    requireEnv('RELEASE_TAG', releaseTag);
  }
  if (!releaseOnly) {
    if (!useBuildOutput) {
      // Direct mode: push the source branch straight to the public remote.
      ensureRemote(publicRemote, dryRun);
      run(`git push ${publicRemote} ${sourceBranch}:${publicBranch}`, { dryRun });
    } else {
      // Build-output mode: clone the public repo into a temp dir, replace its
      // contents with the build output, commit, and push.
      const tmpBase = path.join(os.tmpdir(), 'evolver-public-publish');
      const tmpRepoDir = path.join(tmpBase, `repo_${Date.now()}`);
      const buildAbs = path.resolve(process.cwd(), outDir);
      rmDir(tmpRepoDir, dryRun);
      ensureDir(tmpRepoDir, dryRun);
      run(`git clone --depth 1 https://github.com/${publicRepo}.git "${tmpRepoDir}"`, { dryRun });
      run(`git -C "${tmpRepoDir}" checkout -B ${publicBranch}`, { dryRun });
      // Replace repo contents with build output (except .git)
      if (!dryRun) {
        const entries = fs.readdirSync(tmpRepoDir, { withFileTypes: true });
        for (const ent of entries) {
          if (ent.name === '.git') continue;
          fs.rmSync(path.join(tmpRepoDir, ent.name), { recursive: true, force: true });
        }
      }
      copyDir(buildAbs, tmpRepoDir, dryRun);
      run(`git -C "${tmpRepoDir}" add -A`, { dryRun });
      const msg = releaseTag ? `Release ${releaseTag}` : `Publish build output`;
      // If build output is identical to current public branch, skip commit/push.
      const pending = run(`git -C "${tmpRepoDir}" status --porcelain`, { dryRun });
      if (!dryRun && !pending) {
        process.stdout.write('Public repo already matches build output. Skipping commit/push.\n');
      } else {
        const contributors = getContributorsSinceLastRelease();
        let commitMsg = msg.replace(/"/g, '\\"');
        if (contributors.length > 0) {
          const trailers = contributors.map(c => `Co-authored-by: ${c}`).join('\n');
          commitMsg += `\n\n${trailers.replace(/"/g, '\\"')}`;
          process.stdout.write(`Including ${contributors.length} contributor(s) in publish commit.\n`);
        }
        run(
          `git -C "${tmpRepoDir}" -c user.name="evolver-publish" -c user.email="evolver-publish@local" commit -m "${commitMsg}"`,
          { dryRun }
        );
        run(`git -C "${tmpRepoDir}" push origin ${publicBranch}`, { dryRun });
      }
      if (releaseTag) {
        const tagMsg = releaseTitle || `Release ${releaseTag}`;
        // If tag already exists in the public repo, do not recreate it.
        try {
          run(`git -C "${tmpRepoDir}" fetch --tags`, { dryRun });
          const exists = run(`git -C "${tmpRepoDir}" tag --list ${releaseTag}`, { dryRun });
          if (!dryRun && exists) {
            process.stdout.write(`Tag ${releaseTag} already exists in public repo. Skipping tag creation.\n`);
          } else {
            run(`git -C "${tmpRepoDir}" tag -a ${releaseTag} -m "${tagMsg.replace(/"/g, '\\"')}"`, { dryRun });
            run(`git -C "${tmpRepoDir}" push origin ${releaseTag}`, { dryRun });
          }
        } catch (e) {
          // If tag operations fail, rethrow to avoid publishing a release without a tag.
          throw e;
        }
      }
    }
    if (releaseTag) {
      if (!useBuildOutput) {
        const msg = releaseTitle || `Release ${releaseTag}`;
        run(`git tag -a ${releaseTag} -m "${msg.replace(/"/g, '\\"')}"`, { dryRun });
        run(`git push ${publicRemote} ${releaseTag}`, { dryRun });
      }
    }
  }
  if (releaseCreate) {
    if (releaseMode === 'gh') {
      createReleaseWithGh({
        repo: publicRepo,
        tag: releaseTag,
        title: releaseTitle,
        notes: releaseNotes,
        notesFile: releaseNotesFile,
        dryRun,
      });
    } else if (releaseMode === 'api') {
      // API path is async; return the Promise so the runner can catch rejection.
      return ensureReleaseWithApi({
        repo: publicRepo,
        tag: releaseTag,
        title: releaseTitle,
        notes: releaseNotes,
        notesFile: releaseNotesFile,
        dryRun,
      });
    }
  }
  // Publish to ClawHub after GitHub release succeeds (default enabled).
  if (clawhubPublish && releaseVersion) {
    process.env.CLAWHUB_REGISTRY = clawhubRegistry;
    const skillDir = useBuildOutput ? path.resolve(process.cwd(), outDir) : process.cwd();
    const changelog = releaseTitle ? `GitHub Release ${releaseTitle}` : `GitHub Release ${releaseTag}`;
    publishToClawhub({
      skillDir,
      slug: 'evolver',
      name: 'Evolver',
      version: releaseVersion,
      changelog,
      tags: 'latest',
      dryRun,
    });
    publishToClawhub({
      skillDir,
      slug: 'capability-evolver',
      name: 'Evolver',
      version: releaseVersion,
      changelog,
      tags: 'latest',
      dryRun,
    });
  }
}
// Runner: main() may return a Promise (API release path). Handle both sync
// throws and async rejections by reporting the message and exiting non-zero.
try {
  const result = main();
  if (result && typeof result.then === 'function') {
    result.catch(err => {
      process.stderr.write(`${err.message}\n`);
      process.exit(1);
    });
  }
} catch (err) {
  process.stderr.write(`${err.message}\n`);
  process.exit(1);
}

61
scripts/recover_loop.js Normal file
View File

@@ -0,0 +1,61 @@
#!/usr/bin/env node
/* eslint-disable no-console */
const fs = require('fs');
const path = require('path');
const { spawnSync } = require('child_process');
/** Safe wrapper around fs.existsSync that never throws. */
function exists(p) {
  try {
    return fs.existsSync(p);
  } catch (e) {
    return false;
  }
}
/**
 * Blocking sleep implemented via the external `sleep` command (rounds up to
 * whole seconds; relies on `sleep` being on PATH). No-op for non-positive or
 * non-numeric input.
 */
function sleepMs(ms) {
  const parsed = Number(ms);
  const clamped = Number.isFinite(parsed) ? Math.max(0, parsed) : 0;
  if (clamped <= 0) return;
  const seconds = Math.ceil(clamped / 1000);
  spawnSync('sleep', [String(seconds)], { stdio: 'ignore' });
}
/**
 * Workspace root resolution. In OpenClaw exec, cwd is usually the workspace
 * root, so it is used directly rather than walking parent directories.
 */
function resolveWorkspaceRoot() {
  return process.cwd();
}
/** Locate the evolver entry script under skills/; first existing candidate, or null. */
function resolveEvolverEntry(workspaceRoot) {
  const entryPaths = [
    path.join(workspaceRoot, 'skills', 'evolver', 'index.js'),
    path.join(workspaceRoot, 'skills', 'capability-evolver', 'index.js'),
  ];
  return entryPaths.find(p => exists(p)) || null;
}
/**
 * Recovery entry point: wait a configurable delay (EVOLVER_RECOVER_WAIT_MS,
 * default 10s), locate the evolver entry script, relaunch it with --loop, and
 * propagate the child's exit status (exit 2 when no entry script is found).
 */
function main() {
  const rawWait = parseInt(String(process.env.EVOLVER_RECOVER_WAIT_MS || '10000'), 10);
  const waitMs = Number.isFinite(rawWait) ? Math.max(0, rawWait) : 10000;
  console.log(`[RECOVERY] Waiting ${waitMs}ms before restart...`);
  sleepMs(waitMs);
  const root = resolveWorkspaceRoot();
  const entry = resolveEvolverEntry(root);
  if (!entry) {
    console.error('[RECOVERY] Failed: cannot locate evolver entry under skills/.');
    process.exit(2);
  }
  console.log(`[RECOVERY] Restarting loop via ${path.relative(root, entry)} ...`);
  const child = spawnSync(process.execPath, [entry, '--loop'], { stdio: 'inherit' });
  process.exit(typeof child.status === 'number' ? child.status : 1);
}
// Only run when executed directly (not when required as a module).
if (require.main === module) {
  main();
}

View File

@@ -0,0 +1,89 @@
const fs = require('fs');
const path = require('path');
const { execSync } = require('child_process');
// Repository root (this script lives in scripts/).
const REPO_ROOT = path.resolve(__dirname, '..');
/** Create `dir` recursively when it does not already exist. */
function ensureDir(dir) {
  if (fs.existsSync(dir)) return;
  fs.mkdirSync(dir, { recursive: true });
}
/** Parse a strict MAJOR.MINOR.PATCH string into numeric parts, or null. */
function parseSemver(v) {
  const match = String(v || '').trim().match(/^(\d+)\.(\d+)\.(\d+)$/);
  if (!match) return null;
  const [, major, minor, patch] = match;
  return { major: Number(major), minor: Number(minor), patch: Number(patch) };
}
/**
 * Bump a semver string by 'major' | 'minor' | 'patch'. Unknown bump kinds
 * return the normalized base version; invalid base returns null.
 */
function bumpSemver(base, bump) {
  const v = parseSemver(base);
  if (!v) return null;
  switch (bump) {
    case 'major':
      return `${v.major + 1}.0.0`;
    case 'minor':
      return `${v.major}.${v.minor + 1}.0`;
    case 'patch':
      return `${v.major}.${v.minor}.${v.patch + 1}`;
    default:
      return `${v.major}.${v.minor}.${v.patch}`;
  }
}
/** Run a git command at the repo root, returning trimmed stdout (stderr suppressed). */
function git(cmd) {
  const out = execSync(cmd, { cwd: REPO_ROOT, encoding: 'utf8', stdio: ['ignore', 'pipe', 'ignore'] });
  return out.trim();
}
/** Hash of the most recent "chore(release): prepare v…" commit, or null. */
function getBaseReleaseCommit() {
  try {
    return git('git log -n 1 --pretty=%H --grep="chore(release): prepare v"') || null;
  } catch (e) {
    return null;
  }
}
/**
 * Commit subjects after `baseCommit` (exclusive); falls back to the latest 30
 * subjects when no base commit is known. Returns [] on git errors.
 */
function getCommitSubjectsSince(baseCommit) {
  try {
    const cmd = baseCommit ? `git log ${baseCommit}..HEAD --pretty=%s` : 'git log -n 30 --pretty=%s';
    const out = git(cmd);
    return out ? out.split('\n').filter(Boolean) : [];
  } catch (e) {
    return [];
  }
}
/**
 * Decide a semver bump from conventional-commit subject lines.
 * Priority: breaking change (major) > feat (minor) > fix/perf (patch) >
 * default patch; an empty subject list means no bump is needed.
 *
 * @param {string[]} subjects - Commit subject lines.
 * @returns {{bump: string, reason: string}}
 */
function inferBumpFromSubjects(subjects) {
  const list = (subjects || []).map((s) => String(s));
  // "type!:" / "type(scope)!:" or an explicit BREAKING CHANGE marker.
  const isBreaking = (s) => /\bBREAKING CHANGE\b/i.test(s) || /^[a-z]+(\(.+\))?!:/.test(s);
  if (list.some(isBreaking)) {
    return { bump: 'major', reason: 'breaking change marker in commit subject' };
  }
  if (list.some((s) => /^feat(\(.+\))?:/i.test(s))) {
    return { bump: 'minor', reason: 'feature commit detected (feat:)' };
  }
  if (list.some((s) => /^(fix|perf)(\(.+\))?:/i.test(s))) {
    return { bump: 'patch', reason: 'fix/perf commit detected' };
  }
  if (list.length === 0) {
    return { bump: 'none', reason: 'no commits since base release commit' };
  }
  return { bump: 'patch', reason: 'default to patch for non-breaking changes' };
}
/**
 * Compute and persist a semver bump suggestion: read the current version
 * from package.json, inspect commit subjects since the last release commit,
 * write the decision to memory/semver_suggestion.json, and echo the same
 * JSON payload to stdout.
 */
function main() {
  const pkgPath = path.join(REPO_ROOT, 'package.json');
  const baseVersion = JSON.parse(fs.readFileSync(pkgPath, 'utf8')).version;
  const baseCommit = getBaseReleaseCommit();
  const subjects = getCommitSubjectsSince(baseCommit);
  const decision = inferBumpFromSubjects(subjects);
  let suggestedVersion = baseVersion;
  if (decision.bump !== 'none') {
    suggestedVersion = bumpSemver(baseVersion, decision.bump);
  }
  const out = { baseVersion, baseCommit, subjects, decision, suggestedVersion };
  const payload = JSON.stringify(out, null, 2) + '\n';
  const memDir = path.join(REPO_ROOT, 'memory');
  ensureDir(memDir);
  fs.writeFileSync(path.join(memDir, 'semver_suggestion.json'), payload, 'utf8');
  process.stdout.write(payload);
}
// Script entry point: run main() and surface any failure as a non-zero exit.
try {
  main();
} catch (e) {
  process.stderr.write(`${e.message}\n`);
  process.exit(1);
}

View File

@@ -0,0 +1,38 @@
// Usage: node scripts/validate-modules.js ./src/evolve ./src/gep/solidify
// Validates that each module (1) loads without errors and (2) exports
// something meaningful (non-null, and non-empty when it is an object).
//
// NOTE(review): an earlier revision also claimed to verify "exported
// functions are callable", but that check was a tautology — it nested
// `typeof exported[k] !== 'function'` inside a `typeof exported[k] ===
// 'function'` guard, so the failure branch was unreachable dead code.
// The vacuous loop has been removed; observable behavior is unchanged.
const path = require('path');
const modules = process.argv.slice(2);
if (!modules.length) { console.error('No modules specified'); process.exit(1); }
let checked = 0;
for (const m of modules) {
  // (1) require() throws — and the script exits non-zero — if the module
  // cannot be loaded (syntax error, missing dependency, bad path).
  const resolved = path.resolve(m);
  const exported = require(resolved);
  // (2a) A module exporting nothing at all is considered broken.
  if (exported === undefined || exported === null) {
    console.error('FAIL: ' + m + ' exports null/undefined');
    process.exit(1);
  }
  // (2b) An empty object export means no public API was attached.
  const t = typeof exported;
  if (t === 'object' && Object.keys(exported).length === 0) {
    console.error('FAIL: ' + m + ' exports an empty object (no public API)');
    process.exit(1);
  }
  checked++;
}
console.log('ok: ' + checked + ' module(s) validated');

51
scripts/validate-suite.js Normal file
View File

@@ -0,0 +1,51 @@
// Usage: node scripts/validate-suite.js [test-glob-pattern]
// Runs the project's test suite via `node --test` and fails when any test
// fails (or when no tests match the pattern at all).
//
// Without arguments it runs every test under test/; with a glob pattern it
// runs only the matching files. Intended as a Gene validation command: it
// gives stronger assurance than validate-modules.js, which only checks that
// modules can be loaded.
const { execSync } = require('child_process');
const path = require('path');

const pattern = process.argv[2] || 'test/**/*.test.js';
const repoRoot = process.cwd();

try {
  // Capture the runner's TAP output; a 2-minute timeout guards against hangs.
  const raw = execSync(`node --test ${pattern}`, {
    cwd: repoRoot,
    stdio: ['pipe', 'pipe', 'pipe'],
    timeout: 120000,
    env: Object.assign({}, process.env, {
      NODE_ENV: 'test',
      EVOLVER_REPO_ROOT: repoRoot,
      GEP_ASSETS_DIR: path.join(repoRoot, 'assets', 'gep'),
    }),
  });
  const report = raw.toString('utf8');
  // TAP summary lines look like "# pass 12" / "# fail 0".
  const passMatch = report.match(/# pass (\d+)/);
  const failMatch = report.match(/# fail (\d+)/);
  const passCount = passMatch ? Number(passMatch[1]) : 0;
  const failCount = failMatch ? Number(failMatch[1]) : 0;
  if (failCount > 0) {
    console.error('FAIL: ' + failCount + ' test(s) failed');
    process.exit(1);
  }
  if (passCount === 0) {
    console.error('FAIL: no tests found matching pattern: ' + pattern);
    process.exit(1);
  }
  console.log('ok: ' + passCount + ' test(s) passed, 0 failed');
} catch (e) {
  // execSync throws on non-zero exit or timeout; surface the output tail.
  const stderr = e.stderr ? e.stderr.toString('utf8').slice(-500) : '';
  const stdout = e.stdout ? e.stdout.toString('utf8').slice(-500) : '';
  console.error('FAIL: test suite exited with code ' + (e.status || 'unknown'));
  if (stderr) console.error(stderr);
  if (stdout) console.error(stdout);
  process.exit(1);
}