Synced 79 commits from EveryInc/compound-engineering-plugin upstream while
preserving fork-specific customizations (Python/FastAPI pivot, Zoominfo-internal
review agents, deploy-wiring operational lessons, custom personas).
## Triage decisions (15 conflicts resolved)
Keep deleted (7) -- fork already removed these in prior cleanups:
- agents/design/{design-implementation-reviewer,design-iterator,figma-design-sync}
(no fork successor; backend-Python focus doesn't need UI/Figma agents)
- agents/docs/ankane-readme-writer (replaced by python-package-readme-writer)
- agents/review/{data-migration-expert,performance-oracle,security-sentinel}
(replaced by *-reviewer naming convention: data-migrations-reviewer,
performance-reviewer, security-reviewer)
Keep local (1):
- agents/workflow/lint.md (Python tooling: ruff/mypy/djlint/bandit; upstream
deleted the file). Fixed pre-existing duplicate "2." numbering bug.
Restore from upstream (1):
- agents/review/data-integrity-guardian.md (kept for GDPR/CCPA privacy
compliance angle not covered by data-migrations-reviewer)
Merge both (6) -- upstream structural wins layered with fork intent:
- agents/research/best-practices-researcher.md (upstream <examples> removal +
fork's Rails/Ruby -> Python/FastAPI translations)
- skills/ce-brainstorm/SKILL.md (universal-brainstorming routing + Slack
context + non-obvious angles + fork's Deploy wiring flag)
- skills/ce-plan/SKILL.md (universal-planning routing + planning-bootstrap +
fork's two Deploy wiring check bullets)
- skills/ce-review/SKILL.md (Run ID, model tiering haiku->sonnet, compact-JSON
artifact contract, file-type awareness, cli-readiness-reviewer + fork's
zip-agent-validator, design-conformance-reviewer, Stage 6 Zip Agent
Validation)
- skills/ce-review/references/persona-catalog.md (cli-readiness row + adversarial
refinement + fork's Language & Framework Conditional layer; 22 personas total)
- skills/ce-work/SKILL.md (Parallel Safety Check, parallel-subagent constraints,
Phase 3-4 compression + fork's deploy-values self-review row, with duplicate
checklist bullet collapsed to single occurrence)
## Auto-applied (no triage needed)
- 225 remote-only files: accepted as-is (new docs, brainstorms, plans,
upstream skills, tests, scripts)
- 70 local-only files: 46 preserved as-is (kieran-python, tiangolo-fastapi,
zip-agent-validator, design-conformance-reviewer, essay/proof commands,
excalidraw-png-export, etc.); 24 stayed deleted (dhh-rails-style,
andrew-kane-gem-writer, dspy-ruby Ruby skills no longer needed)
## README updated
- Removed Design section (3 deleted agents)
- Removed deleted Review entries (data-migration-expert, dhh-rails-reviewer,
kieran-rails-reviewer, performance-oracle, security-sentinel)
- Added new Review entries: design-conformance-reviewer, previous-comments-reviewer,
tiangolo-fastapi-reviewer, zip-agent-validator
- Workflow: added lint
- Docs: replaced ankane-readme-writer with python-package-readme-writer
## Known issues (not introduced by merge decisions)
- 9 detect-project-type.sh tests fail on macOS bash 3.2 (script uses
`declare -A` which requires bash 4+). Upstream regression in commit 070092d
(#568). Resolution: install bash 4+ via `brew install bash` locally;
upstream fix tracked separately.
- 2 review-skill-contract tests reference deleted agents (dhh-rails-reviewer,
data-migration-expert). Pre-existing fork inconsistency, not new.
bun run release:validate: passes (46 agents, 51 skills, 0 MCP servers)
179 lines
5.5 KiB
JavaScript
Executable File
#!/usr/bin/env node
|
|
/**
|
|
* Convert raw Excalidraw MCP checkpoint JSON into a valid .excalidraw file.
|
|
* Filters pseudo-elements, adds required defaults, expands labels into bound text.
|
|
*/
|
|
import { readFileSync, writeFileSync } from 'fs';
|
|
import { dirname, join } from 'path';
|
|
import { fileURLToPath } from 'url';
|
|
import { createRequire } from 'module';
|
|
|
|
// ESM has no built-in __dirname; reconstruct it from this module's URL.
const __dirname = dirname(fileURLToPath(import.meta.url));

// CommonJS require anchored at the sibling .export-runtime directory, so the
// optional native dependency (canvas) resolves from that runtime's
// node_modules rather than this script's location.
// NOTE(review): assumes .export-runtime/package.json exists next to this
// script when canvas measurement is wanted — confirm against the installer.
const runtimeRequire = createRequire(join(__dirname, '.export-runtime', 'package.json'));
|
|
|
|
// Canvas-based text measurement with graceful fallback to heuristic.
// Excalidraw renders with Virgil (hand-drawn font); system sans-serif
// is a reasonable proxy. The 1.1x multiplier accounts for Virgil being wider.
let measureText;
try {
  const { createCanvas } = runtimeRequire('canvas');
  const ctx = createCanvas(1, 1).getContext('2d');
  // Accurate path: ask the 2D context to measure each line.
  measureText = (text, fontSize) => {
    ctx.font = `${fontSize}px sans-serif`;
    const lines = text.split('\n');
    let widest = 0;
    for (const line of lines) {
      const lineWidth = ctx.measureText(line).width * 1.1;
      if (lineWidth > widest) widest = lineWidth;
    }
    return {
      width: widest,
      height: lines.length * (fontSize * 1.25),
    };
  };
} catch {
  console.warn('WARN: canvas not available, using heuristic text sizing (install canvas for accurate measurement)');
  // Heuristic path: approximate glyph width as 0.55 * fontSize per character.
  measureText = (text, fontSize) => {
    const lines = text.split('\n');
    let longest = 0;
    for (const line of lines) {
      if (line.length > longest) longest = line.length;
    }
    return {
      width: longest * fontSize * 0.55,
      height: lines.length * (fontSize + 4),
    };
  };
}
|
|
|
|
// CLI contract: node convert.mjs <input.json> <output.excalidraw>
const inputFile = process.argv[2];
const outputFile = process.argv[3];
if (!(inputFile && outputFile)) {
  console.error('Usage: node convert.mjs <input.json> <output.excalidraw>');
  process.exit(1);
}
|
|
|
|
// Parse the checkpoint. Accept either a wrapper object ({ elements: [...] })
// or a bare array of elements.
const raw = JSON.parse(readFileSync(inputFile, 'utf8'));
const elements = raw.elements || raw;
|
|
|
|
// Monotonically increasing counter used for `seed` and `versionNonce`;
// Excalidraw only needs unique-ish integers here, so a counter suffices.
let seed = 1000;
const nextSeed = () => {
  const current = seed;
  seed += 1;
  return current;
};

// Accumulator for the converted, schema-valid elements.
const processed = [];
|
|
|
|
// Translate each raw MCP element into a schema-valid Excalidraw element.
// A `label` on an arrow or shape is expanded into a separate text element
// bound to its host via containerId/boundElements.
for (const el of elements) {
  // MCP pseudo-elements carry no drawable content — drop them.
  if (['cameraUpdate', 'delete', 'restoreCheckpoint'].includes(el.type)) continue;

  // Defaults required by the .excalidraw schema; explicit values on the raw
  // element win. `??` (not `||`) where 0/false are meaningful values.
  const base = {
    angle: 0,
    roughness: 1,
    opacity: el.opacity ?? 100,
    groupIds: [],
    seed: nextSeed(),
    version: 1,
    versionNonce: nextSeed(),
    isDeleted: false,
    boundElements: null,
    link: null,
    locked: false,
    strokeColor: el.strokeColor || '#1e1e1e',
    backgroundColor: el.backgroundColor || 'transparent',
    fillStyle: el.fillStyle || 'solid',
    strokeWidth: el.strokeWidth ?? 2,
    strokeStyle: el.strokeStyle || 'solid',
  };

  if (el.type === 'text') {
    // Free-standing text: size its box from the measured text bounds.
    const fontSize = el.fontSize || 16;
    const measured = measureText(el.text, fontSize);
    processed.push({
      ...base,
      type: 'text',
      id: el.id,
      x: el.x,
      y: el.y,
      width: measured.width,
      height: measured.height,
      text: el.text,
      fontSize, fontFamily: 1,
      textAlign: 'left',
      verticalAlign: 'top',
      baseline: fontSize,
      containerId: null,
      originalText: el.text,
    });
  } else if (el.type === 'arrow') {
    // Default to a degenerate single-point line when no points are given.
    const points = el.points || [[0, 0]];
    const arrowEl = {
      ...base,
      type: 'arrow',
      id: el.id,
      x: el.x,
      y: el.y,
      width: el.width || 0,
      height: el.height || 0,
      points,
      startArrowhead: el.startArrowhead || null,
      endArrowhead: el.endArrowhead ?? 'arrow',
      startBinding: el.startBinding ? { ...el.startBinding, focus: 0, gap: 5 } : null,
      endBinding: el.endBinding ? { ...el.endBinding, focus: 0, gap: 5 } : null,
      roundness: { type: 2 },
      boundElements: [],
    };
    processed.push(arrowEl);

    if (el.label) {
      const labelId = el.id + '_label';
      const text = el.label.text || '';
      const fontSize = el.label.fontSize || 14;
      const { width: w, height: h } = measureText(text, fontSize);
      // BUGFIX: read the defaulted `points`, not `el.points` — a labeled
      // arrow without points previously threw a TypeError here.
      const midPt = points[Math.floor(points.length / 2)] || [0, 0];

      // Center the label on the arrow's midpoint, nudged 12px upward.
      processed.push({
        ...base,
        type: 'text', id: labelId,
        x: el.x + midPt[0] - w / 2,
        y: el.y + midPt[1] - h / 2 - 12,
        width: w, height: h,
        text, fontSize, fontFamily: 1,
        textAlign: 'center', verticalAlign: 'middle',
        baseline: fontSize, containerId: el.id, originalText: text,
        strokeColor: el.strokeColor || '#1e1e1e',
        backgroundColor: 'transparent',
      });
      arrowEl.boundElements = [{ id: labelId, type: 'text' }];
    }
  } else if (['rectangle', 'ellipse', 'diamond'].includes(el.type)) {
    const shapeEl = {
      ...base,
      type: el.type, id: el.id,
      x: el.x, y: el.y, width: el.width, height: el.height,
      roundness: el.roundness || null,
      boundElements: [],
    };
    processed.push(shapeEl);

    if (el.label) {
      const labelId = el.id + '_label';
      const text = el.label.text || '';
      const fontSize = el.label.fontSize || 16;
      const { width: w, height: h } = measureText(text, fontSize);

      // Center the label inside the shape's bounding box.
      processed.push({
        ...base,
        type: 'text', id: labelId,
        x: el.x + (el.width - w) / 2,
        y: el.y + (el.height - h) / 2,
        width: w, height: h,
        text, fontSize, fontFamily: 1,
        textAlign: 'center', verticalAlign: 'middle',
        baseline: fontSize, containerId: el.id, originalText: text,
        strokeColor: el.strokeColor || '#1e1e1e',
        backgroundColor: 'transparent',
      });
      shapeEl.boundElements = [{ id: labelId, type: 'text' }];
    }
  }
}
|
|
|
|
// Assemble the final .excalidraw document and write it pretty-printed.
const outputDoc = {
  type: 'excalidraw', version: 2, source: 'claude-code',
  elements: processed,
  appState: { exportBackground: true, viewBackgroundColor: '#ffffff' },
  files: {},
};
writeFileSync(outputFile, JSON.stringify(outputDoc, null, 2));

console.log(`Wrote ${processed.length} elements to ${outputFile}`);
|