feat: add OpenCode/Codex outputs and update changelog (#104)
* Add OpenCode converter coverage and specs * Add Codex target support and spec docs * Generate Codex command skills and refresh spec docs * Add global Codex install path * fix: harden plugin path loading and codex descriptions * feat: ensure codex agents block on convert/install * docs: clarify target branch usage for review * chore: prep npm package metadata and release notes * docs: mention opencode and codex in changelog * docs: update CLI usage and remove stale todos * feat: install from GitHub with global outputs
This commit is contained in:
64
src/utils/codex-agents.ts
Normal file
64
src/utils/codex-agents.ts
Normal file
@@ -0,0 +1,64 @@
|
||||
import path from "path"
|
||||
import { ensureDir, pathExists, readText, writeText } from "./files"
|
||||
|
||||
export const CODEX_AGENTS_BLOCK_START = "<!-- BEGIN COMPOUND CODEX TOOL MAP -->"
|
||||
export const CODEX_AGENTS_BLOCK_END = "<!-- END COMPOUND CODEX TOOL MAP -->"
|
||||
|
||||
const CODEX_AGENTS_BLOCK_BODY = `## Compound Codex Tool Mapping (Claude Compatibility)
|
||||
|
||||
This section maps Claude Code plugin tool references to Codex behavior.
|
||||
Only this block is managed automatically.
|
||||
|
||||
Tool mapping:
|
||||
- Read: use shell reads (cat/sed) or rg
|
||||
- Write: create files via shell redirection or apply_patch
|
||||
- Edit/MultiEdit: use apply_patch
|
||||
- Bash: use shell_command
|
||||
- Grep: use rg (fallback: grep)
|
||||
- Glob: use rg --files or find
|
||||
- LS: use ls via shell_command
|
||||
- WebFetch/WebSearch: use curl or Context7 for library docs
|
||||
- AskUserQuestion/Question: ask the user in chat
|
||||
- Task/Subagent/Parallel: run sequentially in main thread; use multi_tool_use.parallel for tool calls
|
||||
- TodoWrite/TodoRead: use file-based todos in todos/ with file-todos skill
|
||||
- Skill: open the referenced SKILL.md and follow it
|
||||
- ExitPlanMode: ignore
|
||||
`
|
||||
|
||||
export async function ensureCodexAgentsFile(codexHome: string): Promise<void> {
|
||||
await ensureDir(codexHome)
|
||||
const filePath = path.join(codexHome, "AGENTS.md")
|
||||
const block = buildCodexAgentsBlock()
|
||||
|
||||
if (!(await pathExists(filePath))) {
|
||||
await writeText(filePath, block + "\n")
|
||||
return
|
||||
}
|
||||
|
||||
const existing = await readText(filePath)
|
||||
const updated = upsertBlock(existing, block)
|
||||
if (updated !== existing) {
|
||||
await writeText(filePath, updated)
|
||||
}
|
||||
}
|
||||
|
||||
function buildCodexAgentsBlock(): string {
|
||||
return [CODEX_AGENTS_BLOCK_START, CODEX_AGENTS_BLOCK_BODY.trim(), CODEX_AGENTS_BLOCK_END].join("\n")
|
||||
}
|
||||
|
||||
function upsertBlock(existing: string, block: string): string {
|
||||
const startIndex = existing.indexOf(CODEX_AGENTS_BLOCK_START)
|
||||
const endIndex = existing.indexOf(CODEX_AGENTS_BLOCK_END)
|
||||
|
||||
if (startIndex !== -1 && endIndex !== -1 && endIndex > startIndex) {
|
||||
const before = existing.slice(0, startIndex).trimEnd()
|
||||
const after = existing.slice(endIndex + CODEX_AGENTS_BLOCK_END.length).trimStart()
|
||||
return [before, block, after].filter(Boolean).join("\n\n") + "\n"
|
||||
}
|
||||
|
||||
if (existing.trim().length === 0) {
|
||||
return block + "\n"
|
||||
}
|
||||
|
||||
return existing.trimEnd() + "\n\n" + block + "\n"
|
||||
}
|
||||
64
src/utils/files.ts
Normal file
64
src/utils/files.ts
Normal file
@@ -0,0 +1,64 @@
|
||||
import { promises as fs } from "fs"
|
||||
import path from "path"
|
||||
|
||||
export async function pathExists(filePath: string): Promise<boolean> {
|
||||
try {
|
||||
await fs.access(filePath)
|
||||
return true
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
export async function ensureDir(dirPath: string): Promise<void> {
|
||||
await fs.mkdir(dirPath, { recursive: true })
|
||||
}
|
||||
|
||||
export async function readText(filePath: string): Promise<string> {
|
||||
return fs.readFile(filePath, "utf8")
|
||||
}
|
||||
|
||||
export async function readJson<T>(filePath: string): Promise<T> {
|
||||
const raw = await readText(filePath)
|
||||
return JSON.parse(raw) as T
|
||||
}
|
||||
|
||||
export async function writeText(filePath: string, content: string): Promise<void> {
|
||||
await ensureDir(path.dirname(filePath))
|
||||
await fs.writeFile(filePath, content, "utf8")
|
||||
}
|
||||
|
||||
export async function writeJson(filePath: string, data: unknown): Promise<void> {
|
||||
const content = JSON.stringify(data, null, 2)
|
||||
await writeText(filePath, content + "\n")
|
||||
}
|
||||
|
||||
export async function walkFiles(root: string): Promise<string[]> {
|
||||
const entries = await fs.readdir(root, { withFileTypes: true })
|
||||
const results: string[] = []
|
||||
for (const entry of entries) {
|
||||
const fullPath = path.join(root, entry.name)
|
||||
if (entry.isDirectory()) {
|
||||
const nested = await walkFiles(fullPath)
|
||||
results.push(...nested)
|
||||
} else if (entry.isFile()) {
|
||||
results.push(fullPath)
|
||||
}
|
||||
}
|
||||
return results
|
||||
}
|
||||
|
||||
export async function copyDir(sourceDir: string, targetDir: string): Promise<void> {
|
||||
await ensureDir(targetDir)
|
||||
const entries = await fs.readdir(sourceDir, { withFileTypes: true })
|
||||
for (const entry of entries) {
|
||||
const sourcePath = path.join(sourceDir, entry.name)
|
||||
const targetPath = path.join(targetDir, entry.name)
|
||||
if (entry.isDirectory()) {
|
||||
await copyDir(sourcePath, targetPath)
|
||||
} else if (entry.isFile()) {
|
||||
await ensureDir(path.dirname(targetPath))
|
||||
await fs.copyFile(sourcePath, targetPath)
|
||||
}
|
||||
}
|
||||
}
|
||||
65
src/utils/frontmatter.ts
Normal file
65
src/utils/frontmatter.ts
Normal file
@@ -0,0 +1,65 @@
|
||||
import { load } from "js-yaml"
|
||||
|
||||
// Result of splitting a markdown document into YAML frontmatter and body.
export type FrontmatterResult = {
  // Parsed frontmatter mapping; {} when no frontmatter was found or the YAML
  // did not parse to an object (see parseFrontmatter).
  data: Record<string, unknown>
  // Document text after the closing "---" fence, or the whole input when
  // no frontmatter is present.
  body: string
}
|
||||
|
||||
export function parseFrontmatter(raw: string): FrontmatterResult {
|
||||
const lines = raw.split(/\r?\n/)
|
||||
if (lines.length === 0 || lines[0].trim() !== "---") {
|
||||
return { data: {}, body: raw }
|
||||
}
|
||||
|
||||
let endIndex = -1
|
||||
for (let i = 1; i < lines.length; i += 1) {
|
||||
if (lines[i].trim() === "---") {
|
||||
endIndex = i
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if (endIndex === -1) {
|
||||
return { data: {}, body: raw }
|
||||
}
|
||||
|
||||
const yamlText = lines.slice(1, endIndex).join("\n")
|
||||
const body = lines.slice(endIndex + 1).join("\n")
|
||||
const parsed = load(yamlText)
|
||||
const data = (parsed && typeof parsed === "object") ? (parsed as Record<string, unknown>) : {}
|
||||
return { data, body }
|
||||
}
|
||||
|
||||
export function formatFrontmatter(data: Record<string, unknown>, body: string): string {
|
||||
const yaml = Object.entries(data)
|
||||
.filter(([, value]) => value !== undefined)
|
||||
.map(([key, value]) => formatYamlLine(key, value))
|
||||
.join("\n")
|
||||
|
||||
if (yaml.trim().length === 0) {
|
||||
return body
|
||||
}
|
||||
|
||||
return [`---`, yaml, `---`, "", body].join("\n")
|
||||
}
|
||||
|
||||
function formatYamlLine(key: string, value: unknown): string {
|
||||
if (Array.isArray(value)) {
|
||||
const items = value.map((item) => ` - ${formatYamlValue(item)}`)
|
||||
return [key + ":", ...items].join("\n")
|
||||
}
|
||||
return `${key}: ${formatYamlValue(value)}`
|
||||
}
|
||||
|
||||
function formatYamlValue(value: unknown): string {
|
||||
if (value === null || value === undefined) return ""
|
||||
if (typeof value === "number" || typeof value === "boolean") return String(value)
|
||||
const raw = String(value)
|
||||
if (raw.includes("\n")) {
|
||||
return `|\n${raw.split("\n").map((line) => ` ${line}`).join("\n")}`
|
||||
}
|
||||
if (raw.includes(":") || raw.startsWith("[") || raw.startsWith("{")) {
|
||||
return JSON.stringify(raw)
|
||||
}
|
||||
return raw
|
||||
}
|
||||
Reference in New Issue
Block a user