diff --git a/src/commands/triage.ts b/src/commands/triage.ts
new file mode 100644
index 0000000..029c683
--- /dev/null
+++ b/src/commands/triage.ts
@@ -0,0 +1,90 @@
+import type { Command } from "commander";
+import { findSeedsDir } from "../config.ts";
+import { computeMetrics } from "../graph.ts";
+import { accent, brand, muted, outputJson, printIssueOneLine } from "../output.ts";
+import { readIssues } from "../store.ts";
+import type { Issue } from "../types.ts";
+
+interface TriageEntry {
+  id: string;
+  title: string;
+  status: Issue["status"];
+  priority: number;
+  pagerank: number;
+  betweenness: number;
+  criticalPathLength: number;
+  score: number;
+}
+
+export async function run(
+  opts: { json?: boolean; limit?: string },
+  seedsDir?: string,
+): Promise<void> {
+  const jsonMode = opts.json === true;
+  const limit = opts.limit !== undefined ? Number(opts.limit) : 0;
+
+  const dir = seedsDir ?? (await findSeedsDir());
+  const issues = await readIssues(dir);
+
+  // Only open issues participate — same eligibility as `sd ready`
+  const closedIds = new Set(issues.filter((i) => i.status === "closed").map((i) => i.id));
+  const openIssues = issues.filter((i) => i.status !== "closed");
+
+  // Compute metrics across all open issues (blocked ones inform the graph)
+  const metrics = computeMetrics(openIssues);
+
+  // Ready = open + all blockers closed
+  const ready = openIssues.filter((i) => (i.blockedBy ?? []).every((bid) => closedIds.has(bid)));
+
+  // Sort ready issues by composite score descending
+  const ranked = ready
+    .map((issue): TriageEntry => {
+      const m = metrics.get(issue.id);
+      return {
+        id: issue.id,
+        title: issue.title,
+        status: issue.status,
+        priority: issue.priority,
+        pagerank: m?.pagerank ?? 0,
+        betweenness: m?.betweenness ?? 0,
+        criticalPathLength: m?.criticalPathLength ?? 0,
+        score: m?.score ?? 0,
+      };
+    })
+    .sort((a, b) => b.score - a.score || a.priority - b.priority);
+
+  const output = limit > 0 ?
ranked.slice(0, limit) : ranked;
+
+  if (jsonMode) {
+    outputJson({ success: true, command: "triage", issues: output, count: output.length });
+    return;
+  }
+
+  if (output.length === 0) {
+    console.log(muted("No ready issues."));
+    return;
+  }
+
+  for (const entry of output) {
+    const issue = issues.find((i) => i.id === entry.id);
+    if (issue) {
+      printIssueOneLine(issue);
+      const scoreStr = brand(`${(entry.score * 100).toFixed(0)}pts`);
+      const cpStr = entry.criticalPathLength > 0 ? ` · cp:${entry.criticalPathLength}` : "";
+      const bStr = entry.betweenness > 0.01 ? ` · btw:${entry.betweenness.toFixed(2)}` : "";
+      console.log(`  ${muted("score:")} ${scoreStr}${cpStr}${bStr}`);
+    }
+  }
+  console.log(`\n${accent(`${output.length} ready issue(s)`)} ${muted("(ranked by graph score)")}`);
+}
+
+export function register(program: Command): void {
+  program
+    .command("triage")
+    .description("Ready issues ranked by graph score (PageRank + betweenness + critical path)")
+    .option("--json", "Output as JSON")
+    .option("--limit <n>", "Return top N issues only")
+    .action(async (opts: { json?: boolean; limit?: string }) => {
+      await run(opts);
+    });
+}
diff --git a/src/graph.test.ts b/src/graph.test.ts
new file mode 100644
index 0000000..6ed0ded
--- /dev/null
+++ b/src/graph.test.ts
@@ -0,0 +1,88 @@
+import { describe, expect, it } from "bun:test";
+import { computeMetrics } from "./graph.ts";
+import type { Issue } from "./types.ts";
+
+function makeIssue(id: string, blocks: string[] = [], blockedBy: string[] = []): Issue {
+  return {
+    id,
+    title: id,
+    status: "open",
+    type: "task",
+    priority: 2,
+    blocks,
+    blockedBy,
+    createdAt: new Date().toISOString(),
+    updatedAt: new Date().toISOString(),
+  };
+}
+
+describe("computeMetrics", () => {
+  it("returns empty map for no issues", () => {
+    const m = computeMetrics([]);
+    expect(m.size).toBe(0);
+  });
+
+  it("returns metrics for a single issue", () => {
+    const m = computeMetrics([makeIssue("a")]);
+    expect(m.has("a")).toBe(true);
+    const entry = m.get("a");
+    expect(entry).toBeDefined();
+    expect(entry?.criticalPathLength).toBe(0);
+    expect(entry?.score).toBeGreaterThanOrEqual(0);
+  });
+
+  it("critical path: linear chain a→b→c gives a=2, b=1, c=0", () => {
+    // a blocks b, b blocks c
+    const a = makeIssue("a", ["b"], []);
+    const b = makeIssue("b", ["c"], ["a"]);
+    const c = makeIssue("c", [], ["b"]);
+    const m = computeMetrics([a, b, c]);
+    expect(m.get("a")?.criticalPathLength).toBe(2);
+    expect(m.get("b")?.criticalPathLength).toBe(1);
+    expect(m.get("c")?.criticalPathLength).toBe(0);
+  });
+
+  it("bottleneck ranks higher than leaf in a diamond graph", () => {
+    // a→b, a→c, b→d, c→d — a and d are the bottleneck/sink
+    const a = makeIssue("a", ["b", "c"], []);
+    const b = makeIssue("b", ["d"], ["a"]);
+    const c = makeIssue("c", ["d"], ["a"]);
+    const d = makeIssue("d", [], ["b", "c"]);
+    const m = computeMetrics([a, b, c, d]);
+
+    // a has the longest critical path (2: a→b→d or a→c→d) and blocks the most
+    expect(m.get("a")?.criticalPathLength).toBe(2);
+    // b and c are equivalent leaves with cp=1
+    expect(m.get("b")?.criticalPathLength).toBe(1);
+    expect(m.get("c")?.criticalPathLength).toBe(1);
+    // d is a terminal with cp=0
+    expect(m.get("d")?.criticalPathLength).toBe(0);
+
+    // a should score higher than b or c (it unblocks more work)
+    expect(m.get("a")?.score).toBeGreaterThan(m.get("b")?.score ?? 0);
+    expect(m.get("a")?.score).toBeGreaterThan(m.get("c")?.score ??
0); + }); + + it("scores are in [0,1] range", () => { + const issues = [ + makeIssue("a", ["b", "c"]), + makeIssue("b", ["d"], ["a"]), + makeIssue("c", ["d"], ["a"]), + makeIssue("d", [], ["b", "c"]), + ]; + const m = computeMetrics(issues); + for (const [, entry] of m) { + expect(entry.score).toBeGreaterThanOrEqual(0); + expect(entry.score).toBeLessThanOrEqual(1); + } + }); + + it("ignores edges to issues not in the set", () => { + // b references external id "z" that isn't in the list + const a = makeIssue("a", ["b"]); + const b = makeIssue("b", ["z"], ["a"]); + expect(() => computeMetrics([a, b])).not.toThrow(); + const m = computeMetrics([a, b]); + expect(m.get("b")?.criticalPathLength).toBe(0); // z filtered out + }); +}); diff --git a/src/graph.ts b/src/graph.ts new file mode 100644 index 0000000..daa7e3a --- /dev/null +++ b/src/graph.ts @@ -0,0 +1,228 @@ +/** + * Graph algorithms for issue dependency analysis. + * + * All algorithms operate on the directed graph where an edge A → B means + * "A blocks B" (B depends on A). PageRank and betweenness run on the full + * graph of open issues; critical path is computed forward from each node. + */ + +import type { Issue } from "./types.ts"; + +export interface GraphMetrics { + pagerank: number; + betweenness: number; + criticalPathLength: number; + score: number; +} + +export type IssueMetrics = Map; + +/** + * Build adjacency from issues. Returns: + * blocksMap: id → ids it blocks (outgoing edges A → B: A blocks B) + * blockedByMap: id → ids blocking it (incoming edges) + */ +function buildAdjacency(issues: Issue[]): { + blocksMap: Map; + blockedByMap: Map; + ids: string[]; +} { + const ids = issues.map((i) => i.id); + const idSet = new Set(ids); + const blocksMap = new Map(); + const blockedByMap = new Map(); + + for (const id of ids) { + blocksMap.set(id, []); + blockedByMap.set(id, []); + } + + for (const issue of issues) { + for (const blocked of issue.blocks ?? 
[]) {
+      if (!idSet.has(blocked)) continue;
+      blocksMap.get(issue.id)?.push(blocked);
+      blockedByMap.get(blocked)?.push(issue.id);
+    }
+  }
+
+  return { blocksMap, blockedByMap, ids };
+}
+
+/**
+ * PageRank on the reversed graph: authority flows from dependents back to
+ * their blockers. A node that blocks many important things accumulates higher
+ * rank — exactly the nodes you want to start first.
+ *
+ * Reversed graph edge (u → v) means v blocks u (v is needed before u).
+ * In-neighbours of v in the reversed graph = blocks(v).
+ * Out-degree of u in the reversed graph = len(blockedBy(u)).
+ *
+ * Converges after ~50 iterations at d=0.85.
+ */
+function computePageRank(
+  ids: string[],
+  blocksMap: Map<string, string[]>,
+  blockedByMap: Map<string, string[]>,
+  iterations = 50,
+  damping = 0.85,
+): Map<string, number> {
+  const n = ids.length;
+  if (n === 0) return new Map();
+
+  const rank = new Map<string, number>();
+  const initial = 1 / n;
+  for (const id of ids) rank.set(id, initial);
+
+  for (let iter = 0; iter < iterations; iter++) {
+    const next = new Map<string, number>();
+    for (const id of ids) {
+      // In the reversed graph, in-neighbours of v are the nodes v blocks.
+      // Each such neighbour u contributes rank(u) / out_degree_reversed(u),
+      // where out_degree_reversed(u) = len(blockedBy(u)).
+      let incoming = 0;
+      for (const dep of blocksMap.get(id) ?? []) {
+        const depReversedOut = blockedByMap.get(dep)?.length ?? 0;
+        if (depReversedOut > 0) {
+          incoming += (rank.get(dep) ?? 0) / depReversedOut;
+        }
+      }
+      next.set(id, (1 - damping) / n + damping * incoming);
+    }
+    for (const [id, r] of next) rank.set(id, r);
+  }
+
+  return rank;
+}
+
+/**
+ * Betweenness centrality via Brandes algorithm (unweighted).
+ * High betweenness = many shortest paths pass through this node,
+ * meaning it's a coordination bottleneck.
+ */
+function computeBetweenness(ids: string[], blocksMap: Map<string, string[]>): Map<string, number> {
+  const betweenness = new Map<string, number>();
+  for (const id of ids) betweenness.set(id, 0);
+
+  for (const source of ids) {
+    const stack: string[] = [];
+    const predecessors = new Map<string, string[]>();
+    const sigma = new Map<string, number>();
+    const dist = new Map<string, number>();
+
+    for (const id of ids) {
+      predecessors.set(id, []);
+      sigma.set(id, 0);
+      dist.set(id, -1);
+    }
+
+    sigma.set(source, 1);
+    dist.set(source, 0);
+
+    const queue: string[] = [source];
+    while (queue.length > 0) {
+      const v = queue.shift();
+      if (v === undefined) break;
+      stack.push(v);
+      for (const w of blocksMap.get(v) ?? []) {
+        if (dist.get(w) === -1) {
+          queue.push(w);
+          dist.set(w, (dist.get(v) ?? 0) + 1);
+        }
+        if (dist.get(w) === (dist.get(v) ?? 0) + 1) {
+          sigma.set(w, (sigma.get(w) ?? 0) + (sigma.get(v) ?? 0));
+          predecessors.get(w)?.push(v);
+        }
+      }
+    }
+
+    const delta = new Map<string, number>();
+    for (const id of ids) delta.set(id, 0);
+
+    while (stack.length > 0) {
+      const w = stack.pop();
+      if (w === undefined) break;
+      for (const v of predecessors.get(w) ?? []) {
+        const contribution =
+          ((sigma.get(v) ?? 0) / (sigma.get(w) ?? 1)) * (1 + (delta.get(w) ?? 0));
+        delta.set(v, (delta.get(v) ?? 0) + contribution);
+      }
+      if (w !== source) {
+        betweenness.set(w, (betweenness.get(w) ?? 0) + (delta.get(w) ?? 0));
+      }
+    }
+  }
+
+  // Normalize by (n-1)(n-2) for directed graphs
+  const n = ids.length;
+  const norm = n > 2 ? (n - 1) * (n - 2) : 1;
+  for (const [id, b] of betweenness) {
+    betweenness.set(id, b / norm);
+  }
+
+  return betweenness;
+}
+
+/**
+ * Critical path length: longest chain of "blocks" edges forward from each node.
+ * A node with criticalPathLength=3 has 3 more hops of dependent work after it.
+ */
+function computeCriticalPath(ids: string[], blocksMap: Map<string, string[]>): Map<string, number> {
+  const memo = new Map<string, number>();
+
+  const dfs = (id: string): number => {
+    const cached = memo.get(id);
+    if (cached !== undefined) return cached;
+    const successors = blocksMap.get(id) ??
[];
+    if (successors.length === 0) {
+      memo.set(id, 0);
+      return 0;
+    }
+    const max = Math.max(...successors.map(dfs));
+    memo.set(id, max + 1);
+    return max + 1;
+  };
+
+  for (const id of ids) dfs(id);
+  return memo;
+}
+
+/**
+ * Compute all graph metrics for a set of issues.
+ * Composite score weights: 50% pagerank, 30% betweenness, 20% critical path.
+ */
+export function computeMetrics(issues: Issue[]): IssueMetrics {
+  if (issues.length === 0) return new Map();
+
+  const { blocksMap, blockedByMap, ids } = buildAdjacency(issues);
+  const pagerank = computePageRank(ids, blocksMap, blockedByMap);
+  const betweenness = computeBetweenness(ids, blocksMap);
+  const criticalPath = computeCriticalPath(ids, blocksMap);
+
+  const maxCp = Array.from(criticalPath.values()).reduce((a, b) => Math.max(a, b), 0) || 1;
+
+  // Normalize pagerank to [0,1]
+  const prValues = Array.from(pagerank.values());
+  const maxPr = prValues.reduce((a, b) => Math.max(a, b), 0) || 1;
+  const minPr = Math.min(...prValues);
+  const prRange = maxPr - minPr || 1;
+
+  // Normalize betweenness to [0,1] (already normalized by Brandes, but rescale to max)
+  const bValues = Array.from(betweenness.values());
+  const maxB = bValues.reduce((a, b) => Math.max(a, b), 0) || 1;
+
+  const result: IssueMetrics = new Map();
+  for (const id of ids) {
+    const pr = ((pagerank.get(id) ?? 0) - minPr) / prRange;
+    const b = (betweenness.get(id) ?? 0) / maxB;
+    const cp = (criticalPath.get(id) ?? 0) / maxCp;
+    const score = 0.5 * pr + 0.3 * b + 0.2 * cp;
+    result.set(id, {
+      pagerank: pagerank.get(id) ?? 0,
+      betweenness: betweenness.get(id) ?? 0,
+      criticalPathLength: criticalPath.get(id) ??
0, + score, + }); + } + + return result; +} diff --git a/src/index.ts b/src/index.ts index 502d83b..f659e3c 100755 --- a/src/index.ts +++ b/src/index.ts @@ -100,6 +100,7 @@ async function registerAll(): Promise { import("./commands/completions.ts"), import("./commands/block.ts"), import("./commands/unblock.ts"), + import("./commands/triage.ts"), ]); for (const mod of mods) {