diff --git a/server/src/lib/scheduler.ts b/server/src/lib/scheduler.ts new file mode 100644 index 0000000..6e65e80 --- /dev/null +++ b/server/src/lib/scheduler.ts @@ -0,0 +1,799 @@ +/** + * scheduler.ts — Pure scheduling engine for the Monrad Estimator timeline. + * + * This module is intentionally free of Prisma, I/O, and side effects. + * It accepts plain data in and returns plain data out, which lets the + * Phase 3 optimiser call runScheduler() in a tight loop with different + * resource configurations without touching the database. + * + * Phase 2 extraction: issue #233 + */ + +// ───────────────────────────────────────────────────────────────────────────── +// Input / Output types +// ───────────────────────────────────────────────────────────────────────────── + +export interface SchedulerTask { + resourceTypeId: string | null + hoursEffort: number + durationDays: number | null + resourceType: { + id: string + name: string + hoursPerDay: number | null + } | null +} + +export interface SchedulerStory { + id: string + order: number | null + isActive: boolean | null + tasks: SchedulerTask[] +} + +export interface SchedulerFeature { + id: string + order: number + isActive: boolean | null + userStories: SchedulerStory[] + /** FeatureDependency rows where this feature is the dependent */ + dependencies: Array<{ featureId: string; dependsOnId: string }> +} + +export interface SchedulerEpic { + id: string + name: string + order: number + isActive: boolean | null + featureMode: string | null + scheduleMode: string | null + timelineStartWeek: number | null + features: SchedulerFeature[] +} + +export interface SchedulerNamedResource { + id: string + name: string + startWeek: number | null + endWeek: number | null + allocationPct: number + allocationMode: string + allocationPercent: number + allocationStartWeek: number | null + allocationEndWeek: number | null +} + +export interface SchedulerResourceType { + id: string + name: string + count: number + hoursPerDay: number | null + namedResources: SchedulerNamedResource[] +} + +export interface SchedulerInput { + /** Scheduling-relevant project fields */ + project: { + hoursPerDay: number + } + /** Active epics (inactive ones already filtered out by the caller) */ + epics: SchedulerEpic[] + resourceTypes: SchedulerResourceType[] + epicDeps: Array<{ epicId: string; dependsOnId: string }> + /** Feature-level manual pins (isManual=true rows) */ + manualFeatureEntries: Array<{ featureId: string; startWeek: number; durationWeeks: number }> + /** Story-level manual pins */ + manualStoryEntries: Array<{ storyId: string; startWeek: number }> + /** When true, run the resource-levelling simulation */ + resourceLevel: boolean +} + +export interface ParallelWarning { + epicId: string + epicName: string + resourceTypeName: string + demandDays: number + capacityDays: number +} + +export interface SchedulerOutput { + /** One entry per processed feature */ + featureSchedule: Array<{ + featureId: string + startWeek: number + durationWeeks: number + isManual: boolean + }> + /** One entry per active story */ + storySchedule: Array<{ + storyId: string + startWeek: number + durationWeeks: number + isManual: boolean + }> + /** + * Actual resource consumption from the levelling simulation. + * Key: `${resourceTypeName}|${week}`, value: days consumed. + * Empty map when resourceLevel=false. 
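+   * Example (hypothetical numbers): the key 'Developer|3' mapping to 2.4
+   * means 2.4 person-days of Developer capacity were consumed during week 3.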
+ */ + weeklyConsumptionMap: Map + parallelWarnings: ParallelWarning[] +} + +// ───────────────────────────────────────────────────────────────────────────── +// Pure helpers (previously in routes/timeline.ts) +// ───────────────────────────────────────────────────────────────────────────── + +/** Compute the effective allocation percentage for a named resource in a given week. */ +export function effectiveAllocationPct( + nr: SchedulerNamedResource, + week: number, +): number { + if (nr.allocationMode === 'FULL_PROJECT') return nr.allocationPercent + if (nr.allocationMode === 'TIMELINE') { + const wStart = nr.allocationStartWeek ?? nr.startWeek ?? 0 + const wEnd = nr.allocationEndWeek ?? nr.endWeek ?? Infinity + return week >= wStart && week <= wEnd ? nr.allocationPercent : 0 + } + // EFFORT (T&M) — no fixed allocation; full capacity available + return 100 +} + +/** Compute weekly capacity (hours) for a resource type, accounting for named resource availability. */ +export function getWeeklyCapacity( + rt: SchedulerResourceType, + week: number, + defaultHoursPerDay: number, +): number { + const hoursPerDay = rt.hoursPerDay ?? defaultHoursPerDay + if (rt.namedResources.length === 0) { + // No named resources — use aggregate count (existing behaviour) + return rt.count * hoursPerDay * 5 + } + // Sum capacity from named resources active this week + let totalHours = 0 + for (const nr of rt.namedResources) { + const start = nr.startWeek ?? 0 // null = project start (week 0) + const end = nr.endWeek ?? Infinity // null = project end + if (week >= start && week <= end) { + const pct = effectiveAllocationPct(nr, week) + totalHours += (pct / 100) * hoursPerDay * 5 + } + } + return totalHours +} + +// ───────────────────────────────────────────────────────────────────────────── +// Internal helpers +// ───────────────────────────────────────────────────────────────────────────── + +/** Min-heap for Kahn's topological sort priority queue. O(n log n) total. */ +class MinHeap { + private data: Array<{ priority: number; id: string }> = [] + + push(item: { priority: number; id: string }) { + this.data.push(item) + this._bubbleUp(this.data.length - 1) + } + + pop(): { priority: number; id: string } | undefined { + if (this.data.length === 0) return undefined + const top = this.data[0] + const last = this.data.pop()! + if (this.data.length > 0) { + this.data[0] = last + this._sinkDown(0) + } + return top + } + + get length() { return this.data.length } + + private _bubbleUp(i: number) { + while (i > 0) { + const parent = Math.floor((i - 1) / 2) + if (this.data[parent].priority <= this.data[i].priority) break + ;[this.data[parent], this.data[i]] = [this.data[i], this.data[parent]] + i = parent + } + } + + private _sinkDown(i: number) { + const n = this.data.length + while (true) { + let smallest = i + const l = 2 * i + 1, r = 2 * i + 2 + if (l < n && this.data[l].priority < this.data[smallest].priority) smallest = l + if (r < n && this.data[r].priority < this.data[smallest].priority) smallest = r + if (smallest === i) break + ;[this.data[smallest], this.data[i]] = [this.data[i], this.data[smallest]] + i = smallest + } + } +} + +/** + * Compute over-allocation warnings for parallel-mode epics. + * Pure function: accepts pre-computed schedule entries + resource data. + * Exported so the GET /timeline route can call it directly on saved DB entries. 
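+ *
+ * A warning is emitted when an epic's total demand (person-days summed per
+ * resource type across all of its simultaneous features) exceeds capacity
+ * over the epic's span: resource count × span in working days, or the summed
+ * weekly named-resource capacity when named resources exist.
+ */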
+ */ +export function computeParallelWarnings( + fallbackHoursPerDay: number, + entries: Array<{ + featureId: string + startWeek: number + durationWeeks: number + feature: { epic: { id: string; name: string; featureMode: string | null } } + }>, + allFeatures: Array<{ + id: string + userStories: Array<{ + isActive: boolean | null + tasks: Array<{ + resourceTypeId: string | null + resourceType: { id: string; name: string; hoursPerDay: number | null } | null + hoursEffort: number + durationDays: number | null + }> + }> + }>, + allResourceTypes: SchedulerResourceType[], +): ParallelWarning[] { + const warnings: ParallelWarning[] = [] + + // Only check parallel epics with 2+ features + const parallelEpics = new Map() + for (const e of entries) { + if ((e.feature.epic.featureMode ?? 'sequential') !== 'parallel') continue + const epicId = e.feature.epic.id + if (!parallelEpics.has(epicId)) { + parallelEpics.set(epicId, { epicName: e.feature.epic.name, featureIds: [], startWeek: e.startWeek, endWeek: e.startWeek + e.durationWeeks }) + } + const ep = parallelEpics.get(epicId)! + ep.featureIds.push(e.featureId) + ep.startWeek = Math.min(ep.startWeek, e.startWeek) + ep.endWeek = Math.max(ep.endWeek, e.startWeek + e.durationWeeks) + } + + const featureById = new Map(allFeatures.map(f => [f.id, f])) + const rtCountMap = new Map(allResourceTypes.map(rt => [rt.id, rt.count])) + const rtMap = new Map(allResourceTypes.map(rt => [rt.id, rt])) + + for (const [epicId, { epicName, featureIds, startWeek, endWeek }] of parallelEpics) { + if (featureIds.length < 2) continue + const epicSpanDays = (endWeek - startWeek) * 5 + + const features = featureIds.map(id => featureById.get(id)).filter((f): f is NonNullable => f !== undefined) + + const demandMap = new Map() + for (const feature of features) { + for (const story of feature.userStories) { + if (story.isActive === false) continue + for (const task of story.tasks) { + const rtId = task.resourceTypeId ?? '_unassigned' + const hpd = task.resourceType?.hoursPerDay ?? fallbackHoursPerDay + const days = task.durationDays ?? (task.hoursEffort / hpd) + if (!demandMap.has(rtId)) { + demandMap.set(rtId, { + name: task.resourceType?.name ?? 'Unassigned', + days: 0, + count: task.resourceTypeId ? (rtCountMap.get(task.resourceTypeId) ?? 1) : 1, + }) + } + demandMap.get(rtId)!.days += days + } + } + } + + for (const [rtId, { name, days, count }] of demandMap) { + const rt = rtMap.get(rtId) + let capacityDays: number + if (rt && rt.namedResources && rt.namedResources.length > 0) { + capacityDays = 0 + const hpd = rt.hoursPerDay ?? fallbackHoursPerDay + for (let w = Math.floor(startWeek); w < Math.ceil(endWeek); w++) { + const overlap = Math.min(w + 1, endWeek) - Math.max(w, startWeek) + if (overlap <= 0) continue + capacityDays += (getWeeklyCapacity(rt, w, fallbackHoursPerDay) / hpd) * overlap + } + } else { + capacityDays = count * epicSpanDays + } + if (days > capacityDays) { + warnings.push({ + epicId, + epicName, + resourceTypeName: name, + demandDays: Math.round(days * 10) / 10, + capacityDays: Math.round(capacityDays * 10) / 10, + }) + } + } + } + + return warnings +} + +// ───────────────────────────────────────────────────────────────────────────── +// Main entry point +// ───────────────────────────────────────────────────────────────────────────── + +/** + * Pure scheduling engine. 
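+ *
+ * Minimal call sketch (input shapes abbreviated; see SchedulerInput):
+ *
+ *   const out = runScheduler({
+ *     project: { hoursPerDay: 8 },
+ *     epics, resourceTypes,
+ *     epicDeps: [],
+ *     manualFeatureEntries: [],
+ *     manualStoryEntries: [],
+ *     resourceLevel: false,
+ *   })
+ *   // out.featureSchedule → [{ featureId, startWeek, durationWeeks, isManual }, ...]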
+ * + * Takes plain data (no Prisma types), performs topological sort + optional + * resource-levelling simulation, and returns computed schedules for features + * and stories together with the weekly resource consumption map and any + * over-allocation warnings. + * + * Same inputs → same outputs. No DB access. No side effects. + */ +export function runScheduler(input: SchedulerInput): SchedulerOutput { + const { project, epics, resourceTypes, epicDeps, manualFeatureEntries, manualStoryEntries, resourceLevel } = input + const fallbackHoursPerDay = project.hoursPerDay + + const rtCountMap = new Map(resourceTypes.map(rt => [rt.id, rt.count])) + + // Build manual lookup maps + const manualStartWeeks = new Map(manualFeatureEntries.map(e => [e.featureId, e.startWeek])) + const manualDurationWeeks = new Map(manualFeatureEntries.map(e => [e.featureId, e.durationWeeks])) + const manualStoryWeeks = new Map(manualStoryEntries.map(e => [e.storyId, e.startWeek])) + + // ── Flatten features across epics, attaching epic back-reference ───────── + const allFeatures = epics.flatMap(epic => + epic.features.map(f => ({ ...f, epic })) + ) + const featureMap = new Map(allFeatures.map(f => [f.id, f])) + + // ── Helper: duration in weeks for a feature ─────────────────────────────── + function featureDurationWeeks(feature: typeof allFeatures[0]): number { + const allTasks = feature.userStories.filter(s => s.isActive !== false).flatMap(s => s.tasks) + if (allTasks.length === 0) return 1 + + const byRt = new Map() + for (const task of allTasks) { + const group = byRt.get(task.resourceTypeId) ?? [] + group.push(task) + byRt.set(task.resourceTypeId, group) + } + + let maxDays = 0 + for (const [rtId, tasks] of byRt) { + const personDays = tasks.reduce((sum, t) => { + const hpd = t.resourceType?.hoursPerDay ?? fallbackHoursPerDay + return sum + (t.durationDays ?? (t.hoursEffort / hpd)) + }, 0) + const count = rtId ? (rtCountMap.get(rtId) ?? 1) : 1 + const days = personDays / count + if (days > maxDays) maxDays = days + } + return Math.max(0.2, maxDays / 5) + } + + // ── Kahn's topological sort over features ───────────────────────────────── + const inDegree = new Map() + const adjList = new Map>() // from → Set (Set for O(1) dedup) + const predecessors = new Map() // to → [from, ...] + + for (const f of allFeatures) { + inDegree.set(f.id, 0) + adjList.set(f.id, new Set()) + predecessors.set(f.id, []) + } + + function addEdge(fromId: string, toId: string) { + const succs = adjList.get(fromId) + const preds = predecessors.get(toId) + if (!succs || !preds) return // one of the features not in this project + if (succs.has(toId)) return // deduplicate (O(1) with Set) + succs.add(toId) + preds.push(fromId) + inDegree.set(toId, (inDegree.get(toId) ?? 0) + 1) + } + + const sortedEpics = [...epics].sort((a, b) => a.order - b.order) + + // 1. Intra-epic sequential edges + for (const epic of epics) { + if ((epic.featureMode ?? 'sequential') === 'sequential') { + const sorted = [...epic.features].sort((a, b) => a.order - b.order) + for (let i = 1; i < sorted.length; i++) { + // Don't chain successor onto a manually-pinned feature + if (manualStartWeeks.has(sorted[i - 1].id)) continue + addEdge(sorted[i - 1].id, sorted[i].id) + } + } + } + + // 2. 
Inter-epic sequential chaining + for (let i = 1; i < sortedEpics.length; i++) { + const prevEpic = sortedEpics[i - 1] + const currEpic = sortedEpics[i] + if (prevEpic.features.length === 0 || currEpic.features.length === 0) continue + + if (currEpic.timelineStartWeek != null) continue + if ((currEpic.scheduleMode ?? 'sequential') === 'parallel') continue + + const currTargets = (currEpic.featureMode ?? 'sequential') === 'sequential' + ? [currEpic.features[0]] + : currEpic.features + + for (const prevFeature of prevEpic.features) { + const hasCrossEpicDep = (prevFeature.dependencies ?? []).some(dep => { + const target = featureMap.get(dep.dependsOnId) + return target !== undefined && target.epic.id === currEpic.id + }) + if (hasCrossEpicDep) continue + + for (const currFeature of currTargets) { + addEdge(prevFeature.id, currFeature.id) + } + } + } + + // 3. Explicit cross-epic feature dependency edges + for (const f of allFeatures) { + for (const dep of (f.dependencies ?? [])) { + addEdge(dep.dependsOnId, dep.featureId) + } + } + + // 4. Epic dependency hard constraints + const epicById = new Map(epics.map(e => [e.id, e])) + for (const epicDep of epicDeps) { + const fromEpic = epicById.get(epicDep.dependsOnId) + const toEpic = epicById.get(epicDep.epicId) + if (!fromEpic || !toEpic) continue + for (const fromFeature of fromEpic.features) { + for (const toFeature of toEpic.features) { + addEdge(fromFeature.id, toFeature.id) + } + } + } + + // Kahn's algorithm with min-heap priority queue + const finishWeeks = new Map() + const startWeeks = new Map() + + function featurePriority(fId: string) { + const f = featureMap.get(fId)! + return f.epic.order * 100000 + f.order + } + + const queue = new MinHeap() + for (const [fId, deg] of inDegree) { + if (deg === 0) queue.push({ priority: featurePriority(fId), id: fId }) + } + + const processed: string[] = [] + + while (queue.length > 0) { + const { id: fId } = queue.pop()! + processed.push(fId) + + const f = featureMap.get(fId)! + const epic = f.epic + const dur = featureDurationWeeks(f) + + if (manualStartWeeks.has(fId)) { + const sw = manualStartWeeks.get(fId)! + startWeeks.set(fId, sw) + finishWeeks.set(fId, sw + dur) + } else { + let earliest = epic.timelineStartWeek ?? 0 + for (const predId of predecessors.get(fId) ?? []) { + const predFinish = finishWeeks.get(predId) ?? 0 + if (predFinish > earliest) earliest = predFinish + } + startWeeks.set(fId, earliest) + finishWeeks.set(fId, earliest + dur) + } + + for (const succId of adjList.get(fId) ?? []) { + const newDeg = (inDegree.get(succId) ?? 1) - 1 + inDegree.set(succId, newDeg) + if (newDeg === 0) queue.push({ priority: featurePriority(succId), id: succId }) + } + } + + // Fallback: features not processed (cycle / unresolvable deps) + if (processed.length < allFeatures.length) { + const epicMaxFinish = new Map() + for (const f of allFeatures) { + const fw = finishWeeks.get(f.id) + if (fw === undefined) continue + const prev = epicMaxFinish.get(f.epic.id) ?? 0 + if (fw > prev) epicMaxFinish.set(f.epic.id, fw) + } + for (const f of allFeatures) { + if (startWeeks.has(f.id)) continue + let earliest = f.epic.timelineStartWeek ?? 0 + for (const prevEpic of sortedEpics) { + if (prevEpic.order >= f.epic.order) break + const prevFinish = epicMaxFinish.get(prevEpic.id) ?? 0 + if (prevFinish > earliest) earliest = prevFinish + } + startWeeks.set(f.id, earliest) + finishWeeks.set(f.id, earliest + featureDurationWeeks(f)) + processed.push(f.id) + const cur = epicMaxFinish.get(f.epic.id) ?? 
0 + const newFinish = earliest + featureDurationWeeks(f) + if (newFinish > cur) epicMaxFinish.set(f.epic.id, newFinish) + } + } + + // ── Resource-levelling simulation ───────────────────────────────────────── + const weeklyConsumptionMap = new Map() + + if (resourceLevel) { + function featureResourceHours(feature: typeof allFeatures[0]): Map { + const result = new Map() + for (const story of feature.userStories) { + if (story.isActive === false) continue + for (const task of story.tasks) { + const rtId = task.resourceTypeId ?? '_unassigned' + const hpd = task.resourceType?.hoursPerDay ?? fallbackHoursPerDay + const hours = (task.durationDays ?? (task.hoursEffort / hpd)) * hpd + result.set(rtId, (result.get(rtId) ?? 0) + hours) + } + } + return result + } + + const featureResourceHoursCache = new Map>() + for (const fId of processed) { + featureResourceHoursCache.set(fId, featureResourceHours(featureMap.get(fId)!)) + } + + const rtById = new Map(resourceTypes.map(rt => [rt.id, rt])) + const allRtIds = [...resourceTypes.map(rt => rt.id), '_unassigned'] + + const remainingHours = new Map>() + for (const fId of processed) { + if (manualStartWeeks.has(fId)) continue + remainingHours.set(fId, new Map(featureResourceHoursCache.get(fId)!)) + } + + const simStart = new Map() + const simDone = new Map() + + for (const [fId, sw] of manualStartWeeks) { + simStart.set(fId, sw) + const storedDur = manualDurationWeeks.get(fId) + simDone.set(fId, sw + (storedDur !== undefined ? storedDur : featureDurationWeeks(featureMap.get(fId)!))) + } + + const STEP = 0.2 + const MAX_WEEKS = 200 + const autoFeatures = processed.filter(fId => !manualStartWeeks.has(fId)) + const unfinished = new Set(autoFeatures) + + let t = 0 + while (unfinished.size > 0 && t < MAX_WEEKS) { + for (const fId of unfinished) { + if (simStart.has(fId)) continue + const f = featureMap.get(fId)! + const epicStart = f.epic.timelineStartWeek ?? 0 + if (t < epicStart) continue + const predsAllDone = (predecessors.get(fId) ?? []).every(predId => { + const done = simDone.get(predId) + return done !== undefined && done <= t + }) + if (predsAllDone) { + const currentWeekForStart = Math.floor(t) + const fHours = remainingHours.get(fId) + if (fHours && fHours.size > 0) { + const hasCapacity = [...fHours.keys()].some(rtId => { + if (rtId === '_unassigned') return true + const rt = rtById.get(rtId) + return !rt || getWeeklyCapacity(rt, currentWeekForStart, fallbackHoursPerDay) > 0 + }) + if (!hasCapacity) continue + } + simStart.set(fId, t) + } + } + + const active = [...unfinished].filter(fId => simStart.has(fId)) + + const currentWeek = Math.floor(t) + for (const rtId of allRtIds) { + const rt = rtById.get(rtId) + const rtName = rt?.name ?? 'Unassigned' + const hpd = rt?.hoursPerDay ?? fallbackHoursPerDay + for (const [fId] of manualStartWeeks) { + const fStart = simStart.get(fId) + const fDone = simDone.get(fId) + if (fStart === undefined || fDone === undefined || fDone <= fStart) continue + if (t >= fStart && t < fDone) { + const rtHours = featureResourceHoursCache.get(fId)!.get(rtId) ?? 0 + if (rtHours > 0) { + const perStep = (rtHours / (fDone - fStart)) * STEP + const consumptionKey = `${rtName}|${currentWeek}` + weeklyConsumptionMap.set(consumptionKey, (weeklyConsumptionMap.get(consumptionKey) ?? 
0) + perStep / hpd) + } + } + } + } + + if (active.length === 0) { t += STEP; continue } + + for (const fId of active) { + if (remainingHours.get(fId)?.size === 0) { + if (!simDone.has(fId)) { + simDone.set(fId, t + STEP) + unfinished.delete(fId) + } + } + } + + for (const rtId of allRtIds) { + const rt = rtById.get(rtId) + const capPerWeek = rt + ? getWeeklyCapacity(rt, currentWeek, fallbackHoursPerDay) + : fallbackHoursPerDay * 5 + let capPerStep = capPerWeek * STEP + const rtName = rt?.name ?? 'Unassigned' + const hpd = rt?.hoursPerDay ?? fallbackHoursPerDay + + for (const [fId] of manualStartWeeks) { + const fStart = simStart.get(fId) + const fDone = simDone.get(fId) + if (fStart === undefined || fDone === undefined || fDone <= fStart) continue + if (t >= fStart && t < fDone) { + const rtHours = featureResourceHoursCache.get(fId)!.get(rtId) ?? 0 + if (rtHours > 0) { + const perStep = (rtHours / (fDone - fStart)) * STEP + capPerStep = Math.max(0, capPerStep - perStep) + } + } + } + + const competing = active.filter(fId => (remainingHours.get(fId)?.get(rtId) ?? 0) > 0.001) + if (competing.length === 0) continue + + const totalRemaining = competing.reduce((s, fId) => s + (remainingHours.get(fId)!.get(rtId) ?? 0), 0) + + for (const fId of competing) { + const rem = remainingHours.get(fId)!.get(rtId)! + const actualAllocated = Math.min((rem / totalRemaining) * capPerStep, rem) + const consumptionKey = `${rtName}|${currentWeek}` + weeklyConsumptionMap.set(consumptionKey, (weeklyConsumptionMap.get(consumptionKey) ?? 0) + actualAllocated / hpd) + remainingHours.get(fId)!.set(rtId, Math.max(0, rem - actualAllocated)) + } + } + + for (const fId of active) { + const allDone = [...(remainingHours.get(fId)?.values() ?? [])].every(h => h <= 0.001) + if (allDone) { + simDone.set(fId, t + STEP) + unfinished.delete(fId) + } + } + + t += STEP + t = Math.round(t * 5) / 5 + } + + // Apply simulation results back to startWeeks/finishWeeks + for (const fId of processed) { + const sw = simStart.get(fId) ?? startWeeks.get(fId) ?? 0 + const doneW = simDone.get(fId) + const dur = doneW !== undefined ? doneW - sw : featureDurationWeeks(featureMap.get(fId)!) + startWeeks.set(fId, sw) + finishWeeks.set(fId, sw + dur) + } + } + + // ── Story-level scheduling ───────────────────────────────────────────────── + const allStories = epics.flatMap(epic => + epic.features.flatMap(feature => + feature.userStories + .filter(s => s.isActive !== false) + .map(s => ({ ...s, feature: { ...feature, epic } })) + ) + ) + + function storyResourceHours(story: typeof allStories[0]): Map { + const result = new Map() + for (const task of story.tasks) { + const rtId = task.resourceTypeId ?? '_unassigned' + const hpd = task.resourceType?.hoursPerDay ?? fallbackHoursPerDay + const hours = (task.durationDays ?? (task.hoursEffort / hpd)) * hpd + result.set(rtId, (result.get(rtId) ?? 0) + hours) + } + return result + } + + function storyTotalHours(story: typeof allStories[0]): number { + return [...storyResourceHours(story).values()].reduce((a, b) => a + b, 0) + } + + const storyScheduled = new Map() + + // Pass 1: manual-pinned stories + for (const story of allStories) { + if (manualStoryWeeks.has(story.id)) { + const sw = manualStoryWeeks.get(story.id)! 
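+      // Manual pins keep their stored startWeek; duration is re-derived from
+      // the story's own hours at the fallback rate, e.g. 40h at 8 h/day is
+      // 40 / 8 / 5 = 1 week (clamped to a 0.2-week minimum).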
+ const totalHours = storyTotalHours(story) + const dur = Math.max(0.2, totalHours / fallbackHoursPerDay / 5) + storyScheduled.set(story.id, { startWeek: sw, durationWeeks: dur, isManual: true }) + } + } + + // Pass 2: proportional sequential scheduling per feature + const storiesByFeature = new Map() + for (const story of allStories) { + const fId = story.feature.id + if (!storiesByFeature.has(fId)) storiesByFeature.set(fId, []) + storiesByFeature.get(fId)!.push(story) + } + for (const stories of storiesByFeature.values()) { + stories.sort((a, b) => (a.order ?? 0) - (b.order ?? 0)) + } + + for (const [fId, stories] of storiesByFeature) { + const featureStart = startWeeks.get(fId) ?? 0 + const featureDone = finishWeeks.get(fId) ?? (featureStart + 1) + const featureDuration = Math.max(0.2, featureDone - featureStart) + + const siblings = stories.filter(s => !manualStoryWeeks.has(s.id)) + if (siblings.length === 0) continue + + const totalFeatureHours = siblings.reduce((sum, s) => sum + storyTotalHours(s), 0) + + let cursor = featureStart + for (const sibling of siblings) { + const hrs = storyTotalHours(sibling) + const dur = totalFeatureHours > 0 + ? (hrs / totalFeatureHours) * featureDuration + : featureDuration / Math.max(1, siblings.length) + const safeDur = Math.max(0.2, dur) + storyScheduled.set(sibling.id, { startWeek: cursor, durationWeeks: safeDur, isManual: false }) + cursor += safeDur + } + } + + // ── Parallel warnings ───────────────────────────────────────────────────── + const scheduleEntries = processed.map(fId => { + const f = featureMap.get(fId)! + const sw = startWeeks.get(fId)! + const durationWeeks = (finishWeeks.get(fId)! - sw) + return { + featureId: fId, + startWeek: sw, + durationWeeks, + feature: { epic: { id: f.epic.id, name: f.epic.name, featureMode: f.epic.featureMode } }, + } + }) + + const parallelWarnings = computeParallelWarnings( + fallbackHoursPerDay, + scheduleEntries, + allFeatures, + resourceTypes, + ) + + // ── Assemble output ─────────────────────────────────────────────────────── + const featureSchedule = processed.map(fId => { + const sw = startWeeks.get(fId)! + const f = featureMap.get(fId)! + const durationWeeks = (finishWeeks.get(fId) ?? (sw + featureDurationWeeks(f))) - sw + return { + featureId: fId, + startWeek: sw, + durationWeeks, + isManual: manualStartWeeks.has(fId), + } + }) + + const storySchedule = allStories + .map(story => { + const sched = storyScheduled.get(story.id) + if (!sched) return null + return { storyId: story.id, ...sched } + }) + .filter((s): s is NonNullable => s !== null) + + return { featureSchedule, storySchedule, weeklyConsumptionMap, parallelWarnings } +} diff --git a/server/src/routes/timeline.ts b/server/src/routes/timeline.ts index 9099a8b..2b6449d 100644 --- a/server/src/routes/timeline.ts +++ b/server/src/routes/timeline.ts @@ -2,10 +2,26 @@ import { Router, Response } from 'express' import { prisma } from '../lib/prisma.js' import { asyncHandler } from '../lib/asyncHandler.js' import { authenticate, AuthRequest } from '../middleware/auth.js' +import { + runScheduler, + getWeeklyCapacity, + computeParallelWarnings, + type SchedulerResourceType, + type ParallelWarning, +} from '../lib/scheduler.js' const router = Router({ mergeParams: true }) router.use(authenticate) +/** + * Re-export for backward compatibility — timeline.test.ts imports this from + * routes/timeline.js. The canonical implementation lives in lib/scheduler.ts. 
+ */ +export { getWeeklyCapacity } + +// Alias for internal use within this file +type ResourceTypeWithNamed = SchedulerResourceType + async function ownedProject(projectId: string, userId: string) { return prisma.project.findFirst({ where: { id: projectId, ownerId: userId } }) } @@ -19,65 +35,6 @@ function computeDates(projectStartDate: Date | null, startWeek: number, duration return { startDate: start.toISOString(), endDate: end.toISOString() } } -type ParallelWarning = { epicId: string; epicName: string; resourceTypeName: string; demandDays: number; capacityDays: number } - -type ResourceTypeWithNamed = { - id: string - name: string - count: number - hoursPerDay: number | null - namedResources: Array<{ - id: string - name: string - startWeek: number | null - endWeek: number | null - allocationPct: number - allocationMode: string - allocationPercent: number - allocationStartWeek: number | null - allocationEndWeek: number | null - }> -} - -/** Compute the effective allocation percentage for a named resource in a given week. */ -function effectiveAllocationPct( - nr: ResourceTypeWithNamed['namedResources'][number], - week: number, -): number { - if (nr.allocationMode === 'FULL_PROJECT') return nr.allocationPercent - if (nr.allocationMode === 'TIMELINE') { - const wStart = nr.allocationStartWeek ?? nr.startWeek ?? 0 - const wEnd = nr.allocationEndWeek ?? nr.endWeek ?? Infinity - return week >= wStart && week <= wEnd ? nr.allocationPercent : 0 - } - // EFFORT (T&M) — no fixed allocation; full capacity available - return 100 -} - -/** Compute weekly capacity (hours) for a resource type, accounting for named resource availability. */ -export function getWeeklyCapacity( - rt: ResourceTypeWithNamed, - week: number, - defaultHoursPerDay: number, -): number { - const hoursPerDay = rt.hoursPerDay ?? defaultHoursPerDay - if (rt.namedResources.length === 0) { - // No named resources — use aggregate count (existing behaviour) - return rt.count * hoursPerDay * 5 - } - // Sum capacity from named resources active this week - let totalHours = 0 - for (const nr of rt.namedResources) { - const start = nr.startWeek ?? 0 // null = project start (week 0) - const end = nr.endWeek ?? 
Infinity // null = project end - if (week >= start && week <= end) { - const pct = effectiveAllocationPct(nr, week) - totalHours += (pct / 100) * hoursPerDay * 5 - } - } - return totalHours -} - function computeResourceBreakdown( feature: { userStories: { isActive: boolean | null; tasks: { resourceTypeId: string | null, hoursEffort: number, durationDays: number | null, resourceType: { name: string, hoursPerDay: number | null } | null }[] }[] }, fallbackHpd: number @@ -314,93 +271,6 @@ function buildResponse( } } -// Compute over-allocation warnings for parallel-mode epics -// #178: accept pre-loaded features and resourceTypes to avoid redundant DB queries -async function computeParallelWarnings( - fallbackHoursPerDay: number, - entries: Array<{ featureId: string; startWeek: number; durationWeeks: number; feature: { epic: { id: string; name: string; featureMode: string } } }>, - allFeatures: Array<{ id: string; userStories: Array<{ isActive: boolean | null; tasks: Array<{ resourceTypeId: string | null; resourceType: { id: string; name: string; hoursPerDay: number | null } | null; hoursEffort: number; durationDays: number | null }> }> }>, - allResourceTypes: ResourceTypeWithNamed[], -): Promise { - const warnings: ParallelWarning[] = [] - - // Only check parallel epics with 2+ features - const parallelEpics = new Map() - for (const e of entries) { - if ((e.feature.epic.featureMode ?? 'sequential') !== 'parallel') continue - const epicId = e.feature.epic.id - if (!parallelEpics.has(epicId)) { - parallelEpics.set(epicId, { epicName: e.feature.epic.name, featureIds: [], startWeek: e.startWeek, endWeek: e.startWeek + e.durationWeeks }) - } - const ep = parallelEpics.get(epicId)! - ep.featureIds.push(e.featureId) - ep.startWeek = Math.min(ep.startWeek, e.startWeek) - ep.endWeek = Math.max(ep.endWeek, e.startWeek + e.durationWeeks) - } - - // Build lookup maps from pre-loaded data — no additional DB queries needed - const featureById = new Map(allFeatures.map(f => [f.id, f])) - const rtCountMap = new Map(allResourceTypes.map(rt => [rt.id, rt.count])) - const rtMap = new Map(allResourceTypes.map(rt => [rt.id, rt as ResourceTypeWithNamed])) - - for (const [epicId, { epicName, featureIds, startWeek, endWeek }] of parallelEpics) { - if (featureIds.length < 2) continue - const epicSpanDays = (endWeek - startWeek) * 5 - - // Use pre-loaded features filtered to this parallel epic - const features = featureIds.map(id => featureById.get(id)).filter((f): f is NonNullable => f !== undefined) - - // Sum total person-days per resource type across ALL features (they run simultaneously) - const demandMap = new Map() - for (const feature of features) { - for (const story of feature.userStories) { - if (story.isActive === false) continue - for (const task of story.tasks) { - const rtId = task.resourceTypeId ?? '_unassigned' - const hpd = task.resourceType?.hoursPerDay ?? fallbackHoursPerDay - const days = task.durationDays ?? (task.hoursEffort / hpd) - if (!demandMap.has(rtId)) { - demandMap.set(rtId, { - name: task.resourceType?.name ?? 'Unassigned', - days: 0, - count: task.resourceTypeId ? (rtCountMap.get(task.resourceTypeId) ?? 
1) : 1, - }) - } - demandMap.get(rtId)!.days += days - } - } - } - - for (const [rtId, { name, days, count }] of demandMap) { - // Variable capacity: sum capacity across the epic's span, accounting for named resource availability - const rt = rtMap.get(rtId) - let capacityDays: number - if (rt && rt.namedResources && rt.namedResources.length > 0) { - capacityDays = 0 - const hpd = rt.hoursPerDay ?? fallbackHoursPerDay - for (let w = Math.floor(startWeek); w < Math.ceil(endWeek); w++) { - const overlap = Math.min(w + 1, endWeek) - Math.max(w, startWeek) - if (overlap <= 0) continue - capacityDays += (getWeeklyCapacity(rt, w, fallbackHoursPerDay) / hpd) * overlap - } - } else { - capacityDays = count * epicSpanDays - } - if (days > capacityDays) { - warnings.push({ - epicId, - epicName, - resourceTypeName: name, - demandDays: Math.round(days * 10) / 10, - capacityDays: Math.round(capacityDays * 10) / 10, - }) - } - } - } - - return warnings -} - // GET /api/projects/:projectId/timeline router.get('/', asyncHandler(async (req: AuthRequest, res: Response) => { const project = await ownedProject(req.params.projectId as string, req.userId!) @@ -431,7 +301,7 @@ router.get('/', asyncHandler(async (req: AuthRequest, res: Response) => { // #178: pass pre-loaded features and resource types — no extra DB queries inside const activeFeatures = activeEntries.map(e => e.feature) - const parallelWarnings = await computeParallelWarnings(project.hoursPerDay, activeEntries, activeFeatures, resourceTypes) + const parallelWarnings = computeParallelWarnings(project.hoursPerDay, activeEntries, activeFeatures, resourceTypes) const storyTimelineEntries = await prisma.storyTimelineEntry.findMany({ where: { projectId: project.id }, @@ -482,9 +352,8 @@ router.post('/schedule', asyncHandler(async (req: AuthRequest, res: Response) => }) } - const fallbackHoursPerDay = project.hoursPerDay + // ── 1. Load data from Prisma ─────────────────────────────────────────────── - // Load full hierarchy — filter inactive epics/features const allEpics = await prisma.epic.findMany({ where: { projectId: project.id }, orderBy: { order: 'asc' }, @@ -496,10 +365,10 @@ router.post('/schedule', asyncHandler(async (req: AuthRequest, res: Response) => orderBy: { order: 'asc' }, include: { tasks: { include: { resourceType: true } }, - dependencies: true, // StoryDependency rows where this story depends on others + dependencies: true, }, }, - dependencies: true, // FeatureDependency rows where this feature depends on others + dependencies: true, }, }, }, @@ -518,572 +387,66 @@ router.post('/schedule', asyncHandler(async (req: AuthRequest, res: Response) => .filter(e => e.isActive !== false) .map(e => ({ ...e, features: e.features.filter(f => f.isActive !== false) })) - // Load resource types const resourceTypes = await prisma.resourceType.findMany({ where: { projectId: project.id }, include: { namedResources: true } }) - const rtCountMap = new Map(resourceTypes.map(rt => [rt.id, rt.count])) - - // Helper: compute duration in weeks for a feature - function featureDurationWeeks(feature: typeof epics[0]['features'][0]): number { - const allTasks = feature.userStories.filter(s => s.isActive !== false).flatMap(s => s.tasks) - if (allTasks.length === 0) return 1 - - const byRt = new Map() - for (const task of allTasks) { - const group = byRt.get(task.resourceTypeId) ?? 
[] - group.push(task) - byRt.set(task.resourceTypeId, group) - } - - let maxDays = 0 - for (const [rtId, tasks] of byRt) { - const personDays = tasks.reduce((sum, t) => { - const hpd = t.resourceType?.hoursPerDay ?? fallbackHoursPerDay - return sum + (t.durationDays ?? (t.hoursEffort / hpd)) - }, 0) - const count = rtId ? (rtCountMap.get(rtId) ?? 1) : 1 - const days = personDays / count - if (days > maxDays) maxDays = days - } - return Math.max(0.2, maxDays / 5) - } - - // Build flat list of all features across all epics - const allFeatures = epics.flatMap(epic => - epic.features.map(f => ({ ...f, epic })) - ) - - // Build feature map for quick lookup - const featureMap = new Map(allFeatures.map(f => [f.id, f])) - // Load existing manual timeline entries — their startWeek is fixed const existingEntries = await prisma.timelineEntry.findMany({ where: { projectId: project.id, isManual: true }, }) - const manualStartWeeks = new Map(existingEntries.map(e => [e.featureId, e.startWeek])) - const manualDurationWeeks = new Map(existingEntries.map(e => [e.featureId, e.durationWeeks])) - const existingStoryEntries = await prisma.storyTimelineEntry.findMany({ where: { projectId: project.id, isManual: true }, }) - const manualStoryWeeks = new Map(existingStoryEntries.map(e => [e.storyId, e.startWeek])) - - // Load epic dependencies for hard constraint edges in the topological sort const epicDeps = await prisma.epicDependency.findMany({ where: { epic: { projectId: project.id } }, select: { epicId: true, dependsOnId: true }, }) - // Kahn's topological sort over features - const inDegree = new Map() - const adjList = new Map() // from → [to, ...] - const predecessors = new Map() // to → [from, ...] - - for (const f of allFeatures) { - inDegree.set(f.id, 0) - adjList.set(f.id, []) - predecessors.set(f.id, []) - } - - function addEdge(fromId: string, toId: string) { - const succs = adjList.get(fromId) - const preds = predecessors.get(toId) - if (!succs || !preds) return // one of the features not in this project - if (succs.includes(toId)) return // deduplicate - succs.push(toId) - preds.push(fromId) - inDegree.set(toId, (inDegree.get(toId) ?? 0) + 1) - } - - // 1. Intra-epic sequential edges: each feature depends on the previous in its epic - for (const epic of epics) { - if ((epic.featureMode ?? 'sequential') === 'sequential') { - const sorted = [...epic.features].sort((a, b) => a.order - b.order) - for (let i = 1; i < sorted.length; i++) { - // Don't chain successor onto a manually-pinned feature — let it float freely - // based only on explicit FeatureDependency rows - if (manualStartWeeks.has(sorted[i - 1].id)) continue - addEdge(sorted[i - 1].id, sorted[i].id) - } - } - } - - // 2. Inter-epic sequential chaining: Epic N completes before Epic N+1 starts - // All features of Epic[i] → first feature of Epic[i+1] (sequential) or all features (parallel) - const sortedEpics = [...epics].sort((a, b) => a.order - b.order) - for (let i = 1; i < sortedEpics.length; i++) { - const prevEpic = sortedEpics[i - 1] - const currEpic = sortedEpics[i] - if (prevEpic.features.length === 0 || currEpic.features.length === 0) continue - - // Skip if currEpic has a manual anchor — it will start at its fixed week regardless - if (currEpic.timelineStartWeek != null) continue - - // Skip if currEpic is parallel — it floats free, not chained after prevEpic - if ((currEpic.scheduleMode ?? 'sequential') === 'parallel') continue - - const currTargets = (currEpic.featureMode ?? 'sequential') === 'sequential' - ? 
[currEpic.features[0]] // first feature chains to the rest via sequential edges - : currEpic.features // parallel: all features need explicit constraint - - for (const prevFeature of prevEpic.features) { - // Bug fix: if this prevFeature has an explicit FeatureDependency on a feature in - // currEpic, adding the inter-epic chain edge would create a cycle - // (prevFeature → currEpic.first → ... → depTarget → prevFeature). - // Skip the chain edge for prevFeature in that case so it can float freely - // based only on its explicit deps. - const hasCrossEpicDep = (prevFeature.dependencies ?? []).some(dep => { - const target = featureMap.get(dep.dependsOnId) - return target !== undefined && target.epic.id === currEpic.id - }) - if (hasCrossEpicDep) continue - - for (const currFeature of currTargets) { - addEdge(prevFeature.id, currFeature.id) - } - } - } - - // 3. Explicit cross-epic feature dependency edges - for (const f of allFeatures) { - for (const dep of (f.dependencies ?? [])) { - addEdge(dep.dependsOnId, dep.featureId) - } - } - - // 4. Epic dependency hard constraints - // All features of dependsOn epic → all features of dependent epic - for (const epicDep of epicDeps) { - const fromEpic = epics.find(e => e.id === epicDep.dependsOnId) - const toEpic = epics.find(e => e.id === epicDep.epicId) - if (!fromEpic || !toEpic) continue - for (const fromFeature of fromEpic.features) { - for (const toFeature of toEpic.features) { - addEdge(fromFeature.id, toFeature.id) - } - } - } - - // Kahn's algorithm — priority queue: always pick lowest (epicOrder, featureOrder) - // This guarantees predecessors before successors AND respects user priority within independent features - const finishWeeks = new Map() - const startWeeks = new Map() - - function featurePriority(fId: string) { - const f = featureMap.get(fId)! - return f.epic.order * 100000 + f.order - } - - // #178: min-heap replacing array + sort — O(n log n) overall vs O(n² log n) - class MinHeap { - private data: Array<{ priority: number; id: string }> = [] - - push(item: { priority: number; id: string }) { - this.data.push(item) - this._bubbleUp(this.data.length - 1) - } - - pop(): { priority: number; id: string } | undefined { - if (this.data.length === 0) return undefined - const top = this.data[0] - const last = this.data.pop()! - if (this.data.length > 0) { - this.data[0] = last - this._sinkDown(0) - } - return top - } - - get length() { return this.data.length } - - private _bubbleUp(i: number) { - while (i > 0) { - const parent = Math.floor((i - 1) / 2) - if (this.data[parent].priority <= this.data[i].priority) break - ;[this.data[parent], this.data[i]] = [this.data[i], this.data[parent]] - i = parent - } - } - - private _sinkDown(i: number) { - const n = this.data.length - while (true) { - let smallest = i - const l = 2 * i + 1, r = 2 * i + 2 - if (l < n && this.data[l].priority < this.data[smallest].priority) smallest = l - if (r < n && this.data[r].priority < this.data[smallest].priority) smallest = r - if (smallest === i) break - ;[this.data[smallest], this.data[i]] = [this.data[i], this.data[smallest]] - i = smallest - } - } - } - - const queue = new MinHeap() - for (const [fId, deg] of inDegree) { - if (deg === 0) queue.push({ priority: featurePriority(fId), id: fId }) - } - - const processed: string[] = [] - - while (queue.length > 0) { - const { id: fId } = queue.pop()! - processed.push(fId) - - const f = featureMap.get(fId)! 
- const epic = f.epic - const dur = featureDurationWeeks(f) - - if (manualStartWeeks.has(fId)) { - const sw = manualStartWeeks.get(fId)! - startWeeks.set(fId, sw) - finishWeeks.set(fId, sw + dur) - } else { - // earliest = max(epic anchor, all predecessor finish weeks) - let earliest = epic.timelineStartWeek ?? 0 - for (const predId of predecessors.get(fId) ?? []) { - const predFinish = finishWeeks.get(predId) ?? 0 - if (predFinish > earliest) earliest = predFinish - } - startWeeks.set(fId, earliest) - finishWeeks.set(fId, earliest + dur) - } - - for (const succId of adjList.get(fId) ?? []) { - const newDeg = (inDegree.get(succId) ?? 1) - 1 - inDegree.set(succId, newDeg) - if (newDeg === 0) queue.push({ priority: featurePriority(succId), id: succId }) - } - } - - // Fallback: if any features weren't processed (cycle or unresolvable deps), - // schedule them at the end of their epic's predecessor chain so nothing is silently dropped - if (processed.length < allFeatures.length) { - // Compute max finishWeek for each epic among features that *were* processed - const epicMaxFinish = new Map() - for (const f of allFeatures) { - const fw = finishWeeks.get(f.id) - if (fw === undefined) continue - const prev = epicMaxFinish.get(f.epic.id) ?? 0 - if (fw > prev) epicMaxFinish.set(f.epic.id, fw) - } - // For each unscheduled feature: start at max(epic anchor, all prev-epic finishes) - for (const f of allFeatures) { - if (startWeeks.has(f.id)) continue - let earliest = f.epic.timelineStartWeek ?? 0 - // Walk the sorted epic chain up to this epic and find the latest finish - for (const prevEpic of sortedEpics) { - if (prevEpic.order >= f.epic.order) break - const prevFinish = epicMaxFinish.get(prevEpic.id) ?? 0 - if (prevFinish > earliest) earliest = prevFinish - } - startWeeks.set(f.id, earliest) - finishWeeks.set(f.id, earliest + featureDurationWeeks(f)) - processed.push(f.id) - // Update epicMaxFinish so subsequent features in the same epic can build on this - const cur = epicMaxFinish.get(f.epic.id) ?? 0 - const newFinish = earliest + featureDurationWeeks(f) - if (newFinish > cur) epicMaxFinish.set(f.epic.id, newFinish) - } - } - - // Tracks actual per-week resource consumption from the levelling simulation - // key: `${rtName}|${week}` → days consumed; populated only when resourceLevel=true - const weeklyConsumptionMap = new Map() - - if (resourceLevel) { - // featureResourceHours: total resource-hours needed per feature (unchanged helper) - function featureResourceHours(feature: typeof allFeatures[0]): Map { - const result = new Map() - for (const story of feature.userStories) { - if (story.isActive === false) continue - for (const task of story.tasks) { - const rtId = task.resourceTypeId ?? '_unassigned' - const hpd = task.resourceType?.hoursPerDay ?? fallbackHoursPerDay - const hours = (task.durationDays ?? (task.hoursEffort / hpd)) * hpd - result.set(rtId, (result.get(rtId) ?? 
0) + hours) - } - } - return result - } - - // #178: pre-compute featureResourceHours for all features to avoid re-computing in the simulation loop - const featureResourceHoursCache = new Map>() - for (const fId of processed) { - featureResourceHoursCache.set(fId, featureResourceHours(featureMap.get(fId)!)) - } - - // Variable weekly capacity: build lookup by resource type ID - const rtById = new Map(resourceTypes.map(rt => [rt.id, rt as ResourceTypeWithNamed])) - const allRtIds = [...resourceTypes.map(rt => rt.id), '_unassigned'] - - // Build remaining hours per feature (Map>) - const remainingHours = new Map>() - for (const fId of processed) { - if (manualStartWeeks.has(fId)) continue - remainingHours.set(fId, featureResourceHoursCache.get(fId)!) - } - - // Simulation state - const simStart = new Map() // fId -> week started - const simDone = new Map() // fId -> week completed - - // Manual features: fix their start/done from pre-computed values - for (const [fId, sw] of manualStartWeeks) { - simStart.set(fId, sw) - const storedDur = manualDurationWeeks.get(fId) - simDone.set(fId, sw + (storedDur !== undefined ? storedDur : featureDurationWeeks(featureMap.get(fId)!))) - } - - const STEP = 0.2 // 1 day per step - const MAX_WEEKS = 200 - const autoFeatures = processed.filter(fId => !manualStartWeeks.has(fId)) - const unfinished = new Set(autoFeatures) - - let t = 0 - while (unfinished.size > 0 && t < MAX_WEEKS) { - // Mark newly eligible features as started - for (const fId of unfinished) { - if (simStart.has(fId)) continue - const f = featureMap.get(fId)! - const epicStart = f.epic.timelineStartWeek ?? 0 - if (t < epicStart) continue - const predsAllDone = (predecessors.get(fId) ?? []).every(predId => { - const done = simDone.get(predId) - return done !== undefined && done <= t - }) - if (predsAllDone) { - const currentWeekForStart = Math.floor(t) - const fHours = remainingHours.get(fId) - // Only start the feature when at least one of its resource types has capacity > 0. - // This prevents features from sitting idle during week 0 when named resources - // don't start until week 1. - if (fHours && fHours.size > 0) { - const hasCapacity = [...fHours.keys()].some(rtId => { - if (rtId === '_unassigned') return true // unassigned fallback always works - const rt = rtById.get(rtId) - return !rt || getWeeklyCapacity(rt, currentWeekForStart, fallbackHoursPerDay) > 0 - }) - if (!hasCapacity) continue // wait until capacity is available - } - simStart.set(fId, t) - } - } - - // Active = started but not done - const active = [...unfinished].filter(fId => simStart.has(fId)) - - // Always track manual feature consumption, even when no auto features are active. - // This ensures manually-pinned features contribute to the histogram regardless of - // whether any auto-scheduled features are running in the same window. - const currentWeek = Math.floor(t) - for (const rtId of allRtIds) { - const rt = rtById.get(rtId) - const rtName = rt?.name ?? 'Unassigned' - const hpd = rt?.hoursPerDay ?? fallbackHoursPerDay - for (const [fId] of manualStartWeeks) { - const fStart = simStart.get(fId) - const fDone = simDone.get(fId) - if (fStart === undefined || fDone === undefined || fDone <= fStart) continue - if (t >= fStart && t < fDone) { - const rtHours = featureResourceHoursCache.get(fId)!.get(rtId) ?? 
0 - if (rtHours > 0) { - const perStep = (rtHours / (fDone - fStart)) * STEP - const consumptionKey = `${rtName}|${currentWeek}` - weeklyConsumptionMap.set(consumptionKey, (weeklyConsumptionMap.get(consumptionKey) ?? 0) + perStep / hpd) - } - } - } - } - - if (active.length === 0) { t += STEP; continue } - - // Features with no resource hours start and immediately complete - for (const fId of active) { - if (remainingHours.get(fId)?.size === 0) { - if (!simDone.has(fId)) { - simDone.set(fId, t + STEP) - unfinished.delete(fId) - } - } - } - - // Proportional allocation: for each resource type, divide capacity across active features needing it - for (const rtId of allRtIds) { - const rt = rtById.get(rtId) - const capPerWeek = rt - ? getWeeklyCapacity(rt, currentWeek, fallbackHoursPerDay) - : fallbackHoursPerDay * 5 // _unassigned fallback - let capPerStep = capPerWeek * STEP // hours available this step (STEP fraction of a week) - const rtName = rt?.name ?? 'Unassigned' - const hpd = rt?.hoursPerDay ?? fallbackHoursPerDay - - // Subtract capacity consumed by active manual features this step so that - // auto-scheduled features don't over-allocate during manual windows. - // NOTE: weeklyConsumptionMap is already updated in the block above — do not write it again here. - for (const [fId] of manualStartWeeks) { - const fStart = simStart.get(fId) - const fDone = simDone.get(fId) - if (fStart === undefined || fDone === undefined || fDone <= fStart) continue - if (t >= fStart && t < fDone) { - const rtHours = featureResourceHoursCache.get(fId)!.get(rtId) ?? 0 - if (rtHours > 0) { - const perStep = (rtHours / (fDone - fStart)) * STEP - capPerStep = Math.max(0, capPerStep - perStep) - } - } - } - - const competing = active.filter(fId => (remainingHours.get(fId)?.get(rtId) ?? 0) > 0.001) - if (competing.length === 0) continue - - const totalRemaining = competing.reduce((s, fId) => s + (remainingHours.get(fId)!.get(rtId) ?? 0), 0) - - for (const fId of competing) { - const rem = remainingHours.get(fId)!.get(rtId)! - const actualAllocated = Math.min((rem / totalRemaining) * capPerStep, rem) - // Track actual consumption per RT name per week - const consumptionKey = `${rtName}|${currentWeek}` - weeklyConsumptionMap.set(consumptionKey, (weeklyConsumptionMap.get(consumptionKey) ?? 0) + actualAllocated / hpd) - remainingHours.get(fId)!.set(rtId, Math.max(0, rem - actualAllocated)) - } - } - - // Mark done: all resource types exhausted - for (const fId of active) { - const allDone = [...(remainingHours.get(fId)?.values() ?? [])].every(h => h <= 0.001) - if (allDone) { - simDone.set(fId, t + STEP) - unfinished.delete(fId) - } - } - - t += STEP - t = Math.round(t * 5) / 5 // snap to nearest 0.2 to eliminate float drift - } - - // Apply simulation results back to startWeeks/finishWeeks - for (const fId of processed) { - const sw = simStart.get(fId) ?? startWeeks.get(fId) ?? 0 - const doneW = simDone.get(fId) - const dur = doneW !== undefined ? doneW - sw : featureDurationWeeks(featureMap.get(fId)!) 
- startWeeks.set(fId, sw) - finishWeeks.set(fId, sw + dur) - } - } - - // ── Story-level scheduling ───────────────────────────────────────────────── - // Build flat list of all stories with their feature context - const allStories = epics.flatMap(epic => - epic.features.flatMap(feature => - feature.userStories - .filter(s => s.isActive !== false) - .map(s => ({ ...s, feature: { ...feature, epic } })) - ) - ) - - // Story resource hours - function storyResourceHours(story: typeof allStories[0]): Map { - const result = new Map() - for (const task of story.tasks) { - const rtId = task.resourceTypeId ?? '_unassigned' - const hpd = task.resourceType?.hoursPerDay ?? fallbackHoursPerDay - const hours = (task.durationDays ?? (task.hoursEffort / hpd)) * hpd - result.set(rtId, (result.get(rtId) ?? 0) + hours) - } - return result - } - - // ── Per-feature proportional story scheduling ────────────────────────────── - // Stories are distributed sequentially and proportionally within their parent - // feature's scheduled window [featureStart, featureDone], based on total hours. - // This keeps all story bars visually within their feature bar. - // - // Manual story overrides retain their stored startWeek; their duration is - // re-computed from their own hours. - - // Group non-manual stories by feature, sorted by story order - const storiesByFeature = new Map() - for (const story of allStories) { - const fId = story.feature.id - if (!storiesByFeature.has(fId)) storiesByFeature.set(fId, []) - storiesByFeature.get(fId)!.push(story) - } - for (const stories of storiesByFeature.values()) { - stories.sort((a, b) => (a.order ?? 0) - (b.order ?? 0)) - } - - // Helper: total hours for a story - function storyTotalHours(story: typeof allStories[0]): number { - return [...storyResourceHours(story).values()].reduce((a, b) => a + b, 0) - } - - // #178: Compute scheduled position for every story in O(S) total — one pass per feature - // Previously O(S²): for each story we'd scan siblings from the start to accumulate the cursor. - // Now we build the storyPositionMap (featureId → pre-computed schedules) in a single pass - // per feature, then stamp each story's result without any repeated scanning. - const storyScheduled = new Map() - - // Pass 1: manual-pinned stories (O(S) total, independent of siblings) - for (const story of allStories) { - if (manualStoryWeeks.has(story.id)) { - const sw = manualStoryWeeks.get(story.id)! - const totalHours = storyTotalHours(story) - const dur = Math.max(0.2, totalHours / fallbackHoursPerDay / 5) - storyScheduled.set(story.id, { startWeek: sw, durationWeeks: dur, isManual: true }) - } - } + // ── 2. Run the pure scheduler ───────────────────────────────────────────── + + const { featureSchedule, storySchedule, weeklyConsumptionMap, parallelWarnings } = runScheduler({ + project: { hoursPerDay: project.hoursPerDay }, + epics, + resourceTypes, + epicDeps, + manualFeatureEntries: existingEntries.map(e => ({ + featureId: e.featureId, + startWeek: e.startWeek, + durationWeeks: e.durationWeeks, + })), + manualStoryEntries: existingStoryEntries.map(e => ({ + storyId: e.storyId, + startWeek: e.startWeek, + })), + resourceLevel, + }) - // Pass 2: proportional sequential scheduling — one pass per feature, O(S) total - for (const [fId, stories] of storiesByFeature) { - const featureStart = startWeeks.get(fId) ?? 0 - const featureDone = finishWeeks.get(fId) ?? 
(featureStart + 1) - const featureDuration = Math.max(0.2, featureDone - featureStart) - - // Exclude manually-pinned stories from proportional scheduling - const siblings = stories.filter(s => !manualStoryWeeks.has(s.id)) - if (siblings.length === 0) continue - - const totalFeatureHours = siblings.reduce((sum, s) => sum + storyTotalHours(s), 0) - - // Build position map in a single forward scan — no repeated iteration - let cursor = featureStart - for (const sibling of siblings) { - const hrs = storyTotalHours(sibling) - const dur = totalFeatureHours > 0 - ? (hrs / totalFeatureHours) * featureDuration - : featureDuration / Math.max(1, siblings.length) - const safeDur = Math.max(0.2, dur) - storyScheduled.set(sibling.id, { startWeek: cursor, durationWeeks: safeDur, isManual: false }) - cursor += safeDur - } - } + // ── 3. Write results to DB ──────────────────────────────────────────────── - // Write StoryTimelineEntry records - const storyUpserts = allStories.map(async story => { - const scheduled = storyScheduled.get(story.id) - if (!scheduled) return null - const { startWeek: sw, durationWeeks: dur, isManual } = scheduled - return prisma.storyTimelineEntry.upsert({ - where: { storyId: story.id }, - create: { storyId: story.id, projectId: project.id, startWeek: sw, durationWeeks: dur, isManual }, - update: isManual ? {} : { startWeek: sw, durationWeeks: dur, isManual: false }, + // Feature timeline upserts + await Promise.all(featureSchedule.map(({ featureId, startWeek, durationWeeks, isManual }) => + prisma.timelineEntry.upsert({ + where: { featureId }, + create: { projectId: project.id, featureId, startWeek, durationWeeks, isManual }, + update: isManual ? {} : { startWeek, durationWeeks, isManual: false }, }) - }) - await Promise.all(storyUpserts) - // ── End story-level scheduling ───────────────────────────────────────────── - - // #178: run feature timeline upserts in parallel instead of sequentially - await Promise.all([...processed].map(fId => { - const sw = startWeeks.get(fId)! - const f = featureMap.get(fId)! - const dur = (finishWeeks.get(fId) ?? (sw + featureDurationWeeks(f))) - sw - const isManual = manualStartWeeks.has(fId) - return prisma.timelineEntry.upsert({ - where: { featureId: fId }, - create: { projectId: project.id, featureId: fId, startWeek: sw, durationWeeks: dur, isManual }, - update: isManual ? {} : { startWeek: sw, durationWeeks: dur, isManual: false }, + )) + + // Story timeline upserts + await Promise.all(storySchedule.map(({ storyId, startWeek, durationWeeks, isManual }) => + prisma.storyTimelineEntry.upsert({ + where: { storyId }, + create: { storyId, projectId: project.id, startWeek, durationWeeks, isManual }, + update: isManual ? {} : { startWeek, durationWeeks, isManual: false }, }) - })) + )) + + // Persist the weekly demand cache so GET /timeline can reuse actual consumption + // data rather than falling back to uniform spread. + await prisma.project.update({ + where: { id: project.id }, + data: { weeklyDemandCache: Object.fromEntries(weeklyConsumptionMap) }, + }) + + // ── 4. 
Re-fetch and build HTTP response ──────────────────────────────────── const entries = await prisma.timelineEntry.findMany({ where: { projectId: project.id }, @@ -1091,38 +454,24 @@ router.post('/schedule', asyncHandler(async (req: AuthRequest, res: Response) => feature: { include: { epic: true, - userStories: { - include: { - tasks: { include: { resourceType: true } } - } - } - } - } + userStories: { include: { tasks: { include: { resourceType: true } } } }, + }, + }, }, orderBy: { startWeek: 'asc' }, }) - // #178: pass pre-loaded allFeatures and resourceTypes to avoid redundant DB queries - const parallelWarnings = await computeParallelWarnings(project.hoursPerDay, entries, allFeatures, resourceTypes) - const storyTimelineEntries = await prisma.storyTimelineEntry.findMany({ where: { projectId: project.id }, include: { story: { select: { name: true, featureId: true } } }, }) const allFeatureIds = entries.map(e => e.featureId) - const featureDependencies = await prisma.featureDependency.findMany({ - where: { featureId: { in: allFeatureIds } }, - select: { featureId: true, dependsOnId: true }, - }) - const epicDependenciesForResponse = await prisma.epicDependency.findMany({ - where: { epic: { projectId: project.id } }, - select: { epicId: true, dependsOnId: true }, - }) - const allStoryIds = storyTimelineEntries.map(e => e.storyId) - const storyDependencies = await prisma.storyDependency.findMany({ - where: { storyId: { in: allStoryIds } }, - select: { storyId: true, dependsOnId: true }, - }) + const [featureDependencies, epicDependenciesForResponse, storyDependencies] = await Promise.all([ + prisma.featureDependency.findMany({ where: { featureId: { in: allFeatureIds } }, select: { featureId: true, dependsOnId: true } }), + prisma.epicDependency.findMany({ where: { epic: { projectId: project.id } }, select: { epicId: true, dependsOnId: true } }), + prisma.storyDependency.findMany({ where: { storyId: { in: storyTimelineEntries.map(e => e.storyId) } }, select: { storyId: true, dependsOnId: true } }), + ]) + const mappedStoryEntries = storyTimelineEntries.map(e => ({ storyId: e.storyId, storyName: e.story.name, @@ -1132,13 +481,6 @@ router.post('/schedule', asyncHandler(async (req: AuthRequest, res: Response) => isManual: e.isManual, })) - // Persist the weekly demand cache so GET /timeline can reuse actual consumption - // data rather than falling back to uniform spread. - await prisma.project.update({ - where: { id: project.id }, - data: { weeklyDemandCache: Object.fromEntries(weeklyConsumptionMap) }, - }) - res.json(buildResponse(project, entries, parallelWarnings, mappedStoryEntries, featureDependencies, storyDependencies, epicDependenciesForResponse, resourceTypes, weeklyConsumptionMap)) })) diff --git a/server/src/test/scheduler.test.ts b/server/src/test/scheduler.test.ts new file mode 100644 index 0000000..29701a2 --- /dev/null +++ b/server/src/test/scheduler.test.ts @@ -0,0 +1,423 @@ +/** + * scheduler.test.ts + * + * Unit tests for the pure scheduling engine (lib/scheduler.ts). + * + * Because runScheduler() is a pure function with no DB or I/O dependencies, + * there is nothing to mock — we just construct minimal SchedulerInput objects + * and assert on SchedulerOutput. 
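+ *
+ * A typical test shape, using the builder helpers defined below (a minimal
+ * sketch; the exact assertions vary per case):
+ *
+ *   const rt = makeRt('rt1', 'Dev', 1)
+ *   const epic = makeEpic('e1', [makeFeature('f1', [makeStory('s1', [makeTask(40, 'rt1')])])])
+ *   const out = runScheduler(baseInput({ epics: [epic], resourceTypes: [rt] }))
+ *   // 40h at 8h/day × 5 days/week ≈ 1 week of work, starting at week 0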
+ */
+import { describe, it, expect } from 'vitest'
+import {
+  runScheduler,
+  getWeeklyCapacity,
+  effectiveAllocationPct,
+  type SchedulerInput,
+  type SchedulerEpic,
+  type SchedulerFeature,
+  type SchedulerStory,
+  type SchedulerResourceType,
+} from '../lib/scheduler.js'
+
+// ─────────────────────────────────────────────────────────────────────────────
+// Helpers to build minimal input objects
+// ─────────────────────────────────────────────────────────────────────────────
+
+function makeTask(hoursEffort: number, rtId: string | null = null, rtName = 'Dev', hpd = 8) {
+  return {
+    resourceTypeId: rtId,
+    hoursEffort,
+    durationDays: null as null,
+    resourceType: rtId ? { id: rtId, name: rtName, hoursPerDay: hpd } : null,
+  }
+}
+
+function makeStory(id: string, tasks: ReturnType<typeof makeTask>[], order = 0): SchedulerStory {
+  return { id, order, isActive: null, tasks }
+}
+
+function makeFeature(
+  id: string,
+  stories: SchedulerStory[],
+  order = 0,
+  deps: Array<{ featureId: string; dependsOnId: string }> = [],
+): SchedulerFeature {
+  return { id, order, isActive: null, userStories: stories, dependencies: deps }
+}
+
+function makeEpic(
+  id: string,
+  features: SchedulerFeature[],
+  opts: Partial<Omit<SchedulerEpic, 'id' | 'features'>> = {},
+): SchedulerEpic {
+  return {
+    id,
+    name: id,
+    order: 0,
+    isActive: null,
+    featureMode: 'sequential',
+    scheduleMode: 'sequential',
+    timelineStartWeek: null,
+    features,
+    ...opts,
+  }
+}
+
+function makeRt(id: string, name: string, count: number, hpd = 8): SchedulerResourceType {
+  return { id, name, count, hoursPerDay: hpd, namedResources: [] }
+}
+
+function baseInput(overrides: Partial<SchedulerInput> = {}): SchedulerInput {
+  return {
+    project: { hoursPerDay: 8 },
+    epics: [],
+    resourceTypes: [],
+    epicDeps: [],
+    manualFeatureEntries: [],
+    manualStoryEntries: [],
+    resourceLevel: false,
+    ...overrides,
+  }
+}
+
+// ─────────────────────────────────────────────────────────────────────────────
+// Helper tests (pure utility functions)
+// ─────────────────────────────────────────────────────────────────────────────
+
+describe('effectiveAllocationPct', () => {
+  it('FULL_PROJECT: returns allocationPercent for any week', () => {
+    const nr = { id: 'nr1', name: 'Dev 1', startWeek: null, endWeek: null, allocationPct: 100, allocationMode: 'FULL_PROJECT', allocationPercent: 80, allocationStartWeek: null, allocationEndWeek: null }
+    expect(effectiveAllocationPct(nr, 0)).toBe(80)
+    expect(effectiveAllocationPct(nr, 99)).toBe(80)
+  })
+
+  it('TIMELINE: returns allocationPercent only within window', () => {
+    const nr = { id: 'nr1', name: 'Dev 1', startWeek: 2, endWeek: 5, allocationPct: 100, allocationMode: 'TIMELINE', allocationPercent: 100, allocationStartWeek: 2, allocationEndWeek: 5 }
+    expect(effectiveAllocationPct(nr, 1)).toBe(0)
+    expect(effectiveAllocationPct(nr, 2)).toBe(100)
+    expect(effectiveAllocationPct(nr, 5)).toBe(100)
+    expect(effectiveAllocationPct(nr, 6)).toBe(0)
+  })
+
+  it('EFFORT: always returns 100', () => {
+    const nr = { id: 'nr1', name: 'Dev 1', startWeek: 1, endWeek: 3, allocationPct: 50, allocationMode: 'EFFORT', allocationPercent: 50, allocationStartWeek: null, allocationEndWeek: null }
+    expect(effectiveAllocationPct(nr, 0)).toBe(100)
+    expect(effectiveAllocationPct(nr, 10)).toBe(100)
+  })
+})
+
+describe('getWeeklyCapacity', () => {
+  it('no named resources: count × hpd × 5', () => {
+    const rt = makeRt('rt1', 'Dev', 3, 8)
+    expect(getWeeklyCapacity(rt, 0, 8)).toBe(3 * 8 * 5)
+  })
+
+  it('named resources: sums capacity from active members', () => {
+    const rt:
SchedulerResourceType = { + id: 'rt1', name: 'Dev', count: 2, hoursPerDay: 8, + namedResources: [ + { id: 'nr1', name: 'Alice', startWeek: 0, endWeek: 10, allocationPct: 100, allocationMode: 'FULL_PROJECT', allocationPercent: 100, allocationStartWeek: null, allocationEndWeek: null }, + { id: 'nr2', name: 'Bob', startWeek: 5, endWeek: 10, allocationPct: 100, allocationMode: 'FULL_PROJECT', allocationPercent: 100, allocationStartWeek: null, allocationEndWeek: null }, + ], + } + expect(getWeeklyCapacity(rt, 4, 8)).toBe(1 * 8 * 5) // only Alice active + expect(getWeeklyCapacity(rt, 5, 8)).toBe(2 * 8 * 5) // both active + }) +}) + +// ───────────────────────────────────────────────────────────────────────────── +// runScheduler tests +// ───────────────────────────────────────────────────────────────────────────── + +describe('runScheduler', () => { + // ── Happy path ────────────────────────────────────────────────────────────── + it('single epic, single feature, single task → schedules at week 0', () => { + const rt = makeRt('rt1', 'Dev', 1) + const story = makeStory('s1', [makeTask(40, 'rt1', 'Dev')]) // 40h = 5 days = 1 week + const feature = makeFeature('f1', [story]) + const epic = makeEpic('e1', [feature]) + + const result = runScheduler(baseInput({ epics: [epic], resourceTypes: [rt] })) + + const fEntry = result.featureSchedule.find(e => e.featureId === 'f1') + expect(fEntry).toBeDefined() + expect(fEntry!.startWeek).toBe(0) + expect(fEntry!.durationWeeks).toBeCloseTo(1, 1) + expect(fEntry!.isManual).toBe(false) + + const sEntry = result.storySchedule.find(e => e.storyId === 's1') + expect(sEntry).toBeDefined() + expect(sEntry!.startWeek).toBe(0) + expect(sEntry!.isManual).toBe(false) + }) + + // ── Parallel features ─────────────────────────────────────────────────────── + it('parallel featureMode: both features start at the same week', () => { + const rt = makeRt('rt1', 'Dev', 2) + const f1 = makeFeature('f1', [makeStory('s1', [makeTask(40, 'rt1', 'Dev')])], 0) + const f2 = makeFeature('f2', [makeStory('s2', [makeTask(40, 'rt1', 'Dev')])], 1) + const epic = makeEpic('e1', [f1, f2], { featureMode: 'parallel' }) + + const result = runScheduler(baseInput({ epics: [epic], resourceTypes: [rt] })) + + const sw1 = result.featureSchedule.find(e => e.featureId === 'f1')!.startWeek + const sw2 = result.featureSchedule.find(e => e.featureId === 'f2')!.startWeek + expect(sw1).toBe(sw2) // both start at week 0 in parallel mode + }) + + // ── Sequential features ───────────────────────────────────────────────────── + it('sequential featureMode: feature 2 starts after feature 1 finishes', () => { + const rt = makeRt('rt1', 'Dev', 1) + const f1 = makeFeature('f1', [makeStory('s1', [makeTask(40, 'rt1', 'Dev')])], 0) + const f2 = makeFeature('f2', [makeStory('s2', [makeTask(40, 'rt1', 'Dev')])], 1) + const epic = makeEpic('e1', [f1, f2], { featureMode: 'sequential' }) + + const result = runScheduler(baseInput({ epics: [epic], resourceTypes: [rt] })) + + const e1 = result.featureSchedule.find(e => e.featureId === 'f1')! + const e2 = result.featureSchedule.find(e => e.featureId === 'f2')! 
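+    // Sequential chaining: f2 should start exactly where f1 finishes;
+    // toBeCloseTo absorbs floating-point drift in the week arithmetic.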
+ expect(e2.startWeek).toBeCloseTo(e1.startWeek + e1.durationWeeks, 5) + }) + + // ── Epic dependency ───────────────────────────────────────────────────────── + it('epicDependency: dependent epic starts after parent finishes', () => { + const rt = makeRt('rt1', 'Dev', 1) + const f1 = makeFeature('f1', [makeStory('s1', [makeTask(40, 'rt1', 'Dev')])]) + const f2 = makeFeature('f2', [makeStory('s2', [makeTask(40, 'rt1', 'Dev')])]) + const epicA = makeEpic('epicA', [f1], { order: 0 }) + const epicB = makeEpic('epicB', [f2], { order: 1 }) + + const result = runScheduler(baseInput({ + epics: [epicA, epicB], + resourceTypes: [rt], + epicDeps: [{ epicId: 'epicB', dependsOnId: 'epicA' }], + })) + + const eA = result.featureSchedule.find(e => e.featureId === 'f1')! + const eB = result.featureSchedule.find(e => e.featureId === 'f2')! + expect(eB.startWeek).toBeGreaterThanOrEqual(eA.startWeek + eA.durationWeeks - 0.001) + }) + + // ── Resource constraint ───────────────────────────────────────────────────── + it('resource-level=true, count=1: single RT serialises features even in parallel epic', () => { + const rt = makeRt('rt1', 'Dev', 1) // only 1 developer + const f1 = makeFeature('f1', [makeStory('s1', [makeTask(40, 'rt1', 'Dev')])], 0) + const f2 = makeFeature('f2', [makeStory('s2', [makeTask(40, 'rt1', 'Dev')])], 1) + const epic = makeEpic('e1', [f1, f2], { featureMode: 'parallel' }) + + const result = runScheduler(baseInput({ + epics: [epic], + resourceTypes: [rt], + resourceLevel: true, + })) + + const e1 = result.featureSchedule.find(e => e.featureId === 'f1')! + const e2 = result.featureSchedule.find(e => e.featureId === 'f2')! + // With 1 Dev, features cannot truly run in parallel — total duration must be ~2 weeks + const totalDuration = Math.max(e1.startWeek + e1.durationWeeks, e2.startWeek + e2.durationWeeks) + expect(totalDuration).toBeGreaterThanOrEqual(1.8) // at least ~2 weeks + expect(result.weeklyConsumptionMap.size).toBeGreaterThan(0) // consumption tracked + }) + + // ── Named resource start/end constraint ───────────────────────────────────── + it('named resource with startWeek=2: feature cannot start before week 2', () => { + const rt: SchedulerResourceType = { + id: 'rt1', name: 'Dev', count: 1, hoursPerDay: 8, + namedResources: [ + { id: 'nr1', name: 'Alice', startWeek: 2, endWeek: null, allocationPct: 100, allocationMode: 'TIMELINE', allocationPercent: 100, allocationStartWeek: 2, allocationEndWeek: null }, + ], + } + const f1 = makeFeature('f1', [makeStory('s1', [makeTask(40, 'rt1', 'Dev')])]) + const epic = makeEpic('e1', [f1]) + + const result = runScheduler(baseInput({ + epics: [epic], + resourceTypes: [rt], + resourceLevel: true, + })) + + const entry = result.featureSchedule.find(e => e.featureId === 'f1')! + // Feature must wait until the named resource is available (week 2) + expect(entry.startWeek).toBeGreaterThanOrEqual(2) + }) + + // ── Manual override on a story ─────────────────────────────────────────────── + it('manual story override: story keeps its pinned startWeek, isManual=true', () => { + const rt = makeRt('rt1', 'Dev', 1) + const story = makeStory('s1', [makeTask(40, 'rt1', 'Dev')]) + const feature = makeFeature('f1', [story]) + const epic = makeEpic('e1', [feature]) + + const result = runScheduler(baseInput({ + epics: [epic], + resourceTypes: [rt], + manualStoryEntries: [{ storyId: 's1', startWeek: 5 }], + })) + + const sEntry = result.storySchedule.find(e => e.storyId === 's1')! 
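+    // The pinned week from manualStoryEntries must pass through unchanged,
+    // and isManual=true is what lets later reschedules keep the pin.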
+    expect(sEntry.startWeek).toBe(5)
+    expect(sEntry.isManual).toBe(true)
+  })
+
+  // ── Manual override on a feature ────────────────────────────────────────────
+  it('manual feature override: feature keeps its pinned startWeek, isManual=true', () => {
+    const rt = makeRt('rt1', 'Dev', 1)
+    const feature = makeFeature('f1', [makeStory('s1', [makeTask(40, 'rt1', 'Dev')])])
+    const epic = makeEpic('e1', [feature])
+
+    const result = runScheduler(baseInput({
+      epics: [epic],
+      resourceTypes: [rt],
+      manualFeatureEntries: [{ featureId: 'f1', startWeek: 10, durationWeeks: 2 }],
+    }))
+
+    const fEntry = result.featureSchedule.find(e => e.featureId === 'f1')!
+    expect(fEntry.startWeek).toBe(10)
+    expect(fEntry.isManual).toBe(true)
+  })
+
+  // ── Empty input ──────────────────────────────────────────────────────────────
+  it('empty input (no epics): returns empty arrays, no crash', () => {
+    const result = runScheduler(baseInput())
+
+    expect(result.featureSchedule).toEqual([])
+    expect(result.storySchedule).toEqual([])
+    expect(result.parallelWarnings).toEqual([])
+    expect(result.weeklyConsumptionMap.size).toBe(0)
+  })
+
+  // ── Feature with 0 hours / no tasks ─────────────────────────────────────────
+  it('feature with no tasks: scheduled with default 1-week duration, no crash', () => {
+    const f1 = makeFeature('f1', [makeStory('s1', [])]) // story with no tasks
+    const epic = makeEpic('e1', [f1])
+
+    const result = runScheduler(baseInput({ epics: [epic] }))
+
+    const fEntry = result.featureSchedule.find(e => e.featureId === 'f1')
+    expect(fEntry).toBeDefined()
+    expect(fEntry!.durationWeeks).toBeGreaterThanOrEqual(0.2)
+    // Story has 0 hours: still gets an entry (proportional of 0 gets safeDur=0.2)
+    const sEntry = result.storySchedule.find(e => e.storyId === 's1')
+    expect(sEntry).toBeDefined()
+  })
+
+  // ── Feature with no stories (empty userStories array) ──────────────────────
+  it('feature with empty userStories array: scheduled with default duration', () => {
+    const f1 = makeFeature('f1', []) // no stories at all
+    const epic = makeEpic('e1', [f1])
+
+    const result = runScheduler(baseInput({ epics: [epic] }))
+
+    const fEntry = result.featureSchedule.find(e => e.featureId === 'f1')
+    expect(fEntry).toBeDefined()
+    expect(fEntry!.startWeek).toBe(0)
+    // featureDurationWeeks returns 1 when allTasks is empty
+    expect(fEntry!.durationWeeks).toBe(1)
+  })
+
+  // ── Explicit feature dependency ──────────────────────────────────────────────
+  it('explicit featureDependency: f2 starts after f1 even in parallel epic', () => {
+    const rt = makeRt('rt1', 'Dev', 2)
+    const f1 = makeFeature('f1', [makeStory('s1', [makeTask(40, 'rt1', 'Dev')])], 0)
+    // f2 explicitly depends on f1
+    const f2 = makeFeature('f2', [makeStory('s2', [makeTask(40, 'rt1', 'Dev')])], 1, [
+      { featureId: 'f2', dependsOnId: 'f1' },
+    ])
+    const epic = makeEpic('e1', [f1, f2], { featureMode: 'parallel' })
+
+    const result = runScheduler(baseInput({ epics: [epic], resourceTypes: [rt] }))
+
+    const e1 = result.featureSchedule.find(e => e.featureId === 'f1')!
+    const e2 = result.featureSchedule.find(e => e.featureId === 'f2')!
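+    // The explicit dependency must win over the epic's parallel mode:
+    // f2 may not start before f1 finishes (0.001 epsilon for float rounding).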
+ expect(e2.startWeek).toBeGreaterThanOrEqual(e1.startWeek + e1.durationWeeks - 0.001) + }) + + // ── Two epics sequential (default) ────────────────────────────────────────── + it('two sequential epics: epic 2 starts after epic 1 completes', () => { + const rt = makeRt('rt1', 'Dev', 1) + const f1 = makeFeature('f1', [makeStory('s1', [makeTask(40, 'rt1', 'Dev')])]) + const f2 = makeFeature('f2', [makeStory('s2', [makeTask(40, 'rt1', 'Dev')])]) + const epic1 = makeEpic('e1', [f1], { order: 0 }) + const epic2 = makeEpic('e2', [f2], { order: 1 }) + + const result = runScheduler(baseInput({ epics: [epic1, epic2], resourceTypes: [rt] })) + + const e1 = result.featureSchedule.find(e => e.featureId === 'f1')! + const e2 = result.featureSchedule.find(e => e.featureId === 'f2')! + expect(e2.startWeek).toBeGreaterThanOrEqual(e1.startWeek + e1.durationWeeks - 0.001) + }) + + // ── Parallel warnings ──────────────────────────────────────────────────────── + it('parallel epic with insufficient capacity: generates parallel warning', () => { + const rt = makeRt('rt1', 'Dev', 1) // only 1 dev + // Two features in a parallel epic needing 2× capacity + const f1 = makeFeature('f1', [makeStory('s1', [makeTask(40, 'rt1', 'Dev')])], 0) + const f2 = makeFeature('f2', [makeStory('s2', [makeTask(40, 'rt1', 'Dev')])], 1) + const epic = makeEpic('e1', [f1, f2], { featureMode: 'parallel' }) + + const result = runScheduler(baseInput({ epics: [epic], resourceTypes: [rt] })) + + expect(result.parallelWarnings.length).toBeGreaterThan(0) + expect(result.parallelWarnings[0].epicId).toBe('e1') + expect(result.parallelWarnings[0].resourceTypeName).toBe('Dev') + }) + + // ── Resource-levelling: consumption map populated ──────────────────────────── + it('resourceLevel=true: weeklyConsumptionMap is populated', () => { + const rt = makeRt('rt1', 'Dev', 1) + const feature = makeFeature('f1', [makeStory('s1', [makeTask(40, 'rt1', 'Dev')])]) + const epic = makeEpic('e1', [feature]) + + const result = runScheduler(baseInput({ + epics: [epic], + resourceTypes: [rt], + resourceLevel: true, + })) + + expect(result.weeklyConsumptionMap.size).toBeGreaterThan(0) + const totalDays = [...result.weeklyConsumptionMap.values()].reduce((a, b) => a + b, 0) + expect(totalDays).toBeCloseTo(5, 0) // 40h / 8hpd = 5 days + }) + + // ── Cross-epic dep anti-cycle (hasCrossEpicDep skip logic) ────────────────── + it('hasCrossEpicDep: skips inter-epic chaining edge to avoid cycle, both features scheduled', () => { + const rt = makeRt('rt1', 'Dev', 1) + // fA (in epicA) explicitly depends on fB (in epicB). + // Without the hasCrossEpicDep guard the inter-epic chain would add fA→fB + // while the explicit dep adds fB→fA, creating a cycle. 
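+    // (epicA is ordered before epicB, so order-based chaining would make fB
+    //  wait for fA while the explicit dependency makes fA wait for fB; the
+    //  guard drops the chaining edge to break that loop.)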
+ const fB = makeFeature('fB', [makeStory('sB', [makeTask(40, 'rt1', 'Dev')])], 0) + const fA = makeFeature( + 'fA', + [makeStory('sA', [makeTask(40, 'rt1', 'Dev')])], + 0, + [{ featureId: 'fA', dependsOnId: 'fB' }], // fA depends on fB + ) + const epicA = makeEpic('epicA', [fA], { order: 0 }) + const epicB = makeEpic('epicB', [fB], { order: 1 }) + + // Should complete without an infinite loop or thrown error + const result = runScheduler(baseInput({ epics: [epicA, epicB], resourceTypes: [rt] })) + + const entryA = result.featureSchedule.find(e => e.featureId === 'fA') + const entryB = result.featureSchedule.find(e => e.featureId === 'fB') + expect(entryA).toBeDefined() + expect(entryB).toBeDefined() + // fA depends on fB so fA must start no earlier than fB finishes + expect(entryA!.startWeek).toBeGreaterThanOrEqual(entryB!.startWeek + entryB!.durationWeeks - 0.001) + }) + + // ── Epic timelineStartWeek anchor ──────────────────────────────────────────── + it('timelineStartWeek: epic2 feature starts at the pinned week regardless of epic1 end', () => { + const rt = makeRt('rt1', 'Dev', 1) + const f1 = makeFeature('f1', [makeStory('s1', [makeTask(40, 'rt1', 'Dev')])]) + const f2 = makeFeature('f2', [makeStory('s2', [makeTask(40, 'rt1', 'Dev')])]) + const epic1 = makeEpic('e1', [f1], { order: 0 }) + const epic2 = makeEpic('e2', [f2], { order: 1, timelineStartWeek: 5 }) + + const result = runScheduler(baseInput({ epics: [epic1, epic2], resourceTypes: [rt] })) + + const entry2 = result.featureSchedule.find(e => e.featureId === 'f2')! + // The timelineStartWeek anchor must be respected; inter-epic chaining is skipped + expect(entry2.startWeek).toBe(5) + }) +})
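+
+// The suite runs under Vitest (imported above); a typical invocation from the
+// server package is `npx vitest run src/test/scheduler.test.ts`, though the
+// exact script name may differ per package.json.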