Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
35 commits
Select commit Hold shift + click to select a range
8668e62
New: Add ContentTree abstraction for efficient tree operations (refs …
taylortom Mar 10, 2026
6066562
Fix: Address code audit bugs, cache module refs, and simplify tests (…
taylortom Mar 10, 2026
4ae6cea
Update: Guard updateSortOrder and updateEnabledPlugins in update() (r…
taylortom Mar 10, 2026
ac8c3ad
Chore: Add unit tests for update guard conditions (refs #79)
taylortom Mar 10, 2026
5fa98c5
Update: Add MongoDB projections to all safe find call sites (refs #79)
taylortom Mar 10, 2026
1dfa372
Chore: Standardise this.find vs super.find usage (refs #79)
taylortom Mar 10, 2026
3472238
Update: Use findOne instead of destructured find for single-doc lookups
taylortom Mar 10, 2026
342ee2f
Update: Use deleteMany for descendant deletion (fixes #112)
taylortom Mar 10, 2026
2625c5d
Chore: Remove module-specific reference from delete comment
taylortom Mar 10, 2026
60c1e15
Fix: Add rollback, input validation, and parallel clones (fixes #111)
taylortom Mar 10, 2026
d36a5a9
Fix: Improve getSchema reliability and performance (fixes #110)
taylortom Mar 11, 2026
4409f69
New: Add dedicated tree endpoint with conditional request support (re…
taylortom Mar 11, 2026
3e77e4a
Update: Add _menu, _theme, _enabledPlugins to tree projection (refs #…
taylortom Mar 11, 2026
8970fe1
Fix: Move friendly ID generation from multilang into content module
taylortom Mar 11, 2026
e5ce6d8
Update: Refactor tests with shared mock helpers and add counter tests
taylortom Mar 11, 2026
809df70
Fix: Move adapt-authoring-mongodb from peer to standard dependency
taylortom Mar 11, 2026
fc2913e
Update: Add indexes for _parentId and _type queries (refs #109)
taylortom Mar 20, 2026
4b16ecb
Update: Skip updateEnabledPlugins for non-component types and respect…
taylortom Mar 21, 2026
a10170b
Update: Optimise ContentTree for large courses (refs #109)
taylortom Mar 22, 2026
04b50b1
Update: Refactor updateEnabledPlugins and updateSortOrder (refs #109)
taylortom Mar 22, 2026
f0ade8e
Update: Reduce redundant queries in delete, clone, and insertRecursiv…
taylortom Mar 22, 2026
cb818e1
Rework excessive comments
taylortom Mar 23, 2026
180db03
Update: Extract utilities and optimise clone for same-course copies
taylortom Mar 23, 2026
afb1d5a
New: Add bulk friendly ID generation via generateFriendlyIds
taylortom Mar 23, 2026
9ab7b71
Update: Pre-allocate friendly IDs in bulk during clone
taylortom Mar 23, 2026
053cfd6
Chore: Import utils from barrel file instead of individual files
taylortom Mar 23, 2026
d46a534
Chore: Remove generateFriendlyId in favour of generateFriendlyIds
taylortom Mar 23, 2026
c7f6fa9
Update: Rewrite clone as bulk insert with pre-generated IDs (refs #109)
taylortom Mar 23, 2026
a84e496
Upgrade: Bump adapt-authoring-mongodb to ^3.1.0
taylortom Mar 23, 2026
06ed07c
Update: Eliminate find-back and API hooks from clone for performance
taylortom Mar 23, 2026
f95bb60
New: Inline _assetIds on content documents, add asset deletion guard …
taylortom Mar 23, 2026
aa9debf
New: Add migration to backfill _friendlyId and _assetIds on existing …
taylortom Mar 24, 2026
d6b2a71
Simplify friendly ID generation and fix migration
taylortom Mar 27, 2026
40e2751
Fix: Address review feedback from PR #108
taylortom Mar 27, 2026
cbb24c9
Fix: Replace convertObjectIds in clone() with targeted ObjectId assig…
Copilot Mar 27, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
The table of contents is too big for display.
Diff view
Diff view
  •  
  •  
  •  
The diff you're trying to view is too large. We only load the first 3000 changed files.
15 changes: 7 additions & 8 deletions errors/errors.json
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@
"data": {
"parentId": "_id of the parent item"
},
"description": "Specified item is not a valid content item Invalid parent itemparent",
"statusCode": 500
"description": "Specified item is not a valid content parent",
"statusCode": 400
},
"DUPL_FRIENDLY_ID": {
"data": {
Expand All @@ -14,13 +14,12 @@
"description": "A content item with this _friendlyId already exists in this course",
"statusCode": 409
},
"UNKNOWN_SCHEMA_NAME": {
"RESOURCE_IN_USE": {
"data": {
"_id": "The database _id",
"_type": "The _type value",
"_component": "The _component value"
"type": "Type of resource",
"courses": "Courses using the resource"
},
"description": "Failed to determine schema name",
"statusCode": 500
"description": "Resource is currently being used in courses",
"statusCode": 400
}
}
1 change: 1 addition & 0 deletions index.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,3 +3,4 @@
* @namespace content
*/
export { default } from './lib/ContentModule.js'
export { default as ContentTree } from './lib/ContentTree.js'
476 changes: 370 additions & 106 deletions lib/ContentModule.js

Large diffs are not rendered by default.

128 changes: 128 additions & 0 deletions lib/ContentTree.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,128 @@
/**
 * Efficient tree abstraction over a flat array of course content items.
 * Builds O(1) lookup indexes on construction for parent-child, type, and ID queries.
 * Pure data structure with no DB access — works on both server and client.
 * @memberof content
 */
class ContentTree {
  /**
   * @param {Array<Object>} items Flat array of content items (from a single course)
   */
  constructor (items) {
    /** @type {Array<Object>} */
    this.items = items
    /** @type {Map<string, Object>} _id -> item */
    this.byId = new Map()
    /** @type {Map<string, Array<Object>>} _parentId -> [children] */
    this.byParent = new Map()
    /** @type {Map<string, Array<Object>>} _type -> [items] */
    this.byType = new Map()
    /** @type {Object|null} */
    this.course = null
    /** @type {Object|null} */
    this.config = null

    // Appends value to the bucket stored under key, creating it on first use
    const append = (map, key, value) => {
      const bucket = map.get(key)
      if (bucket === undefined) map.set(key, [value])
      else bucket.push(value)
    }

    for (const item of items) {
      this.byId.set(item._id.toString(), item)

      const parentKey = item._parentId?.toString()
      if (parentKey) append(this.byParent, parentKey, item)

      append(this.byType, item._type, item)

      if (item._type === 'course') this.course = item
      else if (item._type === 'config') this.config = item
    }
  }

  /**
   * O(1) lookup by ID
   * @param {string|Object} id
   * @returns {Object|undefined}
   */
  getById (id) {
    const key = id.toString()
    return this.byId.get(key)
  }

  /**
   * O(1) children lookup
   * @param {string|Object} parentId
   * @returns {Array<Object>}
   */
  getChildren (parentId) {
    return this.byParent.get(parentId.toString()) || []
  }

  /**
   * O(1) type lookup
   * @param {string} type
   * @returns {Array<Object>}
   */
  getByType (type) {
    return this.byType.get(type) || []
  }

  /**
   * BFS traversal collecting every descendant of the given root.
   * O(n) where n = number of descendants.
   * @param {string|Object} rootId
   * @returns {Array<Object>} Descendants in breadth-first order
   */
  getDescendants (rootId) {
    const found = []
    const pending = [rootId.toString()]
    while (pending.length > 0) {
      const parentKey = pending.shift()
      for (const child of this.byParent.get(parentKey) ?? []) {
        found.push(child)
        pending.push(child._id.toString())
      }
    }
    return found
  }

  /**
   * Walks up the parent chain from the given item. O(d) where d = depth.
   * @param {string|Object} itemId
   * @returns {Array<Object>} Ancestors ordered nearest-first
   */
  getAncestors (itemId) {
    const chain = []
    let node = this.byId.get(itemId.toString())
    while (node?._parentId) {
      node = this.byId.get(node._parentId.toString())
      if (node) chain.push(node)
    }
    return chain
  }

  /**
   * O(1) siblings lookup (excludes the item itself)
   * @param {string|Object} itemId
   * @returns {Array<Object>}
   */
  getSiblings (itemId) {
    const selfId = itemId.toString()
    const self = this.byId.get(selfId)
    if (!self?._parentId) return []
    const peers = this.getChildren(self._parentId)
    return peers.filter(peer => peer._id.toString() !== selfId)
  }

  /**
   * O(1) — unique component names across the course
   * @returns {Array<string>}
   */
  getComponentNames () {
    return [...new Set(this.getByType('component').map(c => c._component))]
  }
}

export default ContentTree
7 changes: 6 additions & 1 deletion lib/utils.js
Original file line number Diff line number Diff line change
@@ -1 +1,6 @@
export { getDescendants } from './utils/getDescendants.js'
export { default as ContentTree } from './ContentTree.js'
export { default as computeSortOrderOps } from './utils/computeSortOrderOps.js'
export { extractAssetIds } from './utils/extractAssetIds.js'
export { default as contentTypeToSchemaName } from './utils/contentTypeToSchemaName.js'
export { default as formatFriendlyId } from './utils/formatFriendlyId.js'
export { default as parseMaxSeq } from './utils/parseMaxSeq.js'
21 changes: 21 additions & 0 deletions lib/utils/computeSortOrderOps.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
/**
 * Computes the bulk-write operations needed to recalculate _sortOrder values.
 * When an item is supplied, it is spliced into the siblings array (mutating it)
 * at the position implied by its _sortOrder, or appended when no valid
 * _sortOrder is set.
 * @param {Array<Object>} siblings Existing siblings sorted by _sortOrder (excluding the item)
 * @param {Object} [item] The item being inserted/moved — omit when deleting
 * @return {Array<Object>} Array of MongoDB updateOne operations
 */
export default function computeSortOrderOps (siblings, item) {
  if (item) {
    // _sortOrder is 1-based; anything not > 0 means "append at the end"
    const insertAt = item._sortOrder > 0 ? item._sortOrder - 1 : siblings.length
    siblings.splice(insertAt, 0, item)
  }
  // Only emit an op for siblings whose stored value differs from its position
  return siblings.reduce((ops, sibling, index) => {
    const expected = index + 1
    if (sibling._sortOrder !== expected) {
      ops.push({ updateOne: { filter: { _id: sibling._id }, update: { $set: { _sortOrder: expected } } } })
    }
    return ops
  }, [])
}
9 changes: 9 additions & 0 deletions lib/utils/contentTypeToSchemaName.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
/**
 * Maps a content _type to its corresponding schema name.
 * 'page' and 'menu' are both content objects and share the 'contentobject'
 * schema; every other type uses a schema named after itself.
 * @param {String} _type Content type (e.g. 'page', 'menu', 'article', 'block')
 * @return {String}
 */
export default function contentTypeToSchemaName (_type) {
  switch (_type) {
    case 'page':
    case 'menu':
      return 'contentobject'
    default:
      return _type
  }
}
18 changes: 18 additions & 0 deletions lib/utils/extractAssetIds.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
/**
 * Extracts unique asset IDs from a content document by walking its schema
 * for Asset-type fields and collecting non-URL values.
 * @param {Object} schema The built Schema instance (must have a walk method)
 * @param {Object} data The data object to search for asset values
 * @return {Array<String>} Unique array of asset IDs found in the data
 * @memberof content
 */
export function extractAssetIds (schema, data) {
  // A field is an asset field when _backboneForms is 'Asset' itself,
  // or an object whose type is 'Asset'
  const matchesAssetType = field =>
    field?._backboneForms === 'Asset' || field?._backboneForms?.type === 'Asset'

  const ids = new Set()
  for (const match of schema.walk(data, matchesAssetType)) {
    const id = match.value?.toString()
    // Skip empty values and external URLs — only internal asset IDs are kept
    if (id && !id.startsWith('http://') && !id.startsWith('https://')) ids.add(id)
  }
  return [...ids]
}
12 changes: 12 additions & 0 deletions lib/utils/formatFriendlyId.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
/**
 * Formats a friendly ID string from content type, sequence number and optional language
 * @param {String} _type Content type (e.g. 'course', 'block', 'component')
 * @param {Number} count Current sequence number
 * @param {String} [_language] Language code (only used for courses)
 * @return {String}
 */
export default function formatFriendlyId (_type, count, _language) {
  switch (_type) {
    case 'config':
      // There is a single config per course, so no sequence is needed
      return 'config'
    case 'course': {
      const languageSuffix = _language ? `-${_language}` : ''
      return `course-${count}${languageSuffix}`
    }
    default:
      // All other types abbreviate to their first letter, e.g. 'block' -> 'b-1'
      return `${_type[0]}-${count}`
  }
}
22 changes: 0 additions & 22 deletions lib/utils/getDescendants.js

This file was deleted.

16 changes: 16 additions & 0 deletions lib/utils/parseMaxSeq.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
/**
 * Parses friendly ID strings from content docs and returns the highest sequence number.
 * Docs with a missing _friendlyId, or one containing no digits, are ignored.
 * @param {Array<Object>} docs Array of objects with a `_friendlyId` property
 * @return {Number} Highest sequence number found, or 0 when none match
 */
export default function parseMaxSeq (docs) {
  let maxNum = 0
  for (const doc of docs) {
    // Capture the first run of digits, e.g. 'b-12' -> 12, 'course-3-en' -> 3
    const match = doc._friendlyId?.match(/(\d+)/)
    if (match) {
      // Always pass a radix so leading-zero values can never be misparsed
      const num = Number.parseInt(match[1], 10)
      if (num > maxNum) maxNum = num
    }
  }
  return maxNum
}
120 changes: 120 additions & 0 deletions migrations/3.0.0.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,120 @@
import formatFriendlyId from '../lib/utils/formatFriendlyId.js'
import parseMaxSeq from '../lib/utils/parseMaxSeq.js'

/**
 * Migration entry point: registers the backfill commands for this release.
 * Commands run in registration order — friendly IDs first, then asset IDs.
 * @param {Object} migration The migration registrar
 */
export default function (migration) {
  migration.describe('Backfill _friendlyId and _assetIds on existing content documents')
  for (const command of [backfillFriendlyIds, backfillAssetIds]) {
    migration.runCommand(command)
  }
}

/**
 * Backfills `_friendlyId` on content documents where it is missing or empty.
 * Documents are grouped by _courseId + _type so each group gets its own
 * sequential numbering, continuing after the highest sequence already stored.
 * @param {Object} db Database handle exposing `collection()`
 * @param {Function} log Logger: log(level, category, message)
 */
async function backfillFriendlyIds (db, log) {
  const content = db.collection('content')

  // Find every doc with no usable _friendlyId (field absent or empty string)
  const missing = await content.find({
    $or: [
      { _friendlyId: { $exists: false } },
      { _friendlyId: '' }
    ]
  }).toArray()

  if (missing.length === 0) {
    log('info', 'migrations', 'No content documents missing _friendlyId, skipping')
    return
  }
  log('info', 'migrations', `Backfilling _friendlyId for ${missing.length} document(s)`)

  // Group by _courseId + _type to allocate sequential IDs per group
  // ('none' buckets docs with no _courseId — presumably course docs themselves; confirm)
  const groups = new Map()
  for (const doc of missing) {
    const courseId = doc._courseId?.toString() ?? 'none'
    const type = doc._type
    const key = `${courseId}:${type}`
    if (!groups.has(key)) groups.set(key, { courseId, type, docs: [] })
    groups.get(key).docs.push(doc)
  }

  for (const { courseId, type, docs } of groups.values()) {
    // Config docs always use the fixed ID 'config' — no sequence needed
    if (type === 'config') {
      for (const doc of docs) {
        await content.updateOne({ _id: doc._id }, { $set: { _friendlyId: formatFriendlyId('config') } })
      }
      continue
    }

    const isCourse = type === 'course'
    // Look up docs that already have a friendly ID so newly allocated
    // sequence numbers continue after the existing maximum
    const query = {
      _type: type,
      _friendlyId: { $exists: true, $ne: '' }
    }
    // Courses are numbered globally (no _courseId filter);
    // all other types are numbered within their course
    if (!isCourse && courseId !== 'none') {
      query._courseId = docs[0]._courseId
    }

    const existing = await content.find(query, { projection: { _friendlyId: 1 } }).toArray()
    const maxSeq = parseMaxSeq(existing)

    // Allocate consecutive sequence numbers to each doc in the group.
    // NOTE(review): course sequence numbers are shared across languages
    // within a group — confirm that is the intended numbering scheme
    let nextSeq = maxSeq + 1
    for (const doc of docs) {
      const friendlyId = formatFriendlyId(type, nextSeq, doc._language)
      await content.updateOne({ _id: doc._id }, { $set: { _friendlyId: friendlyId } })
      nextSeq++
    }
  }
  log('info', 'migrations', `Backfilled _friendlyId for ${missing.length} document(s)`)
}

/**
 * Backfills `_assetIds` on content documents where it is missing or null,
 * by scanning each document's serialised JSON for known asset ID strings.
 * Writes are batched via bulkWrite in groups of 500.
 * @param {Object} db Database handle exposing `collection()`
 * @param {Function} log Logger: log(level, category, message)
 */
async function backfillAssetIds (db, log) {
  const content = db.collection('content')
  const assets = db.collection('assets')

  // Find every doc with no usable _assetIds (field absent or null)
  const docsToUpdate = await content.find({
    $or: [
      { _assetIds: { $exists: false } },
      { _assetIds: null }
    ]
  }).toArray()

  if (docsToUpdate.length === 0) {
    log('info', 'migrations', 'No content documents missing _assetIds, skipping')
    return
  }

  // Build a set of all asset ID strings for fast lookup
  const assetIds = await assets.distinct('_id')
  const assetIdStrings = assetIds.map(id => id.toString())

  if (assetIdStrings.length === 0) {
    // No assets in the DB — set all to empty array
    await content.updateMany(
      { $or: [{ _assetIds: { $exists: false } }, { _assetIds: null }] },
      { $set: { _assetIds: [] } }
    )
    log('info', 'migrations', `Set _assetIds to [] for ${docsToUpdate.length} document(s) (no assets in DB)`)
    return
  }

  log('info', 'migrations', `Backfilling _assetIds for ${docsToUpdate.length} document(s) against ${assetIdStrings.length} known asset(s)`)

  let updated = 0
  const ops = []
  for (const doc of docsToUpdate) {
    // Substring scan of the serialised doc: any known asset ID appearing
    // anywhere in the JSON counts as a reference. NOTE(review): assumes an
    // asset ID string cannot occur by coincidence inside unrelated field
    // values — presumably safe for ObjectId-style hex strings, but confirm
    const docStr = JSON.stringify(doc)
    const foundIds = assetIdStrings.filter(id => docStr.includes(id))
    ops.push({
      updateOne: {
        filter: { _id: doc._id },
        update: { $set: { _assetIds: foundIds } }
      }
    })
    // Flush in batches of 500 to bound memory use and round-trip size
    if (ops.length >= 500) {
      await content.bulkWrite(ops, { ordered: false })
      updated += ops.length
      ops.length = 0
    }
  }
  // Flush any remaining operations from the final partial batch
  if (ops.length > 0) {
    await content.bulkWrite(ops, { ordered: false })
    updated += ops.length
  }
  log('info', 'migrations', `Backfilled _assetIds for ${updated} document(s)`)
}
Loading