diff --git a/conf/config.schema.json b/conf/config.schema.json index bb83336..74b3361 100644 --- a/conf/config.schema.json +++ b/conf/config.schema.json @@ -32,6 +32,11 @@ "description": "URL of the Adapt framework git repository to install", "type": "string" }, + "prebuildCache": { + "description": "When enabled, eagerly rebuilds the prebuilt cache for every (theme, menu) combination in the background after invalidation (e.g. plugin install/update/delete)", + "type": "boolean", + "default": false + }, "importMaxFileSize": { "description": "Maximum file upload size for course imports", "type": "string", diff --git a/index.js b/index.js index 5fb33a0..05c162e 100644 --- a/index.js +++ b/index.js @@ -3,6 +3,6 @@ * @namespace adaptframework */ export { default } from './lib/AdaptFrameworkModule.js' -export { copyFrameworkSource } from './lib/utils.js' +export { copyFrameworkSource, readFrameworkPluginVersions } from './lib/utils.js' export { default as AdaptFrameworkBuild } from './lib/AdaptFrameworkBuild.js' export { default as AdaptFrameworkImport } from './lib/AdaptFrameworkImport.js' diff --git a/lib/AdaptFrameworkBuild.js b/lib/AdaptFrameworkBuild.js index 2b37167..83888ac 100644 --- a/lib/AdaptFrameworkBuild.js +++ b/lib/AdaptFrameworkBuild.js @@ -3,7 +3,8 @@ import { App, Hook, ensureDir, writeJson } from 'adapt-authoring-core' import { parseObjectId } from 'adapt-authoring-mongodb' import { createWriteStream } from 'node:fs' import AdaptCli from 'adapt-cli' -import { log, logDir, logMemory, copyFrameworkSource } from './utils.js' +import { log, logDir, logMemory, copyFrameworkSource, generateLanguageManifest, applyBuildReplacements } from './utils.js' +import BuildCache from './BuildCache.js' import fs from 'node:fs/promises' import path from 'upath' import semver from 'semver' @@ -199,11 +200,48 @@ class AdaptFrameworkBuild { await this.loadCourseData() + // Check for cached preview build + if (this.isPreview && !contentOnly) { + const cache = new 
BuildCache(path.join(framework.getConfig('buildDir'), 'prebuilt-cache')) + const pluginHash = await framework.getPluginHash() + const theme = this.courseData.config.data._theme + const menu = this.courseData.config.data._menu + + if (await cache.has(pluginHash, theme, menu)) { + await cache.restore(pluginHash, theme, menu, this.buildDir) + await this.applySchemaDefaults() + await this.copyAssets() + await this.preBuildHook.invoke(this) + await this.writeContentJson() + await this.writeLanguageManifest() + await applyBuildReplacements(this.buildDir, { + defaultLanguage: this.courseData.config.data._defaultLanguage ?? 'en', + defaultDirection: this.courseData.config.data._defaultDirection ?? 'ltr', + buildType: 'development', + timestamp: Date.now() + }) + this.location = path.join(this.dir, 'build') + await this.postBuildHook.invoke(this) + this.buildData = await this.recordBuildAttempt() + return this + } + } + const tasks = [this.copyAssets()] if (!contentOnly) { + // preview cache is shared across courses, so include all installed plugins + // — except disabled themes/menus, since only one of each can be active per + // build and the framework's less:dev task globs every theme/menu in src/, + // which OOMs when more than one is present (see adapt_framework#3802). + const pluginsToInclude = this.isPreview + ? 
[ + ...this.enabledPlugins, + ...this.disabledPlugins.filter(p => p.type !== 'theme' && p.type !== 'menu') + ] + : this.enabledPlugins tasks.push(copyFrameworkSource({ destDir: this.dir, - enabledPlugins: this.enabledPlugins.map(p => p.name), + enabledPlugins: pluginsToInclude.map(p => p.name), linkNodeModules: !this.isExport })) } @@ -233,6 +271,18 @@ class AdaptFrameworkBuild { .setData(e) } } + // Populate prebuilt cache after successful grunt build for preview + if (this.isPreview && !contentOnly) { + const cache = new BuildCache(path.join(framework.getConfig('buildDir'), 'prebuilt-cache')) + const pluginHash = await framework.getPluginHash() + const theme = this.courseData.config.data._theme + const menu = this.courseData.config.data._menu + try { + await cache.populate(this.buildDir, pluginHash, theme, menu) + } catch (e) { + log('warn', 'CACHE', `failed to populate prebuilt cache: ${e.message}`) + } + } if (this.compress) { this.location = await this.prepareZip() } else { @@ -275,10 +325,10 @@ class AdaptFrameworkBuild { * @return {Promise} */ async loadAssetData () { - const [assets, courseassets, tags] = await App.instance.waitForModule('assets', 'courseassets', 'tags') + const [assets, content, tags] = await App.instance.waitForModule('assets', 'content', 'tags') - const caRecs = await courseassets.find({ courseId: this.courseId }) - const uniqueAssetIds = new Set(caRecs.map(c => parseObjectId(c.assetId))) + const courseContent = await content.find({ _courseId: this.courseId }, { validate: false }, { projection: { _assetIds: 1 } }) + const uniqueAssetIds = new Set(courseContent.flatMap(c => (c._assetIds ?? []).map(id => parseObjectId(id)))) const usedAssets = await assets.find({ _id: { $in: [...uniqueAssetIds] } }) const usedTagIds = new Set(usedAssets.reduce((m, a) => [...m, ...(a.tags ?? 
[])], [])) @@ -426,12 +476,35 @@ class AdaptFrameworkBuild { })) } + /** + * Outputs all course data to the required JSON files + * @return {Promise} + */ + async writeContentJson () { + const data = Object.values(this.courseData) + if (this.isExport && this.assetData.data.length) { + this.assetData.data = this.assetData.data.map(d => { + return { + title: d.title, + description: d.description, + filename: d.path, + tags: d.tags + } + }) + data.push(this.assetData) + } + return Promise.all(data.map(async ({ dir, fileName, data }) => { + await ensureDir(dir) + const filepath = path.join(dir, fileName) + const returnData = await writeJson(filepath, data) + log('verbose', 'WRITE', filepath) + return returnData + })) + } + /** * Applies schema defaults to the in-memory course and config data using * the jsonschema module. Replicates what grunt's schema-defaults task does. - * - * TODO: replace validateWithDefaults workaround with schema.validate(data, { ignoreErrors: true }) - * once migrated to adapt-schemas v3.x (see #184) * @return {Promise} */ async applySchemaDefaults () { @@ -442,31 +515,19 @@ class AdaptFrameworkBuild { const extensionFilter = s => contentplugin.isPluginSchema(s) ? enabledPluginSchemas.includes(s) : true const getSchema = name => jsonschema.getSchema(name, { useCache: false, extensionFilter }) - /** - * Applies defaults via validate(), catching and ignoring validation errors. - * The validated+defaulted data is returned from validate() on success, or - * extracted from the error on failure (validate clones internally). - */ - const validateWithDefaults = (schema, data) => { - try { - return schema.validate(data, { useDefaults: true, ignoreRequired: true }) - } catch (e) { - return e.data.data - } - } - + // Apply defaults without running full validation (which rejects ObjectIds etc.) 
const [courseSchema, configSchema] = await Promise.all([ getSchema('course'), getSchema('config') ]) - Object.assign(this.courseData.course.data, validateWithDefaults(courseSchema, this.courseData.course.data)) - Object.assign(this.courseData.config.data, validateWithDefaults(configSchema, this.courseData.config.data)) + courseSchema.compiledWithDefaults(this.courseData.course.data) + configSchema.compiledWithDefaults(this.courseData.config.data) for (const type of ['contentObject', 'article', 'block']) { const schemaName = type === 'contentObject' ? 'contentobject' : type const schema = await getSchema(schemaName) for (const item of this.courseData[type].data) { - Object.assign(item, validateWithDefaults(schema, item)) + schema.compiledWithDefaults(item) } } @@ -476,34 +537,23 @@ class AdaptFrameworkBuild { if (!componentSchemas[schemaName]) { componentSchemas[schemaName] = await getSchema(schemaName) } - Object.assign(item, validateWithDefaults(componentSchemas[schemaName], item)) + componentSchemas[schemaName].compiledWithDefaults(item) } } /** - * Outputs all course data to the required JSON files + * Writes the language_data_manifest.js for each language dir. + * Only needed on cache-hit builds where grunt is skipped. 
* @return {Promise} */ - async writeContentJson () { - const data = Object.values(this.courseData) - if (this.isExport && this.assetData.data.length) { - this.assetData.data = this.assetData.data.map(d => { - return { - title: d.title, - description: d.description, - filename: d.path, - tags: d.tags - } - }) - data.push(this.assetData) - } - return Promise.all(data.map(async ({ dir, fileName, data }) => { - await ensureDir(dir) - const filepath = path.join(dir, fileName) - const returnData = await writeJson(filepath, data) - log('verbose', 'WRITE', filepath) - return returnData - })) + async writeLanguageManifest () { + const langDir = this.courseData.course.dir + const fileNames = Object.values(this.courseData) + .filter(d => d.dir === langDir) + .map(d => d.fileName) + const manifest = generateLanguageManifest(fileNames) + await ensureDir(langDir) + await writeJson(path.join(langDir, 'language_data_manifest.js'), manifest) } /** diff --git a/lib/AdaptFrameworkImport.js b/lib/AdaptFrameworkImport.js index 2b62480..72ebc62 100644 --- a/lib/AdaptFrameworkImport.js +++ b/lib/AdaptFrameworkImport.js @@ -1,12 +1,11 @@ -import { App, Hook, spawn, readJson, writeJson } from 'adapt-authoring-core' -import { parseObjectId } from 'adapt-authoring-mongodb' +import { App, Hook, readJson, writeJson } from 'adapt-authoring-core' +import { isValidObjectId, parseObjectId } from 'adapt-authoring-mongodb' import fs from 'node:fs/promises' import { glob } from 'glob' import path from 'upath' -import { randomBytes } from 'node:crypto' import semver from 'semver' import { unzip } from 'zipper' -import { log, logDir, getImportSummary, getImportContentCounts } from './utils.js' +import { log, logDir, getImportSummary, getImportContentCounts, readFrameworkPluginVersions, collectMigrationScripts, runContentMigration } from './utils.js' import ComponentTransform from './migrations/component.js' import ConfigTransform from './migrations/config.js' @@ -14,6 +13,7 @@ import 
GraphicSrcTransform from './migrations/graphic-src.js' import NavOrderTransform from './migrations/nav-order.js' import ParentIdTransform from './migrations/parent-id.js' import RemoveUndefTransform from './migrations/remove-undef.js' +import VanillaBackgroundStylesTransform from './migrations/vanilla-background-styles.js' import StartPageTransform from './migrations/start-page.js' import ThemeUndefTransform from './migrations/theme-undef.js' @@ -25,7 +25,8 @@ const ContentMigrations = [ ParentIdTransform, RemoveUndefTransform, StartPageTransform, - ThemeUndefTransform + ThemeUndefTransform, + VanillaBackgroundStylesTransform ] /** @@ -174,7 +175,8 @@ class AdaptFrameworkImport { */ this.statusReport = { info: [], - warn: [] + warn: [], + error: [] } /** * Summary information for the import run @@ -220,10 +222,9 @@ class AdaptFrameworkImport { assets, content, contentplugin, - courseassets, framework, jsonschema - ] = await App.instance.waitForModule('assets', 'content', 'contentplugin', 'courseassets', 'adaptframework', 'jsonschema') + ] = await App.instance.waitForModule('assets', 'content', 'contentplugin', 'adaptframework', 'jsonschema') /** * Cached module instance for easy access * @type {AssetsModule} @@ -239,11 +240,6 @@ class AdaptFrameworkImport { * @type {ContentPluginModule} */ this.contentplugin = contentplugin - /** - * Cached module instance for easy access - * @type {CourseAssetsModule} - */ - this.courseassets = courseassets /** * Cached module instance for easy access * @type {AdaptFrameworkModule} @@ -268,9 +264,8 @@ class AdaptFrameworkImport { [this.importCourseAssets, importContent], [this.importCoursePlugins, isDryRun && importPlugins], [this.importCoursePlugins, !isDryRun && importContent], - [this.loadCourseData, isDryRun && importContent], + [this.loadCourseData, importContent], [this.migrateCourseData, !isDryRun && migrateContent], - [this.loadCourseData, !isDryRun && importContent], [this.importCourseData, !isDryRun && importContent], 
[this.generateSummary] ] @@ -347,18 +342,14 @@ class AdaptFrameworkImport { } /** - * Writes the contents of 2-customStyles.less to course.json file. Unfortunately it's necessary to do it this way to ensure it's included in migrations. + * Reads 2-customStyles.less (if present) and injects its contents as customStyle on the in-memory course, so migrations and the DB write see it. Existing customStyle on the course takes precedence. */ async patchCustomStyle () { const [customStylePath] = await glob('**/2-customStyles.less', { cwd: this.path, absolute: true }) - const courseJsonPath = `${this.langPath}/course.json` - if (!customStylePath) { - return - } + if (!customStylePath) return try { const customStyle = await fs.readFile(customStylePath, 'utf8') - const courseJson = await readJson(courseJsonPath) - await writeJson(courseJsonPath, { customStyle, ...courseJson }) + this.contentJson.course = { customStyle, ...this.contentJson.course } log('info', 'patched course customStyle') } catch (e) { log('warn', 'failed to patch course customStyle', e) @@ -366,15 +357,14 @@ class AdaptFrameworkImport { } /** - * Ensures _theme exists on the config + * Ensures _theme exists on the in-memory config */ async patchThemeName () { try { - const configJsonPath = `${this.coursePath}/config.json` - const configJson = await readJson(configJsonPath) - if (configJson._theme) return - configJson._theme = Object.values(this.usedContentPlugins).find(p => p.type === 'theme').name - await writeJson(configJsonPath, configJson) + if (this.contentJson.config?._theme) return + const _theme = Object.values(this.usedContentPlugins).find(p => p.type === 'theme')?.name + if (!_theme || !this.contentJson.config) return + this.contentJson.config._theme = _theme log('info', 'patched config _theme') } catch (e) { log('warn', 'failed to patch config _theme', e) @@ -479,47 +469,61 @@ class AdaptFrameworkImport { } /** - * Run grunt task - * @return {Promise} - */ - async runGruntMigration (subTask, { 
outputDir, captureDir, outputFilePath }) { - const output = await spawn({ - cmd: 'npx', - args: ['grunt', `migration:${subTask}`, `--outputdir=${outputDir}`, `--capturedir=${captureDir}`], - cwd: this.frameworkPath ?? this.framework.path - }) - if (outputFilePath) await fs.writeFile(outputFilePath, output) - } - - /** - * Handle migrate course data, installs adapt-migrations/capture data/adds updated scripts/migrates data + * Migrates course data in-memory using adapt-migrations */ async migrateCourseData () { try { await this.patchThemeName() await this.patchCustomStyle() - const migrationId = `${this.userId}-${randomBytes(4).toString('hex')}` - - const opts = { - outputDir: path.relative(this.framework.path, path.resolve(this.coursePath, '..')), - captureDir: path.join(`./${migrationId}-migrations`), - outputFilePath: path.join(this.framework.path, 'migrations', `${migrationId}.txt`) - } - log('debug', 'MIGRATION_ID', migrationId) - logDir('captureDir', opts.captureDir) - logDir('outputDir', opts.outputDir) + const content = this.flattenContentJson() + const fromPlugins = Object.values(this.usedContentPlugins).map(p => ({ + name: p.name, + version: p.version + })) + const toPlugins = await readFrameworkPluginVersions(this.framework.path) + const scripts = await collectMigrationScripts(this.framework.path) - await this.runGruntMigration('capture', opts) - await this.runGruntMigration('migrate', opts) + const migrated = await runContentMigration({ content, fromPlugins, toPlugins, scripts }) - await fs.rm(path.join(this.framework.path, opts.captureDir), { recursive: true }) + this.unflattenContentJson(migrated) + log('info', 'in-memory content migration completed') } catch (error) { log('error', 'Migration process failed', error) throw App.instance.errors.FW_IMPORT_MIGRATION_FAILED.setData({ reason: error.message }) } } + /** + * Flattens this.contentJson into a flat array for adapt-migrations + * @returns {Array} + */ + flattenContentJson () { + const content = [] 
+ if (this.contentJson.course?._id) content.push(this.contentJson.course) + if (this.contentJson.config?._id) content.push(this.contentJson.config) + for (const item of Object.values(this.contentJson.contentObjects)) { + if (item?._id) content.push(item) + } + return content + } + + /** + * Writes migrated content back into the contentJson structure + * @param {Array} migrated The migrated content array + */ + unflattenContentJson (migrated) { + for (const item of migrated) { + if (item._type === 'course') { + this.contentJson.course = item + } else if (item._type === 'config') { + this.contentJson.config = item + } else { + this.contentJson.contentObjects[item._id] = item + } + } + } + /** * Imports any specified tags * @return {Promise} @@ -573,13 +577,15 @@ class AdaptFrameworkImport { if (this.settings.isDryRun) { return } + const stats = await fs.stat(filepath) try { const asset = await this.assets.insert({ ...data, createdBy: this.userId, file: { filepath, - originalFilename: filepath + originalFilename: filepath, + size: stats.size }, tags: data.tags }) @@ -587,7 +593,13 @@ class AdaptFrameworkImport { const resolved = path.relative(`${this.coursePath}/..`, filepath) this.assetMap[resolved] = asset._id.toString() } catch (e) { - this.statusReport.warn.push({ code: 'ASSET_IMPORT_FAILED', data: { filepath } }) + if (e.code === 'DUPLICATE_ASSET') { + const resolved = path.relative(`${this.coursePath}/..`, filepath) + this.assetMap[resolved] = e.data.assetId + } else { + log('error', `asset import failed for '${filepath}'`, e) + this.statusReport.warn.push({ code: 'ASSET_IMPORT_FAILED', data: { filepath, reason: e?.message ?? 
String(e) } }) + } } imagesImported++ })) @@ -725,8 +737,9 @@ class AdaptFrameworkImport { try { const course = await this.importContentObject({ ...this.contentJson.course, tags: this.tags }) /* config */ await this.importContentObject(this.contentJson.config) - // we need to run an update with the same data to make sure all extension schema settings are applied - await this.importContentObject({ ...this.contentJson.course, _id: course._id }, { isUpdate: true }) + // we need to run an update with the same data to make sure all extension schema settings are applied; + // ignoreRequired because some plugins declare top-level required properties with no default that Ajv can't materialise (e.g. adapt-contrib-glossary) + await this.importContentObject({ ...this.contentJson.course, _id: course._id }, { isUpdate: true, ignoreRequired: true }) } catch (e) { throw App.instance.errors.FW_IMPORT_CONTENT_FAILED.setData({ errors: [formatError(e)] }) } @@ -738,8 +751,8 @@ class AdaptFrameworkImport { try { const itemJson = this.contentJson.contentObjects[_id] await this.importContentObject({ - _sortOrder: hierarchy[itemJson._parentId].indexOf(_id) + 1, - ...itemJson // note that JSON sort order will override the deduced one + ...itemJson, + _sortOrder: hierarchy[itemJson._parentId].indexOf(_id) + 1 // trust the hierarchy: per-insert updateSortOrder is disabled, so we can't rely on bad export values being normalised later }) } catch (e) { errors.push(formatError(e)) @@ -747,6 +760,8 @@ class AdaptFrameworkImport { } } if (errors.length) throw App.instance.errors.FW_IMPORT_CONTENT_FAILED.setData({ errors }) + // single-pass sweep now all content is in place; per-insert sweep was disabled to avoid O(n²) work + await this.content.updateEnabledPlugins({ _courseId: this.idMap.course }, { forceUpdate: true }) log('debug', 'imported course data successfully') } @@ -785,6 +800,7 @@ class AdaptFrameworkImport { let insertData = await this.transformData({ ...data, _id: undefined, + 
_assetIds: undefined, // recompute from resolved asset references; export ships paths, not ObjectIds _courseId: this.idMap.course, createdBy: this.userId }) @@ -797,7 +813,7 @@ class AdaptFrameworkImport { } insertData = schema.sanitise(insertData) let doc - const opts = { schemaName, validate: true, useCache: false } + const opts = { schemaName, validate: true, useCache: false, updateEnabledPlugins: false, updateSortOrder: false, ignoreRequired: options.ignoreRequired } if (options.isUpdate) { doc = await this.content.update({ _id: data._id }, insertData, opts) } else { @@ -829,7 +845,13 @@ class AdaptFrameworkImport { schema.walk(data, field => field?._backboneForms?.type === 'Asset' || field?._backboneForms === 'Asset' ).forEach(({ data: parent, key, value }) => { - value ? parent[key] = this.assetMap[value] ?? value : delete parent[key] + if (!value) return delete parent[key] + const mapped = this.assetMap[value] + if (mapped) return (parent[key] = mapped) + if (isValidObjectId(value)) return (parent[key] = value) + log('warn', `unable to resolve asset reference '${value}' — dropping field`) + this.statusReport.warn.push({ code: 'UNRESOLVED_ASSET_REF', data: { path: value } }) + delete parent[key] }) } @@ -901,9 +923,7 @@ class AdaptFrameworkImport { const _courseId = parseObjectId(this.idMap[this.contentJson.course._id]) tasks.push( this.content.deleteMany({ _courseId }) - .catch(e => log('warn', 'failed to delete course content', e)), - this.courseassets.deleteMany({ courseId: _courseId }) - .catch(e => log('warn', 'failed to delete course assets', e)) + .catch(e => log('warn', 'failed to delete course content', e)) ) } catch (e) {} // courseId not available, no content to roll back } diff --git a/lib/AdaptFrameworkModule.js b/lib/AdaptFrameworkModule.js index 3c6651d..3e73e89 100644 --- a/lib/AdaptFrameworkModule.js +++ b/lib/AdaptFrameworkModule.js @@ -4,7 +4,8 @@ import AdaptFrameworkImport from './AdaptFrameworkImport.js' import fs from 'node:fs/promises' 
import { getHandler, postHandler, importHandler, postUpdateHandler, getUpdateHandler } from './handlers.js' import { loadRouteConfig, registerRoutes } from 'adapt-authoring-server' -import { runCliCommand } from './utils.js' +import { runCliCommand, readFrameworkPluginVersions, migrateExistingCourses, computePluginHash, prebuildCache } from './utils.js' +import BuildCache from './BuildCache.js' import path from 'node:path' import semver from 'semver' @@ -83,6 +84,17 @@ class AdaptFrameworkModule extends AbstractModule { await this.installFramework() + this.app.waitForModule('contentplugin').then(contentplugin => { + contentplugin.postInsertHook.tap(() => this.invalidatePrebuiltCache()) + contentplugin.postUpdateHook.tap(() => this.invalidatePrebuiltCache()) + contentplugin.postDeleteHook.tap(() => this.invalidatePrebuiltCache()) + }) + this.postUpdateHook.tap(() => this.invalidatePrebuiltCache()) + + if (this.getConfig('prebuildCache')) { + this.prebuildCache() + } + process.env.BROWSERSLIST_IGNORE_OLD_DATA = '1' if (this.app.args['update-framework'] === true) { @@ -209,6 +221,7 @@ class AdaptFrameworkModule extends AbstractModule { * @return {Promise} */ async updateFramework (version) { + let migrationResult try { if (version) { this.checkVersionCompatibility(version) @@ -216,13 +229,71 @@ class AdaptFrameworkModule extends AbstractModule { if (!version && this.targetVersionRange) { version = await this.getLatestVersion() } + const fromPlugins = await readFrameworkPluginVersions(this.path) await this.runCliCommand('updateFramework', { version }) this._version = await this.runCliCommand('getCurrentFrameworkVersion') + const toPlugins = await readFrameworkPluginVersions(this.path) + migrationResult = await migrateExistingCourses({ fromPlugins, toPlugins, frameworkDir: this.path }) } catch (e) { this.log('error', `failed to update framework, ${e.message}`) throw e.statusCode ? 
e : this.app.errors.FW_UPDATE_FAILED.setData({ reason: e.message }) } - this.postUpdateHook.invoke() + await this.postUpdateHook.invoke() + return migrationResult + } + + /** + * Returns a cached plugin hash, computing it on first call + * @return {Promise} + */ + async getPluginHash () { + if (!this._pluginHash) { + this._pluginHash = await computePluginHash(this.path) + } + return this._pluginHash + } + + /** + * Invalidates the prebuilt compilation cache and optionally + * triggers an eager rebuild of the shared cache in the background + */ + async invalidatePrebuiltCache () { + this._pluginHash = null + + try { + await new BuildCache(path.join(this.getConfig('buildDir'), 'prebuilt-cache')).invalidate() + } catch (e) { + this.log('warn', `failed to invalidate prebuilt cache: ${e.message}`) + } + + if (this.getConfig('prebuildCache')) { + this.prebuildCache() + } + } + + /** + * Eagerly rebuilds the prebuilt cache in the background, iterating every + * (theme, menu) combination of installed plugins. Safe to call multiple + * times — if a build is already in progress, it will be reused. + * Idempotent — already-cached combos are skipped. + * @return {Promise} + */ + prebuildCache () { + if (this._eagerBuildPromise) { + return this._eagerBuildPromise + } + this._eagerBuildPromise = prebuildCache({ + buildDir: this.getConfig('buildDir'), + frameworkDir: this.path + }).catch(e => { + this.log('warn', `eager prebuild failed: ${e.message}`) + if (e.cmd) this.log('warn', `cmd: ${e.cmd}`) + if (e.raw) this.log('warn', `output: ${e.raw}`) + if (e.stderr) this.log('warn', `stderr: ${e.stderr}`) + }).finally(() => { + this._eagerBuildPromise = null + }) + return this._eagerBuildPromise } /** @@ -265,7 +336,7 @@ class AdaptFrameworkModule extends AbstractModule { if (!course) { return } - const shareWithUsers = course?._shareWithUsers.map(id => id.toString()) ?? [] + const shareWithUsers = course._shareWithUsers?.map(id => id.toString()) ?? 
[] const userId = req.auth.user._id.toString() return course.createdBy.toString() === userId || course._isShared || shareWithUsers.includes(userId) } @@ -307,6 +378,18 @@ class AdaptFrameworkModule extends AbstractModule { this.contentMigrations.push(migration) } + /** + * Migrates content for specific courses. Called by contentplugin on plugin update. + * @param {Object} options + * @param {Array<{name: String, version: String}>} options.fromPlugins Plugin versions before the update + * @param {Array<{name: String, version: String}>} options.toPlugins Plugin versions after the update + * @param {String[]} options.courseIds Course IDs to migrate + * @returns {Promise<{migrated: Number, failed: Number, errors: Array}>} + */ + async migrateCourses ({ fromPlugins, toPlugins, courseIds }) { + return migrateExistingCourses({ fromPlugins, toPlugins, frameworkDir: this.path, courseIds }) + } + /** * Builds a single Adapt framework course * @param {AdaptFrameworkBuildOptions} options diff --git a/lib/BuildCache.js b/lib/BuildCache.js new file mode 100644 index 0000000..eb44840 --- /dev/null +++ b/lib/BuildCache.js @@ -0,0 +1,105 @@ +import fs from 'node:fs/promises' +import path from 'upath' +import { log } from './utils/log.js' + +/** Build output entries that aren't cached (rebuilt per-build from course data) */ +const SKIP_ENTRIES = new Set(['course']) + +/** + * Filesystem-level cache of grunt build output, keyed by (pluginHash, theme, menu). + * One instance per cache root; methods are stateless beyond the root path. 
+ */ +class BuildCache { + /** + * @param {String} cacheRoot Root cache directory + */ + constructor (cacheRoot) { + this.cacheRoot = cacheRoot + } + + /** + * @returns {String} The cache directory path for the given combo + */ + getPath (pluginHash, theme, menu) { + return path.join(this.cacheRoot, `${pluginHash}_${theme}_${menu}`) + } + + /** + * @returns {Promise} Whether a cached build exists for the given combo + */ + async has (pluginHash, theme, menu) { + try { + await fs.access(this.getPath(pluginHash, theme, menu)) + return true + } catch { + return false + } + } + + /** + * Copies the build output (minus per-course content) into the cache for the given combo. + * Uses a temp dir + atomic rename for parallel safety. + * @param {String} buildOutputDir The build output directory + */ + async populate (buildOutputDir, pluginHash, theme, menu) { + const cacheDir = this.getPath(pluginHash, theme, menu) + await fs.mkdir(this.cacheRoot, { recursive: true }) + + const tmpDir = `${cacheDir}_tmp_${Date.now()}` + try { + await fs.mkdir(tmpDir, { recursive: true }) + const entries = await fs.readdir(buildOutputDir) + for (const entry of entries) { + if (SKIP_ENTRIES.has(entry)) continue + await copyEntry(path.join(buildOutputDir, entry), path.join(tmpDir, entry)) + } + await safeRename(tmpDir, cacheDir) + log('info', 'CACHE', `populated cache for ${pluginHash} (theme=${theme}, menu=${menu})`) + } catch (e) { + await fs.rm(tmpDir, { recursive: true, force: true }) + throw e + } + } + + /** + * Copies cached artifacts to a build directory. + * @param {String} destDir Destination build directory + */ + async restore (pluginHash, theme, menu, destDir) { + await fs.mkdir(destDir, { recursive: true }) + await fs.cp(this.getPath(pluginHash, theme, menu), destDir, { recursive: true }) + log('info', 'CACHE', `restored from cache for ${pluginHash} (theme=${theme}, menu=${menu})`) + } + + /** + * Removes the entire cache root. 
+ */ + async invalidate () { + await fs.rm(this.cacheRoot, { recursive: true, force: true }) + log('info', 'CACHE', 'invalidated prebuilt cache') + } +} + +async function copyEntry (src, dest) { + const stat = await fs.stat(src) + if (stat.isDirectory()) { + await fs.cp(src, dest, { recursive: true }) + } else { + await fs.mkdir(path.dirname(dest), { recursive: true }) + await fs.copyFile(src, dest) + } +} + +async function safeRename (src, dest) { + try { + await fs.rename(src, dest) + } catch (e) { + if (e.code === 'ENOTEMPTY' || e.code === 'EEXIST') { + await fs.rm(src, { recursive: true, force: true }) + } else { + throw e + } + } +} + +export default BuildCache diff --git a/lib/handlers.js b/lib/handlers.js index 7db26da..029682f 100644 --- a/lib/handlers.js +++ b/lib/handlers.js @@ -133,11 +133,12 @@ export async function postUpdateHandler (req, res, next) { } log('info', 'running framework update') const previousVersion = framework.version - await framework.updateFramework(req.body.version) + const migrationResult = await framework.updateFramework(req.body.version) const currentVersion = framework.version !== previousVersion ? 
framework.version : undefined res.json({ from: previousVersion, - to: currentVersion + to: currentVersion, + migration: migrationResult }) } catch (e) { return next(e) diff --git a/lib/migrations/vanilla-background-styles.js b/lib/migrations/vanilla-background-styles.js new file mode 100644 index 0000000..3d55912 --- /dev/null +++ b/lib/migrations/vanilla-background-styles.js @@ -0,0 +1,17 @@ +const FIELDS = ['_backgroundRepeat', '_backgroundSize', '_backgroundPosition'] + +function clean (styles) { + if (!styles || typeof styles !== 'object') return + for (const f of FIELDS) { + if (styles[f] === '' || styles[f] === null) delete styles[f] + } +} + +async function VanillaBackgroundStyles (data) { + const v = data._vanilla + if (!v || typeof v !== 'object') return + clean(v._backgroundStyles) + clean(v._pageHeader?._backgroundStyles) +} + +export default VanillaBackgroundStyles diff --git a/lib/utils.js b/lib/utils.js index 6b4efdf..0013b47 100644 --- a/lib/utils.js +++ b/lib/utils.js @@ -7,3 +7,11 @@ export { retrieveBuildData } from './utils/retrieveBuildData.js' export { getImportSummary } from './utils/getImportSummary.js' export { slugifyTitle } from './utils/slugifyTitle.js' export { copyFrameworkSource } from './utils/copyFrameworkSource.js' +export { readFrameworkPluginVersions } from './utils/readFrameworkPluginVersions.js' +export { collectMigrationScripts } from './utils/collectMigrationScripts.js' +export { runContentMigration } from './utils/runContentMigration.js' +export { migrateExistingCourses } from './utils/migrateExistingCourses.js' +export { computePluginHash } from './utils/computePluginHash.js' +export { prebuildCache } from './utils/prebuildCache.js' +export { generateLanguageManifest } from './utils/generateLanguageManifest.js' +export { applyBuildReplacements } from './utils/applyBuildReplacements.js' diff --git a/lib/utils/applyBuildReplacements.js b/lib/utils/applyBuildReplacements.js new file mode 100644 index 0000000..a8d0539 --- 
/**
 * Applies @@placeholder substitutions in the built course's index.html.
 * Replacement values are inserted via function replacers so that special
 * patterns like `$&` or `$'` appearing in the data cannot corrupt the
 * output (String.prototype.replace/replaceAll interpret `$`-sequences in
 * *string* replacements specially).
 * @param {String} buildDir The build output directory
 * @param {Object} data Replacement values
 * @param {String} data.defaultLanguage The default language code
 * @param {String} data.defaultDirection The default text direction
 * @param {String} data.buildType The build type (e.g. 'development')
 * @param {Number} data.timestamp The build timestamp
 * @return {Promise}
 */
export async function applyBuildReplacements (buildDir, { defaultLanguage, defaultDirection, buildType, timestamp }) {
  const indexPath = path.join(buildDir, 'index.html')
  // placeholder -> literal replacement value
  const replacements = {
    '@@config._defaultLanguage': defaultLanguage,
    '@@config._defaultDirection': defaultDirection,
    '@@build.type': buildType,
    '@@build.timestamp': String(timestamp)
  }
  let html = await fs.readFile(indexPath, 'utf8')
  for (const [placeholder, value] of Object.entries(replacements)) {
    // function replacer: value is inserted verbatim, no $-substitution
    html = html.replaceAll(placeholder, () => value)
  }
  await fs.writeFile(indexPath, html)
}

/**
 * Collects all migration script paths from the framework's src directory.
 * Matches core migrations (core/migrations) as well as per-plugin
 * migrations (e.g. components/<name>/migrations).
 * @param {String} frameworkDir Absolute path to the framework directory
 * @returns {Promise<String[]>} Absolute paths to migration scripts
 */
export async function collectMigrationScripts (frameworkDir) {
  const srcDir = path.join(frameworkDir, 'src')
  return glob([
    'core/migrations/**/*.js',
    '*/*/migrations/**/*.js'
  ], { cwd: srcDir, absolute: true })
}
/**
 * Computes a deterministic hash from the installed plugin set.
 * Dependencies are sorted by name before hashing so the result is
 * independent of installation order.
 * @param {String} frameworkDir Path to the local framework installation
 * @return {Promise<String>} 16-char hex hash
 */
export async function computePluginHash (frameworkDir) {
  const deps = await new Project({ cwd: frameworkDir }).getInstalledDependencies()
  const canonical = Object.entries(deps).sort(([nameA], [nameB]) => nameA.localeCompare(nameB))
  const digest = createHash('sha256').update(JSON.stringify(canonical)).digest('hex')
  return digest.slice(0, 16)
}
/**
 * Returns the list of JSON filenames that belong in a language manifest.
 * The framework runtime reads the manifest to know which data files to
 * fetch, so the manifest file itself and assets.json are excluded.
 * @param {Array} jsonFileNames All JSON filenames written to the language dir
 * @return {Array} Filtered list excluding the manifest itself and assets.json
 */
export function generateLanguageManifest (jsonFileNames) {
  const excluded = new Set(['language_data_manifest.js', 'assets.json'])
  return jsonFileNames.filter(fileName => !excluded.has(fileName))
}

/**
 * Logs a message under the framework module's namespace.
 * Safe to call before the app has finished booting: a missing
 * App.instance or logger makes this a silent no-op.
 * @param {String} level Log level
 * @param {...*} rest Arguments to be logged
 */
export function log (level, ...rest) {
  const logger = App.instance?.logger
  logger?.log(level, 'adaptframework', ...rest)
}
/**
 * Migrates content for a set of courses by courseId.
 * Each course is processed independently: a failure on one course is
 * recorded in the returned errors array and does not abort the rest.
 * @param {Object} options
 * @param {Array<{name: String, version: String}>} options.fromPlugins Plugin versions before update
 * @param {Array<{name: String, version: String}>} options.toPlugins Plugin versions after update
 * @param {String} options.frameworkDir Absolute path to the framework directory
 * @param {String[]} [options.courseIds] Specific course IDs to migrate (if omitted, migrates all)
 * @returns {Promise<{migrated: Number, failed: Number, errors: Array}>}
 */
export async function migrateExistingCourses ({ fromPlugins, toPlugins, frameworkDir, courseIds }) {
  const content = await App.instance.waitForModule('content')
  const scripts = await collectMigrationScripts(frameworkDir)

  // nothing to run — report a clean zero-result rather than iterating courses
  if (!scripts.length) {
    log('debug', 'no migration scripts found, skipping')
    return { migrated: 0, failed: 0, errors: [] }
  }

  // strict: false so a missing course yields null (handled below) instead of throwing
  const foundCourses = courseIds
    ? await Promise.all(courseIds.map(async _id => content.findOne({ _id, _type: 'course' }, { strict: false })))
    : await content.find({ _type: 'course' })

  let migrated = 0
  let failed = 0
  const errors = []

  // index-based loop so a null result can be matched back to its requested courseId
  for (let ci = 0; ci < foundCourses.length; ci++) {
    const course = foundCourses[ci]
    if (!course) {
      const courseId = courseIds?.[ci] ?? 'unknown'
      log('warn', `course ${courseId} not found, skipping`)
      errors.push({ courseId, error: 'course not found' })
      failed++
      continue
    }
    try {
      const courseId = course._id.toString()
      log('debug', `migrating course ${courseId}`)

      const courseContent = await fetchCourseContent(content, course)
      // JSON round-trip snapshot of pre-migration state, used for change detection below
      const originals = courseContent.map(item => JSON.parse(JSON.stringify(item)))

      // fromPlugins is deep-copied per course — presumably the migration may
      // mutate the journal data it is handed; TODO confirm against adapt-migrations
      const migratedContent = await runContentMigration({
        content: courseContent,
        fromPlugins: JSON.parse(JSON.stringify(fromPlugins)),
        toPlugins,
        scripts
      })

      // only write back items that actually changed; both sides of the
      // comparison are JSON-normalized so non-JSON types compare consistently
      let updatedCount = 0
      for (let i = 0; i < migratedContent.length; i++) {
        const normalized = JSON.parse(JSON.stringify(migratedContent[i]))
        if (!isDeepStrictEqual(originals[i], normalized)) {
          await content.update({ _id: migratedContent[i]._id }, normalized)
          updatedCount++
        }
      }
      if (updatedCount > 0) {
        log('info', `migrated ${updatedCount} items in course ${courseId}`)
      }
      // a course with zero changed items still counts as successfully migrated
      migrated++
    } catch (e) {
      const courseId = course?._id?.toString() ?? 'unknown'
      log('error', `migration failed for course ${courseId}`, e.message)
      errors.push({ courseId, error: e.message })
      failed++
    }
  }

  log('info', `migration complete: ${migrated} succeeded, ${failed} failed`)
  return { migrated, failed, errors }
}

/**
 * Fetches a course's full content set in a stable order:
 * course first, then config (if present), then all remaining items.
 * @param {Object} content The content module
 * @param {Object} course The course document
 * @returns {Promise<Array>} Flat array of content documents
 */
async function fetchCourseContent (content, course) {
  const config = await content.findOne({ _courseId: course._id, _type: 'config' }, { strict: false })
  const items = await content.find({ _courseId: course._id, _type: { $nin: ['course', 'config'] } })
  const result = [course]
  if (config) result.push(config)
  result.push(...items)
  return result
}
/**
 * Eagerly populates the prebuilt cache for every (theme, menu) combination
 * of installed plugins. Iterates serially: each iteration runs a full build
 * with the chosen theme/menu and caches the output.
 *
 * Idempotent — combos that already have a cache entry are skipped, so
 * re-runs only build what's missing. Per-iteration failures are logged
 * but don't abort the whole prebuild.
 * @param {Object} options
 * @param {String} options.buildDir Root build directory
 * @param {String} options.frameworkDir Path to the adapt_framework source
 * @return {Promise}
 */
export async function prebuildCache ({ buildDir, frameworkDir }) {
  const app = App.instance
  const cache = new BuildCache(path.join(buildDir, 'prebuilt-cache'))
  // hash keys the cache to the exact installed plugin set
  const pluginHash = await computePluginHash(frameworkDir)

  const contentplugin = await app.waitForModule('contentplugin')
  const allPlugins = await contentplugin.find({})
  const themes = allPlugins.filter(p => p.type === 'theme')
  const menus = allPlugins.filter(p => p.type === 'menu')

  // a build needs at least one theme and one menu; fail fast rather than loop zero times
  if (!themes.length || !menus.length) {
    throw new Error('Cannot prebuild cache: no theme or menu plugin installed')
  }

  log('info', 'CACHE', `starting eager prebuild for ${themes.length * menus.length} (theme,menu) combinations`)

  // serial on purpose: each combo runs a full build (memory/CPU heavy)
  for (const theme of themes) {
    for (const menu of menus) {
      try {
        await prebuildOne({ buildDir, cache, pluginHash, theme, menu, allPlugins })
      } catch (e) {
        // best-effort: log and continue with the remaining combos
        log('warn', 'CACHE', `eager prebuild failed for theme=${theme.name} menu=${menu.name}: ${e.message}`)
        if (e.cmd) log('warn', 'CACHE', `cmd: ${e.cmd}`)
        if (e.stderr) log('warn', 'CACHE', `stderr: ${e.stderr}`)
      }
    }
  }

  log('info', 'CACHE', 'eager prebuild complete')
}

/**
 * Builds and caches a single (theme, menu) combination in a throwaway
 * working directory, which is always removed afterwards.
 * @param {Object} options buildDir/cache/pluginHash/theme/menu/allPlugins
 * @return {Promise}
 */
async function prebuildOne ({ buildDir, cache, pluginHash, theme, menu, allPlugins }) {
  const app = App.instance

  // idempotency: skip combos already present in the cache
  if (await cache.has(pluginHash, theme.name, menu.name)) {
    log('info', 'CACHE', `skipping cached combo theme=${theme.name} menu=${menu.name}`)
    return
  }

  // Only one theme/menu can be active per build — drop the others so
  // the framework's less:dev task doesn't glob multiple themes' LESS
  // into a single adapt.css (see adapt_framework#3802).
  const includedPlugins = allPlugins.filter(p =>
    (p.type !== 'theme' && p.type !== 'menu') || p.name === theme.name || p.name === menu.name
  )
  const pluginNames = includedPlugins.map(p => p.name)

  // Date.now() in the dir name keeps concurrent/retried runs from colliding
  const tmpDir = path.join(buildDir, `_eager_cache_${Date.now()}_${theme.name}_${menu.name}`)
  try {
    log('info', 'CACHE', `building combo theme=${theme.name} menu=${menu.name}`)

    // NOTE(review): copyFrameworkSource's documented options are enabledPlugins
    // and copyNodeModules — confirm linkNodeModules is actually supported
    await copyFrameworkSource({
      destDir: tmpDir,
      enabledPlugins: pluginNames,
      linkNodeModules: true
    })

    // minimal stub course so the framework build has something to compile
    const outputDir = path.join(tmpDir, 'build')
    const buildCourseDir = path.join(outputDir, 'course', 'en')
    await ensureDir(buildCourseDir)
    await writeJson(path.join(outputDir, 'course', 'config.json'), {
      _defaultLanguage: 'en',
      _theme: theme.name,
      _menu: menu.name,
      _enabledPlugins: pluginNames
    })
    await writeJson(path.join(buildCourseDir, 'course.json'), {
      title: '_eager_cache_build',
      _latestTrackingId: 0
    })
    const cacheDir = path.join(buildDir, 'cache')
    await ensureDir(cacheDir)

    await AdaptCli.buildCourse({
      cwd: tmpDir,
      sourceMaps: true,
      outputDir,
      cachePath: path.join(cacheDir, '_eager_cache'),
      logger: { log: (...args) => app.logger.log('debug', 'adapt-cli', ...args) }
    })

    // re-check before populate: another process may have filled this combo
    // while the build was running
    if (!await cache.has(pluginHash, theme.name, menu.name)) {
      await cache.populate(outputDir, pluginHash, theme.name, menu.name)
    }
  } finally {
    // always remove the throwaway build dir, even on failure
    await fs.rm(tmpDir, { recursive: true, force: true })
  }
}
/**
 * Reads bower.json files from the framework's src directory to build a
 * list of plugin names and versions (core plus every installed
 * component/extension/menu/theme plugin).
 * @param {String} frameworkDir Absolute path to the framework directory
 * @returns {Promise<Array<{name: String, version: String}>>}
 */
export async function readFrameworkPluginVersions (frameworkDir) {
  const srcDir = path.join(frameworkDir, 'src')
  const manifestPaths = await glob([
    'core/bower.json',
    '{components,extensions,menu,theme}/*/bower.json'
  ], { cwd: srcDir, absolute: true })
  return Promise.all(manifestPaths.map(async manifestPath => {
    const { name, version } = await readJson(manifestPath)
    return { name, version }
  }))
}
/**
 * Runs adapt-migrations on a content array. Shared by framework update,
 * course import, and plugin update.
 * @param {Object} options
 * @param {Array} options.content Flat array of content objects (course, config, contentObjects, etc.)
 * @param {Array<{name: String, version: String}>} options.fromPlugins Plugin versions before the update
 * @param {Array<{name: String, version: String}>} options.toPlugins Plugin versions after the update
 * @param {String[]} options.scripts Absolute paths to migration scripts
 * @param {String} [options.cachePath] Optional cache path for adapt-migrations. If omitted, a unique dir under the app's tempDir is created and removed after migration — callers running concurrently MUST either omit this or pass a unique path per call, as adapt-migrations wipes the directory on entry.
 * @returns {Promise} The migrated content array
 */
export async function runContentMigration ({ content, fromPlugins, toPlugins, scripts, cachePath }) {
  const logger = Logger.getInstance()

  let resolvedCachePath = cachePath
  // ephemeral caches are created (and later removed) by this function itself
  const usingEphemeralCache = !resolvedCachePath
  if (usingEphemeralCache) {
    const tempDir = App.instance.getConfig('tempDir')
    const baseCacheDir = path.join(tempDir, 'migration-cache')
    await ensureDir(baseCacheDir)
    // Symlink node_modules at the base so cached migration scripts' bare
    // `import 'adapt-migrations'` resolves via Node's upward walk. It must sit
    // a level ABOVE the run dir, otherwise adapt-migrations's own `npm install`
    // step (which runs in the run dir) wipes the symlink.
    const sharedLink = path.join(baseCacheDir, 'node_modules')
    if (!fs.existsSync(sharedLink)) {
      // two path.dirname calls: require.resolve points at a file inside the
      // package, so step up past the package dir to its node_modules parent
      const sharedNodeModules = path.dirname(path.dirname(require.resolve('adapt-migrations')))
      try {
        fs.symlinkSync(sharedNodeModules, sharedLink, 'dir')
      } catch (err) {
        // a concurrent call may have created the link between the existsSync
        // check and here — that race is benign
        if (err.code !== 'EEXIST') throw err
      }
    }
    // unique run dir per call so concurrent migrations never share a cache
    resolvedCachePath = fs.mkdtempSync(path.join(baseCacheDir, 'run-'))
  } else {
    await ensureDir(resolvedCachePath)
  }

  try {
    await load({ scripts, cachePath: resolvedCachePath, logger })

    // snapshot of fromPlugins taken before migrate() runs — presumably the
    // journal mutates fromPlugins in place; TODO confirm against adapt-migrations
    const originalFromPlugins = JSON.parse(JSON.stringify(fromPlugins))
    const journal = new Journal({
      logger,
      data: {
        content,
        fromPlugins,
        originalFromPlugins,
        toPlugins
      }
    })

    await migrate({ journal, logger })

    return journal.data.content
  } finally {
    // only remove caches this function created; caller-supplied paths are theirs
    if (usingEphemeralCache) {
      try {
        fs.rmSync(resolvedCachePath, { recursive: true, force: true })
      } catch (err) {
        // cleanup failure is non-fatal: the run already completed (or threw)
        logger.warn(`Failed to clean up migration cache at ${resolvedCachePath}: ${err.message}`)
      }
    }
  }
}
"dependencies": { - "adapt-authoring-content": "^2.0.0", + "adapt-authoring-content": "^3.0.0", "adapt-authoring-contentplugin": "^1.0.3", - "adapt-authoring-core": "^2.0.0", - "adapt-authoring-courseassets": "^1.0.3", + "adapt-authoring-core": "^3.0.0", "adapt-authoring-coursetheme": "^1.0.2", "adapt-authoring-mongodb": "^3.0.0", "adapt-authoring-spoortracking": "^1.0.2", "adapt-cli": "^3.3.3", + "adapt-migrations": "^1.4.0", "bytes": "^3.1.2", "fs-extra": "11.3.3", "glob": "^13.0.0", diff --git a/tests/AdaptFrameworkImport.spec.js b/tests/AdaptFrameworkImport.spec.js index 432f1c4..aca8ece 100644 --- a/tests/AdaptFrameworkImport.spec.js +++ b/tests/AdaptFrameworkImport.spec.js @@ -150,7 +150,7 @@ describe('AdaptFrameworkImport', () => { describe('#resolveAssets()', () => { function makeCtx (assetMap) { - const ctx = { assetMap } + const ctx = { assetMap, statusReport: { warn: [] } } ctx.resolveAssets = AdaptFrameworkImport.prototype.resolveAssets.bind(ctx) return ctx } @@ -207,14 +207,17 @@ describe('AdaptFrameworkImport', () => { assert.equal('src' in data._graphic, false) }) - it('should keep value when not in assetMap', () => { + it('should drop unresolved asset refs and surface them in statusReport.warn', () => { const ctx = makeCtx({}) const schema = makeSchema({ img: { _backboneForms: { type: 'Asset' } } }) const data = { img: 'unknown/path.png' } ctx.resolveAssets(schema, data) - assert.equal(data.img, 'unknown/path.png') + assert.equal('img' in data, false) + assert.equal(ctx.statusReport.warn.length, 1) + assert.equal(ctx.statusReport.warn[0].code, 'UNRESOLVED_ASSET_REF') + assert.equal(ctx.statusReport.warn[0].data.path, 'unknown/path.png') }) it('should recurse into nested properties', () => { @@ -407,7 +410,6 @@ describe('AdaptFrameworkImport', () => { contentplugin: null, assets: null, content: null, - courseassets: null, ...overrides } } @@ -442,22 +444,17 @@ describe('AdaptFrameworkImport', () => { assert.deepEqual(deleted.sort(), ['a1', 'a2']) }) 
- it('should delete course content and course assets', async () => { + it('should delete course content on rollback', async () => { const contentDeleted = [] - const courseAssetsDeleted = [] const ctx = makeRollbackCtx({ content: { deleteMany: async (query) => contentDeleted.push(query) }, - courseassets: { - deleteMany: async (query) => courseAssetsDeleted.push(query) - }, contentJson: { course: { _id: 'oldCourseId' } }, idMap: { oldCourseId: '507f1f77bcf86cd799439011' } }) await rollback.call(ctx) assert.equal(contentDeleted.length, 1) - assert.equal(courseAssetsDeleted.length, 1) }) it('should skip plugin uninstall when contentplugin is not available', async () => { @@ -482,9 +479,6 @@ describe('AdaptFrameworkImport', () => { content: { deleteMany: async (query) => deleted.push(query) }, - courseassets: { - deleteMany: async (query) => deleted.push(query) - }, contentJson: { course: { _id: 'oldCourseId' } }, idMap: {} // no mapping exists }) diff --git a/tests/BuildCache.spec.js b/tests/BuildCache.spec.js new file mode 100644 index 0000000..5bec235 --- /dev/null +++ b/tests/BuildCache.spec.js @@ -0,0 +1,107 @@ +import { before, describe, it, beforeEach, afterEach, mock } from 'node:test' +import assert from 'node:assert/strict' +import fs from 'node:fs/promises' +import path from 'node:path' +import upath from 'upath' +import os from 'node:os' + +describe('BuildCache', () => { + let BuildCache + let tmpDir, cacheRoot, buildDir, cache + + before(async () => { + mock.module('../lib/utils/log.js', { + namedExports: { + log: () => {}, + logDir: () => {}, + logMemory: () => {} + } + }) + ;({ default: BuildCache } = await import('../lib/BuildCache.js')) + }) + + beforeEach(async () => { + tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), 'aat-cache-test-')) + cacheRoot = path.join(tmpDir, 'prebuilt-cache') + buildDir = path.join(tmpDir, 'build') + await fs.mkdir(buildDir, { recursive: true }) + cache = new BuildCache(cacheRoot) + }) + + afterEach(async () => { + await 
fs.rm(tmpDir, { recursive: true, force: true }) + }) + + describe('getPath()', () => { + it('returns one combo-keyed directory path', () => { + assert.equal(cache.getPath('abc123', 'vanilla', 'boxMenu'), upath.join(cacheRoot, 'abc123_vanilla_boxMenu')) + }) + }) + + describe('has()', () => { + it('returns false when cache does not exist', async () => { + assert.equal(await cache.has('hash1', 'theme', 'menu'), false) + }) + + it('returns true when the combo dir exists', async () => { + await fs.mkdir(cache.getPath('hash1', 'theme', 'menu'), { recursive: true }) + assert.equal(await cache.has('hash1', 'theme', 'menu'), true) + }) + }) + + describe('populate()', () => { + it('caches all build entries except course/', async () => { + await fs.mkdir(path.join(buildDir, 'adapt', 'js'), { recursive: true }) + await fs.writeFile(path.join(buildDir, 'adapt', 'js', 'adapt.min.js'), 'js-content') + await fs.writeFile(path.join(buildDir, 'adapt.css'), 'css-content') + await fs.writeFile(path.join(buildDir, 'adapt.css.map'), 'map-content') + await fs.mkdir(path.join(buildDir, 'fonts'), { recursive: true }) + await fs.writeFile(path.join(buildDir, 'fonts', 'icon.woff2'), 'font-data') + await fs.writeFile(path.join(buildDir, 'index.html'), '') + await fs.writeFile(path.join(buildDir, 'templates.js'), 'templates') + await fs.mkdir(path.join(buildDir, 'libraries'), { recursive: true }) + await fs.writeFile(path.join(buildDir, 'libraries', 'modernizr.js'), 'lib') + // course/ should be skipped + await fs.mkdir(path.join(buildDir, 'course', 'en'), { recursive: true }) + await fs.writeFile(path.join(buildDir, 'course', 'en', 'course.json'), '{}') + + await cache.populate(buildDir, 'hash1', 'theme', 'menu') + + const cacheDir = cache.getPath('hash1', 'theme', 'menu') + assert.equal(await fs.readFile(path.join(cacheDir, 'adapt', 'js', 'adapt.min.js'), 'utf8'), 'js-content') + assert.equal(await fs.readFile(path.join(cacheDir, 'index.html'), 'utf8'), '') + assert.equal(await 
fs.readFile(path.join(cacheDir, 'adapt.css'), 'utf8'), 'css-content') + assert.equal(await fs.readFile(path.join(cacheDir, 'fonts', 'icon.woff2'), 'utf8'), 'font-data') + await assert.rejects(fs.access(path.join(cacheDir, 'course')), { code: 'ENOENT' }) + }) + }) + + describe('restore()', () => { + it('copies cached artifacts to destination', async () => { + const cacheDir = cache.getPath('hash1', 'theme', 'menu') + await fs.mkdir(path.join(cacheDir, 'adapt', 'js'), { recursive: true }) + await fs.writeFile(path.join(cacheDir, 'adapt', 'js', 'adapt.min.js'), 'cached-js') + await fs.writeFile(path.join(cacheDir, 'adapt.css'), 'cached-css') + + const destDir = path.join(tmpDir, 'restored') + await cache.restore('hash1', 'theme', 'menu', destDir) + + assert.equal(await fs.readFile(path.join(destDir, 'adapt', 'js', 'adapt.min.js'), 'utf8'), 'cached-js') + assert.equal(await fs.readFile(path.join(destDir, 'adapt.css'), 'utf8'), 'cached-css') + }) + }) + + describe('invalidate()', () => { + it('removes the cache directory', async () => { + await fs.mkdir(cacheRoot, { recursive: true }) + await fs.writeFile(path.join(cacheRoot, 'test'), 'data') + await cache.invalidate() + await assert.rejects(fs.access(cacheRoot), { code: 'ENOENT' }) + }) + + it('does not throw when cache does not exist', async () => { + const missing = new BuildCache(path.join(tmpDir, 'nonexistent')) + await assert.doesNotReject(missing.invalidate()) + }) + }) +}) diff --git a/tests/utils-applyBuildReplacements.spec.js b/tests/utils-applyBuildReplacements.spec.js new file mode 100644 index 0000000..1926d3c --- /dev/null +++ b/tests/utils-applyBuildReplacements.spec.js @@ -0,0 +1,57 @@ +import { describe, it, beforeEach, afterEach } from 'node:test' +import assert from 'node:assert/strict' +import fs from 'node:fs/promises' +import path from 'node:path' +import os from 'node:os' + +import { applyBuildReplacements } from '../lib/utils/applyBuildReplacements.js' + +describe('applyBuildReplacements()', () 
=> { + let tmpDir + + beforeEach(async () => { + tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), 'aat-replace-test-')) + }) + + afterEach(async () => { + await fs.rm(tmpDir, { recursive: true, force: true }) + }) + + it('should replace all @@placeholders in index.html', async () => { + const template = [ + '', + '', + '' + ].join('\n') + await fs.writeFile(path.join(tmpDir, 'index.html'), template) + + await applyBuildReplacements(tmpDir, { + defaultLanguage: 'fr', + defaultDirection: 'rtl', + buildType: 'development', + timestamp: 1234567890 + }) + + const result = await fs.readFile(path.join(tmpDir, 'index.html'), 'utf8') + assert.ok(result.includes('lang="fr"')) + assert.ok(result.includes('dir="rtl"')) + assert.ok(result.includes('content="development"')) + assert.ok(result.includes('content="1234567890"')) + assert.ok(!result.includes('@@')) + }) + + it('should handle multiple occurrences of the same placeholder', async () => { + const template = '@@config._defaultLanguage @@config._defaultLanguage' + await fs.writeFile(path.join(tmpDir, 'index.html'), template) + + await applyBuildReplacements(tmpDir, { + defaultLanguage: 'de', + defaultDirection: 'ltr', + buildType: 'production', + timestamp: 0 + }) + + const result = await fs.readFile(path.join(tmpDir, 'index.html'), 'utf8') + assert.equal(result, 'de de') + }) +}) diff --git a/tests/utils-collectMigrationScripts.spec.js b/tests/utils-collectMigrationScripts.spec.js new file mode 100644 index 0000000..4944ab3 --- /dev/null +++ b/tests/utils-collectMigrationScripts.spec.js @@ -0,0 +1,45 @@ +import { describe, it, beforeEach, afterEach } from 'node:test' +import assert from 'node:assert/strict' +import fs from 'node:fs/promises' +import path from 'node:path' +import os from 'node:os' + +import { collectMigrationScripts } from '../lib/utils/collectMigrationScripts.js' + +describe('collectMigrationScripts()', () => { + let tmpDir + + beforeEach(async () => { + tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), 
'fw-mig-')) + const srcDir = path.join(tmpDir, 'src') + await fs.mkdir(path.join(srcDir, 'core', 'migrations'), { recursive: true }) + await fs.mkdir(path.join(srcDir, 'components', 'adapt-contrib-text', 'migrations'), { recursive: true }) + + await fs.writeFile(path.join(srcDir, 'core', 'migrations', '6.24.2.js'), '// core migration') + await fs.writeFile(path.join(srcDir, 'components', 'adapt-contrib-text', 'migrations', '5.0.1.js'), '// text migration') + }) + + afterEach(async () => { + await fs.rm(tmpDir, { recursive: true }) + }) + + it('should find core and plugin migration scripts', async () => { + const scripts = await collectMigrationScripts(tmpDir) + assert.equal(scripts.length, 2) + assert.ok(scripts.some(s => s.includes('core/migrations/6.24.2.js'))) + assert.ok(scripts.some(s => s.includes('adapt-contrib-text/migrations/5.0.1.js'))) + }) + + it('should return absolute paths', async () => { + const scripts = await collectMigrationScripts(tmpDir) + scripts.forEach(s => assert.ok(path.isAbsolute(s))) + }) + + it('should return empty array when no migration scripts exist', async () => { + const emptyDir = await fs.mkdtemp(path.join(os.tmpdir(), 'fw-nomig-')) + await fs.mkdir(path.join(emptyDir, 'src'), { recursive: true }) + const scripts = await collectMigrationScripts(emptyDir) + assert.deepEqual(scripts, []) + await fs.rm(emptyDir, { recursive: true }) + }) +}) diff --git a/tests/utils-computePluginHash.spec.js b/tests/utils-computePluginHash.spec.js new file mode 100644 index 0000000..09df53d --- /dev/null +++ b/tests/utils-computePluginHash.spec.js @@ -0,0 +1,40 @@ +import { before, describe, it, mock } from 'node:test' +import assert from 'node:assert/strict' + +describe('computePluginHash()', () => { + let computePluginHash + + before(async () => { + mock.module('adapt-cli/lib/integration/Project.js', { + defaultExport: class MockProject { + constructor ({ cwd }) { this.cwd = cwd } + async getInstalledDependencies () { + return { + 
'adapt-contrib-text': '1.0.0', + 'adapt-contrib-narrative': '2.0.0', + 'adapt-contrib-core': '3.0.0' + } + } + } + }) + ;({ computePluginHash } = await import('../lib/utils/computePluginHash.js')) + }) + + it('should return a 16-character hex string', async () => { + const hash = await computePluginHash('/fake/framework') + assert.match(hash, /^[0-9a-f]{16}$/) + }) + + it('should return the same hash for the same plugin set', async () => { + const hash1 = await computePluginHash('/fake/framework') + const hash2 = await computePluginHash('/fake/framework') + assert.equal(hash1, hash2) + }) + + it('should produce a deterministic hash regardless of insertion order', async () => { + // The mock always returns the same deps — verify stability + const hash1 = await computePluginHash('/path/a') + const hash2 = await computePluginHash('/path/b') + assert.equal(hash1, hash2) + }) +}) diff --git a/tests/utils-generateLanguageManifest.spec.js b/tests/utils-generateLanguageManifest.spec.js new file mode 100644 index 0000000..89586d9 --- /dev/null +++ b/tests/utils-generateLanguageManifest.spec.js @@ -0,0 +1,27 @@ +import { describe, it } from 'node:test' +import assert from 'node:assert/strict' + +import { generateLanguageManifest } from '../lib/utils/generateLanguageManifest.js' + +describe('generateLanguageManifest()', () => { + it('should return all filenames except the manifest and assets.json', () => { + const input = ['course.json', 'contentObjects.json', 'articles.json', 'language_data_manifest.js', 'assets.json'] + const result = generateLanguageManifest(input) + assert.deepEqual(result, ['course.json', 'contentObjects.json', 'articles.json']) + }) + + it('should return an empty array when only excluded files are present', () => { + const result = generateLanguageManifest(['language_data_manifest.js', 'assets.json']) + assert.deepEqual(result, []) + }) + + it('should return all filenames when no exclusions apply', () => { + const input = ['course.json', 'blocks.json'] 
+ const result = generateLanguageManifest(input) + assert.deepEqual(result, ['course.json', 'blocks.json']) + }) + + it('should handle an empty input array', () => { + assert.deepEqual(generateLanguageManifest([]), []) + }) +}) diff --git a/tests/utils-migrateExistingCourses.spec.js b/tests/utils-migrateExistingCourses.spec.js new file mode 100644 index 0000000..9ca0ee7 --- /dev/null +++ b/tests/utils-migrateExistingCourses.spec.js @@ -0,0 +1,163 @@ +import { describe, it, mock } from 'node:test' +import assert from 'node:assert/strict' + +const mockContentModule = { + find: mock.fn(async () => [ + { _id: 'course1', _type: 'course', title: 'Course 1' } + ]), + findOne: mock.fn(async ({ _id, _type, _courseId }) => { + if (_type === 'config') return { _id: 'cfg1', _type: 'config', _courseId } + return { _id, _type: 'course', title: 'Course 1' } + }), + update: mock.fn(async () => {}) +} + +mock.module('adapt-authoring-core', { + namedExports: { + App: { + instance: { + waitForModule: mock.fn(async () => mockContentModule) + } + } + } +}) + +const mockCollectMigrationScripts = mock.fn(async () => ['/path/to/script.js']) +mock.module('../lib/utils/collectMigrationScripts.js', { + namedExports: { + collectMigrationScripts: mockCollectMigrationScripts + } +}) + +const mockRunContentMigration = mock.fn(async ({ content }) => { + return content.map(item => ({ + ...item, + title: item.title ? 
item.title + ' (migrated)' : item.title + })) +}) +mock.module('../lib/utils/runContentMigration.js', { + namedExports: { + runContentMigration: mockRunContentMigration + } +}) + +mock.module('../lib/utils/log.js', { + namedExports: { + log: () => {} + } +}) + +const { migrateExistingCourses } = await import('../lib/utils/migrateExistingCourses.js') + +describe('migrateExistingCourses()', () => { + it('should collect migration scripts from frameworkDir', async () => { + mockCollectMigrationScripts.mock.resetCalls() + await migrateExistingCourses({ + fromPlugins: [{ name: 'core', version: '1.0.0' }], + toPlugins: [{ name: 'core', version: '2.0.0' }], + frameworkDir: '/fw' + }) + assert.equal(mockCollectMigrationScripts.mock.calls.length, 1) + assert.equal(mockCollectMigrationScripts.mock.calls[0].arguments[0], '/fw') + }) + + it('should query all courses when no courseIds provided', async () => { + mockContentModule.find.mock.resetCalls() + await migrateExistingCourses({ + fromPlugins: [], + toPlugins: [], + frameworkDir: '/fw' + }) + const findCalls = mockContentModule.find.mock.calls + assert.ok(findCalls.some(c => + JSON.stringify(c.arguments[0]) === JSON.stringify({ _type: 'course' }) + )) + }) + + it('should return migration result counts', async () => { + mockContentModule.find.mock.resetCalls() + mockContentModule.update.mock.resetCalls() + mockRunContentMigration.mock.resetCalls() + mockRunContentMigration.mock.mockImplementation(async ({ content }) => { + return content.map(item => ({ + ...item, + title: item.title ? 
// restore the non-empty scripts list so subsequent tests still see a migration script
content }) => { + callCount++ + if (callCount === 2) throw new Error('migration error') + return content.map(item => ({ ...item, title: item.title + ' (migrated)' })) + }) + const result = await migrateExistingCourses({ + fromPlugins: [], + toPlugins: [], + frameworkDir: '/fw' + }) + assert.equal(result.migrated, 1) + assert.equal(result.failed, 1) + assert.equal(result.errors.length, 1) + assert.equal(result.errors[0].courseId, 'course2') + }) +}) diff --git a/tests/utils-readFrameworkPluginVersions.spec.js b/tests/utils-readFrameworkPluginVersions.spec.js new file mode 100644 index 0000000..9393b23 --- /dev/null +++ b/tests/utils-readFrameworkPluginVersions.spec.js @@ -0,0 +1,48 @@ +import { describe, it, beforeEach, afterEach } from 'node:test' +import assert from 'node:assert/strict' +import fs from 'node:fs/promises' +import path from 'node:path' +import os from 'node:os' + +import { readFrameworkPluginVersions } from '../lib/utils/readFrameworkPluginVersions.js' + +describe('readFrameworkPluginVersions()', () => { + let tmpDir + + beforeEach(async () => { + tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), 'fw-test-')) + const srcDir = path.join(tmpDir, 'src') + await fs.mkdir(path.join(srcDir, 'core'), { recursive: true }) + await fs.mkdir(path.join(srcDir, 'components', 'adapt-contrib-text'), { recursive: true }) + await fs.mkdir(path.join(srcDir, 'extensions', 'adapt-contrib-trickle'), { recursive: true }) + + await fs.writeFile(path.join(srcDir, 'core', 'bower.json'), JSON.stringify({ name: 'adapt-contrib-core', version: '6.24.1' })) + await fs.writeFile(path.join(srcDir, 'components', 'adapt-contrib-text', 'bower.json'), JSON.stringify({ name: 'adapt-contrib-text', version: '5.0.0' })) + await fs.writeFile(path.join(srcDir, 'extensions', 'adapt-contrib-trickle', 'bower.json'), JSON.stringify({ name: 'adapt-contrib-trickle', version: '4.2.1' })) + }) + + afterEach(async () => { + await fs.rm(tmpDir, { recursive: true }) + }) + + it('should return plugin 
names and versions from bower.json files', async () => { + const plugins = await readFrameworkPluginVersions(tmpDir) + assert.equal(plugins.length, 3) + const names = plugins.map(p => p.name).sort() + assert.deepEqual(names, ['adapt-contrib-core', 'adapt-contrib-text', 'adapt-contrib-trickle']) + }) + + it('should return name and version for each plugin', async () => { + const plugins = await readFrameworkPluginVersions(tmpDir) + const core = plugins.find(p => p.name === 'adapt-contrib-core') + assert.equal(core.version, '6.24.1') + }) + + it('should return empty array when src dir has no bower files', async () => { + const emptyDir = await fs.mkdtemp(path.join(os.tmpdir(), 'fw-empty-')) + await fs.mkdir(path.join(emptyDir, 'src'), { recursive: true }) + const plugins = await readFrameworkPluginVersions(emptyDir) + assert.deepEqual(plugins, []) + await fs.rm(emptyDir, { recursive: true }) + }) +}) diff --git a/tests/utils-runContentMigration.spec.js b/tests/utils-runContentMigration.spec.js new file mode 100644 index 0000000..0206cca --- /dev/null +++ b/tests/utils-runContentMigration.spec.js @@ -0,0 +1,101 @@ +import { describe, it, mock } from 'node:test' +import assert from 'node:assert/strict' +import fs from 'node:fs' +import os from 'node:os' +import path from 'node:path' + +const testCachePath = fs.mkdtempSync(path.join(os.tmpdir(), 'runContentMigration-test-')) + +const mockLoad = mock.fn(async () => {}) +const mockMigrate = mock.fn(async ({ journal }) => { + journal.data.content[0].title = 'migrated' +}) + +class FakeJournal { + constructor ({ data }) { + this.data = data + } +} + +class FakeLogger { + info () {} + error () {} + warn () {} + debug () {} + log () {} +} + +mock.module('adapt-migrations', { + namedExports: { + load: mockLoad, + migrate: mockMigrate, + Journal: FakeJournal, + Logger: { getInstance: () => new FakeLogger() } + } +}) + +const { runContentMigration } = await import('../lib/utils/runContentMigration.js') + 
+describe('runContentMigration()', () => { + it('should call load with scripts and logger', async () => { + mockLoad.mock.resetCalls() + const scripts = ['/path/to/migration.js'] + await runContentMigration({ + content: [{ _id: 'c1', title: 'old' }], + fromPlugins: [{ name: 'core', version: '1.0.0' }], + toPlugins: [{ name: 'core', version: '2.0.0' }], + scripts, + cachePath: testCachePath + }) + assert.equal(mockLoad.mock.calls.length, 1) + assert.deepEqual(mockLoad.mock.calls[0].arguments[0].scripts, scripts) + }) + + it('should create a Journal with correct data shape', async () => { + mockMigrate.mock.resetCalls() + await runContentMigration({ + content: [{ _id: 'c1', title: 'old' }], + fromPlugins: [{ name: 'core', version: '1.0.0' }], + toPlugins: [{ name: 'core', version: '2.0.0' }], + scripts: [], + cachePath: testCachePath + }) + assert.equal(mockMigrate.mock.calls.length, 1) + const journal = mockMigrate.mock.calls[0].arguments[0].journal + assert.ok(journal.data.content) + assert.ok(journal.data.fromPlugins) + assert.ok(journal.data.originalFromPlugins) + assert.ok(journal.data.toPlugins) + }) + + it('should return mutated content', async () => { + mockMigrate.mock.resetCalls() + mockMigrate.mock.mockImplementation(async ({ journal }) => { + journal.data.content[0].title = 'migrated' + }) + const result = await runContentMigration({ + content: [{ _id: 'c1', title: 'old' }], + fromPlugins: [{ name: 'core', version: '1.0.0' }], + toPlugins: [{ name: 'core', version: '2.0.0' }], + scripts: [], + cachePath: testCachePath + }) + assert.equal(result[0].title, 'migrated') + }) + + it('should deep-clone fromPlugins into originalFromPlugins', async () => { + mockMigrate.mock.resetCalls() + mockMigrate.mock.mockImplementation(async () => {}) + const fromPlugins = [{ name: 'core', version: '1.0.0' }] + await runContentMigration({ + content: [{ _id: 'c1' }], + fromPlugins, + toPlugins: [{ name: 'core', version: '2.0.0' }], + scripts: [], + cachePath: testCachePath + 
}) + const journal = mockMigrate.mock.calls[0].arguments[0].journal + assert.deepEqual(journal.data.originalFromPlugins, fromPlugins) + assert.notEqual(journal.data.originalFromPlugins, journal.data.fromPlugins) + }) +})