From 84bfd33318fe1d2f2aaf596eb92f0d334fccbb3f Mon Sep 17 00:00:00 2001
From: Eli <88557639+lishaduck@users.noreply.github.com>
Date: Wed, 10 Jul 2024 16:59:54 -0500
Subject: [PATCH] fix: use import attributes rather than json assertions

---
 .../core/src/generation/generate-dotpkg.ts | 28 ++++++++++++++--------------
 1 file changed, 14 insertions(+), 14 deletions(-)

diff --git a/packages/@contentlayer/core/src/generation/generate-dotpkg.ts b/packages/@contentlayer/core/src/generation/generate-dotpkg.ts
index 4cd9023c..726b7d34 100644
--- a/packages/@contentlayer/core/src/generation/generate-dotpkg.ts
+++ b/packages/@contentlayer/core/src/generation/generate-dotpkg.ts
@@ -192,16 +192,16 @@ const writeFilesForCache = ({
     T.succeedWith(() => process.versions.node.split('.').map((_) => parseInt(_, 10)) as [number, number, number]),
   )
 
-  // NOTE Type assert statements for `.json` files are neccessary from Node v16.14 onwards
-  const needsJsonAssertStatement = nodeVersionMajor > 16 || (nodeVersionMajor === 16 && nodeVersionMinor >= 14)
-  const assertStatement = needsJsonAssertStatement ? ` assert { type: 'json' }` : ''
+  // NOTE: An import attribute for `.json` files is necessary from Node v16.14 onwards.
+  const needsTypeJsonAttribute = nodeVersionMajor > 16 || (nodeVersionMajor === 16 && nodeVersionMinor >= 14)
+  const importAttributeStatement = needsTypeJsonAttribute ? ` with { type: 'json' }` : ''
 
   const typeNameField = generationOptions.options.fieldOptions.typeFieldName
   const dataBarrelFiles = documentDefs.map((docDef) => ({
     content: makeDataExportFile({
       docDef,
       documentIds: allDocuments.filter((_) => _[typeNameField] === docDef.name).map((_) => _._id),
-      assertStatement,
+      importAttributeStatement,
     }),
     filePath: withPrefix('generated', docDef.name, `_index.mjs`),
   }))
@@ -255,7 +255,7 @@ const writeFilesForCache = ({
       filePath: withPrefix('generated', 'index.mjs'),
       content: makeIndexMjs({
         schemaDef,
-        assertStatement,
+        importAttributeStatement,
         bundleFilePath: relativeBundleFilePath,
         isDev,
         options,
@@ -338,11 +338,11 @@ const writeFileWithWrittenFilesCache =
 const makeDataExportFile = ({
   docDef,
   documentIds,
-  assertStatement,
+  importAttributeStatement,
 }: {
   docDef: DocumentTypeDef
   documentIds: string[]
-  assertStatement: string
+  importAttributeStatement: string
 }): string => {
   const dataVariableName = getDataVariableName({ docDef })
 
@@ -350,7 +350,7 @@ const makeDataExportFile = ({
     const documentId = documentIds[0]!
     return `\
 // ${autogeneratedNote}
-export { default as ${dataVariableName} } from './${idToFileName(documentId)}.json'${assertStatement}
+export { default as ${dataVariableName} } from './${idToFileName(documentId)}.json'${importAttributeStatement}
 `
   }
 
@@ -376,7 +376,7 @@ export { default as ${dataVariableName} } from './${idToFileName(documentId)}.js
   )
 
   const docImports = documentIds
-    .map((_) => `import ${idToVariableNameMap.get(_)} from './${idToFileName(_)}.json'${assertStatement}`)
+    .map((_) => `import ${idToVariableNameMap.get(_)} from './${idToFileName(_)}.json'${importAttributeStatement}`)
     .join('\n')
 
   return `\
@@ -390,13 +390,13 @@ export const ${dataVariableName} = [${Array.from(idToVariableNameMap.values()).j
 
 const makeIndexMjs = ({
   schemaDef,
-  assertStatement,
+  importAttributeStatement,
   bundleFilePath,
   options,
   isDev,
 }: {
   schemaDef: SchemaDef
-  assertStatement: string
+  importAttributeStatement: string
   bundleFilePath: RelativePosixFilePath
   options: PluginOptions
   isDev: boolean
@@ -413,7 +413,7 @@ const makeIndexMjs = ({
     .map(({ documentDefName, dataVariableName }) =>
       isDev
         ? `import { ${dataVariableName} } from './${documentDefName}/_index.mjs'`
-        : `import ${dataVariableName} from './${documentDefName}/_index.json'${assertStatement}`,
+        : `import ${dataVariableName} from './${documentDefName}/_index.json'${importAttributeStatement}`,
     )
     .join('\n')
   return `\
@@ -434,7 +434,7 @@ export const fetchContent = async (sourceKey) => {
   const worker = new Worker(workerFilePath, { workerData: { sourceKey } })
 
   return new Promise((resolve, reject) => {
-    worker.on('message', (data) => { 
+    worker.on('message', (data) => {
       if (data.result) {
         resolve(data.result)
       } else if (data.fatalError) {
@@ -485,7 +485,7 @@ export const makeDataTypes = ({ schemaDef, options }: { schemaDef: SchemaDef; op
   if (options.experimental.enableDynamicBuild === false) return ''
 
   return `\
-export type FetchContentResult = 
+export type FetchContentResult =
   | { _tag: 'Error', error: SourceProvideSchemaErrorJSON | SourceFetchDataErrorJSON }
   | { _tag: 'Data', data: DataExports }