diff --git a/lib/index.js b/lib/index.js
index 34e415b..f0b71a9 100644
--- a/lib/index.js
+++ b/lib/index.js
@@ -38,6 +38,7 @@ class PackageJson {
     '_attributes',
     'bundledDependencies',
     'bundleDependencies',
+    'deleteFalseBundleDependencies',
     'gypfile',
     'serverjs',
     'scriptpath',
diff --git a/lib/normalize.js b/lib/normalize.js
index bc101cd..79bc15d 100644
--- a/lib/normalize.js
+++ b/lib/normalize.js
@@ -3,10 +3,27 @@ const { glob } = require('glob')
 const normalizePackageBin = require('npm-normalize-package-bin')
 const normalizePackageData = require('normalize-package-data')
 const path = require('path')
+const log = require('proc-log')
+const git = require('@npmcli/git')
 
-const normalize = async (pkg, { strict, steps }) => {
+// Replace with https://github.com/npm/git/pull/135 once it lands
+const gitFind = async ({ cwd, root }) => {
+  if (await git.is({ cwd })) {
+    return cwd
+  }
+  while (cwd !== path.dirname(cwd) && cwd !== root) {
+    cwd = path.dirname(cwd)
+    if (await git.is({ cwd })) {
+      return cwd
+    }
+  }
+  return null
+}
+
+const normalize = async (pkg, { strict, steps, root }) => {
   const data = pkg.content
   const scripts = data.scripts || {}
+  const pkgId = `${data.name ?? ''}@${data.version ?? ''}`
 
   // remove attributes that start with "_"
   if (steps.includes('_attributes')) {
@@ -20,7 +37,7 @@ const normalize = async (pkg, { strict, steps }) => {
   // build the "_id" attribute
   if (steps.includes('_id')) {
     if (data.name && data.version) {
-      data._id = `${data.name}@${data.version}`
+      data._id = pkgId
     }
   }
 
@@ -34,7 +51,13 @@ const normalize = async (pkg, { strict, steps }) => {
   // expand "bundleDependencies: true or translate from object"
   if (steps.includes('bundleDependencies')) {
     const bd = data.bundleDependencies
-    if (bd === true) {
+    if (bd === false) {
+      if (steps.includes('deleteFalseBundleDependencies')) {
+        delete data.bundleDependencies
+      } else {
+        data.bundleDependencies = []
+      }
+    } else if (bd === true) {
       data.bundleDependencies = Object.keys(data.dependencies || {})
     } else if (bd && typeof bd === 'object') {
       if (!Array.isArray(bd)) {
@@ -158,7 +181,7 @@ const normalize = async (pkg, { strict, steps }) => {
   }
 
   // expand "directories.bin"
-  if (steps.includes('binDir') && data.directories?.bin) {
+  if (steps.includes('binDir') && data.directories?.bin && !data.bin) {
     const binsDir = path.resolve(pkg.path, path.join('.', path.join('/', data.directories.bin)))
     const bins = await glob('**', { cwd: binsDir })
     data.bin = bins.reduce((acc, binFile) => {
@@ -174,17 +197,20 @@ const normalize = async (pkg, { strict, steps }) => {
 
   // populate "gitHead" attribute
   if (steps.includes('gitHead') && !data.gitHead) {
+    const gitRoot = await gitFind({ cwd: pkg.path, root })
     let head
-    try {
-      head = await fs.readFile(path.resolve(pkg.path, '.git/HEAD'), 'utf8')
-    } catch (err) {
+    if (gitRoot) {
+      try {
+        head = await fs.readFile(path.resolve(gitRoot, '.git/HEAD'), 'utf8')
+      } catch (err) {
       // do nothing
+      }
     }
     let headData
     if (head) {
       if (head.startsWith('ref: ')) {
         const headRef = head.replace(/^ref: /, '').trim()
-        const headFile = path.resolve(pkg.path, '.git', headRef)
+        const headFile = path.resolve(gitRoot, '.git', headRef)
         try {
           headData = await fs.readFile(headFile, 'utf8')
           headData = headData.replace(/^ref: /, '').trim()
@@ -192,7 +218,7 @@ const normalize = async (pkg, { strict, steps }) => {
           // do nothing
         }
         if (!headData) {
-          const packFile = path.resolve(pkg.path, '.git/packed-refs')
+          const packFile = path.resolve(gitRoot, '.git/packed-refs')
           try {
             let refs = await fs.readFile(packFile, 'utf8')
             if (refs) {
@@ -271,11 +297,11 @@ const normalize = async (pkg, { strict, steps }) => {
   // in normalize-package-data if it had access to the file path.
   if (steps.includes('binRefs') && data.bin instanceof Object) {
     for (const key in data.bin) {
-      const binPath = path.resolve(pkg.path, data.bin[key])
       try {
-        await fs.access(binPath)
+        await fs.access(path.resolve(pkg.path, data.bin[key]))
       } catch {
-        delete data.bin[key]
+        log.warn('package-json', pkgId, `No bin file found at ${data.bin[key]}`)
+        // XXX: should a future breaking change delete bin entries that cannot be accessed?
       }
     }
   }
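A quick illustration of the behaviour the `gitFind` helper and the new `root` option add above. This is a minimal sketch and not part of the diff: the monorepo paths are made up, and it assumes only the option shape exercised in test/prepare.js below (`pkg.prepare(path, { root })`), where `root` is the directory the walk towards a `.git` directory will not go above.

```js
const pkg = require('@npmcli/package-json')

const main = async () => {
  // Hypothetical layout:
  //   /repo/.git/HEAD                    <- git dir at the workspace root
  //   /repo/packages/foo/package.json    <- package being prepared
  const { content } = await pkg.prepare('/repo/packages/foo', { root: '/repo' })
  // gitFind() starts at /repo/packages/foo and walks up until it finds a git
  // dir at /repo, so gitHead is read from /repo/.git/HEAD (or packed-refs).
  console.log(content.gitHead)

  // With root pointed at the package itself the walk never leaves the package,
  // gitFind() returns null, and no gitHead is populated.
  const { content: isolated } = await pkg.prepare('/repo/packages/foo', {
    root: '/repo/packages/foo',
  })
  console.log(isolated.gitHead) // undefined
}

main()
```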
diff --git a/package.json b/package.json
index 61607c5..d29a2e4 100644
--- a/package.json
+++ b/package.json
@@ -26,13 +26,17 @@
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
     "@npmcli/template-oss": "4.15.1",
+    "read-package-json": "^6.0.4",
+    "read-package-json-fast": "^3.0.2",
     "tap": "^16.0.1"
   },
   "dependencies": {
+    "@npmcli/git": "^4.0.4",
     "glob": "^10.2.2",
     "json-parse-even-better-errors": "^3.0.0",
     "normalize-package-data": "^5.0.0",
-    "npm-normalize-package-bin": "^3.0.1"
+    "npm-normalize-package-bin": "^3.0.1",
+    "proc-log": "^3.0.0"
   },
   "repository": {
     "type": "git",
diff --git a/test/normalize.js b/test/normalize.js
index 439a161..9a08799 100644
--- a/test/normalize.js
+++ b/test/normalize.js
@@ -1,333 +1,355 @@
 const t = require('tap')
+const { join } = require('path')
 const pkg = require('../')
+const rpj = require('read-package-json-fast')
 
-t.test('errors for bad/missing data', async t => {
-  t.test('raises an error for missing file', t =>
-    t.rejects(pkg.normalize(t.testdir()), { code: 'ENOENT' }))
+const testMethods = {
+  '@npmcli/package-json': async (t, testdir = {}, { dir = (v) => v, ...opts } = {}) => {
+    const p = t.testdir(testdir)
+    return pkg.normalize(dir(p), opts)
+  },
+  'read-package-json-fast': (t, testdir = {}, { dir = (v) => v } = {}) => {
+    const p = t.testdir(testdir)
+    return rpj(join(dir(p), 'package.json')).then(r => ({ content: r }))
+  },
+}
 
-  t.test('rejects if file is not json', t =>
-    t.rejects(pkg.normalize(t.testdir({
-      'package.json': 'this is not json',
-    })), { code: 'EJSONPARSE' }))
-})
+for (const [name, testNormalize] of Object.entries(testMethods)) {
+  const isLegacy = name === 'read-package-json-fast'
 
-t.test('clean up bundleDependencies', async t => {
-  t.test('change name if bundleDependencies is not present', async t => {
-    const { content } = await pkg.normalize(t.testdir({
-      'package.json': JSON.stringify({ bundledDependencies: [] }),
-    }))
-    t.strictSame(content.bundleDependencies, [])
-  })
+  t.test(name, async t => {
+    t.test('errors for bad/missing data', async t => {
+      t.test('raises an error for missing file', t =>
+        t.rejects(testNormalize(t, {}, {}), { code: 'ENOENT' }))
 
-  t.test('dont array-ify if its an array already', async t => {
-    const { content } = await pkg.normalize(t.testdir({
-      'package.json': JSON.stringify({ bundleDependencies: ['a'] }),
-    }))
-    t.strictSame(content.bundleDependencies, ['a'])
-  })
+      await t.test('rejects if file is not json', t =>
+        t.rejects(testNormalize(t, ({
+          'package.json': 'this is not json',
+        })), { code: 'EJSONPARSE' }))
+    })
 
-  t.test('handle bundledDependencies: true', async t => {
-    const { content } = await pkg.normalize(t.testdir({
-      'package.json': JSON.stringify({
-        bundledDependencies: true,
-        dependencies: { a: '1.2.3' },
-      }),
-    }))
-    t.strictSame(content.bundleDependencies, ['a'])
-  })
+    t.test('clean up bundleDependencies', async t => {
+      t.test('change name if
bundleDependencies is not present', async t => { + const { content } = await testNormalize(t, ({ + 'package.json': JSON.stringify({ bundledDependencies: [] }), + })) + t.strictSame(content.bundleDependencies, []) + }) - t.test('handle bundleDependencies: true', async t => { - const { content } = await pkg.normalize(t.testdir({ - 'package.json': JSON.stringify({ - bundleDependencies: true, - dependencies: { a: '1.2.3' }, - }), - })) - t.strictSame(content.bundleDependencies, ['a']) - }) + t.test('dont array-ify if its an array already', async t => { + const { content } = await testNormalize(t, ({ + 'package.json': JSON.stringify({ bundleDependencies: ['a'] }), + })) + t.strictSame(content.bundleDependencies, ['a']) + }) - t.test('handle bundleDependencies: true with no deps', async t => { - const { content } = await pkg.normalize(t.testdir({ - 'package.json': JSON.stringify({ - bundleDependencies: true, - }), - })) - t.strictSame(content.bundleDependencies, []) - }) + t.test('handle bundledDependencies: true', async t => { + const { content } = await testNormalize(t, ({ + 'package.json': JSON.stringify({ + bundledDependencies: true, + dependencies: { a: '1.2.3' }, + }), + })) + t.strictSame(content.bundleDependencies, ['a']) + }) - t.test('handle bundleDependencies: false', async t => { - const { content } = await pkg.normalize(t.testdir({ - 'package.json': JSON.stringify({ - bundleDependencies: false, - dependencies: { a: '1.2.3' }, - }), - })) - t.has(content, { bundleDependencies: undefined }) - }) + t.test('handle bundleDependencies: true', async t => { + const { content } = await testNormalize(t, ({ + 'package.json': JSON.stringify({ + bundleDependencies: true, + dependencies: { a: '1.2.3' }, + }), + })) + t.strictSame(content.bundleDependencies, ['a']) + }) - t.test('handle bundleDependencies object', async t => { - const { content } = await pkg.normalize(t.testdir({ - 'package.json': JSON.stringify({ - bundleDependencies: { a: '1.2.3' }, - dependencies: { a: '1.2.3' }, - }), - })) - t.strictSame(content.bundleDependencies, ['a']) - }) -}) + t.test('handle bundleDependencies: true with no deps', async t => { + const { content } = await testNormalize(t, ({ + 'package.json': JSON.stringify({ + bundleDependencies: true, + }), + })) + t.strictSame(content.bundleDependencies, []) + }) -t.test('clean up scripts', async t => { - t.test('delete non-object scripts', async t => { - const { content } = await pkg.normalize(t.testdir({ - 'package.json': JSON.stringify({ - scripts: 1234, - }), - })) - t.has(content, { scripts: undefined }) - }) + t.test('handle bundleDependencies: false', async t => { + const { content } = await testNormalize(t, ({ + 'package.json': JSON.stringify({ + bundleDependencies: false, + dependencies: { a: '1.2.3' }, + }), + })) + t.has(content, { bundleDependencies: [] }) + }) - t.test('delete non-string script targets', async t => { - const { content } = await pkg.normalize(t.testdir({ - 'package.json': JSON.stringify({ - scripts: { - foo: 'bar', - bar: ['baz'], - baz: { bar: { foo: 'barbaz' } }, - }, - }), - })) - t.strictSame(content.scripts, { foo: 'bar' }) - }) -}) + t.test('handle bundleDependencies object', async t => { + const { content } = await testNormalize(t, ({ + 'package.json': JSON.stringify({ + bundleDependencies: { a: '1.2.3' }, + dependencies: { a: '1.2.3' }, + }), + })) + t.strictSame(content.bundleDependencies, ['a']) + }) + }) -t.test('convert funding string to object', async t => { - const { content } = await pkg.normalize(t.testdir({ - 
'package.json': JSON.stringify({ funding: 'hello' }), - })) - t.strictSame(content.funding, { url: 'hello' }) -}) + t.test('clean up scripts', async t => { + t.test('delete non-object scripts', async t => { + const { content } = await testNormalize(t, ({ + 'package.json': JSON.stringify({ + scripts: 1234, + }), + })) + t.has(content, { scripts: undefined }) + }) -t.test('cleanup bins', async t => { - t.test('handle string when a name is set', async t => { - const { content } = await pkg.normalize(t.testdir({ - 'package.json': JSON.stringify({ name: 'x', bin: 'y' }), - })) - t.strictSame(content.bin, { x: 'y' }) - }) + t.test('delete non-string script targets', async t => { + const { content } = await testNormalize(t, ({ + 'package.json': JSON.stringify({ + scripts: { + foo: 'bar', + bar: ['baz'], + baz: { bar: { foo: 'barbaz' } }, + }, + }), + })) + t.strictSame(content.scripts, { foo: 'bar' }) + }) + }) - t.test('delete string bin when no name', async t => { - const { content } = await pkg.normalize(t.testdir({ - 'package.json': JSON.stringify({ bin: 'y' }), - })) - t.has(content, { bin: undefined }) - }) + t.test('convert funding string to object', async t => { + const { content } = await testNormalize(t, ({ + 'package.json': JSON.stringify({ funding: 'hello' }), + })) + t.strictSame(content.funding, { url: 'hello' }) + }) - t.test('remove non-object bin', async t => { - const { content } = await pkg.normalize(t.testdir({ - 'package.json': JSON.stringify({ bin: 1234 }), - })) - t.has(content, { bin: undefined }) - }) + t.test('cleanup bins', async t => { + t.test('handle string when a name is set', async t => { + const { content } = await testNormalize(t, ({ + 'package.json': JSON.stringify({ name: 'x', bin: 'y' }), + })) + t.strictSame(content.bin, { x: 'y' }) + }) - t.test('remove non-string bin values', async t => { - const { content } = await pkg.normalize(t.testdir({ - 'package.json': JSON.stringify({ bin: { - x: 'y', - y: 1234, - z: { a: 'b' }, - } }), - })) - t.strictSame(content.bin, { x: 'y' }) - }) -}) + t.test('delete string bin when no name', async t => { + const { content } = await testNormalize(t, ({ + 'package.json': JSON.stringify({ bin: 'y' }), + })) + t.has(content, { bin: undefined }) + }) + + t.test('remove non-object bin', async t => { + const { content } = await testNormalize(t, ({ + 'package.json': JSON.stringify({ bin: 1234 }), + })) + t.has(content, { bin: undefined }) + }) -t.test('dedupe optional deps out of regular deps', async t => { - t.test('choose optional deps in conflict, removing empty dependencies', async t => { - const { content } = await pkg.normalize(t.testdir({ - 'package.json': JSON.stringify({ - optionalDependencies: { - whowins: '1.2.3-optional', + t.test('remove non-string bin values', async t => { + const { content } = await testNormalize(t, ({ + 'package.json': JSON.stringify({ bin: { + x: 'y', + y: 1234, + z: { a: 'b' }, + } }), + })) + t.strictSame(content.bin, { x: 'y' }) + }) + }) + + t.test('dedupe optional deps out of regular deps', async t => { + t.test('choose optional deps in conflict, removing empty dependencies', async t => { + const { content } = await testNormalize(t, ({ + 'package.json': JSON.stringify({ + optionalDependencies: { + whowins: '1.2.3-optional', + }, + dependencies: { + whowins: '1.2.3-prod', + }, + }), + })) + t.has(content, { dependencies: undefined }) + t.strictSame(content.optionalDependencies, { whowins: '1.2.3-optional' }) + }) + + t.test('choose optional deps in conflict, leaving populated dependencies', 
async t => { + const { content } = await testNormalize(t, ({ + 'package.json': JSON.stringify({ + optionalDependencies: { + whowins: '1.2.3-optional', + }, + dependencies: { + otherdep: '1.0.0', + whowins: '1.2.3-prod', + }, + }), + })) + t.strictSame(content.dependencies, { otherdep: '1.0.0' }) + t.strictSame(content.optionalDependencies, { whowins: '1.2.3-optional' }) + }) + + t.test('do not create regular deps if only optional specified', async t => { + const { content } = await testNormalize(t, ({ + 'package.json': JSON.stringify({ + optionalDependencies: { + whowins: '1.2.3-optional', + }, + }), + })) + t.has(content, { dependencies: undefined }) + t.strictSame(content.optionalDependencies, { whowins: '1.2.3-optional' }) + }) + }) + + t.test('set _id if name and version set', async t => { + const { content } = await testNormalize(t, ({ + 'package.json': JSON.stringify({ name: 'a', version: '1.2.3' }), + })) + t.equal(content._id, 'a@1.2.3') + }) + + t.test('preserve indentation', async t => { + const obj = { + name: 'object', + version: '1.2.3', + } + const path = t.testdir({ + none: { + 'package.json': JSON.stringify(obj), }, - dependencies: { - whowins: '1.2.3-prod', + twospace: { + 'package.json': JSON.stringify(obj, null, 2), }, - }), - })) - t.has(content, { dependencies: undefined }) - t.strictSame(content.optionalDependencies, { whowins: '1.2.3-optional' }) - }) - - t.test('choose optional deps in conflict, leaving populated dependencies', async t => { - const { content } = await pkg.normalize(t.testdir({ - 'package.json': JSON.stringify({ - optionalDependencies: { - whowins: '1.2.3-optional', + tab: { + 'package.json': JSON.stringify(obj, null, '\t'), }, - dependencies: { - otherdep: '1.0.0', - whowins: '1.2.3-prod', + weird: { + 'package.json': JSON.stringify(obj, null, ' \t \t '), }, - }), - })) - t.strictSame(content.dependencies, { otherdep: '1.0.0' }) - t.strictSame(content.optionalDependencies, { whowins: '1.2.3-optional' }) - }) - - t.test('do not create regular deps if only optional specified', async t => { - const { content } = await pkg.normalize(t.testdir({ - 'package.json': JSON.stringify({ - optionalDependencies: { - whowins: '1.2.3-optional', + winEol: { + none: { + 'package.json': JSON.stringify(obj).replace(/\n/g, '\r\n'), + }, + twospace: { + 'package.json': JSON.stringify(obj, null, 2).replace(/\n/g, '\r\n'), + }, + tab: { + 'package.json': JSON.stringify(obj, null, '\t').replace(/\n/g, '\r\n'), + }, + weird: { + 'package.json': JSON.stringify(obj, null, ' \t \t ').replace(/\n/g, '\r\n'), + }, }, - }), - })) - t.has(content, { dependencies: undefined }) - t.strictSame(content.optionalDependencies, { whowins: '1.2.3-optional' }) - }) -}) - -t.test('set _id if name and version set', async t => { - const { content } = await pkg.normalize(t.testdir({ - 'package.json': JSON.stringify({ name: 'a', version: '1.2.3' }), - })) - t.equal(content._id, 'a@1.2.3') -}) - -t.test('preserve indentation', async t => { - const obj = { - name: 'object', - version: '1.2.3', - } - const path = t.testdir({ - none: { - 'package.json': JSON.stringify(obj), - }, - twospace: { - 'package.json': JSON.stringify(obj, null, 2), - }, - tab: { - 'package.json': JSON.stringify(obj, null, '\t'), - }, - weird: { - 'package.json': JSON.stringify(obj, null, ' \t \t '), - }, - winEol: { - none: { - 'package.json': JSON.stringify(obj).replace(/\n/g, '\r\n'), - }, - twospace: { - 'package.json': JSON.stringify(obj, null, 2).replace(/\n/g, '\r\n'), - }, - tab: { - 'package.json': JSON.stringify(obj, 
null, '\t').replace(/\n/g, '\r\n'), - }, - weird: { - 'package.json': JSON.stringify(obj, null, ' \t \t ').replace(/\n/g, '\r\n'), - }, - }, - doubleSpaced: { - none: { - 'package.json': JSON.stringify(obj).replace(/\n/g, '\n\n'), - }, - twospace: { - 'package.json': JSON.stringify(obj, null, 2).replace(/\n/g, '\n\n'), - }, - tab: { - 'package.json': JSON.stringify(obj, null, '\t').replace(/\n/g, '\n\n'), - }, - weird: { - 'package.json': JSON.stringify(obj, null, ' \t \t ').replace(/\n/g, '\n\n'), - }, - }, - doubleWin: { - none: { - 'package.json': JSON.stringify(obj).replace(/\n/g, '\r\n\r\n'), - }, - twospace: { - 'package.json': JSON.stringify(obj, null, 2).replace(/\n/g, '\r\n\r\n'), - }, - tab: { - 'package.json': JSON.stringify(obj, null, '\t').replace(/\n/g, '\r\n\r\n'), - }, - weird: { - 'package.json': JSON.stringify(obj, null, ' \t \t ').replace(/\n/g, '\r\n\r\n'), - }, - }, - }) - const i = Symbol.for('indent') - const n = Symbol.for('newline') - t.equal((await pkg.normalize(`${path}/none`)).content[i], '') - t.equal((await pkg.normalize(`${path}/none`)).content[n], '') - t.equal((await pkg.normalize(`${path}/twospace`)).content[i], ' ') - t.equal((await pkg.normalize(`${path}/twospace`)).content[n], '\n') - t.equal((await pkg.normalize(`${path}/tab`)).content[i], '\t') - t.equal((await pkg.normalize(`${path}/tab`)).content[n], '\n') - t.equal((await pkg.normalize(`${path}/weird`)).content[i], ' \t \t ') - t.equal((await pkg.normalize(`${path}/weird`)).content[n], '\n') - t.equal((await pkg.normalize(`${path}/winEol/none`)).content[i], '') - t.equal((await pkg.normalize(`${path}/winEol/none`)).content[n], '') - t.equal((await pkg.normalize(`${path}/winEol/twospace`)).content[i], ' ') - t.equal((await pkg.normalize(`${path}/winEol/twospace`)).content[n], '\r\n') - t.equal((await pkg.normalize(`${path}/winEol/tab`)).content[i], '\t') - t.equal((await pkg.normalize(`${path}/winEol/tab`)).content[n], '\r\n') - t.equal((await pkg.normalize(`${path}/winEol/weird`)).content[i], ' \t \t ') - t.equal((await pkg.normalize(`${path}/winEol/weird`)).content[n], '\r\n') - t.equal((await pkg.normalize(`${path}/doubleSpaced/none`)).content[i], '') - t.equal((await pkg.normalize(`${path}/doubleSpaced/none`)).content[n], '') - t.equal((await pkg.normalize(`${path}/doubleSpaced/twospace`)).content[i], ' ') - t.equal((await pkg.normalize(`${path}/doubleSpaced/twospace`)).content[n], '\n\n') - t.equal((await pkg.normalize(`${path}/doubleSpaced/tab`)).content[i], '\t') - t.equal((await pkg.normalize(`${path}/doubleSpaced/tab`)).content[n], '\n\n') - t.equal((await pkg.normalize(`${path}/doubleSpaced/weird`)).content[i], ' \t \t ') - t.equal((await pkg.normalize(`${path}/doubleSpaced/weird`)).content[n], '\n\n') - t.equal((await pkg.normalize(`${path}/doubleWin/none`)).content[i], '') - t.equal((await pkg.normalize(`${path}/doubleWin/none`)).content[n], '') - t.equal((await pkg.normalize(`${path}/doubleWin/twospace`)).content[i], ' ') - t.equal((await pkg.normalize(`${path}/doubleWin/twospace`)).content[n], '\r\n\r\n') - t.equal((await pkg.normalize(`${path}/doubleWin/tab`)).content[i], '\t') - t.equal((await pkg.normalize(`${path}/doubleWin/tab`)).content[n], '\r\n\r\n') - t.equal((await pkg.normalize(`${path}/doubleWin/weird`)).content[i], ' \t \t ') - t.equal((await pkg.normalize(`${path}/doubleWin/weird`)).content[n], '\r\n\r\n') -}) + doubleSpaced: { + none: { + 'package.json': JSON.stringify(obj).replace(/\n/g, '\n\n'), + }, + twospace: { + 'package.json': JSON.stringify(obj, null, 
2).replace(/\n/g, '\n\n'), + }, + tab: { + 'package.json': JSON.stringify(obj, null, '\t').replace(/\n/g, '\n\n'), + }, + weird: { + 'package.json': JSON.stringify(obj, null, ' \t \t ').replace(/\n/g, '\n\n'), + }, + }, + doubleWin: { + none: { + 'package.json': JSON.stringify(obj).replace(/\n/g, '\r\n\r\n'), + }, + twospace: { + 'package.json': JSON.stringify(obj, null, 2).replace(/\n/g, '\r\n\r\n'), + }, + tab: { + 'package.json': JSON.stringify(obj, null, '\t').replace(/\n/g, '\r\n\r\n'), + }, + weird: { + 'package.json': JSON.stringify(obj, null, ' \t \t ').replace(/\n/g, '\r\n\r\n'), + }, + }, + }) + const i = Symbol.for('indent') + const n = Symbol.for('newline') + t.equal((await pkg.normalize(`${path}/none`)).content[i], '') + t.equal((await pkg.normalize(`${path}/none`)).content[n], '') + t.equal((await pkg.normalize(`${path}/twospace`)).content[i], ' ') + t.equal((await pkg.normalize(`${path}/twospace`)).content[n], '\n') + t.equal((await pkg.normalize(`${path}/tab`)).content[i], '\t') + t.equal((await pkg.normalize(`${path}/tab`)).content[n], '\n') + t.equal((await pkg.normalize(`${path}/weird`)).content[i], ' \t \t ') + t.equal((await pkg.normalize(`${path}/weird`)).content[n], '\n') + t.equal((await pkg.normalize(`${path}/winEol/none`)).content[i], '') + t.equal((await pkg.normalize(`${path}/winEol/none`)).content[n], '') + t.equal((await pkg.normalize(`${path}/winEol/twospace`)).content[i], ' ') + t.equal((await pkg.normalize(`${path}/winEol/twospace`)).content[n], '\r\n') + t.equal((await pkg.normalize(`${path}/winEol/tab`)).content[i], '\t') + t.equal((await pkg.normalize(`${path}/winEol/tab`)).content[n], '\r\n') + t.equal((await pkg.normalize(`${path}/winEol/weird`)).content[i], ' \t \t ') + t.equal((await pkg.normalize(`${path}/winEol/weird`)).content[n], '\r\n') + t.equal((await pkg.normalize(`${path}/doubleSpaced/none`)).content[i], '') + t.equal((await pkg.normalize(`${path}/doubleSpaced/none`)).content[n], '') + t.equal((await pkg.normalize(`${path}/doubleSpaced/twospace`)).content[i], ' ') + t.equal((await pkg.normalize(`${path}/doubleSpaced/twospace`)).content[n], '\n\n') + t.equal((await pkg.normalize(`${path}/doubleSpaced/tab`)).content[i], '\t') + t.equal((await pkg.normalize(`${path}/doubleSpaced/tab`)).content[n], '\n\n') + t.equal((await pkg.normalize(`${path}/doubleSpaced/weird`)).content[i], ' \t \t ') + t.equal((await pkg.normalize(`${path}/doubleSpaced/weird`)).content[n], '\n\n') + t.equal((await pkg.normalize(`${path}/doubleWin/none`)).content[i], '') + t.equal((await pkg.normalize(`${path}/doubleWin/none`)).content[n], '') + t.equal((await pkg.normalize(`${path}/doubleWin/twospace`)).content[i], ' ') + t.equal((await pkg.normalize(`${path}/doubleWin/twospace`)).content[n], '\r\n\r\n') + t.equal((await pkg.normalize(`${path}/doubleWin/tab`)).content[i], '\t') + t.equal((await pkg.normalize(`${path}/doubleWin/tab`)).content[n], '\r\n\r\n') + t.equal((await pkg.normalize(`${path}/doubleWin/weird`)).content[i], ' \t \t ') + t.equal((await pkg.normalize(`${path}/doubleWin/weird`)).content[n], '\r\n\r\n') + }) -t.test('strip _fields', async t => { - const { content } = await pkg.normalize(t.testdir({ - 'package.json': JSON.stringify({ - name: 'underscore', - version: '1.2.3', - _lodash: true, - }), - })) - t.has(content, { _lodash: undefined }) -}) + t.test('strip _fields', async t => { + const { content } = await testNormalize(t, ({ + 'package.json': JSON.stringify({ + name: 'underscore', + version: '1.2.3', + _lodash: true, + }), + })) + t.has(content, { 
_lodash: undefined }) + }) -// For now this is just checking one of the many side effects of -// npm-normalize-package-bin so we're sure it got called -t.test('normalize bin', async t => { - const { content } = await pkg.normalize(t.testdir({ - 'package.json': JSON.stringify({ - bin: false, - }), - })) - t.has(content, { bin: undefined }) -}) + // For now this is just checking one of the many side effects of + // npm-normalize-package-bin so we're sure it got called + t.test('normalize bin', async t => { + const { content } = await testNormalize(t, ({ + 'package.json': JSON.stringify({ + bin: false, + }), + })) + t.has(content, { bin: undefined }) + }) -t.test('skipping steps', async t => { - const packageJson = { - _lodash: true, - dependencies: { a: '' }, - optionalDependencies: { a: '' }, - bundledDependencies: true, - funding: 'just a string', - scripts: { test: './node_modules/.bin/test' }, - bin: { a: ['invalid array'] }, - } - const { content } = await pkg.normalize(t.testdir({ - 'package.json': JSON.stringify(packageJson), - }), { steps: [] }) - t.strictSame(content, packageJson) - t.has(content, { - bundleDependencies: undefined, - _id: undefined, + t.test('skipping steps', async t => { + if (isLegacy) { + return t.skip('rpj does not have configurable steps') + } + const packageJson = { + _lodash: true, + dependencies: { a: '' }, + optionalDependencies: { a: '' }, + bundledDependencies: true, + funding: 'just a string', + scripts: { test: './node_modules/.bin/test' }, + bin: { a: ['invalid array'] }, + } + const { content } = await testNormalize(t, ({ + 'package.json': JSON.stringify(packageJson), + }), { steps: [] }) + t.strictSame(content, packageJson) + t.has(content, { + bundleDependencies: undefined, + _id: undefined, + }) + }) }) -}) +} diff --git a/test/prepare.js b/test/prepare.js index c73db79..4f8414e 100644 --- a/test/prepare.js +++ b/test/prepare.js @@ -1,486 +1,570 @@ const t = require('tap') const pkg = require('../') +const { join } = require('path') + +const testMethods = { + '@npmcli/package-json': async (t, testdir, { dir = (v) => v, ...opts } = {}) => { + const p = t.testdir(testdir) + const logs = [] + const logger = (...a) => logs.push(a.slice(2)) + process.addListener('log', logger) + t.teardown(() => process.removeListener('log', logger)) + if (opts.root === true) { + opts.root = dir(p) + } + const res = await pkg.prepare(dir(p), Object.keys(opts).length ? 
opts : undefined) + res.logs = logs + return res + }, + 'read-package-json': (t, testdir, { dir = (v) => v } = {}) => { + const p = t.testdir(testdir) + const rpj = t.mock('read-package-json') // reset rpj caches + const logs = [] + const logger = (...a) => logs.push(a.slice(1)) + return new Promise((res, rej) => { + rpj(join(dir(p), 'package.json'), logger, (err, content) => { + if (err) { + rej(err) + } else { + res({ content, logs }) + } + }) + }) + }, +} -t.test('errors for bad/missing data', async t => { - t.test('invalid version', t => - t.rejects(pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({ - version: 'not semver', - }), - })), { message: 'Invalid version' })) - - t.test('non-string main entry', t => - t.rejects(pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({ - main: ['this is not a thing'], - }), - })), { name: 'TypeError' })) -}) - -t.test('strip underscores', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({ - name: 'underscore', - version: '1.2.3', - _lodash: true, - }), - })) - t.has(content, { _lodash: undefined }) -}) - -t.test('bin', t => { - t.test('non-string', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({ - bin: { key: {} }, - }), - })) - t.has(content, { bin: undefined }) - }) - - t.test('good', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({ - name: 'bin-test', - bin: './bin/echo', - }), - bin: { echo: '#!/bin/sh\n\necho "hello world"' }, - })) - t.strictSame(content.bin, { 'bin-test': 'bin/echo' }) - }) - - t.test('missing', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({ - name: 'bin-test', - bin: './bin/missing', - }), - })) - t.strictSame(content.bin, {}) - }) - - t.test('empty', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({ - name: 'bin-test', - bin: {}, - }), - })) - t.has(content, { bin: undefined }) - }) - - t.test('directories.bin no prefix', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({ - name: 'bin-test', - directories: { - bin: './bin', - }, - }), - bin: { echo: '#!/bin/sh\n\necho "hello world"' }, - })) - t.strictSame(content.bin, { echo: 'bin/echo' }) - }) - - t.test('directories.bin trim prefix', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({ - name: 'bin-test', - directories: { - bin: '../../../../../bin', - }, - }), - bin: { echo: '#!/bin/sh\n\necho "hello world"' }, - })) - t.strictSame(content.bin, { echo: 'bin/echo' }) - }) - - t.end() -}) - -t.test('bundleDependencies', t => { - t.test('true', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({ - dependencies: { a: '' }, - bundleDependencies: true, - }), - })) - t.strictSame(content.bundleDependencies, ['a']) - }) - - // t.test('null', async t => { - // const { content } = await pkg.prepare(t.testdir({ - // 'package.json': JSON.stringify({ - // dependencies: { a: '' }, - // bundleDependencies: null - // }), - // })) - // t.has(content, { bundleDependencies: undefined }) - // }) - - t.test('false', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({ - dependencies: { a: '' }, - bundleDependencies: false, - }), - })) - t.has(content, { bundleDependencies: undefined }) - }) +for (const [name, testPrepare] of 
Object.entries(testMethods)) { + const isLegacy = name === 'read-package-json' + t.test(name, async t => { + t.test('errors for bad/missing data', async t => { + t.test('invalid version', t => + t.rejects(testPrepare(t, ({ + 'package.json': JSON.stringify({ + version: 'not semver', + }), + })), { message: 'Invalid version' })) + + t.test('non-string main entry', t => + t.rejects(testPrepare(t, ({ + 'package.json': JSON.stringify({ + main: ['this is not a thing'], + }), + })), { name: 'TypeError' })) + }) - t.test('rename bundledDependencies', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({ - dependencies: { a: '', b: '' }, - devDependencies: { c: '' }, - bundledDependencies: ['a', 'b', 'c'], - }), - })) - t.has(content, { bundledDependencies: undefined }) - t.strictSame(content.bundleDependencies, ['a', 'b', 'c']) - }) - t.end() -}) - -t.test('server.js', t => { - t.test('adds if missing', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({}), - 'server.js': 'a file that exists', - })) - t.strictSame(content.scripts, { start: 'node server.js' }) - }) - t.test('keeps existing', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({ - scripts: { - start: 'something else', - }, - }), - 'server.js': 'a file that exists', - })) - t.strictSame(content.scripts, { start: 'something else' }) - }) - t.end() -}) - -t.test('gypfile', t => { - t.test('with install', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({ - scripts: { install: 'existing script' }, - }), - 'test.gyp': 'a file that exists', - })) - t.strictSame(content.scripts.install, 'existing script') - }) - t.test('with preinstall', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({ - scripts: { preinstall: 'existing script' }, - }), - 'test.gyp': 'a file that exists', - })) - t.has(content.scripts, { install: undefined }) - t.strictSame(content.scripts, { preinstall: 'existing script' }) - }) - t.test('no other scripts', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({}), - 'test.gyp': 'a file that exists', - })) - t.strictSame(content.scripts, { install: 'node-gyp rebuild' }) - }) - t.end() -}) - -t.test('authors', t => { - t.test('contributors already exists', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({ - contributors: 'exists', - }), - AUTHORS: 'name from authors file', - })) - t.strictSame(content.contributors, 'exists') - }) - t.test('contributors does not exist', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({ - }), - AUTHORS: 'name from authors file', - })) - t.strictSame(content.contributors, [{ name: 'name from authors file' }]) - }) - t.end() -}) - -t.test('readme', t => { - t.test('already exists', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({ - readme: 'a file that exists', - }), - 'README.md': 'readme file', - })) - t.strictSame(content.readme, 'a file that exists') - }) + t.test('strip underscores', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({ + name: 'underscore', + version: '1.2.3', + _lodash: true, + }), + })) + t.has(content, { _lodash: undefined }) + }) - t.test('no readme at all', async t => { - const { content } = await 
pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({}), - })) - t.match(content.readme, /No README/) - }) + t.test('bin', t => { + t.test('non-string', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({ + bin: { key: {}, n: 123 }, + }), + })) + t.has(content, { bin: undefined }) + }) + + t.test('good', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({ + name: 'bin-test', + bin: './bin/echo', + }), + bin: { echo: '#!/bin/sh\n\necho "hello world"' }, + })) + t.strictSame(content.bin, { 'bin-test': 'bin/echo' }) + }) + + t.test('missing', async t => { + const { content, logs } = await testPrepare(t, ({ + 'package.json': JSON.stringify({ + name: 'bin-test', + bin: './bin/missing', + }), + })) + t.strictSame(content.bin, { 'bin-test': 'bin/missing' }) + const binLog = logs.find((l) => l[1].includes('bin file')) + t.strictSame(binLog, ['bin-test@', 'No bin file found at bin/missing']) + }) + + t.test('empty', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({ + name: 'bin-test', + bin: {}, + }), + })) + t.has(content, { bin: undefined }) + }) + + t.test('directories.bin no prefix', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({ + name: 'bin-test', + directories: { + bin: './bin', + }, + }), + bin: { echo: '#!/bin/sh\n\necho "hello world"' }, + })) + t.strictSame(content.bin, { echo: 'bin/echo' }) + }) + + t.test('directories.bin trim prefix', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({ + name: 'bin-test', + directories: { + bin: '../../../../../bin', + }, + }), + bin: { echo: '#!/bin/sh\n\necho "hello world"' }, + })) + t.strictSame(content.bin, { echo: 'bin/echo' }) + }) + + t.test('directories.bin with bin', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({ + name: 'bin-test', + directories: { + bin: './bin', + }, + bin: { + echo: './bin/echo', + }, + }), + bin: { + echo: '#!/bin/sh\n\necho "hello world"', + echo2: '#!/bin/sh\n\necho "hello world2"', + }, + })) + t.strictSame(content.bin, { echo: 'bin/echo' }) + }) + + t.end() + }) - t.test('finds .md file', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({}), - 'README.md': 'readme file', - })) - t.strictSame(content.readme, 'readme file') - }) + t.test('bundleDependencies', t => { + t.test('true', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({ + dependencies: { a: '' }, + bundleDependencies: true, + }), + })) + t.strictSame(content.bundleDependencies, ['a']) + }) + + t.test('null', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({ + dependencies: { a: '' }, + bundleDependencies: null, + }), + })) + t.has(content, { bundleDependencies: undefined }) + }) + + t.test('false', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({ + dependencies: { a: '' }, + bundleDependencies: false, + }), + })) + t.has(content, { bundleDependencies: undefined }) + }) + + t.test('rename bundledDependencies', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({ + dependencies: { a: '', b: '' }, + devDependencies: { c: '' }, + bundledDependencies: ['a', 'b', 'c'], + }), + })) + t.has(content, { bundledDependencies: undefined }) + t.strictSame(content.bundleDependencies, 
['a', 'b', 'c']) + }) + t.end() + }) - t.test('finds README file', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({}), - README: 'readme file', - })) - t.strictSame(content.readme, 'readme file') - }) + t.test('server.js', t => { + t.test('adds if missing', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({}), + 'server.js': 'a file that exists', + })) + t.strictSame(content.scripts, { start: 'node server.js' }) + }) + t.test('keeps existing', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({ + scripts: { + start: 'something else', + }, + }), + 'server.js': 'a file that exists', + })) + t.strictSame(content.scripts, { start: 'something else' }) + }) + t.end() + }) - t.test('ignores directory', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({}), - 'README.md': {}, - })) - t.match(content.readme, /No README/) - }) + t.test('gypfile', t => { + t.test('with install', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({ + scripts: { install: 'existing script' }, + }), + 'test.gyp': 'a file that exists', + })) + t.strictSame(content.scripts.install, 'existing script') + }) + t.test('with preinstall', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({ + scripts: { preinstall: 'existing script' }, + }), + 'test.gyp': 'a file that exists', + })) + t.has(content.scripts, { install: undefined }) + t.strictSame(content.scripts, { preinstall: 'existing script' }) + }) + t.test('no other scripts', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({}), + 'test.gyp': 'a file that exists', + })) + t.strictSame(content.scripts, { install: 'node-gyp rebuild' }) + }) + t.end() + }) - t.test('ignores non-md', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({}), - README: 'no extension', - 'README.txt': 'txt file', - })) - t.strictSame(content.readme, 'no extension') - }) - t.end() -}) - -t.test('man', t => { - t.test('resolves directory', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({ - directories: { man: './man' }, - }), - man: { man1: { 'test.1': 'man test file' } }, - })) - t.strictSame(content.man, ['man/man1/test.1']) - }) - t.end() -}) - -t.test('gitHead', t => { - t.test('HEAD with no ref', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({}), - '.git': { HEAD: 'testgitref' }, - })) - t.strictSame(content.gitHead, 'testgitref') - }) + t.test('authors', t => { + t.test('contributors already exists', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({ + contributors: 'exists', + }), + AUTHORS: 'name from authors file', + })) + t.strictSame(content.contributors, 'exists') + }) + t.test('contributors does not exist', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({ + }), + AUTHORS: 'name from authors file', + })) + t.strictSame(content.contributors, [{ name: 'name from authors file' }]) + }) + t.end() + }) - t.test('HEAD with ref', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({}), - '.git': { - HEAD: 'ref: testgitref', - testgitref: 'filegitref', - }, - })) - t.strictSame(content.gitHead, 'filegitref') - }) + 
t.test('readme', t => { + t.test('already exists', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({ + readme: 'a file that exists', + }), + 'README.md': 'readme file', + })) + t.strictSame(content.readme, 'a file that exists') + }) + + t.test('no readme at all', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({}), + })) + t.match(content.readme, /No README/) + }) + + t.test('finds .md file', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({}), + 'README.md': 'readme file', + })) + t.strictSame(content.readme, 'readme file') + }) + + t.test('finds README file', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({}), + README: 'readme file', + })) + t.strictSame(content.readme, 'readme file') + }) + + t.test('ignores directory', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({}), + 'README.md': {}, + })) + t.match(content.readme, /No README/) + }) + + t.test('ignores non-md', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({}), + README: 'no extension', + 'README.txt': 'txt file', + })) + t.strictSame(content.readme, 'no extension') + }) + t.end() + }) - t.test('HEAD with valid packed ref', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({}), - '.git': { - HEAD: 'ref: testgitref', - 'packed-refs': `${'a'.repeat(40)} testgitref`, - }, - })) - t.strictSame(content.gitHead, 'a'.repeat(40)) - }) + t.test('man', t => { + t.test('resolves directory', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({ + directories: { man: './man' }, + }), + man: { man1: { 'test.1': 'man test file' } }, + })) + t.strictSame(content.man, ['man/man1/test.1']) + }) + t.end() + }) - t.test('HEAD with empty packed ref', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({}), - '.git': { - HEAD: 'ref: testgitref', - 'packed-refs': '', - }, - })) - t.has(content, { gitHead: undefined }) - }) + t.test('gitHead', t => { + t.test('HEAD with no ref', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({}), + '.git': { HEAD: 'testgitref' }, + })) + t.strictSame(content.gitHead, 'testgitref') + }) + + t.test('HEAD with ref', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({}), + '.git': { + HEAD: 'ref: testgitref', + testgitref: 'filegitref', + }, + })) + t.strictSame(content.gitHead, 'filegitref') + }) + + t.test('HEAD with valid packed ref', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({}), + '.git': { + HEAD: 'ref: testgitref', + 'packed-refs': `${'a'.repeat(40)} testgitref`, + }, + })) + t.strictSame(content.gitHead, 'a'.repeat(40)) + }) + + t.test('HEAD with empty packed ref', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({}), + '.git': { + HEAD: 'ref: testgitref', + 'packed-refs': '', + }, + })) + t.has(content, { gitHead: undefined }) + }) + + t.test('HEAD with unparseable packed ref', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({}), + '.git': { + HEAD: 'ref: testgitref', + 'packed-refs': 'not sure what this is', + }, + })) + t.has(content, { gitHead: undefined }) + }) + + t.test('HEAD with ref in 
subdir', async t => { + const { content } = await testPrepare(t, ({ + sub: { 'package.json': JSON.stringify({}) }, + '.git': { + HEAD: 'ref: testgitref', + testgitref: 'filegitref', + }, + }), { dir: (p) => join(p, 'sub') }) + t.strictSame(content.gitHead, 'filegitref') + }) + + t.test('HEAD with ref in subdir but stop at root', async t => { + const { content } = await testPrepare(t, ({ + sub: { 'package.json': JSON.stringify({}) }, + '.git': { + HEAD: 'ref: testgitref', + testgitref: 'filegitref', + }, + }), { dir: (p) => join(p, 'sub'), root: true }) + // rpj has no way prevent walking up all directories + t.strictSame(content.gitHead, isLegacy ? 'filegitref' : undefined) + }) + t.end() + }) - t.test('HEAD with unparseable packed ref', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({}), - '.git': { - HEAD: 'ref: testgitref', - 'packed-refs': 'not sure what this is', - }, - })) - t.has(content, { gitHead: undefined }) - }) - t.end() -}) + t.test('fillTypes', t => { + t.test('custom main field', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({ + main: './custom-path.js', + }), + 'custom-path.d.ts': 'a file that exists', + })) + t.strictSame(content.types, './custom-path.d.ts') + }) + + t.test('inferred index.js', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({}), + 'index.d.ts': 'a file that exists', + })) + t.strictSame(content.types, './index.d.ts') + }) + + t.test('subpaths and starting with ./', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({ + main: './a/b/c.js', + }), + a: { b: { + 'c.d.ts': 'a file that exists', + 'c.js': 'another file that exists', + } }, + })) + t.strictSame(content.types, './a/b/c.d.ts') + }) + + t.test('existing types', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({ + types: '@types/express', + }), + 'index.d.ts': 'a file that exists', + })) + t.strictSame(content.types, '@types/express') + }) + + t.test('no types present', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({}), + })) + t.has(content, { type: undefined }) + }) + + // eslint-disable-next-line max-len + // https://nodejs.org/api/esm.html#esm_writing_dual_packages_while_avoiding_or_minimizing_hazards + + t.skip('handles esm modules', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({}), + 'exports.json': JSON.stringify({ + type: 'module', + exports: { + '.': './a/b/c.js', + './a': './a.mjs', + }, + }), + a: { b: { + 'c.d.ts': 'a file that exists', + 'c.js': 'another file that exists', + } }, + })) + t.strictSame(content.types, './a/b/c/d.ts') + }) + t.skip('handles esm modules with sugared exports', async t => { + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify({}), + 'sugar.json': JSON.stringify({ + exports: './a/b.js', + }), + a: { + 'b.d.ts': 'a file that exists', + 'b.js': 'another file that exists', + }, + })) + t.strictSame(content.types, './a/b/c/d.ts') + }) + t.end() + }) -t.test('fillTypes', t => { - t.test('custom main field', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({ + t.test('skipping steps', async t => { + if (isLegacy) { + return t.skip('rpj does not have configurable steps') + } + const packageJson = { + scripts: { test: './node_modules/.bin/test' }, main: './custom-path.js', - }), - 
'custom-path.d.ts': 'a file that exists', - })) - t.strictSame(content.types, './custom-path.d.ts') - }) - - t.test('inferred index.js', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({}), - 'index.d.ts': 'a file that exists', - })) - t.strictSame(content.types, './index.d.ts') - }) - - t.test('subpaths and starting with ./', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({ - main: './a/b/c.js', - }), - a: { b: { - 'c.d.ts': 'a file that exists', - 'c.js': 'another file that exists', - } }, - })) - t.strictSame(content.types, './a/b/c.d.ts') - }) - - t.test('existing types', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({ - types: '@types/express', - }), - 'index.d.ts': 'a file that exists', - })) - t.strictSame(content.types, '@types/express') - }) - - t.test('no types present', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({}), - })) - t.has(content, { type: undefined }) - }) - - // https://nodejs.org/api/esm.html#esm_writing_dual_packages_while_avoiding_or_minimizing_hazards - - t.skip('handles esm modules', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({}), - 'exports.json': JSON.stringify({ - type: 'module', - exports: { - '.': './a/b/c.js', - './a': './a.mjs', + bin: { + foo: ['invalid'], + bar: './nonexistent', }, - }), - a: { b: { - 'c.d.ts': 'a file that exists', - 'c.js': 'another file that exists', - } }, - })) - t.strictSame(content.types, './a/b/c/d.ts') - }) - t.skip('handles esm modules with sugared exports', async t => { - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify({}), - 'sugar.json': JSON.stringify({ - exports: './a/b.js', - }), - a: { - 'b.d.ts': 'a file that exists', - 'b.js': 'another file that exists', - }, - })) - t.strictSame(content.types, './a/b/c/d.ts') - }) - t.end() -}) - -t.test('skipping steps', async t => { - const packageJson = { - scripts: { test: './node_modules/.bin/test' }, - main: './custom-path.js', - bin: { - foo: ['invalid'], - bar: './nonexistent', - }, - directories: { - man: './man', - bin: './bin', - }, - } - const { content } = await pkg.prepare(t.testdir({ - 'package.json': JSON.stringify(packageJson), - 'build.gyp': '', - 'server.js': '', - AUTHORS: 'me', - man: { man1: { 'test.1': 'man test file' } }, - bin: { echo: '#!/bin/sh\n\necho "hello world"' }, - '.git': { HEAD: 'testgitref' }, - 'custom-path.d.ts': 'a file that exists', - }), { steps: [] }) - t.strictSame(content, packageJson) - t.has(content, { - // _id and normalizeData both do this one - _id: undefined, - authors: undefined, - bundleDependencies: undefined, - man: undefined, - readme: undefined, - gitHead: undefined, - types: undefined, - }) - t.has(content.scripts, { - install: undefined, - start: undefined, - }) -}) + directories: { + man: './man', + bin: './bin', + }, + } + const { content } = await testPrepare(t, ({ + 'package.json': JSON.stringify(packageJson), + 'build.gyp': '', + 'server.js': '', + AUTHORS: 'me', + man: { man1: { 'test.1': 'man test file' } }, + bin: { echo: '#!/bin/sh\n\necho "hello world"' }, + '.git': { HEAD: 'testgitref' }, + 'custom-path.d.ts': 'a file that exists', + }), { steps: [] }) + t.strictSame(content, packageJson) + t.has(content, { + // _id and normalizeData both do this one + _id: undefined, + authors: undefined, + bundleDependencies: undefined, + 
man: undefined, + readme: undefined, + gitHead: undefined, + types: undefined, + }) + t.has(content.scripts, { + install: undefined, + start: undefined, + }) + }) -t.test('parseIndex', t => { - t.test('no files at all', t => - t.rejects(pkg.prepare(t.testdir({})), { code: 'ENOENT', message: /package.json/ })) + t.test('parseIndex', t => { + t.test('no files at all', t => + t.rejects(testPrepare(t, ({})), { code: 'ENOENT', message: /package.json/ })) - t.test('index.js present but empty', t => - t.rejects(pkg.prepare(t.testdir({ - 'index.js': 'no comments here', - })), { code: 'ENOENT', message: /package.json/ })) + t.test('index.js present but empty', t => + t.rejects(testPrepare(t, ({ + 'index.js': 'no comments here', + })), { code: 'ENOENT', message: /package.json/ })) - t.test('index.js present but invalid', t => - t.rejects(pkg.prepare(t.testdir({ - 'index.js': `console.log("I don't close my comment") + t.test('index.js present but invalid', t => + t.rejects(testPrepare(t, ({ + 'index.js': `console.log("I don't close my comment") /**package { }`, - })), { code: 'ENOENT', message: /package.json/ })) + })), { code: 'ENOENT', message: /package.json/ })) - t.test('parseable index.js', async t => { - const parsed = await pkg.prepare(t.testdir({ - 'index.js': `console.log('i am a package!') + t.test('parseable index.js', async t => { + const parsed = await testPrepare(t, ({ + '.git': { HEAD: 'testgitref' }, + 'index.js': `console.log('i am a package!') /**package { "name": "from-index", @@ -488,17 +572,24 @@ t.test('parseIndex', t => { "description": "Package that is just an index.js" } **/`, - })) - t.strictSame(parsed.content, { - _id: 'from-index@1.0.0', - name: 'from-index', - version: '1.0.0', - description: 'Package that is just an index.js', - readme: 'ERROR: No README data found!', - }) - await t.rejects(parsed.save(), { - message: /No package.json/, + })) + t.strictSame(parsed.content, { + _id: 'from-index@1.0.0', + name: 'from-index', + version: '1.0.0', + description: 'Package that is just an index.js', + readme: 'ERROR: No README data found!', + gitHead: 'testgitref', + }) + if (isLegacy) { + t.skip('rpj does not save files') + } else { + await t.rejects(parsed.save(), { + message: /No package.json/, + }) + } + }) + t.end() }) }) - t.end() -}) +}
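One behavioural note worth calling out from the `binRefs` change in lib/normalize.js above: bin entries that point at missing files are no longer deleted, they are kept and a warning is emitted through proc-log (with the XXX comment deferring actual deletion to a possible future breaking change). The sketch below shows one way a consumer could surface those warnings, mirroring the `process.addListener('log', ...)` pattern used by the test/prepare.js harness above; the package path is hypothetical.

```js
const pkg = require('@npmcli/package-json')

const main = async () => {
  // proc-log emits its messages as 'log' events on the global process object,
  // which is exactly what the test harness above listens for.
  const onLog = (level, ...args) => {
    if (level === 'warn') {
      // e.g. args === ['package-json', 'bin-test@', 'No bin file found at bin/missing']
      console.error('warning:', ...args)
    }
  }
  process.on('log', onLog)
  try {
    const { content } = await pkg.prepare('/path/to/some-package')
    // Any unreadable bin entries are still present in content.bin here;
    // they are only warned about, not removed.
    console.log(content.bin)
  } finally {
    process.removeListener('log', onLog)
  }
}

main()
```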