deps: @npmcli/package-json@6.1.1
wraithgar committed Jan 21, 2025
1 parent 18e0449 commit 7ddfbad
Showing 9 changed files with 283 additions and 47 deletions.
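For context, @npmcli/package-json 6.1.1 inlines the parts of normalize-package-data it still needs into a new lib/normalize-data.js, drops normalize-package-data from its dependencies (adding validate-npm-package-license directly), and fixes save() to refresh its cached file content after a write. A minimal sketch of how the package is typically consumed, based on its documented API; the project directory is hypothetical:

const PackageJson = require('@npmcli/package-json')

async function example () {
  // load() reads and parses <dir>/package.json
  const pkg = await PackageJson.load('/path/to/project')

  // update() merges changes into the in-memory manifest
  pkg.update({ description: 'An example package' })

  // save() serializes and writes only if the content actually changed
  await pkg.save()
}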
4 changes: 2 additions & 2 deletions DEPENDENCIES.json
@@ -52,7 +52,6 @@
"@npmcli/promise-spawn",
"npm-install-checks",
"npm-bundled",
"normalize-package-data",
"@npmcli/fs",
"unique-filename",
"npm-packlist",
@@ -61,7 +60,8 @@
"nopt",
"parse-conflict-json",
"read-package-json-fast",
"read"
"read",
"normalize-package-data"
],
[
"@npmcli/eslint-config",
5 changes: 2 additions & 3 deletions DEPENDENCIES.md
@@ -218,7 +218,6 @@ graph LR;
npmcli-mock-registry-->pacote;
npmcli-package-json-->hosted-git-info;
npmcli-package-json-->json-parse-even-better-errors;
npmcli-package-json-->normalize-package-data;
npmcli-package-json-->npmcli-git["@npmcli/git"];
npmcli-package-json-->proc-log;
npmcli-package-json-->semver;
@@ -660,10 +659,10 @@ graph LR;
npmcli-package-json-->glob;
npmcli-package-json-->hosted-git-info;
npmcli-package-json-->json-parse-even-better-errors;
npmcli-package-json-->normalize-package-data;
npmcli-package-json-->npmcli-git["@npmcli/git"];
npmcli-package-json-->proc-log;
npmcli-package-json-->semver;
npmcli-package-json-->validate-npm-package-license;
npmcli-promise-spawn-->which;
npmcli-query-->postcss-selector-parser;
npmcli-run-script-->node-gyp;
@@ -783,5 +782,5 @@ packages higher up the chain.
- @npmcli/package-json, npm-registry-fetch
- @npmcli/git, make-fetch-happen
- @npmcli/smoke-tests, npm-pick-manifest, @npmcli/installed-package-contents, cacache, promzard
- @npmcli/docs, npm-package-arg, @npmcli/promise-spawn, npm-install-checks, npm-bundled, normalize-package-data, @npmcli/fs, unique-filename, npm-packlist, @npmcli/mock-globals, bin-links, nopt, parse-conflict-json, read-package-json-fast, read
- @npmcli/docs, npm-package-arg, @npmcli/promise-spawn, npm-install-checks, npm-bundled, @npmcli/fs, unique-filename, npm-packlist, @npmcli/mock-globals, bin-links, nopt, parse-conflict-json, read-package-json-fast, read, normalize-package-data
- @npmcli/eslint-config, @npmcli/template-oss, ignore-walk, semver, hosted-git-info, proc-log, validate-npm-package-name, which, ini, npm-normalize-package-bin, json-parse-even-better-errors, @npmcli/node-gyp, ssri, unique-slug, @npmcli/redact, @npmcli/agent, minipass-fetch, @npmcli/name-from-folder, @npmcli/query, cmd-shim, read-cmd-shim, write-file-atomic, abbrev, proggy, minify-registry-metadata, mute-stream, npm-audit-report, npm-user-validate
4 changes: 3 additions & 1 deletion node_modules/@npmcli/package-json/lib/index.js
@@ -252,7 +252,9 @@ class PackageJson {
.replace(/\n/g, eol)

if (fileContent.trim() !== this.#readFileContent.trim()) {
return await writeFile(this.filename, fileContent)
const written = await writeFile(this.filename, fileContent)
this.#readFileContent = fileContent
return written
}
}

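The hunk above makes save() record the content it just wrote as the new baseline for change detection. A hedged sketch of the resulting behavior (the directory path is hypothetical):

const PackageJson = require('@npmcli/package-json')

async function bump (dir) {
  const pkg = await PackageJson.load(dir)
  pkg.update({ version: '2.0.0' })
  await pkg.save() // writes, and now also refreshes the cached file content
  await pkg.save() // content matches the refreshed cache, so nothing is rewritten
}

Previously the second save() compared against the content read at load() time, so it would rewrite identical bytes to disk.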
257 changes: 257 additions & 0 deletions node_modules/@npmcli/package-json/lib/normalize-data.js
@@ -0,0 +1,257 @@
// Originally normalize-package-data

const url = require('node:url')
const hostedGitInfo = require('hosted-git-info')
const validateLicense = require('validate-npm-package-license')

const typos = {
dependancies: 'dependencies',
dependecies: 'dependencies',
depdenencies: 'dependencies',
devEependencies: 'devDependencies',
depends: 'dependencies',
'dev-dependencies': 'devDependencies',
devDependences: 'devDependencies',
devDepenencies: 'devDependencies',
devdependencies: 'devDependencies',
repostitory: 'repository',
repo: 'repository',
prefereGlobal: 'preferGlobal',
hompage: 'homepage',
hampage: 'homepage',
autohr: 'author',
autor: 'author',
contributers: 'contributors',
publicationConfig: 'publishConfig',
script: 'scripts',
}

const isEmail = str => str.includes('@') && (str.indexOf('@') < str.lastIndexOf('.'))

// Extracts description from contents of a readme file in markdown format
function extractDescription (description) {
// the first block of text before the first heading that isn't the first line heading
const lines = description.trim().split('\n')
let start = 0
// skip initial empty lines and lines that start with #
while (lines[start]?.trim().match(/^(#|$)/)) {
start++
}
let end = start + 1
// keep going till we get to the end or an empty line
while (end < lines.length && lines[end].trim()) {
end++
}
return lines.slice(start, end).join(' ').trim()
}

function stringifyPerson (person) {
if (typeof person !== 'string') {
const name = person.name || ''
const u = person.url || person.web
const wrappedUrl = u ? (' (' + u + ')') : ''
const e = person.email || person.mail
const wrappedEmail = e ? (' <' + e + '>') : ''
person = name + wrappedEmail + wrappedUrl
}
const matchedName = person.match(/^([^(<]+)/)
const matchedUrl = person.match(/\(([^()]+)\)/)
const matchedEmail = person.match(/<([^<>]+)>/)
const parsed = {}
if (matchedName?.[0].trim()) {
parsed.name = matchedName[0].trim()
}
if (matchedEmail) {
parsed.email = matchedEmail[1]
}
if (matchedUrl) {
parsed.url = matchedUrl[1]
}
return parsed
}

function normalizeData (data, changes) {
// fixDescriptionField
if (data.description && typeof data.description !== 'string') {
changes?.push(`'description' field should be a string`)
delete data.description
}
if (data.readme && !data.description && data.readme !== 'ERROR: No README data found!') {
data.description = extractDescription(data.readme)
}
if (data.description === undefined) {
delete data.description
}
if (!data.description) {
changes?.push('No description')
}

// fixModulesField
if (data.modules) {
changes?.push(`modules field is deprecated`)
delete data.modules
}

// fixFilesField
const files = data.files
if (files && !Array.isArray(files)) {
changes?.push(`Invalid 'files' member`)
delete data.files
} else if (data.files) {
data.files = data.files.filter(function (file) {
if (!file || typeof file !== 'string') {
changes?.push(`Invalid filename in 'files' list: ${file}`)
return false
} else {
return true
}
})
}

// fixManField
if (data.man && typeof data.man === 'string') {
data.man = [data.man]
}

// fixBugsField
if (!data.bugs && data.repository?.url) {
const hosted = hostedGitInfo.fromUrl(data.repository.url)
if (hosted && hosted.bugs()) {
data.bugs = { url: hosted.bugs() }
}
} else if (data.bugs) {
if (typeof data.bugs === 'string') {
if (isEmail(data.bugs)) {
data.bugs = { email: data.bugs }
/* eslint-disable-next-line node/no-deprecated-api */
} else if (url.parse(data.bugs).protocol) {
data.bugs = { url: data.bugs }
} else {
changes?.push(`Bug string field must be url, email, or {email,url}`)
}
} else {
for (const k in data.bugs) {
if (['web', 'name'].includes(k)) {
changes?.push(`bugs['${k}'] should probably be bugs['url'].`)
data.bugs.url = data.bugs[k]
delete data.bugs[k]
}
}
const oldBugs = data.bugs
data.bugs = {}
if (oldBugs.url) {
/* eslint-disable-next-line node/no-deprecated-api */
if (typeof (oldBugs.url) === 'string' && url.parse(oldBugs.url).protocol) {
data.bugs.url = oldBugs.url
} else {
changes?.push('bugs.url field must be a string url. Deleted.')
}
}
if (oldBugs.email) {
if (typeof (oldBugs.email) === 'string' && isEmail(oldBugs.email)) {
data.bugs.email = oldBugs.email
} else {
changes?.push('bugs.email field must be a string email. Deleted.')
}
}
}
if (!data.bugs.email && !data.bugs.url) {
delete data.bugs
changes?.push('Normalized value of bugs field is an empty object. Deleted.')
}
}
// fixKeywordsField
if (typeof data.keywords === 'string') {
data.keywords = data.keywords.split(/,\s+/)
}
if (data.keywords && !Array.isArray(data.keywords)) {
delete data.keywords
changes?.push(`keywords should be an array of strings`)
} else if (data.keywords) {
data.keywords = data.keywords.filter(function (kw) {
if (typeof kw !== 'string' || !kw) {
changes?.push(`keywords should be an array of strings`)
return false
} else {
return true
}
})
}
// fixBundleDependenciesField
const bdd = 'bundledDependencies'
const bd = 'bundleDependencies'
if (data[bdd] && !data[bd]) {
data[bd] = data[bdd]
delete data[bdd]
}
if (data[bd] && !Array.isArray(data[bd])) {
changes?.push(`Invalid 'bundleDependencies' list. Must be array of package names`)
delete data[bd]
} else if (data[bd]) {
data[bd] = data[bd].filter(function (filtered) {
if (!filtered || typeof filtered !== 'string') {
changes?.push(`Invalid bundleDependencies member: ${filtered}`)
return false
} else {
if (!data.dependencies) {
data.dependencies = {}
}
if (!Object.prototype.hasOwnProperty.call(data.dependencies, filtered)) {
changes?.push(`Non-dependency in bundleDependencies: ${filtered}`)
data.dependencies[filtered] = '*'
}
return true
}
})
}
// fixHomepageField
if (!data.homepage && data.repository && data.repository.url) {
const hosted = hostedGitInfo.fromUrl(data.repository.url)
if (hosted) {
data.homepage = hosted.docs()
}
}
if (data.homepage) {
if (typeof data.homepage !== 'string') {
changes?.push('homepage field must be a string url. Deleted.')
delete data.homepage
} else {
/* eslint-disable-next-line node/no-deprecated-api */
if (!url.parse(data.homepage).protocol) {
data.homepage = 'http://' + data.homepage
}
}
}
// fixReadmeField
if (!data.readme) {
changes?.push('No README data')
data.readme = 'ERROR: No README data found!'
}
// fixLicenseField
const license = data.license || data.licence
if (!license) {
changes?.push('No license field.')
} else if (typeof (license) !== 'string' || license.length < 1 || license.trim() === '') {
changes?.push('license should be a valid SPDX license expression')
} else if (!validateLicense(license).validForNewPackages) {
changes?.push('license should be a valid SPDX license expression')
}
// fixPeople
if (data.author) {
data.author = stringifyPerson(data.author)
}
['maintainers', 'contributors'].forEach(function (set) {
if (!Array.isArray(data[set])) {
return
}
data[set] = data[set].map(stringifyPerson)
})
// fixTypos
for (const d in typos) {
if (Object.prototype.hasOwnProperty.call(data, d)) {
changes?.push(`${d} should probably be ${typos[d]}.`)
}
}
}

module.exports = { normalizeData }
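The module above reimplements, locally, the subset of normalize-package-data fixers that normalize.js still runs for the normalizeData step. A hedged sketch of calling it directly — normalize-data.js is an internal file, so the require path below is an assumption that only holds if the package does not restrict subpath access:

const { normalizeData } = require('@npmcli/package-json/lib/normalize-data.js')

const data = {
  name: 'example',
  version: '1.0.0',
  bugs: 'support@example.com', // a string email is normalized to { email }
  keywords: 'one, two',        // a comma-separated string is split into an array
  contributers: [],            // this typo is reported via the changes array
}

const changes = []
normalizeData(data, changes)

console.log(data.bugs)     // { email: 'support@example.com' }
console.log(data.keywords) // [ 'one', 'two' ]
console.log(changes)       // includes 'contributers should probably be contributors.' among other notes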
28 changes: 3 additions & 25 deletions node_modules/@npmcli/package-json/lib/normalize.js
@@ -348,7 +348,6 @@ const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase })
changes?.push(`"readmeFilename" was set to ${readmeFile}`)
}
if (!data.readme) {
// this.warn('missingReadme')
data.readme = 'ERROR: No README data found!'
}
}
@@ -488,7 +487,6 @@ const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase })
// Some steps are isolated so we can do a limited subset of these in `fix`
if (steps.includes('fixRepositoryField') || steps.includes('normalizeData')) {
if (data.repositories) {
/* eslint-disable-next-line max-len */
changes?.push(`"repository" was set to the first entry in "repositories" (${data.repository})`)
data.repository = data.repositories[0]
}
@@ -572,30 +570,10 @@ const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase })
}
}

// TODO some of this is duplicated in other steps here, a future breaking change may be able to remove the duplicates involved in this step
if (steps.includes('normalizeData')) {
const legacyFixer = require('normalize-package-data/lib/fixer.js')
const legacyMakeWarning = require('normalize-package-data/lib/make_warning.js')
legacyFixer.warn = function () {
changes?.push(legacyMakeWarning.apply(null, arguments))
}

const legacySteps = [
'fixDescriptionField',
'fixModulesField',
'fixFilesField',
'fixManField',
'fixBugsField',
'fixKeywordsField',
'fixBundleDependenciesField',
'fixHomepageField',
'fixReadmeField',
'fixLicenseField',
'fixPeople',
'fixTypos',
]
for (const legacyStep of legacySteps) {
legacyFixer[legacyStep](data)
}
const { normalizeData } = require('./normalize-data.js')
normalizeData(data, changes)
}

// Warn if the bin references don't point to anything. This might be better
12 changes: 6 additions & 6 deletions node_modules/@npmcli/package-json/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/package-json",
"version": "6.1.0",
"version": "6.1.1",
"description": "Programmatic API to update package.json",
"keywords": [
"npm",
@@ -33,13 +33,13 @@
"glob": "^10.2.2",
"hosted-git-info": "^8.0.0",
"json-parse-even-better-errors": "^4.0.0",
"normalize-package-data": "^7.0.0",
"proc-log": "^5.0.0",
"semver": "^7.5.3"
"semver": "^7.5.3",
"validate-npm-package-license": "^3.0.4"
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.0",
"@npmcli/template-oss": "4.23.5",
"@npmcli/eslint-config": "^5.1.0",
"@npmcli/template-oss": "4.23.6",
"read-package-json": "^7.0.0",
"read-package-json-fast": "^4.0.0",
"tap": "^16.0.1"
@@ -49,7 +49,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"version": "4.23.5",
"version": "4.23.6",
"publish": "true"
},
"tap": {
