Skip to content

Commit

Permalink
publish refactor
Browse files Browse the repository at this point in the history
  • Loading branch information
claudiahdz committed Mar 10, 2020
1 parent 03b7428 commit 0b58a78
Show file tree
Hide file tree
Showing 3 changed files with 178 additions and 232 deletions.
107 changes: 4 additions & 103 deletions lib/pack.js
Original file line number Diff line number Diff line change
Expand Up @@ -9,15 +9,14 @@ module.exports = pack

const BB = require('bluebird')

const byteSize = require('byte-size')
const cacache = require('cacache')
const columnify = require('columnify')
const cp = require('child_process')
const deprCheck = require('./utils/depr-check')
const fpm = require('./fetch-package-metadata')
const fs = require('graceful-fs')
const install = require('./install')
const lifecycle = BB.promisify(require('./utils/lifecycle'))
// Tarball-content helpers; fixed: the original line dropped the `require`
// call (destructuring a bare string yields undefined for both names) and
// mangled the relative path ('/.utils' -> './utils'). Matches the form
// used by lib/publish.js.
const { getTarContents, logTarContents } = require('./utils/tar-contents')
const log = require('npmlog')
const move = require('move-concurrently')
const npm = require('./npm')
Expand All @@ -33,7 +32,6 @@ const pinflight = require('promise-inflight')
const readJson = BB.promisify(require('read-package-json'))
const tar = require('tar')
const packlist = require('npm-packlist')
const ssri = require('ssri')

pack.usage = 'npm pack [[<@scope>/]<pkg>...] [--dry-run]'

Expand All @@ -55,7 +53,7 @@ function pack (args, silent, cb) {
if (!silent && npm.config.get('json')) {
output(JSON.stringify(tarballs, null, 2))
} else if (!silent) {
tarballs.forEach(logContents)
tarballs.forEach(logTarContents)
output(tarballs.map((f) => path.relative(cwd, f.filename)).join('\n'))
}
return tarballs
Expand Down Expand Up @@ -97,7 +95,7 @@ function packFromPackage (arg, target, filename) {
return pacote.extract(arg, tmpTarget, opts)
.then(() => readJson(path.join(tmpTarget, 'package.json')))
}))
.then((pkg) => getContents(pkg, target, filename))
.then((pkg) => getTarContents(pkg, target, filename))
}

module.exports.prepareDirectory = prepareDirectory
Expand Down Expand Up @@ -156,7 +154,7 @@ function packDirectory (mani, dir, target, filename, logIt, dryRun) {
// specifically with @ signs, so we just neutralize that one
// and any such future "features" by prepending `./`
.then((files) => tar.create(tarOpt, files.map((f) => `./${f}`)))
.then(() => getContents(pkg, tmpTarget, filename, logIt))
.then(() => getTarContents(pkg, tmpTarget, filename, logIt))
// thread the content info through
.tap(() => {
if (dryRun) {
Expand All @@ -170,103 +168,6 @@ function packDirectory (mani, dir, target, filename, logIt, dryRun) {
})
}

module.exports.logContents = logContents
// Pretty-print a packed-tarball summary (the object built by getContents)
// to the npm log at `notice` level: a package header, the file listing,
// any bundled dependencies, and a details table (sizes, shasum, integrity,
// entry counts).
function logContents (tarball) {
  const header = `${npm.config.get('unicode') ? '📦 ' : 'package:'} ${tarball.name}@${tarball.version}`
  log.notice('')
  log.notice('', header)
  log.notice('=== Tarball Contents ===')
  if (tarball.files.length) {
    // One row per file, with its size rendered in human-friendly units.
    const fileRows = tarball.files.map((f) => {
      const bytes = byteSize(f.size)
      return {path: f.path, size: `${bytes.value}${bytes.unit}`}
    })
    log.notice('', columnify(fileRows, {
      include: ['size', 'path'],
      showHeaders: false
    }))
  }
  if (tarball.bundled.length) {
    log.notice('=== Bundled Dependencies ===')
    for (const name of tarball.bundled) {
      log.notice('', name)
    }
  }
  log.notice('=== Tarball Details ===')
  const integrity = tarball.integrity.toString()
  const hasBundled = tarball.bundled.length
  // Falsy rows (absent filename, zero bundled deps) are filtered out below.
  const detailRows = [
    {name: 'name:', value: tarball.name},
    {name: 'version:', value: tarball.version},
    tarball.filename && {name: 'filename:', value: tarball.filename},
    {name: 'package size:', value: byteSize(tarball.size)},
    {name: 'unpacked size:', value: byteSize(tarball.unpackedSize)},
    {name: 'shasum:', value: tarball.shasum},
    {
      name: 'integrity:',
      // Elide the middle of the (long) integrity string for display.
      value: integrity.substr(0, 20) + '[...]' + integrity.substr(80)},
    hasBundled && {name: 'bundled deps:', value: tarball.bundled.length},
    hasBundled && {name: 'bundled files:', value: tarball.entryCount - tarball.files.length},
    hasBundled && {name: 'own files:', value: tarball.files.length},
    {name: 'total files:', value: tarball.entryCount}
  ].filter((row) => row)
  log.notice('', columnify(detailRows, {
    include: ['name', 'value'],
    showHeaders: false
  }))
  log.notice('', '')
}

module.exports.getContents = getContents
// Inspect a packed tarball on disk and build the summary object that
// `npm pack` / `npm publish` report (and that logContents renders).
//
// pkg      - parsed package.json of the packed package
// target   - filesystem path of the .tgz to inspect
// filename - display filename attached to the result as-is
// silent   - not read anywhere in this body; presumably kept for
//            call-site symmetry — TODO confirm against callers
//
// Resolves to { id, name, version, from, size, unpackedSize, shasum,
// integrity, filename, files, entryCount, bundled }.
function getContents (pkg, target, filename, silent) {
  // Dependency names the package asks to bundle (both spellings accepted).
  const bundledWanted = new Set(
    pkg.bundleDependencies ||
    pkg.bundledDependencies ||
    []
  )
  const files = []
  const bundled = new Set()
  let totalEntries = 0
  let totalEntrySize = 0
  // Pass 1: list the tarball, tallying every entry and splitting entries
  // into the package's own files vs. bundled deps under node_modules/.
  return tar.t({
    file: target,
    onentry (entry) {
      totalEntries++
      totalEntrySize += entry.size
      const p = entry.path
      if (p.startsWith('package/node_modules/')) {
        // Extract the dependency name, including an @scope/ prefix if any.
        const name = p.match(/^package\/node_modules\/((?:@[^/]+\/)?[^/]+)/)[1]
        if (bundledWanted.has(name)) {
          bundled.add(name)
        }
      } else {
        files.push({
          // Drop the leading "package/" so paths read as in the project.
          path: entry.path.replace(/^package\//, ''),
          size: entry.size,
          mode: entry.mode
        })
      }
    },
    strip: 1
  })
  // Pass 2 (in parallel): size of the tarball on disk, and its sha1 +
  // sha512 digests computed from a single streaming read.
  .then(() => BB.all([
    BB.fromNode((cb) => fs.stat(target, cb)),
    ssri.fromStream(fs.createReadStream(target), {
      algorithms: ['sha1', 'sha512']
    })
  ]))
  .then(([stat, integrity]) => {
    // sha1 becomes the legacy hex "shasum"; sha512 is the integrity value.
    const shasum = integrity['sha1'][0].hexDigest()
    return {
      id: pkg._id,
      name: pkg.name,
      version: pkg.version,
      from: pkg._from,
      size: stat.size,
      unpackedSize: totalEntrySize,
      shasum,
      integrity: ssri.parse(integrity['sha512'][0]),
      filename,
      files,
      entryCount: totalEntries,
      bundled: Array.from(bundled)
    }
  })
}

const PASSTHROUGH_OPTS = [
'always-auth',
'auth-type',
Expand Down
195 changes: 66 additions & 129 deletions lib/publish.js
Original file line number Diff line number Diff line change
@@ -1,167 +1,104 @@
'use strict'

const BB = require('bluebird')

const util = require('util')
const log = require('npmlog')
const semver = require('semver')
const pacote = require('pacote')
const cacache = require('cacache')
const figgyPudding = require('figgy-pudding')
const libpub = require('libnpmpublish').publish
const libunpub = require('libnpmpublish').unpublish
const lifecycle = BB.promisify(require('./utils/lifecycle.js'))
const log = require('npmlog')
const npa = require('npm-package-arg')
const npmConfig = require('./config/figgy-config.js')

const npm = require('./npm.js')
const output = require('./utils/output.js')
const otplease = require('./utils/otplease.js')
const pack = require('./pack')
const { tarball, extract } = require('pacote')
const path = require('path')
const readFileAsync = BB.promisify(require('graceful-fs').readFile)
const readJson = BB.promisify(require('read-package-json'))
const semver = require('semver')
const statAsync = BB.promisify(require('graceful-fs').stat)

publish.usage = 'npm publish [<tarball>|<folder>] [--tag <tag>] [--access <public|restricted>] [--dry-run]' +
const readJson = util.promisify(require('read-package-json'))
const lifecycle = util.promisify(require('./utils/lifecycle.js'))
const { getTarContents, logTarContents } = require('./utils/tar-contents.js')

publish.usage = 'npm publish [<folder>] [--tag <tag>] [--access <public|restricted>] [--dry-run]' +
"\n\nPublishes '.' if no argument supplied" +
'\n\nSets tag `latest` if no --tag specified'

// Shell-completion hook for `npm publish`. Completing to a folder with a
// package.json, a tarball, or a tarball url would all make sense here,
// but none of that is implemented yet — just invoke the callback with
// no suggestions.
publish.completion = function (opts, cb) {
  return cb()
}

const PublishConfig = figgyPudding({
const publishConfig = () => ({
dryRun: 'dry-run',
'dry-run': { default: false },
force: { default: false },
json: { default: false },
Promise: { default: () => Promise },
tag: { default: 'latest' },
tmp: {}
tag: 'defaultTag',
'dry-run': false,
defaultTag: 'latest',
json: false,
tmp: {},
...npm.flatOptions
})

module.exports = publish
function publish (args, isRetry, cb) {
if (typeof cb !== 'function') {
cb = isRetry
isRetry = false
}
function publish (args, cb) {
if (args.length === 0) args = ['.']
if (args.length !== 1) return cb(publish.usage)

log.verbose('publish', args)
log.verbose('publish', args)

const opts = PublishConfig(npmConfig())
const t = opts.tag.trim()
const opts = publishConfig()
const t = opts.defaultTag.trim()
if (semver.validRange(t)) {
return cb(new Error('Tag name must not be a valid SemVer range: ' + t))
}

return publish_(args[0], opts)
.then((tarball) => {
.then(tarball => {
const silent = log.level === 'silent'
if (!silent && opts.json) {
output(JSON.stringify(tarball, null, 2))
} else if (!silent) {
output(`+ ${tarball.id}`)
}
})
.nodeify(cb)
}

function publish_ (arg, opts) {
return statAsync(arg).then((stat) => {
if (stat.isDirectory()) {
return stat
} else {
const err = new Error('not a directory')
err.code = 'ENOTDIR'
throw err
}
}).then(() => {
return publishFromDirectory(arg, opts)
}, (err) => {
if (err.code !== 'ENOENT' && err.code !== 'ENOTDIR') {
throw err
} else {
return publishFromPackage(arg, opts)
}
})
.then(cb)
}

function publishFromDirectory (arg, opts) {
// All this readJson is because any of the given scripts might modify the
async function publish_ (arg, opts) {
// all this readJson is because any of the given scripts might modify the
// package.json in question, so we need to refresh after every step.
let contents
return pack.prepareDirectory(arg).then(() => {
return readJson(path.join(arg, 'package.json'))
}).then((pkg) => {
return lifecycle(pkg, 'prepublishOnly', arg)
}).then(() => {
return readJson(path.join(arg, 'package.json'))
}).then((pkg) => {
return cacache.tmp.withTmp(opts.tmp, {tmpPrefix: 'fromDir'}, (tmpDir) => {
const target = path.join(tmpDir, 'package.tgz')
return pack.packDirectory(pkg, arg, target, null, true)
.tap((c) => { contents = c })
.then((c) => !opts.json && pack.logContents(c))
.then(() => upload(pkg, false, target, opts))
})
}).then(() => {
return readJson(path.join(arg, 'package.json'))
}).tap((pkg) => {
return lifecycle(pkg, 'publish', arg)
}).tap((pkg) => {
return lifecycle(pkg, 'postpublish', arg)
})
.then(() => contents)
}
let manifest = await readJson(`${arg}/package.json`)
let pkgContents

if (npm.config.get('ignore-prepublish')) {
// just prepare
await lifecycle(manifest, 'prepare', arg)
} else {
// prepublish & prepare
await lifecycle(manifest, 'prepublish', arg)
await lifecycle(manifest, 'prepare', arg)
}

function publishFromPackage (arg, opts) {
return cacache.tmp.withTmp(opts.tmp, {tmpPrefix: 'fromPackage'}, tmp => {
const extracted = path.join(tmp, 'package')
const target = path.join(tmp, 'package.json')
return tarball.toFile(arg, target, opts)
.then(() => extract(arg, extracted, opts))
.then(() => readJson(path.join(extracted, 'package.json')))
.then((pkg) => {
return BB.resolve(pack.getContents(pkg, target))
.tap((c) => !opts.json && pack.logContents(c))
.tap(() => upload(pkg, false, target, opts))
})
// prepublishOnly
await lifecycle(manifest, 'prepublishOnly', arg)

// package and display contents
await cacache.tmp.withTmp(opts.tmp, { tmpPrefix: 'fromDir' }, async (tmpDir) => {
manifest = await readJson(`${arg}/package.json`)
const filename = `${manifest.name}-${manifest.version}.tgz`
const tmpTarget = `${tmpDir}/${filename}`
// pack tarball
await pacote.tarball.file(`file:${arg}`, tmpTarget)
pkgContents = await getTarContents(manifest, tmpTarget, filename)
})
}

function upload (pkg, isRetry, cached, opts) {
if (!opts.dryRun) {
return readFileAsync(cached).then(tarball => {
return otplease(opts, opts => {
return libpub(pkg, tarball, opts)
}).catch(err => {
if (
err.code === 'EPUBLISHCONFLICT' &&
opts.force &&
!isRetry
) {
log.warn('publish', 'Forced publish over ' + pkg._id)
return otplease(opts, opts => libunpub(
npa.resolve(pkg.name, pkg.version), opts
)).finally(() => {
// ignore errors. Use the force. Reach out with your feelings.
return otplease(opts, opts => {
return upload(pkg, true, tarball, opts)
}).catch(() => {
// but if it fails again, then report the first error.
throw err
})
})
} else {
throw err
}
})
})
} else {
return opts.Promise.resolve(true)
if (!opts.json) {
logTarContents(pkgContents)
}

try {
if (!opts.dryRun) {
await otplease(opts, opts => libpub(arg, manifest, opts))
}
} catch (err) {
throw err
}

manifest = await readJson(`${arg}/package.json`)
// publish
await lifecycle(manifest, 'publish', arg)
// postpublish
await lifecycle(manifest, 'postpublish', arg)

return pkgContents
}
Loading

0 comments on commit 0b58a78

Please sign in to comment.