'use strict'

// npm pack <pkg>
// Packs the specified package into a .tgz file, which can then
// be installed.
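//
// For example (illustrative): running `npm pack` in a package directory, or
// `npm pack <name>@<version>` for a registry spec, writes a tarball named
// <name>-<version>.tgz into the current working directory; scoped names have
// the leading `@` dropped and `/` replaced with `-`.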

const BB = require('bluebird')

const byteSize = require('byte-size')
const cacache = require('cacache')
const columnify = require('columnify')
const cp = require('child_process')
const deprCheck = require('./utils/depr-check')
const fpm = require('./fetch-package-metadata')
const fs = require('graceful-fs')
const install = require('./install')
const lifecycle = BB.promisify(require('./utils/lifecycle'))
const log = require('npmlog')
const move = require('move-concurrently')
const npm = require('./npm')
const npmConfig = require('./config/figgy-config.js')
const output = require('./utils/output')
const pacote = require('pacote')
const path = require('path')
const PassThrough = require('stream').PassThrough
const pathIsInside = require('path-is-inside')
const pipe = BB.promisify(require('mississippi').pipe)
const prepublishWarning = require('./utils/warn-deprecated')('prepublish-on-install')
const pinflight = require('promise-inflight')
const readJson = BB.promisify(require('read-package-json'))
const tar = require('tar')
const packlist = require('npm-packlist')
const ssri = require('ssri')

pack.usage = 'npm pack [[<@scope>/]<pkg>...] [--dry-run]'

// if it can be installed, it can be packed.
pack.completion = install.completion

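// Command entry point: packs each requested spec (defaulting to the current
// directory), then reports the results either as JSON (--json) or as a
// human-readable contents summary plus the relative tarball paths, and hands
// the result array to the callback.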
module.exports = pack
function pack (args, silent, cb) {
  const cwd = process.cwd()
  if (typeof cb !== 'function') {
    cb = silent
    silent = false
  }

  if (args.length === 0) args = ['.']

  BB.all(
    args.map((arg) => pack_(arg, cwd))
  ).then((tarballs) => {
    if (!silent && npm.config.get('json')) {
      output(JSON.stringify(tarballs, null, 2))
    } else if (!silent) {
      tarballs.forEach(logContents)
      output(tarballs.map((f) => path.relative(cwd, f.filename)).join('\n'))
    }
    return tarballs
  }).nodeify(cb)
}

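// Packs a single spec. Directory specs are run through the prepare/prepack
// lifecycle via prepareDirectory()/packDirectory(); anything else is fetched
// with pacote. The target filename is <name>-<version>.tgz, with the scope
// marker flattened for scoped packages. With --dry-run, the tarball is
// written to a temp directory and never lands in the working directory.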
function pack_ (pkg, dir) {
  return BB.fromNode((cb) => fpm(pkg, dir, cb)).then((mani) => {
    let name = mani.name[0] === '@'
    // scoped packages get special treatment
      ? mani.name.substr(1).replace(/\//g, '-')
      : mani.name
    const target = `${name}-${mani.version}.tgz`
    return pinflight(target, () => {
      const dryRun = npm.config.get('dry-run')
      if (mani._requested.type === 'directory') {
        return prepareDirectory(mani._resolved)
          .then(() => {
            return packDirectory(mani, mani._resolved, target, target, true, dryRun)
          })
      } else if (dryRun) {
        log.verbose('pack', '--dry-run mode enabled. Skipping write.')
        return cacache.tmp.withTmp(npm.tmp, {tmpPrefix: 'packing'}, (tmp) => {
          const tmpTarget = path.join(tmp, path.basename(target))
          return packFromPackage(pkg, tmpTarget, target)
        })
      } else {
        return packFromPackage(pkg, target, target)
      }
    })
  })
}

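// Fetches an already-packable spec (registry, git, remote, etc.) straight to
// `target` as a tarball, then extracts it into a temp directory just to read
// its package.json so getContents() can build the summary.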
function packFromPackage (arg, target, filename) {
  const opts = npmConfig()
  return pacote.tarball.toFile(arg, target, opts)
    .then(() => cacache.tmp.withTmp(npm.tmp, {tmpPrefix: 'unpacking'}, (tmp) => {
      const tmpTarget = path.join(tmp, filename)
      return pacote.extract(arg, tmpTarget, opts)
        .then(() => readJson(path.join(tmpTarget, 'package.json')))
    }))
    .then((pkg) => getContents(pkg, target, filename))
}

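// Validates package.json (name and version are required) and, unless the
// directory lives inside npm's own temp area, warns about deprecated
// `prepublish` scripts and runs the prepublish/prepare lifecycle.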
module.exports.prepareDirectory = prepareDirectory
function prepareDirectory (dir) {
  return readJson(path.join(dir, 'package.json')).then((pkg) => {
    if (!pkg.name) {
      throw new Error('package.json requires a "name" field')
    }
    if (!pkg.version) {
      throw new Error('package.json requires a valid "version" field')
    }
    if (!pathIsInside(dir, npm.tmp)) {
      if (pkg.scripts && pkg.scripts.prepublish) {
        prepublishWarning([
          'As of npm@5, `prepublish` scripts are deprecated.',
          'Use `prepare` for build steps and `prepublishOnly` for upload-only.',
          'See the deprecation note in `npm help scripts` for more information.'
        ])
      }
      if (npm.config.get('ignore-prepublish')) {
        return lifecycle(pkg, 'prepare', dir).then(() => pkg)
      } else {
        return lifecycle(pkg, 'prepublish', dir).then(() => {
          return lifecycle(pkg, 'prepare', dir)
        }).then(() => pkg)
      }
    }
    return pkg
  })
}

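// Creates the tarball for a directory: runs prepack, builds the file list
// with npm-packlist, writes a reproducible (portable, fixed-mtime) gzipped
// tar into a temp location, and moves it into place unless --dry-run is set.
// Runs postpack afterwards and resolves to the getContents() summary.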
module.exports.packDirectory = packDirectory
function packDirectory (mani, dir, target, filename, logIt, dryRun) {
  deprCheck(mani)
  return readJson(path.join(dir, 'package.json')).then((pkg) => {
    return lifecycle(pkg, 'prepack', dir)
  }).then(() => {
    return readJson(path.join(dir, 'package.json'))
  }).then((pkg) => {
    return cacache.tmp.withTmp(npm.tmp, {tmpPrefix: 'packing'}, (tmp) => {
      const tmpTarget = path.join(tmp, path.basename(target))

      const tarOpt = {
        file: tmpTarget,
        cwd: dir,
        prefix: 'package/',
        portable: true,
        // Provide a specific date in the 1980s for the benefit of zip,
        // which is confounded by files dated at the Unix epoch 0.
        mtime: new Date('1985-10-26T08:15:00.000Z'),
        gzip: true
      }

      return BB.resolve(packlist({ path: dir }))
        // NOTE: node-tar does some Magic Stuff depending on prefixes for files
        //       specifically with @ signs, so we just neutralize that one
        //       and any such future "features" by prepending `./`
        .then((files) => tar.create(tarOpt, files.map((f) => `./${f}`)))
        .then(() => getContents(pkg, tmpTarget, filename, logIt))
        // thread the content info through
        .tap(() => {
          if (dryRun) {
            log.verbose('pack', '--dry-run mode enabled. Skipping write.')
          } else {
            return move(tmpTarget, target, {Promise: BB, fs})
          }
        })
        .tap(() => lifecycle(pkg, 'postpack', dir))
    })
  })
}

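// Pretty-prints the summary object produced by getContents(): the file
// listing, any bundled dependencies, and the size/shasum/integrity details.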
module.exports.logContents = logContents
function logContents (tarball) {
  log.notice('')
  log.notice('', `${npm.config.get('unicode') ? '📦 ' : 'package:'} ${tarball.name}@${tarball.version}`)
  log.notice('=== Tarball Contents ===')
  if (tarball.files.length) {
    log.notice('', columnify(tarball.files.map((f) => {
      const bytes = byteSize(f.size)
      return {path: f.path, size: `${bytes.value}${bytes.unit}`}
    }), {
      include: ['size', 'path'],
      showHeaders: false
    }))
  }
  if (tarball.bundled.length) {
    log.notice('=== Bundled Dependencies ===')
    tarball.bundled.forEach((name) => log.notice('', name))
  }
  log.notice('=== Tarball Details ===')
  log.notice('', columnify([
    {name: 'name:', value: tarball.name},
    {name: 'version:', value: tarball.version},
    tarball.filename && {name: 'filename:', value: tarball.filename},
    {name: 'package size:', value: byteSize(tarball.size)},
    {name: 'unpacked size:', value: byteSize(tarball.unpackedSize)},
    {name: 'shasum:', value: tarball.shasum},
    {
      name: 'integrity:',
      value: tarball.integrity.toString().substr(0, 20) + '[...]' + tarball.integrity.toString().substr(80)},
    tarball.bundled.length && {name: 'bundled deps:', value: tarball.bundled.length},
    tarball.bundled.length && {name: 'bundled files:', value: tarball.entryCount - tarball.files.length},
    tarball.bundled.length && {name: 'own files:', value: tarball.files.length},
    {name: 'total files:', value: tarball.entryCount}
  ].filter((x) => x), {
    include: ['name', 'value'],
    showHeaders: false
  }))
  log.notice('', '')
}

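// Walks the tarball entries to collect file info, bundled dependency names,
// the entry count and unpacked size, then stats and hashes the tarball
// (sha1 shasum plus sha512 subresource integrity) and returns the summary
// object consumed by logContents() and the --json output.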
module.exports.getContents = getContents
function getContents (pkg, target, filename, silent) {
  const bundledWanted = new Set(
    pkg.bundleDependencies ||
    pkg.bundledDependencies ||
    []
  )
  const files = []
  const bundled = new Set()
  let totalEntries = 0
  let totalEntrySize = 0
  return tar.t({
    file: target,
    onentry (entry) {
      totalEntries++
      totalEntrySize += entry.size
      const p = entry.path
      if (p.startsWith('package/node_modules/')) {
        const name = p.match(/^package\/node_modules\/((?:@[^/]+\/)?[^/]+)/)[1]
        if (bundledWanted.has(name)) {
          bundled.add(name)
        }
      } else {
        files.push({
          path: entry.path.replace(/^package\//, ''),
          size: entry.size,
          mode: entry.mode
        })
      }
    },
    strip: 1
  })
    .then(() => BB.all([
      BB.fromNode((cb) => fs.stat(target, cb)),
      ssri.fromStream(fs.createReadStream(target), {
        algorithms: ['sha1', 'sha512']
      })
    ]))
    .then(([stat, integrity]) => {
      const shasum = integrity['sha1'][0].hexDigest()
      return {
        id: pkg._id,
        name: pkg.name,
        version: pkg.version,
        from: pkg._from,
        size: stat.size,
        unpackedSize: totalEntrySize,
        shasum,
        integrity: ssri.parse(integrity['sha512'][0]),
        filename,
        files,
        entryCount: totalEntries,
        bundled: Array.from(bundled)
      }
    })
}

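// Config options forwarded on the command line to the child `npm install`
// spawned by packGitDep() below.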
const PASSTHROUGH_OPTS = [
  'always-auth',
  'auth-type',
  'ca',
  'cafile',
  'cert',
  'git',
  'local-address',
  'maxsockets',
  'offline',
  'prefer-offline',
  'prefer-online',
  'proxy',
  'https-proxy',
  'registry',
  'send-metrics',
  'sso-poll-frequency',
  'sso-type',
  'strict-ssl'
]

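// Packs a git dependency. If the checkout declares a `prepare` script, a
// child npm install (including devDependencies) is run in it first so the
// prepare step can build whatever it needs; the directory is then packed via
// packDirectory() and the tarball is streamed back through a PassThrough.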
module.exports.packGitDep = packGitDep
function packGitDep (manifest, dir) {
  const stream = new PassThrough()
  readJson(path.join(dir, 'package.json')).then((pkg) => {
    if (pkg.scripts && pkg.scripts.prepare) {
      log.verbose('prepareGitDep', `${manifest._spec}: installing devDeps and running prepare script.`)
      const cliArgs = PASSTHROUGH_OPTS.reduce((acc, opt) => {
        if (npm.config.get(opt, 'cli') != null) {
          acc.push(`--${opt}=${npm.config.get(opt)}`)
        }
        return acc
      }, [])
      const child = cp.spawn(process.env.NODE || process.execPath, [
        require.resolve('../bin/npm-cli.js'),
        'install',
        '--dev',
        '--prod',
        '--ignore-prepublish',
        '--no-progress',
        '--no-save'
      ].concat(cliArgs), {
        cwd: dir,
        env: process.env
      })
      let errData = []
      let errDataLen = 0
      let outData = []
      let outDataLen = 0
      child.stdout.on('data', (data) => {
        outData.push(data)
        outDataLen += data.length
        log.gauge.pulse('preparing git package')
      })
      child.stderr.on('data', (data) => {
        errData.push(data)
        errDataLen += data.length
        log.gauge.pulse('preparing git package')
      })
      return BB.fromNode((cb) => {
        child.on('error', cb)
        child.on('exit', (code, signal) => {
          if (code > 0) {
            const err = new Error(`${signal}: npm exited with code ${code} while attempting to build ${manifest._requested}. Clone the repository manually and run 'npm install' in it for more information.`)
            err.code = code
            err.signal = signal
            cb(err)
          } else {
            cb()
          }
        })
      }).then(() => {
        if (outDataLen > 0) log.silly('prepareGitDep', '1>', Buffer.concat(outData, outDataLen).toString())
        if (errDataLen > 0) log.silly('prepareGitDep', '2>', Buffer.concat(errData, errDataLen).toString())
      }, (err) => {
        if (outDataLen > 0) log.error('prepareGitDep', '1>', Buffer.concat(outData, outDataLen).toString())
        if (errDataLen > 0) log.error('prepareGitDep', '2>', Buffer.concat(errData, errDataLen).toString())
        throw err
      })
    }
  }).then(() => {
    return readJson(path.join(dir, 'package.json'))
  }).then((pkg) => {
    return cacache.tmp.withTmp(npm.tmp, {
      tmpPrefix: 'pacote-packing'
    }, (tmp) => {
      const tmpTar = path.join(tmp, 'package.tgz')
      return packDirectory(manifest, dir, tmpTar).then(() => {
        return pipe(fs.createReadStream(tmpTar), stream)
      })
    })
  }).catch((err) => stream.emit('error', err))
  return stream
}