// (unpkg file-viewer header removed; the lines below are the actual source)
1// XXX lib/utils/tar.js and this file need to be rewritten.
2
3// URL-to-cache folder mapping:
4// : -> !
5// @ -> _
6// http://registry.npmjs.org/foo/version -> cache/http!/...
7//
8
9/*
10fetching a url:
111. Check for url in inFlightUrls. If present, add cb, and return.
122. create inFlightURL list
133. Acquire lock at {cache}/{sha(url)}.lock
14 retries = {cache-lock-retries, def=3}
15 stale = {cache-lock-stale, def=30000}
16 wait = {cache-lock-wait, def=100}
174. if lock can't be acquired, then fail
185. fetch url, clear lock, call cbs
19
20cache folders:
211. urls: http!/server.com/path/to/thing
222. c:\path\to\thing: file!/c!/path/to/thing
233. /path/to/thing: file!/path/to/thing
244. git@ private: git_github.com!npm/npm
255. git://public: git!/github.com/npm/npm
266. git+blah:// git-blah!/server.com/foo/bar
27
28adding a folder:
291. tar into tmp/random/package.tgz
302. untar into tmp/random/contents/package, stripping one dir piece
313. tar tmp/random/contents/package to cache/n/v/package.tgz
324. untar cache/n/v/package.tgz into cache/n/v/package
335. rm tmp/random
34
35Adding a url:
361. fetch to tmp/random/package.tgz
372. goto folder(2)
38
39adding a name@version:
401. registry.get(name/version)
412. if response isn't 304, add url(dist.tarball)
42
43adding a name@range:
441. registry.get(name)
452. Find a version that satisfies
463. add name@version
47
48adding a local tarball:
491. untar to tmp/random/{blah}
502. goto folder(2)
51*/
52
// The `cache` command function is the module itself; the cache
// primitives are also exposed so other npm commands can reuse them.
exports = module.exports = cache
cache.read = read
cache.clean = clean
cache.unpack = unpack
cache.lock = lock
cache.unlock = unlock
59
60var mkdir = require("mkdirp")
61 , spawn = require("child_process").spawn
62 , exec = require("child_process").execFile
63 , once = require("once")
64 , fetch = require("./utils/fetch.js")
65 , npm = require("./npm.js")
66 , fs = require("graceful-fs")
67 , rm = require("./utils/gently-rm.js")
68 , readJson = require("read-package-json")
69 , registry = npm.registry
70 , log = require("npmlog")
71 , path = require("path")
72 , sha = require("sha")
73 , asyncMap = require("slide").asyncMap
74 , semver = require("semver")
75 , tar = require("./utils/tar.js")
76 , fileCompletion = require("./utils/completion/file-completion.js")
77 , url = require("url")
78 , chownr = require("chownr")
79 , lockFile = require("lockfile")
80 , crypto = require("crypto")
81 , retry = require("retry")
82 , zlib = require("zlib")
83 , chmodr = require("chmodr")
84 , which = require("which")
85 , isGitUrl = require("./utils/is-git-url.js")
86 , pathIsInside = require("path-is-inside")
87 , lookup = require('gh-lookup')
88
// Usage summary printed by `npm help cache` and on bad invocations.
cache.usage = [ "npm cache add <tarball file>"
              , "npm cache add <folder>"
              , "npm cache add <tarball url>"
              , "npm cache add <git url>"
              , "npm cache add <name>@<version>"
              , "npm cache ls [<path>]"
              , "npm cache clean [<pkg>[@<version>]]"
              ].join("\n")
96
cache.completion = function (opts, cb) {
  // Tab-completion for `npm cache <subcommand> ...`.
  var argv = opts.conf.argv.remain

  // Still completing the subcommand itself.
  if (argv.length === 2) return cb(null, ["add", "ls", "clean"])

  var sub = argv[2]
  if (sub === "clean" || sub === "ls") {
    // clean and ls complete against cache paths: strip the last
    // (partial) path component and list what lives under the rest.
    var p = path.dirname(opts.partialWords.slice(3).join("/"))
    return ls_(p === "." ? "" : p, 2, cb)
  }
  if (sub === "add") {
    // Same semantics as install and publish.
    return npm.commands.install.completion(opts, cb)
  }
  // Unknown subcommand: nothing to complete (matches old fall-through).
}
118
// Dispatch `npm cache <cmd> ...` to the matching implementation.
// Accepted aliases: rm/clear/clean, list/sl/ls.
function cache (args, cb) {
  var cmd = args.shift()
  if (cmd === "rm" || cmd === "clear" || cmd === "clean") return clean(args, cb)
  if (cmd === "list" || cmd === "sl" || cmd === "ls") return ls(args, cb)
  if (cmd === "add") return add(args, cb)
  return cb(new Error("Invalid cache action: "+cmd))
}
129
// if the pkg and ver are in the cache, then
// just do a readJson and return.
// if they're not, then fetch them from the registry.
function read (name, ver, forceBypass, cb) {
  // forceBypass is optional and defaults to true.
  if (typeof cb !== "function") cb = forceBypass, forceBypass = true
  var jsonFile = path.join(npm.cache, name, ver, "package", "package.json")
  // Wrap cb so cached data also gets a deprecation check.
  function c (er, data) {
    if (data) deprCheck(data)
    return cb(er, data)
  }

  // --force (when permitted via forceBypass) skips the cache entirely.
  if (forceBypass && npm.config.get("force")) {
    log.verbose("using force", "skipping cache")
    return addNamed(name, ver, c)
  }

  readJson(jsonFile, function (er, data) {
    er = needName(er, data)
    er = needVersion(er, data)
    // ENOENT/ENOTDIR are just a cache miss; anything else is fatal.
    if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)
    if (er) return addNamed(name, ver, c)
    deprCheck(data)
    c(er, data)
  })
}
155
156// npm cache ls [<path>]
// `npm cache ls [<path>]` — print cached entries under <path>.
function ls (args, cb) {
  // Normalize "name@ver" style args into a cache-relative path.
  args = args.join("/").split("@").join("/")
  if (args.substr(-1) === "/") args = args.substr(0, args.length - 1)
  var prefix = npm.config.get("cache")
  // Abbreviate the cache root with ~ when it lives under $HOME.
  // Guard: $HOME may be unset (e.g. some daemon environments).
  if (process.env.HOME && 0 === prefix.indexOf(process.env.HOME)) {
    prefix = "~" + prefix.substr(process.env.HOME.length)
  }
  ls_(args, npm.config.get("depth"), function (er, files) {
    // BUG FIX: on error `files` is undefined; the old code crashed on
    // `files.map` before the error ever reached the caller.
    if (er) return cb(er)
    console.log(files.map(function (f) {
      return path.join(prefix, f)
    }).join("\n").trim())
    cb(er, files)
  })
}
171
// Calls cb with list of cached pkgs matching show.
function ls_ (req, depth, cb) {
  // Thin wrapper: complete cache-relative paths up to `depth` levels deep.
  return fileCompletion(npm.cache, req, depth, cb)
}
176
// npm cache clean [<path>]
function clean (args, cb) {
  // Both arguments are optional.
  if (!cb) cb = args, args = []
  if (!args) args = []

  // Turn "name@ver" style args into a cache-relative path.
  args = args.join("/").split("@").join("/")
  if (args.substr(-1) === "/") args = args.substr(0, args.length - 1)

  var f = path.join(npm.cache, path.normalize(args))
  if (f !== npm.cache) {
    // A specific subtree was named: remove just that.
    return rm(path.join(npm.cache, path.normalize(args)), cb)
  }

  // No path given: wipe the cache, keeping the "-" folder
  // unless --force is set.
  fs.readdir(npm.cache, function (er, files) {
    if (er) return cb()
    var doomed = files.filter(function (entry) {
      return npm.config.get("force") || entry !== "-"
    }).map(function (entry) {
      return path.join(npm.cache, entry)
    })
    asyncMap(doomed, rm, cb)
  })
}
196
// npm cache add <tarball-url>
// npm cache add <pkg> <ver>
// npm cache add <tarball>
// npm cache add <folder>

cache.add = function (pkg, ver, scrub, cb) {
  // ver and scrub are optional; shift the callback into place.
  if (typeof cb !== "function") cb = scrub, scrub = false
  if (typeof cb !== "function") cb = ver, ver = null
  if (!scrub) {
    log.verbose("cache add", [pkg, ver])
    return add([pkg, ver], cb)
  }
  // scrub: empty the whole cache first, then add.
  return clean([], function (er) {
    if (er) return cb(er)
    add([pkg, ver], cb)
  })
}
214
function add (args, cb) {
  // this is hot code. almost everything passes through here.
  // the args can be any of:
  // ["url"]
  // ["pkg", "version"]
  // ["pkg@version"]
  // ["pkg", "url"]
  // This is tricky, because urls can contain @
  // Also, in some cases we get [name, null] rather
  // than just a single argument.

  var usage = "Usage:\n"
            + " npm cache add <tarball-url>\n"
            + " npm cache add <pkg>@<ver>\n"
            + " npm cache add <tarball>\n"
            + " npm cache add <folder>\n"
    , name
    , spec

  // Normalize [x] / [x, undefined] into [x, null].
  if (args[1] === undefined) args[1] = null

  // at this point the args length must ==2
  if (args[1] !== null) {
    name = args[0]
    spec = args[1]
  } else if (args.length === 2) {
    spec = args[0]
  }

  log.verbose("cache add", "name=%j spec=%j args=%j", name, spec, args)


  if (!name && !spec) return cb(usage)

  // see if the spec is a url
  // otherwise, treat as name@version
  var p = url.parse(spec) || {}
  log.verbose("parsed url", p)

  // If there's a /, and it's a path, then install the path.
  // If not, and there's a @, it could be that we got name@http://blah
  // in that case, we will not have a protocol now, but if we
  // split and check, we will.
  if (!name && !p.protocol) {
    if (spec.indexOf("/") !== -1 ||
        process.platform === "win32" && spec.indexOf("\\") !== -1) {
      return maybeFile(spec, p, cb)
    } else if (spec.indexOf("@") !== -1) {
      return maybeAt(spec, cb)
    }
  }

  add_(name, spec, p, cb)
}
269
// Decide whether `spec` names something on disk, or is a name@spec pair.
function maybeFile (spec, p, cb) {
  fs.stat(spec, function (er, stat) {
    // It exists on disk: definitely a local thing.
    if (!er) return addLocal(spec, cb)
    // Not on disk: "bar@baz/loofa" style means name@spec.
    if (spec.indexOf("@") !== -1 && spec.split("@")[0].indexOf("/") === -1) {
      return maybeAt(spec, cb)
    }
    // Already known not to be a url, so treat it as local anyway.
    addLocal(spec, cb)
  })
}
284
// split name@2.3.4 only if name is a valid package name,
// don't split in case of "./test@example.com/" (local path)
function maybeAt (spec, cb) {
  // Split on the FIRST @; anything after it (which may itself
  // contain @, e.g. urls) stays part of the spec.
  var parts = spec.split("@")
  var name = parts.shift()
  return add([name, parts.join("@")], cb)
}
294
// Dispatch a parsed spec to the right add* implementation.
function add_ (name, spec, p, cb) {
  var proto = p.protocol
  if (proto === "http:" || proto === "https:") {
    return addRemoteTarball(spec, null, name, cb)
  }

  if (isGitUrl(p)) return addRemoteGit(spec, p, name, false, cb)

  // if we have a name and a spec, then try name@spec
  // if not, then try just spec (which may try name@"" if not found)
  if (name) {
    addNamed(name, spec, cb)
  } else {
    addLocal(spec, cb)
  }
}
314
// Download u into tmp; when a shasum is supplied, verify the bytes on
// disk against it before declaring success. cb(er, response[, shasum]).
function fetchAndShaCheck (u, tmp, shasum, cb) {
  fetch(u, tmp, function (er, response) {
    if (er) {
      log.error("fetch failed", u)
      return cb(er, response)
    }
    // No expected shasum: nothing further to verify.
    if (!shasum) return cb(null, response)
    // validate that the url we just downloaded matches the expected shasum.
    sha.check(tmp, shasum, function (er) {
      if (er != null && er.message) {
        // add original filename for better debuggability
        er.message = er.message + '\n' + 'From: ' + u
      }
      return cb(er, response, shasum)
    })
  })
}
332
// Only have a single download action at once for a given url
// additional calls stack the callbacks.
var inFlightURLs = {}
function addRemoteTarball (u, shasum, name, cb_) {
  // shasum and name are optional.
  if (typeof cb_ !== "function") cb_ = name, name = ""
  if (typeof cb_ !== "function") cb_ = shasum, shasum = null

  // Coalesce concurrent requests: only the first caller downloads;
  // the rest just queue their callbacks on the in-flight list.
  if (!inFlightURLs[u]) inFlightURLs[u] = []
  var iF = inFlightURLs[u]
  iF.push(cb_)
  if (iF.length > 1) return

  // Final callback: release the url lock, then flush every queued caller.
  function cb (er, data) {
    if (data) {
      data._from = u
      data._resolved = u
    }
    unlock(u, function () {
      var c
      while (c = iF.shift()) c(er, data)
      delete inFlightURLs[u]
    })
  }

  var tmp = path.join(npm.tmp, Date.now()+"-"+Math.random(), "tmp.tgz")

  lock(u, function (er) {
    if (er) return cb(er)

    log.verbose("addRemoteTarball", [u, shasum])
    mkdir(path.dirname(tmp), function (er) {
      if (er) return cb(er)
      addRemoteTarball_(u, tmp, shasum, done)
    })
  })

  // Download + sha check complete: hand off to the local-tarball path.
  function done (er, resp, shasum) {
    if (er) return cb(er)
    addLocalTarball(tmp, name, shasum, cb)
  }
}
374
// Download u to tmp with retries; cb(er, response, shasum).
function addRemoteTarball_(u, tmp, shasum, cb) {
  // Tuned to spread 3 attempts over about a minute.
  // See formula at <https://github.com/tim-kos/node-retry>.
  var operation = retry.operation
    ( { retries: npm.config.get("fetch-retries")
      , factor: npm.config.get("fetch-retry-factor")
      , minTimeout: npm.config.get("fetch-retry-mintimeout")
      , maxTimeout: npm.config.get("fetch-retry-maxtimeout") })

  operation.attempt(function (currentAttempt) {
    log.info("retry", "fetch attempt " + currentAttempt
      + " at " + (new Date()).toLocaleTimeString())
    fetchAndShaCheck(u, tmp, shasum, function (er, response, shasum) {
      // Only retry on 408, 5xx or no `response`.
      var sc = response && response.statusCode
      var statusRetry = !sc || (sc === 408 || sc >= 500)
      if (er && statusRetry && operation.retry(er)) {
        log.info("retry", "will retry, error on last attempt: " + er)
        return
      }
      cb(er, response, shasum)
    })
  })
}
399
// 1. cacheDir = path.join(cache,'_git-remotes',sha1(u))
// 2. checkGitDir(cacheDir) ? 4. : 3. (rm cacheDir if necessary)
// 3. git clone --mirror u cacheDir
// 4. cd cacheDir && git fetch -a origin
// 5. git archive /tmp/random.tgz
// 6. addLocalTarball(/tmp/random.tgz) <gitref> --format=tar --prefix=package/
// silent flag is used if this should error quietly
function addRemoteGit (u, parsed, name, silent, cb_) {
  // name is optional.
  if (typeof cb_ !== "function") cb_ = name, name = null

  // Coalesce concurrent requests for the same git url (the in-flight
  // list is keyed by the ORIGINAL url, before the rewrites below).
  if (!inFlightURLs[u]) inFlightURLs[u] = []
  var iF = inFlightURLs[u]
  iF.push(cb_)
  if (iF.length > 1) return

  // git is so tricky!
  // if the path is like ssh://foo:22/some/path then it works, but
  // it needs the ssh://
  // If the path is like ssh://foo:some/path then it works, but
  // only if you remove the ssh://
  var origUrl = u
  u = u.replace(/^git\+/, "")
       .replace(/#.*$/, "")

  // ssh paths that are scp-style urls don't need the ssh://
  if (parsed.pathname.match(/^\/?:/)) {
    u = u.replace(/^ssh:\/\//, "")
  }

  // Release the lock and flush every queued caller.
  function cb (er, data) {
    unlock(u, function () {
      var c
      while (c = iF.shift()) c(er, data)
      delete inFlightURLs[origUrl]
    })
  }

  lock(u, function (er) {
    if (er) return cb(er)

    // figure out what we should check out.
    var co = parsed.hash && parsed.hash.substr(1) || "master"

    // Cache folder name: sanitized url plus a short sha1 prefix to
    // keep distinct urls from colliding after sanitization.
    var v = crypto.createHash("sha1").update(u).digest("hex").slice(0, 8)
    v = u.replace(/[^a-zA-Z0-9]+/g, '-') + '-' + v

    log.http("git", u + '#' + co)

    var p = path.join(npm.config.get("cache"), "_git-remotes", v)

    checkGitDir(p, u, co, origUrl, silent, function(er, data) {
      // Normalize file modes under the cache regardless of outcome;
      // the checkGitDir error (if any) takes precedence.
      chmodr(p, npm.modes.file, function(erChmod) {
        if (er) return cb(er, data)
        return cb(erChmod, data)
      })
    })
  })
}
458
// Ensure p is a git mirror of u: reuse it when the remote matches,
// otherwise remove it and re-clone from scratch.
function checkGitDir (p, u, co, origUrl, silent, cb) {
  fs.stat(p, function (er, s) {
    if (er) return cloneGitRemote(p, u, co, origUrl, silent, cb)
    if (!s.isDirectory()) return rm(p, function (er){
      if (er) return cb(er)
      cloneGitRemote(p, u, co, origUrl, silent, cb)
    })

    var git = npm.config.get("git")
    var args = [ "config", "--get", "remote.origin.url" ]
    var env = gitEnv()

    // check for git
    which(git, function (err) {
      if (err) {
        err.code = "ENOGIT"
        return cb(err)
      }
      exec(git, args, {cwd: p, env: env}, function (er, stdout, stderr) {
        // BUG FIX: was assigned without `var`, leaking an implicit global.
        var stdoutTrimmed = (stdout + "\n" + stderr).trim()
        // Cache folder points at a different remote: start over.
        if (er || u !== stdout.trim()) {
          log.warn( "`git config --get remote.origin.url` returned "
                  + "wrong result ("+u+")", stdoutTrimmed )
          return rm(p, function (er){
            if (er) return cb(er)
            cloneGitRemote(p, u, co, origUrl, silent, cb)
          })
        }
        log.verbose("git remote.origin.url", stdoutTrimmed)
        archiveGitRemote(p, u, co, origUrl, cb)
      })
    })
  })
}
493
// Mirror-clone u into cache folder p, then archive the requested ref.
function cloneGitRemote (p, u, co, origUrl, silent, cb) {
  mkdir(p, function (er) {
    if (er) return cb(er)

    var git = npm.config.get("git")
    var args = [ "clone", "--mirror", u, p ]
    var env = gitEnv()

    // check for git
    which(git, function (err) {
      if (err) {
        err.code = "ENOGIT"
        return cb(err)
      }
      exec(git, args, {cwd: p, env: env}, function (er, stdout, stderr) {
        stdout = (stdout + "\n" + stderr).trim()
        if (er) {
          // silent is set by callers that have a fallback (see
          // maybeGithub), so a failure here only logs at verbose.
          if (silent) {
            log.verbose("git clone " + u, stdout)
          } else {
            log.error("git clone " + u, stdout)
          }
          return cb(er)
        }
        log.verbose("git clone " + u, stdout)
        archiveGitRemote(p, u, co, origUrl, cb)
      })
    })
  })
}
524
// Bring mirror p up to date, resolve ref `co` to a commit sha, then
// `git archive` it into a gzipped tarball and add that to the cache.
// Runs: fetch -> verifyOwnership -> resolveHead -> next (archive).
function archiveGitRemote (p, u, co, origUrl, cb) {
  var git = npm.config.get("git")
  var archive = [ "fetch", "-a", "origin" ]
  var resolve = [ "rev-list", "-n1", co ]
  var env = gitEnv()

  // (removed unused locals `errState` and `n` from the original)
  var resolved = null
  var tmp

  exec(git, archive, {cwd: p, env: env}, function (er, stdout, stderr) {
    stdout = (stdout + "\n" + stderr).trim()
    if (er) {
      log.error("git fetch -a origin ("+u+")", stdout)
      return cb(er)
    }
    log.verbose("git fetch -a origin ("+u+")", stdout)
    tmp = path.join(npm.tmp, Date.now()+"-"+Math.random(), "tmp.tgz")
    verifyOwnership()
  })

  // Make sure the cache subtree is owned by the cache owner before
  // writing into it (skipped on windows, which has no chown).
  function verifyOwnership() {
    if (process.platform === "win32") {
      log.silly("verifyOwnership", "skipping for windows")
      resolveHead()
    } else {
      getCacheStat(function(er, cs) {
        if (er) {
          log.error("Could not get cache stat")
          return cb(er)
        }
        chownr(p, cs.uid, cs.gid, function(er) {
          if (er) {
            log.error("Failed to change folder ownership under npm cache for %s", p)
            return cb(er)
          }
          resolveHead()
        })
      })
    }
  }

  // Resolve the symbolic ref to a commit sha, and record the fully
  // resolved url (origUrl#sha) for the package's _resolved field.
  function resolveHead () {
    exec(git, resolve, {cwd: p, env: env}, function (er, stdout, stderr) {
      stdout = (stdout + "\n" + stderr).trim()
      if (er) {
        log.error("Failed resolving git HEAD (" + u + ")", stderr)
        return cb(er)
      }
      log.verbose("git rev-list -n1 " + co, stdout)
      var parsed = url.parse(origUrl)
      parsed.hash = stdout
      resolved = url.format(parsed)

      // https://github.com/npm/npm/issues/3224
      // node incorrectly sticks a / at the start of the path
      // We know that the host won't change, so split and detect this
      var spo = origUrl.split(parsed.host)
      var spr = resolved.split(parsed.host)
      if (spo[1].charAt(0) === ':' && spr[1].charAt(0) === '/')
        spr[1] = spr[1].slice(1)
      resolved = spr.join(parsed.host)

      log.verbose('resolved git url', resolved)
      next()
    })
  }

  // Archive ref `co` under a package/ prefix, gzip it into tmp, and
  // hand the resulting tarball to the local-tarball pipeline.
  function next () {
    mkdir(path.dirname(tmp), function (er) {
      if (er) return cb(er)
      var gzip = zlib.createGzip({ level: 9 })
      var git = npm.config.get("git")
      var args = ["archive", co, "--format=tar", "--prefix=package/"]
      var out = fs.createWriteStream(tmp)
      var env = gitEnv()
      // cb may otherwise fire twice: once from "error", once from "close".
      cb = once(cb)
      var cp = spawn(git, args, { env: env, cwd: p })
      cp.on("error", cb)
      cp.stderr.on("data", function(chunk) {
        log.silly(chunk.toString(), "git archive")
      })

      cp.stdout.pipe(gzip).pipe(out).on("close", function() {
        addLocalTarball(tmp, function(er, data) {
          if (data) data._resolved = resolved
          cb(er, data)
        })
      })
    })
  }
}
618
var gitEnv_
// Memoized copy of process.env with GIT* variables stripped, because
// git responds to env vars in some weird ways in post-receive hooks.
// A small whitelist of GIT_ vars is kept.
function gitEnv () {
  if (gitEnv_) return gitEnv_
  var keep = ['GIT_PROXY_COMMAND', 'GIT_SSH', 'GIT_SSL_NO_VERIFY']
  gitEnv_ = Object.keys(process.env).reduce(function (env, k) {
    if (!/^GIT/.test(k) || keep.indexOf(k) !== -1) env[k] = process.env[k]
    return env
  }, {})
  return gitEnv_
}
631
632
// only have one request in flight for a given
// name@blah thing.
var inFlightNames = {}
function addNamed (name, x, data, cb_) {
  // x is a version, range, or dist-tag; data (registry doc) is optional.
  if (typeof cb_ !== "function") cb_ = data, data = null
  log.verbose("addNamed", [name, x])

  // Coalesce concurrent requests for the same name@x.
  var k = name + "@" + x
  if (!inFlightNames[k]) inFlightNames[k] = []
  var iF = inFlightNames[k]
  iF.push(cb_)
  if (iF.length > 1) return

  // Release the lock and flush every queued caller.
  function cb (er, data) {
    if (data && !data._fromGithub) data._from = k
    unlock(k, function () {
      var c
      while (c = iF.shift()) c(er, data)
      delete inFlightNames[k]
    })
  }

  log.verbose("addNamed", [semver.valid(x), semver.validRange(x)])
  lock(k, function (er, fd) {
    if (er) return cb(er)

    // Exact version first, then range, then dist-tag.
    var fn = ( semver.valid(x, true) ? addNameVersion
             : semver.validRange(x, true) ? addNameRange
             : addNameTag
             )
    fn(name, x, data, cb)
  })
}
666
// Resolve a dist-tag (e.g. "latest") to a version and add that.
function addNameTag (name, tag, data, cb_) {
  // data is optional. An empty tag falls back to the configured
  // default tag; `explicit` records which case we are in.
  if (typeof cb_ !== "function") cb_ = data, data = null
  log.info("addNameTag", [name, tag])
  var explicit = true
  if (!tag) {
    explicit = false
    tag = npm.config.get("tag")
  }

  function cb(er, data) {
    // might be username/project
    // in that case, try it as a github url.
    if (er && tag.split("/").length === 2) {
      return maybeGithub(tag, name, er, cb_)
    }
    return cb_(er, data)
  }

  registry.get(name, function (er, data, json, response) {
    if (er) return cb(er)
    engineFilter(data)
    // Prefer the tagged version when it exists and survived filtering.
    if (data["dist-tags"] && data["dist-tags"][tag]
        && data.versions[data["dist-tags"][tag]]) {
      var ver = data["dist-tags"][tag]
      return addNamed(name, ver, data.versions[ver], cb)
    }
    // Implicit default tag that's missing: fall back to "any version".
    if (!explicit && Object.keys(data.versions).length) {
      return addNamed(name, "*", data, cb)
    }

    er = installTargetsError(tag, data)
    return cb(er)
  })
}
701
702
// Delete versions whose engines field rules out the running node/npm.
// Mutates data.versions in place; a no-op under --force or when the
// node version is unknown.
function engineFilter (data) {
  var npmv = npm.version
  var nodev = npm.config.get("node-version")
  var strict = npm.config.get("engine-strict")

  if (!nodev || npm.config.get("force")) return data

  var versions = data.versions || {}
  Object.keys(versions).forEach(function (v) {
    var pkg = versions[v]
    var eng = pkg.engines
    if (!eng) return
    // Only enforce when engine-strict is on, or the package opted in.
    if (!strict && !pkg.engineStrict) return
    var badNode = eng.node && !semver.satisfies(nodev, eng.node, true)
    var badNpm = eng.npm && !semver.satisfies(npmv, eng.npm, true)
    if (badNode || badNpm) delete data.versions[v]
  })
}
720
// Resolve a semver range to a concrete version and add that.
function addNameRange (name, range, data, cb) {
  // data (registry doc) is optional.
  if (typeof cb !== "function") cb = data, data = null

  // BUG FIX: keep the raw input for the error message. The old code
  // overwrote `range` with the (null) normalized value first, so the
  // message was always "Invalid version range: null".
  var rawRange = range
  range = semver.validRange(range, true)
  if (range === null) return cb(new Error(
    "Invalid version range: " + rawRange))

  log.silly("addNameRange", {name:name, range:range, hasData:!!data})

  if (data) return next()
  registry.get(name, function (er, d, json, response) {
    if (er) return cb(er)
    data = d
    next()
  })

  function next () {
    log.silly( "addNameRange", "number 2"
             , {name:name, range:range, hasData:!!data})
    engineFilter(data)

    log.silly("addNameRange", "versions"
             , [data.name, Object.keys(data.versions || {})])

    // if the tagged version satisfies, then use that.
    var tagged = data["dist-tags"][npm.config.get("tag")]
    if (tagged
        && data.versions[tagged]
        && semver.satisfies(tagged, range, true)) {
      return addNamed(name, tagged, data.versions[tagged], cb)
    }

    // find the max satisfying version.
    var versions = Object.keys(data.versions || {})
    var ms = semver.maxSatisfying(versions, range, true)
    if (!ms) {
      return cb(installTargetsError(range, data))
    }

    // if we don't have a registry connection, try to see if
    // there's a cached copy that will be ok.
    addNamed(name, ms, data.versions[ms], cb)
  }
}
765
// Build the ETARGET error reported when no version satisfies the
// requested spec, listing every installable target for the package.
function installTargetsError (requested, data) {
  var versions = data.versions || {}
  var tags = Object.keys(data["dist-tags"]).filter(function (t) {
    return versions.hasOwnProperty(t)
  })
  var targets = tags.concat(Object.keys(versions))

  var what = data.name + (requested ? "@'" + requested + "'" : "")

  var avail = targets.length
    ? "Valid install targets:\n" + JSON.stringify(targets) + "\n"
    : "No valid targets found.\n"
      + "Perhaps not compatible with your version of node?"

  var er = new Error("No compatible version found: " + what + "\n" + avail)
  er.code = "ETARGET"
  return er
}
783
// Add an exact name@version, consulting the cache before refetching.
function addNameVersion (name, v, data, cb) {
  // data (registry doc for this version) is optional; when supplied,
  // the registry round-trip is skipped entirely.
  if (typeof cb !== "function") cb = data, data = null

  var ver = semver.valid(v, true)
  if (!ver) return cb(new Error("Invalid version: "+v))

  var response

  if (data) {
    response = null
    return next()
  }
  registry.get(name + "/" + ver, function (er, d, json, resp) {
    if (er) return cb(er)
    data = d
    response = resp
    next()
  })

  function next () {
    deprCheck(data)
    var dist = data.dist

    if (!dist) return cb(new Error("No dist in "+data._id+" package"))

    if (!dist.tarball) return cb(new Error(
      "No dist.tarball in " + data._id + " package"))

    // A non-304 response (or --force) means the cached copy is stale.
    if ((response && response.statusCode !== 304) || npm.config.get("force")) {
      return fetchit()
    }

    // we got cached data, so let's see if we have a tarball.
    var pkgroot = path.join(npm.cache, name, ver)
    var pkgtgz = path.join(pkgroot, "package.tgz")
    var pkgjson = path.join(pkgroot, "package", "package.json")
    fs.stat(pkgtgz, function (er, s) {
      if (!er) {
        readJson(pkgjson, function (er, data) {
          er = needName(er, data)
          er = needVersion(er, data)
          // ENOENT/ENOTDIR are a cache miss; anything else is fatal.
          if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR")
            return cb(er)
          if (er) return fetchit()
          return cb(null, data)
        })
      } else return fetchit()
    })

    function fetchit () {
      if (!npm.config.get("registry")) {
        return cb(new Error("Cannot fetch: "+dist.tarball))
      }

      // use the same protocol as the registry.
      // https registry --> https tarballs, but
      // only if they're the same hostname, or else
      // detached tarballs may not work.
      var tb = url.parse(dist.tarball)
      var rp = url.parse(npm.config.get("registry"))
      if (tb.hostname === rp.hostname
         && tb.protocol !== rp.protocol) {
        tb.protocol = url.parse(npm.config.get("registry")).protocol
        delete tb.href
      }
      tb = url.format(tb)

      // only add non-shasum'ed packages if --forced.
      // only ancient things would lack this for good reasons nowadays.
      if (!dist.shasum && !npm.config.get("force")) {
        return cb(new Error("package lacks shasum: " + data._id))
      }
      return addRemoteTarball( tb
                             , dist.shasum
                             , name+"-"+ver
                             , cb )
    }
  }
}
863
// Add something that looks like a local path (folder or tarball).
function addLocal (p, name, cb_) {
  // name is optional.
  if (typeof cb_ !== "function") cb_ = name, name = ""

  // Release the lock before reporting, and fall back to the registry
  // for bare names that turned out not to exist on disk.
  function cb (er, data) {
    unlock(p, function () {
      if (er) {
        // if it doesn't have a / in it, it might be a
        // remote thing.
        if (p.indexOf("/") === -1 && p.charAt(0) !== "."
           && (process.platform !== "win32" || p.indexOf("\\") === -1)) {
          return addNamed(p, "", cb_)
        }
        log.error("addLocal", "Could not install %s", p)
        return cb_(er)
      }
      if (data && !data._fromGithub) data._from = p
      return cb_(er, data)
    })
  }

  lock(p, function (er) {
    if (er) return cb(er)
    // figure out if this is a folder or file.
    fs.stat(p, function (er, s) {
      if (er) {
        // might be username/project
        // in that case, try it as a github url.
        if (p.split("/").length === 2) {
          return maybeGithub(p, name, er, cb)
        }
        return cb(er)
      }
      if (s.isDirectory()) addLocalDirectory(p, name, cb)
      else addLocalTarball(p, name, cb)
    })
  })
}
901
// Treat "user/repo" (optionally "user/repo@semver") as a github
// shorthand. `er` is the original failure to report if github fails too.
function maybeGithub (p, name, er, cb) {
  if (~p.indexOf('@')) {
    // github semver support, i.e. "npm install visionmedia/express@3"
    var key = p.split('@')[0];
    var val = p.split('@')[1];

    var token = npm.config.get('github-token');

    lookup({
      token: token,
      version: val,
      repo: key
    }, function(err, release){
      if (err) return cb(err);
      if (!release) return cb(new Error('failed to find release of ' + key + '@' + val));
      // Authenticated downloads need the token on the tarball url too.
      var url = token
        ? release.tarball_url + '?access_token=' + token
        : release.tarball_url;

      addRemoteTarball(url, null, name, cb);
    });

    return;
  }

  // preserve old github behaviour if it has no semver attached
  // maybe fetch tarball here instead of git clone?
  var u = "git://github.com/" + p
    , up = url.parse(u)
  log.info("maybeGithub", "Attempting %s from %s", p, u)

  return addRemoteGit(u, up, name, true, function (er2, data) {
    if (er2) {
      // Public clone failed; retry over ssh in case the repo is private.
      var upriv = "git+ssh://git@github.com:" + p
        , uppriv = url.parse(upriv)

      log.info("maybeGithub", "Attempting %s from %s", p, upriv)

      // BUG FIX: arguments were passed as (false, name), which swapped
      // the `name` and `silent` parameters of addRemoteGit. Pass name
      // through and stay silent, matching the first attempt above.
      return addRemoteGit(upriv, uppriv, name, true, function (er3, data) {
        if (er3) return cb(er)
        success(upriv, data)
      })
    }
    success(u, data)
  })

  function success (u, data) {
    data._from = u
    data._fromGithub = true
    return cb(null, data)
  }
}
954
// Add a tarball that already exists on the local filesystem.
function addLocalTarball (p, name, shasum, cb_) {
  // name and shasum are optional.
  if (typeof cb_ !== "function") cb_ = shasum, shasum = null
  if (typeof cb_ !== "function") cb_ = name, name = ""
  // if it's a tar, and not in place,
  // then unzip to .tmp, add the tmp folder, and clean up tmp
  if (pathIsInside(p, npm.tmp))
    return addTmpTarball(p, name, shasum, cb_)

  // Tarballs already inside the cache must follow the cache layout.
  if (pathIsInside(p, npm.cache)) {
    if (path.basename(p) !== "package.tgz") return cb_(new Error(
      "Not a valid cache tarball name: "+p))
    return addPlacedTarball(p, name, shasum, cb_)
  }

  // Record where the tarball actually came from.
  function cb (er, data) {
    if (data) data._resolved = p
    return cb_(er, data)
  }

  // just copy it over and then add the temp tarball file.
  var tmp = path.join(npm.tmp, name + Date.now()
                     + "-" + Math.random(), "tmp.tgz")
  mkdir(path.dirname(tmp), function (er) {
    if (er) return cb(er)
    var from = fs.createReadStream(p)
      , to = fs.createWriteStream(tmp)
      , errState = null
    // Report only the first stream error; ignore any that follow.
    function errHandler (er) {
      if (errState) return
      return cb(errState = er)
    }
    from.on("error", errHandler)
    to.on("error", errHandler)
    to.on("close", function () {
      if (errState) return
      log.verbose("chmod", tmp, npm.modes.file.toString(8))
      fs.chmod(tmp, npm.modes.file, function (er) {
        if (er) return cb(er)
        addTmpTarball(tmp, name, shasum, cb)
      })
    })
    from.pipe(to)
  })
}
999
// to maintain the cache dir's permissions consistently.
var cacheStat = null
// Stat the cache root (memoized), creating it if missing.
function getCacheStat (cb) {
  if (cacheStat) return cb(null, cacheStat)
  fs.stat(npm.cache, function (er, st) {
    if (er) return makeCacheDir(cb)
    if (!st.isDirectory()) {
      log.error("getCacheStat", "invalid cache dir %j", npm.cache)
      // BUG FIX: `er` is null on this path, so callers previously got
      // a success callback with no stat and crashed later when they
      // dereferenced it; hand them a real error instead.
      return cb(new Error("invalid cache dir: " + npm.cache))
    }
    return cb(null, cacheStat = st)
  })
}
1013
// Create the cache dir, choosing sensible ownership when running as root.
function makeCacheDir (cb) {
  // Platforms without getuid (windows) have no ownership to fix up.
  if (!process.getuid) return mkdir(npm.cache, cb)

  var uid = +process.getuid()
    , gid = +process.getgid()

  // Under sudo, prefer the invoking user's ids so the cache stays
  // usable without root.
  if (uid === 0) {
    if (process.env.SUDO_UID) uid = +process.env.SUDO_UID
    if (process.env.SUDO_GID) gid = +process.env.SUDO_GID
  }
  if (uid !== 0 || !process.env.HOME) {
    cacheStat = {uid: uid, gid: gid}
    return mkdir(npm.cache, afterMkdir)
  }

  // Still root and HOME is set: mirror the home directory's ownership.
  fs.stat(process.env.HOME, function (er, st) {
    if (er) {
      log.error("makeCacheDir", "homeless?")
      return cb(er)
    }
    cacheStat = st
    log.silly("makeCacheDir", "cache dir uid, gid", [st.uid, st.gid])
    return mkdir(npm.cache, afterMkdir)
  })

  function afterMkdir (er, made) {
    if (er || !cacheStat || isNaN(cacheStat.uid) || isNaN(cacheStat.gid)) {
      return cb(er, cacheStat)
    }

    // `made` is falsy when nothing new was created; only chown what
    // this call actually made.
    if (!made) return cb(er, cacheStat)

    // ensure that the ownership is correct.
    chownr(made, cacheStat.uid, cacheStat.gid, function (er) {
      return cb(er, cacheStat)
    })
  }
}
1052
1053
1054
1055
// Add a tarball already sitting at its cache location.
function addPlacedTarball (p, name, shasum, cb) {
  // name is optional.
  if (!cb) cb = name, name = ""
  // Placed tarballs must be owned like the rest of the cache.
  getCacheStat(function (er, cs) {
    if (er) return cb(er)
    return addPlacedTarball_(p, name, cs.uid, cs.gid, shasum, cb)
  })
}
1063
1064// Resolved sum is the shasum from the registry dist object, but
1065// *not* necessarily the shasum of this tarball, because for stupid
1066// historical reasons, npm re-packs each package an extra time through
1067// a temp directory, so all installed packages are actually built with
1068// *this* version of npm, on this machine.
1069//
1070// Once upon a time, this meant that we could change package formats
1071// around and fix junk that might be added by incompatible tar
1072// implementations. Then, for a while, it was a way to correct bs
1073// added by bugs in our own tar implementation. Now, it's just
1074// garbage, but cleaning it up is a pain, and likely to cause issues
1075// if anything is overlooked, so it's not high priority.
1076//
1077// If you're bored, and looking to make npm go faster, and you've
1078// already made it this far in this file, here's a better methodology:
1079//
1080// cache.add should really be cache.place. That is, it should take
1081// a set of arguments like it does now, but then also a destination
1082// folder.
1083//
1084// cache.add('foo@bar', '/path/node_modules/foo', cb)
1085//
1086// 1. Resolve 'foo@bar' to some specific:
1087// - git url
1088// - local folder
1089// - local tarball
1090// - tarball url
1091// 2. If resolved through the registry, then pick up the dist.shasum
1092// along the way.
1093// 3. Acquire request() stream fetching bytes: FETCH
1094// 4. FETCH.pipe(tar unpack stream to dest)
1095// 5. FETCH.pipe(shasum generator)
1096// When the tar and shasum streams both finish, make sure that the
1097// shasum matches dist.shasum, and if not, clean up and bail.
1098//
1099// publish(cb)
1100//
1101// 1. read package.json
1102// 2. get root package object (for rev, and versions)
1103// 3. update root package doc with version info
1104// 4. remove _attachments object
// 5. remove versions object
// 6. jsonify, remove last }
// 7. get stream: registry.put(/package)
// 8. write trailing-}-less JSON
// 9. write "_attachments":
// 10. JSON.stringify(attachments), remove trailing }
// 11. Write start of attachments (stubs)
// 12. JSON(filename)+':{"type":"application/octet-stream","data":"'
// 13. acquire tar packing stream, PACK
// 14. PACK.pipe(PUT)
// 15. PACK.pipe(shasum generator)
// 16. when PACK finishes, get shasum
// 17. PUT.write('"}},') (finish _attachments)
// 18. update "versions" object with current package version
//     (including dist.shasum and dist.tarball)
// 19. write '"versions":' + JSON(versions)
// 20. write '}}' (versions, close main doc)
1122
// Finish adding a tarball already at its final cache location p
// (.../name/ver/package.tgz): remove any stale unpacked copy, unpack
// fresh, fix the tarball's mode/ownership, and call cb(er, data) with
// the package.json data (data.dist.shasum set from the file on disk).
//
// NOTE(review): resolvedSum is accepted but never used — the shasum is
// recomputed from the tarball instead (see the long comment above about
// npm re-packing every package). TODO confirm this is intentional.
function addPlacedTarball_ (p, name, uid, gid, resolvedSum, cb) {
  // now we know it's in place already as .cache/name/ver/package.tgz
  // unpack to .cache/name/ver/package/, read the package.json,
  // and fire cb with the json data.
  var target = path.dirname(p)
    , folder = path.join(target, "package")

  // serialize against other processes working on this cache entry
  lock(folder, function (er) {
    if (er) return cb(er)
    rmUnpack()
  })

  // Remove any previously unpacked copy of the package folder.
  // Note the lock is released before thenUnpack() runs.
  function rmUnpack () {
    rm(folder, function (er) {
      unlock(folder, function () {
        if (er) {
          log.error("addPlacedTarball", "Could not remove %j", folder)
          return cb(er)
        }
        thenUnpack()
      })
    })
  }

  // Unpack the tarball, compute its sha, read its package.json, and
  // normalize the tarball's mode/ownership before calling back.
  function thenUnpack () {
    tar.unpack(p, folder, null, null, uid, gid, function (er) {
      if (er) {
        log.error("addPlacedTarball", "Could not unpack %j to %j", p, target)
        return cb(er)
      }
      // calculate the sha of the file that we just unpacked.
      // this is so that the data is available when publishing.
      sha.get(p, function (er, shasum) {
        if (er) {
          log.error("addPlacedTarball", "shasum fail", p)
          return cb(er)
        }
        readJson(path.join(folder, "package.json"), function (er, data) {
          er = needName(er, data)
          er = needVersion(er, data)
          if (er) {
            log.error("addPlacedTarball", "Couldn't read json in %j"
                     , folder)
            return cb(er)
          }

          data.dist = data.dist || {}
          data.dist.shasum = shasum
          deprCheck(data)
          // asyncMap applies both step functions below to the tarball
          // path p: first chmod it, then (unless on windows or the
          // uid/gid aren't sane integers) chown it.
          asyncMap([p], function (f, cb) {
            log.verbose("chmod", f, npm.modes.file.toString(8))
            fs.chmod(f, npm.modes.file, cb)
          }, function (f, cb) {
            if (process.platform === "win32") {
              log.silly("chown", "skipping for windows", f)
              cb()
            } else if (typeof uid === "number"
                    && typeof gid === "number"
                    && parseInt(uid, 10) === uid
                    && parseInt(gid, 10) === gid) {
              log.verbose("chown", f, [uid, gid])
              fs.chown(f, uid, gid, cb)
            } else {
              log.verbose("chown", "skip for invalid uid/gid", [f, uid, gid])
              cb()
            }
          }, function (er) {
            cb(er, data)
          })
        })
      })
    })
  }
}
1197
1198// At this point, if shasum is set, it's something that we've already
1199// read and checked. Just stashing it in the data at this point.
// Add a local package folder p to the cache: read its package.json,
// pack it to a tarball (directly into the cache when the folder is
// already named "package", otherwise into a temp location), then hand
// the tarball to addLocalTarball.
function addLocalDirectory (p, name, shasum, cb) {
  // name and shasum are both optional; shift args accordingly
  if (typeof cb !== "function") cb = shasum, shasum = ""
  if (typeof cb !== "function") cb = name, name = ""
  // if it's a folder, then read the package.json,
  // tar it to the proper place, and add the cache tar
  if (pathIsInside(p, npm.cache)) return cb(new Error(
    "Adding a cache directory to the cache will make the world implode."))
  readJson(path.join(p, "package.json"), false, function (er, data) {
    er = needName(er, data)
    er = needVersion(er, data)
    if (er) return cb(er)
    deprCheck(data)
    var random = Date.now() + "-" + Math.random()
      , tmp = path.join(npm.tmp, random)
      , tmptgz = path.resolve(tmp, "tmp.tgz")
      , placed = path.resolve( npm.cache, data.name
                             , data.version, "package.tgz" )
      , placeDirect = path.basename(p) === "package"
      , tgz = placeDirect ? placed : tmptgz
    // NOTE(review): an error from getCacheStat is ignored here; when cs
    // is missing, the chown below is skipped but cb() then fires with
    // neither error nor data and addLocalTarball never runs — confirm
    // callers tolerate that.
    getCacheStat(function (er, cs) {
      mkdir(path.dirname(tgz), function (er, made) {
        if (er) return cb(er)

        // "fancy" packing only for genuine user folders, i.e. paths
        // outside npm's own tmp and cache dirs (exact semantics live
        // in tar.pack — TODO confirm)
        var fancy = !pathIsInside(p, npm.tmp)
                    && !pathIsInside(p, npm.cache)
        tar.pack(tgz, p, data, fancy, function (er) {
          if (er) {
            log.error( "addLocalDirectory", "Could not pack %j to %j"
                     , p, tgz )
            return cb(er)
          }

          // if we don't get a cache stat, or if the gid/uid is not
          // a number, then just move on. chown would fail anyway.
          if (!cs || isNaN(cs.uid) || isNaN(cs.gid)) return cb()

          // chown the whole freshly-made dir tree when mkdir created
          // one, otherwise just the tarball itself
          chownr(made || tgz, cs.uid, cs.gid, function (er) {
            if (er) return cb(er)
            addLocalTarball(tgz, name, shasum, cb)
          })
        })
      })
    })
  })
}
1245
// Unpack a tarball that lives in the tmp area into a sibling
// "package" folder, then hand that folder to addLocalDirectory so it
// gets re-packed into the cache proper.
function addTmpTarball (tgz, name, shasum, cb) {
  // name is optional
  if (!cb) {
    cb = name
    name = ""
  }
  getCacheStat(function (er, cs) {
    if (er) return cb(er)
    var folder = path.resolve(path.dirname(tgz), "package")
    tar.unpack(tgz, folder, null, null, cs.uid, cs.gid, function (er) {
      if (er) return cb(er)
      addLocalDirectory(folder, name, shasum, cb)
    })
  })
}
1262
// Unpack the cached tarball for pkg@ver into unpackTarget.
// dMode, fMode, uid and gid are all optional; the trailing argument
// is always the callback.
function unpack (pkg, ver, unpackTarget, dMode, fMode, uid, gid, cb) {
  // slide the callback left past any omitted optional args
  if (typeof cb !== "function") { cb = gid; gid = null }
  if (typeof cb !== "function") { cb = uid; uid = null }
  if (typeof cb !== "function") { cb = fMode; fMode = null }
  if (typeof cb !== "function") { cb = dMode; dMode = null }

  read(pkg, ver, false, function (er, data) {
    if (er) {
      log.error("unpack", "Could not read data for %s", pkg + "@" + ver)
      return cb(er)
    }
    // clear out anything already built at the target first
    npm.commands.unbuild([unpackTarget], true, function (er) {
      if (er) return cb(er)
      var tarball = path.join(npm.cache, pkg, ver, "package.tgz")
      tar.unpack(tarball, unpackTarget, dMode, fMode, uid, gid, cb)
    })
  })
}
1284
// Deprecation bookkeeping: messages seen per package _id, and which
// ids we have already warned about (warn at most once per id).
var deprecated = {}
  , deprWarned = {}
function deprCheck (data) {
  var id = data._id
  // a deprecation seen earlier for this id sticks to later data objects
  if (deprecated[id]) data.deprecated = deprecated[id]
  if (!data.deprecated) return
  deprecated[id] = data.deprecated
  if (deprWarned[id]) return
  deprWarned[id] = true
  log.warn("deprecated", "%s: %s", id, data.deprecated)
}
1296
// Map a cache key u (url or path) to a lockfile path inside the cache
// dir. The basename combines the first 8 sha1 hex chars of u (for
// uniqueness) with the last 32 chars of a sanitized form of u (for
// human readability).
function lockFileName (u) {
  var hash = crypto.createHash("sha1").update(u).digest("hex").substr(0, 8)
  var safe = u.replace(/[^a-zA-Z0-9]+/g, "-")
              .replace(/^-+|-+$/g, "")
              .substr(-32)
  var base = hash + "-" + safe
  log.silly("lockFile", base, u)
  return path.resolve(npm.config.get("cache"), base + ".lock")
}
1305
// Locks currently held by this process, keyed by lockfile path.
var myLocks = {}

// Acquire the cache lock for key u. The cache dir must already exist,
// which getCacheStat guarantees; lock tuning (stale/retries/wait)
// comes from npm config.
function lock (u, cb) {
  getCacheStat(function (er) {
    if (er) return cb(er)
    var opts = { stale: npm.config.get("cache-lock-stale")
               , retries: npm.config.get("cache-lock-retries")
               , wait: npm.config.get("cache-lock-wait") }
    var lf = lockFileName(u)
    log.verbose("lock", u, lf)
    lockFile.lock(lf, opts, function (er) {
      // only remember the lock on success, so unlock stays honest
      if (!er) myLocks[lf] = true
      cb(er)
    })
  })
}
1322
// Release the cache lock for key u. Releasing an already-released
// lock is a no-op; releasing a lock this process never held throws.
function unlock (u, cb) {
  var lf = lockFileName(u)
  switch (myLocks[lf]) {
    case true:
      myLocks[lf] = false
      lockFile.unlock(lockFileName(u), cb)
      break
    case false:
      // already released; just call back asynchronously
      process.nextTick(cb)
      break
    default:
      throw new Error("Attempt to unlock " + u + ", which hasn't been locked")
  }
}
1335
// Pass an existing error straight through; otherwise require that the
// package data has a name. A missing data object is not an error here
// (any read failure already arrived via er).
function needName(er, data) {
  if (er) return er
  if (data && !data.name) return new Error("No name provided")
  return null
}
1341
// Pass an existing error straight through; otherwise require that the
// package data has a version. A missing data object is not an error
// here (any read failure already arrived via er).
function needVersion(er, data) {
  if (er) return er
  if (data && !data.version) return new Error("No version provided")
  return null
}