1 // XXX lib/utils/tar.js and this file need to be rewritten.
3 // URL-to-cache folder mapping:
6 // http://registry.npmjs.org/foo/version -> cache/http!/...
1. Check for url in inFlightURLs. If present, add cb, and return.
2. create inFlightURLs list
13 3. Acquire lock at {cache}/{sha(url)}.lock
14 retries = {cache-lock-retries, def=3}
15 stale = {cache-lock-stale, def=30000}
16 wait = {cache-lock-wait, def=100}
17 4. if lock can't be acquired, then fail
18 5. fetch url, clear lock, call cbs
21 1. urls: http!/server.com/path/to/thing
22 2. c:\path\to\thing: file!/c!/path/to/thing
23 3. /path/to/thing: file!/path/to/thing
24 4. git@ private: git_github.com!isaacs/npm
25 5. git://public: git!/github.com/isaacs/npm
26 6. git+blah:// git-blah!/server.com/foo/bar
29 1. tar into tmp/random/package.tgz
30 2. untar into tmp/random/contents/package, stripping one dir piece
31 3. tar tmp/random/contents/package to cache/n/v/package.tgz
32 4. untar cache/n/v/package.tgz into cache/n/v/package
36 1. fetch to tmp/random/package.tgz
39 adding a name@version:
40 1. registry.get(name/version)
41 2. if response isn't 304, add url(dist.tarball)
45 2. Find a version that satisfies
48 adding a local tarball:
49 1. untar to tmp/random/{blah}
// Single export: the `cache` command function defined below
// (usage/completion are attached to it as properties).
exports = module.exports = cache
60 var mkdir = require("mkdirp")
61 , spawn = require("child_process").spawn
62 , exec = require("child_process").execFile
63 , once = require("once")
64 , fetch = require("./utils/fetch.js")
65 , npm = require("./npm.js")
66 , fs = require("graceful-fs")
67 , rm = require("rimraf")
68 , readJson = require("read-package-json")
69 , registry = npm.registry
70 , log = require("npmlog")
71 , path = require("path")
72 , sha = require("sha")
73 , asyncMap = require("slide").asyncMap
74 , semver = require("semver")
75 , tar = require("./utils/tar.js")
76 , fileCompletion = require("./utils/completion/file-completion.js")
77 , url = require("url")
78 , chownr = require("chownr")
79 , lockFile = require("lockfile")
80 , crypto = require("crypto")
81 , retry = require("retry")
82 , zlib = require("zlib")
83 , chmodr = require("chmodr")
84 , which = require("which")
85 , isGitUrl = require("./utils/is-git-url.js")
// Help text shown for `npm help cache` and on usage errors.
cache.usage = "npm cache add <tarball file>"
            + "\nnpm cache add <folder>"
            + "\nnpm cache add <tarball url>"
            + "\nnpm cache add <git url>"
            + "\nnpm cache add <name>@<version>"
            + "\nnpm cache ls [<path>]"
            + "\nnpm cache clean [<pkg>[@<version>]]"
// Shell tab-completion for `npm cache <subcommand> ...`.
// NOTE(review): this excerpt is missing several lines of the original
// body (e.g. the dispatch on argv[2]); comments describe visible code only.
cache.completion = function (opts, cb) {
  var argv = opts.conf.argv.remain
  // completing the subcommand itself
  if (argv.length === 2) {
    return cb(null, ["add", "ls", "clean"])
  // cache and ls are easy, because the completion is
  // what ls_ returns anyway.
  // just get the partial words, minus the last path part
  var p = path.dirname(opts.partialWords.slice(3).join("/"))
  if (p === ".") p = ""
  // Same semantics as install and publish.
  return npm.commands.install.completion(opts, cb)
// Entry point for `npm cache <cmd>`: dispatches to clean/ls/add.
// NOTE(review): the `switch (cmd)` line and closing braces fall in a gap
// of this excerpt; the case labels below imply them.
function cache (args, cb) {
  var cmd = args.shift()
  case "rm": case "clear": case "clean": return clean(args, cb)
  case "list": case "sl": case "ls": return ls(args, cb)
  case "add": return add(args, cb)
  default: return cb(new Error(
    "Invalid cache action: "+cmd))
// if the pkg and ver are in the cache, then
// just do a readJson and return.
// if they're not, then fetch them from the registry.
function read (name, ver, forceBypass, cb) {
  // forceBypass is optional and defaults to true
  if (typeof cb !== "function") cb = forceBypass, forceBypass = true
  var jsonFile = path.join(npm.cache, name, ver, "package", "package.json")
  function c (er, data) {
    // warn (once) if the package is marked deprecated
    if (data) deprCheck(data)
  // --force skips the cache and re-fetches from the registry
  if (forceBypass && npm.config.get("force")) {
    log.verbose("using force", "skipping cache")
    return addNamed(name, ver, c)
  readJson(jsonFile, function (er, data) {
    er = needName(er, data)
    er = needVersion(er, data)
    // ENOENT/ENOTDIR just mean "not cached yet" -> fall through to fetch
    if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)
    if (er) return addNamed(name, ver, c)
// npm cache ls [<path>]
function ls (args, cb) {
  // normalize "name@ver" args into cache-relative paths
  args = args.join("/").split("@").join("/")
  if (args.substr(-1) === "/") args = args.substr(0, args.length - 1)
  var prefix = npm.config.get("cache")
  // abbreviate $HOME as ~ for display purposes only
  if (0 === prefix.indexOf(process.env.HOME)) {
    prefix = "~" + prefix.substr(process.env.HOME.length)
  ls_(args, npm.config.get("depth"), function (er, files) {
    console.log(files.map(function (f) {
      return path.join(prefix, f)
    }).join("\n").trim())
// Calls cb with list of cached pkgs matching show.
// Thin wrapper: file-name completion rooted at the cache directory,
// limited to `depth` path segments.
// (Reconstructed: this excerpt was missing the closing brace.)
function ls_ (req, depth, cb) {
  return fileCompletion(npm.cache, req, depth, cb)
}
// npm cache clean [<path>]
function clean (args, cb) {
  if (!cb) cb = args, args = []
  args = args.join("/").split("@").join("/")
  if (args.substr(-1) === "/") args = args.substr(0, args.length - 1)
  var f = path.join(npm.cache, path.normalize(args))
  if (f === npm.cache) {
    // cleaning the whole cache: remove each entry, but keep the
    // registry index file "-" unless --force is given
    fs.readdir(npm.cache, function (er, files) {
      asyncMap( files.filter(function (f) {
        return npm.config.get("force") || f !== "-"
      }).map(function (f) {
        return path.join(npm.cache, f)
  } else rm(path.join(npm.cache, path.normalize(args)), cb)
// npm cache add <tarball-url>
// npm cache add <pkg> <ver>
// npm cache add <tarball>
// npm cache add <folder>
cache.add = function (pkg, ver, scrub, cb) {
  // both scrub and ver are optional
  if (typeof cb !== "function") cb = scrub, scrub = false
  if (typeof cb !== "function") cb = ver, ver = null
  // scrub: wipe the whole cache first, then add
  return clean([], function (er) {
    if (er) return cb(er)
  log.verbose("cache add", [pkg, ver])
  return add([pkg, ver], cb)
function add (args, cb) {
  // this is hot code. almost everything passes through here.
  // the args can be any of:
  // ["pkg", "version"]
  // This is tricky, because urls can contain @
  // Also, in some cases we get [name, null] rather
  // than just a single argument.
  var usage = "Usage:\n"
            + " npm cache add <tarball-url>\n"
            + " npm cache add <pkg>@<ver>\n"
            + " npm cache add <tarball>\n"
            + " npm cache add <folder>\n"
  if (args[1] === undefined) args[1] = null
  // at this point the args length must ==2
  if (args[1] !== null) {
  } else if (args.length === 2) {
  log.verbose("cache add", "name=%j spec=%j args=%j", name, spec, args)
  if (!name && !spec) return cb(usage)
  // see if the spec is a url
  // otherwise, treat as name@version
  var p = url.parse(spec) || {}
  log.verbose("parsed url", p)
  // it could be that we got name@http://blah
  // in that case, we will not have a protocol now, but if we
  // split and check, we will.
  if (!name && !p.protocol && spec.indexOf("@") !== -1) {
    spec = spec.split("@")
    spec = spec.join("@")
    return add([name, spec], cb)
  // dispatch on the url scheme (case labels for http(s) and git
  // protocols fall in gaps of this excerpt)
  switch (p.protocol) {
    return addRemoteTarball(spec, null, name, cb)
    return addRemoteGit(spec, p, name, false, cb)
  // if we have a name and a spec, then try name@spec
  // if not, then try just spec (which may try name@"" if not found)
  addNamed(name, spec, cb)
// Download u into tmp, then (when a shasum is expected) verify the
// downloaded file's sha1 before reporting success.
// cb(er, response[, shasum])
// (Reconstructed: this excerpt was missing the error-branch braces and
// the closing lines; logic per upstream npm lib/cache.js.)
function fetchAndShaCheck (u, tmp, shasum, cb) {
  fetch(u, tmp, function (er, response) {
    if (er) {
      log.error("fetch failed", u)
      return cb(er, response)
    }
    if (!shasum) return cb(null, response)
    // validate that the url we just downloaded matches the expected shasum.
    sha.check(tmp, shasum, function (er) {
      return cb(er, response, shasum)
    })
  })
}
// Only have a single download action at once for a given url
// additional calls stack the callbacks.
var inFlightURLs = {}
function addRemoteTarball (u, shasum, name, cb_) {
  // name and shasum are optional
  if (typeof cb_ !== "function") cb_ = name, name = ""
  if (typeof cb_ !== "function") cb_ = shasum, shasum = null
  if (!inFlightURLs[u]) inFlightURLs[u] = []
  var iF = inFlightURLs[u]
  // someone else is already fetching this url; they will invoke our cb
  if (iF.length > 1) return
  // shared completion: release the lock, then drain every queued cb
  function cb (er, data) {
    unlock(u, function () {
      while (c = iF.shift()) c(er, data)
      delete inFlightURLs[u]
  // download into a unique tmp path, then hand off to addLocalTarball
  var tmp = path.join(npm.tmp, Date.now()+"-"+Math.random(), "tmp.tgz")
  lock(u, function (er) {
    if (er) return cb(er)
    log.verbose("addRemoteTarball", [u, shasum])
    mkdir(path.dirname(tmp), function (er) {
      if (er) return cb(er)
      addRemoteTarball_(u, tmp, shasum, done)
  function done (er, resp, shasum) {
    if (er) return cb(er)
    addLocalTarball(tmp, name, shasum, cb)
// Fetch with retries; delegates the actual download + checksum to
// fetchAndShaCheck.
function addRemoteTarball_(u, tmp, shasum, cb) {
  // Tuned to spread 3 attempts over about a minute.
  // See formula at <https://github.com/tim-kos/node-retry>.
  var operation = retry.operation
    ( { retries: npm.config.get("fetch-retries")
      , factor: npm.config.get("fetch-retry-factor")
      , minTimeout: npm.config.get("fetch-retry-mintimeout")
      , maxTimeout: npm.config.get("fetch-retry-maxtimeout") })
  operation.attempt(function (currentAttempt) {
    log.info("retry", "fetch attempt " + currentAttempt
      + " at " + (new Date()).toLocaleTimeString())
    fetchAndShaCheck(u, tmp, shasum, function (er, response, shasum) {
      // Only retry on 408, 5xx or no `response`.
      var sc = response && response.statusCode
      var statusRetry = !sc || (sc === 408 || sc >= 500)
      // operation.retry(er) returns true while attempts remain
      if (er && statusRetry && operation.retry(er)) {
        log.info("retry", "will retry, error on last attempt: " + er)
      cb(er, response, shasum)
// 1. cacheDir = path.join(cache,'_git-remotes',sha1(u))
// 2. checkGitDir(cacheDir) ? 4. : 3. (rm cacheDir if necessary)
// 3. git clone --mirror u cacheDir
// 4. cd cacheDir && git fetch -a origin
// 5. git archive /tmp/random.tgz
// 6. addLocalTarball(/tmp/random.tgz) <gitref> --format=tar --prefix=package/
// silent flag is used if this should error quietly
function addRemoteGit (u, parsed, name, silent, cb_) {
  if (typeof cb_ !== "function") cb_ = name, name = null
  // coalesce concurrent fetches of the same git url (same pattern as
  // addRemoteTarball above)
  if (!inFlightURLs[u]) inFlightURLs[u] = []
  var iF = inFlightURLs[u]
  if (iF.length > 1) return
  function cb (er, data) {
    unlock(u, function () {
      while (c = iF.shift()) c(er, data)
      delete inFlightURLs[u]
  var p, co // cachePath, git-ref we want to check out
  lock(u, function (er) {
    if (er) return cb(er)
    // figure out what we should check out.
    // NOTE(review): this `var co` shadows the outer `co` declared above
    var co = parsed.hash && parsed.hash.substr(1) || "master"
    // if the path is like ssh://foo:22/some/path then it works, but
    // it needs the ssh://
    // If the path is like ssh://foo:some/path then it works, but
    // only if you remove the ssh://
    u = u.replace(/^git\+/, "")
    // ssh paths that are scp-style urls don't need the ssh://
    if (parsed.pathname.match(/^\/?:/)) {
      u = u.replace(/^ssh:\/\//, "")
    // cache dir name: sanitized url plus a short sha1 to avoid collisions
    var v = crypto.createHash("sha1").update(u).digest("hex").slice(0, 8)
    v = u.replace(/[^a-zA-Z0-9]+/g, '-') + '-' + v
    log.verbose("addRemoteGit", [u, co])
    p = path.join(npm.config.get("cache"), "_git-remotes", v)
    // NOTE(review): origUrl's declaration falls in a gap of this excerpt;
    // presumably it captures u before the rewrites above -- confirm
    checkGitDir(p, u, co, origUrl, silent, function(er, data) {
      // normalize modes on the mirror regardless of success
      chmodr(p, npm.modes.file, function(erChmod) {
        if (er) return cb(er, data)
        return cb(erChmod, data)
// Reuse an existing git mirror dir if it points at the right remote;
// otherwise (re-)clone it.
function checkGitDir (p, u, co, origUrl, silent, cb) {
  fs.stat(p, function (er, s) {
    // no cache dir yet -> clone
    if (er) return cloneGitRemote(p, u, co, origUrl, silent, cb)
    // something non-directory in the way -> remove it, then clone
    if (!s.isDirectory()) return rm(p, function (er){
      if (er) return cb(er)
      cloneGitRemote(p, u, co, origUrl, silent, cb)
    var git = npm.config.get("git")
    var args = [ "config", "--get", "remote.origin.url" ]
    which(git, function (err) {
      // NOTE(review): `env` is declared in a gap of this excerpt
      exec(git, args, {cwd: p, env: env}, function (er, stdout, stderr) {
        // NOTE(review): assignment without `var` leaks an implicit
        // global `stdoutTrimmed`
        stdoutTrimmed = (stdout + "\n" + stderr).trim()
        // existing mirror points at a different remote -> re-clone
        if (er || u !== stdout.trim()) {
          log.warn( "`git config --get remote.origin.url` returned "
                  + "wrong result ("+u+")", stdoutTrimmed )
          return rm(p, function (er){
            if (er) return cb(er)
            cloneGitRemote(p, u, co, origUrl, silent, cb)
        log.verbose("git remote.origin.url", stdoutTrimmed)
        archiveGitRemote(p, u, co, origUrl, cb)
// Create a bare mirror clone of u at p, then archive the requested ref.
function cloneGitRemote (p, u, co, origUrl, silent, cb) {
  mkdir(p, function (er) {
    if (er) return cb(er)
    var git = npm.config.get("git")
    // --mirror: bare repo we can `fetch` into and `archive` from later
    var args = [ "clone", "--mirror", u, p ]
    which(git, function (err) {
      exec(git, args, {cwd: p, env: env}, function (er, stdout, stderr) {
        stdout = (stdout + "\n" + stderr).trim()
        // on failure: verbose when silent, error otherwise (the
        // branching lines fall in a gap of this excerpt)
        log.verbose("git clone " + u, stdout)
        log.error("git clone " + u, stdout)
        log.verbose("git clone " + u, stdout)
        archiveGitRemote(p, u, co, origUrl, cb)
// Refresh the mirror, resolve the committish to a sha, then git-archive
// it into a gzipped tarball and add that to the cache.
function archiveGitRemote (p, u, co, origUrl, cb) {
  var git = npm.config.get("git")
  var archive = [ "fetch", "-a", "origin" ]
  var resolve = [ "rev-list", "-n1", co ]
  // 1) refresh the mirror from origin
  exec(git, archive, {cwd: p, env: env}, function (er, stdout, stderr) {
    stdout = (stdout + "\n" + stderr).trim()
    log.error("git fetch -a origin ("+u+")", stdout)
    log.verbose("git fetch -a origin ("+u+")", stdout)
    // NOTE(review): `tmp` assigned without `var` -- declared (or leaked)
    // outside this excerpt
    tmp = path.join(npm.tmp, Date.now()+"-"+Math.random(), "tmp.tgz")
  // 2) resolve the requested committish to a full sha
  function resolveHead () {
    exec(git, resolve, {cwd: p, env: env}, function (er, stdout, stderr) {
      stdout = (stdout + "\n" + stderr).trim()
      log.error("Failed resolving git HEAD (" + u + ")", stderr)
      log.verbose("git rev-list -n1 " + co, stdout)
      // rebuild the original url, carrying the resolved sha
      var parsed = url.parse(origUrl)
      resolved = url.format(parsed)
      // https://github.com/isaacs/npm/issues/3224
      // node incorrectly sticks a / at the start of the path
      // We know that the host won't change, so split and detect this
      var spo = origUrl.split(parsed.host)
      var spr = resolved.split(parsed.host)
      if (spo[1].charAt(0) === ':' && spr[1].charAt(0) === '/')
        spr[1] = spr[1].slice(1)
      resolved = spr.join(parsed.host)
      log.verbose('resolved git url', resolved)
  // 3) git-archive the committish, gzip it into tmp, then cache it
  mkdir(path.dirname(tmp), function (er) {
    if (er) return cb(er)
    var gzip = zlib.createGzip({ level: 9 })
    var git = npm.config.get("git")
    var args = ["archive", co, "--format=tar", "--prefix=package/"]
    var out = fs.createWriteStream(tmp)
    var cp = spawn(git, args, { env: env, cwd: p })
    cp.stderr.on("data", function(chunk) {
      log.silly(chunk.toString(), "git archive")
    cp.stdout.pipe(gzip).pipe(out).on("close", function() {
      addLocalTarball(tmp, function(er, data) {
        // remember exactly which git sha this install came from
        if (data) data._resolved = resolved
// NOTE(review): these lines are the interior of gitEnv(); its function
// header and the gitEnv_ memo declaration fall outside this excerpt.
// git responds to env vars in some weird ways in post-receive hooks
// so don't carry those along.
if (gitEnv_) return gitEnv_
for (var k in process.env) {
  // drop every GIT* var except GIT_PROXY_COMMAND and GIT_SSH
  if (!~['GIT_PROXY_COMMAND','GIT_SSH'].indexOf(k) && k.match(/^GIT/)) continue
  gitEnv_[k] = process.env[k]
// only have one request in flight for a given
var inFlightNames = {}
function addNamed (name, x, data, cb_) {
  // data (a registry document) is optional
  if (typeof cb_ !== "function") cb_ = data, data = null
  log.verbose("addNamed", [name, x])
  var k = name + "@" + x
  if (!inFlightNames[k]) inFlightNames[k] = []
  var iF = inFlightNames[k]
  if (iF.length > 1) return
  // shared completion: tag provenance, unlock, drain queued callbacks
  function cb (er, data) {
    if (data && !data._fromGithub) data._from = k
    unlock(k, function () {
      while (c = iF.shift()) c(er, data)
      delete inFlightNames[k]
  log.verbose("addNamed", [semver.valid(x), semver.validRange(x)])
  lock(k, function (er, fd) {
    if (er) return cb(er)
    // pick strategy: exact version, semver range, or tag fallback
    var fn = ( semver.valid(x, true) ? addNameVersion
             : semver.validRange(x, true) ? addNameRange
    fn(name, x, data, cb)
// Resolve name@tag via the registry's dist-tags.
function addNameTag (name, tag, data, cb_) {
  if (typeof cb_ !== "function") cb_ = data, data = null
  log.info("addNameTag", [name, tag])
  // empty tag means the default tag from config; `explicit` (declared
  // in a gap of this excerpt) records whether a tag was actually given
  tag = npm.config.get("tag")
  function cb(er, data) {
    // might be username/project
    // in that case, try it as a github url.
    if (er && tag.split("/").length === 2) {
      return maybeGithub(tag, name, er, cb_)
  registry.get(name, function (er, data, json, response) {
    if (er) return cb(er)
    // tag exists and points at a published version -> use it
    if (data["dist-tags"] && data["dist-tags"][tag]
        && data.versions[data["dist-tags"][tag]]) {
      var ver = data["dist-tags"][tag]
      return addNamed(name, ver, data.versions[ver], cb)
    // implicit tag miss: fall back to any satisfying version
    if (!explicit && Object.keys(data.versions).length) {
      return addNamed(name, "*", data, cb)
    er = installTargetsError(tag, data)
// Delete registry versions whose engines field rules out the current
// node/npm, honoring engine-strict config and per-package engineStrict.
function engineFilter (data) {
  var npmv = npm.version
    , nodev = npm.config.get("node-version")
    , strict = npm.config.get("engine-strict")
  // no node version to compare against, or --force: keep everything
  if (!nodev || npm.config.get("force")) return data
  Object.keys(data.versions || {}).forEach(function (v) {
    var eng = data.versions[v].engines
    // only enforce when strict mode or the package itself opts in
    if (!strict && !data.versions[v].engineStrict) return
    if (eng.node && !semver.satisfies(nodev, eng.node, true)
        || eng.npm && !semver.satisfies(npmv, eng.npm, true)) {
      delete data.versions[v]
// Resolve name@range to the best matching published version.
function addNameRange (name, range, data, cb) {
  if (typeof cb !== "function") cb = data, data = null
  range = semver.validRange(range, true)
  if (range === null) return cb(new Error(
    "Invalid version range: "+range))
  log.silly("addNameRange", {name:name, range:range, hasData:!!data})
  // fetch the registry doc only if we weren't handed one
  if (data) return next()
  registry.get(name, function (er, d, json, response) {
    if (er) return cb(er)
    log.silly( "addNameRange", "number 2"
             , {name:name, range:range, hasData:!!data})
    log.silly("addNameRange", "versions"
             , [data.name, Object.keys(data.versions || {})])
    // if the tagged version satisfies, then use that.
    var tagged = data["dist-tags"][npm.config.get("tag")]
    // (the `if (tagged` head falls in a gap of this excerpt)
        && data.versions[tagged]
        && semver.satisfies(tagged, range, true)) {
      return addNamed(name, tagged, data.versions[tagged], cb)
    // find the max satisfying version.
    var versions = Object.keys(data.versions || {})
    var ms = semver.maxSatisfying(versions, range, true)
    // no satisfying version -> error (the `if (!ms)` guard falls in a
    // gap of this excerpt)
    return cb(installTargetsError(range, data))
    // if we don't have a registry connection, try to see if
    // there's a cached copy that will be ok.
    addNamed(name, ms, data.versions[ms], cb)
// Build the "no compatible version" Error for a failed install target,
// listing dist-tags (that also name a real version) plus all versions.
// (Reconstructed tail -- er.code / return er -- per upstream npm.)
function installTargetsError (requested, data) {
  var targets = Object.keys(data["dist-tags"]).filter(function (f) {
    return (data.versions || {}).hasOwnProperty(f)
  }).concat(Object.keys(data.versions || {}))

  requested = data.name + (requested ? "@'" + requested + "'" : "")

  targets = targets.length
          ? "Valid install targets:\n" + JSON.stringify(targets) + "\n"
          : "No valid targets found.\n"
          + "Perhaps not compatible with your version of node?"

  var er = new Error( "No compatible version found: "
                    + requested + "\n" + targets)
  er.code = "ETARGET"
  return er
}
// Resolve name@exact-version: use the cached copy when fresh, otherwise
// download the registry tarball.
function addNameVersion (name, v, data, cb) {
  if (typeof cb !== "function") cb = data, data = null
  var ver = semver.valid(v, true)
  if (!ver) return cb(new Error("Invalid version: "+v))
  // NOTE(review): `response`/`dist` setup falls in gaps of this excerpt
  registry.get(name + "/" + ver, function (er, d, json, resp) {
    if (er) return cb(er)
  if (!dist) return cb(new Error("No dist in "+data._id+" package"))
  if (!dist.tarball) return cb(new Error(
    "No dist.tarball in " + data._id + " package"))
  // fresh registry data (non-304) or --force -> download the tarball
  if ((response && response.statusCode !== 304) || npm.config.get("force")) {
  // we got cached data, so let's see if we have a tarball.
  var pkgroot = path.join(npm.cache, name, ver)
  var pkgtgz = path.join(pkgroot, "package.tgz")
  var pkgjson = path.join(pkgroot, "package", "package.json")
  fs.stat(pkgtgz, function (er, s) {
    readJson(pkgjson, function (er, data) {
      er = needName(er, data)
      er = needVersion(er, data)
      if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR")
      // unreadable/missing json -> re-fetch; otherwise cache hit
      if (er) return fetchit()
      return cb(null, data)
  } else return fetchit()
  function fetchit () {
    if (!npm.config.get("registry")) {
      return cb(new Error("Cannot fetch: "+dist.tarball))
    // use the same protocol as the registry.
    // https registry --> https tarballs, but
    // only if they're the same hostname, or else
    // detached tarballs may not work.
    var tb = url.parse(dist.tarball)
    var rp = url.parse(npm.config.get("registry"))
    if (tb.hostname === rp.hostname
        && tb.protocol !== rp.protocol) {
      tb.protocol = url.parse(npm.config.get("registry")).protocol
    // only add non-shasum'ed packages if --forced.
    // only ancient things would lack this for good reasons nowadays.
    if (!dist.shasum && !npm.config.get("force")) {
      return cb(new Error("package lacks shasum: " + data._id))
    return addRemoteTarball( tb
// Add a local file or directory; falls back to registry name or github
// shorthand when the path doesn't stat.
function addLocal (p, name, cb_) {
  if (typeof cb_ !== "function") cb_ = name, name = ""
  function cb (er, data) {
    unlock(p, function () {
      // if it doesn't have a / in it, it might be a
      // plain package name rather than a path -> try the registry
      if (p.indexOf("/") === -1 && p.charAt(0) !== "."
          && (process.platform !== "win32" || p.indexOf("\\") === -1)) {
        return addNamed(p, "", cb_)
      log.error("addLocal", "Could not install %s", p)
      // record provenance unless github resolution already did
      if (data && !data._fromGithub) data._from = p
  lock(p, function (er) {
    if (er) return cb(er)
    // figure out if this is a folder or file.
    fs.stat(p, function (er, s) {
      // might be username/project
      // in that case, try it as a github url.
      if (p.split("/").length === 2) {
        return maybeGithub(p, name, er, cb)
      if (s.isDirectory()) addLocalDirectory(p, name, cb)
      else addLocalTarball(p, name, cb)
// Try to resolve "user/project" shorthand as a github remote: first the
// public git:// url (silently), then the private git+ssh:// form.  When
// both fail, report the *original* error `er`, not the github attempts'.
function maybeGithub (p, name, er, cb) {
  var u = "git://github.com/" + p
    , up = url.parse(u)
  log.info("maybeGithub", "Attempting %s from %s", p, u)

  // silent=true: a miss on the public url is expected, don't log errors
  return addRemoteGit(u, up, name, true, function (er2, data) {
    if (er2) {
      var upriv = "git+ssh://git@github.com:" + p
        , uppriv = url.parse(upriv)

      log.info("maybeGithub", "Attempting %s from %s", p, upriv)

      // BUG FIX: arguments were passed as (upriv, uppriv, false, name, ...),
      // swapping the name and silent parameters of
      // addRemoteGit(u, parsed, name, silent, cb_).
      return addRemoteGit(upriv, uppriv, name, false, function (er3, data) {
        if (er3) return cb(er)
        success(upriv, data)
      })
    }
    success(u, data)
  })

  function success (u, data) {
    data._from = u
    data._fromGithub = true
    return cb(null, data)
  }
}
// Add a tarball from an arbitrary local path: route tmp/cache paths to
// the specialized handlers, otherwise copy into tmp first.
function addLocalTarball (p, name, shasum, cb_) {
  // name and shasum are optional
  if (typeof cb_ !== "function") cb_ = shasum, shasum = null
  if (typeof cb_ !== "function") cb_ = name, name = ""
  // if it's a tar, and not in place,
  // then unzip to .tmp, add the tmp folder, and clean up tmp
  if (p.indexOf(npm.tmp) === 0)
    return addTmpTarball(p, name, shasum, cb_)
  if (p.indexOf(npm.cache) === 0) {
    if (path.basename(p) !== "package.tgz") return cb_(new Error(
      "Not a valid cache tarball name: "+p))
    return addPlacedTarball(p, name, shasum, cb_)
  function cb (er, data) {
    // record where this package actually came from
    if (data) data._resolved = p
  // just copy it over and then add the temp tarball file.
  var tmp = path.join(npm.tmp, name + Date.now()
                      + "-" + Math.random(), "tmp.tgz")
  mkdir(path.dirname(tmp), function (er) {
    if (er) return cb(er)
    var from = fs.createReadStream(p)
      , to = fs.createWriteStream(tmp)
    // report only the first stream error (the errState declaration
    // falls in a gap of this excerpt)
    function errHandler (er) {
      return cb(errState = er)
    from.on("error", errHandler)
    to.on("error", errHandler)
    to.on("close", function () {
      // normalize file mode before handing off to the cache
      log.verbose("chmod", tmp, npm.modes.file.toString(8))
      fs.chmod(tmp, npm.modes.file, function (er) {
        if (er) return cb(er)
        addTmpTarball(tmp, name, shasum, cb)
// to maintain the cache dir's permissions consistently.
// (the cacheStat memo variable is declared in a gap of this excerpt)
function getCacheStat (cb) {
  if (cacheStat) return cb(null, cacheStat)
  fs.stat(npm.cache, function (er, st) {
    // missing cache dir -> create it (which also computes ownership)
    if (er) return makeCacheDir(cb)
    if (!st.isDirectory()) {
      log.error("getCacheStat", "invalid cache dir %j", npm.cache)
    // memoize for subsequent callers
    return cb(null, cacheStat = st)
// Create the cache dir, picking the uid/gid it should be owned by
// (the invoking user when under sudo, HOME's owner when root).
function makeCacheDir (cb) {
  // no getuid (e.g. windows): ownership doesn't apply, just mkdir
  if (!process.getuid) return mkdir(npm.cache, cb)
  var uid = +process.getuid()
    , gid = +process.getgid()
  // under sudo, chown the cache to the invoking user, not root
  if (process.env.SUDO_UID) uid = +process.env.SUDO_UID
  if (process.env.SUDO_GID) gid = +process.env.SUDO_GID
  if (uid !== 0 || !process.env.HOME) {
    cacheStat = {uid: uid, gid: gid}
    return mkdir(npm.cache, afterMkdir)
  // running as root with a HOME: mirror HOME's ownership
  fs.stat(process.env.HOME, function (er, st) {
    log.error("makeCacheDir", "homeless?")
    log.silly("makeCacheDir", "cache dir uid, gid", [st.uid, st.gid])
    return mkdir(npm.cache, afterMkdir)
  function afterMkdir (er, made) {
    if (er || !cacheStat || isNaN(cacheStat.uid) || isNaN(cacheStat.gid)) {
      return cb(er, cacheStat)
    // nothing was created -> nothing to chown
    if (!made) return cb(er, cacheStat)
    // ensure that the ownership is correct.
    chownr(made, cacheStat.uid, cacheStat.gid, function (er) {
      return cb(er, cacheStat)
// Add a tarball already sitting at its final cache location; resolves
// the cache dir's ownership first so unpacked files get the right
// uid/gid.  (Reconstructed: this excerpt was missing the closing lines.)
function addPlacedTarball (p, name, shasum, cb) {
  if (!cb) cb = name, name = ""
  getCacheStat(function (er, cs) {
    if (er) return cb(er)
    return addPlacedTarball_(p, name, cs.uid, cs.gid, shasum, cb)
  })
}
979 // Resolved sum is the shasum from the registry dist object, but
980 // *not* necessarily the shasum of this tarball, because for stupid
981 // historical reasons, npm re-packs each package an extra time through
982 // a temp directory, so all installed packages are actually built with
983 // *this* version of npm, on this machine.
985 // Once upon a time, this meant that we could change package formats
986 // around and fix junk that might be added by incompatible tar
987 // implementations. Then, for a while, it was a way to correct bs
988 // added by bugs in our own tar implementation. Now, it's just
989 // garbage, but cleaning it up is a pain, and likely to cause issues
990 // if anything is overlooked, so it's not high priority.
992 // If you're bored, and looking to make npm go faster, and you've
993 // already made it this far in this file, here's a better methodology:
995 // cache.add should really be cache.place. That is, it should take
996 // a set of arguments like it does now, but then also a destination
999 // cache.add('foo@bar', '/path/node_modules/foo', cb)
1001 // 1. Resolve 'foo@bar' to some specific:
1006 // 2. If resolved through the registry, then pick up the dist.shasum
1008 // 3. Acquire request() stream fetching bytes: FETCH
1009 // 4. FETCH.pipe(tar unpack stream to dest)
1010 // 5. FETCH.pipe(shasum generator)
1011 // When the tar and shasum streams both finish, make sure that the
1012 // shasum matches dist.shasum, and if not, clean up and bail.
1016 // 1. read package.json
1017 // 2. get root package object (for rev, and versions)
1018 // 3. update root package doc with version info
1019 // 4. remove _attachments object
1020 // 5. remove versions object
1021 // 5. jsonify, remove last }
1022 // 6. get stream: registry.put(/package)
1023 // 7. write trailing-}-less JSON
1024 // 8. write "_attachments":
1025 // 9. JSON.stringify(attachments), remove trailing }
1026 // 10. Write start of attachments (stubs)
1027 // 11. JSON(filename)+':{"type":"application/octet-stream","data":"'
1028 // 12. acquire tar packing stream, PACK
1029 // 13. PACK.pipe(PUT)
1030 // 14. PACK.pipe(shasum generator)
1031 // 15. when PACK finishes, get shasum
// 16. PUT.write('"}},') (finish _attachments)
1033 // 17. update "versions" object with current package version
1034 // (including dist.shasum and dist.tarball)
1035 // 18. write '"versions":' + JSON(versions)
1036 // 19. write '}}' (versions, close main doc)
function addPlacedTarball_ (p, name, uid, gid, resolvedSum, cb) {
  // now we know it's in place already as .cache/name/ver/package.tgz
  // unpack to .cache/name/ver/package/, read the package.json,
  // and fire cb with the json data.
  var target = path.dirname(p)
    , folder = path.join(target, "package")
  lock(folder, function (er) {
    if (er) return cb(er)
  // on failure, remove the half-unpacked folder before reporting
  function rmUnpack () {
    rm(folder, function (er) {
      unlock(folder, function () {
        log.error("addPlacedTarball", "Could not remove %j", folder)
  function thenUnpack () {
    tar.unpack(p, folder, null, null, uid, gid, function (er) {
      log.error("addPlacedTarball", "Could not unpack %j to %j", p, target)
      // calculate the sha of the file that we just unpacked.
      // this is so that the data is available when publishing.
      sha.get(p, function (er, shasum) {
        log.error("addPlacedTarball", "shasum fail", p)
        readJson(path.join(folder, "package.json"), function (er, data) {
          er = needName(er, data)
          er = needVersion(er, data)
          log.error("addPlacedTarball", "Couldn't read json in %j"
          // stash the computed shasum on the package data
          data.dist = data.dist || {}
          data.dist.shasum = shasum
          // fix modes, then ownership, on the placed tarball
          asyncMap([p], function (f, cb) {
            log.verbose("chmod", f, npm.modes.file.toString(8))
            fs.chmod(f, npm.modes.file, cb)
          }, function (f, cb) {
            if (process.platform === "win32") {
              log.silly("chown", "skipping for windows", f)
            } else if (typeof uid === "number"
                       && typeof gid === "number"
                       && parseInt(uid, 10) === uid
                       && parseInt(gid, 10) === gid) {
              log.verbose("chown", f, [uid, gid])
              fs.chown(f, uid, gid, cb)
              // (else-branch header falls in a gap of this excerpt)
              log.verbose("chown", "skip for invalid uid/gid", [f, uid, gid])
// At this point, if shasum is set, it's something that we've already
// read and checked. Just stashing it in the data at this point.
function addLocalDirectory (p, name, shasum, cb) {
  // name and shasum are optional
  if (typeof cb !== "function") cb = shasum, shasum = ""
  if (typeof cb !== "function") cb = name, name = ""
  // if it's a folder, then read the package.json,
  // tar it to the proper place, and add the cache tar
  if (p.indexOf(npm.cache) === 0) return cb(new Error(
    "Adding a cache directory to the cache will make the world implode."))
  readJson(path.join(p, "package.json"), false, function (er, data) {
    er = needName(er, data)
    er = needVersion(er, data)
    if (er) return cb(er)
    var random = Date.now() + "-" + Math.random()
      , tmp = path.join(npm.tmp, random)
      , tmptgz = path.resolve(tmp, "tmp.tgz")
      , placed = path.resolve( npm.cache, data.name
                             , data.version, "package.tgz" )
      , placeDirect = path.basename(p) === "package"
      , tgz = placeDirect ? placed : tmptgz
    getCacheStat(function (er, cs) {
      mkdir(path.dirname(tgz), function (er, made) {
        if (er) return cb(er)
        // "fancy" packing only for user dirs, not our own tmp/cache dirs
        var fancy = p.indexOf(npm.tmp) !== 0
                    && p.indexOf(npm.cache) !== 0
        tar.pack(tgz, p, data, fancy, function (er) {
          log.error( "addLocalDirectory", "Could not pack %j to %j"
          // if we don't get a cache stat, or if the gid/uid is not
          // a number, then just move on. chown would fail anyway.
          if (!cs || isNaN(cs.uid) || isNaN(cs.gid)) return cb()
          chownr(made || tgz, cs.uid, cs.gid, function (er) {
            if (er) return cb(er)
            addLocalTarball(tgz, name, shasum, cb)
// Unpack a tarball sitting in npm.tmp next to itself, then add the
// unpacked folder to the cache.
function addTmpTarball (tgz, name, shasum, cb) {
  if (!cb) cb = name, name = ""
  getCacheStat(function (er, cs) {
    if (er) return cb(er)
    var contents = path.dirname(tgz)
    tar.unpack( tgz, path.resolve(contents, "package")
    addLocalDirectory(path.resolve(contents, "package"), name, shasum, cb)
// Unpack a cached pkg@ver into unpackTarget with optional modes/owner.
function unpack (pkg, ver, unpackTarget, dMode, fMode, uid, gid, cb) {
  // dMode/fMode/uid/gid are all optional
  if (typeof cb !== "function") cb = gid, gid = null
  if (typeof cb !== "function") cb = uid, uid = null
  if (typeof cb !== "function") cb = fMode, fMode = null
  if (typeof cb !== "function") cb = dMode, dMode = null
  // make sure the package is actually in the cache first
  read(pkg, ver, false, function (er, data) {
    log.error("unpack", "Could not read data for %s", pkg + "@" + ver)
    // clear any existing build artifacts at the target first
    npm.commands.unbuild([unpackTarget], true, function (er) {
      if (er) return cb(er)
      tar.unpack( path.join(npm.cache, pkg, ver, "package.tgz")
// Remember and warn (once per package id) about deprecated packages.
// `deprecated` / `deprWarned` are module-level memo objects declared
// outside this excerpt.  (Reconstructed: the early-return and closing
// braces were missing; logic per upstream npm.)
function deprCheck (data) {
  if (deprecated[data._id]) data.deprecated = deprecated[data._id]
  if (data.deprecated) deprecated[data._id] = data.deprecated
  else return
  if (!deprWarned[data._id]) {
    deprWarned[data._id] = true
    log.warn("deprecated", "%s: %s", data._id, data.deprecated)
  }
}
// Map an arbitrary url/path to a lock file path inside the cache dir:
// short sha1 prefix (collision safety) + sanitized, length-limited slug
// (readability).  (Reconstructed: the substr-truncation lines were
// missing from this excerpt; per upstream npm.)
function lockFileName (u) {
  var c = u.replace(/[^a-zA-Z0-9]+/g, "-").replace(/^-+|-+$/g, "")
    , h = crypto.createHash("sha1").update(u).digest("hex")
  h = h.substr(0, 8)
  c = c.substr(-32)
  log.silly("lockFile", h + "-" + c, u)
  return path.resolve(npm.config.get("cache"), h + "-" + c + ".lock")
}
// Acquire the file lock for u, creating the cache dir if needed.
function lock (u, cb) {
  // the cache dir needs to exist already for this.
  getCacheStat(function (er, cs) {
    if (er) return cb(er)
    var opts = { stale: npm.config.get("cache-lock-stale")
               , retries: npm.config.get("cache-lock-retries")
               , wait: npm.config.get("cache-lock-wait") }
    var lf = lockFileName(u)
    log.verbose("lock", u, lf)
    lockFile.lock(lf, opts, function(er) {
      // remember locks we own so unlock() can skip ones we don't
      // (myLocks is declared in a gap of this excerpt)
      if (!er) myLocks[lf] = true
// Release the lock for u, but only if this process actually holds it.
// (Reconstructed: the myLocks reset line was missing from this excerpt.
// Also reuses the already-computed `lf` instead of calling
// lockFileName(u) a second time.)
function unlock (u, cb) {
  var lf = lockFileName(u)
  if (!myLocks[lf]) return process.nextTick(cb)
  myLocks[lf] = false
  lockFile.unlock(lf, cb)
}
// Pass through an existing error; otherwise error if the package data
// lacks a name.  Returns null when there is nothing wrong (data may be
// absent -- that is handled elsewhere).
// (Reconstructed: the ternary's first and last arms were missing.)
function needName(er, data) {
  return er ? er
       : (data && !data.name) ? new Error("No name provided")
       : null
}
// Pass through an existing error; otherwise error if the package data
// lacks a version.  Returns null when there is nothing wrong (data may
// be absent -- that is handled elsewhere).
// (Reconstructed: the ternary's first and last arms were missing.)
function needVersion(er, data) {
  return er ? er
       : (data && !data.version) ? new Error("No version provided")
       : null
}