The data contained in this repository can be downloaded to your computer using one of several clients.
Please see the documentation of your version control software client for more information.
Please select the desired protocol below to get the URL.
This URL has Read-Only access.
main_repo / deps / npm / lib / cache.js @ 5aef65a9
History | View | Annotate | Download (31.7 KB)
1 |
// XXX lib/utils/tar.js and this file need to be rewritten.
|
---|---|
2 |
|
3 |
// URL-to-cache folder mapping:
|
4 |
// : -> !
|
5 |
// @ -> _
|
6 |
// http://registry.npmjs.org/foo/version -> cache/http!/...
|
7 |
//
|
8 |
|
9 |
/*
|
10 |
fetching a url:
|
11 |
1. Check for url in inFlightUrls. If present, add cb, and return.
|
12 |
2. create inFlightURL list
|
13 |
3. Acquire lock at {cache}/{sha(url)}.lock
|
14 |
retries = {cache-lock-retries, def=3}
|
15 |
stale = {cache-lock-stale, def=30000}
|
16 |
wait = {cache-lock-wait, def=100}
|
17 |
4. if lock can't be acquired, then fail
|
18 |
5. fetch url, clear lock, call cbs
|
19 |
|
20 |
cache folders:
|
21 |
1. urls: http!/server.com/path/to/thing
|
22 |
2. c:\path\to\thing: file!/c!/path/to/thing
|
23 |
3. /path/to/thing: file!/path/to/thing
|
24 |
4. git@ private: git_github.com!isaacs/npm
|
25 |
5. git://public: git!/github.com/isaacs/npm
|
26 |
6. git+blah:// git-blah!/server.com/foo/bar
|
27 |
|
28 |
adding a folder:
|
29 |
1. tar into tmp/random/package.tgz
|
30 |
2. untar into tmp/random/contents/package, stripping one dir piece
|
31 |
3. tar tmp/random/contents/package to cache/n/v/package.tgz
|
32 |
4. untar cache/n/v/package.tgz into cache/n/v/package
|
33 |
5. rm tmp/random
|
34 |
|
35 |
Adding a url:
|
36 |
1. fetch to tmp/random/package.tgz
|
37 |
2. goto folder(2)
|
38 |
|
39 |
adding a name@version:
|
40 |
1. registry.get(name/version)
|
41 |
2. if response isn't 304, add url(dist.tarball)
|
42 |
|
43 |
adding a name@range:
|
44 |
1. registry.get(name)
|
45 |
2. Find a version that satisfies
|
46 |
3. add name@version
|
47 |
|
48 |
adding a local tarball:
|
49 |
1. untar to tmp/random/{blah}
|
50 |
2. goto folder(2)
|
51 |
*/
|
52 |
|
53 |
// Public surface of the cache module: the `cache` command function itself,
// plus helpers used directly by other npm commands.
// NOTE(review): `unpack` is attached here but defined later in the file,
// outside this view — confirm it exists before the first call.
exports = module.exports = cache
cache.read = read
cache.clean = clean
cache.unpack = unpack
cache.lock = lock
cache.unlock = unlock
|
60 |
var mkdir = require("mkdirp") |
61 |
, exec = require("./utils/exec.js")
|
62 |
, spawn = require("child_process").spawn
|
63 |
, once = require("once")
|
64 |
, fetch = require("./utils/fetch.js")
|
65 |
, npm = require("./npm.js")
|
66 |
, fs = require("graceful-fs")
|
67 |
, rm = require("rimraf")
|
68 |
, readJson = require("read-package-json")
|
69 |
, registry = npm.registry |
70 |
, log = require("npmlog")
|
71 |
, path = require("path")
|
72 |
, sha = require("./utils/sha.js")
|
73 |
, asyncMap = require("slide").asyncMap
|
74 |
, semver = require("semver")
|
75 |
, tar = require("./utils/tar.js")
|
76 |
, fileCompletion = require("./utils/completion/file-completion.js")
|
77 |
, url = require("url")
|
78 |
, chownr = require("chownr")
|
79 |
, lockFile = require("lockfile")
|
80 |
, crypto = require("crypto")
|
81 |
, retry = require("retry")
|
82 |
, zlib = require("zlib")
|
83 |
|
84 |
// Usage string shown by `npm help cache` and on bad invocations.
cache.usage = "npm cache add <tarball file>"
            + "\nnpm cache add <folder>"
            + "\nnpm cache add <tarball url>"
            + "\nnpm cache add <git url>"
            + "\nnpm cache add <name>@<version>"
            + "\nnpm cache ls [<path>]"
            + "\nnpm cache clean [<pkg>[@<version>]]"
91 |
|
92 |
// Shell tab-completion for `npm cache <subcommand> ...`.
// With no subcommand yet, offer the three verbs; for clean/ls complete
// against the cache's own directory tree; for add reuse install's completion.
cache.completion = function (opts, cb) {

  var argv = opts.conf.argv.remain
  if (argv.length === 2) {
    return cb(null, ["add", "ls", "clean"])
  }

  switch (argv[2]) {
    case "clean":
    case "ls":
      // cache and ls are easy, because the completion is
      // what ls_ returns anyway.
      // just get the partial words, minus the last path part
      var p = path.dirname(opts.partialWords.slice(3).join("/"))
      if (p === ".") p = ""
      return ls_(p, 2, cb)
    case "add":
      // Same semantics as install and publish.
      return npm.commands.install.completion(opts, cb)
  }
  // NOTE(review): unknown subcommands fall through without calling cb.
}
113 |
|
114 |
// Entry point for `npm cache <cmd> ...` — dispatches to clean/ls/add.
// args: remaining CLI words (cmd is shifted off); cb(er) on completion.
function cache (args, cb) {
  var cmd = args.shift()
  switch (cmd) {
    case "rm": case "clear": case "clean": return clean(args, cb)
    case "list": case "sl": case "ls": return ls(args, cb)
    case "add": return add(args, cb)
    default: return cb(new Error(
      "Invalid cache action: "+cmd))
  }
}
124 |
|
125 |
// if the pkg and ver are in the cache, then
|
126 |
// just do a readJson and return.
|
127 |
// if they're not, then fetch them from the registry.
|
128 |
// if the pkg and ver are in the cache, then
// just do a readJson and return.
// if they're not, then fetch them from the registry.
// forceBypass (default true) lets --force skip the cached copy entirely.
function read (name, ver, forceBypass, cb) {
  // optional-arg shuffle: read(name, ver, cb)
  if (typeof cb !== "function") cb = forceBypass, forceBypass = true
  var jsonFile = path.join(npm.cache, name, ver, "package", "package.json")
  function c (er, data) {
    if (data) deprCheck(data)
    return cb(er, data)
  }

  if (forceBypass && npm.config.get("force")) {
    log.verbose("using force", "skipping cache")
    return addNamed(name, ver, c)
  }

  readJson(jsonFile, function (er, data) {
    // a missing file/dir just means "not cached yet"; other errors are real
    if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)
    if (er) return addNamed(name, ver, c)
    deprCheck(data)
    c(er, data)
  })
}
148 |
|
149 |
// npm cache ls [<path>]
|
150 |
// npm cache ls [<path>]
// Prints cache contents (depth-limited), shortening $HOME to "~" for display.
function ls (args, cb) {
  // "name@ver" and "name/ver" are both accepted; normalize to slashes
  args = args.join("/").split("@").join("/")
  if (args.substr(-1) === "/") args = args.substr(0, args.length - 1)
  var prefix = npm.config.get("cache")
  // NOTE(review): assumes process.env.HOME is set; on Windows it may not be.
  if (0 === prefix.indexOf(process.env.HOME)) {
    prefix = "~" + prefix.substr(process.env.HOME.length)
  }
  ls_(args, npm.config.get("depth"), function (er, files) {
    console.log(files.map(function (f) {
      return path.join(prefix, f)
    }).join("\n").trim())
    cb(er, files)
  })
}
164 |
|
165 |
// Calls cb with list of cached pkgs matching show.
|
166 |
// Calls cb with list of cached pkgs matching show.
// Thin wrapper over fileCompletion rooted at the cache dir.
function ls_ (req, depth, cb) {
  return fileCompletion(npm.cache, req, depth, cb)
}
169 |
|
170 |
// npm cache clean [<path>]
|
171 |
// npm cache clean [<path>]
// With no args, wipes the whole cache dir (sparing "-" unless --force);
// otherwise removes just the named subtree.
function clean (args, cb) {
  if (!cb) cb = args, args = []
  if (!args) args = []
  args = args.join("/").split("@").join("/")
  if (args.substr(-1) === "/") args = args.substr(0, args.length - 1)
  var f = path.join(npm.cache, path.normalize(args))
  if (f === npm.cache) {
    fs.readdir(npm.cache, function (er, files) {
      // unreadable cache dir => nothing to clean; deliberate best-effort
      if (er) return cb()
      asyncMap( files.filter(function (f) {
                  // "-" is the registry-metadata dir; only --force removes it
                  return npm.config.get("force") || f !== "-"
                }).map(function (f) {
                  return path.join(npm.cache, f)
                })
              , rm, cb )
    })
  } else rm(path.join(npm.cache, path.normalize(args)), cb)
}
189 |
|
190 |
// npm cache add <tarball-url>
|
191 |
// npm cache add <pkg> <ver>
|
192 |
// npm cache add <tarball>
|
193 |
// npm cache add <folder>
|
194 |
// npm cache add <tarball-url>
// npm cache add <pkg> <ver>
// npm cache add <tarball>
// npm cache add <folder>
// Programmatic entry; scrub=true wipes the cache first.
cache.add = function (pkg, ver, scrub, cb) {
  // optional-arg shuffle: (pkg, cb) / (pkg, ver, cb)
  if (typeof cb !== "function") cb = scrub, scrub = false
  if (typeof cb !== "function") cb = ver, ver = null
  if (scrub) {
    return clean([], function (er) {
      if (er) return cb(er)
      add([pkg, ver], cb)
    })
  }
  log.verbose("cache add", [pkg, ver])
  return add([pkg, ver], cb)
}
206 |
|
207 |
// Core "add something to the cache" router.
// this is hot code. almost everything passes through here.
// the args can be any of:
// ["url"]
// ["pkg", "version"]
// ["pkg@version"]
// ["pkg", "url"]
// This is tricky, because urls can contain @
// Also, in some cases we get [name, null] rather
// that just a single argument.
function add (args, cb) {
  var usage = "Usage:\n"
            + "    npm cache add <tarball-url>\n"
            + "    npm cache add <pkg>@<ver>\n"
            + "    npm cache add <tarball>\n"
            + "    npm cache add <folder>\n"
    , name
    , spec

  if (args[1] === undefined) args[1] = null

  // at this point the args length must ==2
  if (args[1] !== null) {
    name = args[0]
    spec = args[1]
  } else if (args.length === 2) {
    spec = args[0]
  }

  log.verbose("cache add", "name=%j spec=%j args=%j", name, spec, args)

  // NOTE(review): cb is invoked with a plain string here, not an Error —
  // npm's top-level error handler tolerates it, but callers should not rely
  // on er instanceof Error.
  if (!name && !spec) return cb(usage)

  // see if the spec is a url
  // otherwise, treat as name@version
  var p = url.parse(spec) || {}
  log.verbose("parsed url", p)

  // it could be that we got name@http://blah
  // in that case, we will not have a protocol now, but if we
  // split and check, we will.
  if (!name && !p.protocol && spec.indexOf("@") !== -1) {
    spec = spec.split("@")
    name = spec.shift()
    spec = spec.join("@")
    return add([name, spec], cb)
  }

  switch (p.protocol) {
    case "http:":
    case "https:":
      return addRemoteTarball(spec, null, name, cb)
    case "git:":
    case "git+http:":
    case "git+https:":
    case "git+rsync:":
    case "git+ftp:":
    case "git+ssh:":
      //p.protocol = p.protocol.replace(/^git([^:])/, "$1")
      return addRemoteGit(spec, p, name, cb)
    default:
      // if we have a name and a spec, then try name@spec
      // if not, then try just spec (which may try name@"" if not found)
      if (name) {
        addNamed(name, spec, cb)
      } else {
        addLocal(spec, cb)
      }
  }
}
278 |
|
279 |
// Download url `u` to file `tmp`, then (if a shasum was supplied) verify it.
// cb(er, response) on fetch failure; cb() / sha.check's result otherwise.
function fetchAndShaCheck (u, tmp, shasum, cb) {
  fetch(u, tmp, function (er, response) {
    if (er) {
      log.error("fetch failed", u)
      return cb(er, response)
    }
    // no expected shasum => nothing further to verify
    if (!shasum) return cb()
    // validate that the url we just downloaded matches the expected shasum.
    sha.check(tmp, shasum, cb)
  })
}
290 |
|
291 |
// Only have a single download action at once for a given url
|
292 |
// additional calls stack the callbacks.
|
293 |
// Only have a single download action at once for a given url
// additional calls stack the callbacks.
var inFlightURLs = {}
// Fetch a remote tarball url into a temp file, then add it as a local
// tarball. Concurrent requests for the same url share one download.
function addRemoteTarball (u, shasum, name, cb_) {
  // optional-arg shuffle: (u, cb) / (u, shasum, cb)
  if (typeof cb_ !== "function") cb_ = name, name = ""
  if (typeof cb_ !== "function") cb_ = shasum, shasum = null

  if (!inFlightURLs[u]) inFlightURLs[u] = []
  var iF = inFlightURLs[u]
  iF.push(cb_)
  // someone else is already downloading this url; our cb is queued
  if (iF.length > 1) return

  // fan the single result out to every queued callback, after unlocking
  function cb (er, data) {
    if (data) {
      data._from = u
      data._resolved = u
    }
    unlock(u, function () {
      var c
      while (c = iF.shift()) c(er, data)
      delete inFlightURLs[u]
    })
  }

  var tmp = path.join(npm.tmp, Date.now()+"-"+Math.random(), "tmp.tgz")

  lock(u, function (er) {
    if (er) return cb(er)

    log.verbose("addRemoteTarball", [u, shasum])
    mkdir(path.dirname(tmp), function (er) {
      if (er) return cb(er)
      addRemoteTarball_(u, tmp, shasum, done)
    })
  })

  function done (er) {
    if (er) return cb(er)
    addLocalTarball(tmp, name, cb)
  }
}
332 |
|
333 |
// Download with retry. Only network-ish failures (no response, 408, 5xx)
// are retried; 4xx and shasum mismatches fail immediately.
function addRemoteTarball_(u, tmp, shasum, cb) {
  // Tuned to spread 3 attempts over about a minute.
  // See formula at <https://github.com/tim-kos/node-retry>.
  var operation = retry.operation
    ( { retries: npm.config.get("fetch-retries")
      , factor: npm.config.get("fetch-retry-factor")
      , minTimeout: npm.config.get("fetch-retry-mintimeout")
      , maxTimeout: npm.config.get("fetch-retry-maxtimeout") })

  operation.attempt(function (currentAttempt) {
    log.info("retry", "fetch attempt " + currentAttempt
      + " at " + (new Date()).toLocaleTimeString())
    fetchAndShaCheck(u, tmp, shasum, function (er, response) {
      // Only retry on 408, 5xx or no `response`.
      var sc = response && response.statusCode
      var statusRetry = !sc || (sc === 408 || sc >= 500)
      if (er && statusRetry && operation.retry(er)) {
        log.info("retry", "will retry, error on last attempt: " + er)
        return
      }
      cb(er)
    })
  })
}
357 |
|
358 |
// 1. cacheDir = path.join(cache,'_git-remotes',sha1(u))
// 2. checkGitDir(cacheDir) ? 4. : 3. (rm cacheDir if necessary)
// 3. git clone --mirror u cacheDir
// 4. cd cacheDir && git fetch -a origin
// 5. git archive /tmp/random.tgz
// 6. addLocalTarball(/tmp/random.tgz) <gitref> --format=tar --prefix=package/
function addRemoteGit (u, parsed, name, cb_) {
  if (typeof cb_ !== "function") cb_ = name, name = null

  // single in-flight download per url; extra callers queue their callbacks
  if (!inFlightURLs[u]) inFlightURLs[u] = []
  var iF = inFlightURLs[u]
  iF.push(cb_)
  if (iF.length > 1) return

  function cb (er, data) {
    unlock(u, function () {
      var c
      while (c = iF.shift()) c(er, data)
      delete inFlightURLs[u]
    })
  }

  var p, co // cachePath, git-ref we want to check out

  lock(u, function (er) {
    if (er) return cb(er)

    // figure out what we should check out.
    // NOTE(review): this `var co` shadows the outer `co` declared above,
    // which is therefore never assigned; harmless since only this closure
    // uses it, but confusing.
    var co = parsed.hash && parsed.hash.substr(1) || "master"
    // git is so tricky!
    // if the path is like ssh://foo:22/some/path then it works, but
    // it needs the ssh://
    // If the path is like ssh://foo:some/path then it works, but
    // only if you remove the ssh://
    var origUrl = u
    u = u.replace(/^git\+/, "")
         .replace(/#.*$/, "")

    // ssh paths that are scp-style urls don't need the ssh://
    if (parsed.pathname.match(/^\/?:/)) {
      u = u.replace(/^ssh:\/\//, "")
    }

    // human-readable cache dir name: sanitized url + short sha1 for uniqueness
    var v = crypto.createHash("sha1").update(u).digest("hex").slice(0, 8)
    v = u.replace(/[^a-zA-Z0-9]+/g, '-') + '-' + v

    log.verbose("addRemoteGit", [u, co])

    p = path.join(npm.config.get("cache"), "_git-remotes", v)

    checkGitDir(p, u, co, origUrl, cb)
  })
}
411 |
|
412 |
// Decide whether the mirrored clone at `p` is usable for url `u`.
// Missing / not-a-directory / wrong-remote dirs are (re)cloned; a good
// mirror goes straight to fetch+archive.
function checkGitDir (p, u, co, origUrl, cb) {
  fs.stat(p, function (er, s) {
    if (er) return cloneGitRemote(p, u, co, origUrl, cb)
    if (!s.isDirectory()) return rm(p, function (er){
      if (er) return cb(er)
      cloneGitRemote(p, u, co, origUrl, cb)
    })

    var git = npm.config.get("git")
    var args = ["config", "--get", "remote.origin.url"]
    var env = gitEnv()

    exec(git, args, env, false, p, function (er, code, stdout, stderr) {
      // FIX: was assigned without `var`, creating an implicit global
      // (throws in strict mode, leaks between concurrent calls).
      var stdoutTrimmed = (stdout + "\n" + stderr).trim()
      // the mirror belongs to some other url — blow it away and re-clone
      if (er || u !== stdout.trim()) {
        log.warn( "`git config --get remote.origin.url` returned "
                + "wrong result ("+u+")", stdoutTrimmed )
        return rm(p, function (er){
          if (er) return cb(er)
          cloneGitRemote(p, u, co, origUrl, cb)
        })
      }
      log.verbose("git remote.origin.url", stdoutTrimmed)
      archiveGitRemote(p, u, co, origUrl, cb)
    })
  })
}
439 |
|
440 |
// Create a fresh mirror clone of `u` at `p`, then fetch+archive from it.
function cloneGitRemote (p, u, co, origUrl, cb) {
  mkdir(p, function (er) {
    if (er) return cb(er)
    exec( npm.config.get("git"), ["clone", "--mirror", u, p], gitEnv(), false
        , function (er, code, stdout, stderr) {
      // combined output is used for both error and verbose logging
      stdout = (stdout + "\n" + stderr).trim()
      if (er) {
        log.error("git clone " + u, stdout)
        return cb(er)
      }
      log.verbose("git clone " + u, stdout)
      archiveGitRemote(p, u, co, origUrl, cb)
    })
  })
}
455 |
|
456 |
// From the mirror at `p`: fetch origin and resolve ref `co` (both execs run
// in parallel, joined by next()), then `git archive` the ref, gzip it to a
// temp tarball, and hand off to addLocalTarball. The resolved commit sha is
// recorded on data._resolved.
function archiveGitRemote (p, u, co, origUrl, cb) {
  var git = npm.config.get("git")
  var archive = ["fetch", "-a", "origin"]
  var resolve = ["rev-list", "-n1", co]
  var env = gitEnv()

  var errState = null
  var n = 0          // join counter: archive step waits for both execs
  var resolved = null
  var tmp

  exec(git, archive, env, false, p, function (er, code, stdout, stderr) {
    stdout = (stdout + "\n" + stderr).trim()
    if (er) {
      log.error("git fetch -a origin ("+u+")", stdout)
      return next(er)
    }
    log.verbose("git fetch -a origin ("+u+")", stdout)
    // tmp is only set on the success path; next() relies on that
    tmp = path.join(npm.tmp, Date.now()+"-"+Math.random(), "tmp.tgz")
    next()
  })

  exec(git, resolve, env, false, p, function (er, code, stdout, stderr) {
    stdout = (stdout + "\n" + stderr).trim()
    if (er) {
      log.error("Failed resolving git HEAD (" + u + ")", stderr)
      return next(er)
    }
    log.verbose("git rev-list -n1 " + co, stdout)
    // rewrite the original url's hash to the exact commit we archived
    var parsed = url.parse(origUrl)
    parsed.hash = stdout
    resolved = url.format(parsed)
    log.verbose('resolved git url', resolved)
    next()
  })

  function next (er) {
    if (errState) return
    if (er) return cb(errState = er)

    // wait until both the fetch and the rev-list have completed
    if (++n < 2) return

    mkdir(path.dirname(tmp), function (er) {
      if (er) return cb(er)
      var gzip = zlib.createGzip({ level: 9 })
      var git = npm.config.get("git")
      var args = ["archive", co, "--format=tar", "--prefix=package/"]
      var out = fs.createWriteStream(tmp)
      var env = gitEnv()
      // spawn error and pipe close could both fire; guard with once()
      cb = once(cb)
      var cp = spawn(git, args, { env: env, cwd: p })
      cp.on("error", cb)
      cp.stderr.on("data", function(chunk) {
        log.silly(chunk.toString(), "git archive")
      })

      cp.stdout.pipe(gzip).pipe(out).on("close", function() {
        addLocalTarball(tmp, function(er, data) {
          if (data) data._resolved = resolved
          cb(er, data)
        })
      })
    })
  }
}
521 |
|
522 |
// Lazily-built, cached environment for spawning git.
var gitEnv_
// Return a copy of process.env with GIT_* variables stripped, except the
// whitelisted ones git legitimately needs. Built once and reused.
function gitEnv () {
  // git responds to env vars in some weird ways in post-receive hooks
  // so don't carry those along.
  if (gitEnv_) return gitEnv_
  gitEnv_ = {}
  var keep = ['GIT_PROXY_COMMAND', 'GIT_SSH']
  Object.keys(process.env).forEach(function (name) {
    // drop GIT_* vars unless explicitly whitelisted
    if (name.match(/^GIT/) && keep.indexOf(name) === -1) return
    gitEnv_[name] = process.env[name]
  })
  return gitEnv_
}
534 |
|
535 |
|
536 |
// only have one request in flight for a given
|
537 |
// name@blah thing.
|
538 |
// only have one request in flight for a given
// name@blah thing.
var inFlightNames = {}
// Add name@x to the cache, where x may be an exact version, a semver range,
// or a dist-tag. Dispatches accordingly; concurrent identical requests share
// one resolution.
function addNamed (name, x, data, cb_) {
  // optional-arg shuffle: (name, x, cb)
  if (typeof cb_ !== "function") cb_ = data, data = null
  log.verbose("addNamed", [name, x])

  var k = name + "@" + x
  if (!inFlightNames[k]) inFlightNames[k] = []
  var iF = inFlightNames[k]
  iF.push(cb_)
  // someone else is already resolving this name@x; our cb is queued
  if (iF.length > 1) return

  function cb (er, data) {
    if (data && !data._fromGithub) data._from = k
    unlock(k, function () {
      var c
      while (c = iF.shift()) c(er, data)
      delete inFlightNames[k]
    })
  }

  log.verbose("addNamed", [semver.valid(x), semver.validRange(x)])
  lock(k, function (er, fd) {
    if (er) return cb(er)

    // exact version > range > dist-tag, in that order of specificity
    var fn = ( null !== semver.valid(x) ? addNameVersion
             : null !== semver.validRange(x) ? addNameRange
             : addNameTag
             )
    fn(name, x, data, cb)
  })
}
569 |
|
570 |
// Resolve a dist-tag (e.g. "latest") against the registry and add the
// version it points to. An empty tag falls back to the configured default
// tag, and from there to "*" if the tag doesn't resolve.
function addNameTag (name, tag, data, cb) {
  if (typeof cb !== "function") cb = data, data = null
  log.info("addNameTag", [name, tag])
  var explicit = true
  if (!tag) {
    explicit = false
    tag = npm.config.get("tag")
  }

  registry.get(name, function (er, data, json, response) {
    if (er) return cb(er)
    engineFilter(data)
    if (data["dist-tags"] && data["dist-tags"][tag]
        && data.versions[data["dist-tags"][tag]]) {
      var ver = data["dist-tags"][tag]
      return addNamed(name, ver, data.versions[ver], cb)
    }
    // implicit default tag missing => take any satisfying version
    if (!explicit && Object.keys(data.versions).length) {
      return addNamed(name, "*", data, cb)
    }

    er = installTargetsError(tag, data)

    // might be username/project
    // in that case, try it as a github url.
    if (tag.split("/").length === 2) {
      return maybeGithub(tag, name, er, cb)
    }

    return cb(er)
  })
}
602 |
|
603 |
|
604 |
// Mutates registry metadata in place: deletes versions whose "engines"
// field is incompatible with the current node/npm, honoring engine-strict
// and per-package engineStrict. No-op under --force or unknown node version.
function engineFilter (data) {
  var npmv = npm.version
    , nodev = npm.config.get("node-version")
    , strict = npm.config.get("engine-strict")

  if (!nodev || npm.config.get("force")) return data

  Object.keys(data.versions || {}).forEach(function (v) {
    var eng = data.versions[v].engines
    if (!eng) return
    // advisory engines are only enforced when strictness is requested
    if (!strict && !data.versions[v].engineStrict) return
    if (eng.node && !semver.satisfies(nodev, eng.node)
        || eng.npm && !semver.satisfies(npmv, eng.npm)) {
      delete data.versions[v]
    }
  })
}
621 |
|
622 |
// Resolve a semver range against registry metadata (fetched if not passed
// in) and add the best matching version: the configured tag's version if it
// satisfies the range, else the maximum satisfying version.
function addNameRange (name, range, data, cb) {
  if (typeof cb !== "function") cb = data, data = null

  // FIX: keep the caller's original spec for the error message;
  // previously `range` was reported *after* validRange() nulled it,
  // so the error always said "Invalid version range: null".
  var rawRange = range
  range = semver.validRange(range)
  if (range === null) return cb(new Error(
    "Invalid version range: " + rawRange))

  log.silly("addNameRange", {name:name, range:range, hasData:!!data})

  if (data) return next()
  registry.get(name, function (er, d, json, response) {
    if (er) return cb(er)
    data = d
    next()
  })

  function next () {
    log.silly( "addNameRange", "number 2"
             , {name:name, range:range, hasData:!!data})
    engineFilter(data)

    log.silly("addNameRange", "versions"
             , [data.name, Object.keys(data.versions || {})])

    // if the tagged version satisfies, then use that.
    var tagged = data["dist-tags"][npm.config.get("tag")]
    if (tagged && data.versions[tagged] && semver.satisfies(tagged, range)) {
      return addNamed(name, tagged, data.versions[tagged], cb)
    }

    // find the max satisfying version.
    var ms = semver.maxSatisfying(Object.keys(data.versions || {}), range)
    if (!ms) {
      return cb(installTargetsError(range, data))
    }

    // if we don't have a registry connection, try to see if
    // there's a cached copy that will be ok.
    addNamed(name, ms, data.versions[ms], cb)
  }
}
663 |
|
664 |
// Build the "No compatible version found" Error for a request that matched
// nothing, listing the tags/versions that *would* have been valid targets.
function installTargetsError (requested, data) {
  var versions = data.versions || {}
  var targets = Object.keys(data["dist-tags"]).filter(function (tag) {
    return versions.hasOwnProperty(tag)
  })
  targets = targets.concat(Object.keys(versions))

  var requestedDesc = data.name
  if (requested) requestedDesc += "@'" + requested + "'"

  var targetsDesc
  if (targets.length) {
    targetsDesc = "Valid install targets:\n" + JSON.stringify(targets)
  } else {
    targetsDesc = "No valid targets found.\n"
                + "Perhaps not compatible with your version of node?"
  }

  return new Error( "No compatible version found: "
                  + requestedDesc + "\n" + targetsDesc)
}
679 |
|
680 |
// Add an exact name@version: fetch metadata (unless supplied), and either
// serve the already-cached tarball (on registry 304) or download dist.tarball
// via the registry's protocol.
function addNameVersion (name, ver, data, cb) {
  if (typeof cb !== "function") cb = data, data = null

  // FIX: keep the caller's original spec for the error message;
  // previously `ver` was reported *after* semver.valid() nulled it,
  // so the error always said "Invalid version: null".
  var rawVer = ver
  ver = semver.valid(ver)
  if (ver === null) return cb(new Error("Invalid version: " + rawVer))

  var response

  if (data) {
    // caller already has the version metadata; no registry round-trip
    response = null
    return next()
  }
  registry.get(name + "/" + ver, function (er, d, json, resp) {
    if (er) return cb(er)
    data = d
    response = resp
    next()
  })

  function next () {
    deprCheck(data)
    var dist = data.dist

    if (!dist) return cb(new Error("No dist in "+data._id+" package"))

    if (!dist.tarball) return cb(new Error(
      "No dist.tarball in " + data._id + " package"))

    // fresh registry data (or --force) => always re-fetch the tarball
    if ((response && response.statusCode !== 304) || npm.config.get("force")) {
      return fetchit()
    }

    // we got cached data, so let's see if we have a tarball.
    fs.stat(path.join(npm.cache, name, ver, "package.tgz"), function (er, s) {
      if (!er) readJson( path.join( npm.cache, name, ver
                                  , "package", "package.json" )
                       , function (er, data) {
          if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)
          if (er) return fetchit()
          return cb(null, data)
        })
      else return fetchit()
    })

    function fetchit () {
      if (!npm.config.get("registry")) {
        return cb(new Error("Cannot fetch: "+dist.tarball))
      }

      // use the same protocol as the registry.
      // https registry --> https tarballs.
      var tb = url.parse(dist.tarball)
      tb.protocol = url.parse(npm.config.get("registry")).protocol
      delete tb.href  // force url.format to rebuild from components
      tb = url.format(tb)
      return addRemoteTarball( tb
                             , dist.shasum
                             , name+"-"+ver
                             , cb )
    }
  }
}
742 |
|
743 |
// Add something that looks like a local path: a directory, a tarball file,
// or — if stat fails and it looks like "user/project" — a github shorthand.
// A bare word that fails locally is retried as a registry name.
function addLocal (p, name, cb_) {
  if (typeof cb_ !== "function") cb_ = name, name = ""

  function cb (er, data) {
    unlock(p, function () {
      if (er) {
        // if it doesn't have a / in it, it might be a
        // remote thing.
        if (p.indexOf("/") === -1 && p.charAt(0) !== "."
           && (process.platform !== "win32" || p.indexOf("\\") === -1)) {
          return addNamed(p, "", cb_)
        }
        log.error("addLocal", "Could not install %s", p)
        return cb_(er)
      }
      if (data && !data._fromGithub) data._from = p
      return cb_(er, data)
    })
  }

  lock(p, function (er) {
    if (er) return cb(er)
    // figure out if this is a folder or file.
    fs.stat(p, function (er, s) {
      if (er) {
        // might be username/project
        // in that case, try it as a github url.
        if (p.split("/").length === 2) {
          return maybeGithub(p, name, er, cb)
        }
        return cb(er)
      }
      if (s.isDirectory()) addLocalDirectory(p, name, cb)
      else addLocalTarball(p, name, cb)
    })
  })
}
780 |
|
781 |
// Fallback for "user/project" specs: try git://github.com/user/project.
// On failure, surface the *original* error `er`, not the git one, so the
// user sees why their first interpretation failed.
function maybeGithub (p, name, er, cb) {
  var u = "git://github.com/" + p
    , up = url.parse(u)
  log.info("maybeGithub", "Attempting to fetch %s from %s", p, u)

  return addRemoteGit(u, up, name, function (er2, data) {
    if (er2) return cb(er)
    data._from = u
    data._fromGithub = true
    return cb(null, data)
  })
}
793 |
|
794 |
// Add a tarball file. Three cases by location:
//  - already in npm.tmp: hand straight to addTmpTarball
//  - already placed in the cache: must be named package.tgz, then unpacked
//  - anywhere else: copy into a fresh tmp dir first, then treat as tmp
function addLocalTarball (p, name, cb_) {
  if (typeof cb_ !== "function") cb_ = name, name = ""
  // if it's a tar, and not in place,
  // then unzip to .tmp, add the tmp folder, and clean up tmp
  if (p.indexOf(npm.tmp) === 0) return addTmpTarball(p, name, cb_)

  if (p.indexOf(npm.cache) === 0) {
    if (path.basename(p) !== "package.tgz") return cb_(new Error(
      "Not a valid cache tarball name: "+p))
    return addPlacedTarball(p, name, cb_)
  }

  function cb (er, data) {
    if (data) data._resolved = p
    return cb_(er, data)
  }

  // just copy it over and then add the temp tarball file.
  var tmp = path.join(npm.tmp, name + Date.now()
                     + "-" + Math.random(), "tmp.tgz")
  mkdir(path.dirname(tmp), function (er) {
    if (er) return cb(er)
    var from = fs.createReadStream(p)
      , to = fs.createWriteStream(tmp)
      , errState = null
    // report only the first stream error; ignore the rest
    function errHandler (er) {
      if (errState) return
      return cb(errState = er)
    }
    from.on("error", errHandler)
    to.on("error", errHandler)
    to.on("close", function () {
      if (errState) return
      log.verbose("chmod", tmp, npm.modes.file.toString(8))
      fs.chmod(tmp, npm.modes.file, function (er) {
        if (er) return cb(er)
        addTmpTarball(tmp, name, cb)
      })
    })
    from.pipe(to)
  })
}
836 |
|
837 |
// to maintain the cache dir's permissions consistently.
|
838 |
// to maintain the cache dir's permissions consistently.
var cacheStat = null
// Stat the cache dir (creating it via makeCacheDir if absent), memoizing the
// result. cb(er, stat) — stat carries the uid/gid to own cache entries with.
function getCacheStat (cb) {
  if (cacheStat) return cb(null, cacheStat)
  fs.stat(npm.cache, function (er, st) {
    if (er) return makeCacheDir(cb)
    if (!st.isDirectory()) {
      log.error("getCacheStat", "invalid cache dir %j", npm.cache)
      // FIX: was `cb(er)` — but er is null on this path, so callers were
      // told "success" with no stat and crashed reading cs.uid.
      return cb(new Error("invalid cache dir: " + npm.cache))
    }
    return cb(null, cacheStat = st)
  })
}
850 |
|
851 |
// Create the cache dir and work out which uid/gid should own it.
// Under sudo, prefer SUDO_UID/SUDO_GID (or $HOME's owner) so the cache
// stays writable by the invoking user, then chown anything mkdirp created.
function makeCacheDir (cb) {
  // platforms without getuid (windows): no ownership handling at all
  if (!process.getuid) return mkdir(npm.cache, cb)

  var uid = +process.getuid()
    , gid = +process.getgid()

  if (uid === 0) {
    if (process.env.SUDO_UID) uid = +process.env.SUDO_UID
    if (process.env.SUDO_GID) gid = +process.env.SUDO_GID
  }
  if (uid !== 0 || !process.env.HOME) {
    cacheStat = {uid: uid, gid: gid}
    return mkdir(npm.cache, afterMkdir)
  }

  // still root with no SUDO_* hints: copy ownership from $HOME
  fs.stat(process.env.HOME, function (er, st) {
    if (er) {
      log.error("makeCacheDir", "homeless?")
      return cb(er)
    }
    cacheStat = st
    log.silly("makeCacheDir", "cache dir uid, gid", [st.uid, st.gid])
    return mkdir(npm.cache, afterMkdir)
  })

  function afterMkdir (er, made) {
    if (er || !cacheStat || isNaN(cacheStat.uid) || isNaN(cacheStat.gid)) {
      return cb(er, cacheStat)
    }

    // `made` is the topmost dir mkdirp created; nothing created => no chown
    if (!made) return cb(er, cacheStat)

    // ensure that the ownership is correct.
    chownr(made, cacheStat.uid, cacheStat.gid, function (er) {
      return cb(er, cacheStat)
    })
  }
}
889 |
|
890 |
|
891 |
|
892 |
|
893 |
// Add a tarball that is already at its final cache location:
// resolve the cache dir's uid/gid first, then do the real work.
function addPlacedTarball (p, name, cb) {
  if (!cb) cb = name, name = ""
  getCacheStat(function (er, cs) {
    if (er) return cb(er)
    return addPlacedTarball_(p, name, cs.uid, cs.gid, cb)
  })
}
900 |
|
901 |
// now we know it's in place already as .cache/name/ver/package.tgz
// unpack to .cache/name/ver/package/, read the package.json,
// and fire cb with the json data.
// Pipeline: lock folder -> rm stale unpack -> unlock -> untar -> shasum ->
// readJson -> chmod/chown the tarball -> cb(er, data).
function addPlacedTarball_ (p, name, uid, gid, cb) {
  var target = path.dirname(p)
    , folder = path.join(target, "package")

  lock(folder, function (er) {
    if (er) return cb(er)
    rmUnpack()
  })

  // remove any previous unpack of this tarball before re-unpacking
  function rmUnpack () {
    rm(folder, function (er) {
      unlock(folder, function () {
        if (er) {
          log.error("addPlacedTarball", "Could not remove %j", folder)
          return cb(er)
        }
        thenUnpack()
      })
    })
  }

  function thenUnpack () {
    tar.unpack(p, folder, null, null, uid, gid, function (er) {
      if (er) {
        log.error("addPlacedTarball", "Could not unpack %j to %j", p, target)
        return cb(er)
      }
      // calculate the sha of the file that we just unpacked.
      // this is so that the data is available when publishing.
      sha.get(p, function (er, shasum) {
        if (er) {
          log.error("addPlacedTarball", "shasum fail", p)
          return cb(er)
        }
        readJson(path.join(folder, "package.json"), function (er, data) {
          if (er) {
            log.error("addPlacedTarball", "Couldn't read json in %j"
                     , folder)
            return cb(er)
          }
          data.dist = data.dist || {}
          if (shasum) data.dist.shasum = shasum
          deprCheck(data)
          // normalize perms, then ownership (skipped on windows / bad ids)
          asyncMap([p], function (f, cb) {
            log.verbose("chmod", f, npm.modes.file.toString(8))
            fs.chmod(f, npm.modes.file, cb)
          }, function (f, cb) {
            if (process.platform === "win32") {
              log.silly("chown", "skipping for windows", f)
              cb()
            } else if (typeof uid === "number"
                && typeof gid === "number"
                && parseInt(uid, 10) === uid
                && parseInt(gid, 10) === gid) {
              log.verbose("chown", f, [uid, gid])
              fs.chown(f, uid, gid, cb)
            } else {
              log.verbose("chown", "skip for invalid uid/gid", [f, uid, gid])
              cb()
            }
          }, function (er) {
            cb(er, data)
          })
        })
      })
    })
  }
}
972 |
|
973 |
// Add a local folder to the cache: read its package.json, pack it into
// a tarball at the proper cache location, and add that tarball.
// The name argument is optional.
function addLocalDirectory (p, name, cb) {
  if (typeof cb !== "function") cb = name, name = ""
  // if it's a folder, then read the package.json,
  // tar it to the proper place, and add the cache tar
  if (p.indexOf(npm.cache) === 0) return cb(new Error(
    "Adding a cache directory to the cache will make the world implode."))
  readJson(path.join(p, "package.json"), function (er, data) {
    if (er) return cb(er)
    deprCheck(data)
    // Pack into a throwaway tmp location unless the folder is already
    // named "package", in which case the tarball goes directly to its
    // final cache/name/version/package.tgz spot.
    var random = Date.now() + "-" + Math.random()
      , tmp = path.join(npm.tmp, random)
      , tmptgz = path.resolve(tmp, "tmp.tgz")
      , placed = path.resolve( npm.cache, data.name
                             , data.version, "package.tgz" )
      , placeDirect = path.basename(p) === "package"
      , tgz = placeDirect ? placed : tmptgz
      // only do the extra packing work for real user folders, not for
      // things already inside tmp or the cache
      , doFancyCrap = p.indexOf(npm.tmp) !== 0
        && p.indexOf(npm.cache) !== 0
    getCacheStat(function (er, cs) {
      // NOTE(review): `er` from getCacheStat is ignored here — a failed
      // stat only means cs is falsy and chown is skipped below; confirm
      // that is intentional.
      mkdir(path.dirname(tgz), function (er, made) {
        if (er) return cb(er)
        tar.pack(tgz, p, data, doFancyCrap, function (er) {
          if (er) {
            log.error( "addLocalDirectory", "Could not pack %j to %j"
                     , p, tgz )
            return cb(er)
          }
          // if we don't get a cache stat, or if the gid/uid is not
          // a number, then just move on. chown would fail anyway.
          // NOTE(review): this early path calls cb() WITHOUT invoking
          // addLocalTarball, so the freshly packed tarball is never
          // added and cb receives no data — verify against callers.
          if (!cs || isNaN(cs.uid) || isNaN(cs.gid)) return cb()

          chownr(made || tgz, cs.uid, cs.gid, function (er) {
            if (er) return cb(er)
            addLocalTarball(tgz, name, cb)
          })
        })
      })
    })
  })
}
1014 |
|
1015 |
// Add a tarball that was fetched into a tmp directory: unpack it next
// to itself as "package/" and hand that folder to addLocalDirectory.
// The name argument is optional.
function addTmpTarball (tgz, name, cb) {
  if (!cb) {
    cb = name
    name = ""
  }
  getCacheStat(function (er, cs) {
    if (er) return cb(er)
    // unpack alongside the tarball, owned by the cache owner
    var pkgDir = path.resolve(path.dirname(tgz), "package")
    tar.unpack(tgz, pkgDir, null, null, cs.uid, cs.gid, function (er) {
      if (er) return cb(er)
      addLocalDirectory(pkgDir, name, cb)
    })
  })
}
1031 |
|
1032 |
// Unpack the cached tarball for pkg@ver into unpackTarget, first
// unbuilding whatever is already there. dMode/fMode/uid/gid are all
// optional; only cb is required.
function unpack (pkg, ver, unpackTarget, dMode, fMode, uid, gid, cb) {
  // Slide the callback left across whichever optional trailing
  // arguments the caller omitted.
  if (typeof cb !== "function") { cb = gid; gid = null }
  if (typeof cb !== "function") { cb = uid; uid = null }
  if (typeof cb !== "function") { cb = fMode; fMode = null }
  if (typeof cb !== "function") { cb = dMode; dMode = null }

  read(pkg, ver, false, function (er, data) {
    if (er) {
      log.error("unpack", "Could not read data for %s", pkg + "@" + ver)
      return cb(er)
    }
    // Tear down any existing install at the target, then extract the
    // cached tarball over it with the requested modes/ownership.
    npm.commands.unbuild([unpackTarget], function (er) {
      if (er) return cb(er)
      var cached = path.join(npm.cache, pkg, ver, "package.tgz")
      tar.unpack(cached, unpackTarget, dMode, fMode, uid, gid, cb)
    })
  })
}
1053 |
|
1054 |
// Per-process memory of deprecation messages, keyed by package _id,
// plus which ids we have already warned about.
var deprecated = {}
  , deprWarned = {}

// Propagate a remembered deprecation message onto data, remember any
// new one, and warn at most once per package _id.
function deprCheck (data) {
  var id = data._id
  if (deprecated[id]) data.deprecated = deprecated[id]
  if (!data.deprecated) return
  deprecated[id] = data.deprecated
  if (deprWarned[id]) return
  deprWarned[id] = true
  log.warn("deprecated", "%s: %s", id, data.deprecated)
}
1065 |
|
1066 |
// Map a URL/path to its lock-file path inside the cache dir. The name
// combines a readable slug of u (last 32 chars, non-alphanumerics
// collapsed to "-") with an 8-char sha1 prefix of the full string for
// uniqueness.
function lockFileName (u) {
  var slug = u.replace(/[^a-zA-Z0-9]+/g, "-").replace(/^-+|-+$/g, "")
  var hash = crypto.createHash("sha1").update(u).digest("hex").slice(0, 8)
  slug = slug.slice(-32)
  var base = hash + "-" + slug
  log.silly("lockFile", base, u)
  return path.resolve(npm.config.get("cache"), base + ".lock")
}
1074 |
|
1075 |
// Whether we have already ensured the cache dir exists this process.
var madeCache = false
// Lock files this process currently holds (path -> bool); consulted by
// unlock() so we never release a lock we did not take.
var myLocks = {}

// Acquire the cache lock for u, creating the cache dir first if needed.
// Calls cb(er) once the lock is held (or acquisition failed).
function lock (u, cb) {
  // the cache dir needs to exist already for this.
  if (madeCache) return acquire()
  mkdir(npm.config.get("cache"), function (er) {
    if (er) return cb(er)
    madeCache = true
    acquire()
  })

  function acquire () {
    var opts = { stale: npm.config.get("cache-lock-stale")
               , retries: npm.config.get("cache-lock-retries")
               , wait: npm.config.get("cache-lock-wait") }
    var lf = lockFileName(u)
    log.verbose("lock", u, lf)
    lockFile.lock(lf, opts, function (er) {
      // remember successful acquisitions so unlock() will honor them
      if (!er) myLocks[lf] = true
      cb(er)
    })
  }
}
1097 |
|
1098 |
// Release a lock previously taken by lock(). If this process never
// acquired the lock for u, succeed asynchronously without touching the
// lock file.
function unlock (u, cb) {
  var lf = lockFileName(u)
  if (!myLocks[lf]) return process.nextTick(cb)
  myLocks[lf] = false
  // Reuse lf instead of calling lockFileName(u) a second time (the
  // original recomputed the sha1 and emitted the silly-log twice).
  lockFile.unlock(lf, cb)
}