mirror of https://github.com/S2-/minifyfromhtml.git
synced 2025-08-04 12:40:05 +02:00
update packages to latest version
180  node_modules/npm/lib/cache/add-local-tarball.js  generated  vendored  Normal file
@@ -0,0 +1,180 @@
var mkdir = require("mkdirp")
  , assert = require("assert")
  , fs = require("graceful-fs")
  , writeFileAtomic = require("write-file-atomic")
  , path = require("path")
  , sha = require("sha")
  , npm = require("../npm.js")
  , log = require("npmlog")
  , tar = require("../utils/tar.js")
  , pathIsInside = require("path-is-inside")
  , getCacheStat = require("./get-stat.js")
  , cachedPackageRoot = require("./cached-package-root.js")
  , chownr = require("chownr")
  , inflight = require("inflight")
  , once = require("once")
  , writeStreamAtomic = require("fs-write-stream-atomic")
  , randomBytes = require("crypto").pseudoRandomBytes // only need uniqueness

module.exports = addLocalTarball

function addLocalTarball (p, pkgData, shasum, cb) {
  assert(typeof p === "string", "must have path")
  assert(typeof cb === "function", "must have callback")

  if (!pkgData) pkgData = {}

  // If we don't have a shasum yet, compute it.
  if (!shasum) {
    return sha.get(p, function (er, shasum) {
      if (er) return cb(er)
      log.silly("addLocalTarball", "shasum (computed)", shasum)
      addLocalTarball(p, pkgData, shasum, cb)
    })
  }

  if (pathIsInside(p, npm.cache)) {
    if (path.basename(p) !== "package.tgz") {
      return cb(new Error("Not a valid cache tarball name: "+p))
    }
    log.verbose("addLocalTarball", "adding from inside cache", p)
    return addPlacedTarball(p, pkgData, shasum, cb)
  }

  addTmpTarball(p, pkgData, shasum, function (er, data) {
    if (data) {
      data._resolved = p
      data._shasum = data._shasum || shasum
    }
    return cb(er, data)
  })
}

function addPlacedTarball (p, pkgData, shasum, cb) {
  assert(pkgData, "should have package data by now")
  assert(typeof cb === "function", "cb function required")

  getCacheStat(function (er, cs) {
    if (er) return cb(er)
    return addPlacedTarball_(p, pkgData, cs.uid, cs.gid, shasum, cb)
  })
}

function addPlacedTarball_ (p, pkgData, uid, gid, resolvedSum, cb) {
  var folder = path.join(cachedPackageRoot(pkgData), "package")

  // First, make sure we have the shasum, if we don't already.
  if (!resolvedSum) {
    sha.get(p, function (er, shasum) {
      if (er) return cb(er)
      addPlacedTarball_(p, pkgData, uid, gid, shasum, cb)
    })
    return
  }

  mkdir(folder, function (er) {
    if (er) return cb(er)
    var pj = path.join(folder, "package.json")
    var json = JSON.stringify(pkgData, null, 2)
    writeFileAtomic(pj, json, function (er) {
      cb(er, pkgData)
    })
  })
}

function addTmpTarball (tgz, pkgData, shasum, cb) {
  assert(typeof cb === "function", "must have callback function")
  assert(shasum, "must have shasum by now")

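  // inflight() coalesces concurrent adds of the same tarball: the first caller
  // gets a wrapped callback back, later callers get null and are notified when
  // the first add completes.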
  cb = inflight("addTmpTarball:" + tgz, cb)
  if (!cb) return log.verbose("addTmpTarball", tgz, "already in flight; not adding")
  log.verbose("addTmpTarball", tgz, "not in flight; adding")

  // we already have the package info, so just move into place
  if (pkgData && pkgData.name && pkgData.version) {
    log.verbose(
      "addTmpTarball",
      "already have metadata; skipping unpack for",
      pkgData.name + "@" + pkgData.version
    )
    return addTmpTarball_(tgz, pkgData, shasum, cb)
  }

  // This is a tarball we probably downloaded from the internet. The shasum's
  // already been checked, but we haven't ever had a peek inside, so we unpack
  // it here just to make sure it is what it says it is.
  //
  // NOTE: we might not have any clue what we think it is, for example if the
  // user just did `npm install ./foo.tgz`

  // generate a unique filename
  randomBytes(6, function (er, random) {
    if (er) return cb(er)

    var target = path.join(npm.tmp, "unpack-" + random.toString("hex"))
    getCacheStat(function (er, cs) {
      if (er) return cb(er)

      log.verbose("addTmpTarball", "validating metadata from", tgz)
      tar.unpack(tgz, target, null, null, cs.uid, cs.gid, function (er, data) {
        if (er) return cb(er)

        // check that this is what we expected.
        if (!data.name) {
          return cb(new Error("No name provided"))
        }
        else if (pkgData.name && data.name !== pkgData.name) {
          return cb(new Error("Invalid Package: expected " + pkgData.name +
                              " but found " + data.name))
        }

        if (!data.version) {
          return cb(new Error("No version provided"))
        }
        else if (pkgData.version && data.version !== pkgData.version) {
          return cb(new Error("Invalid Package: expected " +
                              pkgData.name + "@" + pkgData.version +
                              " but found " + data.name + "@" + data.version))
        }

        addTmpTarball_(tgz, data, shasum, cb)
      })
    })
  })
}

function addTmpTarball_ (tgz, data, shasum, cb) {
  assert(typeof cb === "function", "must have callback function")
  cb = once(cb)

  assert(data.name, "should have package name by now")
  assert(data.version, "should have package version by now")

  var root = cachedPackageRoot(data)
  var pkg = path.resolve(root, "package")
  var target = path.resolve(root, "package.tgz")
  getCacheStat(function (er, cs) {
    if (er) return cb(er)
    mkdir(pkg, function (er, created) {

      // chown starting from the first dir created by mkdirp,
      // or the root dir, if none had to be created, so that
      // we know that we get all the children.
      function chown () {
        chownr(created || root, cs.uid, cs.gid, done)
      }

      if (er) return cb(er)
      var read = fs.createReadStream(tgz)
      var write = writeStreamAtomic(target, { mode: npm.modes.file })
      var fin = cs.uid && cs.gid ? chown : done
      read.on("error", cb).pipe(write).on("error", cb).on("close", fin)
    })

  })

  function done() {
    data._shasum = data._shasum || shasum
    cb(null, data)
  }
}
126  node_modules/npm/lib/cache/add-local.js  generated  vendored  Normal file
@@ -0,0 +1,126 @@
var assert = require("assert")
  , path = require("path")
  , mkdir = require("mkdirp")
  , chownr = require("chownr")
  , pathIsInside = require("path-is-inside")
  , readJson = require("read-package-json")
  , log = require("npmlog")
  , npm = require("../npm.js")
  , tar = require("../utils/tar.js")
  , deprCheck = require("../utils/depr-check.js")
  , getCacheStat = require("./get-stat.js")
  , cachedPackageRoot = require("./cached-package-root.js")
  , addLocalTarball = require("./add-local-tarball.js")
  , sha = require("sha")
  , inflight = require("inflight")

module.exports = addLocal

function addLocal (p, pkgData, cb_) {
  assert(typeof p === "object", "must have spec info")
  assert(typeof cb_ === "function", "must have callback")

  pkgData = pkgData || {}

  function cb (er, data) {
    if (er) {
      log.error("addLocal", "Could not install %s", p.spec)
      return cb_(er)
    }
    if (data && !data._fromGithub) {
      data._from = path.relative(npm.prefix, p.spec) || "."
      var resolved = path.relative(npm.prefix, p.spec)
      if (resolved) data._resolved = "file:"+resolved
    }
    return cb_(er, data)
  }

  if (p.type === "directory") {
    addLocalDirectory(p.spec, pkgData, null, cb)
  }
  else {
    addLocalTarball(p.spec, pkgData, null, cb)
  }
}

// At this point, if shasum is set, it's something that we've already
// read and checked. Just stashing it in the data at this point.
function addLocalDirectory (p, pkgData, shasum, cb) {
  assert(pkgData, "must pass package data")
  assert(typeof cb === "function", "must have callback")

  // if it's a folder, then read the package.json,
  // tar it to the proper place, and add the cache tar
  if (pathIsInside(p, npm.cache)) return cb(new Error(
    "Adding a cache directory to the cache will make the world implode."))

  readJson(path.join(p, "package.json"), false, function (er, data) {
    if (er) return cb(er)

    if (!data.name) {
      return cb(new Error("No name provided in package.json"))
    }
    else if (pkgData.name && pkgData.name !== data.name) {
      return cb(new Error(
        "Invalid package: expected " + pkgData.name + " but found " + data.name
      ))
    }

    if (!data.version) {
      return cb(new Error("No version provided in package.json"))
    }
    else if (pkgData.version && pkgData.version !== data.version) {
      return cb(new Error(
        "Invalid package: expected " + pkgData.name + "@" + pkgData.version +
        " but found " + data.name + "@" + data.version
      ))
    }

    deprCheck(data)

    // pack to {cache}/name/ver/package.tgz
    var root = cachedPackageRoot(data)
    var tgz = path.resolve(root, "package.tgz")
    var pj = path.resolve(root, "package/package.json")

    var wrapped = inflight(tgz, next)
    if (!wrapped) return log.verbose("addLocalDirectory", tgz, "already in flight; waiting")
    log.verbose("addLocalDirectory", tgz, "not in flight; packing")

    getCacheStat(function (er, cs) {
      mkdir(path.dirname(pj), function (er, made) {
        if (er) return wrapped(er)
        var fancy = !pathIsInside(p, npm.tmp)
        tar.pack(tgz, p, data, fancy, function (er) {
          if (er) {
            log.error("addLocalDirectory", "Could not pack", p, "to", tgz)
            return wrapped(er)
          }

          if (!cs || isNaN(cs.uid) || isNaN(cs.gid)) return wrapped()

          chownr(made || tgz, cs.uid, cs.gid, function (er) {
            if (er && er.code === 'ENOENT') return wrapped()
            wrapped(er)
          })
        })
      })
    })

    function next (er) {
      if (er) return cb(er)
      // if we have the shasum already, just add it
      if (shasum) {
        return addLocalTarball(tgz, data, shasum, cb)
      } else {
        sha.get(tgz, function (er, shasum) {
          if (er) {
            return cb(er)
          }
          data._shasum = shasum
          return addLocalTarball(tgz, data, shasum, cb)
        })
      }
    }
  })
}
299  node_modules/npm/lib/cache/add-named.js  generated  vendored  Normal file
@@ -0,0 +1,299 @@
var path = require("path")
  , assert = require("assert")
  , fs = require("graceful-fs")
  , http = require("http")
  , log = require("npmlog")
  , semver = require("semver")
  , readJson = require("read-package-json")
  , url = require("url")
  , npm = require("../npm.js")
  , deprCheck = require("../utils/depr-check.js")
  , inflight = require("inflight")
  , addRemoteTarball = require("./add-remote-tarball.js")
  , cachedPackageRoot = require("./cached-package-root.js")
  , mapToRegistry = require("../utils/map-to-registry.js")


module.exports = addNamed

function getOnceFromRegistry (name, from, next, done) {
  function fixName(err, data, json, resp) {
    // this is only necessary until npm/npm-registry-client#80 is fixed
    if (err && err.pkgid && err.pkgid !== name) {
      err.message = err.message.replace(
        new RegExp(': ' + err.pkgid.replace(/(\W)/g, '\\$1') + '$'),
        ': ' + name
      )
      err.pkgid = name
    }
    next(err, data, json, resp)
  }

  mapToRegistry(name, npm.config, function (er, uri, auth) {
    if (er) return done(er)

    var key = "registry:" + uri
    next = inflight(key, next)
    if (!next) return log.verbose(from, key, "already in flight; waiting")
    else log.verbose(from, key, "not in flight; fetching")

    npm.registry.get(uri, { auth : auth }, fixName)
  })
}

function addNamed (name, version, data, cb_) {
  assert(typeof name === "string", "must have module name")
  assert(typeof cb_ === "function", "must have callback")

  var key = name + "@" + version
  log.silly("addNamed", key)

  function cb (er, data) {
    if (data && !data._fromGithub) data._from = key
    cb_(er, data)
  }

  if (semver.valid(version, true)) {
    log.verbose('addNamed', JSON.stringify(version), 'is a plain semver version for', name)
    addNameVersion(name, version, data, cb)
  } else if (semver.validRange(version, true)) {
    log.verbose('addNamed', JSON.stringify(version), 'is a valid semver range for', name)
    addNameRange(name, version, data, cb)
  } else {
    log.verbose('addNamed', JSON.stringify(version), 'is being treated as a dist-tag for', name)
    addNameTag(name, version, data, cb)
  }
}

function addNameTag (name, tag, data, cb) {
  log.info("addNameTag", [name, tag])
  var explicit = true
  if (!tag) {
    explicit = false
    tag = npm.config.get("tag")
  }

  getOnceFromRegistry(name, "addNameTag", next, cb)

  function next (er, data, json, resp) {
    if (!er) er = errorResponse(name, resp)
    if (er) return cb(er)

    log.silly("addNameTag", "next cb for", name, "with tag", tag)

    engineFilter(data)
    if (data["dist-tags"] && data["dist-tags"][tag]
        && data.versions[data["dist-tags"][tag]]) {
      var ver = data["dist-tags"][tag]
      return addNamed(name, ver, data.versions[ver], cb)
    }
    if (!explicit && Object.keys(data.versions).length) {
      return addNamed(name, "*", data, cb)
    }

    er = installTargetsError(tag, data)
    return cb(er)
  }
}

function engineFilter (data) {
  var npmv = npm.version
    , nodev = npm.config.get("node-version")
    , strict = npm.config.get("engine-strict")

  if (!nodev || npm.config.get("force")) return data

  Object.keys(data.versions || {}).forEach(function (v) {
    var eng = data.versions[v].engines
    if (!eng) return
    if (!strict && !data.versions[v].engineStrict) return
    if (eng.node && !semver.satisfies(nodev, eng.node, true)
        || eng.npm && !semver.satisfies(npmv, eng.npm, true)) {
      delete data.versions[v]
    }
  })
}

function addNameVersion (name, v, data, cb) {
  var ver = semver.valid(v, true)
  if (!ver) return cb(new Error("Invalid version: "+v))

  var response

  if (data) {
    response = null
    return next()
  }

  getOnceFromRegistry(name, "addNameVersion", setData, cb)

  function setData (er, d, json, resp) {
    if (!er) {
      er = errorResponse(name, resp)
    }
    if (er) return cb(er)
    data = d && d.versions[ver]
    if (!data) {
      er = new Error("version not found: "+name+"@"+ver)
      er.package = name
      er.statusCode = 404
      return cb(er)
    }
    response = resp
    next()
  }

  function next () {
    deprCheck(data)
    var dist = data.dist

    if (!dist) return cb(new Error("No dist in "+data._id+" package"))

    if (!dist.tarball) return cb(new Error(
      "No dist.tarball in " + data._id + " package"))

    if ((response && response.statusCode !== 304) || npm.config.get("force")) {
      return fetchit()
    }

    // we got cached data, so let's see if we have a tarball.
    var pkgroot = cachedPackageRoot({name : name, version : ver})
    var pkgtgz = path.join(pkgroot, "package.tgz")
    var pkgjson = path.join(pkgroot, "package", "package.json")
    fs.stat(pkgtgz, function (er) {
      if (!er) {
        readJson(pkgjson, function (er, data) {
          if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)

          if (data) {
            if (!data.name) return cb(new Error("No name provided"))
            if (!data.version) return cb(new Error("No version provided"))

            // check the SHA of the package we have, to ensure it wasn't installed
            // from somewhere other than the registry (eg, a fork)
            if (data._shasum && dist.shasum && data._shasum !== dist.shasum) {
              return fetchit()
            }
          }

          if (er) return fetchit()
          else return cb(null, data)
        })
      } else return fetchit()
    })

    function fetchit () {
      mapToRegistry(name, npm.config, function (er, _, auth, ruri) {
        if (er) return cb(er)

        // Use the same protocol as the registry. https registry --> https
        // tarballs, but only if they're the same hostname, or else detached
        // tarballs may not work.
        var tb = url.parse(dist.tarball)
        var rp = url.parse(ruri)
        if (tb.hostname === rp.hostname && tb.protocol !== rp.protocol) {
          tb.protocol = rp.protocol
          // If a different port is associated with the other protocol
          // we need to update that as well
          if (rp.port !== tb.port) {
            tb.port = rp.port
            delete tb.host
          }
          delete tb.href
        }
        tb = url.format(tb)

        // Only add non-shasum'ed packages if --forced. Only ancient things
        // would lack this for good reasons nowadays.
        if (!dist.shasum && !npm.config.get("force")) {
          return cb(new Error("package lacks shasum: " + data._id))
        }

        addRemoteTarball(tb, data, dist.shasum, auth, cb)
      })
    }
  }
}

function addNameRange (name, range, data, cb) {
  range = semver.validRange(range, true)
  if (range === null) return cb(new Error(
    "Invalid version range: " + range
  ))

  log.silly("addNameRange", {name:name, range:range, hasData:!!data})

  if (data) return next()

  getOnceFromRegistry(name, "addNameRange", setData, cb)

  function setData (er, d, json, resp) {
    if (!er) {
      er = errorResponse(name, resp)
    }
    if (er) return cb(er)
    data = d
    next()
  }

  function next () {
    log.silly( "addNameRange", "number 2"
             , {name:name, range:range, hasData:!!data})
    engineFilter(data)

    log.silly("addNameRange", "versions"
             , [data.name, Object.keys(data.versions || {})])

    // if the tagged version satisfies, then use that.
    var tagged = data["dist-tags"][npm.config.get("tag")]
    if (tagged
        && data.versions[tagged]
        && semver.satisfies(tagged, range, true)) {
      return addNamed(name, tagged, data.versions[tagged], cb)
    }

    // find the max satisfying version.
    var versions = Object.keys(data.versions || {})
    var ms = semver.maxSatisfying(versions, range, true)
    if (!ms) {
      if (range === "*" && versions.length) {
        return addNameTag(name, "latest", data, cb)
      } else {
        return cb(installTargetsError(range, data))
      }
    }

    // if we don't have a registry connection, try to see if
    // there's a cached copy that will be ok.
    addNamed(name, ms, data.versions[ms], cb)
  }
}

function installTargetsError (requested, data) {
  var targets = Object.keys(data["dist-tags"]).filter(function (f) {
    return (data.versions || {}).hasOwnProperty(f)
  }).concat(Object.keys(data.versions || {}))

  requested = data.name + (requested ? "@'" + requested + "'" : "")

  targets = targets.length
          ? "Valid install targets:\n" + JSON.stringify(targets) + "\n"
          : "No valid targets found.\n"
          + "Perhaps not compatible with your version of node?"

  var er = new Error( "No compatible version found: "
                    + requested + "\n" + targets)
  er.code = "ETARGET"
  return er
}

function errorResponse (name, response) {
  var er
  if (response.statusCode >= 400) {
    er = new Error(http.STATUS_CODES[response.statusCode])
    er.statusCode = response.statusCode
    er.code = "E" + er.statusCode
    er.pkgid = name
  }
  return er
}
481  node_modules/npm/lib/cache/add-remote-git.js  generated  vendored  Normal file
@@ -0,0 +1,481 @@
var assert = require('assert')
var crypto = require('crypto')
var fs = require('graceful-fs')
var path = require('path')
var url = require('url')

var chownr = require('chownr')
var dezalgo = require('dezalgo')
var hostedFromURL = require('hosted-git-info').fromUrl
var inflight = require('inflight')
var log = require('npmlog')
var mkdir = require('mkdirp')
var normalizeGitUrl = require('normalize-git-url')
var npa = require('npm-package-arg')
var realizePackageSpecifier = require('realize-package-specifier')

var addLocal = require('./add-local.js')
var correctMkdir = require('../utils/correct-mkdir.js')
var git = require('../utils/git.js')
var npm = require('../npm.js')
var rm = require('../utils/gently-rm.js')

var remotes = path.resolve(npm.config.get('cache'), '_git-remotes')
var templates = path.join(remotes, '_templates')

var VALID_VARIABLES = [
  'GIT_ASKPASS',
  'GIT_EXEC_PATH',
  'GIT_PROXY_COMMAND',
  'GIT_SSH',
  'GIT_SSH_COMMAND',
  'GIT_SSL_CAINFO',
  'GIT_SSL_NO_VERIFY'
]

module.exports = addRemoteGit
function addRemoteGit (uri, _cb) {
  assert(typeof uri === 'string', 'must have git URL')
  assert(typeof _cb === 'function', 'must have callback')
  var cb = dezalgo(_cb)

  log.verbose('addRemoteGit', 'caching', uri)

  // the URL comes in exactly as it was passed on the command line, or as
  // normalized by normalize-package-data / read-package-json / read-installed,
  // so figure out what to do with it using hosted-git-info
  var parsed = hostedFromURL(uri)
  if (parsed) {
    // normalize GitHub syntax to org/repo (for now)
    var from
    if (parsed.type === 'github' && parsed.default === 'shortcut') {
      from = parsed.path()
    } else {
      from = parsed.toString()
    }

    log.verbose('addRemoteGit', from, 'is a repository hosted by', parsed.type)

    // prefer explicit URLs to pushing everything through shortcuts
    if (parsed.default !== 'shortcut') {
      return tryClone(from, parsed.toString(), false, cb)
    }

    // try git:, then git+ssh:, then git+https: before failing
    tryGitProto(from, parsed, cb)
  } else {
    // verify that this is a Git URL before continuing
    parsed = npa(uri)
    if (parsed.type !== 'git') {
      return cb(new Error(uri + 'is not a Git or GitHub URL'))
    }

    tryClone(parsed.rawSpec, uri, false, cb)
  }
}

function tryGitProto (from, hostedInfo, cb) {
  var gitURL = hostedInfo.git()
  if (!gitURL) return trySSH(from, hostedInfo, cb)

  log.silly('tryGitProto', 'attempting to clone', gitURL)
  tryClone(from, gitURL, true, function (er) {
    if (er) return tryHTTPS(from, hostedInfo, cb)

    cb.apply(this, arguments)
  })
}

function tryHTTPS (from, hostedInfo, cb) {
  var httpsURL = hostedInfo.https()
  if (!httpsURL) {
    return cb(new Error(from + ' can not be cloned via Git, SSH, or HTTPS'))
  }

  log.silly('tryHTTPS', 'attempting to clone', httpsURL)
  tryClone(from, httpsURL, true, function (er) {
    if (er) return trySSH(from, hostedInfo, cb)

    cb.apply(this, arguments)
  })
}

function trySSH (from, hostedInfo, cb) {
  var sshURL = hostedInfo.ssh()
  if (!sshURL) return tryHTTPS(from, hostedInfo, cb)

  log.silly('trySSH', 'attempting to clone', sshURL)
  tryClone(from, sshURL, false, cb)
}

function tryClone (from, combinedURL, silent, cb) {
  log.silly('tryClone', 'cloning', from, 'via', combinedURL)

  var normalized = normalizeGitUrl(combinedURL)
  var cloneURL = normalized.url
  var treeish = normalized.branch

  // ensure that similarly-named remotes don't collide
  var repoID = cloneURL.replace(/[^a-zA-Z0-9]+/g, '-') + '-' +
    crypto.createHash('sha1').update(combinedURL).digest('hex').slice(0, 8)
  var cachedRemote = path.join(remotes, repoID)

  cb = inflight(repoID, cb)
  if (!cb) {
    return log.verbose('tryClone', repoID, 'already in flight; waiting')
  }
  log.verbose('tryClone', repoID, 'not in flight; caching')

  // initialize the remotes cache with the correct perms
  getGitDir(function (er) {
    if (er) return cb(er)
    fs.stat(cachedRemote, function (er, s) {
      if (er) return mirrorRemote(from, cloneURL, treeish, cachedRemote, silent, finish)
      if (!s.isDirectory()) return resetRemote(from, cloneURL, treeish, cachedRemote, finish)

      validateExistingRemote(from, cloneURL, treeish, cachedRemote, finish)
    })

    // always set permissions on the cached remote
    function finish (er, data) {
      if (er) return cb(er, data)
      addModeRecursive(cachedRemote, npm.modes.file, function (er) {
        return cb(er, data)
      })
    }
  })
}

// don't try too hard to hold on to a remote
function resetRemote (from, cloneURL, treeish, cachedRemote, cb) {
  log.info('resetRemote', 'resetting', cachedRemote, 'for', from)
  rm(cachedRemote, function (er) {
    if (er) return cb(er)
    mirrorRemote(from, cloneURL, treeish, cachedRemote, false, cb)
  })
}

// reuse a cached remote when possible, but nuke it if it's in an
// inconsistent state
function validateExistingRemote (from, cloneURL, treeish, cachedRemote, cb) {
  git.whichAndExec(
    ['config', '--get', 'remote.origin.url'],
    { cwd: cachedRemote, env: gitEnv() },
    function (er, stdout, stderr) {
      var originURL
      if (stdout) {
        originURL = stdout.trim()
        log.silly('validateExistingRemote', from, 'remote.origin.url:', originURL)
      }

      if (stderr) stderr = stderr.trim()
      if (stderr || er) {
        log.warn('addRemoteGit', from, 'resetting remote', cachedRemote, 'because of error:', stderr || er)
        return resetRemote(from, cloneURL, treeish, cachedRemote, cb)
      } else if (cloneURL !== originURL) {
        log.warn(
          'addRemoteGit',
          from,
          'pre-existing cached repo', cachedRemote, 'points to', originURL, 'and not', cloneURL
        )
        return resetRemote(from, cloneURL, treeish, cachedRemote, cb)
      }

      log.verbose('validateExistingRemote', from, 'is updating existing cached remote', cachedRemote)
      updateRemote(from, cloneURL, treeish, cachedRemote, cb)
    }
  )
}

// make a complete bare mirror of the remote repo
// NOTE: npm uses a blank template directory to prevent weird inconsistencies
// https://github.com/npm/npm/issues/5867
function mirrorRemote (from, cloneURL, treeish, cachedRemote, silent, cb) {
  mkdir(cachedRemote, function (er) {
    if (er) return cb(er)

    var args = [
      'clone',
      '--template=' + templates,
      '--mirror',
      cloneURL, cachedRemote
    ]
    git.whichAndExec(
      ['clone', '--template=' + templates, '--mirror', cloneURL, cachedRemote],
      { cwd: cachedRemote, env: gitEnv() },
      function (er, stdout, stderr) {
        if (er) {
          var combined = (stdout + '\n' + stderr).trim()
          var command = 'git ' + args.join(' ') + ':'
          if (silent) {
            log.verbose(command, combined)
          } else {
            log.error(command, combined)
          }
          return cb(er)
        }
        log.verbose('mirrorRemote', from, 'git clone ' + cloneURL, stdout.trim())
        setPermissions(from, cloneURL, treeish, cachedRemote, cb)
      }
    )
  })
}

function setPermissions (from, cloneURL, treeish, cachedRemote, cb) {
  if (process.platform === 'win32') {
    log.verbose('setPermissions', from, 'skipping chownr on Windows')
    resolveHead(from, cloneURL, treeish, cachedRemote, cb)
  } else {
    getGitDir(function (er, cs) {
      if (er) {
        log.error('setPermissions', from, 'could not get cache stat')
        return cb(er)
      }

      chownr(cachedRemote, cs.uid, cs.gid, function (er) {
        if (er) {
          log.error(
            'setPermissions',
            'Failed to change git repository ownership under npm cache for',
            cachedRemote
          )
          return cb(er)
        }

        log.verbose('setPermissions', from, 'set permissions on', cachedRemote)
        resolveHead(from, cloneURL, treeish, cachedRemote, cb)
      })
    })
  }
}

// always fetch the origin, even right after mirroring, because this way
// permissions will get set correctly
function updateRemote (from, cloneURL, treeish, cachedRemote, cb) {
  git.whichAndExec(
    ['fetch', '-a', 'origin'],
    { cwd: cachedRemote, env: gitEnv() },
    function (er, stdout, stderr) {
      if (er) {
        var combined = (stdout + '\n' + stderr).trim()
        log.error('git fetch -a origin (' + cloneURL + ')', combined)
        return cb(er)
      }
      log.verbose('updateRemote', 'git fetch -a origin (' + cloneURL + ')', stdout.trim())

      setPermissions(from, cloneURL, treeish, cachedRemote, cb)
    }
  )
}

// branches and tags are both symbolic labels that can be attached to different
// commits, so resolve the commit-ish to the current actual treeish the label
// corresponds to
//
// important for shrinkwrap
function resolveHead (from, cloneURL, treeish, cachedRemote, cb) {
  log.verbose('resolveHead', from, 'original treeish:', treeish)
  var args = ['rev-list', '-n1', treeish]
  git.whichAndExec(
    args,
    { cwd: cachedRemote, env: gitEnv() },
    function (er, stdout, stderr) {
      if (er) {
        log.error('git ' + args.join(' ') + ':', stderr)
        return cb(er)
      }

      var resolvedTreeish = stdout.trim()
      log.silly('resolveHead', from, 'resolved treeish:', resolvedTreeish)

      var resolvedURL = getResolved(cloneURL, resolvedTreeish)
      if (!resolvedURL) {
        return cb(new Error(
          'unable to clone ' + from + ' because git clone string ' +
            cloneURL + ' is in a form npm can\'t handle'
        ))
      }
      log.verbose('resolveHead', from, 'resolved Git URL:', resolvedURL)

      // generate a unique filename
      var tmpdir = path.join(
        npm.tmp,
        'git-cache-' + crypto.pseudoRandomBytes(6).toString('hex'),
        resolvedTreeish
      )
      log.silly('resolveHead', 'Git working directory:', tmpdir)

      mkdir(tmpdir, function (er) {
        if (er) return cb(er)

        cloneResolved(from, resolvedURL, resolvedTreeish, cachedRemote, tmpdir, cb)
      })
    }
  )
}

// make a clone from the mirrored cache so we have a temporary directory in
// which we can check out the resolved treeish
function cloneResolved (from, resolvedURL, resolvedTreeish, cachedRemote, tmpdir, cb) {
  var args = ['clone', cachedRemote, tmpdir]
  git.whichAndExec(
    args,
    { cwd: cachedRemote, env: gitEnv() },
    function (er, stdout, stderr) {
      stdout = (stdout + '\n' + stderr).trim()
      if (er) {
        log.error('git ' + args.join(' ') + ':', stderr)
        return cb(er)
      }
      log.verbose('cloneResolved', from, 'clone', stdout)

      checkoutTreeish(from, resolvedURL, resolvedTreeish, tmpdir, cb)
    }
  )
}

// there is no safe way to do a one-step clone to a treeish that isn't
// guaranteed to be a branch, so explicitly check out the treeish once it's
// cloned
function checkoutTreeish (from, resolvedURL, resolvedTreeish, tmpdir, cb) {
  var args = ['checkout', resolvedTreeish]
  git.whichAndExec(
    args,
    { cwd: tmpdir, env: gitEnv() },
    function (er, stdout, stderr) {
      stdout = (stdout + '\n' + stderr).trim()
      if (er) {
        log.error('git ' + args.join(' ') + ':', stderr)
        return cb(er)
      }
      log.verbose('checkoutTreeish', from, 'checkout', stdout)

      // convince addLocal that the checkout is a local dependency
      realizePackageSpecifier(tmpdir, function (er, spec) {
        if (er) {
          log.error('addRemoteGit', 'Failed to map', tmpdir, 'to a package specifier')
          return cb(er)
        }

        // ensure pack logic is applied
        // https://github.com/npm/npm/issues/6400
        addLocal(spec, null, function (er, data) {
          if (data) {
            if (npm.config.get('save-exact')) {
              log.verbose('addRemoteGit', 'data._from:', resolvedURL, '(save-exact)')
              data._from = resolvedURL
            } else {
              log.verbose('addRemoteGit', 'data._from:', from)
              data._from = from
            }

            log.verbose('addRemoteGit', 'data._resolved:', resolvedURL)
            data._resolved = resolvedURL
          }

          cb(er, data)
        })
      })
    }
  )
}

function getGitDir (cb) {
  correctMkdir(remotes, function (er, stats) {
    if (er) return cb(er)

    // We don't need global templates when cloning. Use an empty directory for
    // the templates, creating it (and setting its permissions) if necessary.
    mkdir(templates, function (er) {
      if (er) return cb(er)

      // Ensure that both the template and remotes directories have the correct
      // permissions.
      fs.chown(templates, stats.uid, stats.gid, function (er) {
        cb(er, stats)
      })
    })
  })
}

var gitEnv_
function gitEnv () {
  // git responds to env vars in some weird ways in post-receive hooks
  // so don't carry those along.
  if (gitEnv_) return gitEnv_

  // allow users to override npm's insistence on not prompting for
  // passphrases, but default to just failing when credentials
  // aren't available
  gitEnv_ = { GIT_ASKPASS: 'echo' }

  for (var k in process.env) {
    if (!~VALID_VARIABLES.indexOf(k) && k.match(/^GIT/)) continue
    gitEnv_[k] = process.env[k]
  }
  return gitEnv_
}

addRemoteGit.getResolved = getResolved
function getResolved (uri, treeish) {
  // normalize hosted-git-info clone URLs back into regular URLs
  // this will only work on URLs that hosted-git-info recognizes
  // https://github.com/npm/npm/issues/7961
  var rehydrated = hostedFromURL(uri)
  if (rehydrated) uri = rehydrated.toString()

  var parsed = url.parse(uri)

  // Checks for known protocols:
  // http:, https:, ssh:, and git:, with optional git+ prefix.
  if (!parsed.protocol ||
      !parsed.protocol.match(/^(((git\+)?(https?|ssh))|git|file):$/)) {
    uri = 'git+ssh://' + uri
  }

  if (!/^git[+:]/.test(uri)) {
    uri = 'git+' + uri
  }

  // Not all URIs are actually URIs, so use regex for the treeish.
  return uri.replace(/(?:#.*)?$/, '#' + treeish)
}

// similar to chmodr except it add permissions rather than overwriting them
// adapted from https://github.com/isaacs/chmodr/blob/master/chmodr.js
function addModeRecursive (cachedRemote, mode, cb) {
  fs.readdir(cachedRemote, function (er, children) {
    // Any error other than ENOTDIR means it's not readable, or doesn't exist.
    // Give up.
    if (er && er.code !== 'ENOTDIR') return cb(er)
    if (er || !children.length) return addMode(cachedRemote, mode, cb)

    var len = children.length
    var errState = null
    children.forEach(function (child) {
      addModeRecursive(path.resolve(cachedRemote, child), mode, then)
    })

    function then (er) {
      if (errState) return undefined
      if (er) return cb(errState = er)
      if (--len === 0) return addMode(cachedRemote, dirMode(mode), cb)
    }
  })
}

function addMode (cachedRemote, mode, cb) {
  fs.stat(cachedRemote, function (er, stats) {
    if (er) return cb(er)
    mode = stats.mode | mode
    fs.chmod(cachedRemote, mode, cb)
  })
}

// taken from https://github.com/isaacs/chmodr/blob/master/chmodr.js
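// e.g. dirMode(parseInt('0644', 8)) === parseInt('0755', 8):
// each granted read bit also grants the corresponding execute bit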
function dirMode (mode) {
  if (mode & parseInt('0400', 8)) mode |= parseInt('0100', 8)
  if (mode & parseInt('040', 8)) mode |= parseInt('010', 8)
  if (mode & parseInt('04', 8)) mode |= parseInt('01', 8)
  return mode
}
120  node_modules/npm/lib/cache/add-remote-tarball.js  generated  vendored  Normal file
@@ -0,0 +1,120 @@
var mkdir = require("mkdirp")
  , assert = require("assert")
  , log = require("npmlog")
  , path = require("path")
  , sha = require("sha")
  , retry = require("retry")
  , writeStreamAtomic = require("fs-write-stream-atomic")
  , PassThrough = require('readable-stream').PassThrough
  , npm = require("../npm.js")
  , inflight = require("inflight")
  , addLocalTarball = require("./add-local-tarball.js")
  , cacheFile = require("npm-cache-filename")

module.exports = addRemoteTarball

function addRemoteTarball (u, pkgData, shasum, auth, cb_) {
  assert(typeof u === "string", "must have module URL")
  assert(typeof cb_ === "function", "must have callback")

  function cb (er, data) {
    if (data) {
      data._from = u
      data._resolved = u
      data._shasum = data._shasum || shasum
    }
    cb_(er, data)
  }

  cb_ = inflight(u, cb_)
  if (!cb_) return log.verbose("addRemoteTarball", u, "already in flight; waiting")
  log.verbose("addRemoteTarball", u, "not in flight; adding")

  // XXX Fetch direct to cache location, store tarballs under
  // ${cache}/registry.npmjs.org/pkg/-/pkg-1.2.3.tgz
  var tmp = cacheFile(npm.tmp, u)

  function next (er, resp, shasum) {
    if (er) return cb(er)
    addLocalTarball(tmp, pkgData, shasum, cb)
  }

  log.verbose("addRemoteTarball", [u, shasum])
  mkdir(path.dirname(tmp), function (er) {
    if (er) return cb(er)
    addRemoteTarball_(u, tmp, shasum, auth, next)
  })
}

function addRemoteTarball_ (u, tmp, shasum, auth, cb) {
  // Tuned to spread 3 attempts over about a minute.
  // See formula at <https://github.com/tim-kos/node-retry>.
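  // (node-retry computes each delay as roughly
  //  min(minTimeout * factor^attempt, maxTimeout), with optional randomization.)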
  var operation = retry.operation({
    retries: npm.config.get("fetch-retries")
  , factor: npm.config.get("fetch-retry-factor")
  , minTimeout: npm.config.get("fetch-retry-mintimeout")
  , maxTimeout: npm.config.get("fetch-retry-maxtimeout")
  })

  operation.attempt(function (currentAttempt) {
    log.info("retry", "fetch attempt " + currentAttempt
      + " at " + (new Date()).toLocaleTimeString())
    fetchAndShaCheck(u, tmp, shasum, auth, function (er, response, shasum) {
      // Only retry on 408, 5xx or no `response`.
      var sc = response && response.statusCode
      var statusRetry = !sc || (sc === 408 || sc >= 500)
      if (er && statusRetry && operation.retry(er)) {
        log.warn("retry", "will retry, error on last attempt: " + er)
        return
      }
      cb(er, response, shasum)
    })
  })
}

function fetchAndShaCheck (u, tmp, shasum, auth, cb) {
  npm.registry.fetch(u, { auth : auth }, function (er, response) {
    if (er) {
      log.error("fetch failed", u)
      return cb(er, response)
    }

    var tarball = writeStreamAtomic(tmp, { mode: npm.modes.file })
    tarball.on('error', function (er) {
      cb(er)
      tarball.destroy()
    })

    tarball.on("finish", function () {
      if (!shasum) {
        // Well, we weren't given a shasum, so at least sha what we have
        // in case we want to compare it to something else later
        return sha.get(tmp, function (er, shasum) {
          log.silly("fetchAndShaCheck", "shasum", shasum)
          cb(er, response, shasum)
        })
      }

      // validate that the url we just downloaded matches the expected shasum.
      log.silly("fetchAndShaCheck", "shasum", shasum)
      sha.check(tmp, shasum, function (er) {
        if (er && er.message) {
          // add original filename for better debuggability
          er.message = er.message + "\n" + "From: " + u
        }
        return cb(er, response, shasum)
      })
    })

    // 0.8 http streams have a bug, where if they're paused with data in
    // their buffers when the socket closes, they call `end` before emptying
    // those buffers, which results in the entire pipeline ending and thus
    // the point that applied backpressure never being able to trigger a
    // `resume`.
    // We work around this by piping into a pass through stream that has
    // unlimited buffering. The pass through stream is from readable-stream
    // and is thus a current streams3 implementation that is free of these
    // bugs even on 0.8.
    response.pipe(PassThrough({highWaterMark: Infinity})).pipe(tarball)
  })
}
14  node_modules/npm/lib/cache/cached-package-root.js  generated  vendored  Normal file
@@ -0,0 +1,14 @@
var assert = require("assert")
var resolve = require("path").resolve

var npm = require("../npm.js")

module.exports = getCacheRoot

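// e.g. getCacheRoot({ name: "minimist", version: "1.2.0" })
// resolves to <npm.cache>/minimist/1.2.0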
function getCacheRoot (data) {
  assert(data, "must pass package metadata")
  assert(data.name, "package metadata must include name")
  assert(data.version, "package metadata must include version")

  return resolve(npm.cache, data.name, data.version)
}
218  node_modules/npm/lib/cache/caching-client.js  generated  vendored  Normal file
@@ -0,0 +1,218 @@
module.exports = CachingRegistryClient

var path = require("path")
  , fs = require("graceful-fs")
  , url = require("url")
  , assert = require("assert")
  , inherits = require("util").inherits

var RegistryClient = require("npm-registry-client")
  , npm = require("../npm.js")
  , log = require("npmlog")
  , getCacheStat = require("./get-stat.js")
  , cacheFile = require("npm-cache-filename")
  , mkdirp = require("mkdirp")
  , rimraf = require("rimraf")
  , chownr = require("chownr")
  , writeFile = require("write-file-atomic")

function CachingRegistryClient (config) {
  RegistryClient.call(this, adaptConfig(config))

  this._mapToCache = cacheFile(config.get("cache"))

  // swizzle in our custom cache invalidation logic
  this._request = this.request
  this.request = this._invalidatingRequest
  this.get = get
}
inherits(CachingRegistryClient, RegistryClient)

CachingRegistryClient.prototype._invalidatingRequest = function (uri, params, cb) {
  var client = this
  this._request.call(this, uri, params, function () {
    var args = arguments

    var method = params.method
    if (method !== "HEAD" && method !== "GET") {
      var invalidated = client._mapToCache(uri)
      // invalidate cache
      //
      // This is irrelevant for commands that do etag / last-modified caching,
      // but ls and view also have a timed cache, so this keeps the user from
      // thinking that it didn't work when it did.
      // Note that failure is an acceptable option here, since the only
      // result will be a stale cache for some helper commands.
      log.verbose("request", "invalidating", invalidated, "on", method)
      return rimraf(invalidated, function () {
        cb.apply(undefined, args)
      })
    }

    cb.apply(undefined, args)
  })
}

function get (uri, params, cb) {
  assert(typeof uri === "string", "must pass registry URI to get")
  assert(params && typeof params === "object", "must pass params to get")
  assert(typeof cb === "function", "must pass callback to get")

  var parsed = url.parse(uri)
  assert(
    parsed.protocol === "http:" || parsed.protocol === "https:",
    "must have a URL that starts with http: or https:"
  )

  var cacheBase = cacheFile(npm.config.get("cache"))(uri)
  var cachePath = path.join(cacheBase, ".cache.json")

  // If the GET is part of a write operation (PUT or DELETE), then
  // skip past the cache entirely, but still save the results.
  if (uri.match(/\?write=true$/)) {
    log.verbose("get", "GET as part of write; not caching result")
    return get_.call(this, uri, cachePath, params, cb)
  }

  var client = this
  fs.stat(cachePath, function (er, stat) {
    if (!er) {
      fs.readFile(cachePath, function (er, data) {
        try {
          data = JSON.parse(data)
        }
        catch (ex) {
          data = null
        }

        params.stat = stat
        params.data = data

        get_.call(client, uri, cachePath, params, cb)
      })
    }
    else {
      get_.call(client, uri, cachePath, params, cb)
    }
  })
}

function get_ (uri, cachePath, params, cb) {
  var staleOk = params.staleOk === undefined ? false : params.staleOk
    , timeout = params.timeout === undefined ? -1 : params.timeout
    , data = params.data
    , stat = params.stat
    , etag
    , lastModified

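  // clamp the caller-supplied staleness window to the configured
  // cache-max / cache-min bounds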
  timeout = Math.min(timeout, npm.config.get("cache-max") || 0)
  timeout = Math.max(timeout, npm.config.get("cache-min") || -Infinity)
  if (process.env.COMP_CWORD !== undefined &&
      process.env.COMP_LINE !== undefined &&
      process.env.COMP_POINT !== undefined) {
    timeout = Math.max(timeout, 60000)
  }

  if (data) {
    if (data._etag) etag = data._etag
    if (data._lastModified) lastModified = data._lastModified

    if (stat && timeout && timeout > 0) {
      if ((Date.now() - stat.mtime.getTime())/1000 < timeout) {
        log.verbose("get", uri, "not expired, no request")
        delete data._etag
        delete data._lastModified
        return cb(null, data, JSON.stringify(data), { statusCode : 304 })
      }

      if (staleOk) {
        log.verbose("get", uri, "staleOk, background update")
        delete data._etag
        delete data._lastModified
        process.nextTick(
          cb.bind(null, null, data, JSON.stringify(data), { statusCode : 304 } )
        )
        cb = function () {}
      }
    }
  }

  var options = {
    etag : etag,
    lastModified : lastModified,
    follow : params.follow,
    auth : params.auth
  }
  this.request(uri, options, function (er, remoteData, raw, response) {
    // if we get an error talking to the registry, but we have it
    // from the cache, then just pretend we got it.
    if (er && cachePath && data && !data.error) {
      er = null
      response = { statusCode: 304 }
    }

    if (response) {
      log.silly("get", "cb", [response.statusCode, response.headers])
      if (response.statusCode === 304 && (etag || lastModified)) {
        remoteData = data
        log.verbose(etag ? "etag" : "lastModified", uri+" from cache")
      }
    }

    data = remoteData
    if (!data) er = er || new Error("failed to fetch from registry: " + uri)

    if (er) return cb(er, data, raw, response)

    saveToCache(cachePath, data, saved)

    // just give the write the old college try. if it fails, whatever.
    function saved () {
      delete data._etag
      delete data._lastModified
      cb(er, data, raw, response)
    }

    function saveToCache (cachePath, data, saved) {
      log.verbose("get", "saving", data.name, "to", cachePath)
      getCacheStat(function (er, st) {
        mkdirp(path.dirname(cachePath), function (er, made) {
          if (er) return saved()

          writeFile(cachePath, JSON.stringify(data), function (er) {
            if (er) return saved()

            chownr(made || cachePath, st.uid, st.gid, saved)
          })
        })
      })
    }
  })
}

function adaptConfig (config) {
  return {
    proxy : {
      http : config.get("proxy"),
      https : config.get("https-proxy"),
      localAddress : config.get("local-address")
    },
    ssl : {
      certificate : config.get("cert"),
      key : config.get("key"),
      ca : config.get("ca"),
      strict : config.get("strict-ssl")
    },
    retry : {
      retries : config.get("fetch-retries"),
      factor : config.get("fetch-retry-factor"),
      minTimeout : config.get("fetch-retry-mintimeout"),
      maxTimeout : config.get("fetch-retry-maxtimeout")
    },
    userAgent : config.get("user-agent"),
    log : log,
    defaultTag : config.get("tag"),
    couchToken : config.get("_token"),
    maxSockets : config.get('maxsockets')
  }
}
6  node_modules/npm/lib/cache/get-stat.js  generated  vendored  Normal file
@@ -0,0 +1,6 @@
var npm = require('../npm.js')
var correctMkdir = require('../utils/correct-mkdir.js')

module.exports = function getCacheStat (cb) {
  correctMkdir(npm.cache, cb)
}
104  node_modules/npm/lib/cache/update-index.js  generated  vendored  Normal file
@@ -0,0 +1,104 @@
module.exports = updateIndex

var fs = require('graceful-fs')
var assert = require('assert')
var path = require('path')
var mkdir = require('mkdirp')
var chownr = require('chownr')
var npm = require('../npm.js')
var log = require('npmlog')
var cacheFile = require('npm-cache-filename')
var getCacheStat = require('./get-stat.js')
var mapToRegistry = require('../utils/map-to-registry.js')

/* /-/all is special.
 * It uses timestamp-based caching and partial updates,
 * because it is a monster.
 */
function updateIndex (staleness, cb) {
  assert(typeof cb === 'function', 'must pass callback to updateIndex')

  mapToRegistry('-/all', npm.config, function (er, uri, auth) {
    if (er) return cb(er)

    var params = {
      timeout: staleness,
      follow: true,
      staleOk: true,
      auth: auth
    }
    var cacheBase = cacheFile(npm.config.get('cache'))(uri)
    var cachePath = path.join(cacheBase, '.cache.json')
    log.info('updateIndex', cachePath)

    getCacheStat(function (er, st) {
      if (er) return cb(er)

      mkdir(cacheBase, function (er, made) {
        if (er) return cb(er)

        fs.readFile(cachePath, function (er, data) {
          if (er) {
            log.warn('', 'Building the local index for the first time, please be patient')
            return updateIndex_(uri, params, {}, cachePath, cb)
          }

          chownr(made || cachePath, st.uid, st.gid, function (er) {
            if (er) return cb(er)

            try {
              data = JSON.parse(data)
            } catch (ex) {
              fs.writeFile(cachePath, '{}', function (er) {
                if (er) return cb(new Error('Broken cache.'))

                log.warn('', 'Building the local index for the first time, please be patient')
                return updateIndex_(uri, params, {}, cachePath, cb)
              })
            }

            var t = +data._updated || 0
            // use the cache and update in the background if it's not too old
            if (Date.now() - t < 60000) {
              cb(null, data)
              cb = function () {}
            }

            if (t === 0) {
              log.warn('', 'Building the local index for the first time, please be patient')
            } else {
              log.verbose('updateIndex', 'Cached search data present with timestamp', t)
              uri += '/since?stale=update_after&startkey=' + t
            }
            updateIndex_(uri, params, data, cachePath, cb)
          })
        })
      })
    })
  })
}

function updateIndex_ (all, params, data, cachePath, cb) {
  log.silly('update-index', 'fetching', all)
  npm.registry.request(all, params, function (er, updates, _, res) {
    if (er) return cb(er, data)

    var headers = res.headers
    var updated = updates._updated || Date.parse(headers.date)

    Object.keys(updates).forEach(function (p) { data[p] = updates[p] })

    data._updated = updated
    getCacheStat(function (er, st) {
      if (er) return cb(er)

      fs.writeFile(cachePath, JSON.stringify(data), function (er) {
        delete data._updated
        if (er) return cb(er)
        chownr(cachePath, st.uid, st.gid, function (er) {
          cb(er, data)
        })
      })
    })
  })
}