1
0
mirror of https://github.com/S2-/minifyfromhtml.git synced 2025-08-03 04:10:04 +02:00

update packages to latest version

This commit is contained in:
s2
2022-08-20 18:51:33 +02:00
parent 09663a35a5
commit 806ebf9a57
4513 changed files with 366205 additions and 92512 deletions

127
node_modules/npm/lib/access.js generated vendored Normal file
View File

@@ -0,0 +1,127 @@
'use strict'
var resolve = require('path').resolve
var readPackageJson = require('read-package-json')
var mapToRegistry = require('./utils/map-to-registry.js')
var npm = require('./npm.js')
var whoami = require('./whoami')
module.exports = access

// CLI usage text, printed on invalid invocations.
access.usage =
  'npm access public [<package>]\n' +
  'npm access restricted [<package>]\n' +
  'npm access grant <read-only|read-write> <scope:team> [<package>]\n' +
  'npm access revoke <scope:team> [<package>]\n' +
  'npm access ls-packages [<user>|<scope>|<scope:team>]\n' +
  'npm access ls-collaborators [<package> [<user>]]\n' +
  'npm access edit [<package>]'

access.subcommands = ['public', 'restricted', 'grant', 'revoke',
                      'ls-packages', 'ls-collaborators', 'edit']

// Shell tab-completion for `npm access …`.
// Fix: removed the unreachable `break` after `return` in the 'grant' case
// and folded the duplicated 'revoke' branch into the other no-completion
// cases — behavior is unchanged.
access.completion = function (opts, cb) {
  var argv = opts.conf.argv.remain
  if (argv.length === 2) {
    // completing the subcommand name itself
    return cb(null, access.subcommands)
  }

  switch (argv[2]) {
    case 'grant':
      // the word after `grant` is the permission level
      if (argv.length === 3) {
        return cb(null, ['read-only', 'read-write'])
      }
      return cb(null, [])
    case 'public':
    case 'restricted':
    case 'ls-packages':
    case 'ls-collaborators':
    case 'edit':
    case 'revoke':
      // no registry-backed completion for these arguments
      return cb(null, [])
    default:
      return cb(new Error(argv[2] + ' not recognized'))
  }
}
// Entry point for `npm access <cmd> …`: parse the CLI args into registry
// params, map the target package to a registry URI, then call the registry
// client's access API.
function access (args, cb) {
  var cmd = args.shift()
  var params
  return parseParams(cmd, args, function (err, p) {
    if (err) { return cb(err) }
    params = p
    // params.package may be '' (e.g. ls-packages)
    return mapToRegistry(params.package, npm.config, invokeCmd)
  })

  function invokeCmd (err, uri, auth, base) {
    if (err) { return cb(err) }
    params.auth = auth
    try {
      return npm.registry.access(cmd, uri, params, function (err, data) {
        // print the registry response (when any) in addition to passing
        // it to the callback
        !err && data && console.log(JSON.stringify(data, undefined, 2))
        cb(err, data)
      })
    } catch (e) {
      // the registry client throws synchronously on unrecognized
      // subcommands; surface its message along with the usage text
      cb(e.message + '\n\nUsage:\n' + access.usage)
    }
  }
}
// Translate the remaining CLI args into the params object the registry
// access API expects.  Callback-based because ls-packages may need to look
// up the current user via whoami, and other commands may read package.json.
function parseParams (cmd, args, cb) {
  // mapToRegistry will complain if package is undefined,
  // but it's not needed for ls-packages
  var params = { 'package': '' }
  if (cmd === 'grant') {
    // grant's first arg is the permission level (read-only|read-write)
    params.permissions = args.shift()
  }
  if (['grant', 'revoke', 'ls-packages'].indexOf(cmd) !== -1) {
    // "<scope>:<team>" → scope + team (team is undefined when no colon)
    var entity = (args.shift() || '').split(':')
    params.scope = entity[0]
    params.team = entity[1]
  }

  if (cmd === 'ls-packages') {
    if (!params.scope) {
      // no scope given: default to the logged-in user
      whoami([], true, function (err, scope) {
        params.scope = scope
        cb(err, params)
      })
    } else {
      cb(null, params)
    }
  } else {
    // every other subcommand operates on a concrete package
    getPackage(args.shift(), function (err, pkg) {
      if (err) return cb(err)
      params.package = pkg
      if (cmd === 'ls-collaborators') params.user = args.shift()
      cb(null, params)
    })
  }
}
// Resolve the package name to operate on: use the CLI argument when one
// was given, otherwise fall back to the name in ./package.json.
function getPackage (name, cb) {
  var trimmed = name && name.trim()
  if (trimmed) {
    return cb(null, trimmed)
  }
  readPackageJson(
    resolve(npm.prefix, 'package.json'),
    function (err, data) {
      if (!err) {
        return cb(null, data.name)
      }
      if (err.code === 'ENOENT') {
        return cb(new Error('no package name passed to command and no package.json found'))
      }
      cb(err)
    }
  )
}

177
node_modules/npm/lib/adduser.js generated vendored Normal file
View File

@@ -0,0 +1,177 @@
module.exports = adduser
var log = require("npmlog")
, npm = require("./npm.js")
, read = require("read")
, userValidate = require("npm-user-validate")
, crypto
try {
crypto = require("crypto")
} catch (ex) {}
adduser.usage = 'npm adduser [--registry=url] [--scope=@orgname] [--always-auth]' +
                '\n\naliases: login'

// Interactively prompt for username, password and email, then save the
// resulting credentials.  `c` holds the currently-configured values,
// `u` collects the newly-entered ones.
function adduser (args, cb) {
  npm.spinner.stop()
  if (!crypto) return cb(new Error(
    "You must compile node with ssl support to use the adduser feature"))

  var creds = npm.config.getCredentialsByURI(npm.config.get("registry"))
  var c = { u : creds.username || ""
          , p : creds.password || ""
          , e : creds.email || ""
          }
    , u = {}
    , fns = [readUsername, readPassword, readEmail, save]

  loop()
  // run each prompt step in order, stopping at the first error
  function loop (er) {
    if (er) return cb(er)
    var fn = fns.shift()
    if (fn) return fn(c, u, loop)
    cb()
  }
}
// Prompt for the username, re-prompting until it passes local validation.
// Records whether it differs from the stored one (c.changed), which drives
// the password prompt's "leave unchanged" behavior.
function readUsername (c, u, cb) {
  var v = userValidate.username
  read({prompt: "Username: ", default: c.u || ""}, function (er, un) {
    if (er) {
      // "cancelled" (e.g. ctrl-C) is deliberately passed through as a
      // plain string rather than an Error
      return cb(er.message === "cancelled" ? er.message : er)
    }

    // make sure it's valid. we have to do this here, because
    // couchdb will only ever say "bad password" with a 401 when
    // you try to PUT a _users record that the validate_doc_update
    // rejects for *any* reason.

    if (!un) {
      return readUsername(c, u, cb)
    }

    var error = v(un)
    if (error) {
      log.warn(error.message)
      return readUsername(c, u, cb)
    }

    c.changed = c.u !== un
    u.u = un
    cb(er)
  })
}
// Prompt for the password.  When the username was not changed, an empty
// answer means "keep the stored password".
function readPassword (c, u, cb) {
  var validate = userValidate.pw

  var prompt = (c.p && !c.changed)
    ? "Password: (or leave unchanged) "
    : "Password: "

  read({prompt: prompt, silent: true}, function (er, pw) {
    if (er) {
      return cb(er.message === "cancelled" ? er.message : er)
    }

    if (pw === "" && !c.changed) {
      // when the username was not changed,
      // empty response means "use the old value"
      pw = c.p
    }

    if (!pw) return readPassword(c, u, cb)

    var invalid = validate(pw)
    if (invalid) {
      log.warn(invalid.message)
      return readPassword(c, u, cb)
    }

    c.changed = c.changed || c.p !== pw
    u.p = pw
    cb(er)
  })
}
// Prompt for the (public) e-mail address, re-prompting until it validates.
function readEmail (c, u, cb) {
  var validate = userValidate.email

  read({ prompt: "Email: (this IS public) ", default: c.e || "" }, function (er, em) {
    if (er) {
      return cb(er.message === "cancelled" ? er.message : er)
    }

    if (!em) return readEmail(c, u, cb)

    var invalid = validate(em)
    if (invalid) {
      log.warn(invalid.message)
      return readEmail(c, u, cb)
    }

    u.e = em
    cb(er)
  })
}
// PUT the new credentials to the registry and persist them in the user
// config.  Honors a configured scope by registering the scope→registry
// mapping and storing the credentials against the scoped registry URI.
function save (c, u, cb) {
  npm.spinner.start()

  // save existing configs, but yank off for this PUT
  var uri = npm.config.get("registry")
  var scope = npm.config.get("scope")

  // there may be a saved scope and no --registry (for login)
  if (scope) {
    if (scope.charAt(0) !== "@") scope = "@" + scope

    var scopedRegistry = npm.config.get(scope + ":registry")
    var cliRegistry = npm.config.get("registry", "cli")
    // an explicit --registry on the CLI wins over the scope's registry
    if (scopedRegistry && !cliRegistry) uri = scopedRegistry
  }

  var params = {
    auth : {
      username : u.u,
      password : u.p,
      email : u.e
    }
  }
  npm.registry.adduser(uri, params, function (er, doc) {
    npm.spinner.stop()
    if (er) return cb(er)

    // don't want this polluting the configuration
    npm.config.del("_token", "user")

    if (scope) npm.config.set(scope + ":registry", uri, "user")

    // registries that return a token get token credentials; otherwise the
    // username/password/email are stored directly
    if (doc && doc.token) {
      npm.config.setCredentialsByURI(uri, {
        token : doc.token
      })
    }
    else {
      npm.config.setCredentialsByURI(uri, {
        username : u.u,
        password : u.p,
        email : u.e,
        alwaysAuth : npm.config.get("always-auth")
      })
    }

    log.info('adduser', 'Authorized user %s', u.u)
    var scopeMessage = scope ? ' to scope ' + scope : ''
    console.log('Logged in as %s%s on %s.', u.u, scopeMessage, uri)
    npm.config.save('user', cb)
  })
}

19
node_modules/npm/lib/bin.js generated vendored Normal file
View File

@@ -0,0 +1,19 @@
module.exports = bin

var npm = require("./npm.js")
var osenv = require("osenv")

bin.usage = "npm bin\nnpm bin -g\n(just prints the bin folder)"

// Print npm's bin folder; for global installs, warn on the log stream when
// that folder is not on the user's PATH.
function bin (args, silent, cb) {
  if (typeof cb !== "function") {
    // called as bin(args, cb)
    cb = silent
    silent = false
  }

  var folder = npm.bin
  var envPath = osenv.path()

  if (!silent) console.log(folder)
  process.nextTick(cb.bind(this, null, folder))

  if (npm.config.get("global") && envPath.indexOf(folder) === -1) {
    npm.config.get("logstream").write("(not in PATH env variable)\n")
  }
}

69
node_modules/npm/lib/bugs.js generated vendored Normal file
View File

@@ -0,0 +1,69 @@
module.exports = bugs

bugs.usage = "npm bugs <pkgname>"

var npm = require("./npm.js")
  , log = require("npmlog")
  , opener = require("opener")
  , path = require("path")
  , readJson = require("read-package-json")
  , npa = require("npm-package-arg")
  , fs = require("fs")
  , mapToRegistry = require("./utils/map-to-registry.js")

// Tab-completion: intentionally a no-op.
bugs.completion = function (opts, cb) {
  // FIXME: there used to be registry completion here, but it stopped making
  // sense somewhere around 50,000 packages on the registry
  cb()
}
// `npm bugs [<pkg>]`: open the bug tracker for a local folder's package
// (default ".") or, when no such folder exists, for the named package on
// the registry.
function bugs (args, cb) {
  // npa(...).name is falsy for non-registry specs; fall back to cwd
  var n = args.length && npa(args[0]).name || "."
  fs.stat(n, function (er, s) {
    if (er) {
      if (er.code === "ENOENT") return callRegistry(n, cb)
      return cb(er)
    }
    if (!s.isDirectory()) return callRegistry(n, cb)
    readJson(path.resolve(n, "package.json"), function(er, d) {
      if (er) return cb(er)
      getUrlAndOpen(d, cb)
    })
  })
}
// Work out the best bug-tracker URL for a package and open it in the
// user's browser.  Preference order: the explicit `bugs` field, then a
// GitHub repository URL rewritten to its /issues page, then the package's
// page on the public registry website.
// Fix: the dots in "github.com" were previously unescaped, so the regexes
// matched any character in that position (e.g. "githubXcom"); they are now
// escaped to match only the literal host.
function getUrlAndOpen (d, cb) {
  var repo = d.repository || d.repositories
    , url
  if (d.bugs) {
    url = (typeof d.bugs === "string") ? d.bugs : d.bugs.url
  }
  else if (repo) {
    if (Array.isArray(repo)) repo = repo.shift()
    if (repo.hasOwnProperty("url")) repo = repo.url
    log.verbose("bugs", "repository", repo)
    if (repo && repo.match(/^(https?:\/\/|git(:\/\/|@))github\.com/)) {
      // normalize git/git+ssh forms to https and point at the issues page
      url = repo.replace(/^git(@|:\/\/)/, "https://")
                .replace(/^https?:\/\/github\.com:/, "https://github.com/")
                .replace(/\.git$/, "")+"/issues"
    }
  }
  if (!url) {
    url = "https://www.npmjs.org/package/" + d.name
  }
  log.silly("bugs", "url", url)
  opener(url, { command: npm.config.get("browser") }, cb)
}
// Fetch the latest published metadata for `name` from the registry, then
// open its bug URL.
function callRegistry (name, cb) {
  mapToRegistry(name, npm.config, function (er, uri, auth) {
    if (er) return cb(er)

    var opts = { auth : auth }
    npm.registry.get(uri + "/latest", opts, function (er, d) {
      if (er) return cb(er)
      getUrlAndOpen(d, cb)
    })
  })
}

253
node_modules/npm/lib/build.js generated vendored Normal file
View File

@@ -0,0 +1,253 @@
// npm build command
// everything about the installation after the creation of
// the .npm/{name}/{version}/package folder.
// linking the modules into the npm.root,
// resolving dependencies, etc.
// This runs AFTER install or link are completed.
var npm = require("./npm.js")
, log = require("npmlog")
, chain = require("slide").chain
, fs = require("graceful-fs")
, path = require("path")
, lifecycle = require("./utils/lifecycle.js")
, readJson = require("read-package-json")
, link = require("./utils/link.js")
, linkIfExists = link.ifExists
, cmdShim = require("cmd-shim")
, cmdShimIfExists = cmdShim.ifExists
, asyncMap = require("slide").asyncMap
, ini = require("ini")
, writeFile = require("write-file-atomic")
module.exports = build

build.usage = "npm build <folder>\n(this is plumbing)"

build._didBuild = {}   // folders already built in this process (dedupe)
build._noLC = {}       // sentinel: when passed as didPre, skip lifecycle scripts

// Build each folder in sequence.  The trailing-argument shuffle lets
// callers omit any of global/didPre/didRB.
function build (args, global, didPre, didRB, cb) {
  if (typeof cb !== "function") cb = didRB, didRB = false
  if (typeof cb !== "function") cb = didPre, didPre = false
  if (typeof cb !== "function") {
    cb = global, global = npm.config.get("global")
  }

  // it'd be nice to asyncMap these, but actually, doing them
  // in parallel generally munges up the output from node-waf
  var builder = build_(global, didPre, didRB)
  chain(args.map(function (arg) { return function (cb) {
    builder(arg, cb)
  }}), cb)
}
// Returns the per-folder build function.  chain() skips falsy entries,
// which is how the lifecycle steps are conditionally included below.
function build_ (global, didPre, didRB) { return function (folder, cb) {
  folder = path.resolve(folder)
  if (build._didBuild[folder]) log.info("build", "already built", folder)
  build._didBuild[folder] = true
  log.info("build", folder)
  readJson(path.resolve(folder, "package.json"), function (er, pkg) {
    if (er) return cb(er)
    chain
      ( [ !didPre && [lifecycle, pkg, "preinstall", folder]
        , [linkStuff, pkg, folder, global, didRB]
        , [writeBuiltinConf, pkg, folder]
        // didPre === build._noLC means "run no lifecycle scripts"
        , didPre !== build._noLC && [lifecycle, pkg, "install", folder]
        , didPre !== build._noLC && [lifecycle, pkg, "postinstall", folder]
        , didPre !== build._noLC
          && npm.config.get("npat")
          && [lifecycle, pkg, "test", folder] ]
      , cb )
  })
}}
// the builtin config is "sticky". Any time npm installs
// itself globally, it puts its builtin config file there.
// No-op unless this is a global install of npm itself into the global
// node_modules while running from a builtin config.
function writeBuiltinConf (pkg, folder, cb) {
  var isNpmItself = pkg.name === "npm"
  var isGlobal = npm.config.get("global")
  var inGlobalDir = npm.globalDir === path.dirname(folder)

  if (!(isNpmItself && isGlobal && npm.config.usingBuiltin && inGlobalDir)) {
    return cb()
  }

  var data = ini.stringify(npm.config.sources.builtin.data)
  writeFile(path.resolve(folder, "npmrc"), data, cb)
}
// Link bins and man pages into place, and (unless already done) rebuild
// bundled dependencies.
function linkStuff (pkg, folder, global, didRB, cb) {
  // allow to opt out of linking binaries.
  if (npm.config.get("bin-links") === false) return cb()

  // if it's global, and folder is in {prefix}/node_modules,
  // then bins are in {prefix}/bin
  // otherwise, then bins are in folder/../.bin
  // scoped packages live one directory deeper, so step over the @scope dir
  var parent = pkg.name[0] === '@' ? path.dirname(path.dirname(folder)) : path.dirname(folder)
  var gnm = global && npm.globalDir
  var gtop = parent === gnm

  log.info('linkStuff', pkg._id)
  log.silly('linkStuff', pkg._id, 'has', parent, 'as its parent node_modules')
  if (global) log.silly('linkStuff', pkg._id, 'is part of a global install')
  if (gnm) log.silly('linkStuff', pkg._id, 'is installed into a global node_modules')
  if (gtop) log.silly('linkStuff', pkg._id, 'is installed into the top-level global node_modules')

  shouldWarn(pkg, folder, global, function () {
    asyncMap(
      [linkBins, linkMans, !didRB && rebuildBundles],
      function (fn, cb) {
        if (!fn) return cb()
        log.verbose(fn.name, pkg._id)
        fn(pkg, folder, parent, gtop, cb)
      },
      cb
    )
  })
}
// Warn when a package that prefers global installation is being installed
// locally at the top level without being declared as a (dev)dependency.
function shouldWarn(pkg, folder, global, cb) {
  var parent = path.dirname(folder)
    , top = parent === npm.dir
    , cwd = npm.localPrefix

  readJson(path.resolve(cwd, "package.json"), function(er, topPkg) {
    if (er) return cb(er)

    var linkedPkg = path.basename(cwd)
      , currentPkg = path.basename(folder)

    // current searched package is the linked package on first call
    if (linkedPkg !== currentPkg) {

      // don't generate a warning if it's listed in dependencies
      if (Object.keys(topPkg.dependencies || {})
          .concat(Object.keys(topPkg.devDependencies || {}))
          .indexOf(currentPkg) === -1) {

        if (top && pkg.preferGlobal && !global) {
          log.warn("prefer global", pkg._id + " should be installed with -g")
        }
      }
    }

    cb()
  })
}
// Re-run builds for dependencies shipped inside this package's own
// node_modules folder.
function rebuildBundles (pkg, folder, parent, gtop, cb) {
  if (!npm.config.get("rebuild-bundle")) return cb()

  var deps = Object.keys(pkg.dependencies || {})
             .concat(Object.keys(pkg.devDependencies || {}))
    , bundles = pkg.bundleDependencies || pkg.bundledDependencies || []

  fs.readdir(path.resolve(folder, "node_modules"), function (er, files) {
    // error means no bundles
    if (er) return cb()

    log.verbose("rebuildBundles", files)
    // don't asyncMap these, because otherwise build script output
    // gets interleaved and is impossible to read
    chain(files.filter(function (file) {
      // rebuild if:
      // not a .folder, like .bin or .hooks
      return !file.match(/^[\._-]/)
          // not some old 0.x style bundle
          && file.indexOf("@") === -1
          // either not a dep, or explicitly bundled
          && (deps.indexOf(file) === -1 || bundles.indexOf(file) !== -1)
    }).map(function (file) {
      file = path.resolve(folder, "node_modules", file)
      return function (cb) {
        if (build._didBuild[file]) return cb()
        log.verbose("rebuild bundle", file)
        // if file is not a package dir, then don't do it.
        fs.lstat(path.resolve(file, "package.json"), function (er) {
          if (er) return cb()
          build_(false)(file, cb)
        })
      }}), cb)
  })
}
// Link each entry of pkg.bin into the bin root and make the source file
// executable.
function linkBins (pkg, folder, parent, gtop, cb) {
  if (!pkg.bin || !gtop && path.basename(parent) !== "node_modules") {
    return cb()
  }
  var binRoot = gtop ? npm.globalBin
                     : path.resolve(parent, ".bin")
  log.verbose("link bins", [pkg.bin, binRoot, gtop])

  asyncMap(Object.keys(pkg.bin), function (b, cb) {
    linkBin( path.resolve(folder, pkg.bin[b])
           , path.resolve(binRoot, b)
           , gtop && folder
           , function (er) {
      if (er) return cb(er)
      // bins should always be executable.
      // XXX skip chmod on windows?
      var src = path.resolve(folder, pkg.bin[b])
      fs.chmod(src, npm.modes.exec, function (er) {
        if (er && er.code === "ENOENT" && npm.config.get("ignore-scripts")) {
          return cb()
        }
        if (er || !gtop) return cb(er)
        // global top-level installs report what got linked where
        var dest = path.resolve(binRoot, b)
          , out = npm.config.get("parseable")
                ? dest + "::" + src + ":BINFILE"
                : dest + " -> " + src
        console.log(out)
        cb()
      })
    })
  }, cb)
}
// Symlink on POSIX platforms; use a cmd shim on Windows.
function linkBin (from, to, gently, cb) {
  if (process.platform === "win32") {
    return cmdShimIfExists(from, to, cb)
  }
  return linkIfExists(from, to, gently, cb)
}
// Link man pages into {prefix}/share/man for global top-level installs
// (POSIX only).
function linkMans (pkg, folder, parent, gtop, cb) {
  if (!pkg.man || !gtop || process.platform === "win32") return cb()

  var manRoot = path.resolve(npm.config.get("prefix"), "share", "man")
  log.verbose("linkMans", "man files are", pkg.man, "in", manRoot)

  // make sure that the mans are unique.
  // otherwise, if there are dupes, it'll fail with EEXIST
  var set = pkg.man.reduce(function (acc, man) {
    acc[path.basename(man)] = man
    return acc
  }, {})
  pkg.man = pkg.man.filter(function (man) {
    return set[path.basename(man)] === man
  })

  asyncMap(pkg.man, function (man, cb) {
    if (typeof man !== "string") return cb()
    log.silly("linkMans", "preparing to link", man)
    // [1] = full filename, [2] = man section number, [3] = optional ".gz"
    var parseMan = man.match(/(.*\.([0-9]+)(\.gz)?)$/)
    if (!parseMan) {
      return cb(new Error(
        man+" is not a valid name for a man file. " +
        "Man files must end with a number, " +
        "and optionally a .gz suffix if they are compressed."
      ))
    }

    var stem = parseMan[1]
    var sxn = parseMan[2]
    var bn = path.basename(stem)
    var manSrc = path.resolve(folder, man)
    // e.g. {manRoot}/man1/foo.1
    var manDest = path.join(manRoot, "man" + sxn, bn)

    linkIfExists(manSrc, manDest, gtop && folder, cb)
  }, cb)
}

354
node_modules/npm/lib/cache.js generated vendored Normal file
View File

@@ -0,0 +1,354 @@
// XXX lib/utils/tar.js and this file need to be rewritten.
// URL-to-cache folder mapping:
// : -> !
// @ -> _
// http://registry.npmjs.org/foo/version -> cache/http!/...
//
/*
fetching a URL:
1. Check for URL in inflight URLs. If present, add cb, and return.
2. Acquire lock at {cache}/{sha(url)}.lock
retries = {cache-lock-retries, def=10}
stale = {cache-lock-stale, def=60000}
wait = {cache-lock-wait, def=10000}
3. if lock can't be acquired, then fail
4. fetch url, clear lock, call cbs
cache folders:
1. urls: http!/server.com/path/to/thing
2. c:\path\to\thing: file!/c!/path/to/thing
3. /path/to/thing: file!/path/to/thing
4. git@ private: git_github.com!npm/npm
5. git://public: git!/github.com/npm/npm
6. git+blah:// git-blah!/server.com/foo/bar
adding a folder:
1. tar into tmp/random/package.tgz
2. untar into tmp/random/contents/package, stripping one dir piece
3. tar tmp/random/contents/package to cache/n/v/package.tgz
4. untar cache/n/v/package.tgz into cache/n/v/package
5. rm tmp/random
Adding a url:
1. fetch to tmp/random/package.tgz
2. goto folder(2)
adding a name@version:
1. registry.get(name/version)
2. if response isn't 304, add url(dist.tarball)
adding a name@range:
1. registry.get(name)
2. Find a version that satisfies
3. add name@version
adding a local tarball:
1. untar to tmp/random/{blah}
2. goto folder(2)
adding a namespaced package:
1. lookup registry for @namespace
2. namespace_registry.get('name')
3. add url(namespace/latest.tarball)
*/
exports = module.exports = cache
cache.unpack = unpack
cache.clean = clean
cache.read = read
var npm = require("./npm.js")
, fs = require("graceful-fs")
, writeFileAtomic = require("write-file-atomic")
, assert = require("assert")
, rm = require("./utils/gently-rm.js")
, readJson = require("read-package-json")
, log = require("npmlog")
, path = require("path")
, asyncMap = require("slide").asyncMap
, tar = require("./utils/tar.js")
, fileCompletion = require("./utils/completion/file-completion.js")
, deprCheck = require("./utils/depr-check.js")
, addNamed = require("./cache/add-named.js")
, addLocal = require("./cache/add-local.js")
, addRemoteTarball = require("./cache/add-remote-tarball.js")
, addRemoteGit = require("./cache/add-remote-git.js")
, inflight = require("inflight")
, realizePackageSpecifier = require("realize-package-specifier")
, npa = require("npm-package-arg")
, getStat = require("./cache/get-stat.js")
, cachedPackageRoot = require("./cache/cached-package-root.js")
, mapToRegistry = require("./utils/map-to-registry.js")
cache.usage = "npm cache add <tarball file>"
            + "\nnpm cache add <folder>"
            + "\nnpm cache add <tarball url>"
            + "\nnpm cache add <git url>"
            + "\nnpm cache add <name>@<version>"
            + "\nnpm cache ls [<path>]"
            + "\nnpm cache clean [<pkg>[@<version>]]"

// Tab-completion for `npm cache …`.
cache.completion = function (opts, cb) {
  var argv = opts.conf.argv.remain
  if (argv.length === 2) {
    return cb(null, ["add", "ls", "clean"])
  }

  switch (argv[2]) {
    case "clean":
    case "ls":
      // cache and ls are easy, because the completion is
      // what ls_ returns anyway.
      // just get the partial words, minus the last path part
      var p = path.dirname(opts.partialWords.slice(3).join("/"))
      if (p === ".") p = ""
      return ls_(p, 2, cb)
    case "add":
      // Same semantics as install and publish.
      return npm.commands.install.completion(opts, cb)
  }
}
// Dispatch `npm cache <cmd>` to its implementation, accepting the
// documented aliases for clean and ls.
function cache (args, cb) {
  var cmd = args.shift()
  if (cmd === "rm" || cmd === "clear" || cmd === "clean") {
    return clean(args, cb)
  }
  if (cmd === "list" || cmd === "sl" || cmd === "ls") {
    return ls(args, cb)
  }
  if (cmd === "add") {
    return add(args, npm.prefix, cb)
  }
  return cb("Usage: "+cache.usage)
}
// if the pkg and ver are in the cache, then
// just do a readJson and return.
// if they're not, then fetch them from the registry.
function read (name, ver, forceBypass, cb) {
  assert(typeof name === "string", "must include name of module to install")
  assert(typeof cb === "function", "must include callback")

  // by default, --force bypasses the cache
  if (forceBypass === undefined || forceBypass === null) forceBypass = true

  var root = cachedPackageRoot({name : name, version : ver})
  // shared completion: log failures and run the deprecation check
  function c (er, data) {
    if (er) log.verbose("cache", "addNamed error for", name+"@"+ver, er)
    if (data) deprCheck(data)

    return cb(er, data)
  }

  if (forceBypass && npm.config.get("force")) {
    log.verbose("using force", "skipping cache")
    return addNamed(name, ver, null, c)
  }

  readJson(path.join(root, "package", "package.json"), function (er, data) {
    // ENOENT / ENOTDIR simply mean "not cached yet"
    if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)

    if (data) {
      if (!data.name) return cb(new Error("No name provided"))
      if (!data.version) return cb(new Error("No version provided"))
    }

    if (er) return addNamed(name, ver, null, c)
    else c(er, data)
  })
}
// Turn cache ls/clean CLI args into a normalized relative path inside the
// cache folder ("name/version[/…]").
function normalize (args) {
  var result = ""

  if (args.length > 0) {
    var parsed = npa(args[0])
    if (parsed.name) result = parsed.name
    if (parsed.rawSpec) result = [result, parsed.rawSpec].join("/")
    if (args.length > 1) {
      result = [result].concat(args.slice(1)).join("/")
    }
  }

  // strip a single trailing slash
  if (result.substr(-1) === "/") {
    result = result.substr(0, result.length - 1)
  }
  result = path.normalize(result)
  log.silly("ls", "normalized", result)
  return result
}
// npm cache ls [<path>]
function ls (args, cb) {
  var prefix = npm.config.get("cache")
  // abbreviate the home directory as "~" in the printed paths
  if (prefix.indexOf(process.env.HOME) === 0) {
    prefix = "~" + prefix.substr(process.env.HOME.length)
  }
  ls_(normalize(args), npm.config.get("depth"), function (er, files) {
    console.log(files.map(function (f) {
      return path.join(prefix, f)
    }).join("\n").trim())
    cb(er, files)
  })
}
// Calls cb with list of cached pkgs matching show.
// (delegates to fileCompletion rooted at npm.cache; depth presumably
// limits directory recursion — see utils/completion/file-completion.js)
function ls_ (req, depth, cb) {
  return fileCompletion(npm.cache, req, depth, cb)
}
// npm cache clean [<path>]
function clean (args, cb) {
  assert(typeof cb === "function", "must include callback")

  if (!args) args = []

  var f = path.join(npm.cache, normalize(args))
  if (f === npm.cache) {
    // cleaning the whole cache: remove every entry, but keep the "-"
    // entry unless --force is given
    fs.readdir(npm.cache, function (er, files) {
      if (er) return cb()
      asyncMap( files.filter(function (f) {
                  return npm.config.get("force") || f !== "-"
                }).map(function (f) {
                  return path.join(npm.cache, f)
                })
              , rm, cb )
    })
  }
  else {
    rm(f, cb)
  }
}
// npm cache add <tarball-url>
// npm cache add <pkg> <ver>
// npm cache add <tarball>
// npm cache add <folder>
cache.add = function (pkg, ver, where, scrub, cb) {
  assert(typeof pkg === "string", "must include name of package to install")
  assert(typeof cb === "function", "must include callback")

  // scrub: wipe the entire cache first, then add
  if (scrub) {
    return clean([], function (er) {
      if (er) return cb(er)
      add([pkg, ver], where, cb)
    })
  }
  return add([pkg, ver], where, cb)
}
// number of in-flight add operations; drives the spinner start/stop
var adding = 0
function add (args, where, cb) {
  // this is hot code. almost everything passes through here.
  // the args can be any of:
  // ["url"]
  // ["pkg", "version"]
  // ["pkg@version"]
  // ["pkg", "url"]
  // This is tricky, because urls can contain @
  // Also, in some cases we get [name, null] rather
  // that just a single argument.

  var usage = "Usage:\n"
            + " npm cache add <tarball-url>\n"
            + " npm cache add <pkg>@<ver>\n"
            + " npm cache add <tarball>\n"
            + " npm cache add <folder>\n"
    , spec

  log.silly("cache add", "args", args)

  if (args[1] === undefined) args[1] = null

  // at this point the args length must ==2
  if (args[1] !== null) {
    spec = args[0]+"@"+args[1]
  } else if (args.length === 2) {
    spec = args[0]
  }

  log.verbose("cache add", "spec", spec)

  if (!spec) return cb(usage)

  if (adding <= 0) {
    npm.spinner.start()
  }
  adding++
  // afterAdd decrements the counter and persists metadata on success
  cb = afterAdd(cb)

  // resolve the spec to a concrete type, then dispatch to the matching
  // add-* implementation
  realizePackageSpecifier(spec, where, function (err, p) {
    if (err) return cb(err)

    log.silly("cache add", "parsed spec", p)

    switch (p.type) {
      case "local":
      case "directory":
        addLocal(p, null, cb)
        break
      case "remote":
        // get auth, if possible
        mapToRegistry(p.raw, npm.config, function (err, uri, auth) {
          if (err) return cb(err)

          addRemoteTarball(p.spec, {name : p.name}, null, auth, cb)
        })
        break
      case "git":
      case "hosted":
        addRemoteGit(p.rawSpec, cb)
        break
      default:
        if (p.name) return addNamed(p.name, p.spec, null, cb)

        cb(new Error("couldn't figure out how to install " + spec))
    }
  })
}
// Unpack a cached package tarball into unpackTarget.  dMode, fMode, uid
// and gid are all optional (trailing-argument shuffle).
function unpack (pkg, ver, unpackTarget, dMode, fMode, uid, gid, cb) {
  if (typeof cb !== "function") cb = gid, gid = null
  if (typeof cb !== "function") cb = uid, uid = null
  if (typeof cb !== "function") cb = fMode, fMode = null
  if (typeof cb !== "function") cb = dMode, dMode = null

  // ensure the package is present in the cache first (no force-bypass)
  read(pkg, ver, false, function (er) {
    if (er) {
      log.error("unpack", "Could not read data for %s", pkg + "@" + ver)
      return cb(er)
    }
    npm.commands.unbuild([unpackTarget], true, function (er) {
      if (er) return cb(er)
      tar.unpack( path.join(cachedPackageRoot({name : pkg, version : ver}), "package.tgz")
                , unpackTarget
                , dMode, fMode
                , uid, gid
                , cb )
    })
  })
}
// Wraps an add callback: maintains the spinner refcount and persists the
// freshly-added metadata back into the cache's package.json.
function afterAdd (cb) { return function (er, data) {
  adding--
  if (adding <= 0) npm.spinner.stop()

  if (er || !data || !data.name || !data.version) return cb(er, data)

  log.silly("cache", "afterAdd", data.name+"@"+data.version)

  // Save the resolved, shasum, etc. into the data so that the next
  // time we load from this cached data, we have all the same info.
  // Ignore if it fails.
  var pj = path.join(cachedPackageRoot(data), "package", "package.json")

  var done = inflight(pj, cb)
  if (!done) return log.verbose("afterAdd", pj, "already in flight; not writing")

  log.verbose("afterAdd", pj, "not in flight; writing")
  getStat(function (er, cs) {
    if (er) return done(er)

    writeFileAtomic(pj, JSON.stringify(data), {chown : cs}, function (er) {
      if (!er) log.verbose("afterAdd", pj, "written")

      // the write error is deliberately dropped: the cache write is
      // advisory, so the add itself still succeeds
      return done(null, data)
    })
  })
}}

180
node_modules/npm/lib/cache/add-local-tarball.js generated vendored Normal file
View File

@@ -0,0 +1,180 @@
var mkdir = require("mkdirp")
, assert = require("assert")
, fs = require("graceful-fs")
, writeFileAtomic = require("write-file-atomic")
, path = require("path")
, sha = require("sha")
, npm = require("../npm.js")
, log = require("npmlog")
, tar = require("../utils/tar.js")
, pathIsInside = require("path-is-inside")
, getCacheStat = require("./get-stat.js")
, cachedPackageRoot = require("./cached-package-root.js")
, chownr = require("chownr")
, inflight = require("inflight")
, once = require("once")
, writeStreamAtomic = require("fs-write-stream-atomic")
, randomBytes = require("crypto").pseudoRandomBytes // only need uniqueness
module.exports = addLocalTarball

// Add a tarball on the local filesystem to the cache, computing its shasum
// first if one was not supplied.
function addLocalTarball (p, pkgData, shasum, cb) {
  assert(typeof p === "string", "must have path")
  assert(typeof cb === "function", "must have callback")

  if (!pkgData) pkgData = {}

  // If we don't have a shasum yet, compute it.
  if (!shasum) {
    return sha.get(p, function (er, shasum) {
      if (er) return cb(er)
      log.silly("addLocalTarball", "shasum (computed)", shasum)
      addLocalTarball(p, pkgData, shasum, cb)
    })
  }

  if (pathIsInside(p, npm.cache)) {
    if (path.basename(p) !== "package.tgz") {
      return cb(new Error("Not a valid cache tarball name: "+p))
    }
    log.verbose("addLocalTarball", "adding from inside cache", p)
    return addPlacedTarball(p, pkgData, shasum, cb)
  }

  // tarball from outside the cache: validate/unpack via a tmp dir first
  addTmpTarball(p, pkgData, shasum, function (er, data) {
    if (data) {
      data._resolved = p
      data._shasum = data._shasum || shasum
    }
    return cb(er, data)
  })
}
// Stat the cache dir for ownership info, then finish adding a tarball
// that already lives inside the cache.
function addPlacedTarball (p, pkgData, shasum, cb) {
  assert(pkgData, "should have package data by now")
  assert(typeof cb === "function", "cb function required")

  getCacheStat(function (er, cs) {
    if (er) return cb(er)
    return addPlacedTarball_(p, pkgData, cs.uid, cs.gid, shasum, cb)
  })
}
// Write the package.json metadata next to a tarball already at its final
// cache location, computing the shasum first when missing.
// NOTE(review): uid/gid are accepted but not used in this body.
function addPlacedTarball_ (p, pkgData, uid, gid, resolvedSum, cb) {
  var folder = path.join(cachedPackageRoot(pkgData), "package")

  // First, make sure we have the shasum, if we don't already.
  if (!resolvedSum) {
    sha.get(p, function (er, shasum) {
      if (er) return cb(er)
      addPlacedTarball_(p, pkgData, uid, gid, shasum, cb)
    })
    return
  }

  mkdir(folder, function (er) {
    if (er) return cb(er)
    var pj = path.join(folder, "package.json")
    var json = JSON.stringify(pkgData, null, 2)
    writeFileAtomic(pj, json, function (er) {
      cb(er, pkgData)
    })
  })
}
// Validate a tarball's contents (unless its metadata is already known)
// and then move it into its cache location.  Deduplicated per-tarball
// via inflight.
function addTmpTarball (tgz, pkgData, shasum, cb) {
  assert(typeof cb === "function", "must have callback function")
  assert(shasum, "must have shasum by now")

  cb = inflight("addTmpTarball:" + tgz, cb)
  if (!cb) return log.verbose("addTmpTarball", tgz, "already in flight; not adding")
  log.verbose("addTmpTarball", tgz, "not in flight; adding")

  // we already have the package info, so just move into place
  if (pkgData && pkgData.name && pkgData.version) {
    log.verbose(
      "addTmpTarball",
      "already have metadata; skipping unpack for",
      pkgData.name + "@" + pkgData.version
    )
    return addTmpTarball_(tgz, pkgData, shasum, cb)
  }

  // This is a tarball we probably downloaded from the internet. The shasum's
  // already been checked, but we haven't ever had a peek inside, so we unpack
  // it here just to make sure it is what it says it is.
  //
  // NOTE: we might not have any clue what we think it is, for example if the
  // user just did `npm install ./foo.tgz`

  // generate a unique filename
  randomBytes(6, function (er, random) {
    if (er) return cb(er)

    var target = path.join(npm.tmp, "unpack-" + random.toString("hex"))
    getCacheStat(function (er, cs) {
      if (er) return cb(er)

      log.verbose("addTmpTarball", "validating metadata from", tgz)
      tar.unpack(tgz, target, null, null, cs.uid, cs.gid, function (er, data) {
        if (er) return cb(er)

        // check that this is what we expected.
        if (!data.name) {
          return cb(new Error("No name provided"))
        }
        else if (pkgData.name && data.name !== pkgData.name) {
          return cb(new Error("Invalid Package: expected " + pkgData.name +
                              " but found " + data.name))
        }

        if (!data.version) {
          return cb(new Error("No version provided"))
        }
        else if (pkgData.version && data.version !== pkgData.version) {
          return cb(new Error("Invalid Package: expected " +
                              pkgData.name + "@" + pkgData.version +
                              " but found " + data.name + "@" + data.version))
        }

        addTmpTarball_(tgz, data, shasum, cb)
      })
    })
  })
}
// Copy a validated tarball into cache/{name}/{version}/package.tgz,
// chowning the created directories when ownership info is available.
function addTmpTarball_ (tgz, data, shasum, cb) {
  assert(typeof cb === "function", "must have callback function")
  cb = once(cb)

  assert(data.name, "should have package name by now")
  assert(data.version, "should have package version by now")

  var root = cachedPackageRoot(data)
  var pkg = path.resolve(root, "package")
  var target = path.resolve(root, "package.tgz")
  getCacheStat(function (er, cs) {
    if (er) return cb(er)
    mkdir(pkg, function (er, created) {

      // chown starting from the first dir created by mkdirp,
      // or the root dir, if none had to be created, so that
      // we know that we get all the children.
      function chown () {
        chownr(created || root, cs.uid, cs.gid, done)
      }

      if (er) return cb(er)
      var read = fs.createReadStream(tgz)
      var write = writeStreamAtomic(target, { mode: npm.modes.file })
      // only chown when we actually know the uid/gid to apply
      var fin = cs.uid && cs.gid ? chown : done
      read.on("error", cb).pipe(write).on("error", cb).on("close", fin)
    })

  })

  // record the shasum on the returned metadata
  function done() {
    data._shasum = data._shasum || shasum
    cb(null, data)
  }
}

126
node_modules/npm/lib/cache/add-local.js generated vendored Normal file
View File

@@ -0,0 +1,126 @@
var assert = require("assert")
, path = require("path")
, mkdir = require("mkdirp")
, chownr = require("chownr")
, pathIsInside = require("path-is-inside")
, readJson = require("read-package-json")
, log = require("npmlog")
, npm = require("../npm.js")
, tar = require("../utils/tar.js")
, deprCheck = require("../utils/depr-check.js")
, getCacheStat = require("./get-stat.js")
, cachedPackageRoot = require("./cached-package-root.js")
, addLocalTarball = require("./add-local-tarball.js")
, sha = require("sha")
, inflight = require("inflight")
module.exports = addLocal

// Add a local package (directory or tarball spec) to the cache, then
// decorate the returned metadata with _from/_resolved relative to the
// npm prefix.
function addLocal (p, pkgData, cb_) {
  assert(typeof p === "object", "must have spec info")
  assert(typeof cb_ === "function", "must have callback")

  pkgData = pkgData || {}

  // wrap the callback so failures are logged and successes get the
  // _from/_resolved fields filled in
  function cb (er, data) {
    if (er) {
      log.error("addLocal", "Could not install %s", p.spec)
      return cb_(er)
    }
    if (data && !data._fromGithub) {
      data._from = path.relative(npm.prefix, p.spec) || "."
      var resolved = path.relative(npm.prefix, p.spec)
      if (resolved) data._resolved = "file:"+resolved
    }
    return cb_(er, data)
  }

  if (p.type === "directory") {
    addLocalDirectory(p.spec, pkgData, null, cb)
  }
  else {
    addLocalTarball(p.spec, pkgData, null, cb)
  }
}
// At this point, if shasum is set, it's something that we've already
// read and checked. Just stashing it in the data at this point.
//
// Pack a package directory `p` into a tarball under the cache
// ({cache}/{name}/{version}/package.tgz), then add that tarball to the
// cache like any other local tarball. The on-disk package.json must agree
// with any name/version the caller already expects in `pkgData`.
function addLocalDirectory (p, pkgData, shasum, cb) {
  assert(pkgData, "must pass package data")
  assert(typeof cb === "function", "must have callback")

  // if it's a folder, then read the package.json,
  // tar it to the proper place, and add the cache tar
  if (pathIsInside(p, npm.cache)) return cb(new Error(
    "Adding a cache directory to the cache will make the world implode."))

  readJson(path.join(p, "package.json"), false, function (er, data) {
    if (er) return cb(er)

    if (!data.name) {
      return cb(new Error("No name provided in package.json"))
    }
    else if (pkgData.name && pkgData.name !== data.name) {
      return cb(new Error(
        "Invalid package: expected " + pkgData.name + " but found " + data.name
      ))
    }

    if (!data.version) {
      return cb(new Error("No version provided in package.json"))
    }
    else if (pkgData.version && pkgData.version !== data.version) {
      return cb(new Error(
        "Invalid package: expected " + pkgData.name + "@" + pkgData.version +
        " but found " + data.name + "@" + data.version
      ))
    }

    deprCheck(data)

    // pack to {cache}/name/ver/package.tgz
    var root = cachedPackageRoot(data)
    var tgz = path.resolve(root, "package.tgz")
    var pj = path.resolve(root, "package/package.json")

    // collapse concurrent packs of the same tarball into one operation
    var wrapped = inflight(tgz, next)
    if (!wrapped) return log.verbose("addLocalDirectory", tgz, "already in flight; waiting")
    log.verbose("addLocalDirectory", tgz, "not in flight; packing")

    getCacheStat(function (er, cs) {
      // NOTE(review): an error from getCacheStat is not checked here (er is
      // shadowed by mkdir's callback parameter below); a missing/invalid `cs`
      // is handled by skipping the chownr step -- confirm this is intentional
      mkdir(path.dirname(pj), function (er, made) {
        if (er) return wrapped(er)
        // "fancy" packing only when the source isn't inside npm's tmp dir
        var fancy = !pathIsInside(p, npm.tmp)
        tar.pack(tgz, p, data, fancy, function (er) {
          if (er) {
            log.error("addLocalDirectory", "Could not pack", p, "to", tgz)
            return wrapped(er)
          }

          // without a usable uid/gid, leave ownership alone
          if (!cs || isNaN(cs.uid) || isNaN(cs.gid)) return wrapped()

          chownr(made || tgz, cs.uid, cs.gid, function (er) {
            if (er && er.code === 'ENOENT') return wrapped()
            wrapped(er)
          })
        })
      })
    })

    // runs once the tarball exists: hand off to addLocalTarball, computing
    // the shasum first if the caller didn't supply one
    function next (er) {
      if (er) return cb(er)
      // if we have the shasum already, just add it
      if (shasum) {
        return addLocalTarball(tgz, data, shasum, cb)
      } else {
        sha.get(tgz, function (er, shasum) {
          if (er) {
            return cb(er)
          }
          data._shasum = shasum
          return addLocalTarball(tgz, data, shasum, cb)
        })
      }
    }
  })
}

299
node_modules/npm/lib/cache/add-named.js generated vendored Normal file
View File

@@ -0,0 +1,299 @@
var path = require("path")
, assert = require("assert")
, fs = require("graceful-fs")
, http = require("http")
, log = require("npmlog")
, semver = require("semver")
, readJson = require("read-package-json")
, url = require("url")
, npm = require("../npm.js")
, deprCheck = require("../utils/depr-check.js")
, inflight = require("inflight")
, addRemoteTarball = require("./add-remote-tarball.js")
, cachedPackageRoot = require("./cached-package-root.js")
, mapToRegistry = require("../utils/map-to-registry.js")
module.exports = addNamed
// Fetch a package document from the registry, collapsing concurrent
// requests for the same registry URI into a single GET. `from` names the
// calling operation (for logging); `next` receives (er, data, json, resp);
// `done` receives early errors (registry mapping failures).
function getOnceFromRegistry (name, from, next, done) {
  function fixName(err, data, json, resp) {
    // this is only necessary until npm/npm-registry-client#80 is fixed
    if (err && err.pkgid && err.pkgid !== name) {
      err.message = err.message.replace(
        new RegExp(': ' + err.pkgid.replace(/(\W)/g, '\\$1') + '$'),
        ': ' + name
      )
      err.pkgid = name
    }
    next(err, data, json, resp)
  }

  mapToRegistry(name, npm.config, function (er, uri, auth) {
    if (er) return done(er)

    // collapse concurrent GETs of the same registry document
    var key = "registry:" + uri
    next = inflight(key, next)
    if (!next) return log.verbose(from, key, "already in flight; waiting")
    else log.verbose(from, key, "not in flight; fetching")

    npm.registry.get(uri, { auth : auth }, fixName)
  })
}
// Add a registry package to the cache. `version` may be an exact semver
// version, a semver range, or a dist-tag; each form is dispatched to its
// own resolver. `data` is optional pre-fetched registry metadata.
function addNamed (name, version, data, cb_) {
  assert(typeof name === "string", "must have module name")
  assert(typeof cb_ === "function", "must have callback")

  var key = name + "@" + version
  log.silly("addNamed", key)

  // tag the resulting metadata with the spec that requested it
  function cb (er, data) {
    if (data && !data._fromGithub) data._from = key
    cb_(er, data)
  }

  var printable = JSON.stringify(version)
  if (semver.valid(version, true)) {
    log.verbose('addNamed', printable, 'is a plain semver version for', name)
    addNameVersion(name, version, data, cb)
  } else if (semver.validRange(version, true)) {
    log.verbose('addNamed', printable, 'is a valid semver range for', name)
    addNameRange(name, version, data, cb)
  } else {
    log.verbose('addNamed', printable, 'is being treated as a dist-tag for', name)
    addNameTag(name, version, data, cb)
  }
}
// Resolve a dist-tag (e.g. "latest", "beta") for `name` to a concrete
// version and add that version to the cache. When no tag was given
// explicitly, the configured default tag is used; if that implicit tag
// can't be resolved, any published version ("*") is accepted instead.
function addNameTag (name, tag, data, cb) {
  log.info("addNameTag", [name, tag])
  var explicit = true
  if (!tag) {
    explicit = false
    tag = npm.config.get("tag")
  }

  getOnceFromRegistry(name, "addNameTag", next, cb)

  function next (er, data, json, resp) {
    if (!er) er = errorResponse(name, resp)
    if (er) return cb(er)

    log.silly("addNameTag", "next cb for", name, "with tag", tag)

    engineFilter(data)
    if (data["dist-tags"] && data["dist-tags"][tag]
        && data.versions[data["dist-tags"][tag]]) {
      var ver = data["dist-tags"][tag]
      return addNamed(name, ver, data.versions[ver], cb)
    }
    // the implicit default tag didn't resolve; fall back to any version
    if (!explicit && Object.keys(data.versions).length) {
      return addNamed(name, "*", data, cb)
    }

    er = installTargetsError(tag, data)
    return cb(er)
  }
}
// Remove from data.versions (in place) any version whose "engines"
// requirements are not satisfied by the current node/npm versions. A
// no-op when no node version is configured or --force is set; the check
// only applies in engine-strict mode or when the package itself sets
// engineStrict.
function engineFilter (data) {
  var npmVersion = npm.version
  var nodeVersion = npm.config.get("node-version")
  var strict = npm.config.get("engine-strict")

  if (!nodeVersion || npm.config.get("force")) return data

  Object.keys(data.versions || {}).forEach(function (version) {
    var pkg = data.versions[version]
    var engines = pkg.engines
    if (!engines) return
    if (!strict && !pkg.engineStrict) return

    var unsatisfied =
      (engines.node && !semver.satisfies(nodeVersion, engines.node, true)) ||
      (engines.npm && !semver.satisfies(npmVersion, engines.npm, true))
    if (unsatisfied) {
      delete data.versions[version]
    }
  })
}
// Add an exact version of a registry package to the cache. `data`, when
// provided, is that version's registry metadata (skips the registry
// round-trip); otherwise the package document is fetched first. A cached
// tarball is reused only when the registry document was served from cache
// (304) and the recorded shasum matches the registry's, so packages
// installed from elsewhere (forks) get re-fetched.
function addNameVersion (name, v, data, cb) {
  var ver = semver.valid(v, true)
  if (!ver) return cb(new Error("Invalid version: "+v))

  var response

  if (data) {
    response = null
    return next()
  }

  getOnceFromRegistry(name, "addNameVersion", setData, cb)

  function setData (er, d, json, resp) {
    if (!er) {
      er = errorResponse(name, resp)
    }
    if (er) return cb(er)

    data = d && d.versions[ver]
    if (!data) {
      er = new Error("version not found: "+name+"@"+ver)
      er.package = name
      er.statusCode = 404
      return cb(er)
    }

    response = resp
    next()
  }

  function next () {
    deprCheck(data)
    var dist = data.dist

    if (!dist) return cb(new Error("No dist in "+data._id+" package"))

    if (!dist.tarball) return cb(new Error(
      "No dist.tarball in " + data._id + " package"))

    // a non-304 response means the registry document changed (or we're
    // forcing); don't trust any cached tarball in that case
    if ((response && response.statusCode !== 304) || npm.config.get("force")) {
      return fetchit()
    }

    // we got cached data, so let's see if we have a tarball.
    var pkgroot = cachedPackageRoot({name : name, version : ver})
    var pkgtgz = path.join(pkgroot, "package.tgz")
    var pkgjson = path.join(pkgroot, "package", "package.json")

    fs.stat(pkgtgz, function (er) {
      if (!er) {
        readJson(pkgjson, function (er, data) {
          if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)

          if (data) {
            if (!data.name) return cb(new Error("No name provided"))
            if (!data.version) return cb(new Error("No version provided"))

            // check the SHA of the package we have, to ensure it wasn't installed
            // from somewhere other than the registry (eg, a fork)
            if (data._shasum && dist.shasum && data._shasum !== dist.shasum) {
              return fetchit()
            }
          }

          if (er) return fetchit()
          else return cb(null, data)
        })
      } else return fetchit()
    })

    function fetchit () {
      mapToRegistry(name, npm.config, function (er, _, auth, ruri) {
        if (er) return cb(er)

        // Use the same protocol as the registry. https registry --> https
        // tarballs, but only if they're the same hostname, or else detached
        // tarballs may not work.
        var tb = url.parse(dist.tarball)
        var rp = url.parse(ruri)
        if (tb.hostname === rp.hostname && tb.protocol !== rp.protocol) {
          tb.protocol = rp.protocol
          // If a different port is associated with the other protocol
          // we need to update that as well
          if (rp.port !== tb.port) {
            tb.port = rp.port
            delete tb.host
          }
          delete tb.href
        }
        tb = url.format(tb)

        // Only add non-shasum'ed packages if --forced. Only ancient things
        // would lack this for good reasons nowadays.
        if (!dist.shasum && !npm.config.get("force")) {
          return cb(new Error("package lacks shasum: " + data._id))
        }

        addRemoteTarball(tb, data, dist.shasum, auth, cb)
      })
    }
  }
}
// Resolve a semver range for `name` to the best matching published
// version and add it to the cache. Prefers the configured dist-tag's
// version when it satisfies the range, otherwise the maximum satisfying
// version; "*" falls back to the "latest" tag when nothing else matches.
function addNameRange (name, range, data, cb) {
  // keep the caller's original spec so the validation failure reports what
  // was actually requested (previously `range` had already been nulled by
  // semver.validRange, so the message always printed "null")
  var rawRange = range
  range = semver.validRange(range, true)
  if (range === null) return cb(new Error(
    "Invalid version range: " + rawRange
  ))

  log.silly("addNameRange", {name:name, range:range, hasData:!!data})

  if (data) return next()

  getOnceFromRegistry(name, "addNameRange", setData, cb)

  function setData (er, d, json, resp) {
    if (!er) {
      er = errorResponse(name, resp)
    }
    if (er) return cb(er)
    data = d
    next()
  }

  function next () {
    log.silly( "addNameRange", "number 2"
             , {name:name, range:range, hasData:!!data})
    engineFilter(data)

    log.silly("addNameRange", "versions"
             , [data.name, Object.keys(data.versions || {})])

    // if the tagged version satisfies, then use that.
    var tagged = data["dist-tags"][npm.config.get("tag")]
    if (tagged
        && data.versions[tagged]
        && semver.satisfies(tagged, range, true)) {
      return addNamed(name, tagged, data.versions[tagged], cb)
    }

    // find the max satisfying version.
    var versions = Object.keys(data.versions || {})
    var ms = semver.maxSatisfying(versions, range, true)
    if (!ms) {
      if (range === "*" && versions.length) {
        return addNameTag(name, "latest", data, cb)
      } else {
        return cb(installTargetsError(range, data))
      }
    }

    // if we don't have a registry connection, try to see if
    // there's a cached copy that will be ok.
    addNamed(name, ms, data.versions[ms], cb)
  }
}
// Build an ETARGET error explaining that no version of `data` satisfies
// `requested`, listing the valid install targets: every dist-tag that
// points at a real published version, plus all published versions.
// Guards against registry documents that lack "dist-tags" or "versions"
// (the original threw a TypeError on Object.keys(undefined)).
function installTargetsError (requested, data) {
  var distTags = data["dist-tags"] || {}
  var versions = data.versions || {}

  var targets = Object.keys(distTags).filter(function (f) {
    return versions.hasOwnProperty(f)
  }).concat(Object.keys(versions))

  requested = data.name + (requested ? "@'" + requested + "'" : "")

  targets = targets.length
          ? "Valid install targets:\n" + JSON.stringify(targets) + "\n"
          : "No valid targets found.\n"
          + "Perhaps not compatible with your version of node?"

  var er = new Error( "No compatible version found: "
                    + requested + "\n" + targets)
  er.code = "ETARGET"
  return er
}
// Convert a registry HTTP response into an Error for any status >= 400,
// carrying statusCode, an "E<status>" code, and the offending package id.
// Returns undefined for successful responses.
function errorResponse (name, response) {
  var statusCode = response.statusCode
  if (statusCode < 400) return undefined

  var er = new Error(http.STATUS_CODES[statusCode])
  er.statusCode = statusCode
  er.code = "E" + statusCode
  er.pkgid = name
  return er
}

481
node_modules/npm/lib/cache/add-remote-git.js generated vendored Normal file
View File

@@ -0,0 +1,481 @@
var assert = require('assert')
var crypto = require('crypto')
var fs = require('graceful-fs')
var path = require('path')
var url = require('url')
var chownr = require('chownr')
var dezalgo = require('dezalgo')
var hostedFromURL = require('hosted-git-info').fromUrl
var inflight = require('inflight')
var log = require('npmlog')
var mkdir = require('mkdirp')
var normalizeGitUrl = require('normalize-git-url')
var npa = require('npm-package-arg')
var realizePackageSpecifier = require('realize-package-specifier')
var addLocal = require('./add-local.js')
var correctMkdir = require('../utils/correct-mkdir.js')
var git = require('../utils/git.js')
var npm = require('../npm.js')
var rm = require('../utils/gently-rm.js')
var remotes = path.resolve(npm.config.get('cache'), '_git-remotes')
var templates = path.join(remotes, '_templates')
var VALID_VARIABLES = [
'GIT_ASKPASS',
'GIT_EXEC_PATH',
'GIT_PROXY_COMMAND',
'GIT_SSH',
'GIT_SSH_COMMAND',
'GIT_SSL_CAINFO',
'GIT_SSL_NO_VERIFY'
]
module.exports = addRemoteGit

// Cache a package from a remote Git (or hosted, e.g. GitHub) URL.
// Hosted shortcuts ("org/repo") are probed over git:, then git+https:,
// then git+ssh:; explicit URLs are cloned directly.
function addRemoteGit (uri, _cb) {
  assert(typeof uri === 'string', 'must have git URL')
  assert(typeof _cb === 'function', 'must have callback')
  var cb = dezalgo(_cb)

  log.verbose('addRemoteGit', 'caching', uri)

  // the URL comes in exactly as it was passed on the command line, or as
  // normalized by normalize-package-data / read-package-json / read-installed,
  // so figure out what to do with it using hosted-git-info
  var parsed = hostedFromURL(uri)
  if (parsed) {
    // normalize GitHub syntax to org/repo (for now)
    var from
    if (parsed.type === 'github' && parsed.default === 'shortcut') {
      from = parsed.path()
    } else {
      from = parsed.toString()
    }

    log.verbose('addRemoteGit', from, 'is a repository hosted by', parsed.type)

    // prefer explicit URLs to pushing everything through shortcuts
    if (parsed.default !== 'shortcut') {
      return tryClone(from, parsed.toString(), false, cb)
    }

    // try git:, then git+ssh:, then git+https: before failing
    tryGitProto(from, parsed, cb)
  } else {
    // verify that this is a Git URL before continuing
    parsed = npa(uri)
    if (parsed.type !== 'git') {
      // fixed: the original message lacked the space after the URI,
      // producing e.g. "foois not a Git or GitHub URL"
      return cb(new Error(uri + ' is not a Git or GitHub URL'))
    }

    tryClone(parsed.rawSpec, uri, false, cb)
  }
}
// Attempt to clone over the git: protocol; on failure fall back to
// HTTPS (and from there to SSH). Hosts with no git: URL skip straight
// to SSH.
function tryGitProto (from, hostedInfo, cb) {
  var gitURL = hostedInfo.git()
  if (!gitURL) {
    return trySSH(from, hostedInfo, cb)
  }

  log.silly('tryGitProto', 'attempting to clone', gitURL)
  tryClone(from, gitURL, true, function onCloned (er) {
    if (!er) return cb.apply(this, arguments)
    tryHTTPS(from, hostedInfo, cb)
  })
}
// Attempt to clone over HTTPS; on failure fall back to SSH. If the host
// offers no HTTPS URL either, every protocol has been exhausted.
function tryHTTPS (from, hostedInfo, cb) {
  var httpsURL = hostedInfo.https()
  if (!httpsURL) {
    return cb(new Error(from + ' can not be cloned via Git, SSH, or HTTPS'))
  }

  log.silly('tryHTTPS', 'attempting to clone', httpsURL)
  tryClone(from, httpsURL, true, function onCloned (er) {
    if (!er) return cb.apply(this, arguments)
    trySSH(from, hostedInfo, cb)
  })
}
// Attempt to clone over SSH (no further fallback on clone failure).
// Hosts with no SSH URL fall through to HTTPS.
function trySSH (from, hostedInfo, cb) {
  var sshURL = hostedInfo.ssh()
  if (sshURL) {
    log.silly('trySSH', 'attempting to clone', sshURL)
    return tryClone(from, sshURL, false, cb)
  }
  tryHTTPS(from, hostedInfo, cb)
}
// Mirror `combinedURL` into the shared git-remotes cache. The cache key
// combines the sanitized clone URL with a short hash so similarly-named
// remotes don't collide; concurrent clones of the same repo are collapsed
// via inflight. When `silent` is set, clone failures log at verbose level
// instead of error (used while probing protocols).
function tryClone (from, combinedURL, silent, cb) {
  log.silly('tryClone', 'cloning', from, 'via', combinedURL)

  // split the URL into the clonable address and the requested committish
  var normalized = normalizeGitUrl(combinedURL)
  var cloneURL = normalized.url
  var treeish = normalized.branch

  // ensure that similarly-named remotes don't collide
  var repoID = cloneURL.replace(/[^a-zA-Z0-9]+/g, '-') + '-' +
    crypto.createHash('sha1').update(combinedURL).digest('hex').slice(0, 8)
  var cachedRemote = path.join(remotes, repoID)

  cb = inflight(repoID, cb)
  if (!cb) {
    return log.verbose('tryClone', repoID, 'already in flight; waiting')
  }
  log.verbose('tryClone', repoID, 'not in flight; caching')

  // initialize the remotes cache with the correct perms
  getGitDir(function (er) {
    if (er) return cb(er)
    fs.stat(cachedRemote, function (er, s) {
      // no cached mirror yet -> create one; non-directory -> nuke and redo
      if (er) return mirrorRemote(from, cloneURL, treeish, cachedRemote, silent, finish)
      if (!s.isDirectory()) return resetRemote(from, cloneURL, treeish, cachedRemote, finish)

      validateExistingRemote(from, cloneURL, treeish, cachedRemote, finish)
    })

    // always set permissions on the cached remote
    function finish (er, data) {
      if (er) return cb(er, data)
      addModeRecursive(cachedRemote, npm.modes.file, function (er) {
        return cb(er, data)
      })
    }
  })
}
// don't try too hard to hold on to a remote: blow away the cached mirror
// and re-mirror it from scratch
function resetRemote (from, cloneURL, treeish, cachedRemote, cb) {
  log.info('resetRemote', 'resetting', cachedRemote, 'for', from)
  rm(cachedRemote, function onRemoved (er) {
    if (er) {
      cb(er)
    } else {
      mirrorRemote(from, cloneURL, treeish, cachedRemote, false, cb)
    }
  })
}
// reuse a cached remote when possible, but nuke it if it's in an
// inconsistent state
//
// Checks that the cached mirror's remote.origin.url still matches the URL
// we intend to clone; resets the cache on any git error or URL mismatch,
// otherwise refreshes the mirror with a fetch.
function validateExistingRemote (from, cloneURL, treeish, cachedRemote, cb) {
  git.whichAndExec(
    ['config', '--get', 'remote.origin.url'],
    { cwd: cachedRemote, env: gitEnv() },
    function (er, stdout, stderr) {
      var originURL
      if (stdout) {
        originURL = stdout.trim()
        log.silly('validateExistingRemote', from, 'remote.origin.url:', originURL)
      }

      if (stderr) stderr = stderr.trim()
      if (stderr || er) {
        // the cached mirror is broken in some way; start over
        log.warn('addRemoteGit', from, 'resetting remote', cachedRemote, 'because of error:', stderr || er)
        return resetRemote(from, cloneURL, treeish, cachedRemote, cb)
      } else if (cloneURL !== originURL) {
        // stale or colliding cache entry pointing somewhere else; start over
        log.warn(
          'addRemoteGit',
          from,
          'pre-existing cached repo', cachedRemote, 'points to', originURL, 'and not', cloneURL
        )
        return resetRemote(from, cloneURL, treeish, cachedRemote, cb)
      }

      log.verbose('validateExistingRemote', from, 'is updating existing cached remote', cachedRemote)
      updateRemote(from, cloneURL, treeish, cachedRemote, cb)
    }
  )
}
// make a complete bare mirror of the remote repo
// NOTE: npm uses a blank template directory to prevent weird inconsistencies
// https://github.com/npm/npm/issues/5867
function mirrorRemote (from, cloneURL, treeish, cachedRemote, silent, cb) {
  mkdir(cachedRemote, function (er) {
    if (er) return cb(er)

    var args = [
      'clone',
      '--template=' + templates,
      '--mirror',
      cloneURL, cachedRemote
    ]
    git.whichAndExec(
      // pass the args array itself (previously a duplicated inline literal
      // was passed here, which could silently drift from the `args` used in
      // the error message below)
      args,
      { cwd: cachedRemote, env: gitEnv() },
      function (er, stdout, stderr) {
        if (er) {
          var combined = (stdout + '\n' + stderr).trim()
          var command = 'git ' + args.join(' ') + ':'
          // during protocol probing, a failed clone is expected noise
          if (silent) {
            log.verbose(command, combined)
          } else {
            log.error(command, combined)
          }
          return cb(er)
        }
        log.verbose('mirrorRemote', from, 'git clone ' + cloneURL, stdout.trim())
        setPermissions(from, cloneURL, treeish, cachedRemote, cb)
      }
    )
  })
}
// Chown the cached remote to the npm cache's owner so later cache
// operations can modify it; skipped entirely on Windows (no chown there).
// Continues to resolveHead on success in either branch.
function setPermissions (from, cloneURL, treeish, cachedRemote, cb) {
  if (process.platform === 'win32') {
    log.verbose('setPermissions', from, 'skipping chownr on Windows')
    resolveHead(from, cloneURL, treeish, cachedRemote, cb)
  } else {
    // getGitDir also yields the cache directory's stats (uid/gid)
    getGitDir(function (er, cs) {
      if (er) {
        log.error('setPermissions', from, 'could not get cache stat')
        return cb(er)
      }

      chownr(cachedRemote, cs.uid, cs.gid, function (er) {
        if (er) {
          log.error(
            'setPermissions',
            'Failed to change git repository ownership under npm cache for',
            cachedRemote
          )
          return cb(er)
        }

        log.verbose('setPermissions', from, 'set permissions on', cachedRemote)
        resolveHead(from, cloneURL, treeish, cachedRemote, cb)
      })
    })
  }
}
// always fetch the origin, even right after mirroring, because this way
// permissions will get set correctly
function updateRemote (from, cloneURL, treeish, cachedRemote, cb) {
  var label = 'git fetch -a origin (' + cloneURL + ')'
  git.whichAndExec(
    ['fetch', '-a', 'origin'],
    { cwd: cachedRemote, env: gitEnv() },
    function onFetched (er, stdout, stderr) {
      if (er) {
        log.error(label, (stdout + '\n' + stderr).trim())
        return cb(er)
      }
      log.verbose('updateRemote', label, stdout.trim())
      setPermissions(from, cloneURL, treeish, cachedRemote, cb)
    }
  )
}
// branches and tags are both symbolic labels that can be attached to different
// commits, so resolve the commit-ish to the current actual treeish the label
// corresponds to
//
// important for shrinkwrap
function resolveHead (from, cloneURL, treeish, cachedRemote, cb) {
  log.verbose('resolveHead', from, 'original treeish:', treeish)
  var args = ['rev-list', '-n1', treeish]
  git.whichAndExec(
    args,
    { cwd: cachedRemote, env: gitEnv() },
    function (er, stdout, stderr) {
      if (er) {
        log.error('git ' + args.join(' ') + ':', stderr)
        return cb(er)
      }

      // the exact commit the symbolic label currently points at
      var resolvedTreeish = stdout.trim()
      log.silly('resolveHead', from, 'resolved treeish:', resolvedTreeish)

      var resolvedURL = getResolved(cloneURL, resolvedTreeish)
      if (!resolvedURL) {
        return cb(new Error(
          'unable to clone ' + from + ' because git clone string ' +
            cloneURL + ' is in a form npm can\'t handle'
        ))
      }
      log.verbose('resolveHead', from, 'resolved Git URL:', resolvedURL)

      // generate a unique filename
      var tmpdir = path.join(
        npm.tmp,
        'git-cache-' + crypto.pseudoRandomBytes(6).toString('hex'),
        resolvedTreeish
      )
      log.silly('resolveHead', 'Git working directory:', tmpdir)

      mkdir(tmpdir, function (er) {
        if (er) return cb(er)

        cloneResolved(from, resolvedURL, resolvedTreeish, cachedRemote, tmpdir, cb)
      })
    }
  )
}
// make a clone from the mirrored cache into a temporary working directory
// so the resolved treeish can be checked out there
function cloneResolved (from, resolvedURL, resolvedTreeish, cachedRemote, tmpdir, cb) {
  var args = ['clone', cachedRemote, tmpdir]
  git.whichAndExec(
    args,
    { cwd: cachedRemote, env: gitEnv() },
    function onCloned (er, stdout, stderr) {
      var combined = (stdout + '\n' + stderr).trim()
      if (er) {
        log.error('git ' + args.join(' ') + ':', stderr)
        return cb(er)
      }
      log.verbose('cloneResolved', from, 'clone', combined)
      checkoutTreeish(from, resolvedURL, resolvedTreeish, tmpdir, cb)
    }
  )
}
// there is no safe way to do a one-step clone to a treeish that isn't
// guaranteed to be a branch, so explicitly check out the treeish once it's
// cloned
function checkoutTreeish (from, resolvedURL, resolvedTreeish, tmpdir, cb) {
  var args = ['checkout', resolvedTreeish]
  git.whichAndExec(
    args,
    { cwd: tmpdir, env: gitEnv() },
    function (er, stdout, stderr) {
      stdout = (stdout + '\n' + stderr).trim()
      if (er) {
        log.error('git ' + args.join(' ') + ':', stderr)
        return cb(er)
      }
      log.verbose('checkoutTreeish', from, 'checkout', stdout)

      // convince addLocal that the checkout is a local dependency
      realizePackageSpecifier(tmpdir, function (er, spec) {
        if (er) {
          log.error('addRemoteGit', 'Failed to map', tmpdir, 'to a package specifier')
          return cb(er)
        }

        // ensure pack logic is applied
        // https://github.com/npm/npm/issues/6400
        addLocal(spec, null, function (er, data) {
          if (data) {
            // record provenance: exact resolved URL with --save-exact,
            // the friendlier original spec otherwise
            if (npm.config.get('save-exact')) {
              log.verbose('addRemoteGit', 'data._from:', resolvedURL, '(save-exact)')
              data._from = resolvedURL
            } else {
              log.verbose('addRemoteGit', 'data._from:', from)
              data._from = from
            }

            log.verbose('addRemoteGit', 'data._resolved:', resolvedURL)
            data._resolved = resolvedURL
          }

          cb(er, data)
        })
      })
    }
  )
}
// Ensure the git-remotes cache directory and its (deliberately empty)
// template directory exist and are owned by the cache's owner; yields the
// cache directory's stats so callers can reuse the uid/gid.
function getGitDir (cb) {
  correctMkdir(remotes, function (er, stats) {
    if (er) return cb(er)

    // We don't need global templates when cloning. Use an empty directory for
    // the templates, creating it (and setting its permissions) if necessary.
    mkdir(templates, function (er) {
      if (er) return cb(er)

      // Ensure that both the template and remotes directories have the correct
      // permissions.
      fs.chown(templates, stats.uid, stats.gid, function (er) {
        cb(er, stats)
      })
    })
  })
}
// Memoized environment for spawned git processes: GIT_ASKPASS is forced
// to 'echo' (fail rather than prompt for credentials) and all other GIT_*
// variables are stripped unless whitelisted in VALID_VARIABLES; non-GIT
// variables pass through unchanged.
var gitEnv_
function gitEnv () {
  // git responds to env vars in some weird ways in post-receive hooks
  // so don't carry those along.
  if (gitEnv_) return gitEnv_

  // allow users to override npm's insistence on not prompting for
  // passphrases, but default to just failing when credentials
  // aren't available
  gitEnv_ = { GIT_ASKPASS: 'echo' }

  for (var k in process.env) {
    // skip GIT_* variables that aren't explicitly whitelisted
    if (!~VALID_VARIABLES.indexOf(k) && k.match(/^GIT/)) continue
    gitEnv_[k] = process.env[k]
  }
  return gitEnv_
}
addRemoteGit.getResolved = getResolved

// Build the canonical "resolved" URL recorded in package metadata:
// normalize hosted shortcuts back to full URLs, ensure a git-ish protocol
// prefix, and pin the exact resolved treeish in the URL fragment.
function getResolved (uri, treeish) {
  // normalize hosted-git-info clone URLs back into regular URLs
  // this will only work on URLs that hosted-git-info recognizes
  // https://github.com/npm/npm/issues/7961
  var rehydrated = hostedFromURL(uri)
  if (rehydrated) uri = rehydrated.toString()

  var parsed = url.parse(uri)

  // Checks for known protocols:
  // http:, https:, ssh:, and git:, with optional git+ prefix.
  if (!parsed.protocol ||
      !parsed.protocol.match(/^(((git\+)?(https?|ssh))|git|file):$/)) {
    uri = 'git+ssh://' + uri
  }

  if (!/^git[+:]/.test(uri)) {
    uri = 'git+' + uri
  }

  // Not all URIs are actually URIs, so use regex for the treeish.
  return uri.replace(/(?:#.*)?$/, '#' + treeish)
}
// similar to chmodr except it add permissions rather than overwriting them
// adapted from https://github.com/isaacs/chmodr/blob/master/chmodr.js
function addModeRecursive (cachedRemote, mode, cb) {
  fs.readdir(cachedRemote, function (er, children) {
    // Any error other than ENOTDIR means it's not readable, or doesn't exist.
    // Give up.
    if (er && er.code !== 'ENOTDIR') return cb(er)
    // ENOTDIR means it's a plain file; no children means nothing to recurse
    if (er || !children.length) return addMode(cachedRemote, mode, cb)

    var len = children.length
    var errState = null
    children.forEach(function (child) {
      addModeRecursive(path.resolve(cachedRemote, child), mode, then)
    })

    // countdown over the children: report only the first error, and touch
    // the directory itself (with execute bits added via dirMode) last
    function then (er) {
      if (errState) return undefined
      if (er) return cb(errState = er)
      if (--len === 0) return addMode(cachedRemote, dirMode(mode), cb)
    }
  })
}
// Add permission bits to a file's existing mode (bitwise OR) and chmod it.
function addMode (cachedRemote, mode, cb) {
  fs.stat(cachedRemote, function onStat (er, stats) {
    if (er) return cb(er)
    var widened = stats.mode | mode
    fs.chmod(cachedRemote, widened, cb)
  })
}
// taken from https://github.com/isaacs/chmodr/blob/master/chmodr.js
//
// Widen a file mode for use on a directory: every class (user, group,
// other) that has the read bit also gets the execute (search) bit.
function dirMode (mode) {
  var readBits = [parseInt('0400', 8), parseInt('040', 8), parseInt('04', 8)]
  readBits.forEach(function (read) {
    // the execute bit for each class is the read bit shifted right twice
    if (mode & read) mode |= (read >> 2)
  })
  return mode
}

120
node_modules/npm/lib/cache/add-remote-tarball.js generated vendored Normal file
View File

@@ -0,0 +1,120 @@
var mkdir = require("mkdirp")
, assert = require("assert")
, log = require("npmlog")
, path = require("path")
, sha = require("sha")
, retry = require("retry")
, writeStreamAtomic = require("fs-write-stream-atomic")
, PassThrough = require('readable-stream').PassThrough
, npm = require("../npm.js")
, inflight = require("inflight")
, addLocalTarball = require("./add-local-tarball.js")
, cacheFile = require("npm-cache-filename")
module.exports = addRemoteTarball

// Download the tarball at `u` into a temp file, then add it to the cache
// as a local tarball. Concurrent requests for the same URL are collapsed.
// `shasum`, when given, is the expected checksum of the download.
function addRemoteTarball (u, pkgData, shasum, auth, cb_) {
  assert(typeof u === "string", "must have module URL")
  assert(typeof cb_ === "function", "must have callback")

  // annotate the resulting metadata with its origin before returning it
  function cb (er, data) {
    if (data) {
      data._from = u
      data._resolved = u
      data._shasum = data._shasum || shasum
    }
    cb_(er, data)
  }

  // collapse concurrent fetches of the same URL (cb closes over the
  // inflight-wrapped cb_, so every waiter gets the annotated result)
  cb_ = inflight(u, cb_)
  if (!cb_) return log.verbose("addRemoteTarball", u, "already in flight; waiting")
  log.verbose("addRemoteTarball", u, "not in flight; adding")

  // XXX Fetch direct to cache location, store tarballs under
  // ${cache}/registry.npmjs.org/pkg/-/pkg-1.2.3.tgz
  var tmp = cacheFile(npm.tmp, u)

  function next (er, resp, shasum) {
    if (er) return cb(er)
    addLocalTarball(tmp, pkgData, shasum, cb)
  }

  log.verbose("addRemoteTarball", [u, shasum])
  mkdir(path.dirname(tmp), function (er) {
    if (er) return cb(er)
    addRemoteTarball_(u, tmp, shasum, auth, next)
  })
}
// Fetch the tarball with retries; calls back with (er, response, shasum).
function addRemoteTarball_ (u, tmp, shasum, auth, cb) {
  // Tuned to spread 3 attempts over about a minute.
  // See formula at <https://github.com/tim-kos/node-retry>.
  var operation = retry.operation({
    retries: npm.config.get("fetch-retries")
  , factor: npm.config.get("fetch-retry-factor")
  , minTimeout: npm.config.get("fetch-retry-mintimeout")
  , maxTimeout: npm.config.get("fetch-retry-maxtimeout")
  })

  operation.attempt(function (currentAttempt) {
    log.info("retry", "fetch attempt " + currentAttempt
      + " at " + (new Date()).toLocaleTimeString())
    fetchAndShaCheck(u, tmp, shasum, auth, function (er, response, shasum) {
      // Only retry on 408, 5xx or no `response`.
      var sc = response && response.statusCode
      var statusRetry = !sc || (sc === 408 || sc >= 500)
      if (er && statusRetry && operation.retry(er)) {
        log.warn("retry", "will retry, error on last attempt: " + er)
        return
      }
      cb(er, response, shasum)
    })
  })
}
// Stream the tarball at `u` into `tmp`, then verify it against the
// expected `shasum` (or compute one if none was given). Calls back with
// (er, response, shasum).
function fetchAndShaCheck (u, tmp, shasum, auth, cb) {
  npm.registry.fetch(u, { auth : auth }, function (er, response) {
    if (er) {
      log.error("fetch failed", u)
      return cb(er, response)
    }

    // atomic write: the file only appears at `tmp` once fully written
    var tarball = writeStreamAtomic(tmp, { mode: npm.modes.file })
    tarball.on('error', function (er) {
      cb(er)
      tarball.destroy()
    })

    tarball.on("finish", function () {
      if (!shasum) {
        // Well, we weren't given a shasum, so at least sha what we have
        // in case we want to compare it to something else later
        return sha.get(tmp, function (er, shasum) {
          log.silly("fetchAndShaCheck", "shasum", shasum)
          cb(er, response, shasum)
        })
      }

      // validate that the url we just downloaded matches the expected shasum.
      log.silly("fetchAndShaCheck", "shasum", shasum)
      sha.check(tmp, shasum, function (er) {
        if (er && er.message) {
          // add original filename for better debuggability
          er.message = er.message + "\n" + "From: " + u
        }
        return cb(er, response, shasum)
      })
    })

    // 0.8 http streams have a bug, where if they're paused with data in
    // their buffers when the socket closes, they call `end` before emptying
    // those buffers, which results in the entire pipeline ending and thus
    // the point that applied backpressure never being able to trigger a
    // `resume`.
    // We work around this by piping into a pass through stream that has
    // unlimited buffering. The pass through stream is from readable-stream
    // and is thus a current streams3 implementation that is free of these
    // bugs even on 0.8.
    response.pipe(PassThrough({highWaterMark: Infinity})).pipe(tarball)
  })
}

14
node_modules/npm/lib/cache/cached-package-root.js generated vendored Normal file
View File

@@ -0,0 +1,14 @@
var assert = require("assert")
var resolve = require("path").resolve
var npm = require("../npm.js")
module.exports = getCacheRoot

// Return the root directory in the npm cache where this package's data
// lives: {cache}/{name}/{version}.
function getCacheRoot (data) {
  assert(data, "must pass package metadata")
  assert(data.name, "package metadata must include name")
  assert(data.version, "package metadata must include version")
  return resolve(npm.cache, data.name, data.version)
}

218
node_modules/npm/lib/cache/caching-client.js generated vendored Normal file
View File

@@ -0,0 +1,218 @@
module.exports = CachingRegistryClient
var path = require("path")
, fs = require("graceful-fs")
, url = require("url")
, assert = require("assert")
, inherits = require("util").inherits
var RegistryClient = require("npm-registry-client")
, npm = require("../npm.js")
, log = require("npmlog")
, getCacheStat = require("./get-stat.js")
, cacheFile = require("npm-cache-filename")
, mkdirp = require("mkdirp")
, rimraf = require("rimraf")
, chownr = require("chownr")
, writeFile = require("write-file-atomic")
// A registry client that layers npm's on-disk response cache and cache
// invalidation on top of npm-registry-client.
function CachingRegistryClient (config) {
  RegistryClient.call(this, adaptConfig(config))

  // maps a registry URI to its location in the on-disk cache
  this._mapToCache = cacheFile(config.get("cache"))

  // swizzle in our custom cache invalidation logic
  this._request = this.request
  this.request = this._invalidatingRequest
  this.get = get
}
inherits(CachingRegistryClient, RegistryClient)
// Wrapper around the underlying request method: after any mutating request
// (anything other than GET/HEAD) completes, remove the on-disk cached copy
// of that URI before invoking the caller's callback.
CachingRegistryClient.prototype._invalidatingRequest = function (uri, params, cb) {
  var client = this
  this._request.call(this, uri, params, function () {
    var args = arguments

    var method = params.method
    if (method !== "HEAD" && method !== "GET") {
      var invalidated = client._mapToCache(uri)
      // invalidate cache
      //
      // This is irrelevant for commands that do etag / last-modified caching,
      // but ls and view also have a timed cache, so this keeps the user from
      // thinking that it didn't work when it did.
      // Note that failure is an acceptable option here, since the only
      // result will be a stale cache for some helper commands.
      log.verbose("request", "invalidating", invalidated, "on", method)
      return rimraf(invalidated, function () {
        cb.apply(undefined, args)
      })
    }

    cb.apply(undefined, args)
  })
}
// GET a registry URI, consulting the on-disk cache first: reads any cached
// .cache.json alongside its stat (for freshness checks) and hands both to
// get_ via params. GETs that are part of a write (``?write=true``) bypass
// the cache read entirely (but results are still saved).
function get (uri, params, cb) {
  assert(typeof uri === "string", "must pass registry URI to get")
  assert(params && typeof params === "object", "must pass params to get")
  assert(typeof cb === "function", "must pass callback to get")

  var parsed = url.parse(uri)
  assert(
    parsed.protocol === "http:" || parsed.protocol === "https:",
    "must have a URL that starts with http: or https:"
  )

  var cacheBase = cacheFile(npm.config.get("cache"))(uri)
  var cachePath = path.join(cacheBase, ".cache.json")

  // If the GET is part of a write operation (PUT or DELETE), then
  // skip past the cache entirely, but still save the results.
  if (uri.match(/\?write=true$/)) {
    log.verbose("get", "GET as part of write; not caching result")
    return get_.call(this, uri, cachePath, params, cb)
  }

  var client = this
  fs.stat(cachePath, function (er, stat) {
    if (!er) {
      fs.readFile(cachePath, function (er, data) {
        try {
          data = JSON.parse(data)
        }
        catch (ex) {
          // corrupt or unparsable cache entry; treat it as a cache miss
          data = null
        }

        params.stat = stat
        params.data = data

        get_.call(client, uri, cachePath, params, cb)
      })
    }
    else {
      get_.call(client, uri, cachePath, params, cb)
    }
  })
}
// The guts of the cached GET: serve straight from the timed cache, serve
// stale data while refreshing in the background (staleOk), or do a
// conditional (etag / last-modified) request, then persist the response.
//
// params:
//   staleOk - serve stale cache data and refresh in the background
//   timeout - seconds before a timed cache entry counts as expired
//   data    - previously cached (parsed) response body, if any
//   stat    - fs.Stats of the cache file, used for the age check
function get_ (uri, cachePath, params, cb) {
  var staleOk = params.staleOk === undefined ? false : params.staleOk
    , timeout = params.timeout === undefined ? -1 : params.timeout
    , data = params.data
    , stat = params.stat
    , etag
    , lastModified

  // clamp the timeout into the user's [cache-min, cache-max] window
  timeout = Math.min(timeout, npm.config.get("cache-max") || 0)
  timeout = Math.max(timeout, npm.config.get("cache-min") || -Infinity)

  // during shell tab-completion, never hit the network more than once/minute
  if (process.env.COMP_CWORD !== undefined &&
      process.env.COMP_LINE !== undefined &&
      process.env.COMP_POINT !== undefined) {
    timeout = Math.max(timeout, 60000)
  }

  if (data) {
    if (data._etag) etag = data._etag
    if (data._lastModified) lastModified = data._lastModified

    if (stat && timeout && timeout > 0) {
      // fresh enough: answer from cache with no request at all
      if ((Date.now() - stat.mtime.getTime()) / 1000 < timeout) {
        log.verbose("get", uri, "not expired, no request")
        delete data._etag
        delete data._lastModified
        return cb(null, data, JSON.stringify(data), { statusCode : 304 })
      }

      // stale but acceptable: answer now, refresh in the background by
      // letting the request below proceed with a no-op callback
      if (staleOk) {
        log.verbose("get", uri, "staleOk, background update")
        delete data._etag
        delete data._lastModified
        process.nextTick(
          cb.bind(null, null, data, JSON.stringify(data), { statusCode : 304 } )
        )
        cb = function () {}
      }
    }
  }

  var options = {
    etag : etag,
    lastModified : lastModified,
    follow : params.follow,
    auth : params.auth
  }
  this.request(uri, options, function (er, remoteData, raw, response) {
    // if we get an error talking to the registry, but we have it
    // from the cache, then just pretend we got it.
    if (er && cachePath && data && !data.error) {
      er = null
      response = { statusCode: 304 }
    }

    if (response) {
      log.silly("get", "cb", [response.statusCode, response.headers])
      if (response.statusCode === 304 && (etag || lastModified)) {
        remoteData = data
        log.verbose(etag ? "etag" : "lastModified", uri + " from cache")
      }
    }

    data = remoteData
    if (!data) er = er || new Error("failed to fetch from registry: " + uri)

    if (er) return cb(er, data, raw, response)

    saveToCache(cachePath, data, saved)

    // just give the write the old college try. if it fails, whatever.
    function saved () {
      delete data._etag
      delete data._lastModified
      cb(er, data, raw, response)
    }

    function saveToCache (cachePath, data, saved) {
      log.verbose("get", "saving", data.name, "to", cachePath)
      getCacheStat(function (er, st) {
        // BUGFIX: the error was previously ignored here; on failure `st` is
        // undefined and the chownr call below threw a TypeError.  The cache
        // write is explicitly best-effort, so just skip it and move on.
        if (er) return saved()
        mkdirp(path.dirname(cachePath), function (er, made) {
          if (er) return saved()

          writeFile(cachePath, JSON.stringify(data), function (er) {
            if (er) return saved()

            chownr(made || cachePath, st.uid, st.gid, saved)
          })
        })
      })
    }
  })
}
// Translate npm's config object into the options bag that the registry
// client constructor expects.
function adaptConfig (config) {
  var proxyOpts = {
    http : config.get("proxy"),
    https : config.get("https-proxy"),
    localAddress : config.get("local-address")
  }
  var sslOpts = {
    certificate : config.get("cert"),
    key : config.get("key"),
    ca : config.get("ca"),
    strict : config.get("strict-ssl")
  }
  var retryOpts = {
    retries : config.get("fetch-retries"),
    factor : config.get("fetch-retry-factor"),
    minTimeout : config.get("fetch-retry-mintimeout"),
    maxTimeout : config.get("fetch-retry-maxtimeout")
  }
  return {
    proxy : proxyOpts,
    ssl : sslOpts,
    retry : retryOpts,
    userAgent : config.get("user-agent"),
    log : log,
    defaultTag : config.get("tag"),
    couchToken : config.get("_token"),
    maxSockets : config.get('maxsockets')
  }
}

6
node_modules/npm/lib/cache/get-stat.js generated vendored Normal file
View File

@@ -0,0 +1,6 @@
var npm = require('../npm.js')
var correctMkdir = require('../utils/correct-mkdir.js')

// Ensure the npm cache directory exists (with appropriate ownership) and
// pass its fs.Stats to the callback.
module.exports = function getCacheStat (cb) {
  correctMkdir(npm.cache, cb)
}

104
node_modules/npm/lib/cache/update-index.js generated vendored Normal file
View File

@@ -0,0 +1,104 @@
module.exports = updateIndex
var fs = require('graceful-fs')
var assert = require('assert')
var path = require('path')
var mkdir = require('mkdirp')
var chownr = require('chownr')
var npm = require('../npm.js')
var log = require('npmlog')
var cacheFile = require('npm-cache-filename')
var getCacheStat = require('./get-stat.js')
var mapToRegistry = require('../utils/map-to-registry.js')
/* /-/all is special.
* It uses timestamp-based caching and partial updates,
* because it is a monster.
*/
// Refresh the local /-/all search index cache.  If a reasonably fresh cache
// exists, answer from it immediately and refresh in the background; if the
// cache is corrupt, rebuild it from scratch.
function updateIndex (staleness, cb) {
  assert(typeof cb === 'function', 'must pass callback to updateIndex')

  mapToRegistry('-/all', npm.config, function (er, uri, auth) {
    if (er) return cb(er)

    var params = {
      timeout: staleness,
      follow: true,
      staleOk: true,
      auth: auth
    }
    var cacheBase = cacheFile(npm.config.get('cache'))(uri)
    var cachePath = path.join(cacheBase, '.cache.json')
    log.info('updateIndex', cachePath)

    getCacheStat(function (er, st) {
      if (er) return cb(er)

      mkdir(cacheBase, function (er, made) {
        if (er) return cb(er)

        fs.readFile(cachePath, function (er, data) {
          if (er) {
            log.warn('', 'Building the local index for the first time, please be patient')
            return updateIndex_(uri, params, {}, cachePath, cb)
          }

          chownr(made || cachePath, st.uid, st.gid, function (er) {
            if (er) return cb(er)

            try {
              data = JSON.parse(data)
            } catch (ex) {
              // corrupt cache: reset it and rebuild the full index
              fs.writeFile(cachePath, '{}', function (er) {
                if (er) return cb(new Error('Broken cache.'))

                log.warn('', 'Building the local index for the first time, please be patient')
                return updateIndex_(uri, params, {}, cachePath, cb)
              })
              // BUGFIX: without this return, execution fell through with
              // `data` still an unparsed Buffer, and cb could fire a second
              // time once the rebuild above completed.
              return
            }

            var t = +data._updated || 0
            // use the cache and update in the background if it's not too old
            if (Date.now() - t < 60000) {
              cb(null, data)
              cb = function () {}
            }

            if (t === 0) {
              log.warn('', 'Building the local index for the first time, please be patient')
            } else {
              log.verbose('updateIndex', 'Cached search data present with timestamp', t)
              uri += '/since?stale=update_after&startkey=' + t
            }

            updateIndex_(uri, params, data, cachePath, cb)
          })
        })
      })
    })
  })
}
// Fetch (a slice of) the /-/all index, merge it over the cached data, and
// persist the merged result with a fresh _updated timestamp.
function updateIndex_ (all, params, data, cachePath, cb) {
  log.silly('update-index', 'fetching', all)
  npm.registry.request(all, params, function (er, updates, _, res) {
    if (er) return cb(er, data)

    var headers = res.headers
    var updated = updates._updated || Date.parse(headers.date)

    // merge the incremental updates over the existing packages
    Object.keys(updates).forEach(function (p) { data[p] = updates[p] })

    data._updated = updated
    getCacheStat(function (er, st) {
      if (er) return cb(er)

      fs.writeFile(cachePath, JSON.stringify(data), function (er) {
        // _updated is an on-disk bookkeeping field only; strip it before
        // handing the data back to the caller
        delete data._updated
        if (er) return cb(er)
        chownr(cachePath, st.uid, st.gid, function (er) {
          cb(er, data)
        })
      })
    })
  })
}

248
node_modules/npm/lib/completion.js generated vendored Normal file
View File

@@ -0,0 +1,248 @@
module.exports = completion

completion.usage = "npm completion >> ~/.bashrc\n"
                 + "npm completion >> ~/.zshrc\n"
                 + "source <(npm completion)"

var npm = require("./npm.js")
  , npmconf = require("./config/core.js")
  , configDefs = npmconf.defs
  , configTypes = configDefs.types
  , shorthands = configDefs.shorthands
  , nopt = require("nopt")
  // every non-internal config name (a leading "_" marks internal keys)
  , configNames = Object.keys(configTypes).filter(function (e) {
      return e.charAt(0) !== "_"
    })
  , shorthandNames = Object.keys(shorthands)
  , allConfs = configNames.concat(shorthandNames)
  , once = require("once")
// Completion for `npm completion` itself: offer ">> ~/.bashrc"-style
// redirection targets for whichever rc files actually exist.
completion.completion = function (opts, cb) {
  if (opts.w > 3) return cb()

  var fs = require("graceful-fs")
    , path = require("path")
    , bashExists = null
    , zshExists = null

  // both stats run in parallel; next() only proceeds when both have reported
  fs.stat(path.resolve(process.env.HOME, ".bashrc"), function (er) {
    bashExists = !er
    next()
  })
  fs.stat(path.resolve(process.env.HOME, ".zshrc"), function (er) {
    zshExists = !er
    next()
  })

  function next () {
    if (zshExists === null || bashExists === null) return
    var out = []
    if (zshExists) out.push("~/.zshrc")
    if (bashExists) out.push("~/.bashrc")
    if (opts.w === 2) out = out.map(function (m) {
      return [">>", m]
    })
    cb(null, out)
  }
}
// Main entry for `npm completion`.  Without the COMP_* environment
// variables it dumps the completion shell script; with them it answers a
// single tab-completion request from the shell.
function completion (args, cb) {
  if (process.platform === 'win32' && !(/^MINGW(32|64)$/.test(process.env.MSYSTEM))) {
    var e = new Error('npm completion supported only in MINGW / Git bash on Windows')
    e.code = 'ENOTSUP'
    e.errno = require('constants').ENOTSUP
    return cb(e)
  }

  // if the COMP_* isn't in the env, then just dump the script.
  if (process.env.COMP_CWORD === undefined
    ||process.env.COMP_LINE === undefined
    ||process.env.COMP_POINT === undefined
    ) return dumpScript(cb)

  // NOTE(review): debug output on stderr; the completing shell reads only
  // stdout, so this is presumably harmless -- confirm before removing.
  console.error(process.env.COMP_CWORD)
  console.error(process.env.COMP_LINE)
  console.error(process.env.COMP_POINT)

  //console.log("abracadabrasauce\nabracad cat monger")
  //if (Math.random() * 3 < 1) console.log("man\\ bear\\ pig")
  //else if (Math.random() * 3 < 1)
  //  console.log("porkchop\\ sandwiches\nporkman")
  //else console.log("encephylophagy")

  // get the partial line and partial word,
  // if the point isn't at the end.
  // ie, tabbing at: npm foo b|ar
  var w = +process.env.COMP_CWORD
    , words = args.map(unescape)
    , word = words[w]
    , line = process.env.COMP_LINE
    , point = +process.env.COMP_POINT
    , partialLine = line.substr(0, point)
    , partialWords = words.slice(0, w)

  // figure out where in that last word the point is.
  var partialWord = args[w]
    , i = partialWord.length
  while (partialWord.substr(0, i) !== partialLine.substr(-1*i) && i > 0) {
    i --
  }
  partialWord = unescape(partialWord.substr(0, i))
  partialWords.push(partialWord)

  var opts = { words : words
             , w : w
             , word : word
             , line : line
             , lineLength : line.length
             , point : point
             , partialLine : partialLine
             , partialWords : partialWords
             , partialWord : partialWord
             , raw: args
             }

  cb = wrapCb(cb, opts)

  console.error(opts)

  if (partialWords.slice(0, -1).indexOf("--") === -1) {
    if (word.charAt(0) === "-") return configCompl(opts, cb)
    if (words[w - 1]
        && words[w - 1].charAt(0) === "-"
        && !isFlag(words[w - 1])) {
      // awaiting a value for a non-bool config.
      // don't even try to do this for now
      console.error("configValueCompl")
      return configValueCompl(opts, cb)
    }
  }

  // try to find the npm command.
  // it's the first thing after all the configs.
  // take a little shortcut and use npm's arg parsing logic.
  // don't have to worry about the last arg being implicitly
  // boolean'ed, since the last block will catch that.
  var parsed = opts.conf =
    nopt(configTypes, shorthands, partialWords.slice(0, -1), 0)
  // check if there's a command already.
  console.error(parsed)
  var cmd = parsed.argv.remain[1]
  if (!cmd) return cmdCompl(opts, cb)

  Object.keys(parsed).forEach(function (k) {
    npm.config.set(k, parsed[k])
  })

  // at this point, if words[1] is some kind of npm command,
  // then complete on it.
  // otherwise, do nothing
  cmd = npm.commands[cmd]
  if (cmd && cmd.completion) return cmd.completion(opts, cb)

  // nothing to do.
  cb()
}
// Print the bundled bash completion script (minus its shebang) to stdout.
function dumpScript (cb) {
  var fs = require("graceful-fs")
    , path = require("path")
    , p = path.resolve(__dirname, "utils/completion.sh")

  // The Darwin patch below results in callbacks first for the write and then
  // for the error handler, so make sure we only call our callback once.
  cb = once(cb)

  fs.readFile(p, "utf8", function (er, d) {
    if (er) return cb(er)
    d = d.replace(/^\#\!.*?\n/, "")  // strip the shebang line

    process.stdout.write(d, function () { cb() })
    process.stdout.on("error", function (er) {
      // Darwin is a pain sometimes.
      //
      // This is necessary because the "source" or "." program in
      // bash on OS X closes its file argument before reading
      // from it, meaning that you get exactly 1 write, which will
      // work most of the time, and will always raise an EPIPE.
      //
      // Really, one should not be tossing away EPIPE errors, or any
      // errors, so casually. But, without this, `. <(npm completion)`
      // can never ever work on OS X.
      if (er.errno === "EPIPE") er = null
      cb(er)
    })
  })
}
// Reverse shell escaping on a completion word: strip the wrapping double
// quotes, or turn backslash-escaped spaces back into plain spaces.
function unescape (w) {
  var isQuoted = w.charAt(0) === "\""
  return isQuoted
    ? w.replace(/^"|"$/g, "")
    : w.replace(/\\ /g, " ")
}
// Shell-quote a completion word: wrap it in double quotes when it contains
// whitespace, otherwise pass it through untouched.
function escape (w) {
  var hasWhitespace = w.match(/\s+/)
  if (hasWhitespace) return "\"" + w + "\""
  return w
}
// The command should respond with an array. Loop over that,
// wrapping quotes around any that have spaces, and writing
// them to stdout. Use console.log, not the outfd config.
// If any of the items are arrays, then join them with a space.
// Ie, returning ["a", "b c", ["d", "e"]] would allow it to expand
// to: "a", "b c", or "d" "e"
function wrapCb (cb, opts) { return function (er, compls) {
  if (!Array.isArray(compls)) compls = compls ? [compls] : []
  compls = compls.map(function (c) {
    if (Array.isArray(c)) c = c.map(escape).join(" ")
    else c = escape(c)
    return c
  })

  // only keep candidates that start with the word being completed
  if (opts.partialWord) compls = compls.filter(function (c) {
    return c.indexOf(opts.partialWord) === 0
  })

  console.error([er && er.stack, compls, opts.partialWord])

  if (er || compls.length === 0) return cb(er)

  console.log(compls.join("\n"))
  cb()
}}
// the current word has a dash. Return the config names,
// with the same number of dashes as the current word has.
function configCompl (opts, cb) {
  var word = opts.word
    , split = word.match(/^(-+)((?:no-)*)(.*)$/)
    , dashes = split[1]
    , no = split[2]
    , flags = configNames.filter(isFlag)
  console.error(flags)

  // every config completes as-is; flag-like (boolean) configs additionally
  // get a "no-" negated form
  return cb(null, allConfs.map(function (c) {
    return dashes + c
  }).concat(flags.map(function (f) {
    return dashes + (no || "no-") + f
  })))
}
// Completing the *value* of a config key is not implemented; log the
// request for debugging and offer no candidates.
function configValueCompl (opts, cb) {
  console.error("configValue", opts)
  var candidates = []
  return cb(null, candidates)
}
// check if the thing is a flag or not.
// A word is flag-like when it is negated ("no-"), names a boolean config,
// or is a shorthand -- shorthands never take args.
function isFlag (word) {
  var parts = word.match(/^(-*)((?:no-)+)?(.*)$/)
  var negated = parts[2]
  var conf = parts[3]
  if (negated) return negated
  return configTypes[conf] === Boolean || shorthands[conf]
}
// complete against the npm commands
function cmdCompl (opts, cb) {
  // npm.fullList is the list of command names known to npm
  return cb(null, npm.fullList)
}

285
node_modules/npm/lib/config.js generated vendored Normal file
View File

@@ -0,0 +1,285 @@
module.exports = config
config.usage = "npm config set <key> <value>"
+ "\nnpm config get [<key>]"
+ "\nnpm config delete <key>"
+ "\nnpm config list"
+ "\nnpm config edit"
+ "\nnpm set <key> <value>"
+ "\nnpm get [<key>]"
var log = require("npmlog")
, npm = require("./npm.js")
, npmconf = require("./config/core.js")
, fs = require("graceful-fs")
, writeFileAtomic = require("write-file-atomic")
, types = npmconf.defs.types
, ini = require("ini")
, editor = require("editor")
, os = require("os")
, umask = require("./utils/umask")
// Completion for `npm config`: first the subcommand, then config key names
// where the subcommand takes one.
config.completion = function (opts, cb) {
  var argv = opts.conf.argv.remain
  if (argv[1] !== "config") argv.unshift("config")
  if (argv.length === 2) {
    var cmds = ["get", "set", "delete", "ls", "rm", "edit"]
    // "l" could still become either "ls" or "list"; offer both
    if (opts.partialWord !== "l") cmds.push("list")
    return cb(null, cmds)
  }

  var action = argv[2]
  switch (action) {
    case "set":
      // todo: complete with valid values, if possible.
      if (argv.length > 3) return cb(null, [])
      // fallthrough
    case "get":
    case "delete":
    case "rm":
      return cb(null, Object.keys(types))
    case "edit":
    case "list": case "ls":
      return cb(null, [])
    default: return cb(null, [])
  }
}
// npm config set key value
// npm config get key
// npm config list
function config (args, cb) {
  // dispatch on the subcommand; anything unrecognized shows usage
  var action = args.shift()
  switch (action) {
    case "set": return set(args[0], args[1], cb)
    case "get": return get(args[0], cb)
    case "delete": case "rm": case "del": return del(args[0], cb)
    case "list": case "ls": return list(cb)
    case "edit": return edit(cb)
    default: return unknown(action, cb)
  }
}
// Open the user (or, with --global, the global) config file in the
// configured editor, appending a commented-out listing of all defaults.
function edit (cb) {
  var e = npm.config.get("editor")
    , which = npm.config.get("global") ? "global" : "user"
    , f = npm.config.get(which + "config")
  if (!e) return cb(new Error("No EDITOR config or environ set."))

  // save first, so the file on disk reflects the current in-memory config
  npm.config.save(which, function (er) {
    if (er) return cb(er)
    fs.readFile(f, "utf8", function (er, data) {
      if (er) data = ""
      data = [ ";;;;"
             , "; npm "+(npm.config.get("global") ?
                         "globalconfig" : "userconfig")+" file"
             , "; this is a simple ini-formatted file"
             , "; lines that start with semi-colons are comments."
             , "; read `npm help config` for help on the various options"
             , ";;;;"
             , ""
             , data
             ].concat( [ ";;;;"
                       , "; all options with default values"
                       , ";;;;"
                       ]
                     )
             .concat(Object.keys(npmconf.defaults).reduce(function (arr, key) {
               var obj = {}
               obj[key] = npmconf.defaults[key]
               if (key === "logstream") return arr
               // render each default as a commented-out ini line
               return arr.concat(
                 ini.stringify(obj)
                   .replace(/\n$/m, "")
                   .replace(/^/g, "; ")
                   .replace(/\n/g, "\n; ")
                   .split("\n"))
             }, []))
             .concat([""])
             .join(os.EOL)
      writeFileAtomic
        ( f
        , data
        , function (er) {
            if (er) return cb(er)
            editor(f, { editor: e }, cb)
          }
        )
    })
  })
}
// Delete a key from the user (or, with --global, the global) config file.
function del (key, cb) {
  if (!key) return cb(new Error("no key provided"))
  var where = npm.config.get("global") ? "global" : "user"
  npm.config.del(key, where)
  npm.config.save(where, cb)
}
// Set a key in the user (or global) config.  Accepts either
// `set key value` or `set key=value`; a missing value means "".
function set (key, val, cb) {
  if (key === undefined) {
    return unknown("", cb)
  }
  if (val === undefined) {
    if (key.indexOf("=") !== -1) {
      // `set key=value` form: split on the first "="
      var k = key.split("=")
      key = k.shift()
      val = k.join("=")
    } else {
      val = ""
    }
  }
  key = key.trim()
  val = val.trim()
  log.info("config", "set %j %j", key, val)
  var where = npm.config.get("global") ? "global" : "user"
  // umask values are stored numerically; parse the octal string form
  if (key.match(/umask/)) val = umask.fromString(val)
  npm.config.set(key, val, where)
  npm.config.save(where, cb)
}
// Print a single config value (or the full listing when no key is given).
// Secret-looking keys are refused rather than echoed.
function get (key, cb) {
  if (!key) return list(cb)
  if (!public(key)) {
    return cb(new Error("---sekretz---"))
  }
  var val = npm.config.get(key)
  // umasks are stored numerically; print the octal string form
  if (key.match(/umask/)) val = umask.toString(val)
  console.log(val)
  cb()
}
// Lexicographic comparator for config key names.
function sort (a, b) {
  if (a > b) return 1
  return -1
}
// A key is "public" (safe to print) unless it starts with "_" or contains
// ":_" (per-registry credential keys such as //host/:_authToken).
// NOTE(review): `types[k] !== types[k]` is only ever true for NaN, so the
// last clause appears to never exclude anything -- confirm intent.
function public (k) {
  return !(k.charAt(0) === "_" ||
           k.indexOf(":_") !== -1 ||
           types[k] !== types[k])
}
// Sorted list of the printable (non-secret) keys in a config section.
function getKeys (data) {
  return Object.keys(data).filter(public).sort(sort)
}
// Print the effective configuration section by section (cli, env, user,
// global, builtin), marking overridden values and masking secrets.  With
// --long, the full table of defaults is appended.
function list (cb) {
  var msg = ""
    , long = npm.config.get("long")

  var cli = npm.config.sources.cli.data
    , cliKeys = getKeys(cli)
  if (cliKeys.length) {
    msg += "; cli configs\n"
    cliKeys.forEach(function (k) {
      // skip composite values and the raw argv blob
      if (cli[k] && typeof cli[k] === "object") return
      if (k === "argv") return
      msg += k + " = " + JSON.stringify(cli[k]) + "\n"
    })
    msg += "\n"
  }

  // env configs
  var env = npm.config.sources.env.data
    , envKeys = getKeys(env)
  if (envKeys.length) {
    msg += "; environment configs\n"
    envKeys.forEach(function (k) {
      if (env[k] !== npm.config.get(k)) {
        if (!long) return
        msg += "; " + k + " = " + JSON.stringify(env[k])
             + " (overridden)\n"
      } else msg += k + " = " + JSON.stringify(env[k]) + "\n"
    })
    msg += "\n"
  }

  // user config file
  var uconf = npm.config.sources.user.data
    , uconfKeys = getKeys(uconf)
  if (uconfKeys.length) {
    msg += "; userconfig " + npm.config.get("userconfig") + "\n"
    uconfKeys.forEach(function (k) {
      // never echo secrets (keys starting with "_")
      var val = (k.charAt(0) === "_")
              ? "---sekretz---"
              : JSON.stringify(uconf[k])
      if (uconf[k] !== npm.config.get(k)) {
        if (!long) return
        msg += "; " + k + " = " + val
             + " (overridden)\n"
      } else msg += k + " = " + val + "\n"
    })
    msg += "\n"
  }

  // global config file
  var gconf = npm.config.sources.global.data
    , gconfKeys = getKeys(gconf)
  if (gconfKeys.length) {
    msg += "; globalconfig " + npm.config.get("globalconfig") + "\n"
    gconfKeys.forEach(function (k) {
      var val = (k.charAt(0) === "_")
              ? "---sekretz---"
              : JSON.stringify(gconf[k])
      if (gconf[k] !== npm.config.get(k)) {
        if (!long) return
        msg += "; " + k + " = " + val
             + " (overridden)\n"
      } else msg += k + " = " + val + "\n"
    })
    msg += "\n"
  }

  // builtin config file
  var builtin = npm.config.sources.builtin || {}
  if (builtin && builtin.data) {
    var bconf = builtin.data
      , bpath = builtin.path
      , bconfKeys = getKeys(bconf)
    if (bconfKeys.length) {
      msg += "; builtin config " + bpath + "\n"
      bconfKeys.forEach(function (k) {
        var val = (k.charAt(0) === "_")
                ? "---sekretz---"
                : JSON.stringify(bconf[k])
        if (bconf[k] !== npm.config.get(k)) {
          if (!long) return
          msg += "; " + k + " = " + val
               + " (overridden)\n"
        } else msg += k + " = " + val + "\n"
      })
      msg += "\n"
    }
  }

  // only show defaults if --long
  if (!long) {
    msg += "; node bin location = " + process.execPath + "\n"
         + "; cwd = " + process.cwd() + "\n"
         + "; HOME = " + process.env.HOME + "\n"
         + "; 'npm config ls -l' to show all defaults.\n"

    console.log(msg)
    return cb()
  }

  var defaults = npmconf.defaults
    , defKeys = getKeys(defaults)
  msg += "; default values\n"
  defKeys.forEach(function (k) {
    if (defaults[k] && typeof defaults[k] === "object") return
    var val = JSON.stringify(defaults[k])
    if (defaults[k] !== npm.config.get(k)) {
      msg += "; " + k + " = " + val
           + " (overridden)\n"
    } else msg += k + " = " + val + "\n"
  })
  msg += "\n"

  console.log(msg)
  return cb()
}
// Fallback for unrecognized subcommands: report usage via the callback.
function unknown (action, cb) {
  cb("Usage:\n" + config.usage)
}

View File

@@ -0,0 +1,16 @@
var assert = require("assert")

var toNerfDart = require("./nerf-dart.js")

module.exports = clearCredentialsByURI

// Remove all stored credentials (token, password, username, email) for the
// registry at `uri` from the user config.  `this` is a Conf instance.
function clearCredentialsByURI (uri) {
  assert(uri && typeof uri === "string", "registry URL is required")

  // the "nerf dart" is the //host/path: prefix credentials are scoped under
  var nerfed = toNerfDart(uri)

  this.del(nerfed + ":_authToken", "user")
  this.del(nerfed + ":_password", "user")
  this.del(nerfed + ":username", "user")
  this.del(nerfed + ":email", "user")
}

441
node_modules/npm/lib/config/core.js generated vendored Normal file
View File

@@ -0,0 +1,441 @@
var CC = require("config-chain").ConfigChain
var inherits = require("inherits")
var configDefs = require("./defaults.js")
var types = configDefs.types
var once = require("once")
var fs = require("fs")
var path = require("path")
var nopt = require("nopt")
var ini = require("ini")
var Umask = configDefs.Umask
var mkdirp = require("mkdirp")
var umask = require("../utils/umask")
exports.load = load
exports.Conf = Conf
exports.loaded = false
exports.rootConf = null
exports.usingBuiltin = false
exports.defs = configDefs
Object.defineProperty(exports, "defaults", { get: function () {
return configDefs.defaults
}, enumerable: true })
Object.defineProperty(exports, "types", { get: function () {
return configDefs.types
}, enumerable: true })
exports.validate = validate
var myUid = process.env.SUDO_UID !== undefined
? process.env.SUDO_UID : (process.getuid && process.getuid())
var myGid = process.env.SUDO_GID !== undefined
? process.env.SUDO_GID : (process.getgid && process.getgid())
var loading = false
var loadCbs = []
// Load the full npm configuration.  Arguments may be supplied in any order:
// a string (path to the builtin config file), an object (cli overrides),
// and a callback.  Concurrent calls while a load is in flight are queued
// and all share the single result.
function load () {
  var cli, builtin, cb
  for (var i = 0; i < arguments.length; i++)
    switch (typeof arguments[i]) {
      case "string": builtin = arguments[i]; break
      case "object": cli = arguments[i]; break
      case "function": cb = arguments[i]; break
    }

  if (!cb)
    cb = function () {}

  if (exports.loaded) {
    // already loaded: layer any cli overrides on top of the cached Conf
    var ret = exports.loaded
    if (cli) {
      ret = new Conf(ret)
      ret.unshift(cli)
    }
    return process.nextTick(cb.bind(null, null, ret))
  }

  // either a fresh object, or a clone of the passed in obj
  if (!cli)
    cli = {}
  else
    cli = Object.keys(cli).reduce(function (c, k) {
      c[k] = cli[k]
      return c
    }, {})

  loadCbs.push(cb)
  if (loading)
    return

  loading = true

  // this cb fans the single load result out to every queued caller
  cb = once(function (er, conf) {
    if (!er) {
      exports.loaded = conf
      loading = false
    }
    loadCbs.forEach(function (fn) {
      fn(er, conf)
    })
    loadCbs.length = 0
  })

  // check for a builtin if provided.
  exports.usingBuiltin = !!builtin
  var rc = exports.rootConf = new Conf()
  if (builtin)
    rc.addFile(builtin, "builtin")
  else
    rc.add({}, "builtin")

  rc.on("load", function () {
    load_(builtin, rc, cli, cb)
  })
  rc.on("error", cb)
}
// Second stage of load(): layer cli, env, project, user, global and builtin
// configs (in decreasing precedence) on top of the defaults, then run the
// post-load extras and validation.
function load_(builtin, rc, cli, cb) {
  var defaults = configDefs.defaults
  var conf = new Conf(rc)

  conf.usingBuiltin = !!builtin
  conf.add(cli, "cli")
  conf.addEnv()

  conf.loadPrefix(function(er) {
    if (er)
      return cb(er)

    // If you're doing `npm --userconfig=~/foo.npmrc` then you'd expect
    // that ~/.npmrc won't override the stuff in ~/foo.npmrc (or, indeed
    // be used at all).
    //
    // However, if the cwd is ~, then ~/.npmrc is the home for the project
    // config, and will override the userconfig.
    //
    // If you're not setting the userconfig explicitly, then it will be loaded
    // twice, which is harmless but excessive. If you *are* setting the
    // userconfig explicitly then it will override your explicit intent, and
    // that IS harmful and unexpected.
    //
    // Solution: Do not load project config file that is the same as either
    // the default or resolved userconfig value. npm will log a "verbose"
    // message about this when it happens, but it is a rare enough edge case
    // that we don't have to be super concerned about it.
    var projectConf = path.resolve(conf.localPrefix, ".npmrc")
    var defaultUserConfig = rc.get("userconfig")
    var resolvedUserConfig = conf.get("userconfig")
    if (!conf.get("global") &&
        projectConf !== defaultUserConfig &&
        projectConf !== resolvedUserConfig) {
      conf.addFile(projectConf, "project")
      conf.once("load", afterPrefix)
    } else {
      conf.add({}, "project")
      afterPrefix()
    }
  })

  function afterPrefix() {
    conf.addFile(conf.get("userconfig"), "user")
    conf.once("error", cb)
    conf.once("load", afterUser)
  }

  function afterUser () {
    // globalconfig and globalignorefile defaults
    // need to respond to the 'prefix' setting up to this point.
    // Eg, `npm config get globalconfig --prefix ~/local` should
    // return `~/local/etc/npmrc`
    // annoying humans and their expectations!
    if (conf.get("prefix")) {
      var etc = path.resolve(conf.get("prefix"), "etc")
      mkdirp(etc, function (err) {
        // NOTE(review): `err` is ignored -- the defaults are updated even if
        // the etc directory could not be created.  Confirm intentional.
        defaults.globalconfig = path.resolve(etc, "npmrc")
        defaults.globalignorefile = path.resolve(etc, "npmignore")
        afterUserContinuation()
      })
    } else {
      afterUserContinuation()
    }
  }

  function afterUserContinuation() {
    conf.addFile(conf.get("globalconfig"), "global")

    // move the builtin into the conf stack now.
    conf.root = defaults
    conf.add(rc.shift(), "builtin")
    conf.once("load", function () {
      conf.loadExtras(afterExtras)
    })
  }

  function afterExtras(er) {
    if (er)
      return cb(er)

    // warn about invalid bits.
    validate(conf)

    var cafile = conf.get("cafile")
    if (cafile) {
      return conf.loadCAFile(cafile, finalize)
    }

    finalize()
  }

  function finalize(er) {
    if (er) {
      return cb(er)
    }

    exports.loaded = conf
    cb(er, conf)
  }
}
// Basically the same as CC, but:
// 1. Always ini
// 2. Parses environment variable names in field values
// 3. Field values that start with ~/ are replaced with process.env.HOME
// 4. Can inherit from another Conf object, using it as the base.
inherits(Conf, CC)
function Conf (base) {
  if (!(this instanceof Conf))
    return new Conf(base)

  CC.apply(this)

  // root is the bottom of the config chain: the base Conf's first layer (or
  // its root), a plain base object, or the hard-coded defaults
  if (base)
    if (base instanceof Conf)
      this.root = base.list[0] || base.root
    else
      this.root = base
  else
    this.root = configDefs.defaults
}
Conf.prototype.loadPrefix = require("./load-prefix.js")
Conf.prototype.loadCAFile = require("./load-cafile.js")
Conf.prototype.loadUid = require("./load-uid.js")
Conf.prototype.setUser = require("./set-user.js")
Conf.prototype.findPrefix = require("./find-prefix.js")
Conf.prototype.getCredentialsByURI = require("./get-credentials-by-uri.js")
Conf.prototype.setCredentialsByURI = require("./set-credentials-by-uri.js")
Conf.prototype.clearCredentialsByURI = require("./clear-credentials-by-uri.js")
// Post-load steps that need a usable config: resolve the effective user,
// load the uid, and make sure the prefix directory exists.
Conf.prototype.loadExtras = function(cb) {
  this.setUser(function(er) {
    if (er)
      return cb(er)
    this.loadUid(function(er) {
      if (er)
        return cb(er)
      // Without prefix, nothing will ever work
      mkdirp(this.prefix, cb)
    }.bind(this))
  }.bind(this))
}
// Persist one config layer ("user", "global", or "builtin") back to disk.
// An empty layer removes the file.  Either calls cb or emits "save"/"error"
// once all in-flight saves have finished.
Conf.prototype.save = function (where, cb) {
  var target = this.sources[where]
  if (!target || !(target.path || target.source) || !target.data) {
    // a missing "builtin" target is legitimate; anything else is an error
    // (note: `er` is deliberately left undefined in the builtin case)
    if (where !== "builtin")
      var er = new Error("bad save target: " + where)
    if (cb) {
      process.nextTick(cb.bind(null, er))
      return this
    }
    return this.emit("error", er)
  }

  if (target.source) {
    // in-memory source (no file): copy the data back onto the source object
    var pref = target.prefix || ""
    Object.keys(target.data).forEach(function (k) {
      target.source[pref + k] = target.data[k]
    })
    if (cb) process.nextTick(cb)
    return this
  }

  var data = ini.stringify(target.data)

  // then/done are declared below; function hoisting makes this legal
  then = then.bind(this)
  done = done.bind(this)
  this._saving ++

  // user config may contain credentials, so keep it private
  var mode = where === "user" ? "0600" : "0666"
  if (!data.trim()) {
    fs.unlink(target.path, function () {
      // ignore the possible error (e.g. the file doesn't exist)
      done(null)
    })
  } else {
    mkdirp(path.dirname(target.path), function (er) {
      if (er)
        return then(er)
      fs.writeFile(target.path, data, "utf8", function (er) {
        if (er)
          return then(er)
        // when running under sudo, give the file back to the real user
        if (where === "user" && myUid && myGid)
          fs.chown(target.path, +myUid, +myGid, then)
        else
          then()
      })
    })
  }

  function then (er) {
    if (er)
      return done(er)
    fs.chmod(target.path, mode, done)
  }

  function done (er) {
    if (er) {
      if (cb) return cb(er)
      else return this.emit("error", er)
    }
    this._saving --
    if (this._saving === 0) {
      if (cb) cb()
      this.emit("save")
    }
  }

  return this
}
// Load an ini file as a new config layer.  A missing file is treated as an
// empty layer rather than an error.
Conf.prototype.addFile = function (file, name) {
  name = name || file
  var marker = {__source__:name}
  this.sources[name] = { path: file, type: "ini" }
  this.push(marker)
  this._await()
  fs.readFile(file, "utf8", function (er, data) {
    if (er) // just ignore missing files.
      return this.add({}, marker)
    this.addString(data, file, "ini", marker)
  }.bind(this))
  return this
}
// always ini files.
Conf.prototype.parse = function (content, file) {
  return CC.prototype.parse.call(this, content, file, "ini")
}
// Add a data layer, normalizing every field via parseField (env expansion,
// ~ resolution, type coercion) first.  A parse failure is surfaced as an
// "error" event instead of throwing.
Conf.prototype.add = function (data, marker) {
  try {
    Object.keys(data).forEach(function (k) {
      data[k] = parseField(data[k], k)
    })
  }
  catch (e) {
    this.emit("error", e)
    return this
  }
  return CC.prototype.add.call(this, data, marker)
}
// Pick up configuration from npm_config_* environment variables, mapping
// e.g. npm_config_fetch_retries to the "fetch-retries" key.
Conf.prototype.addEnv = function (env) {
  env = env || process.env
  var conf = {}
  Object.keys(env)
    .filter(function (k) { return k.match(/^npm_config_/i) })
    .forEach(function (k) {
      // empty values are ignored entirely
      if (!env[k])
        return

      // leave first char untouched, even if
      // it is a "_" - convert all other to "-"
      var p = k.toLowerCase()
               .replace(/^npm_config_/, "")
               .replace(/(?!^)_/g, "-")
      conf[p] = env[k]
    })
  return CC.prototype.addEnv.call(this, "", conf, "env")
}
// Normalize a raw (string) config value according to its declared type(s):
// JSON-unquote, recognize boolean/null literals, expand ${ENV} references,
// resolve ~/ and relative paths, parse umasks, and coerce numbers.
function parseField (f, k) {
  if (typeof f !== "string" && !(f instanceof String))
    return f

  // type can be an array or single thing.
  var typeList = [].concat(types[k])
  var isPath = -1 !== typeList.indexOf(path)
  var isBool = -1 !== typeList.indexOf(Boolean)
  var isString = -1 !== typeList.indexOf(String)
  var isUmask = -1 !== typeList.indexOf(Umask)
  var isNumber = -1 !== typeList.indexOf(Number)

  f = (""+f).trim()

  // values wrapped in double quotes are JSON-decoded
  if (f.match(/^".*"$/)) {
    try {
      f = JSON.parse(f)
    }
    catch (e) {
      throw new Error("Failed parsing JSON config key " + k + ": " + f)
    }
  }

  // a bare boolean flag (no value) means true
  if (isBool && !isString && f === "")
    return true

  switch (f) {
    case "true": return true
    case "false": return false
    case "null": return null
    case "undefined": return undefined
  }

  f = envReplace(f)

  if (isPath) {
    var homePattern = process.platform === "win32" ? /^~(\/|\\)/ : /^~\//
    if (f.match(homePattern) && process.env.HOME) {
      f = path.resolve(process.env.HOME, f.substr(2))
    }
    f = path.resolve(f)
  }

  if (isUmask)
    f = umask.fromString(f)

  if (isNumber && !isNaN(f))
    f = +f

  return f
}
// Substitute ${VAR} references in a config value with values from
// process.env.  A reference preceded by an odd number of backslashes is
// treated as escaped and left untouched; an unset variable is an error.
function envReplace (f) {
  if (typeof f !== "string" || !f) return f

  var pattern = /(\\*)\$\{([^}]+)\}/g
  return f.replace(pattern, function (match, slashes, name) {
    var escaped = slashes.length % 2
    if (escaped)
      return match
    var value = process.env[name]
    if (value === undefined)
      throw new Error("Failed to replace env in config: " + match)
    return value
  })
}
// warn about invalid configs at every level.
function validate (cl) {
  // nopt.clean both warns (via nopt.invalidHandler) and strips bad values
  cl.list.forEach(function (conf) {
    nopt.clean(conf, configDefs.types)
  })

  nopt.clean(cl.root, configDefs.types)
}

380
node_modules/npm/lib/config/defaults.js generated vendored Normal file
View File

@@ -0,0 +1,380 @@
// defaults, types, and shorthands.
var path = require("path")
, url = require("url")
, Stream = require("stream").Stream
, semver = require("semver")
, stableFamily = semver.parse(process.version)
, nopt = require("nopt")
, os = require("os")
, osenv = require("osenv")
, umask = require("../utils/umask")
var log
try {
log = require("npmlog")
} catch (er) {
var util = require("util")
log = { warn: function (m) {
console.warn(m + " " + util.format.apply(util, [].slice.call(arguments, 1)))
} }
}
exports.Umask = Umask
// Marker type used in the `types` table for umask-valued configs.
function Umask () {}
function validateUmask (data, k, val) {
  return umask.validate(data, k, val)
}

// nopt validator: only accept valid semver strings, stored normalized.
function validateSemver (data, k, val) {
  if (!semver.valid(val)) return false
  data[k] = semver.valid(val)
}
// nopt validator: only Stream instances are acceptable values.
function validateStream (data, k, val) {
  var ok = val instanceof Stream
  if (ok) {
    data[k] = val
    return
  }
  return false
}
nopt.typeDefs.semver = { type: semver, validate: validateSemver }
nopt.typeDefs.Stream = { type: Stream, validate: validateStream }
nopt.typeDefs.Umask = { type: Umask, validate: validateUmask }
// Called by nopt whenever a config value fails type validation; prints a
// human-readable warning describing the expected type.
nopt.invalidHandler = function (k, val, type) {
  log.warn("invalid config", k + "=" + JSON.stringify(val))

  // for multi-typed configs, report against the most descriptive type
  if (Array.isArray(type)) {
    if (type.indexOf(url) !== -1) type = url
    else if (type.indexOf(path) !== -1) type = path
  }

  switch (type) {
    case Umask:
      log.warn("invalid config", "Must be umask, octal number in range 0000..0777")
      break
    case url:
      log.warn("invalid config", "Must be a full url with 'http://'")
      break
    case path:
      log.warn("invalid config", "Must be a valid filesystem path")
      break
    case Number:
      log.warn("invalid config", "Must be a numeric value")
      break
    case Stream:
      log.warn("invalid config", "Must be an instance of the Stream class")
      break
  }
}
// Only even-minor node release families are kept ("stable" in the historic
// node versioning scheme); odd minors are treated as no stable family.
if (!stableFamily || (+stableFamily.minor % 2)) stableFamily = null
else stableFamily = stableFamily.major + "." + stableFamily.minor

var defaults

var temp = osenv.tmpdir()
var home = osenv.home()

// prefer the real uid for per-user paths; fall back to the pid
var uidOrPid = process.getuid ? process.getuid() : process.pid

if (home) process.env.HOME = home
else home = path.resolve(temp, "npm-" + uidOrPid)

var cacheExtra = process.platform === "win32" ? "npm-cache" : ".npm"
var cacheRoot = process.platform === "win32" && process.env.APPDATA || home
var cache = path.resolve(cacheRoot, cacheExtra)

var globalPrefix
// Lazily compute the default value for every config key the first time
// defaults is read.  The global prefix comes from $PREFIX, the node
// binary's location (parent dir on Windows, grandparent elsewhere), and
// is re-rooted under $DESTDIR on Unix.
Object.defineProperty(exports, "defaults", {get: function () {
  if (defaults) return defaults
  if (process.env.PREFIX) {
    globalPrefix = process.env.PREFIX
  } else if (process.platform === "win32") {
    // c:\node\node.exe --> prefix=c:\node\
    globalPrefix = path.dirname(process.execPath)
  } else {
    // /usr/local/bin/node --> prefix=/usr/local
    globalPrefix = path.dirname(path.dirname(process.execPath))
    // destdir only is respected on Unix
    if (process.env.DESTDIR) {
      globalPrefix = path.join(process.env.DESTDIR, globalPrefix)
    }
  }
  defaults = {
    access : null
  , "always-auth" : false
  , "bin-links" : true
  , browser : null
  , ca: null
  , cafile: null
  , cache : cache
  , "cache-lock-stale": 60000
  , "cache-lock-retries": 10
  , "cache-lock-wait": 10000
  , "cache-max": Infinity
  , "cache-min": 10
  , cert: null
  , color : true
  , depth: Infinity
  , description : true
  , dev : false
  , editor : osenv.editor()
  , "engine-strict": false
  , force : false
  , "fetch-retries": 2
  , "fetch-retry-factor": 10
  , "fetch-retry-mintimeout": 10000
  , "fetch-retry-maxtimeout": 60000
  , git: "git"
  , "git-tag-version": true
  , global : false
  , globalconfig : path.resolve(globalPrefix, "etc", "npmrc")
  // under sudo, install as the invoking user's group, not root's
  , group : process.platform === "win32" ? 0
    : process.env.SUDO_GID || (process.getgid && process.getgid())
  , heading: "npm"
  , "if-present": false
  , "ignore-scripts": false
  , "init-module": path.resolve(home, ".npm-init.js")
  , "init-author-name" : ""
  , "init-author-email" : ""
  , "init-author-url" : ""
  , "init-version": "1.0.0"
  , "init-license": "ISC"
  , json: false
  , key: null
  , link: false
  , "local-address" : undefined
  , loglevel : "warn"
  , logstream : process.stderr
  , long : false
  , maxsockets : 50
  , message : "%s"
  , "node-version" : process.version
  , npat : false
  , "onload-script" : false
  , optional: true
  , parseable : false
  , prefix : globalPrefix
  , production: process.env.NODE_ENV === "production"
  , "proprietary-attribs": true
  , proxy : null
  , "https-proxy" : null
  , "user-agent" : "npm/{npm-version} "
                 + "node/{node-version} "
                 + "{platform} "
                 + "{arch}"
  , "rebuild-bundle" : true
  , registry : "https://registry.npmjs.org/"
  , rollback : true
  , save : false
  , "save-bundle": false
  , "save-dev" : false
  , "save-exact" : false
  , "save-optional" : false
  , "save-prefix": "^"
  , scope : ""
  , searchopts: ""
  , searchexclude: null
  , searchsort: "name"
  , shell : osenv.shell()
  , shrinkwrap: true
  , "sign-git-tag": false
  , spin: true
  , "strict-ssl": true
  , tag : "latest"
  , "tag-version-prefix" : "v"
  , tmp : temp
  , unicode : true
  // unsafe-perm defaults true when we can't (or needn't) drop privileges
  , "unsafe-perm" : process.platform === "win32"
                 || process.platform === "cygwin"
                 || !( process.getuid && process.setuid
                    && process.getgid && process.setgid )
                 || process.getuid() !== 0
  , usage : false
  , user : process.platform === "win32" ? 0 : "nobody"
  , userconfig : path.resolve(home, ".npmrc")
  , umask: process.umask ? process.umask() : umask.fromString("022")
  , version : false
  , versions : false
  , viewer: process.platform === "win32" ? "browser" : "man"
  , _exit : true
  }
  return defaults
}})
// nopt type map: the accepted type(s) for every config key.  Arrays mean
// "any of these"; the custom semver/Stream/Umask types are registered
// with nopt.typeDefs above.
exports.types =
  { access : [null, "restricted", "public"]
  , "always-auth" : Boolean
  , "bin-links": Boolean
  , browser : [null, String]
  , ca: [null, String, Array]
  , cafile : path
  , cache : path
  , "cache-lock-stale": Number
  , "cache-lock-retries": Number
  , "cache-lock-wait": Number
  , "cache-max": Number
  , "cache-min": Number
  , cert: [null, String]
  , color : ["always", Boolean]
  , depth : Number
  , description : Boolean
  , dev : Boolean
  , editor : String
  , "engine-strict": Boolean
  , force : Boolean
  , "fetch-retries": Number
  , "fetch-retry-factor": Number
  , "fetch-retry-mintimeout": Number
  , "fetch-retry-maxtimeout": Number
  , git: String
  , "git-tag-version": Boolean
  , global : Boolean
  , globalconfig : path
  , group : [Number, String]
  , "https-proxy" : [null, url]
  , "user-agent" : String
  , "heading": String
  , "if-present": Boolean
  , "ignore-scripts": Boolean
  , "init-module": path
  , "init-author-name" : String
  , "init-author-email" : String
  , "init-author-url" : ["", url]
  , "init-license": String
  , "init-version": semver
  , json: Boolean
  , key: [null, String]
  , link: Boolean
  // local-address must be listed as an IP for a local network interface
  // must be IPv4 due to node bug
  , "local-address" : getLocalAddresses()
  , loglevel : ["silent", "error", "warn", "http", "info", "verbose", "silly"]
  , logstream : Stream
  , long : Boolean
  , maxsockets : Number
  , message: String
  , "node-version" : [null, semver]
  , npat : Boolean
  , "onload-script" : [null, String]
  , optional: Boolean
  , parseable : Boolean
  , prefix: path
  , production: Boolean
  , "proprietary-attribs": Boolean
  , proxy : [null, false, url] // allow proxy to be disabled explicitly
  , "rebuild-bundle" : Boolean
  , registry : [null, url]
  , rollback : Boolean
  , save : Boolean
  , "save-bundle": Boolean
  , "save-dev" : Boolean
  , "save-exact" : Boolean
  , "save-optional" : Boolean
  , "save-prefix": String
  , scope : String
  , searchopts : String
  , searchexclude: [null, String]
  , searchsort: [ "name", "-name"
                , "description", "-description"
                , "author", "-author"
                , "date", "-date"
                , "keywords", "-keywords" ]
  , shell : String
  , shrinkwrap: Boolean
  , "sign-git-tag": Boolean
  , spin: ["always", Boolean]
  , "strict-ssl": Boolean
  , tag : String
  , tmp : path
  , unicode : Boolean
  , "unsafe-perm" : Boolean
  , usage : Boolean
  , user : [Number, String]
  , userconfig : path
  , umask: Umask
  , version : Boolean
  , "tag-version-prefix" : String
  , versions : Boolean
  , viewer: String
  , _exit : Boolean
  }
// Enumerate the IPv4 address of every local network interface, plus a
// trailing `undefined` so "no local-address configured" is itself a
// valid value for the local-address config key.
function getLocalAddresses () {
  var interfaces
  // #8094: some environments require elevated permissions to enumerate
  // interfaces, and synchronously throw EPERM when run without
  // elevated privileges
  try {
    interfaces = os.networkInterfaces()
  } catch (e) {
    interfaces = {}
  }

  var addresses = []
  Object.keys(interfaces).forEach(function (nic) {
    interfaces[nic].forEach(function (addr) {
      if (addr.family === 'IPv4') addresses.push(addr.address)
    })
  })
  addresses.push(undefined)
  return addresses
}
// Single-character and legacy aliases expanded by nopt before parsing.
exports.shorthands =
  { s : ["--loglevel", "silent"]
  , d : ["--loglevel", "info"]
  , dd : ["--loglevel", "verbose"]
  , ddd : ["--loglevel", "silly"]
  , noreg : ["--no-registry"]
  , N : ["--no-registry"]
  , reg : ["--registry"]
  , "no-reg" : ["--no-registry"]
  , silent : ["--loglevel", "silent"]
  , verbose : ["--loglevel", "verbose"]
  , quiet: ["--loglevel", "warn"]
  , q: ["--loglevel", "warn"]
  , h : ["--usage"]
  , H : ["--usage"]
  , "?" : ["--usage"]
  , help : ["--usage"]
  , v : ["--version"]
  , f : ["--force"]
  , gangster : ["--force"]
  , gangsta : ["--force"]
  , desc : ["--description"]
  , "no-desc" : ["--no-description"]
  , "local" : ["--no-global"]
  , l : ["--long"]
  , m : ["--message"]
  , p : ["--parseable"]
  , porcelain : ["--parseable"]
  , g : ["--global"]
  , S : ["--save"]
  , D : ["--save-dev"]
  , E : ["--save-exact"]
  , O : ["--save-optional"]
  , y : ["--yes"]
  , n : ["--no-yes"]
  , B : ["--save-bundle"]
  , C : ["--prefix"]
  }

56
node_modules/npm/lib/config/find-prefix.js generated vendored Normal file
View File

@@ -0,0 +1,56 @@
// try to find the most reasonable prefix to use
module.exports = findPrefix
var fs = require("fs")
var path = require("path")
// Resolve `p` and locate the prefix npm should operate on.  If `p` is
// inside a node_modules folder, walk up to the outermost package root;
// otherwise delegate to findPrefix_ to search upward.
function findPrefix (p, cb_) {
  // defer the callback so findPrefix is always asynchronous
  function cb (er, p) {
    process.nextTick(function () {
      cb_(er, p)
    })
  }

  p = path.resolve(p)

  // if there's no node_modules folder, then
  // walk up until we hopefully find one.
  // if none anywhere, then use cwd.
  var walkedUp = false
  while (path.basename(p) === "node_modules") {
    p = path.dirname(p)
    walkedUp = true
  }
  if (walkedUp) return cb(null, p)

  findPrefix_(p, p, cb)
}
// Walk upward from `p` looking for a directory containing node_modules
// or package.json; fall back to `original` at the filesystem root
// (or a bare drive root on Windows).
function findPrefix_ (p, original, cb) {
  if (p === "/"
      || (process.platform === "win32" && p.match(/^[a-zA-Z]:(\\|\/)?$/))) {
    return cb(null, original)
  }
  fs.readdir(p, function (er, files) {
    // an error right away is a bad sign.
    // unless the prefix was simply a non
    // existent directory.
    if (er && p === original) {
      if (er.code === "ENOENT") return cb(null, original);
      return cb(er)
    }

    // walked up too high or something.
    if (er) return cb(null, original)

    if (files.indexOf("node_modules") !== -1
        || files.indexOf("package.json") !== -1) {
      return cb(null, p)
    }

    var d = path.dirname(p)
    // dirname of the root is itself: stop recursing
    if (d === p) return cb(null, original)

    return findPrefix_(d, original, cb)
  })
}

74
node_modules/npm/lib/config/get-credentials-by-uri.js generated vendored Normal file
View File

@@ -0,0 +1,74 @@
var assert = require("assert")
var toNerfDart = require("./nerf-dart.js")
module.exports = getCredentialsByURI
// Look up the credentials configured for a registry URI.  Called in the
// context of an npm config object (reads via this.get).  Scoped
// (nerf-darted) credentials take precedence; legacy _auth/username/
// _password values only apply to the default registry.
function getCredentialsByURI (uri) {
  assert(uri && typeof uri === "string", "registry URL is required")
  var nerfed = toNerfDart(uri)
  var defnerf = toNerfDart(this.get("registry"))

  // hidden class micro-optimization
  var c = {
    scope : nerfed,
    token : undefined,
    password : undefined,
    username : undefined,
    email : undefined,
    auth : undefined,
    alwaysAuth : undefined
  }

  // used to override scope matching for tokens as well as legacy auth
  if (this.get(nerfed + ':always-auth') !== undefined) {
    var val = this.get(nerfed + ':always-auth')
    // config values may come through as the literal string "false"
    c.alwaysAuth = val === 'false' ? false : !!val
  } else if (this.get('always-auth') !== undefined) {
    c.alwaysAuth = this.get('always-auth')
  }

  if (this.get(nerfed + ':_authToken')) {
    c.token = this.get(nerfed + ':_authToken')
    // the bearer token is enough, don't confuse things
    return c
  }

  // Handle the old-style _auth=<base64> style for the default
  // registry, if set.
  //
  // XXX(isaacs): Remove when npm 1.4 is no longer relevant
  var authDef = this.get("_auth")
  var userDef = this.get("username")
  var passDef = this.get("_password")
  if (authDef && !(userDef && passDef)) {
    // _auth is base64("user:pass"); split on the first colon only
    authDef = new Buffer(authDef, "base64").toString()
    authDef = authDef.split(":")
    userDef = authDef.shift()
    passDef = authDef.join(":")
  }

  // scoped password is stored base64-encoded
  if (this.get(nerfed + ":_password")) {
    c.password = new Buffer(this.get(nerfed + ":_password"), "base64").toString("utf8")
  } else if (nerfed === defnerf && passDef) {
    c.password = passDef
  }

  if (this.get(nerfed + ":username")) {
    c.username = this.get(nerfed + ":username")
  } else if (nerfed === defnerf && userDef) {
    c.username = userDef
  }

  if (this.get(nerfed + ":email")) {
    c.email = this.get(nerfed + ":email")
  } else if (this.get("email")) {
    c.email = this.get("email")
  }

  // derive the Basic-auth header payload when both halves are present
  if (c.username && c.password) {
    c.auth = new Buffer(c.username + ":" + c.password).toString("base64")
  }

  return c
}

34
node_modules/npm/lib/config/load-cafile.js generated vendored Normal file
View File

@@ -0,0 +1,34 @@
module.exports = loadCAFile
var fs = require("fs")
// Read a CA bundle file and store its certificates (as an array) on the
// "ca" config.  Called in the context of an npm config object.  A vanished
// cafile (ENOENT) is ignored; other read errors are passed to cb.
function loadCAFile(cafilePath, cb) {
  if (!cafilePath)
    return process.nextTick(cb)

  fs.readFile(cafilePath, "utf8", afterCARead.bind(this))

  function afterCARead(er, cadata) {
    if (er) {
      // previous cafile no longer exists, so just continue on gracefully
      if (er.code === 'ENOENT') return cb()
      return cb(er)
    }

    var delim = "-----END CERTIFICATE-----"
    var output

    // split the bundle into individual certs, re-appending the delimiter
    // that split() removed and dropping empty trailing chunks
    output = cadata
      .split(delim)
      .filter(function(xs) {
        return !!xs.trim()
      })
      .map(function(xs) {
        return xs.trimLeft() + delim
      })

    this.set("ca", output)
    cb(null)
  }
}

49
node_modules/npm/lib/config/load-prefix.js generated vendored Normal file
View File

@@ -0,0 +1,49 @@
module.exports = loadPrefix
var findPrefix = require("./find-prefix.js")
var path = require("path")
// Define the prefix/globalPrefix/localPrefix accessors on a config object
// and initialize localPrefix from the CLI --prefix (when given) or the
// nearest package root above cwd.  Called in the context of an npm config
// object.
function loadPrefix (cb) {
  // this.list[0] holds the parsed command-line options
  var cli = this.list[0]

  // "prefix" reads/writes whichever of global/local is currently active
  Object.defineProperty(this, "prefix",
    { set : function (prefix) {
        var g = this.get("global")
        this[g ? "globalPrefix" : "localPrefix"] = prefix
      }.bind(this)
    , get : function () {
        var g = this.get("global")
        return g ? this.globalPrefix : this.localPrefix
      }.bind(this)
    , enumerable : true
    })

  Object.defineProperty(this, "globalPrefix",
    { set : function (prefix) {
        this.set("prefix", prefix)
      }.bind(this)
    , get : function () {
        return path.resolve(this.get("prefix"))
      }.bind(this)
    , enumerable : true
    })

  // localPrefix is held in the closure variable p
  var p
  Object.defineProperty(this, "localPrefix",
    { set : function (prefix) { p = prefix },
      get : function () { return p }
    , enumerable: true })

  // try to guess at a good node_modules location.
  // If we are *explicitly* given a prefix on the cli, then
  // always use that.  otherwise, infer local prefix from cwd.
  if (Object.prototype.hasOwnProperty.call(cli, "prefix")) {
    p = path.resolve(cli.prefix)
    process.nextTick(cb)
  } else {
    findPrefix(process.cwd(), function (er, found) {
      p = found
      cb(er)
    })
  }
}

15
node_modules/npm/lib/config/load-uid.js generated vendored Normal file
View File

@@ -0,0 +1,15 @@
module.exports = loadUid
var getUid = require("uid-number")
// Call in the context of a npmconf object
function loadUid (cb) {
  // if we're not in unsafe-perm mode, then figure out who
  // to run stuff as.  Do this first, to support `npm update npm -g`
  if (!this.get("unsafe-perm")) {
    // getUid resolves names like "nobody" to numeric ids; cb(er, uid, gid)
    getUid(this.get("user"), this.get("group"), cb)
  } else {
    // unsafe-perm: run as the current user, nothing to resolve
    process.nextTick(cb)
  }
}

23
node_modules/npm/lib/config/nerf-dart.js generated vendored Normal file
View File

@@ -0,0 +1,23 @@
var url = require("url")
module.exports = toNerfDart
/**
* Maps a URL to an identifier.
*
* Name courtesy schiffertronix media LLC, a New Jersey corporation
*
* @param {String} uri The URL to be nerfed.
*
* @returns {String} A nerfed URL.
*/
/**
 * Maps a registry URL to its credential-scope identifier ("nerf dart"):
 * the protocol, auth, query, search, and hash are dropped and the path
 * is collapsed to its containing directory, e.g.
 * "https://user:pass@registry.npmjs.org/npm?x=1" -> "//registry.npmjs.org/".
 *
 * @param {String} uri The URL to be nerfed.
 *
 * @returns {String} A nerfed URL.
 */
function toNerfDart(uri) {
  var stripped = url.parse(uri)
  var dropped = ["protocol", "auth", "query", "search", "hash"]
  dropped.forEach(function (field) {
    delete stripped[field]
  })
  return url.resolve(url.format(stripped), ".")
}

42
node_modules/npm/lib/config/set-credentials-by-uri.js generated vendored Normal file
View File

@@ -0,0 +1,42 @@
var assert = require("assert")
var toNerfDart = require("./nerf-dart.js")
module.exports = setCredentialsByURI
// Store credentials for a registry URI in the user config.  Accepts
// either a bearer token or a complete username/password/email triple;
// the unused style's keys are deleted so the two can never conflict.
// Called in the context of an npm config object.
function setCredentialsByURI (uri, c) {
  assert(uri && typeof uri === "string", "registry URL is required")
  assert(c && typeof c === "object", "credentials are required")

  var nerfed = toNerfDart(uri)

  if (c.token) {
    // token auth: drop any legacy basic-auth credentials for this scope
    this.set(nerfed + ":_authToken", c.token, "user")
    this.del(nerfed + ":_password", "user")
    this.del(nerfed + ":username", "user")
    this.del(nerfed + ":email", "user")
    this.del(nerfed + ":always-auth", "user")
  }
  else if (c.username || c.password || c.email) {
    // basic auth: all three fields are required together
    assert(c.username, "must include username")
    assert(c.password, "must include password")
    assert(c.email, "must include email address")

    this.del(nerfed + ":_authToken", "user")

    // the password is stored base64-encoded (obfuscation, not encryption)
    var encoded = new Buffer(c.password, "utf8").toString("base64")
    this.set(nerfed + ":_password", encoded, "user")
    this.set(nerfed + ":username", c.username, "user")
    this.set(nerfed + ":email", c.email, "user")

    if (c.alwaysAuth !== undefined) {
      this.set(nerfed + ":always-auth", c.alwaysAuth, "user")
    }
    else {
      this.del(nerfed + ":always-auth", "user")
    }
  }
  else {
    throw new Error("No credentials to set.")
  }
}

29
node_modules/npm/lib/config/set-user.js generated vendored Normal file
View File

@@ -0,0 +1,29 @@
module.exports = setUser
var assert = require("assert")
var path = require("path")
var fs = require("fs")
var mkdirp = require("mkdirp")
// Default the "user" config to the owner of the prefix directory (or
// $SUDO_UID when running under sudo).  Called in the context of an npm
// config object; mutates this.root, the bottom defaults layer.
function setUser (cb) {
  var defaultConf = this.root
  assert(defaultConf !== Object.prototype)

  // If global, leave it as-is.
  // If not global, then set the user to the owner of the prefix folder.
  // Just set the default, so it can be overridden.
  if (this.get("global")) return cb()
  if (process.env.SUDO_UID) {
    defaultConf.user = +(process.env.SUDO_UID)
    return cb()
  }

  var prefix = path.resolve(this.get("prefix"))
  // ensure the prefix exists so it can be stat'd for its owner
  mkdirp(prefix, function (er) {
    if (er) return cb(er)
    fs.stat(prefix, function (er, st) {
      defaultConf.user = st && st.uid
      return cb(er)
    })
  })
}

375
node_modules/npm/lib/dedupe.js generated vendored Normal file
View File

@@ -0,0 +1,375 @@
// traverse the node_modules/package.json tree
// looking for duplicates. If any duplicates are found,
// then move them up to the highest level necessary
// in order to make them no longer duplicated.
//
// This is kind of ugly, and really highlights the need for
// much better "put pkg X at folder Y" abstraction. Oh well,
// whatever. Perfect enemy of the good, and all that.
var fs = require("fs")
var asyncMap = require("slide").asyncMap
var path = require("path")
var readJson = require("read-package-json")
var semver = require("semver")
var rm = require("./utils/gently-rm.js")
var log = require("npmlog")
var npm = require("./npm.js")
var mapToRegistry = require("./utils/map-to-registry.js")
module.exports = dedupe
dedupe.usage = "npm dedupe [pkg pkg...]"
// Entry point for `npm dedupe` / `npm find-dupes`.  find-* commands run
// in dry-run mode and only report duplicates instead of moving them.
function dedupe (args, silent, cb) {
  if (typeof silent === "function") cb = silent, silent = false
  var dryrun = false
  if (npm.command.match(/^find/)) dryrun = true
  return dedupe_(npm.prefix, args, {}, dryrun, silent, cb)
}
// Core dedupe pass: read the installed tree, identify duplicated package
// names (optionally restricted to `filter`), work out where each dupe
// could live so all dependents share one copy, then hand off to
// findVersions/installAndRetest, which re-runs this until stable.
function dedupe_ (dir, filter, unavoidable, dryrun, silent, cb) {
  readInstalled(path.resolve(dir), {}, null, function (er, data, counter) {
    if (er) {
      return cb(er)
    }

    if (!data) {
      return cb()
    }

    // find out which things are dupes
    var dupes = Object.keys(counter || {}).filter(function (k) {
      if (filter.length && -1 === filter.indexOf(k)) return false
      return counter[k] > 1 && !unavoidable[k]
    }).reduce(function (s, k) {
      s[k] = []
      return s
    }, {})

    // any that are unavoidable need to remain as they are.  don't even
    // try to touch them or figure it out.  Maybe some day, we can do
    // something a bit more clever here, but for now, just skip over it,
    // and all its children.
    ;(function U (obj) {
      if (unavoidable[obj.name]) {
        obj.unavoidable = true
      }
      if (obj.parent && obj.parent.unavoidable) {
        obj.unavoidable = true
      }
      Object.keys(obj.children).forEach(function (k) {
        U(obj.children[k])
      })
    })(data)

    // then collect them up and figure out who needs them
    ;(function C (obj) {
      if (dupes[obj.name] && !obj.unavoidable) {
        dupes[obj.name].push(obj)
        obj.duplicate = true
      }
      obj.dependents = whoDepends(obj)
      Object.keys(obj.children).forEach(function (k) {
        C(obj.children[k])
      })
    })(data)

    if (dryrun) {
      var k = Object.keys(dupes)
      if (!k.length) return cb()
      return npm.commands.ls(k, silent, cb)
    }

    // [name, [[path, version, dependents], ...]] for each movable dupe
    var summary = Object.keys(dupes).map(function (n) {
      return [n, dupes[n].filter(function (d) {
        return d && d.parent && !d.parent.duplicate && !d.unavoidable
      }).map(function M (d) {
        return [d.path, d.version, d.dependents.map(function (k) {
          return [k.path, k.version, k.dependencies[d.name] || ""]
        })]
      })]
    }).map(function (item) {
      var set = item[1]

      // the distinct semver ranges the dependents ask for
      var ranges = set.map(function (i) {
        return i[2].map(function (d) {
          return d[2]
        })
      }).reduce(function (l, r) {
        return l.concat(r)
      }, []).map(function (v, i, set) {
        if (set.indexOf(v) !== i) return false
        return v
      }).filter(function (v) {
        return v !== false
      })

      var locs = set.map(function (i) {
        return i[0]
      })

      var versions = set.map(function (i) {
        return i[1]
      }).filter(function (v, i, set) {
        return set.indexOf(v) === i
      })

      var has = set.map(function (i) {
        return [i[0], i[1]]
      }).reduce(function (set, kv) {
        set[kv[0]] = kv[1]
        return set
      }, {})

      // deepest common node_modules ancestor of all the copies' folders
      var loc = locs.length ? locs.reduce(function (a, b) {
        // a=/path/to/node_modules/foo/node_modules/bar
        // b=/path/to/node_modules/elk/node_modules/bar
        // ==/path/to/node_modules/bar
        var nmReg = new RegExp("\\" + path.sep + "node_modules\\" + path.sep)
        a = a.split(nmReg)
        b = b.split(nmReg)
        var name = a.pop()
        b.pop()
        // find the longest chain that both A and B share.
        // then push the name back on it, and join by /node_modules/
        for (var i = 0, al = a.length, bl = b.length; i < al && i < bl && a[i] === b[i]; i++);
        return a.slice(0, i).concat(name).join(path.sep + "node_modules" + path.sep)
      }) : undefined

      return [item[0], { item: item
                       , ranges: ranges
                       , locs: locs
                       , loc: loc
                       , has: has
                       , versions: versions
                       }]
    }).filter(function (i) {
      return i[1].loc
    })

    findVersions(npm, summary, function (er, set) {
      if (er) return cb(er)
      if (!set.length) return cb()
      installAndRetest(set, filter, dir, unavoidable, silent, cb)
    })
  })
}
// Apply the dedupe plan: for each dupe either keep an existing in-place
// copy, install the best registry match at the shared ancestor, or mark
// it unavoidable; then remove the now-extraneous copies and re-run
// dedupe_ to pick up any newly-exposed dupes.
function installAndRetest (set, filter, dir, unavoidable, silent, cb) {
  //return cb(null, set)
  var remove = []

  asyncMap(set, function (item, cb) {
    // [name, has, loc, locMatch, regMatch, others]
    var name = item[0]
    var has = item[1]
    var where = item[2]
    var locMatch = item[3]
    var regMatch = item[4]
    var others = item[5]

    // nothing to be done here.  oh well.  just a conflict.
    if (!locMatch && !regMatch) {
      log.warn("unavoidable conflict", item[0], item[1])
      log.warn("unavoidable conflict", "Not de-duplicating")
      unavoidable[item[0]] = true
      return cb()
    }

    // nothing to do except to clean up the extraneous deps
    if (locMatch && has[where] === locMatch) {
      remove.push.apply(remove, others)
      return cb()
    }

    if (regMatch) {
      var what = name + "@" + regMatch
      // where is /path/to/node_modules/foo/node_modules/bar
      // for package "bar", but we need it to be just
      // /path/to/node_modules/foo
      var nmReg = new RegExp("\\" + path.sep + "node_modules\\" + path.sep)
      where = where.split(nmReg)
      where.pop()
      where = where.join(path.sep + "node_modules" + path.sep)
      remove.push.apply(remove, others)

      return npm.commands.install(where, what, cb)
    }

    // hrm?
    return cb(new Error("danger zone\n" + name + " " +
                        regMatch + " " + locMatch))

  }, function (er) {
    if (er) return cb(er)
    // delete the superseded copies, log what went away, then retest
    asyncMap(remove, rm, function (er) {
      if (er) return cb(er)
      remove.forEach(function (r) {
        log.info("rm", r)
      })
      dedupe_(dir, filter, unavoidable, false, silent, cb)
    })
  })
}
// For each dedupe candidate, decide which version to use: prefer an
// already-installed version that satisfies every dependent's range,
// otherwise the configured dist-tag or best registry version that does.
function findVersions (npm, summary, cb) {
  // now, for each item in the summary, try to find the maximum version
  // that will satisfy all the ranges.  next step is to install it at
  // the specified location.
  asyncMap(summary, function (item, cb) {
    var name = item[0]
    var data = item[1]
    var loc = data.loc
    var locs = data.locs.filter(function (l) {
      return l !== loc
    })

    // not actually a dupe, or perhaps all the other copies were
    // children of a dupe, so this'll maybe be picked up later.
    if (locs.length === 0) {
      return cb(null, [])
    }

    // { <folder>: <version> }
    var has = data.has

    // the versions that we already have.
    // if one of these is ok, then prefer to use that.
    // otherwise, try fetching from the registry.
    var versions = data.versions
    var ranges = data.ranges

    mapToRegistry(name, npm.config, function (er, uri, auth) {
      if (er) return cb(er)

      npm.registry.get(uri, { auth : auth }, next)
    })

    function next (er, data) {
      // NOTE(review): when the registry fetch errored, `data` may be
      // undefined, yet the dist-tags lookup below still dereferences it —
      // verify the registry client always passes an object here.
      var regVersions = er ? [] : Object.keys(data.versions)
      var locMatch = bestMatch(versions, ranges)
      var tag = npm.config.get("tag")
      var distTag = data["dist-tags"] && data["dist-tags"][tag]

      var regMatch
      // prefer the dist-tagged version when it satisfies every range
      if (distTag && data.versions[distTag] && matches(distTag, ranges)) {
        regMatch = distTag
      } else {
        regMatch = bestMatch(regVersions, ranges)
      }

      cb(null, [[name, has, loc, locMatch, regMatch, locs]])
    }
  }, cb)
}
// True when `version` satisfies every range in `ranges` (loose parsing).
function matches (version, ranges) {
  return ranges.every(function (range) {
    return semver.satisfies(version, range, true)
  })
}

// Highest version (loose semver ordering) satisfying all `ranges`, or
// undefined when none of `versions` qualifies.
function bestMatch (versions, ranges) {
  var candidates = versions.filter(function (v) {
    return matches(v, ranges)
  })
  return candidates.sort(semver.compareLoose).pop()
}
// Recursively read dir's package.json and node_modules children, building
// the in-memory tree dedupe operates on and counting how many times each
// package name appears.  Three async reads (realpath, package.json, and
// the node_modules listing) run concurrently; next() only proceeds once
// all three have landed.
function readInstalled (dir, counter, parent, cb) {
  var pkg, children, realpath

  fs.realpath(dir, function (er, rp) {
    realpath = rp
    next()
  })

  readJson(path.resolve(dir, "package.json"), function (er, data) {
    if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)
    if (er) return cb() // not a package, probably.
    counter[data.name] = counter[data.name] || 0
    counter[data.name]++
    pkg =
      { _id: data._id
      , name: data.name
      , version: data.version
      , dependencies: data.dependencies || {}
      , optionalDependencies: data.optionalDependencies || {}
      , devDependencies: data.devDependencies || {}
      , bundledDependencies: data.bundledDependencies || []
      , path: dir
      , realPath: dir
      , children: {}
      , parent: parent
      // family prototype-chains up through ancestors, so lookups see the
      // nearest resolved copy of a name
      , family: Object.create(parent ? parent.family : null)
      , unavoidable: false
      , duplicate: false
      }
    if (parent) {
      parent.children[data.name] = pkg
      parent.family[data.name] = pkg
    }
    next()
  })

  fs.readdir(path.resolve(dir, "node_modules"), function (er, c) {
    children = children || [] // error is ok, just means no children.

    // check if there are scoped packages.
    asyncMap(c || [], function (child, cb) {
      if (child.indexOf('@') === 0) {
        // @scope dirs are one level of nesting deeper
        fs.readdir(path.resolve(dir, "node_modules", child), function (er, scopedChildren) {
          // error is ok, just means no children.
          (scopedChildren || []).forEach(function (sc) {
            children.push(path.join(child, sc))
          })
          cb()
        })
      } else {
        children.push(child)
        cb()
      }
    }, function (er) {
      if (er) return cb(er)
      // skip dot-dirs and the .bin/_cache style special folders
      children = children.filter(function (p) {
        return !p.match(/^[\._-]/)
      })
      next();
    });
  })

  function next () {
    if (!children || !pkg || !realpath) return

    // ignore devDependencies.  Just leave them where they are.
    children = children.filter(function (c) {
      return !pkg.devDependencies.hasOwnProperty(c)
    })

    pkg.realPath = realpath
    // don't recurse into symlinked packages
    if (pkg.realPath !== pkg.path) children = []
    var d = path.resolve(dir, "node_modules")
    asyncMap(children, function (child, cb) {
      readInstalled(path.resolve(d, child), counter, pkg, cb)
    }, function (er) {
      cb(er, pkg, counter)
    })
  }
}
// Collect every package in the tree (starting from pkg's parent, or pkg
// itself at the root) that depends on `pkg` and whose family entry
// resolves to this exact copy.
function whoDepends (pkg) {
  var root = pkg.parent || pkg
  return whoDepends_(pkg, [], root)
}

// Depth-first walk accumulating dependents of `pkg` into `who`.  A node
// counts when it is not pkg itself, declares a dependency on pkg's name,
// and its resolved family entry is this very object.
function whoDepends_ (pkg, who, node) {
  var isDependent = node !== pkg &&
    Boolean(node.dependencies[pkg.name]) &&
    node.family[pkg.name] === pkg
  if (isDependent) who.push(node)

  Object.keys(node.children).forEach(function (childName) {
    whoDepends_(pkg, who, node.children[childName])
  })
  return who
}

54
node_modules/npm/lib/deprecate.js generated vendored Normal file
View File

@@ -0,0 +1,54 @@
var npm = require("./npm.js")
, mapToRegistry = require("./utils/map-to-registry.js")
, npa = require("npm-package-arg")
module.exports = deprecate
deprecate.usage = "npm deprecate <pkg>[@<version>] <message>"
// Shell completion for `npm deprecate`: offer the packages owned by the
// logged-in user; no completion once a package has been chosen.
deprecate.completion = function (opts, cb) {
  // first, get a list of remote packages this user owns.
  // once we have a user account, then don't complete anything.
  if (opts.conf.argv.remain.length > 2) return cb()
  // get the list of packages by user
  var path = "/-/by-user/"
  mapToRegistry(path, npm.config, function (er, uri, c) {
    if (er) return cb(er)
    // no saved credentials means no owned packages to offer
    if (!(c && c.username)) return cb()

    var params = {
      timeout : 60000,
      auth : c
    }
    npm.registry.get(uri + c.username, params, function (er, list) {
      // completion is best-effort: swallow lookup failures
      if (er) return cb()
      return cb(null, list[c.username])
    })
  })
}
// Set the deprecation message for a package version range on the
// registry (an empty-string message is still a valid argument; only a
// missing message is a usage error).
function deprecate (args, cb) {
  var pkg = args[0]
    , msg = args[1]
  if (msg === undefined) return cb("Usage: " + deprecate.usage)

  // fetch the data and make sure it exists.
  var p = npa(pkg)

  // npa makes the default spec "latest", but for deprecation
  // "*" is the appropriate default.
  if (p.rawSpec === '') p.spec = '*'

  mapToRegistry(p.name, npm.config, function (er, uri, auth) {
    if (er) return cb(er)

    var params = {
      version : p.spec,
      message : msg,
      auth : auth
    }
    npm.registry.deprecate(uri, params, cb)
  })
}

151
node_modules/npm/lib/dist-tag.js generated vendored Normal file
View File

@@ -0,0 +1,151 @@
module.exports = distTag
var log = require("npmlog")
var npa = require("npm-package-arg")
var semver = require("semver")
var npm = require("./npm.js")
var mapToRegistry = require("./utils/map-to-registry.js")
var readLocalPkg = require("./utils/read-local-package.js")
distTag.usage = "npm dist-tag add <pkg>@<version> [<tag>]"
+ "\nnpm dist-tag rm <pkg> <tag>"
+ "\nnpm dist-tag ls [<pkg>]"
// Shell completion: offer the subcommand names; nothing deeper.
distTag.completion = function (opts, cb) {
  var argv = opts.conf.argv.remain
  if (argv.length === 2) {
    return cb(null, ["add", "rm", "ls"])
  }

  switch (argv[2]) {
    default:
      return cb()
  }
}
// Dispatch to the add/rm/ls subcommands (with their historical aliases).
function distTag (args, cb) {
  var cmd = args.shift()
  switch (cmd) {
    case "add": case "a": case "set": case "s":
      return add(args[0], args[1], cb)
    case "rm": case "r": case "del": case "d": case "remove":
      // note: remove() takes (tag, pkg) while the CLI order is (pkg, tag)
      return remove(args[1], args[0], cb)
    case "ls": case "l": case "sl": case "list":
      return list(args[0], cb)
    default:
      return cb("Usage:\n"+distTag.usage)
  }
}
// Add (or move) dist-tag `t` so it points at pkg@version on the registry.
// The tag name must not parse as a semver range, since tags and ranges
// share a namespace in dependency specifiers.
function add (spec, tag, cb) {
  var thing = npa(spec || "")
  var pkg = thing.name
  var version = thing.rawSpec
  // fall back to the configured default tag (normally "latest")
  var t = (tag || npm.config.get("tag")).trim()

  log.verbose("dist-tag add", t, "to", pkg+"@"+version)

  if (!pkg || !version || !t) return cb("Usage:\n"+distTag.usage)

  if (semver.validRange(t)) {
    var er = new Error("Tag name must not be a valid SemVer range: " + t)
    return cb(er)
  }

  fetchTags(pkg, function (er, tags) {
    if (er) return cb(er)

    if (tags[t] === version) {
      log.warn("dist-tag add", t, "is already set to version", version)
      return cb()
    }
    tags[t] = version

    mapToRegistry(pkg, npm.config, function (er, uri, auth, base) {
      // bug fix: this error was previously unchecked, so a registry
      // mapping failure fell through with undefined auth/base
      if (er) return cb(er)

      var params = {
        package : pkg,
        distTag : t,
        version : version,
        auth : auth
      }
      npm.registry.distTags.add(base, params, function (er) {
        if (er) return cb(er)

        console.log("+"+t+": "+pkg+"@"+version)
        cb()
      })
    })
  })
}
// Remove dist-tag `tag` from `pkg` on the registry; errors when the tag
// does not exist.
function remove (tag, pkg, cb) {
  log.verbose("dist-tag del", tag, "from", pkg)

  fetchTags(pkg, function (er, tags) {
    if (er) return cb(er)

    if (!tags[tag]) {
      log.info("dist-tag del", tag, "is not a dist-tag on", pkg)
      return cb(new Error(tag+" is not a dist-tag on "+pkg))
    }

    var version = tags[tag]
    delete tags[tag]

    mapToRegistry(pkg, npm.config, function (er, uri, auth, base) {
      // bug fix: this error was previously unchecked, so a registry
      // mapping failure fell through with undefined auth/base
      if (er) return cb(er)

      var params = {
        package : pkg,
        distTag : tag,
        auth : auth
      }
      npm.registry.distTags.rm(base, params, function (er) {
        if (er) return cb(er)

        console.log("-"+tag+": "+pkg+"@"+version)
        cb()
      })
    })
  })
}
// Print sorted "tag: version" lines for pkg, defaulting to the package
// in the current working directory when no name is given.
function list (pkg, cb) {
  if (!pkg) return readLocalPkg(function (er, pkg) {
    if (er) return cb(er)
    if (!pkg) return cb(distTag.usage)
    list(pkg, cb)
  })

  fetchTags(pkg, function (er, tags) {
    if (er) {
      log.error("dist-tag ls", "Couldn't get dist-tag data for", pkg)
      return cb(er)
    }
    var msg = Object.keys(tags).map(function (k) {
      return k+": "+tags[k]
    }).sort().join("\n")
    console.log(msg)
    cb(er, tags)
  })
}
// Fetch the dist-tags object for pkg from the registry; an empty or
// missing result is treated as an error.
function fetchTags (pkg, cb) {
  mapToRegistry(pkg, npm.config, function (er, uri, auth, base) {
    if (er) return cb(er)

    var params = {
      package : pkg,
      auth : auth
    }
    npm.registry.distTags.fetch(base, params, function (er, tags) {
      if (er) return cb(er)
      if (!tags || !Object.keys(tags).length) {
        return cb(new Error("No dist-tags found for " + pkg))
      }

      cb(null, tags)
    })
  })
}

71
node_modules/npm/lib/docs.js generated vendored Normal file
View File

@@ -0,0 +1,71 @@
module.exports = docs
docs.usage = "npm docs <pkgname>"
docs.usage += "\n"
docs.usage += "npm docs ."
var npm = require("./npm.js")
, opener = require("opener")
, path = require("path")
, log = require("npmlog")
, mapToRegistry = require("./utils/map-to-registry.js")
docs.completion = function (opts, cb) {
// FIXME: there used to be registry completion here, but it stopped making
// sense somewhere around 50,000 packages on the registry
cb()
}
// Prefer the package's own homepage; otherwise fall back to its npm page.
function url (json) {
  if (json.homepage) return json.homepage
  return "https://npmjs.org/package/" + json.name
}
// Open the docs page for each named package in parallel (or the local
// package when no args are given); the first error aborts the rest.
function docs (args, cb) {
  args = args || []
  var pending = args.length
  if (!pending) return getDoc(".", cb)
  args.forEach(function(proj) {
    getDoc(proj, function(err) {
      if (err) {
        return cb(err)
      }
      // only call back once every lookup has finished
      --pending || cb()
    })
  })
}
// Open the documentation URL for one project: the local package.json's
// homepage for ".", otherwise the registry's "latest" metadata, falling
// back to a guessed GitHub URL for user/repo shorthands.
function getDoc (project, cb) {
  project = project || "."
  // renamed from "package": that identifier is reserved in strict mode
  // and breaks when this file is loaded as strict code
  var packagePath = path.resolve(npm.localPrefix, "package.json")

  if (project === "." || project === "./") {
    var json
    try {
      json = require(packagePath)
      if (!json.name) throw new Error('package.json does not have a valid "name" property')
      project = json.name
    } catch (e) {
      log.error(e.message)
      return cb(docs.usage)
    }
    return opener(url(json), { command: npm.config.get("browser") }, cb)
  }

  mapToRegistry(project, npm.config, function (er, uri, auth) {
    if (er) return cb(er)

    npm.registry.get(uri + "/latest", { timeout : 3600, auth : auth }, next)
  })

  function next (er, json) {
    var github = "https://github.com/" + project + "#readme"

    if (er) {
      // "user/repo" shorthand: guess the GitHub readme instead of failing
      if (project.split("/").length !== 2) return cb(er)
      return opener(github, { command: npm.config.get("browser") }, cb)
    }

    return opener(url(json), { command: npm.config.get("browser") }, cb)
  }
}

31
node_modules/npm/lib/edit.js generated vendored Normal file
View File

@@ -0,0 +1,31 @@
// npm edit <pkg>[@<version>]
// open the package folder in the $EDITOR
module.exports = edit
edit.usage = "npm edit <pkg>"
edit.completion = require("./utils/completion/installed-shallow.js")
var npm = require("./npm.js")
, path = require("path")
, fs = require("graceful-fs")
, editor = require("editor")
// Open an installed dependency's folder in the configured editor, then
// rebuild it in case the user changed something that needs a recompile.
function edit (args, cb) {
  var p = args[0]
  if (args.length !== 1 || !p) return cb(edit.usage)
  var e = npm.config.get("editor")
  if (!e) return cb(new Error(
    "No editor set. Set the 'editor' config, or $EDITOR environ."))

  // turn "a/b" into "a/node_modules/b" so nested paths resolve inside
  // the node_modules tree (collapsing any doubled-up segments)
  p = p.split("/")
       .join("/node_modules/")
       .replace(/(\/node_modules)+/, "/node_modules")
  var f = path.resolve(npm.dir, p)
  fs.lstat(f, function (er) {
    if (er) return cb(er)
    editor(f, { editor: e }, function (er) {
      if (er) return cb(er)
      npm.commands.rebuild(args, cb)
    })
  })
}

37
node_modules/npm/lib/explore.js generated vendored Normal file
View File

@@ -0,0 +1,37 @@
// npm explore <pkg>[@<version>]
// open a subshell to the package folder.
module.exports = explore
explore.usage = "npm explore <pkg> [ -- <cmd>]"
explore.completion = require("./utils/completion/installed-shallow.js")
var npm = require("./npm.js")
, spawn = require("./utils/spawn")
, path = require("path")
, fs = require("graceful-fs")
// Spawn the configured shell inside a package's install folder:
// interactively when no trailing command is given, otherwise run the
// command via `<shell> -c <cmd>` and report its exit status.
function explore (args, cb) {
  if (args.length < 1 || !args[0]) return cb(explore.usage)
  var p = args.shift()
  // Everything after the package name becomes a single shell command.
  args = args.join(" ").trim()
  if (args) args = ["-c", args]
  else args = []
  var cwd = path.resolve(npm.dir, p)
  var sh = npm.config.get("shell")
  fs.stat(cwd, function (er, s) {
    if (er || !s.isDirectory()) return cb(new Error(
      "It doesn't look like "+p+" is installed."))
    if (!args.length) console.log(
      "\nExploring "+cwd+"\n"+
      "Type 'exit' or ^D when finished\n")
    npm.spinner.stop()
    var shell = spawn(sh, args, { cwd: cwd, stdio: "inherit" })
    shell.on("close", function (er) {
      // only fail if non-interactive.
      if (!args.length) return cb()
      cb(er)
    })
  })
}

8
node_modules/npm/lib/faq.js generated vendored Normal file
View File

@@ -0,0 +1,8 @@
module.exports = faq
faq.usage = "npm faq"
var npm = require("./npm.js")
// `npm faq` is sugar for `npm help faq`.
function faq (args, cb) {
  npm.commands.help(["faq"], cb)
}

12
node_modules/npm/lib/get.js generated vendored Normal file
View File

@@ -0,0 +1,12 @@
module.exports = get
get.usage = "npm get <key> <value> (See `npm config`)"
var npm = require("./npm.js")
get.completion = npm.commands.config.completion
// `npm get <key>` is sugar for `npm config get <key>`.
function get (args, cb) {
  var configArgs = ["get"].concat(args)
  npm.commands.config(configArgs, cb)
}

210
node_modules/npm/lib/help-search.js generated vendored Normal file
View File

@@ -0,0 +1,210 @@
module.exports = helpSearch
var fs = require("graceful-fs")
, path = require("path")
, asyncMap = require("slide").asyncMap
, npm = require("./npm.js")
, glob = require("glob")
, color = require("ansicolors")
helpSearch.usage = "npm help-search <text>"
// Search the shipped markdown docs for the given terms and print the
// best matches. Supports the two-argument form (args, cb).
function helpSearch (args, silent, cb) {
  if (typeof cb !== "function") cb = silent, silent = false
  if (!args.length) return cb(helpSearch.usage)
  var docPath = path.resolve(__dirname, "..", "doc")
  // Pipeline: find docs -> read them -> score matches -> format output.
  return glob(docPath + "/*/*.md", function (er, files) {
    if (er) return cb(er)
    readFiles(files, function (er, data) {
      if (er) return cb(er)
      searchFiles(args, data, function (er, results) {
        if (er) return cb(er)
        formatResults(args, results, cb)
      })
    })
  })
}
// Read every file into a map of path -> utf8 contents, then hand the
// map (and the first error, if any) to the callback.
function readFiles (files, cb) {
  var contents = {}
  asyncMap(files, function (file, done) {
    fs.readFile(file, "utf8", function (er, data) {
      contents[file] = data
      done(er)
    })
  }, function (er) {
    cb(er, contents)
  })
}
// Scan the doc contents for the search terms. For each matching file,
// build a record { file, cmd, lines, found, hits, totalHits } where
// `lines` keeps only matching context (runs of non-matching lines are
// collapsed to a single null), `hits` maps term -> occurrence count,
// and `cmd` is the help invocation that opens the doc. With exactly one
// match, jump straight into `npm help`; with none, print a message;
// otherwise sort best-first and pass the results to the callback.
function searchFiles (args, files, cb) {
  var results = []
  Object.keys(files).forEach(function (file) {
    var data = files[file]
    // skip if no matches at all
    var match
    for (var a = 0, l = args.length; a < l && !match; a++) {
      match = data.toLowerCase().indexOf(args[a].toLowerCase()) !== -1
    }
    if (!match)
      return
    var lines = data.split(/\n+/)
    // if a line has a search term, then skip it and the next line.
    // if the next line has a search term, then skip all 3
    // otherwise, set the line to null.  then remove the nulls.
    l = lines.length
    for (var i = 0; i < l; i ++) {
      var line = lines[i]
        , nextLine = lines[i + 1]
        , ll
      match = false
      if (nextLine) {
        for (a = 0, ll = args.length; a < ll && !match; a ++) {
          match = nextLine.toLowerCase()
                  .indexOf(args[a].toLowerCase()) !== -1
        }
        if (match) {
          // skip over the next line, and the line after it.
          // NOTE: `i` is mutated inside the loop body on purpose — the
          // skipped lines are kept as matching context.
          i += 2
          continue
        }
      }
      match = false
      for (a = 0, ll = args.length; a < ll && !match; a ++) {
        match = line.toLowerCase().indexOf(args[a].toLowerCase()) !== -1
      }
      if (match) {
        // skip over the next line
        i ++
        continue
      }
      lines[i] = null
    }
    // now squish any string of nulls into a single null
    lines = lines.reduce(function (l, r) {
      if (!(r === null && l[l.length-1] === null)) l.push(r)
      return l
    }, [])
    if (lines[lines.length - 1] === null) lines.pop()
    if (lines[0] === null) lines.shift()
    // now see how many args were found at all.
    var found = {}
      , totalHits = 0
    lines.forEach(function (line) {
      args.forEach(function (arg) {
        // count occurrences by splitting on the (lowercased) term
        var hit = (line || "").toLowerCase()
                  .split(arg.toLowerCase()).length - 1
        if (hit > 0) {
          found[arg] = (found[arg] || 0) + hit
          totalHits += hit
        }
      })
    })
    // docs under doc/api/ are opened with `npm apihelp`, the rest with
    // plain `npm help`.
    var cmd = "npm help "
    if (path.basename(path.dirname(file)) === "api") {
      cmd = "npm apihelp "
    }
    cmd += path.basename(file, ".md").replace(/^npm-/, "")
    results.push({ file: file
                 , cmd: cmd
                 , lines: lines
                 , found: Object.keys(found)
                 , hits: found
                 , totalHits: totalHits
                 })
  })
  // if only one result, then just show that help section.
  if (results.length === 1) {
    return npm.commands.help([results[0].file.replace(/\.md$/, "")], cb)
  }
  if (results.length === 0) {
    console.log("No results for " + args.map(JSON.stringify).join(" "))
    return cb()
  }
  // sort results by number of results found, then by number of hits
  // then by number of matching lines
  results = results.sort(function (a, b) {
    return a.found.length > b.found.length ? -1
         : a.found.length < b.found.length ? 1
         : a.totalHits > b.totalHits ? -1
         : a.totalHits < b.totalHits ? 1
         : a.lines.length > b.lines.length ? -1
         : a.lines.length < b.lines.length ? 1
         : 0
  })
  cb(null, results)
}
// Render search results for the terminal. Short mode prints one
// "<cmd>    term:count …" line per result inside a header/footer rule;
// long mode (--long) additionally prints up to four context lines per
// result with the matched terms highlighted.
function formatResults (args, results, cb) {
  if (!results) return cb(null)
  // clamp to 80 columns; +1 so the Array(n).join padding math works out
  var cols = Math.min(process.stdout.columns || Infinity, 80) + 1
  var out = results.map(function (res) {
    var out = res.cmd
      , r = Object.keys(res.hits).map(function (k) {
          return k + ":" + res.hits[k]
        }).sort(function (a, b) {
          return a > b ? 1 : -1
        }).join(" ")
    // right-align the hit counts by padding with spaces
    out += ((new Array(Math.max(1, cols - out.length - r.length)))
             .join(" ")) + r
    if (!npm.config.get("long")) return out
    out = "\n\n" + out
        + "\n" + (new Array(cols)).join("—") + "\n"
        + res.lines.map(function (line, i) {
            if (line === null || i > 3) return ""
            // NOTE(review): the inner `var out` shadows the outer one,
            // and `out` is not updated between terms — with several
            // search terms, presumably only the last term's pass
            // determines the highlighting; confirm before changing.
            for (var out = line, a = 0, l = args.length; a < l; a ++) {
              var finder = out.toLowerCase().split(args[a].toLowerCase())
                , newOut = ""
                , p = 0
              finder.forEach(function (f) {
                newOut += out.substr(p, f.length)
                var hilit = out.substr(p + f.length, args[a].length)
                if (npm.color) hilit = color.bgBlack(color.red(hilit))
                newOut += hilit
                p += f.length + args[a].length
              })
            }
            return newOut
          }).join("\n").trim()
    return out
  }).join("\n")
  if (results.length && !npm.config.get("long")) {
    out = "Top hits for "+(args.map(JSON.stringify).join(" "))
        + "\n" + (new Array(cols)).join("—") + "\n"
        + out
        + "\n" + (new Array(cols)).join("—") + "\n"
        + "(run with -l or --long to see more context)"
  }
  console.log(out.trim())
  cb(null, results)
}

240
node_modules/npm/lib/help.js generated vendored Normal file
View File

@@ -0,0 +1,240 @@
module.exports = help
// Complete `npm help <topic>`: only the first argument is completed
// (against known man sections); anything beyond that yields nothing.
help.completion = function (opts, done) {
  var remain = opts.conf.argv.remain
  if (remain.length > 2) return done(null, [])
  getSections(done)
}
var path = require("path")
, spawn = require("./utils/spawn")
, npm = require("./npm.js")
, log = require("npmlog")
, opener = require("opener")
, glob = require("glob")
// Dispatch the `npm help` family of behaviors:
//   npm help               -> basic usage screen
//   npm <cmd> -h           -> that command's usage string
//   npm help <n> <topic>   -> open the section-<n> man page for topic
//   npm help a b c         -> full-text search via help-search
//   npm help <topic>       -> find and open the best-matching man page
function help (args, cb) {
  npm.spinner.stop()
  var argv = npm.config.get("argv").cooked
  // a leading numeric arg picks an explicit man section
  var argnum = 0
  if (args.length === 2 && ~~args[0]) {
    argnum = ~~args.shift()
  }
  // npm help foo bar baz: search topics
  if (args.length > 1 && args[0]) {
    return npm.commands["help-search"](args, argnum, cb)
  }
  var section = npm.deref(args[0]) || args[0]
  // npm help <noargs>: show basic usage
  if (!section) {
    var valid = argv[0] === "help" ? 0 : 1
    return npmUsage(valid, cb)
  }
  // npm <cmd> -h: show command usage
  if ( npm.config.get("usage")
      && npm.commands[section]
      && npm.commands[section].usage
     ) {
    npm.config.set("loglevel", "silent")
    log.level = "silent"
    console.log(npm.commands[section].usage)
    return cb()
  }
  // npm apihelp <section>: Prefer section 3 over section 1
  var apihelp = argv.length && -1 !== argv[0].indexOf("api")
  var pref = apihelp ? [3, 1, 5, 7] : [1, 3, 5, 7]
  if (argnum)
    pref = [ argnum ].concat(pref.filter(function (n) {
      return n !== argnum
    }))
  // npm help <section>: Try to find the path
  var manroot = path.resolve(__dirname, "..", "man")
  // legacy
  if (section === "global") section = "folders"
  else if (section === "json") section = "package.json"
  // find either /section.n or /npm-section.n
  // The glob is used in the glob.  The regexp is used much
  // further down.  Globs and regexps are different
  var compextglob = ".+(gz|bz2|lzma|[FYzZ]|xz)"
  var compextre = "\\.(gz|bz2|lzma|[FYzZ]|xz)$"
  var f = "+(npm-" + section + "|" + section + ").[0-9]?(" + compextglob + ")"
  return glob(manroot + "/*/" + f, function (er, mans) {
    if (er) return cb(er)
    if (!mans.length) return npm.commands["help-search"](args, cb)
    // strip any compression extension so pickMan sees "<name>.<n>"
    mans = mans.map(function (man) {
      var ext = path.extname(man)
      if (man.match(new RegExp(compextre))) man = path.basename(man, ext)
      return man
    })
    viewMan(pickMan(mans, pref), cb)
  })
}
// Choose the best man page from a list of candidate paths. Pages whose
// trailing section number appears earlier in `sectionOrder` win; within
// the same section, the lexicographically larger path wins. Sorts the
// candidate array in place (matching historical behavior).
function pickMan (candidates, sectionOrder) {
  var trailingNum = /([0-9]+)$/
  var rank = {}
  for (var i = 0; i < sectionOrder.length; i++) {
    rank[sectionOrder[i]] = i
  }
  candidates.sort(function (a, b) {
    var sa = a.match(trailingNum)[1]
    var sb = b.match(trailingNum)[1]
    if (sa === sb) return a > b ? -1 : 1
    return rank[sa] < rank[sb] ? -1 : 1
  })
  return candidates[0]
}
// Display a man page using the configured viewer: emacs' woman mode,
// the browser (pre-rendered HTML), or plain man(1). The child process
// inherits stdio and a copy of the environment with MANPATH pointed at
// npm's bundled man tree.
function viewMan (man, cb) {
  var nre = /([0-9]+)$/
  var num = man.match(nre)[1]
  var section = path.basename(man, "." + num)
  // at this point, we know that the specified man page exists
  var manpath = path.join(__dirname, "..", "man")
    , env = {}
  // shallow-copy process.env so MANPATH doesn't leak into our process
  Object.keys(process.env).forEach(function (i) {
    env[i] = process.env[i]
  })
  env.MANPATH = manpath
  var viewer = npm.config.get("viewer")
  var conf
  switch (viewer) {
    case "woman":
      var a = ["-e", "(woman-find-file \"" + man + "\")"]
      conf = { env: env, stdio: "inherit" }
      var woman = spawn("emacsclient", a, conf)
      woman.on("close", cb)
      break
    case "browser":
      opener(htmlMan(man), { command: npm.config.get("browser") }, cb)
      break
    default:
      conf = { env: env, stdio: "inherit" }
      var manProcess = spawn("man", [num, section], conf)
      manProcess.on("close", cb)
      break
  }
}
// Map a man page path (e.g. ".../npm-ls.1") to the corresponding
// pre-rendered HTML doc shipped with npm. Sections 1/3/5/7 live under
// html/doc/{cli,api,files,misc}; any other section is a bug.
function htmlMan (man) {
  var sect = +man.match(/([0-9]+)$/)[1]
  var sectionDirs = { 1: "cli", 3: "api", 5: "files", 7: "misc" }
  var dir = sectionDirs[sect]
  if (!dir) throw new Error("invalid man section: " + sect)
  // swap the numeric extension for "html": "npm-ls.1" -> "npm-ls.html"
  var f = path.basename(man).replace(/([0-9]+)$/, "html")
  return path.resolve(__dirname, "..", "html", "doc", dir, f)
}
// Print the top-level `npm <command>` usage screen and invoke the
// callback with `valid` (0 when the user explicitly asked for help,
// 1 when usage was shown because the invocation was wrong).
function npmUsage (valid, cb) {
  // silence normal logging so only the usage text is printed
  npm.config.set("loglevel", "silent")
  log.level = "silent"
  console.log(
    [ "\nUsage: npm <command>"
    , ""
    , "where <command> is one of:"
    , npm.config.get("long") ? usages()
      : "    " + wrap(Object.keys(npm.commands))
    , ""
    , "npm <cmd> -h     quick help on <cmd>"
    , "npm -l           display full usage info"
    , "npm faq          commonly asked questions"
    , "npm help <term>  search for help on <term>"
    , "npm help npm     involved overview"
    , ""
    , "Specify configs in the ini-formatted file:"
    , "    " + npm.config.get("userconfig")
    , "or on the command line via: npm <command> --key value"
    , "Config info can be viewed via: npm help config"
    , ""
    , "npm@" + npm.version + " " + path.dirname(__dirname)
    ].join("\n"))
  cb(valid)
}
// Render "<cmd>  <usage>" lines for every canonical (non-alias)
// command, with multi-line usage text indented to line up past the
// longest command name.
function usages () {
  var cmds = Object.keys(npm.commands).filter(function (c) {
    return c === npm.deref(c)
  })
  var maxLen = 0
  cmds.forEach(function (c) {
    if (c.length > maxLen) maxLen = c.length
  })
  return cmds.map(function (c) {
    var usage = npm.commands[c].usage || ""
    var pad = new Array(maxLen - c.length + 2).join(" ")
    var contIndent = new Array(maxLen + 6).join(" ")
    return "\n    " + c + pad
         + usage.split("\n").join("\n" + contIndent)
  }).join("\n")
}
// Lay out command names as comma-separated lines wrapped to the
// terminal width, clamped to 24..60 columns (60 when the width is
// unknown). Sorts the input array in place (matching historical
// behavior); continuation lines are indented two spaces.
function wrap (arr) {
  var width = process.stdout.columns
  width = !width ? 60 : Math.min(60, Math.max(width - 16, 24))
  var lines = [""]
  var cur = 0
  arr.sort(function (a, b) { return a < b ? -1 : 1 })
  arr.forEach(function (name) {
    if (lines[cur].length + name.length + 2 < width) {
      lines[cur] += ", " + name
    } else {
      lines[cur] += ","
      cur += 1
      lines[cur] = name
    }
  })
  // every line starts with ", " — strip it from the first one
  return lines.join("\n  ").substr(2)
}
// List every known help topic by scanning the shipped man pages,
// stripping the numeric extension and the "npm-" prefix. "help" is
// always included even though it has no man page of its own.
function getSections (cb) {
  var pattern = path.resolve(__dirname, "../man/man[0-9]/*.[0-9]")
  glob(pattern, function (er, files) {
    if (er) return cb(er)
    var sections = { help: true }
    files.forEach(function (file) {
      var name = path.basename(file)
                 .replace(/\.[0-9]+$/, "")
                 .replace(/^npm-/, "")
      sections[name] = true
    })
    cb(null, Object.keys(sections))
  })
}

41
node_modules/npm/lib/init.js generated vendored Normal file
View File

@@ -0,0 +1,41 @@
// initialize a package.json file
module.exports = init
var log = require("npmlog")
, npm = require("./npm.js")
, initJson = require("init-package-json")
init.usage = "npm init [--force/-f]"
// Interactively create a package.json in the current directory by
// delegating to init-package-json (honoring the init-module config).
// A user cancel (^C) is treated as success with no data written.
function init (args, cb) {
  var dir = process.cwd()
  // pause logging and the spinner so they don't interleave with prompts
  log.pause()
  npm.spinner.stop()
  var initFile = npm.config.get("init-module")
  // only print the banner when not running with --yes/--force
  if (!initJson.yes(npm.config)) {
    console.log(
      ["This utility will walk you through creating a package.json file."
      ,"It only covers the most common items, and tries to guess sensible defaults."
      ,""
      ,"See `npm help json` for definitive documentation on these fields"
      ,"and exactly what they do."
      ,""
      ,"Use `npm install <pkg> --save` afterwards to install a package and"
      ,"save it as a dependency in the package.json file."
      ,""
      ,"Press ^C at any time to quit."
      ].join("\n"))
  }
  initJson(dir, initFile, npm.config, function (er, data) {
    log.resume()
    log.silly("package data", data)
    if (er && er.message === "canceled") {
      // user aborted the prompts: warn, but don't fail the command
      log.warn("init", "canceled")
      return cb(null, data)
    }
    log.info("init", "written successfully")
    cb(er, data)
  })
}

1196
node_modules/npm/lib/install.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

170
node_modules/npm/lib/link.js generated vendored Normal file
View File

@@ -0,0 +1,170 @@
// link with no args: symlink the folder to the global location
// link with package arg: symlink the global to the local
var npm = require("./npm.js")
, symlink = require("./utils/link.js")
, fs = require("graceful-fs")
, log = require("npmlog")
, asyncMap = require("slide").asyncMap
, chain = require("slide").chain
, path = require("path")
, build = require("./build.js")
, npa = require("npm-package-arg")
module.exports = link
link.usage = "npm link (in package dir)"
+ "\nnpm link <pkg> (link global into local)"
// Complete `npm link <pkg>` against globally installed packages,
// hiding entries that start with ".", "_", or "-".
link.completion = function (opts, cb) {
  var dir = npm.globalDir
  fs.readdir(dir, function (er, files) {
    // On readdir failure `files` is undefined; report the error rather
    // than crashing on .filter (the original did both in one call).
    if (er) return cb(er)
    cb(null, files.filter(function (f) {
      return !f.match(/^[\._-]/)
    }))
  })
}
// Entry point for `npm link`. With no args (or "."), link the current
// package folder into the global install location; with package names,
// link those global installs into the local node_modules. Refuses to
// run in --global mode, and on old Windows node versions that cannot
// create the required symlinks/junctions.
function link (args, cb) {
  if (process.platform === 'win32') {
    var semver = require('semver')
    if (!semver.gte(process.version, '0.7.9')) {
      var msg = 'npm link not supported on windows prior to node 0.7.9'
      var e = new Error(msg)
      e.code = 'ENOTSUP'
      e.errno = require('constants').ENOTSUP
      return cb(e)
    }
  }
  if (npm.config.get("global")) {
    return cb(new Error("link should never be --global.\n"
                       +"Please re-run this command with --local"))
  }
  // `npm link .` behaves like plain `npm link`
  if (args.length === 1 && args[0] === ".") args = []
  if (args.length) return linkInstall(args, cb)
  linkPkg(npm.prefix, cb)
}
// `npm link <pkg>...`: for each name, make sure a copy exists in the
// global node_modules (installing or linking a local folder first if
// needed), then symlink it into the local node_modules and run build
// (without lifecycle scripts).
function linkInstall (pkgs, cb) {
  asyncMap(pkgs, function (pkg, cb) {
    var t = path.resolve(npm.globalDir, "..")   // global prefix
      , pp = path.resolve(npm.globalDir, pkg)   // global package path
      , rp = null                               // realpath of the source
      , target = path.resolve(npm.dir, pkg)     // local link destination
    // continuation used after a fresh global install
    function n (er, data) {
      if (er) return cb(er, data)
      // install returns [ [folder, pkgId], ... ]
      // but we definitely installed just one thing.
      var d = data.filter(function (d) { return !d[3] })
      var what = npa(d[0][0])
      pp = d[0][1]
      pkg = what.name
      target = path.resolve(npm.dir, pkg)
      next()
    }
    // if it's a folder, a random not-installed thing, or not a scoped package,
    // then link or install it first
    if (pkg[0] !== "@" && (pkg.indexOf("/") !== -1 || pkg.indexOf("\\") !== -1)) {
      return fs.lstat(path.resolve(pkg), function (er, st) {
        if (er || !st.isDirectory()) {
          npm.commands.install(t, pkg, n)
        } else {
          rp = path.resolve(pkg)
          linkPkg(rp, n)
        }
      })
    }
    fs.lstat(pp, function (er, st) {
      if (er) {
        // not installed globally yet: install it there first
        rp = pp
        return npm.commands.install(t, pkg, n)
      } else if (!st.isSymbolicLink()) {
        rp = pp
        next()
      } else {
        // already a symlink: resolve it so output can show the chain
        return fs.realpath(pp, function (er, real) {
          if (er) log.warn("invalid symbolic link", pkg)
          else rp = real
          next()
        })
      }
    })
    function next () {
      chain
        ( [ [function (cb) {
              log.verbose("link", "symlinking %s to %s", pp, target)
              cb()
            }]
          , [symlink, pp, target]
          // do not run any scripts
          , rp && [build, [target], npm.config.get("global"), build._noLC, true]
          , [ resultPrinter, pkg, pp, target, rp ] ]
        , cb )
    }
  }, cb)
}
// `npm link` with no args: symlink the given (or current) package
// folder into the global node_modules, install its missing deps, and
// build the global copy without running lifecycle scripts. The
// callback receives install-shaped data: [[pkgId, target, null, null]].
function linkPkg (folder, cb_) {
  var me = folder || npm.prefix
    , readJson = require("read-package-json")
  log.verbose("linkPkg", folder)
  readJson(path.resolve(me, "package.json"), function (er, d) {
    function cb (er) {
      return cb_(er, [[d && d._id, target, null, null]])
    }
    if (er) return cb(er)
    if (!d.name) {
      er = new Error("Package must have a name field to be linked")
      return cb(er)
    }
    var target = path.resolve(npm.globalDir, d.name)
    symlink(me, target, false, true, function (er) {
      if (er) return cb(er)
      log.verbose("link", "build target", target)
      // also install missing dependencies.
      npm.commands.install(me, [], function (er) {
        if (er) return cb(er)
        // build the global stuff.  Don't run *any* scripts, because
        // install command already will have done that.
        build([target], true, build._noLC, true, function (er) {
          if (er) return cb(er)
          resultPrinter(path.basename(me), me, target, cb)
        })
      })
    })
  })
}
// Print one link result as "dest -> src [-> realpath]". May be called
// with 4 args (pkg, src, dest, cb) — the shift below normalizes that.
// In --parseable mode, defer to parseableOutput instead.
function resultPrinter (pkg, src, dest, rp, cb) {
  if (typeof cb !== "function") cb = rp, rp = null
  var where = dest
  rp = (rp || "").trim()
  src = (src || "").trim()
  // XXX If --json is set, then look up the data from the package.json
  if (npm.config.get("parseable")) {
    return parseableOutput(dest, rp || src, cb)
  }
  // don't repeat the realpath when it's the same as the source
  if (rp === src) rp = null
  console.log(where + " -> " + src + (rp ? " -> " + rp: ""))
  cb()
}
// Emit one `npm link` result in the machine-readable shape used by
// `npm ls --parseable` / `npm install --parseable`: "<dest>::<source>".
// Link output is always effectively "long" (the target folder alone
// isn't useful), but the version field stays blank because the
// package.json version is never read here.
function parseableOutput (dest, rp, cb) {
  var line = dest + "::" + rp
  console.log(line)
  cb()
}

40
node_modules/npm/lib/logout.js generated vendored Normal file
View File

@@ -0,0 +1,40 @@
module.exports = logout
var dezalgo = require("dezalgo")
var log = require("npmlog")
var npm = require("./npm.js")
var mapToRegistry = require("./utils/map-to-registry.js")
logout.usage = "npm logout [--registry] [--scope]"
// Log out of the registry selected by the current config (honoring
// --registry/--scope): revoke and clear a session token, or just clear
// saved username/password credentials, then persist the user config.
function logout (args, cb) {
  npm.spinner.start()
  cb = dezalgo(cb)
  mapToRegistry("/", npm.config, function (err, uri, auth, normalized) {
    if (err) return cb(err)

    if (auth.token) {
      // token sessions are revoked server-side before clearing locally
      log.verbose("logout", "clearing session token for", normalized)
      npm.registry.logout(normalized, { auth: auth }, function (err) {
        if (err) return cb(err)

        npm.config.clearCredentialsByURI(normalized)
        npm.spinner.stop()
        npm.config.save("user", cb)
      })
    } else if (auth.username || auth.password) {
      // basic-auth credentials only need to be removed locally
      log.verbose("logout", "clearing user credentials for", normalized)
      npm.config.clearCredentialsByURI(normalized)
      npm.spinner.stop()
      npm.config.save("user", cb)
    } else {
      // Error() takes a single message string; the original passed the
      // pieces as separate arguments, which were silently dropped and
      // truncated the message to "Not logged in to".
      cb(new Error(
        "Not logged in to " + normalized + ", so can't log out."
      ))
    }
  })
}

404
node_modules/npm/lib/ls.js generated vendored Normal file
View File

@@ -0,0 +1,404 @@
// show the installed versions of packages
//
// --parseable creates output like this:
// <fullpath>:<name@ver>:<realpath>:<flags>
// Flags are a :-separated list of zero or more indicators
module.exports = exports = ls
var npm = require("./npm.js")
, readInstalled = require("read-installed")
, log = require("npmlog")
, path = require("path")
, archy = require("archy")
, semver = require("semver")
, url = require("url")
, color = require("ansicolors")
, npa = require("npm-package-arg")
ls.usage = "npm ls"
ls.completion = require("./utils/completion/installed-deep.js")
// `npm ls [specs...]`: read the installed tree, prune/filter it, and
// print it as JSON (--json), parseable lines (--parseable), or an
// archy tree. Supports the two-argument form (args, cb). When specs
// are given and nothing matches, the process exit code is set to 1;
// tree problems are joined into the error passed to the callback.
function ls (args, silent, cb) {
  if (typeof cb !== "function") cb = silent, silent = false
  var dir = path.resolve(npm.dir, "..")
  // npm ls 'foo@~1.3' bar 'baz@<2'
  // each arg becomes a [name, semver-range] filter pair
  if (!args) args = []
  else args = args.map(function (a) {
    var p = npa(a)
      , name = p.name
      , ver = semver.validRange(p.rawSpec) || ""
    return [ name, ver ]
  })
  var depth = npm.config.get("depth")
  var opt = { depth: depth, log: log.warn, dev: true }
  readInstalled(dir, opt, function (er, data) {
    pruneNestedExtraneous(data)
    filterByEnv(data)
    var bfs = bfsify(data, args)
      , lite = getLite(bfs)
    if (er || silent) return cb(er, data, lite)
    var long = npm.config.get("long")
      , json = npm.config.get("json")
      , out
    if (json) {
      var seen = []
      var d = long ? bfs : lite
      // the raw data can be circular
      out = JSON.stringify(d, function (k, o) {
        if (typeof o === "object") {
          if (-1 !== seen.indexOf(o)) return "[Circular]"
          seen.push(o)
        }
        return o
      }, 2)
    } else if (npm.config.get("parseable")) {
      out = makeParseable(bfs, long, dir)
    } else if (data) {
      out = makeArchy(bfs, long, dir)
    }
    console.log(out)
    if (args.length && !data._found) process.exitCode = 1
    // if any errors were found, then complain and exit status 1
    if (lite.problems && lite.problems.length) {
      er = lite.problems.join("\n")
    }
    cb(er, data, lite)
  })
}
// Truncate the dependency trees of extraneous packages in place:
// everything under an extraneous package is noise, so only the root of
// each extraneous subtree is kept. `visited` guards against cycles in
// the (possibly circular) read-installed data.
function pruneNestedExtraneous (data, visited) {
  if (!visited) visited = []
  visited.push(data)
  var deps = data.dependencies || {}
  Object.keys(deps).forEach(function (name) {
    var dep = deps[name]
    if (dep.extraneous) {
      dep.dependencies = {}
    } else if (visited.indexOf(dep) === -1) {
      pruneNestedExtraneous(dep, visited)
    }
  })
}
// Drop top-level dependencies that don't match the requested view:
// --production hides devDependencies, --dev hides everything else.
// When both (or neither) flags are set there's nothing to filter.
// Mutates data.dependencies in place.
function filterByEnv (data) {
  var dev = npm.config.get("dev")
  var production = npm.config.get("production")
  if (dev === production) return
  var dependencies = {}
  // `|| []` keeps Object.keys safe when there are no devDependencies
  var devDependencies = data.devDependencies || []
  Object.keys(data.dependencies).forEach(function (name) {
    var keys = Object.keys(devDependencies)
    if (production && keys.indexOf(name) !== -1) return
    if (dev && keys.indexOf(name) === -1) return
    dependencies[name] = data.dependencies[name]
  })
  data.dependencies = dependencies
}
// Case-insensitive comparator for sorting dependency names.
function alphasort (a, b) {
  var la = a.toLowerCase()
  var lb = b.toLowerCase()
  if (la > lb) return 1
  if (la < lb) return -1
  return 0
}
// A "cruft" entry is extraneous junk on disk that isn't even a package
// directory — read-installed marks it with an ENOTDIR error.
function isCruft (data) {
  var err = data.error
  return data.extraneous && err && err.code === 'ENOTDIR'
}
// Build the "lite" JSON view of the tree: name/version/from/resolved
// per node, recursed dependencies, and a flattened `problems` list
// (extraneous / invalid / peer-invalid / missing entries). Problems
// found in children are bubbled up into the parent's list. `noname`
// suppresses the name on child nodes; `depth` tracks recursion for the
// --json max-depth cutoff.
function getLite (data, noname, depth) {
  var lite = {}
    , maxDepth = npm.config.get("depth")
  if (typeof depth === 'undefined') depth = 0
  if (!noname && data.name) lite.name = data.name
  if (data.version) lite.version = data.version
  if (data.extraneous) {
    lite.extraneous = true
    lite.problems = lite.problems || []
    lite.problems.push( "extraneous: "
                      + data.name + "@" + data.version
                      + " " + (data.path || "") )
  }
  if (data._from)
    lite.from = data._from
  if (data._resolved)
    lite.resolved = data._resolved
  if (data.invalid) {
    lite.invalid = true
    lite.problems = lite.problems || []
    lite.problems.push( "invalid: "
                      + data.name + "@" + data.version
                      + " " + (data.path || "") )
  }
  if (data.peerInvalid) {
    lite.peerInvalid = true
    lite.problems = lite.problems || []
    lite.problems.push( "peer invalid: "
                      + data.name + "@" + data.version
                      + " " + (data.path || "") )
  }
  if (data.dependencies) {
    var deps = Object.keys(data.dependencies)
    if (deps.length) lite.dependencies = deps.map(function (d) {
      var dep = data.dependencies[d]
      // a string value means the dep wasn't resolved to a real folder:
      // either it's truly missing or we just hit the depth cutoff
      if (typeof dep === "string") {
        lite.problems = lite.problems || []
        var p
        if (data.depth > maxDepth) {
          p = "max depth reached: "
        } else {
          p = "missing: "
        }
        p += d + "@" + dep
          + ", required by "
          + data.name + "@" + data.version
        lite.problems.push(p)
        return [d, { required: dep.requiredBy, missing: true }]
      } else if (dep.peerMissing) {
        lite.problems = lite.problems || []
        dep.peerMissing.forEach(function (missing) {
          var pdm = 'peer dep missing: ' +
              missing.requires +
              ', required by ' +
              missing.requiredBy
          lite.problems.push(pdm)
        })
        return [d, { required: dep, peerMissing: true }]
      } else if (npm.config.get('json')) {
        // --json honors the depth cutoff by dropping deeper children
        if (depth === maxDepth) delete dep.dependencies
        return [d, getLite(dep, true, depth + 1)]
      }
      return [d, getLite(dep, true)]
    }).reduce(function (deps, d) {
      // bubble child problems up and rebuild the dependencies map
      if (d[1].problems) {
        lite.problems = lite.problems || []
        lite.problems.push.apply(lite.problems, d[1].problems)
      }
      deps[d[0]] = d[1]
      return deps
    }, {})
  }
  return lite
}
// Breadth-first de-duplication of the (possibly circular) tree: each
// package is shown at its shallowest position only. In short output a
// repeated node is simply deleted; in --long (non-parseable) output it
// is kept as a childless shadow copy so the label still appears. When
// filter args were given, finish by pruning to matching paths.
function bfsify (root, args, current, queue, seen) {
  // walk over the data, and turn it from this:
  // +-- a
  // |   `-- b
  // |       `-- a (truncated)
  // `--b (truncated)
  // into this:
  // +-- a
  // `-- b
  // which looks nicer
  args = args || []
  current = current || root
  queue = queue || []
  seen = seen || [root]
  var deps = current.dependencies = current.dependencies || {}
  Object.keys(deps).forEach(function (d) {
    var dep = deps[d]
    if (typeof dep !== "object") return
    if (seen.indexOf(dep) !== -1) {
      if (npm.config.get("parseable") || !npm.config.get("long")) {
        delete deps[d]
        return
      } else {
        // keep a label-only copy: same prototype, no children
        dep = deps[d] = Object.create(dep)
        dep.dependencies = {}
      }
    }
    queue.push(dep)
    seen.push(dep)
  })
  if (!queue.length) {
    // if there were args, then only show the paths to found nodes.
    return filterFound(root, args)
  }
  // tail-recurse on the next queued node until the queue drains
  return bfsify(root, args, queue.shift(), queue, seen)
}
// Prune the tree to only the paths leading to packages that match one
// of the [name, semver-range] arg pairs. A directly matching node gets
// _found === true; an ancestor kept only because of a matching child
// gets _found === 1; everything else is deleted. Mutates in place.
function filterFound (root, args) {
  if (!args.length) return root
  var deps = root.dependencies
  if (deps) Object.keys(deps).forEach(function (d) {
    // depth-first so children are decided before their parent
    var dep = filterFound(deps[d], args)
    // see if this one itself matches
    var found = false
    for (var i = 0; !found && i < args.length; i ++) {
      if (d === args[i][0]) {
        found = semver.satisfies(dep.version, args[i][1], true)
      }
    }
    // included explicitly
    if (found) dep._found = true
    // included because a child was included
    if (dep._found && !root._found) root._found = 1
    // not included
    if (!dep._found) delete deps[d]
  })
  if (!root._found) root._found = false
  return root
}
// Render the de-duplicated tree as a human-readable archy diagram,
// using unicode box-drawing characters when the config allows it.
function makeArchy (data, long, dir) {
  var tree = makeArchy_(data, long, dir, 0)
  return archy(tree, "", { unicode: npm.config.get("unicode") })
}
// Build one archy node (label + child nodes) for a tree entry,
// recursing up to the configured depth. A string `data` value is an
// unresolved dep: rendered as "UNMET DEPENDENCY name@spec" inside the
// depth limit, or plain "name@spec" past it. Status markers (invalid,
// peer invalid, extraneous), link targets, git/hosted resolved URLs,
// and --long extras are appended to the label.
function makeArchy_ (data, long, dir, depth, parent, d) {
  if (typeof data === "string") {
    if (depth -1 <= npm.config.get("depth")) {
      // just missing
      var unmet = "UNMET DEPENDENCY"
      if (npm.color) {
        unmet = color.bgBlack(color.red(unmet))
      }
      data = unmet + " " + d + "@" + data
    } else {
      data = d+"@"+ data
    }
    return data
  }
  var out = {}
  // the top level is a bit special.
  out.label = data._id || ""
  // highlight nodes that matched an `npm ls <spec>` filter
  if (data._found === true && data._id) {
    if (npm.color) {
      out.label = color.bgBlack(color.yellow(out.label.trim())) + " "
    } else {
      out.label = out.label.trim() + " "
    }
  }
  if (data.link) out.label += " -> " + data.link
  if (data.invalid) {
    if (data.realName !== data.name) out.label += " ("+data.realName+")"
    var invalid = "invalid"
    if (npm.color) invalid = color.bgBlack(color.red(invalid))
    out.label += " " + invalid
  }
  if (data.peerInvalid) {
    var peerInvalid = "peer invalid"
    if (npm.color) peerInvalid = color.bgBlack(color.red(peerInvalid))
    out.label += " " + peerInvalid
  }
  // the root itself is never marked extraneous
  if (data.extraneous && data.path !== dir) {
    var extraneous = "extraneous"
    if (npm.color) extraneous = color.bgBlack(color.green(extraneous))
    out.label += " " + extraneous
  }
  // add giturl to name@version
  if (data._resolved) {
    var type = npa(data._resolved).type
    var isGit = type === 'git' || type === 'hosted'
    if (isGit) {
      out.label += ' (' + data._resolved + ')'
    }
  }
  if (long) {
    if (dir === data.path) out.label += "\n" + dir
    out.label += "\n" + getExtras(data, dir)
  } else if (dir === data.path) {
    if (out.label) out.label += " "
    out.label += dir
  }
  // now all the children.
  out.nodes = []
  if (depth <= npm.config.get("depth")) {
    out.nodes = Object.keys(data.dependencies || {})
      .sort(alphasort).map(function (d) {
        return makeArchy_(data.dependencies[d], long, dir, depth + 1, data, d)
      })
  }
  if (out.nodes.length === 0 && data.path === dir) {
    out.nodes = ["(empty)"]
  }
  return out
}
// Collect the --long output annotations for a package: description,
// repository url, homepage, and — only when it looks like a real URL —
// the specifier it was installed from (with any leading "name@"
// stripped). Returns them joined with newlines.
function getExtras (data) {
  var lines = []
  if (data.description) lines.push(data.description)
  if (data.repository) lines.push(data.repository.url)
  if (data.homepage) lines.push(data.homepage)
  var from = data._from
  if (from) {
    var prefix = data.name + "@"
    if (from.indexOf(prefix) === 0) from = from.substr(prefix.length)
    // only show _from when it parses with a protocol (i.e. is a URL,
    // not a semver range or tag)
    if (url.parse(from).protocol) lines.push(from)
  }
  return lines.join("\n")
}
// Flatten the tree into one parseable line per package, depth-first
// with children in case-insensitive name order, dropping empty lines.
function makeParseable (data, long, dir, depth, parent, d) {
  if (!depth) depth = 0
  var lines = [makeParseable_(data, long, dir, depth, parent, d)]
  var deps = data.dependencies || {}
  Object.keys(deps).sort(alphasort).forEach(function (name) {
    lines.push(makeParseable(deps[name], long, dir, depth + 1, data, name))
  })
  return lines.filter(function (line) { return line }).join("\n")
}
// Render one node as a parseable line:
//   <path>[:<id>:<realpath>[:EXTRANEOUS][:INVALID][:PEERINVALID]]
// Nodes filtered out by `npm ls <spec>` (hasOwn _found !== true) yield
// "". A string `data` is an unresolved dep: in --long mode it renders
// as MISSING or MAXDEPTH; otherwise as a bare path or "".
function makeParseable_ (data, long, dir, depth, parent, d) {
  // NOTE(review): on string `data` this hasOwnProperty check is
  // harmless (strings have no own _found), so the string branch below
  // is still reached.
  if (data.hasOwnProperty("_found") && data._found !== true) return ""
  if (typeof data === "string") {
    // NOTE(review): `data.depth` on a string is undefined, so the
    // comparison is false and the MAXDEPTH branch runs — presumably the
    // MISSING branch is dead here; confirm before changing.
    if (data.depth < npm.config.get("depth")) {
      data = npm.config.get("long")
           ? path.resolve(parent.path, "node_modules", d)
             + ":"+d+"@"+JSON.stringify(data)+":INVALID:MISSING"
           : ""
    } else {
      data = path.resolve(data.path || "", "node_modules", d || "")
           + (npm.config.get("long")
             ? ":" + d + "@" + JSON.stringify(data)
             + ":" // no realpath resolved
             + ":MAXDEPTH"
             : "")
    }
    return data
  }
  if (!npm.config.get("long")) return data.path
  return data.path
       + ":" + (data._id || "")
       + ":" + (data.realPath !== data.path ? data.realPath : "")
       + (data.extraneous ? ":EXTRANEOUS" : "")
       + (data.invalid ? ":INVALID" : "")
       + (data.peerInvalid ? ":PEERINVALID" : "")
}

475
node_modules/npm/lib/npm.js generated vendored Normal file
View File

@@ -0,0 +1,475 @@
;(function(){
// windows: running "npm blah" in this folder will invoke WSH, not node.
if (typeof WScript !== "undefined") {
WScript.echo("npm does not work when run\n"
+"with the Windows Scripting Host\n\n"
+"'cd' to a different directory,\n"
+"or type 'npm.cmd <args>',\n"
+"or type 'node npm <args>'.")
WScript.quit(1)
return
}
var EventEmitter = require("events").EventEmitter
, npm = module.exports = new EventEmitter()
, npmconf = require("./config/core.js")
, log = require("npmlog")
, gfs = require('graceful-fs')
, fs = gfs.gracefulify(require('fs'))
, path = require("path")
, abbrev = require("abbrev")
, which = require("which")
, CachingRegClient = require("./cache/caching-client.js")
, charSpin = require("char-spinner")
npm.config = {
loaded: false,
get: function() {
throw new Error('npm.load() required')
},
set: function() {
throw new Error('npm.load() required')
}
}
npm.commands = {}
npm.rollbacks = []
try {
// startup, ok to do this synchronously
var j = JSON.parse(fs.readFileSync(
path.join(__dirname, "../package.json"))+"")
npm.version = j.version
} catch (ex) {
try {
log.info("error reading version", ex)
} catch (er) {}
npm.version = ex
}
// Lazily-populated cache of required command implementations, keyed by
// canonical command name.
var commandCache = {}

// short names for common things
var aliases = {
  "rm": "uninstall",
  "r": "uninstall",
  "un": "uninstall",
  "unlink": "uninstall",
  "remove": "uninstall",
  "rb": "rebuild",
  "list": "ls",
  "la": "ls",
  "ll": "ls",
  "ln": "link",
  "i": "install",
  "isntall": "install",
  "up": "update",
  "upgrade": "update",
  "c": "config",
  "dist-tags": "dist-tag",
  "info": "view",
  "show": "view",
  "find": "search",
  "s": "search",
  "se": "search",
  "author": "owner",
  "home": "docs",
  "issues": "bugs",
  "unstar": "star", // same function
  "apihelp": "help",
  "login": "adduser",
  "add-user": "adduser",
  "tst": "test",
  "t": "test",
  "find-dupes": "dedupe",
  "ddp": "dedupe",
  "v": "view",
  "verison": "version"
}

var aliasNames = Object.keys(aliases)

// these are filenames in .
var cmdList = [
  "install", "uninstall", "cache", "config", "set", "get", "update",
  "outdated", "prune", "pack", "dedupe", "rebuild", "link", "publish",
  "star", "stars", "tag", "adduser", "logout", "unpublish", "owner",
  "access", "team", "deprecate", "shrinkwrap", "help", "help-search",
  "ls", "search", "view", "init", "version", "edit", "explore", "docs",
  "repo", "bugs", "faq", "root", "prefix", "bin", "whoami", "dist-tag",
  "ping", "test", "stop", "start", "restart", "run-script", "completion"
]

// Real commands that are hidden from listings and completion.
var plumbing = ["build", "unbuild", "xmas", "substack", "visnup"]

// Misspellings that work, but should not be advertised.
var littleGuys = ["isntall", "verison"]

var fullList = cmdList.concat(aliasNames).filter(function (name) {
  return plumbing.indexOf(name) === -1
})

// NOTE: abbreviations are computed before the misspellings are removed,
// so typo'd prefixes still resolve.
var abbrevs = abbrev(fullList)

// we have our reasons
fullList = npm.fullList = fullList.filter(function (name) {
  return littleGuys.indexOf(name) === -1
})
// Terminal spinner shown while long-running work is in progress.
// Controlled by the "spin" config; "always" forces spinning even when
// the log stream is not a tty.
npm.spinner =
  { int: null
  , started: false
  , start: function () {
      if (npm.spinner.int) return
      var c = npm.config.get("spin")
      if (!c) return
      var stream = npm.config.get("logstream")
      var opt = { tty: c !== "always", stream: stream }
      // only install cleanup handlers the first time the spinner starts
      opt.cleanup = !npm.spinner.started
      npm.spinner.int = charSpin(opt)
      npm.spinner.started = true
    }
  , stop: function () {
      clearInterval(npm.spinner.int)
      npm.spinner.int = null
    }
  }
// Define a lazy getter on npm.commands for every command name, including
// abbreviations and plumbing.  The implementation module is require()d
// only on first use; the wrapper normalizes args and fills in defaultCb.
Object.keys(abbrevs).concat(plumbing).forEach(function addCommand (c) {
  Object.defineProperty(npm.commands, c, { get : function () {
    if (!loaded) throw new Error(
      "Call npm.load(config, cb) before using this command.\n"+
      "See the README.md or cli.js for example usage.")
    var a = npm.deref(c)
    if (c === "la" || c === "ll") {
      npm.config.set("long", true)
    }
    npm.command = c
    if (commandCache[a]) return commandCache[a]
    var cmd = require(__dirname+"/"+a+".js")
    commandCache[a] = function () {
      var args = Array.prototype.slice.call(arguments, 0)
      if (typeof args[args.length - 1] !== "function") {
        args.push(defaultCb)
      }
      if (args.length === 1) args.unshift([])
      npm.registry.version = npm.version
      // set the registry referer header once, from the first command run
      if (!npm.registry.refer) {
        npm.registry.refer = [a].concat(args[0]).map(function (arg) {
          // exclude anything that might be a URL, path, or private module
          // Those things will always have a slash in them somewhere
          if (arg && arg.match && arg.match(/\/|\\/)) {
            return "[REDACTED]"
          } else {
            return arg
          }
        }).filter(function (arg) {
          return arg && arg.match
        }).join(" ")
      }
      cmd.apply(npm, args)
    }
    // copy static properties (usage, completion, ...) onto the wrapper
    Object.keys(cmd).forEach(function (k) {
      commandCache[a][k] = cmd[k]
    })
    return commandCache[a]
  }, enumerable: fullList.indexOf(c) !== -1, configurable: true })
  // make css-case commands callable via camelCase as well
  if (c.match(/\-([a-z])/)) {
    addCommand(c.replace(/\-([a-z])/g, function (a, b) {
      return b.toUpperCase()
    }))
  }
})
// Fallback callback used when an API caller does not supply one:
// errors go to stderr, results to stdout.
function defaultCb (er, data) {
  if (er) {
    console.error(er.stack || er.message)
    return
  }
  console.log(data)
}
// Resolve a user-typed command name to its canonical form: camelCase is
// converted to css-case, plumbing commands pass through untouched, and
// abbreviations and aliases are expanded.
npm.deref = function (c) {
  if (!c) return ""
  // camelCase -> css-case (e.g. runScript -> run-script)
  if (c.match(/[A-Z]/)) {
    c = c.replace(/([A-Z])/g, function (letter) {
      return "-" + letter.toLowerCase()
    })
  }
  // plumbing commands have no abbreviations or aliases
  if (plumbing.indexOf(c) !== -1) return c
  var full = abbrevs[c]
  return aliases[full] || full
}
// Load-state bookkeeping: npm.load() may be called multiple times, so
// extra callbacks queue up in loadListeners until loading completes.
var loaded = false
  , loading = false
  , loadErr = null
  , loadListeners = []
// Flush all queued load callbacks with the final load result.
function loadCb (er) {
  loadListeners.forEach(function (cb) {
    process.nextTick(cb.bind(npm, er, npm))
  })
  loadListeners.length = 0
}
// Load configuration and initialize npm.  Safe to call repeatedly and
// concurrently: callers queue up and are all notified when the single
// in-flight load finishes (or fails).
npm.load = function (cli, cb_) {
  if (!cb_ && typeof cli === "function") cb_ = cli , cli = {}
  if (!cb_) cb_ = function () {}
  if (!cli) cli = {}
  loadListeners.push(cb_)
  if (loaded || loadErr) return cb(loadErr)
  if (loading) return
  loading = true
  var onload = true
  // cb runs once when load() below completes; it records the result and
  // notifies every queued listener
  function cb (er) {
    if (loadErr) return
    loadErr = er
    if (er) return cb_(er)
    if (npm.config.get("force")) {
      log.warn("using --force", "I sure hope you know what you are doing.")
    }
    npm.config.loaded = true
    loaded = true
    loadCb(loadErr = er)
    // optionally require() a user-configured module after loading
    onload = onload && npm.config.get('onload-script')
    if (onload) {
      try {
        require(onload)
      } catch (err) {
        log.warn('onload-script', 'failed to require onload script', onload)
        log.warn('onload-script', err)
      }
      onload = false
    }
  }
  // logging stays paused until the configured loglevel is known (in load)
  log.pause()
  load(npm, cli, cb)
}
// The actual loading work: resolve the real node binary, read all config
// layers, configure logging and color, and spin up the registry client.
function load (npm, cli, cb) {
  which(process.argv[0], function (er, node) {
    // prefer the node found on PATH over whatever execPath says
    if (!er && node.toUpperCase() !== process.execPath.toUpperCase()) {
      log.verbose("node symlink", node)
      process.execPath = node
      process.installPrefix = path.resolve(node, "..", "..")
    }
    // look up configs
    //console.error("about to look up configs")
    var builtin = path.resolve(__dirname, "..", "npmrc")
    npmconf.load(cli, builtin, function (er, config) {
      if (er === config) er = null
      npm.config = config
      if (er) return cb(er)
      // if the "project" config is not a filename, and we're
      // not in global mode, then that means that it collided
      // with either the default or effective userland config
      if (!config.get("global")
          && config.sources.project
          && config.sources.project.type !== "ini") {
        log.verbose("config"
                   , "Skipping project config: %s. "
                   + "(matches userconfig)"
                   , config.localPrefix + "/.npmrc")
      }
      // Include npm-version and node-version in user-agent
      var ua = config.get("user-agent") || ""
      ua = ua.replace(/\{node-version\}/gi, process.version)
      ua = ua.replace(/\{npm-version\}/gi, npm.version)
      ua = ua.replace(/\{platform\}/gi, process.platform)
      ua = ua.replace(/\{arch\}/gi, process.arch)
      config.set("user-agent", ua)
      var color = config.get("color")
      log.level = config.get("loglevel")
      log.heading = config.get("heading") || "npm"
      log.stream = config.get("logstream")
      switch (color) {
        case "always":
          log.enableColor()
          npm.color = true
          break
        case false:
          log.disableColor()
          npm.color = false
          break
        default:
          // NOTE(review): the !tty.isatty branch presumably guards very
          // old nodes where tty.isatty is missing -- confirm before removing
          var tty = require("tty")
          if (process.stdout.isTTY) npm.color = true
          else if (!tty.isatty) npm.color = true
          else if (tty.isatty(1)) npm.color = true
          else npm.color = false
          break
      }
      log.resume()
      // at this point the configs are all set.
      // go ahead and spin up the registry client.
      npm.registry = new CachingRegClient(npm.config)
      // file/dir creation modes derived from the configured umask
      var umask = npm.config.get("umask")
      npm.modes = { exec: 0777 & (~umask)
                  , file: 0666 & (~umask)
                  , umask: umask }
      // mirror the prefix accessors from the config object onto npm itself
      var gp = Object.getOwnPropertyDescriptor(config, "globalPrefix")
      Object.defineProperty(npm, "globalPrefix", gp)
      var lp = Object.getOwnPropertyDescriptor(config, "localPrefix")
      Object.defineProperty(npm, "localPrefix", lp)
      return cb(null, npm)
    })
  })
}
// Derived path properties.  "prefix" switches between global and local
// mode based on config; bin/dir/root/cache/tmp are computed from it.
Object.defineProperty(npm, "prefix",
  { get : function () {
      return npm.config.get("global") ? npm.globalPrefix : npm.localPrefix
    }
  , set : function (r) {
      var k = npm.config.get("global") ? "globalPrefix" : "localPrefix"
      return npm[k] = r
    }
  , enumerable : true
  })
Object.defineProperty(npm, "bin",
  { get : function () {
      if (npm.config.get("global")) return npm.globalBin
      return path.resolve(npm.root, ".bin")
    }
  , enumerable : true
  })
Object.defineProperty(npm, "globalBin",
  { get : function () {
      var b = npm.globalPrefix
      // on unix, global binaries live under <prefix>/bin
      if (process.platform !== "win32") b = path.resolve(b, "bin")
      return b
    }
  })
Object.defineProperty(npm, "dir",
  { get : function () {
      if (npm.config.get("global")) return npm.globalDir
      return path.resolve(npm.prefix, "node_modules")
    }
  , enumerable : true
  })
Object.defineProperty(npm, "globalDir",
  { get : function () {
      return (process.platform !== "win32")
           ? path.resolve(npm.globalPrefix, "lib", "node_modules")
           : path.resolve(npm.globalPrefix, "node_modules")
    }
  , enumerable : true
  })
Object.defineProperty(npm, "root",
  { get : function () { return npm.dir } })
Object.defineProperty(npm, "cache",
  { get : function () { return npm.config.get("cache") }
  , set : function (r) { return npm.config.set("cache", r) }
  , enumerable : true
  })
// per-process temporary directory beneath the configured "tmp" root
var tmpFolder
var rand = require("crypto").randomBytes(4).toString("hex")
Object.defineProperty(npm, "tmp",
  { get : function () {
      if (!tmpFolder) tmpFolder = "npm-" + process.pid + "-" + rand
      return path.resolve(npm.config.get("tmp"), tmpFolder)
    }
  , enumerable : true
  })
// the better to repl you with
// Expose each command directly on the npm object (npm.install(...), etc.),
// normalizing args and callback the same way the CLI wrappers do.
Object.getOwnPropertyNames(npm.commands).forEach(function (n) {
  if (npm.hasOwnProperty(n) || n === "config") return
  Object.defineProperty(npm, n, { get: function () {
    return function () {
      var args = Array.prototype.slice.call(arguments, 0)
        , cb = defaultCb
      if (args.length === 1 && Array.isArray(args[0])) {
        args = args[0]
      }
      if (typeof args[args.length - 1] === "function") {
        cb = args.pop()
      }
      npm.commands[n](args, cb)
    }
  }, enumerable: false, configurable: true })
})
// allow running this module directly as the CLI entry point
if (require.main === module) {
  require("../bin/npm-cli.js")
}
})()

404
node_modules/npm/lib/outdated.js generated vendored Normal file
View File

@@ -0,0 +1,404 @@
/*
npm outdated [pkg]
Does the following:
1. check for a new version of pkg
If no packages are specified, then run for all installed
packages.
--parseable creates output like this:
<fullpath>:<name@wanted>:<name@installed>:<name@latest>
*/
module.exports = outdated
outdated.usage = "npm outdated [<pkg> [<pkg> ...]]"
// complete against the names of installed packages
outdated.completion = require("./utils/completion/installed-deep.js")
var path = require("path")
, readJson = require("read-package-json")
, cache = require("./cache.js")
, asyncMap = require("slide").asyncMap
, npm = require("./npm.js")
, url = require("url")
, color = require("ansicolors")
, styles = require("ansistyles")
, table = require("text-table")
, semver = require("semver")
, os = require("os")
, mapToRegistry = require("./utils/map-to-registry.js")
, npa = require("npm-package-arg")
, readInstalled = require("read-installed")
, long = npm.config.get("long")
, log = require("npmlog")
// npm outdated [pkg ...]
// Walks the installed tree comparing installed versions with what the
// package.json wants and what the registry offers, then renders rows as
// a table, JSON, or parseable lines depending on config.
function outdated (args, silent, cb) {
  if (typeof cb !== "function") cb = silent, silent = false
  var dir = path.resolve(npm.dir, "..")
  // default depth for `outdated` is 0 (cf. `ls`)
  if (npm.config.get("depth") === Infinity) npm.config.set("depth", 0)
  outdated_(args, dir, {}, 0, function (er, list) {
    if (!list) list = []
    if (er || silent || list.length === 0) return cb(er, list)
    // sort rows case-insensitively by package name
    list.sort(function(a, b) {
      var aa = a[1].toLowerCase()
        , bb = b[1].toLowerCase()
      return aa === bb ? 0
           : aa < bb ? -1 : 1
    })
    if (npm.config.get("json")) {
      console.log(makeJSON(list))
    } else if (npm.config.get("parseable")) {
      console.log(makeParseable(list))
    } else {
      var outList = list.map(makePretty)
      var outHead = [ "Package"
                    , "Current"
                    , "Wanted"
                    , "Latest"
                    , "Location"
                    ]
      if (long) outHead.push("Package Type")
      var outTable = [outHead].concat(outList)
      if (npm.color) {
        outTable[0] = outTable[0].map(function(heading) {
          return styles.underline(heading)
        })
      }
      // measure column widths without counting ANSI escape codes
      var tableOpts = { align: ["l", "r", "r", "r", "l"]
                      , stringLength: function(s) { return ansiTrim(s).length }
                      }
      console.log(table(outTable, tableOpts))
    }
    cb(null, list)
  })
}
// [[ dir, dep, has, want, latest, type ]]
// Format one outdated row for the human-readable table; colorized when
// npm.color is enabled (red when the installed version differs from the
// wanted one, yellow otherwise).
function makePretty (p) {
  var dep = p[1]
    , dir = path.resolve(p[0], "node_modules", dep)
    , has = p[2]
    , want = p[3]
    , latest = p[4]
    , type = p[6]
  if (!npm.config.get("global")) {
    dir = path.relative(process.cwd(), dir)
  }
  var columns = [ dep
                , has || "MISSING"
                , want
                , latest
                , dirToPrettyLocation(dir)
                ]
  if (long) columns[5] = type
  if (npm.color) {
    columns[0] = color[has === want ? "yellow" : "red"](columns[0]) // dep
    columns[2] = color.green(columns[2]) // want
    columns[3] = color.magenta(columns[3]) // latest
    columns[4] = color.brightBlack(columns[4]) // dir
    if (long) columns[5] = color.brightBlack(columns[5]) // type
  }
  return columns
}
// Strip ANSI escape sequences (colors, cursor movement, etc.) from a
// string so its visible length can be measured.
function ansiTrim (str) {
  var pattern = "\x1b(?:\\[(?:\\d+[ABCDEFGJKSTm]|\\d+;\\d+[Hfm]|" +
                "\\d+;\\d+;\\d+m|6n|s|u|\\?25[lh])|\\w)"
  return str.replace(new RegExp(pattern, "g"), "")
}
// Convert a node_modules-relative path into a readable dependency chain,
// e.g. "node_modules/a/node_modules/b" -> "a > b".
// Fix: the separator character class was written "[[/\\]"; the literal
// "[" inside the class made "[" count as a path separator too.  Only "/"
// and "\" are path separators.
function dirToPrettyLocation (dir) {
  return dir.replace(/^node_modules[/\\]/, "")
            .replace(/[/\\]node_modules[/\\]/g, " > ")
}
// Render the outdated list in machine-readable form, one line per
// package: <fullpath>:<name@wanted>:<name@installed>:<name@latest>[:<type>]
function makeParseable (list) {
  var lines = list.map(function (row) {
    var name = row[1]
    var fullPath = path.resolve(row[0], "node_modules", name)
    var installed = row[2] ? (name + "@" + row[2]) : "MISSING"
    var fields = [
      fullPath,
      name + "@" + row[3],
      installed,
      name + "@" + row[4]
    ]
    // long mode appends the dependency type
    if (long) fields.push(row[6])
    return fields.join(":")
  })
  return lines.join(os.EOL)
}
// Render the outdated list as pretty-printed JSON keyed by package name.
function makeJSON (list) {
  var out = list.reduce(function (acc, row) {
    var location = path.resolve(row[0], "node_modules", row[1])
    if (!npm.config.get("global")) {
      // report locations relative to the cwd for local installs
      location = path.relative(process.cwd(), location)
    }
    var entry = {
      current: row[2],
      wanted: row[3],
      latest: row[4],
      location: location
    }
    if (long) entry.type = row[6]
    acc[row[1]] = entry
    return acc
  }, {})
  return JSON.stringify(out, null, 2)
}
// Recursive worker for `outdated`.  Reads the wanted deps (from
// package.json, or "latest" for everything found on disk when there is
// no package.json), reads what is actually installed, and emits one row
// per outdated dependency.  `parentHas` carries versions already
// satisfied higher up the tree.
function outdated_ (args, dir, parentHas, depth, cb) {
  // get the deps from package.json, or {<dir/node_modules/*>:"*"}
  // asyncMap over deps:
  // shouldHave = cache.add(dep, req).version
  // if has === shouldHave then
  // return outdated(args, dir/node_modules/dep, parentHas + has)
  // else if dep in args or args is empty
  // return [dir, dep, has, shouldHave]
  if (depth > npm.config.get("depth")) {
    return cb(null, [])
  }
  var deps = null
  var types = {}
  // branch 1: figure out what *should* be installed
  readJson(path.resolve(dir, "package.json"), function (er, d) {
    d = d || {}
    if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)
    // deps === true signals "no package.json; use whatever is installed"
    deps = (er) ? true : (d.dependencies || {})
    if (!er) {
      Object.keys(deps).forEach(function (k) {
        types[k] = "dependencies"
      })
    }
    if (npm.config.get("save-dev")) {
      deps = d.devDependencies || {}
      Object.keys(deps).forEach(function (k) {
        types[k] = "devDependencies"
      })
      return next()
    }
    if (npm.config.get("save")) {
      // remove optional dependencies from dependencies during --save.
      Object.keys(d.optionalDependencies || {}).forEach(function (k) {
        delete deps[k]
      })
      return next()
    }
    if (npm.config.get("save-optional")) {
      deps = d.optionalDependencies || {}
      Object.keys(deps).forEach(function (k) {
        types[k] = "optionalDependencies"
      })
      return next()
    }
    // include devDependencies at the top of a local, non-production run
    var doUpdate = npm.config.get("dev") ||
        (!npm.config.get("production") &&
        !Object.keys(parentHas).length &&
        !npm.config.get("global"))
    if (!er && d && doUpdate) {
      Object.keys(d.devDependencies || {}).forEach(function (k) {
        if (!(k in parentHas)) {
          deps[k] = d.devDependencies[k]
          types[k] = "devDependencies"
        }
      })
    }
    return next()
  })
  var has = null
  // branch 2: figure out what *is* installed
  readInstalled(path.resolve(dir), { dev : true }, function (er, data) {
    if (er) {
      has = Object.create(parentHas)
      return next()
    }
    var pkgs = Object.keys(data.dependencies)
    pkgs = pkgs.filter(function (p) {
      return !p.match(/^[\._-]/)
    })
    asyncMap(pkgs, function (pkg, cb) {
      var jsonFile = path.resolve(dir, "node_modules", pkg, "package.json")
      readJson(jsonFile, function (er, d) {
        if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)
        // private packages are never reported as outdated
        if (d && d.name && d.private) delete deps[d.name]
        cb(null, er ? [] : [[d.name, d.version, d._from]])
      })
    }, function (er, pvs) {
      if (er) return cb(er)
      has = Object.create(parentHas)
      pvs.forEach(function (pv) {
        has[pv[0]] = {
          link: data.dependencies[pv[0]].link,
          version: pv[1],
          from: pv[2]
        }
      })
      next()
    })
  })
  // next() is the join point: it only proceeds once both async branches
  // above have populated `has` and `deps`.
  function next () {
    if (!has || !deps) return
    if (deps === true) {
      deps = Object.keys(has).reduce(function (l, r) {
        l[r] = "latest"
        return l
      }, {})
    }
    // now get what we should have, based on the dep.
    // if has[dep] !== shouldHave[dep], then cb with the data
    // otherwise dive into the folder
    asyncMap(Object.keys(deps), function (dep, cb) {
      if (!long) return shouldUpdate(args, dir, dep, has, deps[dep], depth, cb)
      shouldUpdate(args, dir, dep, has, deps[dep], depth, cb, types[dep])
    }, cb)
  }
}
// Decide whether `dep` (required as `req`) is outdated.  Emits one row
// via doIt() when an update is available; otherwise recurses into the
// dependency's own node_modules via skip().
function shouldUpdate (args, dir, dep, has, req, depth, cb, type) {
  // look up the most recent version.
  // if that's what we already have, or if it's not on the args list,
  // then dive into it. Otherwise, cb() with the data.
  // { version: , from: }
  var curr = has[dep]
  function skip (er) {
    // show user that no viable version can be found
    if (er) return cb(er)
    outdated_( args
             , path.resolve(dir, "node_modules", dep)
             , has
             , depth + 1
             , cb )
  }
  // emit one result row; long mode appends the dependency type
  function doIt (wanted, latest) {
    if (!long) {
      return cb(null, [[ dir, dep, curr && curr.version, wanted, latest, req]])
    }
    cb(null, [[ dir, dep, curr && curr.version, wanted, latest, req, type]])
  }
  if (args.length && args.indexOf(dep) === -1) return skip()
  var parsed = npa(dep + '@' + req)
  // git/github deps have no comparable versions
  if (parsed.type === "git" || (parsed.hosted && parsed.hosted.type === "github")) {
    return doIt("git", "git")
  }
  if (curr && curr.link) {
    return doIt("linked", "linked")
  }
  // search for the latest package
  mapToRegistry(dep, npm.config, function (er, uri, auth) {
    if (er) return cb(er)
    npm.registry.get(uri, { auth : auth }, updateDeps)
  })
  // handle deps that point at a local folder, optionally comparing
  // against a version found in the registry
  function updateLocalDeps (latestRegistryVersion) {
    readJson(path.resolve(parsed.spec, 'package.json'), function (er, localDependency) {
      if (er) return cb()
      var wanted = localDependency.version
      var latest = localDependency.version
      if (latestRegistryVersion) {
        latest = latestRegistryVersion
        if (semver.lt(wanted, latestRegistryVersion)) {
          wanted = latestRegistryVersion
          req = dep + '@' + latest
        }
      }
      if (curr.version !== wanted) {
        doIt(wanted, latest)
      } else {
        skip()
      }
    })
  }
  function updateDeps (er, d) {
    if (er) {
      if (parsed.type !== 'local') return cb(er)
      return updateLocalDeps()
    }
    if (!d || !d["dist-tags"] || !d.versions) return cb()
    var l = d.versions[d["dist-tags"].latest]
    if (!l) return cb()
    var r = req
    // a dist-tag spec (e.g. "beta") dereferences to its version
    if (d["dist-tags"][req])
      r = d["dist-tags"][req]
    if (semver.validRange(r, true)) {
      // some kind of semver range.
      // see if it's in the doc.
      var vers = Object.keys(d.versions)
      var v = semver.maxSatisfying(vers, r, true)
      if (v) {
        return onCacheAdd(null, d.versions[v])
      }
    }
    // We didn't find the version in the doc. See if cache can find it.
    cache.add(dep, req, null, false, onCacheAdd)
    function onCacheAdd(er, d) {
      // if this fails, then it means we can't update this thing.
      // it's probably a thing that isn't published.
      if (er) {
        if (er.code && er.code === "ETARGET") {
          // no viable version found
          return skip(er)
        }
        return skip()
      }
      // check that the url origin hasn't changed (#1727) and that
      // there is no newer version available
      var dFromUrl = d._from && url.parse(d._from).protocol
      var cFromUrl = curr && curr.from && url.parse(curr.from).protocol
      if (!curr || dFromUrl && cFromUrl && d._from !== curr.from
          || d.version !== curr.version
          || d.version !== l.version) {
        if (parsed.type === 'local') return updateLocalDeps(l.version)
        doIt(d.version, l.version)
      }
      else {
        skip()
      }
    }
  }
}

254
node_modules/npm/lib/owner.js generated vendored Normal file
View File

@@ -0,0 +1,254 @@
module.exports = owner
// usage covers the three subcommands dispatched in owner() below
owner.usage = "npm owner add <username> <pkg>"
            + "\nnpm owner rm <username> <pkg>"
            + "\nnpm owner ls <pkg>"
var npm = require("./npm.js")
, log = require("npmlog")
, mapToRegistry = require("./utils/map-to-registry.js")
, readLocalPkg = require("./utils/read-local-package.js")
// Shell-completion for `npm owner`: completes subcommands first, then
// usernames/package names using the registry's -/by-user views.
owner.completion = function (opts, cb) {
  var argv = opts.conf.argv.remain
  if (argv.length > 4) return cb()
  if (argv.length <= 2) {
    var subs = ["add", "rm"]
    if (opts.partialWord === "l") subs.push("ls")
    else subs.push("ls", "list")
    return cb(null, subs)
  }
  npm.commands.whoami([], true, function (er, username) {
    if (er) return cb()
    var un = encodeURIComponent(username)
    var byUser, theUser
    switch (argv[2]) {
      case "ls":
        // FIXME: there used to be registry completion here, but it stopped
        // making sense somewhere around 50,000 packages on the registry
        return cb()
      case "rm":
        if (argv.length > 3) {
          theUser = encodeURIComponent(argv[3])
          byUser = "-/by-user/" + theUser + "|" + un
          return mapToRegistry(byUser, npm.config, function (er, uri, auth) {
            if (er) return cb(er)
            console.error(uri)
            npm.registry.get(uri, { auth : auth }, function (er, d) {
              if (er) return cb(er)
              // return the intersection
              return cb(null, d[theUser].filter(function (p) {
                // kludge for server adminery.
                return un === "isaacs" || d[un].indexOf(p) === -1
              }))
            })
          })
        }
        // else fallthrough
      case "add":
        if (argv.length > 3) {
          theUser = encodeURIComponent(argv[3])
          byUser = "-/by-user/" + theUser + "|" + un
          return mapToRegistry(byUser, npm.config, function (er, uri, auth) {
            if (er) return cb(er)
            console.error(uri)
            npm.registry.get(uri, { auth : auth }, function (er, d) {
              console.error(uri, er || d)
              // return mine that they're not already on.
              if (er) return cb(er)
              var mine = d[un] || []
                , theirs = d[theUser] || []
              return cb(null, mine.filter(function (p) {
                return theirs.indexOf(p) === -1
              }))
            })
          })
        }
        // just list all users who aren't me.
        return mapToRegistry("-/users", npm.config, function (er, uri, auth) {
          if (er) return cb(er)
          npm.registry.get(uri, { auth : auth }, function (er, list) {
            if (er) return cb()
            return cb(null, Object.keys(list).filter(function (n) {
              return n !== un
            }))
          })
        })
      default:
        return cb()
    }
  })
}
// Dispatch to the requested owner subcommand (ls/add/rm), or report
// usage for anything unrecognized.
function owner (args, cb) {
  var action = args.shift()
  if (action === "ls" || action === "list") return ls(args[0], cb)
  if (action === "add") return add(args[0], args[1], cb)
  if (action === "rm" || action === "remove") return rm(args[0], args[1], cb)
  return unknown(action, cb)
}
// npm owner ls <pkg> -- print a package's maintainers ("name <email>"
// per line).  Falls back to the local package.json's name when no
// package is given.
function ls (pkg, cb) {
  if (!pkg) return readLocalPkg(function (er, pkg) {
    if (er) return cb(er)
    if (!pkg) return cb(owner.usage)
    ls(pkg, cb)
  })
  mapToRegistry(pkg, npm.config, function (er, uri, auth) {
    if (er) return cb(er)
    npm.registry.get(uri, { auth : auth }, function (er, data) {
      var msg = ""
      if (er) {
        log.error("owner ls", "Couldn't get owner data", pkg)
        return cb(er)
      }
      var owners = data.maintainers
      if (!owners || !owners.length) msg = "admin party!"
      else msg = owners.map(function (o) {
        return o.name + " <" + o.email + ">"
      }).join("\n")
      console.log(msg)
      cb(er, owners)
    })
  })
}
// npm owner add <user> <pkg> -- add a maintainer, unless they already
// are one (in which case the mutation returns false: handled, no-op).
function add (user, pkg, cb) {
  if (!user) return cb(owner.usage)
  if (!pkg) return readLocalPkg(function (er, pkg) {
    if (er) return cb(er)
    if (!pkg) return cb(new Error(owner.usage))
    add(user, pkg, cb)
  })
  log.verbose("owner add", "%s to %s", user, pkg)
  mutate(pkg, user, function (u, owners) {
    if (!owners) owners = []
    for (var i = 0, l = owners.length; i < l; i ++) {
      var o = owners[i]
      if (o.name === u.name) {
        log.info( "owner add"
                , "Already a package owner: " + o.name + " <" + o.email + ">")
        return false
      }
    }
    owners.push(u)
    return owners
  }, cb)
}
// npm owner rm <user> <pkg> -- remove a maintainer.  Refuses to remove
// the last remaining owner; a non-owner is a logged no-op.
function rm (user, pkg, cb) {
  if (!pkg) return readLocalPkg(function (er, pkg) {
    if (er) return cb(er)
    if (!pkg) return cb(new Error(owner.usage))
    rm(user, pkg, cb)
  })
  log.verbose("owner rm", "%s from %s", user, pkg)
  mutate(pkg, user, function (u, owners) {
    var found = false
      , m = owners.filter(function (o) {
          var match = (o.name === user)
          found = found || match
          return !match
        })
    if (!found) {
      log.info("owner rm", "Not a package owner: " + user)
      return false
    }
    if (!m.length) return new Error(
      "Cannot remove all owners of a package. Add someone else first.")
    return m
  }, cb)
}
// Shared add/rm machinery: fetch the user record (when a user is given),
// fetch the package, apply `mutation` to its maintainers list, and PUT
// the result back.  `mutation` may return false (handled, no-op), an
// Error, or the new maintainers array.
function mutate (pkg, user, mutation, cb) {
  if (user) {
    var byUser = "-/user/org.couchdb.user:" + user
    mapToRegistry(byUser, npm.config, function (er, uri, auth) {
      if (er) return cb(er)
      npm.registry.get(uri, { auth : auth }, mutate_)
    })
  } else {
    mutate_(null, null)
  }
  function mutate_ (er, u) {
    if (!er && user && (!u || u.error)) er = new Error(
      "Couldn't get user data for " + user + ": " + JSON.stringify(u))
    if (er) {
      log.error("owner mutate", "Error getting user data for %s", user)
      return cb(er)
    }
    // only the name/email fields belong in the maintainers list
    if (u) u = { "name" : u.name, "email" : u.email }
    mapToRegistry(pkg, npm.config, function (er, uri, auth) {
      if (er) return cb(er)
      npm.registry.get(uri, { auth : auth }, function (er, data) {
        if (er) {
          log.error("owner mutate", "Error getting package data for %s", pkg)
          return cb(er)
        }
        // save the number of maintainers before mutation so that we can figure
        // out if maintainers were added or removed
        var beforeMutation = data.maintainers.length
        var m = mutation(u, data.maintainers)
        if (!m) return cb() // handled
        if (m instanceof Error) return cb(m) // error
        data = {
          _id : data._id,
          _rev : data._rev,
          maintainers : m
        }
        // scoped names need their slash escaped in the document path
        var dataPath = pkg.replace("/", "%2f") + "/-rev/" + data._rev
        mapToRegistry(dataPath, npm.config, function (er, uri, auth) {
          if (er) return cb(er)
          var params = {
            method : "PUT",
            body : data,
            auth : auth
          }
          npm.registry.request(uri, params, function (er, data) {
            if (!er && data.error) {
              er = new Error("Failed to update package metadata: "+JSON.stringify(data))
            }
            if (er) {
              log.error("owner mutate", "Failed to update package metadata")
            }
            else if (m.length > beforeMutation) {
              console.log("+ %s (%s)", user, pkg)
            }
            else if (m.length < beforeMutation) {
              console.log("- %s (%s)", user, pkg)
            }
            cb(er, data)
          })
        })
      })
    })
  }
}
// Report usage for an unrecognized owner subcommand via the callback.
function unknown (action, cb) {
  var message = "Usage: \n" + owner.usage
  cb(message)
}

69
node_modules/npm/lib/pack.js generated vendored Normal file
View File

@@ -0,0 +1,69 @@
// npm pack <pkg>
// Packs the specified package into a .tgz file, which can then
// be installed.
module.exports = pack
var npm = require("./npm.js")
, install = require("./install.js")
, cache = require("./cache.js")
, fs = require("graceful-fs")
, chain = require("slide").chain
, path = require("path")
, cwd = process.cwd()
, writeStreamAtomic = require('fs-write-stream-atomic')
, cachedPackageRoot = require("./cache/cached-package-root.js")
pack.usage = "npm pack <pkg>"
// if it can be installed, it can be packed.
pack.completion = install.completion
// Pack each argument (default ".") into a tarball, then print the
// generated filenames unless running silently.
function pack (args, silent, cb) {
  if (typeof cb !== "function") cb = silent, silent = false
  if (args.length === 0) args = ["."]
  // run pack_ over the args sequentially, collecting filenames
  chain(args.map(function (arg) { return function (cb) {
    pack_(arg, cb)
  }}), function (er, files) {
    if (er || silent) return cb(er, files)
    printFiles(files, cb)
  })
}
// Print the packed tarball filenames, relative to the original cwd.
function printFiles (files, cb) {
  var relative = files.map(function (file) {
    return path.relative(cwd, file)
  })
  console.log(relative.join("\n"))
  cb()
}
// add to cache, then cp to the cwd
// Adds the package to the npm cache (which builds the tarball), then
// copies the cached tarball into the cwd as <name>-<version>.tgz and
// calls back with that filename.
function pack_ (pkg, cb) {
  cache.add(pkg, null, null, false, function (er, data) {
    if (er) return cb(er)
    // scoped packages get special treatment
    var name = data.name
    if (name[0] === "@") name = name.substr(1).replace(/\//g, "-")
    var fname = name + "-" + data.version + ".tgz"
    var cached = path.join(cachedPackageRoot(data), "package.tgz")
      , from = fs.createReadStream(cached)
      , to = writeStreamAtomic(fname)
      , errState = null
    from.on("error", cb_)
    to.on("error", cb_)
    to.on("close", cb_)
    from.pipe(to)
    // cb_ fires on both error and close; errState ensures cb runs once
    function cb_ (er) {
      if (errState) return
      if (er) return cb(errState = er)
      cb(null, fname)
    }
  })
}

20
node_modules/npm/lib/ping.js generated vendored Normal file
View File

@@ -0,0 +1,20 @@
var npm = require('./npm.js')
module.exports = ping
ping.usage = 'npm ping\nping registry'
// Ping the configured registry; print the response unless silent, and
// call back with the "pong" payload (or null on error).
function ping (args, silent, cb) {
  if (typeof cb !== 'function') {
    cb = silent
    silent = false
  }
  var registry = npm.config.get('registry')
  if (!registry) return cb(new Error('no default registry set'))
  var auth = npm.config.getCredentialsByURI(registry)
  npm.registry.ping(registry, {auth: auth}, function (er, pong) {
    if (!silent) console.log(JSON.stringify(pong))
    cb(er, er ? null : pong)
  })
}

11
node_modules/npm/lib/prefix.js generated vendored Normal file
View File

@@ -0,0 +1,11 @@
module.exports = prefix
var npm = require("./npm.js")
prefix.usage = "npm prefix\nnpm prefix -g\n(just prints the prefix folder)"
// Print (unless silent) and call back with the effective npm prefix
// directory.
function prefix (args, silent, cb) {
  if (typeof cb !== "function") {
    cb = silent
    silent = false
  }
  if (!silent) console.log(npm.prefix)
  process.nextTick(cb.bind(this, null, npm.prefix))
}

56
node_modules/npm/lib/prune.js generated vendored Normal file
View File

@@ -0,0 +1,56 @@
// prune extraneous packages.
module.exports = prune
prune.usage = "npm prune"
var readInstalled = require("read-installed")
  , npm = require("./npm.js")
  , path = require("path")
  , readJson = require("read-package-json")
  , log = require("npmlog")
prune.completion = require("./utils/completion/installed-deep.js")
// Validate the local package.json, then read the installed tree and
// unbuild everything extraneous (or just the named packages in args).
function prune (args, cb) {
  // check that there is a valid package.json file first
  var jsonFile = path.resolve(npm.dir, "..", "package.json" )
  readJson(jsonFile, log.warn, function (er) {
    if (er) return cb(er)
    next()
  })
  function next() {
    var opt = {
      depth: npm.config.get("depth"),
      // include devDependencies unless in production mode
      dev: !npm.config.get("production") || npm.config.get("dev")
    }
    readInstalled(npm.prefix, opt, function (er, data) {
      if (er) return cb(er)
      prune_(args, data, cb)
    })
  }
}
// Unbuild (remove) every path that prunables() collected.
function prune_ (args, data, cb) {
  npm.commands.unbuild(prunables(args, data, []), cb)
}
// Walk the installed tree collecting the paths of packages to remove:
// extraneous packages, optionally restricted to the names in `args`.
// Mutates `data` by deleting pruned entries; `seen` guards against
// cycles in the dependency graph.  Returns a flat array of paths.
function prunables (args, data, seen) {
  var deps = data.dependencies || {}
  var found = []
  Object.keys(deps).forEach(function (name) {
    var dep = deps[name]
    if (typeof dep !== "object" || seen.indexOf(dep) !== -1) return
    seen.push(dep)
    if (dep.extraneous
        && (args.length === 0 || args.indexOf(name) !== -1)) {
      // prune this one; no need to descend into it
      delete deps[name]
      found.push(dep.path)
      return
    }
    // not prunable itself -- its children might be
    found.push.apply(found, prunables(args, dep, seen))
  })
  return found
}

151
node_modules/npm/lib/publish.js generated vendored Normal file
View File

@@ -0,0 +1,151 @@
module.exports = publish
var npm = require("./npm.js")
, log = require("npmlog")
, path = require("path")
, readJson = require("read-package-json")
, lifecycle = require("./utils/lifecycle.js")
, chain = require("slide").chain
, mapToRegistry = require("./utils/map-to-registry.js")
, cachedPackageRoot = require("./cache/cached-package-root.js")
, createReadStream = require("graceful-fs").createReadStream
, npa = require("npm-package-arg")
, semver = require('semver')
, getPublishConfig = require("./utils/get-publish-config.js")
publish.usage = "npm publish <tarball> [--tag <tagname>]"
              + "\nnpm publish <folder> [--tag <tagname>]"
              + "\n\nPublishes '.' if no argument supplied"
              + "\n\nSets tag `latest` if no --tag specified"
publish.completion = function (opts, cb) {
  // publish can complete to a folder with a package.json
  // or a tarball, or a tarball url.
  // for now, not yet implemented.
  return cb()
}
// Publish a folder, tarball, or URL (default ".") to the registry.
// `isRetry` is internal: set when re-publishing after a forced unpublish.
function publish (args, isRetry, cb) {
  if (typeof cb !== "function") {
    cb = isRetry
    isRetry = false
  }
  if (args.length === 0) args = ["."]
  if (args.length !== 1) return cb(publish.usage)
  log.verbose("publish", args)
  // refuse tags that parse as semver ranges -- they would shadow versions
  var t = npm.config.get('tag').trim()
  if (semver.validRange(t)) {
    var er = new Error("Tag name must not be a valid SemVer range: " + t)
    return cb(er)
  }
  var arg = args[0]
  // if it's a local folder, then run the prepublish there, first.
  readJson(path.resolve(arg, "package.json"), function (er, data) {
    if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)
    if (data) {
      if (!data.name) return cb(new Error("No name provided"))
      if (!data.version) return cb(new Error("No version provided"))
    }
    // Error is OK. Could be publishing a URL or tarball, however, that means
    // that we will not have automatically run the prepublish script, since
    // that gets run when adding a folder to the cache.
    if (er) return cacheAddPublish(arg, false, isRetry, cb)
    else cacheAddPublish(arg, true, isRetry, cb)
  })
}
// didPre in this case means that we already ran the prepublish script,
// and that the "dir" is an actual directory, and not something silly
// like a tarball or name@version thing.
// That means that we can run publish/postpublish in the dir, rather than
// in the cache dir.
function cacheAddPublish (dir, didPre, isRetry, cb) {
  npm.commands.cache.add(dir, null, null, false, function (er, data) {
    if (er) return cb(er)
    log.silly("publish", data)
    var cachedir = path.resolve(cachedPackageRoot(data), "package")
    // prepublish (if not already run) -> upload -> publish -> postpublish
    chain([ !didPre &&
            [lifecycle, data, "prepublish", cachedir]
          , [publish_, dir, data, isRetry, cachedir]
          , [lifecycle, data, "publish", didPre ? dir : cachedir]
          , [lifecycle, data, "postpublish", didPre ? dir : cachedir] ]
        , cb )
  })
}
// Upload the built tarball plus metadata to the registry.  On a publish
// conflict with --force, unpublishes and retries once.
function publish_ (arg, data, isRetry, cachedir, cb) {
  if (!data) return cb(new Error("no package.json file found"))
  // publishConfig in package.json may redirect to a different registry
  var mappedConfig = getPublishConfig(
    data.publishConfig,
    npm.config,
    npm.registry
  )
  var config = mappedConfig.config
  var registry = mappedConfig.client
  data._npmVersion = npm.version
  data._nodeVersion = process.versions.node
  delete data.modules
  if (data.private) return cb(
    new Error(
      "This package has been marked as private\n" +
      "Remove the 'private' field from the package.json to publish it."
    )
  )
  mapToRegistry(data.name, config, function (er, registryURI, auth, registryBase) {
    if (er) return cb(er)
    var tarballPath = cachedir + ".tgz"
    // we just want the base registry URL in this case
    log.verbose("publish", "registryBase", registryBase)
    log.silly("publish", "uploading", tarballPath)
    data._npmUser = {
      name : auth.username,
      email : auth.email
    }
    var params = {
      metadata : data,
      body : createReadStream(tarballPath),
      auth : auth
    }
    // registry-frontdoor cares about the access level, which is only
    // configurable for scoped packages
    if (config.get("access")) {
      if (!npa(data.name).scope && config.get("access") === "restricted") {
        return cb(new Error("Can't restrict access to unscoped packages."))
      }
      params.access = config.get("access")
    }
    registry.publish(registryBase, params, function (er) {
      if (er && er.code === "EPUBLISHCONFLICT" &&
          npm.config.get("force") && !isRetry) {
        log.warn("publish", "Forced publish over " + data._id)
        return npm.commands.unpublish([data._id], function (er) {
          // ignore errors. Use the force. Reach out with your feelings.
          // but if it fails again, then report the first error.
          publish([arg], er || true, cb)
        })
      }
      // report the unpublish error if this was a retry and unpublish failed
      if (er && isRetry && isRetry !== true) return cb(isRetry)
      if (er) return cb(er)
      console.log("+ " + data._id)
      cb()
    })
  })
}

73
node_modules/npm/lib/rebuild.js generated vendored Normal file
View File

@@ -0,0 +1,73 @@
module.exports = rebuild
var readInstalled = require("read-installed")
, semver = require("semver")
, log = require("npmlog")
, npm = require("./npm.js")
, npa = require("npm-package-arg")
rebuild.usage = "npm rebuild [<name>[@<version>] [name[@<version>] ...]]"
rebuild.completion = require("./utils/completion/installed-deep.js")
// `npm rebuild [pkgs...]`: walk the installed tree, select the packages that
// match the arguments (or everything when none are given), and re-run their
// build steps.
function rebuild (args, cb) {
  // dev: true so devDependencies are traversed as well
  var opt = { depth: npm.config.get("depth"), dev: true }
  readInstalled(npm.prefix, opt, function (er, data) {
    log.info("readInstalled", typeof data)
    if (er) return cb(er)
    var set = filter(data, args)
      , folders = Object.keys(set).filter(function (f) {
          // never rebuild the project root itself
          return f !== npm.prefix
        })
    if (!folders.length) return cb()
    log.silly("rebuild set", folders)
    cleanBuild(folders, set, cb)
  })
}
// Run npm's build step over every selected folder, then print one
// "<pkg id> <folder>" line per rebuilt package.
function cleanBuild (folders, set, cb) {
  npm.commands.build(folders, function (er) {
    if (er) return cb(er)
    var lines = []
    for (var i = 0; i < folders.length; i++) {
      lines.push(set[folders[i]] + " " + folders[i])
    }
    console.log(lines.join("\n"))
    cb()
  })
}
// Recursively collect {folder path -> package _id} for every installed
// package that matches one of the `name[@range]` args (or every package
// when args is empty).  `seen` guards against dependency cycles; `set`
// accumulates results across the recursion.
function filter (data, args, set, seen) {
  if (!set) set = {}
  if (!seen) seen = {}
  if (set.hasOwnProperty(data.path)) return set
  if (seen.hasOwnProperty(data.path)) return set
  seen[data.path] = true
  var pass
  if (!args.length) pass = true // rebuild everything
  else if (data.name && data._id) {
    for (var i = 0, l = args.length; i < l; i ++) {
      var arg = args[i]
        , nv = npa(arg)
        , n = nv.name
        , v = nv.rawSpec
      if (n !== data.name) continue
      // NOTE(review): rawSpec may be "" when no version was given; loose
      // semver matching appears to accept any version then — confirm.
      if (!semver.satisfies(data.version, v, true)) continue
      pass = true
      break
    }
  }
  if (pass && data._id) {
    log.verbose("rebuild", "path, id", [data.path, data._id])
    set[data.path] = data._id
  }
  // need to also dive through kids, always.
  // since this isn't an install these won't get auto-built unless
  // they're not dependencies.
  Object.keys(data.dependencies || {}).forEach(function (d) {
    // return
    var dep = data.dependencies[d]
    // string-valued entries are unresolved specs, not installed folders
    if (typeof dep === "string") return
    filter(dep, args, set, seen)
  })
  return set
}

78
node_modules/npm/lib/repo.js generated vendored Normal file
View File

@@ -0,0 +1,78 @@
module.exports = repo
repo.usage = "npm repo <pkgname>"
var npm = require("./npm.js")
, opener = require("opener")
, github = require("github-url-from-git")
, githubUserRepo = require("github-url-from-username-repo")
, path = require("path")
, readJson = require("read-package-json")
, fs = require("fs")
, url_ = require("url")
, mapToRegistry = require("./utils/map-to-registry.js")
, npa = require("npm-package-arg")
// Tab-completion stub: registry-backed completion was removed once the
// registry grew too large to enumerate, so nothing is suggested.
repo.completion = function (opts, cb) {
  // FIXME: there used to be registry completion here, but it stopped making
  // sense somewhere around 50,000 packages on the registry
  cb()
}
// `npm repo <pkg>`: open the package's repository page in a browser.  The
// argument may be a package name (looked up in the registry) or a path to a
// local package directory; no argument means the current directory.
function repo (args, cb) {
  var n = args.length && npa(args[0]).name || "."
  fs.stat(n, function (er, s) {
    // not a local path: fall back to a registry lookup
    if (er && er.code === "ENOENT") return callRegistry(n, cb)
    else if (er) return cb(er)
    if (!s.isDirectory()) return callRegistry(n, cb)
    readJson(path.resolve(n, "package.json"), function (er, d) {
      if (er) return cb(er)
      getUrlAndOpen(d, cb)
    })
  })
}
// Derive a browsable URL from pkg.repository and open it with the
// configured browser command.
function getUrlAndOpen (d, cb) {
  var r = d.repository
  if (!r) return cb(new Error("no repository"))
  // XXX remove this when npm@v1.3.10 from node 0.10 is deprecated
  // from https://github.com/npm/npm-www/issues/418
  // expand shorthand "user/repo" urls to full github urls first
  if (githubUserRepo(r.url))
    r.url = githubUserRepo(r.url)

  // github urls get the dedicated normalizer; everything else goes through
  // the generic http(s) converter
  var url = (r.url && ~r.url.indexOf("github"))
          ? github(r.url)
          : nonGithubUrl(r.url)

  if (!url)
    return cb(new Error("no repository: could not get url"))
  opener(url, { command: npm.config.get("browser") }, cb)
}
// Fetch the latest published metadata for package `n` from the registry and
// open its repository page.
function callRegistry (n, cb) {
  mapToRegistry(n, npm.config, function (er, uri) {
    if (er) return cb(er)
    var latestUri = uri + "/latest"
    var opts = { timeout : 3600 }
    npm.registry.get(latestUri, opts, function (er, d) {
      if (er) return cb(er)
      getUrlAndOpen(d, cb)
    })
  })
}
// Convert a non-GitHub repository URL (possibly scp-style, e.g.
// "git@host:user/repo.git") into a browsable http(s) URL, stripping any
// trailing ".git".  Returns undefined when the input cannot be parsed;
// the caller treats that as "could not get url".
function nonGithubUrl (url) {
  try {
    var at = url.indexOf("@")
    if (at !== -1) {
      // drop the "user@" prefix and turn "host:path" into "host/path"
      url = url.slice(at + 1).replace(/:([^\d]+)/, "/$1")
    }
    var parsed = url_.parse(url)
    var protocol = parsed.protocol === "https:" ? "https:" : "http:"
    return protocol + "//" + (parsed.host || "") +
           parsed.path.replace(/\.git$/, "")
  } catch (e) {
    // best-effort conversion: fall through to undefined
  }
}

1
node_modules/npm/lib/restart.js generated vendored Normal file
View File

@@ -0,0 +1 @@
// `npm restart` delegates to the generic lifecycle runner, which executes
// the package's restart script chain.
module.exports = require("./utils/lifecycle.js").cmd("restart")

11
node_modules/npm/lib/root.js generated vendored Normal file
View File

@@ -0,0 +1,11 @@
module.exports = root
var npm = require("./npm.js")
root.usage = "npm root\nnpm root -g\n(just prints the root folder)"
// `npm root [-g]`: print (unless silent) and report npm's node_modules
// directory.  `silent` is optional; detect the two-argument call form.
function root (args, silent, cb) {
  if (typeof cb !== "function") {
    cb = silent
    silent = false
  }
  if (!silent) console.log(npm.dir)
  process.nextTick(cb.bind(this, null, npm.dir))
}

172
node_modules/npm/lib/run-script.js generated vendored Normal file
View File

@@ -0,0 +1,172 @@
module.exports = runScript
var lifecycle = require("./utils/lifecycle.js")
, npm = require("./npm.js")
, path = require("path")
, readJson = require("read-package-json")
, log = require("npmlog")
, chain = require("slide").chain
runScript.usage = "npm run-script <command> [-- <args>]"
// Tab-completion for `npm run-script`: offer the local package's script
// names, or — when the first argument names an installed package — that
// package's script names instead.
runScript.completion = function (opts, cb) {

  // see if there's already a package specified.
  var argv = opts.conf.argv.remain

  if (argv.length >= 4) return cb()

  if (argv.length === 3) {
    // either specified a script locally, in which case, done,
    // or a package, in which case, complete against its scripts
    var json = path.join(npm.localPrefix, "package.json")
    return readJson(json, function (er, d) {
      // a missing/unreadable package.json just means "no local scripts"
      if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)
      if (er) d = {}
      var scripts = Object.keys(d.scripts || {})
      console.error("local scripts", scripts)
      if (scripts.indexOf(argv[2]) !== -1) return cb()
      // ok, try to find out which package it was, then
      var pref = npm.config.get("global") ? npm.config.get("prefix")
               : npm.localPrefix
      var pkgDir = path.resolve( pref, "node_modules"
                               , argv[2], "package.json" )
      readJson(pkgDir, function (er, d) {
        if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)
        if (er) d = {}
        var scripts = Object.keys(d.scripts || {})
        return cb(null, scripts)
      })
    })
  }

  // completing the script name itself: list the local package's scripts
  readJson(path.join(npm.localPrefix, "package.json"), function (er, d) {
    if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)
    d = d || {}
    cb(null, Object.keys(d.scripts || {}))
  })
}
// Entry point for `npm run-script`.  With no arguments, list the scripts
// defined in the local package.json; otherwise load the manifest and run
// the named script with any remaining arguments.
function runScript (args, cb) {
  if (args.length === 0) return list(cb)
  var cmd = args.shift()
  var pkgdir = npm.localPrefix
  var manifest = path.resolve(pkgdir, "package.json")
  readJson(manifest, function (er, pkg) {
    if (er) return cb(er)
    run(pkg, pkgdir, cmd, args, cb)
  })
}
// Print the scripts of the local package, split into well-known lifecycle
// scripts and custom run-scripts.  Output format depends on the log level
// and the --json / --parseable flags; the callback always receives the
// full list of script names.
function list(cb) {
  var json = path.join(npm.localPrefix, "package.json")
  // every lifecycle command plus its pre/post hooks
  var cmdList = [ "publish", "install", "uninstall"
                , "test", "stop", "start", "restart", "version"
                ].reduce(function (l, p) {
                  return l.concat(["pre" + p, p, "post" + p])
                }, [])
  return readJson(json, function(er, d) {
    // missing package.json just means "no scripts"
    if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)
    if (er) d = {}
    var allScripts = Object.keys(d.scripts || {})
    var scripts = []
    var runScripts = []
    allScripts.forEach(function (script) {
      if (cmdList.indexOf(script) !== -1) scripts.push(script)
      else runScripts.push(script)
    })
    if (log.level === "silent") {
      return cb(null, allScripts)
    }
    if (npm.config.get("json")) {
      console.log(JSON.stringify(d.scripts || {}, null, 2))
      return cb(null, allScripts)
    }
    if (npm.config.get("parseable")) {
      allScripts.forEach(function(script) {
        console.log(script + ":" + d.scripts[script])
      })
      return cb(null, allScripts)
    }
    // human-readable output: indent prefix and name/command separator
    var s = "\n "
    var prefix = " "
    if (scripts.length) {
      console.log("Lifecycle scripts included in %s:", d.name)
    }
    scripts.forEach(function(script) {
      console.log(prefix + script + s + d.scripts[script])
    })
    if (!scripts.length && runScripts.length) {
      console.log("Scripts available in %s via `npm run-script`:", d.name)
    }
    else if (runScripts.length) {
      console.log("\navailable via `npm run-script`:")
    }
    runScripts.forEach(function(script) {
      console.log(prefix + script + s + d.scripts[script])
    })
    return cb(null, allScripts)
  })
}
// Execute script `cmd` for package `pkg` in working directory `wd`.
// Builds the chain of pre/post hooks around the script, synthesizes
// defaults for "test", "env" and "restart" when absent, and honors the
// --if-present flag.  Extra `args` are appended to the command line.
function run (pkg, wd, cmd, args, cb) {
  if (!pkg.scripts) pkg.scripts = {}

  var cmds
  if (cmd === "restart" && !pkg.scripts.restart) {
    // no restart script: emulate one with a full stop/start cycle
    cmds = [
      "prestop", "stop", "poststop",
      "restart",
      "prestart", "start", "poststart"
    ]
  } else {
    if (!pkg.scripts[cmd]) {
      if (cmd === "test") {
        pkg.scripts.test = "echo \"Error: no test specified\""
      } else if (cmd === "env") {
        if (process.platform === "win32") {
          log.verbose("run-script using default platform env: SET (Windows)")
          pkg.scripts[cmd] = "SET"
        } else {
          log.verbose("run-script using default platform env: env (Unix)")
          pkg.scripts[cmd] = "env"
        }
      } else if (npm.config.get("if-present")) {
        // --if-present: a missing script is not an error
        return cb(null);
      } else {
        return cb(new Error("missing script: " + cmd))
      }
    }
    cmds = [cmd]
  }

  // wrap in pre/post hooks unless the user explicitly ran a hook
  if (!cmd.match(/^(pre|post)/)) {
    cmds = ["pre"+cmd].concat(cmds).concat("post"+cmd)
  }

  log.verbose("run-script", cmds)
  chain(cmds.map(function (c) {
    // pass cli arguments after -- to script.
    if (pkg.scripts[c] && c === cmd) {
      pkg.scripts[c] = pkg.scripts[c] + joinArgs(args)
    }

    // when running scripts explicitly, assume that they're trusted.
    return [lifecycle, pkg, c, wd, true]
  }), cb)
}
// join arguments after '--' and pass them to script,
// handle special characters such as ', ", ' '.
// Quote each extra CLI argument (everything after `--`) and join them into
// a single string to append to the script command line.  Each argument gets
// a leading space and surrounding double quotes; embedded double quotes are
// backslash-escaped.  (Backslashes themselves are not escaped.)
function joinArgs (args) {
  return args.map(function (arg) {
    return ' "' + arg.replace(/"/g, '\\"') + '"'
  }).join("")
}

269
node_modules/npm/lib/search.js generated vendored Normal file
View File

@@ -0,0 +1,269 @@
module.exports = exports = search
var npm = require("./npm.js")
, columnify = require("columnify")
, updateIndex = require("./cache/update-index.js")
search.usage = "npm search [some search terms ...]"
// Tab-completion for `npm search`: run the search silently with the terms
// typed so far, then offer every word from the matching packages' search
// strings that extends the partially-typed word.
search.completion = function (opts, cb) {
  var compl = {}
    , partial = opts.partialWord
    , ipartial = partial.toLowerCase()
    , plen = partial.length

  // get the batch of data that matches so far.
  // this is an example of using npm.commands.search programmatically
  // to fetch data that has been filtered by a set of arguments.
  search(opts.conf.argv.remain.slice(2), true, function (er, data) {
    if (er) return cb(er)
    Object.keys(data).forEach(function (name) {
      data[name].words.split(" ").forEach(function (w) {
        if (w.toLowerCase().indexOf(ipartial) === 0) {
          // keep the user's original casing for the typed prefix
          compl[partial + w.substr(plen)] = true
        }
      })
    })
    cb(null, Object.keys(compl))
  })
}
// `npm search [terms...]`.  `silent` and `staleness` are optional (detected
// by shifting when `cb` is not yet a function; staleness defaults to 600
// seconds).  Search terms come from the args plus the searchopts config;
// searchexclude config terms filter results out.
function search (args, silent, staleness, cb) {
  if (typeof cb !== "function") cb = staleness, staleness = 600
  if (typeof cb !== "function") cb = silent, silent = false

  var searchopts = npm.config.get("searchopts")
  var searchexclude = npm.config.get("searchexclude")

  if (typeof searchopts !== "string") searchopts = ""
  searchopts = searchopts.split(/\s+/)
  // lowercase and drop empty terms
  var opts = searchopts.concat(args).map(function (s) {
    return s.toLowerCase()
  }).filter(function (s) { return s })

  if (typeof searchexclude === "string") {
    searchexclude = searchexclude.split(/\s+/)
  } else {
    searchexclude = []
  }
  searchexclude = searchexclude.map(function (s) {
    return s.toLowerCase()
  })
  getFilteredData(staleness, opts, searchexclude, function (er, data) {
    // now data is the list of data that we want to show.
    // prettify and print it, and then provide the raw
    // data to the cb.
    if (er || silent) return cb(er, data)
    console.log(prettify(data, args))
    cb(null, data)
  })
}
// Refresh the cached all-packages index (when older than `staleness`
// seconds) and hand back only the entries matching `args` while avoiding
// `notArgs`.
function getFilteredData (staleness, args, notArgs, cb) {
  updateIndex(staleness, function (er, data) {
    if (er) return cb(er)
    return cb(null, filter(data, args, notArgs))
  })
}
// `data` is {<name>: <package data>}.  Return the same shape, keeping only
// object-valued entries whose searchable words satisfy every term in `args`
// and none in `notArgs`.
function filter (data, args, notArgs) {
  var out = {}
  Object.keys(data).forEach(function (name) {
    var pkg = data[name]
    if (typeof pkg !== "object") return
    var entry = getWords(stripData(pkg))
    if (filterWords(entry, args, notArgs)) out[entry.name] = entry
  })
  return out
}
// Reduce a raw registry/index record to the fields the search output needs.
// Description is suppressed unless the `description` config is on; the
// modified time is trimmed to "YYYY-MM-DD HH:MM" or "prehistoric" when
// missing.
function stripData (data) {
  return { name: data.name
         , description: npm.config.get("description") ? data.description : ""
         , maintainers: (data.maintainers || []).map(function (m) {
             return "=" + m.name
           })
         // url is only interesting when there are no published versions
         , url: !Object.keys(data.versions || {}).length ? data.url : null
         , keywords: data.keywords || []
         // NOTE(review): falls back to [] (an array) rather than "" when
         // there are no versions — looks odd but downstream tolerates it.
         , version: Object.keys(data.versions || {})[0] || []
         , time: data.time
                 && data.time.modified
                 && (new Date(data.time.modified).toISOString()
                     .split("T").join(" ")
                     .replace(/:[0-9]{2}\.[0-9]{3}Z$/, ""))
                     .slice(0, -5) // remove time
                 || "prehistoric"
         }
}
// Build one lowercase search string from the record's interesting fields
// and attach it as data.words (the object is mutated and returned).
// Non-string and empty entries are dropped.
function getWords (data) {
  var parts = [ data.name ]
    .concat(data.description)
    .concat(data.maintainers)
    .concat(data.url && ("<" + data.url + ">"))
    .concat(data.keywords)
  var cleaned = []
  parts.forEach(function (part) {
    var trimmed = part && part.trim && part.trim()
    if (trimmed) cleaned.push(trimmed)
  })
  data.words = cleaned.join(" ").toLowerCase()
  return data
}
// A record matches when every positive term matches its words and no
// excluded term does.
function filterWords (data, args, notArgs) {
  var words = data.words
  var i
  for (i = 0; i < args.length; i++) {
    if (!match(words, args[i])) return false
  }
  for (i = 0; i < notArgs.length; i++) {
    if (match(words, notArgs[i])) return false
  }
  return true
}
// Test one search term against the words string.  Terms wrapped in slashes
// ("/re/") are treated as regular expressions (returning the match result);
// anything else is a plain substring test returning a boolean.
function match (words, arg) {
  if (arg.charAt(0) === "/") {
    var pattern = arg.replace(/\/$/, "").slice(1)
    return words.match(new RegExp(pattern))
  }
  return words.indexOf(arg) !== -1
}
// Render the matched packages as a sorted, column-aligned table.  Sort
// field and direction come from the searchsort config (a "-" prefix
// reverses); rows are clipped to the terminal width unless --long, and the
// search terms are color-highlighted.
function prettify (data, args) {
  var searchsort = (npm.config.get("searchsort") || "NAME").toLowerCase()
    , sortField = searchsort.replace(/^\-+/, "")
    , searchRev = searchsort.charAt(0) === "-"
    , truncate = !npm.config.get("long")

  if (Object.keys(data).length === 0) {
    return "No match found for "+(args.map(JSON.stringify).join(" "))
  }

  var lines = Object.keys(data).map(function (d) {
    // strip keyname
    return data[d]
  }).map(function(dat) {
    // rename fields to the column headers columnify will show
    dat.author = dat.maintainers
    delete dat.maintainers
    dat.date = dat.time
    delete dat.time
    return dat
  }).map(function(dat) {
    // split keywords on whitespace or ,
    if (typeof dat.keywords === "string") {
      dat.keywords = dat.keywords.split(/[,\s]+/)
    }
    if (Array.isArray(dat.keywords)) {
      dat.keywords = dat.keywords.join(" ")
    }

    // split author on whitespace or ,
    if (typeof dat.author === "string") {
      dat.author = dat.author.split(/[,\s]+/)
    }
    if (Array.isArray(dat.author)) {
      dat.author = dat.author.join(" ")
    }
    return dat
  })

  lines.sort(function(a, b) {
    var aa = a[sortField].toLowerCase()
      , bb = b[sortField].toLowerCase()
    return aa === bb ? 0
         : aa < bb ? -1 : 1
  })

  if (searchRev) lines.reverse()

  var columns = npm.config.get("description")
              ? ["name", "description", "author", "date", "version", "keywords"]
              : ["name", "author", "date", "version", "keywords"]

  var output = columnify(lines, {
    include: columns
  , truncate: truncate
  , config: {
      name: { maxWidth: 40, truncate: false, truncateMarker: "" }
    , description: { maxWidth: 60 }
    , author: { maxWidth: 20 }
    , date: { maxWidth: 11 }
    , version: { maxWidth: 11 }
    , keywords: { maxWidth: Infinity }
    }
  })
  output = trimToMaxWidth(output)
  output = highlightSearchTerms(output, args)

  return output
}
var colors = [31, 33, 32, 36, 34, 35 ]
, cl = colors.length
// Surround every (case-insensitive) occurrence of search term `arg` in
// `str` with sentinel characters: char code (i % cl) + 1 opens a highlight
// and NUL closes it.  colorize() later converts the sentinels into ANSI
// codes.  Regex terms ("/re/") use a global replace; plain terms walk the
// string manually to preserve the original casing.
function addColorMarker (str, arg, i) {
  var m = i % cl + 1
    , markStart = String.fromCharCode(m)
    , markEnd = String.fromCharCode(0)

  if (arg.charAt(0) === "/") {
    //arg = arg.replace(/\/$/, "")
    // NOTE(review): substr(1, length - 2) assumes a trailing slash is still
    // present here (match() strips it separately) — confirm.
    return str.replace( new RegExp(arg.substr(1, arg.length - 2), "gi")
                      , function (bit) { return markStart + bit + markEnd } )
  }

  // just a normal string, do the split/map thing
  var pieces = str.toLowerCase().split(arg.toLowerCase())
    , p = 0

  return pieces.map(function (piece) {
    // re-read the piece from the original string to keep its casing
    piece = str.substr(p, piece.length)
    var mark = markStart
             + str.substr(p+piece.length, arg.length)
             + markEnd
    p += piece.length + arg.length
    return piece + mark
  }).join("")
}
// Replace the sentinel characters inserted by addColorMarker with ANSI
// color escapes (one color per search term, cycling through `colors`), or
// strip them entirely when npm.color is off.
function colorize (line) {
  for (var i = 0; i < cl; i ++) {
    var m = i + 1
    var color = npm.color ? "\033["+colors[i]+"m" : ""
    line = line.split(String.fromCharCode(m)).join(color)
  }
  // NUL closes a highlight: reset color (or nothing when color is off)
  var uncolor = npm.color ? "\033[0m" : ""
  return line.split("\u0000").join(uncolor)
}
// Best-effort terminal width: Infinity when stdout is not a TTY, when the
// reported width is 0, or when probing fails for any reason.
function getMaxWidth() {
  var cols
  try {
    var tty = require("tty")
      , stdout = process.stdout
    cols = !tty.isatty(stdout.fd) ? Infinity : process.stdout.getWindowSize()[0]
    cols = (cols === 0) ? Infinity : cols
  } catch (ex) { cols = Infinity }
  return cols
}
// Clip every line of the rendered table to the terminal width.
function trimToMaxWidth(str) {
  var width = getMaxWidth()
  var lines = str.split("\n")
  for (var i = 0; i < lines.length; i++) {
    lines[i] = lines[i].slice(0, width)
  }
  return lines.join("\n")
}
// Mark every search term with per-term sentinel characters, then convert
// the sentinels into ANSI color codes (or strip them when color is off).
function highlightSearchTerms(str, terms) {
  for (var i = 0; i < terms.length; i++) {
    str = addColorMarker(str, terms[i], i)
  }
  return colorize(str).trim()
}

13
node_modules/npm/lib/set.js generated vendored Normal file
View File

@@ -0,0 +1,13 @@
module.exports = set
set.usage = "npm set <key> <value> (See `npm config`)"
var npm = require("./npm.js")
set.completion = npm.commands.config.completion
// `npm set <key> <value>` is sugar for `npm config set <key> <value>`.
function set (args, cb) {
  if (args.length === 0) return cb(set.usage)
  npm.commands.config(["set"].concat(args), cb)
}

86
node_modules/npm/lib/shrinkwrap.js generated vendored Normal file
View File

@@ -0,0 +1,86 @@
// emit JSON describing versions of all packages currently installed (for later
// use with shrinkwrap install)
module.exports = exports = shrinkwrap
var npm = require("./npm.js")
, log = require("npmlog")
, fs = require("fs")
, writeFileAtomic = require("write-file-atomic")
, path = require("path")
, readJson = require("read-package-json")
, sortedObject = require("sorted-object")
shrinkwrap.usage = "npm shrinkwrap"
// `npm shrinkwrap`: capture the exact installed dependency tree (via
// `npm ls`) and write it to npm-shrinkwrap.json.  `silent` is optional.
function shrinkwrap (args, silent, cb) {
  if (typeof cb !== "function") cb = silent, silent = false

  if (args.length) {
    log.warn("shrinkwrap", "doesn't take positional args")
  }

  // https://github.com/npm/npm/issues/7641
  // introduced because `npm ls` can now show dev and prod dependency
  // trees separately
  if (npm.config.get("dev")) {
    npm.config.set("production", true)
  }

  npm.commands.ls([], true, function (er, _, pkginfo) {
    if (er) return cb(er)
    shrinkwrap_(pkginfo, silent, npm.config.get("dev"), cb)
  })
}
// Validate the tree reported by `npm ls` and, unless --dev was given,
// strip devDependencies (except those that are also regular dependencies)
// before writing the shrinkwrap file.
function shrinkwrap_ (pkginfo, silent, dev, cb) {
  if (pkginfo.problems) {
    return cb(new Error("Problems were encountered\n"
                       +"Please correct and try again.\n"
                       +pkginfo.problems.join("\n")))
  }

  if (!dev) {
    // remove dev deps unless the user does --dev
    readJson(path.resolve(npm.prefix, "package.json"), function (er, data) {
      if (er)
        return cb(er)
      if (data.devDependencies) {
        Object.keys(data.devDependencies).forEach(function (dep) {
          if (data.dependencies && data.dependencies[dep]) {
            // do not exclude the dev dependency if it's also listed as a dependency
            return
          }

          log.warn("shrinkwrap", "Excluding devDependency: %s", dep, data.dependencies)
          delete pkginfo.dependencies[dep]
        })
      }
      save(pkginfo, silent, cb)
    })
  } else {
    save(pkginfo, silent, cb)
  }
}
// Serialize the tree and atomically write npm-shrinkwrap.json in the
// project root, printing a confirmation unless silent.
function save (pkginfo, silent, cb) {
  // copy the keys over in a well defined order
  // because javascript objects serialize arbitrarily
  pkginfo.dependencies = sortedObject(pkginfo.dependencies || {})

  var swdata
  try {
    swdata = JSON.stringify(pkginfo, null, 2) + "\n"
  } catch (er) {
    log.error("shrinkwrap", "Error converting package info to json")
    return cb(er)
  }

  var file = path.resolve(npm.prefix, "npm-shrinkwrap.json")

  writeFileAtomic(file, swdata, function (er) {
    if (er) return cb(er)
    if (silent) return cb(null, pkginfo)
    console.log("wrote npm-shrinkwrap.json")
    cb(null, pkginfo)
  })
}

41
node_modules/npm/lib/star.js generated vendored Normal file
View File

@@ -0,0 +1,41 @@
module.exports = star
var npm = require("./npm.js")
, log = require("npmlog")
, asyncMap = require("slide").asyncMap
, mapToRegistry = require("./utils/map-to-registry.js")
star.usage = "npm star <package> [pkg, pkg, ...]\n"
+ "npm unstar <package> [pkg, pkg, ...]"
// Tab-completion stub: registry-backed completion was removed once the
// registry grew too large to enumerate, so nothing is suggested.
star.completion = function (opts, cb) {
  // FIXME: there used to be registry completion here, but it stopped making
  // sense somewhere around 50,000 packages on the registry
  cb()
}
// `npm star <pkg>...` / `npm unstar <pkg>...`: toggle the starred flag for
// each package.  The direction comes from the invoked command name
// (anything starting with "un" unstars); the printed glyph depends on the
// unicode config.
function star (args, cb) {
  if (!args.length) return cb(star.usage)
  var s = npm.config.get("unicode") ? "\u2605 " : "(*)"
    , u = npm.config.get("unicode") ? "\u2606 " : "( )"
    , using = !(npm.command.match(/^un/))
  if (!using) s = u
  asyncMap(args, function (pkg, cb) {
    mapToRegistry(pkg, npm.config, function (er, uri, auth) {
      if (er) return cb(er)

      var params = {
        starred : using,
        auth : auth
      }
      npm.registry.star(uri, params, function (er, data, raw, req) {
        if (!er) {
          console.log(s + " "+pkg)
          log.verbose("star", data)
        }
        cb(er, data, raw, req)
      })
    })
  }, cb)
}

46
node_modules/npm/lib/stars.js generated vendored Normal file
View File

@@ -0,0 +1,46 @@
module.exports = stars
stars.usage = "npm stars [username]"
var npm = require("./npm.js")
, log = require("npmlog")
, mapToRegistry = require("./utils/map-to-registry.js")
// `npm stars [username]`: list the packages starred by the given user, or
// by the logged-in user when no argument is given (auth is then required).
function stars (args, cb) {
  npm.commands.whoami([], true, function (er, username) {
    var name = args.length === 1 ? args[0] : username

    if (er) {
      if (er.code === 'ENEEDAUTH' && !name) {
        var needAuth = new Error("'npm stars' on your own user account requires auth")
        needAuth.code = 'ENEEDAUTH'
        return cb(needAuth)
      }

      // auth errors are tolerable when an explicit username was given
      if (er.code !== 'ENEEDAUTH') return cb(er)
    }

    mapToRegistry("", npm.config, function (er, uri, auth) {
      if (er) return cb(er)

      var params = {
        username : name,
        auth : auth
      }
      npm.registry.stars(uri, params, showstars)
    })
  })

  // closes over cb: prints each starred package name, one per line
  function showstars (er, data) {
    if (er) return cb(er)

    if (data.rows.length === 0) {
      log.warn("stars", "user has not starred any packages.")
    } else {
      data.rows.forEach(function(a) {
        console.log(a.value)
      })
    }
    cb()
  }
}

1
node_modules/npm/lib/start.js generated vendored Normal file
View File

@@ -0,0 +1 @@
// `npm start` delegates to the generic lifecycle runner, which executes
// the package's start script chain.
module.exports = require("./utils/lifecycle.js").cmd("start")

1
node_modules/npm/lib/stop.js generated vendored Normal file
View File

@@ -0,0 +1 @@
// `npm stop` delegates to the generic lifecycle runner, which executes
// the package's stop script chain.
module.exports = require("./utils/lifecycle.js").cmd("stop")

20
node_modules/npm/lib/substack.js generated vendored Normal file
View File

@@ -0,0 +1,20 @@
module.exports = substack
var npm = require("./npm.js")
var isms =
[ "\033[32mbeep \033[35mboop\033[m"
, "Replace your configs with services"
, "SEPARATE ALL THE CONCERNS!"
, "MODULE ALL THE THINGS!"
, "\\o/"
, "but first, burritos"
, "full time mad scientist here"
, "c/,,\\" ]
// Easter egg: print a random substack-ism, then (optionally) run the npm
// command given as the first argument.
function substack (args, cb) {
  var i = Math.floor(Math.random() * isms.length)
  console.log(isms[i])
  var c = args.shift()
  if (c) npm.commands[c](args, cb)
  else cb()
}

41
node_modules/npm/lib/tag.js generated vendored Normal file
View File

@@ -0,0 +1,41 @@
// turns out tagging isn't very complicated
// all the smarts are in the couch.
module.exports = tag
tag.usage = "npm tag <project>@<version> [<tag>]"
tag.completion = require("./unpublish.js").completion
var npm = require("./npm.js")
, mapToRegistry = require("./utils/map-to-registry.js")
, npa = require("npm-package-arg")
, semver = require("semver")
, log = require("npmlog")
// Deprecated `npm tag <project>@<version> [<tag>]`: point a dist-tag at a
// published version.  The tag defaults to the `tag` config and must not be
// a valid semver range (that would shadow version resolution).
function tag (args, cb) {
  var thing = npa(args.shift() || "")
    , project = thing.name
    , version = thing.rawSpec
    , t = args.shift() || npm.config.get("tag")

  t = t.trim()

  if (!project || !version || !t) return cb("Usage:\n"+tag.usage)

  if (semver.validRange(t)) {
    var er = new Error("Tag name must not be a valid SemVer range: " + t)
    return cb(er)
  }

  log.warn("tag", "This command is deprecated. Use `npm dist-tag` instead.")

  mapToRegistry(project, npm.config, function (er, uri, auth) {
    if (er) return cb(er)

    var params = {
      version : version,
      tag : t,
      auth : auth
    }
    npm.registry.tag(uri, params, cb)
  })
}

54
node_modules/npm/lib/team.js generated vendored Normal file
View File

@@ -0,0 +1,54 @@
var mapToRegistry = require('./utils/map-to-registry.js')
var npm = require('./npm')
module.exports = team
team.subcommands = ['create', 'destroy', 'add', 'rm', 'ls', 'edit']
team.usage =
'npm team create <scope:team>\n' +
'npm team destroy <scope:team>\n' +
'npm team add <scope:team> <user>\n' +
'npm team rm <scope:team> <user>\n' +
'npm team ls <scope>|<scope:team>\n' +
'npm team edit <scope:team>'
// Tab-completion for `npm team`: offer the subcommand names first; the
// entity/user arguments that follow cannot be enumerated locally.
team.completion = function (opts, cb) {
  var argv = opts.conf.argv.remain
  if (argv.length === 2) {
    return cb(null, team.subcommands)
  }
  if (team.subcommands.indexOf(argv[2]) !== -1) {
    return cb(null, [])
  }
  return cb(new Error(argv[2] + ' not recognized'))
}
// `npm team <cmd> [<scope:team>] [<user>]`: dispatch a team-management
// operation to the registry client, printing any JSON response.
function team (args, cb) {
  // Entities are in the format <scope>:<team>
  var cmd = args.shift()
  var entity = (args.shift() || '').split(':')
  return mapToRegistry('/', npm.config, function (err, uri, auth) {
    if (err) { return cb(err) }
    try {
      return npm.registry.team(cmd, uri, {
        auth: auth,
        scope: entity[0],
        team: entity[1],
        user: args.shift()
      }, function (err, data) {
        !err && data && console.log(JSON.stringify(data, undefined, 2))
        cb(err, data)
      })
    } catch (e) {
      // registry client throws on unknown subcommands; surface usage help
      cb(e.message + '\n\nUsage:\n' + team.usage)
    }
  })
}

13
node_modules/npm/lib/test.js generated vendored Normal file
View File

@@ -0,0 +1,13 @@
module.exports = test
var testCmd = require("./utils/lifecycle.js").cmd("test")
// Run the "test" lifecycle script, translating a script failure
// (ELIFECYCLE) into a friendlier message; other errors pass through.
function test (args, cb) {
  testCmd(args, function (er) {
    if (er) {
      if (er.code === "ELIFECYCLE") {
        return cb("Test failed. See above for more details.")
      }
      return cb(er)
    }
    cb()
  })
}

115
node_modules/npm/lib/unbuild.js generated vendored Normal file
View File

@@ -0,0 +1,115 @@
module.exports = unbuild
unbuild.usage = "npm unbuild <folder>\n(this is plumbing)"
var readJson = require("read-package-json")
, gentlyRm = require("./utils/gently-rm.js")
, npm = require("./npm.js")
, path = require("path")
, isInside = require("path-is-inside")
, lifecycle = require("./utils/lifecycle.js")
, asyncMap = require("slide").asyncMap
, chain = require("slide").chain
, log = require("npmlog")
, build = require("./build.js")
// args is a list of folders.
// remove any bins/etc, and then delete the folder.
// Reverse the build step for each folder in `args`.  `silent` is optional;
// detect the two-argument call form.
function unbuild (args, silent, cb) {
  if (typeof silent === "function") {
    cb = silent
    silent = false
  }
  asyncMap(args, unbuild_(silent), cb)
}
// Returns the per-folder worker for asyncMap: run the uninstall lifecycle
// scripts, remove installed bins/man pages, then delete the folder.  The
// wrapped callback always reports the folder path relative to npm.root.
function unbuild_ (silent) { return function (folder, cb_) {
  function cb (er) {
    cb_(er, path.relative(npm.root, folder))
  }
  folder = path.resolve(folder)
  // only treat npm.prefix as the removal base when the folder is inside it
  var base = isInside(folder, npm.prefix) ? npm.prefix : folder
  delete build._didBuild[folder]
  log.verbose("unbuild", folder.substr(npm.prefix.length + 1))
  readJson(path.resolve(folder, "package.json"), function (er, pkg) {
    // if no json, then just trash it, but no scripts or whatever.
    if (er) return gentlyRm(folder, false, base, cb)
    chain
      ( [ [lifecycle, pkg, "preuninstall", folder, false, true]
        , [lifecycle, pkg, "uninstall", folder, false, true]
        , !silent && function(cb) {
            console.log("unbuild " + pkg._id)
            cb()
          }
        , [rmStuff, pkg, folder]
        , [lifecycle, pkg, "postuninstall", folder, false, true]
        , [gentlyRm, folder, false, base] ]
      , cb )
  })
}}
// Remove the package's installed bin links and man pages, in parallel.
function rmStuff (pkg, folder, cb) {
  // if it's global, and folder is in {prefix}/node_modules,
  // then bins are in {prefix}/bin
  // otherwise, then bins are in folder/../.bin
  var parent = path.dirname(folder)
    , gnm = npm.dir
    , top = gnm === parent

  log.verbose("unbuild rmStuff", pkg._id, "from", gnm)
  if (!top) log.verbose("unbuild rmStuff", "in", parent)
  asyncMap([rmBins, rmMans], function (fn, cb) {
    fn(pkg, folder, parent, top, cb)
  }, cb)
}
// Remove each bin link declared by the package.  On Windows both the .cmd
// shim and the shell shim are removed.
function rmBins (pkg, folder, parent, top, cb) {
  if (!pkg.bin) return cb()
  // top-level installs link into npm.bin; nested ones into ../.bin
  var binRoot = top ? npm.bin : path.resolve(parent, ".bin")
  asyncMap(Object.keys(pkg.bin), function (b, cb) {
    if (process.platform === "win32") {
      chain([ [gentlyRm, path.resolve(binRoot, b) + ".cmd", true]
            , [gentlyRm, path.resolve(binRoot, b), true] ], cb)
    } else {
      gentlyRm(path.resolve(binRoot, b), true, cb)
    }
  }, cb)
}
// Remove installed man pages.  Only applies to global, top-level,
// non-Windows installs; man files live under {prefix}/share/man/man<N>.
function rmMans (pkg, folder, parent, top, cb) {
  if (!pkg.man
      || !top
      || process.platform === "win32"
      || !npm.config.get("global")) {
    return cb()
  }
  var manRoot = path.resolve(npm.config.get("prefix"), "share", "man")
  log.verbose("rmMans", "man files are", pkg.man, "in", manRoot)
  asyncMap(pkg.man, function (man, cb) {
    // NOTE(review): when `man` is an array, rmMan (and therefore cb) runs
    // once per element, so cb may fire multiple times for one asyncMap
    // item — looks like a long-standing wart; confirm before changing.
    if (Array.isArray(man)) {
      man.forEach(rmMan)
    } else {
      rmMan(man)
    }

    function rmMan (man) {
      log.silly("rmMan", "preparing to remove", man)
      // capture "<stem>.<section>[.gz]"
      var parseMan = man.match(/(.*\.([0-9]+)(\.gz)?)$/)
      if (!parseMan) {
        log.error(
          "rmMan", man, "is not a valid name for a man file.",
          "Man files must end with a number, " +
          "and optionally a .gz suffix if they are compressed."
        )
        return cb()
      }

      var stem = parseMan[1]
      var sxn = parseMan[2]
      var gz = parseMan[3] || ""
      var bn = path.basename(stem)
      // installed name is prefixed with the package name unless it
      // already starts with it
      var manDest = path.join(
        manRoot,
        "man"+sxn,
        (bn.indexOf(pkg.name) === 0 ? bn : pkg.name+"-"+bn)+"."+sxn+gz
      )
      gentlyRm(manDest, true, cb)
    }
  }, cb)
}

128
node_modules/npm/lib/uninstall.js generated vendored Normal file
View File

@@ -0,0 +1,128 @@
// remove a package.
module.exports = uninstall
uninstall.usage = "npm uninstall <name>[@<version> [<name>[@<version>] ...]"
+ "\nnpm rm <name>[@<version> [<name>[@<version>] ...]"
uninstall.completion = require("./utils/completion/installed-shallow.js")
var fs = require("graceful-fs")
, writeFileAtomic = require("write-file-atomic")
, log = require("npmlog")
, readJson = require("read-package-json")
, path = require("path")
, npm = require("./npm.js")
, asyncMap = require("slide").asyncMap
// `npm uninstall [pkgs...]`: remove installed packages.  With no arguments
// (or "."), the current project's package is removed from the global dir.
function uninstall (args, cb) {
  // this is super easy
  // get the list of args that correspond to package names in either
  // the global npm.dir,
  // then call unbuild on all those folders to pull out their bins
  // and mans and whatnot, and then delete the folder.

  var nm = npm.dir
  if (args.length === 1 && args[0] === ".") args = []
  if (args.length) return uninstall_(args, nm, cb)

  // remove this package from the global space, if it's installed there
  readJson(path.resolve(npm.localPrefix, "package.json"), function (er, pkg) {
    if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)
    if (er) return cb(uninstall.usage)
    uninstall_( [pkg.name]
              , npm.globalDir
              , cb )
  })
}
// Resolve each requested name to an installed folder under `nm`, skipping
// anything not actually installed, then unbuild the survivors.  When a
// --save flag is active, the callback is wrapped so package.json is
// updated afterwards.
function uninstall_ (args, nm, cb) {
  // if we've been asked to --save or --save-dev or --save-optional,
  // then also remove it from the associated dependencies hash.
  var s = npm.config.get('save')
    , d = npm.config.get('save-dev')
    , o = npm.config.get('save-optional')
  if (s || d || o) {
    cb = saver(args, nm, cb)
  }

  asyncMap(args, function (arg, cb) {
    // uninstall .. should not delete /usr/local/lib/node_modules/..
    // path.join("/", arg) normalizes away any traversal in the argument
    var p = path.join(path.resolve(nm), path.join("/", arg))
    if (path.resolve(p) === nm) {
      log.warn("uninstall", "invalid argument: %j", arg)
      return cb(null, [])
    }
    fs.lstat(p, function (er) {
      if (er) {
        log.warn("uninstall", "not installed in %s: %j", nm, arg)
        return cb(null, [])
      }
      cb(null, p)
    })
  }, function (er, folders) {
    if (er) return cb(er)
    asyncMap(folders, npm.commands.unbuild, cb)
  })
}
// Wrap the uninstall callback so that, on success, the removed packages
// are also deleted from the appropriate dependency sections (and from
// bundleDependencies with --save-bundle) of the parent package.json.
function saver (args, nm, cb_) {
  return cb
  function cb (er, data) {
    var s = npm.config.get('save')
      , d = npm.config.get('save-dev')
      , o = npm.config.get('save-optional')
    if (er || !(s || d || o)) return cb_(er, data)

    var pj = path.resolve(nm, '..', 'package.json')
    // don't use readJson here, because we don't want all the defaults
    // filled in, for mans and other bs.
    fs.readFile(pj, 'utf8', function (er, json) {
      var pkg
      try {
        pkg = JSON.parse(json)
      } catch (_) {}
      // unparseable/missing package.json: nothing to update
      if (!pkg) return cb_(null, data)

      var bundle
      if (npm.config.get('save-bundle')) {
        bundle = pkg.bundleDependencies || pkg.bundledDependencies
        if (!Array.isArray(bundle)) bundle = undefined
      }

      var changed = false
      args.forEach(function (a) {
        // remove from each section whose --save flag is active
        ; [ [s, 'dependencies']
          , [o, 'optionalDependencies']
          , [d, 'devDependencies'] ].forEach(function (f) {
            var flag = f[0]
              , field = f[1]
            if (!flag || !pkg[field] || !pkg[field].hasOwnProperty(a)) return
            changed = true

            if (bundle) {
              var i = bundle.indexOf(a)
              if (i !== -1) bundle.splice(i, 1)
            }

            delete pkg[field][a]
          })
      })
      if (!changed) return cb_(null, data)

      if (bundle) {
        // normalize to the canonical key, dropping the list when empty
        delete pkg.bundledDependencies
        if (bundle.length) {
          pkg.bundleDependencies = bundle
        } else {
          delete pkg.bundleDependencies
        }
      }

      writeFileAtomic(pj, JSON.stringify(pkg, null, 2) + "\n", function (er) {
        return cb_(er, data)
      })
    })
  }
}

115
node_modules/npm/lib/unpublish.js generated vendored Normal file
View File

@@ -0,0 +1,115 @@
module.exports = unpublish
var log = require("npmlog")
var npm = require("./npm.js")
var readJson = require("read-package-json")
var path = require("path")
var mapToRegistry = require("./utils/map-to-registry.js")
var npa = require("npm-package-arg")
var getPublishConfig = require("./utils/get-publish-config.js")
unpublish.usage = "npm unpublish <project>[@<version>]"
// Tab-completion for `npm unpublish`: suggest the logged-in user's own
// packages matching the partial word; when exactly one package matches,
// expand to its published "<name>@<version>" forms.
unpublish.completion = function (opts, cb) {
  if (opts.conf.argv.remain.length >= 3) return cb()
  npm.commands.whoami([], true, function (er, username) {
    // silently bail when not logged in — nothing to complete against
    if (er) return cb()

    var un = encodeURIComponent(username)
    if (!un) return cb()
    var byUser = "-/by-user/" + un
    mapToRegistry(byUser, npm.config, function (er, uri, auth) {
      if (er) return cb(er)

      npm.registry.get(uri, { auth : auth }, function (er, pkgs) {
        // do a bit of filtering at this point, so that we don't need
        // to fetch versions for more than one thing, but also don't
        // accidentally a whole project.
        pkgs = pkgs[un]
        if (!pkgs || !pkgs.length) return cb()
        var pp = npa(opts.partialWord).name
        pkgs = pkgs.filter(function (p) {
          return p.indexOf(pp) === 0
        })
        if (pkgs.length > 1) return cb(null, pkgs)
        mapToRegistry(pkgs[0], npm.config, function (er, uri, auth) {
          if (er) return cb(er)

          npm.registry.get(uri, { auth : auth }, function (er, d) {
            if (er) return cb(er)
            var vers = Object.keys(d.versions)
            if (!vers.length) return cb(null, pkgs)
            return cb(null, vers.map(function (v) {
              return pkgs[0] + "@" + v
            }))
          })
        })
      })
    })
  })
}
// npm unpublish <project>[@<version>]
// Removes one version (or, with --force, the whole project) from the
// registry.  With no argument, falls back to the package.json in cwd.
function unpublish (args, cb) {
  if (args.length > 1) return cb(unpublish.usage)

  var thing = args.length ? npa(args[0]) : {}
    , project = thing.name
    , version = thing.rawSpec

  log.silly("unpublish", "args[0]", args[0])
  log.silly("unpublish", "thing", thing)
  // whole-project removal is destructive; require an explicit --force
  if (!version && !npm.config.get("force")) {
    return cb("Refusing to delete entire project.\n"
             + "Run with --force to do this.\n"
             + unpublish.usage)
  }

  if (!project || path.resolve(project) === npm.localPrefix) {
    // if there's a package.json in the current folder, then
    // read the package name and version out of that.
    var cwdJson = path.join(npm.localPrefix, "package.json")
    return readJson(cwdJson, function (er, data) {
      if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)
      if (er) return cb("Usage:\n" + unpublish.usage)
      log.verbose('unpublish', data)
      gotProject(data.name, data.version, data.publishConfig, cb)
    })
  }
  return gotProject(project, version, cb)
}
// Perform the actual unpublish for a resolved name/version, honoring any
// publishConfig overrides (e.g. a scoped registry).
// publishConfig is optional: when it is omitted, cb_ arrives in its place.
function gotProject (project, version, publishConfig, cb_) {
  if (typeof cb_ !== 'function') {
    cb_ = publishConfig
    publishConfig = null
  }

  // wrap the caller's callback so success prints what was removed
  function cb (er) {
    if (er) return cb_(er)
    console.log("- " + project + (version ? "@" + version : ""))
    cb_()
  }

  var mappedConfig = getPublishConfig(publishConfig, npm.config, npm.registry)
  var config = mappedConfig.config
  var registry = mappedConfig.client

  // remove from the cache first
  npm.commands.cache(["clean", project, version], function (er) {
    if (er) {
      log.error("unpublish", "Failed to clean cache")
      return cb(er)
    }

    mapToRegistry(project, config, function (er, uri, auth) {
      if (er) return cb(er)

      var params = {
        version: version,
        auth: auth
      }
      registry.unpublish(uri, params, cb)
    })
  })
}

58
node_modules/npm/lib/update.js generated vendored Normal file
View File

@@ -0,0 +1,58 @@
/*
for each pkg in prefix that isn't a git repo
look for a new version of pkg that satisfies dep
if so, install it.
if not, then update it
*/
module.exports = update
update.usage = "npm update [pkg]"
var npm = require("./npm.js")
, asyncMap = require("slide").asyncMap
, log = require("npmlog")
// load these, just so that we know that they'll be available, in case
// npm itself is getting overwritten.
, install = require("./install.js")
, build = require("./build.js")
update.completion = npm.commands.outdated.completion
// npm update [pkg...]
// For every outdated package whose installed version differs from the
// wanted (semver-satisfying) version, reinstall it at the wanted version.
function update (args, cb) {
  npm.commands.outdated(args, true, function (er, outdated) {
    if (er) return cb(er)

    var wanted = outdated.filter(function (ww) {
      // ww is [ dir, dep, current, wanted, latest, req ]
      var dep = ww[1]
      var current = ww[2]
      var wanted = ww[3]
      var latest = ww[4]
      if (current === wanted && wanted !== latest) {
        log.verbose(
          'outdated',
          'not updating', dep,
          "because it's currently at the maximum version that matches its specified semver range"
        )
      }
      return current !== wanted
    })
    if (wanted.length === 0) return cb()

    log.info('outdated', 'updating', wanted)
    asyncMap(wanted, function (ww, cb) {
      // [[ dir, dep, has, want, req ]]
      var where = ww[0]
        , dep = ww[1]
        , want = ww[3]
        , what = dep + "@" + want
        , req = ww[5]
        , url = require('url')

      // use the initial installation method (repo, tar, git) for updating
      if (url.parse(req).protocol) what = req

      npm.commands.install(where, what, cb)
    }, cb)
  })
}

58
node_modules/npm/lib/utils/completion.sh generated vendored Normal file
View File

@@ -0,0 +1,58 @@
#!/bin/bash
###-begin-npm-completion-###
#
# npm command completion script
#
# Installation: npm completion >> ~/.bashrc  (or ~/.zshrc)
# Or, maybe: npm completion > /usr/local/etc/bash_completion.d/npm
#

# All three branches delegate the real work to `npm completion`, which
# receives the current command line via COMP_* environment variables and
# prints one candidate per line.

# bash: programmable completion via `complete`
if type complete &>/dev/null; then
  _npm_completion () {
    local words cword
    # prefer the bash-completion helper, which handles = and @ word breaks
    if type _get_comp_words_by_ref &>/dev/null; then
      _get_comp_words_by_ref -n = -n @ -w words -i cword
    else
      cword="$COMP_CWORD"
      words=("${COMP_WORDS[@]}")
    fi

    local si="$IFS"
    # split npm's output strictly on newlines
    IFS=$'\n' COMPREPLY=($(COMP_CWORD="$cword" \
                           COMP_LINE="$COMP_LINE" \
                           COMP_POINT="$COMP_POINT" \
                           npm completion -- "${words[@]}" \
                           2>/dev/null)) || return $?
    IFS="$si"
  }
  complete -o default -F _npm_completion npm
# zsh with new-style completion (compdef/compadd)
elif type compdef &>/dev/null; then
  _npm_completion() {
    local si=$IFS
    compadd -- $(COMP_CWORD=$((CURRENT-1)) \
                 COMP_LINE=$BUFFER \
                 COMP_POINT=0 \
                 npm completion -- "${words[@]}" \
                 2>/dev/null)
    IFS=$si
  }
  compdef _npm_completion npm
# zsh with old-style completion (compctl); reconstruct the line manually
elif type compctl &>/dev/null; then
  _npm_completion () {
    local cword line point words si
    read -Ac words
    read -cn cword
    let cword-=1
    read -l line
    read -ln point
    si="$IFS"
    IFS=$'\n' reply=($(COMP_CWORD="$cword" \
                       COMP_LINE="$line" \
                       COMP_POINT="$point" \
                       npm completion -- "${words[@]}" \
                       2>/dev/null)) || return $?
    IFS="$si"
  }
  compctl -K _npm_completion npm
fi
###-end-npm-completion-###

View File

@@ -0,0 +1,23 @@
module.exports = fileCompletion
var mkdir = require("mkdirp")
, path = require("path")
, glob = require("glob")
// Complete file/directory names under `root` that extend the partial
// path `req`.  `depth` (optional) bounds the recursion; results are
// relative paths joined onto `req`.
function fileCompletion (root, req, depth, cb) {
  // depth is optional; default to unlimited recursion
  if (typeof cb !== "function") cb = depth, depth = Infinity
  mkdir(root, function (er) {
    if (er) return cb(er)

    // match `req` itself as well as anything beneath it
    var pattern = root + "/{" + req + "," + req + "/**/*}"
    var opts = { mark: true, dot: true, maxDepth: depth }

    glob(pattern, opts, function (er, files) {
      if (er) return cb(er)

      var completions = (files || []).map(function (f) {
        var tail = f.substr(root.length + 1).replace(/^\//, "")
        return path.join(req, tail)
      })
      cb(null, completions)
    })
  })
}

View File

@@ -0,0 +1,50 @@
module.exports = installedDeep
var npm = require("../../npm.js")
, readInstalled = require("read-installed")
// Completion helper: list names of all installed packages (to the
// configured depth), local and global.  Global names are returned as
// [name, "-g"] pairs unless npm is already in global mode.
function installedDeep (opts, cb) {
  var local
    , global
    , depth = npm.config.get("depth")
    , opt = { depth: depth, dev: true }

  // gather local names (skipped entirely in global mode)
  if (npm.config.get("global")) local = [], next()
  else readInstalled(npm.prefix, opt, function (er, data) {
    local = getNames(data || {})
    next()
  })

  // gather global names in parallel
  // NOTE(review): this reads npm.config.get("prefix"), not the global
  // prefix -- looks suspicious, confirm against the npm config semantics.
  readInstalled(npm.config.get("prefix"), opt, function (er, data) {
    global = getNames(data || {})
    next()
  })

  // walk the dependency tree collecting unique realNames into `n`
  function getNames_ (d, n) {
    if (d.realName && n) {
      if (n[d.realName]) return n
      n[d.realName] = true
    }
    if (!n) n = {}
    Object.keys(d.dependencies || {}).forEach(function (dep) {
      getNames_(d.dependencies[dep], n)
    })
    return n
  }
  function getNames (d) {
    return Object.keys(getNames_(d))
  }

  // rendezvous: runs once both readInstalled calls have finished
  function next () {
    if (!local || !global) return

    if (!npm.config.get("global")) {
      global = global.map(function (g) {
        return [g, "-g"]
      })
    }
    var names = local.concat(global)
    return cb(null, names)
  }
}

View File

@@ -0,0 +1,79 @@
module.exports = installedShallow
var npm = require("../../npm.js")
, fs = require("graceful-fs")
, path = require("path")
, readJson = require("read-package-json")
, asyncMap = require("slide").asyncMap
// Completion helper: list top-level installed package dirs (local +
// global), optionally filtered by a predicate on their package.json data.
function installedShallow (opts, filter, cb) {
  // filter is optional
  if (typeof cb !== "function") cb = filter, filter = null
  var conf = opts.conf
    , args = conf.argv.remain
  if (args.length > 3) return cb()

  var local
    , global
    , localDir = npm.dir
    , globalDir = npm.globalDir

  // read local package dirs (skipped in global mode), ignoring dotfiles
  if (npm.config.get("global")) local = [], next()
  else fs.readdir(localDir, function (er, pkgs) {
    local = (pkgs || []).filter(function (p) {
      return p.charAt(0) !== "."
    })
    next()
  })

  // read global package dirs in parallel
  fs.readdir(globalDir, function (er, pkgs) {
    global = (pkgs || []).filter(function (p) {
      return p.charAt(0) !== "."
    })
    next()
  })

  // rendezvous: once both listings are in, apply the filter (if any)
  function next () {
    if (!local || !global) return
    filterInstalled(local, global, filter, cb)
  }
}
// Apply `filter` to each candidate package's package.json, local and
// global in parallel; global matches become [name, "-g"] pairs when npm
// is not in global mode.
function filterInstalled (local, global, filter, cb) {
  var fl
    , fg

  // no filter: pass the raw directory listings straight through
  if (!filter) {
    fl = local
    fg = global
    return next()
  }

  asyncMap(local, function (p, cb) {
    readJson(path.join(npm.dir, p, "package.json"), function (er, d) {
      // unreadable or filtered-out packages contribute nothing
      if (!d || !filter(d)) return cb(null, [])
      return cb(null, d.name)
    })
  }, function (er, local) {
    fl = local || []
    next()
  })

  var globalDir = npm.globalDir
  asyncMap(global, function (p, cb) {
    readJson(path.join(globalDir, p, "package.json"), function (er, d) {
      if (!d || !filter(d)) return cb(null, [])
      return cb(null, d.name)
    })
  }, function (er, global) {
    fg = global || []
    next()
  })

  // rendezvous: once both sides resolve, tag globals, merge and return
  function next () {
    if (!fg || !fl) return
    if (!npm.config.get("global")) {
      fg = fg.map(function (g) {
        return [g, "-g"]
      })
    }
    console.error("filtered", fl, fg)
    return cb(null, fl.concat(fg))
  }
}

123
node_modules/npm/lib/utils/correct-mkdir.js generated vendored Normal file
View File

@@ -0,0 +1,123 @@
var chownr = require('chownr')
var dezalgo = require('dezalgo')
var fs = require('graceful-fs')
var inflight = require('inflight')
var log = require('npmlog')
var mkdirp = require('mkdirp')
// memoize the directories created by this step
var stats = {}
var effectiveOwner

// Ensure `path` exists as a directory owned by the effective npm user,
// creating it and fixing ownership if necessary.  Results are memoized in
// `stats`, and concurrent calls for the same path coalesce via inflight.
module.exports = function correctMkdir (path, cb) {
  cb = dezalgo(cb)
  cb = inflight('correctMkdir:' + path, cb)
  if (!cb) {
    // another call already owns this path; our cb was queued onto it
    return log.verbose('correctMkdir', path, 'correctMkdir already in flight; waiting')
  } else {
    log.verbose('correctMkdir', path, 'correctMkdir not in flight; initializing')
  }

  if (stats[path]) return cb(null, stats[path])

  fs.stat(path, function (er, st) {
    if (er) return makeDirectory(path, cb)

    if (!st.isDirectory()) {
      log.error('correctMkdir', 'invalid dir %s', path)
      // NOTE(review): er is null here (stat succeeded), so callers receive
      // no error object for this failure -- looks unintentional; confirm.
      return cb(er)
    }

    var ownerStats = calculateOwner()
    // there's always a chance the permissions could have been frobbed, so fix
    if (st.uid !== ownerStats.uid) {
      stats[path] = ownerStats
      setPermissions(path, ownerStats, cb)
    } else {
      stats[path] = st
      cb(null, stats[path])
    }
  })
}
// Determine the uid/gid that npm-created directories should be owned by.
// Computed once, then memoized in the module-level `effectiveOwner`.
function calculateOwner () {
  if (effectiveOwner) return effectiveOwner

  effectiveOwner = { uid: 0, gid: 0 }

  // Pretty much only on windows: no getuid means ownership is irrelevant
  if (!process.getuid) return effectiveOwner

  effectiveOwner.uid = +process.getuid()
  effectiveOwner.gid = +process.getgid()

  // when running under sudo, prefer the invoking user's ids
  if (effectiveOwner.uid === 0) {
    if (process.env.SUDO_UID) effectiveOwner.uid = +process.env.SUDO_UID
    if (process.env.SUDO_GID) effectiveOwner.gid = +process.env.SUDO_GID
  }

  return effectiveOwner
}
// Create `path` with mkdirp and record the ownership it should end up
// with.  When running as root with a HOME set, ownership is copied from
// $HOME; otherwise the computed effective owner is used.
function makeDirectory (path, cb) {
  cb = inflight('makeDirectory:' + path, cb)
  if (!cb) {
    return log.verbose('makeDirectory', path, 'creation already in flight; waiting')
  } else {
    log.verbose('makeDirectory', path, 'creation not in flight; initializing')
  }

  var owner = calculateOwner()

  if (!process.getuid) {
    // no uid/gid to manage (windows): just create the directory
    return mkdirp(path, function (er) {
      log.verbose('makeCacheDir', 'UID & GID are irrelevant on', process.platform)

      stats[path] = owner
      return cb(er, stats[path])
    })
  }

  if (owner.uid !== 0 || !process.env.HOME) {
    log.silly(
      'makeDirectory', path,
      'uid:', owner.uid,
      'gid:', owner.gid
    )
    stats[path] = owner
    mkdirp(path, afterMkdir)
  } else {
    // running as root: mirror the ownership of $HOME
    fs.stat(process.env.HOME, function (er, st) {
      if (er) {
        log.error('makeDirectory', 'homeless?')
        return cb(er)
      }

      log.silly(
        'makeDirectory', path,
        'uid:', st.uid,
        'gid:', st.gid
      )
      stats[path] = st
      mkdirp(path, afterMkdir)
    })
  }

  function afterMkdir (er, made) {
    // bail when creation failed or the recorded owner ids are unusable
    if (er || !stats[path] || isNaN(stats[path].uid) || isNaN(stats[path].gid)) {
      return cb(er, stats[path])
    }

    // `made` is falsy when mkdirp created nothing: nothing to chown
    if (!made) return cb(er, stats[path])

    setPermissions(made, stats[path], cb)
  }
}
// Recursively chown `path` to the given owner; a path that vanished in
// the meantime is treated as success.
function setPermissions (path, st, cb) {
  chownr(path, st.uid, st.gid, function (er) {
    // the directory may have been removed out from under us; that's fine
    if (er && er.code === 'ENOENT') return cb(null, st)
    return cb(er, st)
  })
}

13
node_modules/npm/lib/utils/depr-check.js generated vendored Normal file
View File

@@ -0,0 +1,13 @@
var log = require("npmlog")
var deprecated = {}
, deprWarned = {}
module.exports = function deprCheck (data) {
if (deprecated[data._id]) data.deprecated = deprecated[data._id]
if (data.deprecated) deprecated[data._id] = data.deprecated
else return
if (!deprWarned[data._id]) {
deprWarned[data._id] = true
log.warn("deprecated", "%s: %s", data._id, data.deprecated)
}
}

414
node_modules/npm/lib/utils/error-handler.js generated vendored Normal file
View File

@@ -0,0 +1,414 @@
module.exports = errorHandler
var cbCalled = false
, log = require("npmlog")
, npm = require("../npm.js")
, rm = require("rimraf")
, itWorked = false
, path = require("path")
, wroteLogFile = false
, exitCode = 0
, rollbacks = npm.rollbacks
, chain = require("slide").chain
, writeStreamAtomic = require("fs-write-stream-atomic")
, nameValidator = require("validate-npm-package-name")
// On process exit: report success/failure, point the user at the debug
// log when one was written, and force a non-zero exit code on failure.
process.on("exit", function (code) {
  // console.error("exit", code)
  if (!npm.config || !npm.config.loaded) return
  if (code) itWorked = false
  if (itWorked) log.info("ok")
  else {
    if (!cbCalled) {
      // a command finished without ever invoking its callback
      log.error("", "cb() never called!")
    }

    if (wroteLogFile) {
      // just a line break
      if (log.levels[log.level] <= log.levels.error) console.error("")

      log.error("",
                ["Please include the following file with any support request:"
                ," " + path.resolve("npm-debug.log")
                ].join("\n"))
      wroteLogFile = false
    }
    if (code) {
      log.error("code", code)
    }
  }

  var doExit = npm.config.get("_exit")
  if (doExit) {
    // actually exit.
    if (exitCode === 0 && !itWorked) {
      exitCode = 1
    }
    if (exitCode !== 0) process.exit(exitCode)
  } else {
    itWorked = false // ready for next exit
  }
})
// Tear down after a command: roll back partially-installed packages,
// write (or remove) npm-debug.log, and trigger the exit handler above.
function exit (code, noLog) {
  exitCode = exitCode || process.exitCode || code

  var doExit = npm.config ? npm.config.get("_exit") : true
  log.verbose("exit", [code, doExit])
  if (log.level === "silent") noLog = true

  if (rollbacks.length) {
    // unbuild everything that was queued for rollback, then exit
    chain(rollbacks.map(function (f) {
      return function (cb) {
        npm.commands.unbuild([f], true, cb)
      }
    }), function (er) {
      if (er) {
        log.error("error rolling back", er)
        if (!code) errorHandler(er)
        else if (noLog) rm("npm-debug.log", reallyExit.bind(null, er))
        else writeLogFile(reallyExit.bind(this, er))
      } else {
        if (!noLog && code) writeLogFile(reallyExit)
        else rm("npm-debug.log", reallyExit)
      }
    })
    rollbacks.length = 0
  }
  // failure: persist the log; success: clean up any stale debug log
  else if (code && !noLog) writeLogFile(reallyExit)
  else rm("npm-debug.log", reallyExit)

  function reallyExit (er) {
    if (er && !code) code = typeof er.errno === "number" ? er.errno : 1

    // truncate once it's been written.
    log.record.length = 0

    itWorked = !code

    // just emit a fake exit event.
    // if we're really exiting, then let it exit on its own, so that
    // in-process stuff can finish or clean up first.
    if (!doExit) process.emit("exit", code)
    npm.spinner.stop()
  }
}
// Top-level CLI error handler: normalize whatever was thrown (falsy,
// string, Error, or arbitrary value), log code-specific guidance for the
// user, and exit with a non-zero status.
//
// Fixes vs. previous revision:
//  - message-derived codes now assign the captured group (m[1]) instead of
//    the whole match array, so `switch (er.code)` can actually match them
//  - removed a stray trailing comma in the ELIFECYCLE message array that
//    created an array hole (and a spurious blank line in the output)
function errorHandler (er) {
  // console.error("errorHandler", er)
  if (!npm.config || !npm.config.loaded) {
    // logging won't work unless we pretend that it's ready
    er = er || new Error("Exit prior to config file resolving.")
    console.error(er.stack || er.message)
  }

  if (cbCalled) {
    er = er || new Error("Callback called more than once.")
  }

  cbCalled = true
  if (!er) return exit(0)
  if (typeof er === "string") {
    log.error("", er)
    return exit(1, true)
  } else if (!(er instanceof Error)) {
    log.error("weird error", er)
    return exit(1, true)
  }

  // derive a code (e.g. "E404") from the message when the error lacks one
  var m = er.code || er.message.match(/^(?:Error: )?(E[A-Z]+)/)
  if (m && !er.code) er.code = m[1]

  // verbose-log any interesting metadata the error carries
  ; [ "type"
    , "fstream_path"
    , "fstream_unc_path"
    , "fstream_type"
    , "fstream_class"
    , "fstream_finish_call"
    , "fstream_linkpath"
    , "stack"
    , "fstream_stack"
    , "statusCode"
    , "pkgid"
    ].forEach(function (k) {
      var v = er[k]
      if (!v) return
      if (k === "fstream_stack") v = v.join("\n")
      log.verbose(k, v)
    })

  log.verbose("cwd", process.cwd())

  var os = require("os")
  // log.error("System", os.type() + " " + os.release())
  // log.error("command", process.argv.map(JSON.stringify).join(" "))
  // log.error("node -v", process.version)
  // log.error("npm -v", npm.version)
  log.error("", os.type() + " " + os.release())
  log.error("argv", process.argv.map(JSON.stringify).join(" "))
  log.error("node", process.version)
  log.error("npm ", "v" + npm.version)

  ; [ "file"
    , "path"
    , "code"
    , "errno"
    , "syscall"
    ].forEach(function (k) {
      var v = er[k]
      if (v) log.error(k, v)
    })

  // just a line break
  if (log.levels[log.level] <= log.levels.error) console.error("")

  switch (er.code) {
  case "ECONNREFUSED":
    log.error("", er)
    log.error("", ["\nIf you are behind a proxy, please make sure that the"
              ,"'proxy' config is set properly. See: 'npm help config'"
              ].join("\n"))
    break

  case "EACCES":
  case "EPERM":
    log.error("", er)
    log.error("", ["\nPlease try running this command again as root/Administrator."
              ].join("\n"))
    break

  case "ELIFECYCLE":
    log.error("", er.message)
    log.error("", ["","Failed at the "+er.pkgid+" "+er.stage+" script '"+er.script+"'."
              ,"This is most likely a problem with the "+er.pkgname+" package,"
              ,"not with npm itself."
              ,"Tell the author that this fails on your system:"
              ," "+er.script
              ,'You can get information on how to open an issue for this project with:'
              ,' npm bugs ' + er.pkgname
              ,'Or if that isn\'t available, you can get their info via:'
              ,' npm owner ls ' + er.pkgname
              ,"There is likely additional logging output above."
              ].join("\n"))
    break

  case "ENOGIT":
    log.error("", er.message)
    log.error("", ["","Failed using git."
              ,"This is most likely not a problem with npm itself."
              ,"Please check if you have git installed and in your PATH."
              ].join("\n"))
    break

  case "EJSONPARSE":
    log.error("", er.message)
    log.error("", "File: "+er.file)
    log.error("", ["Failed to parse package.json data."
              ,"package.json must be actual JSON, not just JavaScript."
              ,"","This is not a bug in npm."
              ,"Tell the package author to fix their package.json file."
              ].join("\n"), "JSON.parse")
    break

  // TODO(isaacs)
  // Add a special case here for E401 and E403 explaining auth issues?

  case "E404":
    var msg = [er.message]
    if (er.pkgid && er.pkgid !== "-") {
      msg.push("", "'" + er.pkgid + "' is not in the npm registry.")

      var valResult = nameValidator(er.pkgid)

      if (valResult.validForNewPackages) {
        msg.push("You should bug the author to publish it (or use the name yourself!)")
      } else {
        msg.push("Your package name is not valid, because", "")

        var errorsArray = (valResult.errors || []).concat(valResult.warnings || [])
        errorsArray.forEach(function(item, idx) {
          msg.push(" " + (idx + 1) + ". " + item)
        })
      }

      if (er.parent) {
        msg.push("It was specified as a dependency of '"+er.parent+"'")
      }
      msg.push("\nNote that you can also install from a"
              ,"tarball, folder, http url, or git url.")
    }
    // There's no need to have 404 in the message as well.
    msg[0] = msg[0].replace(/^404\s+/, "")
    log.error("404", msg.join("\n"))
    break

  case "EPUBLISHCONFLICT":
    log.error("publish fail", ["Cannot publish over existing version."
              ,"Update the 'version' field in package.json and try again."
              ,""
              ,"To automatically increment version numbers, see:"
              ," npm help version"
              ].join("\n"))
    break

  case "EISGIT":
    log.error("git", [er.message
              ," "+er.path
              ,"Refusing to remove it. Update manually,"
              ,"or move it out of the way first."
              ].join("\n"))
    break

  case "ECYCLE":
    log.error("cycle", [er.message
              ,"While installing: "+er.pkgid
              ,"Found a pathological dependency case that npm cannot solve."
              ,"Please report this to the package author."
              ].join("\n"))
    break

  case "EBADPLATFORM":
    log.error("notsup", [er.message
              ,"Not compatible with your operating system or architecture: "+er.pkgid
              ,"Valid OS: "+er.os.join(",")
              ,"Valid Arch: "+er.cpu.join(",")
              ,"Actual OS: "+process.platform
              ,"Actual Arch: "+process.arch
              ].join("\n"))
    break

  case "EEXIST":
    log.error([er.message
              ,"File exists: "+er.path
              ,"Move it away, and try again."].join("\n"))
    break

  case "ENEEDAUTH":
    log.error("need auth", [er.message
              ,"You need to authorize this machine using `npm adduser`"
              ].join("\n"))
    break

  case "EPEERINVALID":
    var peerErrors = Object.keys(er.peersDepending).map(function (peer) {
      return "Peer " + peer + " wants " + er.packageName + "@"
        + er.peersDepending[peer]
    })
    log.error("peerinvalid", [er.message].concat(peerErrors).join("\n"))
    break

  case "ECONNRESET":
  case "ENOTFOUND":
  case "ETIMEDOUT":
  case "EAI_FAIL":
    log.error("network", [er.message
              ,"This is most likely not a problem with npm itself"
              ,"and is related to network connectivity."
              ,"In most cases you are behind a proxy or have bad network settings."
              ,"\nIf you are behind a proxy, please make sure that the"
              ,"'proxy' config is set properly. See: 'npm help config'"
              ].join("\n"))
    break

  case "ENOPACKAGEJSON":
    log.error("package.json", [er.message
              ,"This is most likely not a problem with npm itself."
              ,"npm can't find a package.json file in your current directory."
              ].join("\n"))
    break

  case "ETARGET":
    var targetMsg = [er.message
              ,"This is most likely not a problem with npm itself."
              ,"In most cases you or one of your dependencies are requesting"
              ,"a package version that doesn't exist."
              ]
    if (er.parent) {
      targetMsg.push("\nIt was specified as a dependency of '"+er.parent+"'\n")
    }
    log.error("notarget", targetMsg.join("\n"))
    break

  case "ENOTSUP":
    if (er.required) {
      log.error("notsup", [er.message
                ,"Not compatible with your version of node/npm: "+er.pkgid
                ,"Required: "+JSON.stringify(er.required)
                ,"Actual: "
                +JSON.stringify({npm:npm.version
                                ,node:npm.config.get("node-version")})
                ].join("\n"))
      break
    } // else passthrough

  case "ENOSPC":
    log.error("nospc", [er.message
              ,"This is most likely not a problem with npm itself"
              ,"and is related to insufficient space on your system."
              ].join("\n"))
    break

  case "EROFS":
    log.error("rofs", [er.message
              ,"This is most likely not a problem with npm itself"
              ,"and is related to the file system being read-only."
              ,"\nOften virtualized file systems, or other file systems"
              ,"that don't support symlinks, give this error."
              ].join("\n"))
    break

  case "ENOENT":
    log.error("enoent", [er.message
              ,"This is most likely not a problem with npm itself"
              ,"and is related to npm not being able to find a file."
              ,er.file?"\nCheck if the file '"+er.file+"' is present.":""
              ].join("\n"))
    break

  case "EISDIR":
    log.error("eisdir", [er.message
              ,"This is most likely not a problem with npm itself"
              ,"and is related to npm not being able to find a package.json in"
              ,"a package you are trying to install."
              ].join("\n"))
    break

  default:
    log.error("", er.message || er)
    log.error("", ["", "If you need help, you may report this error at:"
              ," <https://github.com/npm/npm/issues>"
              ].join("\n"))
    break
  }

  exit(typeof er.errno === "number" ? er.errno : 1)
}
// guard so the log file is only ever written once per process
var writingLogFile = false

// Serialize the in-memory log records to ./npm-debug.log (written
// atomically), then invoke cb when the stream has closed.
function writeLogFile (cb) {
  if (writingLogFile) return cb()
  writingLogFile = true
  wroteLogFile = true

  var os = require("os")
  var out = ""

  log.record.forEach(function (m) {
    // prefix each line with "<id> <level> [<prefix>]"
    var pref = [m.id, m.level]
    if (m.prefix) pref.push(m.prefix)
    pref = pref.join(" ")

    m.message.trim().split(/\r?\n/).forEach(function (line) {
      out += (pref + " " + line).trim() + os.EOL
    })
  })

  var fstr = writeStreamAtomic("npm-debug.log")
  fstr.end(out)
  fstr.on("close", cb)
}

197
node_modules/npm/lib/utils/gently-rm.js generated vendored Normal file
View File

@@ -0,0 +1,197 @@
// only remove the thing if it's a symlink into a specific folder.
// This is a very common use-case of npm's, but not so common elsewhere.
module.exports = gentlyRm
var npm = require('../npm.js')
var log = require('npmlog')
var resolve = require('path').resolve
var dirname = require('path').dirname
var lstat = require('graceful-fs').lstat
var readlink = require('graceful-fs').readlink
var isInside = require('path-is-inside')
var vacuum = require('fs-vacuum')
var some = require('async-some')
var asyncMap = require('slide').asyncMap
var normalize = require('path').normalize
// Remove `target` from disk.  When `gently` is set, only remove it if it
// lives inside (or symlinks into) an npm-controlled directory; otherwise
// fail with EEXIST instead of clobbering user files.
// `gently` and `base` are both optional; cb is always last.
function gentlyRm (target, gently, base, cb) {
  if (!cb) {
    cb = base
    base = undefined
  }

  if (!cb) {
    cb = gently
    gently = false
  }

  log.silly(
    'gentlyRm',
    target,
    'is being', gently ? 'gently removed' : 'purged',
    base ? 'from base ' + base : ''
  )

  // never rm the root, prefix, or bin dirs
  //
  // globals included because of `npm link` -- as far as the package requesting
  // the link is concerned, the linked package is always installed globally
  var prefixes = [
    npm.prefix,
    npm.globalPrefix,
    npm.dir,
    npm.root,
    npm.globalDir,
    npm.bin,
    npm.globalBin
  ]

  var resolved = normalize(resolve(npm.prefix, target))
  if (prefixes.indexOf(resolved) !== -1) {
    log.verbose('gentlyRm', resolved, "is part of npm and can't be removed")
    return cb(new Error('May not delete: ' + resolved))
  }

  var options = { log: log.silly.bind(log, 'vacuum-fs') }
  if (npm.config.get('force') || !gently) options.purge = true
  if (base) options.base = normalize(resolve(npm.prefix, base))

  if (!gently) {
    log.verbose('gentlyRm', "don't care about contents; nuking", resolved)
    return vacuum(resolved, options, cb)
  }

  var parent = options.base = normalize(base ? resolve(npm.prefix, base) : npm.prefix)

  // is the parent directory managed by npm?
  log.silly('gentlyRm', 'verifying', parent, 'is an npm working directory')
  some(prefixes, isManaged(parent), function (er, matched) {
    if (er) return cb(er)

    if (!matched) {
      log.error('gentlyRm', 'containing path', parent, "isn't under npm's control")
      return clobberFail(resolved, parent, cb)
    }
    log.silly('gentlyRm', 'containing path', parent, "is under npm's control, in", matched)

    // is the target directly contained within the (now known to be
    // managed) parent?
    if (isInside(resolved, parent)) {
      log.silly('gentlyRm', 'deletion target', resolved, 'is under', parent)
      log.verbose('gentlyRm', 'vacuuming from', resolved, 'up to', parent)
      return vacuum(resolved, options, cb)
    }
    log.silly('gentlyRm', resolved, 'is not under', parent)

    // the target isn't directly within the parent, but is it itself managed?
    log.silly('gentlyRm', 'verifying', resolved, 'is an npm working directory')
    some(prefixes, isManaged(resolved), function (er, matched) {
      if (er) return cb(er)

      if (matched) {
        log.silly('gentlyRm', resolved, "is under npm's control, in", matched)
        options.base = matched
        log.verbose('gentlyRm', 'removing', resolved, 'with base', options.base)
        return vacuum(resolved, options, cb)
      }
      log.verbose('gentlyRm', resolved, "is not under npm's control")

      // the target isn't managed directly, but maybe it's a link...
      log.silly('gentlyRm', 'checking to see if', resolved, 'is a link')
      lstat(resolved, function (er, stat) {
        if (er) {
          // race conditions are common when unbuilding
          if (er.code === 'ENOENT') return cb(null)
          return cb(er)
        }

        if (!stat.isSymbolicLink()) {
          log.error('gentlyRm', resolved, 'is outside', parent, 'and not a link')
          return clobberFail(resolved, parent, cb)
        }

        // ...and maybe the link source, when read...
        log.silly('gentlyRm', resolved, 'is a link')
        readlink(resolved, function (er, link) {
          if (er) {
            // race conditions are common when unbuilding
            if (er.code === 'ENOENT') return cb(null)
            return cb(er)
          }

          // ...is inside the managed parent
          var source = resolve(dirname(resolved), link)
          if (isInside(source, parent)) {
            log.silly('gentlyRm', source, 'symlink target', resolved, 'is inside', parent)
            log.verbose('gentlyRm', 'vacuuming', resolved)
            return vacuum(resolved, options, cb)
          }

          log.error('gentlyRm', source, 'symlink target', resolved, 'is not controlled by npm', parent)
          return clobberFail(target, parent, cb)
        })
      })
    })
  })
}
// memo of symlink-resolved paths, shared across predicate invocations
var resolvedPaths = {}

// Build an async-some predicate that reports whether `target` lies inside
// the candidate npm directory `path` (following symlinks on both sides).
// Yields the matched candidate path when inside, false otherwise.
function isManaged (target) {
  return function predicate (path, cb) {
    if (!path) {
      log.verbose('isManaged', 'no path passed for target', target)
      return cb(null, false)
    }

    asyncMap([path, target], resolveSymlink, function (er, results) {
      if (er) {
        // a missing path can't contain anything
        if (er.code === 'ENOENT') return cb(null, false)

        return cb(er)
      }

      var path = results[0]
      var target = results[1]
      var inside = isInside(target, path)
      if (!inside) log.silly('isManaged', target, 'is not inside', path)

      return cb(null, inside && path)
    })
  }

  // Resolve a path to its symlink source (if it is one), memoizing results.
  function resolveSymlink (toResolve, cb) {
    var resolved = resolve(npm.prefix, toResolve)

    // if the path has already been memoized, return immediately
    var cached = resolvedPaths[resolved]
    if (cached) return cb(null, cached)

    // otherwise, check the path
    lstat(resolved, function (er, stat) {
      if (er) return cb(er)

      // if it's not a link, cache & return the path itself
      if (!stat.isSymbolicLink()) {
        resolvedPaths[resolved] = resolved
        return cb(null, resolved)
      }

      // otherwise, cache & return the link's source
      readlink(resolved, function (er, source) {
        if (er) return cb(er)

        resolved = resolve(resolved, source)
        resolvedPaths[resolved] = resolved
        cb(null, resolved)
      })
    })
  }
}
// Fail a removal attempt with EEXIST: `target` lies outside the managed
// `root`, so npm refuses to delete it.
function clobberFail (target, root, cb) {
  var refusal = new Error('Refusing to delete: ' + target + ' not in ' + root)
  refusal.code = 'EEXIST'
  refusal.path = target
  return cb(refusal)
}

25
node_modules/npm/lib/utils/get-publish-config.js generated vendored Normal file
View File

@@ -0,0 +1,25 @@
var Conf = require('../config/core.js').Conf
var CachingRegClient = require('../cache/caching-client.js')
var log = require('npmlog')
module.exports = getPublishConfig
// Resolve the effective npm config and registry client for a publish-type
// operation.  When a package supplies publishConfig, layer it on top of
// the default config and build a matching caching registry client.
// Returns { config, client }.
function getPublishConfig (publishConfig, defaultConfig, defaultClient) {
  var config = defaultConfig
  var client = defaultClient
  log.verbose('getPublishConfig', publishConfig)
  if (publishConfig) {
    config = new Conf(defaultConfig)
    config.save = defaultConfig.save.bind(defaultConfig)

    // don't modify the actual publishConfig object, in case we have
    // to set a login token or some other data.
    config.unshift(Object.keys(publishConfig).reduce(function (s, k) {
      s[k] = publishConfig[k]
      return s
    }, {}))
    client = new CachingRegClient(config)
  }

  return { config: config, client: client }
}

51
node_modules/npm/lib/utils/git.js generated vendored Normal file
View File

@@ -0,0 +1,51 @@
// handle some git configuration for windows
exports.spawn = spawnGit
exports.chainableExec = chainableExec
exports.whichAndExec = whichAndExec
var exec = require("child_process").execFile
, spawn = require("./spawn")
, npm = require("../npm.js")
, which = require("which")
, git = npm.config.get("git")
, assert = require("assert")
, log = require("npmlog")
// Arguments to prepend to every git invocation.  On Windows, enable
// core.longpaths so deeply-nested node_modules paths don't break git.
function prefixGitArgs () {
  if (process.platform === "win32") {
    return ["-c", "core.longpaths=true"]
  }
  return []
}
// Run git with the given args via child_process.execFile, prepending the
// platform-specific prefix args.
function execGit (args, options, cb) {
  log.info('git', args)
  var fullArgs = prefixGitArgs().concat(args || [])
  return exec(git, fullArgs, options, cb)
}
// Spawn git as a child process, prepending the platform-specific prefix args.
function spawnGit (args, options) {
  log.info("git", args)
  var fullArgs = prefixGitArgs().concat(args || [])
  return spawn(git, fullArgs, options)
}
// Package execGit plus its arguments as a [fn, ...args] tuple, usable in
// slide-style chain() lists.
function chainableExec () {
  var chained = [execGit]
  for (var i = 0; i < arguments.length; i++) {
    chained.push(arguments[i])
  }
  return chained
}
// Locate the configured git binary on the PATH.
function whichGit (cb) {
  return which(git, cb)
}
// Verify that git is available, then run it; a missing git binary is
// surfaced to the caller as an ENOGIT error.
function whichAndExec (args, options, cb) {
  assert.equal(typeof cb, "function", "no callback provided")

  // check for git
  whichGit(function (err) {
    if (!err) return execGit(args, options, cb)

    err.code = "ENOGIT"
    cb(err)
  })
}

369
node_modules/npm/lib/utils/lifecycle.js generated vendored Normal file
View File

@@ -0,0 +1,369 @@
exports = module.exports = lifecycle
exports.cmd = cmd
exports.makeEnv = makeEnv
var log = require("npmlog")
var spawn = require("./spawn")
var npm = require("../npm.js")
var path = require("path")
var fs = require("graceful-fs")
var chain = require("slide").chain
var Stream = require("stream").Stream
var PATH = "PATH"
var uidNumber = require("uid-number")
var umask = require("./umask")
// windows calls it's path "Path" usually, but this is not guaranteed.
// Probe the environment for the actual (case-insensitive) name in use.
if (process.platform === "win32") {
  PATH = "Path"
  Object.keys(process.env).forEach(function (e) {
    if (e.match(/^PATH$/i)) {
      PATH = e
    }
  })
}
// Run the `stage` lifecycle script for `pkg`.
// wd, unsafe, and failOk are all optional; cb is always the last argument.
function lifecycle (pkg, stage, wd, unsafe, failOk, cb) {
  if (typeof cb !== "function") cb = failOk, failOk = false
  if (typeof cb !== "function") cb = unsafe, unsafe = false
  if (typeof cb !== "function") cb = wd, wd = null

  // unwrap read-package-json style { _data: ... } wrappers
  while (pkg && pkg._data) pkg = pkg._data
  if (!pkg) return cb(new Error("Invalid package data"))

  log.info(stage, pkg._id)
  if (!pkg.scripts || npm.config.get('ignore-scripts')) pkg.scripts = {}

  validWd(wd || path.resolve(npm.dir, pkg.name), function (er, wd) {
    if (er) return cb(er)

    unsafe = unsafe || npm.config.get("unsafe-perm")

    // refuse to run scripts outside the package's own install dir
    // unless unsafe-perm allows it
    if ((wd.indexOf(npm.dir) !== 0 ||
          wd.indexOf(pkg.name) !== wd.length - pkg.name.length) &&
        !unsafe && pkg.scripts[stage]) {
      log.warn( "cannot run in wd", "%s %s (wd=%s)"
              , pkg._id, pkg.scripts[stage], wd)
      return cb()
    }

    // set the env variables, then run scripts as a child process.
    var env = makeEnv(pkg)
    env.npm_lifecycle_event = stage
    env.npm_node_execpath = env.NODE = env.NODE || process.execPath
    env.npm_execpath = require.main.filename

    // "nobody" typically doesn't have permission to write to /tmp
    // even if it's never used, sh freaks out.
    if (!npm.config.get("unsafe-perm")) env.TMPDIR = wd

    lifecycle_(pkg, stage, wd, env, unsafe, failOk, cb)
  })
}
// Report via cb(null, bool) whether `pkg` is installed as a symlink
// under npm.dir.  Never yields an error: a failed lstat simply means
// "not a link".
function checkForLink (pkg, cb) {
  var installPath = path.join(npm.dir, pkg.name)
  fs.lstat(installPath, function (statErr, stats) {
    if (statErr) return cb(null, false)
    cb(null, stats.isSymbolicLink())
  })
}
// Assemble the PATH for lifecycle scripts and run the package script
// (when defined) followed by any global hook script for `stage`.
function lifecycle_ (pkg, stage, wd, env, unsafe, failOk, cb) {
  // Collect node_modules/.bin dirs for every nesting level between the
  // install root and this package's wd, innermost level first.
  var pathArr = []
    , p = wd.split("node_modules")
    , acc = path.resolve(p.shift())

  p.forEach(function (pp) {
    pathArr.unshift(path.join(acc, "node_modules", ".bin"))
    acc = path.join(acc, "node_modules", pp)
  })
  pathArr.unshift(path.join(acc, "node_modules", ".bin"))

  // we also unshift the bundled node-gyp-bin folder so that
  // the bundled one will be used for installing things.
  pathArr.unshift(path.join(__dirname, "..", "..", "bin", "node-gyp-bin"))

  // prefer current node interpreter in child scripts
  pathArr.push(path.dirname(process.execPath))

  if (env[PATH]) pathArr.push(env[PATH])
  env[PATH] = pathArr.join(process.platform === "win32" ? ";" : ":")

  var packageLifecycle = pkg.scripts && pkg.scripts.hasOwnProperty(stage)

  if (packageLifecycle) {
    // define this here so it's available to all scripts.
    env.npm_lifecycle_script = pkg.scripts[stage]
  }

  // Shared completion handler: --force swallows any error entirely,
  // failOk downgrades it to a warning.
  function done (er) {
    if (er) {
      if (npm.config.get("force")) {
        log.info("forced, continuing", er)
        er = null
      } else if (failOk) {
        log.warn("continuing anyway", er.message)
        er = null
      }
    }
    cb(er)
  }

  chain
    ( [ packageLifecycle && [runPackageLifecycle, pkg, env, wd, unsafe]
      , [runHookLifecycle, pkg, env, wd, unsafe] ]
    , done )
}
// Walk upward from `d` until an existing directory is found; yield it
// via cb(null, dir).  Fails only when the filesystem root itself is not
// a usable directory.
function validWd (d, cb) {
  fs.stat(d, function (statErr, stats) {
    if (!statErr && stats.isDirectory()) return cb(null, d)

    var parent = path.dirname(d)
    if (parent === d) {
      // reached the root without finding a directory
      return cb(new Error("Could not find suitable wd"))
    }
    validWd(parent, cb)
  })
}
// Run the package's own lifecycle script (already placed in the env by
// lifecycle_) from the package root or nearest parent, with a banner
// echoing what is about to run.
function runPackageLifecycle (pkg, env, wd, unsafe, cb) {
  var stage = env.npm_lifecycle_event
  var script = env.npm_lifecycle_script
  var banner = "\n> " + pkg._id + " " + stage + " " + wd +
               "\n> " + script + "\n"

  runCmd(banner, script, pkg, env, stage, wd, unsafe, cb)
}
// FIFO used to serialize lifecycle script execution: runCmd() enqueues
// while `running` is set; dequeue() (invoked after each script's
// callback) starts the next waiting invocation.
var running = false
var queue = []
function dequeue() {
  running = false
  if (queue.length) {
    // replay the saved runCmd arguments for the next queued script
    var r = queue.shift()
    runCmd.apply(null, r)
  }
}
// Serialized entry point for running one lifecycle command.  Only one
// script runs at a time; concurrent calls wait in `queue` (see dequeue
// above).  Resolves the configured user/group to numeric ids before
// spawning unless running "unsafe".
function runCmd (note, cmd, pkg, env, stage, wd, unsafe, cb) {
  if (running) {
    queue.push([note, cmd, pkg, env, stage, wd, unsafe, cb])
    return
  }

  running = true
  log.pause()
  var user = unsafe ? null : npm.config.get("user")
    , group = unsafe ? null : npm.config.get("group")

  if (log.level !== 'silent') {
    // erase any in-progress spinner line before printing the banner
    if (npm.spinner.int) {
      npm.config.get("logstream").write("\r \r")
    }
    console.log(note)
  }
  log.verbose("unsafe-perm in lifecycle", unsafe)

  // Windows has no setuid/setgid semantics, so always run "unsafe" there.
  if (process.platform === "win32") {
    unsafe = true
  }

  if (unsafe) {
    runCmd_(cmd, pkg, env, wd, stage, unsafe, 0, 0, cb)
  } else {
    uidNumber(user, group, function (er, uid, gid) {
      runCmd_(cmd, pkg, env, wd, stage, unsafe, uid, gid, cb)
    })
  }
}
// Spawn the lifecycle command through `sh -c` (cmd.exe on Windows),
// optionally dropping privileges to uid/gid.  Wraps the callback so
// logging resumes and the next queued script is released exactly once
// the caller has been notified.
function runCmd_ (cmd, pkg, env, wd, stage, unsafe, uid, gid, cb_) {

  function cb (er) {
    cb_.apply(null, arguments)
    log.resume()
    // let the next queued lifecycle script run
    process.nextTick(dequeue)
  }

  var conf = { cwd: wd
             , env: env
             , stdio: [ 0, 1, 2 ]
             }

  if (!unsafe) {
    // uid/gid may arrive as numeric strings; ^0 coerces to int
    conf.uid = uid ^ 0
    conf.gid = gid ^ 0
  }

  var sh = 'sh'
  var shFlag = '-c'

  if (process.platform === 'win32') {
    sh = process.env.comspec || 'cmd'
    shFlag = '/d /s /c'
    conf.windowsVerbatimArguments = true
  }

  var proc = spawn(sh, [shFlag, cmd], conf)
  proc.on("error", procError)
  proc.on("close", function (code, signal) {
    if (signal) {
      // child died from a signal: re-raise it against this process
      process.kill(process.pid, signal);
    } else if (code) {
      var er = new Error("Exit status " + code)
    }
    // NOTE: `er` is hoisted -- it is undefined (success path) unless
    // the non-zero-exit branch above assigned it.
    procError(er)
  })

  function procError (er) {
    if (er && !npm.ROLLBACK) {
      log.info(pkg._id, "Failed to exec "+stage+" script")
      er.message = pkg._id + " "
                 + stage + ": `" + cmd +"`\n"
                 + er.message
      if (er.code !== "EPERM") {
        er.code = "ELIFECYCLE"
      }
      er.pkgid = pkg._id
      er.stage = stage
      er.script = cmd
      er.pkgname = pkg.name
      return cb(er)
    } else if (er) {
      // during rollback, script failures are logged but never fatal
      log.error(pkg._id+"."+stage, er)
      log.error(pkg._id+"."+stage, "continuing anyway")
      return cb()
    }
    cb(er)
  }
}
// Run a global hook script from node_modules/.hooks/<stage>, if one is
// installed; otherwise succeed silently.
//
// Cleanup: the original computed `user`, `group`, and a `cmd` alias for
// `hook` but never used them (runCmd itself resolves user/group later),
// so those dead locals are removed.
function runHookLifecycle (pkg, env, wd, unsafe, cb) {
  var stage = env.npm_lifecycle_event
  var hook = path.join(npm.dir, ".hooks", stage)

  fs.stat(hook, function (er) {
    // no hook installed for this stage: nothing to do
    if (er) return cb()

    var note = "\n> " + pkg._id + " " + stage + " " + wd +
               "\n> " + hook
    runCmd(note, hook, pkg, env, stage, wd, unsafe, cb)
  })
}
// Build the environment object for lifecycle scripts.
//
// The top-level call (no env given) starts from a copy of process.env
// (minus existing npm_* vars) and flattens `data` (the package.json)
// into npm_package_* variables, recursing into nested objects.  The
// top-level call then also exports npm_config_* entries and collects
// package-/version-scoped config into npm_package_config_*.
function makeEnv (data, prefix, env) {
  prefix = prefix || "npm_package_"
  if (!env) {
    env = {}
    for (var i in process.env) if (!i.match(/^npm_/)) {
      env[i] = process.env[i]
    }

    // npat asks for tap output
    if (npm.config.get("npat")) env.TAP = 1

    // express and others respect the NODE_ENV value.
    if (npm.config.get("production")) env.NODE_ENV = "production"

  } else if (!data.hasOwnProperty("_lifecycleEnv")) {
    // stash the env on the package data (non-enumerable) for reuse
    Object.defineProperty(data, "_lifecycleEnv",
      { value : env
      , enumerable : false
      })
  }

  for (var i in data) if (i.charAt(0) !== "_") {
    var envKey = (prefix+i).replace(/[^a-zA-Z0-9_]/g, '_')
    if (i === "readme") {
      continue
    }
    if (data[i] && typeof(data[i]) === "object") {
      try {
        // quick and dirty detection for cyclical structures
        JSON.stringify(data[i])
        makeEnv(data[i], envKey+"_", env)
      } catch (ex) {
        // usually these are package objects.
        // just get the path and basic details.
        var d = data[i]
        makeEnv( { name: d.name, version: d.version, path:d.path }
               , envKey+"_", env)
      }
    } else {
      env[envKey] = String(data[i])
      // JSON-quote multi-line values so they survive env transport
      env[envKey] = -1 !== env[envKey].indexOf("\n")
                  ? JSON.stringify(env[envKey])
                  : env[envKey]
    }
  }

  // recursion below this point runs only for the top-level call
  if (prefix !== "npm_package_") return env

  prefix = "npm_config_"
  var pkgConfig = {}
    , keys = npm.config.keys
    , pkgVerConfig = {}
    , namePref = data.name + ":"
    , verPref = data.name + "@" + data.version + ":"

  keys.forEach(function (i) {
    // in some rare cases (e.g. working with nerf darts), there are segmented
    // "private" (underscore-prefixed) config names -- don't export
    if (i.charAt(0) === '_' && i.indexOf('_' + namePref) !== 0 || i.match(/:_/)) {
      return
    }
    var value = npm.config.get(i)
    if (value instanceof Stream || Array.isArray(value)) return
    if (i.match(/umask/)) value = umask.toString(value)
    if (!value) value = ""
    else if (typeof value === "number") value = "" + value
    else if (typeof value !== "string") value = JSON.stringify(value)

    value = -1 !== value.indexOf("\n")
          ? JSON.stringify(value)
          : value
    i = i.replace(/^_+/, "")
    // "name:key" and "name@version:key" entries are package-scoped
    // config, gathered separately and exported below
    if (i.indexOf(namePref) === 0) {
      var k = i.substr(namePref.length).replace(/[^a-zA-Z0-9_]/g, "_")
      pkgConfig[ k ] = value
    } else if (i.indexOf(verPref) === 0) {
      var k = i.substr(verPref.length).replace(/[^a-zA-Z0-9_]/g, "_")
      pkgVerConfig[ k ] = value
    }
    var envKey = (prefix+i).replace(/[^a-zA-Z0-9_]/g, "_")
    env[envKey] = value
  })

  prefix = "npm_package_config_"
  ;[pkgConfig, pkgVerConfig].forEach(function (conf) {
    for (var i in conf) {
      var envKey = (prefix+i)
      env[envKey] = conf[i]
    }
  })

  return env
}
// Build an `npm <stage>` command handler that delegates to
// `npm run-script <stage>`, including usage text and tab-completion of
// installed packages that define that script.
function cmd (stage) {
  var installedShallow = require("./completion/installed-shallow.js")

  function CMD (args, cb) {
    npm.commands["run-script"]([stage].concat(args), cb)
  }

  CMD.usage = "npm " + stage + " [-- <args>]"
  CMD.completion = function (opts, cb) {
    installedShallow(opts, function (pkgData) {
      return pkgData.scripts && pkgData.scripts[stage]
    }, cb)
  }

  return CMD
}

40
node_modules/npm/lib/utils/link.js generated vendored Normal file
View File

@@ -0,0 +1,40 @@
module.exports = link
link.ifExists = linkIfExists
var fs = require("graceful-fs")
, chain = require("slide").chain
, mkdir = require("mkdirp")
, rm = require("./gently-rm.js")
, path = require("path")
, npm = require("../npm.js")
// Create the symlink only when the source path exists; when `from` is
// missing, succeed silently without touching `to`.
function linkIfExists (from, to, gently, cb) {
  fs.stat(from, function (statErr) {
    if (statErr) return cb()
    link(from, to, gently, cb)
  })
}
// Symlink `from` at `to`, removing whatever currently occupies `to`.
// Optional args (gently, abs) shift left when omitted.  A relative link
// target is preferred when it is shorter; pass abs=true to force an
// absolute target.  Fails if `from` does not exist.
function link (from, to, gently, abs, cb) {
  if (typeof cb !== "function") cb = abs, abs = false
  if (typeof cb !== "function") cb = gently, gently = null
  // --force disables gently-rm's safety check on what may be removed
  if (npm.config.get("force")) gently = false

  to = path.resolve(to)
  var target = from = path.resolve(from)
  if (!abs && process.platform !== "win32") {
    // junctions on windows must be absolute
    target = path.relative(path.dirname(to), from)
    // if there is no folder in common, then it will be much
    // longer, and using a relative link is dumb.
    if (target.length >= from.length) target = from
  }

  chain
    ( [ [fs, "stat", from]
      , [rm, to, gently]
      , [mkdir, path.dirname(to)]
      , [fs, "symlink", target, to, "junction"] ]
    , cb)
}

73
node_modules/npm/lib/utils/locker.js generated vendored Normal file
View File

@@ -0,0 +1,73 @@
var crypto = require("crypto")
var resolve = require("path").resolve
var lockfile = require("lockfile")
var log = require("npmlog")
var mkdirp = require("mkdirp")
var npm = require("../npm.js")
var correctMkdir = require('../utils/correct-mkdir.js')
var installLocks = {}
// Compute the lock-file path for resolve(base, name) under npm's cache
// "_locks" dir: a sanitized name prefix for readability plus a sha1 of
// the full resolved path for uniqueness.
function lockFileName (base, name) {
  var clean = name.replace(/[^a-zA-Z0-9]+/g, "-").replace(/^-+|-+$/g, "")
  var full = resolve(base, name)
  var hash = crypto.createHash("sha1").update(full).digest("hex")
  var locksDir = resolve(npm.cache, "_locks")
  return resolve(locksDir, clean.substr(0, 24) + "-" + hash.substr(0, 16) + ".lock")
}
// Acquire the cache lock protecting resolve(base, name), creating the
// <cache>/_locks directory first.  Stale/retry/wait behavior comes from
// the cache-lock-* config settings.  Successful acquisitions are noted
// in installLocks so unlock() can verify pairing.
function lock (base, name, cb) {
  var lockDir = resolve(npm.cache, "_locks")
  correctMkdir(lockDir, function (er) {
    if (er) return cb(er)

    var opts = { stale: npm.config.get("cache-lock-stale")
               , retries: npm.config.get("cache-lock-retries")
               , wait: npm.config.get("cache-lock-wait") }
    var lf = lockFileName(base, name)
    lockfile.lock(lf, opts, function (er) {
      if (er) log.warn("locking", lf, "failed", er)

      if (!er) {
        log.verbose("lock", "using", lf, "for", resolve(base, name))
        // remember that we hold this lock
        installLocks[lf] = true
      }

      cb(er)
    })
  })
}
// Release a lock previously taken with lock().  installLocks tracks
// three states per lock file: true (held), false (already released,
// no-op), and undefined (never locked -- a programmer error, thrown).
function unlock (base, name, cb) {
  var lf = lockFileName(base, name)
    , locked = installLocks[lf]
  if (locked === false) {
    // already released; nothing to do
    return process.nextTick(cb)
  }
  else if (locked === true) {
    lockfile.unlock(lf, function (er) {
      if (er) {
        log.warn("unlocking", lf, "failed", er)
      }
      else {
        installLocks[lf] = false
        log.verbose("unlock", "done using", lf, "for", resolve(base, name))
      }

      cb(er)
    })
  }
  else {
    throw new Error(
      "Attempt to unlock " + resolve(base, name) + ", which hasn't been locked"
    )
  }
}
module.exports = {
lock : lock,
unlock : unlock
}

100
node_modules/npm/lib/utils/map-to-registry.js generated vendored Normal file
View File

@@ -0,0 +1,100 @@
var url = require("url")
var log = require("npmlog")
, npa = require("npm-package-arg")
module.exports = mapToRegistry
// Resolve `name` to a registry request URI plus the credentials that
// may accompany it.  Registry selection precedence: scope embedded in
// the package name, then the --scope config, then the default registry.
// Yields cb(null, uri, auth, normalizedRegistry).
function mapToRegistry(name, config, cb) {
  log.silly("mapToRegistry", "name", name)
  var registry

  // the name itself takes precedence
  var data = npa(name)
  if (data.scope) {
    // the name is definitely scoped, so escape now
    name = name.replace("/", "%2f")

    log.silly("mapToRegistry", "scope (from package name)", data.scope)

    registry = config.get(data.scope + ":registry")
    if (!registry) {
      log.verbose("mapToRegistry", "no registry URL found in name for scope", data.scope)
    }
  }

  // ...then --scope=@scope or --scope=scope
  var scope = config.get("scope")
  if (!registry && scope) {
    // I'm an enabler, sorry
    if (scope.charAt(0) !== "@") scope = "@" + scope

    log.silly("mapToRegistry", "scope (from config)", scope)

    registry = config.get(scope + ":registry")
    if (!registry) {
      log.verbose("mapToRegistry", "no registry URL found in config for scope", scope)
    }
  }

  // ...and finally use the default registry
  if (!registry) {
    log.silly("mapToRegistry", "using default registry")
    registry = config.get("registry")
  }

  log.silly("mapToRegistry", "registry", registry)

  var auth = config.getCredentialsByURI(registry)

  // normalize registry URL so resolution doesn't drop a piece of registry URL
  var normalized = registry.slice(-1) !== '/' ? registry + '/' : registry
  var uri
  log.silly('mapToRegistry', 'data', data)
  // "remote" specs are already full URLs (e.g. tarball links); use them
  // as-is instead of resolving against the registry
  if (data.type === 'remote') {
    uri = data.spec
  } else {
    uri = url.resolve(normalized, name)
  }
  log.silly('mapToRegistry', 'uri', uri)

  cb(null, uri, scopeAuth(uri, registry, auth), normalized)
}
// Decide which credentials may accompany a request to `uri`.  The
// credentials configured for `registry` are forwarded only when the
// request targets the same host, or when alwaysAuth is set; otherwise
// they are stripped (left undefined) to avoid leaking them cross-host.
function scopeAuth (uri, registry, auth) {
  var cleaned = {
    scope: auth.scope,
    email: auth.email,
    alwaysAuth: auth.alwaysAuth,
    token: undefined,
    username: undefined,
    password: undefined,
    auth: undefined
  }

  var hasCredentials = auth.token || auth.auth || (auth.username && auth.password)
  if (hasCredentials) {
    var requestHost = url.parse(uri).hostname
    var registryHost = url.parse(registry).hostname
    var sameHost = requestHost === registryHost

    if (!sameHost && auth.alwaysAuth) {
      log.verbose('scopeAuth', 'alwaysAuth set for', registry)
    }

    if (sameHost || auth.alwaysAuth) {
      cleaned.token = auth.token
      cleaned.auth = auth.auth
      cleaned.username = auth.username
      cleaned.password = auth.password
    } else {
      log.silly('scopeAuth', uri, "doesn't share host with registry", registry)
    }
  }

  return cleaned
}

12
node_modules/npm/lib/utils/read-local-package.js generated vendored Normal file
View File

@@ -0,0 +1,12 @@
exports = module.exports = readLocalPkg
var npm = require("../npm.js")
, readJson = require("read-package-json")
// Read package.json under the local prefix and yield cb(er, name).
// In global mode there is no local package, so yield nothing at all.
function readLocalPkg (cb) {
  if (npm.config.get("global")) return cb()
  var path = require("path")
  readJson(path.resolve(npm.prefix, "package.json"), function (er, data) {
    cb(er, data && data.name)
  })
}

34
node_modules/npm/lib/utils/spawn.js generated vendored Normal file
View File

@@ -0,0 +1,34 @@
module.exports = spawn
var _spawn = require("child_process").spawn
var EventEmitter = require("events").EventEmitter
// Wrap child_process.spawn in a fresh EventEmitter that (a) tags
// "error" events with the command name and (b) converts exit code 127
// into an ENOENT "error" event, because Node.js v0.8 did not emit one
// itself when the command could not be found.
function spawn (cmd, args, options) {
  var child = _spawn(cmd, args, options)
  var cooked = new EventEmitter()

  child.on("error", function (er) {
    er.file = cmd
    cooked.emit("error", er)
  })
  child.on("close", function (code, signal) {
    if (code === 127) {
      var notFound = new Error('spawn ENOENT')
      notFound.code = 'ENOENT'
      notFound.errno = 'ENOENT'
      notFound.syscall = 'spawn'
      notFound.file = cmd
      cooked.emit('error', notFound)
    } else {
      cooked.emit("close", code, signal)
    }
  })

  cooked.stdin = child.stdin
  cooked.stdout = child.stdout
  cooked.stderr = child.stderr
  cooked.kill = function (sig) { return child.kill(sig) }

  return cooked
}

291
node_modules/npm/lib/utils/tar.js generated vendored Normal file
View File

@@ -0,0 +1,291 @@
// commands for packing and unpacking tarballs
// this file is used by lib/cache.js

var npm = require("../npm.js")
  , fs = require("graceful-fs")
  , writeFileAtomic = require("write-file-atomic")
  , writeStreamAtomic = require("fs-write-stream-atomic")
  , path = require("path")
  , log = require("npmlog")
  , uidNumber = require("uid-number")
  , rm = require("./gently-rm.js")
  , readJson = require("read-package-json")
  , myUid = process.getuid && process.getuid()
  , myGid = process.getgid && process.getgid()
  , tar = require("tar")
  , zlib = require("zlib")
  , fstream = require("fstream")
  , Packer = require("fstream-npm")
  , lifecycle = require("./lifecycle.js")

// Under sudo, prefer the invoking user's uid/gid so extracted files are
// not owned by root.
if (process.env.SUDO_UID && myUid === 0) {
  if (!isNaN(process.env.SUDO_UID)) myUid = +process.env.SUDO_UID
  if (!isNaN(process.env.SUDO_GID)) myGid = +process.env.SUDO_GID
}

exports.pack = pack
exports.unpack = unpack
// Create a gzipped package tarball of `folder` at `tarball`.  When
// `dfc` ("do fancy crap") is set, the prepublish lifecycle script runs
// first; `dfc` may be omitted, shifting the callback left.
function pack (tarball, folder, pkg, dfc, cb) {
  log.verbose("tar pack", [tarball, folder])
  if (typeof cb !== "function") cb = dfc, dfc = false

  log.verbose("tarball", tarball)
  log.verbose("folder", folder)

  if (!dfc) return pack_(tarball, folder, pkg, cb)

  lifecycle(pkg, "prepublish", folder, function (er) {
    if (er) return cb(er)
    pack_(tarball, folder, pkg, cb)
  })
}
function pack_ (tarball, folder, pkg, cb) {
new Packer({ path: folder, type: "Directory", isDirectory: true })
.on("error", function (er) {
if (er) log.error("tar pack", "Error reading " + folder)
return cb(er)
})
// By default, npm includes some proprietary attributes in the
// package tarball. This is sane, and allowed by the spec.
// However, npm *itself* excludes these from its own package,
// so that it can be more easily bootstrapped using old and
// non-compliant tar implementations.
.pipe(tar.Pack({ noProprietary: !npm.config.get("proprietary-attribs") }))
.on("error", function (er) {
if (er) log.error("tar.pack", "tar creation error", tarball)
cb(er)
})
.pipe(zlib.Gzip())
.on("error", function (er) {
if (er) log.error("tar.pack", "gzip error "+tarball)
cb(er)
})
.pipe(writeStreamAtomic(tarball))
.on("error", function (er) {
if (er) log.error("tar.pack", "Could not write "+tarball)
cb(er)
})
.on("close", cb)
}
// Public unpack entry point.  Optional dMode/fMode/uid/gid shift left
// when omitted (defaults come from npm.modes); user/group names are
// resolved to numeric ids before the real work happens in unpack_.
function unpack (tarball, unpackTarget, dMode, fMode, uid, gid, cb) {
  log.verbose("tar", "unpack", tarball)
  log.verbose("tar", "unpacking to", unpackTarget)
  if (typeof cb !== "function") cb = gid, gid = null
  if (typeof cb !== "function") cb = uid, uid = null
  if (typeof cb !== "function") cb = fMode, fMode = npm.modes.file
  if (typeof cb !== "function") cb = dMode, dMode = npm.modes.exec

  uidNumber(uid, gid, function (er, uid, gid) {
    if (er) return cb(er)
    unpack_(tarball, unpackTarget, dMode, fMode, uid, gid, cb)
  })
}
// Wipe the target directory, extract the tarball into it, then read
// the unpacked package.json and yield its parsed data.
function unpack_ ( tarball, unpackTarget, dMode, fMode, uid, gid, cb ) {
  rm(unpackTarget, function (er) {
    if (er) return cb(er)
    // gzip {tarball} --decompress --stdout \
    //   | tar -mvxpf - --strip-components=1 -C {unpackTarget}
    gunzTarPerm( tarball, unpackTarget
               , dMode, fMode
               , uid, gid
               , function (er, folder) {
      if (er) return cb(er)
      readJson(path.resolve(folder, "package.json"), cb)
    })
  })
}
// Extract `tarball` into `target`, normalizing modes and ownership.
// Sniffs the first chunk to support three input formats: a gzipped
// tarball, a naked tarball, or a single naked .js file (wrapped into
// index.js plus a rewritten package.json).  The callback is guarded so
// it fires at most once despite multiple stream error paths.
function gunzTarPerm (tarball, target, dMode, fMode, uid, gid, cb_) {
  if (!dMode) dMode = npm.modes.exec
  if (!fMode) fMode = npm.modes.file
  log.silly("gunzTarPerm", "modes", [dMode.toString(8), fMode.toString(8)])

  var cbCalled = false
  function cb (er) {
    if (cbCalled) return
    cbCalled = true
    cb_(er, target)
  }

  var fst = fs.createReadStream(tarball)

  fst.on("open", function (fd) {
    fs.fstat(fd, function (er, st) {
      if (er) return fst.emit("error", er)
      if (st.size === 0) {
        er = new Error("0-byte tarball\n" +
                       "Please run `npm cache clean`")
        fst.emit("error", er)
      }
    })
  })

  // figure out who we're supposed to be, if we're not pretending
  // to be a specific user.
  if (npm.config.get("unsafe-perm") && process.platform !== "win32") {
    uid = myUid
    gid = myGid
  }

  function extractEntry (entry) {
    log.silly("gunzTarPerm", "extractEntry", entry.path)
    // never create things that are user-unreadable,
    // or dirs that are user-un-listable. Only leads to headaches.
    var originalMode = entry.mode = entry.mode || entry.props.mode
    entry.mode = entry.mode | (entry.type === "Directory" ? dMode : fMode)
    entry.mode = entry.mode & (~npm.modes.umask)
    entry.props.mode = entry.mode
    if (originalMode !== entry.mode) {
      log.silly( "gunzTarPerm", "modified mode"
               , [entry.path, originalMode, entry.mode])
    }

    // if there's a specific owner uid/gid that we want, then set that
    if (process.platform !== "win32" &&
        typeof uid === "number" &&
        typeof gid === "number") {
      entry.props.uid = entry.uid = uid
      entry.props.gid = entry.gid = gid
    }
  }

  var extractOpts = { type: "Directory", path: target, strip: 1 }

  if (process.platform !== "win32" &&
      typeof uid === "number" &&
      typeof gid === "number") {
    extractOpts.uid = uid
    extractOpts.gid = gid
  }

  // tracks .npmignore files seen so a sibling .gitignore is not renamed
  // over them (see filter below)
  var sawIgnores = {}
  extractOpts.filter = function () {
    // symbolic links are not allowed in packages.
    if (this.type.match(/^.*Link$/)) {
      log.warn( "excluding symbolic link"
              , this.path.substr(target.length + 1)
              + " -> " + this.linkpath )
      return false
    }

    // Note: This mirrors logic in the fs read operations that are
    // employed during tarball creation, in the fstream-npm module.
    // It is duplicated here to handle tarballs that are created
    // using other means, such as system tar or git archive.
    if (this.type === "File") {
      var base = path.basename(this.path)
      if (base === ".npmignore") {
        sawIgnores[ this.path ] = true
      } else if (base === ".gitignore") {
        var npmignore = this.path.replace(/\.gitignore$/, ".npmignore")
        if (sawIgnores[npmignore]) {
          // Skip this one, already seen.
          return false
        } else {
          // Rename, may be clobbered later.
          this.path = npmignore
          this._path = npmignore
        }
      }
    }

    return true
  }

  fst
    .on("error", function (er) {
      if (er) log.error("tar.unpack", "error reading "+tarball)
      cb(er)
    })
    .on("data", function OD (c) {
      // detect what it is.
      // Then, depending on that, we'll figure out whether it's
      // a single-file module, gzipped tarball, or naked tarball.
      // gzipped files all start with 1f8b08
      if (c[0] === 0x1F &&
          c[1] === 0x8B &&
          c[2] === 0x08) {
        fst
          .pipe(zlib.Unzip())
          .on("error", function (er) {
            if (er) log.error("tar.unpack", "unzip error "+tarball)
            cb(er)
          })
          .pipe(tar.Extract(extractOpts))
          .on("entry", extractEntry)
          .on("error", function (er) {
            if (er) log.error("tar.unpack", "untar error "+tarball)
            cb(er)
          })
          .on("close", cb)
      } else if (hasTarHeader(c)) {
        // naked tar
        fst
          .pipe(tar.Extract(extractOpts))
          .on("entry", extractEntry)
          .on("error", function (er) {
            if (er) log.error("tar.unpack", "untar error "+tarball)
            cb(er)
          })
          .on("close", cb)
      } else {
        // naked js file
        var jsOpts = { path: path.resolve(target, "index.js") }

        if (process.platform !== "win32" &&
            typeof uid === "number" &&
            typeof gid === "number") {
          jsOpts.uid = uid
          jsOpts.gid = gid
        }

        fst
          .pipe(fstream.Writer(jsOpts))
          .on("error", function (er) {
            if (er) log.error("tar.unpack", "copy error "+tarball)
            cb(er)
          })
          .on("close", function () {
            var j = path.resolve(target, "package.json")
            readJson(j, function (er, d) {
              if (er) {
                log.error("not a package", tarball)
                return cb(er)
              }
              writeFileAtomic(j, JSON.stringify(d) + "\n", cb)
            })
          })
      }

      // now un-hook, and re-emit the chunk
      fst.removeListener("data", OD)
      fst.emit("data", c)
    })
}
// Detect a tar archive by its header: the ustar magic ("ustar", bytes
// 75 73 74 61 72) at offset 257, followed at offsets 262-264 by either
// "00" (POSIX ustar version) or " \0" (old GNU tar).
function hasTarHeader (c) {
  var hasMagic =
    c[257] === 0x75 && // 'u'
    c[258] === 0x73 && // 's'
    c[259] === 0x74 && // 't'
    c[260] === 0x61 && // 'a'
    c[261] === 0x72    // 'r'
  if (!hasMagic) return false

  var posixVersion = c[262] === 0x00 && c[263] === 0x30 && c[264] === 0x30
  var gnuVersion = c[262] === 0x20 && c[263] === 0x20 && c[264] === 0x00
  return posixVersion || gnuVersion
}

17
node_modules/npm/lib/utils/umask.js generated vendored Normal file
View File

@@ -0,0 +1,17 @@
var umask = require("umask")
var npmlog = require("npmlog")

var _fromString = umask.fromString

module.exports = umask

// fromString with logging callback
// Wrap umask.fromString so that an invalid umask string is logged as a
// warning and the (possibly unchanged) value is returned, sparing every
// caller from handling the error itself.
umask.fromString = function (val) {
  _fromString(val, function (err, result) {
    if (err) {
      npmlog.warn("invalid umask", err.message)
    }
    val = result
  })
  // NOTE(review): relies on umask.fromString invoking its callback
  // synchronously; otherwise `val` would be returned before being
  // updated -- confirm against the umask package.
  return val
}

24
node_modules/npm/lib/utils/warn-deprecated.js generated vendored Normal file
View File

@@ -0,0 +1,24 @@
module.exports = warnDeprecated
var log = require("npmlog")
var deprecations = {}
// Build a warner for `type` that logs a message list at most once:
// once per `instance` when an instance key is given, otherwise once for
// the whole type.  Dedup state lives in the module-level `deprecations`
// map.
function warnDeprecated (type) {
  return function warn (messages, instance) {
    var seen = deprecations[type]

    if (!instance) {
      // type-level warning: only the very first call logs
      if (seen) return
      deprecations[type] = {}
      messages.forEach(function (m) { log.warn(type, m) })
      return
    }

    if (!seen) seen = deprecations[type] = {}
    if (seen[instance]) return
    seen[instance] = true
    messages.forEach(function (m) { log.warn(type, m) })
  }
}

209
node_modules/npm/lib/version.js generated vendored Normal file
View File

@@ -0,0 +1,209 @@
// npm version <newver>
module.exports = version

var semver = require('semver')
var path = require('path')
var fs = require('graceful-fs')
var writeFileAtomic = require('write-file-atomic')
var chain = require('slide').chain
var log = require('npmlog')
var npm = require('./npm.js')
var git = require('./utils/git.js')
var assert = require('assert')
var lifecycle = require('./utils/lifecycle.js')

// Usage text for bad invocations; also points at related commands.
version.usage = 'npm version [<newversion> | major | minor | patch | premajor | preminor | prepatch | prerelease]' +
                '\n(run in package dir)\n' +
                "'npm -v' or 'npm --version' to print npm version " +
                '(' + npm.version + ')\n' +
                "'npm view <pkg> version' to view a package's " +
                'published version\n' +
                "'npm ls' to inspect current package/dependency versions"
// npm version entry point.  With no args, print a versions table (works
// even without a package.json).  With one arg (an explicit semver or an
// increment keyword), bump package.json and npm-shrinkwrap.json, running
// the preversion/version/postversion lifecycle scripts and committing +
// tagging in git when applicable.
function version (args, silent, cb_) {
  if (typeof cb_ !== 'function') {
    cb_ = silent
    silent = false
  }
  if (args.length > 1) return cb_(version.usage)

  var packagePath = path.join(npm.localPrefix, 'package.json')
  fs.readFile(packagePath, function (er, data) {
    if (data) data = data.toString()
    try {
      data = JSON.parse(data)
    } catch (e) {
      er = e
      data = null
    }

    if (!args.length) return dump(data, cb_)

    if (er) {
      log.error('version', 'No valid package.json found')
      return cb_(er)
    }

    // accept either a concrete version or an increment keyword
    var newVersion = semver.valid(args[0])
    if (!newVersion) newVersion = semver.inc(data.version, args[0])
    if (!newVersion) return cb_(version.usage)

    if (data.version === newVersion) return cb_(new Error('Version not changed'))
    data.version = newVersion

    // lifecycle scripts see the bumped version via this derived object
    var lifecycleData = Object.create(data)
    lifecycleData._id = data.name + '@' + newVersion

    // checkGit records .hasGit here for commit() to consult later
    var localData = {}

    var where = npm.prefix
    chain([
      [checkGit, localData],
      [lifecycle, lifecycleData, 'preversion', where],
      [updatePackage, newVersion, silent],
      [lifecycle, lifecycleData, 'version', where],
      [commit, localData, newVersion],
      [lifecycle, lifecycleData, 'postversion', where] ],
      cb_)
  })
}
// Read and parse <localPrefix>/package.json, yielding cb(er, data).
// Parse failures yield the parse error with null data.
function readPackage (cb) {
  var packagePath = path.join(npm.localPrefix, 'package.json')
  fs.readFile(packagePath, function (readEr, raw) {
    if (readEr) return cb(new Error(readEr))

    var er = null
    var parsed = null
    try {
      parsed = JSON.parse(raw.toString())
    } catch (parseEr) {
      er = parseEr
    }
    cb(er, parsed)
  })
}
// Rewrite package.json with `newVersion`, echoing "v<version>" on
// success unless silent.
function updatePackage (newVersion, silent, cb_) {
  function done (er) {
    if (!er && !silent) console.log('v' + newVersion)
    cb_(er)
  }

  readPackage(function (er, data) {
    if (er) return done(new Error(er))
    data.version = newVersion
    write(data, 'package.json', done)
  })
}
// Sync npm-shrinkwrap.json (when present), then commit and tag in git
// if checkGit previously determined this is a usable git checkout.
function commit (localData, newVersion, cb) {
  updateShrinkwrap(newVersion, function (er, hasShrinkwrap) {
    if (er) return cb(er)
    if (!localData.hasGit) return cb()
    _commit(newVersion, hasShrinkwrap, cb)
  })
}
// Sync npm-shrinkwrap.json (if any) to `newVersion`.  Yields
// cb(er, hasShrinkwrap) so commit() knows whether to `git add` it.
//
// Fix: a non-ENOENT read error (e.g. EACCES) previously fell through to
// data.toString() on undefined and surfaced as a misleading TypeError
// labeled "Bad npm-shrinkwrap.json data"; it is now reported as-is.
function updateShrinkwrap (newVersion, cb) {
  fs.readFile(path.join(npm.localPrefix, 'npm-shrinkwrap.json'), function (er, data) {
    if (er) {
      // having no shrinkwrap at all is the normal case
      if (er.code === 'ENOENT') return cb(null, false)
      return cb(er)
    }

    try {
      data = JSON.parse(data.toString())
    } catch (parseEr) {
      log.error('version', 'Bad npm-shrinkwrap.json data')
      return cb(parseEr)
    }

    data.version = newVersion
    write(data, 'npm-shrinkwrap.json', function (er) {
      if (er) {
        log.error('version', 'Bad npm-shrinkwrap.json data')
        return cb(er)
      }
      cb(null, true)
    })
  })
}
// Print the version table: the current package (when known), npm
// itself, and every entry from process.versions.  Honors --json.
function dump (data, cb) {
  var versions = {}
  if (data && data.name && data.version) versions[data.name] = data.version
  versions.npm = npm.version

  Object.keys(process.versions).sort().forEach(function (key) {
    versions[key] = process.versions[key]
  })

  if (npm.config.get('json')) versions = JSON.stringify(versions, null, 2)

  console.log(versions)
  cb()
}
function checkGit (localData, cb) {
fs.stat(path.join(npm.localPrefix, '.git'), function (er, s) {
var doGit = !er && npm.config.get('git-tag-version')
if (!doGit) {
if (er) log.verbose('version', 'error checking for .git', er)
log.verbose('version', 'not tagging in git')
return cb(null, false)
}
// check for git
git.whichAndExec(
[ 'status', '--porcelain' ],
{ env: process.env },
function (er, stdout) {
if (er && er.code === 'ENOGIT') {
log.warn(
'version',
'This is a Git checkout, but the git command was not found.',
'npm could not create a Git tag for this release!'
)
return cb(null, false)
}
var lines = stdout.trim().split('\n').filter(function (line) {
return line.trim() && !line.match(/^\?\? /)
}).map(function (line) {
return line.trim()
})
if (lines.length && !npm.config.get('force')) {
return cb(new Error(
'Git working directory not clean.\n' + lines.join('\n')
))
}
localData.hasGit = true
cb(null, true)
}
)
})
}
// Stage package.json (plus npm-shrinkwrap.json when present), commit
// with the configured message, and create an annotated tag -- signed
// instead when sign-git-tag is set.
function _commit (version, hasShrinkwrap, cb) {
  var packagePath = path.join(npm.localPrefix, 'package.json')
  var options = { env: process.env }
  var message = npm.config.get('message').replace(/%s/g, version)
  var tagFlag = npm.config.get('sign-git-tag') ? '-sm' : '-am'

  var steps = [
    git.chainableExec([ 'add', packagePath ], options),
    hasShrinkwrap && git.chainableExec([ 'add', 'npm-shrinkwrap.json' ], options),
    git.chainableExec([ 'commit', '-m', message ], options),
    git.chainableExec(
      [ 'tag', npm.config.get('tag-version-prefix') + version, tagFlag, message ],
      options
    )
  ]

  chain(steps, cb)
}
// Serialize `data` as pretty-printed JSON (with trailing newline) and
// atomically write it to <localPrefix>/<file>.
//
// Fix: use Buffer.from instead of the deprecated, unsafe `new Buffer`
// constructor (Node DEP0005).
function write (data, file, cb) {
  assert(data && typeof data === 'object', 'must pass data to version write')
  assert(typeof file === 'string', 'must pass filename to write to version write')

  log.verbose('version.write', 'data', data, 'to', file)
  writeFileAtomic(
    path.join(npm.localPrefix, file),
    Buffer.from(JSON.stringify(data, null, 2) + '\n'),
    cb
  )
}

302
node_modules/npm/lib/view.js generated vendored Normal file
View File

@@ -0,0 +1,302 @@
// npm view [pkg [pkg ...]]
module.exports = view
view.usage = "npm view pkg[@version] [<field>[.subfield]...]"
var npm = require("./npm.js")
, readJson = require("read-package-json")
, log = require("npmlog")
, util = require("util")
, semver = require("semver")
, mapToRegistry = require("./utils/map-to-registry.js")
, npa = require("npm-package-arg")
, path = require("path")
// Tab-completion for `npm view <pkg> <TAB>`: fetch the registry
// document and offer the field names available on it and on its
// currently-tagged version.
view.completion = function (opts, cb) {
  if (opts.conf.argv.remain.length <= 2) {
    // FIXME: there used to be registry completion here, but it stopped making
    // sense somewhere around 50,000 packages on the registry
    return cb()
  }
  // have the package, get the fields.
  var tag = npm.config.get("tag")
  mapToRegistry(opts.conf.argv.remain[2], npm.config, function (er, uri, auth) {
    if (er) return cb(er)

    npm.registry.get(uri, { auth : auth }, function (er, d) {
      if (er) return cb(er)
      var dv = d.versions[d["dist-tags"][tag]]
        , fields = []
      d.versions = Object.keys(d.versions).sort(semver.compareLoose)
      fields = getFields(d).concat(getFields(dv))
      cb(null, fields)
    })
  })

  // Recursively collect dotted field paths from `d`.  Private
  // ("_"-prefixed) and dotted keys are skipped; array elements are
  // offered as "name[i]".
  function getFields (d, f, pref) {
    f = f || []
    if (!d) return f
    pref = pref || []
    Object.keys(d).forEach(function (k) {
      if (k.charAt(0) === "_" || k.indexOf(".") !== -1) return
      var p = pref.concat(k).join(".")
      f.push(p)
      if (Array.isArray(d[k])) {
        d[k].forEach(function (val, i) {
          var pi = p + "[" + i + "]"
          if (val && typeof val === "object") getFields(val, f, [p])
          else f.push(pi)
        })
        return
      }
      if (typeof d[k] === "object") getFields(d[k], f, [p])
    })
    return f
  }
}
// npm view [pkg[@version]] [field...]
// With no package argument (or "."), inspect the package in the current
// prefix instead of a named one.
function view (args, silent, cb) {
  if (typeof cb !== "function") cb = silent, silent = false
  if (!args.length) args = ["."]

  var pkg = args.shift()
    , nv = npa(pkg)
    , name = nv.name
    , local = (name === "." || !name)

  if (npm.config.get("global") && local) {
    return cb(new Error("Cannot use view command in global mode."))
  }

  if (local) {
    var dir = npm.prefix
    readJson(path.resolve(dir, "package.json"), function (er, d) {
      d = d || {}
      if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)
      if (!d.name) return cb(new Error("Invalid package.json"))

      var p = d.name
      nv = npa(p)
      if (pkg && ~pkg.indexOf("@")) {
        // NOTE(review): carries the version spec over from ".@<spec>";
        // split("@")[1] coincides with indexOf("@") === 1 only because
        // "." is a single character -- fragile, verify against callers.
        nv.rawSpec = pkg.split("@")[pkg.indexOf("@")]
      }

      fetchAndRead(nv, args, silent, cb)
    })
  } else {
    fetchAndRead(nv, args, silent, cb)
  }
}
// Fetch the registry document for nv.name, select the versions matching
// the requested spec (or dist-tag), extract the requested fields, and
// print them unless silent.
function fetchAndRead (nv, args, silent, cb) {
  // get the data about this package
  var name = nv.name
    , version = nv.rawSpec || npm.config.get("tag")

  mapToRegistry(name, npm.config, function (er, uri, auth) {
    if (er) return cb(er)

    npm.registry.get(uri, { auth : auth }, function (er, data) {
      if (er) return cb(er)
      // a dist-tag (e.g. "latest") resolves to a concrete version first
      if (data["dist-tags"] && data["dist-tags"].hasOwnProperty(version)) {
        version = data["dist-tags"][version]
      }

      if (data.time && data.time.unpublished) {
        var u = data.time.unpublished
        er = new Error("Unpublished by " + u.name + " on " + u.time)
        er.statusCode = 404
        er.code = "E404"
        er.pkgid = data._id
        return cb(er, data)
      }

      var results = []
        , error = null
        , versions = data.versions || {}
      data.versions = Object.keys(versions).sort(semver.compareLoose)
      if (!args.length) args = [""]

      // remove readme unless we asked for it
      if (-1 === args.indexOf("readme")) {
        delete data.readme
      }

      Object.keys(versions).forEach(function (v) {
        // note: the inner `args` parameter shadows the outer array --
        // inside this callback it is a single field-name string
        if (semver.satisfies(v, version, true)) args.forEach(function (args) {
          // remove readme unless we asked for it
          if (-1 === args.indexOf("readme")) {
            delete versions[v].readme
          }
          results.push(showFields(data, versions[v], args))
        })
      })
      results = results.reduce(reducer, {})
      var retval = results

      if (args.length === 1 && args[0] === "") {
        retval = cleanBlanks(retval)
        log.silly("cleanup", retval)
      }

      if (error || silent) cb(error, retval)
      else printData(results, data._id, cb.bind(null, error, retval))
    })
  })
}
// Collapse { version: { "": value } } into { version: value } -- used
// when the caller asked for the whole document rather than named fields.
function cleanBlanks (obj) {
  var flattened = {}
  for (var version in obj) {
    flattened[version] = obj[version][""]
  }
  return flattened
}
// Merge partial result r into accumulator l, version by version and field
// by field (later values win). Mutates and returns l; ignores falsy r.
function reducer (l, r) {
  if (!r) return l
  Object.keys(r).forEach(function (version) {
    var target = l[version] || (l[version] = {})
    Object.keys(r[version]).forEach(function (field) {
      target[field] = r[version][field]
    })
  })
  return l
}
// return whatever was printed
// Merge the packument-level data with one version's data (the version's
// keys win on conflict), then look up the dotted field path in the merge.
function showFields (data, version, fields) {
  var merged = {}
  var copyInto = function (src) {
    Object.keys(src).forEach(function (k) {
      merged[k] = src[k]
    })
  }
  copyInto(data)
  copyInto(version)
  return search(merged, fields.split("."), version.version, fields)
}
// Walk `data` along the field path in `fields` (dotted segments, plus the
// "name[key]" index syntax). Returns a {version: {title: value}} leaf,
// undefined when the field is absent, or an Error when asked to descend
// into a non-object.
function search (data, fields, version, title) {
  var field
    , tail = fields
  // skip leading empty segments (e.g. produced by "".split("."))
  while (!field && fields.length) field = tail.shift()
  fields = [field].concat(tail)
  var o
  if (!field && !tail.length) {
    // nothing left to look up: the whole blob is the result
    o = {}
    o[version] = {}
    o[version][title] = data
    return o
  }
  // "name[key]" syntax: index into the named object/array member.
  var index = field.match(/(.+)\[([^\]]+)\]$/)
  if (index) {
    field = index[1]
    index = index[2]
    // BUGFIX: was `data.field`, which looked up a literal property named
    // "field" and so the indexed branch effectively never matched.
    if (data[field] && data[field].hasOwnProperty(index)) {
      return search(data[field][index], tail, version, title)
    } else {
      // no such member: treat the raw "name[key]" text as a plain field name
      field = field + "[" + index + "]"
    }
  }
  if (Array.isArray(data)) {
    if (data.length === 1) {
      return search(data[0], fields, version, title)
    }
    // search each element, rewriting the title to carry the element index
    var results = []
    data.forEach(function (data, i) {
      var tl = title.length
        , newt = title.substr(0, tl-(fields.join(".").length) - 1)
               + "["+i+"]" + [""].concat(fields).join(".")
      results.push(search(data, fields.slice(), version, newt))
    })
    results = results.reduce(reducer, {})
    return results
  }
  if (!data.hasOwnProperty(field)) return undefined
  data = data[field]
  if (tail.length) {
    if (typeof data === "object") {
      // there are more fields to deal with.
      return search(data, tail, version, title)
    } else {
      return new Error("Not an object: "+data)
    }
  }
  o = {}
  o[version] = {}
  o[version][title] = data
  return o
}
// Render the reduced result map ({version: {field: value}}) to stdout.
// `name` is the package id, used to prefix lines when several versions (or
// several fields) are shown. Calls cb(null, data) when done.
function printData (data, name, cb) {
  var versions = Object.keys(data)
    , msg = ""
    , includeVersions = versions.length > 1
    , includeFields

  versions.forEach(function (v) {
    var fields = Object.keys(data[v])
    includeFields = includeFields || (fields.length > 1)

    fields.forEach(function (f) {
      // FIX: cleanup() was invoked twice on the same value; once is enough
      // (it is idempotent on an already-cleaned object).
      var d = cleanup(data[v][f])
      if (includeVersions || includeFields || typeof d !== "string") {
        // non-trivial output: pretty-print as JSON or via util.inspect
        d = npm.config.get("json")
          ? JSON.stringify(d, null, 2)
          : util.inspect(d, false, 5, npm.color)
      } else if (typeof d === "string" && npm.config.get("json")) {
        d = JSON.stringify(d)
      }

      if (f && includeFields) f += " = "
      // multi-line values start on their own line
      if (d.indexOf("\n") !== -1) d = " \n" + d
      msg += (includeVersions ? name + "@" + v + " " : "")
           + (includeFields ? f : "") + d + "\n"
    })
  })

  // preserve output symmetry by adding a whitespace-only line at the end if
  // there's one at the beginning
  if (/^\s*\n/.test(msg)) msg += "\n"

  // print directly to stdout to not unnecessarily add blank lines
  process.stdout.write(msg)

  cb(null, data)
}
// Recursively tidy registry data for display: unwrap single-element arrays,
// strip "_"-prefixed internal keys (in place), replace a versions map with
// its sorted-by-insertion key list, and collapse {name, email?, url?}
// person objects into a single "name <email> (url)" string.
function cleanup (data) {
  if (Array.isArray(data)) {
    if (data.length !== 1) return data.map(cleanup)
    data = data[0]
  }

  if (!data || typeof data !== "object") return data

  var vers = data.versions
  if (vers && typeof vers === "object" && !Array.isArray(vers)) {
    data.versions = Object.keys(vers)
  }

  Object.keys(data).forEach(function (key) {
    if (key.charAt(0) === "_") {
      delete data[key]
    } else if (typeof data[key] === "object") {
      data[key] = cleanup(data[key])
    }
  })

  var remaining = Object.keys(data)
  var isPerson = remaining.length <= 3 && data.name &&
    ( remaining.length === 1 ||
      (remaining.length === 3 && data.email && data.url) ||
      (remaining.length === 2 && (data.email || data.url)) )
  return isPerson ? unparsePerson(data) : data
}
// Format a person record back into the conventional single-string form:
// "name <email> (url)", with email/url parts present only when set.
// Strings are assumed already formatted and pass through untouched.
function unparsePerson (d) {
  if (typeof d === "string") return d
  var email = d.email ? " <" + d.email + ">" : ""
  var url = d.url ? " (" + d.url + ")" : ""
  return d.name + email + url
}

42
node_modules/npm/lib/visnup.js generated vendored Normal file
View File

@@ -0,0 +1,42 @@
module.exports = visnup
var npm = require("./npm.js")

// 25x34 grid of xterm 256-color codes (0 = terminal default) forming a
// pixel-art portrait; visnup() renders each cell as a colored space.
var handsomeFace = [
  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 232, 237, 236, 236, 232, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
 ,[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 235, 236, 235, 233, 237, 235, 233, 232, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
 ,[0, 0, 0, 0, 0, 0, 0, 0, 0, 232, 235, 233, 232, 235, 235, 234, 233, 236, 232, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
 ,[0, 0, 0, 0, 0, 0, 0, 0, 237, 235, 232, 232, 234, 233, 233, 232, 232, 233, 232, 232, 235, 232, 233, 234, 234, 0, 0, 0, 0, 0, 0, 0, 0, 0]
 ,[0, 0, 0, 0, 0, 0, 0, 232, 232, 232, 239, 238, 235, 233, 232, 232, 232, 232, 232, 232, 232, 233, 235, 232, 233, 233, 232, 0, 0, 0, 0, 0, 0, 0]
 ,[0, 0, 0, 0, 234, 234, 232, 233, 234, 233, 234, 235, 233, 235, 60, 238, 238, 234, 234, 233, 234, 233, 238, 251, 246, 233, 233, 232, 0, 0, 0, 0, 0, 0]
 ,[0, 0, 233, 233, 233, 232, 232, 239, 249, 251, 252, 231, 231, 188, 250, 254, 59, 60, 255, 231, 231, 231, 252, 235, 239, 235, 232, 233, 0, 0, 0, 0, 0, 0]
 ,[0, 0, 232, 233, 232, 232, 232, 248, 231, 231, 231, 231, 231, 231, 231, 254, 238, 254, 231, 231, 231, 231, 231, 252, 233, 235, 237, 233, 234, 0, 0, 0, 0, 0]
 ,[0, 0, 233, 232, 232, 232, 248, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 251, 233, 233, 233, 236, 233, 0, 0, 0, 0]
 ,[232, 233, 233, 232, 232, 246, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 249, 233, 234, 234, 0, 0, 0, 0]
 ,[232, 232, 232, 232, 233, 249, 231, 255, 255, 255, 255, 254, 109, 60, 239, 237, 238, 237, 235, 235, 235, 235, 236, 235, 235, 235, 234, 232, 232, 232, 232, 232, 233, 0]
 ,[0, 232, 232, 233, 233, 233, 233, 233, 233, 233, 233, 233, 235, 236, 238, 238, 235, 188, 254, 254, 145, 236, 252, 254, 254, 254, 254, 249, 236, 235, 232, 232, 233, 0]
 ,[0, 0, 233, 237, 249, 239, 233, 252, 231, 231, 231, 231, 231, 231, 254, 235, 235, 254, 231, 231, 251, 235, 237, 231, 231, 231, 231, 7, 237, 235, 232, 233, 233, 0]
 ,[0, 0, 0, 0, 233, 248, 239, 233, 231, 231, 231, 231, 254, 233, 233, 235, 254, 255, 231, 254, 237, 236, 254, 239, 235, 235, 233, 233, 232, 232, 233, 232, 0, 0]
 ,[0, 0, 0, 232, 233, 246, 255, 255, 236, 236, 236, 236, 236, 255, 231, 231, 231, 231, 231, 231, 252, 234, 248, 231, 231, 231, 231, 248, 232, 232, 232, 0, 0, 0]
 ,[0, 0, 0, 0, 235, 237, 7, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 255, 238, 235, 7, 231, 231, 231, 246, 232, 0, 0, 0, 0, 0]
 ,[0, 0, 0, 0, 0, 235, 103, 188, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 252, 232, 238, 231, 231, 255, 244, 232, 0, 0, 0, 0, 0]
 ,[0, 0, 0, 0, 0, 235, 236, 103, 146, 253, 255, 231, 231, 231, 231, 231, 253, 251, 250, 250, 250, 246, 232, 235, 152, 255, 146, 66, 233, 0, 0, 0, 0, 0]
 ,[0, 0, 0, 0, 0, 0, 233, 103, 146, 146, 146, 146, 254, 231, 231, 231, 109, 103, 146, 255, 188, 239, 240, 103, 255, 253, 103, 238, 234, 0, 0, 0, 0, 0]
 ,[0, 0, 0, 0, 0, 0, 232, 235, 109, 146, 146, 146, 146, 146, 252, 152, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 103, 235, 233, 0, 0, 0, 0, 0]
 ,[0, 0, 0, 0, 0, 0, 0, 235, 235, 103, 146, 146, 146, 146, 146, 146, 188, 188, 188, 188, 188, 188, 152, 146, 146, 146, 66, 235, 0, 0, 0, 0, 0, 0]
 ,[0, 0, 0, 0, 0, 0, 0, 0, 233, 235, 66, 146, 146, 146, 146, 152, 255, 146, 240, 239, 241, 109, 146, 146, 146, 103, 233, 0, 0, 0, 0, 0, 0, 0]
 ,[0, 0, 0, 0, 0, 0, 0, 0, 0, 234, 237, 109, 146, 146, 146, 146, 146, 254, 231, 231, 188, 146, 146, 146, 103, 233, 0, 0, 0, 0, 0, 0, 0, 0]
 ,[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 233, 237, 60, 103, 146, 146, 146, 146, 146, 103, 66, 60, 235, 232, 0, 0, 0, 0, 0, 0, 0, 0, 0]
 ,[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 232, 233, 233, 236, 235, 237, 235, 237, 237, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]
// Print the handsomeFace portrait, then fall through to running the next
// command on the argument list (if any), passing cb along.
function visnup (args, cb) {
  // each cell becomes an ANSI escape: "\u001b[48;5;<n>m" sets the 256-color
  // background for colored cells; 0 cells emit "\u001b[0m" (reset).
  // "\u001b" is the same ESC byte the original spelled as octal "\033".
  handsomeFace.forEach(function (row) {
    var cells = row.map(function (color) {
      var code = color ? "48;5;" + color : color
      return "\u001b[" + code + "m"
    })
    console.log(cells.join(" "))
  })

  // after the art, delegate to a real npm command if one was given
  var command = args.shift()
  if (command) {
    npm.commands[command](args, cb)
  } else {
    cb()
  }
}

47
node_modules/npm/lib/whoami.js generated vendored Normal file
View File

@@ -0,0 +1,47 @@
var npm = require("./npm.js")
module.exports = whoami

whoami.usage = "npm whoami\n(just prints username according to given registry)"

// Resolve the logged-in username for the configured registry and pass it to
// cb; prints it unless `silent`. Errors with code ENEEDAUTH when no usable
// credentials exist.
function whoami (args, silent, cb) {
  // FIXME: need tighter checking on this, but is a breaking change
  if (typeof cb !== "function") {
    cb = silent
    silent = false
  }

  var registry = npm.config.get("registry")
  if (!registry) return cb(new Error("no default registry set"))

  var auth = npm.config.getCredentialsByURI(registry)

  // basic auth: the username is stored locally, no network round-trip needed
  if (auth && auth.username) {
    if (!silent) console.log(auth.username)
    return process.nextTick(cb.bind(this, null, auth.username))
  }

  // bearer token: ask the registry whom the token belongs to
  if (auth && auth.token) {
    return npm.registry.whoami(registry, { auth : auth }, function (er, username) {
      if (er) return cb(er)
      if (!username) {
        var needNewSession = new Error(
          "Your auth token is no longer valid. Please log in again."
        )
        needNewSession.code = 'ENEEDAUTH'
        return cb(needNewSession)
      }

      if (!silent) console.log(username)
      cb(null, username)
    })
  }

  // At this point, if they have a credentials object, it doesn't have a token
  // or auth in it. Probably just the default registry.
  var needAuth = new Error(
    "this command requires you to be logged in."
  )
  needAuth.code = 'ENEEDAUTH'
  process.nextTick(cb.bind(this, needAuth))
}

55
node_modules/npm/lib/xmas.js generated vendored Normal file
View File

@@ -0,0 +1,55 @@
// happy xmas
var log = require("npmlog")
// Draw a randomly-decorated ASCII/Unicode Christmas tree on stderr.
// On win32 the fancy Unicode glyphs are replaced with plain ASCII.
module.exports = function (args, cb) {
  // s: tree topper (BLACK STAR U+2605, or "*" on win32)
  // f/b: FULLWIDTH SOLIDUS / FULLWIDTH REVERSE SOLIDUS — the tree's edges
  // x: extra padding used on win32 where glyphs are single-width
  // o: ornament glyphs (mostly spaces so decorations are sparse)
  // oc: ANSI color codes to pick from for each ornament
  // l: "^" — the row of foliage drawn above the trunk
  var s = process.platform === "win32" ? " *" : " \u2605"
    , f = "\uFF0F"
    , b = "\uFF3C"
    , x = process.platform === "win32" ? " " : ""
    , o = [ "\u0069" , "\u0020", "\u0020", "\u0020", "\u0020", "\u0020"
          , "\u0020", "\u0020", "\u0020", "\u0020", "\u0020", "\u0020"
          , "\u0020", "\u2E1B","\u2042","\u2E2E","&","@","\uFF61" ]
    , oc = [21,33,34,35,36,37]
    , l = "\u005e"

  // all output goes to stderr so it doesn't pollute piped stdout
  function w (s) { process.stderr.write(s) }
  w("\n")
  // T draws a tree of height H rows
  ;(function T (H) {
    // center the star above a tree of width 2H-1
    for (var i = 0; i < H; i ++) w(" ")
    w(x+"\033[33m"+s+"\n")
    var M = H * 2 - 1
    for (var L = 1; L <= H; L ++) {
      var O = L * 2 - 2          // number of ornament slots on this row
      var S = (M - O) / 2        // leading spaces to center the row
      for (i = 0; i < S; i ++) w(" ")
      w(x+"\033[32m"+f)
      // fill the row interior with randomly colored random ornaments
      for (i = 0; i < O; i ++) w(
        "\033["+oc[Math.floor(Math.random()*oc.length)]+"m"+
        o[Math.floor(Math.random() * o.length)]
      )
      w(x+"\033[32m"+b+"\n")
    }
    // foliage row with the trunk "| |" in the middle
    w(" ")
    for (i = 1; i < H; i ++) w("\033[32m"+l)
    w("| "+x+" |")
    for (i = 1; i < H; i ++) w("\033[32m"+l)
    // tall trees get a second, bare trunk row
    if (H > 10) {
      w("\n ")
      for (i = 1; i < H; i ++) w(" ")
      w("| "+x+" |")
      for (i = 1; i < H; i ++) w(" ")
    }
  })(20)
  w("\n\n")
  // log the greeting through npmlog at a custom always-visible level
  log.heading = ''
  log.addLevel('npm', 100000, log.headingStyle)
  log.npm("loves you", "Happy Xmas, Noders!")
  cb()
}
// Lazy `usage` getter: accessing .usage returns a blank usage string, and —
// because of the dg flag — draws the tree itself on the second and later
// accesses. NOTE(review): the guard looks inverted (first access sets the
// flag without drawing); presumably this skips the initial access made while
// npm loads its command table — confirm before changing.
var dg=false
Object.defineProperty(module.exports, "usage", {get:function () {
  if (dg) module.exports([], function () {})
  dg = true
  return " "
}})