1
0
mirror of https://github.com/S2-/minifyfromhtml.git synced 2025-08-04 12:40:05 +02:00

update packages to latest version

This commit is contained in:
s2
2022-08-20 18:51:33 +02:00
parent 09663a35a5
commit 806ebf9a57
4513 changed files with 366205 additions and 92512 deletions

58
node_modules/npm/lib/utils/completion.sh generated vendored Normal file
View File

@@ -0,0 +1,58 @@
#!/bin/bash
###-begin-npm-completion-###
#
# npm command completion script
#
# Installation: npm completion >> ~/.bashrc (or ~/.zshrc)
# Or, maybe: npm completion > /usr/local/etc/bash_completion.d/npm
#
# Completion candidates are produced by running `npm completion -- <words>`
# and splitting its newline-separated output. Three shells are supported,
# detected in order: bash (`complete`), zsh (`compdef`), and older zsh
# (`compctl`).
if type complete &>/dev/null; then
  # bash
  _npm_completion () {
    local words cword
    if type _get_comp_words_by_ref &>/dev/null; then
      # bash-completion helper: re-split the line, keeping = and @ inside
      # words, and recompute the current-word index.
      _get_comp_words_by_ref -n = -n @ -w words -i cword
    else
      cword="$COMP_CWORD"
      words=("${COMP_WORDS[@]}")
    fi
    # Split npm's output on newlines only; restore IFS afterwards.
    local si="$IFS"
    IFS=$'\n' COMPREPLY=($(COMP_CWORD="$cword" \
                           COMP_LINE="$COMP_LINE" \
                           COMP_POINT="$COMP_POINT" \
                           npm completion -- "${words[@]}" \
                           2>/dev/null)) || return $?
    IFS="$si"
  }
  complete -o default -F _npm_completion npm
elif type compdef &>/dev/null; then
  # zsh with compdef
  _npm_completion() {
    local si=$IFS
    # zsh's CURRENT is 1-based; COMP_CWORD is 0-based.
    compadd -- $(COMP_CWORD=$((CURRENT-1)) \
                 COMP_LINE=$BUFFER \
                 COMP_POINT=0 \
                 npm completion -- "${words[@]}" \
                 2>/dev/null)
    IFS=$si
  }
  compdef _npm_completion npm
elif type compctl &>/dev/null; then
  # older zsh without compdef
  _npm_completion () {
    local cword line point words si
    # Recover command-line state via the compctl read builtins.
    read -Ac words
    read -cn cword
    let cword-=1
    read -l line
    read -ln point
    si="$IFS"
    IFS=$'\n' reply=($(COMP_CWORD="$cword" \
                       COMP_LINE="$line" \
                       COMP_POINT="$point" \
                       npm completion -- "${words[@]}" \
                       2>/dev/null)) || return $?
    IFS="$si"
  }
  compctl -K _npm_completion npm
fi
###-end-npm-completion-###

View File

@@ -0,0 +1,23 @@
module.exports = fileCompletion

var mkdir = require("mkdirp")
var path = require("path")
var glob = require("glob")

// Complete file/directory names under `root` that start with `req`.
//
// root  - base directory to search (created if missing)
// req   - the partial path typed so far
// depth - optional maximum glob depth (defaults to Infinity)
// cb    - cb(er, names); names are expressed relative to `req`
function fileCompletion (root, req, depth, cb) {
  // depth is optional: shift arguments when only three were passed.
  if (typeof cb !== "function") {
    cb = depth
    depth = Infinity
  }
  mkdir(root, function (er) {
    if (er) return cb(er)

    // can be either exactly the req, or a descendent
    var pattern = root + "/{" + req + "," + req + "/**/*}"
    var opts = { mark: true, dot: true, maxDepth: depth }
    glob(pattern, opts, function (er, files) {
      if (er) return cb(er)
      return cb(null, (files || []).map(function (f) {
        // Strip the root prefix (and any leading slash) so the result
        // is relative to what the user already typed.
        // (slice replaces the deprecated String.prototype.substr.)
        var tail = f.slice(root.length + 1).replace(/^\//, "")
        return path.join(req, tail)
      }))
    })
  })
}

View File

@@ -0,0 +1,50 @@
module.exports = installedDeep

var npm = require("../../npm.js")
  , readInstalled = require("read-installed")

// Produce completion candidates for every package installed either
// locally or globally, walking the full dependency tree.
//
// The local and global scans run in parallel; next() fires the callback
// only after both `local` and `global` have been populated.
function installedDeep (opts, cb) {
  var local
    , global
    , depth = npm.config.get("depth")
    , opt = { depth: depth, dev: true }
  // In global mode there is no local tree to scan.
  if (npm.config.get("global")) local = [], next()
  else readInstalled(npm.prefix, opt, function (er, data) {
    // errors are ignored: an unreadable tree simply contributes no names
    local = getNames(data || {})
    next()
  })
  readInstalled(npm.config.get("prefix"), opt, function (er, data) {
    global = getNames(data || {})
    next()
  })
  // Collect unique realNames from a read-installed data tree into `n`.
  function getNames_ (d, n) {
    if (d.realName && n) {
      if (n[d.realName]) return n // already visited; stop recursing
      n[d.realName] = true
    }
    if (!n) n = {}
    Object.keys(d.dependencies || {}).forEach(function (dep) {
      getNames_(d.dependencies[dep], n)
    })
    return n
  }
  function getNames (d) {
    return Object.keys(getNames_(d))
  }
  // Fire cb once both scans have finished.
  function next () {
    if (!local || !global) return
    if (!npm.config.get("global")) {
      // outside global mode, tag global packages so "-g" is suggested
      global = global.map(function (g) {
        return [g, "-g"]
      })
    }
    var names = local.concat(global)
    return cb(null, names)
  }
}

View File

@@ -0,0 +1,79 @@
module.exports = installedShallow

var npm = require("../../npm.js")
  , fs = require("graceful-fs")
  , path = require("path")
  , readJson = require("read-package-json")
  , asyncMap = require("slide").asyncMap

// Produce completion candidates from the top level of the local and
// global node_modules directories (no dependency-tree walk).
//
// filter is optional; when provided it is applied to each package.json
// before the name is offered.
function installedShallow (opts, filter, cb) {
  // filter is optional: shift arguments when only two were passed
  if (typeof cb !== "function") cb = filter, filter = null
  var conf = opts.conf
    , args = conf.argv.remain
  // with more than "npm <cmd> <arg>" already typed, nothing to complete
  if (args.length > 3) return cb()
  var local
    , global
    , localDir = npm.dir
    , globalDir = npm.globalDir
  // Both directory listings run in parallel; next() waits for both.
  if (npm.config.get("global")) local = [], next()
  else fs.readdir(localDir, function (er, pkgs) {
    local = (pkgs || []).filter(function (p) {
      return p.charAt(0) !== "." // skip .bin and other dot entries
    })
    next()
  })
  fs.readdir(globalDir, function (er, pkgs) {
    global = (pkgs || []).filter(function (p) {
      return p.charAt(0) !== "."
    })
    next()
  })
  function next () {
    if (!local || !global) return
    filterInstalled(local, global, filter, cb)
  }
}
// Apply the optional `filter` predicate to the package.json of every
// candidate name, then hand the combined local + global list to cb.
// Without a filter, the raw directory listings pass straight through.
//
// Fixed here: a leftover debug statement (console.error("filtered", ...))
// was printing raw completion arrays to stderr on every invocation.
function filterInstalled (local, global, filter, cb) {
  var fl
    , fg

  if (!filter) {
    fl = local
    fg = global
    return next()
  }

  // Read each local package.json; names failing the filter map to [],
  // which asyncMap flattens away.
  asyncMap(local, function (p, cb) {
    readJson(path.join(npm.dir, p, "package.json"), function (er, d) {
      if (!d || !filter(d)) return cb(null, [])
      return cb(null, d.name)
    })
  }, function (er, local) {
    fl = local || []
    next()
  })

  var globalDir = npm.globalDir
  asyncMap(global, function (p, cb) {
    readJson(path.join(globalDir, p, "package.json"), function (er, d) {
      if (!d || !filter(d)) return cb(null, [])
      return cb(null, d.name)
    })
  }, function (er, global) {
    fg = global || []
    next()
  })

  // Fire cb once both passes have finished.
  function next () {
    if (!fg || !fl) return
    if (!npm.config.get("global")) {
      // outside global mode, tag global packages so "-g" is suggested
      fg = fg.map(function (g) {
        return [g, "-g"]
      })
    }
    return cb(null, fl.concat(fg))
  }
}

123
node_modules/npm/lib/utils/correct-mkdir.js generated vendored Normal file
View File

@@ -0,0 +1,123 @@
var chownr = require('chownr')
var dezalgo = require('dezalgo')
var fs = require('graceful-fs')
var inflight = require('inflight')
var log = require('npmlog')
var mkdirp = require('mkdirp')

// memoize the directories created by this step
var stats = {}

// Cached uid/gid that created directories should be owned by;
// computed lazily by calculateOwner().
var effectiveOwner
// Ensure `path` exists as a directory owned by the effective user.
// cb(er, stat) receives the (possibly cached) stats for the directory.
// Concurrent calls for the same path are coalesced via inflight, and
// results are memoized in `stats`.
module.exports = function correctMkdir (path, cb) {
  cb = dezalgo(cb) // guarantee the callback is always delivered async
  cb = inflight('correctMkdir:' + path, cb)
  if (!cb) {
    return log.verbose('correctMkdir', path, 'correctMkdir already in flight; waiting')
  } else {
    log.verbose('correctMkdir', path, 'correctMkdir not in flight; initializing')
  }
  if (stats[path]) return cb(null, stats[path])
  fs.stat(path, function (er, st) {
    if (er) return makeDirectory(path, cb)
    if (!st.isDirectory()) {
      log.error('correctMkdir', 'invalid dir %s', path)
      // NOTE(review): er is null on this branch, so callers receive no
      // error object here — presumably relying on the log line. Confirm.
      return cb(er)
    }
    var ownerStats = calculateOwner()
    // there's always a chance the permissions could have been frobbed, so fix
    if (st.uid !== ownerStats.uid) {
      stats[path] = ownerStats
      setPermissions(path, ownerStats, cb)
    } else {
      stats[path] = st
      cb(null, stats[path])
    }
  })
}
// Determine (once, then cached) which uid/gid npm-created directories
// should be owned by. Without process.getuid (pretty much only on
// Windows) a {0, 0} placeholder is returned. When running as root
// under sudo, the invoking user's SUDO_UID/SUDO_GID are preferred.
function calculateOwner () {
  if (effectiveOwner) return effectiveOwner

  effectiveOwner = { uid: 0, gid: 0 }

  if (!process.getuid) return effectiveOwner

  effectiveOwner.uid = +process.getuid()
  effectiveOwner.gid = +process.getgid()

  if (effectiveOwner.uid === 0) {
    if (process.env.SUDO_UID) effectiveOwner.uid = +process.env.SUDO_UID
    if (process.env.SUDO_GID) effectiveOwner.gid = +process.env.SUDO_GID
  }

  return effectiveOwner
}
// Create `path` (and parents), then record the owner stats it should
// carry. Ownership source: the calculated effective owner normally,
// but when running as root with a $HOME set, the stats of $HOME are
// used instead so the directory matches the user's home ownership.
function makeDirectory (path, cb) {
  cb = inflight('makeDirectory:' + path, cb)
  if (!cb) {
    return log.verbose('makeDirectory', path, 'creation already in flight; waiting')
  } else {
    log.verbose('makeDirectory', path, 'creation not in flight; initializing')
  }
  var owner = calculateOwner()
  if (!process.getuid) {
    // No uid support (Windows): just create the directory.
    return mkdirp(path, function (er) {
      log.verbose('makeCacheDir', 'UID & GID are irrelevant on', process.platform)
      stats[path] = owner
      return cb(er, stats[path])
    })
  }
  if (owner.uid !== 0 || !process.env.HOME) {
    log.silly(
      'makeDirectory', path,
      'uid:', owner.uid,
      'gid:', owner.gid
    )
    stats[path] = owner
    mkdirp(path, afterMkdir)
  } else {
    // Running as root: mirror the ownership of $HOME instead.
    fs.stat(process.env.HOME, function (er, st) {
      if (er) {
        log.error('makeDirectory', 'homeless?')
        return cb(er)
      }
      log.silly(
        'makeDirectory', path,
        'uid:', st.uid,
        'gid:', st.gid
      )
      stats[path] = st
      mkdirp(path, afterMkdir)
    })
  }
  function afterMkdir (er, made) {
    // Bail if creation failed or we have no usable owner stats.
    if (er || !stats[path] || isNaN(stats[path].uid) || isNaN(stats[path].gid)) {
      return cb(er, stats[path])
    }
    // mkdirp reports the topmost directory it created; if nothing was
    // created there is nothing to chown.
    if (!made) return cb(er, stats[path])
    setPermissions(made, stats[path], cb)
  }
}
// Recursively chown `path` to the uid/gid carried by `st`, then hand
// `st` back through cb. A path that vanished in the meantime (ENOENT)
// is not treated as an error.
function setPermissions (path, st, cb) {
  chownr(path, st.uid, st.gid, function (er) {
    var vanished = er && er.code === 'ENOENT'
    if (vanished) return cb(null, st)
    return cb(er, st)
  })
}

13
node_modules/npm/lib/utils/depr-check.js generated vendored Normal file
View File

@@ -0,0 +1,13 @@
var log = require("npmlog")
var deprecated = {}
, deprWarned = {}
module.exports = function deprCheck (data) {
if (deprecated[data._id]) data.deprecated = deprecated[data._id]
if (data.deprecated) deprecated[data._id] = data.deprecated
else return
if (!deprWarned[data._id]) {
deprWarned[data._id] = true
log.warn("deprecated", "%s: %s", data._id, data.deprecated)
}
}

414
node_modules/npm/lib/utils/error-handler.js generated vendored Normal file
View File

@@ -0,0 +1,414 @@
module.exports = errorHandler

// Module-level state shared between errorHandler, exit(), writeLogFile,
// and the process "exit" listener below.
var cbCalled = false          // has errorHandler run yet this process?
  , log = require("npmlog")
  , npm = require("../npm.js")
  , rm = require("rimraf")
  , itWorked = false          // set when the run completed cleanly
  , path = require("path")
  , wroteLogFile = false      // did we write npm-debug.log this run?
  , exitCode = 0
  , rollbacks = npm.rollbacks
  , chain = require("slide").chain
  , writeStreamAtomic = require("fs-write-stream-atomic")
  , nameValidator = require("validate-npm-package-name")
// Final bookkeeping when the process exits: report success/failure,
// point the user at npm-debug.log when one was written, and force a
// non-zero exit code for failed runs (unless config "_exit" is off,
// in which case state is reset for the next simulated exit).
process.on("exit", function (code) {
  // console.error("exit", code)
  if (!npm.config || !npm.config.loaded) return
  if (code) itWorked = false
  if (itWorked) log.info("ok")
  else {
    if (!cbCalled) {
      log.error("", "cb() never called!")
    }
    if (wroteLogFile) {
      // just a line break
      if (log.levels[log.level] <= log.levels.error) console.error("")
      log.error("",
                ["Please include the following file with any support request:"
                ," " + path.resolve("npm-debug.log")
                ].join("\n"))
      wroteLogFile = false
    }
    if (code) {
      log.error("code", code)
    }
  }
  var doExit = npm.config.get("_exit")
  if (doExit) {
    // actually exit.
    if (exitCode === 0 && !itWorked) {
      exitCode = 1
    }
    if (exitCode !== 0) process.exit(exitCode)
  } else {
    itWorked = false // ready for next exit
  }
})
// Shut down the process with `code`, optionally writing npm-debug.log.
// Any registered rollbacks (partially-installed packages) are unbuilt
// first. `noLog` suppresses the debug log even on failure; a silent
// log level implies noLog.
function exit (code, noLog) {
  exitCode = exitCode || process.exitCode || code
  var doExit = npm.config ? npm.config.get("_exit") : true
  log.verbose("exit", [code, doExit])
  if (log.level === "silent") noLog = true
  if (rollbacks.length) {
    // Unbuild each rolled-back package in sequence before exiting.
    chain(rollbacks.map(function (f) {
      return function (cb) {
        npm.commands.unbuild([f], true, cb)
      }
    }), function (er) {
      if (er) {
        log.error("error rolling back", er)
        if (!code) errorHandler(er)
        else if (noLog) rm("npm-debug.log", reallyExit.bind(null, er))
        else writeLogFile(reallyExit.bind(this, er))
      } else {
        if (!noLog && code) writeLogFile(reallyExit)
        else rm("npm-debug.log", reallyExit)
      }
    })
    rollbacks.length = 0
  }
  else if (code && !noLog) writeLogFile(reallyExit)
  else rm("npm-debug.log", reallyExit)
  function reallyExit (er) {
    if (er && !code) code = typeof er.errno === "number" ? er.errno : 1
    // truncate once it's been written.
    log.record.length = 0
    itWorked = !code
    // just emit a fake exit event.
    // if we're really exiting, then let it exit on its own, so that
    // in-process stuff can finish or clean up first.
    if (!doExit) process.emit("exit", code)
    npm.spinner.stop()
  }
}
// Top-level error handler for the npm CLI.
//
// Normalizes whatever was passed (nothing, string, non-Error, Error),
// logs verbose diagnostics plus system info, prints targeted advice
// for known error codes, and finally terminates via exit().
//
// Fixed here: (1) the ELIFECYCLE message array contained a stray
// trailing comma (an array hole), emitting a spurious blank line;
// (2) the EEXIST case called log.error without a prefix argument, so
// npmlog treated the message itself as the prefix.
function errorHandler (er) {
  // console.error("errorHandler", er)
  if (!npm.config || !npm.config.loaded) {
    // logging won't work unless we pretend that it's ready
    er = er || new Error("Exit prior to config file resolving.")
    console.error(er.stack || er.message)
  }
  if (cbCalled) {
    er = er || new Error("Callback called more than once.")
  }
  cbCalled = true
  if (!er) return exit(0)
  if (typeof er === "string") {
    log.error("", er)
    return exit(1, true)
  } else if (!(er instanceof Error)) {
    log.error("weird error", er)
    return exit(1, true)
  }

  // Recover an E-code from the message when er.code was never set.
  var m = er.code || er.message.match(/^(?:Error: )?(E[A-Z]+)/)
  if (m && !er.code) er.code = m

  // Dump any interesting metadata the error carries, at verbose level.
  ; [ "type"
    , "fstream_path"
    , "fstream_unc_path"
    , "fstream_type"
    , "fstream_class"
    , "fstream_finish_call"
    , "fstream_linkpath"
    , "stack"
    , "fstream_stack"
    , "statusCode"
    , "pkgid"
    ].forEach(function (k) {
      var v = er[k]
      if (!v) return
      if (k === "fstream_stack") v = v.join("\n")
      log.verbose(k, v)
    })

  log.verbose("cwd", process.cwd())

  var os = require("os")
  // log.error("System", os.type() + " " + os.release())
  // log.error("command", process.argv.map(JSON.stringify).join(" "))
  // log.error("node -v", process.version)
  // log.error("npm -v", npm.version)
  log.error("", os.type() + " " + os.release())
  log.error("argv", process.argv.map(JSON.stringify).join(" "))
  log.error("node", process.version)
  log.error("npm ", "v" + npm.version)

  ; [ "file"
    , "path"
    , "code"
    , "errno"
    , "syscall"
    ].forEach(function (k) {
      var v = er[k]
      if (v) log.error(k, v)
    })

  // just a line break
  if (log.levels[log.level] <= log.levels.error) console.error("")

  switch (er.code) {
  case "ECONNREFUSED":
    log.error("", er)
    log.error("", ["\nIf you are behind a proxy, please make sure that the"
              ,"'proxy' config is set properly. See: 'npm help config'"
              ].join("\n"))
    break

  case "EACCES":
  case "EPERM":
    log.error("", er)
    log.error("", ["\nPlease try running this command again as root/Administrator."
              ].join("\n"))
    break

  case "ELIFECYCLE":
    log.error("", er.message)
    log.error("", ["","Failed at the "+er.pkgid+" "+er.stage+" script '"+er.script+"'."
              ,"This is most likely a problem with the "+er.pkgname+" package,"
              ,"not with npm itself."
              ,"Tell the author that this fails on your system:"
              ," "+er.script
              ,'You can get information on how to open an issue for this project with:'
              ,' npm bugs ' + er.pkgname
              ,'Or if that isn\'t available, you can get their info via:'
              ,' npm owner ls ' + er.pkgname
              ,"There is likely additional logging output above."
              ].join("\n"))
    break

  case "ENOGIT":
    log.error("", er.message)
    log.error("", ["","Failed using git."
              ,"This is most likely not a problem with npm itself."
              ,"Please check if you have git installed and in your PATH."
              ].join("\n"))
    break

  case "EJSONPARSE":
    log.error("", er.message)
    log.error("", "File: "+er.file)
    log.error("", ["Failed to parse package.json data."
              ,"package.json must be actual JSON, not just JavaScript."
              ,"","This is not a bug in npm."
              ,"Tell the package author to fix their package.json file."
              ].join("\n"), "JSON.parse")
    break

  // TODO(isaacs)
  // Add a special case here for E401 and E403 explaining auth issues?

  case "E404":
    var msg = [er.message]
    if (er.pkgid && er.pkgid !== "-") {
      msg.push("", "'" + er.pkgid + "' is not in the npm registry.")
      var valResult = nameValidator(er.pkgid)
      if (valResult.validForNewPackages) {
        msg.push("You should bug the author to publish it (or use the name yourself!)")
      } else {
        msg.push("Your package name is not valid, because", "")
        var errorsArray = (valResult.errors || []).concat(valResult.warnings || [])
        errorsArray.forEach(function(item, idx) {
          msg.push(" " + (idx + 1) + ". " + item)
        })
      }
      if (er.parent) {
        msg.push("It was specified as a dependency of '"+er.parent+"'")
      }
      msg.push("\nNote that you can also install from a"
              ,"tarball, folder, http url, or git url.")
    }
    // There's no need to have 404 in the message as well.
    msg[0] = msg[0].replace(/^404\s+/, "")
    log.error("404", msg.join("\n"))
    break

  case "EPUBLISHCONFLICT":
    log.error("publish fail", ["Cannot publish over existing version."
              ,"Update the 'version' field in package.json and try again."
              ,""
              ,"To automatically increment version numbers, see:"
              ," npm help version"
              ].join("\n"))
    break

  case "EISGIT":
    log.error("git", [er.message
              ," "+er.path
              ,"Refusing to remove it. Update manually,"
              ,"or move it out of the way first."
              ].join("\n"))
    break

  case "ECYCLE":
    log.error("cycle", [er.message
              ,"While installing: "+er.pkgid
              ,"Found a pathological dependency case that npm cannot solve."
              ,"Please report this to the package author."
              ].join("\n"))
    break

  case "EBADPLATFORM":
    log.error("notsup", [er.message
              ,"Not compatible with your operating system or architecture: "+er.pkgid
              ,"Valid OS: "+er.os.join(",")
              ,"Valid Arch: "+er.cpu.join(",")
              ,"Actual OS: "+process.platform
              ,"Actual Arch: "+process.arch
              ].join("\n"))
    break

  case "EEXIST":
    log.error("", [er.message
              ,"File exists: "+er.path
              ,"Move it away, and try again."].join("\n"))
    break

  case "ENEEDAUTH":
    log.error("need auth", [er.message
              ,"You need to authorize this machine using `npm adduser`"
              ].join("\n"))
    break

  case "EPEERINVALID":
    var peerErrors = Object.keys(er.peersDepending).map(function (peer) {
      return "Peer " + peer + " wants " + er.packageName + "@"
        + er.peersDepending[peer]
    })
    log.error("peerinvalid", [er.message].concat(peerErrors).join("\n"))
    break

  case "ECONNRESET":
  case "ENOTFOUND":
  case "ETIMEDOUT":
  case "EAI_FAIL":
    log.error("network", [er.message
              ,"This is most likely not a problem with npm itself"
              ,"and is related to network connectivity."
              ,"In most cases you are behind a proxy or have bad network settings."
              ,"\nIf you are behind a proxy, please make sure that the"
              ,"'proxy' config is set properly. See: 'npm help config'"
              ].join("\n"))
    break

  case "ENOPACKAGEJSON":
    log.error("package.json", [er.message
              ,"This is most likely not a problem with npm itself."
              ,"npm can't find a package.json file in your current directory."
              ].join("\n"))
    break

  case "ETARGET":
    var msg = [er.message
              ,"This is most likely not a problem with npm itself."
              ,"In most cases you or one of your dependencies are requesting"
              ,"a package version that doesn't exist."
              ]
    if (er.parent) {
      msg.push("\nIt was specified as a dependency of '"+er.parent+"'\n")
    }
    log.error("notarget", msg.join("\n"))
    break

  case "ENOTSUP":
    if (er.required) {
      log.error("notsup", [er.message
                ,"Not compatible with your version of node/npm: "+er.pkgid
                ,"Required: "+JSON.stringify(er.required)
                ,"Actual: "
                +JSON.stringify({npm:npm.version
                                ,node:npm.config.get("node-version")})
                ].join("\n"))
      break
    } // else passthrough

  case "ENOSPC":
    log.error("nospc", [er.message
              ,"This is most likely not a problem with npm itself"
              ,"and is related to insufficient space on your system."
              ].join("\n"))
    break

  case "EROFS":
    log.error("rofs", [er.message
              ,"This is most likely not a problem with npm itself"
              ,"and is related to the file system being read-only."
              ,"\nOften virtualized file systems, or other file systems"
              ,"that don't support symlinks, give this error."
              ].join("\n"))
    break

  case "ENOENT":
    log.error("enoent", [er.message
              ,"This is most likely not a problem with npm itself"
              ,"and is related to npm not being able to find a file."
              ,er.file?"\nCheck if the file '"+er.file+"' is present.":""
              ].join("\n"))
    break

  case "EISDIR":
    log.error("eisdir", [er.message
              ,"This is most likely not a problem with npm itself"
              ,"and is related to npm not being able to find a package.json in"
              ,"a package you are trying to install."
              ].join("\n"))
    break

  default:
    log.error("", er.message || er)
    log.error("", ["", "If you need help, you may report this error at:"
              ," <https://github.com/npm/npm/issues>"
              ].join("\n"))
    break
  }

  exit(typeof er.errno === "number" ? er.errno : 1)
}
// Guard so concurrent failures trigger only one log write.
var writingLogFile = false

// Serialize the in-memory npmlog record into npm-debug.log (written
// atomically) and invoke cb once the stream has closed.
function writeLogFile (cb) {
  if (writingLogFile) return cb()
  writingLogFile = true
  wroteLogFile = true

  var os = require("os")
  var fstr = writeStreamAtomic("npm-debug.log")
  var out = ""

  log.record.forEach(function (m) {
    // Each output line is prefixed with "<id> <level> [<prefix>]".
    var parts = [m.id, m.level]
    if (m.prefix) parts.push(m.prefix)
    var pref = parts.join(" ")

    m.message.trim().split(/\r?\n/).forEach(function (line) {
      out += (pref + " " + line).trim() + os.EOL
    })
  })

  fstr.end(out)
  fstr.on("close", cb)
}

197
node_modules/npm/lib/utils/gently-rm.js generated vendored Normal file
View File

@@ -0,0 +1,197 @@
// only remove the thing if it's a symlink into a specific folder.
// This is a very common use-case of npm's, but not so common elsewhere.
module.exports = gentlyRm

// Filesystem helpers use graceful-fs; vacuum removes empty parents too.
var npm = require('../npm.js')
var log = require('npmlog')
var resolve = require('path').resolve
var dirname = require('path').dirname
var lstat = require('graceful-fs').lstat
var readlink = require('graceful-fs').readlink
var isInside = require('path-is-inside')
var vacuum = require('fs-vacuum')
var some = require('async-some')
var asyncMap = require('slide').asyncMap
var normalize = require('path').normalize
// Remove `target` from disk, but only when it is safe to do so.
//
// "gently" means: only delete things that live inside (or symlink
// into) a directory npm manages; anything else fails via clobberFail.
// Without gently, the target is vacuumed unconditionally. `base`
// optionally overrides the vacuum base directory. npm's own
// root/prefix/bin directories are never removable.
function gentlyRm (target, gently, base, cb) {
  // gently and base are both optional; shift arguments accordingly
  if (!cb) {
    cb = base
    base = undefined
  }
  if (!cb) {
    cb = gently
    gently = false
  }

  log.silly(
    'gentlyRm',
    target,
    'is being', gently ? 'gently removed' : 'purged',
    base ? 'from base ' + base : ''
  )

  // never rm the root, prefix, or bin dirs
  //
  // globals included because of `npm link` -- as far as the package requesting
  // the link is concerned, the linked package is always installed globally
  var prefixes = [
    npm.prefix,
    npm.globalPrefix,
    npm.dir,
    npm.root,
    npm.globalDir,
    npm.bin,
    npm.globalBin
  ]

  var resolved = normalize(resolve(npm.prefix, target))
  if (prefixes.indexOf(resolved) !== -1) {
    log.verbose('gentlyRm', resolved, "is part of npm and can't be removed")
    return cb(new Error('May not delete: ' + resolved))
  }

  var options = { log: log.silly.bind(log, 'vacuum-fs') }
  if (npm.config.get('force') || !gently) options.purge = true
  if (base) options.base = normalize(resolve(npm.prefix, base))

  if (!gently) {
    log.verbose('gentlyRm', "don't care about contents; nuking", resolved)
    return vacuum(resolved, options, cb)
  }

  var parent = options.base = normalize(base ? resolve(npm.prefix, base) : npm.prefix)

  // is the parent directory managed by npm?
  log.silly('gentlyRm', 'verifying', parent, 'is an npm working directory')
  some(prefixes, isManaged(parent), function (er, matched) {
    if (er) return cb(er)

    if (!matched) {
      log.error('gentlyRm', 'containing path', parent, "isn't under npm's control")
      return clobberFail(resolved, parent, cb)
    }
    log.silly('gentlyRm', 'containing path', parent, "is under npm's control, in", matched)

    // is the target directly contained within the (now known to be
    // managed) parent?
    if (isInside(resolved, parent)) {
      log.silly('gentlyRm', 'deletion target', resolved, 'is under', parent)
      log.verbose('gentlyRm', 'vacuuming from', resolved, 'up to', parent)
      return vacuum(resolved, options, cb)
    }
    log.silly('gentlyRm', resolved, 'is not under', parent)

    // the target isn't directly within the parent, but is it itself managed?
    log.silly('gentlyRm', 'verifying', resolved, 'is an npm working directory')
    some(prefixes, isManaged(resolved), function (er, matched) {
      if (er) return cb(er)

      if (matched) {
        log.silly('gentlyRm', resolved, "is under npm's control, in", matched)
        options.base = matched
        log.verbose('gentlyRm', 'removing', resolved, 'with base', options.base)
        return vacuum(resolved, options, cb)
      }
      log.verbose('gentlyRm', resolved, "is not under npm's control")

      // the target isn't managed directly, but maybe it's a link...
      log.silly('gentlyRm', 'checking to see if', resolved, 'is a link')
      lstat(resolved, function (er, stat) {
        if (er) {
          // race conditions are common when unbuilding
          if (er.code === 'ENOENT') return cb(null)
          return cb(er)
        }

        if (!stat.isSymbolicLink()) {
          log.error('gentlyRm', resolved, 'is outside', parent, 'and not a link')
          return clobberFail(resolved, parent, cb)
        }

        // ...and maybe the link source, when read...
        log.silly('gentlyRm', resolved, 'is a link')
        readlink(resolved, function (er, link) {
          if (er) {
            // race conditions are common when unbuilding
            if (er.code === 'ENOENT') return cb(null)
            return cb(er)
          }

          // ...is inside the managed parent
          var source = resolve(dirname(resolved), link)
          if (isInside(source, parent)) {
            log.silly('gentlyRm', source, 'symlink target', resolved, 'is inside', parent)
            log.verbose('gentlyRm', 'vacuuming', resolved)
            return vacuum(resolved, options, cb)
          }

          log.error('gentlyRm', source, 'symlink target', resolved, 'is not controlled by npm', parent)
          return clobberFail(target, parent, cb)
        })
      })
    })
  })
}
// Memoized symlink resolutions, keyed by resolved absolute path.
var resolvedPaths = {}

// Build a predicate (for async-some) answering: does `path` manage
// `target`? Both are resolved through symlinks first. The predicate
// yields the managing path on success, false otherwise.
function isManaged (target) {
  return function predicate (path, cb) {
    if (!path) {
      log.verbose('isManaged', 'no path passed for target', target)
      return cb(null, false)
    }
    asyncMap([path, target], resolveSymlink, function (er, results) {
      if (er) {
        // a path that doesn't exist simply isn't managed
        if (er.code === 'ENOENT') return cb(null, false)
        return cb(er)
      }
      // shadow the outer path/target with their resolved forms
      var path = results[0]
      var target = results[1]
      var inside = isInside(target, path)
      if (!inside) log.silly('isManaged', target, 'is not inside', path)
      return cb(null, inside && path)
    })
  }
  // Resolve one path through a symlink (if it is one), memoizing the
  // result. (Function declaration: hoisted above the return.)
  function resolveSymlink (toResolve, cb) {
    var resolved = resolve(npm.prefix, toResolve)
    // if the path has already been memoized, return immediately
    var cached = resolvedPaths[resolved]
    if (cached) return cb(null, cached)
    // otherwise, check the path
    lstat(resolved, function (er, stat) {
      if (er) return cb(er)
      // if it's not a link, cache & return the path itself
      if (!stat.isSymbolicLink()) {
        resolvedPaths[resolved] = resolved
        return cb(null, resolved)
      }
      // otherwise, cache & return the link's source
      readlink(resolved, function (er, source) {
        if (er) return cb(er)
        resolved = resolve(resolved, source)
        resolvedPaths[resolved] = resolved
        cb(null, resolved)
      })
    })
  }
}
// Report a refusal to delete `target` because it lives outside the
// npm-controlled `root`. The callback receives an EEXIST-coded Error
// carrying the offending path.
function clobberFail (target, root, cb) {
  var message = 'Refusing to delete: ' + target + ' not in ' + root
  var failure = new Error(message)
  failure.code = 'EEXIST'
  failure.path = target
  return cb(failure)
}

25
node_modules/npm/lib/utils/get-publish-config.js generated vendored Normal file
View File

@@ -0,0 +1,25 @@
var Conf = require('../config/core.js').Conf
var CachingRegClient = require('../cache/caching-client.js')
var log = require('npmlog')

module.exports = getPublishConfig
// Resolve the effective config and registry client for a publish.
// Without a publishConfig the defaults pass straight through. With
// one, a child Conf layered on the defaults receives a copy of the
// publishConfig values, and a fresh caching client is built on it.
function getPublishConfig (publishConfig, defaultConfig, defaultClient) {
  var config = defaultConfig
  var client = defaultClient
  log.verbose('getPublishConfig', publishConfig)
  if (publishConfig) {
    config = new Conf(defaultConfig)
    config.save = defaultConfig.save.bind(defaultConfig)
    // don't modify the actual publishConfig object, in case we have
    // to set a login token or some other data.
    var overlay = {}
    Object.keys(publishConfig).forEach(function (k) {
      overlay[k] = publishConfig[k]
    })
    config.unshift(overlay)
    client = new CachingRegClient(config)
  }
  return { config: config, client: client }
}

51
node_modules/npm/lib/utils/git.js generated vendored Normal file
View File

@@ -0,0 +1,51 @@
// handle some git configuration for windows
exports.spawn = spawnGit
exports.chainableExec = chainableExec
exports.whichAndExec = whichAndExec

var exec = require("child_process").execFile
  , spawn = require("./spawn")
  , npm = require("../npm.js")
  , which = require("which")
  , git = npm.config.get("git")   // name/path of the git binary to use
  , assert = require("assert")
  , log = require("npmlog")
// Arguments prepended to every git invocation. On Windows, long-path
// support must be enabled explicitly; elsewhere nothing is needed.
function prefixGitArgs () {
  if (process.platform === "win32") {
    return ["-c", "core.longpaths=true"]
  }
  return []
}
// Run git with the platform prefix args via execFile (buffered output).
function execGit (args, options, cb) {
  log.info('git', args)
  var allArgs = prefixGitArgs().concat(args || [])
  return exec(git, allArgs, options, cb)
}
// Run git with the platform prefix args as a streaming child process.
function spawnGit (args, options) {
  log.info("git", args)
  var allArgs = prefixGitArgs().concat(args || [])
  return spawn(git, allArgs, options)
}
// Package an execGit invocation as [fn, ...args] for use with chain().
function chainableExec () {
  var call = [execGit]
  for (var i = 0; i < arguments.length; i++) call.push(arguments[i])
  return call
}
// Locate the configured git binary on the PATH.
function whichGit (cb) {
  return which(git, cb)
}
// Verify git is installed, then run it. A failure to find git is
// tagged ENOGIT so the error handler can print targeted advice.
function whichAndExec (args, options, cb) {
  assert.equal(typeof cb, "function", "no callback provided")
  // check for git
  whichGit(function (err) {
    if (!err) return execGit(args, options, cb)
    err.code = "ENOGIT"
    return cb(err)
  })
}

369
node_modules/npm/lib/utils/lifecycle.js generated vendored Normal file
View File

@@ -0,0 +1,369 @@
exports = module.exports = lifecycle
exports.cmd = cmd
exports.makeEnv = makeEnv

var log = require("npmlog")
var spawn = require("./spawn")
var npm = require("../npm.js")
var path = require("path")
var fs = require("graceful-fs")
var chain = require("slide").chain
var Stream = require("stream").Stream
var PATH = "PATH"
var uidNumber = require("uid-number")
var umask = require("./umask")

// windows calls it's path "Path" usually, but this is not guaranteed.
// Probe the environment case-insensitively for the real PATH var name.
if (process.platform === "win32") {
  PATH = "Path"
  Object.keys(process.env).forEach(function (e) {
    if (e.match(/^PATH$/i)) {
      PATH = e
    }
  })
}
// Run the lifecycle script `stage` (e.g. "install", "test") for pkg.
//
// pkg    - package.json data (possibly wrapped in ._data)
// stage  - lifecycle event name
// wd     - working directory (optional; defaults to the package dir)
// unsafe - run as the current user even when root (optional)
// failOk - log-and-continue on script failure (optional)
// cb     - completion callback
function lifecycle (pkg, stage, wd, unsafe, failOk, cb) {
  // trailing args are optional: shift left until cb is the callback
  if (typeof cb !== "function") cb = failOk, failOk = false
  if (typeof cb !== "function") cb = unsafe, unsafe = false
  if (typeof cb !== "function") cb = wd, wd = null
  // unwrap registry-style {_data: {...}} wrappers
  while (pkg && pkg._data) pkg = pkg._data
  if (!pkg) return cb(new Error("Invalid package data"))
  log.info(stage, pkg._id)
  if (!pkg.scripts || npm.config.get('ignore-scripts')) pkg.scripts = {}
  validWd(wd || path.resolve(npm.dir, pkg.name), function (er, wd) {
    if (er) return cb(er)
    unsafe = unsafe || npm.config.get("unsafe-perm")
    // Refuse to run scripts from outside npm's own directory tree
    // unless unsafe-perm allows it.
    if ((wd.indexOf(npm.dir) !== 0 ||
         wd.indexOf(pkg.name) !== wd.length - pkg.name.length) &&
        !unsafe && pkg.scripts[stage]) {
      log.warn( "cannot run in wd", "%s %s (wd=%s)"
              , pkg._id, pkg.scripts[stage], wd)
      return cb()
    }
    // set the env variables, then run scripts as a child process.
    var env = makeEnv(pkg)
    env.npm_lifecycle_event = stage
    env.npm_node_execpath = env.NODE = env.NODE || process.execPath
    env.npm_execpath = require.main.filename
    // "nobody" typically doesn't have permission to write to /tmp
    // even if it's never used, sh freaks out.
    if (!npm.config.get("unsafe-perm")) env.TMPDIR = wd
    lifecycle_(pkg, stage, wd, env, unsafe, failOk, cb)
  })
}
// Report (via cb) whether the installed copy of pkg is a symlink.
// Stat errors simply read as "not a link"; cb never receives an error.
function checkForLink (pkg, cb) {
  var installed = path.join(npm.dir, pkg.name)
  fs.lstat(installed, function (er, s) {
    if (er) return cb(null, false)
    cb(null, s.isSymbolicLink())
  })
}
// Second phase: build the child PATH, expose the stage's script in the
// env, and chain the package script plus any .hooks script.
function lifecycle_ (pkg, stage, wd, env, unsafe, failOk, cb) {
  // Collect every node_modules/.bin between the install root and wd,
  // innermost first, so locally installed executables win.
  var pathArr = []
    , p = wd.split("node_modules")
    , acc = path.resolve(p.shift())
  p.forEach(function (pp) {
    pathArr.unshift(path.join(acc, "node_modules", ".bin"))
    acc = path.join(acc, "node_modules", pp)
  })
  pathArr.unshift(path.join(acc, "node_modules", ".bin"))
  // we also unshift the bundled node-gyp-bin folder so that
  // the bundled one will be used for installing things.
  pathArr.unshift(path.join(__dirname, "..", "..", "bin", "node-gyp-bin"))
  // prefer current node interpreter in child scripts
  pathArr.push(path.dirname(process.execPath))
  if (env[PATH]) pathArr.push(env[PATH])
  env[PATH] = pathArr.join(process.platform === "win32" ? ";" : ":")
  var packageLifecycle = pkg.scripts && pkg.scripts.hasOwnProperty(stage)
  if (packageLifecycle) {
    // define this here so it's available to all scripts.
    env.npm_lifecycle_script = pkg.scripts[stage]
  }
  // Translate force/failOk settings into "swallow the error".
  function done (er) {
    if (er) {
      if (npm.config.get("force")) {
        log.info("forced, continuing", er)
        er = null
      } else if (failOk) {
        log.warn("continuing anyway", er.message)
        er = null
      }
    }
    cb(er)
  }
  chain
    ( [ packageLifecycle && [runPackageLifecycle, pkg, env, wd, unsafe]
      , [runHookLifecycle, pkg, env, wd, unsafe] ]
    , done )
}
// Walk upward from `d` until an existing directory is found; cb gets
// that directory, or an error if the filesystem root is reached first.
function validWd (d, cb) {
  fs.stat(d, function (er, st) {
    if (!er && st.isDirectory()) return cb(null, d)
    var parent = path.dirname(d)
    if (parent === d) {
      // reached the root without finding a usable directory
      return cb(new Error("Could not find suitable wd"))
    }
    return validWd(parent, cb)
  })
}
// run package lifecycle scripts in the package root, or the nearest parent.
function runPackageLifecycle (pkg, env, wd, unsafe, cb) {
  // stage and command were stashed on the env by lifecycle()
  var stage = env.npm_lifecycle_event
    , cmd = env.npm_lifecycle_script
  // banner echoed to the console before the script runs
  var note = "\n> " + pkg._id + " " + stage + " " + wd
           + "\n> " + cmd + "\n"
  runCmd(note, cmd, pkg, env, stage, wd, unsafe, cb)
}
// Lifecycle scripts run strictly one at a time; pending runs wait in
// `queue` (as runCmd argument arrays) until the current one finishes.
var running = false
var queue = []
// Release the lock and start the next queued script, if any.
function dequeue() {
  running = false
  if (queue.length) {
    var r = queue.shift()
    runCmd.apply(null, r)
  }
}
// Run one lifecycle command, serialized through the queue above.
// Outside unsafe mode, the configured user/group are resolved to
// numeric ids first so the child can drop privileges.
function runCmd (note, cmd, pkg, env, stage, wd, unsafe, cb) {
  if (running) {
    queue.push([note, cmd, pkg, env, stage, wd, unsafe, cb])
    return
  }
  running = true
  log.pause()
  var user = unsafe ? null : npm.config.get("user")
    , group = unsafe ? null : npm.config.get("group")
  if (log.level !== 'silent') {
    if (npm.spinner.int) {
      // clear the spinner's line before printing the banner
      npm.config.get("logstream").write("\r \r")
    }
    console.log(note)
  }
  log.verbose("unsafe-perm in lifecycle", unsafe)
  // Windows has no setuid semantics; always run "unsafe" there.
  if (process.platform === "win32") {
    unsafe = true
  }
  if (unsafe) {
    runCmd_(cmd, pkg, env, wd, stage, unsafe, 0, 0, cb)
  } else {
    uidNumber(user, group, function (er, uid, gid) {
      runCmd_(cmd, pkg, env, wd, stage, unsafe, uid, gid, cb)
    })
  }
}
// Actually spawn the shell that executes a lifecycle command.
// uid/gid are applied only when not unsafe. The wrapped callback
// resumes logging and releases the run queue after completion.
function runCmd_ (cmd, pkg, env, wd, stage, unsafe, uid, gid, cb_) {
  function cb (er) {
    cb_.apply(null, arguments)
    log.resume()
    process.nextTick(dequeue)
  }
  var conf = { cwd: wd
             , env: env
             , stdio: [ 0, 1, 2 ]
             }
  if (!unsafe) {
    // coerce to integers (NaN ^ 0 === 0)
    conf.uid = uid ^ 0
    conf.gid = gid ^ 0
  }
  var sh = 'sh'
  var shFlag = '-c'
  if (process.platform === 'win32') {
    sh = process.env.comspec || 'cmd'
    shFlag = '/d /s /c'
    conf.windowsVerbatimArguments = true
  }
  var proc = spawn(sh, [shFlag, cmd], conf)
  proc.on("error", procError)
  proc.on("close", function (code, signal) {
    if (signal) {
      // die with the same signal the script died with
      process.kill(process.pid, signal);
    } else if (code) {
      // note: `er` is var-hoisted; it stays undefined on clean exits
      var er = new Error("Exit status " + code)
    }
    procError(er)
  })
  // Decorate script failures with lifecycle metadata — unless we're in
  // rollback mode, in which case errors are logged and swallowed.
  function procError (er) {
    if (er && !npm.ROLLBACK) {
      log.info(pkg._id, "Failed to exec "+stage+" script")
      er.message = pkg._id + " "
                 + stage + ": `" + cmd +"`\n"
                 + er.message
      if (er.code !== "EPERM") {
        er.code = "ELIFECYCLE"
      }
      er.pkgid = pkg._id
      er.stage = stage
      er.script = cmd
      er.pkgname = pkg.name
      return cb(er)
    } else if (er) {
      log.error(pkg._id+"."+stage, er)
      log.error(pkg._id+"."+stage, "continuing anyway")
      return cb()
    }
    cb(er)
  }
}
// Check for a hook script at <npm.dir>/.hooks/<stage> and run it if present.
// A missing hook is not an error: the callback fires with no arguments.
// Fix: the original computed `user`/`group` from npm.config but never used
// them (runCmd resolves them itself), and aliased `cmd = hook` redundantly;
// both removed.
function runHookLifecycle (pkg, env, wd, unsafe, cb) {
  var stage = env.npm_lifecycle_event
  var hook = path.join(npm.dir, ".hooks", stage)
  fs.stat(hook, function (er) {
    if (er) return cb() // no hook for this stage: nothing to do
    var note = "\n> " + pkg._id + " " + stage + " " + wd
             + "\n> " + hook
    runCmd(note, hook, pkg, env, stage, wd, unsafe, cb)
  })
}
// Build the environment for lifecycle scripts from package `data`.
// Recurses into nested objects, flattening keys as npm_package_a_b_c.
// On the top-level call (prefix "npm_package_") it also exports npm config
// values as npm_config_* and per-package config as npm_package_config_*.
// Returns the (possibly newly created) env object.
function makeEnv (data, prefix, env) {
  prefix = prefix || "npm_package_"
  if (!env) {
    // top-level call: start from process.env minus any stale npm_* keys
    env = {}
    for (var i in process.env) if (!i.match(/^npm_/)) {
      env[i] = process.env[i]
    }
    // npat asks for tap output
    if (npm.config.get("npat")) env.TAP = 1
    // express and others respect the NODE_ENV value.
    if (npm.config.get("production")) env.NODE_ENV = "production"
  } else if (!data.hasOwnProperty("_lifecycleEnv")) {
    // stash the env on the package data (non-enumerable, so it is not
    // re-exported by the loop below)
    Object.defineProperty(data, "_lifecycleEnv",
      { value : env
      , enumerable : false
      })
  }
  // export every non-underscore-prefixed field of `data`
  for (var i in data) if (i.charAt(0) !== "_") {
    var envKey = (prefix+i).replace(/[^a-zA-Z0-9_]/g, '_')
    if (i === "readme") {
      continue
    }
    if (data[i] && typeof(data[i]) === "object") {
      try {
        // quick and dirty detection for cyclical structures
        JSON.stringify(data[i])
        makeEnv(data[i], envKey+"_", env)
      } catch (ex) {
        // usually these are package objects.
        // just get the path and basic details.
        var d = data[i]
        makeEnv( { name: d.name, version: d.version, path:d.path }
               , envKey+"_", env)
      }
    } else {
      env[envKey] = String(data[i])
      // multi-line values are JSON-encoded so they survive as env vars
      env[envKey] = -1 !== env[envKey].indexOf("\n")
                  ? JSON.stringify(env[envKey])
                  : env[envKey]
    }
  }
  // only the top-level call exports config; recursive calls stop here
  if (prefix !== "npm_package_") return env
  prefix = "npm_config_"
  var pkgConfig = {}
    , keys = npm.config.keys
    , pkgVerConfig = {}
    , namePref = data.name + ":"
    , verPref = data.name + "@" + data.version + ":"
  keys.forEach(function (i) {
    // in some rare cases (e.g. working with nerf darts), there are segmented
    // "private" (underscore-prefixed) config names -- don't export
    if (i.charAt(0) === '_' && i.indexOf('_' + namePref) !== 0 || i.match(/:_/)) {
      return
    }
    var value = npm.config.get(i)
    // streams and arrays cannot be represented as env vars
    if (value instanceof Stream || Array.isArray(value)) return
    if (i.match(/umask/)) value = umask.toString(value)
    if (!value) value = ""
    else if (typeof value === "number") value = "" + value
    else if (typeof value !== "string") value = JSON.stringify(value)
    value = -1 !== value.indexOf("\n")
          ? JSON.stringify(value)
          : value
    i = i.replace(/^_+/, "")
    // collect <name>:<key> and <name>@<version>:<key> entries separately;
    // they become npm_package_config_* below (version-scoped wins)
    if (i.indexOf(namePref) === 0) {
      var k = i.substr(namePref.length).replace(/[^a-zA-Z0-9_]/g, "_")
      pkgConfig[ k ] = value
    } else if (i.indexOf(verPref) === 0) {
      var k = i.substr(verPref.length).replace(/[^a-zA-Z0-9_]/g, "_")
      pkgVerConfig[ k ] = value
    }
    var envKey = (prefix+i).replace(/[^a-zA-Z0-9_]/g, "_")
    env[envKey] = value
  })
  prefix = "npm_package_config_"
  ;[pkgConfig, pkgVerConfig].forEach(function (conf) {
    for (var i in conf) {
      var envKey = (prefix+i)
      env[envKey] = conf[i]
    }
  })
  return env
}
// Build an npm command object that is an alias for `npm run-script <stage>`
// (used for e.g. `npm test`, `npm start`).
function cmd (stage) {
  function CMD (args, cb) {
    npm.commands["run-script"]([stage].concat(args), cb)
  }
  CMD.usage = "npm " + stage + " [-- <args>]"

  var installedShallow = require("./completion/installed-shallow.js")
  CMD.completion = function (opts, cb) {
    // offer only packages whose package.json declares this script
    var declaresStage = function (d) {
      return d.scripts && d.scripts[stage]
    }
    installedShallow(opts, declaresStage, cb)
  }
  return CMD
}

40
node_modules/npm/lib/utils/link.js generated vendored Normal file
View File

@@ -0,0 +1,40 @@
module.exports = link
link.ifExists = linkIfExists
var fs = require("graceful-fs")
, chain = require("slide").chain
, mkdir = require("mkdirp")
, rm = require("./gently-rm.js")
, path = require("path")
, npm = require("../npm.js")
// Like link(), but a missing source path is treated as silent success
// rather than an error.
function linkIfExists (from, to, gently, cb) {
  fs.stat(from, function (statErr) {
    if (statErr) return cb() // nothing to link; not an error
    link(from, to, gently, cb)
  })
}
// Create a symlink ("junction" type, for Windows compat) at `to` pointing
// at `from`.  `gently` (optional) is forwarded to gently-rm to constrain
// what may be removed; `abs` (optional) forces an absolute link target.
function link (from, to, gently, abs, cb) {
  // both middle args are optional; shift right-to-left
  if (typeof cb !== "function") cb = abs, abs = false
  if (typeof cb !== "function") cb = gently, gently = null
  if (npm.config.get("force")) gently = false
  to = path.resolve(to)
  var target = from = path.resolve(from)
  if (!abs && process.platform !== "win32") {
    // junctions on windows must be absolute
    target = path.relative(path.dirname(to), from)
    // if there is no folder in common, then it will be much
    // longer, and using a relative link is dumb.
    if (target.length >= from.length) target = from
  }
  // stat the source, remove the destination, ensure the parent dir,
  // then create the link — in order, aborting on the first error
  chain
    ( [ [fs, "stat", from]
      , [rm, to, gently]
      , [mkdir, path.dirname(to)]
      , [fs, "symlink", target, to, "junction"] ]
    , cb)
}

73
node_modules/npm/lib/utils/locker.js generated vendored Normal file
View File

@@ -0,0 +1,73 @@
var crypto = require("crypto")
var resolve = require("path").resolve
var lockfile = require("lockfile")
var log = require("npmlog")
var mkdirp = require("mkdirp")
var npm = require("../npm.js")
var correctMkdir = require('../utils/correct-mkdir.js')
var installLocks = {}
// Compute the lock-file path for (base, name).  Lock files live in
// <npm.cache>/_locks; the file name combines a sanitized, truncated form
// of `name` (human readability) with a sha1 of the resolved path
// (uniqueness).
function lockFileName (base, name) {
  var slug = name.replace(/[^a-zA-Z0-9]+/g, "-").replace(/^-+|-+$/g, "")
  var fullPath = resolve(base, name)
  var digest = crypto.createHash("sha1").update(fullPath).digest("hex")
  var lockDir = resolve(npm.cache, "_locks")
  return resolve(lockDir, slug.substr(0, 24) + "-" + digest.substr(0, 16) + ".lock")
}
// Acquire an advisory lock for (base, name) under the npm cache's _locks
// directory, creating that directory first if needed.  On success the
// lock is recorded in installLocks so unlock() can release it.
function lock (base, name, cb) {
  var lockDir = resolve(npm.cache, "_locks")
  correctMkdir(lockDir, function (mkdirErr) {
    if (mkdirErr) return cb(mkdirErr)

    var opts = {
      stale: npm.config.get("cache-lock-stale"),
      retries: npm.config.get("cache-lock-retries"),
      wait: npm.config.get("cache-lock-wait")
    }
    var lf = lockFileName(base, name)
    lockfile.lock(lf, opts, function (lockErr) {
      if (lockErr) log.warn("locking", lf, "failed", lockErr)

      if (!lockErr) {
        log.verbose("lock", "using", lf, "for", resolve(base, name))
        installLocks[lf] = true
      }
      cb(lockErr)
    })
  })
}
// Release a lock previously acquired via lock() for (base, name).
// Already-released locks succeed asynchronously; attempting to unlock a
// pair that was never locked throws synchronously (programmer error).
function unlock (base, name, cb) {
  var lf = lockFileName(base, name)
  var state = installLocks[lf]

  if (state === false) {
    // already released; keep the callback asynchronous for consistency
    process.nextTick(cb)
  } else if (state === true) {
    lockfile.unlock(lf, function (er) {
      if (er) {
        log.warn("unlocking", lf, "failed", er)
      } else {
        installLocks[lf] = false
        log.verbose("unlock", "done using", lf, "for", resolve(base, name))
      }
      cb(er)
    })
  } else {
    throw new Error(
      "Attempt to unlock " + resolve(base, name) + ", which hasn't been locked"
    )
  }
}
// Public interface: paired acquire/release of install locks.
module.exports = {
  lock : lock,
  unlock : unlock
}

100
node_modules/npm/lib/utils/map-to-registry.js generated vendored Normal file
View File

@@ -0,0 +1,100 @@
var url = require("url")
var log = require("npmlog")
, npa = require("npm-package-arg")
module.exports = mapToRegistry
// Resolve a package `name` to the registry URI it should be fetched from,
// plus the credentials that are safe to send there.
// Registry precedence: scope embedded in the name, then the --scope
// config, then the default registry.
// Calls back with (er, uri, auth, normalizedRegistryBase).
function mapToRegistry(name, config, cb) {
  log.silly("mapToRegistry", "name", name)
  var registry

  // the name itself takes precedence
  var data = npa(name)
  if (data.scope) {
    // the name is definitely scoped, so escape now
    name = name.replace("/", "%2f")

    log.silly("mapToRegistry", "scope (from package name)", data.scope)

    registry = config.get(data.scope + ":registry")
    if (!registry) {
      log.verbose("mapToRegistry", "no registry URL found in name for scope", data.scope)
    }
  }

  // ...then --scope=@scope or --scope=scope
  var scope = config.get("scope")
  if (!registry && scope) {
    // I'm an enabler, sorry
    if (scope.charAt(0) !== "@") scope = "@" + scope

    log.silly("mapToRegistry", "scope (from config)", scope)

    registry = config.get(scope + ":registry")
    if (!registry) {
      log.verbose("mapToRegistry", "no registry URL found in config for scope", scope)
    }
  }

  // ...and finally use the default registry
  if (!registry) {
    log.silly("mapToRegistry", "using default registry")
    registry = config.get("registry")
  }

  log.silly("mapToRegistry", "registry", registry)

  var auth = config.getCredentialsByURI(registry)

  // normalize registry URL so resolution doesn't drop a piece of registry URL
  var normalized = registry.slice(-1) !== '/' ? registry + '/' : registry
  var uri
  log.silly('mapToRegistry', 'data', data)
  if (data.type === 'remote') {
    // the "name" was already a full URL; use it as-is
    uri = data.spec
  } else {
    uri = url.resolve(normalized, name)
  }
  log.silly('mapToRegistry', 'uri', uri)

  cb(null, uri, scopeAuth(uri, registry, auth), normalized)
}
// Return a copy of the stored credentials with the secret fields
// (token / auth / username / password) included only when it is safe to
// send them to `uri`: the request host matches the registry host the
// credentials were saved for, or alwaysAuth is set.
function scopeAuth (uri, registry, auth) {
  var cleaned = {
    scope: auth.scope,
    email: auth.email,
    alwaysAuth: auth.alwaysAuth,
    token: undefined,
    username: undefined,
    password: undefined,
    auth: undefined
  }

  var hasCreds = auth.token || auth.auth || (auth.username && auth.password)
  if (hasCreds) {
    var requestHost = url.parse(uri).hostname
    var registryHost = url.parse(registry).hostname
    var sameHost = requestHost === registryHost

    if (!sameHost && !auth.alwaysAuth) {
      log.silly('scopeAuth', uri, "doesn't share host with registry", registry)
    } else {
      if (!sameHost) log.verbose('scopeAuth', 'alwaysAuth set for', registry)
      cleaned.token = auth.token
      cleaned.auth = auth.auth
      cleaned.username = auth.username
      cleaned.password = auth.password
    }
  }

  return cleaned
}

12
node_modules/npm/lib/utils/read-local-package.js generated vendored Normal file
View File

@@ -0,0 +1,12 @@
exports = module.exports = readLocalPkg
var npm = require("../npm.js")
, readJson = require("read-package-json")
// Resolve the name of the package at the current prefix.
// In global mode there is no local package: call back with nothing.
function readLocalPkg (cb) {
  if (npm.config.get("global")) return cb()
  var path = require("path")
  readJson(path.resolve(npm.prefix, "package.json"), function (er, d) {
    cb(er, d && d.name)
  })
}

34
node_modules/npm/lib/utils/spawn.js generated vendored Normal file
View File

@@ -0,0 +1,34 @@
module.exports = spawn
var _spawn = require("child_process").spawn
var EventEmitter = require("events").EventEmitter
// Wrap child_process.spawn so that errors always carry the offending
// command in `er.file`, and so a "command not found" exit (shell
// convention: status 127) is surfaced as an ENOENT "error" event —
// Node.js v0.8 did not emit one itself.
function spawn (cmd, args, options) {
  var raw = _spawn(cmd, args, options)
  var cooked = new EventEmitter()

  raw.on("error", function (er) {
    er.file = cmd
    cooked.emit("error", er)
  })
  raw.on("close", function (code, signal) {
    if (code === 127) {
      var er = new Error('spawn ENOENT')
      er.code = 'ENOENT'
      er.errno = 'ENOENT'
      er.syscall = 'spawn'
      er.file = cmd
      cooked.emit('error', er)
    } else {
      cooked.emit("close", code, signal)
    }
  })

  // expose the raw child's stdio and kill() on the cooked emitter
  cooked.stdin = raw.stdin
  cooked.stdout = raw.stdout
  cooked.stderr = raw.stderr
  cooked.kill = function (sig) { return raw.kill(sig) }

  return cooked
}

291
node_modules/npm/lib/utils/tar.js generated vendored Normal file
View File

@@ -0,0 +1,291 @@
// commands for packing and unpacking tarballs
// this file is used by lib/cache.js
var npm = require("../npm.js")
, fs = require("graceful-fs")
, writeFileAtomic = require("write-file-atomic")
, writeStreamAtomic = require("fs-write-stream-atomic")
, path = require("path")
, log = require("npmlog")
, uidNumber = require("uid-number")
, rm = require("./gently-rm.js")
, readJson = require("read-package-json")
, myUid = process.getuid && process.getuid()
, myGid = process.getgid && process.getgid()
, tar = require("tar")
, zlib = require("zlib")
, fstream = require("fstream")
, Packer = require("fstream-npm")
, lifecycle = require("./lifecycle.js")
// When npm runs as root under sudo, prefer the invoking user's uid/gid
// (from the SUDO_* env vars) — gunzTarPerm uses myUid/myGid for the
// ownership of extracted files.
if (process.env.SUDO_UID && myUid === 0) {
  if (!isNaN(process.env.SUDO_UID)) myUid = +process.env.SUDO_UID
  if (!isNaN(process.env.SUDO_GID)) myGid = +process.env.SUDO_GID
}

exports.pack = pack
exports.unpack = unpack
// Create a gzipped package tarball from `folder`.
// `dfc` ("do fancy crap") is optional; when set, the prepublish
// lifecycle is run in `folder` before packing.
function pack (tarball, folder, pkg, dfc, cb) {
  log.verbose("tar pack", [tarball, folder])
  if (typeof cb !== "function") cb = dfc, dfc = false

  log.verbose("tarball", tarball)
  log.verbose("folder", folder)

  if (dfc) {
    return lifecycle(pkg, "prepublish", folder, function (er) {
      if (er) return cb(er)
      pack_(tarball, folder, pkg, cb)
    })
  }
  pack_(tarball, folder, pkg, cb)
}
// Stream `folder` through fstream-npm (file selection) -> tar -> gzip
// into `tarball`, written atomically.  Each pipeline stage wires its own
// error handler, so `cb` may fire with an error from any stage, or with
// no error on "close" of the final write stream.
function pack_ (tarball, folder, pkg, cb) {
  new Packer({ path: folder, type: "Directory", isDirectory: true })
    .on("error", function (er) {
      if (er) log.error("tar pack", "Error reading " + folder)
      return cb(er)
    })

    // By default, npm includes some proprietary attributes in the
    // package tarball. This is sane, and allowed by the spec.
    // However, npm *itself* excludes these from its own package,
    // so that it can be more easily bootstrapped using old and
    // non-compliant tar implementations.
    .pipe(tar.Pack({ noProprietary: !npm.config.get("proprietary-attribs") }))
    .on("error", function (er) {
      if (er) log.error("tar.pack", "tar creation error", tarball)
      cb(er)
    })
    .pipe(zlib.Gzip())
    .on("error", function (er) {
      if (er) log.error("tar.pack", "gzip error "+tarball)
      cb(er)
    })
    .pipe(writeStreamAtomic(tarball))
    .on("error", function (er) {
      if (er) log.error("tar.pack", "Could not write "+tarball)
      cb(er)
    })
    .on("close", cb)
}
// Extract `tarball` into `unpackTarget`.  All arguments between the
// target and the callback are optional and are normalized right-to-left,
// defaulting modes to npm.modes.file / npm.modes.exec.
function unpack (tarball, unpackTarget, dMode, fMode, uid, gid, cb) {
  log.verbose("tar", "unpack", tarball)
  log.verbose("tar", "unpacking to", unpackTarget)
  if (typeof cb !== "function") { cb = gid; gid = null }
  if (typeof cb !== "function") { cb = uid; uid = null }
  if (typeof cb !== "function") { cb = fMode; fMode = npm.modes.file }
  if (typeof cb !== "function") { cb = dMode; dMode = npm.modes.exec }

  // resolve user/group names to numeric ids before extracting
  uidNumber(uid, gid, function (er, resolvedUid, resolvedGid) {
    if (er) return cb(er)
    unpack_(tarball, unpackTarget, dMode, fMode, resolvedUid, resolvedGid, cb)
  })
}
// Remove whatever is at the target, extract the tarball there (with the
// given modes and ownership), then read and return the extracted
// package.json via `cb`.
function unpack_ (tarball, unpackTarget, dMode, fMode, uid, gid, cb) {
  rm(unpackTarget, function (er) {
    if (er) return cb(er)
    // gzip {tarball} --decompress --stdout \
    //   | tar -mvxpf - --strip-components=1 -C {unpackTarget}
    gunzTarPerm(tarball, unpackTarget, dMode, fMode, uid, gid, onExtracted)
  })

  function onExtracted (er, folder) {
    if (er) return cb(er)
    readJson(path.resolve(folder, "package.json"), cb)
  }
}
// Extract `tarball` into `target`, normalizing permissions and ownership.
// The first data chunk is sniffed to decide between three formats:
// gzipped tarball, naked tarball, or a single naked .js file (which is
// written to <target>/index.js).  Calls cb_(er, target) exactly once.
function gunzTarPerm (tarball, target, dMode, fMode, uid, gid, cb_) {
  if (!dMode) dMode = npm.modes.exec
  if (!fMode) fMode = npm.modes.file
  log.silly("gunzTarPerm", "modes", [dMode.toString(8), fMode.toString(8)])

  // several stream stages can fail; guard so cb_ only ever fires once
  var cbCalled = false
  function cb (er) {
    if (cbCalled) return
    cbCalled = true
    cb_(er, target)
  }

  var fst = fs.createReadStream(tarball)

  fst.on("open", function (fd) {
    // reject empty files up front with a friendlier error
    fs.fstat(fd, function (er, st) {
      if (er) return fst.emit("error", er)
      if (st.size === 0) {
        er = new Error("0-byte tarball\n" +
                       "Please run `npm cache clean`")
        fst.emit("error", er)
      }
    })
  })

  // figure out who we're supposed to be, if we're not pretending
  // to be a specific user.
  if (npm.config.get("unsafe-perm") && process.platform !== "win32") {
    uid = myUid
    gid = myGid
  }

  // per-entry hook: widen modes so everything stays user-accessible,
  // apply the umask, and stamp ownership when uid/gid are known
  function extractEntry (entry) {
    log.silly("gunzTarPerm", "extractEntry", entry.path)
    // never create things that are user-unreadable,
    // or dirs that are user-un-listable. Only leads to headaches.
    var originalMode = entry.mode = entry.mode || entry.props.mode
    entry.mode = entry.mode | (entry.type === "Directory" ? dMode : fMode)
    entry.mode = entry.mode & (~npm.modes.umask)
    entry.props.mode = entry.mode
    if (originalMode !== entry.mode) {
      log.silly( "gunzTarPerm", "modified mode"
               , [entry.path, originalMode, entry.mode])
    }

    // if there's a specific owner uid/gid that we want, then set that
    if (process.platform !== "win32" &&
        typeof uid === "number" &&
        typeof gid === "number") {
      entry.props.uid = entry.uid = uid
      entry.props.gid = entry.gid = gid
    }
  }

  // strip: 1 drops the single top-level folder inside the tarball
  var extractOpts = { type: "Directory", path: target, strip: 1 }

  if (process.platform !== "win32" &&
      typeof uid === "number" &&
      typeof gid === "number") {
    extractOpts.uid = uid
    extractOpts.gid = gid
  }

  var sawIgnores = {}
  extractOpts.filter = function () {
    // symbolic links are not allowed in packages.
    if (this.type.match(/^.*Link$/)) {
      log.warn( "excluding symbolic link"
              , this.path.substr(target.length + 1)
              + " -> " + this.linkpath )
      return false
    }

    // Note: This mirrors logic in the fs read operations that are
    // employed during tarball creation, in the fstream-npm module.
    // It is duplicated here to handle tarballs that are created
    // using other means, such as system tar or git archive.
    if (this.type === "File") {
      var base = path.basename(this.path)
      if (base === ".npmignore") {
        sawIgnores[ this.path ] = true
      } else if (base === ".gitignore") {
        var npmignore = this.path.replace(/\.gitignore$/, ".npmignore")
        if (sawIgnores[npmignore]) {
          // Skip this one, already seen.
          return false
        } else {
          // Rename, may be clobbered later.
          this.path = npmignore
          this._path = npmignore
        }
      }
    }

    return true
  }

  fst
    .on("error", function (er) {
      if (er) log.error("tar.unpack", "error reading "+tarball)
      cb(er)
    })
    .on("data", function OD (c) {
      // detect what it is.
      // Then, depending on that, we'll figure out whether it's
      // a single-file module, gzipped tarball, or naked tarball.
      // gzipped files all start with 1f8b08
      if (c[0] === 0x1F &&
          c[1] === 0x8B &&
          c[2] === 0x08) {
        fst
          .pipe(zlib.Unzip())
          .on("error", function (er) {
            if (er) log.error("tar.unpack", "unzip error "+tarball)
            cb(er)
          })
          .pipe(tar.Extract(extractOpts))
          .on("entry", extractEntry)
          .on("error", function (er) {
            if (er) log.error("tar.unpack", "untar error "+tarball)
            cb(er)
          })
          .on("close", cb)
      } else if (hasTarHeader(c)) {
        // naked tar
        fst
          .pipe(tar.Extract(extractOpts))
          .on("entry", extractEntry)
          .on("error", function (er) {
            if (er) log.error("tar.unpack", "untar error "+tarball)
            cb(er)
          })
          .on("close", cb)
      } else {
        // naked js file
        var jsOpts = { path: path.resolve(target, "index.js") }

        if (process.platform !== "win32" &&
            typeof uid === "number" &&
            typeof gid === "number") {
          jsOpts.uid = uid
          jsOpts.gid = gid
        }

        fst
          .pipe(fstream.Writer(jsOpts))
          .on("error", function (er) {
            if (er) log.error("tar.unpack", "copy error "+tarball)
            cb(er)
          })
          .on("close", function () {
            // a bare .js "package" must still ship a readable package.json
            var j = path.resolve(target, "package.json")
            readJson(j, function (er, d) {
              if (er) {
                log.error("not a package", tarball)
                return cb(er)
              }
              writeFileAtomic(j, JSON.stringify(d) + "\n", cb)
            })
          })
      }

      // now un-hook, and re-emit the chunk
      fst.removeListener("data", OD)
      fst.emit("data", c)
    })
}
// Detect a tar archive by its header: the magic string "ustar" at byte
// offset 257, followed either by "00" (POSIX ustar) or by " \0"
// (old GNU tar) at offset 262.
function hasTarHeader (c) {
  var magic = [0x75, 0x73, 0x74, 0x61, 0x72] // "ustar"
  for (var i = 0; i < magic.length; i++) {
    if (c[257 + i] !== magic[i]) return false
  }
  var posix = c[262] === 0x00 && c[263] === 0x30 && c[264] === 0x30
  var gnu = c[262] === 0x20 && c[263] === 0x20 && c[264] === 0x00
  return posix || gnu
}

17
node_modules/npm/lib/utils/umask.js generated vendored Normal file
View File

@@ -0,0 +1,17 @@
var umask = require("umask")
var npmlog = require("npmlog")
var _fromString = umask.fromString
module.exports = umask
// fromString with logging callback: delegate to the real parser, warn on
// invalid input, and return whatever the parser reported.  (Assumes the
// umask module invokes the callback synchronously — matches the original
// code's reliance on that.)
umask.fromString = function (val) {
  var parsed = val
  _fromString(val, function (err, result) {
    if (err) {
      npmlog.warn("invalid umask", err.message)
    }
    parsed = result
  })
  return parsed
}

24
node_modules/npm/lib/utils/warn-deprecated.js generated vendored Normal file
View File

@@ -0,0 +1,24 @@
module.exports = warnDeprecated
var log = require("npmlog")
var deprecations = {}
// Return a warn(messages, instance?) function for deprecation `type`.
// Without an instance, the messages are logged at most once per type.
// With an instance, they are logged at most once per (type, instance).
function warnDeprecated (type) {
  return function warn (messages, instance) {
    if (instance) {
      if (!deprecations[type]) deprecations[type] = {}
      if (deprecations[type][instance]) return
      deprecations[type][instance] = true
    } else {
      if (deprecations[type]) return
      deprecations[type] = {}
    }
    messages.forEach(function (m) { log.warn(type, m) })
  }
}