mirror of
https://github.com/S2-/gitlit
synced 2025-08-02 12:20:05 +02:00
packager
This commit is contained in:
15
app/node_modules/.bin/asar
generated
vendored
Normal file
15
app/node_modules/.bin/asar
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
|
||||||
|
|
||||||
|
case `uname` in
|
||||||
|
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
if [ -x "$basedir/node" ]; then
|
||||||
|
"$basedir/node" "$basedir/../asar/bin/asar.js" "$@"
|
||||||
|
ret=$?
|
||||||
|
else
|
||||||
|
node "$basedir/../asar/bin/asar.js" "$@"
|
||||||
|
ret=$?
|
||||||
|
fi
|
||||||
|
exit $ret
|
7
app/node_modules/.bin/asar.cmd
generated
vendored
Normal file
7
app/node_modules/.bin/asar.cmd
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
@IF EXIST "%~dp0\node.exe" (
|
||||||
|
"%~dp0\node.exe" "%~dp0\..\asar\bin\asar.js" %*
|
||||||
|
) ELSE (
|
||||||
|
@SETLOCAL
|
||||||
|
@SET PATHEXT=%PATHEXT:;.JS;=;%
|
||||||
|
node "%~dp0\..\asar\bin\asar.js" %*
|
||||||
|
)
|
15
app/node_modules/.bin/decompress-zip
generated
vendored
Normal file
15
app/node_modules/.bin/decompress-zip
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
|
||||||
|
|
||||||
|
case `uname` in
|
||||||
|
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
if [ -x "$basedir/node" ]; then
|
||||||
|
"$basedir/node" "$basedir/../decompress-zip/bin/decompress-zip" "$@"
|
||||||
|
ret=$?
|
||||||
|
else
|
||||||
|
node "$basedir/../decompress-zip/bin/decompress-zip" "$@"
|
||||||
|
ret=$?
|
||||||
|
fi
|
||||||
|
exit $ret
|
7
app/node_modules/.bin/decompress-zip.cmd
generated
vendored
Normal file
7
app/node_modules/.bin/decompress-zip.cmd
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
@IF EXIST "%~dp0\node.exe" (
|
||||||
|
"%~dp0\node.exe" "%~dp0\..\decompress-zip\bin\decompress-zip" %*
|
||||||
|
) ELSE (
|
||||||
|
@SETLOCAL
|
||||||
|
@SET PATHEXT=%PATHEXT:;.JS;=;%
|
||||||
|
node "%~dp0\..\decompress-zip\bin\decompress-zip" %*
|
||||||
|
)
|
15
app/node_modules/.bin/electron-osx-flat
generated
vendored
Normal file
15
app/node_modules/.bin/electron-osx-flat
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
|
||||||
|
|
||||||
|
case `uname` in
|
||||||
|
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
if [ -x "$basedir/node" ]; then
|
||||||
|
"$basedir/node" "$basedir/../electron-osx-sign/bin/electron-osx-flat.js" "$@"
|
||||||
|
ret=$?
|
||||||
|
else
|
||||||
|
node "$basedir/../electron-osx-sign/bin/electron-osx-flat.js" "$@"
|
||||||
|
ret=$?
|
||||||
|
fi
|
||||||
|
exit $ret
|
7
app/node_modules/.bin/electron-osx-flat.cmd
generated
vendored
Normal file
7
app/node_modules/.bin/electron-osx-flat.cmd
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
@IF EXIST "%~dp0\node.exe" (
|
||||||
|
"%~dp0\node.exe" "%~dp0\..\electron-osx-sign\bin\electron-osx-flat.js" %*
|
||||||
|
) ELSE (
|
||||||
|
@SETLOCAL
|
||||||
|
@SET PATHEXT=%PATHEXT:;.JS;=;%
|
||||||
|
node "%~dp0\..\electron-osx-sign\bin\electron-osx-flat.js" %*
|
||||||
|
)
|
15
app/node_modules/.bin/electron-osx-sign
generated
vendored
Normal file
15
app/node_modules/.bin/electron-osx-sign
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
|
||||||
|
|
||||||
|
case `uname` in
|
||||||
|
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
if [ -x "$basedir/node" ]; then
|
||||||
|
"$basedir/node" "$basedir/../electron-osx-sign/bin/electron-osx-sign.js" "$@"
|
||||||
|
ret=$?
|
||||||
|
else
|
||||||
|
node "$basedir/../electron-osx-sign/bin/electron-osx-sign.js" "$@"
|
||||||
|
ret=$?
|
||||||
|
fi
|
||||||
|
exit $ret
|
7
app/node_modules/.bin/electron-osx-sign.cmd
generated
vendored
Normal file
7
app/node_modules/.bin/electron-osx-sign.cmd
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
@IF EXIST "%~dp0\node.exe" (
|
||||||
|
"%~dp0\node.exe" "%~dp0\..\electron-osx-sign\bin\electron-osx-sign.js" %*
|
||||||
|
) ELSE (
|
||||||
|
@SETLOCAL
|
||||||
|
@SET PATHEXT=%PATHEXT:;.JS;=;%
|
||||||
|
node "%~dp0\..\electron-osx-sign\bin\electron-osx-sign.js" %*
|
||||||
|
)
|
15
app/node_modules/.bin/electron-packager
generated
vendored
Normal file
15
app/node_modules/.bin/electron-packager
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
|
||||||
|
|
||||||
|
case `uname` in
|
||||||
|
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
if [ -x "$basedir/node" ]; then
|
||||||
|
"$basedir/node" "$basedir/../electron-packager/cli.js" "$@"
|
||||||
|
ret=$?
|
||||||
|
else
|
||||||
|
node "$basedir/../electron-packager/cli.js" "$@"
|
||||||
|
ret=$?
|
||||||
|
fi
|
||||||
|
exit $ret
|
7
app/node_modules/.bin/electron-packager.cmd
generated
vendored
Normal file
7
app/node_modules/.bin/electron-packager.cmd
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
@IF EXIST "%~dp0\node.exe" (
|
||||||
|
"%~dp0\node.exe" "%~dp0\..\electron-packager\cli.js" %*
|
||||||
|
) ELSE (
|
||||||
|
@SETLOCAL
|
||||||
|
@SET PATHEXT=%PATHEXT:;.JS;=;%
|
||||||
|
node "%~dp0\..\electron-packager\cli.js" %*
|
||||||
|
)
|
15
app/node_modules/.bin/nopt
generated
vendored
Normal file
15
app/node_modules/.bin/nopt
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
|
||||||
|
|
||||||
|
case `uname` in
|
||||||
|
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
if [ -x "$basedir/node" ]; then
|
||||||
|
"$basedir/node" "$basedir/../nopt/bin/nopt.js" "$@"
|
||||||
|
ret=$?
|
||||||
|
else
|
||||||
|
node "$basedir/../nopt/bin/nopt.js" "$@"
|
||||||
|
ret=$?
|
||||||
|
fi
|
||||||
|
exit $ret
|
7
app/node_modules/.bin/nopt.cmd
generated
vendored
Normal file
7
app/node_modules/.bin/nopt.cmd
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
@IF EXIST "%~dp0\node.exe" (
|
||||||
|
"%~dp0\node.exe" "%~dp0\..\nopt\bin\nopt.js" %*
|
||||||
|
) ELSE (
|
||||||
|
@SETLOCAL
|
||||||
|
@SET PATHEXT=%PATHEXT:;.JS;=;%
|
||||||
|
node "%~dp0\..\nopt\bin\nopt.js" %*
|
||||||
|
)
|
46
app/node_modules/abbrev/LICENSE
generated
vendored
Normal file
46
app/node_modules/abbrev/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
This software is dual-licensed under the ISC and MIT licenses.
|
||||||
|
You may use this software under EITHER of the following licenses.
|
||||||
|
|
||||||
|
----------
|
||||||
|
|
||||||
|
The ISC License
|
||||||
|
|
||||||
|
Copyright (c) Isaac Z. Schlueter and Contributors
|
||||||
|
|
||||||
|
Permission to use, copy, modify, and/or distribute this software for any
|
||||||
|
purpose with or without fee is hereby granted, provided that the above
|
||||||
|
copyright notice and this permission notice appear in all copies.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||||
|
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||||
|
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||||
|
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||||
|
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||||
|
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||||
|
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||||
|
|
||||||
|
----------
|
||||||
|
|
||||||
|
Copyright Isaac Z. Schlueter and Contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person
|
||||||
|
obtaining a copy of this software and associated documentation
|
||||||
|
files (the "Software"), to deal in the Software without
|
||||||
|
restriction, including without limitation the rights to use,
|
||||||
|
copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the
|
||||||
|
Software is furnished to do so, subject to the following
|
||||||
|
conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be
|
||||||
|
included in all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||||
|
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
||||||
|
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||||
|
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
||||||
|
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||||
|
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||||
|
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||||
|
OTHER DEALINGS IN THE SOFTWARE.
|
23
app/node_modules/abbrev/README.md
generated
vendored
Normal file
23
app/node_modules/abbrev/README.md
generated
vendored
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
# abbrev-js
|
||||||
|
|
||||||
|
Just like [ruby's Abbrev](http://apidock.com/ruby/Abbrev).
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
|
||||||
|
var abbrev = require("abbrev");
|
||||||
|
abbrev("foo", "fool", "folding", "flop");
|
||||||
|
|
||||||
|
// returns:
|
||||||
|
{ fl: 'flop'
|
||||||
|
, flo: 'flop'
|
||||||
|
, flop: 'flop'
|
||||||
|
, fol: 'folding'
|
||||||
|
, fold: 'folding'
|
||||||
|
, foldi: 'folding'
|
||||||
|
, foldin: 'folding'
|
||||||
|
, folding: 'folding'
|
||||||
|
, foo: 'foo'
|
||||||
|
, fool: 'fool'
|
||||||
|
}
|
||||||
|
|
||||||
|
This is handy for command-line scripts, or other cases where you want to be able to accept shorthands.
|
61
app/node_modules/abbrev/abbrev.js
generated
vendored
Normal file
61
app/node_modules/abbrev/abbrev.js
generated
vendored
Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
module.exports = exports = abbrev.abbrev = abbrev
|
||||||
|
|
||||||
|
abbrev.monkeyPatch = monkeyPatch
|
||||||
|
|
||||||
|
function monkeyPatch () {
|
||||||
|
Object.defineProperty(Array.prototype, 'abbrev', {
|
||||||
|
value: function () { return abbrev(this) },
|
||||||
|
enumerable: false, configurable: true, writable: true
|
||||||
|
})
|
||||||
|
|
||||||
|
Object.defineProperty(Object.prototype, 'abbrev', {
|
||||||
|
value: function () { return abbrev(Object.keys(this)) },
|
||||||
|
enumerable: false, configurable: true, writable: true
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
function abbrev (list) {
|
||||||
|
if (arguments.length !== 1 || !Array.isArray(list)) {
|
||||||
|
list = Array.prototype.slice.call(arguments, 0)
|
||||||
|
}
|
||||||
|
for (var i = 0, l = list.length, args = [] ; i < l ; i ++) {
|
||||||
|
args[i] = typeof list[i] === "string" ? list[i] : String(list[i])
|
||||||
|
}
|
||||||
|
|
||||||
|
// sort them lexicographically, so that they're next to their nearest kin
|
||||||
|
args = args.sort(lexSort)
|
||||||
|
|
||||||
|
// walk through each, seeing how much it has in common with the next and previous
|
||||||
|
var abbrevs = {}
|
||||||
|
, prev = ""
|
||||||
|
for (var i = 0, l = args.length ; i < l ; i ++) {
|
||||||
|
var current = args[i]
|
||||||
|
, next = args[i + 1] || ""
|
||||||
|
, nextMatches = true
|
||||||
|
, prevMatches = true
|
||||||
|
if (current === next) continue
|
||||||
|
for (var j = 0, cl = current.length ; j < cl ; j ++) {
|
||||||
|
var curChar = current.charAt(j)
|
||||||
|
nextMatches = nextMatches && curChar === next.charAt(j)
|
||||||
|
prevMatches = prevMatches && curChar === prev.charAt(j)
|
||||||
|
if (!nextMatches && !prevMatches) {
|
||||||
|
j ++
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
prev = current
|
||||||
|
if (j === cl) {
|
||||||
|
abbrevs[current] = current
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
for (var a = current.substr(0, j) ; j <= cl ; j ++) {
|
||||||
|
abbrevs[a] = current
|
||||||
|
a += current.charAt(j)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return abbrevs
|
||||||
|
}
|
||||||
|
|
||||||
|
function lexSort (a, b) {
|
||||||
|
return a === b ? 0 : a > b ? 1 : -1
|
||||||
|
}
|
57
app/node_modules/abbrev/package.json
generated
vendored
Normal file
57
app/node_modules/abbrev/package.json
generated
vendored
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
{
|
||||||
|
"_from": "abbrev@1",
|
||||||
|
"_id": "abbrev@1.1.1",
|
||||||
|
"_inBundle": false,
|
||||||
|
"_integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==",
|
||||||
|
"_location": "/abbrev",
|
||||||
|
"_phantomChildren": {},
|
||||||
|
"_requested": {
|
||||||
|
"type": "range",
|
||||||
|
"registry": true,
|
||||||
|
"raw": "abbrev@1",
|
||||||
|
"name": "abbrev",
|
||||||
|
"escapedName": "abbrev",
|
||||||
|
"rawSpec": "1",
|
||||||
|
"saveSpec": null,
|
||||||
|
"fetchSpec": "1"
|
||||||
|
},
|
||||||
|
"_requiredBy": [
|
||||||
|
"/nopt",
|
||||||
|
"/touch/nopt"
|
||||||
|
],
|
||||||
|
"_resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz",
|
||||||
|
"_shasum": "f8f2c887ad10bf67f634f005b6987fed3179aac8",
|
||||||
|
"_spec": "abbrev@1",
|
||||||
|
"_where": "E:\\projects\\p\\gitlit\\app\\node_modules\\nopt",
|
||||||
|
"author": {
|
||||||
|
"name": "Isaac Z. Schlueter",
|
||||||
|
"email": "i@izs.me"
|
||||||
|
},
|
||||||
|
"bugs": {
|
||||||
|
"url": "https://github.com/isaacs/abbrev-js/issues"
|
||||||
|
},
|
||||||
|
"bundleDependencies": false,
|
||||||
|
"deprecated": false,
|
||||||
|
"description": "Like ruby's abbrev module, but in js",
|
||||||
|
"devDependencies": {
|
||||||
|
"tap": "^10.1"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"abbrev.js"
|
||||||
|
],
|
||||||
|
"homepage": "https://github.com/isaacs/abbrev-js#readme",
|
||||||
|
"license": "ISC",
|
||||||
|
"main": "abbrev.js",
|
||||||
|
"name": "abbrev",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git+ssh://git@github.com/isaacs/abbrev-js.git"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"postpublish": "git push origin --all; git push origin --tags",
|
||||||
|
"postversion": "npm publish",
|
||||||
|
"preversion": "npm test",
|
||||||
|
"test": "tap test.js --100"
|
||||||
|
},
|
||||||
|
"version": "1.1.1"
|
||||||
|
}
|
65
app/node_modules/asar/CHANGELOG.md
generated
vendored
Normal file
65
app/node_modules/asar/CHANGELOG.md
generated
vendored
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
# Changes By Version
|
||||||
|
|
||||||
|
## 0.14.0 - 2017-11-02
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
* Snapcraft metadata (#130)
|
||||||
|
* `uncache` and `uncacheAll` (#118)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
* Use of asar inside of an Electron app (#118)
|
||||||
|
|
||||||
|
## 0.13.1 - 2017-11-02
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Do not return before the write stream fully closes (#113)
|
||||||
|
|
||||||
|
## 0.13.0 - 2017-01-09
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Dropped support for Node `0.10.0` and `0.12.0`. The minimum supported version
|
||||||
|
is now Node `4.6.0`. (#100)
|
||||||
|
- This project was ported from CoffeeScript to JavaScript. The behavior and
|
||||||
|
APIs should be the same as previous releases. (#100)
|
||||||
|
|
||||||
|
## 0.12.4 - 2016-12-28
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Unpack glob patterns containing `{}` characters not working properly (#99)
|
||||||
|
|
||||||
|
## 0.12.3 - 2016-08-29
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Multibyte characters in paths are now supported (#86)
|
||||||
|
|
||||||
|
## 0.12.2 - 2016-08-22
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Upgraded `minimatch` to `^3.0.3` from `^3.0.0` for [RegExp DOS fix](https://nodesecurity.io/advisories/minimatch_regular-expression-denial-of-service).
|
||||||
|
|
||||||
|
## 0.12.1 - 2016-07-25
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Fix `Maximum call stack size exceeded` error regression (#80)
|
||||||
|
|
||||||
|
## 0.12.0 - 2016-07-20
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- Added `transform` option to specify a `stream.Transform` function to the
|
||||||
|
`createPackageWithOptions` API (#73)
|
||||||
|
|
||||||
|
## 0.11.0 - 2016-04-06
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Upgraded `mksnapshot` dependency to remove logged `graceful-fs` deprecation
|
||||||
|
warnings (#61)
|
20
app/node_modules/asar/LICENSE.md
generated
vendored
Normal file
20
app/node_modules/asar/LICENSE.md
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
Copyright (c) 2014 GitHub Inc.
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining
|
||||||
|
a copy of this software and associated documentation files (the
|
||||||
|
"Software"), to deal in the Software without restriction, including
|
||||||
|
without limitation the rights to use, copy, modify, merge, publish,
|
||||||
|
distribute, sublicense, and/or sell copies of the Software, and to
|
||||||
|
permit persons to whom the Software is furnished to do so, subject to
|
||||||
|
the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be
|
||||||
|
included in all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||||
|
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||||
|
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||||
|
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
||||||
|
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||||
|
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
||||||
|
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
193
app/node_modules/asar/README.md
generated
vendored
Normal file
193
app/node_modules/asar/README.md
generated
vendored
Normal file
@@ -0,0 +1,193 @@
|
|||||||
|
# asar - Electron Archive
|
||||||
|
|
||||||
|
[](https://travis-ci.org/electron/asar)
|
||||||
|
[](https://ci.appveyor.com/project/electron-bot/asar)
|
||||||
|
[](https://david-dm.org/electron/asar)
|
||||||
|
[](https://npmjs.org/package/asar)
|
||||||
|
|
||||||
|
Asar is a simple extensive archive format, it works like `tar` that concatenates
|
||||||
|
all files together without compression, while having random access support.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
* Support random access
|
||||||
|
* Use JSON to store files' information
|
||||||
|
* Very easy to write a parser
|
||||||
|
|
||||||
|
## Command line utility
|
||||||
|
|
||||||
|
### Install
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ npm install asar
|
||||||
|
```
|
||||||
|
|
||||||
|
### Usage
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ asar --help
|
||||||
|
|
||||||
|
Usage: asar [options] [command]
|
||||||
|
|
||||||
|
Commands:
|
||||||
|
|
||||||
|
pack|p <dir> <output>
|
||||||
|
create asar archive
|
||||||
|
|
||||||
|
list|l <archive>
|
||||||
|
list files of asar archive
|
||||||
|
|
||||||
|
extract-file|ef <archive> <filename>
|
||||||
|
extract one file from archive
|
||||||
|
|
||||||
|
extract|e <archive> <dest>
|
||||||
|
extract archive
|
||||||
|
|
||||||
|
|
||||||
|
Options:
|
||||||
|
|
||||||
|
-h, --help output usage information
|
||||||
|
-V, --version output the version number
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Excluding multiple resources from being packed
|
||||||
|
|
||||||
|
Given:
|
||||||
|
```
|
||||||
|
app
|
||||||
|
(a) ├── x1
|
||||||
|
(b) ├── x2
|
||||||
|
(c) ├── y3
|
||||||
|
(d) │ ├── x1
|
||||||
|
(e) │ └── z1
|
||||||
|
(f) │ └── x2
|
||||||
|
(g) └── z4
|
||||||
|
(h) └── w1
|
||||||
|
```
|
||||||
|
|
||||||
|
Exclude: a, b
|
||||||
|
```bash
|
||||||
|
$ asar pack app app.asar --unpack-dir "{x1,x2}"
|
||||||
|
```
|
||||||
|
|
||||||
|
Exclude: a, b, d, f
|
||||||
|
```bash
|
||||||
|
$ asar pack app app.asar --unpack-dir "**/{x1,x2}"
|
||||||
|
```
|
||||||
|
|
||||||
|
Exclude: a, b, d, f, h
|
||||||
|
```bash
|
||||||
|
$ asar pack app app.asar --unpack-dir "{**/x1,**/x2,z4/w1}"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Using programatically
|
||||||
|
|
||||||
|
### Example
|
||||||
|
|
||||||
|
```js
|
||||||
|
var asar = require('asar');
|
||||||
|
|
||||||
|
var src = 'some/path/';
|
||||||
|
var dest = 'name.asar';
|
||||||
|
|
||||||
|
asar.createPackage(src, dest, function() {
|
||||||
|
console.log('done.');
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
Please note that there is currently **no** error handling provided!
|
||||||
|
|
||||||
|
### Transform
|
||||||
|
You can pass in a `transform` option, that is a function, which either returns
|
||||||
|
nothing, or a `stream.Transform`. The latter will be used on files that will be
|
||||||
|
in the `.asar` file to transform them (e.g. compress).
|
||||||
|
|
||||||
|
```js
|
||||||
|
var asar = require('asar');
|
||||||
|
|
||||||
|
var src = 'some/path/';
|
||||||
|
var dest = 'name.asar';
|
||||||
|
|
||||||
|
function transform(filename) {
|
||||||
|
return new CustomTransformStream()
|
||||||
|
}
|
||||||
|
|
||||||
|
asar.createPackageWithOptions(src, dest, { transform: transform }, function() {
|
||||||
|
console.log('done.');
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
## Using with grunt
|
||||||
|
|
||||||
|
There is also an unofficial grunt plugin to generate asar archives at [bwin/grunt-asar][grunt-asar].
|
||||||
|
|
||||||
|
## Format
|
||||||
|
|
||||||
|
Asar uses [Pickle][pickle] to safely serialize binary value to file, there is
|
||||||
|
also a [node.js binding][node-pickle] of `Pickle` class.
|
||||||
|
|
||||||
|
The format of asar is very flat:
|
||||||
|
|
||||||
|
```
|
||||||
|
| UInt32: header_size | String: header | Bytes: file1 | ... | Bytes: file42 |
|
||||||
|
```
|
||||||
|
|
||||||
|
The `header_size` and `header` are serialized with [Pickle][pickle] class, and
|
||||||
|
`header_size`'s [Pickle][pickle] object is 8 bytes.
|
||||||
|
|
||||||
|
The `header` is a JSON string, and the `header_size` is the size of `header`'s
|
||||||
|
`Pickle` object.
|
||||||
|
|
||||||
|
Structure of `header` is something like this:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"files": {
|
||||||
|
"tmp": {
|
||||||
|
"files": {}
|
||||||
|
},
|
||||||
|
"usr" : {
|
||||||
|
"files": {
|
||||||
|
"bin": {
|
||||||
|
"files": {
|
||||||
|
"ls": {
|
||||||
|
"offset": "0",
|
||||||
|
"size": 100,
|
||||||
|
"executable": true
|
||||||
|
},
|
||||||
|
"cd": {
|
||||||
|
"offset": "100",
|
||||||
|
"size": 100,
|
||||||
|
"executable": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"etc": {
|
||||||
|
"files": {
|
||||||
|
"hosts": {
|
||||||
|
"offset": "200",
|
||||||
|
"size": 32
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
`offset` and `size` records the information to read the file from archive, the
|
||||||
|
`offset` starts from 0 so you have to manually add the size of `header_size` and
|
||||||
|
`header` to the `offset` to get the real offset of the file.
|
||||||
|
|
||||||
|
`offset` is a UINT64 number represented in string, because there is no way to
|
||||||
|
precisely represent UINT64 in JavaScript `Number`. `size` is a JavaScript
|
||||||
|
`Number` that is no larger than `Number.MAX_SAFE_INTEGER`, which has a value of
|
||||||
|
`9007199254740991` and is about 8PB in size. We didn't store `size` in UINT64
|
||||||
|
because file size in Node.js is represented as `Number` and it is not safe to
|
||||||
|
convert `Number` to UINT64.
|
||||||
|
|
||||||
|
[pickle]: https://chromium.googlesource.com/chromium/src/+/master/base/pickle.h
|
||||||
|
[node-pickle]: https://www.npmjs.org/package/chromium-pickle
|
||||||
|
[grunt-asar]: https://github.com/bwin/grunt-asar
|
72
app/node_modules/asar/bin/asar.js
generated
vendored
Normal file
72
app/node_modules/asar/bin/asar.js
generated
vendored
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
#!/usr/bin/env node
|
||||||
|
var asar = require('../lib/asar')
|
||||||
|
var program = require('commander')
|
||||||
|
|
||||||
|
program.version('v' + require('../package.json').version)
|
||||||
|
.description('Manipulate asar archive files')
|
||||||
|
|
||||||
|
program.command('pack <dir> <output>')
|
||||||
|
.alias('p')
|
||||||
|
.description('create asar archive')
|
||||||
|
.option('--ordering <file path>', 'path to a text file for ordering contents')
|
||||||
|
.option('--unpack <expression>', 'do not pack files matching glob <expression>')
|
||||||
|
.option('--unpack-dir <expression>', 'do not pack dirs matching glob <expression> or starting with literal <expression>')
|
||||||
|
.option('--snapshot', 'create snapshot')
|
||||||
|
.option('--exclude-hidden', 'exclude hidden files')
|
||||||
|
.option('--sv <version>', '(snapshot) version of Electron')
|
||||||
|
.option('--sa <arch>', '(snapshot) arch of Electron')
|
||||||
|
.option('--sb <builddir>', '(snapshot) where to put downloaded files')
|
||||||
|
.action(function (dir, output, options) {
|
||||||
|
options = {
|
||||||
|
unpack: options.unpack,
|
||||||
|
unpackDir: options.unpackDir,
|
||||||
|
snapshot: options.snapshot,
|
||||||
|
ordering: options.ordering,
|
||||||
|
version: options.sv,
|
||||||
|
arch: options.sa,
|
||||||
|
builddir: options.sb,
|
||||||
|
dot: !options.excludeHidden
|
||||||
|
}
|
||||||
|
asar.createPackageWithOptions(dir, output, options, function (error) {
|
||||||
|
if (error) {
|
||||||
|
console.error(error.stack)
|
||||||
|
process.exit(1)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
program.command('list <archive>')
|
||||||
|
.alias('l')
|
||||||
|
.description('list files of asar archive')
|
||||||
|
.action(function (archive) {
|
||||||
|
var files = asar.listPackage(archive)
|
||||||
|
for (var i in files) {
|
||||||
|
console.log(files[i])
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
program.command('extract-file <archive> <filename>')
|
||||||
|
.alias('ef')
|
||||||
|
.description('extract one file from archive')
|
||||||
|
.action(function (archive, filename) {
|
||||||
|
require('fs').writeFileSync(require('path').basename(filename),
|
||||||
|
asar.extractFile(archive, filename))
|
||||||
|
})
|
||||||
|
|
||||||
|
program.command('extract <archive> <dest>')
|
||||||
|
.alias('e')
|
||||||
|
.description('extract archive')
|
||||||
|
.action(function (archive, dest) {
|
||||||
|
asar.extractAll(archive, dest)
|
||||||
|
})
|
||||||
|
|
||||||
|
program.command('*')
|
||||||
|
.action(function (cmd) {
|
||||||
|
console.log('asar: \'%s\' is not an asar command. See \'asar --help\'.', cmd)
|
||||||
|
})
|
||||||
|
|
||||||
|
program.parse(process.argv)
|
||||||
|
|
||||||
|
if (program.args.length === 0) {
|
||||||
|
program.help()
|
||||||
|
}
|
232
app/node_modules/asar/lib/asar.js
generated
vendored
Normal file
232
app/node_modules/asar/lib/asar.js
generated
vendored
Normal file
@@ -0,0 +1,232 @@
|
|||||||
|
'use strict'
|
||||||
|
const fs = process.versions.electron ? require('original-fs') : require('fs')
|
||||||
|
const path = require('path')
|
||||||
|
const minimatch = require('minimatch')
|
||||||
|
const mkdirp = require('mkdirp')
|
||||||
|
|
||||||
|
const Filesystem = require('./filesystem')
|
||||||
|
const disk = require('./disk')
|
||||||
|
const crawlFilesystem = require('./crawlfs')
|
||||||
|
const createSnapshot = require('./snapshot')
|
||||||
|
|
||||||
|
// Return whether or not a directory should be excluded from packing due to
|
||||||
|
// "--unpack-dir" option
|
||||||
|
//
|
||||||
|
// @param {string} path - diretory path to check
|
||||||
|
// @param {string} pattern - literal prefix [for backward compatibility] or glob pattern
|
||||||
|
// @param {array} unpackDirs - Array of directory paths previously marked as unpacked
|
||||||
|
//
|
||||||
|
const isUnpackDir = function (path, pattern, unpackDirs) {
|
||||||
|
if (path.indexOf(pattern) === 0 || minimatch(path, pattern)) {
|
||||||
|
if (unpackDirs.indexOf(path) === -1) {
|
||||||
|
unpackDirs.push(path)
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
} else {
|
||||||
|
for (let i = 0; i < unpackDirs.length; i++) {
|
||||||
|
if (path.indexOf(unpackDirs[i]) === 0) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports.createPackage = function (src, dest, callback) {
|
||||||
|
return module.exports.createPackageWithOptions(src, dest, {}, callback)
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports.createPackageWithOptions = function (src, dest, options, callback) {
|
||||||
|
const globOptions = options.globOptions ? options.globOptions : {}
|
||||||
|
globOptions.dot = options.dot === undefined ? true : options.dot
|
||||||
|
|
||||||
|
let pattern = src + '/**/*'
|
||||||
|
if (options.pattern) {
|
||||||
|
pattern = src + options.pattern
|
||||||
|
}
|
||||||
|
|
||||||
|
return crawlFilesystem(pattern, globOptions, function (error, filenames, metadata) {
|
||||||
|
if (error) { return callback(error) }
|
||||||
|
module.exports.createPackageFromFiles(src, dest, filenames, metadata, options, callback)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
createPackageFromFiles - Create an asar-archive from a list of filenames
|
||||||
|
src: Base path. All files are relative to this.
|
||||||
|
dest: Archive filename (& path).
|
||||||
|
filenames: Array of filenames relative to src.
|
||||||
|
metadata: Object with filenames as keys and {type='directory|file|link', stat: fs.stat} as values. (Optional)
|
||||||
|
options: The options.
|
||||||
|
callback: The callback function. Accepts (err).
|
||||||
|
*/
|
||||||
|
module.exports.createPackageFromFiles = function (src, dest, filenames, metadata, options, callback) {
  // Build an asar archive at `dest` from the already-crawled `filenames`.
  //   src:       root directory the archive paths are made relative to
  //   dest:      output archive path
  //   filenames: absolute paths to include, in crawl order
  //   metadata:  {filename: {type: 'directory'|'file'|'link', stat}} (optional)
  //   options:   ordering / unpack / unpackDir / transform / snapshot settings
  //   callback:  invoked with (err) once the archive has been written
  if (typeof metadata === 'undefined' || metadata === null) { metadata = {} }
  const filesystem = new Filesystem(src)
  const files = []       // entries whose bytes will be streamed into the archive
  const unpackDirs = []  // cache consumed by isUnpackDir for --unpack-dir matching

  let filenamesSorted = []
  if (options.ordering) {
    // An ordering file lists paths (one per line, optionally "prefix: path")
    // that should be packed first; everything else keeps crawl order.
    const orderingFiles = fs.readFileSync(options.ordering).toString().split('\n').map(function (line) {
      if (line.includes(':')) { line = line.split(':').pop() }
      line = line.trim()
      if (line.startsWith('/')) { line = line.slice(1) }
      return line
    })

    // Expand each ordering entry into every ancestor path under `src` so
    // parent directories are inserted before their children.
    const ordering = []
    for (const file of orderingFiles) {
      const pathComponents = file.split(path.sep)
      let str = src
      for (const pathComponent of pathComponents) {
        str = path.join(str, pathComponent)
        ordering.push(str)
      }
    }

    let missing = 0
    const total = filenames.length

    // First take the crawled files named by the ordering file (deduplicated)...
    for (const file of ordering) {
      if (!filenamesSorted.includes(file) && filenames.includes(file)) {
        filenamesSorted.push(file)
      }
    }

    // ...then append any crawled file the ordering file did not mention.
    for (const file of filenames) {
      if (!filenamesSorted.includes(file)) {
        filenamesSorted.push(file)
        missing += 1
      }
    }

    console.log(`Ordering file has ${((total - missing) / total) * 100}% coverage.`)
  } else {
    filenamesSorted = filenames
  }

  // Insert a single path into the in-memory filesystem header; `done` is
  // invoked when the (possibly asynchronous) insertion finishes.
  const handleFile = function (filename, done) {
    let file = metadata[filename]
    let type
    if (!file) {
      // No metadata from the crawler for this path — stat it now.
      const stat = fs.lstatSync(filename)
      if (stat.isDirectory()) { type = 'directory' }
      if (stat.isFile()) { type = 'file' }
      if (stat.isSymbolicLink()) { type = 'link' }
      file = {stat, type}
    }

    let shouldUnpack
    switch (file.type) {
      case 'directory':
        shouldUnpack = options.unpackDir
          ? isUnpackDir(path.relative(src, filename), options.unpackDir, unpackDirs)
          : false
        filesystem.insertDirectory(filename, shouldUnpack)
        break
      case 'file':
        // A file stays outside the archive when it matches --unpack, or when
        // its containing directory matches --unpack-dir.
        shouldUnpack = false
        if (options.unpack) {
          shouldUnpack = minimatch(filename, options.unpack, {matchBase: true})
        }
        if (!shouldUnpack && options.unpackDir) {
          const dirName = path.relative(src, path.dirname(filename))
          shouldUnpack = isUnpackDir(dirName, options.unpackDir, unpackDirs)
        }
        files.push({filename: filename, unpack: shouldUnpack})
        // insertFile may run options.transform asynchronously, so it calls
        // `done` itself — do not fall through to the nextTick below.
        filesystem.insertFile(filename, shouldUnpack, file, options, done)
        return
      case 'link':
        filesystem.insertLink(filename, file.stat)
        break
    }
    // Directories and links complete synchronously; defer `done` one tick to
    // keep the traversal uniformly asynchronous.
    return process.nextTick(done)
  }

  // Once every entry is inserted, write header + file data to disk and
  // optionally produce a V8 snapshot.
  const insertsDone = function () {
    return mkdirp(path.dirname(dest), function (error) {
      if (error) { return callback(error) }
      return disk.writeFilesystem(dest, filesystem, files, metadata, function (error) {
        if (error) { return callback(error) }
        if (options.snapshot) {
          return createSnapshot(src, dest, filenames, metadata, options, callback)
        } else {
          return callback(null)
        }
      })
    })
  }

  // Process the sorted names strictly one at a time via the callback chain.
  const names = filenamesSorted.slice()

  const next = function (name) {
    if (!name) { return insertsDone() }

    return handleFile(name, function () {
      return next(names.shift())
    })
  }

  return next(names.shift())
}
|
||||||
|
|
||||||
|
// Look up the header entry for `filename` inside `archive`. `followLinks`
// controls whether a symlink entry is resolved to its target entry.
module.exports.statFile = function (archive, filename, followLinks) {
  return disk.readFilesystemSync(archive).getFile(filename, followLinks)
}
|
||||||
|
|
||||||
|
// Return every path recorded in the archive's header, as '/'-prefixed names.
module.exports.listPackage = function (archive) {
  const filesystem = disk.readFilesystemSync(archive)
  return filesystem.listFiles()
}
|
||||||
|
|
||||||
|
// Read a single file's contents out of an archive and return them as a Buffer.
module.exports.extractFile = function (archive, filename) {
  const filesystem = disk.readFilesystemSync(archive)
  const info = filesystem.getFile(filename)
  return disk.readFileSync(filesystem, filename, info)
}
|
||||||
|
|
||||||
|
// Unpack every entry of `archive` into the directory `dest`, recreating
// directories, symlinks and regular files.
module.exports.extractAll = function (archive, dest) {
  const filesystem = disk.readFilesystemSync(archive)
  const filenames = filesystem.listFiles()

  // under windows just extract links as regular files
  const followLinks = process.platform === 'win32'

  // create destination directory
  mkdirp.sync(dest)

  return filenames.map((name) => {
    name = name.substr(1) // get rid of leading slash
    const target = path.join(dest, name)
    const entry = filesystem.getFile(name, followLinks)

    if (entry.files) {
      // Directory entry — make sure it exists on disk and move on.
      mkdirp.sync(target)
    } else if (entry.link) {
      // Symlink entry — recreate it relative to its destination directory.
      const srcDir = path.dirname(path.join(dest, entry.link))
      const destDir = path.dirname(target)
      const relative = path.relative(destDir, srcDir)
      // symlinkSync cannot overwrite an existing path, so best-effort delete
      // whatever is already there (ignore "does not exist" errors).
      try {
        fs.unlinkSync(target)
      } catch (error) {}
      const linkTarget = path.join(relative, path.basename(entry.link))
      fs.symlinkSync(linkTarget, target)
    } else {
      // Regular file entry — copy its bytes out of the archive.
      const content = disk.readFileSync(filesystem, name, entry)
      fs.writeFileSync(target, content)
    }
  })
}
|
||||||
|
|
||||||
|
// Evict one archive from the header cache.
// Returns true when an entry was actually removed.
module.exports.uncache = function (archive) {
  const evicted = disk.uncacheFilesystem(archive)
  return evicted
}
|
||||||
|
|
||||||
|
// Evict every cached archive header so later reads hit the disk again.
module.exports.uncacheAll = function () {
  return disk.uncacheAll()
}
|
21
app/node_modules/asar/lib/crawlfs.js
generated
vendored
Normal file
21
app/node_modules/asar/lib/crawlfs.js
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
'use strict'
|
||||||
|
const fs = process.versions.electron ? require('original-fs') : require('fs')
|
||||||
|
const glob = require('glob')
|
||||||
|
|
||||||
|
module.exports = function (dir, options, callback) {
|
||||||
|
const metadata = {}
|
||||||
|
return glob(dir, options, function (error, filenames) {
|
||||||
|
if (error) { return callback(error) }
|
||||||
|
for (const filename of filenames) {
|
||||||
|
const stat = fs.lstatSync(filename)
|
||||||
|
if (stat.isFile()) {
|
||||||
|
metadata[filename] = {type: 'file', stat: stat}
|
||||||
|
} else if (stat.isDirectory()) {
|
||||||
|
metadata[filename] = {type: 'directory', stat: stat}
|
||||||
|
} else if (stat.isSymbolicLink()) {
|
||||||
|
metadata[filename] = {type: 'link', stat: stat}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return callback(null, filenames, metadata)
|
||||||
|
})
|
||||||
|
}
|
134
app/node_modules/asar/lib/disk.js
generated
vendored
Normal file
134
app/node_modules/asar/lib/disk.js
generated
vendored
Normal file
@@ -0,0 +1,134 @@
|
|||||||
|
'use strict'
|
||||||
|
const fs = process.versions.electron ? require('original-fs') : require('fs')
|
||||||
|
const path = require('path')
|
||||||
|
const mkdirp = require('mkdirp')
|
||||||
|
const pickle = require('chromium-pickle-js')
|
||||||
|
|
||||||
|
const Filesystem = require('./filesystem')
|
||||||
|
let filesystemCache = {}
|
||||||
|
|
||||||
|
// Copy `filename` (a path relative to `src`) into the same relative location
// under `dest`, preserving the file's mode and creating parent directories.
const copyFileToSync = function (dest, src, filename) {
  const from = path.join(src, filename)
  const to = path.join(dest, filename)

  const stats = fs.statSync(from)
  const content = fs.readFileSync(from)
  mkdirp.sync(path.dirname(to))
  return fs.writeFileSync(to, content, {mode: stats.mode})
}
|
||||||
|
|
||||||
|
// Sequentially append the contents of every packed file in `list` to the
// archive stream `out`. Unpacked files are instead copied next to the archive
// into "<dest>.unpacked". After each streamed file the function recurses on
// the remaining list (list.slice), which guarantees file data lands in the
// archive strictly in header order.
const writeFileListToStream = function (dest, filesystem, out, list, metadata, callback) {
  for (let i = 0; i < list.length; i++) {
    const file = list[i]
    if (file.unpack) {
      // the file should not be packed into archive.
      const filename = path.relative(filesystem.src, file.filename)
      try {
        copyFileToSync(`${dest}.unpacked`, filesystem.src, filename)
      } catch (error) {
        return callback(error)
      }
    } else {
      // Stream the (possibly transformed) file body into the archive; keep
      // `out` open ({end: false}) so further files can be appended.
      const tr = metadata[file.filename].transformed
      const stream = fs.createReadStream((tr ? tr.path : file.filename))
      stream.pipe(out, {end: false})
      stream.on('error', callback)
      // Resume with the remaining entries only once this file fully piped.
      return stream.on('end', function () {
        return writeFileListToStream(dest, filesystem, out, list.slice(i + 1), metadata, callback)
      })
    }
  }
  // Only unpacked (copied) entries remained — close the stream and finish.
  out.end()
  return callback(null)
}
|
||||||
|
|
||||||
|
// Write a complete asar archive to `dest`: a UInt32 length prefix, the
// pickled JSON header, then every packed file's bytes (via
// writeFileListToStream). `callback` receives (err).
module.exports.writeFilesystem = function (dest, filesystem, files, metadata, callback) {
  // Serialize the header with chromium-pickle; returns both buffers.
  const encodeHeader = function () {
    const headerPickle = pickle.createEmpty()
    headerPickle.writeString(JSON.stringify(filesystem.header))
    const headerBuf = headerPickle.toBuffer()

    const sizePickle = pickle.createEmpty()
    sizePickle.writeUInt32(headerBuf.length)
    return {sizeBuf: sizePickle.toBuffer(), headerBuf: headerBuf}
  }

  let encoded
  try {
    encoded = encodeHeader()
  } catch (error) {
    return callback(error)
  }

  const out = fs.createWriteStream(dest)
  out.on('error', callback)
  out.write(encoded.sizeBuf)
  // Start streaming file contents only after the header hit the stream.
  return out.write(encoded.headerBuf, function () {
    return writeFileListToStream(dest, filesystem, out, files, metadata, callback)
  })
}
|
||||||
|
|
||||||
|
// Read and parse the pickled JSON header at the start of an asar archive.
// Returns {header, headerSize}, where headerSize is the pickled header's
// byte length (needed later to compute file-data offsets).
module.exports.readArchiveHeaderSync = function (archive) {
  const fd = fs.openSync(archive, 'r')
  let size
  let headerBuf
  try {
    // Buffer.alloc replaces the deprecated `new Buffer(n)` constructor,
    // which could hand back uninitialized memory.
    const sizeBuf = Buffer.alloc(8)
    if (fs.readSync(fd, sizeBuf, 0, 8, null) !== 8) {
      throw new Error('Unable to read header size')
    }

    const sizePickle = pickle.createFromBuffer(sizeBuf)
    size = sizePickle.createIterator().readUInt32()
    headerBuf = Buffer.alloc(size)
    if (fs.readSync(fd, headerBuf, 0, size, null) !== size) {
      throw new Error('Unable to read header')
    }
  } finally {
    // Always release the descriptor, even when a read above throws.
    fs.closeSync(fd)
  }

  const headerPickle = pickle.createFromBuffer(headerBuf)
  const header = headerPickle.createIterator().readString()
  return {header: JSON.parse(header), headerSize: size}
}
|
||||||
|
|
||||||
|
// Return the (cached) parsed Filesystem for `archive`, reading and parsing
// its header on the first access.
module.exports.readFilesystemSync = function (archive) {
  const cached = filesystemCache[archive]
  if (cached) { return cached }

  const header = this.readArchiveHeaderSync(archive)
  const filesystem = new Filesystem(archive)
  filesystem.header = header.header
  filesystem.headerSize = header.headerSize
  filesystemCache[archive] = filesystem
  return filesystem
}
|
||||||
|
|
||||||
|
// Drop a single archive from the header cache.
// Returns true when an entry was present (and is now cleared).
module.exports.uncacheFilesystem = function (archive) {
  const wasCached = Boolean(filesystemCache[archive])
  if (wasCached) {
    filesystemCache[archive] = undefined
  }
  return wasCached
}
|
||||||
|
|
||||||
|
// Reset the whole header cache in one go.
module.exports.uncacheAll = function () {
  filesystemCache = {}
}
|
||||||
|
|
||||||
|
// Read one file's bytes from an archive (or from its "<src>.unpacked"
// sibling directory when the entry was left unpacked). `info` is the header
// node for the file: {size, offset, unpacked?}. Returns a Buffer.
module.exports.readFileSync = function (filesystem, filename, info) {
  // Buffer.alloc is the safe, zero-filled replacement for the deprecated
  // `new Buffer(size)` constructor.
  let buffer = Buffer.alloc(info.size)
  // Node throws when reading 0 bytes into a 0-size buffer, so short-circuit
  // empty files before touching the disk.
  if (info.size <= 0) { return buffer }
  if (info.unpacked) {
    // it's an unpacked file, copy it.
    buffer = fs.readFileSync(path.join(`${filesystem.src}.unpacked`, filename))
  } else {
    const fd = fs.openSync(filesystem.src, 'r')
    try {
      // File data starts after the 8-byte size prefix plus the pickled
      // header; info.offset is a decimal string (UINT64.toString()), so
      // parse it with an explicit base-10 radix.
      const offset = 8 + filesystem.headerSize + parseInt(info.offset, 10)
      fs.readSync(fd, buffer, 0, info.size, offset)
    } finally {
      fs.closeSync(fd)
    }
  }
  return buffer
}
|
151
app/node_modules/asar/lib/filesystem.js
generated
vendored
Normal file
151
app/node_modules/asar/lib/filesystem.js
generated
vendored
Normal file
@@ -0,0 +1,151 @@
|
|||||||
|
'use strict'
|
||||||
|
const fs = process.versions.electron ? require('original-fs') : require('fs')
|
||||||
|
const path = require('path')
|
||||||
|
const tmp = require('tmp')
|
||||||
|
const UINT64 = require('cuint').UINT64
|
||||||
|
|
||||||
|
// In-memory representation of an asar archive's header: a tree of
// {files: {...}} nodes plus a running byte offset for packed file data.
class Filesystem {
  constructor (src) {
    this.src = path.resolve(src)
    this.header = {files: {}}
    // Cumulative size of all packed file bodies so far; UINT64 because file
    // offsets may exceed what a double can represent exactly.
    this.offset = UINT64(0)
  }

  // Walk the header tree along the relative directory path `p` and return
  // the node for that directory ('.' components are skipped).
  searchNodeFromDirectory (p) {
    let json = this.header
    const dirs = p.split(path.sep)
    for (const dir of dirs) {
      if (dir !== '.') {
        json = json.files[dir]
      }
    }
    return json
  }

  // Return the header node for absolute path `p`, creating intermediate
  // {files: {}} containers and an empty leaf node as needed.
  searchNodeFromPath (p) {
    p = path.relative(this.src, p)
    if (!p) { return this.header }
    const name = path.basename(p)
    const node = this.searchNodeFromDirectory(path.dirname(p))
    if (node.files == null) {
      node.files = {}
    }
    if (node.files[name] == null) {
      node.files[name] = {}
    }
    return node.files[name]
  }

  // Record a directory entry; `shouldUnpack` marks it (and, via insertFile,
  // its children) as living outside the archive.
  insertDirectory (p, shouldUnpack) {
    const node = this.searchNodeFromPath(p)
    if (shouldUnpack) {
      node.unpacked = shouldUnpack
    }
    node.files = {}
    return node.files
  }

  // Record a file entry. Asynchronous: `callback` fires once the node's
  // size/offset are final (options.transform may rewrite the file first).
  insertFile (p, shouldUnpack, file, options, callback) {
    const dirNode = this.searchNodeFromPath(path.dirname(p))
    const node = this.searchNodeFromPath(p)
    // Unpacked files (or files inside an unpacked directory) get no offset;
    // their bytes are copied beside the archive instead.
    if (shouldUnpack || dirNode.unpacked) {
      node.size = file.stat.size
      node.unpacked = true
      process.nextTick(callback)
      return
    }

    // Finalize the node once the (possibly transformed) size is known.
    const handler = () => {
      const size = file.transformed ? file.transformed.stat.size : file.stat.size

      // JavaScript can not precisely present integers >= UINT32_MAX.
      if (size > 4294967295) {
        throw new Error(`${p}: file size can not be larger than 4.2GB`)
      }

      node.size = size
      node.offset = this.offset.toString()
      // Preserve the owner-execute bit (non-Windows only).
      if (process.platform !== 'win32' && (file.stat.mode & 0o100)) {
        node.executable = true
      }
      this.offset.add(UINT64(size))

      return callback()
    }

    const tr = options.transform && options.transform(p)
    if (tr) {
      // Pipe the file through the transform stream into a temp file and
      // remember it so disk.writeFilesystem streams the transformed bytes.
      return tmp.file(function (err, path) {
        if (err) { return handler() }
        const out = fs.createWriteStream(path)
        const stream = fs.createReadStream(p)

        stream.pipe(tr).pipe(out)
        return out.on('close', function () {
          file.transformed = {
            path,
            stat: fs.lstatSync(path)
          }
          return handler()
        })
      })
    } else {
      return process.nextTick(handler)
    }
  }

  // Record a symlink entry; the stored link target is relative to the
  // archive root and must not escape it.
  insertLink (p, stat) {
    const link = path.relative(fs.realpathSync(this.src), fs.realpathSync(p))
    if (link.substr(0, 2) === '..') {
      throw new Error(`${p}: file links out of the package`)
    }
    const node = this.searchNodeFromPath(p)
    node.link = link
    return link
  }

  // Depth-first list of every path in the header, each prefixed with '/'.
  listFiles () {
    const files = []
    const fillFilesFromHeader = function (p, json) {
      if (!json.files) {
        return
      }
      return (() => {
        const result = []
        for (const f in json.files) {
          const fullPath = path.join(p, f)
          files.push(fullPath)
          result.push(fillFilesFromHeader(fullPath, json.files[f]))
        }
        return result
      })()
    }

    fillFilesFromHeader('/', this.header)
    return files
  }

  // Raw header-tree lookup for `p` (no symlink resolution).
  getNode (p) {
    const node = this.searchNodeFromDirectory(path.dirname(p))
    const name = path.basename(p)
    if (name) {
      return node.files[name]
    } else {
      return node
    }
  }

  // Lookup with optional symlink resolution (default: follow links).
  getFile (p, followLinks) {
    followLinks = typeof followLinks === 'undefined' ? true : followLinks
    const info = this.getNode(p)

    // if followLinks is false we don't resolve symlinks
    if (info.link && followLinks) {
      return this.getFile(info.link)
    } else {
      return info
    }
  }
}

module.exports = Filesystem
|
62
app/node_modules/asar/lib/snapshot.js
generated
vendored
Normal file
62
app/node_modules/asar/lib/snapshot.js
generated
vendored
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
'use strict'
|
||||||
|
const fs = process.versions.electron ? require('original-fs') : require('fs')
|
||||||
|
const path = require('path')
|
||||||
|
const mksnapshot = require('mksnapshot')
|
||||||
|
const vm = require('vm')
|
||||||
|
|
||||||
|
// Remove a leading byte-order mark: a BOM decodes to U+FEFF as the first
// character of the string.
const stripBOM = function (content) {
  const hasBOM = content.charCodeAt(0) === 0xFEFF
  return hasBOM ? content.slice(1) : content
}
|
||||||
|
|
||||||
|
// Strip a leading shebang line, then wrap the script in the standard
// CommonJS module function shape.
const wrapModuleCode = function (script) {
  const body = script.replace(/^#!.*/, '')
  return '(function(exports, require, module, __filename, __dirname) { ' + body + ' \n});'
}
|
||||||
|
|
||||||
|
// Serialize the compiled-module table into JS source of the form:
//   var __ATOM_SHELL_SNAPSHOT = { '<relative path>': <function>, ... };
// Fix: the table key must interpolate the filename ('${filename}'); the
// reviewed copy had this corrupted to the literal text "$(unknown)", which
// keyed every module under the same bogus name.
const dumpObjectToJS = function (content) {
  let result = 'var __ATOM_SHELL_SNAPSHOT = {\n'
  for (const filename in content) {
    const func = content[filename].toString()
    result += `  '${filename}': ${func},\n`
  }
  result += '};\n'
  return result
}
|
||||||
|
|
||||||
|
// Compile every .js file under `src` into a V8 startup snapshot
// ("snapshot_blob.bin"): each module is wrapped in the CommonJS function
// shape, compiled via vm.runInThisContext, dumped into a
// __ATOM_SHELL_SNAPSHOT table, and handed to mksnapshot.
// `callback` receives (err).
const createSnapshot = function (src, dest, filenames, metadata, options, callback) {
  const content = {}
  try {
    src = path.resolve(src)
    for (const filename of filenames) {
      const file = metadata[filename]
      if ((file.type === 'file' || file.type === 'link') && filename.substr(-3) === '.js') {
        const script = wrapModuleCode(stripBOM(fs.readFileSync(filename, 'utf8')))
        const relativeFilename = path.relative(src, filename)
        try {
          const compiled = vm.runInThisContext(script, {filename: relativeFilename})
          content[relativeFilename] = compiled
        } catch (error) {
          // Files that fail to compile (e.g. syntax errors) are skipped
          // instead of aborting the whole snapshot.
          console.error('Ignoring ' + relativeFilename + ' for ' + error.name)
        }
      }
    }
  } catch (error) {
    return callback(error)
  }

  // run mksnapshot
  const str = dumpObjectToJS(content)
  const version = options.version
  const arch = options.arch
  const builddir = options.builddir
  let snapshotdir = options.snapshotdir

  // Default to placing the snapshot next to the generated archive.
  if (typeof snapshotdir === 'undefined' || snapshotdir === null) { snapshotdir = path.dirname(dest) }
  const target = path.resolve(snapshotdir, 'snapshot_blob.bin')
  return mksnapshot(str, target, version, arch, builddir, callback)
}

module.exports = createSnapshot
|
15
app/node_modules/asar/node_modules/glob/LICENSE
generated
vendored
Normal file
15
app/node_modules/asar/node_modules/glob/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
The ISC License
|
||||||
|
|
||||||
|
Copyright (c) Isaac Z. Schlueter and Contributors
|
||||||
|
|
||||||
|
Permission to use, copy, modify, and/or distribute this software for any
|
||||||
|
purpose with or without fee is hereby granted, provided that the above
|
||||||
|
copyright notice and this permission notice appear in all copies.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||||
|
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||||
|
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||||
|
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||||
|
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||||
|
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||||
|
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
359
app/node_modules/asar/node_modules/glob/README.md
generated
vendored
Normal file
359
app/node_modules/asar/node_modules/glob/README.md
generated
vendored
Normal file
@@ -0,0 +1,359 @@
|
|||||||
|
# Glob
|
||||||
|
|
||||||
|
Match files using the patterns the shell uses, like stars and stuff.
|
||||||
|
|
||||||
|
[](https://travis-ci.org/isaacs/node-glob/) [](https://ci.appveyor.com/project/isaacs/node-glob) [](https://coveralls.io/github/isaacs/node-glob?branch=master)
|
||||||
|
|
||||||
|
This is a glob implementation in JavaScript. It uses the `minimatch`
|
||||||
|
library to do its matching.
|
||||||
|
|
||||||
|

|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var glob = require("glob")
|
||||||
|
|
||||||
|
// options is optional
|
||||||
|
glob("**/*.js", options, function (er, files) {
|
||||||
|
// files is an array of filenames.
|
||||||
|
// If the `nonull` option is set, and nothing
|
||||||
|
// was found, then files is ["**/*.js"]
|
||||||
|
// er is an error object or null.
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
## Glob Primer
|
||||||
|
|
||||||
|
"Globs" are the patterns you type when you do stuff like `ls *.js` on
|
||||||
|
the command line, or put `build/*` in a `.gitignore` file.
|
||||||
|
|
||||||
|
Before parsing the path part patterns, braced sections are expanded
|
||||||
|
into a set. Braced sections start with `{` and end with `}`, with any
|
||||||
|
number of comma-delimited sections within. Braced sections may contain
|
||||||
|
slash characters, so `a{/b/c,bcd}` would expand into `a/b/c` and `abcd`.
|
||||||
|
|
||||||
|
The following characters have special magic meaning when used in a
|
||||||
|
path portion:
|
||||||
|
|
||||||
|
* `*` Matches 0 or more characters in a single path portion
|
||||||
|
* `?` Matches 1 character
|
||||||
|
* `[...]` Matches a range of characters, similar to a RegExp range.
|
||||||
|
If the first character of the range is `!` or `^` then it matches
|
||||||
|
any character not in the range.
|
||||||
|
* `!(pattern|pattern|pattern)` Matches anything that does not match
|
||||||
|
any of the patterns provided.
|
||||||
|
* `?(pattern|pattern|pattern)` Matches zero or one occurrence of the
|
||||||
|
patterns provided.
|
||||||
|
* `+(pattern|pattern|pattern)` Matches one or more occurrences of the
|
||||||
|
patterns provided.
|
||||||
|
* `*(a|b|c)` Matches zero or more occurrences of the patterns provided
|
||||||
|
* `@(pattern|pat*|pat?erN)` Matches exactly one of the patterns
|
||||||
|
provided
|
||||||
|
* `**` If a "globstar" is alone in a path portion, then it matches
|
||||||
|
zero or more directories and subdirectories searching for matches.
|
||||||
|
It does not crawl symlinked directories.
|
||||||
|
|
||||||
|
### Dots
|
||||||
|
|
||||||
|
If a file or directory path portion has a `.` as the first character,
|
||||||
|
then it will not match any glob pattern unless that pattern's
|
||||||
|
corresponding path part also has a `.` as its first character.
|
||||||
|
|
||||||
|
For example, the pattern `a/.*/c` would match the file at `a/.b/c`.
|
||||||
|
However the pattern `a/*/c` would not, because `*` does not start with
|
||||||
|
a dot character.
|
||||||
|
|
||||||
|
You can make glob treat dots as normal characters by setting
|
||||||
|
`dot:true` in the options.
|
||||||
|
|
||||||
|
### Basename Matching
|
||||||
|
|
||||||
|
If you set `matchBase:true` in the options, and the pattern has no
|
||||||
|
slashes in it, then it will seek for any file anywhere in the tree
|
||||||
|
with a matching basename. For example, `*.js` would match
|
||||||
|
`test/simple/basic.js`.
|
||||||
|
|
||||||
|
### Empty Sets
|
||||||
|
|
||||||
|
If no matching files are found, then an empty array is returned. This
|
||||||
|
differs from the shell, where the pattern itself is returned. For
|
||||||
|
example:
|
||||||
|
|
||||||
|
$ echo a*s*d*f
|
||||||
|
a*s*d*f
|
||||||
|
|
||||||
|
To get the bash-style behavior, set the `nonull:true` in the options.
|
||||||
|
|
||||||
|
### See Also:
|
||||||
|
|
||||||
|
* `man sh`
|
||||||
|
* `man bash` (Search for "Pattern Matching")
|
||||||
|
* `man 3 fnmatch`
|
||||||
|
* `man 5 gitignore`
|
||||||
|
* [minimatch documentation](https://github.com/isaacs/minimatch)
|
||||||
|
|
||||||
|
## glob.hasMagic(pattern, [options])
|
||||||
|
|
||||||
|
Returns `true` if there are any special characters in the pattern, and
|
||||||
|
`false` otherwise.
|
||||||
|
|
||||||
|
Note that the options affect the results. If `noext:true` is set in
|
||||||
|
the options object, then `+(a|b)` will not be considered a magic
|
||||||
|
pattern. If the pattern has a brace expansion, like `a/{b/c,x/y}`
|
||||||
|
then that is considered magical, unless `nobrace:true` is set in the
|
||||||
|
options.
|
||||||
|
|
||||||
|
## glob(pattern, [options], cb)
|
||||||
|
|
||||||
|
* `pattern` `{String}` Pattern to be matched
|
||||||
|
* `options` `{Object}`
|
||||||
|
* `cb` `{Function}`
|
||||||
|
* `err` `{Error | null}`
|
||||||
|
* `matches` `{Array<String>}` filenames found matching the pattern
|
||||||
|
|
||||||
|
Perform an asynchronous glob search.
|
||||||
|
|
||||||
|
## glob.sync(pattern, [options])
|
||||||
|
|
||||||
|
* `pattern` `{String}` Pattern to be matched
|
||||||
|
* `options` `{Object}`
|
||||||
|
* return: `{Array<String>}` filenames found matching the pattern
|
||||||
|
|
||||||
|
Perform a synchronous glob search.
|
||||||
|
|
||||||
|
## Class: glob.Glob
|
||||||
|
|
||||||
|
Create a Glob object by instantiating the `glob.Glob` class.
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var Glob = require("glob").Glob
|
||||||
|
var mg = new Glob(pattern, options, cb)
|
||||||
|
```
|
||||||
|
|
||||||
|
It's an EventEmitter, and starts walking the filesystem to find matches
|
||||||
|
immediately.
|
||||||
|
|
||||||
|
### new glob.Glob(pattern, [options], [cb])
|
||||||
|
|
||||||
|
* `pattern` `{String}` pattern to search for
|
||||||
|
* `options` `{Object}`
|
||||||
|
* `cb` `{Function}` Called when an error occurs, or matches are found
|
||||||
|
* `err` `{Error | null}`
|
||||||
|
* `matches` `{Array<String>}` filenames found matching the pattern
|
||||||
|
|
||||||
|
Note that if the `sync` flag is set in the options, then matches will
|
||||||
|
be immediately available on the `g.found` member.
|
||||||
|
|
||||||
|
### Properties
|
||||||
|
|
||||||
|
* `minimatch` The minimatch object that the glob uses.
|
||||||
|
* `options` The options object passed in.
|
||||||
|
* `aborted` Boolean which is set to true when calling `abort()`. There
|
||||||
|
is no way at this time to continue a glob search after aborting, but
|
||||||
|
you can re-use the statCache to avoid having to duplicate syscalls.
|
||||||
|
* `cache` Convenience object. Each field has the following possible
|
||||||
|
values:
|
||||||
|
* `false` - Path does not exist
|
||||||
|
* `true` - Path exists
|
||||||
|
* `'FILE'` - Path exists, and is not a directory
|
||||||
|
* `'DIR'` - Path exists, and is a directory
|
||||||
|
* `[file, entries, ...]` - Path exists, is a directory, and the
|
||||||
|
array value is the results of `fs.readdir`
|
||||||
|
* `statCache` Cache of `fs.stat` results, to prevent statting the same
|
||||||
|
path multiple times.
|
||||||
|
* `symlinks` A record of which paths are symbolic links, which is
|
||||||
|
relevant in resolving `**` patterns.
|
||||||
|
* `realpathCache` An optional object which is passed to `fs.realpath`
|
||||||
|
to minimize unnecessary syscalls. It is stored on the instantiated
|
||||||
|
Glob object, and may be re-used.
|
||||||
|
|
||||||
|
### Events
|
||||||
|
|
||||||
|
* `end` When the matching is finished, this is emitted with all the
|
||||||
|
matches found. If the `nonull` option is set, and no match was found,
|
||||||
|
then the `matches` list contains the original pattern. The matches
|
||||||
|
are sorted, unless the `nosort` flag is set.
|
||||||
|
* `match` Every time a match is found, this is emitted with the specific
|
||||||
|
thing that matched. It is not deduplicated or resolved to a realpath.
|
||||||
|
* `error` Emitted when an unexpected error is encountered, or whenever
|
||||||
|
any fs error occurs if `options.strict` is set.
|
||||||
|
* `abort` When `abort()` is called, this event is raised.
|
||||||
|
|
||||||
|
### Methods
|
||||||
|
|
||||||
|
* `pause` Temporarily stop the search
|
||||||
|
* `resume` Resume the search
|
||||||
|
* `abort` Stop the search forever
|
||||||
|
|
||||||
|
### Options
|
||||||
|
|
||||||
|
All the options that can be passed to Minimatch can also be passed to
|
||||||
|
Glob to change pattern matching behavior. Also, some have been added,
|
||||||
|
or have glob-specific ramifications.
|
||||||
|
|
||||||
|
All options are false by default, unless otherwise noted.
|
||||||
|
|
||||||
|
All options are added to the Glob object, as well.
|
||||||
|
|
||||||
|
If you are running many `glob` operations, you can pass a Glob object
|
||||||
|
as the `options` argument to a subsequent operation to shortcut some
|
||||||
|
`stat` and `readdir` calls. At the very least, you may pass in shared
|
||||||
|
`symlinks`, `statCache`, `realpathCache`, and `cache` options, so that
|
||||||
|
parallel glob operations will be sped up by sharing information about
|
||||||
|
the filesystem.
|
||||||
|
|
||||||
|
* `cwd` The current working directory in which to search. Defaults
|
||||||
|
to `process.cwd()`.
|
||||||
|
* `root` The place where patterns starting with `/` will be mounted
|
||||||
|
onto. Defaults to `path.resolve(options.cwd, "/")` (`/` on Unix
|
||||||
|
systems, and `C:\` or some such on Windows.)
|
||||||
|
* `dot` Include `.dot` files in normal matches and `globstar` matches.
|
||||||
|
Note that an explicit dot in a portion of the pattern will always
|
||||||
|
match dot files.
|
||||||
|
* `nomount` By default, a pattern starting with a forward-slash will be
|
||||||
|
"mounted" onto the root setting, so that a valid filesystem path is
|
||||||
|
returned. Set this flag to disable that behavior.
|
||||||
|
* `mark` Add a `/` character to directory matches. Note that this
|
||||||
|
requires additional stat calls.
|
||||||
|
* `nosort` Don't sort the results.
|
||||||
|
* `stat` Set to true to stat *all* results. This reduces performance
|
||||||
|
somewhat, and is completely unnecessary, unless `readdir` is presumed
|
||||||
|
to be an untrustworthy indicator of file existence.
|
||||||
|
* `silent` When an unusual error is encountered when attempting to
|
||||||
|
read a directory, a warning will be printed to stderr. Set the
|
||||||
|
`silent` option to true to suppress these warnings.
|
||||||
|
* `strict` When an unusual error is encountered when attempting to
|
||||||
|
read a directory, the process will just continue on in search of
|
||||||
|
other matches. Set the `strict` option to raise an error in these
|
||||||
|
cases.
|
||||||
|
* `cache` See `cache` property above. Pass in a previously generated
|
||||||
|
cache object to save some fs calls.
|
||||||
|
* `statCache` A cache of results of filesystem information, to prevent
|
||||||
|
unnecessary stat calls. While it should not normally be necessary
|
||||||
|
to set this, you may pass the statCache from one glob() call to the
|
||||||
|
options object of another, if you know that the filesystem will not
|
||||||
|
change between calls. (See "Race Conditions" below.)
|
||||||
|
* `symlinks` A cache of known symbolic links. You may pass in a
|
||||||
|
previously generated `symlinks` object to save `lstat` calls when
|
||||||
|
resolving `**` matches.
|
||||||
|
* `sync` DEPRECATED: use `glob.sync(pattern, opts)` instead.
|
||||||
|
* `nounique` In some cases, brace-expanded patterns can result in the
|
||||||
|
same file showing up multiple times in the result set. By default,
|
||||||
|
this implementation prevents duplicates in the result set. Set this
|
||||||
|
flag to disable that behavior.
|
||||||
|
* `nonull` Set to never return an empty set, instead returning a set
|
||||||
|
containing the pattern itself. This is the default in glob(3).
|
||||||
|
* `debug` Set to enable debug logging in minimatch and glob.
|
||||||
|
* `nobrace` Do not expand `{a,b}` and `{1..3}` brace sets.
|
||||||
|
* `noglobstar` Do not match `**` against multiple filenames. (Ie,
|
||||||
|
treat it as a normal `*` instead.)
|
||||||
|
* `noext` Do not match `+(a|b)` "extglob" patterns.
|
||||||
|
* `nocase` Perform a case-insensitive match. Note: on
|
||||||
|
case-insensitive filesystems, non-magic patterns will match by
|
||||||
|
default, since `stat` and `readdir` will not raise errors.
|
||||||
|
* `matchBase` Perform a basename-only match if the pattern does not
|
||||||
|
contain any slash characters. That is, `*.js` would be treated as
|
||||||
|
equivalent to `**/*.js`, matching all js files in all directories.
|
||||||
|
* `nodir` Do not match directories, only files. (Note: to match
|
||||||
|
*only* directories, simply put a `/` at the end of the pattern.)
|
||||||
|
* `ignore` Add a pattern or an array of glob patterns to exclude matches.
|
||||||
|
Note: `ignore` patterns are *always* in `dot:true` mode, regardless
|
||||||
|
of any other settings.
|
||||||
|
* `follow` Follow symlinked directories when expanding `**` patterns.
|
||||||
|
Note that this can result in a lot of duplicate references in the
|
||||||
|
presence of cyclic links.
|
||||||
|
* `realpath` Set to true to call `fs.realpath` on all of the results.
|
||||||
|
In the case of a symlink that cannot be resolved, the full absolute
|
||||||
|
path to the matched entry is returned (though it will usually be a
|
||||||
|
broken symlink)
|
||||||
|
|
||||||
|
## Comparisons to other fnmatch/glob implementations
|
||||||
|
|
||||||
|
While strict compliance with the existing standards is a worthwhile
|
||||||
|
goal, some discrepancies exist between node-glob and other
|
||||||
|
implementations, and are intentional.
|
||||||
|
|
||||||
|
The double-star character `**` is supported by default, unless the
|
||||||
|
`noglobstar` flag is set. This is supported in the manner of bsdglob
|
||||||
|
and bash 4.3, where `**` only has special significance if it is the only
|
||||||
|
thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but
|
||||||
|
`a/**b` will not.
|
||||||
|
|
||||||
|
Note that symlinked directories are not crawled as part of a `**`,
|
||||||
|
though their contents may match against subsequent portions of the
|
||||||
|
pattern. This prevents infinite loops and duplicates and the like.
|
||||||
|
|
||||||
|
If an escaped pattern has no matches, and the `nonull` flag is set,
|
||||||
|
then glob returns the pattern as-provided, rather than
|
||||||
|
interpreting the character escapes. For example,
|
||||||
|
`glob.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than
|
||||||
|
`"*a?"`. This is akin to setting the `nullglob` option in bash, except
|
||||||
|
that it does not resolve escaped pattern characters.
|
||||||
|
|
||||||
|
If brace expansion is not disabled, then it is performed before any
|
||||||
|
other interpretation of the glob pattern. Thus, a pattern like
|
||||||
|
`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded
|
||||||
|
**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are
|
||||||
|
checked for validity. Since those two are valid, matching proceeds.
|
||||||
|
|
||||||
|
### Comments and Negation
|
||||||
|
|
||||||
|
Previously, this module let you mark a pattern as a "comment" if it
|
||||||
|
started with a `#` character, or a "negated" pattern if it started
|
||||||
|
with a `!` character.
|
||||||
|
|
||||||
|
These options were deprecated in version 5, and removed in version 6.
|
||||||
|
|
||||||
|
To specify things that should not match, use the `ignore` option.
|
||||||
|
|
||||||
|
## Windows
|
||||||
|
|
||||||
|
**Please only use forward-slashes in glob expressions.**
|
||||||
|
|
||||||
|
Though windows uses either `/` or `\` as its path separator, only `/`
|
||||||
|
characters are used by this glob implementation. You must use
|
||||||
|
forward-slashes **only** in glob expressions. Back-slashes will always
|
||||||
|
be interpreted as escape characters, not path separators.
|
||||||
|
|
||||||
|
Results from absolute patterns such as `/foo/*` are mounted onto the
|
||||||
|
root setting using `path.join`. On windows, this will by default result
|
||||||
|
in `/foo/*` matching `C:\foo\bar.txt`.
|
||||||
|
|
||||||
|
## Race Conditions
|
||||||
|
|
||||||
|
Glob searching, by its very nature, is susceptible to race conditions,
|
||||||
|
since it relies on directory walking and such.
|
||||||
|
|
||||||
|
As a result, it is possible that a file that exists when glob looks for
|
||||||
|
it may have been deleted or modified by the time it returns the result.
|
||||||
|
|
||||||
|
As part of its internal implementation, this program caches all stat
|
||||||
|
and readdir calls that it makes, in order to cut down on system
|
||||||
|
overhead. However, this also makes it even more susceptible to races,
|
||||||
|
especially if the cache or statCache objects are reused between glob
|
||||||
|
calls.
|
||||||
|
|
||||||
|
Users are thus advised not to use a glob result as a guarantee of
|
||||||
|
filesystem state in the face of rapid changes. For the vast majority
|
||||||
|
of operations, this is never a problem.
|
||||||
|
|
||||||
|
## Contributing
|
||||||
|
|
||||||
|
Any change to behavior (including bugfixes) must come with a test.
|
||||||
|
|
||||||
|
Patches that fail tests or reduce performance will be rejected.
|
||||||
|
|
||||||
|
```
|
||||||
|
# to run tests
|
||||||
|
npm test
|
||||||
|
|
||||||
|
# to re-generate test fixtures
|
||||||
|
npm run test-regen
|
||||||
|
|
||||||
|
# to benchmark against bash/zsh
|
||||||
|
npm run bench
|
||||||
|
|
||||||
|
# to profile javascript
|
||||||
|
npm run prof
|
||||||
|
```
|
226
app/node_modules/asar/node_modules/glob/common.js
generated
vendored
Normal file
226
app/node_modules/asar/node_modules/glob/common.js
generated
vendored
Normal file
@@ -0,0 +1,226 @@
|
|||||||
|
exports.alphasort = alphasort
|
||||||
|
exports.alphasorti = alphasorti
|
||||||
|
exports.setopts = setopts
|
||||||
|
exports.ownProp = ownProp
|
||||||
|
exports.makeAbs = makeAbs
|
||||||
|
exports.finish = finish
|
||||||
|
exports.mark = mark
|
||||||
|
exports.isIgnored = isIgnored
|
||||||
|
exports.childrenIgnored = childrenIgnored
|
||||||
|
|
||||||
|
function ownProp (obj, field) {
|
||||||
|
return Object.prototype.hasOwnProperty.call(obj, field)
|
||||||
|
}
|
||||||
|
|
||||||
|
var path = require("path")
|
||||||
|
var minimatch = require("minimatch")
|
||||||
|
var isAbsolute = require("path-is-absolute")
|
||||||
|
var Minimatch = minimatch.Minimatch
|
||||||
|
|
||||||
|
function alphasorti (a, b) {
|
||||||
|
return a.toLowerCase().localeCompare(b.toLowerCase())
|
||||||
|
}
|
||||||
|
|
||||||
|
function alphasort (a, b) {
|
||||||
|
return a.localeCompare(b)
|
||||||
|
}
|
||||||
|
|
||||||
|
function setupIgnores (self, options) {
|
||||||
|
self.ignore = options.ignore || []
|
||||||
|
|
||||||
|
if (!Array.isArray(self.ignore))
|
||||||
|
self.ignore = [self.ignore]
|
||||||
|
|
||||||
|
if (self.ignore.length) {
|
||||||
|
self.ignore = self.ignore.map(ignoreMap)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ignore patterns are always in dot:true mode.
|
||||||
|
function ignoreMap (pattern) {
|
||||||
|
var gmatcher = null
|
||||||
|
if (pattern.slice(-3) === '/**') {
|
||||||
|
var gpattern = pattern.replace(/(\/\*\*)+$/, '')
|
||||||
|
gmatcher = new Minimatch(gpattern, { dot: true })
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
matcher: new Minimatch(pattern, { dot: true }),
|
||||||
|
gmatcher: gmatcher
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function setopts (self, pattern, options) {
|
||||||
|
if (!options)
|
||||||
|
options = {}
|
||||||
|
|
||||||
|
// base-matching: just use globstar for that.
|
||||||
|
if (options.matchBase && -1 === pattern.indexOf("/")) {
|
||||||
|
if (options.noglobstar) {
|
||||||
|
throw new Error("base matching requires globstar")
|
||||||
|
}
|
||||||
|
pattern = "**/" + pattern
|
||||||
|
}
|
||||||
|
|
||||||
|
self.silent = !!options.silent
|
||||||
|
self.pattern = pattern
|
||||||
|
self.strict = options.strict !== false
|
||||||
|
self.realpath = !!options.realpath
|
||||||
|
self.realpathCache = options.realpathCache || Object.create(null)
|
||||||
|
self.follow = !!options.follow
|
||||||
|
self.dot = !!options.dot
|
||||||
|
self.mark = !!options.mark
|
||||||
|
self.nodir = !!options.nodir
|
||||||
|
if (self.nodir)
|
||||||
|
self.mark = true
|
||||||
|
self.sync = !!options.sync
|
||||||
|
self.nounique = !!options.nounique
|
||||||
|
self.nonull = !!options.nonull
|
||||||
|
self.nosort = !!options.nosort
|
||||||
|
self.nocase = !!options.nocase
|
||||||
|
self.stat = !!options.stat
|
||||||
|
self.noprocess = !!options.noprocess
|
||||||
|
|
||||||
|
self.maxLength = options.maxLength || Infinity
|
||||||
|
self.cache = options.cache || Object.create(null)
|
||||||
|
self.statCache = options.statCache || Object.create(null)
|
||||||
|
self.symlinks = options.symlinks || Object.create(null)
|
||||||
|
|
||||||
|
setupIgnores(self, options)
|
||||||
|
|
||||||
|
self.changedCwd = false
|
||||||
|
var cwd = process.cwd()
|
||||||
|
if (!ownProp(options, "cwd"))
|
||||||
|
self.cwd = cwd
|
||||||
|
else {
|
||||||
|
self.cwd = options.cwd
|
||||||
|
self.changedCwd = path.resolve(options.cwd) !== cwd
|
||||||
|
}
|
||||||
|
|
||||||
|
self.root = options.root || path.resolve(self.cwd, "/")
|
||||||
|
self.root = path.resolve(self.root)
|
||||||
|
if (process.platform === "win32")
|
||||||
|
self.root = self.root.replace(/\\/g, "/")
|
||||||
|
|
||||||
|
self.nomount = !!options.nomount
|
||||||
|
|
||||||
|
// disable comments and negation in Minimatch.
|
||||||
|
// Note that they are not supported in Glob itself anyway.
|
||||||
|
options.nonegate = true
|
||||||
|
options.nocomment = true
|
||||||
|
|
||||||
|
self.minimatch = new Minimatch(pattern, options)
|
||||||
|
self.options = self.minimatch.options
|
||||||
|
}
|
||||||
|
|
||||||
|
function finish (self) {
|
||||||
|
var nou = self.nounique
|
||||||
|
var all = nou ? [] : Object.create(null)
|
||||||
|
|
||||||
|
for (var i = 0, l = self.matches.length; i < l; i ++) {
|
||||||
|
var matches = self.matches[i]
|
||||||
|
if (!matches || Object.keys(matches).length === 0) {
|
||||||
|
if (self.nonull) {
|
||||||
|
// do like the shell, and spit out the literal glob
|
||||||
|
var literal = self.minimatch.globSet[i]
|
||||||
|
if (nou)
|
||||||
|
all.push(literal)
|
||||||
|
else
|
||||||
|
all[literal] = true
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// had matches
|
||||||
|
var m = Object.keys(matches)
|
||||||
|
if (nou)
|
||||||
|
all.push.apply(all, m)
|
||||||
|
else
|
||||||
|
m.forEach(function (m) {
|
||||||
|
all[m] = true
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!nou)
|
||||||
|
all = Object.keys(all)
|
||||||
|
|
||||||
|
if (!self.nosort)
|
||||||
|
all = all.sort(self.nocase ? alphasorti : alphasort)
|
||||||
|
|
||||||
|
// at *some* point we statted all of these
|
||||||
|
if (self.mark) {
|
||||||
|
for (var i = 0; i < all.length; i++) {
|
||||||
|
all[i] = self._mark(all[i])
|
||||||
|
}
|
||||||
|
if (self.nodir) {
|
||||||
|
all = all.filter(function (e) {
|
||||||
|
return !(/\/$/.test(e))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (self.ignore.length)
|
||||||
|
all = all.filter(function(m) {
|
||||||
|
return !isIgnored(self, m)
|
||||||
|
})
|
||||||
|
|
||||||
|
self.found = all
|
||||||
|
}
|
||||||
|
|
||||||
|
function mark (self, p) {
|
||||||
|
var abs = makeAbs(self, p)
|
||||||
|
var c = self.cache[abs]
|
||||||
|
var m = p
|
||||||
|
if (c) {
|
||||||
|
var isDir = c === 'DIR' || Array.isArray(c)
|
||||||
|
var slash = p.slice(-1) === '/'
|
||||||
|
|
||||||
|
if (isDir && !slash)
|
||||||
|
m += '/'
|
||||||
|
else if (!isDir && slash)
|
||||||
|
m = m.slice(0, -1)
|
||||||
|
|
||||||
|
if (m !== p) {
|
||||||
|
var mabs = makeAbs(self, m)
|
||||||
|
self.statCache[mabs] = self.statCache[abs]
|
||||||
|
self.cache[mabs] = self.cache[abs]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return m
|
||||||
|
}
|
||||||
|
|
||||||
|
// lotta situps...
|
||||||
|
function makeAbs (self, f) {
|
||||||
|
var abs = f
|
||||||
|
if (f.charAt(0) === '/') {
|
||||||
|
abs = path.join(self.root, f)
|
||||||
|
} else if (isAbsolute(f) || f === '') {
|
||||||
|
abs = f
|
||||||
|
} else if (self.changedCwd) {
|
||||||
|
abs = path.resolve(self.cwd, f)
|
||||||
|
} else {
|
||||||
|
abs = path.resolve(f)
|
||||||
|
}
|
||||||
|
return abs
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// Return true, if pattern ends with globstar '**', for the accompanying parent directory.
|
||||||
|
// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents
|
||||||
|
function isIgnored (self, path) {
|
||||||
|
if (!self.ignore.length)
|
||||||
|
return false
|
||||||
|
|
||||||
|
return self.ignore.some(function(item) {
|
||||||
|
return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
function childrenIgnored (self, path) {
|
||||||
|
if (!self.ignore.length)
|
||||||
|
return false
|
||||||
|
|
||||||
|
return self.ignore.some(function(item) {
|
||||||
|
return !!(item.gmatcher && item.gmatcher.match(path))
|
||||||
|
})
|
||||||
|
}
|
765
app/node_modules/asar/node_modules/glob/glob.js
generated
vendored
Normal file
765
app/node_modules/asar/node_modules/glob/glob.js
generated
vendored
Normal file
@@ -0,0 +1,765 @@
|
|||||||
|
// Approach:
|
||||||
|
//
|
||||||
|
// 1. Get the minimatch set
|
||||||
|
// 2. For each pattern in the set, PROCESS(pattern, false)
|
||||||
|
// 3. Store matches per-set, then uniq them
|
||||||
|
//
|
||||||
|
// PROCESS(pattern, inGlobStar)
|
||||||
|
// Get the first [n] items from pattern that are all strings
|
||||||
|
// Join these together. This is PREFIX.
|
||||||
|
// If there is no more remaining, then stat(PREFIX) and
|
||||||
|
// add to matches if it succeeds. END.
|
||||||
|
//
|
||||||
|
// If inGlobStar and PREFIX is symlink and points to dir
|
||||||
|
// set ENTRIES = []
|
||||||
|
// else readdir(PREFIX) as ENTRIES
|
||||||
|
// If fail, END
|
||||||
|
//
|
||||||
|
// with ENTRIES
|
||||||
|
// If pattern[n] is GLOBSTAR
|
||||||
|
// // handle the case where the globstar match is empty
|
||||||
|
// // by pruning it out, and testing the resulting pattern
|
||||||
|
// PROCESS(pattern[0..n] + pattern[n+1 .. $], false)
|
||||||
|
// // handle other cases.
|
||||||
|
// for ENTRY in ENTRIES (not dotfiles)
|
||||||
|
// // attach globstar + tail onto the entry
|
||||||
|
// // Mark that this entry is a globstar match
|
||||||
|
// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true)
|
||||||
|
//
|
||||||
|
// else // not globstar
|
||||||
|
// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot)
|
||||||
|
// Test ENTRY against pattern[n]
|
||||||
|
// If fails, continue
|
||||||
|
// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $])
|
||||||
|
//
|
||||||
|
// Caveat:
|
||||||
|
// Cache all stats and readdirs results to minimize syscall. Since all
|
||||||
|
// we ever care about is existence and directory-ness, we can just keep
|
||||||
|
// `true` for files, and [children,...] for directories, or `false` for
|
||||||
|
// things that don't exist.
|
||||||
|
|
||||||
|
module.exports = glob
|
||||||
|
|
||||||
|
var fs = require('fs')
|
||||||
|
var minimatch = require('minimatch')
|
||||||
|
var Minimatch = minimatch.Minimatch
|
||||||
|
var inherits = require('inherits')
|
||||||
|
var EE = require('events').EventEmitter
|
||||||
|
var path = require('path')
|
||||||
|
var assert = require('assert')
|
||||||
|
var isAbsolute = require('path-is-absolute')
|
||||||
|
var globSync = require('./sync.js')
|
||||||
|
var common = require('./common.js')
|
||||||
|
var alphasort = common.alphasort
|
||||||
|
var alphasorti = common.alphasorti
|
||||||
|
var setopts = common.setopts
|
||||||
|
var ownProp = common.ownProp
|
||||||
|
var inflight = require('inflight')
|
||||||
|
var util = require('util')
|
||||||
|
var childrenIgnored = common.childrenIgnored
|
||||||
|
var isIgnored = common.isIgnored
|
||||||
|
|
||||||
|
var once = require('once')
|
||||||
|
|
||||||
|
function glob (pattern, options, cb) {
|
||||||
|
if (typeof options === 'function') cb = options, options = {}
|
||||||
|
if (!options) options = {}
|
||||||
|
|
||||||
|
if (options.sync) {
|
||||||
|
if (cb)
|
||||||
|
throw new TypeError('callback provided to sync glob')
|
||||||
|
return globSync(pattern, options)
|
||||||
|
}
|
||||||
|
|
||||||
|
return new Glob(pattern, options, cb)
|
||||||
|
}
|
||||||
|
|
||||||
|
glob.sync = globSync
|
||||||
|
var GlobSync = glob.GlobSync = globSync.GlobSync
|
||||||
|
|
||||||
|
// old api surface
|
||||||
|
glob.glob = glob
|
||||||
|
|
||||||
|
function extend (origin, add) {
|
||||||
|
if (add === null || typeof add !== 'object') {
|
||||||
|
return origin
|
||||||
|
}
|
||||||
|
|
||||||
|
var keys = Object.keys(add)
|
||||||
|
var i = keys.length
|
||||||
|
while (i--) {
|
||||||
|
origin[keys[i]] = add[keys[i]]
|
||||||
|
}
|
||||||
|
return origin
|
||||||
|
}
|
||||||
|
|
||||||
|
glob.hasMagic = function (pattern, options_) {
|
||||||
|
var options = extend({}, options_)
|
||||||
|
options.noprocess = true
|
||||||
|
|
||||||
|
var g = new Glob(pattern, options)
|
||||||
|
var set = g.minimatch.set
|
||||||
|
if (set.length > 1)
|
||||||
|
return true
|
||||||
|
|
||||||
|
for (var j = 0; j < set[0].length; j++) {
|
||||||
|
if (typeof set[0][j] !== 'string')
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
glob.Glob = Glob
|
||||||
|
inherits(Glob, EE)
|
||||||
|
function Glob (pattern, options, cb) {
|
||||||
|
if (typeof options === 'function') {
|
||||||
|
cb = options
|
||||||
|
options = null
|
||||||
|
}
|
||||||
|
|
||||||
|
if (options && options.sync) {
|
||||||
|
if (cb)
|
||||||
|
throw new TypeError('callback provided to sync glob')
|
||||||
|
return new GlobSync(pattern, options)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!(this instanceof Glob))
|
||||||
|
return new Glob(pattern, options, cb)
|
||||||
|
|
||||||
|
setopts(this, pattern, options)
|
||||||
|
this._didRealPath = false
|
||||||
|
|
||||||
|
// process each pattern in the minimatch set
|
||||||
|
var n = this.minimatch.set.length
|
||||||
|
|
||||||
|
// The matches are stored as {<filename>: true,...} so that
|
||||||
|
// duplicates are automagically pruned.
|
||||||
|
// Later, we do an Object.keys() on these.
|
||||||
|
// Keep them as a list so we can fill in when nonull is set.
|
||||||
|
this.matches = new Array(n)
|
||||||
|
|
||||||
|
if (typeof cb === 'function') {
|
||||||
|
cb = once(cb)
|
||||||
|
this.on('error', cb)
|
||||||
|
this.on('end', function (matches) {
|
||||||
|
cb(null, matches)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
var self = this
|
||||||
|
var n = this.minimatch.set.length
|
||||||
|
this._processing = 0
|
||||||
|
this.matches = new Array(n)
|
||||||
|
|
||||||
|
this._emitQueue = []
|
||||||
|
this._processQueue = []
|
||||||
|
this.paused = false
|
||||||
|
|
||||||
|
if (this.noprocess)
|
||||||
|
return this
|
||||||
|
|
||||||
|
if (n === 0)
|
||||||
|
return done()
|
||||||
|
|
||||||
|
for (var i = 0; i < n; i ++) {
|
||||||
|
this._process(this.minimatch.set[i], i, false, done)
|
||||||
|
}
|
||||||
|
|
||||||
|
function done () {
|
||||||
|
--self._processing
|
||||||
|
if (self._processing <= 0)
|
||||||
|
self._finish()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Glob.prototype._finish = function () {
|
||||||
|
assert(this instanceof Glob)
|
||||||
|
if (this.aborted)
|
||||||
|
return
|
||||||
|
|
||||||
|
if (this.realpath && !this._didRealpath)
|
||||||
|
return this._realpath()
|
||||||
|
|
||||||
|
common.finish(this)
|
||||||
|
this.emit('end', this.found)
|
||||||
|
}
|
||||||
|
|
||||||
|
Glob.prototype._realpath = function () {
|
||||||
|
if (this._didRealpath)
|
||||||
|
return
|
||||||
|
|
||||||
|
this._didRealpath = true
|
||||||
|
|
||||||
|
var n = this.matches.length
|
||||||
|
if (n === 0)
|
||||||
|
return this._finish()
|
||||||
|
|
||||||
|
var self = this
|
||||||
|
for (var i = 0; i < this.matches.length; i++)
|
||||||
|
this._realpathSet(i, next)
|
||||||
|
|
||||||
|
function next () {
|
||||||
|
if (--n === 0)
|
||||||
|
self._finish()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Glob.prototype._realpathSet = function (index, cb) {
|
||||||
|
var matchset = this.matches[index]
|
||||||
|
if (!matchset)
|
||||||
|
return cb()
|
||||||
|
|
||||||
|
var found = Object.keys(matchset)
|
||||||
|
var self = this
|
||||||
|
var n = found.length
|
||||||
|
|
||||||
|
if (n === 0)
|
||||||
|
return cb()
|
||||||
|
|
||||||
|
var set = this.matches[index] = Object.create(null)
|
||||||
|
found.forEach(function (p, i) {
|
||||||
|
// If there's a problem with the stat, then it means that
|
||||||
|
// one or more of the links in the realpath couldn't be
|
||||||
|
// resolved. just return the abs value in that case.
|
||||||
|
p = self._makeAbs(p)
|
||||||
|
fs.realpath(p, self.realpathCache, function (er, real) {
|
||||||
|
if (!er)
|
||||||
|
set[real] = true
|
||||||
|
else if (er.syscall === 'stat')
|
||||||
|
set[p] = true
|
||||||
|
else
|
||||||
|
self.emit('error', er) // srsly wtf right here
|
||||||
|
|
||||||
|
if (--n === 0) {
|
||||||
|
self.matches[index] = set
|
||||||
|
cb()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
Glob.prototype._mark = function (p) {
|
||||||
|
return common.mark(this, p)
|
||||||
|
}
|
||||||
|
|
||||||
|
Glob.prototype._makeAbs = function (f) {
|
||||||
|
return common.makeAbs(this, f)
|
||||||
|
}
|
||||||
|
|
||||||
|
Glob.prototype.abort = function () {
|
||||||
|
this.aborted = true
|
||||||
|
this.emit('abort')
|
||||||
|
}
|
||||||
|
|
||||||
|
Glob.prototype.pause = function () {
|
||||||
|
if (!this.paused) {
|
||||||
|
this.paused = true
|
||||||
|
this.emit('pause')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Glob.prototype.resume = function () {
|
||||||
|
if (this.paused) {
|
||||||
|
this.emit('resume')
|
||||||
|
this.paused = false
|
||||||
|
if (this._emitQueue.length) {
|
||||||
|
var eq = this._emitQueue.slice(0)
|
||||||
|
this._emitQueue.length = 0
|
||||||
|
for (var i = 0; i < eq.length; i ++) {
|
||||||
|
var e = eq[i]
|
||||||
|
this._emitMatch(e[0], e[1])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (this._processQueue.length) {
|
||||||
|
var pq = this._processQueue.slice(0)
|
||||||
|
this._processQueue.length = 0
|
||||||
|
for (var i = 0; i < pq.length; i ++) {
|
||||||
|
var p = pq[i]
|
||||||
|
this._processing--
|
||||||
|
this._process(p[0], p[1], p[2], p[3])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Glob.prototype._process = function (pattern, index, inGlobStar, cb) {
|
||||||
|
assert(this instanceof Glob)
|
||||||
|
assert(typeof cb === 'function')
|
||||||
|
|
||||||
|
if (this.aborted)
|
||||||
|
return
|
||||||
|
|
||||||
|
this._processing++
|
||||||
|
if (this.paused) {
|
||||||
|
this._processQueue.push([pattern, index, inGlobStar, cb])
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
//console.error('PROCESS %d', this._processing, pattern)
|
||||||
|
|
||||||
|
// Get the first [n] parts of pattern that are all strings.
|
||||||
|
var n = 0
|
||||||
|
while (typeof pattern[n] === 'string') {
|
||||||
|
n ++
|
||||||
|
}
|
||||||
|
// now n is the index of the first one that is *not* a string.
|
||||||
|
|
||||||
|
// see if there's anything else
|
||||||
|
var prefix
|
||||||
|
switch (n) {
|
||||||
|
// if not, then this is rather simple
|
||||||
|
case pattern.length:
|
||||||
|
this._processSimple(pattern.join('/'), index, cb)
|
||||||
|
return
|
||||||
|
|
||||||
|
case 0:
|
||||||
|
// pattern *starts* with some non-trivial item.
|
||||||
|
// going to readdir(cwd), but not include the prefix in matches.
|
||||||
|
prefix = null
|
||||||
|
break
|
||||||
|
|
||||||
|
default:
|
||||||
|
// pattern has some string bits in the front.
|
||||||
|
// whatever it starts with, whether that's 'absolute' like /foo/bar,
|
||||||
|
// or 'relative' like '../baz'
|
||||||
|
prefix = pattern.slice(0, n).join('/')
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
var remain = pattern.slice(n)
|
||||||
|
|
||||||
|
// get the list of entries.
|
||||||
|
var read
|
||||||
|
if (prefix === null)
|
||||||
|
read = '.'
|
||||||
|
else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
|
||||||
|
if (!prefix || !isAbsolute(prefix))
|
||||||
|
prefix = '/' + prefix
|
||||||
|
read = prefix
|
||||||
|
} else
|
||||||
|
read = prefix
|
||||||
|
|
||||||
|
var abs = this._makeAbs(read)
|
||||||
|
|
||||||
|
//if ignored, skip _processing
|
||||||
|
if (childrenIgnored(this, read))
|
||||||
|
return cb()
|
||||||
|
|
||||||
|
var isGlobStar = remain[0] === minimatch.GLOBSTAR
|
||||||
|
if (isGlobStar)
|
||||||
|
this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb)
|
||||||
|
else
|
||||||
|
this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb)
|
||||||
|
}
|
||||||
|
|
||||||
|
Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) {
|
||||||
|
var self = this
|
||||||
|
this._readdir(abs, inGlobStar, function (er, entries) {
|
||||||
|
return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
|
||||||
|
|
||||||
|
// if the abs isn't a dir, then nothing can match!
|
||||||
|
if (!entries)
|
||||||
|
return cb()
|
||||||
|
|
||||||
|
// It will only match dot entries if it starts with a dot, or if
|
||||||
|
// dot is set. Stuff like @(.foo|.bar) isn't allowed.
|
||||||
|
var pn = remain[0]
|
||||||
|
var negate = !!this.minimatch.negate
|
||||||
|
var rawGlob = pn._glob
|
||||||
|
var dotOk = this.dot || rawGlob.charAt(0) === '.'
|
||||||
|
|
||||||
|
var matchedEntries = []
|
||||||
|
for (var i = 0; i < entries.length; i++) {
|
||||||
|
var e = entries[i]
|
||||||
|
if (e.charAt(0) !== '.' || dotOk) {
|
||||||
|
var m
|
||||||
|
if (negate && !prefix) {
|
||||||
|
m = !e.match(pn)
|
||||||
|
} else {
|
||||||
|
m = e.match(pn)
|
||||||
|
}
|
||||||
|
if (m)
|
||||||
|
matchedEntries.push(e)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries)
|
||||||
|
|
||||||
|
var len = matchedEntries.length
|
||||||
|
// If there are no matched entries, then nothing matches.
|
||||||
|
if (len === 0)
|
||||||
|
return cb()
|
||||||
|
|
||||||
|
// if this is the last remaining pattern bit, then no need for
|
||||||
|
// an additional stat *unless* the user has specified mark or
|
||||||
|
// stat explicitly. We know they exist, since readdir returned
|
||||||
|
// them.
|
||||||
|
|
||||||
|
if (remain.length === 1 && !this.mark && !this.stat) {
|
||||||
|
if (!this.matches[index])
|
||||||
|
this.matches[index] = Object.create(null)
|
||||||
|
|
||||||
|
for (var i = 0; i < len; i ++) {
|
||||||
|
var e = matchedEntries[i]
|
||||||
|
if (prefix) {
|
||||||
|
if (prefix !== '/')
|
||||||
|
e = prefix + '/' + e
|
||||||
|
else
|
||||||
|
e = prefix + e
|
||||||
|
}
|
||||||
|
|
||||||
|
if (e.charAt(0) === '/' && !this.nomount) {
|
||||||
|
e = path.join(this.root, e)
|
||||||
|
}
|
||||||
|
this._emitMatch(index, e)
|
||||||
|
}
|
||||||
|
// This was the last one, and no stats were needed
|
||||||
|
return cb()
|
||||||
|
}
|
||||||
|
|
||||||
|
// now test all matched entries as stand-ins for that part
|
||||||
|
// of the pattern.
|
||||||
|
remain.shift()
|
||||||
|
for (var i = 0; i < len; i ++) {
|
||||||
|
var e = matchedEntries[i]
|
||||||
|
var newPattern
|
||||||
|
if (prefix) {
|
||||||
|
if (prefix !== '/')
|
||||||
|
e = prefix + '/' + e
|
||||||
|
else
|
||||||
|
e = prefix + e
|
||||||
|
}
|
||||||
|
this._process([e].concat(remain), index, inGlobStar, cb)
|
||||||
|
}
|
||||||
|
cb()
|
||||||
|
}
|
||||||
|
|
||||||
|
Glob.prototype._emitMatch = function (index, e) {
|
||||||
|
if (this.aborted)
|
||||||
|
return
|
||||||
|
|
||||||
|
if (this.matches[index][e])
|
||||||
|
return
|
||||||
|
|
||||||
|
if (isIgnored(this, e))
|
||||||
|
return
|
||||||
|
|
||||||
|
if (this.paused) {
|
||||||
|
this._emitQueue.push([index, e])
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var abs = this._makeAbs(e)
|
||||||
|
|
||||||
|
if (this.nodir) {
|
||||||
|
var c = this.cache[abs]
|
||||||
|
if (c === 'DIR' || Array.isArray(c))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.mark)
|
||||||
|
e = this._mark(e)
|
||||||
|
|
||||||
|
this.matches[index][e] = true
|
||||||
|
|
||||||
|
var st = this.statCache[abs]
|
||||||
|
if (st)
|
||||||
|
this.emit('stat', e, st)
|
||||||
|
|
||||||
|
this.emit('match', e)
|
||||||
|
}
|
||||||
|
|
||||||
|
Glob.prototype._readdirInGlobStar = function (abs, cb) {
|
||||||
|
if (this.aborted)
|
||||||
|
return
|
||||||
|
|
||||||
|
// follow all symlinked directories forever
|
||||||
|
// just proceed as if this is a non-globstar situation
|
||||||
|
if (this.follow)
|
||||||
|
return this._readdir(abs, false, cb)
|
||||||
|
|
||||||
|
var lstatkey = 'lstat\0' + abs
|
||||||
|
var self = this
|
||||||
|
var lstatcb = inflight(lstatkey, lstatcb_)
|
||||||
|
|
||||||
|
if (lstatcb)
|
||||||
|
fs.lstat(abs, lstatcb)
|
||||||
|
|
||||||
|
function lstatcb_ (er, lstat) {
|
||||||
|
if (er)
|
||||||
|
return cb()
|
||||||
|
|
||||||
|
var isSym = lstat.isSymbolicLink()
|
||||||
|
self.symlinks[abs] = isSym
|
||||||
|
|
||||||
|
// If it's not a symlink or a dir, then it's definitely a regular file.
|
||||||
|
// don't bother doing a readdir in that case.
|
||||||
|
if (!isSym && !lstat.isDirectory()) {
|
||||||
|
self.cache[abs] = 'FILE'
|
||||||
|
cb()
|
||||||
|
} else
|
||||||
|
self._readdir(abs, false, cb)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Glob.prototype._readdir = function (abs, inGlobStar, cb) {
|
||||||
|
if (this.aborted)
|
||||||
|
return
|
||||||
|
|
||||||
|
cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb)
|
||||||
|
if (!cb)
|
||||||
|
return
|
||||||
|
|
||||||
|
//console.error('RD %j %j', +inGlobStar, abs)
|
||||||
|
if (inGlobStar && !ownProp(this.symlinks, abs))
|
||||||
|
return this._readdirInGlobStar(abs, cb)
|
||||||
|
|
||||||
|
if (ownProp(this.cache, abs)) {
|
||||||
|
var c = this.cache[abs]
|
||||||
|
if (!c || c === 'FILE')
|
||||||
|
return cb()
|
||||||
|
|
||||||
|
if (Array.isArray(c))
|
||||||
|
return cb(null, c)
|
||||||
|
}
|
||||||
|
|
||||||
|
var self = this
|
||||||
|
fs.readdir(abs, readdirCb(this, abs, cb))
|
||||||
|
}
|
||||||
|
|
||||||
|
function readdirCb (self, abs, cb) {
|
||||||
|
return function (er, entries) {
|
||||||
|
if (er)
|
||||||
|
self._readdirError(abs, er, cb)
|
||||||
|
else
|
||||||
|
self._readdirEntries(abs, entries, cb)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Glob.prototype._readdirEntries = function (abs, entries, cb) {
|
||||||
|
if (this.aborted)
|
||||||
|
return
|
||||||
|
|
||||||
|
// if we haven't asked to stat everything, then just
|
||||||
|
// assume that everything in there exists, so we can avoid
|
||||||
|
// having to stat it a second time.
|
||||||
|
if (!this.mark && !this.stat) {
|
||||||
|
for (var i = 0; i < entries.length; i ++) {
|
||||||
|
var e = entries[i]
|
||||||
|
if (abs === '/')
|
||||||
|
e = abs + e
|
||||||
|
else
|
||||||
|
e = abs + '/' + e
|
||||||
|
this.cache[e] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.cache[abs] = entries
|
||||||
|
return cb(null, entries)
|
||||||
|
}
|
||||||
|
|
||||||
|
Glob.prototype._readdirError = function (f, er, cb) {
|
||||||
|
if (this.aborted)
|
||||||
|
return
|
||||||
|
|
||||||
|
// handle errors, and cache the information
|
||||||
|
switch (er.code) {
|
||||||
|
case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
|
||||||
|
case 'ENOTDIR': // totally normal. means it *does* exist.
|
||||||
|
this.cache[this._makeAbs(f)] = 'FILE'
|
||||||
|
break
|
||||||
|
|
||||||
|
case 'ENOENT': // not terribly unusual
|
||||||
|
case 'ELOOP':
|
||||||
|
case 'ENAMETOOLONG':
|
||||||
|
case 'UNKNOWN':
|
||||||
|
this.cache[this._makeAbs(f)] = false
|
||||||
|
break
|
||||||
|
|
||||||
|
default: // some unusual error. Treat as failure.
|
||||||
|
this.cache[this._makeAbs(f)] = false
|
||||||
|
if (this.strict) {
|
||||||
|
this.emit('error', er)
|
||||||
|
// If the error is handled, then we abort
|
||||||
|
// if not, we threw out of here
|
||||||
|
this.abort()
|
||||||
|
}
|
||||||
|
if (!this.silent)
|
||||||
|
console.error('glob error', er)
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
return cb()
|
||||||
|
}
|
||||||
|
|
||||||
|
Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) {
|
||||||
|
var self = this
|
||||||
|
this._readdir(abs, inGlobStar, function (er, entries) {
|
||||||
|
self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
|
||||||
|
//console.error('pgs2', prefix, remain[0], entries)
|
||||||
|
|
||||||
|
// no entries means not a dir, so it can never have matches
|
||||||
|
// foo.txt/** doesn't match foo.txt
|
||||||
|
if (!entries)
|
||||||
|
return cb()
|
||||||
|
|
||||||
|
// test without the globstar, and with every child both below
|
||||||
|
// and replacing the globstar.
|
||||||
|
var remainWithoutGlobStar = remain.slice(1)
|
||||||
|
var gspref = prefix ? [ prefix ] : []
|
||||||
|
var noGlobStar = gspref.concat(remainWithoutGlobStar)
|
||||||
|
|
||||||
|
// the noGlobStar pattern exits the inGlobStar state
|
||||||
|
this._process(noGlobStar, index, false, cb)
|
||||||
|
|
||||||
|
var isSym = this.symlinks[abs]
|
||||||
|
var len = entries.length
|
||||||
|
|
||||||
|
// If it's a symlink, and we're in a globstar, then stop
|
||||||
|
if (isSym && inGlobStar)
|
||||||
|
return cb()
|
||||||
|
|
||||||
|
for (var i = 0; i < len; i++) {
|
||||||
|
var e = entries[i]
|
||||||
|
if (e.charAt(0) === '.' && !this.dot)
|
||||||
|
continue
|
||||||
|
|
||||||
|
// these two cases enter the inGlobStar state
|
||||||
|
var instead = gspref.concat(entries[i], remainWithoutGlobStar)
|
||||||
|
this._process(instead, index, true, cb)
|
||||||
|
|
||||||
|
var below = gspref.concat(entries[i], remain)
|
||||||
|
this._process(below, index, true, cb)
|
||||||
|
}
|
||||||
|
|
||||||
|
cb()
|
||||||
|
}
|
||||||
|
|
||||||
|
Glob.prototype._processSimple = function (prefix, index, cb) {
|
||||||
|
// XXX review this. Shouldn't it be doing the mounting etc
|
||||||
|
// before doing stat? kinda weird?
|
||||||
|
var self = this
|
||||||
|
this._stat(prefix, function (er, exists) {
|
||||||
|
self._processSimple2(prefix, index, er, exists, cb)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) {
|
||||||
|
|
||||||
|
//console.error('ps2', prefix, exists)
|
||||||
|
|
||||||
|
if (!this.matches[index])
|
||||||
|
this.matches[index] = Object.create(null)
|
||||||
|
|
||||||
|
// If it doesn't exist, then just mark the lack of results
|
||||||
|
if (!exists)
|
||||||
|
return cb()
|
||||||
|
|
||||||
|
if (prefix && isAbsolute(prefix) && !this.nomount) {
|
||||||
|
var trail = /[\/\\]$/.test(prefix)
|
||||||
|
if (prefix.charAt(0) === '/') {
|
||||||
|
prefix = path.join(this.root, prefix)
|
||||||
|
} else {
|
||||||
|
prefix = path.resolve(this.root, prefix)
|
||||||
|
if (trail)
|
||||||
|
prefix += '/'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (process.platform === 'win32')
|
||||||
|
prefix = prefix.replace(/\\/g, '/')
|
||||||
|
|
||||||
|
// Mark this as a match
|
||||||
|
this._emitMatch(index, prefix)
|
||||||
|
cb()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Returns either 'DIR', 'FILE', or false
|
||||||
|
Glob.prototype._stat = function (f, cb) {
|
||||||
|
var abs = this._makeAbs(f)
|
||||||
|
var needDir = f.slice(-1) === '/'
|
||||||
|
|
||||||
|
if (f.length > this.maxLength)
|
||||||
|
return cb()
|
||||||
|
|
||||||
|
if (!this.stat && ownProp(this.cache, abs)) {
|
||||||
|
var c = this.cache[abs]
|
||||||
|
|
||||||
|
if (Array.isArray(c))
|
||||||
|
c = 'DIR'
|
||||||
|
|
||||||
|
// It exists, but maybe not how we need it
|
||||||
|
if (!needDir || c === 'DIR')
|
||||||
|
return cb(null, c)
|
||||||
|
|
||||||
|
if (needDir && c === 'FILE')
|
||||||
|
return cb()
|
||||||
|
|
||||||
|
// otherwise we have to stat, because maybe c=true
|
||||||
|
// if we know it exists, but not what it is.
|
||||||
|
}
|
||||||
|
|
||||||
|
var exists
|
||||||
|
var stat = this.statCache[abs]
|
||||||
|
if (stat !== undefined) {
|
||||||
|
if (stat === false)
|
||||||
|
return cb(null, stat)
|
||||||
|
else {
|
||||||
|
var type = stat.isDirectory() ? 'DIR' : 'FILE'
|
||||||
|
if (needDir && type === 'FILE')
|
||||||
|
return cb()
|
||||||
|
else
|
||||||
|
return cb(null, type, stat)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var self = this
|
||||||
|
var statcb = inflight('stat\0' + abs, lstatcb_)
|
||||||
|
if (statcb)
|
||||||
|
fs.lstat(abs, statcb)
|
||||||
|
|
||||||
|
function lstatcb_ (er, lstat) {
|
||||||
|
if (lstat && lstat.isSymbolicLink()) {
|
||||||
|
// If it's a symlink, then treat it as the target, unless
|
||||||
|
// the target does not exist, then treat it as a file.
|
||||||
|
return fs.stat(abs, function (er, stat) {
|
||||||
|
if (er)
|
||||||
|
self._stat2(f, abs, null, lstat, cb)
|
||||||
|
else
|
||||||
|
self._stat2(f, abs, er, stat, cb)
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
self._stat2(f, abs, er, lstat, cb)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Glob.prototype._stat2 = function (f, abs, er, stat, cb) {
|
||||||
|
if (er) {
|
||||||
|
this.statCache[abs] = false
|
||||||
|
return cb()
|
||||||
|
}
|
||||||
|
|
||||||
|
var needDir = f.slice(-1) === '/'
|
||||||
|
this.statCache[abs] = stat
|
||||||
|
|
||||||
|
if (abs.slice(-1) === '/' && !stat.isDirectory())
|
||||||
|
return cb(null, false, stat)
|
||||||
|
|
||||||
|
var c = stat.isDirectory() ? 'DIR' : 'FILE'
|
||||||
|
this.cache[abs] = this.cache[abs] || c
|
||||||
|
|
||||||
|
if (needDir && c !== 'DIR')
|
||||||
|
return cb()
|
||||||
|
|
||||||
|
return cb(null, c, stat)
|
||||||
|
}
|
75
app/node_modules/asar/node_modules/glob/package.json
generated
vendored
Normal file
75
app/node_modules/asar/node_modules/glob/package.json
generated
vendored
Normal file
@@ -0,0 +1,75 @@
|
|||||||
|
{
|
||||||
|
"_from": "glob@^6.0.4",
|
||||||
|
"_id": "glob@6.0.4",
|
||||||
|
"_inBundle": false,
|
||||||
|
"_integrity": "sha1-DwiGD2oVUSey+t1PnOJLGqtuTSI=",
|
||||||
|
"_location": "/asar/glob",
|
||||||
|
"_phantomChildren": {},
|
||||||
|
"_requested": {
|
||||||
|
"type": "range",
|
||||||
|
"registry": true,
|
||||||
|
"raw": "glob@^6.0.4",
|
||||||
|
"name": "glob",
|
||||||
|
"escapedName": "glob",
|
||||||
|
"rawSpec": "^6.0.4",
|
||||||
|
"saveSpec": null,
|
||||||
|
"fetchSpec": "^6.0.4"
|
||||||
|
},
|
||||||
|
"_requiredBy": [
|
||||||
|
"/asar"
|
||||||
|
],
|
||||||
|
"_resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz",
|
||||||
|
"_shasum": "0f08860f6a155127b2fadd4f9ce24b1aab6e4d22",
|
||||||
|
"_spec": "glob@^6.0.4",
|
||||||
|
"_where": "E:\\projects\\p\\gitlit\\app\\node_modules\\asar",
|
||||||
|
"author": {
|
||||||
|
"name": "Isaac Z. Schlueter",
|
||||||
|
"email": "i@izs.me",
|
||||||
|
"url": "http://blog.izs.me/"
|
||||||
|
},
|
||||||
|
"bugs": {
|
||||||
|
"url": "https://github.com/isaacs/node-glob/issues"
|
||||||
|
},
|
||||||
|
"bundleDependencies": false,
|
||||||
|
"dependencies": {
|
||||||
|
"inflight": "^1.0.4",
|
||||||
|
"inherits": "2",
|
||||||
|
"minimatch": "2 || 3",
|
||||||
|
"once": "^1.3.0",
|
||||||
|
"path-is-absolute": "^1.0.0"
|
||||||
|
},
|
||||||
|
"deprecated": false,
|
||||||
|
"description": "a little globber",
|
||||||
|
"devDependencies": {
|
||||||
|
"mkdirp": "0",
|
||||||
|
"rimraf": "^2.2.8",
|
||||||
|
"tap": "^5.0.0",
|
||||||
|
"tick": "0.0.6"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": "*"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"glob.js",
|
||||||
|
"sync.js",
|
||||||
|
"common.js"
|
||||||
|
],
|
||||||
|
"homepage": "https://github.com/isaacs/node-glob#readme",
|
||||||
|
"license": "ISC",
|
||||||
|
"main": "glob.js",
|
||||||
|
"name": "glob",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git://github.com/isaacs/node-glob.git"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"bench": "bash benchmark.sh",
|
||||||
|
"benchclean": "node benchclean.js",
|
||||||
|
"prepublish": "npm run benchclean",
|
||||||
|
"prof": "bash prof.sh && cat profile.txt",
|
||||||
|
"profclean": "rm -f v8.log profile.txt",
|
||||||
|
"test": "tap test/*.js --cov",
|
||||||
|
"test-regen": "npm run profclean && TEST_REGEN=1 node test/00-setup.js"
|
||||||
|
},
|
||||||
|
"version": "6.0.4"
|
||||||
|
}
|
460
app/node_modules/asar/node_modules/glob/sync.js
generated
vendored
Normal file
460
app/node_modules/asar/node_modules/glob/sync.js
generated
vendored
Normal file
@@ -0,0 +1,460 @@
|
|||||||
|
module.exports = globSync
|
||||||
|
globSync.GlobSync = GlobSync
|
||||||
|
|
||||||
|
var fs = require('fs')
|
||||||
|
var minimatch = require('minimatch')
|
||||||
|
var Minimatch = minimatch.Minimatch
|
||||||
|
var Glob = require('./glob.js').Glob
|
||||||
|
var util = require('util')
|
||||||
|
var path = require('path')
|
||||||
|
var assert = require('assert')
|
||||||
|
var isAbsolute = require('path-is-absolute')
|
||||||
|
var common = require('./common.js')
|
||||||
|
var alphasort = common.alphasort
|
||||||
|
var alphasorti = common.alphasorti
|
||||||
|
var setopts = common.setopts
|
||||||
|
var ownProp = common.ownProp
|
||||||
|
var childrenIgnored = common.childrenIgnored
|
||||||
|
|
||||||
|
function globSync (pattern, options) {
|
||||||
|
if (typeof options === 'function' || arguments.length === 3)
|
||||||
|
throw new TypeError('callback provided to sync glob\n'+
|
||||||
|
'See: https://github.com/isaacs/node-glob/issues/167')
|
||||||
|
|
||||||
|
return new GlobSync(pattern, options).found
|
||||||
|
}
|
||||||
|
|
||||||
|
function GlobSync (pattern, options) {
|
||||||
|
if (!pattern)
|
||||||
|
throw new Error('must provide pattern')
|
||||||
|
|
||||||
|
if (typeof options === 'function' || arguments.length === 3)
|
||||||
|
throw new TypeError('callback provided to sync glob\n'+
|
||||||
|
'See: https://github.com/isaacs/node-glob/issues/167')
|
||||||
|
|
||||||
|
if (!(this instanceof GlobSync))
|
||||||
|
return new GlobSync(pattern, options)
|
||||||
|
|
||||||
|
setopts(this, pattern, options)
|
||||||
|
|
||||||
|
if (this.noprocess)
|
||||||
|
return this
|
||||||
|
|
||||||
|
var n = this.minimatch.set.length
|
||||||
|
this.matches = new Array(n)
|
||||||
|
for (var i = 0; i < n; i ++) {
|
||||||
|
this._process(this.minimatch.set[i], i, false)
|
||||||
|
}
|
||||||
|
this._finish()
|
||||||
|
}
|
||||||
|
|
||||||
|
GlobSync.prototype._finish = function () {
|
||||||
|
assert(this instanceof GlobSync)
|
||||||
|
if (this.realpath) {
|
||||||
|
var self = this
|
||||||
|
this.matches.forEach(function (matchset, index) {
|
||||||
|
var set = self.matches[index] = Object.create(null)
|
||||||
|
for (var p in matchset) {
|
||||||
|
try {
|
||||||
|
p = self._makeAbs(p)
|
||||||
|
var real = fs.realpathSync(p, self.realpathCache)
|
||||||
|
set[real] = true
|
||||||
|
} catch (er) {
|
||||||
|
if (er.syscall === 'stat')
|
||||||
|
set[self._makeAbs(p)] = true
|
||||||
|
else
|
||||||
|
throw er
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
common.finish(this)
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
GlobSync.prototype._process = function (pattern, index, inGlobStar) {
|
||||||
|
assert(this instanceof GlobSync)
|
||||||
|
|
||||||
|
// Get the first [n] parts of pattern that are all strings.
|
||||||
|
var n = 0
|
||||||
|
while (typeof pattern[n] === 'string') {
|
||||||
|
n ++
|
||||||
|
}
|
||||||
|
// now n is the index of the first one that is *not* a string.
|
||||||
|
|
||||||
|
// See if there's anything else
|
||||||
|
var prefix
|
||||||
|
switch (n) {
|
||||||
|
// if not, then this is rather simple
|
||||||
|
case pattern.length:
|
||||||
|
this._processSimple(pattern.join('/'), index)
|
||||||
|
return
|
||||||
|
|
||||||
|
case 0:
|
||||||
|
// pattern *starts* with some non-trivial item.
|
||||||
|
// going to readdir(cwd), but not include the prefix in matches.
|
||||||
|
prefix = null
|
||||||
|
break
|
||||||
|
|
||||||
|
default:
|
||||||
|
// pattern has some string bits in the front.
|
||||||
|
// whatever it starts with, whether that's 'absolute' like /foo/bar,
|
||||||
|
// or 'relative' like '../baz'
|
||||||
|
prefix = pattern.slice(0, n).join('/')
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
var remain = pattern.slice(n)
|
||||||
|
|
||||||
|
// get the list of entries.
|
||||||
|
var read
|
||||||
|
if (prefix === null)
|
||||||
|
read = '.'
|
||||||
|
else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
|
||||||
|
if (!prefix || !isAbsolute(prefix))
|
||||||
|
prefix = '/' + prefix
|
||||||
|
read = prefix
|
||||||
|
} else
|
||||||
|
read = prefix
|
||||||
|
|
||||||
|
var abs = this._makeAbs(read)
|
||||||
|
|
||||||
|
//if ignored, skip processing
|
||||||
|
if (childrenIgnored(this, read))
|
||||||
|
return
|
||||||
|
|
||||||
|
var isGlobStar = remain[0] === minimatch.GLOBSTAR
|
||||||
|
if (isGlobStar)
|
||||||
|
this._processGlobStar(prefix, read, abs, remain, index, inGlobStar)
|
||||||
|
else
|
||||||
|
this._processReaddir(prefix, read, abs, remain, index, inGlobStar)
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) {
|
||||||
|
var entries = this._readdir(abs, inGlobStar)
|
||||||
|
|
||||||
|
// if the abs isn't a dir, then nothing can match!
|
||||||
|
if (!entries)
|
||||||
|
return
|
||||||
|
|
||||||
|
// It will only match dot entries if it starts with a dot, or if
|
||||||
|
// dot is set. Stuff like @(.foo|.bar) isn't allowed.
|
||||||
|
var pn = remain[0]
|
||||||
|
var negate = !!this.minimatch.negate
|
||||||
|
var rawGlob = pn._glob
|
||||||
|
var dotOk = this.dot || rawGlob.charAt(0) === '.'
|
||||||
|
|
||||||
|
var matchedEntries = []
|
||||||
|
for (var i = 0; i < entries.length; i++) {
|
||||||
|
var e = entries[i]
|
||||||
|
if (e.charAt(0) !== '.' || dotOk) {
|
||||||
|
var m
|
||||||
|
if (negate && !prefix) {
|
||||||
|
m = !e.match(pn)
|
||||||
|
} else {
|
||||||
|
m = e.match(pn)
|
||||||
|
}
|
||||||
|
if (m)
|
||||||
|
matchedEntries.push(e)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var len = matchedEntries.length
|
||||||
|
// If there are no matched entries, then nothing matches.
|
||||||
|
if (len === 0)
|
||||||
|
return
|
||||||
|
|
||||||
|
// if this is the last remaining pattern bit, then no need for
|
||||||
|
// an additional stat *unless* the user has specified mark or
|
||||||
|
// stat explicitly. We know they exist, since readdir returned
|
||||||
|
// them.
|
||||||
|
|
||||||
|
if (remain.length === 1 && !this.mark && !this.stat) {
|
||||||
|
if (!this.matches[index])
|
||||||
|
this.matches[index] = Object.create(null)
|
||||||
|
|
||||||
|
for (var i = 0; i < len; i ++) {
|
||||||
|
var e = matchedEntries[i]
|
||||||
|
if (prefix) {
|
||||||
|
if (prefix.slice(-1) !== '/')
|
||||||
|
e = prefix + '/' + e
|
||||||
|
else
|
||||||
|
e = prefix + e
|
||||||
|
}
|
||||||
|
|
||||||
|
if (e.charAt(0) === '/' && !this.nomount) {
|
||||||
|
e = path.join(this.root, e)
|
||||||
|
}
|
||||||
|
this.matches[index][e] = true
|
||||||
|
}
|
||||||
|
// This was the last one, and no stats were needed
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// now test all matched entries as stand-ins for that part
|
||||||
|
// of the pattern.
|
||||||
|
remain.shift()
|
||||||
|
for (var i = 0; i < len; i ++) {
|
||||||
|
var e = matchedEntries[i]
|
||||||
|
var newPattern
|
||||||
|
if (prefix)
|
||||||
|
newPattern = [prefix, e]
|
||||||
|
else
|
||||||
|
newPattern = [e]
|
||||||
|
this._process(newPattern.concat(remain), index, inGlobStar)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
GlobSync.prototype._emitMatch = function (index, e) {
|
||||||
|
var abs = this._makeAbs(e)
|
||||||
|
if (this.mark)
|
||||||
|
e = this._mark(e)
|
||||||
|
|
||||||
|
if (this.matches[index][e])
|
||||||
|
return
|
||||||
|
|
||||||
|
if (this.nodir) {
|
||||||
|
var c = this.cache[this._makeAbs(e)]
|
||||||
|
if (c === 'DIR' || Array.isArray(c))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
this.matches[index][e] = true
|
||||||
|
if (this.stat)
|
||||||
|
this._stat(e)
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
GlobSync.prototype._readdirInGlobStar = function (abs) {
|
||||||
|
// follow all symlinked directories forever
|
||||||
|
// just proceed as if this is a non-globstar situation
|
||||||
|
if (this.follow)
|
||||||
|
return this._readdir(abs, false)
|
||||||
|
|
||||||
|
var entries
|
||||||
|
var lstat
|
||||||
|
var stat
|
||||||
|
try {
|
||||||
|
lstat = fs.lstatSync(abs)
|
||||||
|
} catch (er) {
|
||||||
|
// lstat failed, doesn't exist
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
var isSym = lstat.isSymbolicLink()
|
||||||
|
this.symlinks[abs] = isSym
|
||||||
|
|
||||||
|
// If it's not a symlink or a dir, then it's definitely a regular file.
|
||||||
|
// don't bother doing a readdir in that case.
|
||||||
|
if (!isSym && !lstat.isDirectory())
|
||||||
|
this.cache[abs] = 'FILE'
|
||||||
|
else
|
||||||
|
entries = this._readdir(abs, false)
|
||||||
|
|
||||||
|
return entries
|
||||||
|
}
|
||||||
|
|
||||||
|
GlobSync.prototype._readdir = function (abs, inGlobStar) {
|
||||||
|
var entries
|
||||||
|
|
||||||
|
if (inGlobStar && !ownProp(this.symlinks, abs))
|
||||||
|
return this._readdirInGlobStar(abs)
|
||||||
|
|
||||||
|
if (ownProp(this.cache, abs)) {
|
||||||
|
var c = this.cache[abs]
|
||||||
|
if (!c || c === 'FILE')
|
||||||
|
return null
|
||||||
|
|
||||||
|
if (Array.isArray(c))
|
||||||
|
return c
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
return this._readdirEntries(abs, fs.readdirSync(abs))
|
||||||
|
} catch (er) {
|
||||||
|
this._readdirError(abs, er)
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
GlobSync.prototype._readdirEntries = function (abs, entries) {
|
||||||
|
// if we haven't asked to stat everything, then just
|
||||||
|
// assume that everything in there exists, so we can avoid
|
||||||
|
// having to stat it a second time.
|
||||||
|
if (!this.mark && !this.stat) {
|
||||||
|
for (var i = 0; i < entries.length; i ++) {
|
||||||
|
var e = entries[i]
|
||||||
|
if (abs === '/')
|
||||||
|
e = abs + e
|
||||||
|
else
|
||||||
|
e = abs + '/' + e
|
||||||
|
this.cache[e] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.cache[abs] = entries
|
||||||
|
|
||||||
|
// mark and cache dir-ness
|
||||||
|
return entries
|
||||||
|
}
|
||||||
|
|
||||||
|
GlobSync.prototype._readdirError = function (f, er) {
|
||||||
|
// handle errors, and cache the information
|
||||||
|
switch (er.code) {
|
||||||
|
case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
|
||||||
|
case 'ENOTDIR': // totally normal. means it *does* exist.
|
||||||
|
this.cache[this._makeAbs(f)] = 'FILE'
|
||||||
|
break
|
||||||
|
|
||||||
|
case 'ENOENT': // not terribly unusual
|
||||||
|
case 'ELOOP':
|
||||||
|
case 'ENAMETOOLONG':
|
||||||
|
case 'UNKNOWN':
|
||||||
|
this.cache[this._makeAbs(f)] = false
|
||||||
|
break
|
||||||
|
|
||||||
|
default: // some unusual error. Treat as failure.
|
||||||
|
this.cache[this._makeAbs(f)] = false
|
||||||
|
if (this.strict)
|
||||||
|
throw er
|
||||||
|
if (!this.silent)
|
||||||
|
console.error('glob error', er)
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) {
|
||||||
|
|
||||||
|
var entries = this._readdir(abs, inGlobStar)
|
||||||
|
|
||||||
|
// no entries means not a dir, so it can never have matches
|
||||||
|
// foo.txt/** doesn't match foo.txt
|
||||||
|
if (!entries)
|
||||||
|
return
|
||||||
|
|
||||||
|
// test without the globstar, and with every child both below
|
||||||
|
// and replacing the globstar.
|
||||||
|
var remainWithoutGlobStar = remain.slice(1)
|
||||||
|
var gspref = prefix ? [ prefix ] : []
|
||||||
|
var noGlobStar = gspref.concat(remainWithoutGlobStar)
|
||||||
|
|
||||||
|
// the noGlobStar pattern exits the inGlobStar state
|
||||||
|
this._process(noGlobStar, index, false)
|
||||||
|
|
||||||
|
var len = entries.length
|
||||||
|
var isSym = this.symlinks[abs]
|
||||||
|
|
||||||
|
// If it's a symlink, and we're in a globstar, then stop
|
||||||
|
if (isSym && inGlobStar)
|
||||||
|
return
|
||||||
|
|
||||||
|
for (var i = 0; i < len; i++) {
|
||||||
|
var e = entries[i]
|
||||||
|
if (e.charAt(0) === '.' && !this.dot)
|
||||||
|
continue
|
||||||
|
|
||||||
|
// these two cases enter the inGlobStar state
|
||||||
|
var instead = gspref.concat(entries[i], remainWithoutGlobStar)
|
||||||
|
this._process(instead, index, true)
|
||||||
|
|
||||||
|
var below = gspref.concat(entries[i], remain)
|
||||||
|
this._process(below, index, true)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
GlobSync.prototype._processSimple = function (prefix, index) {
|
||||||
|
// XXX review this. Shouldn't it be doing the mounting etc
|
||||||
|
// before doing stat? kinda weird?
|
||||||
|
var exists = this._stat(prefix)
|
||||||
|
|
||||||
|
if (!this.matches[index])
|
||||||
|
this.matches[index] = Object.create(null)
|
||||||
|
|
||||||
|
// If it doesn't exist, then just mark the lack of results
|
||||||
|
if (!exists)
|
||||||
|
return
|
||||||
|
|
||||||
|
if (prefix && isAbsolute(prefix) && !this.nomount) {
|
||||||
|
var trail = /[\/\\]$/.test(prefix)
|
||||||
|
if (prefix.charAt(0) === '/') {
|
||||||
|
prefix = path.join(this.root, prefix)
|
||||||
|
} else {
|
||||||
|
prefix = path.resolve(this.root, prefix)
|
||||||
|
if (trail)
|
||||||
|
prefix += '/'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (process.platform === 'win32')
|
||||||
|
prefix = prefix.replace(/\\/g, '/')
|
||||||
|
|
||||||
|
// Mark this as a match
|
||||||
|
this.matches[index][prefix] = true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Returns either 'DIR', 'FILE', or false
|
||||||
|
GlobSync.prototype._stat = function (f) {
|
||||||
|
var abs = this._makeAbs(f)
|
||||||
|
var needDir = f.slice(-1) === '/'
|
||||||
|
|
||||||
|
if (f.length > this.maxLength)
|
||||||
|
return false
|
||||||
|
|
||||||
|
if (!this.stat && ownProp(this.cache, abs)) {
|
||||||
|
var c = this.cache[abs]
|
||||||
|
|
||||||
|
if (Array.isArray(c))
|
||||||
|
c = 'DIR'
|
||||||
|
|
||||||
|
// It exists, but maybe not how we need it
|
||||||
|
if (!needDir || c === 'DIR')
|
||||||
|
return c
|
||||||
|
|
||||||
|
if (needDir && c === 'FILE')
|
||||||
|
return false
|
||||||
|
|
||||||
|
// otherwise we have to stat, because maybe c=true
|
||||||
|
// if we know it exists, but not what it is.
|
||||||
|
}
|
||||||
|
|
||||||
|
var exists
|
||||||
|
var stat = this.statCache[abs]
|
||||||
|
if (!stat) {
|
||||||
|
var lstat
|
||||||
|
try {
|
||||||
|
lstat = fs.lstatSync(abs)
|
||||||
|
} catch (er) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
if (lstat.isSymbolicLink()) {
|
||||||
|
try {
|
||||||
|
stat = fs.statSync(abs)
|
||||||
|
} catch (er) {
|
||||||
|
stat = lstat
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
stat = lstat
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.statCache[abs] = stat
|
||||||
|
|
||||||
|
var c = stat.isDirectory() ? 'DIR' : 'FILE'
|
||||||
|
this.cache[abs] = this.cache[abs] || c
|
||||||
|
|
||||||
|
if (needDir && c !== 'DIR')
|
||||||
|
return false
|
||||||
|
|
||||||
|
return c
|
||||||
|
}
|
||||||
|
|
||||||
|
GlobSync.prototype._mark = function (p) {
|
||||||
|
return common.mark(this, p)
|
||||||
|
}
|
||||||
|
|
||||||
|
GlobSync.prototype._makeAbs = function (f) {
|
||||||
|
return common.makeAbs(this, f)
|
||||||
|
}
|
80
app/node_modules/asar/package.json
generated
vendored
Normal file
80
app/node_modules/asar/package.json
generated
vendored
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
{
|
||||||
|
"_from": "asar@^0.14.0",
|
||||||
|
"_id": "asar@0.14.3",
|
||||||
|
"_inBundle": false,
|
||||||
|
"_integrity": "sha512-+hNnVVDmYbv05We/a9knj/98w171+A94A9DNHj+3kXUr3ENTQoSEcfbJRvBBRHyOh4vukBYWujmHvvaMmQoQbg==",
|
||||||
|
"_location": "/asar",
|
||||||
|
"_phantomChildren": {
|
||||||
|
"inflight": "1.0.6",
|
||||||
|
"inherits": "2.0.3",
|
||||||
|
"minimatch": "3.0.4",
|
||||||
|
"once": "1.4.0",
|
||||||
|
"path-is-absolute": "1.0.1"
|
||||||
|
},
|
||||||
|
"_requested": {
|
||||||
|
"type": "range",
|
||||||
|
"registry": true,
|
||||||
|
"raw": "asar@^0.14.0",
|
||||||
|
"name": "asar",
|
||||||
|
"escapedName": "asar",
|
||||||
|
"rawSpec": "^0.14.0",
|
||||||
|
"saveSpec": null,
|
||||||
|
"fetchSpec": "^0.14.0"
|
||||||
|
},
|
||||||
|
"_requiredBy": [
|
||||||
|
"/electron-packager"
|
||||||
|
],
|
||||||
|
"_resolved": "https://registry.npmjs.org/asar/-/asar-0.14.3.tgz",
|
||||||
|
"_shasum": "c72a81542a48e3bca459fb1b07ee2b6adfae265d",
|
||||||
|
"_spec": "asar@^0.14.0",
|
||||||
|
"_where": "E:\\projects\\p\\gitlit\\app\\node_modules\\electron-packager",
|
||||||
|
"bin": {
|
||||||
|
"asar": "./bin/asar.js"
|
||||||
|
},
|
||||||
|
"bugs": {
|
||||||
|
"url": "https://github.com/electron/asar/issues"
|
||||||
|
},
|
||||||
|
"bundleDependencies": false,
|
||||||
|
"dependencies": {
|
||||||
|
"chromium-pickle-js": "^0.2.0",
|
||||||
|
"commander": "^2.9.0",
|
||||||
|
"cuint": "^0.2.1",
|
||||||
|
"glob": "^6.0.4",
|
||||||
|
"minimatch": "^3.0.3",
|
||||||
|
"mkdirp": "^0.5.0",
|
||||||
|
"mksnapshot": "^0.3.0",
|
||||||
|
"tmp": "0.0.28"
|
||||||
|
},
|
||||||
|
"deprecated": false,
|
||||||
|
"description": "Creating Electron app packages",
|
||||||
|
"devDependencies": {
|
||||||
|
"electron": "^1.6.2",
|
||||||
|
"electron-mocha": "^3.4.0",
|
||||||
|
"lodash": "^4.2.1",
|
||||||
|
"mocha": "^2.0.1",
|
||||||
|
"rimraf": "^2.5.1",
|
||||||
|
"standard": "^8.6.0",
|
||||||
|
"xvfb-maybe": "^0.1.3"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=4.6"
|
||||||
|
},
|
||||||
|
"homepage": "https://github.com/electron/asar",
|
||||||
|
"license": "MIT",
|
||||||
|
"main": "./lib/asar.js",
|
||||||
|
"name": "asar",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git+https://github.com/electron/asar.git"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"lint": "standard",
|
||||||
|
"test": "xvfb-maybe electron-mocha --reporter spec && mocha --reporter spec && npm run lint"
|
||||||
|
},
|
||||||
|
"standard": {
|
||||||
|
"env": {
|
||||||
|
"mocha": true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"version": "0.14.3"
|
||||||
|
}
|
18
app/node_modules/asar/snapcraft.yaml
generated
vendored
Normal file
18
app/node_modules/asar/snapcraft.yaml
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
name: asar
|
||||||
|
version: git
|
||||||
|
summary: Manipulate asar archive files
|
||||||
|
description: |
|
||||||
|
Asar is a simple extensive archive format, it works like tar that
|
||||||
|
concatenates all files together without compression, while having
|
||||||
|
random access support.
|
||||||
|
|
||||||
|
confinement: classic
|
||||||
|
|
||||||
|
parts:
|
||||||
|
asar:
|
||||||
|
plugin: nodejs
|
||||||
|
source: .
|
||||||
|
|
||||||
|
apps:
|
||||||
|
asar:
|
||||||
|
command: lib/node_modules/asar/bin/asar.js
|
22
app/node_modules/author-regex/LICENSE
generated
vendored
Normal file
22
app/node_modules/author-regex/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
Copyright (c) 2014 Jon Schlinkert, contributors.
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person
|
||||||
|
obtaining a copy of this software and associated documentation
|
||||||
|
files (the "Software"), to deal in the Software without
|
||||||
|
restriction, including without limitation the rights to use,
|
||||||
|
copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the
|
||||||
|
Software is furnished to do so, subject to the following
|
||||||
|
conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be
|
||||||
|
included in all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||||
|
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
||||||
|
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||||
|
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
||||||
|
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||||
|
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||||
|
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||||
|
OTHER DEALINGS IN THE SOFTWARE.
|
69
app/node_modules/author-regex/README.md
generated
vendored
Normal file
69
app/node_modules/author-regex/README.md
generated
vendored
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
# author-regex [](http://badge.fury.io/js/author-regex)
|
||||||
|
|
||||||
|
|
||||||
|
> Regular expression for parsing an `author` string into an object following npm conventions.
|
||||||
|
|
||||||
|
This the regex used by [parse-authors](https://github.com/jonschlinkert/parse-authors).
|
||||||
|
|
||||||
|
|
||||||
|
**Related**
|
||||||
|
|
||||||
|
- [parse-author](https://github.com/jonschlinkert/parse-author)
|
||||||
|
- [parse-authors](https://github.com/jonschlinkert/parse-authors)
|
||||||
|
|
||||||
|
|
||||||
|
## Install
|
||||||
|
#### Install with [npm](npmjs.org)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm i author-regex --save
|
||||||
|
```
|
||||||
|
#### Install with [bower](https://github.com/bower/bower)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
bower install author-regex --save
|
||||||
|
```
|
||||||
|
|
||||||
|
## Tests
|
||||||
|
|
||||||
|
Run
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm test
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```js
|
||||||
|
var re = require('author-regex');
|
||||||
|
|
||||||
|
function authors(str) {
|
||||||
|
return re().exec(str);
|
||||||
|
}
|
||||||
|
console.log(author('Jon Schlinkert <foo@bar.com> (https://github.com/jonschlinkert)'));
|
||||||
|
```
|
||||||
|
Returns:
|
||||||
|
|
||||||
|
```js
|
||||||
|
[ 'Jon Schlinkert <foo@bar.com> (https://github.com/jonschlinkert)',
|
||||||
|
'Jon Schlinkert',
|
||||||
|
'foo@bar.com',
|
||||||
|
'https://github.com/jonschlinkert',
|
||||||
|
index: 0,
|
||||||
|
input: 'Jon Schlinkert <foo@bar.com> (https://github.com/jonschlinkert)' ]
|
||||||
|
```
|
||||||
|
|
||||||
|
## Author
|
||||||
|
|
||||||
|
**Jon Schlinkert**
|
||||||
|
|
||||||
|
+ [github/jonschlinkert](https://github.com/jonschlinkert)
|
||||||
|
+ [twitter/jonschlinkert](http://twitter.com/jonschlinkert)
|
||||||
|
|
||||||
|
## License
|
||||||
|
Copyright (c) 2014 Jon Schlinkert, contributors.
|
||||||
|
Released under the MIT license
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
_This file was generated by [verb-cli](https://github.com/assemble/verb-cli) on September 29, 2014._
|
13
app/node_modules/author-regex/index.js
generated
vendored
Normal file
13
app/node_modules/author-regex/index.js
generated
vendored
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
/*!
|
||||||
|
* author-regex <https://github.com/jonschlinkert/author-regex>
|
||||||
|
*
|
||||||
|
* Copyright (c) 2014, 2017, Jon Schlinkert.
|
||||||
|
* Released under the MIT License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
'use strict';
|
||||||
|
|
||||||
|
module.exports = function() {
|
||||||
|
return /^\s*([^<(]*?)\s*([<(]([^>)]*?)[>)])?\s*([<(]([^>)]*?)[>)])*\s*$/;
|
||||||
|
};
|
||||||
|
|
88
app/node_modules/author-regex/package.json
generated
vendored
Normal file
88
app/node_modules/author-regex/package.json
generated
vendored
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
{
|
||||||
|
"_from": "author-regex@^1.0.0",
|
||||||
|
"_id": "author-regex@1.0.0",
|
||||||
|
"_inBundle": false,
|
||||||
|
"_integrity": "sha1-0IiFvmubv5Q5/gh8dihyRfCoFFA=",
|
||||||
|
"_location": "/author-regex",
|
||||||
|
"_phantomChildren": {},
|
||||||
|
"_requested": {
|
||||||
|
"type": "range",
|
||||||
|
"registry": true,
|
||||||
|
"raw": "author-regex@^1.0.0",
|
||||||
|
"name": "author-regex",
|
||||||
|
"escapedName": "author-regex",
|
||||||
|
"rawSpec": "^1.0.0",
|
||||||
|
"saveSpec": null,
|
||||||
|
"fetchSpec": "^1.0.0"
|
||||||
|
},
|
||||||
|
"_requiredBy": [
|
||||||
|
"/parse-author"
|
||||||
|
],
|
||||||
|
"_resolved": "https://registry.npmjs.org/author-regex/-/author-regex-1.0.0.tgz",
|
||||||
|
"_shasum": "d08885be6b9bbf9439fe087c76287245f0a81450",
|
||||||
|
"_spec": "author-regex@^1.0.0",
|
||||||
|
"_where": "E:\\projects\\p\\gitlit\\app\\node_modules\\parse-author",
|
||||||
|
"author": {
|
||||||
|
"name": "Jon Schlinkert",
|
||||||
|
"url": "https://github.com/jonschlinkert"
|
||||||
|
},
|
||||||
|
"bugs": {
|
||||||
|
"url": "https://github.com/jonschlinkert/author-regex/issues"
|
||||||
|
},
|
||||||
|
"bundleDependencies": false,
|
||||||
|
"deprecated": false,
|
||||||
|
"description": "Regular expression for parsing an `author` string into an object following npm conventions.",
|
||||||
|
"devDependencies": {
|
||||||
|
"gulp-format-md": "^0.1.11",
|
||||||
|
"mocha": "^3.2.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.8"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"index.js"
|
||||||
|
],
|
||||||
|
"homepage": "https://github.com/jonschlinkert/author-regex",
|
||||||
|
"keywords": [
|
||||||
|
"author",
|
||||||
|
"authors",
|
||||||
|
"exec",
|
||||||
|
"expression",
|
||||||
|
"extract",
|
||||||
|
"maintainer",
|
||||||
|
"maintainers",
|
||||||
|
"match",
|
||||||
|
"package",
|
||||||
|
"parse",
|
||||||
|
"person",
|
||||||
|
"pkg",
|
||||||
|
"re",
|
||||||
|
"regex",
|
||||||
|
"regexp",
|
||||||
|
"regular"
|
||||||
|
],
|
||||||
|
"license": "MIT",
|
||||||
|
"main": "index.js",
|
||||||
|
"name": "author-regex",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git+https://github.com/jonschlinkert/author-regex.git"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"test": "mocha"
|
||||||
|
},
|
||||||
|
"verb": {
|
||||||
|
"toc": false,
|
||||||
|
"layout": "default",
|
||||||
|
"tasks": [
|
||||||
|
"readme"
|
||||||
|
],
|
||||||
|
"plugins": [
|
||||||
|
"gulp-format-md"
|
||||||
|
],
|
||||||
|
"lint": {
|
||||||
|
"reflinks": true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"version": "1.0.0"
|
||||||
|
}
|
21
app/node_modules/base64-js/LICENSE
generated
vendored
Normal file
21
app/node_modules/base64-js/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2014
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
32
app/node_modules/base64-js/README.md
generated
vendored
Normal file
32
app/node_modules/base64-js/README.md
generated
vendored
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
base64-js
|
||||||
|
=========
|
||||||
|
|
||||||
|
`base64-js` does basic base64 encoding/decoding in pure JS.
|
||||||
|
|
||||||
|
[](http://travis-ci.org/beatgammit/base64-js)
|
||||||
|
|
||||||
|
[](https://ci.testling.com/beatgammit/base64-js)
|
||||||
|
|
||||||
|
Many browsers already have base64 encoding/decoding functionality, but it is for text data, not all-purpose binary data.
|
||||||
|
|
||||||
|
Sometimes encoding/decoding binary data in the browser is useful, and that is what this module does.
|
||||||
|
|
||||||
|
## install
|
||||||
|
|
||||||
|
With [npm](https://npmjs.org) do:
|
||||||
|
|
||||||
|
`npm install base64-js`
|
||||||
|
|
||||||
|
## methods
|
||||||
|
|
||||||
|
`var base64 = require('base64-js')`
|
||||||
|
|
||||||
|
`base64` has three exposed functions, `byteLength`, `toByteArray` and `fromByteArray`, which both take a single argument.
|
||||||
|
|
||||||
|
* `byteLength` - Takes a base64 string and returns length of byte array
|
||||||
|
* `toByteArray` - Takes a base64 string and returns a byte array
|
||||||
|
* `fromByteArray` - Takes a byte array and returns a base64 string
|
||||||
|
|
||||||
|
## license
|
||||||
|
|
||||||
|
MIT
|
1
app/node_modules/base64-js/base64js.min.js
generated
vendored
Normal file
1
app/node_modules/base64-js/base64js.min.js
generated
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
(function(r){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=r()}else if(typeof define==="function"&&define.amd){define([],r)}else{var e;if(typeof window!=="undefined"){e=window}else if(typeof global!=="undefined"){e=global}else if(typeof self!=="undefined"){e=self}else{e=this}e.base64js=r()}})(function(){var r,e,t;return function r(e,t,n){function o(i,a){if(!t[i]){if(!e[i]){var u=typeof require=="function"&&require;if(!a&&u)return u(i,!0);if(f)return f(i,!0);var d=new Error("Cannot find module '"+i+"'");throw d.code="MODULE_NOT_FOUND",d}var c=t[i]={exports:{}};e[i][0].call(c.exports,function(r){var t=e[i][1][r];return o(t?t:r)},c,c.exports,r,e,t,n)}return t[i].exports}var f=typeof require=="function"&&require;for(var i=0;i<n.length;i++)o(n[i]);return o}({"/":[function(r,e,t){"use strict";t.byteLength=c;t.toByteArray=v;t.fromByteArray=s;var n=[];var o=[];var f=typeof Uint8Array!=="undefined"?Uint8Array:Array;var i="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";for(var a=0,u=i.length;a<u;++a){n[a]=i[a];o[i.charCodeAt(a)]=a}o["-".charCodeAt(0)]=62;o["_".charCodeAt(0)]=63;function d(r){var e=r.length;if(e%4>0){throw new Error("Invalid string. 
Length must be a multiple of 4")}return r[e-2]==="="?2:r[e-1]==="="?1:0}function c(r){return r.length*3/4-d(r)}function v(r){var e,t,n,i,a,u;var c=r.length;a=d(r);u=new f(c*3/4-a);n=a>0?c-4:c;var v=0;for(e=0,t=0;e<n;e+=4,t+=3){i=o[r.charCodeAt(e)]<<18|o[r.charCodeAt(e+1)]<<12|o[r.charCodeAt(e+2)]<<6|o[r.charCodeAt(e+3)];u[v++]=i>>16&255;u[v++]=i>>8&255;u[v++]=i&255}if(a===2){i=o[r.charCodeAt(e)]<<2|o[r.charCodeAt(e+1)]>>4;u[v++]=i&255}else if(a===1){i=o[r.charCodeAt(e)]<<10|o[r.charCodeAt(e+1)]<<4|o[r.charCodeAt(e+2)]>>2;u[v++]=i>>8&255;u[v++]=i&255}return u}function l(r){return n[r>>18&63]+n[r>>12&63]+n[r>>6&63]+n[r&63]}function h(r,e,t){var n;var o=[];for(var f=e;f<t;f+=3){n=(r[f]<<16)+(r[f+1]<<8)+r[f+2];o.push(l(n))}return o.join("")}function s(r){var e;var t=r.length;var o=t%3;var f="";var i=[];var a=16383;for(var u=0,d=t-o;u<d;u+=a){i.push(h(r,u,u+a>d?d:u+a))}if(o===1){e=r[t-1];f+=n[e>>2];f+=n[e<<4&63];f+="=="}else if(o===2){e=(r[t-2]<<8)+r[t-1];f+=n[e>>10];f+=n[e>>4&63];f+=n[e<<2&63];f+="="}i.push(f);return i.join("")}},{}]},{},[])("/")});
|
114
app/node_modules/base64-js/index.js
generated
vendored
Normal file
114
app/node_modules/base64-js/index.js
generated
vendored
Normal file
@@ -0,0 +1,114 @@
|
|||||||
|
'use strict'
|
||||||
|
|
||||||
|
exports.byteLength = byteLength
|
||||||
|
exports.toByteArray = toByteArray
|
||||||
|
exports.fromByteArray = fromByteArray
|
||||||
|
|
||||||
|
var lookup = []
|
||||||
|
var revLookup = []
|
||||||
|
var Arr = typeof Uint8Array !== 'undefined' ? Uint8Array : Array
|
||||||
|
|
||||||
|
var code = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
|
||||||
|
for (var i = 0, len = code.length; i < len; ++i) {
|
||||||
|
lookup[i] = code[i]
|
||||||
|
revLookup[code.charCodeAt(i)] = i
|
||||||
|
}
|
||||||
|
|
||||||
|
revLookup['-'.charCodeAt(0)] = 62
|
||||||
|
revLookup['_'.charCodeAt(0)] = 63
|
||||||
|
|
||||||
|
function placeHoldersCount (b64) {
|
||||||
|
var len = b64.length
|
||||||
|
if (len % 4 > 0) {
|
||||||
|
throw new Error('Invalid string. Length must be a multiple of 4')
|
||||||
|
}
|
||||||
|
|
||||||
|
// the number of equal signs (place holders)
|
||||||
|
// if there are two placeholders, than the two characters before it
|
||||||
|
// represent one byte
|
||||||
|
// if there is only one, then the three characters before it represent 2 bytes
|
||||||
|
// this is just a cheap hack to not do indexOf twice
|
||||||
|
return b64[len - 2] === '=' ? 2 : b64[len - 1] === '=' ? 1 : 0
|
||||||
|
}
|
||||||
|
|
||||||
|
function byteLength (b64) {
|
||||||
|
// base64 is 4/3 + up to two characters of the original data
|
||||||
|
return b64.length * 3 / 4 - placeHoldersCount(b64)
|
||||||
|
}
|
||||||
|
|
||||||
|
function toByteArray (b64) {
|
||||||
|
var i, j, l, tmp, placeHolders, arr
|
||||||
|
var len = b64.length
|
||||||
|
placeHolders = placeHoldersCount(b64)
|
||||||
|
|
||||||
|
arr = new Arr(len * 3 / 4 - placeHolders)
|
||||||
|
|
||||||
|
// if there are placeholders, only get up to the last complete 4 chars
|
||||||
|
l = placeHolders > 0 ? len - 4 : len
|
||||||
|
|
||||||
|
var L = 0
|
||||||
|
|
||||||
|
for (i = 0, j = 0; i < l; i += 4, j += 3) {
|
||||||
|
tmp = (revLookup[b64.charCodeAt(i)] << 18) | (revLookup[b64.charCodeAt(i + 1)] << 12) | (revLookup[b64.charCodeAt(i + 2)] << 6) | revLookup[b64.charCodeAt(i + 3)]
|
||||||
|
arr[L++] = (tmp >> 16) & 0xFF
|
||||||
|
arr[L++] = (tmp >> 8) & 0xFF
|
||||||
|
arr[L++] = tmp & 0xFF
|
||||||
|
}
|
||||||
|
|
||||||
|
if (placeHolders === 2) {
|
||||||
|
tmp = (revLookup[b64.charCodeAt(i)] << 2) | (revLookup[b64.charCodeAt(i + 1)] >> 4)
|
||||||
|
arr[L++] = tmp & 0xFF
|
||||||
|
} else if (placeHolders === 1) {
|
||||||
|
tmp = (revLookup[b64.charCodeAt(i)] << 10) | (revLookup[b64.charCodeAt(i + 1)] << 4) | (revLookup[b64.charCodeAt(i + 2)] >> 2)
|
||||||
|
arr[L++] = (tmp >> 8) & 0xFF
|
||||||
|
arr[L++] = tmp & 0xFF
|
||||||
|
}
|
||||||
|
|
||||||
|
return arr
|
||||||
|
}
|
||||||
|
|
||||||
|
function tripletToBase64 (num) {
|
||||||
|
return lookup[num >> 18 & 0x3F] + lookup[num >> 12 & 0x3F] + lookup[num >> 6 & 0x3F] + lookup[num & 0x3F]
|
||||||
|
}
|
||||||
|
|
||||||
|
function encodeChunk (uint8, start, end) {
|
||||||
|
var tmp
|
||||||
|
var output = []
|
||||||
|
for (var i = start; i < end; i += 3) {
|
||||||
|
tmp = (uint8[i] << 16) + (uint8[i + 1] << 8) + (uint8[i + 2])
|
||||||
|
output.push(tripletToBase64(tmp))
|
||||||
|
}
|
||||||
|
return output.join('')
|
||||||
|
}
|
||||||
|
|
||||||
|
function fromByteArray (uint8) {
|
||||||
|
var tmp
|
||||||
|
var len = uint8.length
|
||||||
|
var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes
|
||||||
|
var output = ''
|
||||||
|
var parts = []
|
||||||
|
var maxChunkLength = 16383 // must be multiple of 3
|
||||||
|
|
||||||
|
// go through the array every three bytes, we'll deal with trailing stuff later
|
||||||
|
for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
|
||||||
|
parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength)))
|
||||||
|
}
|
||||||
|
|
||||||
|
// pad the end with zeros, but make sure to not forget the extra bytes
|
||||||
|
if (extraBytes === 1) {
|
||||||
|
tmp = uint8[len - 1]
|
||||||
|
output += lookup[tmp >> 2]
|
||||||
|
output += lookup[(tmp << 4) & 0x3F]
|
||||||
|
output += '=='
|
||||||
|
} else if (extraBytes === 2) {
|
||||||
|
tmp = (uint8[len - 2] << 8) + (uint8[len - 1])
|
||||||
|
output += lookup[tmp >> 10]
|
||||||
|
output += lookup[(tmp >> 4) & 0x3F]
|
||||||
|
output += lookup[(tmp << 2) & 0x3F]
|
||||||
|
output += '='
|
||||||
|
}
|
||||||
|
|
||||||
|
parts.push(output)
|
||||||
|
|
||||||
|
return parts.join('')
|
||||||
|
}
|
65
app/node_modules/base64-js/package.json
generated
vendored
Normal file
65
app/node_modules/base64-js/package.json
generated
vendored
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
{
|
||||||
|
"_from": "base64-js@1.2.0",
|
||||||
|
"_id": "base64-js@1.2.0",
|
||||||
|
"_inBundle": false,
|
||||||
|
"_integrity": "sha1-o5mS1yNYSBGYK+XikLtqU9hnAPE=",
|
||||||
|
"_location": "/base64-js",
|
||||||
|
"_phantomChildren": {},
|
||||||
|
"_requested": {
|
||||||
|
"type": "version",
|
||||||
|
"registry": true,
|
||||||
|
"raw": "base64-js@1.2.0",
|
||||||
|
"name": "base64-js",
|
||||||
|
"escapedName": "base64-js",
|
||||||
|
"rawSpec": "1.2.0",
|
||||||
|
"saveSpec": null,
|
||||||
|
"fetchSpec": "1.2.0"
|
||||||
|
},
|
||||||
|
"_requiredBy": [
|
||||||
|
"/plist"
|
||||||
|
],
|
||||||
|
"_resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.2.0.tgz",
|
||||||
|
"_shasum": "a39992d723584811982be5e290bb6a53d86700f1",
|
||||||
|
"_spec": "base64-js@1.2.0",
|
||||||
|
"_where": "E:\\projects\\p\\gitlit\\app\\node_modules\\plist",
|
||||||
|
"author": {
|
||||||
|
"name": "T. Jameson Little",
|
||||||
|
"email": "t.jameson.little@gmail.com"
|
||||||
|
},
|
||||||
|
"bugs": {
|
||||||
|
"url": "https://github.com/beatgammit/base64-js/issues"
|
||||||
|
},
|
||||||
|
"bundleDependencies": false,
|
||||||
|
"deprecated": false,
|
||||||
|
"description": "Base64 encoding/decoding in pure JS",
|
||||||
|
"devDependencies": {
|
||||||
|
"benchmark": "^2.1.0",
|
||||||
|
"browserify": "^13.0.0",
|
||||||
|
"standard": "*",
|
||||||
|
"tape": "4.x",
|
||||||
|
"uglify-js": "^2.6.2"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"test",
|
||||||
|
"index.js",
|
||||||
|
"base64js.min.js"
|
||||||
|
],
|
||||||
|
"homepage": "https://github.com/beatgammit/base64-js",
|
||||||
|
"keywords": [
|
||||||
|
"base64"
|
||||||
|
],
|
||||||
|
"license": "MIT",
|
||||||
|
"main": "index.js",
|
||||||
|
"name": "base64-js",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git://github.com/beatgammit/base64-js.git"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"build": "browserify -s base64js -r ./ | uglifyjs -m > base64js.min.js",
|
||||||
|
"lint": "standard",
|
||||||
|
"test": "npm run lint && npm run unit",
|
||||||
|
"unit": "tape test/*.js"
|
||||||
|
},
|
||||||
|
"version": "1.2.0"
|
||||||
|
}
|
24
app/node_modules/base64-js/test/big-data.js
generated
vendored
Normal file
24
app/node_modules/base64-js/test/big-data.js
generated
vendored
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
var test = require('tape')
|
||||||
|
var b64 = require('../')
|
||||||
|
|
||||||
|
test('convert big data to base64', function (t) {
|
||||||
|
var b64str, arr, i, length
|
||||||
|
var big = new Uint8Array(64 * 1024 * 1024)
|
||||||
|
for (i = 0, length = big.length; i < length; ++i) {
|
||||||
|
big[i] = i % 256
|
||||||
|
}
|
||||||
|
b64str = b64.fromByteArray(big)
|
||||||
|
arr = b64.toByteArray(b64str)
|
||||||
|
t.ok(equal(arr, big))
|
||||||
|
t.end()
|
||||||
|
})
|
||||||
|
|
||||||
|
function equal (a, b) {
|
||||||
|
var i
|
||||||
|
var length = a.length
|
||||||
|
if (length !== b.length) return false
|
||||||
|
for (i = 0; i < length; ++i) {
|
||||||
|
if (a[i] !== b[i]) return false
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
48
app/node_modules/base64-js/test/convert.js
generated
vendored
Normal file
48
app/node_modules/base64-js/test/convert.js
generated
vendored
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
var test = require('tape')
|
||||||
|
var b64 = require('../')
|
||||||
|
var checks = [
|
||||||
|
'a',
|
||||||
|
'aa',
|
||||||
|
'aaa',
|
||||||
|
'hi',
|
||||||
|
'hi!',
|
||||||
|
'hi!!',
|
||||||
|
'sup',
|
||||||
|
'sup?',
|
||||||
|
'sup?!'
|
||||||
|
]
|
||||||
|
|
||||||
|
test('convert to base64 and back', function (t) {
|
||||||
|
t.plan(checks.length * 2)
|
||||||
|
|
||||||
|
for (var i = 0; i < checks.length; i++) {
|
||||||
|
var check = checks[i]
|
||||||
|
var b64Str, arr, str
|
||||||
|
|
||||||
|
b64Str = b64.fromByteArray(map(check, function (char) { return char.charCodeAt(0) }))
|
||||||
|
|
||||||
|
arr = b64.toByteArray(b64Str)
|
||||||
|
str = map(arr, function (byte) { return String.fromCharCode(byte) }).join('')
|
||||||
|
|
||||||
|
t.equal(check, str, 'Checked ' + check)
|
||||||
|
t.equal(b64.byteLength(b64Str), arr.length, 'Checked length for ' + check)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
function map (arr, callback) {
|
||||||
|
var res = []
|
||||||
|
var kValue, mappedValue
|
||||||
|
|
||||||
|
for (var k = 0, len = arr.length; k < len; k++) {
|
||||||
|
if ((typeof arr === 'string' && !!arr.charAt(k))) {
|
||||||
|
kValue = arr.charAt(k)
|
||||||
|
mappedValue = callback(kValue, k, arr)
|
||||||
|
res[k] = mappedValue
|
||||||
|
} else if (typeof arr !== 'string' && k in arr) {
|
||||||
|
kValue = arr[k]
|
||||||
|
mappedValue = callback(kValue, k, arr)
|
||||||
|
res[k] = mappedValue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return res
|
||||||
|
}
|
18
app/node_modules/base64-js/test/url-safe.js
generated
vendored
Normal file
18
app/node_modules/base64-js/test/url-safe.js
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
var test = require('tape')
|
||||||
|
var b64 = require('../')
|
||||||
|
|
||||||
|
test('decode url-safe style base64 strings', function (t) {
|
||||||
|
var expected = [0xff, 0xff, 0xbe, 0xff, 0xef, 0xbf, 0xfb, 0xef, 0xff]
|
||||||
|
|
||||||
|
var actual = b64.toByteArray('//++/++/++//')
|
||||||
|
for (var i = 0; i < actual.length; i++) {
|
||||||
|
t.equal(actual[i], expected[i])
|
||||||
|
}
|
||||||
|
|
||||||
|
actual = b64.toByteArray('__--_--_--__')
|
||||||
|
for (i = 0; i < actual.length; i++) {
|
||||||
|
t.equal(actual[i], expected[i])
|
||||||
|
}
|
||||||
|
|
||||||
|
t.end()
|
||||||
|
})
|
1
app/node_modules/binary/.npmignore
generated
vendored
Normal file
1
app/node_modules/binary/.npmignore
generated
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
node_modules
|
4
app/node_modules/binary/.travis.yml
generated
vendored
Normal file
4
app/node_modules/binary/.travis.yml
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
language: node_js
|
||||||
|
node_js:
|
||||||
|
- 0.4
|
||||||
|
- 0.6
|
177
app/node_modules/binary/README.markdown
generated
vendored
Normal file
177
app/node_modules/binary/README.markdown
generated
vendored
Normal file
@@ -0,0 +1,177 @@
|
|||||||
|
binary
|
||||||
|
======
|
||||||
|
|
||||||
|
Unpack multibyte binary values from buffers and streams.
|
||||||
|
You can specify the endianness and signedness of the fields to be unpacked too.
|
||||||
|
|
||||||
|
This module is a cleaner and more complete version of
|
||||||
|
[bufferlist](https://github.com/substack/node-bufferlist)'s binary module that
|
||||||
|
runs on pre-allocated buffers instead of a linked list.
|
||||||
|
|
||||||
|
[](http://travis-ci.org/substack/node-binary)
|
||||||
|
|
||||||
|
examples
|
||||||
|
========
|
||||||
|
|
||||||
|
stream.js
|
||||||
|
---------
|
||||||
|
|
||||||
|
``` js
|
||||||
|
var binary = require('binary');
|
||||||
|
|
||||||
|
var ws = binary()
|
||||||
|
.word32lu('x')
|
||||||
|
.word16bs('y')
|
||||||
|
.word16bu('z')
|
||||||
|
.tap(function (vars) {
|
||||||
|
console.dir(vars);
|
||||||
|
})
|
||||||
|
;
|
||||||
|
process.stdin.pipe(ws);
|
||||||
|
process.stdin.resume();
|
||||||
|
```
|
||||||
|
|
||||||
|
output:
|
||||||
|
|
||||||
|
```
|
||||||
|
$ node examples/stream.js
|
||||||
|
abcdefgh
|
||||||
|
{ x: 1684234849, y: 25958, z: 26472 }
|
||||||
|
^D
|
||||||
|
```
|
||||||
|
|
||||||
|
parse.js
|
||||||
|
--------
|
||||||
|
|
||||||
|
``` js
|
||||||
|
var buf = new Buffer([ 97, 98, 99, 100, 101, 102, 0 ]);
|
||||||
|
|
||||||
|
var binary = require('binary');
|
||||||
|
var vars = binary.parse(buf)
|
||||||
|
.word16ls('ab')
|
||||||
|
.word32bu('cf')
|
||||||
|
.word8('x')
|
||||||
|
.vars
|
||||||
|
;
|
||||||
|
console.dir(vars);
|
||||||
|
```
|
||||||
|
|
||||||
|
output:
|
||||||
|
|
||||||
|
```
|
||||||
|
{ ab: 25185, cf: 1667523942, x: 0 }
|
||||||
|
```
|
||||||
|
|
||||||
|
methods
|
||||||
|
=======
|
||||||
|
|
||||||
|
`var binary = require('binary')`
|
||||||
|
|
||||||
|
var b = binary()
|
||||||
|
----------------
|
||||||
|
|
||||||
|
Return a new writable stream `b` that has the chainable methods documented below
|
||||||
|
for buffering binary input.
|
||||||
|
|
||||||
|
binary.parse(buf)
|
||||||
|
-----------------
|
||||||
|
|
||||||
|
Parse a static buffer in one pass. Returns a chainable interface with the
|
||||||
|
methods below plus a `vars` field to get at the variable stash as the last item
|
||||||
|
in a chain.
|
||||||
|
|
||||||
|
In parse mode, methods will set their keys to `null` if the buffer isn't big
|
||||||
|
enough except `buffer()` and `scan()` which read up up to the end of the buffer
|
||||||
|
and stop.
|
||||||
|
|
||||||
|
b.word{8,16,32,64}{l,b}{e,u,s}(key)
|
||||||
|
-----------------------------------
|
||||||
|
|
||||||
|
Parse bytes in the buffer or stream given:
|
||||||
|
|
||||||
|
* number of bits
|
||||||
|
* endianness ( l : little, b : big ),
|
||||||
|
* signedness ( u and e : unsigned, s : signed )
|
||||||
|
|
||||||
|
These functions won't start parsing until all previous parser functions have run
|
||||||
|
and the data is available.
|
||||||
|
|
||||||
|
The result of the parse goes into the variable stash at `key`.
|
||||||
|
If `key` has dots (`.`s), it refers to a nested address. If parent container
|
||||||
|
values don't exist they will be created automatically, so for instance you can
|
||||||
|
assign into `dst.addr` and `dst.port` and the `dst` key in the variable stash
|
||||||
|
will be `{ addr : x, port : y }` afterwards.
|
||||||
|
|
||||||
|
b.buffer(key, size)
|
||||||
|
-------------------
|
||||||
|
|
||||||
|
Take `size` bytes directly off the buffer stream, putting the resulting buffer
|
||||||
|
slice in the variable stash at `key`. If `size` is a string, use the value at
|
||||||
|
`vars[size]`. The key follows the same dotted address rules as the word
|
||||||
|
functions.
|
||||||
|
|
||||||
|
b.scan(key, buffer)
|
||||||
|
-------------------
|
||||||
|
|
||||||
|
Search for `buffer` in the stream and store all the intervening data in the
|
||||||
|
stash at at `key`, excluding the search buffer. If `buffer` passed as a string,
|
||||||
|
it will be converted into a Buffer internally.
|
||||||
|
|
||||||
|
For example, to read in a line you can just do:
|
||||||
|
|
||||||
|
``` js
|
||||||
|
var b = binary()
|
||||||
|
.scan('line', new Buffer('\r\n'))
|
||||||
|
.tap(function (vars) {
|
||||||
|
console.log(vars.line)
|
||||||
|
})
|
||||||
|
;
|
||||||
|
stream.pipe(b);
|
||||||
|
```
|
||||||
|
|
||||||
|
b.tap(cb)
|
||||||
|
---------
|
||||||
|
|
||||||
|
The callback `cb` is provided with the variable stash from all the previous
|
||||||
|
actions once they've all finished.
|
||||||
|
|
||||||
|
You can nest additional actions onto `this` inside the callback.
|
||||||
|
|
||||||
|
b.into(key, cb)
|
||||||
|
---------------
|
||||||
|
|
||||||
|
Like `.tap()`, except all nested actions will assign into a `key` in the `vars`
|
||||||
|
stash.
|
||||||
|
|
||||||
|
b.loop(cb)
|
||||||
|
----------
|
||||||
|
|
||||||
|
Loop, each time calling `cb(end, vars)` for function `end` and the variable
|
||||||
|
stash with `this` set to a new chain for nested parsing. The loop terminates
|
||||||
|
once `end` is called.
|
||||||
|
|
||||||
|
b.flush()
|
||||||
|
---------
|
||||||
|
|
||||||
|
Clear the variable stash entirely.
|
||||||
|
|
||||||
|
installation
|
||||||
|
============
|
||||||
|
|
||||||
|
To install with [npm](http://github.com/isaacs/npm):
|
||||||
|
|
||||||
|
```
|
||||||
|
npm install binary
|
||||||
|
```
|
||||||
|
|
||||||
|
notes
|
||||||
|
=====
|
||||||
|
|
||||||
|
The word64 functions will only return approximations since javascript uses ieee
|
||||||
|
floating point for all number types. Mind the loss of precision.
|
||||||
|
|
||||||
|
license
|
||||||
|
=======
|
||||||
|
|
||||||
|
MIT
|
||||||
|
|
11
app/node_modules/binary/example/buf.js
generated
vendored
Normal file
11
app/node_modules/binary/example/buf.js
generated
vendored
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
var buf = new Buffer([ 97, 98, 99, 100, 101, 102, 0 ]);
|
||||||
|
|
||||||
|
var binary = require('binary');
|
||||||
|
binary(buf)
|
||||||
|
.word16ls('ab')
|
||||||
|
.word32bu('cf')
|
||||||
|
.word8('x')
|
||||||
|
.tap(function (vars) {
|
||||||
|
console.dir(vars);
|
||||||
|
})
|
||||||
|
;
|
10
app/node_modules/binary/example/parse.js
generated
vendored
Normal file
10
app/node_modules/binary/example/parse.js
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
var buf = new Buffer([ 97, 98, 99, 100, 101, 102, 0 ]);
|
||||||
|
|
||||||
|
var binary = require('binary');
|
||||||
|
var vars = binary.parse(buf)
|
||||||
|
.word16ls('ab')
|
||||||
|
.word32bu('cf')
|
||||||
|
.word8('x')
|
||||||
|
.vars
|
||||||
|
;
|
||||||
|
console.dir(vars);
|
12
app/node_modules/binary/example/stream.js
generated
vendored
Normal file
12
app/node_modules/binary/example/stream.js
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
var binary = require('binary');
|
||||||
|
|
||||||
|
var ws = binary()
|
||||||
|
.word32lu('x')
|
||||||
|
.word16bs('y')
|
||||||
|
.word16bu('z')
|
||||||
|
.tap(function (vars) {
|
||||||
|
console.dir(vars);
|
||||||
|
})
|
||||||
|
;
|
||||||
|
process.stdin.pipe(ws);
|
||||||
|
process.stdin.resume();
|
397
app/node_modules/binary/index.js
generated
vendored
Normal file
397
app/node_modules/binary/index.js
generated
vendored
Normal file
@@ -0,0 +1,397 @@
|
|||||||
|
var Chainsaw = require('chainsaw');
|
||||||
|
var EventEmitter = require('events').EventEmitter;
|
||||||
|
var Buffers = require('buffers');
|
||||||
|
var Vars = require('./lib/vars.js');
|
||||||
|
var Stream = require('stream').Stream;
|
||||||
|
|
||||||
|
exports = module.exports = function (bufOrEm, eventName) {
|
||||||
|
if (Buffer.isBuffer(bufOrEm)) {
|
||||||
|
return exports.parse(bufOrEm);
|
||||||
|
}
|
||||||
|
|
||||||
|
var s = exports.stream();
|
||||||
|
if (bufOrEm && bufOrEm.pipe) {
|
||||||
|
bufOrEm.pipe(s);
|
||||||
|
}
|
||||||
|
else if (bufOrEm) {
|
||||||
|
bufOrEm.on(eventName || 'data', function (buf) {
|
||||||
|
s.write(buf);
|
||||||
|
});
|
||||||
|
|
||||||
|
bufOrEm.on('end', function () {
|
||||||
|
s.end();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return s;
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.stream = function (input) {
|
||||||
|
if (input) return exports.apply(null, arguments);
|
||||||
|
|
||||||
|
var pending = null;
|
||||||
|
function getBytes (bytes, cb, skip) {
|
||||||
|
pending = {
|
||||||
|
bytes : bytes,
|
||||||
|
skip : skip,
|
||||||
|
cb : function (buf) {
|
||||||
|
pending = null;
|
||||||
|
cb(buf);
|
||||||
|
},
|
||||||
|
};
|
||||||
|
dispatch();
|
||||||
|
}
|
||||||
|
|
||||||
|
var offset = null;
|
||||||
|
function dispatch () {
|
||||||
|
if (!pending) {
|
||||||
|
if (caughtEnd) done = true;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (typeof pending === 'function') {
|
||||||
|
pending();
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
var bytes = offset + pending.bytes;
|
||||||
|
|
||||||
|
if (buffers.length >= bytes) {
|
||||||
|
var buf;
|
||||||
|
if (offset == null) {
|
||||||
|
buf = buffers.splice(0, bytes);
|
||||||
|
if (!pending.skip) {
|
||||||
|
buf = buf.slice();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
if (!pending.skip) {
|
||||||
|
buf = buffers.slice(offset, bytes);
|
||||||
|
}
|
||||||
|
offset = bytes;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (pending.skip) {
|
||||||
|
pending.cb();
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
pending.cb(buf);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function builder (saw) {
|
||||||
|
function next () { if (!done) saw.next() }
|
||||||
|
|
||||||
|
var self = words(function (bytes, cb) {
|
||||||
|
return function (name) {
|
||||||
|
getBytes(bytes, function (buf) {
|
||||||
|
vars.set(name, cb(buf));
|
||||||
|
next();
|
||||||
|
});
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
self.tap = function (cb) {
|
||||||
|
saw.nest(cb, vars.store);
|
||||||
|
};
|
||||||
|
|
||||||
|
self.into = function (key, cb) {
|
||||||
|
if (!vars.get(key)) vars.set(key, {});
|
||||||
|
var parent = vars;
|
||||||
|
vars = Vars(parent.get(key));
|
||||||
|
|
||||||
|
saw.nest(function () {
|
||||||
|
cb.apply(this, arguments);
|
||||||
|
this.tap(function () {
|
||||||
|
vars = parent;
|
||||||
|
});
|
||||||
|
}, vars.store);
|
||||||
|
};
|
||||||
|
|
||||||
|
self.flush = function () {
|
||||||
|
vars.store = {};
|
||||||
|
next();
|
||||||
|
};
|
||||||
|
|
||||||
|
self.loop = function (cb) {
|
||||||
|
var end = false;
|
||||||
|
|
||||||
|
saw.nest(false, function loop () {
|
||||||
|
this.vars = vars.store;
|
||||||
|
cb.call(this, function () {
|
||||||
|
end = true;
|
||||||
|
next();
|
||||||
|
}, vars.store);
|
||||||
|
this.tap(function () {
|
||||||
|
if (end) saw.next()
|
||||||
|
else loop.call(this)
|
||||||
|
}.bind(this));
|
||||||
|
}, vars.store);
|
||||||
|
};
|
||||||
|
|
||||||
|
self.buffer = function (name, bytes) {
|
||||||
|
if (typeof bytes === 'string') {
|
||||||
|
bytes = vars.get(bytes);
|
||||||
|
}
|
||||||
|
|
||||||
|
getBytes(bytes, function (buf) {
|
||||||
|
vars.set(name, buf);
|
||||||
|
next();
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
self.skip = function (bytes) {
|
||||||
|
if (typeof bytes === 'string') {
|
||||||
|
bytes = vars.get(bytes);
|
||||||
|
}
|
||||||
|
|
||||||
|
getBytes(bytes, function () {
|
||||||
|
next();
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
self.scan = function find (name, search) {
|
||||||
|
if (typeof search === 'string') {
|
||||||
|
search = new Buffer(search);
|
||||||
|
}
|
||||||
|
else if (!Buffer.isBuffer(search)) {
|
||||||
|
throw new Error('search must be a Buffer or a string');
|
||||||
|
}
|
||||||
|
|
||||||
|
var taken = 0;
|
||||||
|
pending = function () {
|
||||||
|
var pos = buffers.indexOf(search, offset + taken);
|
||||||
|
var i = pos-offset-taken;
|
||||||
|
if (pos !== -1) {
|
||||||
|
pending = null;
|
||||||
|
if (offset != null) {
|
||||||
|
vars.set(
|
||||||
|
name,
|
||||||
|
buffers.slice(offset, offset + taken + i)
|
||||||
|
);
|
||||||
|
offset += taken + i + search.length;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
vars.set(
|
||||||
|
name,
|
||||||
|
buffers.slice(0, taken + i)
|
||||||
|
);
|
||||||
|
buffers.splice(0, taken + i + search.length);
|
||||||
|
}
|
||||||
|
next();
|
||||||
|
dispatch();
|
||||||
|
} else {
|
||||||
|
i = Math.max(buffers.length - search.length - offset - taken, 0);
|
||||||
|
}
|
||||||
|
taken += i;
|
||||||
|
};
|
||||||
|
dispatch();
|
||||||
|
};
|
||||||
|
|
||||||
|
self.peek = function (cb) {
|
||||||
|
offset = 0;
|
||||||
|
saw.nest(function () {
|
||||||
|
cb.call(this, vars.store);
|
||||||
|
this.tap(function () {
|
||||||
|
offset = null;
|
||||||
|
});
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
return self;
|
||||||
|
};
|
||||||
|
|
||||||
|
var stream = Chainsaw.light(builder);
|
||||||
|
stream.writable = true;
|
||||||
|
|
||||||
|
var buffers = Buffers();
|
||||||
|
|
||||||
|
stream.write = function (buf) {
|
||||||
|
buffers.push(buf);
|
||||||
|
dispatch();
|
||||||
|
};
|
||||||
|
|
||||||
|
var vars = Vars();
|
||||||
|
|
||||||
|
var done = false, caughtEnd = false;
|
||||||
|
stream.end = function () {
|
||||||
|
caughtEnd = true;
|
||||||
|
};
|
||||||
|
|
||||||
|
stream.pipe = Stream.prototype.pipe;
|
||||||
|
Object.getOwnPropertyNames(EventEmitter.prototype).forEach(function (name) {
|
||||||
|
stream[name] = EventEmitter.prototype[name];
|
||||||
|
});
|
||||||
|
|
||||||
|
return stream;
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.parse = function parse (buffer) {
|
||||||
|
var self = words(function (bytes, cb) {
|
||||||
|
return function (name) {
|
||||||
|
if (offset + bytes <= buffer.length) {
|
||||||
|
var buf = buffer.slice(offset, offset + bytes);
|
||||||
|
offset += bytes;
|
||||||
|
vars.set(name, cb(buf));
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
vars.set(name, null);
|
||||||
|
}
|
||||||
|
return self;
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
var offset = 0;
|
||||||
|
var vars = Vars();
|
||||||
|
self.vars = vars.store;
|
||||||
|
|
||||||
|
self.tap = function (cb) {
|
||||||
|
cb.call(self, vars.store);
|
||||||
|
return self;
|
||||||
|
};
|
||||||
|
|
||||||
|
self.into = function (key, cb) {
|
||||||
|
if (!vars.get(key)) {
|
||||||
|
vars.set(key, {});
|
||||||
|
}
|
||||||
|
var parent = vars;
|
||||||
|
vars = Vars(parent.get(key));
|
||||||
|
cb.call(self, vars.store);
|
||||||
|
vars = parent;
|
||||||
|
return self;
|
||||||
|
};
|
||||||
|
|
||||||
|
self.loop = function (cb) {
|
||||||
|
var end = false;
|
||||||
|
var ender = function () { end = true };
|
||||||
|
while (end === false) {
|
||||||
|
cb.call(self, ender, vars.store);
|
||||||
|
}
|
||||||
|
return self;
|
||||||
|
};
|
||||||
|
|
||||||
|
self.buffer = function (name, size) {
|
||||||
|
if (typeof size === 'string') {
|
||||||
|
size = vars.get(size);
|
||||||
|
}
|
||||||
|
var buf = buffer.slice(offset, Math.min(buffer.length, offset + size));
|
||||||
|
offset += size;
|
||||||
|
vars.set(name, buf);
|
||||||
|
|
||||||
|
return self;
|
||||||
|
};
|
||||||
|
|
||||||
|
self.skip = function (bytes) {
|
||||||
|
if (typeof bytes === 'string') {
|
||||||
|
bytes = vars.get(bytes);
|
||||||
|
}
|
||||||
|
offset += bytes;
|
||||||
|
|
||||||
|
return self;
|
||||||
|
};
|
||||||
|
|
||||||
|
self.scan = function (name, search) {
|
||||||
|
if (typeof search === 'string') {
|
||||||
|
search = new Buffer(search);
|
||||||
|
}
|
||||||
|
else if (!Buffer.isBuffer(search)) {
|
||||||
|
throw new Error('search must be a Buffer or a string');
|
||||||
|
}
|
||||||
|
vars.set(name, null);
|
||||||
|
|
||||||
|
// simple but slow string search
|
||||||
|
for (var i = 0; i + offset <= buffer.length - search.length + 1; i++) {
|
||||||
|
for (
|
||||||
|
var j = 0;
|
||||||
|
j < search.length && buffer[offset+i+j] === search[j];
|
||||||
|
j++
|
||||||
|
);
|
||||||
|
if (j === search.length) break;
|
||||||
|
}
|
||||||
|
|
||||||
|
vars.set(name, buffer.slice(offset, offset + i));
|
||||||
|
offset += i + search.length;
|
||||||
|
return self;
|
||||||
|
};
|
||||||
|
|
||||||
|
self.peek = function (cb) {
|
||||||
|
var was = offset;
|
||||||
|
cb.call(self, vars.store);
|
||||||
|
offset = was;
|
||||||
|
return self;
|
||||||
|
};
|
||||||
|
|
||||||
|
self.flush = function () {
|
||||||
|
vars.store = {};
|
||||||
|
return self;
|
||||||
|
};
|
||||||
|
|
||||||
|
self.eof = function () {
|
||||||
|
return offset >= buffer.length;
|
||||||
|
};
|
||||||
|
|
||||||
|
return self;
|
||||||
|
};
|
||||||
|
|
||||||
|
// convert byte strings to unsigned little endian numbers
|
||||||
|
function decodeLEu (bytes) {
|
||||||
|
var acc = 0;
|
||||||
|
for (var i = 0; i < bytes.length; i++) {
|
||||||
|
acc += Math.pow(256,i) * bytes[i];
|
||||||
|
}
|
||||||
|
return acc;
|
||||||
|
}
|
||||||
|
|
||||||
|
// convert byte strings to unsigned big endian numbers
|
||||||
|
function decodeBEu (bytes) {
|
||||||
|
var acc = 0;
|
||||||
|
for (var i = 0; i < bytes.length; i++) {
|
||||||
|
acc += Math.pow(256, bytes.length - i - 1) * bytes[i];
|
||||||
|
}
|
||||||
|
return acc;
|
||||||
|
}
|
||||||
|
|
||||||
|
// convert byte strings to signed big endian numbers
|
||||||
|
function decodeBEs (bytes) {
|
||||||
|
var val = decodeBEu(bytes);
|
||||||
|
if ((bytes[0] & 0x80) == 0x80) {
|
||||||
|
val -= Math.pow(256, bytes.length);
|
||||||
|
}
|
||||||
|
return val;
|
||||||
|
}
|
||||||
|
|
||||||
|
// convert byte strings to signed little endian numbers
|
||||||
|
function decodeLEs (bytes) {
|
||||||
|
var val = decodeLEu(bytes);
|
||||||
|
if ((bytes[bytes.length - 1] & 0x80) == 0x80) {
|
||||||
|
val -= Math.pow(256, bytes.length);
|
||||||
|
}
|
||||||
|
return val;
|
||||||
|
}
|
||||||
|
|
||||||
|
function words (decode) {
|
||||||
|
var self = {};
|
||||||
|
|
||||||
|
[ 1, 2, 4, 8 ].forEach(function (bytes) {
|
||||||
|
var bits = bytes * 8;
|
||||||
|
|
||||||
|
self['word' + bits + 'le']
|
||||||
|
= self['word' + bits + 'lu']
|
||||||
|
= decode(bytes, decodeLEu);
|
||||||
|
|
||||||
|
self['word' + bits + 'ls']
|
||||||
|
= decode(bytes, decodeLEs);
|
||||||
|
|
||||||
|
self['word' + bits + 'be']
|
||||||
|
= self['word' + bits + 'bu']
|
||||||
|
= decode(bytes, decodeBEu);
|
||||||
|
|
||||||
|
self['word' + bits + 'bs']
|
||||||
|
= decode(bytes, decodeBEs);
|
||||||
|
});
|
||||||
|
|
||||||
|
// word8be(n) == word8le(n) for all n
|
||||||
|
self.word8 = self.word8u = self.word8be;
|
||||||
|
self.word8s = self.word8bs;
|
||||||
|
|
||||||
|
return self;
|
||||||
|
}
|
28
app/node_modules/binary/lib/vars.js
generated
vendored
Normal file
28
app/node_modules/binary/lib/vars.js
generated
vendored
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
module.exports = function (store) {
|
||||||
|
function getset (name, value) {
|
||||||
|
var node = vars.store;
|
||||||
|
var keys = name.split('.');
|
||||||
|
keys.slice(0,-1).forEach(function (k) {
|
||||||
|
if (node[k] === undefined) node[k] = {};
|
||||||
|
node = node[k]
|
||||||
|
});
|
||||||
|
var key = keys[keys.length - 1];
|
||||||
|
if (arguments.length == 1) {
|
||||||
|
return node[key];
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
return node[key] = value;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var vars = {
|
||||||
|
get : function (name) {
|
||||||
|
return getset(name);
|
||||||
|
},
|
||||||
|
set : function (name, value) {
|
||||||
|
return getset(name, value);
|
||||||
|
},
|
||||||
|
store : store || {},
|
||||||
|
};
|
||||||
|
return vars;
|
||||||
|
};
|
70
app/node_modules/binary/package.json
generated
vendored
Normal file
70
app/node_modules/binary/package.json
generated
vendored
Normal file
@@ -0,0 +1,70 @@
|
|||||||
|
{
|
||||||
|
"_from": "binary@^0.3.0",
|
||||||
|
"_id": "binary@0.3.0",
|
||||||
|
"_inBundle": false,
|
||||||
|
"_integrity": "sha1-n2BVO8XOjDOG87VTz/R0Yq3sqnk=",
|
||||||
|
"_location": "/binary",
|
||||||
|
"_phantomChildren": {},
|
||||||
|
"_requested": {
|
||||||
|
"type": "range",
|
||||||
|
"registry": true,
|
||||||
|
"raw": "binary@^0.3.0",
|
||||||
|
"name": "binary",
|
||||||
|
"escapedName": "binary",
|
||||||
|
"rawSpec": "^0.3.0",
|
||||||
|
"saveSpec": null,
|
||||||
|
"fetchSpec": "^0.3.0"
|
||||||
|
},
|
||||||
|
"_requiredBy": [
|
||||||
|
"/decompress-zip"
|
||||||
|
],
|
||||||
|
"_resolved": "https://registry.npmjs.org/binary/-/binary-0.3.0.tgz",
|
||||||
|
"_shasum": "9f60553bc5ce8c3386f3b553cff47462adecaa79",
|
||||||
|
"_spec": "binary@^0.3.0",
|
||||||
|
"_where": "E:\\projects\\p\\gitlit\\app\\node_modules\\decompress-zip",
|
||||||
|
"author": {
|
||||||
|
"name": "James Halliday",
|
||||||
|
"email": "mail@substack.net",
|
||||||
|
"url": "http://substack.net"
|
||||||
|
},
|
||||||
|
"bugs": {
|
||||||
|
"url": "https://github.com/substack/node-binary/issues"
|
||||||
|
},
|
||||||
|
"bundleDependencies": false,
|
||||||
|
"dependencies": {
|
||||||
|
"buffers": "~0.1.1",
|
||||||
|
"chainsaw": "~0.1.0"
|
||||||
|
},
|
||||||
|
"deprecated": false,
|
||||||
|
"description": "Unpack multibyte binary values from buffers",
|
||||||
|
"devDependencies": {
|
||||||
|
"seq": "~0.2.5",
|
||||||
|
"tap": "~0.2.4"
|
||||||
|
},
|
||||||
|
"engine": {
|
||||||
|
"node": ">=0.4.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": "*"
|
||||||
|
},
|
||||||
|
"homepage": "https://github.com/substack/node-binary#readme",
|
||||||
|
"keywords": [
|
||||||
|
"binary",
|
||||||
|
"decode",
|
||||||
|
"endian",
|
||||||
|
"unpack",
|
||||||
|
"signed",
|
||||||
|
"unsigned"
|
||||||
|
],
|
||||||
|
"license": "MIT",
|
||||||
|
"main": "./index.js",
|
||||||
|
"name": "binary",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git+ssh://git@github.com/substack/node-binary.git"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"test": "tap test/*.js"
|
||||||
|
},
|
||||||
|
"version": "0.3.0"
|
||||||
|
}
|
92
app/node_modules/binary/perf/loop.js
generated
vendored
Normal file
92
app/node_modules/binary/perf/loop.js
generated
vendored
Normal file
@@ -0,0 +1,92 @@
|
|||||||
|
var Seq = require('seq');
|
||||||
|
var Hash = require('hashish');
|
||||||
|
var EventEmitter = require('events').EventEmitter;
|
||||||
|
|
||||||
|
var Bin = require('binary');
|
||||||
|
var Buf = require('bufferlist/binary');
|
||||||
|
var BufferList = require('bufferlist');
|
||||||
|
|
||||||
|
console.log('loop');
|
||||||
|
function emitter () {
|
||||||
|
var em = new EventEmitter;
|
||||||
|
|
||||||
|
var i = 0;
|
||||||
|
var iv = setInterval(function () {
|
||||||
|
var buf = new Buffer(10000);
|
||||||
|
buf[0] = 0xff;
|
||||||
|
|
||||||
|
if (++ i >= 2000) {
|
||||||
|
buf[0] = 0;
|
||||||
|
clearInterval(iv);
|
||||||
|
}
|
||||||
|
em.emit('data', buf);
|
||||||
|
}, 1);
|
||||||
|
|
||||||
|
return em;
|
||||||
|
}
|
||||||
|
|
||||||
|
Seq()
|
||||||
|
.seq(function () {
|
||||||
|
var next = this.bind({}, null);
|
||||||
|
bufferlist(next);
|
||||||
|
})
|
||||||
|
.seq(function () {
|
||||||
|
var next = this.bind({}, null);
|
||||||
|
binary(next);
|
||||||
|
})
|
||||||
|
;
|
||||||
|
|
||||||
|
function binary (next) {
|
||||||
|
var em = emitter();
|
||||||
|
var t0 = Date.now();
|
||||||
|
|
||||||
|
Bin(em)
|
||||||
|
.loop(function (end) {
|
||||||
|
this
|
||||||
|
.word8('x')
|
||||||
|
.word8('y')
|
||||||
|
.word32be('z')
|
||||||
|
.word32le('w')
|
||||||
|
.buffer('buf', 10000 - 10)
|
||||||
|
.tap(function (vars) {
|
||||||
|
if (vars.x === 0) {
|
||||||
|
var tf = Date.now();
|
||||||
|
console.log(' binary: ' + (tf - t0) + ' ms');
|
||||||
|
end();
|
||||||
|
setTimeout(next, 20);
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
;
|
||||||
|
}
|
||||||
|
|
||||||
|
function bufferlist (next) {
|
||||||
|
var em = emitter();
|
||||||
|
var t0 = Date.now();
|
||||||
|
|
||||||
|
var blist = new BufferList;
|
||||||
|
em.on('data', function (buf) {
|
||||||
|
blist.push(buf);
|
||||||
|
});
|
||||||
|
|
||||||
|
Buf(blist)
|
||||||
|
.forever(function () {
|
||||||
|
var top = this;
|
||||||
|
this
|
||||||
|
.getWord8('x')
|
||||||
|
.getWord8('y')
|
||||||
|
.getWord32be('z')
|
||||||
|
.getWord32le('w')
|
||||||
|
.getBuffer('buf', 10000 - 10)
|
||||||
|
.tap(function (vars) {
|
||||||
|
if (vars.x === 0) {
|
||||||
|
var tf = Date.now();
|
||||||
|
console.log(' bufferlist: ' + (tf - t0) + ' ms');
|
||||||
|
top.exit();
|
||||||
|
setTimeout(next, 20);
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.end()
|
||||||
|
;
|
||||||
|
}
|
80
app/node_modules/binary/perf/small.js
generated
vendored
Normal file
80
app/node_modules/binary/perf/small.js
generated
vendored
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
var Seq = require('seq');
|
||||||
|
var Hash = require('hashish');
|
||||||
|
|
||||||
|
var Bin = require('binary');
|
||||||
|
var Buf = require('bufferlist/binary');
|
||||||
|
var BufferList = require('bufferlist');
|
||||||
|
var EventEmitter = require('events').EventEmitter;
|
||||||
|
|
||||||
|
function binary (buf, cb) {
|
||||||
|
Bin(buf)
|
||||||
|
.word32le('x')
|
||||||
|
.word16be('y')
|
||||||
|
.word16be('z')
|
||||||
|
.word32le('w')
|
||||||
|
.tap(cb)
|
||||||
|
;
|
||||||
|
};
|
||||||
|
|
||||||
|
function stream (buf, cb) {
|
||||||
|
var em = new EventEmitter;
|
||||||
|
Bin(em)
|
||||||
|
.word32le('x')
|
||||||
|
.word16be('y')
|
||||||
|
.word16be('z')
|
||||||
|
.word32le('w')
|
||||||
|
.tap(cb)
|
||||||
|
;
|
||||||
|
em.emit('data', buf);
|
||||||
|
};
|
||||||
|
|
||||||
|
function parse (buf, cb) {
|
||||||
|
cb(Bin.parse(buf)
|
||||||
|
.word32le('x')
|
||||||
|
.word16be('y')
|
||||||
|
.word16be('z')
|
||||||
|
.word32le('w')
|
||||||
|
.vars
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
function bufferlist (buf, cb) {
|
||||||
|
var blist = new BufferList;
|
||||||
|
blist.push(buf);
|
||||||
|
Buf(blist)
|
||||||
|
.getWord32le('x')
|
||||||
|
.getWord16be('y')
|
||||||
|
.getWord16be('z')
|
||||||
|
.getWord32le('w')
|
||||||
|
.tap(cb)
|
||||||
|
.end()
|
||||||
|
;
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
var buffers = [];
|
||||||
|
for (var i = 0; i < 200; i++) {
|
||||||
|
buffers.push(new Buffer(12));
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('small');
|
||||||
|
Seq(binary, stream, parse, bufferlist)
|
||||||
|
.seqEach(function (f) {
|
||||||
|
var t = this;
|
||||||
|
var t0 = Date.now();
|
||||||
|
Seq()
|
||||||
|
.extend(buffers)
|
||||||
|
.seqEach(function (buf) {
|
||||||
|
f(buf, this.bind(this, null));
|
||||||
|
})
|
||||||
|
.seq(function () {
|
||||||
|
var tf = Date.now();
|
||||||
|
console.log(' ' + f.name + ': ' + (tf - t0));
|
||||||
|
t(null);
|
||||||
|
})
|
||||||
|
;
|
||||||
|
})
|
||||||
|
.seq(function () {
|
||||||
|
this(null);
|
||||||
|
})
|
||||||
|
;
|
46
app/node_modules/binary/test/bu.js
generated
vendored
Normal file
46
app/node_modules/binary/test/bu.js
generated
vendored
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
var binary = require('../');
|
||||||
|
var test = require('tap').test;
|
||||||
|
|
||||||
|
test('bu', function (t) {
|
||||||
|
t.plan(8);
|
||||||
|
|
||||||
|
// note: can't store -12667700813876161 exactly in an ieee float
|
||||||
|
|
||||||
|
var buf = new Buffer([
|
||||||
|
44, // a == 44
|
||||||
|
2, 43, // b == 555
|
||||||
|
164, 213, 37, 37, // c == 2765432101
|
||||||
|
29, 81, 180, 20, 155, 115, 203, 193, // d == 2112667700813876161
|
||||||
|
]);
|
||||||
|
|
||||||
|
binary.parse(buf)
|
||||||
|
.word8bu('a')
|
||||||
|
.word16bu('b')
|
||||||
|
.word32bu('c')
|
||||||
|
.word64bu('d')
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.same(vars.a, 44);
|
||||||
|
t.same(vars.b, 555);
|
||||||
|
t.same(vars.c, 2765432101);
|
||||||
|
t.ok(
|
||||||
|
Math.abs(vars.d - 2112667700813876161) < 1500
|
||||||
|
);
|
||||||
|
})
|
||||||
|
;
|
||||||
|
|
||||||
|
// also check aliases here:
|
||||||
|
binary.parse(buf)
|
||||||
|
.word8be('a')
|
||||||
|
.word16be('b')
|
||||||
|
.word32be('c')
|
||||||
|
.word64be('d')
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.same(vars.a, 44);
|
||||||
|
t.same(vars.b, 555);
|
||||||
|
t.same(vars.c, 2765432101);
|
||||||
|
t.ok(
|
||||||
|
Math.abs(vars.d - 2112667700813876161) < 1500
|
||||||
|
);
|
||||||
|
})
|
||||||
|
;
|
||||||
|
});
|
20
app/node_modules/binary/test/deferred.js
generated
vendored
Normal file
20
app/node_modules/binary/test/deferred.js
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
var binary = require('../');
|
||||||
|
var test = require('tap').test;
|
||||||
|
var EventEmitter = require('events').EventEmitter;
|
||||||
|
|
||||||
|
test('deferred', function (t) {
|
||||||
|
t.plan(1);
|
||||||
|
|
||||||
|
var em = new EventEmitter;
|
||||||
|
binary.stream(em)
|
||||||
|
.word8('a')
|
||||||
|
.word16be('bc')
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.same(vars, { a : 97, bc : 25187 });
|
||||||
|
})
|
||||||
|
;
|
||||||
|
|
||||||
|
setTimeout(function () {
|
||||||
|
em.emit('data', new Buffer([ 97, 98, 99 ]));
|
||||||
|
}, 10);
|
||||||
|
});
|
23
app/node_modules/binary/test/dots.js
generated
vendored
Normal file
23
app/node_modules/binary/test/dots.js
generated
vendored
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
var binary = require('../');
|
||||||
|
var test = require('tap').test;
|
||||||
|
|
||||||
|
test('dots', function (t) {
|
||||||
|
t.plan(1);
|
||||||
|
|
||||||
|
binary.parse(new Buffer([ 97, 98, 99, 100, 101, 102 ]))
|
||||||
|
.word8('a')
|
||||||
|
.word16be('b.x')
|
||||||
|
.word16be('b.y')
|
||||||
|
.word8('b.z')
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.same(vars, {
|
||||||
|
a : 97,
|
||||||
|
b : {
|
||||||
|
x : 256 * 98 + 99,
|
||||||
|
y : 256 * 100 + 101,
|
||||||
|
z : 102
|
||||||
|
},
|
||||||
|
});
|
||||||
|
})
|
||||||
|
;
|
||||||
|
});
|
41
app/node_modules/binary/test/eof.js
generated
vendored
Normal file
41
app/node_modules/binary/test/eof.js
generated
vendored
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
var binary = require('../');
|
||||||
|
var test = require('tap').test;
|
||||||
|
var EventEmitter = require('events').EventEmitter;
|
||||||
|
|
||||||
|
test('eof', function (t) {
|
||||||
|
t.plan(4);
|
||||||
|
|
||||||
|
var stream = new EventEmitter;
|
||||||
|
binary.stream(stream)
|
||||||
|
.buffer('sixone', 5)
|
||||||
|
.peek(function () {
|
||||||
|
this.word32le('len');
|
||||||
|
})
|
||||||
|
.buffer('buf', 'len')
|
||||||
|
.word8('x')
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.same(
|
||||||
|
[].slice.call(vars.sixone),
|
||||||
|
[].slice.call(new Buffer([ 6, 1, 6, 1, 6 ]))
|
||||||
|
);
|
||||||
|
t.same(vars.buf.length, vars.len);
|
||||||
|
t.same(
|
||||||
|
[].slice.call(vars.buf),
|
||||||
|
[ 9, 0, 0, 0, 97, 98, 99, 100, 101 ]
|
||||||
|
);
|
||||||
|
t.same(vars.x, 102);
|
||||||
|
})
|
||||||
|
;
|
||||||
|
|
||||||
|
var bufs = [
|
||||||
|
new Buffer([ 6, 1, 6, 1, 6, 9, 0, 0, 0, 97 ]),
|
||||||
|
new Buffer([ 98, 99 ]),
|
||||||
|
new Buffer([ 100, 101, 102 ]),
|
||||||
|
];
|
||||||
|
|
||||||
|
bufs.forEach(function (buf) {
|
||||||
|
stream.emit('data', buf);
|
||||||
|
});
|
||||||
|
|
||||||
|
stream.emit('end');
|
||||||
|
});
|
17
app/node_modules/binary/test/flush.js
generated
vendored
Normal file
17
app/node_modules/binary/test/flush.js
generated
vendored
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
var binary = require('../');
|
||||||
|
var test = require('tap').test;
|
||||||
|
|
||||||
|
test('flush', function (t) {
|
||||||
|
t.plan(1);
|
||||||
|
|
||||||
|
binary.parse(new Buffer([ 97, 98, 99, 100, 101, 102 ]))
|
||||||
|
.word8('a')
|
||||||
|
.word16be('b')
|
||||||
|
.word16be('c')
|
||||||
|
.flush()
|
||||||
|
.word8('d')
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.same(vars, { d : 102 });
|
||||||
|
})
|
||||||
|
;
|
||||||
|
});
|
14
app/node_modules/binary/test/from_buffer.js
generated
vendored
Normal file
14
app/node_modules/binary/test/from_buffer.js
generated
vendored
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
var binary = require('../');
|
||||||
|
var test = require('tap').test;
|
||||||
|
|
||||||
|
test('from buffer', function (t) {
|
||||||
|
t.plan(1);
|
||||||
|
|
||||||
|
binary(new Buffer([ 97, 98, 99 ]))
|
||||||
|
.word8('a')
|
||||||
|
.word16be('bc')
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.same(vars, { a : 97, bc : 25187 });
|
||||||
|
})
|
||||||
|
;
|
||||||
|
});
|
28
app/node_modules/binary/test/get_buffer.js
generated
vendored
Normal file
28
app/node_modules/binary/test/get_buffer.js
generated
vendored
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
var binary = require('../');
|
||||||
|
var test = require('tap').test;
|
||||||
|
|
||||||
|
test('get buffer', function (t) {
|
||||||
|
t.plan(4);
|
||||||
|
|
||||||
|
var buf = new Buffer([ 4, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14 ]);
|
||||||
|
binary.parse(buf)
|
||||||
|
.word8('a')
|
||||||
|
.buffer('b', 7)
|
||||||
|
.word16lu('c')
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.equal(vars.a, 4);
|
||||||
|
t.equal(
|
||||||
|
vars.b.toString(),
|
||||||
|
new Buffer([ 2, 3, 4, 5, 6, 7, 8 ]).toString()
|
||||||
|
);
|
||||||
|
t.equal(vars.c, 2569);
|
||||||
|
})
|
||||||
|
.buffer('d', 'a')
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.equal(
|
||||||
|
vars.d.toString(),
|
||||||
|
new Buffer([ 11, 12, 13, 14 ]).toString()
|
||||||
|
);
|
||||||
|
})
|
||||||
|
;
|
||||||
|
});
|
18
app/node_modules/binary/test/immediate.js
generated
vendored
Normal file
18
app/node_modules/binary/test/immediate.js
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
var binary = require('../');
|
||||||
|
var test = require('tap').test;
|
||||||
|
var EventEmitter = require('events').EventEmitter;
|
||||||
|
|
||||||
|
test('immediate', function (t) {
|
||||||
|
t.plan(1);
|
||||||
|
|
||||||
|
var em = new EventEmitter;
|
||||||
|
binary.stream(em, 'moo')
|
||||||
|
.word8('a')
|
||||||
|
.word16be('bc')
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.same(vars, { a : 97, bc : 25187 });
|
||||||
|
})
|
||||||
|
;
|
||||||
|
|
||||||
|
em.emit('moo', new Buffer([ 97, 98, 99 ]));
|
||||||
|
});
|
38
app/node_modules/binary/test/interval.js
generated
vendored
Normal file
38
app/node_modules/binary/test/interval.js
generated
vendored
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
var binary = require('../');
|
||||||
|
var test = require('tap').test;
|
||||||
|
var EventEmitter = require('events').EventEmitter;
|
||||||
|
|
||||||
|
test('interval', function (t) {
|
||||||
|
t.plan(1);
|
||||||
|
|
||||||
|
var em = new EventEmitter;
|
||||||
|
var i = 0;
|
||||||
|
var iv = setInterval(function () {
|
||||||
|
var buf = new Buffer(1000);
|
||||||
|
buf[0] = 0xff;
|
||||||
|
if (++i >= 1000) {
|
||||||
|
clearInterval(iv);
|
||||||
|
buf[0] = 0;
|
||||||
|
}
|
||||||
|
em.emit('data', buf);
|
||||||
|
}, 1);
|
||||||
|
|
||||||
|
var loops = 0;
|
||||||
|
binary(em)
|
||||||
|
.loop(function (end) {
|
||||||
|
this
|
||||||
|
.word8('x')
|
||||||
|
.word8('y')
|
||||||
|
.word32be('z')
|
||||||
|
.word32le('w')
|
||||||
|
.buffer('buf', 1000 - 10)
|
||||||
|
.tap(function (vars) {
|
||||||
|
loops ++;
|
||||||
|
if (vars.x == 0) end();
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.tap(function () {
|
||||||
|
t.same(loops, 1000);
|
||||||
|
})
|
||||||
|
;
|
||||||
|
});
|
35
app/node_modules/binary/test/into_buffer.js
generated
vendored
Normal file
35
app/node_modules/binary/test/into_buffer.js
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
var binary = require('../');
|
||||||
|
var test = require('tap').test;
|
||||||
|
|
||||||
|
test('intoBuffer', function (t) {
|
||||||
|
t.plan(3);
|
||||||
|
var buf = new Buffer([ 1, 2, 3, 4, 5, 6 ])
|
||||||
|
|
||||||
|
binary.parse(buf)
|
||||||
|
.into('moo', function () {
|
||||||
|
this
|
||||||
|
.word8('x')
|
||||||
|
.word8('y')
|
||||||
|
.word8('z')
|
||||||
|
;
|
||||||
|
})
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.same(vars, { moo : { x : 1, y : 2, z : 3 } });
|
||||||
|
})
|
||||||
|
.word8('w')
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.same(vars, {
|
||||||
|
moo : { x : 1, y : 2, z : 3 },
|
||||||
|
w : 4,
|
||||||
|
});
|
||||||
|
})
|
||||||
|
.word8('x')
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.same(vars, {
|
||||||
|
moo : { x : 1, y : 2, z : 3 },
|
||||||
|
w : 4,
|
||||||
|
x : 5,
|
||||||
|
});
|
||||||
|
})
|
||||||
|
;
|
||||||
|
});
|
43
app/node_modules/binary/test/into_stream.js
generated
vendored
Normal file
43
app/node_modules/binary/test/into_stream.js
generated
vendored
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
var binary = require('../');
|
||||||
|
var test = require('tap').test;
|
||||||
|
var EventEmitter = require('events').EventEmitter;
|
||||||
|
|
||||||
|
test('into stream', function (t) {
|
||||||
|
t.plan(3);
|
||||||
|
|
||||||
|
var digits = [ 1, 2, 3, 4, 5, 6 ];
|
||||||
|
var stream = new EventEmitter;
|
||||||
|
var iv = setInterval(function () {
|
||||||
|
var d = digits.shift();
|
||||||
|
if (d) stream.emit('data', new Buffer([ d ]))
|
||||||
|
else clearInterval(iv)
|
||||||
|
}, 20);
|
||||||
|
|
||||||
|
binary.stream(stream)
|
||||||
|
.into('moo', function () {
|
||||||
|
this
|
||||||
|
.word8('x')
|
||||||
|
.word8('y')
|
||||||
|
.word8('z')
|
||||||
|
;
|
||||||
|
})
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.same(vars, { moo : { x : 1, y : 2, z : 3 } });
|
||||||
|
})
|
||||||
|
.word8('w')
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.same(vars, {
|
||||||
|
moo : { x : 1, y : 2, z : 3 },
|
||||||
|
w : 4,
|
||||||
|
});
|
||||||
|
})
|
||||||
|
.word8('x')
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.same(vars, {
|
||||||
|
moo : { x : 1, y : 2, z : 3 },
|
||||||
|
w : 4,
|
||||||
|
x : 5,
|
||||||
|
});
|
||||||
|
})
|
||||||
|
;
|
||||||
|
});
|
44
app/node_modules/binary/test/loop.js
generated
vendored
Normal file
44
app/node_modules/binary/test/loop.js
generated
vendored
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
var binary = require('../');
|
||||||
|
var test = require('tap').test;
|
||||||
|
var EventEmitter = require('events').EventEmitter;
|
||||||
|
|
||||||
|
test('loop', function (t) {
|
||||||
|
t.plan(3 * 2 + 1);
|
||||||
|
|
||||||
|
var em = new EventEmitter;
|
||||||
|
|
||||||
|
binary.stream(em)
|
||||||
|
.loop(function (end, vars) {
|
||||||
|
t.strictEqual(vars, this.vars);
|
||||||
|
this
|
||||||
|
.word16lu('a')
|
||||||
|
.word8u('b')
|
||||||
|
.word8s('c')
|
||||||
|
.tap(function (vars_) {
|
||||||
|
t.strictEqual(vars, vars_);
|
||||||
|
if (vars.c < 0) end();
|
||||||
|
})
|
||||||
|
;
|
||||||
|
})
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.same(vars, { a : 1337, b : 55, c : -5 });
|
||||||
|
})
|
||||||
|
;
|
||||||
|
|
||||||
|
setTimeout(function () {
|
||||||
|
em.emit('data', new Buffer([ 2, 10, 88 ]));
|
||||||
|
}, 10);
|
||||||
|
setTimeout(function () {
|
||||||
|
em.emit('data', new Buffer([ 100, 3, 6, 242, 30 ]));
|
||||||
|
}, 20);
|
||||||
|
setTimeout(function () {
|
||||||
|
em.emit('data', new Buffer([ 60, 60, 199, 44 ]));
|
||||||
|
}, 30);
|
||||||
|
|
||||||
|
setTimeout(function () {
|
||||||
|
em.emit('data', new Buffer([ 57, 5 ]));
|
||||||
|
}, 80);
|
||||||
|
setTimeout(function () {
|
||||||
|
em.emit('data', new Buffer([ 55, 251 ]));
|
||||||
|
}, 90);
|
||||||
|
});
|
54
app/node_modules/binary/test/loop_scan.js
generated
vendored
Normal file
54
app/node_modules/binary/test/loop_scan.js
generated
vendored
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
var binary = require('../');
|
||||||
|
var test = require('tap').test;
|
||||||
|
var EventEmitter = require('events').EventEmitter;
|
||||||
|
|
||||||
|
test('loop scan', function (t) {
|
||||||
|
t.plan(8 + 6 + 2);
|
||||||
|
|
||||||
|
var em = new EventEmitter;
|
||||||
|
|
||||||
|
binary.stream(em)
|
||||||
|
.loop(function (end) {
|
||||||
|
var vars_ = this.vars;
|
||||||
|
this
|
||||||
|
.scan('filler', 'BEGINMSG')
|
||||||
|
.buffer('cmd', 3)
|
||||||
|
.word8('num')
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.strictEqual(vars, vars_);
|
||||||
|
if (vars.num != 0x02 && vars.num != 0x06) {
|
||||||
|
t.same(vars.filler.length, 0);
|
||||||
|
}
|
||||||
|
if (vars.cmd.toString() == 'end') end();
|
||||||
|
})
|
||||||
|
;
|
||||||
|
})
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.same(vars.cmd.toString(), 'end');
|
||||||
|
t.same(vars.num, 0x08);
|
||||||
|
})
|
||||||
|
;
|
||||||
|
|
||||||
|
setTimeout(function () {
|
||||||
|
em.emit('data', new Buffer(
|
||||||
|
'BEGINMSGcmd\x01'
|
||||||
|
+ 'GARBAGEDATAXXXX'
|
||||||
|
+ 'BEGINMSGcmd\x02'
|
||||||
|
+ 'BEGINMSGcmd\x03'
|
||||||
|
));
|
||||||
|
}, 10);
|
||||||
|
|
||||||
|
setTimeout(function () {
|
||||||
|
em.emit('data', new Buffer(
|
||||||
|
'BEGINMSGcmd\x04'
|
||||||
|
+ 'BEGINMSGcmd\x05'
|
||||||
|
+ 'GARBAGEDATAXXXX'
|
||||||
|
+ 'BEGINMSGcmd\x06'
|
||||||
|
));
|
||||||
|
em.emit('data', new Buffer('BEGINMSGcmd\x07'));
|
||||||
|
}, 20);
|
||||||
|
|
||||||
|
setTimeout(function () {
|
||||||
|
em.emit('data', new Buffer('BEGINMSGend\x08'));
|
||||||
|
}, 30);
|
||||||
|
});
|
46
app/node_modules/binary/test/lu.js
generated
vendored
Normal file
46
app/node_modules/binary/test/lu.js
generated
vendored
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
var binary = require('../');
|
||||||
|
var test = require('tap').test;
|
||||||
|
|
||||||
|
test('lu', function (t) {
|
||||||
|
t.plan(8);
|
||||||
|
|
||||||
|
// note: can't store -12667700813876161 exactly in an ieee float
|
||||||
|
|
||||||
|
var buf = new Buffer([
|
||||||
|
44, // a == 44
|
||||||
|
43, 2, // b == 555
|
||||||
|
37, 37, 213, 164, // c == 2765432101
|
||||||
|
193, 203, 115, 155, 20, 180, 81, 29, // d == 2112667700813876161
|
||||||
|
]);
|
||||||
|
|
||||||
|
binary.parse(buf)
|
||||||
|
.word8lu('a')
|
||||||
|
.word16lu('b')
|
||||||
|
.word32lu('c')
|
||||||
|
.word64lu('d')
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.same(vars.a, 44);
|
||||||
|
t.same(vars.b, 555);
|
||||||
|
t.same(vars.c, 2765432101);
|
||||||
|
t.ok(
|
||||||
|
Math.abs(vars.d - 2112667700813876161) < 1500
|
||||||
|
);
|
||||||
|
})
|
||||||
|
;
|
||||||
|
|
||||||
|
// also check aliases here:
|
||||||
|
binary.parse(buf)
|
||||||
|
.word8le('a')
|
||||||
|
.word16le('b')
|
||||||
|
.word32le('c')
|
||||||
|
.word64le('d')
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.same(vars.a, 44);
|
||||||
|
t.same(vars.b, 555);
|
||||||
|
t.same(vars.c, 2765432101);
|
||||||
|
t.ok(
|
||||||
|
Math.abs(vars.d - 2112667700813876161) < 1500
|
||||||
|
);
|
||||||
|
})
|
||||||
|
;
|
||||||
|
});
|
29
app/node_modules/binary/test/negbs.js
generated
vendored
Normal file
29
app/node_modules/binary/test/negbs.js
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
var binary = require('../');
|
||||||
|
var test = require('tap').test;
|
||||||
|
|
||||||
|
test('negbs', function (t) {
|
||||||
|
t.plan(4);
|
||||||
|
// note: can't store -12667700813876161 exactly in an ieee float
|
||||||
|
|
||||||
|
var buf = new Buffer([
|
||||||
|
226, // a == -30
|
||||||
|
246, 219, // b == -2341
|
||||||
|
255, 243, 245, 236, // c == -789012
|
||||||
|
255, 210, 254, 203, 16, 222, 52, 63, // d == -12667700813876161
|
||||||
|
]);
|
||||||
|
|
||||||
|
binary.parse(buf)
|
||||||
|
.word8bs('a')
|
||||||
|
.word16bs('b')
|
||||||
|
.word32bs('c')
|
||||||
|
.word64bs('d')
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.same(vars.a, -30);
|
||||||
|
t.same(vars.b, -2341);
|
||||||
|
t.same(vars.c, -789012);
|
||||||
|
t.ok(
|
||||||
|
Math.abs(vars.d - -12667700813876161) < 1500
|
||||||
|
);
|
||||||
|
})
|
||||||
|
;
|
||||||
|
});
|
29
app/node_modules/binary/test/negls.js
generated
vendored
Normal file
29
app/node_modules/binary/test/negls.js
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
var binary = require('../');
|
||||||
|
var test = require('tap').test;
|
||||||
|
|
||||||
|
test('negls', function (t) {
|
||||||
|
t.plan(4);
|
||||||
|
// note: can't store -12667700813876161 exactly in an ieee float
|
||||||
|
|
||||||
|
var buf = new Buffer([
|
||||||
|
226, // a == -30
|
||||||
|
219, 246, // b == -2341
|
||||||
|
236, 245, 243, 255, // c == -789012
|
||||||
|
63, 52, 222, 16, 203, 254, 210, 255, // d == -12667700813876161
|
||||||
|
]);
|
||||||
|
|
||||||
|
binary.parse(buf)
|
||||||
|
.word8ls('a')
|
||||||
|
.word16ls('b')
|
||||||
|
.word32ls('c')
|
||||||
|
.word64ls('d')
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.same(vars.a, -30);
|
||||||
|
t.same(vars.b, -2341);
|
||||||
|
t.same(vars.c, -789012);
|
||||||
|
t.ok(
|
||||||
|
Math.abs(vars.d - -12667700813876161) < 1000
|
||||||
|
);
|
||||||
|
})
|
||||||
|
;
|
||||||
|
});
|
35
app/node_modules/binary/test/nested.js
generated
vendored
Normal file
35
app/node_modules/binary/test/nested.js
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
var binary = require('../');
|
||||||
|
var test = require('tap').test;
|
||||||
|
var EventEmitter = require('events').EventEmitter;
|
||||||
|
|
||||||
|
test('nested', function (t) {
|
||||||
|
t.plan(3);
|
||||||
|
var insideDone = false;
|
||||||
|
|
||||||
|
var em = new EventEmitter;
|
||||||
|
binary.stream(em)
|
||||||
|
.word16be('ab')
|
||||||
|
.tap(function () {
|
||||||
|
this
|
||||||
|
.word8('c')
|
||||||
|
.word8('d')
|
||||||
|
.tap(function () {
|
||||||
|
insideDone = true;
|
||||||
|
})
|
||||||
|
;
|
||||||
|
})
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.ok(insideDone);
|
||||||
|
t.same(vars.c, 'c'.charCodeAt(0));
|
||||||
|
t.same(vars.d, 'd'.charCodeAt(0));
|
||||||
|
|
||||||
|
})
|
||||||
|
;
|
||||||
|
|
||||||
|
var strs = [ 'abc', 'def', 'hi', 'jkl' ];
|
||||||
|
var iv = setInterval(function () {
|
||||||
|
var s = strs.shift();
|
||||||
|
if (s) em.emit('data', new Buffer(s));
|
||||||
|
else clearInterval(iv);
|
||||||
|
}, 50);
|
||||||
|
});
|
17
app/node_modules/binary/test/not_enough_buf.js
generated
vendored
Normal file
17
app/node_modules/binary/test/not_enough_buf.js
generated
vendored
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
var binary = require('../');
|
||||||
|
var test = require('tap').test;
|
||||||
|
|
||||||
|
test('not enough buf', function (t) {
|
||||||
|
t.plan(3);
|
||||||
|
|
||||||
|
var vars = binary(new Buffer([1,2,3,4]))
|
||||||
|
.word8('a')
|
||||||
|
.buffer('b', 10)
|
||||||
|
.word8('c')
|
||||||
|
.vars
|
||||||
|
;
|
||||||
|
|
||||||
|
t.same(vars.a, 1);
|
||||||
|
t.equal(vars.b.toString(), new Buffer([2,3,4]).toString());
|
||||||
|
t.strictEqual(vars.c, null);
|
||||||
|
});
|
19
app/node_modules/binary/test/not_enough_parse.js
generated
vendored
Normal file
19
app/node_modules/binary/test/not_enough_parse.js
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
var binary = require('../');
|
||||||
|
var test = require('tap').test;
|
||||||
|
|
||||||
|
test('not enough parse', function (t) {
|
||||||
|
t.plan(4);
|
||||||
|
|
||||||
|
var vars = binary(new Buffer([1,2]))
|
||||||
|
.word8('a')
|
||||||
|
.word8('b')
|
||||||
|
.word8('c')
|
||||||
|
.word8('d')
|
||||||
|
.vars
|
||||||
|
;
|
||||||
|
|
||||||
|
t.same(vars.a, 1);
|
||||||
|
t.same(vars.b, 2);
|
||||||
|
t.strictEqual(vars.c, null);
|
||||||
|
t.strictEqual(vars.d, null);
|
||||||
|
});
|
54
app/node_modules/binary/test/parse.js
generated
vendored
Normal file
54
app/node_modules/binary/test/parse.js
generated
vendored
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
var binary = require('../');
|
||||||
|
var test = require('tap').test;
|
||||||
|
|
||||||
|
test('parse', function (t) {
|
||||||
|
t.plan(6);
|
||||||
|
var res = binary.parse(new Buffer([ 97, 98, 99, 99, 99, 99, 1, 2, 3 ]))
|
||||||
|
.word8('a')
|
||||||
|
.word16be('bc')
|
||||||
|
.skip(3)
|
||||||
|
.buffer('def', 3)
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.equal(vars.a, 97);
|
||||||
|
t.equal(vars.bc, 25187);
|
||||||
|
t.same(
|
||||||
|
[].slice.call(vars.def),
|
||||||
|
[].slice.call(new Buffer([ 1, 2, 3]))
|
||||||
|
);
|
||||||
|
})
|
||||||
|
.vars
|
||||||
|
;
|
||||||
|
t.equal(res.a, 97);
|
||||||
|
t.equal(res.bc, 25187);
|
||||||
|
t.same(
|
||||||
|
[].slice.call(res.def),
|
||||||
|
[].slice.call(new Buffer([ 1, 2, 3 ]))
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('loop', function (t) {
|
||||||
|
t.plan(2);
|
||||||
|
var res = binary.parse(new Buffer([ 97, 98, 99, 4, 5, 2, -3, 9 ]))
|
||||||
|
.word8('a')
|
||||||
|
.word16be('bc')
|
||||||
|
.loop(function (end) {
|
||||||
|
var x = this.word8s('x').vars.x;
|
||||||
|
if (x < 0) end();
|
||||||
|
})
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.same(vars, {
|
||||||
|
a : 97,
|
||||||
|
bc : 25187,
|
||||||
|
x : -3,
|
||||||
|
});
|
||||||
|
})
|
||||||
|
.word8('y')
|
||||||
|
.vars
|
||||||
|
;
|
||||||
|
t.same(res, {
|
||||||
|
a : 97,
|
||||||
|
bc : 25187,
|
||||||
|
x : -3,
|
||||||
|
y : 9,
|
||||||
|
});
|
||||||
|
});
|
40
app/node_modules/binary/test/peek.js
generated
vendored
Normal file
40
app/node_modules/binary/test/peek.js
generated
vendored
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
var binary = require('../');
|
||||||
|
var test = require('tap').test;
|
||||||
|
var EventEmitter = require('events').EventEmitter;
|
||||||
|
|
||||||
|
test('peek', function (t) {
|
||||||
|
t.plan(4);
|
||||||
|
var bufs = [
|
||||||
|
new Buffer([ 6, 1, 6, 1, 6, 9, 0, 0, 0, 97 ]),
|
||||||
|
new Buffer([ 98, 99 ]),
|
||||||
|
new Buffer([ 100, 101, 102 ]),
|
||||||
|
];
|
||||||
|
|
||||||
|
var stream = new EventEmitter;
|
||||||
|
var iv = setInterval(function () {
|
||||||
|
var buf = bufs.shift();
|
||||||
|
if (buf) stream.emit('data', buf)
|
||||||
|
else clearInterval(iv)
|
||||||
|
}, 20);
|
||||||
|
|
||||||
|
binary.stream(stream)
|
||||||
|
.buffer('sixone', 5)
|
||||||
|
.peek(function () {
|
||||||
|
this.word32le('len');
|
||||||
|
})
|
||||||
|
.buffer('buf', 'len')
|
||||||
|
.word8('x')
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.same(
|
||||||
|
[].slice.call(vars.sixone),
|
||||||
|
[].slice.call(new Buffer([ 6, 1, 6, 1, 6 ]))
|
||||||
|
);
|
||||||
|
t.same(vars.buf.length, vars.len);
|
||||||
|
t.same(
|
||||||
|
[].slice.call(vars.buf),
|
||||||
|
[ 9, 0, 0, 0, 97, 98, 99, 100, 101 ]
|
||||||
|
);
|
||||||
|
t.same(vars.x, 102);
|
||||||
|
})
|
||||||
|
;
|
||||||
|
});
|
49
app/node_modules/binary/test/pipe.js
generated
vendored
Normal file
49
app/node_modules/binary/test/pipe.js
generated
vendored
Normal file
@@ -0,0 +1,49 @@
|
|||||||
|
var binary = require('../');
|
||||||
|
var test = require('tap').test;
|
||||||
|
var Stream = require('stream').Stream;
|
||||||
|
|
||||||
|
test('loop', function (t) {
|
||||||
|
t.plan(3 * 2 + 1);
|
||||||
|
|
||||||
|
var rs = new Stream;
|
||||||
|
rs.readable = true;
|
||||||
|
|
||||||
|
var ws = binary()
|
||||||
|
.loop(function (end, vars) {
|
||||||
|
t.strictEqual(vars, this.vars);
|
||||||
|
this
|
||||||
|
.word16lu('a')
|
||||||
|
.word8u('b')
|
||||||
|
.word8s('c')
|
||||||
|
.tap(function (vars_) {
|
||||||
|
t.strictEqual(vars, vars_);
|
||||||
|
if (vars.c < 0) end();
|
||||||
|
})
|
||||||
|
;
|
||||||
|
})
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.same(vars, { a : 1337, b : 55, c : -5 });
|
||||||
|
})
|
||||||
|
;
|
||||||
|
rs.pipe(ws);
|
||||||
|
|
||||||
|
setTimeout(function () {
|
||||||
|
rs.emit('data', new Buffer([ 2, 10, 88 ]));
|
||||||
|
}, 10);
|
||||||
|
setTimeout(function () {
|
||||||
|
rs.emit('data', new Buffer([ 100, 3, 6, 242, 30 ]));
|
||||||
|
}, 20);
|
||||||
|
setTimeout(function () {
|
||||||
|
rs.emit('data', new Buffer([ 60, 60, 199, 44 ]));
|
||||||
|
}, 30);
|
||||||
|
|
||||||
|
setTimeout(function () {
|
||||||
|
rs.emit('data', new Buffer([ 57, 5 ]));
|
||||||
|
}, 80);
|
||||||
|
setTimeout(function () {
|
||||||
|
rs.emit('data', new Buffer([ 55, 251 ]));
|
||||||
|
}, 90);
|
||||||
|
setTimeout(function () {
|
||||||
|
rs.emit('end');
|
||||||
|
}, 100);
|
||||||
|
});
|
29
app/node_modules/binary/test/posbs.js
generated
vendored
Normal file
29
app/node_modules/binary/test/posbs.js
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
var binary = require('../');
|
||||||
|
var test = require('tap').test;
|
||||||
|
|
||||||
|
test('posbs', function (t) {
|
||||||
|
t.plan(4);
|
||||||
|
// note: can't store 12667700813876161 exactly in an ieee float
|
||||||
|
|
||||||
|
var buf = new Buffer([
|
||||||
|
30, // a == -30
|
||||||
|
9, 37, // b == -2341
|
||||||
|
0, 12, 10, 20, // c == -789012
|
||||||
|
0, 45, 1, 52, 239, 33, 203, 193, // d == 12667700813876161
|
||||||
|
]);
|
||||||
|
|
||||||
|
binary.parse(buf)
|
||||||
|
.word8bs('a')
|
||||||
|
.word16bs('b')
|
||||||
|
.word32bs('c')
|
||||||
|
.word64bs('d')
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.same(vars.a, 30);
|
||||||
|
t.same(vars.b, 2341);
|
||||||
|
t.same(vars.c, 789012);
|
||||||
|
t.ok(
|
||||||
|
Math.abs(vars.d - 12667700813876161) < 1000
|
||||||
|
);
|
||||||
|
})
|
||||||
|
;
|
||||||
|
});
|
29
app/node_modules/binary/test/posls.js
generated
vendored
Normal file
29
app/node_modules/binary/test/posls.js
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
var binary = require('../');
|
||||||
|
var test = require('tap').test;
|
||||||
|
|
||||||
|
test('posls', function (t) {
|
||||||
|
t.plan(4);
|
||||||
|
|
||||||
|
// note: can't store 12667700813876161 exactly in an ieee float
|
||||||
|
var buf = new Buffer([
|
||||||
|
30, // a == -30
|
||||||
|
37, 9, // b == -2341
|
||||||
|
20, 10, 12, 0, // c == -789012
|
||||||
|
193, 203, 33, 239, 52, 1, 45, 0, // d == 12667700813876161
|
||||||
|
]);
|
||||||
|
|
||||||
|
binary.parse(buf)
|
||||||
|
.word8ls('a')
|
||||||
|
.word16ls('b')
|
||||||
|
.word32ls('c')
|
||||||
|
.word64ls('d')
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.same(vars.a, 30);
|
||||||
|
t.same(vars.b, 2341);
|
||||||
|
t.same(vars.c, 789012);
|
||||||
|
t.ok(
|
||||||
|
Math.abs(vars.d - 12667700813876161) < 1000
|
||||||
|
);
|
||||||
|
})
|
||||||
|
;
|
||||||
|
});
|
33
app/node_modules/binary/test/scan.js
generated
vendored
Normal file
33
app/node_modules/binary/test/scan.js
generated
vendored
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
var binary = require('../');
|
||||||
|
var test = require('tap').test;
|
||||||
|
var EventEmitter = require('events').EventEmitter;
|
||||||
|
|
||||||
|
test('scan', function (t) {
|
||||||
|
t.plan(4);
|
||||||
|
|
||||||
|
var em = new EventEmitter;
|
||||||
|
binary(em)
|
||||||
|
.word8('a')
|
||||||
|
.scan('l1', new Buffer('\r\n'))
|
||||||
|
.scan('l2', '\r\n')
|
||||||
|
.word8('z')
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.same(vars.a, 99);
|
||||||
|
t.same(vars.l1.toString(), 'foo bar');
|
||||||
|
t.same(vars.l2.toString(), 'baz');
|
||||||
|
t.same(vars.z, 42);
|
||||||
|
})
|
||||||
|
;
|
||||||
|
|
||||||
|
setTimeout(function () {
|
||||||
|
em.emit('data', new Buffer([99,0x66,0x6f,0x6f,0x20]));
|
||||||
|
}, 20);
|
||||||
|
|
||||||
|
setTimeout(function () {
|
||||||
|
em.emit('data', new Buffer('bar\r'));
|
||||||
|
}, 40);
|
||||||
|
|
||||||
|
setTimeout(function () {
|
||||||
|
em.emit('data', new Buffer('\nbaz\r\n*'));
|
||||||
|
}, 60);
|
||||||
|
});
|
18
app/node_modules/binary/test/scan_buf.js
generated
vendored
Normal file
18
app/node_modules/binary/test/scan_buf.js
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
var binary = require('../');
|
||||||
|
var test = require('tap').test;
|
||||||
|
|
||||||
|
test('scan buf', function (t) {
|
||||||
|
t.plan(4);
|
||||||
|
|
||||||
|
var vars = binary(new Buffer('\x63foo bar\r\nbaz\r\n*'))
|
||||||
|
.word8('a')
|
||||||
|
.scan('l1', new Buffer('\r\n'))
|
||||||
|
.scan('l2', '\r\n')
|
||||||
|
.word8('z')
|
||||||
|
.vars
|
||||||
|
;
|
||||||
|
t.same(vars.a, 99);
|
||||||
|
t.same(vars.z, 42);
|
||||||
|
t.same(vars.l1.toString(), 'foo bar');
|
||||||
|
t.same(vars.l2.toString(), 'baz');
|
||||||
|
});
|
16
app/node_modules/binary/test/scan_buf_null.js
generated
vendored
Normal file
16
app/node_modules/binary/test/scan_buf_null.js
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
var binary = require('../');
|
||||||
|
var test = require('tap').test;
|
||||||
|
|
||||||
|
test('scan buf null', function (t) {
|
||||||
|
t.plan(3);
|
||||||
|
var vars = binary(new Buffer('\x63foo bar baz'))
|
||||||
|
.word8('a')
|
||||||
|
.scan('b', '\r\n')
|
||||||
|
.word8('c')
|
||||||
|
.vars
|
||||||
|
;
|
||||||
|
|
||||||
|
t.same(vars.a, 99);
|
||||||
|
t.same(vars.b.toString(), 'foo bar baz');
|
||||||
|
t.strictEqual(vars.c, null);
|
||||||
|
});
|
58
app/node_modules/binary/test/skip.js
generated
vendored
Normal file
58
app/node_modules/binary/test/skip.js
generated
vendored
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
var binary = require('../');
|
||||||
|
var test = require('tap').test;
|
||||||
|
var EventEmitter = require('events').EventEmitter;
|
||||||
|
var seq = require('seq');
|
||||||
|
|
||||||
|
test('skip', function (t) {
|
||||||
|
t.plan(7);
|
||||||
|
var em = new EventEmitter;
|
||||||
|
var state = 0;
|
||||||
|
|
||||||
|
binary(em)
|
||||||
|
.word16lu('a')
|
||||||
|
.tap(function () { state = 1 })
|
||||||
|
.skip(7)
|
||||||
|
.tap(function () { state = 2 })
|
||||||
|
.word8('b')
|
||||||
|
.tap(function () { state = 3 })
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.same(state, 3);
|
||||||
|
t.same(vars, {
|
||||||
|
a : 2569,
|
||||||
|
b : 8,
|
||||||
|
});
|
||||||
|
})
|
||||||
|
;
|
||||||
|
|
||||||
|
seq()
|
||||||
|
.seq(setTimeout, seq, 20)
|
||||||
|
.seq(function () {
|
||||||
|
t.same(state, 0);
|
||||||
|
em.emit('data', new Buffer([ 9 ]));
|
||||||
|
this(null);
|
||||||
|
})
|
||||||
|
.seq(setTimeout, seq, 5)
|
||||||
|
.seq(function () {
|
||||||
|
t.same(state, 0);
|
||||||
|
em.emit('data', new Buffer([ 10, 1, 2 ]));
|
||||||
|
this(null);
|
||||||
|
})
|
||||||
|
.seq(setTimeout, seq, 30)
|
||||||
|
.seq(function () {
|
||||||
|
t.same(state, 1);
|
||||||
|
em.emit('data', new Buffer([ 3, 4, 5 ]));
|
||||||
|
this(null);
|
||||||
|
})
|
||||||
|
.seq(setTimeout, seq, 15)
|
||||||
|
.seq(function () {
|
||||||
|
t.same(state, 1);
|
||||||
|
em.emit('data', new Buffer([ 6, 7 ]));
|
||||||
|
this(null);
|
||||||
|
})
|
||||||
|
.seq(function () {
|
||||||
|
t.same(state, 2);
|
||||||
|
em.emit('data', new Buffer([ 8 ]));
|
||||||
|
this(null);
|
||||||
|
})
|
||||||
|
;
|
||||||
|
});
|
34
app/node_modules/binary/test/split.js
generated
vendored
Normal file
34
app/node_modules/binary/test/split.js
generated
vendored
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
var binary = require('../');
|
||||||
|
var test = require('tap').test;
|
||||||
|
var EventEmitter = require('events').EventEmitter;
|
||||||
|
|
||||||
|
test('split', function (t) {
|
||||||
|
t.plan(1);
|
||||||
|
|
||||||
|
var em = new EventEmitter;
|
||||||
|
binary.stream(em)
|
||||||
|
.word8('a')
|
||||||
|
.word16be('bc')
|
||||||
|
.word32ls('x')
|
||||||
|
.word32bs('y')
|
||||||
|
.tap(function (vars) {
|
||||||
|
t.same(vars, {
|
||||||
|
a : 97,
|
||||||
|
bc : 25187,
|
||||||
|
x : 621609828,
|
||||||
|
y : 621609828,
|
||||||
|
});
|
||||||
|
})
|
||||||
|
;
|
||||||
|
|
||||||
|
em.emit('data', new Buffer([ 97, 98 ]));
|
||||||
|
setTimeout(function () {
|
||||||
|
em.emit('data', new Buffer([ 99, 100 ]));
|
||||||
|
}, 25);
|
||||||
|
setTimeout(function () {
|
||||||
|
em.emit('data', new Buffer([ 3, 13, 37, 37 ]));
|
||||||
|
}, 30);
|
||||||
|
setTimeout(function () {
|
||||||
|
em.emit('data', new Buffer([ 13, 3, 100 ]));
|
||||||
|
}, 40);
|
||||||
|
});
|
21
app/node_modules/bluebird/LICENSE
generated
vendored
Normal file
21
app/node_modules/bluebird/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2013-2017 Petka Antonov
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
52
app/node_modules/bluebird/README.md
generated
vendored
Normal file
52
app/node_modules/bluebird/README.md
generated
vendored
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
<a href="http://promisesaplus.com/">
|
||||||
|
<img src="http://promisesaplus.com/assets/logo-small.png" alt="Promises/A+ logo"
|
||||||
|
title="Promises/A+ 1.1 compliant" align="right" />
|
||||||
|
</a>
|
||||||
|
|
||||||
|
[](https://travis-ci.org/petkaantonov/bluebird)
|
||||||
|
[](http://petkaantonov.github.io/bluebird/coverage/debug/index.html)
|
||||||
|
|
||||||
|
**Got a question?** Join us on [stackoverflow](http://stackoverflow.com/questions/tagged/bluebird), the [mailing list](https://groups.google.com/forum/#!forum/bluebird-js) or chat on [IRC](https://webchat.freenode.net/?channels=#promises)
|
||||||
|
|
||||||
|
# Introduction
|
||||||
|
|
||||||
|
Bluebird is a fully featured promise library with focus on innovative features and performance
|
||||||
|
|
||||||
|
See the [**bluebird website**](http://bluebirdjs.com/docs/getting-started.html) for further documentation, references and instructions. See the [**API reference**](http://bluebirdjs.com/docs/api-reference.html) here.
|
||||||
|
|
||||||
|
For bluebird 2.x documentation and files, see the [2.x tree](https://github.com/petkaantonov/bluebird/tree/2.x).
|
||||||
|
|
||||||
|
# Questions and issues
|
||||||
|
|
||||||
|
The [github issue tracker](https://github.com/petkaantonov/bluebird/issues) is **_only_** for bug reports and feature requests. Anything else, such as questions for help in using the library, should be posted in [StackOverflow](http://stackoverflow.com/questions/tagged/bluebird) under tags `promise` and `bluebird`.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Thanks
|
||||||
|
|
||||||
|
Thanks to BrowserStack for providing us with a free account which lets us support old browsers like IE8.
|
||||||
|
|
||||||
|
# License
|
||||||
|
|
||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2013-2017 Petka Antonov
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
|
|
1
app/node_modules/bluebird/changelog.md
generated
vendored
Normal file
1
app/node_modules/bluebird/changelog.md
generated
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
[http://bluebirdjs.com/docs/changelog.html](http://bluebirdjs.com/docs/changelog.html)
|
3781
app/node_modules/bluebird/js/browser/bluebird.core.js
generated
vendored
Normal file
3781
app/node_modules/bluebird/js/browser/bluebird.core.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
31
app/node_modules/bluebird/js/browser/bluebird.core.min.js
generated
vendored
Normal file
31
app/node_modules/bluebird/js/browser/bluebird.core.min.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
5623
app/node_modules/bluebird/js/browser/bluebird.js
generated
vendored
Normal file
5623
app/node_modules/bluebird/js/browser/bluebird.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
31
app/node_modules/bluebird/js/browser/bluebird.min.js
generated
vendored
Normal file
31
app/node_modules/bluebird/js/browser/bluebird.min.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
21
app/node_modules/bluebird/js/release/any.js
generated
vendored
Normal file
21
app/node_modules/bluebird/js/release/any.js
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
"use strict";
|
||||||
|
module.exports = function(Promise) {
|
||||||
|
var SomePromiseArray = Promise._SomePromiseArray;
|
||||||
|
function any(promises) {
|
||||||
|
var ret = new SomePromiseArray(promises);
|
||||||
|
var promise = ret.promise();
|
||||||
|
ret.setHowMany(1);
|
||||||
|
ret.setUnwrap();
|
||||||
|
ret.init();
|
||||||
|
return promise;
|
||||||
|
}
|
||||||
|
|
||||||
|
Promise.any = function (promises) {
|
||||||
|
return any(promises);
|
||||||
|
};
|
||||||
|
|
||||||
|
Promise.prototype.any = function () {
|
||||||
|
return any(this);
|
||||||
|
};
|
||||||
|
|
||||||
|
};
|
55
app/node_modules/bluebird/js/release/assert.js
generated
vendored
Normal file
55
app/node_modules/bluebird/js/release/assert.js
generated
vendored
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
"use strict";
|
||||||
|
module.exports = (function(){
|
||||||
|
var AssertionError = (function() {
|
||||||
|
function AssertionError(a) {
|
||||||
|
this.constructor$(a);
|
||||||
|
this.message = a;
|
||||||
|
this.name = "AssertionError";
|
||||||
|
}
|
||||||
|
AssertionError.prototype = new Error();
|
||||||
|
AssertionError.prototype.constructor = AssertionError;
|
||||||
|
AssertionError.prototype.constructor$ = Error;
|
||||||
|
return AssertionError;
|
||||||
|
})();
|
||||||
|
|
||||||
|
function getParams(args) {
|
||||||
|
var params = [];
|
||||||
|
for (var i = 0; i < args.length; ++i) params.push("arg" + i);
|
||||||
|
return params;
|
||||||
|
}
|
||||||
|
|
||||||
|
function nativeAssert(callName, args, expect) {
|
||||||
|
try {
|
||||||
|
var params = getParams(args);
|
||||||
|
var constructorArgs = params;
|
||||||
|
constructorArgs.push("return " +
|
||||||
|
callName + "("+ params.join(",") + ");");
|
||||||
|
var fn = Function.apply(null, constructorArgs);
|
||||||
|
return fn.apply(null, args);
|
||||||
|
} catch (e) {
|
||||||
|
if (!(e instanceof SyntaxError)) {
|
||||||
|
throw e;
|
||||||
|
} else {
|
||||||
|
return expect;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return function assert(boolExpr, message) {
|
||||||
|
if (boolExpr === true) return;
|
||||||
|
|
||||||
|
if (typeof boolExpr === "string" &&
|
||||||
|
boolExpr.charAt(0) === "%") {
|
||||||
|
var nativeCallName = boolExpr;
|
||||||
|
var $_len = arguments.length;var args = new Array(Math.max($_len - 2, 0)); for(var $_i = 2; $_i < $_len; ++$_i) {args[$_i - 2] = arguments[$_i];};
|
||||||
|
if (nativeAssert(nativeCallName, args, message) === message) return;
|
||||||
|
message = (nativeCallName + " !== " + message);
|
||||||
|
}
|
||||||
|
|
||||||
|
var ret = new AssertionError(message);
|
||||||
|
if (Error.captureStackTrace) {
|
||||||
|
Error.captureStackTrace(ret, assert);
|
||||||
|
}
|
||||||
|
throw ret;
|
||||||
|
};
|
||||||
|
})();
|
161
app/node_modules/bluebird/js/release/async.js
generated
vendored
Normal file
161
app/node_modules/bluebird/js/release/async.js
generated
vendored
Normal file
@@ -0,0 +1,161 @@
|
|||||||
|
"use strict";
|
||||||
|
var firstLineError;
|
||||||
|
try {throw new Error(); } catch (e) {firstLineError = e;}
|
||||||
|
var schedule = require("./schedule");
|
||||||
|
var Queue = require("./queue");
|
||||||
|
var util = require("./util");
|
||||||
|
|
||||||
|
function Async() {
|
||||||
|
this._customScheduler = false;
|
||||||
|
this._isTickUsed = false;
|
||||||
|
this._lateQueue = new Queue(16);
|
||||||
|
this._normalQueue = new Queue(16);
|
||||||
|
this._haveDrainedQueues = false;
|
||||||
|
this._trampolineEnabled = true;
|
||||||
|
var self = this;
|
||||||
|
this.drainQueues = function () {
|
||||||
|
self._drainQueues();
|
||||||
|
};
|
||||||
|
this._schedule = schedule;
|
||||||
|
}
|
||||||
|
|
||||||
|
Async.prototype.setScheduler = function(fn) {
|
||||||
|
var prev = this._schedule;
|
||||||
|
this._schedule = fn;
|
||||||
|
this._customScheduler = true;
|
||||||
|
return prev;
|
||||||
|
};
|
||||||
|
|
||||||
|
Async.prototype.hasCustomScheduler = function() {
|
||||||
|
return this._customScheduler;
|
||||||
|
};
|
||||||
|
|
||||||
|
Async.prototype.enableTrampoline = function() {
|
||||||
|
this._trampolineEnabled = true;
|
||||||
|
};
|
||||||
|
|
||||||
|
Async.prototype.disableTrampolineIfNecessary = function() {
|
||||||
|
if (util.hasDevTools) {
|
||||||
|
this._trampolineEnabled = false;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
Async.prototype.haveItemsQueued = function () {
|
||||||
|
return this._isTickUsed || this._haveDrainedQueues;
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
Async.prototype.fatalError = function(e, isNode) {
|
||||||
|
if (isNode) {
|
||||||
|
process.stderr.write("Fatal " + (e instanceof Error ? e.stack : e) +
|
||||||
|
"\n");
|
||||||
|
process.exit(2);
|
||||||
|
} else {
|
||||||
|
this.throwLater(e);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
Async.prototype.throwLater = function(fn, arg) {
|
||||||
|
if (arguments.length === 1) {
|
||||||
|
arg = fn;
|
||||||
|
fn = function () { throw arg; };
|
||||||
|
}
|
||||||
|
if (typeof setTimeout !== "undefined") {
|
||||||
|
setTimeout(function() {
|
||||||
|
fn(arg);
|
||||||
|
}, 0);
|
||||||
|
} else try {
|
||||||
|
this._schedule(function() {
|
||||||
|
fn(arg);
|
||||||
|
});
|
||||||
|
} catch (e) {
|
||||||
|
throw new Error("No async scheduler available\u000a\u000a See http://goo.gl/MqrFmX\u000a");
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
function AsyncInvokeLater(fn, receiver, arg) {
|
||||||
|
this._lateQueue.push(fn, receiver, arg);
|
||||||
|
this._queueTick();
|
||||||
|
}
|
||||||
|
|
||||||
|
function AsyncInvoke(fn, receiver, arg) {
|
||||||
|
this._normalQueue.push(fn, receiver, arg);
|
||||||
|
this._queueTick();
|
||||||
|
}
|
||||||
|
|
||||||
|
function AsyncSettlePromises(promise) {
|
||||||
|
this._normalQueue._pushOne(promise);
|
||||||
|
this._queueTick();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!util.hasDevTools) {
|
||||||
|
Async.prototype.invokeLater = AsyncInvokeLater;
|
||||||
|
Async.prototype.invoke = AsyncInvoke;
|
||||||
|
Async.prototype.settlePromises = AsyncSettlePromises;
|
||||||
|
} else {
|
||||||
|
Async.prototype.invokeLater = function (fn, receiver, arg) {
|
||||||
|
if (this._trampolineEnabled) {
|
||||||
|
AsyncInvokeLater.call(this, fn, receiver, arg);
|
||||||
|
} else {
|
||||||
|
this._schedule(function() {
|
||||||
|
setTimeout(function() {
|
||||||
|
fn.call(receiver, arg);
|
||||||
|
}, 100);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
Async.prototype.invoke = function (fn, receiver, arg) {
|
||||||
|
if (this._trampolineEnabled) {
|
||||||
|
AsyncInvoke.call(this, fn, receiver, arg);
|
||||||
|
} else {
|
||||||
|
this._schedule(function() {
|
||||||
|
fn.call(receiver, arg);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
Async.prototype.settlePromises = function(promise) {
|
||||||
|
if (this._trampolineEnabled) {
|
||||||
|
AsyncSettlePromises.call(this, promise);
|
||||||
|
} else {
|
||||||
|
this._schedule(function() {
|
||||||
|
promise._settlePromises();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
Async.prototype._drainQueue = function(queue) {
|
||||||
|
while (queue.length() > 0) {
|
||||||
|
var fn = queue.shift();
|
||||||
|
if (typeof fn !== "function") {
|
||||||
|
fn._settlePromises();
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
var receiver = queue.shift();
|
||||||
|
var arg = queue.shift();
|
||||||
|
fn.call(receiver, arg);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
Async.prototype._drainQueues = function () {
|
||||||
|
this._drainQueue(this._normalQueue);
|
||||||
|
this._reset();
|
||||||
|
this._haveDrainedQueues = true;
|
||||||
|
this._drainQueue(this._lateQueue);
|
||||||
|
};
|
||||||
|
|
||||||
|
Async.prototype._queueTick = function () {
|
||||||
|
if (!this._isTickUsed) {
|
||||||
|
this._isTickUsed = true;
|
||||||
|
this._schedule(this.drainQueues);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
Async.prototype._reset = function () {
|
||||||
|
this._isTickUsed = false;
|
||||||
|
};
|
||||||
|
|
||||||
|
module.exports = Async;
|
||||||
|
module.exports.firstLineError = firstLineError;
|
67
app/node_modules/bluebird/js/release/bind.js
generated
vendored
Normal file
67
app/node_modules/bluebird/js/release/bind.js
generated
vendored
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
"use strict";
|
||||||
|
module.exports = function(Promise, INTERNAL, tryConvertToPromise, debug) {
|
||||||
|
var calledBind = false;
|
||||||
|
var rejectThis = function(_, e) {
|
||||||
|
this._reject(e);
|
||||||
|
};
|
||||||
|
|
||||||
|
var targetRejected = function(e, context) {
|
||||||
|
context.promiseRejectionQueued = true;
|
||||||
|
context.bindingPromise._then(rejectThis, rejectThis, null, this, e);
|
||||||
|
};
|
||||||
|
|
||||||
|
var bindingResolved = function(thisArg, context) {
|
||||||
|
if (((this._bitField & 50397184) === 0)) {
|
||||||
|
this._resolveCallback(context.target);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
var bindingRejected = function(e, context) {
|
||||||
|
if (!context.promiseRejectionQueued) this._reject(e);
|
||||||
|
};
|
||||||
|
|
||||||
|
Promise.prototype.bind = function (thisArg) {
|
||||||
|
if (!calledBind) {
|
||||||
|
calledBind = true;
|
||||||
|
Promise.prototype._propagateFrom = debug.propagateFromFunction();
|
||||||
|
Promise.prototype._boundValue = debug.boundValueFunction();
|
||||||
|
}
|
||||||
|
var maybePromise = tryConvertToPromise(thisArg);
|
||||||
|
var ret = new Promise(INTERNAL);
|
||||||
|
ret._propagateFrom(this, 1);
|
||||||
|
var target = this._target();
|
||||||
|
ret._setBoundTo(maybePromise);
|
||||||
|
if (maybePromise instanceof Promise) {
|
||||||
|
var context = {
|
||||||
|
promiseRejectionQueued: false,
|
||||||
|
promise: ret,
|
||||||
|
target: target,
|
||||||
|
bindingPromise: maybePromise
|
||||||
|
};
|
||||||
|
target._then(INTERNAL, targetRejected, undefined, ret, context);
|
||||||
|
maybePromise._then(
|
||||||
|
bindingResolved, bindingRejected, undefined, ret, context);
|
||||||
|
ret._setOnCancel(maybePromise);
|
||||||
|
} else {
|
||||||
|
ret._resolveCallback(target);
|
||||||
|
}
|
||||||
|
return ret;
|
||||||
|
};
|
||||||
|
|
||||||
|
Promise.prototype._setBoundTo = function (obj) {
|
||||||
|
if (obj !== undefined) {
|
||||||
|
this._bitField = this._bitField | 2097152;
|
||||||
|
this._boundTo = obj;
|
||||||
|
} else {
|
||||||
|
this._bitField = this._bitField & (~2097152);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
Promise.prototype._isBound = function () {
|
||||||
|
return (this._bitField & 2097152) === 2097152;
|
||||||
|
};
|
||||||
|
|
||||||
|
Promise.bind = function (thisArg, value) {
|
||||||
|
return Promise.resolve(value).bind(thisArg);
|
||||||
|
};
|
||||||
|
};
|
11
app/node_modules/bluebird/js/release/bluebird.js
generated
vendored
Normal file
11
app/node_modules/bluebird/js/release/bluebird.js
generated
vendored
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
"use strict";
|
||||||
|
var old;
|
||||||
|
if (typeof Promise !== "undefined") old = Promise;
|
||||||
|
function noConflict() {
|
||||||
|
try { if (Promise === bluebird) Promise = old; }
|
||||||
|
catch (e) {}
|
||||||
|
return bluebird;
|
||||||
|
}
|
||||||
|
var bluebird = require("./promise")();
|
||||||
|
bluebird.noConflict = noConflict;
|
||||||
|
module.exports = bluebird;
|
123
app/node_modules/bluebird/js/release/call_get.js
generated
vendored
Normal file
123
app/node_modules/bluebird/js/release/call_get.js
generated
vendored
Normal file
@@ -0,0 +1,123 @@
|
|||||||
|
"use strict";
|
||||||
|
var cr = Object.create;
|
||||||
|
if (cr) {
|
||||||
|
var callerCache = cr(null);
|
||||||
|
var getterCache = cr(null);
|
||||||
|
callerCache[" size"] = getterCache[" size"] = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = function(Promise) {
|
||||||
|
var util = require("./util");
|
||||||
|
var canEvaluate = util.canEvaluate;
|
||||||
|
var isIdentifier = util.isIdentifier;
|
||||||
|
|
||||||
|
var getMethodCaller;
|
||||||
|
var getGetter;
|
||||||
|
if (!false) {
|
||||||
|
var makeMethodCaller = function (methodName) {
|
||||||
|
return new Function("ensureMethod", " \n\
|
||||||
|
return function(obj) { \n\
|
||||||
|
'use strict' \n\
|
||||||
|
var len = this.length; \n\
|
||||||
|
ensureMethod(obj, 'methodName'); \n\
|
||||||
|
switch(len) { \n\
|
||||||
|
case 1: return obj.methodName(this[0]); \n\
|
||||||
|
case 2: return obj.methodName(this[0], this[1]); \n\
|
||||||
|
case 3: return obj.methodName(this[0], this[1], this[2]); \n\
|
||||||
|
case 0: return obj.methodName(); \n\
|
||||||
|
default: \n\
|
||||||
|
return obj.methodName.apply(obj, this); \n\
|
||||||
|
} \n\
|
||||||
|
}; \n\
|
||||||
|
".replace(/methodName/g, methodName))(ensureMethod);
|
||||||
|
};
|
||||||
|
|
||||||
|
var makeGetter = function (propertyName) {
|
||||||
|
return new Function("obj", " \n\
|
||||||
|
'use strict'; \n\
|
||||||
|
return obj.propertyName; \n\
|
||||||
|
".replace("propertyName", propertyName));
|
||||||
|
};
|
||||||
|
|
||||||
|
var getCompiled = function(name, compiler, cache) {
|
||||||
|
var ret = cache[name];
|
||||||
|
if (typeof ret !== "function") {
|
||||||
|
if (!isIdentifier(name)) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
ret = compiler(name);
|
||||||
|
cache[name] = ret;
|
||||||
|
cache[" size"]++;
|
||||||
|
if (cache[" size"] > 512) {
|
||||||
|
var keys = Object.keys(cache);
|
||||||
|
for (var i = 0; i < 256; ++i) delete cache[keys[i]];
|
||||||
|
cache[" size"] = keys.length - 256;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ret;
|
||||||
|
};
|
||||||
|
|
||||||
|
getMethodCaller = function(name) {
|
||||||
|
return getCompiled(name, makeMethodCaller, callerCache);
|
||||||
|
};
|
||||||
|
|
||||||
|
getGetter = function(name) {
|
||||||
|
return getCompiled(name, makeGetter, getterCache);
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function ensureMethod(obj, methodName) {
|
||||||
|
var fn;
|
||||||
|
if (obj != null) fn = obj[methodName];
|
||||||
|
if (typeof fn !== "function") {
|
||||||
|
var message = "Object " + util.classString(obj) + " has no method '" +
|
||||||
|
util.toString(methodName) + "'";
|
||||||
|
throw new Promise.TypeError(message);
|
||||||
|
}
|
||||||
|
return fn;
|
||||||
|
}
|
||||||
|
|
||||||
|
function caller(obj) {
|
||||||
|
var methodName = this.pop();
|
||||||
|
var fn = ensureMethod(obj, methodName);
|
||||||
|
return fn.apply(obj, this);
|
||||||
|
}
|
||||||
|
Promise.prototype.call = function (methodName) {
|
||||||
|
var $_len = arguments.length;var args = new Array(Math.max($_len - 1, 0)); for(var $_i = 1; $_i < $_len; ++$_i) {args[$_i - 1] = arguments[$_i];};
|
||||||
|
if (!false) {
|
||||||
|
if (canEvaluate) {
|
||||||
|
var maybeCaller = getMethodCaller(methodName);
|
||||||
|
if (maybeCaller !== null) {
|
||||||
|
return this._then(
|
||||||
|
maybeCaller, undefined, undefined, args, undefined);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
args.push(methodName);
|
||||||
|
return this._then(caller, undefined, undefined, args, undefined);
|
||||||
|
};
|
||||||
|
|
||||||
|
function namedGetter(obj) {
|
||||||
|
return obj[this];
|
||||||
|
}
|
||||||
|
function indexedGetter(obj) {
|
||||||
|
var index = +this;
|
||||||
|
if (index < 0) index = Math.max(0, index + obj.length);
|
||||||
|
return obj[index];
|
||||||
|
}
|
||||||
|
Promise.prototype.get = function (propertyName) {
|
||||||
|
var isIndex = (typeof propertyName === "number");
|
||||||
|
var getter;
|
||||||
|
if (!isIndex) {
|
||||||
|
if (canEvaluate) {
|
||||||
|
var maybeGetter = getGetter(propertyName);
|
||||||
|
getter = maybeGetter !== null ? maybeGetter : namedGetter;
|
||||||
|
} else {
|
||||||
|
getter = namedGetter;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
getter = indexedGetter;
|
||||||
|
}
|
||||||
|
return this._then(getter, undefined, undefined, propertyName, undefined);
|
||||||
|
};
|
||||||
|
};
|
129
app/node_modules/bluebird/js/release/cancel.js
generated
vendored
Normal file
129
app/node_modules/bluebird/js/release/cancel.js
generated
vendored
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
"use strict";
|
||||||
|
module.exports = function(Promise, PromiseArray, apiRejection, debug) {
|
||||||
|
var util = require("./util");
|
||||||
|
var tryCatch = util.tryCatch;
|
||||||
|
var errorObj = util.errorObj;
|
||||||
|
var async = Promise._async;
|
||||||
|
|
||||||
|
Promise.prototype["break"] = Promise.prototype.cancel = function() {
|
||||||
|
if (!debug.cancellation()) return this._warn("cancellation is disabled");
|
||||||
|
|
||||||
|
var promise = this;
|
||||||
|
var child = promise;
|
||||||
|
while (promise._isCancellable()) {
|
||||||
|
if (!promise._cancelBy(child)) {
|
||||||
|
if (child._isFollowing()) {
|
||||||
|
child._followee().cancel();
|
||||||
|
} else {
|
||||||
|
child._cancelBranched();
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
var parent = promise._cancellationParent;
|
||||||
|
if (parent == null || !parent._isCancellable()) {
|
||||||
|
if (promise._isFollowing()) {
|
||||||
|
promise._followee().cancel();
|
||||||
|
} else {
|
||||||
|
promise._cancelBranched();
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
} else {
|
||||||
|
if (promise._isFollowing()) promise._followee().cancel();
|
||||||
|
promise._setWillBeCancelled();
|
||||||
|
child = promise;
|
||||||
|
promise = parent;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
Promise.prototype._branchHasCancelled = function() {
|
||||||
|
this._branchesRemainingToCancel--;
|
||||||
|
};
|
||||||
|
|
||||||
|
Promise.prototype._enoughBranchesHaveCancelled = function() {
|
||||||
|
return this._branchesRemainingToCancel === undefined ||
|
||||||
|
this._branchesRemainingToCancel <= 0;
|
||||||
|
};
|
||||||
|
|
||||||
|
Promise.prototype._cancelBy = function(canceller) {
|
||||||
|
if (canceller === this) {
|
||||||
|
this._branchesRemainingToCancel = 0;
|
||||||
|
this._invokeOnCancel();
|
||||||
|
return true;
|
||||||
|
} else {
|
||||||
|
this._branchHasCancelled();
|
||||||
|
if (this._enoughBranchesHaveCancelled()) {
|
||||||
|
this._invokeOnCancel();
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
};
|
||||||
|
|
||||||
|
Promise.prototype._cancelBranched = function() {
|
||||||
|
if (this._enoughBranchesHaveCancelled()) {
|
||||||
|
this._cancel();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
Promise.prototype._cancel = function() {
|
||||||
|
if (!this._isCancellable()) return;
|
||||||
|
this._setCancelled();
|
||||||
|
async.invoke(this._cancelPromises, this, undefined);
|
||||||
|
};
|
||||||
|
|
||||||
|
Promise.prototype._cancelPromises = function() {
|
||||||
|
if (this._length() > 0) this._settlePromises();
|
||||||
|
};
|
||||||
|
|
||||||
|
Promise.prototype._unsetOnCancel = function() {
|
||||||
|
this._onCancelField = undefined;
|
||||||
|
};
|
||||||
|
|
||||||
|
Promise.prototype._isCancellable = function() {
|
||||||
|
return this.isPending() && !this._isCancelled();
|
||||||
|
};
|
||||||
|
|
||||||
|
Promise.prototype.isCancellable = function() {
|
||||||
|
return this.isPending() && !this.isCancelled();
|
||||||
|
};
|
||||||
|
|
||||||
|
Promise.prototype._doInvokeOnCancel = function(onCancelCallback, internalOnly) {
|
||||||
|
if (util.isArray(onCancelCallback)) {
|
||||||
|
for (var i = 0; i < onCancelCallback.length; ++i) {
|
||||||
|
this._doInvokeOnCancel(onCancelCallback[i], internalOnly);
|
||||||
|
}
|
||||||
|
} else if (onCancelCallback !== undefined) {
|
||||||
|
if (typeof onCancelCallback === "function") {
|
||||||
|
if (!internalOnly) {
|
||||||
|
var e = tryCatch(onCancelCallback).call(this._boundValue());
|
||||||
|
if (e === errorObj) {
|
||||||
|
this._attachExtraTrace(e.e);
|
||||||
|
async.throwLater(e.e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
onCancelCallback._resultCancelled(this);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
Promise.prototype._invokeOnCancel = function() {
|
||||||
|
var onCancelCallback = this._onCancel();
|
||||||
|
this._unsetOnCancel();
|
||||||
|
async.invoke(this._doInvokeOnCancel, this, onCancelCallback);
|
||||||
|
};
|
||||||
|
|
||||||
|
Promise.prototype._invokeInternalOnCancel = function() {
|
||||||
|
if (this._isCancellable()) {
|
||||||
|
this._doInvokeOnCancel(this._onCancel(), true);
|
||||||
|
this._unsetOnCancel();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
Promise.prototype._resultCancelled = function() {
|
||||||
|
this.cancel();
|
||||||
|
};
|
||||||
|
|
||||||
|
};
|
42
app/node_modules/bluebird/js/release/catch_filter.js
generated
vendored
Normal file
42
app/node_modules/bluebird/js/release/catch_filter.js
generated
vendored
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
"use strict";
|
||||||
|
module.exports = function(NEXT_FILTER) {
|
||||||
|
var util = require("./util");
|
||||||
|
var getKeys = require("./es5").keys;
|
||||||
|
var tryCatch = util.tryCatch;
|
||||||
|
var errorObj = util.errorObj;
|
||||||
|
|
||||||
|
function catchFilter(instances, cb, promise) {
|
||||||
|
return function(e) {
|
||||||
|
var boundTo = promise._boundValue();
|
||||||
|
predicateLoop: for (var i = 0; i < instances.length; ++i) {
|
||||||
|
var item = instances[i];
|
||||||
|
|
||||||
|
if (item === Error ||
|
||||||
|
(item != null && item.prototype instanceof Error)) {
|
||||||
|
if (e instanceof item) {
|
||||||
|
return tryCatch(cb).call(boundTo, e);
|
||||||
|
}
|
||||||
|
} else if (typeof item === "function") {
|
||||||
|
var matchesPredicate = tryCatch(item).call(boundTo, e);
|
||||||
|
if (matchesPredicate === errorObj) {
|
||||||
|
return matchesPredicate;
|
||||||
|
} else if (matchesPredicate) {
|
||||||
|
return tryCatch(cb).call(boundTo, e);
|
||||||
|
}
|
||||||
|
} else if (util.isObject(e)) {
|
||||||
|
var keys = getKeys(item);
|
||||||
|
for (var j = 0; j < keys.length; ++j) {
|
||||||
|
var key = keys[j];
|
||||||
|
if (item[key] != e[key]) {
|
||||||
|
continue predicateLoop;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return tryCatch(cb).call(boundTo, e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return NEXT_FILTER;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return catchFilter;
|
||||||
|
};
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user