mirror of https://github.com/S2-/gitlit synced 2025-08-03 21:00:04 +02:00

remove electron-in-page-search

This commit is contained in:
s2
2019-06-06 15:44:24 +02:00
parent e2a57318a7
commit c5f9b551ab
92637 changed files with 636010 additions and 15 deletions

app/node_modules/decompress-zip/README.md generated vendored Normal file

@@ -0,0 +1,74 @@
# decompress-zip [![Build Status](https://travis-ci.org/bower/decompress-zip.svg?branch=master)](https://travis-ci.org/bower/decompress-zip) [![Coverage Status](https://coveralls.io/repos/bower/decompress-zip/badge.png?branch=master)](https://coveralls.io/r/bower/decompress-zip?branch=master)
> Extract files from a ZIP archive
## Usage
### .extract(options)
Extracts the contents of the ZIP archive passed to the `DecompressZip` constructor.
Returns an EventEmitter. It emits `error` on an error, `progress` after each file is extracted (with the zero-based file index and the total file count), and `extract` when extraction has completed. The value passed to the `extract` event is a basic log of each file and how it was compressed.
**Options**
- **path** *String* - Path to extract into (default `.`)
- **follow** *Boolean* - If true, rather than creating symlinks stored in the archive as symlinks, make a shallow copy of their targets instead (default `false`)
- **filter** *Function* - A function that will be called once for each file in the archive. It takes one argument which is an object containing details of the file. Return true for any file that you want to extract, and false otherwise. (default `null`)
- **strip** *Number* - Remove leading folders in the path structure. Equivalent to `--strip-components` for tar.
- **restrict** *Boolean* - If true, will restrict files from being created outside `options.path`. Setting to `false` has significant security [implications](https://snyk.io/research/zip-slip-vulnerability) if you are extracting untrusted data. (default `true`)
```js
var DecompressZip = require('decompress-zip');
var unzipper = new DecompressZip(filename);
unzipper.on('error', function (err) {
console.log('Caught an error');
});
unzipper.on('extract', function (log) {
console.log('Finished extracting');
});
unzipper.on('progress', function (fileIndex, fileCount) {
console.log('Extracted file ' + (fileIndex + 1) + ' of ' + fileCount);
});
unzipper.extract({
path: 'some/path',
filter: function (file) {
return file.type !== "SymbolicLink";
}
});
```
If `path` does not exist, decompress-zip will attempt to create it first.
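A second, minimal sketch (reusing the same hypothetical `filename` as above) that combines the `strip`, `follow` and `restrict` options:
```js
var DecompressZip = require('decompress-zip');
var unzipper = new DecompressZip(filename);

unzipper.on('error', function (err) {
    console.log('Caught an error', err);
});

unzipper.on('extract', function (log) {
    console.log('Finished extracting', log);
});

unzipper.extract({
    path: 'some/path', // created if it does not exist
    strip: 1,          // drop the leading folder from every entry's path
    follow: true,      // shallow-copy symlink targets instead of creating links
    restrict: true     // refuse entries that would land outside 'some/path' (the default)
});
```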
### .list()
Much like extract, except:
- the success event is `list`
- the data for the event is an array of paths
- no files are actually extracted
- there are no options
```js
var DecompressZip = require('decompress-zip');
var unzipper = new DecompressZip(filename);
unzipper.on('error', function (err) {
console.log('Caught an error');
});
unzipper.on('list', function (files) {
console.log('The archive contains:');
console.log(files);
});
unzipper.list();
```
## License
MIT © Bower team

app/node_modules/decompress-zip/bin/decompress-zip generated vendored Normal file

@@ -0,0 +1,83 @@
#!/usr/bin/env node
'use strict';
var nopt = require('nopt');
var path = require('path');
var version = require('../package.json').version;
var knownOptions = {
'list': Boolean,
'extract': Boolean,
'path': path
};
var shortcuts = {
'x': ['--extract'],
'l': ['--list'],
'p': ['--path'],
'v': ['--version']
};
var parsedOptions = nopt(knownOptions, shortcuts);
var pad = function (string, length) {
string = String(string);
if (length <= string.length) {
return string;
}
    return string + (new Array(length - string.length + 1).join(' '));
};
var octal = function (number, digits) {
var result = '';
for (var i = 0; i < digits; i++) {
result = (number & 0x07) + result;
number >>= 3;
}
return result;
};
var DecompressZip = require('../lib/decompress-zip');
var zip = new DecompressZip(parsedOptions.argv.remain[0]);
zip.on('file', function (file) {
console.log([octal(file.mode, 4), pad(file.type, 13), pad(file.compressedSize, 10), pad(file.uncompressedSize, 10), file.path].join(' '));
});
zip.on('list', function (fileList) {
// console.log(fileList);
});
zip.on('extract', function (result) {
console.log(result);
});
zip.on('error', function (error) {
console.error(error.message, error.stack);
});
if (parsedOptions.version) {
console.log('version ' + version);
} else if (parsedOptions.list) {
console.log('Mode Type Zip size Full size Path');
console.log('---- ---- -------- --------- ----');
zip.list();
} else if (parsedOptions.extract) {
var options = {};
if (parsedOptions.path) {
options.path = parsedOptions.path;
}
zip.extract(options);
} else {
console.log('Usage: decompress-zip <options> <file>');
console.log(' -x, --extract extract the given file');
console.log(' -l, --list list the contents of the given file');
console.log(' -v, --version show the version number');
console.log(' -p, --path <path> extract the file into <path>');
console.log(' -h, --help show this message');
}

app/node_modules/decompress-zip/changelog.md generated vendored Normal file

@@ -0,0 +1,7 @@
# 0.3.0
- Enable file mode preservation
# 0.2.1
- Update graceful-fs to 4.x

app/node_modules/decompress-zip/lib/decompress-zip.js generated vendored Normal file

@@ -0,0 +1,323 @@
'use strict';
// The zip file spec is at http://www.pkware.com/documents/casestudies/APPNOTE.TXT
// TODO: There is a fair chunk of the spec that I have ignored. Need to add
// assertions everywhere to make sure that we are not dealing with a ZIP type
// that I haven't designed for. Things like spanning archives, non-DEFLATE
// compression, encryption, etc.
var fs = require('graceful-fs');
var Q = require('q');
var path = require('path');
var util = require('util');
var events = require('events');
var structures = require('./structures');
var signatures = require('./signatures');
var extractors = require('./extractors');
var FileDetails = require('./file-details');
var fstat = Q.denodeify(fs.fstat);
var read = Q.denodeify(fs.read);
var fopen = Q.denodeify(fs.open);
function DecompressZip(filename) {
events.EventEmitter.call(this);
this.filename = filename;
this.stats = null;
this.fd = null;
this.chunkSize = 1024 * 1024; // Buffer up to 1Mb at a time
this.dirCache = {};
// When we need a resource, we should check if there is a promise for it
// already and use that. If the promise is already fulfilled we don't do the
// async work again and we get to queue up dependent tasks.
this._p = {}; // _p instead of _promises because it is a lot easier to read
}
util.inherits(DecompressZip, events.EventEmitter);
DecompressZip.prototype.openFile = function () {
return fopen(this.filename, 'r');
};
DecompressZip.prototype.closeFile = function () {
if (this.fd) {
fs.closeSync(this.fd);
this.fd = null;
}
};
DecompressZip.prototype.statFile = function (fd) {
this.fd = fd;
return fstat(fd);
};
DecompressZip.prototype.list = function () {
var self = this;
this.getFiles()
.then(function (files) {
var result = [];
files.forEach(function (file) {
result.push(file.path);
});
self.emit('list', result);
})
.fail(function (error) {
self.emit('error', error);
})
.fin(self.closeFile.bind(self));
return this;
};
DecompressZip.prototype.extract = function (options) {
var self = this;
options = options || {};
options.path = options.path || process.cwd();
options.filter = options.filter || null;
options.follow = !!options.follow;
options.strip = +options.strip || 0;
options.restrict = options.restrict !== false;
this.getFiles()
.then(function (files) {
var copies = [];
if (options.restrict) {
files = files.map(function (file) {
var destination = path.join(options.path, file.path);
// The destination path must not be outside options.path
if (destination.indexOf(options.path) !== 0) {
throw new Error('You cannot extract a file outside of the target path');
}
return file;
});
}
if (options.filter) {
files = files.filter(options.filter);
}
if (options.follow) {
copies = files.filter(function (file) {
return file.type === 'SymbolicLink';
});
files = files.filter(function (file) {
return file.type !== 'SymbolicLink';
});
}
if (options.strip) {
files = files.map(function (file) {
if (file.type !== 'Directory') {
// we don't use `path.sep` as we're using `/` in Windows too
var dir = file.parent.split('/');
var filename = file.filename;
if (options.strip > dir.length) {
throw new Error('You cannot strip more levels than there are directories');
} else {
dir = dir.slice(options.strip);
}
file.path = path.join(dir.join(path.sep), filename);
return file;
}
});
}
return self.extractFiles(files, options)
.then(self.extractFiles.bind(self, copies, options));
})
.then(function (results) {
self.emit('extract', results);
})
.fail(function (error) {
self.emit('error', error);
})
.fin(self.closeFile.bind(self));
return this;
};
// Utility methods
DecompressZip.prototype.getSearchBuffer = function (stats) {
var size = Math.min(stats.size, this.chunkSize);
this.stats = stats;
return this.getBuffer(stats.size - size, stats.size);
};
DecompressZip.prototype.getBuffer = function (start, end) {
var size = end - start;
return read(this.fd, new Buffer(size), 0, size, start)
.then(function (result) {
return result[1];
});
};
DecompressZip.prototype.findEndOfDirectory = function (buffer) {
var index = buffer.length - 3;
var chunk = '';
// Apparently the ZIP spec is not very good and it is impossible to
// guarantee that you have read a zip file correctly, or to determine
// the location of the CD without hunting.
// Search backwards through the buffer, as it is very likely to be near the
// end of the file.
while (index > Math.max(buffer.length - this.chunkSize, 0) && chunk !== signatures.END_OF_CENTRAL_DIRECTORY) {
index--;
chunk = buffer.readUInt32LE(index);
}
if (chunk !== signatures.END_OF_CENTRAL_DIRECTORY) {
throw new Error('Could not find the End of Central Directory Record');
}
return buffer.slice(index);
};
// Directory here means the ZIP Central Directory, not a folder
DecompressZip.prototype.readDirectory = function (recordBuffer) {
var record = structures.readEndRecord(recordBuffer);
return this.getBuffer(record.directoryOffset, record.directoryOffset + record.directorySize)
.then(structures.readDirectory.bind(null));
};
DecompressZip.prototype.getFiles = function () {
if (!this._p.getFiles) {
this._p.getFiles = this.openFile()
.then(this.statFile.bind(this))
.then(this.getSearchBuffer.bind(this))
.then(this.findEndOfDirectory.bind(this))
.then(this.readDirectory.bind(this))
.then(this.readFileEntries.bind(this));
}
return this._p.getFiles;
};
DecompressZip.prototype.readFileEntries = function (directory) {
var promises = [];
var files = [];
var self = this;
directory.forEach(function (directoryEntry, index) {
var start = directoryEntry.relativeOffsetOfLocalHeader;
var end = Math.min(self.stats.size, start + structures.maxFileEntrySize);
var fileDetails = new FileDetails(directoryEntry);
var promise = self.getBuffer(start, end)
.then(structures.readFileEntry.bind(null))
.then(function (fileEntry) {
var maxSize;
if (fileDetails.compressedSize > 0) {
maxSize = fileDetails.compressedSize;
} else {
maxSize = self.stats.size;
if (index < directory.length - 1) {
maxSize = directory[index + 1].relativeOffsetOfLocalHeader;
}
maxSize -= start + fileEntry.entryLength;
}
fileDetails._offset = start + fileEntry.entryLength;
fileDetails._maxSize = maxSize;
self.emit('file', fileDetails);
files[index] = fileDetails;
});
promises.push(promise);
});
return Q.all(promises)
.then(function () {
return files;
});
};
DecompressZip.prototype.extractFiles = function (files, options, results) {
var promises = [];
var self = this;
results = results || [];
var fileIndex = 0;
files.forEach(function (file) {
var promise = self.extractFile(file, options)
.then(function (result) {
self.emit('progress', fileIndex++, files.length);
results.push(result);
});
promises.push(promise);
});
return Q.all(promises)
.then(function () {
return results;
});
};
DecompressZip.prototype.extractFile = function (file, options) {
var destination = path.join(options.path, file.path);
// Possible compression methods:
// 0 - The file is stored (no compression)
// 1 - The file is Shrunk
// 2 - The file is Reduced with compression factor 1
// 3 - The file is Reduced with compression factor 2
// 4 - The file is Reduced with compression factor 3
// 5 - The file is Reduced with compression factor 4
// 6 - The file is Imploded
// 7 - Reserved for Tokenizing compression algorithm
// 8 - The file is Deflated
// 9 - Enhanced Deflating using Deflate64(tm)
// 10 - PKWARE Data Compression Library Imploding (old IBM TERSE)
// 11 - Reserved by PKWARE
// 12 - File is compressed using BZIP2 algorithm
// 13 - Reserved by PKWARE
// 14 - LZMA (EFS)
// 15 - Reserved by PKWARE
// 16 - Reserved by PKWARE
// 17 - Reserved by PKWARE
// 18 - File is compressed using IBM TERSE (new)
// 19 - IBM LZ77 z Architecture (PFS)
// 97 - WavPack compressed data
// 98 - PPMd version I, Rev 1
if (file.type === 'Directory') {
return extractors.folder(file, destination, this);
}
if (file.type === 'File') {
switch (file.compressionMethod) {
case 0:
return extractors.store(file, destination, this);
case 8:
return extractors.deflate(file, destination, this);
default:
throw new Error('Unsupported compression type');
}
}
if (file.type === 'SymbolicLink') {
if (options.follow) {
return extractors.copy(file, destination, this, options.path);
} else {
return extractors.symlink(file, destination, this, options.path);
}
}
throw new Error('Unsupported file type "' + file.type + '"');
};
module.exports = DecompressZip;

app/node_modules/decompress-zip/lib/extractors.js generated vendored Normal file

@@ -0,0 +1,184 @@
var stream = require('stream');
if (!stream.Readable) {
var stream = require('readable-stream');
}
var fs = require('graceful-fs');
var Q = require('q');
var path = require('path');
var zlib = require('zlib');
var touch = Q.denodeify(require('touch'));
var mkpath = Q.denodeify(require('mkpath'));
var writeFile = Q.denodeify(fs.writeFile);
var inflateRaw = Q.denodeify(zlib.inflateRaw);
var symlink = Q.denodeify(fs.symlink);
var stat = Q.denodeify(fs.stat);
// Use a cache of promises for building the directory tree. This allows us to
// correctly queue up file extractions until after their path has been created,
// avoid trying to create the path twice, and still be async.
var mkdir = function (dir, cache, mode) {
dir = path.normalize(path.resolve(process.cwd(), dir) + path.sep);
if (mode === undefined) {
mode = parseInt('777', 8) & (~process.umask());
}
if (!cache[dir]) {
var parent;
if (fs.existsSync(dir)) {
parent = new Q();
} else {
parent = mkdir(path.dirname(dir), cache, mode);
}
cache[dir] = parent.then(function () {
return mkpath(dir, mode);
});
}
return cache[dir];
};
// Utility methods for writing output files
var extractors = {
folder: function (folder, destination, zip) {
return mkdir(destination, zip.dirCache, folder.mode)
.then(function () {
return {folder: folder.path};
});
},
store: function (file, destination, zip) {
var writer;
if (file.uncompressedSize === 0) {
writer = touch.bind(null, destination);
} else if (file.uncompressedSize <= zip.chunkSize) {
writer = function () {
return zip.getBuffer(file._offset, file._offset + file.uncompressedSize)
.then(function (buffer) {
return writeFile(destination, buffer, { mode: file.mode });
});
};
} else {
var input = new stream.Readable();
input.wrap(fs.createReadStream(zip.filename, {start: file._offset, end: file._offset + file.uncompressedSize - 1}));
writer = pipePromise.bind(null, input, destination, { mode: file.mode });
}
return mkdir(path.dirname(destination), zip.dirCache)
.then(writer)
.then(function () {
return {stored: file.path};
});
},
deflate: function (file, destination, zip) {
// For Deflate you don't actually need to specify the end offset - and
// in fact many ZIP files don't include compressed file sizes for
// Deflated files so we don't even know what the end offset is.
return mkdir(path.dirname(destination), zip.dirCache)
.then(function () {
if (file._maxSize <= zip.chunkSize) {
return zip.getBuffer(file._offset, file._offset + file._maxSize)
.then(inflateRaw)
.then(function (buffer) {
return writeFile(destination, buffer, { mode: file.mode });
});
} else {
// For node 0.8 we need to create the Zlib stream and attach
// handlers in the same tick of the event loop, which is why we do
// the creation in here
var input = new stream.Readable();
input.wrap(fs.createReadStream(zip.filename, {start: file._offset}));
var inflater = input.pipe(zlib.createInflateRaw({highWaterMark: 32 * 1024}));
return pipePromise(inflater, destination, { mode: file.mode });
}
})
.then(function () {
return {deflated: file.path};
});
},
symlink: function (file, destination, zip, basePath) {
var parent = path.dirname(destination);
return mkdir(parent, zip.dirCache)
.then(function () {
return getLinkLocation(file, destination, zip, basePath);
})
.then(function (linkTo) {
return symlink(path.resolve(parent, linkTo), destination)
.then(function () {
return {symlink: file.path, linkTo: linkTo};
});
});
},
// Make a shallow copy of the file/directory this symlink points to instead
// of actually creating a link
copy: function (file, destination, zip, basePath) {
var type;
var parent = path.dirname(destination);
return mkdir(parent, zip.dirCache)
.then(function () {
return getLinkLocation(file, destination, zip, basePath);
})
.then(function (linkTo) {
return stat(path.resolve(parent, linkTo))
.then(function (stats) {
if (stats.isFile()) {
type = 'File';
var input = new stream.Readable();
input.wrap(fs.createReadStream(path.resolve(parent, linkTo)));
return pipePromise(input, destination);
} else if (stats.isDirectory()) {
type = 'Directory';
return mkdir(destination, zip.dirCache);
} else {
throw new Error('Could not follow symlink to unknown file type');
}
})
.then(function () {
return {copy: file.path, original: linkTo, type: type};
});
});
}
};
var getLinkLocation = function (file, destination, zip, basePath) {
var parent = path.dirname(destination);
return zip.getBuffer(file._offset, file._offset + file.uncompressedSize)
.then(function (buffer) {
var linkTo = buffer.toString();
var fullLink = path.resolve(parent, linkTo);
if (path.relative(basePath, fullLink).slice(0, 2) === '..') {
throw new Error('Symlink links outside archive');
}
return linkTo;
});
};
var pipePromise = function (input, destination, options) {
var deferred = Q.defer();
var output = fs.createWriteStream(destination, options);
var errorHandler = function (error) {
deferred.reject(error);
};
input.on('error', errorHandler);
output.on('error', errorHandler);
// For node 0.8 we can't just use the 'finish' event of the pipe
input.on('end', function () {
output.end(function () {
deferred.resolve();
});
});
input.pipe(output, {end: false});
return deferred.promise;
};
module.exports = extractors;

app/node_modules/decompress-zip/lib/file-details.js generated vendored Normal file

@@ -0,0 +1,37 @@
// Objects with this prototype are used as the public representation of a file
var path = require('path');
var FileDetails = function (directoryEntry) {
// TODO: Add 'extra field' support
this._offset = 0;
this._maxSize = 0;
this.parent = path.dirname(directoryEntry.fileName);
this.filename = path.basename(directoryEntry.fileName);
this.path = path.normalize(directoryEntry.fileName);
this.type = directoryEntry.fileAttributes.type;
this.mode = directoryEntry.fileAttributes.mode;
this.compressionMethod = directoryEntry.compressionMethod;
this.modified = directoryEntry.modifiedTime;
this.crc32 = directoryEntry.crc32;
this.compressedSize = directoryEntry.compressedSize;
this.uncompressedSize = directoryEntry.uncompressedSize;
this.comment = directoryEntry.fileComment;
this.flags = {
encrypted: directoryEntry.generalPurposeFlags.encrypted,
compressionFlag1: directoryEntry.generalPurposeFlags.compressionFlag1,
compressionFlag2: directoryEntry.generalPurposeFlags.compressionFlag2,
useDataDescriptor: directoryEntry.generalPurposeFlags.useDataDescriptor,
enhancedDeflating: directoryEntry.generalPurposeFlags.enhancedDeflating,
compressedPatched: directoryEntry.generalPurposeFlags.compressedPatched,
strongEncryption: directoryEntry.generalPurposeFlags.strongEncryption,
utf8: directoryEntry.generalPurposeFlags.utf8,
encryptedCD: directoryEntry.generalPurposeFlags.encryptedCD
};
};
module.exports = FileDetails;

app/node_modules/decompress-zip/lib/signatures.js generated vendored Normal file

@@ -0,0 +1,10 @@
module.exports = {
LOCAL_FILE_HEADER: 0x04034b50,
DATA_DESCRIPTOR_RECORD: 0x08074b50,
ARCHIVE_EXTRA_DATA: 0x08064b50,
CENTRAL_FILE_HEADER: 0x02014b50,
HEADER: 0x05054b50,
ZIP64_END_OF_CENTRAL_DIRECTORY: 0x06064b50,
ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR: 0x07064b50,
END_OF_CENTRAL_DIRECTORY: 0x06054b50
};

app/node_modules/decompress-zip/lib/structures.js generated vendored Normal file

@@ -0,0 +1,228 @@
'use strict';
var binary = require('binary');
var convertDateTime = function (dosDate, dosTime) {
var year = ((dosDate >> 9) & 0x7F) + 1980;
var month = (dosDate >> 5) & 0x0F;
var day = dosDate & 0x1F;
var hour = (dosTime >> 11);
var minute = (dosTime >> 5) & 0x3F;
var second = (dosTime & 0x1F) * 2;
var result = new Date(year, month - 1, day, hour, minute, second, 0);
return result;
};
var convertGeneralPurposeFlags = function (value) {
var bits = [];
for (var i = 0; i < 16; i++) {
bits[i] = (value >> i) & 1;
}
return {
encrypted: !!bits[0],
compressionFlag1: !!bits[1],
compressionFlag2: !!bits[2],
useDataDescriptor: !!bits[3],
enhancedDeflating: !!bits[4],
compressedPatched: !!bits[5],
strongEncryption: !!bits[6],
utf8: !!bits[11],
encryptedCD: !!bits[13]
};
};
var parseExternalFileAttributes = function (externalAttributes, platform) {
var types = {
// In theory, any of these could be set. Realistically, though, it will
// be regular, directory or symlink
1: 'NamedPipe',
2: 'Character',
4: 'Directory',
6: 'Block',
8: 'File',
10: 'SymbolicLink',
12: 'Socket'
};
switch (platform) {
case 3: // Unix
return {
platform: 'Unix',
type: types[(externalAttributes >> 28) & 0x0F],
mode: (externalAttributes >> 16) & 0xFFF
};
// case 0: // MSDOS
default:
if (platform !== 0) {
console.warn('Possibly unsupported ZIP platform type, ' + platform);
}
var attribs = {
A: (externalAttributes >> 5) & 0x01,
D: (externalAttributes >> 4) & 0x01,
V: (externalAttributes >> 3) & 0x01,
S: (externalAttributes >> 2) & 0x01,
H: (externalAttributes >> 1) & 0x01,
R: externalAttributes & 0x01
};
// With no better guidance we'll make the default permissions ugo+r
var mode = parseInt('0444', 8);
if (attribs.D) {
mode |= parseInt('0111', 8); // Set the execute bit
}
if (!attribs.R) {
mode |= parseInt('0222', 8); // Set the write bit
}
mode &= ~process.umask();
return {
platform: 'DOS',
type: attribs.D ? 'Directory' : 'File',
mode: mode
};
}
};
var readEndRecord = function (buffer) {
var data = binary.parse(buffer)
.word32lu('signature')
.word16lu('diskNumber')
.word16lu('directoryStartDisk')
.word16lu('directoryEntryCountDisk')
.word16lu('directoryEntryCount')
.word32lu('directorySize')
.word32lu('directoryOffset')
.word16lu('commentLength')
.buffer('comment', 'commentLength')
.vars;
data.comment = data.comment.toString();
return data;
};
var directorySort = function (a, b) {
return a.relativeOffsetOfLocalHeader - b.relativeOffsetOfLocalHeader;
};
var readDirectory = function (buffer) {
var directory = [];
var current;
var index = 0;
while (index < buffer.length) {
current = binary.parse(buffer.slice(index, index + 46))
.word32lu('signature')
.word8lu('creatorSpecVersion')
.word8lu('creatorPlatform')
.word8lu('requiredSpecVersion')
.word8lu('requiredPlatform')
.word16lu('generalPurposeBitFlag')
.word16lu('compressionMethod')
.word16lu('lastModFileTime')
.word16lu('lastModFileDate')
.word32lu('crc32')
.word32lu('compressedSize')
.word32lu('uncompressedSize')
.word16lu('fileNameLength')
.word16lu('extraFieldLength')
.word16lu('fileCommentLength')
.word16lu('diskNumberStart')
.word16lu('internalFileAttributes')
.word32lu('externalFileAttributes')
.word32lu('relativeOffsetOfLocalHeader')
.vars;
index += 46;
current.generalPurposeFlags = convertGeneralPurposeFlags(current.generalPurposeBitFlag);
current.fileAttributes = parseExternalFileAttributes(current.externalFileAttributes, current.creatorPlatform);
current.modifiedTime = convertDateTime(current.lastModFileDate, current.lastModFileTime);
current.fileName = current.extraField = current.fileComment = '';
current.headerLength = 46 + current.fileNameLength + current.extraFieldLength + current.fileCommentLength;
if (current.fileNameLength > 0) {
current.fileName = buffer.slice(index, index + current.fileNameLength).toString();
index += current.fileNameLength;
}
if (current.extraFieldLength > 0) {
current.extraField = buffer.slice(index, index + current.extraFieldLength).toString();
index += current.extraFieldLength;
}
if (current.fileCommentLength > 0) {
current.fileComment = buffer.slice(index, index + current.fileCommentLength).toString();
index += current.fileCommentLength;
}
if (current.fileAttributes.type !== 'Directory' && current.fileName.substr(-1) === '/') {
// TODO: check that this is a reasonable check
current.fileAttributes.type = 'Directory';
}
directory.push(current);
}
directory.sort(directorySort);
return directory;
};
var readFileEntry = function (buffer) {
var index = 0;
var fileEntry = binary.parse(buffer.slice(index, 30))
.word32lu('signature')
.word16lu('versionNeededToExtract')
.word16lu('generalPurposeBitFlag')
.word16lu('compressionMethod')
.word16lu('lastModFileTime')
.word16lu('lastModFileDate')
.word32lu('crc32')
.word32lu('compressedSize')
.word32lu('uncompressedSize')
.word16lu('fileNameLength')
.word16lu('extraFieldLength')
.vars;
index += 30;
fileEntry.fileName = fileEntry.extraField = '';
fileEntry.entryLength = 30 + fileEntry.fileNameLength + fileEntry.extraFieldLength;
if (fileEntry.entryLength > structures.maxFileEntrySize) {
throw new Error('File entry unexpectedly large: ' + fileEntry.entryLength + ' (max: ' + structures.maxFileEntrySize + ')');
}
if (fileEntry.fileNameLength > 0) {
fileEntry.fileName = buffer.slice(index, index + fileEntry.fileNameLength).toString();
index += fileEntry.fileNameLength;
}
if (fileEntry.extraFieldLength > 0) {
fileEntry.extraField = buffer.slice(index, index + fileEntry.extraFieldLength).toString();
index += fileEntry.extraFieldLength;
}
return fileEntry;
};
var structures = module.exports = {
readEndRecord: readEndRecord,
readDirectory: readDirectory,
readFileEntry: readFileEntry,
maxFileEntrySize: 4096
};

app/node_modules/decompress-zip/license generated vendored Normal file

@@ -0,0 +1,9 @@
MIT License
Copyright (c) Bower team
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

app/node_modules/decompress-zip/package.json generated vendored Normal file

@@ -0,0 +1,91 @@
{
"_from": "decompress-zip@0.3.x",
"_id": "decompress-zip@0.3.2",
"_inBundle": false,
"_integrity": "sha512-Ab1QY4LrWMrUuo53lLnmGOby7v8ryqxJ+bKibKSiPisx+25mhut1dScVBXAYx14i/PqSrFZvR2FRRazhLbvL+g==",
"_location": "/decompress-zip",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "decompress-zip@0.3.x",
"name": "decompress-zip",
"escapedName": "decompress-zip",
"rawSpec": "0.3.x",
"saveSpec": null,
"fetchSpec": "0.3.x"
},
"_requiredBy": [
"/mksnapshot"
],
"_resolved": "https://registry.npmjs.org/decompress-zip/-/decompress-zip-0.3.2.tgz",
"_shasum": "f3fa2841666abce394604f4a9e8a7085c202d464",
"_spec": "decompress-zip@0.3.x",
"_where": "F:\\projects\\p\\gitlit\\app\\node_modules\\mksnapshot",
"author": {
"name": "Bower"
},
"bin": {
"decompress-zip": "bin/decompress-zip"
},
"bugs": {
"url": "https://github.com/bower/decompress-zip/issues"
},
"bundleDependencies": false,
"dependencies": {
"binary": "^0.3.0",
"graceful-fs": "^4.1.3",
"mkpath": "^0.1.0",
"nopt": "^3.0.1",
"q": "^1.1.2",
"readable-stream": "^1.1.8",
"touch": "0.0.3"
},
"deprecated": false,
"description": "Extract files from a ZIP archive",
"devDependencies": {
"archiver": "^0.13.1",
"chai": "^1.10.0",
"coveralls": "^2.11.2",
"fs-jetpack": "^0.5.3",
"grunt": "^0.4.1",
"grunt-cli": "^0.1.13",
"grunt-contrib-jshint": "^0.11.0",
"grunt-contrib-watch": "^0.6.1",
"grunt-exec": "^0.4.2",
"grunt-simple-mocha": "^0.4.0",
"istanbul": "^0.3.5",
"mocha": "^2.1.0",
"tmp": "0.0.24"
},
"engines": {
"node": ">=0.10.0"
},
"files": [
"bin",
"lib"
],
"homepage": "https://github.com/bower/decompress-zip#readme",
"keywords": [
"zip",
"unzip",
"tar",
"untar",
"compress",
"decompress",
"archive",
"extract",
"zlib"
],
"license": "MIT",
"main": "lib/decompress-zip.js",
"name": "decompress-zip",
"repository": {
"type": "git",
"url": "git+https://github.com/bower/decompress-zip.git"
},
"scripts": {
"test": "grunt test"
},
"version": "0.3.2"
}