mirror of https://github.com/S2-/gitlit
synced 2025-08-03 21:00:04 +02:00

Commit: add node modules to repo
312 node_modules/decompress-zip/lib/decompress-zip.js generated vendored Normal file
@@ -0,0 +1,312 @@
'use strict';

// The zip file spec is at http://www.pkware.com/documents/casestudies/APPNOTE.TXT
// TODO: There is a fair chunk of the spec that I have ignored. Need to add
// assertions everywhere to make sure that we are not dealing with a ZIP type
// that I haven't designed for. Things like spanning archives, non-DEFLATE
// compression, encryption, etc.
var fs = require('graceful-fs');
var Q = require('q');
var path = require('path');
var util = require('util');
var events = require('events');
var structures = require('./structures');
var signatures = require('./signatures');
var extractors = require('./extractors');
var FileDetails = require('./file-details');

var fstat = Q.denodeify(fs.fstat);
var read = Q.denodeify(fs.read);
var fopen = Q.denodeify(fs.open);

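Q.denodeify wraps a Node-style callback function in a promise-returning one, which is what makes the promise chains below possible. A minimal sketch of the idea (illustrative, not part of the module). Note that fs.read passes two values to its callback, so its denodeified promise resolves with an array, which is why getBuffer further down picks result[1].

var Q = require('q');
var fs = require('graceful-fs');

var fopen = Q.denodeify(fs.open);
var fstat = Q.denodeify(fs.fstat);

// fs.open(path, flags, cb) becomes a promise-returning function,
// so async steps chain instead of nesting callbacks.
fopen(__filename, 'r')
    .then(function (fd) {
        return fstat(fd);
    })
    .then(function (stats) {
        console.log('size:', stats.size);
    });
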
function DecompressZip(filename) {
    events.EventEmitter.call(this);

    this.filename = filename;
    this.stats = null;
    this.fd = null;
    this.chunkSize = 1024 * 1024; // Buffer up to 1MB at a time
    this.dirCache = {};

    // When we need a resource, we should check if there is a promise for it
    // already and use that. If the promise is already fulfilled we don't do the
    // async work again and we get to queue up dependent tasks.
    this._p = {}; // _p instead of _promises because it is a lot easier to read
}

util.inherits(DecompressZip, events.EventEmitter);

DecompressZip.prototype.openFile = function () {
    return fopen(this.filename, 'r');
};

DecompressZip.prototype.closeFile = function () {
    if (this.fd) {
        fs.closeSync(this.fd);
        this.fd = null;
    }
};

DecompressZip.prototype.statFile = function (fd) {
    this.fd = fd;
    return fstat(fd);
};

DecompressZip.prototype.list = function () {
    var self = this;

    this.getFiles()
        .then(function (files) {
            var result = [];

            files.forEach(function (file) {
                result.push(file.path);
            });

            self.emit('list', result);
        })
        .fail(function (error) {
            self.emit('error', error);
        })
        .fin(self.closeFile.bind(self));

    return this;
};

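The public API is event-based. A minimal usage sketch for list(); the archive name is hypothetical:

var DecompressZip = require('decompress-zip');

var unzipper = new DecompressZip('archive.zip'); // hypothetical input file

unzipper.on('list', function (paths) {
    console.log('archive contains:', paths);
});
unzipper.on('error', function (err) {
    console.error('failed to read archive:', err);
});

unzipper.list();
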
DecompressZip.prototype.extract = function (options) {
    var self = this;

    options = options || {};
    options.path = options.path || '.';
    options.filter = options.filter || null;
    options.follow = !!options.follow;
    options.strip = +options.strip || 0;

    this.getFiles()
        .then(function (files) {
            var copies = [];

            if (options.filter) {
                files = files.filter(options.filter);
            }

            if (options.follow) {
                copies = files.filter(function (file) {
                    return file.type === 'SymbolicLink';
                });
                files = files.filter(function (file) {
                    return file.type !== 'SymbolicLink';
                });
            }

            if (options.strip) {
                files = files.map(function (file) {
                    if (file.type !== 'Directory') {
                        // we don't use `path.sep` as we're using `/` on Windows too
                        var dir = file.parent.split('/');
                        var filename = file.filename;

                        if (options.strip > dir.length) {
                            throw new Error('You cannot strip more levels than there are directories');
                        } else {
                            dir = dir.slice(options.strip);
                        }

                        file.path = path.join(dir.join(path.sep), filename);
                        return file;
                    }
                });
            }

            return self.extractFiles(files, options)
                .then(self.extractFiles.bind(self, copies, options));
        })
        .then(function (results) {
            self.emit('extract', results);
        })
        .fail(function (error) {
            self.emit('error', error);
        })
        .fin(self.closeFile.bind(self));

    return this;
};

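A usage sketch for extract(), wiring up the progress and completion events; file names and option values here are hypothetical:

var DecompressZip = require('decompress-zip');

var unzipper = new DecompressZip('archive.zip'); // hypothetical input file

unzipper.on('progress', function (index, count) {
    console.log('extracted file ' + (index + 1) + ' of ' + count);
});
unzipper.on('extract', function (log) {
    console.log('finished:', log);
});
unzipper.on('error', function (err) {
    console.error('extraction failed:', err);
});

unzipper.extract({
    path: 'out',   // destination directory
    strip: 1,      // drop the first path component of each entry
    filter: function (file) {
        return file.type !== 'SymbolicLink'; // skip symlinks entirely
    }
});
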
// Utility methods
DecompressZip.prototype.getSearchBuffer = function (stats) {
    var size = Math.min(stats.size, this.chunkSize);
    this.stats = stats;
    return this.getBuffer(stats.size - size, stats.size);
};

DecompressZip.prototype.getBuffer = function (start, end) {
    var size = end - start;
    // fs.read's callback receives (err, bytesRead, buffer), so the denodeified
    // promise resolves with [bytesRead, buffer]; we only want the buffer.
    return read(this.fd, new Buffer(size), 0, size, start)
        .then(function (result) {
            return result[1];
        });
};

DecompressZip.prototype.findEndOfDirectory = function (buffer) {
    var index = buffer.length - 3;
    var chunk = '';

    // Apparently the ZIP spec is not very good and it is impossible to
    // guarantee that you have read a zip file correctly, or to determine
    // the location of the CD without hunting.
    // Search backwards through the buffer, as it is very likely to be near the
    // end of the file.
    while (index > Math.max(buffer.length - this.chunkSize, 0) && chunk !== signatures.END_OF_CENTRAL_DIRECTORY) {
        index--;
        chunk = buffer.readUInt32LE(index);
    }

    if (chunk !== signatures.END_OF_CENTRAL_DIRECTORY) {
        throw new Error('Could not find the End of Central Directory Record');
    }

    return buffer.slice(index);
};

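The End of Central Directory record is located by scanning backwards for its 4-byte signature (0x06054b50, END_OF_CENTRAL_DIRECTORY in signatures.js). The same scan, reduced to a standalone sketch on a synthetic buffer:

var END_OF_CENTRAL_DIRECTORY = 0x06054b50;

// Build a toy buffer: zero padding, then the EOCD signature near the end.
var buffer = Buffer.alloc(32);
buffer.writeUInt32LE(END_OF_CENTRAL_DIRECTORY, 26);

var index = buffer.length - 3;
var chunk = 0;

while (index > 0 && chunk !== END_OF_CENTRAL_DIRECTORY) {
    index--;
    chunk = buffer.readUInt32LE(index);
}

console.log(chunk === END_OF_CENTRAL_DIRECTORY ? 'found EOCD at ' + index : 'not found');
// -> found EOCD at 26
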
// Directory here means the ZIP Central Directory, not a folder
DecompressZip.prototype.readDirectory = function (recordBuffer) {
    var record = structures.readEndRecord(recordBuffer);

    return this.getBuffer(record.directoryOffset, record.directoryOffset + record.directorySize)
        .then(structures.readDirectory.bind(null));
};

DecompressZip.prototype.getFiles = function () {
    if (!this._p.getFiles) {
        this._p.getFiles = this.openFile()
            .then(this.statFile.bind(this))
            .then(this.getSearchBuffer.bind(this))
            .then(this.findEndOfDirectory.bind(this))
            .then(this.readDirectory.bind(this))
            .then(this.readFileEntries.bind(this));
    }

    return this._p.getFiles;
};

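getFiles memoizes its promise in this._p, so list() and extract() on the same instance parse the central directory only once. The caching pattern in isolation, a sketch using the q library as the module does:

var Q = require('q');

function Cache() {
    this._p = {};
}

Cache.prototype.getAnswer = function () {
    if (!this._p.answer) {
        // The expensive work runs only on the first call; later calls
        // get the same (possibly already fulfilled) promise back.
        this._p.answer = Q.delay(100).then(function () {
            console.log('doing the expensive work once');
            return 42;
        });
    }
    return this._p.answer;
};

var cache = new Cache();
cache.getAnswer().then(function (v) { console.log('first:', v); });
cache.getAnswer().then(function (v) { console.log('second:', v); });
// 'doing the expensive work once' is printed a single time.
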
DecompressZip.prototype.readFileEntries = function (directory) {
    var promises = [];
    var files = [];
    var self = this;

    directory.forEach(function (directoryEntry, index) {
        var start = directoryEntry.relativeOffsetOfLocalHeader;
        var end = Math.min(self.stats.size, start + structures.maxFileEntrySize);
        var fileDetails = new FileDetails(directoryEntry);

        var promise = self.getBuffer(start, end)
            .then(structures.readFileEntry.bind(null))
            .then(function (fileEntry) {
                var maxSize;

                if (fileDetails.compressedSize > 0) {
                    maxSize = fileDetails.compressedSize;
                } else {
                    // No size recorded in the central directory; fall back to
                    // the gap before the next local header, or the end of file.
                    maxSize = self.stats.size;

                    if (index < directory.length - 1) {
                        maxSize = directory[index + 1].relativeOffsetOfLocalHeader;
                    }

                    maxSize -= start + fileEntry.entryLength;
                }

                fileDetails._offset = start + fileEntry.entryLength;
                fileDetails._maxSize = maxSize;

                self.emit('file', fileDetails);
                files[index] = fileDetails;
            });

        promises.push(promise);
    });

    return Q.all(promises)
        .then(function () {
            return files;
        });
};

DecompressZip.prototype.extractFiles = function (files, options, results) {
    var promises = [];
    var self = this;

    results = results || [];
    var fileIndex = 0;
    files.forEach(function (file) {
        var promise = self.extractFile(file, options)
            .then(function (result) {
                self.emit('progress', fileIndex++, files.length);
                results.push(result);
            });

        promises.push(promise);
    });

    return Q.all(promises)
        .then(function () {
            return results;
        });
};

DecompressZip.prototype.extractFile = function (file, options) {
    var destination = path.join(options.path, file.path);

    // Possible compression methods:
    //  0 - The file is stored (no compression)
    //  1 - The file is Shrunk
    //  2 - The file is Reduced with compression factor 1
    //  3 - The file is Reduced with compression factor 2
    //  4 - The file is Reduced with compression factor 3
    //  5 - The file is Reduced with compression factor 4
    //  6 - The file is Imploded
    //  7 - Reserved for Tokenizing compression algorithm
    //  8 - The file is Deflated
    //  9 - Enhanced Deflating using Deflate64(tm)
    // 10 - PKWARE Data Compression Library Imploding (old IBM TERSE)
    // 11 - Reserved by PKWARE
    // 12 - File is compressed using BZIP2 algorithm
    // 13 - Reserved by PKWARE
    // 14 - LZMA (EFS)
    // 15 - Reserved by PKWARE
    // 16 - Reserved by PKWARE
    // 17 - Reserved by PKWARE
    // 18 - File is compressed using IBM TERSE (new)
    // 19 - IBM LZ77 z Architecture (PFS)
    // 97 - WavPack compressed data
    // 98 - PPMd version I, Rev 1

    if (file.type === 'Directory') {
        return extractors.folder(file, destination, this);
    }

    if (file.type === 'File') {
        switch (file.compressionMethod) {
        case 0:
            return extractors.store(file, destination, this);

        case 8:
            return extractors.deflate(file, destination, this);

        default:
            throw new Error('Unsupported compression type');
        }
    }

    if (file.type === 'SymbolicLink') {
        if (options.follow) {
            return extractors.copy(file, destination, this, options.path);
        } else {
            return extractors.symlink(file, destination, this, options.path);
        }
    }

    throw new Error('Unsupported file type "' + file.type + '"');
};

module.exports = DecompressZip;
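Only methods 0 (store) and 8 (deflate) are actually implemented; anything else throws. A sketch that uses the 'file' event (emitted from readFileEntries above) to survey which methods an archive uses before extracting; the archive name is hypothetical:

var DecompressZip = require('decompress-zip');

var unzipper = new DecompressZip('archive.zip'); // hypothetical input file
var methods = {};

unzipper.on('file', function (file) {
    methods[file.compressionMethod] = (methods[file.compressionMethod] || 0) + 1;
});
unzipper.on('list', function () {
    console.log('compression methods used:', methods); // e.g. { '0': 2, '8': 40 }
});

unzipper.list();
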
184 node_modules/decompress-zip/lib/extractors.js generated vendored Normal file
@@ -0,0 +1,184 @@
var stream = require('stream');
if (!stream.Readable) {
    var stream = require('readable-stream');
}
var fs = require('graceful-fs');
var Q = require('q');
var path = require('path');
var zlib = require('zlib');
var touch = Q.denodeify(require('touch'));
var mkpath = Q.denodeify(require('mkpath'));
var writeFile = Q.denodeify(fs.writeFile);
var inflateRaw = Q.denodeify(zlib.inflateRaw);
var symlink = Q.denodeify(fs.symlink);
var stat = Q.denodeify(fs.stat);

// Use a cache of promises for building the directory tree. This allows us to
// correctly queue up file extractions for after their path has been created,
// avoid trying to create the path twice and still be async.
var mkdir = function (dir, cache, mode) {
    dir = path.normalize(path.resolve(process.cwd(), dir) + path.sep);
    if (mode === undefined) {
        mode = parseInt('777', 8) & (~process.umask());
    }

    if (!cache[dir]) {
        var parent;

        if (fs.existsSync(dir)) {
            parent = new Q();
        } else {
            parent = mkdir(path.dirname(dir), cache, mode);
        }

        cache[dir] = parent.then(function () {
            return mkpath(dir, mode);
        });
    }

    return cache[dir];
};

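Because the cache is keyed on the normalized absolute path, two files landing in the same directory share a single mkpath call. A sketch reusing the mkdir helper and Q defined above; the directory names are hypothetical:

var cache = {};

// Both extractions need out/a/b; the second call returns the cached
// promise, so the directory is only created once even though the two
// writes are queued concurrently.
Q.all([
    mkdir('out/a/b', cache).then(function () { /* write first file  */ }),
    mkdir('out/a/b', cache).then(function () { /* write second file */ })
]).then(function () {
    console.log(Object.keys(cache)); // one entry per directory level
});
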
// Utility methods for writing output files
var extractors = {
    folder: function (folder, destination, zip) {
        return mkdir(destination, zip.dirCache, folder.mode)
            .then(function () {
                return {folder: folder.path};
            });
    },
    store: function (file, destination, zip) {
        var writer;

        if (file.uncompressedSize === 0) {
            writer = touch.bind(null, destination);
        } else if (file.uncompressedSize <= zip.chunkSize) {
            writer = function () {
                return zip.getBuffer(file._offset, file._offset + file.uncompressedSize)
                    .then(function (buffer) {
                        return writeFile(destination, buffer, { mode: file.mode });
                    });
            };
        } else {
            var input = new stream.Readable();
            input.wrap(fs.createReadStream(zip.filename, {start: file._offset, end: file._offset + file.uncompressedSize - 1}));
            writer = pipePromise.bind(null, input, destination, { mode: file.mode });
        }

        return mkdir(path.dirname(destination), zip.dirCache)
            .then(writer)
            .then(function () {
                return {stored: file.path};
            });
    },
    deflate: function (file, destination, zip) {
        // For Deflate you don't actually need to specify the end offset - and
        // in fact many ZIP files don't include compressed file sizes for
        // Deflated files so we don't even know what the end offset is.

        return mkdir(path.dirname(destination), zip.dirCache)
            .then(function () {
                if (file._maxSize <= zip.chunkSize) {
                    return zip.getBuffer(file._offset, file._offset + file._maxSize)
                        .then(inflateRaw)
                        .then(function (buffer) {
                            return writeFile(destination, buffer, { mode: file.mode });
                        });
                } else {
                    // For node 0.8 we need to create the Zlib stream and attach
                    // handlers in the same tick of the event loop, which is why we do
                    // the creation in here
                    var input = new stream.Readable();
                    input.wrap(fs.createReadStream(zip.filename, {start: file._offset}));
                    var inflater = input.pipe(zlib.createInflateRaw({highWaterMark: 32 * 1024}));

                    return pipePromise(inflater, destination, { mode: file.mode });
                }
            })
            .then(function () {
                return {deflated: file.path};
            });
    },
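ZIP stores Deflate data raw, without the zlib header, which is why inflateRaw and createInflateRaw are used here instead of the plain inflate variants. A standalone round-trip sketch:

var zlib = require('zlib');

// deflateRaw produces the header-less stream that ZIP entries contain.
zlib.deflateRaw(Buffer.from('hello zip'), function (err, compressed) {
    if (err) { throw err; }
    zlib.inflateRaw(compressed, function (err, plain) {
        if (err) { throw err; }
        console.log(plain.toString()); // 'hello zip'
    });
});
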
    symlink: function (file, destination, zip, basePath) {
        var parent = path.dirname(destination);
        return mkdir(parent, zip.dirCache)
            .then(function () {
                return getLinkLocation(file, destination, zip, basePath);
            })
            .then(function (linkTo) {
                return symlink(path.resolve(parent, linkTo), destination)
                    .then(function () {
                        return {symlink: file.path, linkTo: linkTo};
                    });
            });
    },
    // Make a shallow copy of the file/directory this symlink points to instead
    // of actually creating a link
    copy: function (file, destination, zip, basePath) {
        var type;
        var parent = path.dirname(destination);

        return mkdir(parent, zip.dirCache)
            .then(function () {
                return getLinkLocation(file, destination, zip, basePath);
            })
            .then(function (linkTo) {
                return stat(path.resolve(parent, linkTo))
                    .then(function (stats) {
                        if (stats.isFile()) {
                            type = 'File';
                            var input = new stream.Readable();
                            input.wrap(fs.createReadStream(path.resolve(parent, linkTo)));
                            return pipePromise(input, destination);
                        } else if (stats.isDirectory()) {
                            type = 'Directory';
                            return mkdir(destination, zip.dirCache);
                        } else {
                            throw new Error('Could not follow symlink to unknown file type');
                        }
                    })
                    .then(function () {
                        return {copy: file.path, original: linkTo, type: type};
                    });
            });
    }
};

var getLinkLocation = function (file, destination, zip, basePath) {
    var parent = path.dirname(destination);
    return zip.getBuffer(file._offset, file._offset + file.uncompressedSize)
        .then(function (buffer) {
            var linkTo = buffer.toString();
            var fullLink = path.resolve(parent, linkTo);

            if (path.relative(basePath, fullLink).slice(0, 2) === '..') {
                throw new Error('Symlink links outside archive');
            }

            return linkTo;
        });
};

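The '..' check above guards against link targets that resolve outside the extraction root (the "zip slip" class of bugs). The core test in isolation:

var path = require('path');

function escapesBase(basePath, target) {
    // path.relative returns a path starting with '..' when the target
    // is not inside basePath.
    return path.relative(basePath, path.resolve(basePath, target)).slice(0, 2) === '..';
}

console.log(escapesBase('/tmp/out', 'sub/file.txt'));      // false
console.log(escapesBase('/tmp/out', '../../etc/passwd'));  // true
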
var pipePromise = function (input, destination, options) {
    var deferred = Q.defer();
    var output = fs.createWriteStream(destination, options);
    var errorHandler = function (error) {
        deferred.reject(error);
    };

    input.on('error', errorHandler);
    output.on('error', errorHandler);

    // For node 0.8 we can't just use the 'finish' event of the pipe
    input.on('end', function () {
        output.end(function () {
            deferred.resolve();
        });
    });

    input.pipe(output, {end: false});

    return deferred.promise;
};

module.exports = extractors;
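pipePromise adapts a stream pipe to a promise with Q.defer: reject on either stream's 'error', resolve once the writer has flushed. A reduced sketch of the same adapter for modern node, where the 'finish' event suffices:

var Q = require('q');
var fs = require('graceful-fs');

function pipeToFile(input, destination) {
    var deferred = Q.defer();
    var output = fs.createWriteStream(destination);

    input.on('error', deferred.reject);
    output.on('error', deferred.reject);
    output.on('finish', deferred.resolve); // fires once the file is flushed

    input.pipe(output);
    return deferred.promise;
}

// usage: pipeToFile(fs.createReadStream('in.txt'), 'out.txt').then(...)
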
37 node_modules/decompress-zip/lib/file-details.js generated vendored Normal file
@@ -0,0 +1,37 @@
// Objects with this prototype are used as the public representation of a file
var path = require('path');

var FileDetails = function (directoryEntry) {
    // TODO: Add 'extra field' support

    this._offset = 0;
    this._maxSize = 0;

    this.parent = path.dirname(directoryEntry.fileName);
    this.filename = path.basename(directoryEntry.fileName);
    this.path = path.normalize(directoryEntry.fileName);

    this.type = directoryEntry.fileAttributes.type;
    this.mode = directoryEntry.fileAttributes.mode;
    this.compressionMethod = directoryEntry.compressionMethod;
    this.modified = directoryEntry.modifiedTime;
    this.crc32 = directoryEntry.crc32;
    this.compressedSize = directoryEntry.compressedSize;
    this.uncompressedSize = directoryEntry.uncompressedSize;
    this.comment = directoryEntry.fileComment;

    this.flags = {
        encrypted: directoryEntry.generalPurposeFlags.encrypted,
        compressionFlag1: directoryEntry.generalPurposeFlags.compressionFlag1,
        compressionFlag2: directoryEntry.generalPurposeFlags.compressionFlag2,
        useDataDescriptor: directoryEntry.generalPurposeFlags.useDataDescriptor,
        enhancedDeflating: directoryEntry.generalPurposeFlags.enhancedDeflating,
        compressedPatched: directoryEntry.generalPurposeFlags.compressedPatched,
        strongEncryption: directoryEntry.generalPurposeFlags.strongEncryption,
        utf8: directoryEntry.generalPurposeFlags.utf8,
        encryptedCD: directoryEntry.generalPurposeFlags.encryptedCD
    };
};

module.exports = FileDetails;
10 node_modules/decompress-zip/lib/signatures.js generated vendored Normal file
@@ -0,0 +1,10 @@
module.exports = {
    LOCAL_FILE_HEADER: 0x04034b50,
    DATA_DESCRIPTOR_RECORD: 0x08074b50,
    ARCHIVE_EXTRA_DATA: 0x08064b50,
    CENTRAL_FILE_HEADER: 0x02014b50,
    HEADER: 0x05054b50,
    ZIP64_END_OF_CENTRAL_DIRECTORY: 0x06064b50,
    ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR: 0x07064b50,
    END_OF_CENTRAL_DIRECTORY: 0x06054b50
};
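All of these signatures share the little-endian 0x4b50 suffix, which is the ASCII 'PK' magic. A sketch showing how a constant maps to the bytes on disk:

var signature = 0x04034b50; // LOCAL_FILE_HEADER

var buffer = Buffer.alloc(4);
buffer.writeUInt32LE(signature, 0);

console.log(buffer);                          // <Buffer 50 4b 03 04>
console.log(buffer.toString('ascii', 0, 2));  // 'PK' - the classic magic bytes
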
228 node_modules/decompress-zip/lib/structures.js generated vendored Normal file
@@ -0,0 +1,228 @@
'use strict';
var binary = require('binary');

var convertDateTime = function (dosDate, dosTime) {
    var year = ((dosDate >> 9) & 0x7F) + 1980;
    var month = (dosDate >> 5) & 0x0F;
    var day = dosDate & 0x1F;

    var hour = (dosTime >> 11);
    var minute = (dosTime >> 5) & 0x3F;
    var second = (dosTime & 0x1F) * 2;

    var result = new Date(year, month - 1, day, hour, minute, second, 0);

    return result;
};

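MS-DOS timestamps pack the date as year-since-1980/month/day (7/4/5 bits) and the time as hours/minutes/two-second units (5/6/5 bits). A worked example against the function above; the date values are hypothetical:

// 2014-03-15 encoded as (2014 - 1980) << 9 | 3 << 5 | 15
var dosDate = (34 << 9) | (3 << 5) | 15;
// 12:34:56 encoded as 12 << 11 | 34 << 5 | 56 / 2
var dosTime = (12 << 11) | (34 << 5) | 28;

console.log(convertDateTime(dosDate, dosTime));
// -> Sat Mar 15 2014 12:34:56 (local time)
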
var convertGeneralPurposeFlags = function (value) {
    var bits = [];

    for (var i = 0; i < 16; i++) {
        bits[i] = (value >> i) & 1;
    }

    return {
        encrypted: !!bits[0],
        compressionFlag1: !!bits[1],
        compressionFlag2: !!bits[2],
        useDataDescriptor: !!bits[3],
        enhancedDeflating: !!bits[4],
        compressedPatched: !!bits[5],
        strongEncryption: !!bits[6],
        utf8: !!bits[11],
        encryptedCD: !!bits[13]
    };
};

var parseExternalFileAttributes = function (externalAttributes, platform) {
    var types = {
        // In theory, any of these could be set. Realistically, though, it will
        // be regular, directory or symlink
        1: 'NamedPipe',
        2: 'Character',
        4: 'Directory',
        6: 'Block',
        8: 'File',
        10: 'SymbolicLink',
        12: 'Socket'
    };

    switch (platform) {

    case 3: // Unix
        return {
            platform: 'Unix',
            type: types[(externalAttributes >> 28) & 0x0F],
            mode: (externalAttributes >> 16) & 0xFFF
        };

    // case 0: // MSDOS
    default:
        if (platform !== 0) {
            console.warn('Possibly unsupported ZIP platform type, ' + platform);
        }

        var attribs = {
            A: (externalAttributes >> 5) & 0x01,
            D: (externalAttributes >> 4) & 0x01,
            V: (externalAttributes >> 3) & 0x01,
            S: (externalAttributes >> 2) & 0x01,
            H: (externalAttributes >> 1) & 0x01,
            R: externalAttributes & 0x01
        };

        // With no better guidance we'll make the default permissions ugo+r
        var mode = parseInt('0444', 8);

        if (attribs.D) {
            mode |= parseInt('0111', 8); // Set the execute bit
        }

        if (!attribs.R) {
            mode |= parseInt('0222', 8); // Set the write bit
        }

        mode &= ~process.umask();

        return {
            platform: 'DOS',
            type: attribs.D ? 'Directory' : 'File',
            mode: mode
        };
    }
};

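For Unix-built archives the type and permission bits sit in the high 16 bits of the external attributes, laid out like st_mode. A worked example; the attribute value is hypothetical:

// 0o100644 (regular file, rw-r--r--) shifted into the high 16 bits
var externalAttributes = parseInt('100644', 8) << 16;

console.log((externalAttributes >> 28) & 0x0F);                // 8 -> 'File' in the types table
console.log(((externalAttributes >> 16) & 0xFFF).toString(8)); // '644'
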
var readEndRecord = function (buffer) {
    var data = binary.parse(buffer)
        .word32lu('signature')
        .word16lu('diskNumber')
        .word16lu('directoryStartDisk')
        .word16lu('directoryEntryCountDisk')
        .word16lu('directoryEntryCount')
        .word32lu('directorySize')
        .word32lu('directoryOffset')
        .word16lu('commentLength')
        .buffer('comment', 'commentLength')
        .vars;

    data.comment = data.comment.toString();

    return data;
};

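The binary module consumes the buffer field by field, and .vars collects everything parsed so far. A sketch that builds a minimal 22-byte end record and reads it back; the field values are hypothetical:

var binary = require('binary');

var eocd = Buffer.alloc(22);
eocd.writeUInt32LE(0x06054b50, 0); // END_OF_CENTRAL_DIRECTORY signature
eocd.writeUInt16LE(3, 10);         // directoryEntryCount
eocd.writeUInt32LE(194, 12);       // directorySize
eocd.writeUInt32LE(1000, 16);      // directoryOffset

var vars = binary.parse(eocd)
    .word32lu('signature')
    .buffer('disks', 6)            // skip the three disk-related fields
    .word16lu('directoryEntryCount')
    .word32lu('directorySize')
    .word32lu('directoryOffset')
    .word16lu('commentLength')
    .vars;

console.log(vars.directoryEntryCount, vars.directoryOffset); // 3 1000
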
var directorySort = function (a, b) {
    return a.relativeOffsetOfLocalHeader - b.relativeOffsetOfLocalHeader;
};

var readDirectory = function (buffer) {
    var directory = [];
    var current;
    var index = 0;

    while (index < buffer.length) {
        current = binary.parse(buffer.slice(index, index + 46))
            .word32lu('signature')
            .word8lu('creatorSpecVersion')
            .word8lu('creatorPlatform')
            .word8lu('requiredSpecVersion')
            .word8lu('requiredPlatform')
            .word16lu('generalPurposeBitFlag')
            .word16lu('compressionMethod')
            .word16lu('lastModFileTime')
            .word16lu('lastModFileDate')
            .word32lu('crc32')
            .word32lu('compressedSize')
            .word32lu('uncompressedSize')
            .word16lu('fileNameLength')
            .word16lu('extraFieldLength')
            .word16lu('fileCommentLength')
            .word16lu('diskNumberStart')
            .word16lu('internalFileAttributes')
            .word32lu('externalFileAttributes')
            .word32lu('relativeOffsetOfLocalHeader')
            .vars;

        index += 46;

        current.generalPurposeFlags = convertGeneralPurposeFlags(current.generalPurposeBitFlag);
        current.fileAttributes = parseExternalFileAttributes(current.externalFileAttributes, current.creatorPlatform);

        current.modifiedTime = convertDateTime(current.lastModFileDate, current.lastModFileTime);
        current.fileName = current.extraField = current.fileComment = '';
        current.headerLength = 46 + current.fileNameLength + current.extraFieldLength + current.fileCommentLength;

        if (current.fileNameLength > 0) {
            current.fileName = buffer.slice(index, index + current.fileNameLength).toString();
            index += current.fileNameLength;
        }

        if (current.extraFieldLength > 0) {
            current.extraField = buffer.slice(index, index + current.extraFieldLength).toString();
            index += current.extraFieldLength;
        }

        if (current.fileCommentLength > 0) {
            current.fileComment = buffer.slice(index, index + current.fileCommentLength).toString();
            index += current.fileCommentLength;
        }

        if (current.fileAttributes.type !== 'Directory' && current.fileName.substr(-1) === '/') {
            // TODO: check that this is a reasonable check
            current.fileAttributes.type = 'Directory';
        }

        directory.push(current);
    }

    directory.sort(directorySort);

    return directory;
};

var readFileEntry = function (buffer) {
    var index = 0;

    var fileEntry = binary.parse(buffer.slice(index, 30))
        .word32lu('signature')
        .word16lu('versionNeededToExtract')
        .word16lu('generalPurposeBitFlag')
        .word16lu('compressionMethod')
        .word16lu('lastModFileTime')
        .word16lu('lastModFileDate')
        .word32lu('crc32')
        .word32lu('compressedSize')
        .word32lu('uncompressedSize')
        .word16lu('fileNameLength')
        .word16lu('extraFieldLength')
        .vars;

    index += 30;

    fileEntry.fileName = fileEntry.extraField = '';

    fileEntry.entryLength = 30 + fileEntry.fileNameLength + fileEntry.extraFieldLength;

    if (fileEntry.entryLength > structures.maxFileEntrySize) {
        throw new Error('File entry unexpectedly large: ' + fileEntry.entryLength + ' (max: ' + structures.maxFileEntrySize + ')');
    }

    if (fileEntry.fileNameLength > 0) {
        fileEntry.fileName = buffer.slice(index, index + fileEntry.fileNameLength).toString();
        index += fileEntry.fileNameLength;
    }

    if (fileEntry.extraFieldLength > 0) {
        fileEntry.extraField = buffer.slice(index, index + fileEntry.extraFieldLength).toString();
        index += fileEntry.extraFieldLength;
    }

    return fileEntry;
};

var structures = module.exports = {
    readEndRecord: readEndRecord,
    readDirectory: readDirectory,
    readFileEntry: readFileEntry,
    maxFileEntrySize: 4096
};