Node JS version
node_modules/adm-zip/LICENSE (21 lines, generated, vendored, normal file)
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2012 Another-D-Mention Software and other contributors

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
node_modules/adm-zip/README.md (65 lines, generated, vendored, normal file)
@@ -0,0 +1,65 @@
# ADM-ZIP for NodeJS with added support for electron original-fs

ADM-ZIP is a pure JavaScript implementation for zip data compression for [NodeJS](https://nodejs.org/).

# Installation

With [npm](https://www.npmjs.com/) do:

    $ npm install adm-zip

## What is it good for?

The library allows you to:

- decompress zip files directly to disk or in memory buffers
- compress files and store them to disk in .zip format or in compressed buffers
- update content of/add new/delete files from an existing .zip

# Dependencies

There are no other nodeJS libraries that ADM-ZIP is dependent of

# Examples

## Basic usage

```javascript
var AdmZip = require("adm-zip");

// reading archives
var zip = new AdmZip("./my_file.zip");
var zipEntries = zip.getEntries(); // an array of ZipEntry records

zipEntries.forEach(function (zipEntry) {
    console.log(zipEntry.toString()); // outputs zip entries information
    if (zipEntry.entryName == "my_file.txt") {
        console.log(zipEntry.getData().toString("utf8"));
    }
});
// outputs the content of some_folder/my_file.txt
console.log(zip.readAsText("some_folder/my_file.txt"));
// extracts the specified file to the specified location
zip.extractEntryTo(/*entry name*/ "some_folder/my_file.txt", /*target path*/ "/home/me/tempfolder", /*maintainEntryPath*/ false, /*overwrite*/ true);
// extracts everything
zip.extractAllTo(/*target path*/ "/home/me/zipcontent/", /*overwrite*/ true);

// creating archives
var zip = new AdmZip();

// add file directly
var content = "inner content of the file";
zip.addFile("test.txt", Buffer.from(content, "utf8"), "entry comment goes here");
// add local file
zip.addLocalFile("/home/me/some_picture.png");
// get everything as a buffer
var willSendthis = zip.toBuffer();
// or write everything to disk
zip.writeZip(/*target file name*/ "/home/me/files.zip");

// ... more examples in the wiki
```

For more detailed information please check out the [wiki](https://github.com/cthackers/adm-zip/wiki).

[Build status](https://travis-ci.org/cthackers/adm-zip)
node_modules/adm-zip/adm-zip.js (786 lines, generated, vendored, normal file)
@@ -0,0 +1,786 @@
|
||||
const Utils = require("./util");
|
||||
const pth = require("path");
|
||||
const ZipEntry = require("./zipEntry");
|
||||
const ZipFile = require("./zipFile");
|
||||
|
||||
const get_Bool = (val, def) => (typeof val === "boolean" ? val : def);
|
||||
const get_Str = (val, def) => (typeof val === "string" ? val : def);
|
||||
|
||||
const defaultOptions = {
|
||||
// option "noSort" : if true it disables files sorting
|
||||
noSort: false,
|
||||
// read entries during load (initial loading may be slower)
|
||||
readEntries: false,
|
||||
// default method is none
|
||||
method: Utils.Constants.NONE,
|
||||
// file system
|
||||
fs: null
|
||||
};
|
||||
|
||||
module.exports = function (/**String*/ input, /** object */ options) {
|
||||
let inBuffer = null;
|
||||
|
||||
// create object based default options, allowing them to be overwritten
|
||||
const opts = Object.assign(Object.create(null), defaultOptions);
|
||||
|
||||
// test input variable
|
||||
if (input && "object" === typeof input) {
|
||||
// if value is not buffer we accept it to be object with options
|
||||
if (!(input instanceof Uint8Array)) {
|
||||
Object.assign(opts, input);
|
||||
input = opts.input ? opts.input : undefined;
|
||||
if (opts.input) delete opts.input;
|
||||
}
|
||||
|
||||
// if input is buffer
|
||||
if (Buffer.isBuffer(input)) {
|
||||
inBuffer = input;
|
||||
opts.method = Utils.Constants.BUFFER;
|
||||
input = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
// assign options
|
||||
Object.assign(opts, options);
|
||||
|
||||
// instantiate utils filesystem
|
||||
const filetools = new Utils(opts);
|
||||
|
||||
// if input is file name we retrieve its content
|
||||
if (input && "string" === typeof input) {
|
||||
// load zip file
|
||||
if (filetools.fs.existsSync(input)) {
|
||||
opts.method = Utils.Constants.FILE;
|
||||
opts.filename = input;
|
||||
inBuffer = filetools.fs.readFileSync(input);
|
||||
} else {
|
||||
throw new Error(Utils.Errors.INVALID_FILENAME);
|
||||
}
|
||||
}
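A minimal usage sketch for the constructor logic above: it accepts a file path, a Buffer, or an options object whose `input` property carries either. The paths below are hypothetical.

```javascript
const AdmZip = require("adm-zip");
const fs = require("fs");

// 1. from a file on disk (path is hypothetical)
const fromFile = new AdmZip("./archive.zip");

// 2. from an in-memory Buffer, e.g. one received over the network
const fromBuffer = new AdmZip(fs.readFileSync("./archive.zip"));

// 3. from an options object; "input" may be a path or a Buffer,
//    the remaining keys come from the defaultOptions block above
const fromOptions = new AdmZip({ input: "./archive.zip", readEntries: true, noSort: false });
```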
|
||||
|
||||
// create variable
|
||||
const _zip = new ZipFile(inBuffer, opts);
|
||||
|
||||
const { canonical, sanitize } = Utils;
|
||||
|
||||
function getEntry(/**Object*/ entry) {
|
||||
if (entry && _zip) {
|
||||
var item;
|
||||
// If entry was given as a file name
|
||||
if (typeof entry === "string") item = _zip.getEntry(entry);
|
||||
// if entry was given as a ZipEntry object
|
||||
if (typeof entry === "object" && typeof entry.entryName !== "undefined" && typeof entry.header !== "undefined") item = _zip.getEntry(entry.entryName);
|
||||
|
||||
if (item) {
|
||||
return item;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function fixPath(zipPath) {
|
||||
const { join, normalize, sep } = pth.posix;
|
||||
// convert windows file separators and normalize
|
||||
return join(".", normalize(sep + zipPath.split("\\").join(sep) + sep));
|
||||
}
|
||||
|
||||
return {
|
||||
/**
|
||||
* Extracts the given entry from the archive and returns the content as a Buffer object
|
||||
* @param entry ZipEntry object or String with the full path of the entry
|
||||
*
|
||||
* @return Buffer or Null in case of error
|
||||
*/
|
||||
readFile: function (/**Object*/ entry, /*String, Buffer*/ pass) {
|
||||
var item = getEntry(entry);
|
||||
return (item && item.getData(pass)) || null;
|
||||
},
|
||||
|
||||
/**
|
||||
* Asynchronous readFile
|
||||
* @param entry ZipEntry object or String with the full path of the entry
|
||||
* @param callback
|
||||
*
|
||||
* @return Buffer or Null in case of error
|
||||
*/
|
||||
readFileAsync: function (/**Object*/ entry, /**Function*/ callback) {
|
||||
var item = getEntry(entry);
|
||||
if (item) {
|
||||
item.getDataAsync(callback);
|
||||
} else {
|
||||
callback(null, "getEntry failed for:" + entry);
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Extracts the given entry from the archive and returns the content as plain text in the given encoding
|
||||
* @param entry ZipEntry object or String with the full path of the entry
|
||||
* @param encoding Optional. If no encoding is specified utf8 is used
|
||||
*
|
||||
* @return String
|
||||
*/
|
||||
readAsText: function (/**Object*/ entry, /**String=*/ encoding) {
|
||||
var item = getEntry(entry);
|
||||
if (item) {
|
||||
var data = item.getData();
|
||||
if (data && data.length) {
|
||||
return data.toString(encoding || "utf8");
|
||||
}
|
||||
}
|
||||
return "";
|
||||
},
|
||||
|
||||
/**
|
||||
* Asynchronous readAsText
|
||||
* @param entry ZipEntry object or String with the full path of the entry
|
||||
* @param callback
|
||||
* @param encoding Optional. If no encoding is specified utf8 is used
|
||||
*
|
||||
* @return String
|
||||
*/
|
||||
readAsTextAsync: function (/**Object*/ entry, /**Function*/ callback, /**String=*/ encoding) {
|
||||
var item = getEntry(entry);
|
||||
if (item) {
|
||||
item.getDataAsync(function (data, err) {
|
||||
if (err) {
|
||||
callback(data, err);
|
||||
return;
|
||||
}
|
||||
|
||||
if (data && data.length) {
|
||||
callback(data.toString(encoding || "utf8"));
|
||||
} else {
|
||||
callback("");
|
||||
}
|
||||
});
|
||||
} else {
|
||||
callback("");
|
||||
}
|
||||
},
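A short sketch of the synchronous and callback read paths defined above; the archive and entry names are hypothetical.

```javascript
const AdmZip = require("adm-zip");
const zip = new AdmZip("./archive.zip"); // hypothetical archive

// synchronous variants
const text = zip.readAsText("docs/readme.txt"); // "" when the entry is missing
const raw = zip.readFile("images/logo.png");    // Buffer, or null on error

// asynchronous variant
zip.readFileAsync("images/logo.png", function (data, err) {
    if (err) return console.error(err);
    console.log("got", data.length, "bytes");
});
```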
|
||||
|
||||
/**
|
||||
* Remove the entry from the file or the entry and all its nested directories and files if the given entry is a directory
|
||||
*
|
||||
* @param entry
|
||||
*/
|
||||
deleteFile: function (/**Object*/ entry) {
|
||||
// @TODO: test deleteFile
|
||||
var item = getEntry(entry);
|
||||
if (item) {
|
||||
_zip.deleteEntry(item.entryName);
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Adds a comment to the zip. The zip must be rewritten after adding the comment.
|
||||
*
|
||||
* @param comment
|
||||
*/
|
||||
addZipComment: function (/**String*/ comment) {
|
||||
// @TODO: test addZipComment
|
||||
_zip.comment = comment;
|
||||
},
|
||||
|
||||
/**
|
||||
* Returns the zip comment
|
||||
*
|
||||
* @return String
|
||||
*/
|
||||
getZipComment: function () {
|
||||
return _zip.comment || "";
|
||||
},
|
||||
|
||||
/**
|
||||
* Adds a comment to a specified zipEntry. The zip must be rewritten after adding the comment
|
||||
* The comment cannot exceed 65535 characters in length
|
||||
*
|
||||
* @param entry
|
||||
* @param comment
|
||||
*/
|
||||
addZipEntryComment: function (/**Object*/ entry, /**String*/ comment) {
|
||||
var item = getEntry(entry);
|
||||
if (item) {
|
||||
item.comment = comment;
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Returns the comment of the specified entry
|
||||
*
|
||||
* @param entry
|
||||
* @return String
|
||||
*/
|
||||
getZipEntryComment: function (/**Object*/ entry) {
|
||||
var item = getEntry(entry);
|
||||
if (item) {
|
||||
return item.comment || "";
|
||||
}
|
||||
return "";
|
||||
},
|
||||
|
||||
/**
|
||||
* Updates the content of an existing entry inside the archive. The zip must be rewritten after updating the content
|
||||
*
|
||||
* @param entry
|
||||
* @param content
|
||||
*/
|
||||
updateFile: function (/**Object*/ entry, /**Buffer*/ content) {
|
||||
var item = getEntry(entry);
|
||||
if (item) {
|
||||
item.setData(content);
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Adds a file from the disk to the archive
|
||||
*
|
||||
* @param localPath File to add to zip
|
||||
* @param zipPath Optional path inside the zip
|
||||
* @param zipName Optional name for the file
|
||||
*/
|
||||
addLocalFile: function (/**String*/ localPath, /**String=*/ zipPath, /**String=*/ zipName, /**String*/ comment) {
|
||||
if (filetools.fs.existsSync(localPath)) {
|
||||
// fix ZipPath
|
||||
zipPath = zipPath ? fixPath(zipPath) : "";
|
||||
|
||||
// p - local file name
|
||||
var p = localPath.split("\\").join("/").split("/").pop();
|
||||
|
||||
// add file name into zippath
|
||||
zipPath += zipName ? zipName : p;
|
||||
|
||||
// read file attributes
|
||||
const _attr = filetools.fs.statSync(localPath);
|
||||
|
||||
// add file into zip file
|
||||
this.addFile(zipPath, filetools.fs.readFileSync(localPath), comment, _attr);
|
||||
} else {
|
||||
throw new Error(Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath));
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Adds a local directory and all its nested files and directories to the archive
|
||||
*
|
||||
* @param localPath
|
||||
* @param zipPath optional path inside zip
|
||||
* @param filter optional RegExp or Function if files match will
|
||||
* be included.
|
||||
* @param {number | object} attr - number as unix file permissions, object as filesystem Stats object
|
||||
*/
|
||||
addLocalFolder: function (/**String*/ localPath, /**String=*/ zipPath, /**=RegExp|Function*/ filter, /**=number|object*/ attr) {
|
||||
// Prepare filter
|
||||
if (filter instanceof RegExp) {
|
||||
// if filter is RegExp wrap it
|
||||
filter = (function (rx) {
|
||||
return function (filename) {
|
||||
return rx.test(filename);
|
||||
};
|
||||
})(filter);
|
||||
} else if ("function" !== typeof filter) {
|
||||
// if filter is not function we will replace it
|
||||
filter = function () {
|
||||
return true;
|
||||
};
|
||||
}
|
||||
|
||||
// fix ZipPath
|
||||
zipPath = zipPath ? fixPath(zipPath) : "";
|
||||
|
||||
// normalize the path first
|
||||
localPath = pth.normalize(localPath);
|
||||
|
||||
if (filetools.fs.existsSync(localPath)) {
|
||||
const items = filetools.findFiles(localPath);
|
||||
const self = this;
|
||||
|
||||
if (items.length) {
|
||||
items.forEach(function (filepath) {
|
||||
var p = pth.relative(localPath, filepath).split("\\").join("/"); //windows fix
|
||||
if (filter(p)) {
|
||||
var stats = filetools.fs.statSync(filepath);
|
||||
if (stats.isFile()) {
|
||||
self.addFile(zipPath + p, filetools.fs.readFileSync(filepath), "", attr ? attr : stats);
|
||||
} else {
|
||||
self.addFile(zipPath + p + "/", Buffer.alloc(0), "", attr ? attr : stats);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
} else {
|
||||
throw new Error(Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath));
|
||||
}
|
||||
},
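A sketch of addLocalFolder with a RegExp filter and a zipPath prefix, following the filter-wrapping logic above; the folder names are hypothetical.

```javascript
const AdmZip = require("adm-zip");
const zip = new AdmZip();

// only .js files from ./src end up under lib/ inside the archive
zip.addLocalFolder("./src", "lib", /\.js$/);

// a function filter works as well; it receives the path relative to ./assets
zip.addLocalFolder("./assets", "assets", (relPath) => !relPath.endsWith(".tmp"));

zip.writeZip("./bundle.zip");
```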
|
||||
|
||||
/**
|
||||
* Asynchronous addLocalFile
|
||||
* @param localPath
|
||||
* @param callback
|
||||
* @param zipPath optional path inside zip
|
||||
* @param filter optional RegExp or Function if files match will
|
||||
* be included.
|
||||
*/
|
||||
addLocalFolderAsync: function (/*String*/ localPath, /*Function*/ callback, /*String*/ zipPath, /*RegExp|Function*/ filter) {
|
||||
if (filter instanceof RegExp) {
|
||||
filter = (function (rx) {
|
||||
return function (filename) {
|
||||
return rx.test(filename);
|
||||
};
|
||||
})(filter);
|
||||
} else if ("function" !== typeof filter) {
|
||||
filter = function () {
|
||||
return true;
|
||||
};
|
||||
}
|
||||
|
||||
// fix ZipPath
|
||||
zipPath = zipPath ? fixPath(zipPath) : "";
|
||||
|
||||
// normalize the path first
|
||||
localPath = pth.normalize(localPath);
|
||||
|
||||
var self = this;
|
||||
filetools.fs.open(localPath, "r", function (err) {
|
||||
if (err && err.code === "ENOENT") {
|
||||
callback(undefined, Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath));
|
||||
} else if (err) {
|
||||
callback(undefined, err);
|
||||
} else {
|
||||
var items = filetools.findFiles(localPath);
|
||||
var i = -1;
|
||||
|
||||
var next = function () {
|
||||
i += 1;
|
||||
if (i < items.length) {
|
||||
var filepath = items[i];
|
||||
var p = pth.relative(localPath, filepath).split("\\").join("/"); //windows fix
|
||||
p = p
|
||||
.normalize("NFD")
|
||||
.replace(/[\u0300-\u036f]/g, "")
|
||||
.replace(/[^\x20-\x7E]/g, ""); // accent fix
|
||||
if (filter(p)) {
|
||||
filetools.fs.stat(filepath, function (er0, stats) {
|
||||
if (er0) callback(undefined, er0);
|
||||
if (stats.isFile()) {
|
||||
filetools.fs.readFile(filepath, function (er1, data) {
|
||||
if (er1) {
|
||||
callback(undefined, er1);
|
||||
} else {
|
||||
self.addFile(zipPath + p, data, "", stats);
|
||||
next();
|
||||
}
|
||||
});
|
||||
} else {
|
||||
self.addFile(zipPath + p + "/", Buffer.alloc(0), "", stats);
|
||||
next();
|
||||
}
|
||||
});
|
||||
} else {
|
||||
process.nextTick(() => {
|
||||
next();
|
||||
});
|
||||
}
|
||||
} else {
|
||||
callback(true, undefined);
|
||||
}
|
||||
};
|
||||
|
||||
next();
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {string} localPath - path where files will be extracted
|
||||
* @param {object} props - optional properties
|
||||
* @param {string} props.zipPath - optional path inside zip
|
||||
* @param {regexp, function} props.filter - RegExp or Function if files match will be included.
|
||||
*/
|
||||
addLocalFolderPromise: function (/*String*/ localPath, /* object */ props) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const { filter, zipPath } = Object.assign({}, props);
|
||||
this.addLocalFolderAsync(
|
||||
localPath,
|
||||
(done, err) => {
|
||||
if (err) reject(err);
|
||||
if (done) resolve(this);
|
||||
},
|
||||
zipPath,
|
||||
filter
|
||||
);
|
||||
});
|
||||
},
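The promise wrapper above resolves with the AdmZip instance once addLocalFolderAsync reports completion, so it composes with async/await. A hedged sketch, with hypothetical paths:

```javascript
const AdmZip = require("adm-zip");

async function bundle() {
    const zip = new AdmZip();
    await zip.addLocalFolderPromise("./src", { zipPath: "src", filter: /\.js$/ });
    await zip.writeZipPromise("./bundle.zip");
}

bundle().catch(console.error);
```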
|
||||
|
||||
/**
|
||||
* Allows you to create an entry (file or directory) in the zip file.
|
||||
* If you want to create a directory the entryName must end in / and a null buffer should be provided.
|
||||
* Comment and attributes are optional
|
||||
*
|
||||
* @param {string} entryName
|
||||
* @param {Buffer | string} content - file content as buffer or utf8 coded string
|
||||
* @param {string} comment - file comment
|
||||
* @param {number | object} attr - number as unix file permissions, object as filesystem Stats object
|
||||
*/
|
||||
addFile: function (/**String*/ entryName, /**Buffer*/ content, /**String*/ comment, /**Number*/ attr) {
|
||||
let entry = getEntry(entryName);
|
||||
const update = entry != null;
|
||||
|
||||
// prepare new entry
|
||||
if (!update) {
|
||||
entry = new ZipEntry();
|
||||
entry.entryName = entryName;
|
||||
}
|
||||
entry.comment = comment || "";
|
||||
|
||||
const isStat = "object" === typeof attr && attr instanceof filetools.fs.Stats;
|
||||
|
||||
// last modification time from file stats
|
||||
if (isStat) {
|
||||
entry.header.time = attr.mtime;
|
||||
}
|
||||
|
||||
// Set file attribute
|
||||
var fileattr = entry.isDirectory ? 0x10 : 0; // (MS-DOS directory flag)
|
||||
|
||||
// extended attributes field for Unix
|
||||
// set file type either S_IFDIR / S_IFREG
|
||||
let unix = entry.isDirectory ? 0x4000 : 0x8000;
|
||||
|
||||
if (isStat) {
|
||||
// File attributes from file stats
|
||||
unix |= 0xfff & attr.mode;
|
||||
} else if ("number" === typeof attr) {
|
||||
// attr from given attr values
|
||||
unix |= 0xfff & attr;
|
||||
} else {
|
||||
// Default values:
|
||||
unix |= entry.isDirectory ? 0o755 : 0o644; // permissions (drwxr-xr-x) or (-r-wr--r--)
|
||||
}
|
||||
|
||||
fileattr = (fileattr | (unix << 16)) >>> 0; // add attributes
|
||||
|
||||
entry.attr = fileattr;
|
||||
|
||||
entry.setData(content);
|
||||
if (!update) _zip.setEntry(entry);
|
||||
},
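addFile packs the MS-DOS directory flag into the low bits and the Unix mode into the high 16 bits of the external attribute, as shown above. A sketch with an explicit mode, a directory entry, and the defaults; all names are hypothetical.

```javascript
const AdmZip = require("adm-zip");
const zip = new AdmZip();

// regular file with default permissions (0o644)
zip.addFile("notes.txt", Buffer.from("hello", "utf8"));

// executable script: pass a numeric unix mode as the attr argument
zip.addFile("bin/run.sh", Buffer.from("#!/bin/sh\necho hi\n", "utf8"), "entry comment", 0o755);

// directory entry: the name ends with "/" and the content is an empty buffer
zip.addFile("empty-dir/", Buffer.alloc(0));

zip.writeZip("./out.zip");
```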
|
||||
|
||||
/**
|
||||
* Returns an array of ZipEntry objects representing the files and folders inside the archive
|
||||
*
|
||||
* @return Array
|
||||
*/
|
||||
getEntries: function () {
|
||||
return _zip ? _zip.entries : [];
|
||||
},
|
||||
|
||||
/**
|
||||
* Returns a ZipEntry object representing the file or folder specified by ``name``.
|
||||
*
|
||||
* @param name
|
||||
* @return ZipEntry
|
||||
*/
|
||||
getEntry: function (/**String*/ name) {
|
||||
return getEntry(name);
|
||||
},
|
||||
|
||||
getEntryCount: function () {
|
||||
return _zip.getEntryCount();
|
||||
},
|
||||
|
||||
forEach: function (callback) {
|
||||
return _zip.forEach(callback);
|
||||
},
|
||||
|
||||
/**
|
||||
* Extracts the given entry to the given targetPath
|
||||
* If the entry is a directory inside the archive, the entire directory and its subdirectories will be extracted
|
||||
*
|
||||
* @param entry ZipEntry object or String with the full path of the entry
|
||||
* @param targetPath Target folder where to write the file
|
||||
* @param maintainEntryPath If maintainEntryPath is true and the entry is inside a folder, the entry folder
|
||||
* will be created in targetPath as well. Default is TRUE
|
||||
* @param overwrite If the file already exists at the target path, the file will be overwritten if this is true.
|
||||
* Default is FALSE
|
||||
* @param keepOriginalPermission The file will be set as the permission from the entry if this is true.
|
||||
* Default is FALSE
|
||||
* @param outFileName String If set will override the filename of the extracted file (Only works if the entry is a file)
|
||||
*
|
||||
* @return Boolean
|
||||
*/
|
||||
extractEntryTo: function (
|
||||
/**Object*/ entry,
|
||||
/**String*/ targetPath,
|
||||
/**Boolean*/ maintainEntryPath,
|
||||
/**Boolean*/ overwrite,
|
||||
/**Boolean*/ keepOriginalPermission,
|
||||
/**String**/ outFileName
|
||||
) {
|
||||
overwrite = get_Bool(overwrite, false);
|
||||
keepOriginalPermission = get_Bool(keepOriginalPermission, false);
|
||||
maintainEntryPath = get_Bool(maintainEntryPath, true);
|
||||
outFileName = get_Str(outFileName, get_Str(keepOriginalPermission, undefined));
|
||||
|
||||
var item = getEntry(entry);
|
||||
if (!item) {
|
||||
throw new Error(Utils.Errors.NO_ENTRY);
|
||||
}
|
||||
|
||||
var entryName = canonical(item.entryName);
|
||||
|
||||
var target = sanitize(targetPath, outFileName && !item.isDirectory ? outFileName : maintainEntryPath ? entryName : pth.basename(entryName));
|
||||
|
||||
if (item.isDirectory) {
|
||||
var children = _zip.getEntryChildren(item);
|
||||
children.forEach(function (child) {
|
||||
if (child.isDirectory) return;
|
||||
var content = child.getData();
|
||||
if (!content) {
|
||||
throw new Error(Utils.Errors.CANT_EXTRACT_FILE);
|
||||
}
|
||||
var name = canonical(child.entryName);
|
||||
var childName = sanitize(targetPath, maintainEntryPath ? name : pth.basename(name));
|
||||
// The reverse operation for attr depend on method addFile()
|
||||
const fileAttr = keepOriginalPermission ? child.header.fileAttr : undefined;
|
||||
filetools.writeFileTo(childName, content, overwrite, fileAttr);
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
var content = item.getData();
|
||||
if (!content) throw new Error(Utils.Errors.CANT_EXTRACT_FILE);
|
||||
|
||||
if (filetools.fs.existsSync(target) && !overwrite) {
|
||||
throw new Error(Utils.Errors.CANT_OVERRIDE);
|
||||
}
|
||||
// The reverse operation for attr depend on method addFile()
|
||||
const fileAttr = keepOriginalPermission ? entry.header.fileAttr : undefined;
|
||||
filetools.writeFileTo(target, content, overwrite, fileAttr);
|
||||
|
||||
return true;
|
||||
},
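A sketch of extractEntryTo's argument order as implemented above, including the optional outFileName override; the paths are hypothetical.

```javascript
const AdmZip = require("adm-zip");
const zip = new AdmZip("./archive.zip");

// extract one file, dropping its internal folder structure and renaming it
zip.extractEntryTo(
    "some_folder/report.pdf", // entry name or ZipEntry object
    "./output",               // target folder
    false,                    // maintainEntryPath
    true,                     // overwrite
    false,                    // keepOriginalPermission
    "report-latest.pdf"       // outFileName (files only)
);

// extracting a directory entry writes all of its non-directory children
zip.extractEntryTo("some_folder/", "./output", true, true);
```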
|
||||
|
||||
/**
|
||||
* Test the archive
|
||||
*
|
||||
*/
|
||||
test: function (pass) {
|
||||
if (!_zip) {
|
||||
return false;
|
||||
}
|
||||
|
||||
for (var entry in _zip.entries) {
|
||||
try {
|
||||
if (entry.isDirectory) {
|
||||
continue;
|
||||
}
|
||||
var content = _zip.entries[entry].getData(pass);
|
||||
if (!content) {
|
||||
return false;
|
||||
}
|
||||
} catch (err) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
},
|
||||
|
||||
/**
|
||||
* Extracts the entire archive to the given location
|
||||
*
|
||||
* @param targetPath Target location
|
||||
* @param overwrite If the file already exists at the target path, the file will be overwritten if this is true.
|
||||
* Default is FALSE
|
||||
* @param keepOriginalPermission The file will be set as the permission from the entry if this is true.
|
||||
* Default is FALSE
|
||||
*/
|
||||
extractAllTo: function (/**String*/ targetPath, /**Boolean*/ overwrite, /**Boolean*/ keepOriginalPermission, /*String, Buffer*/ pass) {
|
||||
overwrite = get_Bool(overwrite, false);
|
||||
pass = get_Str(keepOriginalPermission, pass);
|
||||
keepOriginalPermission = get_Bool(keepOriginalPermission, false);
|
||||
if (!_zip) {
|
||||
throw new Error(Utils.Errors.NO_ZIP);
|
||||
}
|
||||
_zip.entries.forEach(function (entry) {
|
||||
var entryName = sanitize(targetPath, canonical(entry.entryName.toString()));
|
||||
if (entry.isDirectory) {
|
||||
filetools.makeDir(entryName);
|
||||
return;
|
||||
}
|
||||
var content = entry.getData(pass);
|
||||
if (!content) {
|
||||
throw new Error(Utils.Errors.CANT_EXTRACT_FILE);
|
||||
}
|
||||
// The reverse operation for attr depend on method addFile()
|
||||
const fileAttr = keepOriginalPermission ? entry.header.fileAttr : undefined;
|
||||
filetools.writeFileTo(entryName, content, overwrite, fileAttr);
|
||||
try {
|
||||
filetools.fs.utimesSync(entryName, entry.header.time, entry.header.time);
|
||||
} catch (err) {
|
||||
throw new Error(Utils.Errors.CANT_EXTRACT_FILE);
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
* Asynchronous extractAllTo
|
||||
*
|
||||
* @param targetPath Target location
|
||||
* @param overwrite If the file already exists at the target path, the file will be overwritten if this is true.
|
||||
* Default is FALSE
|
||||
* @param keepOriginalPermission The file will be set as the permission from the entry if this is true.
|
||||
* Default is FALSE
|
||||
* @param callback The callback will be executed when all entries are extracted successfully or any error is thrown.
|
||||
*/
|
||||
extractAllToAsync: function (/**String*/ targetPath, /**Boolean*/ overwrite, /**Boolean*/ keepOriginalPermission, /**Function*/ callback) {
|
||||
overwrite = get_Bool(overwrite, false);
|
||||
if (typeof keepOriginalPermission === "function" && !callback) callback = keepOriginalPermission;
|
||||
keepOriginalPermission = get_Bool(keepOriginalPermission, false);
|
||||
if (!callback) {
|
||||
callback = function (err) {
|
||||
throw new Error(err);
|
||||
};
|
||||
}
|
||||
if (!_zip) {
|
||||
callback(new Error(Utils.Errors.NO_ZIP));
|
||||
return;
|
||||
}
|
||||
|
||||
targetPath = pth.resolve(targetPath);
|
||||
// convert entryName to
|
||||
const getPath = (entry) => sanitize(targetPath, pth.normalize(canonical(entry.entryName.toString())));
|
||||
const getError = (msg, file) => new Error(msg + ': "' + file + '"');
|
||||
|
||||
// separate directories from files
|
||||
const dirEntries = [];
|
||||
const fileEntries = new Set();
|
||||
_zip.entries.forEach((e) => {
|
||||
if (e.isDirectory) {
|
||||
dirEntries.push(e);
|
||||
} else {
|
||||
fileEntries.add(e);
|
||||
}
|
||||
});
|
||||
|
||||
// Create directory entries first synchronously
|
||||
// this prevents race condition and assures folders are there before writing files
|
||||
for (const entry of dirEntries) {
|
||||
const dirPath = getPath(entry);
|
||||
// The reverse operation for attr depend on method addFile()
|
||||
const dirAttr = keepOriginalPermission ? entry.header.fileAttr : undefined;
|
||||
try {
|
||||
filetools.makeDir(dirPath);
|
||||
if (dirAttr) filetools.fs.chmodSync(dirPath, dirAttr);
|
||||
// in unix timestamp will change if files are later added to folder, but still
|
||||
filetools.fs.utimesSync(dirPath, entry.header.time, entry.header.time);
|
||||
} catch (er) {
|
||||
callback(getError("Unable to create folder", dirPath));
|
||||
}
|
||||
}
|
||||
|
||||
// callback wrapper, for some house keeping
|
||||
const done = () => {
|
||||
if (fileEntries.size === 0) {
|
||||
callback();
|
||||
}
|
||||
};
|
||||
|
||||
// Extract file entries asynchronously
|
||||
for (const entry of fileEntries.values()) {
|
||||
const entryName = pth.normalize(canonical(entry.entryName.toString()));
|
||||
const filePath = sanitize(targetPath, entryName);
|
||||
entry.getDataAsync(function (content, err_1) {
|
||||
if (err_1) {
|
||||
callback(new Error(err_1));
|
||||
return;
|
||||
}
|
||||
if (!content) {
|
||||
callback(new Error(Utils.Errors.CANT_EXTRACT_FILE));
|
||||
} else {
|
||||
// The reverse operation for attr depend on method addFile()
|
||||
const fileAttr = keepOriginalPermission ? entry.header.fileAttr : undefined;
|
||||
filetools.writeFileToAsync(filePath, content, overwrite, fileAttr, function (succ) {
|
||||
if (!succ) {
|
||||
callback(getError("Unable to write file", filePath));
|
||||
return;
|
||||
}
|
||||
filetools.fs.utimes(filePath, entry.header.time, entry.header.time, function (err_2) {
|
||||
if (err_2) {
|
||||
callback(getError("Unable to set times", filePath));
|
||||
return;
|
||||
}
|
||||
fileEntries.delete(entry);
|
||||
// call the callback if it was last entry
|
||||
done();
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
// call the callback if fileEntries was empty
|
||||
done();
|
||||
},
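The synchronous and asynchronous full-extraction paths above differ mainly in how errors are delivered. A brief sketch, with hypothetical target paths:

```javascript
const AdmZip = require("adm-zip");
const zip = new AdmZip("./archive.zip");

// synchronous: throws on the first entry it cannot extract
zip.extractAllTo("./out-sync", /*overwrite*/ true, /*keepOriginalPermission*/ false);

// asynchronous: directories are created first, then files are written concurrently
zip.extractAllToAsync("./out-async", true, false, function (err) {
    if (err) return console.error("extraction failed:", err);
    console.log("all entries extracted");
});
```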
|
||||
|
||||
/**
|
||||
* Writes the newly created zip file to disk at the specified location or if a zip was opened and no ``targetFileName`` is provided, it will overwrite the opened zip
|
||||
*
|
||||
* @param targetFileName
|
||||
* @param callback
|
||||
*/
|
||||
writeZip: function (/**String*/ targetFileName, /**Function*/ callback) {
|
||||
if (arguments.length === 1) {
|
||||
if (typeof targetFileName === "function") {
|
||||
callback = targetFileName;
|
||||
targetFileName = "";
|
||||
}
|
||||
}
|
||||
|
||||
if (!targetFileName && opts.filename) {
|
||||
targetFileName = opts.filename;
|
||||
}
|
||||
if (!targetFileName) return;
|
||||
|
||||
var zipData = _zip.compressToBuffer();
|
||||
if (zipData) {
|
||||
var ok = filetools.writeFileTo(targetFileName, zipData, true);
|
||||
if (typeof callback === "function") callback(!ok ? new Error("failed") : null, "");
|
||||
}
|
||||
},
|
||||
|
||||
writeZipPromise: function (/**String*/ targetFileName, /* object */ props) {
|
||||
const { overwrite, perm } = Object.assign({ overwrite: true }, props);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
// find file name
|
||||
if (!targetFileName && opts.filename) targetFileName = opts.filename;
|
||||
if (!targetFileName) reject("ADM-ZIP: ZIP File Name Missing");
|
||||
|
||||
this.toBufferPromise().then((zipData) => {
|
||||
const ret = (done) => (done ? resolve(done) : reject("ADM-ZIP: Wasn't able to write zip file"));
|
||||
filetools.writeFileToAsync(targetFileName, zipData, overwrite, perm, ret);
|
||||
}, reject);
|
||||
});
|
||||
},
|
||||
|
||||
toBufferPromise: function () {
|
||||
return new Promise((resolve, reject) => {
|
||||
_zip.toAsyncBuffer(resolve, reject);
|
||||
});
|
||||
},
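toBufferPromise and writeZipPromise wrap the callback-based compression path, so the whole write can be awaited. A short sketch; the file name is hypothetical.

```javascript
const AdmZip = require("adm-zip");

async function save() {
    const zip = new AdmZip();
    zip.addFile("hello.txt", Buffer.from("hi", "utf8"));

    const buf = await zip.toBufferPromise(); // whole archive as a Buffer
    console.log("archive size:", buf.length);

    await zip.writeZipPromise("./hello.zip", { overwrite: true });
}

save().catch(console.error);
```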
|
||||
|
||||
/**
|
||||
* Returns the content of the entire zip file as a Buffer object
|
||||
*
|
||||
* @return Buffer
|
||||
*/
|
||||
toBuffer: function (/**Function=*/ onSuccess, /**Function=*/ onFail, /**Function=*/ onItemStart, /**Function=*/ onItemEnd) {
|
||||
this.valueOf = 2;
|
||||
if (typeof onSuccess === "function") {
|
||||
_zip.toAsyncBuffer(onSuccess, onFail, onItemStart, onItemEnd);
|
||||
return null;
|
||||
}
|
||||
return _zip.compressToBuffer();
|
||||
}
|
||||
};
|
||||
};
|
node_modules/adm-zip/headers/entryHeader.js (338 lines, generated, vendored, normal file)
@@ -0,0 +1,338 @@
|
||||
var Utils = require("../util"),
|
||||
Constants = Utils.Constants;
|
||||
|
||||
/* The central directory file header */
|
||||
module.exports = function () {
|
||||
var _verMade = 20, // v2.0
|
||||
_version = 10, // v1.0
|
||||
_flags = 0,
|
||||
_method = 0,
|
||||
_time = 0,
|
||||
_crc = 0,
|
||||
_compressedSize = 0,
|
||||
_size = 0,
|
||||
_fnameLen = 0,
|
||||
_extraLen = 0,
|
||||
_comLen = 0,
|
||||
_diskStart = 0,
|
||||
_inattr = 0,
|
||||
_attr = 0,
|
||||
_offset = 0;
|
||||
|
||||
_verMade |= Utils.isWin ? 0x0a00 : 0x0300;
|
||||
|
||||
// Set EFS flag since filename and comment fields are all by default encoded using UTF-8.
|
||||
// Without it file names may be corrupted for other apps when file names use unicode chars
|
||||
_flags |= Constants.FLG_EFS;
|
||||
|
||||
var _dataHeader = {};
|
||||
|
||||
function setTime(val) {
|
||||
val = new Date(val);
|
||||
_time =
|
||||
(((val.getFullYear() - 1980) & 0x7f) << 25) | // b09-16 years from 1980
|
||||
((val.getMonth() + 1) << 21) | // b05-08 month
|
||||
(val.getDate() << 16) | // b00-04 day of month
|
||||
// 2 bytes time
|
||||
(val.getHours() << 11) | // b11-15 hour
|
||||
(val.getMinutes() << 5) | // b05-10 minute
|
||||
(val.getSeconds() >> 1); // b00-04 seconds divided by 2
|
||||
}
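setTime packs the timestamp into the 4-byte MS-DOS date/time format (date word in the high 16 bits, time word in the low 16 bits, seconds stored halved). A standalone sketch of the same packing together with the inverse used by the `time` getter below:

```javascript
function toDosTime(date) {
    return (
        (((date.getFullYear() - 1980) & 0x7f) << 25) | // years since 1980
        ((date.getMonth() + 1) << 21) |                // month 1-12
        (date.getDate() << 16) |                       // day of month
        (date.getHours() << 11) |
        (date.getMinutes() << 5) |
        (date.getSeconds() >> 1)                       // 2-second resolution
    );
}

function fromDosTime(dos) {
    return new Date(((dos >> 25) & 0x7f) + 1980, ((dos >> 21) & 0x0f) - 1, (dos >> 16) & 0x1f, (dos >> 11) & 0x1f, (dos >> 5) & 0x3f, (dos & 0x1f) << 1);
}

const stamp = new Date(2023, 4, 17, 10, 30, 42);
console.log(fromDosTime(toDosTime(stamp))); // 2023-05-17 10:30:42 (odd seconds are rounded down)
```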
|
||||
|
||||
setTime(+new Date());
|
||||
|
||||
return {
|
||||
get made() {
|
||||
return _verMade;
|
||||
},
|
||||
set made(val) {
|
||||
_verMade = val;
|
||||
},
|
||||
|
||||
get version() {
|
||||
return _version;
|
||||
},
|
||||
set version(val) {
|
||||
_version = val;
|
||||
},
|
||||
|
||||
get flags() {
|
||||
return _flags;
|
||||
},
|
||||
set flags(val) {
|
||||
_flags = val;
|
||||
},
|
||||
|
||||
get method() {
|
||||
return _method;
|
||||
},
|
||||
set method(val) {
|
||||
switch (val) {
|
||||
case Constants.STORED:
|
||||
this.version = 10;
|
||||
case Constants.DEFLATED:
|
||||
default:
|
||||
this.version = 20;
|
||||
}
|
||||
_method = val;
|
||||
},
|
||||
|
||||
get time() {
|
||||
return new Date(((_time >> 25) & 0x7f) + 1980, ((_time >> 21) & 0x0f) - 1, (_time >> 16) & 0x1f, (_time >> 11) & 0x1f, (_time >> 5) & 0x3f, (_time & 0x1f) << 1);
|
||||
},
|
||||
set time(val) {
|
||||
setTime(val);
|
||||
},
|
||||
|
||||
get crc() {
|
||||
return _crc;
|
||||
},
|
||||
set crc(val) {
|
||||
_crc = Math.max(0, val) >>> 0;
|
||||
},
|
||||
|
||||
get compressedSize() {
|
||||
return _compressedSize;
|
||||
},
|
||||
set compressedSize(val) {
|
||||
_compressedSize = Math.max(0, val) >>> 0;
|
||||
},
|
||||
|
||||
get size() {
|
||||
return _size;
|
||||
},
|
||||
set size(val) {
|
||||
_size = Math.max(0, val) >>> 0;
|
||||
},
|
||||
|
||||
get fileNameLength() {
|
||||
return _fnameLen;
|
||||
},
|
||||
set fileNameLength(val) {
|
||||
_fnameLen = val;
|
||||
},
|
||||
|
||||
get extraLength() {
|
||||
return _extraLen;
|
||||
},
|
||||
set extraLength(val) {
|
||||
_extraLen = val;
|
||||
},
|
||||
|
||||
get commentLength() {
|
||||
return _comLen;
|
||||
},
|
||||
set commentLength(val) {
|
||||
_comLen = val;
|
||||
},
|
||||
|
||||
get diskNumStart() {
|
||||
return _diskStart;
|
||||
},
|
||||
set diskNumStart(val) {
|
||||
_diskStart = Math.max(0, val) >>> 0;
|
||||
},
|
||||
|
||||
get inAttr() {
|
||||
return _inattr;
|
||||
},
|
||||
set inAttr(val) {
|
||||
_inattr = Math.max(0, val) >>> 0;
|
||||
},
|
||||
|
||||
get attr() {
|
||||
return _attr;
|
||||
},
|
||||
set attr(val) {
|
||||
_attr = Math.max(0, val) >>> 0;
|
||||
},
|
||||
|
||||
// get Unix file permissions
|
||||
get fileAttr() {
|
||||
return _attr ? (((_attr >>> 0) | 0) >> 16) & 0xfff : 0;
|
||||
},
|
||||
|
||||
get offset() {
|
||||
return _offset;
|
||||
},
|
||||
set offset(val) {
|
||||
_offset = Math.max(0, val) >>> 0;
|
||||
},
|
||||
|
||||
get encripted() {
|
||||
return (_flags & 1) === 1;
|
||||
},
|
||||
|
||||
get entryHeaderSize() {
|
||||
return Constants.CENHDR + _fnameLen + _extraLen + _comLen;
|
||||
},
|
||||
|
||||
get realDataOffset() {
|
||||
return _offset + Constants.LOCHDR + _dataHeader.fnameLen + _dataHeader.extraLen;
|
||||
},
|
||||
|
||||
get dataHeader() {
|
||||
return _dataHeader;
|
||||
},
|
||||
|
||||
loadDataHeaderFromBinary: function (/*Buffer*/ input) {
|
||||
var data = input.slice(_offset, _offset + Constants.LOCHDR);
|
||||
// 30 bytes and should start with "PK\003\004"
|
||||
if (data.readUInt32LE(0) !== Constants.LOCSIG) {
|
||||
throw new Error(Utils.Errors.INVALID_LOC);
|
||||
}
|
||||
_dataHeader = {
|
||||
// version needed to extract
|
||||
version: data.readUInt16LE(Constants.LOCVER),
|
||||
// general purpose bit flag
|
||||
flags: data.readUInt16LE(Constants.LOCFLG),
|
||||
// compression method
|
||||
method: data.readUInt16LE(Constants.LOCHOW),
|
||||
// modification time (2 bytes time, 2 bytes date)
|
||||
time: data.readUInt32LE(Constants.LOCTIM),
|
||||
// uncompressed file crc-32 value
|
||||
crc: data.readUInt32LE(Constants.LOCCRC),
|
||||
// compressed size
|
||||
compressedSize: data.readUInt32LE(Constants.LOCSIZ),
|
||||
// uncompressed size
|
||||
size: data.readUInt32LE(Constants.LOCLEN),
|
||||
// filename length
|
||||
fnameLen: data.readUInt16LE(Constants.LOCNAM),
|
||||
// extra field length
|
||||
extraLen: data.readUInt16LE(Constants.LOCEXT)
|
||||
};
|
||||
},
|
||||
|
||||
loadFromBinary: function (/*Buffer*/ data) {
|
||||
// data should be 46 bytes and start with "PK 01 02"
|
||||
if (data.length !== Constants.CENHDR || data.readUInt32LE(0) !== Constants.CENSIG) {
|
||||
throw new Error(Utils.Errors.INVALID_CEN);
|
||||
}
|
||||
// version made by
|
||||
_verMade = data.readUInt16LE(Constants.CENVEM);
|
||||
// version needed to extract
|
||||
_version = data.readUInt16LE(Constants.CENVER);
|
||||
// encrypt, decrypt flags
|
||||
_flags = data.readUInt16LE(Constants.CENFLG);
|
||||
// compression method
|
||||
_method = data.readUInt16LE(Constants.CENHOW);
|
||||
// modification time (2 bytes time, 2 bytes date)
|
||||
_time = data.readUInt32LE(Constants.CENTIM);
|
||||
// uncompressed file crc-32 value
|
||||
_crc = data.readUInt32LE(Constants.CENCRC);
|
||||
// compressed size
|
||||
_compressedSize = data.readUInt32LE(Constants.CENSIZ);
|
||||
// uncompressed size
|
||||
_size = data.readUInt32LE(Constants.CENLEN);
|
||||
// filename length
|
||||
_fnameLen = data.readUInt16LE(Constants.CENNAM);
|
||||
// extra field length
|
||||
_extraLen = data.readUInt16LE(Constants.CENEXT);
|
||||
// file comment length
|
||||
_comLen = data.readUInt16LE(Constants.CENCOM);
|
||||
// volume number start
|
||||
_diskStart = data.readUInt16LE(Constants.CENDSK);
|
||||
// internal file attributes
|
||||
_inattr = data.readUInt16LE(Constants.CENATT);
|
||||
// external file attributes
|
||||
_attr = data.readUInt32LE(Constants.CENATX);
|
||||
// LOC header offset
|
||||
_offset = data.readUInt32LE(Constants.CENOFF);
|
||||
},
|
||||
|
||||
dataHeaderToBinary: function () {
|
||||
// LOC header size (30 bytes)
|
||||
var data = Buffer.alloc(Constants.LOCHDR);
|
||||
// "PK\003\004"
|
||||
data.writeUInt32LE(Constants.LOCSIG, 0);
|
||||
// version needed to extract
|
||||
data.writeUInt16LE(_version, Constants.LOCVER);
|
||||
// general purpose bit flag
|
||||
data.writeUInt16LE(_flags, Constants.LOCFLG);
|
||||
// compression method
|
||||
data.writeUInt16LE(_method, Constants.LOCHOW);
|
||||
// modification time (2 bytes time, 2 bytes date)
|
||||
data.writeUInt32LE(_time, Constants.LOCTIM);
|
||||
// uncompressed file crc-32 value
|
||||
data.writeUInt32LE(_crc, Constants.LOCCRC);
|
||||
// compressed size
|
||||
data.writeUInt32LE(_compressedSize, Constants.LOCSIZ);
|
||||
// uncompressed size
|
||||
data.writeUInt32LE(_size, Constants.LOCLEN);
|
||||
// filename length
|
||||
data.writeUInt16LE(_fnameLen, Constants.LOCNAM);
|
||||
// extra field length
|
||||
data.writeUInt16LE(_extraLen, Constants.LOCEXT);
|
||||
return data;
|
||||
},
|
||||
|
||||
entryHeaderToBinary: function () {
|
||||
// CEN header size (46 bytes)
|
||||
var data = Buffer.alloc(Constants.CENHDR + _fnameLen + _extraLen + _comLen);
|
||||
// "PK\001\002"
|
||||
data.writeUInt32LE(Constants.CENSIG, 0);
|
||||
// version made by
|
||||
data.writeUInt16LE(_verMade, Constants.CENVEM);
|
||||
// version needed to extract
|
||||
data.writeUInt16LE(_version, Constants.CENVER);
|
||||
// encrypt, decrypt flags
|
||||
data.writeUInt16LE(_flags, Constants.CENFLG);
|
||||
// compression method
|
||||
data.writeUInt16LE(_method, Constants.CENHOW);
|
||||
// modification time (2 bytes time, 2 bytes date)
|
||||
data.writeUInt32LE(_time, Constants.CENTIM);
|
||||
// uncompressed file crc-32 value
|
||||
data.writeUInt32LE(_crc, Constants.CENCRC);
|
||||
// compressed size
|
||||
data.writeUInt32LE(_compressedSize, Constants.CENSIZ);
|
||||
// uncompressed size
|
||||
data.writeUInt32LE(_size, Constants.CENLEN);
|
||||
// filename length
|
||||
data.writeUInt16LE(_fnameLen, Constants.CENNAM);
|
||||
// extra field length
|
||||
data.writeUInt16LE(_extraLen, Constants.CENEXT);
|
||||
// file comment length
|
||||
data.writeUInt16LE(_comLen, Constants.CENCOM);
|
||||
// volume number start
|
||||
data.writeUInt16LE(_diskStart, Constants.CENDSK);
|
||||
// internal file attributes
|
||||
data.writeUInt16LE(_inattr, Constants.CENATT);
|
||||
// external file attributes
|
||||
data.writeUInt32LE(_attr, Constants.CENATX);
|
||||
// LOC header offset
|
||||
data.writeUInt32LE(_offset, Constants.CENOFF);
|
||||
// fill all with
|
||||
data.fill(0x00, Constants.CENHDR);
|
||||
return data;
|
||||
},
|
||||
|
||||
toJSON: function () {
|
||||
const bytes = function (nr) {
|
||||
return nr + " bytes";
|
||||
};
|
||||
|
||||
return {
|
||||
made: _verMade,
|
||||
version: _version,
|
||||
flags: _flags,
|
||||
method: Utils.methodToString(_method),
|
||||
time: this.time,
|
||||
crc: "0x" + _crc.toString(16).toUpperCase(),
|
||||
compressedSize: bytes(_compressedSize),
|
||||
size: bytes(_size),
|
||||
fileNameLength: bytes(_fnameLen),
|
||||
extraLength: bytes(_extraLen),
|
||||
commentLength: bytes(_comLen),
|
||||
diskNumStart: _diskStart,
|
||||
inAttr: _inattr,
|
||||
attr: _attr,
|
||||
offset: _offset,
|
||||
entryHeaderSize: bytes(Constants.CENHDR + _fnameLen + _extraLen + _comLen)
|
||||
};
|
||||
},
|
||||
|
||||
toString: function () {
|
||||
return JSON.stringify(this.toJSON(), null, "\t");
|
||||
}
|
||||
};
|
||||
};
|
node_modules/adm-zip/headers/index.js (2 lines, generated, vendored, normal file)
@@ -0,0 +1,2 @@
exports.EntryHeader = require("./entryHeader");
exports.MainHeader = require("./mainHeader");
node_modules/adm-zip/headers/mainHeader.js (130 lines, generated, vendored, normal file)
@@ -0,0 +1,130 @@
|
||||
var Utils = require("../util"),
|
||||
Constants = Utils.Constants;
|
||||
|
||||
/* The entries in the end of central directory */
|
||||
module.exports = function () {
|
||||
var _volumeEntries = 0,
|
||||
_totalEntries = 0,
|
||||
_size = 0,
|
||||
_offset = 0,
|
||||
_commentLength = 0;
|
||||
|
||||
return {
|
||||
get diskEntries() {
|
||||
return _volumeEntries;
|
||||
},
|
||||
set diskEntries(/*Number*/ val) {
|
||||
_volumeEntries = _totalEntries = val;
|
||||
},
|
||||
|
||||
get totalEntries() {
|
||||
return _totalEntries;
|
||||
},
|
||||
set totalEntries(/*Number*/ val) {
|
||||
_totalEntries = _volumeEntries = val;
|
||||
},
|
||||
|
||||
get size() {
|
||||
return _size;
|
||||
},
|
||||
set size(/*Number*/ val) {
|
||||
_size = val;
|
||||
},
|
||||
|
||||
get offset() {
|
||||
return _offset;
|
||||
},
|
||||
set offset(/*Number*/ val) {
|
||||
_offset = val;
|
||||
},
|
||||
|
||||
get commentLength() {
|
||||
return _commentLength;
|
||||
},
|
||||
set commentLength(/*Number*/ val) {
|
||||
_commentLength = val;
|
||||
},
|
||||
|
||||
get mainHeaderSize() {
|
||||
return Constants.ENDHDR + _commentLength;
|
||||
},
|
||||
|
||||
loadFromBinary: function (/*Buffer*/ data) {
|
||||
// data should be 22 bytes and start with "PK 05 06"
|
||||
// or be 56+ bytes and start with "PK 06 06" for Zip64
|
||||
if (
|
||||
(data.length !== Constants.ENDHDR || data.readUInt32LE(0) !== Constants.ENDSIG) &&
|
||||
(data.length < Constants.ZIP64HDR || data.readUInt32LE(0) !== Constants.ZIP64SIG)
|
||||
) {
|
||||
throw new Error(Utils.Errors.INVALID_END);
|
||||
}
|
||||
|
||||
if (data.readUInt32LE(0) === Constants.ENDSIG) {
|
||||
// number of entries on this volume
|
||||
_volumeEntries = data.readUInt16LE(Constants.ENDSUB);
|
||||
// total number of entries
|
||||
_totalEntries = data.readUInt16LE(Constants.ENDTOT);
|
||||
// central directory size in bytes
|
||||
_size = data.readUInt32LE(Constants.ENDSIZ);
|
||||
// offset of first CEN header
|
||||
_offset = data.readUInt32LE(Constants.ENDOFF);
|
||||
// zip file comment length
|
||||
_commentLength = data.readUInt16LE(Constants.ENDCOM);
|
||||
} else {
|
||||
// number of entries on this volume
|
||||
_volumeEntries = Utils.readBigUInt64LE(data, Constants.ZIP64SUB);
|
||||
// total number of entries
|
||||
_totalEntries = Utils.readBigUInt64LE(data, Constants.ZIP64TOT);
|
||||
// central directory size in bytes
|
||||
_size = Utils.readBigUInt64LE(data, Constants.ZIP64SIZE);
|
||||
// offset of first CEN header
|
||||
_offset = Utils.readBigUInt64LE(data, Constants.ZIP64OFF);
|
||||
|
||||
_commentLength = 0;
|
||||
}
|
||||
},
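loadFromBinary reads the 22-byte End of Central Directory record (or the Zip64 variant). A minimal sketch that builds the EOCD of an empty archive and reads it back using the same field offsets that util/constants.js assigns (ENDSUB = 8, ENDTOT = 10, ENDSIZ = 12, ENDOFF = 16, ENDCOM = 20):

```javascript
// an empty zip file is exactly one EOCD record: "PK\005\006" followed by zeroed fields
const eocd = Buffer.alloc(22);
eocd.writeUInt32LE(0x06054b50, 0); // ENDSIG

console.log({
    entriesOnDisk: eocd.readUInt16LE(8),     // ENDSUB
    totalEntries: eocd.readUInt16LE(10),     // ENDTOT
    centralDirSize: eocd.readUInt32LE(12),   // ENDSIZ
    centralDirOffset: eocd.readUInt32LE(16), // ENDOFF
    commentLength: eocd.readUInt16LE(20)     // ENDCOM
}); // all zero for an empty archive
```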
|
||||
|
||||
toBinary: function () {
|
||||
var b = Buffer.alloc(Constants.ENDHDR + _commentLength);
|
||||
// "PK 05 06" signature
|
||||
b.writeUInt32LE(Constants.ENDSIG, 0);
|
||||
b.writeUInt32LE(0, 4);
|
||||
// number of entries on this volume
|
||||
b.writeUInt16LE(_volumeEntries, Constants.ENDSUB);
|
||||
// total number of entries
|
||||
b.writeUInt16LE(_totalEntries, Constants.ENDTOT);
|
||||
// central directory size in bytes
|
||||
b.writeUInt32LE(_size, Constants.ENDSIZ);
|
||||
// offset of first CEN header
|
||||
b.writeUInt32LE(_offset, Constants.ENDOFF);
|
||||
// zip file comment length
|
||||
b.writeUInt16LE(_commentLength, Constants.ENDCOM);
|
||||
// fill comment memory with spaces so no garbage is left there
|
||||
b.fill(" ", Constants.ENDHDR);
|
||||
|
||||
return b;
|
||||
},
|
||||
|
||||
toJSON: function () {
|
||||
// creates 0x0000 style output
|
||||
const offset = function (nr, len) {
|
||||
let offs = nr.toString(16).toUpperCase();
|
||||
while (offs.length < len) offs = "0" + offs;
|
||||
return "0x" + offs;
|
||||
};
|
||||
|
||||
return {
|
||||
diskEntries: _volumeEntries,
|
||||
totalEntries: _totalEntries,
|
||||
size: _size + " bytes",
|
||||
offset: offset(_offset, 4),
|
||||
commentLength: _commentLength
|
||||
};
|
||||
},
|
||||
|
||||
toString: function () {
|
||||
return JSON.stringify(this.toJSON(), null, "\t");
|
||||
}
|
||||
};
|
||||
};
|
node_modules/adm-zip/methods/deflater.js (33 lines, generated, vendored, normal file)
@@ -0,0 +1,33 @@
module.exports = function (/*Buffer*/ inbuf) {
    var zlib = require("zlib");

    var opts = { chunkSize: (parseInt(inbuf.length / 1024) + 1) * 1024 };

    return {
        deflate: function () {
            return zlib.deflateRawSync(inbuf, opts);
        },

        deflateAsync: function (/*Function*/ callback) {
            var tmp = zlib.createDeflateRaw(opts),
                parts = [],
                total = 0;
            tmp.on("data", function (data) {
                parts.push(data);
                total += data.length;
            });
            tmp.on("end", function () {
                var buf = Buffer.alloc(total),
                    written = 0;
                buf.fill(0);
                for (var i = 0; i < parts.length; i++) {
                    var part = parts[i];
                    part.copy(buf, written);
                    written += part.length;
                }
                callback && callback(buf);
            });
            tmp.end(inbuf);
        }
    };
};
node_modules/adm-zip/methods/index.js (3 lines, generated, vendored, normal file)
@@ -0,0 +1,3 @@
exports.Deflater = require("./deflater");
exports.Inflater = require("./inflater");
exports.ZipCrypto = require("./zipcrypto");
node_modules/adm-zip/methods/inflater.js (31 lines, generated, vendored, normal file)
@@ -0,0 +1,31 @@
module.exports = function (/*Buffer*/ inbuf) {
    var zlib = require("zlib");

    return {
        inflate: function () {
            return zlib.inflateRawSync(inbuf);
        },

        inflateAsync: function (/*Function*/ callback) {
            var tmp = zlib.createInflateRaw(),
                parts = [],
                total = 0;
            tmp.on("data", function (data) {
                parts.push(data);
                total += data.length;
            });
            tmp.on("end", function () {
                var buf = Buffer.alloc(total),
                    written = 0;
                buf.fill(0);
                for (var i = 0; i < parts.length; i++) {
                    var part = parts[i];
                    part.copy(buf, written);
                    written += part.length;
                }
                callback && callback(buf);
            });
            tmp.end(inbuf);
        }
    };
};
node_modules/adm-zip/methods/zipcrypto.js (170 lines, generated, vendored, normal file)
@@ -0,0 +1,170 @@
|
||||
"use strict";
|
||||
|
||||
// node crypt, we use it for generate salt
|
||||
// eslint-disable-next-line node/no-unsupported-features/node-builtins
|
||||
const { randomFillSync } = require("crypto");
|
||||
|
||||
// generate CRC32 lookup table
|
||||
const crctable = new Uint32Array(256).map((t, crc) => {
|
||||
for (let j = 0; j < 8; j++) {
|
||||
if (0 !== (crc & 1)) {
|
||||
crc = (crc >>> 1) ^ 0xedb88320;
|
||||
} else {
|
||||
crc >>>= 1;
|
||||
}
|
||||
}
|
||||
return crc >>> 0;
|
||||
});
|
||||
|
||||
// C-style uInt32 Multiply (discards higher bits, when JS multiply discards lower bits)
|
||||
const uMul = (a, b) => Math.imul(a, b) >>> 0;
|
||||
|
||||
// crc32 byte single update (actually same function is part of utils.crc32 function :) )
|
||||
const crc32update = (pCrc32, bval) => {
|
||||
return crctable[(pCrc32 ^ bval) & 0xff] ^ (pCrc32 >>> 8);
|
||||
};
|
||||
|
||||
// function for generating salt for encrytion header
|
||||
const genSalt = () => {
|
||||
if ("function" === typeof randomFillSync) {
|
||||
return randomFillSync(Buffer.alloc(12));
|
||||
} else {
|
||||
// fallback if function is not defined
|
||||
return genSalt.node();
|
||||
}
|
||||
};
|
||||
|
||||
// salt generation with node random function (mainly as fallback)
|
||||
genSalt.node = () => {
|
||||
const salt = Buffer.alloc(12);
|
||||
const len = salt.length;
|
||||
for (let i = 0; i < len; i++) salt[i] = (Math.random() * 256) & 0xff;
|
||||
return salt;
|
||||
};
|
||||
|
||||
// general config
|
||||
const config = {
|
||||
genSalt
|
||||
};
|
||||
|
||||
// Class Initkeys handles same basic ops with keys
|
||||
function Initkeys(pw) {
|
||||
const pass = Buffer.isBuffer(pw) ? pw : Buffer.from(pw);
|
||||
this.keys = new Uint32Array([0x12345678, 0x23456789, 0x34567890]);
|
||||
for (let i = 0; i < pass.length; i++) {
|
||||
this.updateKeys(pass[i]);
|
||||
}
|
||||
}
|
||||
|
||||
Initkeys.prototype.updateKeys = function (byteValue) {
|
||||
const keys = this.keys;
|
||||
keys[0] = crc32update(keys[0], byteValue);
|
||||
keys[1] += keys[0] & 0xff;
|
||||
keys[1] = uMul(keys[1], 134775813) + 1;
|
||||
keys[2] = crc32update(keys[2], keys[1] >>> 24);
|
||||
return byteValue;
|
||||
};
|
||||
|
||||
Initkeys.prototype.next = function () {
|
||||
const k = (this.keys[2] | 2) >>> 0; // key
|
||||
return (uMul(k, k ^ 1) >> 8) & 0xff; // decode
|
||||
};
|
||||
|
||||
function make_decrypter(/*Buffer*/ pwd) {
|
||||
// 1. Stage initialize key
|
||||
const keys = new Initkeys(pwd);
|
||||
|
||||
// return decrypter function
|
||||
return function (/*Buffer*/ data) {
|
||||
// result - we create new Buffer for results
|
||||
const result = Buffer.alloc(data.length);
|
||||
let pos = 0;
|
||||
// process input data
|
||||
for (let c of data) {
|
||||
//c ^= keys.next();
|
||||
//result[pos++] = c; // decode & Save Value
|
||||
result[pos++] = keys.updateKeys(c ^ keys.next()); // update keys with decoded byte
|
||||
}
|
||||
return result;
|
||||
};
|
||||
}
|
||||
|
||||
function make_encrypter(/*Buffer*/ pwd) {
|
||||
// 1. Stage initialize key
|
||||
const keys = new Initkeys(pwd);
|
||||
|
||||
// return encrypting function, result and pos is here so we dont have to merge buffers later
|
||||
return function (/*Buffer*/ data, /*Buffer*/ result, /* Number */ pos = 0) {
|
||||
// result - we create new Buffer for results
|
||||
if (!result) result = Buffer.alloc(data.length);
|
||||
// process input data
|
||||
for (let c of data) {
|
||||
const k = keys.next(); // save key byte
|
||||
result[pos++] = c ^ k; // save val
|
||||
keys.updateKeys(c); // update keys with decoded byte
|
||||
}
|
||||
return result;
|
||||
};
|
||||
}
|
||||
|
||||
function decrypt(/*Buffer*/ data, /*Object*/ header, /*String, Buffer*/ pwd) {
|
||||
if (!data || !Buffer.isBuffer(data) || data.length < 12) {
|
||||
return Buffer.alloc(0);
|
||||
}
|
||||
|
||||
// 1. We Initialize and generate decrypting function
|
||||
const decrypter = make_decrypter(pwd);
|
||||
|
||||
// 2. decrypt salt what is always 12 bytes and is a part of file content
|
||||
const salt = decrypter(data.slice(0, 12));
|
||||
|
||||
// 3. does password meet expectations
|
||||
if (salt[11] !== header.crc >>> 24) {
|
||||
throw "ADM-ZIP: Wrong Password";
|
||||
}
|
||||
|
||||
// 4. decode content
|
||||
return decrypter(data.slice(12));
|
||||
}
|
||||
|
||||
// lets add way to populate salt, NOT RECOMMENDED for production but maybe useful for testing general functionality
|
||||
function _salter(data) {
|
||||
if (Buffer.isBuffer(data) && data.length >= 12) {
|
||||
// be aware - currently salting buffer data is modified
|
||||
config.genSalt = function () {
|
||||
return data.slice(0, 12);
|
||||
};
|
||||
} else if (data === "node") {
|
||||
// test salt generation with node random function
|
||||
config.genSalt = genSalt.node;
|
||||
} else {
|
||||
// if value is not acceptable config gets reset.
|
||||
config.genSalt = genSalt;
|
||||
}
|
||||
}
|
||||
|
||||
function encrypt(/*Buffer*/ data, /*Object*/ header, /*String, Buffer*/ pwd, /*Boolean*/ oldlike = false) {
|
||||
// 1. test data if data is not Buffer we make buffer from it
|
||||
if (data == null) data = Buffer.alloc(0);
|
||||
// if data is not buffer be make buffer from it
|
||||
if (!Buffer.isBuffer(data)) data = Buffer.from(data.toString());
|
||||
|
||||
// 2. We Initialize and generate encrypting function
|
||||
const encrypter = make_encrypter(pwd);
|
||||
|
||||
// 3. generate salt (12-bytes of random data)
|
||||
const salt = config.genSalt();
|
||||
salt[11] = (header.crc >>> 24) & 0xff;
|
||||
|
||||
// old implementations (before PKZip 2.04g) used two byte check
|
||||
if (oldlike) salt[10] = (header.crc >>> 16) & 0xff;
|
||||
|
||||
// 4. create output
|
||||
const result = Buffer.alloc(data.length + 12);
|
||||
encrypter(salt, result);
|
||||
|
||||
// finally encode content
|
||||
return encrypter(data, result, 12);
|
||||
}
|
||||
|
||||
module.exports = { decrypt, encrypt, _salter };
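The exported encrypt/decrypt pair implements the legacy ZipCrypto stream cipher; decrypt validates the password by comparing the last decrypted salt byte against the high byte of the entry CRC, which is why both calls below receive the same header object. A hedged round-trip sketch: the require path assumes the module is loaded from inside this package, and the CRC value is a hypothetical placeholder.

```javascript
const { encrypt, decrypt } = require("./methods/zipcrypto"); // "adm-zip/methods/zipcrypto" from outside the package

const plain = Buffer.from("secret text", "utf8");
const header = { crc: 0x1c291ca3 }; // hypothetical CRC-32 of the plain data

const cipher = encrypt(plain, header, "p4ssw0rd");    // 12-byte encrypted salt + encrypted payload
const restored = decrypt(cipher, header, "p4ssw0rd"); // throws "ADM-ZIP: Wrong Password" on mismatch

console.log(restored.toString("utf8")); // "secret text"
```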
|
node_modules/adm-zip/package.json (48 lines, generated, vendored, normal file)
@@ -0,0 +1,48 @@
{
    "name": "adm-zip",
    "version": "0.5.10",
    "description": "Javascript implementation of zip for nodejs with support for electron original-fs. Allows user to create or extract zip files both in memory or to/from disk",
    "scripts": {
        "test": "mocha -R spec",
        "test:format": "npm run format:prettier:raw -- --check",
        "format": "npm run format:prettier",
        "format:prettier": "npm run format:prettier:raw -- --write",
        "format:prettier:raw": "prettier \"**/*.{js,yml,json}\""
    },
    "keywords": [
        "zip",
        "methods",
        "archive",
        "unzip"
    ],
    "homepage": "https://github.com/cthackers/adm-zip",
    "author": "Nasca Iacob <sy@another-d-mention.ro> (https://github.com/cthackers)",
    "bugs": {
        "email": "sy@another-d-mention.ro",
        "url": "https://github.com/cthackers/adm-zip/issues"
    },
    "license": "MIT",
    "files": [
        "adm-zip.js",
        "headers",
        "methods",
        "util",
        "zipEntry.js",
        "zipFile.js",
        "LICENSE"
    ],
    "main": "adm-zip.js",
    "repository": {
        "type": "git",
        "url": "https://github.com/cthackers/adm-zip.git"
    },
    "engines": {
        "node": ">=6.0"
    },
    "devDependencies": {
        "chai": "^4.3.4",
        "mocha": "^10.2.0",
        "prettier": "^2.2.1",
        "rimraf": "^3.0.2"
    }
}
node_modules/adm-zip/util/constants.js (142 lines, generated, vendored, normal file)
@@ -0,0 +1,142 @@
module.exports = {
    /* The local file header */
    LOCHDR           : 30, // LOC header size
    LOCSIG           : 0x04034b50, // "PK\003\004"
    LOCVER           : 4, // version needed to extract
    LOCFLG           : 6, // general purpose bit flag
    LOCHOW           : 8, // compression method
    LOCTIM           : 10, // modification time (2 bytes time, 2 bytes date)
    LOCCRC           : 14, // uncompressed file crc-32 value
    LOCSIZ           : 18, // compressed size
    LOCLEN           : 22, // uncompressed size
    LOCNAM           : 26, // filename length
    LOCEXT           : 28, // extra field length

    /* The Data descriptor */
    EXTSIG           : 0x08074b50, // "PK\007\008"
    EXTHDR           : 16, // EXT header size
    EXTCRC           : 4, // uncompressed file crc-32 value
    EXTSIZ           : 8, // compressed size
    EXTLEN           : 12, // uncompressed size

    /* The central directory file header */
    CENHDR           : 46, // CEN header size
    CENSIG           : 0x02014b50, // "PK\001\002"
    CENVEM           : 4, // version made by
    CENVER           : 6, // version needed to extract
    CENFLG           : 8, // encrypt, decrypt flags
    CENHOW           : 10, // compression method
    CENTIM           : 12, // modification time (2 bytes time, 2 bytes date)
    CENCRC           : 16, // uncompressed file crc-32 value
    CENSIZ           : 20, // compressed size
    CENLEN           : 24, // uncompressed size
    CENNAM           : 28, // filename length
    CENEXT           : 30, // extra field length
    CENCOM           : 32, // file comment length
    CENDSK           : 34, // volume number start
    CENATT           : 36, // internal file attributes
    CENATX           : 38, // external file attributes (host system dependent)
    CENOFF           : 42, // LOC header offset

    /* The entries in the end of central directory */
    ENDHDR           : 22, // END header size
    ENDSIG           : 0x06054b50, // "PK\005\006"
    ENDSUB           : 8, // number of entries on this disk
    ENDTOT           : 10, // total number of entries
    ENDSIZ           : 12, // central directory size in bytes
    ENDOFF           : 16, // offset of first CEN header
    ENDCOM           : 20, // zip file comment length

    END64HDR         : 20, // zip64 END header size
    END64SIG         : 0x07064b50, // zip64 Locator signature, "PK\006\007"
    END64START       : 4, // number of the disk with the start of the zip64
    END64OFF         : 8, // relative offset of the zip64 end of central directory
    END64NUMDISKS    : 16, // total number of disks

    ZIP64SIG         : 0x06064b50, // zip64 signature, "PK\006\006"
    ZIP64HDR         : 56, // zip64 record minimum size
    ZIP64LEAD        : 12, // leading bytes at the start of the record, not counted by the value stored in ZIP64SIZE
    ZIP64SIZE        : 4, // zip64 size of the central directory record
    ZIP64VEM         : 12, // zip64 version made by
    ZIP64VER         : 14, // zip64 version needed to extract
    ZIP64DSK         : 16, // zip64 number of this disk
    ZIP64DSKDIR      : 20, // number of the disk with the start of the record directory
    ZIP64SUB         : 24, // number of entries on this disk
    ZIP64TOT         : 32, // total number of entries
    ZIP64SIZB        : 40, // zip64 central directory size in bytes
    ZIP64OFF         : 48, // offset of start of central directory with respect to the starting disk number
    ZIP64EXTRA       : 56, // extensible data sector

    /* Compression methods */
    STORED           : 0, // no compression
    SHRUNK           : 1, // shrunk
    REDUCED1         : 2, // reduced with compression factor 1
    REDUCED2         : 3, // reduced with compression factor 2
    REDUCED3         : 4, // reduced with compression factor 3
    REDUCED4         : 5, // reduced with compression factor 4
    IMPLODED         : 6, // imploded
    // 7 reserved for Tokenizing compression algorithm
    DEFLATED         : 8, // deflated
    ENHANCED_DEFLATED: 9, // enhanced deflated
    PKWARE           : 10, // PKWare DCL imploded
    // 11 reserved by PKWARE
    BZIP2            : 12, // compressed using BZIP2
    // 13 reserved by PKWARE
    LZMA             : 14, // LZMA
    // 15-17 reserved by PKWARE
    IBM_TERSE        : 18, // compressed using IBM TERSE
    IBM_LZ77         : 19, // IBM LZ77 z
    AES_ENCRYPT      : 99, // WinZIP AES encryption method

    /* General purpose bit flag */
    // values can be obtained with the expression 2**bitnr
    FLG_ENC          : 1, // Bit 0: encrypted file
    FLG_COMP1        : 2, // Bit 1, compression option
    FLG_COMP2        : 4, // Bit 2, compression option
    FLG_DESC         : 8, // Bit 3, data descriptor
    FLG_ENH          : 16, // Bit 4, enhanced deflating
    FLG_PATCH        : 32, // Bit 5, indicates that the file is compressed patched data.
    FLG_STR          : 64, // Bit 6, strong encryption (patented)
    // Bits 7-10: Currently unused.
    FLG_EFS          : 2048, // Bit 11: Language encoding flag (EFS)
    // Bit 12: Reserved by PKWARE for enhanced compression.
    // Bit 13: encrypted the Central Directory (patented).
    // Bits 14-15: Reserved by PKWARE.
    FLG_MSK          : 4096, // mask header values

    /* Load type */
    FILE             : 2,
    BUFFER           : 1,
    NONE             : 0,

    /* 4.5 Extensible data fields */
    EF_ID            : 0,
    EF_SIZE          : 2,

    /* Header IDs */
    ID_ZIP64         : 0x0001,
    ID_AVINFO        : 0x0007,
    ID_PFS           : 0x0008,
    ID_OS2           : 0x0009,
    ID_NTFS          : 0x000a,
    ID_OPENVMS       : 0x000c,
    ID_UNIX          : 0x000d,
    ID_FORK          : 0x000e,
    ID_PATCH         : 0x000f,
    ID_X509_PKCS7    : 0x0014,
    ID_X509_CERTID_F : 0x0015,
    ID_X509_CERTID_C : 0x0016,
    ID_STRONGENC     : 0x0017,
    ID_RECORD_MGT    : 0x0018,
    ID_X509_PKCS7_RL : 0x0019,
    ID_IBM1          : 0x0065,
    ID_IBM2          : 0x0066,
    ID_POSZIP        : 0x4690,

    EF_ZIP64_OR_32   : 0xffffffff,
    EF_ZIP64_OR_16   : 0xffff,
    EF_ZIP64_SUNCOMP : 0,
    EF_ZIP64_SCOMP   : 8,
    EF_ZIP64_RHO     : 16,
    EF_ZIP64_DSN     : 24
};
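These offsets map directly onto the fixed-size binary headers of the ZIP format. As a minimal sketch (not part of adm-zip itself, and assuming deep requires into adm-zip/util are available), the local file header of the first entry in a zip buffer can be decoded like this, provided the buffer starts with a LOC record:

```javascript
const Constants = require("adm-zip/util/constants");

// buf is assumed to hold a zip file that begins with a local file header
function readFirstLocalHeader(buf) {
    if (buf.readUInt32LE(0) !== Constants.LOCSIG) throw new Error("not a LOC record");
    return {
        method: buf.readUInt16LE(Constants.LOCHOW), // e.g. Constants.DEFLATED
        crc: buf.readUInt32LE(Constants.LOCCRC),
        compressedSize: buf.readUInt32LE(Constants.LOCSIZ),
        size: buf.readUInt32LE(Constants.LOCLEN),
        nameLength: buf.readUInt16LE(Constants.LOCNAM)
    };
}
```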
35
node_modules/adm-zip/util/errors.js
generated
vendored
Normal file
@@ -0,0 +1,35 @@
module.exports = {
    /* Header error messages */
    INVALID_LOC: "Invalid LOC header (bad signature)",
    INVALID_CEN: "Invalid CEN header (bad signature)",
    INVALID_END: "Invalid END header (bad signature)",

    /* ZipEntry error messages */
    NO_DATA: "Nothing to decompress",
    BAD_CRC: "CRC32 checksum failed",
    FILE_IN_THE_WAY: "There is a file in the way: %s",
    UNKNOWN_METHOD: "Invalid/unsupported compression method",

    /* Inflater error messages */
    AVAIL_DATA: "inflate::Available inflate data did not terminate",
    INVALID_DISTANCE: "inflate::Invalid literal/length or distance code in fixed or dynamic block",
    TO_MANY_CODES: "inflate::Dynamic block code description: too many length or distance codes",
    INVALID_REPEAT_LEN: "inflate::Dynamic block code description: repeat more than specified lengths",
    INVALID_REPEAT_FIRST: "inflate::Dynamic block code description: repeat lengths with no first length",
    INCOMPLETE_CODES: "inflate::Dynamic block code description: code lengths codes incomplete",
    INVALID_DYN_DISTANCE: "inflate::Dynamic block code description: invalid distance code lengths",
    INVALID_CODES_LEN: "inflate::Dynamic block code description: invalid literal/length code lengths",
    INVALID_STORE_BLOCK: "inflate::Stored block length did not match one's complement",
    INVALID_BLOCK_TYPE: "inflate::Invalid block type (type == 3)",

    /* ADM-ZIP error messages */
    CANT_EXTRACT_FILE: "Could not extract the file",
    CANT_OVERRIDE: "Target file already exists",
    NO_ZIP: "No zip file was loaded",
    NO_ENTRY: "Entry doesn't exist",
    DIRECTORY_CONTENT_ERROR: "A directory cannot have content",
    FILE_NOT_FOUND: "File not found: %s",
    NOT_IMPLEMENTED: "Not implemented",
    INVALID_FILENAME: "Invalid filename",
    INVALID_FORMAT: "Invalid or unsupported zip format. No END header found"
};
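The "%s" placeholders are filled by plain string substitution rather than a formatting library; utils.js further down does exactly this in makeDir. A tiny illustrative sketch:

```javascript
const Errors = require("adm-zip/util/errors");

// adm-zip throws these strings directly, substituting %s by hand
const msg = Errors.FILE_IN_THE_WAY.replace("%s", "/tmp/output/some.txt");
console.log(msg); // "There is a file in the way: /tmp/output/some.txt"
```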
79
node_modules/adm-zip/util/fattr.js
generated
vendored
Normal file
@@ -0,0 +1,79 @@
const fs = require("./fileSystem").require();
const pth = require("path");

fs.existsSync = fs.existsSync || pth.existsSync;

module.exports = function (/*String*/ path) {
    var _path = path || "",
        _obj = newAttr(),
        _stat = null;

    function newAttr() {
        return {
            directory: false,
            readonly: false,
            hidden: false,
            executable: false,
            mtime: 0,
            atime: 0
        };
    }

    if (_path && fs.existsSync(_path)) {
        _stat = fs.statSync(_path);
        _obj.directory = _stat.isDirectory();
        _obj.mtime = _stat.mtime;
        _obj.atime = _stat.atime;
        _obj.executable = (0o111 & _stat.mode) !== 0; // executable if anyone has an execute bit, not just the owner
        _obj.readonly = (0o200 & _stat.mode) === 0; // read-only if the owner has no write permission
        _obj.hidden = pth.basename(_path)[0] === ".";
    } else {
        console.warn("Invalid path: " + _path);
    }

    return {
        get directory() {
            return _obj.directory;
        },

        get readOnly() {
            return _obj.readonly;
        },

        get hidden() {
            return _obj.hidden;
        },

        get mtime() {
            return _obj.mtime;
        },

        get atime() {
            return _obj.atime;
        },

        get executable() {
            return _obj.executable;
        },

        decodeAttributes: function () {},

        encodeAttributes: function () {},

        toJSON: function () {
            return {
                path: _path,
                isDirectory: _obj.directory,
                isReadOnly: _obj.readonly,
                isHidden: _obj.hidden,
                isExecutable: _obj.executable,
                mTime: _obj.mtime,
                aTime: _obj.atime
            };
        },

        toString: function () {
            return JSON.stringify(this.toJSON(), null, "\t");
        }
    };
};
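A minimal sketch of reading these attributes for an existing file, assuming FileAttr is reached through the util index re-export shown further down (the target path here is purely illustrative):

```javascript
const { FileAttr } = require("adm-zip/util");

// builds a read-only snapshot of the file's mode and timestamps
const attr = FileAttr("./package.json");
console.log(attr.directory);  // false
console.log(attr.readOnly);   // true only if the owner write bit is cleared
console.log(attr.toString()); // JSON dump of the decoded attributes
```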
11
node_modules/adm-zip/util/fileSystem.js
generated
vendored
Normal file
@@ -0,0 +1,11 @@
exports.require = function () {
    if (typeof process === "object" && process.versions && process.versions["electron"]) {
        try {
            const originalFs = require("original-fs");
            if (Object.keys(originalFs).length > 0) {
                return originalFs;
            }
        } catch (e) {}
    }
    return require("fs");
};
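Every other module in the package obtains its fs handle through this shim, so inside Electron the unpatched original-fs is used when it is available. A sketch of the same call the other files make, assuming a deep require into adm-zip/util is allowed:

```javascript
// Same pattern used by fattr.js and utils.js: resolve fs once at load time.
const fs = require("adm-zip/util/fileSystem").require();

// Outside Electron this is plain "fs"; inside Electron it is "original-fs".
console.log(typeof fs.statSync); // "function"
```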
4
node_modules/adm-zip/util/index.js
generated
vendored
Normal file
@@ -0,0 +1,4 @@
module.exports = require("./utils");
module.exports.Constants = require("./constants");
module.exports.Errors = require("./errors");
module.exports.FileAttr = require("./fattr");
247
node_modules/adm-zip/util/utils.js
generated
vendored
Normal file
@@ -0,0 +1,247 @@
|
||||
const fsystem = require("./fileSystem").require();
|
||||
const pth = require("path");
|
||||
const Constants = require("./constants");
|
||||
const Errors = require("./errors");
|
||||
const isWin = typeof process === "object" && "win32" === process.platform;
|
||||
|
||||
const is_Obj = (obj) => obj && typeof obj === "object";
|
||||
|
||||
// generate CRC32 lookup table
|
||||
const crcTable = new Uint32Array(256).map((t, c) => {
|
||||
for (let k = 0; k < 8; k++) {
|
||||
if ((c & 1) !== 0) {
|
||||
c = 0xedb88320 ^ (c >>> 1);
|
||||
} else {
|
||||
c >>>= 1;
|
||||
}
|
||||
}
|
||||
return c >>> 0;
|
||||
});
|
||||
|
||||
// UTILS functions
|
||||
|
||||
function Utils(opts) {
|
||||
this.sep = pth.sep;
|
||||
this.fs = fsystem;
|
||||
|
||||
if (is_Obj(opts)) {
|
||||
// custom filesystem
|
||||
if (is_Obj(opts.fs) && typeof opts.fs.statSync === "function") {
|
||||
this.fs = opts.fs;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Utils;
|
||||
|
||||
// INSTANCED functions
|
||||
|
||||
Utils.prototype.makeDir = function (/*String*/ folder) {
|
||||
const self = this;
|
||||
|
||||
// Sync - make directories tree
|
||||
function mkdirSync(/*String*/ fpath) {
|
||||
let resolvedPath = fpath.split(self.sep)[0];
|
||||
fpath.split(self.sep).forEach(function (name) {
|
||||
if (!name || name.substr(-1, 1) === ":") return;
|
||||
resolvedPath += self.sep + name;
|
||||
var stat;
|
||||
try {
|
||||
stat = self.fs.statSync(resolvedPath);
|
||||
} catch (e) {
|
||||
self.fs.mkdirSync(resolvedPath);
|
||||
}
|
||||
if (stat && stat.isFile()) throw Errors.FILE_IN_THE_WAY.replace("%s", resolvedPath);
|
||||
});
|
||||
}
|
||||
|
||||
mkdirSync(folder);
|
||||
};
|
||||
|
||||
Utils.prototype.writeFileTo = function (/*String*/ path, /*Buffer*/ content, /*Boolean*/ overwrite, /*Number*/ attr) {
|
||||
const self = this;
|
||||
if (self.fs.existsSync(path)) {
|
||||
if (!overwrite) return false; // cannot overwrite
|
||||
|
||||
var stat = self.fs.statSync(path);
|
||||
if (stat.isDirectory()) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
var folder = pth.dirname(path);
|
||||
if (!self.fs.existsSync(folder)) {
|
||||
self.makeDir(folder);
|
||||
}
|
||||
|
||||
var fd;
|
||||
try {
|
||||
fd = self.fs.openSync(path, "w", 438); // 0666
|
||||
} catch (e) {
|
||||
self.fs.chmodSync(path, 438);
|
||||
fd = self.fs.openSync(path, "w", 438);
|
||||
}
|
||||
if (fd) {
|
||||
try {
|
||||
self.fs.writeSync(fd, content, 0, content.length, 0);
|
||||
} finally {
|
||||
self.fs.closeSync(fd);
|
||||
}
|
||||
}
|
||||
self.fs.chmodSync(path, attr || 438);
|
||||
return true;
|
||||
};
|
||||
|
||||
Utils.prototype.writeFileToAsync = function (/*String*/ path, /*Buffer*/ content, /*Boolean*/ overwrite, /*Number*/ attr, /*Function*/ callback) {
|
||||
if (typeof attr === "function") {
|
||||
callback = attr;
|
||||
attr = undefined;
|
||||
}
|
||||
|
||||
const self = this;
|
||||
|
||||
self.fs.exists(path, function (exist) {
|
||||
if (exist && !overwrite) return callback(false);
|
||||
|
||||
self.fs.stat(path, function (err, stat) {
|
||||
if (exist && stat.isDirectory()) {
|
||||
return callback(false);
|
||||
}
|
||||
|
||||
var folder = pth.dirname(path);
|
||||
self.fs.exists(folder, function (exists) {
|
||||
if (!exists) self.makeDir(folder);
|
||||
|
||||
self.fs.open(path, "w", 438, function (err, fd) {
|
||||
if (err) {
|
||||
self.fs.chmod(path, 438, function () {
|
||||
self.fs.open(path, "w", 438, function (err, fd) {
|
||||
self.fs.write(fd, content, 0, content.length, 0, function () {
|
||||
self.fs.close(fd, function () {
|
||||
self.fs.chmod(path, attr || 438, function () {
|
||||
callback(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
} else if (fd) {
|
||||
self.fs.write(fd, content, 0, content.length, 0, function () {
|
||||
self.fs.close(fd, function () {
|
||||
self.fs.chmod(path, attr || 438, function () {
|
||||
callback(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
} else {
|
||||
self.fs.chmod(path, attr || 438, function () {
|
||||
callback(true);
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
Utils.prototype.findFiles = function (/*String*/ path) {
|
||||
const self = this;
|
||||
|
||||
function findSync(/*String*/ dir, /*RegExp*/ pattern, /*Boolean*/ recursive) {
|
||||
if (typeof pattern === "boolean") {
|
||||
recursive = pattern;
|
||||
pattern = undefined;
|
||||
}
|
||||
let files = [];
|
||||
self.fs.readdirSync(dir).forEach(function (file) {
|
||||
var path = pth.join(dir, file);
|
||||
|
||||
if (self.fs.statSync(path).isDirectory() && recursive) files = files.concat(findSync(path, pattern, recursive));
|
||||
|
||||
if (!pattern || pattern.test(path)) {
|
||||
files.push(pth.normalize(path) + (self.fs.statSync(path).isDirectory() ? self.sep : ""));
|
||||
}
|
||||
});
|
||||
return files;
|
||||
}
|
||||
|
||||
return findSync(path, undefined, true);
|
||||
};
|
||||
|
||||
Utils.prototype.getAttributes = function () {};
|
||||
|
||||
Utils.prototype.setAttributes = function () {};
|
||||
|
||||
// STATIC functions
|
||||
|
||||
// crc32 single update (it is part of crc32)
|
||||
Utils.crc32update = function (crc, byte) {
|
||||
return crcTable[(crc ^ byte) & 0xff] ^ (crc >>> 8);
|
||||
};
|
||||
|
||||
Utils.crc32 = function (buf) {
|
||||
if (typeof buf === "string") {
|
||||
buf = Buffer.from(buf, "utf8");
|
||||
}
|
||||
    // crcTable is precomputed above as a 256-entry Uint32Array, so no lazy
    // generation is needed here (the previous lazy call referenced an
    // undefined genCRCTable helper and could never execute).
|
||||
|
||||
let len = buf.length;
|
||||
let crc = ~0;
|
||||
for (let off = 0; off < len; ) crc = Utils.crc32update(crc, buf[off++]);
|
||||
// xor and cast as uint32 number
|
||||
return ~crc >>> 0;
|
||||
};
|
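Since crc32update folds one byte at a time through the precomputed table, the whole-buffer helper above is just that update in a loop. A small usage sketch, assuming a deep require into adm-zip/util is allowed (Utils is the exported constructor, with crc32 attached as a static):

```javascript
const Utils = require("adm-zip/util/utils");

// Strings are converted to UTF-8 buffers before hashing.
const checksum = Utils.crc32("hello world");
console.log(checksum.toString(16)); // "d4a1185" — the standard CRC-32 of "hello world"
```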
||||
|
||||
Utils.methodToString = function (/*Number*/ method) {
|
||||
switch (method) {
|
||||
case Constants.STORED:
|
||||
return "STORED (" + method + ")";
|
||||
case Constants.DEFLATED:
|
||||
return "DEFLATED (" + method + ")";
|
||||
default:
|
||||
return "UNSUPPORTED (" + method + ")";
|
||||
}
|
||||
};
|
||||
|
||||
// removes ".." style path elements
|
||||
Utils.canonical = function (/*string*/ path) {
|
||||
if (!path) return "";
|
||||
// trick normalize into treating the path as absolute
|
||||
var safeSuffix = pth.posix.normalize("/" + path.split("\\").join("/"));
|
||||
return pth.join(".", safeSuffix);
|
||||
};
|
||||
|
||||
// make absolute paths, taking prefix as the root folder
|
||||
Utils.sanitize = function (/*string*/ prefix, /*string*/ name) {
|
||||
prefix = pth.resolve(pth.normalize(prefix));
|
||||
var parts = name.split("/");
|
||||
for (var i = 0, l = parts.length; i < l; i++) {
|
||||
var path = pth.normalize(pth.join(prefix, parts.slice(i, l).join(pth.sep)));
|
||||
if (path.indexOf(prefix) === 0) {
|
||||
return path;
|
||||
}
|
||||
}
|
||||
return pth.normalize(pth.join(prefix, pth.basename(name)));
|
||||
};
|
||||
|
||||
// converts buffer, Uint8Array, string types to buffer
|
||||
Utils.toBuffer = function toBuffer(/*buffer, Uint8Array, string*/ input) {
|
||||
if (Buffer.isBuffer(input)) {
|
||||
return input;
|
||||
} else if (input instanceof Uint8Array) {
|
||||
return Buffer.from(input);
|
||||
} else {
|
||||
// expect string all other values are invalid and return empty buffer
|
||||
return typeof input === "string" ? Buffer.from(input, "utf8") : Buffer.alloc(0);
|
||||
}
|
||||
};
|
||||
|
||||
Utils.readBigUInt64LE = function (/*Buffer*/ buffer, /*int*/ index) {
|
||||
var slice = Buffer.from(buffer.slice(index, index + 8));
|
||||
slice.swap64();
|
||||
|
||||
return parseInt(`0x${slice.toString("hex")}`);
|
||||
};
|
||||
|
||||
Utils.isWin = isWin; // Do we have windows system
|
||||
Utils.crcTable = crcTable;
|
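canonical strips ".." traversal from an entry name, while sanitize additionally pins the result under a chosen extraction root. A minimal sketch of the difference, assuming POSIX-style paths:

```javascript
const Utils = require("adm-zip/util");

// canonical: drop "../" escapes from an entry name
console.log(Utils.canonical("../../etc/passwd")); // "etc/passwd"

// sanitize: resolve against a root folder and refuse to escape it
console.log(Utils.sanitize("/home/me/out", "../../etc/passwd"));
// -> "/home/me/out/etc/passwd" (the escaping "../" segments are discarded)
```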
333
node_modules/adm-zip/zipEntry.js
generated
vendored
Normal file
@@ -0,0 +1,333 @@
|
||||
var Utils = require("./util"),
|
||||
Headers = require("./headers"),
|
||||
Constants = Utils.Constants,
|
||||
Methods = require("./methods");
|
||||
|
||||
module.exports = function (/*Buffer*/ input) {
|
||||
var _entryHeader = new Headers.EntryHeader(),
|
||||
_entryName = Buffer.alloc(0),
|
||||
_comment = Buffer.alloc(0),
|
||||
_isDirectory = false,
|
||||
uncompressedData = null,
|
||||
_extra = Buffer.alloc(0);
|
||||
|
||||
function getCompressedDataFromZip() {
|
||||
if (!input || !Buffer.isBuffer(input)) {
|
||||
return Buffer.alloc(0);
|
||||
}
|
||||
_entryHeader.loadDataHeaderFromBinary(input);
|
||||
return input.slice(_entryHeader.realDataOffset, _entryHeader.realDataOffset + _entryHeader.compressedSize);
|
||||
}
|
||||
|
||||
function crc32OK(data) {
|
||||
// if bit 3 (0x08) of the general-purpose flags field is set, then the CRC-32 and file sizes are not known when the header is written
|
||||
if ((_entryHeader.flags & 0x8) !== 0x8) {
|
||||
if (Utils.crc32(data) !== _entryHeader.dataHeader.crc) {
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
// @TODO: load and check data descriptor header
|
||||
// The fields in the local header are filled with zero, and the CRC-32 and size are appended in a 12-byte structure
|
||||
// (optionally preceded by a 4-byte signature) immediately after the compressed data:
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
function decompress(/*Boolean*/ async, /*Function*/ callback, /*String, Buffer*/ pass) {
|
||||
if (typeof callback === "undefined" && typeof async === "string") {
|
||||
pass = async;
|
||||
async = void 0;
|
||||
}
|
||||
if (_isDirectory) {
|
||||
if (async && callback) {
|
||||
callback(Buffer.alloc(0), Utils.Errors.DIRECTORY_CONTENT_ERROR); //si added error.
|
||||
}
|
||||
return Buffer.alloc(0);
|
||||
}
|
||||
|
||||
var compressedData = getCompressedDataFromZip();
|
||||
|
||||
if (compressedData.length === 0) {
|
||||
// File is empty, nothing to decompress.
|
||||
if (async && callback) callback(compressedData);
|
||||
return compressedData;
|
||||
}
|
||||
|
||||
if (_entryHeader.encripted) {
|
||||
if ("string" !== typeof pass && !Buffer.isBuffer(pass)) {
|
||||
throw new Error("ADM-ZIP: Incompatible password parameter");
|
||||
}
|
||||
compressedData = Methods.ZipCrypto.decrypt(compressedData, _entryHeader, pass);
|
||||
}
|
||||
|
||||
var data = Buffer.alloc(_entryHeader.size);
|
||||
|
||||
switch (_entryHeader.method) {
|
||||
case Utils.Constants.STORED:
|
||||
compressedData.copy(data);
|
||||
if (!crc32OK(data)) {
|
||||
if (async && callback) callback(data, Utils.Errors.BAD_CRC); //si added error
|
||||
throw new Error(Utils.Errors.BAD_CRC);
|
||||
} else {
|
||||
//si added otherwise did not seem to return data.
|
||||
if (async && callback) callback(data);
|
||||
return data;
|
||||
}
|
||||
case Utils.Constants.DEFLATED:
|
||||
var inflater = new Methods.Inflater(compressedData);
|
||||
if (!async) {
|
||||
const result = inflater.inflate(data);
|
||||
result.copy(data, 0);
|
||||
if (!crc32OK(data)) {
|
||||
throw new Error(Utils.Errors.BAD_CRC + " " + _entryName.toString());
|
||||
}
|
||||
return data;
|
||||
} else {
|
||||
inflater.inflateAsync(function (result) {
|
||||
result.copy(result, 0);
|
||||
if (callback) {
|
||||
if (!crc32OK(result)) {
|
||||
callback(result, Utils.Errors.BAD_CRC); //si added error
|
||||
} else {
|
||||
callback(result);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
break;
|
||||
default:
|
||||
if (async && callback) callback(Buffer.alloc(0), Utils.Errors.UNKNOWN_METHOD);
|
||||
throw new Error(Utils.Errors.UNKNOWN_METHOD);
|
||||
}
|
||||
}
|
||||
|
||||
function compress(/*Boolean*/ async, /*Function*/ callback) {
|
||||
if ((!uncompressedData || !uncompressedData.length) && Buffer.isBuffer(input)) {
|
||||
// no data set or the data wasn't changed to require recompression
|
||||
if (async && callback) callback(getCompressedDataFromZip());
|
||||
return getCompressedDataFromZip();
|
||||
}
|
||||
|
||||
if (uncompressedData.length && !_isDirectory) {
|
||||
var compressedData;
|
||||
// Local file header
|
||||
switch (_entryHeader.method) {
|
||||
case Utils.Constants.STORED:
|
||||
_entryHeader.compressedSize = _entryHeader.size;
|
||||
|
||||
compressedData = Buffer.alloc(uncompressedData.length);
|
||||
uncompressedData.copy(compressedData);
|
||||
|
||||
if (async && callback) callback(compressedData);
|
||||
return compressedData;
|
||||
default:
|
||||
case Utils.Constants.DEFLATED:
|
||||
var deflater = new Methods.Deflater(uncompressedData);
|
||||
if (!async) {
|
||||
var deflated = deflater.deflate();
|
||||
_entryHeader.compressedSize = deflated.length;
|
||||
return deflated;
|
||||
} else {
|
||||
deflater.deflateAsync(function (data) {
|
||||
compressedData = Buffer.alloc(data.length);
|
||||
_entryHeader.compressedSize = data.length;
|
||||
data.copy(compressedData);
|
||||
callback && callback(compressedData);
|
||||
});
|
||||
}
|
||||
deflater = null;
|
||||
break;
|
||||
}
|
||||
} else if (async && callback) {
|
||||
callback(Buffer.alloc(0));
|
||||
} else {
|
||||
return Buffer.alloc(0);
|
||||
}
|
||||
}
|
||||
|
||||
function readUInt64LE(buffer, offset) {
|
||||
        // the high dword must be scaled by 2^32; a "<< 4" here would corrupt large zip64 values
        return buffer.readUInt32LE(offset + 4) * 0x100000000 + buffer.readUInt32LE(offset);
|
||||
}
|
||||
|
||||
function parseExtra(data) {
|
||||
var offset = 0;
|
||||
var signature, size, part;
|
||||
while (offset < data.length) {
|
||||
signature = data.readUInt16LE(offset);
|
||||
offset += 2;
|
||||
size = data.readUInt16LE(offset);
|
||||
offset += 2;
|
||||
part = data.slice(offset, offset + size);
|
||||
offset += size;
|
||||
if (Constants.ID_ZIP64 === signature) {
|
||||
parseZip64ExtendedInformation(part);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//Override header field values with values from the ZIP64 extra field
|
||||
function parseZip64ExtendedInformation(data) {
|
||||
var size, compressedSize, offset, diskNumStart;
|
||||
|
||||
if (data.length >= Constants.EF_ZIP64_SCOMP) {
|
||||
size = readUInt64LE(data, Constants.EF_ZIP64_SUNCOMP);
|
||||
if (_entryHeader.size === Constants.EF_ZIP64_OR_32) {
|
||||
_entryHeader.size = size;
|
||||
}
|
||||
}
|
||||
if (data.length >= Constants.EF_ZIP64_RHO) {
|
||||
compressedSize = readUInt64LE(data, Constants.EF_ZIP64_SCOMP);
|
||||
if (_entryHeader.compressedSize === Constants.EF_ZIP64_OR_32) {
|
||||
_entryHeader.compressedSize = compressedSize;
|
||||
}
|
||||
}
|
||||
if (data.length >= Constants.EF_ZIP64_DSN) {
|
||||
offset = readUInt64LE(data, Constants.EF_ZIP64_RHO);
|
||||
if (_entryHeader.offset === Constants.EF_ZIP64_OR_32) {
|
||||
_entryHeader.offset = offset;
|
||||
}
|
||||
}
|
||||
if (data.length >= Constants.EF_ZIP64_DSN + 4) {
|
||||
diskNumStart = data.readUInt32LE(Constants.EF_ZIP64_DSN);
|
||||
if (_entryHeader.diskNumStart === Constants.EF_ZIP64_OR_16) {
|
||||
_entryHeader.diskNumStart = diskNumStart;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
get entryName() {
|
||||
return _entryName.toString();
|
||||
},
|
||||
get rawEntryName() {
|
||||
return _entryName;
|
||||
},
|
||||
set entryName(val) {
|
||||
_entryName = Utils.toBuffer(val);
|
||||
var lastChar = _entryName[_entryName.length - 1];
|
||||
_isDirectory = lastChar === 47 || lastChar === 92;
|
||||
_entryHeader.fileNameLength = _entryName.length;
|
||||
},
|
||||
|
||||
get extra() {
|
||||
return _extra;
|
||||
},
|
||||
set extra(val) {
|
||||
_extra = val;
|
||||
_entryHeader.extraLength = val.length;
|
||||
parseExtra(val);
|
||||
},
|
||||
|
||||
get comment() {
|
||||
return _comment.toString();
|
||||
},
|
||||
set comment(val) {
|
||||
_comment = Utils.toBuffer(val);
|
||||
_entryHeader.commentLength = _comment.length;
|
||||
},
|
||||
|
||||
get name() {
|
||||
var n = _entryName.toString();
|
||||
return _isDirectory
|
||||
? n
|
||||
.substr(n.length - 1)
|
||||
.split("/")
|
||||
.pop()
|
||||
: n.split("/").pop();
|
||||
},
|
||||
get isDirectory() {
|
||||
return _isDirectory;
|
||||
},
|
||||
|
||||
getCompressedData: function () {
|
||||
return compress(false, null);
|
||||
},
|
||||
|
||||
getCompressedDataAsync: function (/*Function*/ callback) {
|
||||
compress(true, callback);
|
||||
},
|
||||
|
||||
setData: function (value) {
|
||||
uncompressedData = Utils.toBuffer(value);
|
||||
if (!_isDirectory && uncompressedData.length) {
|
||||
_entryHeader.size = uncompressedData.length;
|
||||
_entryHeader.method = Utils.Constants.DEFLATED;
|
||||
_entryHeader.crc = Utils.crc32(value);
|
||||
_entryHeader.changed = true;
|
||||
} else {
|
||||
// folders and blank files should be stored
|
||||
_entryHeader.method = Utils.Constants.STORED;
|
||||
}
|
||||
},
|
||||
|
||||
getData: function (pass) {
|
||||
if (_entryHeader.changed) {
|
||||
return uncompressedData;
|
||||
} else {
|
||||
return decompress(false, null, pass);
|
||||
}
|
||||
},
|
||||
|
||||
getDataAsync: function (/*Function*/ callback, pass) {
|
||||
if (_entryHeader.changed) {
|
||||
callback(uncompressedData);
|
||||
} else {
|
||||
decompress(true, callback, pass);
|
||||
}
|
||||
},
|
||||
|
||||
set attr(attr) {
|
||||
_entryHeader.attr = attr;
|
||||
},
|
||||
get attr() {
|
||||
return _entryHeader.attr;
|
||||
},
|
||||
|
||||
set header(/*Buffer*/ data) {
|
||||
_entryHeader.loadFromBinary(data);
|
||||
},
|
||||
|
||||
get header() {
|
||||
return _entryHeader;
|
||||
},
|
||||
|
||||
packHeader: function () {
|
||||
// 1. create header (buffer)
|
||||
var header = _entryHeader.entryHeaderToBinary();
|
||||
var addpos = Utils.Constants.CENHDR;
|
||||
// 2. add file name
|
||||
_entryName.copy(header, addpos);
|
||||
addpos += _entryName.length;
|
||||
// 3. add extra data
|
||||
if (_entryHeader.extraLength) {
|
||||
_extra.copy(header, addpos);
|
||||
addpos += _entryHeader.extraLength;
|
||||
}
|
||||
// 4. add file comment
|
||||
if (_entryHeader.commentLength) {
|
||||
_comment.copy(header, addpos);
|
||||
}
|
||||
return header;
|
||||
},
|
||||
|
||||
toJSON: function () {
|
||||
const bytes = function (nr) {
|
||||
return "<" + ((nr && nr.length + " bytes buffer") || "null") + ">";
|
||||
};
|
||||
|
||||
return {
|
||||
entryName: this.entryName,
|
||||
name: this.name,
|
||||
comment: this.comment,
|
||||
isDirectory: this.isDirectory,
|
||||
header: _entryHeader.toJSON(),
|
||||
compressedData: bytes(input),
|
||||
data: bytes(uncompressedData)
|
||||
};
|
||||
},
|
||||
|
||||
toString: function () {
|
||||
return JSON.stringify(this.toJSON(), null, "\t");
|
||||
}
|
||||
};
|
||||
};
|
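getData/getDataAsync accept an optional password that is forwarded to the ZipCrypto decrypt path shown earlier. A short sketch of reading one entry, assuming a hypothetical archive ./protected.zip exists and its secret.txt entry was encrypted with the password "letmein":

```javascript
const AdmZip = require("adm-zip");

// Hypothetical archive, entry name and password, used only to show the call shape.
const zip = new AdmZip("./protected.zip");
const entry = zip.getEntry("secret.txt");

if (entry) {
    // Without the password an encrypted entry throws; with it, the plain data is returned.
    const data = entry.getData("letmein");
    console.log(data.toString("utf8"));
}
```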
384
node_modules/adm-zip/zipFile.js
generated
vendored
Normal file
@@ -0,0 +1,384 @@
|
||||
const ZipEntry = require("./zipEntry");
|
||||
const Headers = require("./headers");
|
||||
const Utils = require("./util");
|
||||
|
||||
module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) {
|
||||
var entryList = [],
|
||||
entryTable = {},
|
||||
_comment = Buffer.alloc(0),
|
||||
mainHeader = new Headers.MainHeader(),
|
||||
loadedEntries = false;
|
||||
|
||||
// assign options
|
||||
const opts = Object.assign(Object.create(null), options);
|
||||
|
||||
const { noSort } = opts;
|
||||
|
||||
if (inBuffer) {
|
||||
// is a memory buffer
|
||||
readMainHeader(opts.readEntries);
|
||||
} else {
|
||||
// none. is a new file
|
||||
loadedEntries = true;
|
||||
}
|
||||
|
||||
function iterateEntries(callback) {
|
||||
const totalEntries = mainHeader.diskEntries; // total number of entries
|
||||
let index = mainHeader.offset; // offset of first CEN header
|
||||
|
||||
for (let i = 0; i < totalEntries; i++) {
|
||||
let tmp = index;
|
||||
const entry = new ZipEntry(inBuffer);
|
||||
|
||||
entry.header = inBuffer.slice(tmp, (tmp += Utils.Constants.CENHDR));
|
||||
entry.entryName = inBuffer.slice(tmp, (tmp += entry.header.fileNameLength));
|
||||
|
||||
index += entry.header.entryHeaderSize;
|
||||
|
||||
callback(entry);
|
||||
}
|
||||
}
|
||||
|
||||
function readEntries() {
|
||||
loadedEntries = true;
|
||||
entryTable = {};
|
||||
entryList = new Array(mainHeader.diskEntries); // total number of entries
|
||||
var index = mainHeader.offset; // offset of first CEN header
|
||||
for (var i = 0; i < entryList.length; i++) {
|
||||
var tmp = index,
|
||||
entry = new ZipEntry(inBuffer);
|
||||
entry.header = inBuffer.slice(tmp, (tmp += Utils.Constants.CENHDR));
|
||||
|
||||
entry.entryName = inBuffer.slice(tmp, (tmp += entry.header.fileNameLength));
|
||||
|
||||
if (entry.header.extraLength) {
|
||||
entry.extra = inBuffer.slice(tmp, (tmp += entry.header.extraLength));
|
||||
}
|
||||
|
||||
if (entry.header.commentLength) entry.comment = inBuffer.slice(tmp, tmp + entry.header.commentLength);
|
||||
|
||||
index += entry.header.entryHeaderSize;
|
||||
|
||||
entryList[i] = entry;
|
||||
entryTable[entry.entryName] = entry;
|
||||
}
|
||||
}
|
||||
|
||||
function readMainHeader(/*Boolean*/ readNow) {
|
||||
var i = inBuffer.length - Utils.Constants.ENDHDR, // END header size
|
||||
max = Math.max(0, i - 0xffff), // 0xFFFF is the max zip file comment length
|
||||
n = max,
|
||||
endStart = inBuffer.length,
|
||||
endOffset = -1, // Start offset of the END header
|
||||
commentEnd = 0;
|
||||
|
||||
for (i; i >= n; i--) {
|
||||
if (inBuffer[i] !== 0x50) continue; // quick check that the byte is 'P'
|
||||
if (inBuffer.readUInt32LE(i) === Utils.Constants.ENDSIG) {
|
||||
// "PK\005\006"
|
||||
endOffset = i;
|
||||
commentEnd = i;
|
||||
endStart = i + Utils.Constants.ENDHDR;
|
||||
// We already found a regular signature, let's look just a bit further to check if there's any zip64 signature
|
||||
n = i - Utils.Constants.END64HDR;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (inBuffer.readUInt32LE(i) === Utils.Constants.END64SIG) {
|
||||
// Found a zip64 signature, let's continue reading the whole zip64 record
|
||||
n = max;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (inBuffer.readUInt32LE(i) === Utils.Constants.ZIP64SIG) {
|
||||
// Found the zip64 record, let's determine its size
|
||||
endOffset = i;
|
||||
endStart = i + Utils.readBigUInt64LE(inBuffer, i + Utils.Constants.ZIP64SIZE) + Utils.Constants.ZIP64LEAD;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (!~endOffset) throw new Error(Utils.Errors.INVALID_FORMAT);
|
||||
|
||||
mainHeader.loadFromBinary(inBuffer.slice(endOffset, endStart));
|
||||
if (mainHeader.commentLength) {
|
||||
_comment = inBuffer.slice(commentEnd + Utils.Constants.ENDHDR);
|
||||
}
|
||||
if (readNow) readEntries();
|
||||
}
|
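readMainHeader walks backwards from the end of the buffer because the END record sits behind a comment of unknown length (at most 0xFFFF bytes). A stripped-down sketch of that backward scan, independent of adm-zip's own state:

```javascript
// Minimal illustration of the same backward scan for the END signature.
function findEndOffset(buf, ENDSIG = 0x06054b50, ENDHDR = 22) {
    const start = buf.length - ENDHDR;
    const stop = Math.max(0, start - 0xffff); // the zip comment can be at most 0xFFFF bytes
    for (let i = start; i >= stop; i--) {
        if (buf[i] === 0x50 && buf.readUInt32LE(i) === ENDSIG) return i;
    }
    return -1; // caller treats this as INVALID_FORMAT
}
```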
||||
|
||||
function sortEntries() {
|
||||
if (entryList.length > 1 && !noSort) {
|
||||
entryList.sort((a, b) => a.entryName.toLowerCase().localeCompare(b.entryName.toLowerCase()));
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
/**
|
||||
* Returns an array of ZipEntry objects existent in the current opened archive
|
||||
* @return Array
|
||||
*/
|
||||
get entries() {
|
||||
if (!loadedEntries) {
|
||||
readEntries();
|
||||
}
|
||||
return entryList;
|
||||
},
|
||||
|
||||
/**
|
||||
* Archive comment
|
||||
* @return {String}
|
||||
*/
|
||||
get comment() {
|
||||
return _comment.toString();
|
||||
},
|
||||
set comment(val) {
|
||||
_comment = Utils.toBuffer(val);
|
||||
mainHeader.commentLength = _comment.length;
|
||||
},
|
||||
|
||||
getEntryCount: function () {
|
||||
if (!loadedEntries) {
|
||||
return mainHeader.diskEntries;
|
||||
}
|
||||
|
||||
return entryList.length;
|
||||
},
|
||||
|
||||
forEach: function (callback) {
|
||||
if (!loadedEntries) {
|
||||
iterateEntries(callback);
|
||||
return;
|
||||
}
|
||||
|
||||
entryList.forEach(callback);
|
||||
},
|
||||
|
||||
/**
|
||||
* Returns a reference to the entry with the given name or null if entry is inexistent
|
||||
*
|
||||
* @param entryName
|
||||
* @return ZipEntry
|
||||
*/
|
||||
getEntry: function (/*String*/ entryName) {
|
||||
if (!loadedEntries) {
|
||||
readEntries();
|
||||
}
|
||||
return entryTable[entryName] || null;
|
||||
},
|
||||
|
||||
/**
|
||||
* Adds the given entry to the entry list
|
||||
*
|
||||
* @param entry
|
||||
*/
|
||||
setEntry: function (/*ZipEntry*/ entry) {
|
||||
if (!loadedEntries) {
|
||||
readEntries();
|
||||
}
|
||||
entryList.push(entry);
|
||||
entryTable[entry.entryName] = entry;
|
||||
mainHeader.totalEntries = entryList.length;
|
||||
},
|
||||
|
||||
/**
|
||||
* Removes the entry with the given name from the entry list.
|
||||
*
|
||||
* If the entry is a directory, then all nested files and directories will be removed
|
||||
* @param entryName
|
||||
*/
|
||||
deleteEntry: function (/*String*/ entryName) {
|
||||
if (!loadedEntries) {
|
||||
readEntries();
|
||||
}
|
||||
var entry = entryTable[entryName];
|
||||
if (entry && entry.isDirectory) {
|
||||
var _self = this;
|
||||
this.getEntryChildren(entry).forEach(function (child) {
|
||||
if (child.entryName !== entryName) {
|
||||
_self.deleteEntry(child.entryName);
|
||||
}
|
||||
});
|
||||
}
|
||||
entryList.splice(entryList.indexOf(entry), 1);
|
||||
delete entryTable[entryName];
|
||||
mainHeader.totalEntries = entryList.length;
|
||||
},
|
||||
|
||||
/**
|
||||
* Iterates and returns all nested files and directories of the given entry
|
||||
*
|
||||
* @param entry
|
||||
* @return Array
|
||||
*/
|
||||
getEntryChildren: function (/*ZipEntry*/ entry) {
|
||||
if (!loadedEntries) {
|
||||
readEntries();
|
||||
}
|
||||
if (entry && entry.isDirectory) {
|
||||
const list = [];
|
||||
const name = entry.entryName;
|
||||
const len = name.length;
|
||||
|
||||
entryList.forEach(function (zipEntry) {
|
||||
if (zipEntry.entryName.substr(0, len) === name) {
|
||||
list.push(zipEntry);
|
||||
}
|
||||
});
|
||||
return list;
|
||||
}
|
||||
return [];
|
||||
},
|
||||
|
||||
/**
|
||||
* Returns the zip file
|
||||
*
|
||||
* @return Buffer
|
||||
*/
|
||||
compressToBuffer: function () {
|
||||
if (!loadedEntries) {
|
||||
readEntries();
|
||||
}
|
||||
sortEntries();
|
||||
|
||||
const dataBlock = [];
|
||||
const entryHeaders = [];
|
||||
let totalSize = 0;
|
||||
let dindex = 0;
|
||||
|
||||
mainHeader.size = 0;
|
||||
mainHeader.offset = 0;
|
||||
|
||||
for (const entry of entryList) {
|
||||
// compress data and set the local and central directory headers accordingly; that is why it is called first
|
||||
const compressedData = entry.getCompressedData();
|
||||
// 1. construct data header
|
||||
entry.header.offset = dindex;
|
||||
const dataHeader = entry.header.dataHeaderToBinary();
|
||||
const entryNameLen = entry.rawEntryName.length;
|
||||
// 1.2. postheader - data after data header
|
||||
const postHeader = Buffer.alloc(entryNameLen + entry.extra.length);
|
||||
entry.rawEntryName.copy(postHeader, 0);
|
||||
                entry.extra.copy(postHeader, entryNameLen); // append the extra field after the entry name (the copy arguments were previously reversed)
|
||||
|
||||
// 2. offsets
|
||||
const dataLength = dataHeader.length + postHeader.length + compressedData.length;
|
||||
dindex += dataLength;
|
||||
|
||||
// 3. store values in sequence
|
||||
dataBlock.push(dataHeader);
|
||||
dataBlock.push(postHeader);
|
||||
dataBlock.push(compressedData);
|
||||
|
||||
// 4. construct entry header
|
||||
const entryHeader = entry.packHeader();
|
||||
entryHeaders.push(entryHeader);
|
||||
// 5. update main header
|
||||
mainHeader.size += entryHeader.length;
|
||||
totalSize += dataLength + entryHeader.length;
|
||||
}
|
||||
|
||||
totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length
|
||||
// point to end of data and beginning of central directory first record
|
||||
mainHeader.offset = dindex;
|
||||
|
||||
dindex = 0;
|
||||
const outBuffer = Buffer.alloc(totalSize);
|
||||
// write data blocks
|
||||
for (const content of dataBlock) {
|
||||
content.copy(outBuffer, dindex);
|
||||
dindex += content.length;
|
||||
}
|
||||
|
||||
// write central directory entries
|
||||
for (const content of entryHeaders) {
|
||||
content.copy(outBuffer, dindex);
|
||||
dindex += content.length;
|
||||
}
|
||||
|
||||
// write main header
|
||||
const mh = mainHeader.toBinary();
|
||||
if (_comment) {
|
||||
_comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment
|
||||
}
|
||||
mh.copy(outBuffer, dindex);
|
||||
|
||||
return outBuffer;
|
||||
},
|
||||
|
||||
toAsyncBuffer: function (/*Function*/ onSuccess, /*Function*/ onFail, /*Function*/ onItemStart, /*Function*/ onItemEnd) {
|
||||
try {
|
||||
if (!loadedEntries) {
|
||||
readEntries();
|
||||
}
|
||||
sortEntries();
|
||||
|
||||
const dataBlock = [];
|
||||
const entryHeaders = [];
|
||||
let totalSize = 0;
|
||||
let dindex = 0;
|
||||
|
||||
mainHeader.size = 0;
|
||||
mainHeader.offset = 0;
|
||||
|
||||
const compress2Buffer = function (entryLists) {
|
||||
if (entryLists.length) {
|
||||
const entry = entryLists.pop();
|
||||
const name = entry.entryName + entry.extra.toString();
|
||||
if (onItemStart) onItemStart(name);
|
||||
entry.getCompressedDataAsync(function (compressedData) {
|
||||
if (onItemEnd) onItemEnd(name);
|
||||
|
||||
entry.header.offset = dindex;
|
||||
// data header
|
||||
const dataHeader = entry.header.dataHeaderToBinary();
|
||||
const postHeader = Buffer.alloc(name.length, name);
|
||||
const dataLength = dataHeader.length + postHeader.length + compressedData.length;
|
||||
|
||||
dindex += dataLength;
|
||||
|
||||
dataBlock.push(dataHeader);
|
||||
dataBlock.push(postHeader);
|
||||
dataBlock.push(compressedData);
|
||||
|
||||
const entryHeader = entry.packHeader();
|
||||
entryHeaders.push(entryHeader);
|
||||
mainHeader.size += entryHeader.length;
|
||||
totalSize += dataLength + entryHeader.length;
|
||||
|
||||
compress2Buffer(entryLists);
|
||||
});
|
||||
} else {
|
||||
totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length
|
||||
// point to end of data and beginning of central directory first record
|
||||
mainHeader.offset = dindex;
|
||||
|
||||
dindex = 0;
|
||||
const outBuffer = Buffer.alloc(totalSize);
|
||||
dataBlock.forEach(function (content) {
|
||||
content.copy(outBuffer, dindex); // write data blocks
|
||||
dindex += content.length;
|
||||
});
|
||||
entryHeaders.forEach(function (content) {
|
||||
content.copy(outBuffer, dindex); // write central directory entries
|
||||
dindex += content.length;
|
||||
});
|
||||
|
||||
const mh = mainHeader.toBinary();
|
||||
if (_comment) {
|
||||
_comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment
|
||||
}
|
||||
|
||||
mh.copy(outBuffer, dindex); // write main header
|
||||
|
||||
onSuccess(outBuffer);
|
||||
}
|
||||
};
|
||||
|
||||
compress2Buffer(entryList);
|
||||
} catch (e) {
|
||||
onFail(e);
|
||||
}
|
||||
}
|
||||
};
|
||||
};
|