From 9d529f5b1aab670b7ab036a4b79260ace6e8988e Mon Sep 17 00:00:00 2001 From: 5saviahv <5saviahv@users.noreply.github.com> Date: Tue, 25 Jun 2024 18:29:31 +0300 Subject: [PATCH 1/4] restore asset file --- .github/workflows/codeql.yml | 74 +++++++++++++++++------------------ test/assets/ultra.zip | Bin 4446 -> 4086 bytes 2 files changed, 37 insertions(+), 37 deletions(-) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 28f357d..741d1aa 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -13,11 +13,11 @@ name: "CodeQL" on: push: - branches: [ "master" ] + branches: ["master"] pull_request: - branches: [ "master" ] + branches: ["master"] schedule: - - cron: '41 3 * * 5' + - cron: "41 3 * * 5" jobs: analyze: @@ -44,8 +44,8 @@ jobs: fail-fast: false matrix: include: - - language: javascript-typescript - build-mode: none + - language: javascript-typescript + build-mode: none # CodeQL supports the following values keywords for 'language': 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' # Use `c-cpp` to analyze code written in C, C++ or both # Use 'java-kotlin' to analyze code written in Java, Kotlin or both @@ -55,39 +55,39 @@ jobs: # If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how # your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages steps: - - name: Checkout repository - uses: actions/checkout@v4 + - name: Checkout repository + uses: actions/checkout@v4 - # Initializes the CodeQL tools for scanning. - - name: Initialize CodeQL - uses: github/codeql-action/init@v3 - with: - languages: ${{ matrix.language }} - build-mode: ${{ matrix.build-mode }} - # If you wish to specify custom queries, you can do so here or in a config file. - # By default, queries listed here will override any specified in a config file. - # Prefix the list here with "+" to use these queries and those in the config file. + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v3 + with: + languages: ${{ matrix.language }} + build-mode: ${{ matrix.build-mode }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. - # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs - # queries: security-extended,security-and-quality + # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs + # queries: security-extended,security-and-quality - # If the analyze step fails for one of the languages you are analyzing with - # "We were unable to automatically build your code", modify the matrix above - # to set the build mode to "manual" for that language. Then modify this step - # to build your code. - # ℹī¸ Command-line programs to run using the OS shell. 
- # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun - - if: matrix.build-mode == 'manual' - shell: bash - run: | - echo 'If you are using a "manual" build mode for one or more of the' \ - 'languages you are analyzing, replace this with the commands to build' \ - 'your code, for example:' - echo ' make bootstrap' - echo ' make release' - exit 1 + # If the analyze step fails for one of the languages you are analyzing with + # "We were unable to automatically build your code", modify the matrix above + # to set the build mode to "manual" for that language. Then modify this step + # to build your code. + # ℹī¸ Command-line programs to run using the OS shell. + # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun + - if: matrix.build-mode == 'manual' + shell: bash + run: | + echo 'If you are using a "manual" build mode for one or more of the' \ + 'languages you are analyzing, replace this with the commands to build' \ + 'your code, for example:' + echo ' make bootstrap' + echo ' make release' + exit 1 - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3 - with: - category: "/language:${{matrix.language}}" + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v3 + with: + category: "/language:${{matrix.language}}" diff --git a/test/assets/ultra.zip b/test/assets/ultra.zip index 3b7cb84c4ccff3e585e13d44c379b824f547a3be..e6e8362b9bac0dd4557f9996900f2ea74261b661 100644 GIT binary patch delta 349 zcmcbo^i6)UK9dxaI>Y4O0-Bp&v%4`(!|6p3PDgeaUZzystuFipNFpuq%G z^IO1wvI(!$HNS(P9)k+h<;p<%7FZ^D@-=>$$<_RP%qeUjl}eM>@w-fZEx^UD2vqZw1LhDT T5mq*kW-cJCU}0d$;0Eyk+7?uC delta 342 zcmew+e@{s^z?+#xgn@yBfgvh(odW|B5MWT5%*SIsc{7s~qsrt09&09MmdWdRq$V>m zYfj=-o2{qNs5E&$pCL>TWEM!<27Uu1p^35@llO9ibb2sNe$B1G$S}E)S$MKMj~`?dd`#1B+NKDfH#k+m~rv}-dv_jjFTBTWq}THn9Re^$E42$=FVl~0y@TF zat|LL(-ju5&`!Q!0d)oysNWTVbQ3#JLUZySeznO(d{Rh~3P4Fsu8A_5Kxe6mu(E;l PZ~ Date: Tue, 25 Jun 2024 18:38:09 +0300 Subject: [PATCH 2/4] handle local- and central header extra data separately --- headers/entryHeader.js | 57 ++++++++++++++++++++++++++---------------- zipEntry.js | 21 ++++++++-------- zipFile.js | 6 +++++ 3 files changed, 51 insertions(+), 33 deletions(-) diff --git a/headers/entryHeader.js b/headers/entryHeader.js index 6375d0c..a44833e 100644 --- a/headers/entryHeader.js +++ b/headers/entryHeader.js @@ -25,7 +25,9 @@ module.exports = function () { // Without it file names may be corrupted for other apps when file names use unicode chars _flags |= Constants.FLG_EFS; - var _localHeader = {}; + const _localHeader = { + extraLen: 0 + }; function setTime(val) { val = new Date(val); @@ -143,6 +145,13 @@ module.exports = function () { _extraLen = val; }, + get extraLocalLength() { + return _localHeader.extraLen; + }, + set extraLocalLength(val) { + _localHeader.extraLen = val; + }, + get commentLength() { return _comLen; }, @@ -205,26 +214,30 @@ module.exports = function () { if (data.readUInt32LE(0) !== Constants.LOCSIG) { throw new Error(Utils.Errors.INVALID_LOC); } - _localHeader = { - // version needed to extract - version: data.readUInt16LE(Constants.LOCVER), - // general purpose bit flag - flags: data.readUInt16LE(Constants.LOCFLG), - // compression method - method: data.readUInt16LE(Constants.LOCHOW), - // modification time (2 bytes time, 2 bytes date) - time: data.readUInt32LE(Constants.LOCTIM), - // uncompressed file crc-32 value - crc: data.readUInt32LE(Constants.LOCCRC), - // 
compressed size - compressedSize: data.readUInt32LE(Constants.LOCSIZ), - // uncompressed size - size: data.readUInt32LE(Constants.LOCLEN), - // filename length - fnameLen: data.readUInt16LE(Constants.LOCNAM), - // extra field length - extraLen: data.readUInt16LE(Constants.LOCEXT) - }; + + // version needed to extract + _localHeader.version = data.readUInt16LE(Constants.LOCVER); + // general purpose bit flag + _localHeader.flags = data.readUInt16LE(Constants.LOCFLG); + // compression method + _localHeader.method = data.readUInt16LE(Constants.LOCHOW); + // modification time (2 bytes time, 2 bytes date) + _localHeader.time = data.readUInt32LE(Constants.LOCTIM); + // uncompressed file crc-32 valu + _localHeader.crc = data.readUInt32LE(Constants.LOCCRC); + // compressed size + _localHeader.compressedSize = data.readUInt32LE(Constants.LOCSIZ); + // uncompressed size + _localHeader.size = data.readUInt32LE(Constants.LOCLEN); + // filename length + _localHeader.fnameLen = data.readUInt16LE(Constants.LOCNAM); + // extra field length + _localHeader.extraLen = data.readUInt16LE(Constants.LOCEXT); + + // read extra data + const extraStart = _offset + Constants.LOCHDR + _localHeader.fnameLen; + const extraEnd = extraStart + _localHeader.extraLen; + return input.slice(extraStart, extraEnd); }, loadFromBinary: function (/*Buffer*/ data) { @@ -286,7 +299,7 @@ module.exports = function () { // filename length data.writeUInt16LE(_fnameLen, Constants.LOCNAM); // extra field length - data.writeUInt16LE(_extraLen, Constants.LOCEXT); + data.writeUInt16LE(_localHeader.extraLen, Constants.LOCEXT); return data; }, diff --git a/zipEntry.js b/zipEntry.js index c2cae53..50082c2 100644 --- a/zipEntry.js +++ b/zipEntry.js @@ -10,6 +10,7 @@ module.exports = function (/** object */ options, /*Buffer*/ input) { _isDirectory = false, uncompressedData = null, _extra = Buffer.alloc(0), + _extralocal = Buffer.alloc(0), _efs = true; // assign options @@ -23,7 +24,7 @@ module.exports = function (/** object */ options, /*Buffer*/ input) { if (!input || !(input instanceof Uint8Array)) { return Buffer.alloc(0); } - _centralHeader.loadLocalHeaderFromBinary(input); + _extralocal = _centralHeader.loadLocalHeaderFromBinary(input); return input.slice(_centralHeader.realDataOffset, _centralHeader.realDataOffset + _centralHeader.compressedSize); } @@ -340,6 +341,7 @@ module.exports = function (/** object */ options, /*Buffer*/ input) { packCentralHeader: function () { _centralHeader.flags_efs = this.efs; + _centralHeader.extraLength = _extra.length; // 1. create header (buffer) var header = _centralHeader.centralHeaderToBinary(); var addpos = Utils.Constants.CENHDR; @@ -347,24 +349,21 @@ module.exports = function (/** object */ options, /*Buffer*/ input) { _entryName.copy(header, addpos); addpos += _entryName.length; // 3. add extra data - if (_centralHeader.extraLength) { - _extra.copy(header, addpos); - addpos += _centralHeader.extraLength; - } + _extra.copy(header, addpos); + addpos += _centralHeader.extraLength; // 4. add file comment - if (_centralHeader.commentLength) { - _comment.copy(header, addpos); - } + _comment.copy(header, addpos); return header; }, packLocalHeader: function () { let addpos = 0; _centralHeader.flags_efs = this.efs; + _centralHeader.extraLocalLength = _extralocal.length; // 1. construct local header Buffer const localHeaderBuf = _centralHeader.localHeaderToBinary(); // 2. 
localHeader - crate header buffer - const localHeader = Buffer.alloc(localHeaderBuf.length + _entryName.length + _extra.length); + const localHeader = Buffer.alloc(localHeaderBuf.length + _entryName.length + _centralHeader.extraLocalLength); // 2.1 add localheader localHeaderBuf.copy(localHeader, addpos); addpos += localHeaderBuf.length; @@ -372,8 +371,8 @@ module.exports = function (/** object */ options, /*Buffer*/ input) { _entryName.copy(localHeader, addpos); addpos += _entryName.length; // 2.3 add extra field - _extra.copy(localHeader, addpos); - addpos += _extra.length; + _extralocal.copy(localHeader, addpos); + addpos += _extralocal.length; return localHeader; }, diff --git a/zipFile.js b/zipFile.js index a33e792..8919632 100644 --- a/zipFile.js +++ b/zipFile.js @@ -256,6 +256,7 @@ module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { mainHeader.size = 0; mainHeader.offset = 0; + totalEntries = 0; for (const entry of entryList) { // compress data and set local and entry header accordingly. Reason why is called first @@ -279,11 +280,13 @@ module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { // 5. update main header mainHeader.size += centralHeader.length; totalSize += dataLength + centralHeader.length; + totalEntries++; } totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length // point to end of data and beginning of central directory first record mainHeader.offset = dindex; + mainHeader.totalEntries = totalEntries; dindex = 0; const outBuffer = Buffer.alloc(totalSize); @@ -327,6 +330,7 @@ module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { const centralHeaders = []; let totalSize = 0; let dindex = 0; + let totalEntries = 0; mainHeader.size = 0; mainHeader.offset = 0; @@ -356,6 +360,7 @@ module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { centralHeaders.push(centalHeader); mainHeader.size += centalHeader.length; totalSize += dataLength + centalHeader.length; + totalEntries++; compress2Buffer(entryLists); }); @@ -363,6 +368,7 @@ module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length // point to end of data and beginning of central directory first record mainHeader.offset = dindex; + mainHeader.totalEntries = totalEntries; dindex = 0; const outBuffer = Buffer.alloc(totalSize); From f9d1a3ad7c24bd990fb94a64758929306ef720b6 Mon Sep 17 00:00:00 2001 From: 5saviahv <5saviahv@users.noreply.github.com> Date: Tue, 25 Jun 2024 18:57:13 +0300 Subject: [PATCH 3/4] added some functions to handle entries --- adm-zip.js | 29 ++++++++++++++++++++++++++++- zipFile.js | 37 ++++++++++++++++++++++++++++++++++--- 2 files changed, 62 insertions(+), 4 deletions(-) diff --git a/adm-zip.js b/adm-zip.js index a445dae..97fdcce 100644 --- a/adm-zip.js +++ b/adm-zip.js @@ -123,6 +123,18 @@ module.exports = function (/**String*/ input, /** object */ options) { return (item && item.getData(pass)) || null; }, + /** + * Returns how many child elements has on entry (directories) on files it is always 0 + * @param {ZipEntry|string} entry ZipEntry object or String with the full path of the entry + * @returns {integer} + */ + childCount: function (entry) { + const item = getEntry(entry); + if (item) { + return _zip.getChildCount(item); + } + }, + /** * Asynchronous readFile * @param {ZipEntry|string} entry ZipEntry object or String with the full path of the entry @@ -188,11 +200,26 @@ 
module.exports = function (/**String*/ input, /** object */ options) { /** * Remove the entry from the file or the entry and all it's nested directories and files if the given entry is a directory * - * @param {ZipEntry} entry + * @param {ZipEntry|string} entry + * @returns {void} */ deleteFile: function (entry) { // @TODO: test deleteFile var item = getEntry(entry); + if (item) { + _zip.deleteFile(item.entryName); + } + }, + + /** + * Remove the entry from the file or directory without affecting any nested entries + * + * @param {ZipEntry|string} entry + * @returns {void} + */ + deleteEntry: function (entry) { + // @TODO: test deleteEntry + var item = getEntry(entry); if (item) { _zip.deleteEntry(item.entryName); } diff --git a/zipFile.js b/zipFile.js index 8919632..d269e1c 100644 --- a/zipFile.js +++ b/zipFile.js @@ -190,12 +190,13 @@ module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { }, /** - * Removes the entry with the given name from the entry list. + * Removes the file with the given name from the entry list. * * If the entry is a directory, then all nested files and directories will be removed * @param entryName + * @returns {void} */ - deleteEntry: function (/*String*/ entryName) { + deleteFile: function (/*String*/ entryName) { if (!loadedEntries) { readEntries(); } @@ -204,7 +205,7 @@ module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { var _self = this; this.getEntryChildren(entry).forEach(function (child) { if (child.entryName !== entryName) { - _self.deleteEntry(child.entryName); + _self.deleteFile(child.entryName); } }); } @@ -213,6 +214,22 @@ module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { mainHeader.totalEntries = entryList.length; }, + /** + * Removes the entry with the given name from the entry list. + * + * @param {string} entryName + * @returns {void} + */ + deleteEntry: function (/*String*/ entryName) { + if (!loadedEntries) { + readEntries(); + } + const entry = entryTable[entryName]; + entryList.splice(entryList.indexOf(entry), 1); + delete entryTable[entryName]; + mainHeader.totalEntries = entryList.length; + }, + /** * Iterates and returns all nested files and directories of the given entry * @@ -238,6 +255,20 @@ module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { return []; }, + /** + * How many child elements entry has + * + * @param {ZipEntry} entry + * @return {integer} + */ + getChildCount: function (entry) { + if (entry && entry.isDirectory) { + const list = this.getEntryChildren(entry); + return list.includes(entry) ? 
list.length - 1 : list.length; + } + return 0; + }, + /** * Returns the zip file * From 86bae22bdcfe2b61f5be86c17d7e9559941ea0ca Mon Sep 17 00:00:00 2001 From: 5saviahv <5saviahv@users.noreply.github.com> Date: Tue, 25 Jun 2024 19:52:35 +0300 Subject: [PATCH 4/4] add temporary folders into list --- test/assets/maximum3.zip | Bin 0 -> 3624 bytes test/methods/methods.test.js | 54 +++++++++++++++++++++++++++++++++++ zipFile.js | 53 ++++++++++++++++++++-------------- 3 files changed, 85 insertions(+), 22 deletions(-) create mode 100644 test/assets/maximum3.zip diff --git a/test/assets/maximum3.zip b/test/assets/maximum3.zip new file mode 100644 index 0000000000000000000000000000000000000000..a38233807185a7b459590013a1c004aa168236ca GIT binary patch literal 3624 zcmWIWW@h1HVBi3v#!LqWB%ldoCzg~HWhRxDq!z~mafyCnaf-fQYPmv4YDI~HOMY@` zZfahMUP(ns0Nj|L4XDP*VKF8tCowNuAuTf}6{c4N?1VpPdS!9z&Cf|mEz$>B3)0HL zAO>{(ktV)2J0=E(f6NRFdRVlAT%HEen30*1lA0G^l$w~5pO;e!az;p?ch(UDp1Z$A zlEQRZcD>i+ddn&k-4`mO9Z_`n&c012dCq7#`oCANPrU8fvV(~;C1=OI#O!@q$5?}$ zS^NYxL#%aMl9PFQ#u}^fqzj4*| zhtpRqI50b{<@WcZJNEid?$~Fs%3uRKi+bj>br%A*1WHJ82k{~v6e!Tsk-?!OoYU{r5sZWlxwHFGB%yCzJ zz~%b%SR+%+hF$x5Hf(myWc%lAc$;w(!!zbMi*~^mTQ#n7En?1BHLUFYJSXbGo9BR0o3+nsV80jWv-PmqCCgn$I)!f=@^MGMJL4)Mz_se*9U-kL%Urc8YYK9n?OCz> zg4@k?^In-Qn9aRHF!)s6pY+S*YaE6TC>Sl+RTCNp4Gn^tzH= zM>MTe_LZ|&FNu3q`6AzJ+M8qxyV<*B4!XQeyu0%f$BLNWPGVDYWtK#id$gCtFPmJn zPHLW>Qq`-lmy9hAGvWs7g#q98G|u36NF^P#u2)94!M!%RpL}fdrCZ zaei(ptTixN4UbmClvKlvO!f@88ni0Fk`M$Gz$6LHTP~;;Mg|Fnf^OCgU;mzARB8Xv zz1Hw7oDT3tSdG~TMz-1=Xgzw%8nsD`Y~O)*r3*mzF6&Yiz@;QwCbK4~up9o8HLwI~+WC0A&BF;BVmIh0*xz zS7m_NuLQK)1K9jW4_*xWK@Bc!)*3r~1zQWF@mZ@5w^k8o?MYU=)*@H4*eq5|{SLMm wMiXbT0?=X~VCM~we~}AUY!*BH{Ry@hMhAGavH?wHU { }); describe(".extractEntryTo() - sync", () => { + // each entry one by one it("zip.extractEntryTo(entry, destination, false, true)", () => { const zip = new Zip("./test/assets/ultra.zip"); var zipEntries = zip.getEntries(); @@ -132,6 +133,7 @@ describe("adm-zip.js - methods handling local files", () => { expect(files.sort()).to.deep.equal(ultrazip.sort()); }); + // each entry one by one it("zip.extractEntryTo(entry, destination, true, true)", () => { const zip = new Zip("./test/assets/ultra.zip"); var zipEntries = zip.getEntries(); @@ -149,6 +151,58 @@ describe("adm-zip.js - methods handling local files", () => { expect(files.sort()).to.deep.equal(ultrazip.sort()); }); + + it("zip.extractEntryTo(entry, destination, false, true) - [ extract folder from file where folders exists ]", () => { + const zip = new Zip("./test/assets/maximum.zip"); + + zip.extractEntryTo("./attributes_test/New folder/", destination, false, true); + + const files = walk(destination); + const maximumzip = ["hidden.txt", "hidden_readonly.txt", "readonly.txt", "somefile.txt"].map(wrapList); + + expect(files.sort()).to.deep.equal(maximumzip.sort()); + }); + + it("zip.extractEntryTo(entry, destination, false, true) - [ extract folder from file where folders does not exists ]", () => { + const zip = new Zip("./test/assets/maximum3.zip"); + + zip.extractEntryTo("./attributes_test/New folder/", destination, false, true); + + const files = walk(destination); + const maximumzip = ["hidden.txt", "hidden_readonly.txt", "readonly.txt", "somefile.txt"].map(wrapList); + + expect(files.sort()).to.deep.equal(maximumzip.sort()); + }); + + it("zip.extractEntryTo(entry, destination, true, true) - [ extract folder from file where folders exists ]", () => { + const zip = new Zip("./test/assets/maximum.zip"); + + zip.extractEntryTo("./attributes_test/New 
folder/", destination, true, true); + + const files = walk(destination); + const maximumzip = [ + "./attributes_test/New folder/hidden.txt", + "./attributes_test/New folder/hidden_readonly.txt", + "./attributes_test/New folder/readonly.txt", + "./attributes_test/New folder/somefile.txt" + ].map(wrapList); + expect(files.sort()).to.deep.equal(maximumzip.sort()); + }); + + it("zip.extractEntryTo(entry, destination, true, true) - [ extract folder from file where folders does not exists ]", () => { + const zip = new Zip("./test/assets/maximum3.zip"); + + zip.extractEntryTo("./attributes_test/New folder/", destination, true, true); + + const files = walk(destination); + const maximumzip = [ + "./attributes_test/New folder/hidden.txt", + "./attributes_test/New folder/hidden_readonly.txt", + "./attributes_test/New folder/readonly.txt", + "./attributes_test/New folder/somefile.txt" + ].map(wrapList); + expect(files.sort()).to.deep.equal(maximumzip.sort()); + }); }); describe(".addLocalFolder() - sync", () => { diff --git a/zipFile.js b/zipFile.js index d269e1c..43e69fe 100644 --- a/zipFile.js +++ b/zipFile.js @@ -9,6 +9,7 @@ module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { mainHeader = new Headers.MainHeader(), loadedEntries = false; var password = null; + const temporary = new Set(); // assign options const opts = options; @@ -23,20 +24,31 @@ module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { loadedEntries = true; } - function iterateEntries(callback) { - const totalEntries = mainHeader.diskEntries; // total number of entries - let index = mainHeader.offset; // offset of first CEN header - - for (let i = 0; i < totalEntries; i++) { - let tmp = index; - const entry = new ZipEntry(opts, inBuffer); - - entry.header = inBuffer.slice(tmp, (tmp += Utils.Constants.CENHDR)); - entry.entryName = inBuffer.slice(tmp, (tmp += entry.header.fileNameLength)); - - index += entry.header.centralHeaderSize; + function makeTemporaryFolders() { + const foldersList = new Set(); + + // Make list of all folders in file + for (const elem of Object.keys(entryTable)) { + const elements = elem.split("/"); + elements.pop(); // filename + if (!elements.length) continue; // no folders + for (let i = 0; i < elements.length; i++) { + const sub = elements.slice(0, i + 1).join("/") + "/"; + foldersList.add(sub); + } + } - callback(entry); + // create missing folders as temporary + for (const elem of foldersList) { + if (!(elem in entryTable)) { + const tempfolder = new ZipEntry(opts); + tempfolder.entryName = elem; + tempfolder.attr = 0x10; + tempfolder.temporary = true; + entryList.push(tempfolder); + entryTable[tempfolder.entryName] = tempfolder; + temporary.add(tempfolder); + } } } @@ -66,6 +78,8 @@ module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { entryList[i] = entry; entryTable[entry.entryName] = entry; } + temporary.clear(); + makeTemporaryFolders(); } function readMainHeader(/*Boolean*/ readNow) { @@ -130,7 +144,7 @@ module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { if (!loadedEntries) { readEntries(); } - return entryList; + return entryList.filter((e) => !temporary.has(e)); }, /** @@ -154,12 +168,7 @@ module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { }, forEach: function (callback) { - if (!loadedEntries) { - iterateEntries(callback); - return; - } - - entryList.forEach(callback); + this.entries.forEach(callback); }, /** @@ -289,7 +298,7 @@ module.exports = function (/*Buffer|null*/ 
inBuffer, /** object */ options) { mainHeader.offset = 0; totalEntries = 0; - for (const entry of entryList) { + for (const entry of this.entries) { // compress data and set local and entry header accordingly. Reason why is called first const compressedData = entry.getCompressedData(); entry.header.offset = dindex; @@ -430,7 +439,7 @@ module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { } }; - compress2Buffer(Array.from(entryList)); + compress2Buffer(Array.from(this.entries)); } catch (e) { onFail(e); }
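
For reference, a minimal usage sketch of the entry-management API added in patches 3 and 4 (childCount, deleteEntry, deleteFile), assuming those methods land as defined in the diffs above. The archive path and entry names are hypothetical placeholders, not assets from this repository.

const AdmZip = require("adm-zip");

// Open an archive; path and entry names below are placeholders.
const zip = new AdmZip("./archive.zip");

// childCount(): number of entries nested under a directory entry; always 0 for file entries.
console.log(zip.childCount("attributes_test/New folder/"));

// deleteEntry(): removes only the named entry, leaving anything nested under it in place.
zip.deleteEntry("attributes_test/New folder/somefile.txt");

// deleteFile(): removes a directory entry together with every entry nested under it.
zip.deleteFile("attributes_test/New folder/");

zip.writeZip("./archive-trimmed.zip");

Per the diffs, childCount() counts all nested descendants rather than only direct children, since getChildCount() is backed by getEntryChildren(); deleteFile() is the recursive variant, while deleteEntry() never touches nested entries.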