Skip to content

Commit

Permalink
Merge pull request #180 from alubbe/easier-async-iteration
Browse files Browse the repository at this point in the history
Add forceStream option
  • Loading branch information
ZJONSSON committed Feb 21, 2020
2 parents 1a0cbc7 + 1666bc6 commit 4a178d8
Show file tree
Hide file tree
Showing 3 changed files with 63 additions and 11 deletions.
20 changes: 18 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ There are no added compiled dependencies - inflation is handled by node.js's bui

Please note: Methods that use the Central Directory instead of parsing entire file can be found under [`Open`](#open)

Chrome extension files (.crx) are zipfiles with an [extra header](http://www.adambarth.com/experimental/crx/docs/crx.html) at the start of the file. Unzipper will parse .crx file with the streaming methods (`Parse` and `ParseOne`). The `Open` methods will check for `crx` headers and parse crx files, but only if you provide `crx: true` in options.
Chrome extension files (.crx) are zipfiles with an [extra header](http://www.adambarth.com/experimental/crx/docs/crx.html) at the start of the file. Unzipper will parse .crx files with the streaming methods (`Parse` and `ParseOne`). The `Open` methods will check for `crx` headers and parse crx files, but only if you provide `crx: true` in options.

## Installation

Expand Down Expand Up @@ -62,7 +62,6 @@ entry.autodrain().on('error' => handleError);

Here is a quick example:


```js
fs.createReadStream('path/to/archive.zip')
.pipe(unzipper.Parse())
Expand All @@ -77,6 +76,23 @@ fs.createReadStream('path/to/archive.zip')
}
});
```

and the same example using async iterators:

```js
const zip = fs.createReadStream('path/to/archive.zip').pipe(unzipper.Parse({forceStream: true}));
for await (const entry of zip) {
const fileName = entry.path;
const type = entry.type; // 'Directory' or 'File'
  const size = entry.vars.uncompressedSize; // there is also a compressedSize
if (fileName === "this IS the file I'm looking for") {
entry.pipe(fs.createWriteStream('output/path'));
} else {
entry.autodrain();
}
}
```

### Parse zip by piping entries downstream

If you `pipe` from unzipper the downstream components will receive each `entry` for further processing. This allows for clean pipelines transforming zipfiles into unzipped data.
Expand Down
22 changes: 13 additions & 9 deletions lib/parse.js
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ Parse.prototype._readCrxHeader = function() {
.word32lu('version')
.word32lu('pubKeyLength')
.word32lu('signatureLength')
.vars;
.vars;
return self.pull(self.crxHeader.pubKeyLength + self.crxHeader.signatureLength);
}).then(function(data) {
self.crxHeader.publicKey = data.slice(0,self.crxHeader.pubKeyLength);
Expand Down Expand Up @@ -108,7 +108,7 @@ Parse.prototype._readFile = function () {
var fileName = fileNameBuffer.toString('utf8');
var entry = Stream.PassThrough();
var __autodraining = false;

entry.autodrain = function() {
__autodraining = true;
var draining = entry.pipe(NoopStream());
Expand Down Expand Up @@ -145,17 +145,21 @@ Parse.prototype._readFile = function () {
}
}
}

return self.pull(vars.extraFieldLength).then(function(extraField) {
var extra = parseExtraField(extraField, vars);

entry.vars = vars;
entry.extra = extra;

self.emit('entry', entry);

if (self._readableState.pipesCount || (self._readableState.pipes && self._readableState.pipes.length))
if (self._opts.forceStream) {
self.push(entry);
} else {
self.emit('entry', entry);

if (self._readableState.pipesCount || (self._readableState.pipes && self._readableState.pipes.length))
self.push(entry);
}

if (self._opts.verbose)
console.log({
Expand Down Expand Up @@ -212,7 +216,7 @@ Parse.prototype._processDataDescriptor = function (entry) {
Parse.prototype._readCentralDirectoryFileHeader = function () {
var self = this;
return self.pull(42).then(function(data) {

var vars = binary.parse(data)
.word16lu('versionMadeBy')
.word16lu('versionsNeededToExtract')
Expand Down Expand Up @@ -248,7 +252,7 @@ Parse.prototype._readCentralDirectoryFileHeader = function () {
Parse.prototype._readEndOfCentralDirectoryRecord = function() {
var self = this;
return self.pull(18).then(function(data) {

var vars = binary.parse(data)
.word16lu('diskNumber')
.word16lu('diskStart')
Expand All @@ -264,7 +268,7 @@ Parse.prototype._readEndOfCentralDirectoryRecord = function() {
self.end();
self.push(null);
});

});
};

Expand Down
32 changes: 32 additions & 0 deletions test/forceStream.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
'use strict';

// Test dependencies: tap for assertions, core fs/path/stream, and the
// library under test (the package root, i.e. unzipper itself).
var test = require('tap').test;
var fs = require('fs');
var path = require('path');
var Stream = require('stream');
var unzip = require('../');

// Backwards compatibility for node versions < 8
// Older node cores lack Writable#destroy; fall back to the userland
// readable-stream package so `instanceof Stream.PassThrough` checks below
// match the stream implementation the library actually uses.
if (!Stream.Writable || !Stream.Writable.prototype.destroy)
  Stream = require('readable-stream');

test("verify that setting the forceStream option emits a data event instead of entry", function (t) {
  var archive = path.join(__dirname, '../testData/compressed-standard/archive.zip');

  // Track which of the two mutually-exclusive events fired.
  var sawData = false;
  var sawEntry = false;

  fs.createReadStream(archive)
    .pipe(unzip.Parse({ forceStream: true }))
    .on('data', function(entry) {
      // With forceStream, each pushed chunk is an entry stream, not a Buffer.
      t.equal(entry instanceof Stream.PassThrough, true);
      sawData = true;
    })
    .on('entry', function() {
      sawEntry = true;
    })
    .on('finish', function() {
      // forceStream must route entries through 'data' only, never 'entry'.
      t.equal(sawData, true);
      t.equal(sawEntry, false);
      t.end();
    });
});

0 comments on commit 4a178d8

Please sign in to comment.