diff --git a/test/parallel/test-zlib-from-gzip.js b/test/parallel/test-zlib-from-gzip.js
index 7bd377eb8698fd..bca810e1a75d5f 100644
--- a/test/parallel/test-zlib-from-gzip.js
+++ b/test/parallel/test-zlib-from-gzip.js
@@ -21,10 +21,10 @@ const inp = fs.createReadStream(fixture);
 const out = fs.createWriteStream(outputFile);
 inp.pipe(gunzip).pipe(out);

-out.on('close', function() {
+out.on('close', common.mustCall(() => {
   const actual = fs.readFileSync(outputFile);
   assert.strictEqual(actual.length, expect.length, 'length should match');
   for (let i = 0, l = actual.length; i < l; i++) {
     assert.strictEqual(actual[i], expect[i], `byte[${i}]`);
   }
-});
+}));
diff --git a/test/parallel/test-zlib-from-string.js b/test/parallel/test-zlib-from-string.js
index ab2f7d023d8bc8..cf24d2a804cc73 100644
--- a/test/parallel/test-zlib-from-string.js
+++ b/test/parallel/test-zlib-from-string.js
@@ -1,7 +1,7 @@
 'use strict';

 // test compressing and uncompressing a string with zlib
-require('../common');
+const common = require('../common');
 const assert = require('assert');
 const zlib = require('zlib');

@@ -33,32 +33,32 @@ const expectedBase64Gzip = 'H4sIAAAAAAAAA11RS05DMQy8yhzg6d2BPSAkJPZu4laWkjiN4' +
                            'mHo33kJO8xfkckmLjE5XMKBQ4gxIsfvCZ44doUThF2mcZq8q2' +
                            'sHnHNzRtagj5AQAA';

-zlib.deflate(inputString, function(err, buffer) {
+zlib.deflate(inputString, common.mustCall((err, buffer) => {
   assert.strictEqual(buffer.toString('base64'), expectedBase64Deflate,
                      'deflate encoded string should match');
-});
+}));

-zlib.gzip(inputString, function(err, buffer) {
+zlib.gzip(inputString, common.mustCall((err, buffer) => {
   // Can't actually guarantee that we'll get exactly the same
   // deflated bytes when we compress a string, since the header
   // depends on stuff other than the input string itself.
   // However, decrypting it should definitely yield the same
   // result that we're expecting, and this should match what we get
   // from inflating the known valid deflate data.
-  zlib.gunzip(buffer, function(err, gunzipped) {
+  zlib.gunzip(buffer, common.mustCall((err, gunzipped) => {
     assert.strictEqual(gunzipped.toString(), inputString,
                        'Should get original string after gzip/gunzip');
-  });
-});
+  }));
+}));

 let buffer = Buffer.from(expectedBase64Deflate, 'base64');
-zlib.unzip(buffer, function(err, buffer) {
+zlib.unzip(buffer, common.mustCall((err, buffer) => {
   assert.strictEqual(buffer.toString(), inputString,
                      'decoded inflated string should match');
-});
+}));

 buffer = Buffer.from(expectedBase64Gzip, 'base64');
-zlib.unzip(buffer, function(err, buffer) {
+zlib.unzip(buffer, common.mustCall((err, buffer) => {
   assert.strictEqual(buffer.toString(), inputString,
                      'decoded gunzipped string should match');
-});
+}));
diff --git a/test/parallel/test-zlib-invalid-input.js b/test/parallel/test-zlib-invalid-input.js
index 6e87ab8b4f8227..d1d157678bba25 100644
--- a/test/parallel/test-zlib-invalid-input.js
+++ b/test/parallel/test-zlib-invalid-input.js
@@ -1,14 +1,26 @@
 'use strict';

 // test uncompressing invalid input
-require('../common');
+const common = require('../common');
 const assert = require('assert');
 const zlib = require('zlib');

-const nonStringInputs = [1, true, {a: 1}, ['a']];
+const nonStringInputs = [
+  1,
+  true,
+  { a: 1 },
+  ['a']
+];

-console.error('Doing the non-strings');
-nonStringInputs.forEach(function(input) {
+// zlib.Unzip classes need to get valid data, or else they'll throw.
+const unzips = [
+  zlib.Unzip(),
+  zlib.Gunzip(),
+  zlib.Inflate(),
+  zlib.InflateRaw()
+];
+
+nonStringInputs.forEach(common.mustCall((input) => {
   // zlib.gunzip should not throw an error when called with bad input.
   assert.doesNotThrow(function() {
     zlib.gunzip(input, function(err, buffer) {
@@ -16,30 +28,12 @@ nonStringInputs.forEach(function(input) {
       assert.ok(err);
     });
   });
-});
+}, nonStringInputs.length));

-console.error('Doing the unzips');
-// zlib.Unzip classes need to get valid data, or else they'll throw.
-const unzips = [ zlib.Unzip(),
-                 zlib.Gunzip(),
-                 zlib.Inflate(),
-                 zlib.InflateRaw() ];
-const hadError = [];
-unzips.forEach(function(uz, i) {
-  console.error(`Error for ${uz.constructor.name}`);
-  uz.on('error', function(er) {
-    console.error('Error event', er);
-    hadError[i] = true;
-  });
-
-  uz.on('end', function(er) {
-    throw new Error(`end event should not be emitted ${uz.constructor.name}`);
-  });
+unzips.forEach(common.mustCall((uz, i) => {
+  uz.on('error', common.mustCall());
+  uz.on('end', common.mustNotCall());

   // this will trigger error event
   uz.write('this is not valid compressed data.');
-});
-
-process.on('exit', function() {
-  assert.deepStrictEqual(hadError, [true, true, true, true], 'expect 4 errors');
-});
+}, unzips.length));
diff --git a/test/parallel/test-zlib-random-byte-pipes.js b/test/parallel/test-zlib-random-byte-pipes.js
index 143ab526f70a49..5ec5908777c814 100644
--- a/test/parallel/test-zlib-random-byte-pipes.js
+++ b/test/parallel/test-zlib-random-byte-pipes.js
@@ -6,124 +6,119 @@ if (!common.hasCrypto)
 const assert = require('assert');
 const crypto = require('crypto');
 const stream = require('stream');
-const util = require('util');
 const zlib = require('zlib');

 const Stream = stream.Stream;

 // emit random bytes, and keep a shasum
-function RandomReadStream(opt) {
-  Stream.call(this);
+class RandomReadStream extends Stream {
+  constructor(opt) {
+    super();

-  this.readable = true;
-  this._paused = false;
-  this._processing = false;
-
-  this._hasher = crypto.createHash('sha1');
-  opt = opt || {};
-
-  // base block size.
-  opt.block = opt.block || 256 * 1024;
+    this.readable = true;
+    this._paused = false;
+    this._processing = false;

-  // total number of bytes to emit
-  opt.total = opt.total || 256 * 1024 * 1024;
-  this._remaining = opt.total;
+    this._hasher = crypto.createHash('sha1');
+    opt = opt || {};

-  // how variable to make the block sizes
-  opt.jitter = opt.jitter || 1024;
+    // base block size.
+    opt.block = opt.block || 256 * 1024;

-  this._opt = opt;
+    // total number of bytes to emit
+    opt.total = opt.total || 256 * 1024 * 1024;
+    this._remaining = opt.total;

-  this._process = this._process.bind(this);
+    // how variable to make the block sizes
+    opt.jitter = opt.jitter || 1024;

-  process.nextTick(this._process);
-}
+    this._opt = opt;

-util.inherits(RandomReadStream, Stream);
+    this._process = this._process.bind(this);

-RandomReadStream.prototype.pause = function() {
-  this._paused = true;
-  this.emit('pause');
-};
+    process.nextTick(this._process);
+  }

-RandomReadStream.prototype.resume = function() {
-  // console.error("rrs resume");
-  this._paused = false;
-  this.emit('resume');
-  this._process();
-};
+  pause() {
+    this._paused = true;
+    this.emit('pause');
+  }

-RandomReadStream.prototype._process = function() {
-  if (this._processing) return;
-  if (this._paused) return;
+  resume() {
+    // console.error("rrs resume");
+    this._paused = false;
+    this.emit('resume');
+    this._process();
+  }

-  this._processing = true;
+  _process() {
+    if (this._processing) return;
+    if (this._paused) return;

-  if (!this._remaining) {
-    this._hash = this._hasher.digest('hex').toLowerCase().trim();
-    this._processing = false;
+    this._processing = true;

-    this.emit('end');
-    return;
-  }
+    if (!this._remaining) {
+      this._hash = this._hasher.digest('hex').toLowerCase().trim();
+      this._processing = false;

-  // figure out how many bytes to output
-  // if finished, then just emit end.
-  let block = this._opt.block;
-  const jitter = this._opt.jitter;
-  if (jitter) {
-    block += Math.ceil(Math.random() * jitter - (jitter / 2));
-  }
-  block = Math.min(block, this._remaining);
-  const buf = Buffer.allocUnsafe(block);
-  for (let i = 0; i < block; i++) {
-    buf[i] = Math.random() * 256;
-  }
+      this.emit('end');
+      return;
+    }

-  this._hasher.update(buf);
+    // figure out how many bytes to output
+    // if finished, then just emit end.
+    let block = this._opt.block;
+    const jitter = this._opt.jitter;
+    if (jitter) {
+      block += Math.ceil(Math.random() * jitter - (jitter / 2));
+    }
+    block = Math.min(block, this._remaining);
+    const buf = Buffer.allocUnsafe(block);
+    for (let i = 0; i < block; i++) {
+      buf[i] = Math.random() * 256;
+    }

-  this._remaining -= block;
+    this._hasher.update(buf);

-  console.error('block=%d\nremain=%d\n', block, this._remaining);
-  this._processing = false;
+    this._remaining -= block;

-  this.emit('data', buf);
-  process.nextTick(this._process);
-};
+    this._processing = false;
+    this.emit('data', buf);
+    process.nextTick(this._process);
+  }
+}

 // a filter that just verifies a shasum
-function HashStream() {
-  Stream.call(this);
+class HashStream extends Stream {
+  constructor() {
+    super();
+    this.readable = this.writable = true;
+    this._hasher = crypto.createHash('sha1');
+  }

-  this.readable = this.writable = true;
-  this._hasher = crypto.createHash('sha1');
-}
+  write(c) {
+    // Simulate the way that an fs.ReadStream returns false
+    // on *every* write, only to resume a moment later.
+    this._hasher.update(c);
+    process.nextTick(() => this.resume());
+    return false;
+  }
+
+  resume() {
+    this.emit('resume');
+    process.nextTick(() => this.emit('drain'));
+  }

-util.inherits(HashStream, Stream);
-
-HashStream.prototype.write = function(c) {
-  // Simulate the way that an fs.ReadStream returns false
-  // on *every* write like a jerk, only to resume a
-  // moment later.
-  this._hasher.update(c);
-  process.nextTick(this.resume.bind(this));
-  return false;
-};
-
-HashStream.prototype.resume = function() {
-  this.emit('resume');
-  process.nextTick(this.emit.bind(this, 'drain'));
-};
-
-HashStream.prototype.end = function(c) {
-  if (c) {
-    this.write(c);
+  end(c) {
+    if (c) {
+      this.write(c);
+    }
+    this._hash = this._hasher.digest('hex').toLowerCase().trim();
+    this.emit('data', this._hash);
+    this.emit('end');
   }
-  this._hash = this._hasher.digest('hex').toLowerCase().trim();
-  this.emit('data', this._hash);
-  this.emit('end');
-};
+}

 const inp = new RandomReadStream({ total: 1024, block: 256, jitter: 16 });

@@ -133,23 +128,6 @@ const gunz = zlib.createGunzip();

 inp.pipe(gzip).pipe(gunz).pipe(out);

-inp.on('data', function(c) {
-  console.error('inp data', c.length);
-});
-
-gzip.on('data', function(c) {
-  console.error('gzip data', c.length);
-});
-
-gunz.on('data', function(c) {
-  console.error('gunz data', c.length);
-});
-
-out.on('data', function(c) {
-  console.error('out data', c.length);
-});
-
-out.on('data', common.mustCall(function(c) {
-  console.error('hash=%s', c);
+out.on('data', common.mustCall((c) => {
   assert.strictEqual(c, inp._hash, 'hashes should match');
 }));
diff --git a/test/parallel/test-zlib-sync-no-event.js b/test/parallel/test-zlib-sync-no-event.js
index 33c0018b6f9108..9defd3d31f4fb0 100644
--- a/test/parallel/test-zlib-sync-no-event.js
+++ b/test/parallel/test-zlib-sync-no-event.js
@@ -1,20 +1,18 @@
 'use strict';
-require('../common');
+const common = require('../common');
 const zlib = require('zlib');
 const assert = require('assert');

-const shouldNotBeCalled = () => { throw new Error('unexpected event'); };
-
 const message = 'Come on, Fhqwhgads.';
+const buffer = Buffer.from(message);

 const zipper = new zlib.Gzip();
-zipper.on('close', shouldNotBeCalled);
+zipper.on('close', common.mustNotCall());

-const buffer = Buffer.from(message);
 const zipped = zipper._processChunk(buffer, zlib.Z_FINISH);

 const unzipper = new zlib.Gunzip();
-unzipper.on('close', shouldNotBeCalled);
+unzipper.on('close', common.mustNotCall());

 const unzipped = unzipper._processChunk(zipped, zlib.Z_FINISH);
 assert.notStrictEqual(zipped.toString(), message);
diff --git a/test/parallel/test-zlib-write-after-flush.js b/test/parallel/test-zlib-write-after-flush.js
index caacc976e51a46..5ab601f7f1d46c 100644
--- a/test/parallel/test-zlib-write-after-flush.js
+++ b/test/parallel/test-zlib-write-after-flush.js
@@ -1,5 +1,5 @@
 'use strict';
-require('../common');
+const common = require('../common');
 const assert = require('assert');
 const zlib = require('zlib');

@@ -11,23 +11,14 @@ gzip.pipe(gunz);
 let output = '';
 const input = 'A line of data\n';
 gunz.setEncoding('utf8');
-gunz.on('data', function(c) {
-  output += c;
-});
-
-process.on('exit', function() {
+gunz.on('data', (c) => output += c);
+gunz.on('end', common.mustCall(() => {
   assert.strictEqual(output, input);
-
-  // Make sure that the flush flag was set back to normal
   assert.strictEqual(gzip._flushFlag, zlib.Z_NO_FLUSH);
-
-  console.log('ok');
-});
+}));

 // make sure that flush/write doesn't trigger an assert failure
-gzip.flush(); write();
-function write() {
-  gzip.write(input);
-  gzip.end();
-  gunz.read(0);
-}
+gzip.flush();
+gzip.write(input);
+gzip.end();
+gunz.read(0);
diff --git a/test/parallel/test-zlib.js b/test/parallel/test-zlib.js
index 468b5f346f04ee..dbd437e500b10f 100644
--- a/test/parallel/test-zlib.js
+++ b/test/parallel/test-zlib.js
@@ -4,7 +4,6 @@ const assert = require('assert');
 const zlib = require('zlib');
 const path = require('path');
 const fs = require('fs');
-const util = require('util');
 const stream = require('stream');

 let zlibPairs = [
@@ -48,105 +47,104 @@ if (process.env.FAST) {
 }

 const tests = {};
-testFiles.forEach(function(file) {
+testFiles.forEach(common.mustCall((file) => {
   tests[file] = fs.readFileSync(path.resolve(common.fixturesDir, file));
-});
+}, testFiles.length));


 // stream that saves everything
-function BufferStream() {
-  this.chunks = [];
-  this.length = 0;
-  this.writable = true;
-  this.readable = true;
+class BufferStream extends stream.Stream {
+  constructor() {
+    super();
+    this.chunks = [];
+    this.length = 0;
+    this.writable = true;
+    this.readable = true;
+  }
+
+  write(c) {
+    this.chunks.push(c);
+    this.length += c.length;
+    return true;
+  }
+
+  end(c) {
+    if (c) this.write(c);
+    // flatten
+    const buf = Buffer.allocUnsafe(this.length);
+    let i = 0;
+    this.chunks.forEach((c) => {
+      c.copy(buf, i);
+      i += c.length;
+    });
+    this.emit('data', buf);
+    this.emit('end');
+    return true;
+  }
 }

-util.inherits(BufferStream, stream.Stream);
-
-BufferStream.prototype.write = function(c) {
-  this.chunks.push(c);
-  this.length += c.length;
-  return true;
-};
-
-BufferStream.prototype.end = function(c) {
-  if (c) this.write(c);
-  // flatten
-  const buf = Buffer.allocUnsafe(this.length);
-  let i = 0;
-  this.chunks.forEach(function(c) {
-    c.copy(buf, i);
-    i += c.length;
-  });
-  this.emit('data', buf);
-  this.emit('end');
-  return true;
-};
-

-function SlowStream(trickle) {
-  this.trickle = trickle;
-  this.offset = 0;
-  this.readable = this.writable = true;
+class SlowStream extends stream.Stream {
+  constructor(trickle) {
+    super();
+    this.trickle = trickle;
+    this.offset = 0;
+    this.readable = this.writable = true;
+  }
+
+  write() {
+    throw new Error('not implemented, just call ss.end(chunk)');
+  }
+
+  pause() {
+    this.paused = true;
+    this.emit('pause');
+  }
+
+  resume() {
+    const emit = () => {
+      if (this.paused) return;
+      if (this.offset >= this.length) {
+        this.ended = true;
+        return this.emit('end');
+      }
+      const end = Math.min(this.offset + this.trickle, this.length);
+      const c = this.chunk.slice(this.offset, end);
+      this.offset += c.length;
+      this.emit('data', c);
+      process.nextTick(emit);
+    };
+
+    if (this.ended) return;
+    this.emit('resume');
+    if (!this.chunk) return;
+    this.paused = false;
+    emit();
+  }
+
+  end(chunk) {
+    // walk over the chunk in blocks.
+    this.chunk = chunk;
+    this.length = chunk.length;
+    this.resume();
+    return this.ended;
+  }
 }

-util.inherits(SlowStream, stream.Stream);
-
-SlowStream.prototype.write = function() {
-  throw new Error('not implemented, just call ss.end(chunk)');
-};
-
-SlowStream.prototype.pause = function() {
-  this.paused = true;
-  this.emit('pause');
-};
-
-SlowStream.prototype.resume = function() {
-  const emit = () => {
-    if (this.paused) return;
-    if (this.offset >= this.length) {
-      this.ended = true;
-      return this.emit('end');
-    }
-    const end = Math.min(this.offset + this.trickle, this.length);
-    const c = this.chunk.slice(this.offset, end);
-    this.offset += c.length;
-    this.emit('data', c);
-    process.nextTick(emit);
-  };
-
-  if (this.ended) return;
-  this.emit('resume');
-  if (!this.chunk) return;
-  this.paused = false;
-  emit();
-};
-
-SlowStream.prototype.end = function(chunk) {
-  // walk over the chunk in blocks.
-  this.chunk = chunk;
-  this.length = chunk.length;
-  this.resume();
-  return this.ended;
-};
-

 // for each of the files, make sure that compressing and
 // decompressing results in the same data, for every combination
 // of the options set above.
-let failures = 0;
-let total = 0;
-let done = 0;
-Object.keys(tests).forEach(function(file) {
+const testKeys = Object.keys(tests);
+testKeys.forEach(common.mustCall((file) => {
   const test = tests[file];
-  chunkSize.forEach(function(chunkSize) {
-    trickle.forEach(function(trickle) {
-      windowBits.forEach(function(windowBits) {
-        level.forEach(function(level) {
-          memLevel.forEach(function(memLevel) {
-            strategy.forEach(function(strategy) {
-              zlibPairs.forEach(function(pair) {
+  chunkSize.forEach(common.mustCall((chunkSize) => {
+    trickle.forEach(common.mustCall((trickle) => {
+      windowBits.forEach(common.mustCall((windowBits) => {
+        level.forEach(common.mustCall((level) => {
+          memLevel.forEach(common.mustCall((memLevel) => {
+            strategy.forEach(common.mustCall((strategy) => {
+              zlibPairs.forEach(common.mustCall((pair) => {
                 const Def = pair[0];
                 const Inf = pair[1];
                 const opts = { level: level,
@@ -154,57 +152,32 @@ Object.keys(tests).forEach(function(file) {
                                memLevel: memLevel,
                                strategy: strategy };

-                total++;
-
                 const def = new Def(opts);
                 const inf = new Inf(opts);
                 const ss = new SlowStream(trickle);
                 const buf = new BufferStream();

                 // verify that the same exact buffer comes out the other end.
-                buf.on('data', function(c) {
+                buf.on('data', common.mustCall((c) => {
                   const msg = `${file} ${chunkSize} ${
                     JSON.stringify(opts)} ${Def.name} -> ${Inf.name}`;
-                  let ok = true;
-                  const testNum = ++done;
                   let i;
                   for (i = 0; i < Math.max(c.length, test.length); i++) {
                     if (c[i] !== test[i]) {
-                      ok = false;
-                      failures++;
+                      assert.fail(null, null, msg);
                       break;
                     }
                   }
-                  if (ok) {
-                    console.log(`ok ${testNum} ${msg}`);
-                  } else {
-                    console.log(`not ok ${testNum} msg`);
-                    console.log(' ...');
-                    console.log(`  testfile: ${file}`);
-                    console.log(`  type: ${Def.name} -> ${Inf.name}`);
-                    console.log(`  position: ${i}`);
-                    console.log(`  options: ${JSON.stringify(opts)}`);
-                    console.log(`  expect: ${test[i]}`);
-                    console.log(`  actual: ${c[i]}`);
-                    console.log(`  chunkSize: ${chunkSize}`);
-                    console.log(' ---');
-                  }
-                });
+                }));

                 // the magic happens here.
                 ss.pipe(def).pipe(inf).pipe(buf);
                 ss.end(test);
-              });
-            });
-          });
-        });
-      });
-    });
-  });
-});
-
-process.on('exit', function(code) {
-  console.log(`1..${done}`);
-  assert.strictEqual(done, total, `${total - done} tests left unfinished`);
-  assert.strictEqual(failures, 0, 'some test failures');
-});
+              }, zlibPairs.length));
+            }, strategy.length));
+          }, memLevel.length));
+        }, level.length));
+      }, windowBits.length));
+    }, trickle.length));
+  }, chunkSize.length));
+}, testKeys.length));