From 3d147828c2b16105fc59755a0ecef7a1bd8ad325 Mon Sep 17 00:00:00 2001 From: Bartosz Sosnowski Date: Fri, 5 Aug 2016 11:34:50 +0200 Subject: [PATCH 1/9] benchmark: support for multiple http benchmarkers This adds support for multiple HTTP benchmarkers. Adds autocannon as the secondary benchmarker. --- Makefile | 9 +- benchmark/README.md | 26 ++++- benchmark/common.js | 179 +++++++++++++++++++++++------ benchmark/http/chunked.js | 4 +- benchmark/http/cluster.js | 3 +- benchmark/http/end-vs-write-end.js | 3 +- benchmark/http/simple.js | 3 +- 7 files changed, 170 insertions(+), 57 deletions(-) diff --git a/Makefile b/Makefile index fac19326946302..294726d0b7a2db 100644 --- a/Makefile +++ b/Makefile @@ -620,13 +620,6 @@ ifeq ($(XZ), 0) ssh $(STAGINGSERVER) "touch nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/node-$(FULLVERSION)-$(OSTYPE)-$(ARCH).tar.xz.done" endif -haswrk=$(shell which wrk > /dev/null 2>&1; echo $$?) -wrk: -ifneq ($(haswrk), 0) - @echo "please install wrk before proceeding. More information can be found in benchmark/README.md." >&2 - @exit 1 -endif - bench-net: all @$(NODE) benchmark/run.js net @@ -636,7 +629,7 @@ bench-crypto: all bench-tls: all @$(NODE) benchmark/run.js tls -bench-http: wrk all +bench-http: all @$(NODE) benchmark/run.js http bench-fs: all diff --git a/benchmark/README.md b/benchmark/README.md index 225236cc103d91..cdb5d1f5680660 100644 --- a/benchmark/README.md +++ b/benchmark/README.md @@ -14,9 +14,28 @@ This folder contains benchmarks to measure the performance of the Node.js APIs. ## Prerequisites -Most of the http benchmarks require [`wrk`][wrk] to be installed. It may be -available through your preferred package manager. If not, `wrk` can be built -[from source][wrk] via `make`. +Most of the HTTP benchmarks require a benchmarker to be installed, this can be +either [`wrk`][wrk] or [`autocannon`][autocannon]. + +`Autocannon` is a Node script that can be installed using +`npm install -g autocannon`. 
It will use the Node executable that is in the +path, hence if you want to compare two HTTP benchmark runs make sure that the +Node version in the path is not altered. + +`wrk` may be available through your preferred package manger. If not, you can +easily build it [from source][wrk] via `make`. + +To select which tool will be used to run your HTTP benchmark you can: +* When running the benchmakrs, set `NODE_HTTP_BENCHMARKER` environment variable +to desired benchmarker. +* To select the default benchmarker for a particular benchmark, specify it as +`benchmarker` key (e.g. `benchmarker: 'wrk'`) in configuration passed to +`createBenchmark`. This can be overridden by `NODE_HTTP_BENCHMARKER` in run +time. + +If you do not specify which benchmarker to use, all of the installed tools will +be used to run the benchmarks. This will also happen if you pass `all` as the +desired benchmark tool. To analyze the results `R` should be installed. Check you package manager or download it from https://www.r-project.org/. @@ -287,5 +306,6 @@ function main(conf) { } ``` +[autocannon]: https://github.com/mcollina/autocannon [wrk]: https://github.com/wg/wrk [t-test]: https://en.wikipedia.org/wiki/Student%27s_t-test#Equal_or_unequal_sample_sizes.2C_unequal_variances diff --git a/benchmark/common.js b/benchmark/common.js index 3807fea7957096..42bf22e1118652 100644 --- a/benchmark/common.js +++ b/benchmark/common.js @@ -88,52 +88,157 @@ Benchmark.prototype._queue = function(options) { return queue; }; -function hasWrk() { - const result = child_process.spawnSync('wrk', ['-h']); - if (result.error && result.error.code === 'ENOENT') { - console.error('Couldn\'t locate `wrk` which is needed for running ' + - 'benchmarks. Check benchmark/README.md for further instructions.'); - process.exit(1); - } +function AutocannonBenchmarker() { + const autocannon_exe = process.platform === 'win32' + ? 
'autocannon.cmd' + : 'autocannon'; + this.present = function() { + var result = child_process.spawnSync(autocannon_exe, ['-h']); + if (result.error && result.error.code === 'ENOENT') + return false; + else + return true; + }; + this.create = function(path, duration, connections) { + const args = ['-d', duration, '-c', connections, '-j', '-n', + 'http://127.0.0.1:' + exports.PORT + path ]; + var child = child_process.spawn(autocannon_exe, args); + child.stdout.setEncoding('utf8'); + return child; + }; + this.processResults = function(output) { + let result; + try { + result = JSON.parse(output); + } catch (err) { + // Do nothing, let next line handle this + } + if (!result || !result.requests || !result.requests.average) { + return undefined; + } else { + return result.requests.average; + } + }; } -// benchmark an http server. -const WRK_REGEXP = /Requests\/sec:[ \t]+([0-9\.]+)/; -Benchmark.prototype.http = function(urlPath, args, cb) { - hasWrk(); - const self = this; - - const urlFull = 'http://127.0.0.1:' + exports.PORT + urlPath; - args = args.concat(urlFull); - - const childStart = process.hrtime(); - const child = child_process.spawn('wrk', args); - child.stderr.pipe(process.stderr); +function WrkBenchmarker() { + this.present = function() { + var result = child_process.spawnSync('wrk', ['-h']); + if (result.error && result.error.code === 'ENOENT') + return false; + else + return true; + }; + this.create = function(path, duration, connections) { + const args = ['-d', duration, '-c', connections, '-t', 8, + 'http://127.0.0.1:' + exports.PORT + path ]; + var child = child_process.spawn('wrk', args); + child.stdout.setEncoding('utf8'); + child.stderr.pipe(process.stderr); + return child; + }; + const regexp = /Requests\/sec:[ \t]+([0-9\.]+)/; + this.processResults = function(output) { + const match = output.match(regexp); + const result = match && +match[1]; + if (!result) + return undefined; + else + return result; + }; +} - // Collect stdout - let stdout = ''; 
- child.stdout.on('data', (chunk) => stdout += chunk.toString()); +const HTTPBenchmarkers = { + autocannon: new AutocannonBenchmarker(), + wrk: new WrkBenchmarker() +}; - child.once('close', function(code) { - const elapsed = process.hrtime(childStart); - if (cb) cb(code); +// Benchmark an http server. +Benchmark.prototype.http = function(urlPath, duration, connections, cb) { + const self = this; + duration = 1; + + const picked_benchmarker = process.env.NODE_HTTP_BENCHMARKER || + this.config.benchmarker || 'all'; + const benchmarkers = picked_benchmarker === 'all' + ? Object.keys(HTTPBenchmarkers) + : [picked_benchmarker]; + + // See if any benchmarker is available. Also test if all used benchmarkers + // are defined + var any_available = false; + for (var i = 0; i < benchmarkers.length; ++i) { + const benchmarker = benchmarkers[i]; + const http_benchmarker = HTTPBenchmarkers[benchmarker]; + if (http_benchmarker === undefined) { + console.error('Unknown http benchmarker: ', benchmarker); + process.exit(1); + } + if (http_benchmarker.present()) { + any_available = true; + } + } + if (!any_available) { + console.error('Couldn\'t locate any of the required http benchmarkers ' + + '(' + benchmarkers.join(', ') + '). 
Check ' + + 'benchmark/README.md for further instructions.'); + process.exit(1); + } - if (code) { - console.error('wrk failed with ' + code); - process.exit(code); + function runHttpBenchmarker(index, collected_code) { + // All benchmarkers executed + if (index === benchmarkers.length) { + if (cb) + cb(collected_code); + if (collected_code !== 0) + process.exit(1); + return; } - // Extract requests pr second and check for odd results - const match = stdout.match(WRK_REGEXP); - if (!match || match.length <= 1) { - console.error('wrk produced strange output:'); - console.error(stdout); - process.exit(1); + // Run next benchmarker + const benchmarker = benchmarkers[index]; + self.config.benchmarker = benchmarker; + + const http_benchmarker = HTTPBenchmarkers[benchmarker]; + if (http_benchmarker.present()) { + const child_start = process.hrtime(); + var child = http_benchmarker.create(urlPath, duration, connections); + + // Collect stdout + let stdout = ''; + child.stdout.on('data', (chunk) => stdout += chunk.toString()); + + child.once('close', function(code) { + const elapsed = process.hrtime(child_start); + if (code) { + if (stdout === '') { + console.error(benchmarker + ' failed with ' + code); + } else { + console.error(benchmarker + ' failed with ' + code + '. 
Output: '); + console.error(stdout); + } + runHttpBenchmarker(index + 1, code); + return; + } + + var result = http_benchmarker.processResults(stdout); + if (!result) { + console.error(benchmarker + ' produced strange output'); + console.error(stdout); + runHttpBenchmarker(index + 1, 1); + return; + } + + self.report(result, elapsed); + runHttpBenchmarker(index + 1, collected_code); + }); + } else { + runHttpBenchmarker(index + 1, collected_code); } + } - // Report rate - self.report(+match[1], elapsed); - }); + // Run with all benchmarkers + runHttpBenchmarker(0, 0); }; Benchmark.prototype._run = function() { diff --git a/benchmark/http/chunked.js b/benchmark/http/chunked.js index a61978c732233a..65b50430494c61 100644 --- a/benchmark/http/chunked.js +++ b/benchmark/http/chunked.js @@ -20,8 +20,6 @@ function main(conf) { const http = require('http'); var chunk = Buffer.alloc(conf.size, '8'); - var args = ['-d', '10s', '-t', 8, '-c', conf.c]; - var server = http.createServer(function(req, res) { function send(left) { if (left === 0) return res.end(); @@ -34,7 +32,7 @@ function main(conf) { }); server.listen(common.PORT, function() { - bench.http('/', args, function() { + bench.http('/', 10, conf.c, function() { server.close(); }); }); diff --git a/benchmark/http/cluster.js b/benchmark/http/cluster.js index 95e76e69cc3903..b947eec15eec65 100644 --- a/benchmark/http/cluster.js +++ b/benchmark/http/cluster.js @@ -27,9 +27,8 @@ function main(conf) { setTimeout(function() { var path = '/' + conf.type + '/' + conf.length; - var args = ['-d', '10s', '-t', 8, '-c', conf.c]; - bench.http(path, args, function() { + bench.http(path, 10, conf.c, function() { w1.destroy(); w2.destroy(); }); diff --git a/benchmark/http/end-vs-write-end.js b/benchmark/http/end-vs-write-end.js index 0cdc88111de146..cfd067d794fd00 100644 --- a/benchmark/http/end-vs-write-end.js +++ b/benchmark/http/end-vs-write-end.js @@ -43,14 +43,13 @@ function main(conf) { } var method = conf.method === 'write' ? 
write : end; - var args = ['-d', '10s', '-t', 8, '-c', conf.c]; var server = http.createServer(function(req, res) { method(res); }); server.listen(common.PORT, function() { - bench.http('/', args, function() { + bench.http('/', 10, conf.c, function() { server.close(); }); }); diff --git a/benchmark/http/simple.js b/benchmark/http/simple.js index eedda8e98f6c4c..a6bf493020c7e6 100644 --- a/benchmark/http/simple.js +++ b/benchmark/http/simple.js @@ -15,9 +15,8 @@ function main(conf) { var server = require('./_http_simple.js'); setTimeout(function() { var path = '/' + conf.type + '/' + conf.length + '/' + conf.chunks; - var args = ['-d', '10s', '-t', 8, '-c', conf.c]; - bench.http(path, args, function() { + bench.http(path, 10, conf.c, function() { server.close(); }); }, 2000); From 6113ceca37df01046d28d7a5303d608dc57b95c6 Mon Sep 17 00:00:00 2001 From: Bartosz Sosnowski Date: Thu, 18 Aug 2016 09:55:58 +0200 Subject: [PATCH 2/9] fixup: nits, benchmarkers in separate file --- benchmark/common.js | 81 ++++------------------------------ benchmark/http-benchmarkers.js | 62 ++++++++++++++++++++++++++ 2 files changed, 71 insertions(+), 72 deletions(-) create mode 100644 benchmark/http-benchmarkers.js diff --git a/benchmark/common.js b/benchmark/common.js index 42bf22e1118652..5619d778c9c1c7 100644 --- a/benchmark/common.js +++ b/benchmark/common.js @@ -1,6 +1,7 @@ 'use strict'; const child_process = require('child_process'); +const HTTPBenchmarkers = require('./http-benchmarkers.js').HTTPBenchmarkers; // The port used by servers and wrk exports.PORT = process.env.PORT || 12346; @@ -88,71 +89,6 @@ Benchmark.prototype._queue = function(options) { return queue; }; -function AutocannonBenchmarker() { - const autocannon_exe = process.platform === 'win32' - ? 
'autocannon.cmd' - : 'autocannon'; - this.present = function() { - var result = child_process.spawnSync(autocannon_exe, ['-h']); - if (result.error && result.error.code === 'ENOENT') - return false; - else - return true; - }; - this.create = function(path, duration, connections) { - const args = ['-d', duration, '-c', connections, '-j', '-n', - 'http://127.0.0.1:' + exports.PORT + path ]; - var child = child_process.spawn(autocannon_exe, args); - child.stdout.setEncoding('utf8'); - return child; - }; - this.processResults = function(output) { - let result; - try { - result = JSON.parse(output); - } catch (err) { - // Do nothing, let next line handle this - } - if (!result || !result.requests || !result.requests.average) { - return undefined; - } else { - return result.requests.average; - } - }; -} - -function WrkBenchmarker() { - this.present = function() { - var result = child_process.spawnSync('wrk', ['-h']); - if (result.error && result.error.code === 'ENOENT') - return false; - else - return true; - }; - this.create = function(path, duration, connections) { - const args = ['-d', duration, '-c', connections, '-t', 8, - 'http://127.0.0.1:' + exports.PORT + path ]; - var child = child_process.spawn('wrk', args); - child.stdout.setEncoding('utf8'); - child.stderr.pipe(process.stderr); - return child; - }; - const regexp = /Requests\/sec:[ \t]+([0-9\.]+)/; - this.processResults = function(output) { - const match = output.match(regexp); - const result = match && +match[1]; - if (!result) - return undefined; - else - return result; - }; -} - -const HTTPBenchmarkers = { - autocannon: new AutocannonBenchmarker(), - wrk: new WrkBenchmarker() -}; - // Benchmark an http server. 
Benchmark.prototype.http = function(urlPath, duration, connections, cb) { const self = this; @@ -171,7 +107,7 @@ Benchmark.prototype.http = function(urlPath, duration, connections, cb) { const benchmarker = benchmarkers[i]; const http_benchmarker = HTTPBenchmarkers[benchmarker]; if (http_benchmarker === undefined) { - console.error('Unknown http benchmarker: ', benchmarker); + console.error(`Unknown http benchmarker: ${benchmarker}`); process.exit(1); } if (http_benchmarker.present()) { @@ -179,8 +115,8 @@ Benchmark.prototype.http = function(urlPath, duration, connections, cb) { } } if (!any_available) { - console.error('Couldn\'t locate any of the required http benchmarkers ' + - '(' + benchmarkers.join(', ') + '). Check ' + + console.error('Could not locate any of the required http benchmarkers' + + `(${benchmarkers.join(', ')}). Check ` + 'benchmark/README.md for further instructions.'); process.exit(1); } @@ -202,7 +138,8 @@ Benchmark.prototype.http = function(urlPath, duration, connections, cb) { const http_benchmarker = HTTPBenchmarkers[benchmarker]; if (http_benchmarker.present()) { const child_start = process.hrtime(); - var child = http_benchmarker.create(urlPath, duration, connections); + var child = http_benchmarker.create(exports.PORT, urlPath, duration, + connections); // Collect stdout let stdout = ''; @@ -212,9 +149,9 @@ Benchmark.prototype.http = function(urlPath, duration, connections, cb) { const elapsed = process.hrtime(child_start); if (code) { if (stdout === '') { - console.error(benchmarker + ' failed with ' + code); + console.error(`${benchmarker} failed with ${code}`); } else { - console.error(benchmarker + ' failed with ' + code + '. Output: '); + console.error(`${benchmarker} failed with ${code}. 
Output:`); console.error(stdout); } runHttpBenchmarker(index + 1, code); @@ -223,7 +160,7 @@ Benchmark.prototype.http = function(urlPath, duration, connections, cb) { var result = http_benchmarker.processResults(stdout); if (!result) { - console.error(benchmarker + ' produced strange output'); + console.error(`${benchmarker} produced strange output`); console.error(stdout); runHttpBenchmarker(index + 1, 1); return; diff --git a/benchmark/http-benchmarkers.js b/benchmark/http-benchmarkers.js new file mode 100644 index 00000000000000..0941007147a8a5 --- /dev/null +++ b/benchmark/http-benchmarkers.js @@ -0,0 +1,62 @@ +'use strict'; + +const child_process = require('child_process'); + +function AutocannonBenchmarker() { + const autocannon_exe = process.platform === 'win32' + ? 'autocannon.cmd' + : 'autocannon'; + this.present = function() { + var result = child_process.spawnSync(autocannon_exe, ['-h']); + return !(result.error && result.error.code === 'ENOENT'); + }; + this.create = function(port, path, duration, connections) { + const args = ['-d', duration, '-c', connections, '-j', '-n', + `http://127.0.0.1:${port}${path}` ]; + var child = child_process.spawn(autocannon_exe, args); + child.stdout.setEncoding('utf8'); + return child; + }; + this.processResults = function(output) { + let result; + try { + result = JSON.parse(output); + } catch (err) { + // Do nothing, let next line handle this + } + if (!result || !result.requests || !result.requests.average) { + return undefined; + } else { + return result.requests.average; + } + }; +} + +function WrkBenchmarker() { + this.present = function() { + var result = child_process.spawnSync('wrk', ['-h']); + return !(result.error && result.error.code === 'ENOENT'); + }; + this.create = function(port, path, duration, connections) { + const args = ['-d', duration, '-c', connections, '-t', 8, + `http://127.0.0.1:${port}${path}` ]; + var child = child_process.spawn('wrk', args); + child.stdout.setEncoding('utf8'); + 
child.stderr.pipe(process.stderr); + return child; + }; + const regexp = /Requests\/sec:[ \t]+([0-9\.]+)/; + this.processResults = function(output) { + const match = output.match(regexp); + const result = match && +match[1]; + if (!result) + return undefined; + else + return result; + }; +} + +exports.HTTPBenchmarkers = { + autocannon: new AutocannonBenchmarker(), + wrk: new WrkBenchmarker() +}; From 721a1c5cb629e3cc7a4c5c27d3e6c26e7c6602c5 Mon Sep 17 00:00:00 2001 From: Bartosz Sosnowski Date: Thu, 18 Aug 2016 17:53:09 +0200 Subject: [PATCH 3/9] Options as object, benchmarker as configuration --- benchmark/README.md | 68 ++++++++++++++--- benchmark/common.js | 100 +++++-------------------- benchmark/http-benchmarkers.js | 116 +++++++++++++++++++++++++++-- benchmark/http/chunked.js | 4 +- benchmark/http/cluster.js | 5 +- benchmark/http/end-vs-write-end.js | 4 +- benchmark/http/simple.js | 5 +- 7 files changed, 200 insertions(+), 102 deletions(-) diff --git a/benchmark/README.md b/benchmark/README.md index cdb5d1f5680660..b02cee83e1aaa7 100644 --- a/benchmark/README.md +++ b/benchmark/README.md @@ -25,17 +25,10 @@ Node version in the path is not altered. `wrk` may be available through your preferred package manger. If not, you can easily build it [from source][wrk] via `make`. -To select which tool will be used to run your HTTP benchmark you can: -* When running the benchmakrs, set `NODE_HTTP_BENCHMARKER` environment variable -to desired benchmarker. -* To select the default benchmarker for a particular benchmark, specify it as -`benchmarker` key (e.g. `benchmarker: 'wrk'`) in configuration passed to -`createBenchmark`. This can be overridden by `NODE_HTTP_BENCHMARKER` in run -time. - -If you do not specify which benchmarker to use, all of the installed tools will -be used to run the benchmarks. This will also happen if you pass `all` as the -desired benchmark tool. +By default first found benchmark tool will be used to run HTTP benchmarks. 
You
+can override this by setting `NODE_HTTP_BENCHMARKER` environment variable to
+the desired benchmarker name. When creating an HTTP benchmark you can also
+specify which benchmarker should be used.
 
 To analyze the results `R` should be installed. Check you package manager or
 download it from https://www.r-project.org/.
@@ -306,6 +299,59 @@ function main(conf) {
 }
 ```
 
+## Creating HTTP benchmark
+
+The `bench` object returned by `createBenchmark` implements
+`http(options, callback)` method. It can be used to run an external tool to
+benchmark HTTP servers. This benchmarks a simple HTTP server with all installed
+benchmarking tools.
+
+```js
+'use strict';
+
+var common = require('../common.js');
+
+var bench = common.createBenchmark(main, {
+  kb: [64, 128, 256, 1024],
+  connections: [100, 500],
+  benchmarker: common.installed_http_benchmarkers
+});
+
+function main(conf) {
+  const http = require('http');
+  const len = conf.kb * 1024;
+  const chunk = Buffer.alloc(len, 'x');
+  var server = http.createServer(function(req, res) {
+    res.end(chunk);
+  });
+
+  server.listen(common.PORT, function() {
+    bench.http({
+      connections: conf.connections,
+      benchmarker: conf.benchmarker
+    }, function() {
+      server.close();
+    });
+  });
+}
+```
+
+Supported options keys are:
+* `port` - defaults to `common.PORT`
+* `path` - defaults to `/`
+* `connections` - number of concurrent connections to use, defaults to 100
+* `duration` - duration of the benchmark in seconds, defaults to 10
+* `benchmarker` - benchmarker to use, defaults to
+`common.default_http_benchmarker`
+
+The `common.js` module defines 3 handy constants:
+* `supported_http_benchmarkers` - array with names of all supported
+benchmarkers
+* `installed_http_benchmarkers` - array with names of all supported
+benchmarkers that are currently installed on this machine
+* `default_http_benchmarker` - first element from `installed_http_benchmarkers`
+or value of `process.env.NODE_HTTP_BENCHMARKER` if it is set
+
 [autocannon]: 
https://github.com/mcollina/autocannon [wrk]: https://github.com/wg/wrk [t-test]: https://en.wikipedia.org/wiki/Student%27s_t-test#Equal_or_unequal_sample_sizes.2C_unequal_variances diff --git a/benchmark/common.js b/benchmark/common.js index 5619d778c9c1c7..def9d4fbbc22b5 100644 --- a/benchmark/common.js +++ b/benchmark/common.js @@ -1,7 +1,7 @@ 'use strict'; const child_process = require('child_process'); -const HTTPBenchmarkers = require('./http-benchmarkers.js').HTTPBenchmarkers; +const http_benchmarkers = require('./http-benchmarkers.js'); // The port used by servers and wrk exports.PORT = process.env.PORT || 12346; @@ -90,92 +90,28 @@ Benchmark.prototype._queue = function(options) { }; // Benchmark an http server. -Benchmark.prototype.http = function(urlPath, duration, connections, cb) { +exports.default_http_benchmarker = + http_benchmarkers.default_http_benchmarker; +exports.supported_http_benchmarkers = + http_benchmarkers.supported_http_benchmarkers; +exports.installed_http_benchmarkers = + http_benchmarkers.installed_http_benchmarkers; + +Benchmark.prototype.http = function(options, cb) { const self = this; - duration = 1; - - const picked_benchmarker = process.env.NODE_HTTP_BENCHMARKER || - this.config.benchmarker || 'all'; - const benchmarkers = picked_benchmarker === 'all' - ? Object.keys(HTTPBenchmarkers) - : [picked_benchmarker]; - - // See if any benchmarker is available. Also test if all used benchmarkers - // are defined - var any_available = false; - for (var i = 0; i < benchmarkers.length; ++i) { - const benchmarker = benchmarkers[i]; - const http_benchmarker = HTTPBenchmarkers[benchmarker]; - if (http_benchmarker === undefined) { - console.error(`Unknown http benchmarker: ${benchmarker}`); - process.exit(1); - } - if (http_benchmarker.present()) { - any_available = true; - } - } - if (!any_available) { - console.error('Could not locate any of the required http benchmarkers' + - `(${benchmarkers.join(', ')}). 
Check ` + - 'benchmark/README.md for further instructions.'); - process.exit(1); + if (!options.port) { + options.port = exports.PORT; } - function runHttpBenchmarker(index, collected_code) { - // All benchmarkers executed - if (index === benchmarkers.length) { - if (cb) - cb(collected_code); - if (collected_code !== 0) - process.exit(1); - return; + http_benchmarkers.run(options, function(benchmarker_name, result, elapsed) { + if (!self.config.benchmarker) { + self.config.benchmarker = benchmarker_name; } - - // Run next benchmarker - const benchmarker = benchmarkers[index]; - self.config.benchmarker = benchmarker; - - const http_benchmarker = HTTPBenchmarkers[benchmarker]; - if (http_benchmarker.present()) { - const child_start = process.hrtime(); - var child = http_benchmarker.create(exports.PORT, urlPath, duration, - connections); - - // Collect stdout - let stdout = ''; - child.stdout.on('data', (chunk) => stdout += chunk.toString()); - - child.once('close', function(code) { - const elapsed = process.hrtime(child_start); - if (code) { - if (stdout === '') { - console.error(`${benchmarker} failed with ${code}`); - } else { - console.error(`${benchmarker} failed with ${code}. 
Output:`); - console.error(stdout); - } - runHttpBenchmarker(index + 1, code); - return; - } - - var result = http_benchmarker.processResults(stdout); - if (!result) { - console.error(`${benchmarker} produced strange output`); - console.error(stdout); - runHttpBenchmarker(index + 1, 1); - return; - } - - self.report(result, elapsed); - runHttpBenchmarker(index + 1, collected_code); - }); - } else { - runHttpBenchmarker(index + 1, collected_code); + self.report(result, elapsed); + if (cb) { + cb(0); } - } - - // Run with all benchmarkers - runHttpBenchmarker(0, 0); + }); }; Benchmark.prototype._run = function() { diff --git a/benchmark/http-benchmarkers.js b/benchmark/http-benchmarkers.js index 0941007147a8a5..51e7bd9462f327 100644 --- a/benchmark/http-benchmarkers.js +++ b/benchmark/http-benchmarkers.js @@ -3,6 +3,8 @@ const child_process = require('child_process'); function AutocannonBenchmarker() { + this.name = 'autocannon'; + const autocannon_exe = process.platform === 'win32' ? 'autocannon.cmd' : 'autocannon'; @@ -10,6 +12,7 @@ function AutocannonBenchmarker() { var result = child_process.spawnSync(autocannon_exe, ['-h']); return !(result.error && result.error.code === 'ENOENT'); }; + this.create = function(port, path, duration, connections) { const args = ['-d', duration, '-c', connections, '-j', '-n', `http://127.0.0.1:${port}${path}` ]; @@ -17,6 +20,7 @@ function AutocannonBenchmarker() { child.stdout.setEncoding('utf8'); return child; }; + this.processResults = function(output) { let result; try { @@ -33,10 +37,13 @@ function AutocannonBenchmarker() { } function WrkBenchmarker() { + this.name = 'wrk'; + this.present = function() { var result = child_process.spawnSync('wrk', ['-h']); return !(result.error && result.error.code === 'ENOENT'); }; + this.create = function(port, path, duration, connections) { const args = ['-d', duration, '-c', connections, '-t', 8, `http://127.0.0.1:${port}${path}` ]; @@ -45,18 +52,117 @@ function WrkBenchmarker() { 
child.stderr.pipe(process.stderr); return child; }; + const regexp = /Requests\/sec:[ \t]+([0-9\.]+)/; this.processResults = function(output) { const match = output.match(regexp); const result = match && +match[1]; - if (!result) + if (!result) { return undefined; - else + } else { return result; + } }; } -exports.HTTPBenchmarkers = { - autocannon: new AutocannonBenchmarker(), - wrk: new WrkBenchmarker() +const http_benchmarkers = [ new AutocannonBenchmarker(), + new WrkBenchmarker() ]; + +var default_http_benchmarker; +var supported_http_benchmarkers = []; +var installed_http_benchmarkers = []; +var benchmarkers = {}; + +http_benchmarkers.forEach((benchmarker) => { + const name = benchmarker.name; + const present = benchmarker.present(); + benchmarkers[name] = { + benchmarker: benchmarker, + present: present, + default: false + }; + + supported_http_benchmarkers.push(name); + if (present) { + if (!default_http_benchmarker) { + default_http_benchmarker = name; + benchmarkers[name].default = true; + } + installed_http_benchmarkers.push(name); + } +}); + +function getBenchmarker(name) { + const benchmarker = benchmarkers[name]; + if (!benchmarker) { + throw new Error(`benchmarker '${name}' is not supported`); + } + if (!benchmarker.present) { + throw new Error(`benchmarker '${name}' is not installed`); + } + return benchmarker.benchmarker; +} + +if (process.env.NODE_HTTP_BENCHMARKER) { + const requested = process.env.NODE_HTTP_BENCHMARKER; + try { + default_http_benchmarker = requested; + getBenchmarker(requested); + } catch (err) { + console.error('Error when overriding default http benchmarker: ' + + err.message); + process.exit(1); + } +} + +exports.run = function(options, callback) { + options = Object.assign({ + port: 1234, + path: '/', + connections: 100, + duration: 10, + benchmarker: default_http_benchmarker + }, options); + if (!options.benchmarker) { + console.error('Could not locate any of the required http benchmarkers' + + 'Check benchmark/README.md for 
further instructions.'); + process.exit(1); + } + const benchmarker = getBenchmarker(options.benchmarker); + + const benchmarker_start = process.hrtime(); + + var child = benchmarker.create(options.port, options.path, options.duration, + options.connections); + + let stdout = ''; + child.stdout.on('data', (chunk) => stdout += chunk.toString()); + + child.once('close', function(code) { + const elapsed = process.hrtime(benchmarker_start); + if (code) { + if (stdout === '') { + console.error(`${options.benchmarker} failed with ${code}`); + } else { + console.error(`${options.benchmarker} failed with ${code}. Output:`); + console.error(stdout); + } + process.exit(1); + } + + var result = benchmarker.processResults(stdout); + if (!result) { + console.error(`${options.benchmarker} produced strange output`); + console.error(stdout); + process.exit(1); + } + + callback(options.benchmarker, result, elapsed); + }); + }; + +exports.default_http_benchmarker = default_http_benchmarker; +exports.supported_http_benchmarkers = supported_http_benchmarkers; +exports.installed_http_benchmarkers = installed_http_benchmarkers; + diff --git a/benchmark/http/chunked.js b/benchmark/http/chunked.js index 65b50430494c61..46d6ab2e266879 100644 --- a/benchmark/http/chunked.js +++ b/benchmark/http/chunked.js @@ -32,7 +32,9 @@ function main(conf) { }); server.listen(common.PORT, function() { - bench.http('/', 10, conf.c, function() { + bench.http({ + connections: conf.c + }, function() { server.close(); }); }); diff --git a/benchmark/http/cluster.js b/benchmark/http/cluster.js index b947eec15eec65..732a5fad6646c9 100644 --- a/benchmark/http/cluster.js +++ b/benchmark/http/cluster.js @@ -28,7 +28,10 @@ function main(conf) { setTimeout(function() { var path = '/' + conf.type + '/' + conf.length; - bench.http(path, 10, conf.c, function() { + bench.http({ + path: path, + connections: conf.c + }, function() { w1.destroy(); w2.destroy(); }); diff --git a/benchmark/http/end-vs-write-end.js 
b/benchmark/http/end-vs-write-end.js index cfd067d794fd00..62b1a6a0975b48 100644 --- a/benchmark/http/end-vs-write-end.js +++ b/benchmark/http/end-vs-write-end.js @@ -49,7 +49,9 @@ function main(conf) { }); server.listen(common.PORT, function() { - bench.http('/', 10, conf.c, function() { + bench.http({ + connections: conf.c + }, function() { server.close(); }); }); diff --git a/benchmark/http/simple.js b/benchmark/http/simple.js index a6bf493020c7e6..66113ed3758c48 100644 --- a/benchmark/http/simple.js +++ b/benchmark/http/simple.js @@ -16,7 +16,10 @@ function main(conf) { setTimeout(function() { var path = '/' + conf.type + '/' + conf.length + '/' + conf.chunks; - bench.http(path, 10, conf.c, function() { + bench.http({ + path: path, + connections: conf.c + }, function() { server.close(); }); }, 2000); From 52521b904770f09cfc6582aa1bc16ed9f83dffb0 Mon Sep 17 00:00:00 2001 From: Bartosz Sosnowski Date: Tue, 23 Aug 2016 15:21:32 +0200 Subject: [PATCH 4/9] Rename file, style changes, wrk as default --- benchmark/README.md | 6 +- benchmark/_http-benchmarkers.js | 167 +++++++++++++++++++++++++++++++ benchmark/common.js | 19 ++-- benchmark/http-benchmarkers.js | 168 -------------------------------- 4 files changed, 179 insertions(+), 181 deletions(-) create mode 100644 benchmark/_http-benchmarkers.js delete mode 100644 benchmark/http-benchmarkers.js diff --git a/benchmark/README.md b/benchmark/README.md index b02cee83e1aaa7..41a08af6936e6b 100644 --- a/benchmark/README.md +++ b/benchmark/README.md @@ -309,9 +309,9 @@ benchmarking tools. 
```js 'use strict'; -var common = require('../common.js'); +const common = require('../common.js'); -var bench = common.createBenchmark(main, { +const bench = common.createBenchmark(main, { kb: [64, 128, 256, 1024], connections: [100, 500], benchmarker: common.installed_http_benchmarkers @@ -321,7 +321,7 @@ function main(conf) { const http = require('http'); const len = conf.kb * 1024; const chunk = Buffer.alloc(len, 'x'); - var server = http.createServer(function(req, res) { + const server = http.createServer(function(req, res) { res.end(chunk); }); diff --git a/benchmark/_http-benchmarkers.js b/benchmark/_http-benchmarkers.js new file mode 100644 index 00000000000000..1ae0ae2f3964a9 --- /dev/null +++ b/benchmark/_http-benchmarkers.js @@ -0,0 +1,167 @@ +'use strict'; + +const child_process = require('child_process'); + +function AutocannonBenchmarker() { + this.name = 'autocannon'; +} + +AutocannonBenchmarker.prototype.autocannon_exe = process.platform === 'win32' + ? 'autocannon.cmd' + : 'autocannon'; + +AutocannonBenchmarker.prototype.present = function() { + const result = child_process.spawnSync(this.autocannon_exe, ['-h']); + return !(result.error && result.error.code === 'ENOENT'); +}; + +AutocannonBenchmarker.prototype.create = function(port, path, duration, + connections) { + const args = ['-d', duration, '-c', connections, '-j', '-n', + `http://127.0.0.1:${port}${path}` ]; + const child = child_process.spawn(this.autocannon_exe, args); + child.stdout.setEncoding('utf8'); + return child; +}; + +AutocannonBenchmarker.prototype.processResults = function(output) { + let result; + try { + result = JSON.parse(output); + } catch (err) { + // Do nothing, let next line handle this + } + if (!result || !result.requests || !result.requests.average) { + return undefined; + } else { + return result.requests.average; + } +}; + +function WrkBenchmarker() { + this.name = 'wrk'; + this.regexp = /Requests\/sec:[ \t]+([0-9\.]+)/; +} + +WrkBenchmarker.prototype.present = 
function() { + const result = child_process.spawnSync('wrk', ['-h']); + return !(result.error && result.error.code === 'ENOENT'); +}; + +WrkBenchmarker.prototype.create = function(port, path, duration, connections) { + const args = ['-d', duration, '-c', connections, '-t', 8, + `http://127.0.0.1:${port}${path}` ]; + const child = child_process.spawn('wrk', args); + child.stdout.setEncoding('utf8'); + child.stderr.pipe(process.stderr); + return child; +}; + +WrkBenchmarker.prototype.processResults = function(output) { + const match = output.match(this.regexp); + const result = match && +match[1]; + if (!result) { + return undefined; + } else { + return result; + } +}; + +const http_benchmarkers = [ new WrkBenchmarker(), + new AutocannonBenchmarker() ]; + +const supported_http_benchmarkers = []; +const installed_http_benchmarkers = []; +var benchmarkers = {}; + +http_benchmarkers.forEach((benchmarker) => { + const name = benchmarker.name; + const present = benchmarker.present(); + + benchmarkers[name] = { + instance: benchmarker, + present: present + }; + + supported_http_benchmarkers.push(name); + if (present) { + installed_http_benchmarkers.push(name); + } + +}); + +let default_http_benchmarker; + +if (process.env.NODE_HTTP_BENCHMARKER) { + default_http_benchmarker = process.env.NODE_HTTP_BENCHMARKER; + if (!benchmarkers[default_http_benchmarker]) { + throw new Error('Requested default benchmarker ' + + `'${default_http_benchmarker}' is not supported`); + } + if (!benchmarkers[default_http_benchmarker].present) { + throw new Error('Requested default benchmarker ' + + `${default_http_benchmarker}' is not installed`); + } +} else { + default_http_benchmarker = installed_http_benchmarkers[0]; +} + +exports.run = function(options, callback) { + options = Object.assign({ + port: 1234, + path: '/', + connections: 100, + duration: 10, + benchmarker: default_http_benchmarker + }, options); + if (!options.benchmarker) { + callback('Could not locate any of the required http 
benchmarkers. ' + + 'Check benchmark/README.md for further instructions.'); + return; + } + var benchmarker = benchmarkers[options.benchmarker]; + if (!benchmarker) { + callback(`Requested benchmarker '${options.benchmarker}' is not supported`); + return; + } + if (!benchmarker.present) { + callback(`Requested benchmarker '${options.benchmarker}' is not installed`); + return; + } + + const benchmarker_start = process.hrtime(); + + var child = benchmarker.instance.create(options.port, options.path, + options.duration, + options.connections); + + let stdout = ''; + child.stdout.on('data', (chunk) => stdout += chunk.toString()); + + child.once('close', function(code) { + const elapsed = process.hrtime(benchmarker_start); + if (code) { + var error_message = `${options.benchmarker} failed with ${code}.`; + if (stdout !== '') { + error_message += ` Output: ${stdout}`; + } + callback(error_message, code); + return; + } + + const result = benchmarker.instance.processResults(stdout); + if (!result) { + callback(`${options.benchmarker} produced strange output: ${stdout}.`, + code); + return; + } + + callback(undefined, code, options.benchmarker, result, elapsed); + }); + +}; + +exports.default_http_benchmarker = default_http_benchmarker; +exports.supported_http_benchmarkers = supported_http_benchmarkers; +exports.installed_http_benchmarkers = installed_http_benchmarkers; + diff --git a/benchmark/common.js b/benchmark/common.js index def9d4fbbc22b5..83c9952071b1cc 100644 --- a/benchmark/common.js +++ b/benchmark/common.js @@ -1,7 +1,7 @@ 'use strict'; const child_process = require('child_process'); -const http_benchmarkers = require('./http-benchmarkers.js'); +const http_benchmarkers = require('./_http-benchmarkers.js'); // The port used by servers and wrk exports.PORT = process.env.PORT || 12346; @@ -30,7 +30,6 @@ function Benchmark(fn, options) { Benchmark.prototype._parseArgs = function(argv, options) { const cliOptions = Object.assign({}, options); - // Parse 
configuration arguments for (const arg of argv) { const match = arg.match(/^(.+?)=([\s\S]*)$/); @@ -99,17 +98,17 @@ exports.installed_http_benchmarkers = Benchmark.prototype.http = function(options, cb) { const self = this; - if (!options.port) { - options.port = exports.PORT; - } - - http_benchmarkers.run(options, function(benchmarker_name, result, elapsed) { - if (!self.config.benchmarker) { - self.config.benchmarker = benchmarker_name; + const http_options = Object.assign({port: exports.PORT}, options); + http_benchmarkers.run(http_options, function(error, code, used_benchmarker, + result, elapsed) { + if (error) { + console.error(error); + process.exit(1); } + self.config.benchmarker = used_benchmarker; self.report(result, elapsed); if (cb) { - cb(0); + cb(code); } }); }; diff --git a/benchmark/http-benchmarkers.js b/benchmark/http-benchmarkers.js deleted file mode 100644 index 51e7bd9462f327..00000000000000 --- a/benchmark/http-benchmarkers.js +++ /dev/null @@ -1,168 +0,0 @@ -'use strict'; - -const child_process = require('child_process'); - -function AutocannonBenchmarker() { - this.name = 'autocannon'; - - const autocannon_exe = process.platform === 'win32' - ? 
'autocannon.cmd' - : 'autocannon'; - this.present = function() { - var result = child_process.spawnSync(autocannon_exe, ['-h']); - return !(result.error && result.error.code === 'ENOENT'); - }; - - this.create = function(port, path, duration, connections) { - const args = ['-d', duration, '-c', connections, '-j', '-n', - `http://127.0.0.1:${port}${path}` ]; - var child = child_process.spawn(autocannon_exe, args); - child.stdout.setEncoding('utf8'); - return child; - }; - - this.processResults = function(output) { - let result; - try { - result = JSON.parse(output); - } catch (err) { - // Do nothing, let next line handle this - } - if (!result || !result.requests || !result.requests.average) { - return undefined; - } else { - return result.requests.average; - } - }; -} - -function WrkBenchmarker() { - this.name = 'wrk'; - - this.present = function() { - var result = child_process.spawnSync('wrk', ['-h']); - return !(result.error && result.error.code === 'ENOENT'); - }; - - this.create = function(port, path, duration, connections) { - const args = ['-d', duration, '-c', connections, '-t', 8, - `http://127.0.0.1:${port}${path}` ]; - var child = child_process.spawn('wrk', args); - child.stdout.setEncoding('utf8'); - child.stderr.pipe(process.stderr); - return child; - }; - - const regexp = /Requests\/sec:[ \t]+([0-9\.]+)/; - this.processResults = function(output) { - const match = output.match(regexp); - const result = match && +match[1]; - if (!result) { - return undefined; - } else { - return result; - } - }; -} - -const http_benchmarkers = [ new AutocannonBenchmarker(), - new WrkBenchmarker() ]; - -var default_http_benchmarker; -var supported_http_benchmarkers = []; -var installed_http_benchmarkers = []; -var benchmarkers = {}; - -http_benchmarkers.forEach((benchmarker) => { - const name = benchmarker.name; - const present = benchmarker.present(); - benchmarkers[name] = { - benchmarker: benchmarker, - present: present, - default: false - }; - - 
supported_http_benchmarkers.push(name); - if (present) { - if (!default_http_benchmarker) { - default_http_benchmarker = name; - benchmarkers[name].default = true; - } - installed_http_benchmarkers.push(name); - } -}); - -function getBenchmarker(name) { - const benchmarker = benchmarkers[name]; - if (!benchmarker) { - throw new Error(`benchmarker '${name}' is not supported`); - } - if (!benchmarker.present) { - throw new Error(`benchmarker '${name}' is not installed`); - } - return benchmarker.benchmarker; -} - -if (process.env.NODE_HTTP_BENCHMARKER) { - const requested = process.env.NODE_HTTP_BENCHMARKER; - try { - default_http_benchmarker = requested; - getBenchmarker(requested); - } catch (err) { - console.error('Error when overriding default http benchmarker: ' + - err.message); - process.exit(1); - } -} - -exports.run = function(options, callback) { - options = Object.assign({ - port: 1234, - path: '/', - connections: 100, - duration: 10, - benchmarker: default_http_benchmarker - }, options); - if (!options.benchmarker) { - console.error('Could not locate any of the required http benchmarkers' + - 'Check benchmark/README.md for further instructions.'); - process.exit(1); - } - const benchmarker = getBenchmarker(options.benchmarker); - - const benchmarker_start = process.hrtime(); - - var child = benchmarker.create(options.port, options.path, options.duration, - options.connections); - - let stdout = ''; - child.stdout.on('data', (chunk) => stdout += chunk.toString()); - - child.once('close', function(code) { - const elapsed = process.hrtime(benchmarker_start); - if (code) { - if (stdout === '') { - console.error(`${options.benchmarker} failed with ${code}`); - } else { - console.error(`${options.benchmarker} failed with ${code}. 
Output:`); - console.error(stdout); - } - process.exit(1); - } - - var result = benchmarker.processResults(stdout); - if (!result) { - console.error(`${options.benchmarker} produced strange output`); - console.error(stdout); - process.exit(1); - } - - callback(options.benchmarker, result, elapsed); - }); - -}; - -exports.default_http_benchmarker = default_http_benchmarker; -exports.supported_http_benchmarkers = supported_http_benchmarkers; -exports.installed_http_benchmarkers = installed_http_benchmarkers; - From 97b334afce41c32e8efa3cc4601af77e4779e43d Mon Sep 17 00:00:00 2001 From: Bartosz Sosnowski Date: Wed, 24 Aug 2016 10:36:14 +0200 Subject: [PATCH 5/9] fixup: implementing suggestions Move autocannon_exe to constructor. Forward options to create. Drop setEncoding. Move PORT. Add 'new Error()'. Call callback always. --- benchmark/_http-benchmarkers.js | 55 +++++++++++++++++---------------- benchmark/common.js | 17 +++++----- 2 files changed, 35 insertions(+), 37 deletions(-) diff --git a/benchmark/_http-benchmarkers.js b/benchmark/_http-benchmarkers.js index 1ae0ae2f3964a9..5f8375e803967b 100644 --- a/benchmark/_http-benchmarkers.js +++ b/benchmark/_http-benchmarkers.js @@ -2,25 +2,26 @@ const child_process = require('child_process'); +// The port used by servers and wrk +exports.PORT = process.env.PORT || 12346; + function AutocannonBenchmarker() { this.name = 'autocannon'; + this.autocannon_exe = process.platform === 'win32' + ? 'autocannon.cmd' + : 'autocannon'; } -AutocannonBenchmarker.prototype.autocannon_exe = process.platform === 'win32' - ? 
'autocannon.cmd' - : 'autocannon'; - AutocannonBenchmarker.prototype.present = function() { const result = child_process.spawnSync(this.autocannon_exe, ['-h']); return !(result.error && result.error.code === 'ENOENT'); }; -AutocannonBenchmarker.prototype.create = function(port, path, duration, - connections) { - const args = ['-d', duration, '-c', connections, '-j', '-n', - `http://127.0.0.1:${port}${path}` ]; +AutocannonBenchmarker.prototype.create = function(options) { + const args = ['-d', options.duration, '-c', options.connections, '-j', '-n', + `http://127.0.0.1:${options.port}${options.path}` ]; const child = child_process.spawn(this.autocannon_exe, args); - child.stdout.setEncoding('utf8'); + child.stderr.pipe(process.stderr); return child; }; @@ -48,11 +49,10 @@ WrkBenchmarker.prototype.present = function() { return !(result.error && result.error.code === 'ENOENT'); }; -WrkBenchmarker.prototype.create = function(port, path, duration, connections) { - const args = ['-d', duration, '-c', connections, '-t', 8, - `http://127.0.0.1:${port}${path}` ]; +WrkBenchmarker.prototype.create = function(options) { + const args = ['-d', options.duration, '-c', options.connections, '-t', 8, + `http://127.0.0.1:${options.port}${options.path}` ]; const child = child_process.spawn('wrk', args); - child.stdout.setEncoding('utf8'); child.stderr.pipe(process.stderr); return child; }; @@ -72,7 +72,7 @@ const http_benchmarkers = [ new WrkBenchmarker(), const supported_http_benchmarkers = []; const installed_http_benchmarkers = []; -var benchmarkers = {}; +const benchmarkers = {}; http_benchmarkers.forEach((benchmarker) => { const name = benchmarker.name; @@ -100,7 +100,7 @@ if (process.env.NODE_HTTP_BENCHMARKER) { } if (!benchmarkers[default_http_benchmarker].present) { throw new Error('Requested default benchmarker ' + - `${default_http_benchmarker}' is not installed`); + `'${default_http_benchmarker}' is not installed`); } } else { default_http_benchmarker = 
installed_http_benchmarkers[0]; @@ -108,32 +108,33 @@ if (process.env.NODE_HTTP_BENCHMARKER) { exports.run = function(options, callback) { options = Object.assign({ - port: 1234, + port: exports.PORT, path: '/', connections: 100, duration: 10, benchmarker: default_http_benchmarker }, options); if (!options.benchmarker) { - callback('Could not locate any of the required http benchmarkers. ' + - 'Check benchmark/README.md for further instructions.'); + callback(new Error('Could not locate any of the required http' + + 'benchmarkers. Check benchmark/README.md for further ' + + 'instructions.')); return; } var benchmarker = benchmarkers[options.benchmarker]; if (!benchmarker) { - callback(`Requested benchmarker '${options.benchmarker}' is not supported`); + callback(new Error(`Requested benchmarker '${options.benchmarker}' is ` + + 'not supported')); return; } if (!benchmarker.present) { - callback(`Requested benchmarker '${options.benchmarker}' is not installed`); + callback(new Error(`Requested benchmarker '${options.benchmarker}' is ` + + 'not installed')); return; } const benchmarker_start = process.hrtime(); - var child = benchmarker.instance.create(options.port, options.path, - options.duration, - options.connections); + var child = benchmarker.instance.create(options); let stdout = ''; child.stdout.on('data', (chunk) => stdout += chunk.toString()); @@ -145,18 +146,18 @@ exports.run = function(options, callback) { if (stdout !== '') { error_message += ` Output: ${stdout}`; } - callback(error_message, code); + callback(new Error(error_message), code); return; } const result = benchmarker.instance.processResults(stdout); if (!result) { - callback(`${options.benchmarker} produced strange output: ${stdout}.`, - code); + callback(new Error(`${options.benchmarker} produced strange output: ` + + stdout, code)); return; } - callback(undefined, code, options.benchmarker, result, elapsed); + callback(null, code, options.benchmarker, result, elapsed); }); }; diff --git 
a/benchmark/common.js b/benchmark/common.js index 83c9952071b1cc..857e1529cfe5fd 100644 --- a/benchmark/common.js +++ b/benchmark/common.js @@ -3,9 +3,6 @@ const child_process = require('child_process'); const http_benchmarkers = require('./_http-benchmarkers.js'); -// The port used by servers and wrk -exports.PORT = process.env.PORT || 12346; - exports.createBenchmark = function(fn, options) { return new Benchmark(fn, options); }; @@ -95,21 +92,21 @@ exports.supported_http_benchmarkers = http_benchmarkers.supported_http_benchmarkers; exports.installed_http_benchmarkers = http_benchmarkers.installed_http_benchmarkers; +exports.PORT = http_benchmarkers.PORT; Benchmark.prototype.http = function(options, cb) { const self = this; - const http_options = Object.assign({port: exports.PORT}, options); - http_benchmarkers.run(http_options, function(error, code, used_benchmarker, - result, elapsed) { + http_benchmarkers.run(options, function(error, code, used_benchmarker, + result, elapsed) { + if (cb) { + cb(code); + } if (error) { console.error(error); - process.exit(1); + process.exit(code || 1); } self.config.benchmarker = used_benchmarker; self.report(result, elapsed); - if (cb) { - cb(code); - } }); }; From 687a64fd65eec42afcb35a76e54f814abf894160 Mon Sep 17 00:00:00 2001 From: Bartosz Sosnowski Date: Wed, 24 Aug 2016 17:11:51 +0200 Subject: [PATCH 6/9] Move stderr.pipe, clarify documentation --- benchmark/README.md | 9 +++++---- benchmark/_http-benchmarkers.js | 4 ++-- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/benchmark/README.md b/benchmark/README.md index 41a08af6936e6b..391f5ca77c84dc 100644 --- a/benchmark/README.md +++ b/benchmark/README.md @@ -25,10 +25,11 @@ Node version in the path is not altered. `wrk` may be available through your preferred package manger. If not, you can easily build it [from source][wrk] via `make`. -By default first found benchmark tool will be used to run HTTP benchmarks. 
You -can overridde this by seting `NODE_HTTP_BENCHMARKER` environment variable to -the desired benchmarker name. When creating a HTTP benchmark you can also -specify which benchmarker should be used. +By default `wrk` will be used as benchmarker. If it is not available +`autocannon` will be used it its place. You can overridde this by seting +`NODE_HTTP_BENCHMARKER` environment variable to the desired benchmarker name. +When creating a HTTP benchmark you can also specify which benchmarker should be +used. To analyze the results `R` should be installed. Check you package manager or download it from https://www.r-project.org/. diff --git a/benchmark/_http-benchmarkers.js b/benchmark/_http-benchmarkers.js index 5f8375e803967b..13e94166ef2ab4 100644 --- a/benchmark/_http-benchmarkers.js +++ b/benchmark/_http-benchmarkers.js @@ -21,7 +21,6 @@ AutocannonBenchmarker.prototype.create = function(options) { const args = ['-d', options.duration, '-c', options.connections, '-j', '-n', `http://127.0.0.1:${options.port}${options.path}` ]; const child = child_process.spawn(this.autocannon_exe, args); - child.stderr.pipe(process.stderr); return child; }; @@ -53,7 +52,6 @@ WrkBenchmarker.prototype.create = function(options) { const args = ['-d', options.duration, '-c', options.connections, '-t', 8, `http://127.0.0.1:${options.port}${options.path}` ]; const child = child_process.spawn('wrk', args); - child.stderr.pipe(process.stderr); return child; }; @@ -136,6 +134,8 @@ exports.run = function(options, callback) { var child = benchmarker.instance.create(options); + child.stderr.pipe(process.stderr); + let stdout = ''; child.stdout.on('data', (chunk) => stdout += chunk.toString()); From 3a8a9c80f002ff305b4de1a3ed9206fd5e086f2e Mon Sep 17 00:00:00 2001 From: Bartosz Sosnowski Date: Fri, 26 Aug 2016 13:00:49 +0200 Subject: [PATCH 7/9] Drop ENV, support --set, remove names tables --- benchmark/README.md | 26 +++++--------- benchmark/_http-benchmarkers.js | 62 
+++++++-------------------------- benchmark/common.js | 37 ++++++++++++-------- 3 files changed, 44 insertions(+), 81 deletions(-) diff --git a/benchmark/README.md b/benchmark/README.md index 391f5ca77c84dc..ed9addbf98d9db 100644 --- a/benchmark/README.md +++ b/benchmark/README.md @@ -26,10 +26,13 @@ Node version in the path is not altered. easily build it [from source][wrk] via `make`. By default `wrk` will be used as benchmarker. If it is not available -`autocannon` will be used it its place. You can overridde this by seting -`NODE_HTTP_BENCHMARKER` environment variable to the desired benchmarker name. -When creating a HTTP benchmark you can also specify which benchmarker should be -used. +`autocannon` will be used it its place. When creating a HTTP benchmark you can +specify which benchmarker should be used. You can force a specific benchmarker +to be used by providing it as argument, e. g.: + +`node benchmark/run.js --set benchmarker=autocannon http` + +`node benchmark/http/simple.js benchmarker=autocannon` To analyze the results `R` should be installed. Check you package manager or download it from https://www.r-project.org/. @@ -304,8 +307,7 @@ function main(conf) { The `bench` object returned by `createBenchmark` implements `http(options, callback)` method. It can be used to run external tool to -benchmark HTTP servers. This benchmarks simple HTTP server with all installed -benchmarking tools. +benchmark HTTP servers. 
```js 'use strict'; @@ -314,8 +316,7 @@ const common = require('../common.js'); const bench = common.createBenchmark(main, { kb: [64, 128, 256, 1024], - connections: [100, 500], - benchmarker: common.installed_http_benchmarkers + connections: [100, 500] }); function main(conf) { @@ -329,7 +330,6 @@ function main(conf) { server.listen(common.PORT, function() { bench.http({ connections: conf.connections, - benchmarker: conf.benchmarker }, function() { server.close(); }); @@ -345,14 +345,6 @@ Supported options keys are: * `benchmarker` - benchmarker to use, defaults to `common.default_http_benchmarker` -The `common.js` module defines 3 handy constants: -* `supported_http_benchmarkers` - array with names of all supported -benchmarkers -* `installed_http_benchmarkers` - array with names of all supported -benchmarkers that are currently installed on this machine -* `default_http_benchmarker` - first element from `installed_http_benchmarkers` -or value of `process.env.NODE_HTTP_BENCHMARKER` if it is set - [autocannon]: https://github.com/mcollina/autocannon [wrk]: https://github.com/wg/wrk [t-test]: https://en.wikipedia.org/wiki/Student%27s_t-test#Equal_or_unequal_sample_sizes.2C_unequal_variances diff --git a/benchmark/_http-benchmarkers.js b/benchmark/_http-benchmarkers.js index 13e94166ef2ab4..f4e34d013aada5 100644 --- a/benchmark/_http-benchmarkers.js +++ b/benchmark/_http-benchmarkers.js @@ -10,12 +10,9 @@ function AutocannonBenchmarker() { this.autocannon_exe = process.platform === 'win32' ? 
'autocannon.cmd' : 'autocannon'; -} - -AutocannonBenchmarker.prototype.present = function() { const result = child_process.spawnSync(this.autocannon_exe, ['-h']); - return !(result.error && result.error.code === 'ENOENT'); -}; + this.present = !(result.error && result.error.code === 'ENOENT'); +} AutocannonBenchmarker.prototype.create = function(options) { const args = ['-d', options.duration, '-c', options.connections, '-j', '-n', @@ -41,12 +38,9 @@ AutocannonBenchmarker.prototype.processResults = function(output) { function WrkBenchmarker() { this.name = 'wrk'; this.regexp = /Requests\/sec:[ \t]+([0-9\.]+)/; -} - -WrkBenchmarker.prototype.present = function() { const result = child_process.spawnSync('wrk', ['-h']); - return !(result.error && result.error.code === 'ENOENT'); -}; + this.present = !(result.error && result.error.code === 'ENOENT'); +} WrkBenchmarker.prototype.create = function(options) { const args = ['-d', options.duration, '-c', options.connections, '-t', 8, @@ -68,52 +62,25 @@ WrkBenchmarker.prototype.processResults = function(output) { const http_benchmarkers = [ new WrkBenchmarker(), new AutocannonBenchmarker() ]; -const supported_http_benchmarkers = []; -const installed_http_benchmarkers = []; const benchmarkers = {}; http_benchmarkers.forEach((benchmarker) => { - const name = benchmarker.name; - const present = benchmarker.present(); - - benchmarkers[name] = { - instance: benchmarker, - present: present - }; - - supported_http_benchmarkers.push(name); - if (present) { - installed_http_benchmarkers.push(name); + benchmarkers[benchmarker.name] = benchmarker; + if (!exports.default_http_benchmarker && benchmarker.present) { + exports.default_http_benchmarker = benchmarker.name; } - }); -let default_http_benchmarker; - -if (process.env.NODE_HTTP_BENCHMARKER) { - default_http_benchmarker = process.env.NODE_HTTP_BENCHMARKER; - if (!benchmarkers[default_http_benchmarker]) { - throw new Error('Requested default benchmarker ' + - 
`'${default_http_benchmarker}' is not supported`); - } - if (!benchmarkers[default_http_benchmarker].present) { - throw new Error('Requested default benchmarker ' + - `'${default_http_benchmarker}' is not installed`); - } -} else { - default_http_benchmarker = installed_http_benchmarkers[0]; -} - exports.run = function(options, callback) { options = Object.assign({ port: exports.PORT, path: '/', connections: 100, duration: 10, - benchmarker: default_http_benchmarker + benchmarker: exports.default_http_benchmarker }, options); if (!options.benchmarker) { - callback(new Error('Could not locate any of the required http' + + callback(new Error('Could not locate any of the required http ' + 'benchmarkers. Check benchmark/README.md for further ' + 'instructions.')); return; @@ -121,7 +88,7 @@ exports.run = function(options, callback) { var benchmarker = benchmarkers[options.benchmarker]; if (!benchmarker) { callback(new Error(`Requested benchmarker '${options.benchmarker}' is ` + - 'not supported')); + 'not supported')); return; } if (!benchmarker.present) { @@ -132,7 +99,7 @@ exports.run = function(options, callback) { const benchmarker_start = process.hrtime(); - var child = benchmarker.instance.create(options); + var child = benchmarker.create(options); child.stderr.pipe(process.stderr); @@ -150,7 +117,7 @@ exports.run = function(options, callback) { return; } - const result = benchmarker.instance.processResults(stdout); + const result = benchmarker.processResults(stdout); if (!result) { callback(new Error(`${options.benchmarker} produced strange output: ` + stdout, code)); @@ -161,8 +128,3 @@ exports.run = function(options, callback) { }); }; - -exports.default_http_benchmarker = default_http_benchmarker; -exports.supported_http_benchmarkers = supported_http_benchmarkers; -exports.installed_http_benchmarkers = installed_http_benchmarkers; - diff --git a/benchmark/common.js b/benchmark/common.js index 857e1529cfe5fd..adc04a0b8082f9 100644 --- a/benchmark/common.js +++ 
b/benchmark/common.js @@ -9,7 +9,9 @@ exports.createBenchmark = function(fn, options) { function Benchmark(fn, options) { this.name = require.main.filename.slice(__dirname.length + 1); - this.options = this._parseArgs(process.argv.slice(2), options); + const parsed_args = this._parseArgs(process.argv.slice(2), options); + this.options = parsed_args.cli; + this.extra_options = parsed_args.extra; this.queue = this._queue(this.options); this.config = this.queue[0]; @@ -27,6 +29,7 @@ function Benchmark(fn, options) { Benchmark.prototype._parseArgs = function(argv, options) { const cliOptions = Object.assign({}, options); + const extraOptions = {}; // Parse configuration arguments for (const arg of argv) { const match = arg.match(/^(.+?)=([\s\S]*)$/); @@ -35,14 +38,16 @@ Benchmark.prototype._parseArgs = function(argv, options) { process.exit(1); } - // Infer the type from the options object and parse accordingly - const isNumber = typeof options[match[1]][0] === 'number'; - const value = isNumber ? +match[2] : match[2]; - - cliOptions[match[1]] = [value]; + if (options[match[1]]) { + // Infer the type from the options object and parse accordingly + const isNumber = typeof options[match[1]][0] === 'number'; + const value = isNumber ? +match[2] : match[2]; + cliOptions[match[1]] = [value]; + } else { + extraOptions[match[1]] = match[2]; + } } - - return cliOptions; + return { cli: cliOptions, extra: extraOptions }; }; Benchmark.prototype._queue = function(options) { @@ -88,16 +93,17 @@ Benchmark.prototype._queue = function(options) { // Benchmark an http server. 
exports.default_http_benchmarker = http_benchmarkers.default_http_benchmarker; -exports.supported_http_benchmarkers = - http_benchmarkers.supported_http_benchmarkers; -exports.installed_http_benchmarkers = - http_benchmarkers.installed_http_benchmarkers; exports.PORT = http_benchmarkers.PORT; Benchmark.prototype.http = function(options, cb) { const self = this; - http_benchmarkers.run(options, function(error, code, used_benchmarker, - result, elapsed) { + const http_options = Object.assign({ }, options); + http_options.benchmarker = http_options.benchmarker || + self.config.benchmarker || + self.extra_options.benchmarker || + exports.default_http_benchmarker; + http_benchmarkers.run(http_options, function(error, code, used_benchmarker, + result, elapsed) { if (cb) { cb(code); } @@ -126,6 +132,9 @@ Benchmark.prototype._run = function() { for (const key of Object.keys(config)) { childArgs.push(`${key}=${config[key]}`); } + for (const key of Object.keys(self.extra_options)) { + childArgs.push(`${key}=${self.extra_options[key]}`); + } const child = child_process.fork(require.main.filename, childArgs, { env: childEnv From ed3ad0f6eec4147a368277e759c3eca4dfed7fc4 Mon Sep 17 00:00:00 2001 From: Bartosz Sosnowski Date: Fri, 26 Aug 2016 13:15:37 +0200 Subject: [PATCH 8/9] fixup: documentation wording --- benchmark/README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/benchmark/README.md b/benchmark/README.md index ed9addbf98d9db..770df018378e2e 100644 --- a/benchmark/README.md +++ b/benchmark/README.md @@ -26,9 +26,9 @@ Node version in the path is not altered. easily build it [from source][wrk] via `make`. By default `wrk` will be used as benchmarker. If it is not available -`autocannon` will be used it its place. When creating a HTTP benchmark you can -specify which benchmarker should be used. You can force a specific benchmarker -to be used by providing it as argument, e. g.: +`autocannon` will be used in it its place. 
When creating a HTTP benchmark you +can specify which benchmarker should be used. You can force a specific +benchmarker to be used by providing it as an argument, e. g.: `node benchmark/run.js --set benchmarker=autocannon http` From 7cd3daa6d7ae0ed39bb939206e1bed69d2f61020 Mon Sep 17 00:00:00 2001 From: Bartosz Sosnowski Date: Fri, 26 Aug 2016 15:14:35 +0200 Subject: [PATCH 9/9] Change var to let or const --- benchmark/_http-benchmarkers.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/benchmark/_http-benchmarkers.js b/benchmark/_http-benchmarkers.js index f4e34d013aada5..ca8b4625066f67 100644 --- a/benchmark/_http-benchmarkers.js +++ b/benchmark/_http-benchmarkers.js @@ -85,7 +85,7 @@ exports.run = function(options, callback) { 'instructions.')); return; } - var benchmarker = benchmarkers[options.benchmarker]; + const benchmarker = benchmarkers[options.benchmarker]; if (!benchmarker) { callback(new Error(`Requested benchmarker '${options.benchmarker}' is ` + 'not supported')); @@ -99,7 +99,7 @@ exports.run = function(options, callback) { const benchmarker_start = process.hrtime(); - var child = benchmarker.create(options); + const child = benchmarker.create(options); child.stderr.pipe(process.stderr); @@ -109,7 +109,7 @@ exports.run = function(options, callback) { child.once('close', function(code) { const elapsed = process.hrtime(benchmarker_start); if (code) { - var error_message = `${options.benchmarker} failed with ${code}.`; + let error_message = `${options.benchmarker} failed with ${code}.`; if (stdout !== '') { error_message += ` Output: ${stdout}`; }