Skip to content

Commit

Permalink
Add benchmarks (#94)
Browse files Browse the repository at this point in the history
  • Loading branch information
ehmicky authored Aug 12, 2023
1 parent ed9f063 commit 06d9caf
Show file tree
Hide file tree
Showing 6 changed files with 161 additions and 0 deletions.
15 changes: 15 additions & 0 deletions benchmarks/fixture.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
import {writeFile, rm} from 'node:fs/promises';

// Path of the temporary file the benchmarks read from
export const FIXTURE_FILE = 'benchmark_fixture';

const FIXTURE_BYTE_SIZE = 1e8;
// Human-readable size, interpolated into benchmark headers
export const FIXTURE_HUMAN_SIZE = `${FIXTURE_BYTE_SIZE / 1e6} MB`;

// Write a big fixture file filled with dots
export async function createFixture() {
	await writeFile(FIXTURE_FILE, '.'.repeat(FIXTURE_BYTE_SIZE));
}

// Remove the fixture file once benchmarking is done
export async function deleteFixture() {
	await rm(FIXTURE_FILE);
}
44 changes: 44 additions & 0 deletions benchmarks/index.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
import {text, buffer, arrayBuffer} from 'node:stream/consumers';
import getStream, {getStreamAsBuffer, getStreamAsArrayBuffer, getStreamAsArray} from '../index.js';
import {createFixture, deleteFixture, FIXTURE_HUMAN_SIZE} from './fixture.js';
import {createNodeStreamBinary, createNodeStreamText, createWebStreamBinary, createWebStreamText} from './stream.js';
import {measureTask} from './measure.js';

// Run every benchmark suite against a freshly created fixture file,
// deleting the fixture afterwards even when a suite fails.
const runBenchmarks = async () => {
	await createFixture();

	try {
		const suites = [
			[benchmarkNodeStreams, createNodeStreamBinary, 'Node.js stream', 'binary'],
			[benchmarkNodeStreams, createNodeStreamText, 'Node.js stream', 'text'],
			[benchmarkStreams, createWebStreamBinary, 'Web ReadableStream', 'binary'],
			[benchmarkStreams, createWebStreamText, 'Web ReadableStream', 'text'],
		];
		for (const [run, createStream, streamKind, dataKind] of suites) {
			// Suites run sequentially so timings do not interfere
			// eslint-disable-next-line no-await-in-loop
			await run(createStream, `${streamKind} (${FIXTURE_HUMAN_SIZE}, ${dataKind})`);
		}
	} finally {
		await deleteFixture();
	}
};

// Node.js streams additionally expose the native `stream.toArray()` method,
// which is benchmarked on top of the generic consumers.
const benchmarkNodeStreams = async (createStream, header) => {
	await benchmarkStreams(createStream, header);
	const toArray = stream => stream.toArray();
	await logResult('stream.toArray', createStream, toArray);
};

// Benchmark each stream-consuming function, each against a fresh stream
const benchmarkStreams = async (createStream, header) => {
	logHeader(header);

	const tasks = [
		['getStream', getStream],
		['text', text],
		['getStreamAsBuffer', getStreamAsBuffer],
		['buffer', buffer],
		['getStreamAsArrayBuffer', getStreamAsArrayBuffer],
		['arrayBuffer', arrayBuffer],
		['getStreamAsArray', getStreamAsArray],
	];
	for (const [name, task] of tasks) {
		// Tasks run one at a time so measurements do not overlap
		// eslint-disable-next-line no-await-in-loop
		await logResult(name, createStream, task);
	}
};

// Print a Markdown section header separating one benchmark suite from the next
const logHeader = title => {
	console.log(`\n### ${title}\n`);
};

// Print one benchmark line: the task's name and its mean duration
const logResult = async (name, createStream, task) => {
	const duration = await measureTask(createStream, task);
	console.log(`- \`${name}()\`: ${duration}ms`);
};

// Top-level await: running this module executes all benchmark suites
await runBenchmarks();
24 changes: 24 additions & 0 deletions benchmarks/measure.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
import now from 'precise-now';

// Return how many ms running `task()` takes
// Return the mean duration, in milliseconds, of running `task()` once.
// `start()` creates one task input (a stream plus any associated resources);
// `stop()` releases one such input after the measurement.
export const measureTask = async ({start, stop}, task) => {
	// One extra input is created for the pre-warm run below
	const inputs = await Promise.all(Array.from({length: LOOP_COUNT + 1}, start));

	// Pre-warm run, excluded from the measurement
	await task(inputs[0].stream);

	const begin = now();
	for (const input of inputs.slice(1)) {
		// Runs must be sequential: parallel tasks would skew the timing
		// eslint-disable-next-line no-await-in-loop
		await task(input.stream);
	}
	const elapsedNanosecs = now() - begin;

	await Promise.all(inputs.map(input => stop(input)));

	return Math.round(elapsedNanosecs / (LOOP_COUNT * NANOSECS_PER_MILLISEC));
};

const LOOP_COUNT = 10;
const NANOSECS_PER_MILLISEC = 1e6;
29 changes: 29 additions & 0 deletions benchmarks/stream.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
import {open} from 'node:fs/promises';
import {createReadStream} from 'node:fs';
import {FIXTURE_FILE} from './fixture.js';

// Factory for Node.js readable streams over the fixture file.
// `encoding` is `undefined` for binary chunks, or an encoding name for strings.
const createNodeStream = encoding => ({
	start() {
		return {stream: createReadStream(FIXTURE_FILE, encoding)};
	},
	// Node.js read streams need no explicit teardown here
	stop() {},
});

export const createNodeStreamBinary = createNodeStream(undefined);
export const createNodeStreamText = createNodeStream('utf8');

// Factory for Web ReadableStreams over the fixture file.
// The underlying file handle must stay open while the stream is consumed,
// so it is returned from `start()` and closed in `stop()`.
const createWebStream = type => ({
	start: async () => {
		const fileHandle = await open(FIXTURE_FILE);
		return {fileHandle, stream: fileHandle.readableWebStream({type})};
	},
	stop: ({fileHandle}) => fileHandle.close(),
});

export const createWebStreamBinary = createWebStream('bytes');
// `Text` is somewhat of a misnomer here:
// - `fs.readableWebStream({ type: 'bytes' })` creates a `ReadableStream` with a "bytes controller" and `Uint8Array` chunks
// - `fs.readableWebStream({ type: undefined })` creates a `ReadableStream` with a "default controller" and `ArrayBuffer` chunks.
// Node.js currently does not allow creating a file-based `ReadableStream` with string chunks.
export const createWebStreamText = createWebStream(undefined);
2 changes: 2 additions & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
"node": ">=16"
},
"scripts": {
"benchmark": "node benchmarks/index.js",
"test": "xo && ava && tsd"
},
"files": [
Expand All @@ -41,6 +42,7 @@
"devDependencies": {
"@types/node": "^20.2.4",
"ava": "^5.3.0",
"precise-now": "^2.0.0",
"tsd": "^0.28.1",
"xo": "^0.54.2"
}
Expand Down
47 changes: 47 additions & 0 deletions readme.md
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
- Supports [async iterables](#async-iterables).
- Can set a [maximum stream size](#maxbuffer).
- Returns [partially read data](#errors) when the stream errors.
- [Fast](#benchmarks).

## Install

Expand Down Expand Up @@ -215,6 +216,52 @@ const stream = fs.createReadStream('unicorn.txt');
console.log(new Blob([await getStreamAsArrayBuffer(stream)]));
```

## Benchmarks

### Node.js stream (100 MB, binary)

- `getStream()`: 142ms
- `text()`: 139ms
- `getStreamAsBuffer()`: 106ms
- `buffer()`: 83ms
- `getStreamAsArrayBuffer()`: 105ms
- `arrayBuffer()`: 81ms
- `getStreamAsArray()`: 24ms
- `stream.toArray()`: 21ms

### Node.js stream (100 MB, text)

- `getStream()`: 90ms
- `text()`: 89ms
- `getStreamAsBuffer()`: 127ms
- `buffer()`: 192ms
- `getStreamAsArrayBuffer()`: 129ms
- `arrayBuffer()`: 195ms
- `getStreamAsArray()`: 89ms
- `stream.toArray()`: 90ms

### Web ReadableStream (100 MB, binary)

- `getStream()`: 223ms
- `text()`: 221ms
- `getStreamAsBuffer()`: 182ms
- `buffer()`: 153ms
- `getStreamAsArrayBuffer()`: 171ms
- `arrayBuffer()`: 155ms
- `getStreamAsArray()`: 83ms

### Web ReadableStream (100 MB, text)

- `getStream()`: 141ms
- `text()`: 139ms
- `getStreamAsBuffer()`: 91ms
- `buffer()`: 80ms
- `getStreamAsArrayBuffer()`: 89ms
- `arrayBuffer()`: 81ms
- `getStreamAsArray()`: 21ms

[Benchmarks' source file](benchmarks/index.js).

## FAQ

### How is this different from [`concat-stream`](https://github.com/maxogden/concat-stream)?
Expand Down

0 comments on commit 06d9caf

Please sign in to comment.