Require Node.js 12.20 and move to ESM
sindresorhus committed Oct 18, 2021
1 parent ec9a3ca commit 17f3a2b
Showing 7 changed files with 53 additions and 61 deletions.
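Not shown in the diff, but worth noting for consumers: after this commit the package is ESM-only, so `require('neat-csv')` fails with `ERR_REQUIRE_ESM`. A minimal sketch of how a CommonJS consumer could still load it (hypothetical file name and example data):

```js
// sketch.cjs — hypothetical CommonJS consumer of neat-csv after this commit
async function main() {
	// A dynamic import() is the way to load an ESM-only package from CommonJS.
	const {default: neatCsv} = await import('neat-csv');
	console.log(await neatCsv('type,part\nunicorn,horn\nrainbow,pink'));
	//=> [{type: 'unicorn', part: 'horn'}, {type: 'rainbow', part: 'pink'}]
}

main();
```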
6 changes: 2 additions & 4 deletions .github/workflows/main.yml
@@ -10,12 +10,10 @@ jobs:
fail-fast: false
matrix:
node-version:
- 14
- 12
- 10
- 16
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v1
- uses: actions/setup-node@v2
with:
node-version: ${{ matrix.node-version }}
- run: npm install
30 changes: 12 additions & 18 deletions index.d.ts
@@ -1,36 +1,30 @@
/// <reference types="node"/>
import {Readable as ReadableStream} from 'stream';
import {Options as CsvParserOptions} from 'csv-parser';
import {Buffer} from 'node:buffer';
import {Readable as ReadableStream} from 'node:stream';
import {Options} from 'csv-parser';

declare namespace neatCsv {
type Options = CsvParserOptions;

type Row = Record<string, string>;
}
export type Row = Record<string, string>;

/**
Fast CSV parser.
Convenience wrapper around the super-fast streaming [`csv-parser`](https://github.com/mafintosh/csv-parser) module. Use that one if you want streamed parsing.
@param data - CSV data to parse.
@param data - The CSV data to parse.
@param options - See the [`csv-parser` options](https://github.com/mafintosh/csv-parser#options).
@example
```
import neatCsv = require('neat-csv');
import neatCsv from 'neat-csv';
const csv = 'type,part\nunicorn,horn\nrainbow,pink';
(async () => {
console.log(await neatCsv(csv));
//=> [{type: 'unicorn', part: 'horn'}, {type: 'rainbow', part: 'pink'}]
})();
console.log(await neatCsv(csv));
//=> [{type: 'unicorn', part: 'horn'}, {type: 'rainbow', part: 'pink'}]
```
*/
declare function neatCsv<Row = neatCsv.Row>(
export default function neatCsv<RowType = Row>(
data: string | Buffer | ReadableStream,
options?: neatCsv.Options
): Promise<Row[]>;
options?: Options
): Promise<RowType[]>;

export = neatCsv;
export {Options} from 'csv-parser';
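The default export is now generic over the row shape, defaulting to `Row` (`Record<string, string>`). A minimal usage sketch of these typings (the `Part` type is made up for illustration):

```ts
import neatCsv, {Row} from 'neat-csv';

// Caller-supplied row shape (hypothetical).
type Part = {type: string; part: string};

const typed = await neatCsv<Part>('type,part\nunicorn,horn'); // Promise<Part[]>
const loose: Row[] = await neatCsv('type,part\nunicorn,horn'); // falls back to Record<string, string>

console.log(typed, loose);
```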
25 changes: 12 additions & 13 deletions index.js
@@ -1,26 +1,25 @@
'use strict';
const {promisify} = require('util');
const {pipeline} = require('stream');
const toReadableStream = require('to-readable-stream');
const csvParser = require('csv-parser');
const getStream = require('get-stream');
// TODO: Use `const {pipeline: pipelinePromise} = require('stream/promises');` when targeting Node.js 16.
import {promisify} from 'node:util';
import {Readable as ReadableStream, pipeline} from 'node:stream';
import process from 'node:process';
import {Buffer} from 'node:buffer';
import csvParser from 'csv-parser';
import getStream from 'get-stream';
// TODO: Use `import {pipeline as pipelinePromise} from 'node:stream/promises';` when targeting Node.js 16.

const pipelinePromise = promisify(pipeline);

module.exports = async (data, options) => {
export default async function neatCsv(data, options) {
if (typeof data === 'string' || Buffer.isBuffer(data)) {
// TODO: Use https://nodejs.org/api/stream.html#stream_stream_readable_from_iterable_options when targeting Node.js 12.
data = toReadableStream(data);
data = ReadableStream.from(data);
}

const parserStream = csvParser(options);

// Node.js 15.5 has a bug with `.pipeline` for large strings. It works fine in Node.js 14 and 12.
if (Number(process.versions.node.split('.')[0]) >= 15) {
// Node.js 16 has a bug with `.pipeline` for large strings. It works fine in Node.js 14 and 12.
if (Number(process.versions.node.split('.')[0]) >= 16) {
return getStream.array(data.pipe(parserStream));
}

await pipelinePromise([data, parserStream]);
return getStream.array(parserStream);
};
}
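The TODO at the top refers to `node:stream/promises`, the built-in promise API for `pipeline` available since Node.js 15. A rough sketch of what the module could look like once Node.js 16 is the minimum; it mirrors the code above but drops the `promisify` wrapper and the large-string workaround (not part of this commit):

```js
import {Readable as ReadableStream} from 'node:stream';
import {pipeline} from 'node:stream/promises';
import {Buffer} from 'node:buffer';
import csvParser from 'csv-parser';
import getStream from 'get-stream';

export default async function neatCsv(data, options) {
	// Strings and buffers are wrapped in a readable stream, as in the current code.
	if (typeof data === 'string' || Buffer.isBuffer(data)) {
		data = ReadableStream.from(data);
	}

	const parserStream = csvParser(options);
	await pipeline(data, parserStream);
	return getStream.array(parserStream);
}
```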
17 changes: 8 additions & 9 deletions index.test-d.ts
@@ -1,13 +1,12 @@
import {Buffer} from 'node:buffer';
import fs from 'node:fs';
import {expectType} from 'tsd';
import * as fs from 'fs';
import toReadableStream = require('to-readable-stream');
import neatCsv = require('.');
import neatCsv, {Options, Row} from './index.js';

const options: neatCsv.Options = {};
const options: Options = {}; // eslint-disable-line @typescript-eslint/no-unused-vars
const csvText = 'type,part\nunicorn,horn\nrainbow,pink';

expectType<Promise<neatCsv.Row[]>>(neatCsv(csvText));
expectType<Promise<neatCsv.Row[]>>(neatCsv(Buffer.from(csvText)));
expectType<Promise<neatCsv.Row[]>>(neatCsv(toReadableStream(csvText)));
expectType<Promise<neatCsv.Row[]>>(neatCsv(fs.createReadStream('test.csv')));
expectType<Promise<neatCsv.Row[]>>(neatCsv(csvText, {separator: ','}));
expectType<Promise<Row[]>>(neatCsv(csvText));
expectType<Promise<Row[]>>(neatCsv(Buffer.from(csvText)));
expectType<Promise<Row[]>>(neatCsv(fs.createReadStream('test.csv')));
expectType<Promise<Row[]>>(neatCsv(csvText, {separator: ','}));
13 changes: 7 additions & 6 deletions package.json
@@ -10,8 +10,10 @@
"email": "sindresorhus@gmail.com",
"url": "https://sindresorhus.com"
},
"type": "module",
"exports": "./index.js",
"engines": {
"node": ">=10"
"node": "^12.20.0 || ^14.13.1 || >=16.0.0"
},
"scripts": {
"test": "xo && ava && tsd"
@@ -37,12 +39,11 @@
],
"dependencies": {
"csv-parser": "^3.0.0",
"get-stream": "^6.0.0",
"to-readable-stream": "^2.1.0"
"get-stream": "^6.0.1"
},
"devDependencies": {
"ava": "^2.4.0",
"tsd": "^0.14.0",
"xo": "^0.37.1"
"ava": "^3.15.0",
"tsd": "^0.18.0",
"xo": "^0.45.0"
}
}
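Two of the new fields deserve a note: `"type": "module"` makes every `.js` file in the package ESM, and `"exports": "./index.js"` restricts what consumers can resolve. Only the package root is exported, so deep imports stop working; a small sketch of that standard Node.js resolution behaviour (not spelled out in the diff):

```js
// Resolves: the bare specifier maps to "./index.js" via the "exports" field.
import neatCsv from 'neat-csv';

// Would now fail with ERR_PACKAGE_PATH_NOT_EXPORTED, because only "." is exported:
// import parser from 'neat-csv/index.js';

console.log(typeof neatCsv); //=> 'function'
```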
14 changes: 6 additions & 8 deletions readme.md
@@ -8,21 +8,19 @@ Parsing-related issues should be reported to [`csv-parser`](https://github.com/m

## Install

```
$ npm install neat-csv
```sh
npm install neat-csv
```

## Usage

```js
const neatCsv = require('neat-csv');
import neatCsv from 'neat-csv';

const csv = 'type,part\nunicorn,horn\nrainbow,pink';

(async () => {
console.log(await neatCsv(csv));
//=> [{type: 'unicorn', part: 'horn'}, {type: 'rainbow', part: 'pink'}]
})();
console.log(await neatCsv(csv));
//=> [{type: 'unicorn', part: 'horn'}, {type: 'rainbow', part: 'pink'}]
```

## API
@@ -35,7 +33,7 @@ Returns a `Promise<object[]>` with the parsed CSV.

Type: `string | Buffer | stream.Readable`

CSV data to parse.
The CSV data to parse.

#### options

9 changes: 6 additions & 3 deletions test.js
@@ -1,5 +1,6 @@
import {Buffer} from 'node:buffer';
import {Readable as ReadableStream} from 'node:stream';
import test from 'ava';
import toReadableStream from 'to-readable-stream';
import neatCsv from './index.js';

test('buffer', async t => {
@@ -15,13 +16,15 @@ test('string', async t => {
});

test('stream', async t => {
const data = await neatCsv(toReadableStream('name,val\nfoo,1\nbar,2'));
const data = await neatCsv(ReadableStream.from('name,val\nfoo,1\nbar,2'));
t.is(data[0].name, 'foo');
t.is(data[1].name, 'bar');
});

test('error', async t => {
await t.throwsAsync(neatCsv('name,val\nfoo,1,3\nbar,2', {strict: true}), /Row length does not match headers/);
await t.throwsAsync(neatCsv('name,val\nfoo,1,3\nbar,2', {strict: true}), {
message: /Row length does not match headers/,
});
});

const largeStringFixture = `provider_name,provider_id,url,mds_api_url,gbfs_api_url