fix: replace node buffers with uint8arrays (#69)
Follow-up to #66 that removes node Buffers from the tests and uses a
version of protons that returns Uint8Arrays.

BREAKING CHANGES:

- All use of node Buffers has been replaced with Uint8Arrays (see the sketch below)
- CIDs exported by this module have breaking API changes; see multiformats/js-cid#117
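
For downstream code, the practical effect is swapping Buffer helpers for Uint8Array equivalents. A minimal before/after sketch, assuming the `uint8arrays` utility package this commit adds (variable names are illustrative):

```js
const uint8ArrayFromString = require('uint8arrays/from-string')
const uint8ArrayToString = require('uint8arrays/to-string')

// before (Node.js only):
// const data = Buffer.from('hello world')
// const str = data.toString('utf8')

// after (also works in browsers):
const data = uint8ArrayFromString('hello world') // Uint8Array of UTF-8 bytes
const str = uint8ArrayToString(data) // back to 'hello world'
```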
achingbrain authored Aug 5, 2020
1 parent 32e5165 commit 8a5aed2
Showing 32 changed files with 1,083 additions and 251 deletions.
14 changes: 8 additions & 6 deletions package-lock.json

Some generated files are not rendered by default.

19 changes: 8 additions & 11 deletions packages/ipfs-unixfs-exporter/package.json
@@ -37,29 +37,26 @@
   "devDependencies": {
     "abort-controller": "^3.0.0",
     "aegir": "^25.0.0",
-    "buffer": "^5.6.0",
-    "chai": "^4.2.0",
     "chai-as-promised": "^7.1.1",
     "detect-node": "^2.0.4",
-    "dirty-chai": "^2.0.1",
     "ipfs-unixfs-importer": "^3.0.1",
-    "ipld": "^0.26.1",
-    "ipld-dag-pb": "^0.19.0",
-    "ipld-in-memory": "^5.0.0",
+    "ipld": "^0.27.0",
+    "ipld-dag-pb": "^0.20.0",
+    "ipld-in-memory": "^6.0.0",
     "it-all": "^1.0.1",
     "it-buffer-stream": "^1.0.2",
     "it-first": "^1.0.1",
-    "multicodec": "^1.0.0",
+    "multicodec": "^2.0.0",
     "nyc": "^15.0.0",
-    "sinon": "^9.0.1"
+    "sinon": "^9.0.1",
+    "uint8arrays": "^1.0.0"
   },
   "dependencies": {
-    "cids": "^0.8.0",
+    "cids": "^1.0.0",
     "err-code": "^2.0.0",
     "hamt-sharding": "^1.0.0",
     "ipfs-unixfs": "^2.0.1",
-    "ipfs-utils": "^2.3.1",
     "it-last": "^1.0.1",
-    "multihashing-async": "^1.0.0"
+    "multihashing-async": "^2.0.0"
   }
 }
5 changes: 2 additions & 3 deletions packages/ipfs-unixfs-exporter/src/utils/find-cid-in-shard.js
@@ -2,12 +2,11 @@
 
 const Bucket = require('hamt-sharding/src/bucket')
 const multihashing = require('multihashing-async')
-const TextEncoder = require('ipfs-utils/src/text-encoder')
-const UTF8_ENCODER = new TextEncoder('utf8')
+const uint8ArrayFromString = require('uint8arrays/from-string')
 
 // FIXME: this is copy/pasted from ipfs-unixfs-importer/src/dir-sharded.js
 const hashFn = async function (value) {
-  const buf = UTF8_ENCODER.encode(value)
+  const buf = uint8ArrayFromString(value)
   const hash = await multihashing(buf, 'murmur3-128')
 
   // Multihashing inserts preamble of 2 bytes. Remove it.
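
The hunk above is truncated, but the substance of the change is visible: `uint8arrays/from-string` replaces the removed TextEncoder wrapper. A standalone sketch of the equivalence (not part of the diff):

```js
const uint8ArrayFromString = require('uint8arrays/from-string')

// both produce the UTF-8 bytes of the input as a Uint8Array
const a = new TextEncoder().encode('some-file-name')
const b = uint8ArrayFromString('some-file-name')
console.log(a.length === b.length && a.every((byte, i) => byte === b[i])) // true
```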
12 changes: 5 additions & 7 deletions packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.js
@@ -1,10 +1,7 @@
 /* eslint-env mocha */
 'use strict'
 
-const { Buffer } = require('buffer')
-const chai = require('chai')
-chai.use(require('dirty-chai'))
-const expect = chai.expect
+const { expect } = require('aegir/utils/chai')
 const IPLD = require('ipld')
 const inMemory = require('ipld-in-memory')
 const UnixFS = require('ipfs-unixfs')
@@ -20,6 +17,7 @@ const {
   DAGNode
 } = require('ipld-dag-pb')
 const blockApi = require('./helpers/block')
+const uint8ArrayConcat = require('uint8arrays/concat')
 
 const SHARD_SPLIT_THRESHOLD = 10
 
@@ -36,7 +34,7 @@ describe('exporter sharded', function () {
   const createShardWithFileNames = (numFiles, fileName) => {
     const files = new Array(numFiles).fill(0).map((_, index) => ({
       path: fileName(index),
-      content: Buffer.from([0, 1, 2, 3, 4, index])
+      content: Uint8Array.from([0, 1, 2, 3, 4, index])
     }))
 
     return createShardWithFiles(files)
@@ -59,7 +57,7 @@ describe('exporter sharded', function () {
 
   for (let i = 0; i < (SHARD_SPLIT_THRESHOLD + 1); i++) {
     files[`file-${Math.random()}.txt`] = {
-      content: Buffer.concat(await all(randomBytes(100)))
+      content: uint8ArrayConcat(await all(randomBytes(100)))
     }
   }
 
@@ -92,7 +90,7 @@ describe('exporter sharded', function () {
 
   for (let i = 0; i < dirFiles.length; i++) {
     const dirFile = dirFiles[i]
-    const data = Buffer.concat(await all(dirFile.content()))
+    const data = uint8ArrayConcat(await all(dirFile.content()))
 
     // validate the CID
     expect(files[dirFile.name].cid.equals(dirFile.cid)).to.be.true()
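
`uint8arrays/concat` is the drop-in for `Buffer.concat` throughout these tests. A minimal sketch of the collect-then-concatenate pattern, using the same `it-all` and `it-buffer-stream` helpers the test imports:

```js
const all = require('it-all')
const randomBytes = require('it-buffer-stream')
const uint8ArrayConcat = require('uint8arrays/concat')

async function main () {
  // it-buffer-stream yields chunks totalling the requested byte count
  const chunks = await all(randomBytes(100))
  const content = uint8ArrayConcat(chunks) // one 100-byte Uint8Array
  console.log(content.length) // 100
}

main()
```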
20 changes: 9 additions & 11 deletions packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.js
@@ -1,10 +1,7 @@
 /* eslint-env mocha */
 'use strict'
 
-const { Buffer } = require('buffer')
-const chai = require('chai')
-chai.use(require('dirty-chai'))
-const expect = chai.expect
+const { expect } = require('aegir/utils/chai')
 const IPLD = require('ipld')
 const inMemory = require('ipld-in-memory')
 const importer = require('ipfs-unixfs-importer')
@@ -13,6 +10,7 @@ const all = require('it-all')
 const last = require('it-last')
 const blockApi = require('./helpers/block')
 const randomBytes = require('it-buffer-stream')
+const uint8ArrayConcat = require('uint8arrays/concat')
 
 const ONE_MEG = Math.pow(1024, 2)
 
@@ -28,7 +26,7 @@ describe('exporter subtree', () => {
   })
 
   it('exports a file 2 levels down', async () => {
-    const content = Buffer.concat(await all(randomBytes(ONE_MEG)))
+    const content = uint8ArrayConcat(await all(randomBytes(ONE_MEG)))
 
     const imported = await last(importer([{
       path: './200Bytes.txt',
@@ -44,12 +42,12 @@ describe('exporter subtree', () => {
     expect(exported.name).to.equal('200Bytes.txt')
     expect(exported.path).to.equal(`${imported.cid.toBaseEncodedString()}/level-1/200Bytes.txt`)
 
-    const data = Buffer.concat(await all(exported.content()))
+    const data = uint8ArrayConcat(await all(exported.content()))
     expect(data).to.deep.equal(content)
   })
 
   it('exports a directory 1 level down', async () => {
-    const content = Buffer.concat(await all(randomBytes(ONE_MEG)))
+    const content = uint8ArrayConcat(await all(randomBytes(ONE_MEG)))
     const imported = await last(importer([{
       path: './200Bytes.txt',
       content: randomBytes(ONE_MEG)
@@ -70,7 +68,7 @@ describe('exporter subtree', () => {
     expect(files[1].name).to.equal('level-2')
     expect(files[1].path).to.equal(`${imported.cid.toBaseEncodedString()}/level-1/level-2`)
 
-    const data = Buffer.concat(await all(files[0].content()))
+    const data = uint8ArrayConcat(await all(files[0].content()))
     expect(data).to.deep.equal(content)
   })
 
@@ -88,7 +86,7 @@ describe('exporter subtree', () => {
   })
 
   it('exports starting from non-protobuf node', async () => {
-    const content = Buffer.concat(await all(randomBytes(ONE_MEG)))
+    const content = uint8ArrayConcat(await all(randomBytes(ONE_MEG)))
 
     const imported = await last(importer([{
       path: './level-1/200Bytes.txt',
@@ -108,12 +106,12 @@ describe('exporter subtree', () => {
     expect(exported.name).to.equal('200Bytes.txt')
     expect(exported.path).to.equal(`${cborNodeCid.toBaseEncodedString()}/a/file/level-1/200Bytes.txt`)
 
-    const data = Buffer.concat(await all(exported.content()))
+    const data = uint8ArrayConcat(await all(exported.content()))
     expect(data).to.deep.equal(content)
   })
 
   it('uses .path to export all components of a path', async () => {
-    const content = Buffer.concat(await all(randomBytes(ONE_MEG)))
+    const content = uint8ArrayConcat(await all(randomBytes(ONE_MEG)))
 
     const imported = await last(importer([{
       path: './200Bytes.txt',
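
The assertions above compare Uint8Arrays with chai's `deep.equal`; the `uint8arrays` package also ships an explicit byte-equality helper, shown here as an alternative (not what these tests use):

```js
const uint8ArrayEquals = require('uint8arrays/equals')

const content = Uint8Array.from([1, 2, 3])
const data = Uint8Array.from([1, 2, 3])
console.log(uint8ArrayEquals(content, data)) // true
```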