This repository has been archived by the owner on Apr 29, 2020. It is now read-only.

Commit

fix: update to newest IPLD libraries (#23)
ipld-dag-pb got a new release with breaking changes; update to that release.
vmx authored and achingbrain committed Aug 5, 2019
1 parent 72680d5 commit 03f4069
Showing 4 changed files with 18 additions and 36 deletions.
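The substance of the change is a migration to the new APIs: ipld-in-memory@3 hands back the resolver from a promise instead of a (err, resolver) callback, and ipld-dag-pb@0.18 replaces the asynchronous DAGNode.create() factory with a plain synchronous constructor. The following is a minimal sketch of the new usage, assembled from the calls that appear in the tests below; it is not code from this commit, and the require lines are assumed from the packages named in package.json.

// Sketch only: illustrates the API changes this commit adopts.
const IPLD = require('ipld')
const inMemory = require('ipld-in-memory')
const { DAGNode } = require('ipld-dag-pb')
const mc = require('multicodec')
const mh = require('multihashes')

async function main () {
  // ipld-in-memory@^3.0.0: the resolver comes back from a promise,
  // replacing the old inMemory(IPLD, (err, resolver) => { ... }) callback
  const ipld = await inMemory(IPLD)

  // ipld-dag-pb@~0.18.0: nodes are built with a synchronous constructor,
  // replacing the old `await DAGNode.create(data, links)` factory
  const node = new DAGNode(Buffer.from([0, 1, 2, 3, 4]))

  // ipld.put() is unchanged: serialize the node as dag-pb and get its CID
  const cid = await ipld.put(node, mc.DAG_PB, {
    cidVersion: 0,
    hashAlg: mh.names['sha2-256']
  })

  console.log(cid.toString())
}

main().catch(console.error)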
6 changes: 3 additions & 3 deletions package.json
@@ -44,9 +44,9 @@
     "chai": "^4.2.0",
     "detect-node": "^2.0.4",
     "dirty-chai": "^2.0.1",
-    "ipld": "~0.24.0",
-    "ipld-dag-pb": "~0.17.1",
-    "ipld-in-memory": "^2.0.0",
+    "ipld": "^0.25.0",
+    "ipld-dag-pb": "^0.18.0",
+    "ipld-in-memory": "^3.0.0",
     "multicodec": "~0.5.1",
     "multihashes": "~0.4.14",
     "nyc": "^14.0.0",
14 changes: 4 additions & 10 deletions test/exporter-sharded.spec.js
@@ -46,14 +46,8 @@ describe('exporter sharded', function () {
     }))).cid
   }
 
-  before((done) => {
-    inMemory(IPLD, (err, resolver) => {
-      expect(err).to.not.exist()
-
-      ipld = resolver
-
-      done()
-    })
+  before(async () => {
+    ipld = await inMemory(IPLD)
   })
 
   it('exports a sharded directory', async () => {
@@ -190,15 +184,15 @@ describe('exporter sharded', function () {
   it('exports a file from a sharded directory inside a regular directory inside a sharded directory', async () => {
     const dirCid = await createShard(15)
 
-    const node = await DAGNode.create(new UnixFS('directory').marshal(), [
+    const node = new DAGNode(new UnixFS('directory').marshal(), [
       new DAGLink('shard', 5, dirCid)
     ])
     const nodeCid = await ipld.put(node, mc.DAG_PB, {
      cidVersion: 0,
       hashAlg: mh.names['sha2-256']
     })
 
-    const shardNode = await DAGNode.create(new UnixFS('hamt-sharded-directory').marshal(), [
+    const shardNode = new DAGNode(new UnixFS('hamt-sharded-directory').marshal(), [
       new DAGLink('75normal-dir', 5, nodeCid)
     ])
     const shardNodeCid = await ipld.put(shardNode, mc.DAG_PB, {
10 changes: 2 additions & 8 deletions test/exporter-subtree.spec.js
@@ -19,14 +19,8 @@ const exporter = require('./../src')
 describe('exporter subtree', () => {
   let ipld
 
-  before((done) => {
-    inMemory(IPLD, (err, resolver) => {
-      expect(err).to.not.exist()
-
-      ipld = resolver
-
-      done()
-    })
+  before(async () => {
+    ipld = await inMemory(IPLD)
   })
 
   it('exports a file 2 levels down', async () => {
24 changes: 9 additions & 15 deletions test/exporter.spec.js
@@ -40,7 +40,7 @@ describe('exporter', () => {
 
     const file = new UnixFS(options.type, options.content)
 
-    const node = await DAGNode.create(file.marshal(), options.links)
+    const node = new DAGNode(file.marshal(), options.links)
     const cid = await ipld.put(node, mc.DAG_PB, {
       cidVersion: 0,
       hashAlg: mh.names['sha2-256']
@@ -104,7 +104,7 @@ describe('exporter', () => {
       links.push(new DAGLink('', child.node.size, child.cid))
     }
 
-    const node = await DAGNode.create(file.marshal(), links)
+    const node = new DAGNode(file.marshal(), links)
     const cid = await ipld.put(node, mc.DAG_PB, {
       cidVersion: 1,
       hashAlg: mh.names['sha2-256']
@@ -116,14 +116,8 @@ describe('exporter', () => {
     }
   }
 
-  before((done) => {
-    inMemory(IPLD, (err, resolver) => {
-      expect(err).to.not.exist()
-
-      ipld = resolver
-
-      done()
-    })
+  before(async () => {
+    ipld = await inMemory(IPLD)
   })
 
   it('ensure hash inputs are sanitized', async () => {
@@ -197,14 +191,14 @@ describe('exporter', () => {
   it('exports a small file with links', async () => {
     const content = Buffer.from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
     const chunk1 = new UnixFS('raw', content.slice(0, 5))
-    const chunkNode1 = await DAGNode.create(chunk1.marshal())
+    const chunkNode1 = new DAGNode(chunk1.marshal())
     const chunkCid1 = await ipld.put(chunkNode1, mc.DAG_PB, {
       cidVersion: 0,
       hashAlg: mh.names['sha2-256']
     })
 
     const chunk2 = new UnixFS('raw', content.slice(5))
-    const chunkNode2 = await DAGNode.create(chunk2.marshal())
+    const chunkNode2 = new DAGNode(chunk2.marshal())
     const chunkCid2 = await ipld.put(chunkNode2, mc.DAG_PB, {
       cidVersion: 0,
       hashAlg: mh.names['sha2-256']
@@ -214,7 +208,7 @@ describe('exporter', () => {
     file.addBlockSize(5)
     file.addBlockSize(5)
 
-    const fileNode = await DAGNode.create(file.marshal(), [
+    const fileNode = new DAGNode(file.marshal(), [
       new DAGLink('', chunkNode1.size, chunkCid1),
       new DAGLink('', chunkNode2.size, chunkCid2)
     ])
@@ -822,7 +816,7 @@ describe('exporter', () => {
   })
 
   it('errors we export a non-unixfs dag-pb node', async () => {
-    const cid = await ipld.put(await DAGNode.create(Buffer.from([0, 1, 2, 3, 4])), mc.DAG_PB)
+    const cid = await ipld.put(new DAGNode(Buffer.from([0, 1, 2, 3, 4])), mc.DAG_PB)
 
     try {
       await exporter(cid, ipld)
@@ -839,7 +833,7 @@ describe('exporter', () => {
     const file = new UnixFS('file')
     file.addBlockSize(100)
 
-    const cid = await ipld.put(await DAGNode.create(file.marshal(), [
+    const cid = await ipld.put(new DAGNode(file.marshal(), [
       new DAGLink('', 100, cborNodeCid)
     ]), mc.DAG_PB)
Expand Down
