diff --git a/package.json b/package.json index 557402f7..6c6c9488 100644 --- a/package.json +++ b/package.json @@ -40,10 +40,9 @@ "aegir": "^17.0.0", "chai": "^4.2.0", "dirty-chai": "^2.0.1", - "ipfs": "~0.32.3", "ipfs-block-service": "~0.15.1", "ipfs-repo": "~0.25.0", - "ipld": "~0.19.1", + "ipld": "~0.20.0", "mkdirp": "~0.5.1", "multihashes": "~0.4.14", "ncp": "^2.0.0", @@ -58,7 +57,7 @@ "cids": "~0.5.5", "deep-extend": "~0.6.0", "ipfs-unixfs": "~0.1.16", - "ipld-dag-pb": "~0.14.11", + "ipld-dag-pb": "~0.15.0", "left-pad": "^1.3.0", "multihashing-async": "~0.5.1", "pull-batch": "^1.0.0", diff --git a/src/builder/builder.js b/src/builder/builder.js index 6400f346..09d96f19 100644 --- a/src/builder/builder.js +++ b/src/builder/builder.js @@ -6,13 +6,11 @@ const pull = require('pull-stream') const through = require('pull-through') const parallel = require('async/parallel') const waterfall = require('async/waterfall') -const dagPB = require('ipld-dag-pb') -const CID = require('cids') -const multihash = require('multihashing-async') - +const persist = require('../utils/persist') const reduce = require('./reduce') - -const DAGNode = dagPB.DAGNode +const { + DAGNode +} = require('ipld-dag-pb') const defaultOptions = { chunkerOptions: { @@ -27,12 +25,6 @@ const defaultOptions = { module.exports = function builder (createChunker, ipld, createReducer, _options) { const options = extend({}, defaultOptions, _options) - options.cidVersion = options.cidVersion || options.cidVersion - options.hashAlg = options.hashAlg || defaultOptions.hashAlg - - if (options.hashAlg !== 'sha2-256') { - options.cidVersion = 1 - } return function (source) { return function (items, cb) { @@ -71,33 +63,17 @@ module.exports = function builder (createChunker, ipld, createReducer, _options) const d = new UnixFS('directory') waterfall([ - (cb) => DAGNode.create(d.marshal(), [], options.hashAlg, cb), - (node, cb) => { - if (options.onlyHash) { - return cb(null, node) - } - - const cid = new CID(options.cidVersion, 'dag-pb', node.multihash) - - node = new DAGNode( - node.data, - node.links, - node.serialized, - cid - ) - - ipld.put(node, { - cid - }, (err) => cb(err, node)) - } - ], (err, node) => { + (cb) => DAGNode.create(d.marshal(), [], cb), + (node, cb) => persist(node, ipld, options, cb) + ], (err, result) => { if (err) { return callback(err) } + callback(null, { path: item.path, - multihash: node.multihash, - size: node.size + multihash: result.cid.buffer, + size: result.node.size }) }) } @@ -134,55 +110,42 @@ module.exports = function builder (createChunker, ipld, createReducer, _options) }), pull.asyncMap((buffer, callback) => { if (options.rawLeaves) { - return multihash(buffer, options.hashAlg, (error, hash) => { - if (error) { - return callback(error) - } - - return callback(null, { - multihash: hash, - size: buffer.length, - leafSize: buffer.length, - cid: new CID(1, 'raw', hash), - data: buffer - }) + return callback(null, { + size: buffer.length, + leafSize: buffer.length, + data: buffer }) } const file = new UnixFS(options.leafType, buffer) - DAGNode.create(file.marshal(), [], options.hashAlg, (err, node) => { + DAGNode.create(file.marshal(), [], (err, node) => { if (err) { return callback(err) } callback(null, { - multihash: node.multihash, size: node.size, leafSize: file.fileSize(), - cid: new CID(options.cidVersion, 'dag-pb', node.multihash), data: node }) }) }), pull.asyncMap((leaf, callback) => { - if (options.onlyHash) { - return callback(null, leaf) - } + persist(leaf.data, ipld, options, (error, results) 
=> { + if (error) { + return callback(error) + } - ipld.put(leaf.data, { - cid: leaf.cid - }, (error) => callback(error, leaf)) - }), - pull.map((leaf) => { - return { - path: file.path, - multihash: leaf.cid.buffer, - size: leaf.size, - leafSize: leaf.leafSize, - name: '', - cid: leaf.cid - } + callback(null, { + size: leaf.size, + leafSize: leaf.leafSize, + data: results.node, + multihash: results.cid.buffer, + path: leaf.path, + name: '' + }) + }) }), through( // mark as single node if only one single node function onData (data) { diff --git a/src/builder/reduce.js b/src/builder/reduce.js index 483c9fd2..2979bab7 100644 --- a/src/builder/reduce.js +++ b/src/builder/reduce.js @@ -3,7 +3,7 @@ const waterfall = require('async/waterfall') const dagPB = require('ipld-dag-pb') const UnixFS = require('ipfs-unixfs') -const CID = require('cids') +const persist = require('../utils/persist') const DAGLink = dagPB.DAGLink const DAGNode = dagPB.DAGNode @@ -14,10 +14,10 @@ module.exports = function reduce (file, ipld, options) { const leaf = leaves[0] return callback(null, { - path: file.path, - multihash: leaf.multihash, size: leaf.size, leafSize: leaf.leafSize, + multihash: leaf.multihash, + path: file.path, name: leaf.name }) } @@ -28,44 +28,23 @@ module.exports = function reduce (file, ipld, options) { const links = leaves.map((leaf) => { f.addBlockSize(leaf.leafSize) - let cid = leaf.cid - - if (!cid) { - // we are an intermediate node - cid = new CID(options.cidVersion, 'dag-pb', leaf.multihash) - } - - return new DAGLink(leaf.name, leaf.size, cid.buffer) + return new DAGLink(leaf.name, leaf.size, leaf.multihash) }) waterfall([ - (cb) => DAGNode.create(f.marshal(), links, options.hashAlg, cb), - (node, cb) => { - const cid = new CID(options.cidVersion, 'dag-pb', node.multihash) - - if (options.onlyHash) { - return cb(null, { - node, cid - }) - } - - ipld.put(node, { - cid - }, (error) => cb(error, { - node, cid - })) - } + (cb) => DAGNode.create(f.marshal(), links, cb), + (node, cb) => persist(node, ipld, options, cb) ], (error, result) => { if (error) { return callback(error) } callback(null, { - name: '', - path: file.path, - multihash: result.cid.buffer, size: result.node.size, - leafSize: f.fileSize() + leafSize: f.fileSize(), + multihash: result.cid.buffer, + path: file.path, + name: '' }) }) } diff --git a/src/exporter/dir-flat.js b/src/exporter/dir-flat.js index b6c5f0b1..6d5ffc7b 100644 --- a/src/exporter/dir-flat.js +++ b/src/exporter/dir-flat.js @@ -13,7 +13,7 @@ function dirExporter (cid, node, name, path, pathRest, resolve, size, dag, paren name: name, depth: depth, path: path, - hash: cid, + multihash: cid.buffer, size: node.size, type: 'dir' } @@ -26,7 +26,7 @@ function dirExporter (cid, node, name, path, pathRest, resolve, size, dag, paren size: link.size, name: link.name, path: path + '/' + link.name, - multihash: link.multihash, + multihash: link.cid.buffer, linkName: link.name, pathRest: pathRest.slice(1), type: 'dir' diff --git a/src/exporter/dir-hamt-sharded.js b/src/exporter/dir-hamt-sharded.js index 1a091172..ef88d7cc 100644 --- a/src/exporter/dir-hamt-sharded.js +++ b/src/exporter/dir-hamt-sharded.js @@ -13,7 +13,7 @@ function shardedDirExporter (cid, node, name, path, pathRest, resolve, size, dag name: name, depth: depth, path: path, - hash: cid, + multihash: cid.buffer, size: node.size, type: 'dir' } @@ -36,7 +36,7 @@ function shardedDirExporter (cid, node, name, path, pathRest, resolve, size, dag depth: depth + 1, name: p, path: pp, - multihash: link.multihash, + 
multihash: link.cid.buffer, pathRest: p ? pathRest.slice(1) : pathRest, parent: dir || parent } diff --git a/src/exporter/file.js b/src/exporter/file.js index 72451af1..e7261479 100644 --- a/src/exporter/file.js +++ b/src/exporter/file.js @@ -2,7 +2,6 @@ const traverse = require('pull-traverse') const UnixFS = require('ipfs-unixfs') -const CID = require('cids') const pull = require('pull-stream') const paramap = require('pull-paramap') const extractDataFromBlock = require('./extract-data-from-block') @@ -43,7 +42,7 @@ module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, d content: pull.once(Buffer.alloc(0)), name: name, path: path, - hash: cid, + multihash: cid.buffer, size: fileSize, type: 'file' }) @@ -64,7 +63,7 @@ module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, d content: content, name: name, path: path, - hash: cid, + multihash: cid.buffer, size: fileSize, type: 'file' }]) @@ -142,7 +141,7 @@ function getChildren (dag, offset, end) { return pull( pull.values(filteredLinks), paramap((child, cb) => { - dag.get(new CID(child.link.multihash), (error, result) => cb(error, { + dag.get(child.link.cid, (error, result) => cb(error, { start: child.start, end: child.end, node: result && result.value, diff --git a/src/exporter/index.js b/src/exporter/index.js index 76c55dd4..27194efd 100644 --- a/src/exporter/index.js +++ b/src/exporter/index.js @@ -54,9 +54,11 @@ module.exports = (path, dag, options) => { const pathLengthToCut = join( [dPath.base].concat(dPath.rest.slice(0, dPath.rest.length - 1))).length + const cid = new CID(dPath.base) + return pull( pull.values([{ - multihash: new CID(dPath.base), + multihash: cid.buffer, name: dPath.base, path: dPath.base, pathRest: dPath.rest, @@ -70,7 +72,7 @@ module.exports = (path, dag, options) => { name: node.name, path: finalPathFor(node), size: node.size, - hash: node.hash || node.multihash, + hash: node.multihash, content: node.content, type: node.type } diff --git a/src/importer/dir-flat.js b/src/importer/dir-flat.js index bf6be349..7403b07f 100644 --- a/src/importer/dir-flat.js +++ b/src/importer/dir-flat.js @@ -2,12 +2,12 @@ const asyncEachSeries = require('async/eachSeries') const waterfall = require('async/waterfall') -const CID = require('cids') const dagPB = require('ipld-dag-pb') const UnixFS = require('ipfs-unixfs') const DAGLink = dagPB.DAGLink const DAGNode = dagPB.DAGNode const Dir = require('./dir') +const persist = require('../utils/persist') class DirFlat extends Dir { constructor (props, _options) { @@ -56,28 +56,17 @@ class DirFlat extends Dir { }) const dir = new UnixFS('directory') - const options = this._options waterfall( [ - (callback) => DAGNode.create(dir.marshal(), links, options.hashAlg, callback), - (node, callback) => { - if (options.onlyHash) return callback(null, node) - - let cid = new CID(node.multihash) - - if (options.cidVersion === 1) { - cid = cid.toV1() - } - - ipld.put(node, { cid }, (err) => callback(err, node)) - }, - (node, callback) => { - this.multihash = node.multihash + (callback) => DAGNode.create(dir.marshal(), links, callback), + (node, callback) => persist(node, ipld, this._options, callback), + ({cid, node}, callback) => { + this.multihash = cid.buffer this.size = node.size const pushable = { path: path, - multihash: node.multihash, + multihash: cid.buffer, size: node.size } source.push(pushable) diff --git a/src/importer/dir-sharded.js b/src/importer/dir-sharded.js index ec3920f3..3f8fd8dc 100644 --- a/src/importer/dir-sharded.js +++ 
b/src/importer/dir-sharded.js @@ -3,13 +3,13 @@ const leftPad = require('left-pad') const whilst = require('async/whilst') const waterfall = require('async/waterfall') -const CID = require('cids') const dagPB = require('ipld-dag-pb') const UnixFS = require('ipfs-unixfs') const DAGLink = dagPB.DAGLink const DAGNode = dagPB.DAGNode const multihashing = require('multihashing-async') const Dir = require('./dir') +const persist = require('../utils/persist') const Bucket = require('../hamt') @@ -71,14 +71,15 @@ class DirSharded extends Dir { } flush (path, ipld, source, callback) { - flush(this._options, this._bucket, path, ipld, source, (err, node) => { + flush(this._options, this._bucket, path, ipld, source, (err, results) => { if (err) { - callback(err) + return callback(err) } else { - this.multihash = node.multihash - this.size = node.size + this.multihash = results.cid.buffer + this.size = results.node.size } - callback(null, node) + + callback(null, results) }) } } @@ -112,19 +113,19 @@ function flush (options, bucket, path, ipld, source, callback) { callback(err) return // early } - haveLinks(links) + haveLinks(links, callback) } ) function collectChild (child, index, callback) { const labelPrefix = leftPad(index.toString(16).toUpperCase(), 2, '0') if (Bucket.isBucket(child)) { - flush(options, child, path, ipld, null, (err, node) => { + flush(options, child, path, ipld, null, (err, { cid, node }) => { if (err) { callback(err) return // early } - links.push(new DAGLink(labelPrefix, node.size, node.multihash)) + links.push(new DAGLink(labelPrefix, node.size, cid)) callback() }) } else { @@ -135,39 +136,27 @@ function flush (options, bucket, path, ipld, source, callback) { } } - function haveLinks (links) { + function haveLinks (links, callback) { // go-ipfs uses little endian, that's why we have to // reverse the bit field before storing it const data = Buffer.from(children.bitField().reverse()) const dir = new UnixFS('hamt-sharded-directory', data) dir.fanout = bucket.tableSize() dir.hashType = options.hashFn.code - waterfall( - [ - (callback) => DAGNode.create(dir.marshal(), links, options.hashAlg, callback), - (node, callback) => { - if (options.onlyHash) return callback(null, node) - - let cid = new CID(node.multihash) - - if (options.cidVersion === 1) { - cid = cid.toV1() - } - - ipld.put(node, { cid }, (err) => callback(err, node)) - }, - (node, callback) => { - const pushable = { - path: path, - multihash: node.multihash, - size: node.size - } - if (source) { - source.push(pushable) - } - callback(null, node) + waterfall([ + (cb) => DAGNode.create(dir.marshal(), links, cb), + (node, cb) => persist(node, ipld, options, cb), + ({ cid, node }, cb) => { + const pushable = { + path: path, + size: node.size, + multihash: cid.buffer } - ], - callback) + if (source) { + source.push(pushable) + } + cb(null, { cid, node }) + } + ], callback) } } diff --git a/src/importer/flush-tree.js b/src/importer/flush-tree.js index f6fd58ff..f12d1210 100644 --- a/src/importer/flush-tree.js +++ b/src/importer/flush-tree.js @@ -5,6 +5,7 @@ const CID = require('cids') const dagPB = require('ipld-dag-pb') const mapValues = require('async/mapValues') const waterfall = require('async/waterfall') +const persist = require('../utils/persist') const DAGLink = dagPB.DAGLink const DAGNode = dagPB.DAGNode @@ -34,8 +35,8 @@ module.exports = (files, ipld, source, callback) => { * received an array of files with the format: * { * path: // full path - * multihash: // multihash of the dagNode * size: // cumulative size + 
* cid: // multihash of the dagNode * } * * returns a JSON object that represents a tree where branches are the paths @@ -67,7 +68,7 @@ function createTree (files) { tmpTree[splitted[i]] = {} } if (i === splitted.length - 1) { - tmpTree[splitted[i]] = file.multihash + tmpTree[splitted[i]] = file.cid } else { tmpTree = tmpTree[splitted[i]] } @@ -79,13 +80,13 @@ function createTree (files) { /* * create a size index that goes like: - * { : } + * { : } */ function createSizeIndex (files) { const sizeIndex = {} files.forEach((file) => { - sizeIndex[new CID(file.multihash).toBaseEncodedString()] = file.size + sizeIndex[file.cid.toBaseEncodedString()] = file.size }) return sizeIndex @@ -131,15 +132,15 @@ function traverse (tree, sizeIndex, path, ipld, source, done) { waterfall([ (cb) => DAGNode.create(dir.marshal(), links, cb), - (node, cb) => { - const cid = new CID(node.multihash) + (node, cb) => persist(node, ipld, {}, cb), + ({ cid, node }, cb) => { sizeIndex[cid.toBaseEncodedString()] = node.size - ipld.put(node, { - cid - }, (err) => cb(err, node)) + cb(null, { + cid, node + }) } - ], (err, node) => { + ], (err, results) => { if (err) { source.push(new Error('failed to store dirNode')) return done(err) @@ -148,12 +149,12 @@ function traverse (tree, sizeIndex, path, ipld, source, done) { if (path) { source.push({ path: path, - multihash: node.multihash, - size: node.size + size: results.node.size, + multihash: results.cid.buffer }) } - done(null, node.multihash) + done(null, results.cid) }) }) } diff --git a/src/utils/persist.js b/src/utils/persist.js new file mode 100644 index 00000000..5fa987c1 --- /dev/null +++ b/src/utils/persist.js @@ -0,0 +1,53 @@ +'use strict' + +const { + util: { + cid + } +} = require('ipld-dag-pb') + +const defaultOptions = { + cidVersion: 0, + hashAlg: 'sha2-256', + codec: 'dag-pb' +} + +const persist = (node, ipld, options, callback) => { + let cidVersion = options.cidVersion || defaultOptions.cidVersion + let hashAlg = options.hashAlg || defaultOptions.hashAlg + let codec = options.codec || defaultOptions.codec + + if (Buffer.isBuffer(node)) { + cidVersion = 1 + codec = 'raw' + } + + if (hashAlg !== 'sha2-256') { + cidVersion = 1 + } + + if (options.onlyHash) { + return cid(node, { + version: cidVersion, + hashAlg: hashAlg + }, (err, cid) => { + callback(err, { + cid, + node + }) + }) + } + + ipld.put(node, { + version: cidVersion, + hashAlg: hashAlg, + format: codec + }, (error, cid) => { + callback(error, { + cid, + node + }) + }) +} + +module.exports = persist diff --git a/test/builder.js b/test/builder.js index 3de873bc..40b2b9ad 100644 --- a/test/builder.js +++ b/test/builder.js @@ -4,10 +4,9 @@ const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect -const BlockService = require('ipfs-block-service') const pull = require('pull-stream') const mh = require('multihashes') -const Ipld = require('ipld') +const IPLD = require('ipld') const eachSeries = require('async').eachSeries const CID = require('cids') const UnixFS = require('ipfs-unixfs') @@ -20,9 +19,16 @@ module.exports = (repo) => { const testMultihashes = Object.keys(mh.names).slice(1, 40) - before(() => { - const bs = new BlockService(repo) - ipld = new Ipld({blockService: bs}) + before((done) => { + IPLD.inMemory((err, resolver) => { + if (err) { + return done(err) + } + + ipld = resolver + + done() + }) }) it('allows multihash hash algorithm to be specified', (done) => { diff --git a/test/exporter.js b/test/exporter.js index 8cb84319..d3fda509 100644 --- 
a/test/exporter.js +++ b/test/exporter.js @@ -837,7 +837,7 @@ module.exports = (repo) => { ], cb), (file, cb) => { pull( - exporter(file.multihash, ipld), + exporter(file.cid, ipld), pull.asyncMap((file, cb) => readFile(file, cb)), pull.through(buffer => { expect(buffer).to.deep.equal(Buffer.from([0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07])) @@ -886,7 +886,7 @@ module.exports = (repo) => { }, (file, cb) => { pull( - exporter(file.multihash, ipld), + exporter(file.cid, ipld), pull.asyncMap((file, cb) => readFile(file, cb)), pull.through(buffer => { expect(buffer).to.deep.equal( @@ -910,7 +910,7 @@ module.exports = (repo) => { ], cb), (file, cb) => { pull( - exporter(file.multihash, ipld, { + exporter(file.cid, ipld, { offset: 4 }), pull.asyncMap((file, cb) => readFile(file, cb)), @@ -1004,11 +1004,11 @@ function createAndPersistNode (ipld, type, data, children, callback) { const links = [] children.forEach(child => { - const leaf = UnixFS.unmarshal(child.data) + const leaf = UnixFS.unmarshal(child.node.data) file.addBlockSize(leaf.fileSize()) - links.push(new DAGLink('', child.size, child.multihash)) + links.push(new DAGLink('', child.node.size, child.cid)) }) DAGNode.create(file.marshal(), links, (error, node) => { @@ -1017,7 +1017,12 @@ function createAndPersistNode (ipld, type, data, children, callback) { } ipld.put(node, { - cid: new CID(node.multihash) - }, (error) => callback(error, node)) + version: 1, + hashAlg: 'sha2-256', + format: 'dag-pb' + }, (error, cid) => callback(error, { + node, + cid + })) }) } diff --git a/test/helpers/collect-leaf-cids.js b/test/helpers/collect-leaf-cids.js index dd34d170..b0b78259 100644 --- a/test/helpers/collect-leaf-cids.js +++ b/test/helpers/collect-leaf-cids.js @@ -20,7 +20,7 @@ module.exports = (ipld, multihash, callback) => { } return callback( - null, node.links.map(link => new CID(link.multihash)) + null, node.links.map(link => link.cid) ) }), pull.filter(Boolean), diff --git a/test/importer.js b/test/importer.js index 6d973894..153e72af 100644 --- a/test/importer.js +++ b/test/importer.js @@ -132,7 +132,7 @@ const checkLeafNodeTypes = (ipld, options, expected, done) => { node.links.map(link => { return (done) => { waterfall([ - (next) => ipld.get(new CID(link.multihash), next), + (next) => ipld.get(link.cid, next), (result, next) => { const node = result.value const meta = UnixFs.unmarshal(node.data) diff --git a/test/with-dag-api.js b/test/with-dag-api.js index 892209cb..d758cc45 100644 --- a/test/with-dag-api.js +++ b/test/with-dag-api.js @@ -11,10 +11,8 @@ chai.use(require('dirty-chai')) const expect = chai.expect const pull = require('pull-stream') const loadFixture = require('aegir/fixtures') -const IPFS = require('ipfs') -const os = require('os') -const path = require('path') const CID = require('cids') +const IPLD = require('ipld') function stringifyMh (files) { return files.map((file) => { @@ -108,11 +106,6 @@ const strategyOverrides = { } describe('with dag-api', function () { - // TODO: waiting for IPFS support on windows, https://github.com/ipfs/js-ipfs-unixfs-engine/issues/196 - if (os.platform() === 'win32') { - return - } - strategies.forEach(strategy => { const baseFiles = strategyBaseFiles[strategy] const defaultResults = extend({}, baseFiles, { @@ -168,7 +161,7 @@ describe('with dag-api', function () { describe('importer: ' + strategy, function () { this.timeout(50 * 1000) - let node + let dag const options = { strategy: strategy, @@ -181,12 +174,15 @@ describe('with dag-api', function () { before(function (done) { 
this.timeout(30 * 1000) - node = new IPFS({ - repo: path.join(os.tmpdir(), 'unixfs-test-' + Math.random()), - start: false - }) + IPLD.inMemory((err, resolver) => { + if (err) { + return done(err) + } - node.on('ready', done) + dag = resolver + + done() + }) }) it('fails on bad input', (done) => { @@ -195,7 +191,7 @@ describe('with dag-api', function () { path: '200Bytes.txt', content: 'banana' }]), - importer(node.dag, options), + importer(dag, options), pull.onEnd((err) => { expect(err).to.exist() done() @@ -206,7 +202,7 @@ describe('with dag-api', function () { it('doesn\'t yield anything on empty source', (done) => { pull( pull.empty(), - importer(node.dag, options), + importer(dag, options), pull.collect((err, nodes) => { expect(err).to.not.exist() expect(nodes.length).to.be.eql(0) @@ -220,7 +216,7 @@ describe('with dag-api', function () { path: 'emptyfile', content: pull.empty() }]), - importer(node.dag, options), + importer(dag, options), pull.collect((err, nodes) => { expect(err).to.not.exist() expect(nodes.length).to.be.eql(1) @@ -242,7 +238,7 @@ describe('with dag-api', function () { content: pull.values([smallFile]) } ]), - importer(node.dag, options), + importer(dag, options), pull.onEnd((err) => { expect(err).to.exist() expect(err.message).to.be.eql('detected more than one root') @@ -257,7 +253,7 @@ describe('with dag-api', function () { path: '200Bytes.txt', content: pull.values([smallFile]) }]), - importer(node.dag, options), + importer(dag, options), pull.collect((err, files) => { expect(err).to.not.exist() expect(stringifyMh(files)).to.be.eql([expected['200Bytes.txt']]) @@ -272,7 +268,7 @@ describe('with dag-api', function () { path: '200Bytes.txt', content: smallFile }]), - importer(node.dag, options), + importer(dag, options), pull.collect((err, files) => { expect(err).to.not.exist() expect(stringifyMh(files)).to.be.eql([expected['200Bytes.txt']]) @@ -287,7 +283,7 @@ describe('with dag-api', function () { path: 'foo/bar/200Bytes.txt', content: pull.values([smallFile]) }]), - importer(node.dag, options), + importer(dag, options), pull.collect(collected) ) @@ -315,7 +311,7 @@ describe('with dag-api', function () { path: '1.2MiB.txt', content: pull.values([bigFile]) }]), - importer(node.dag, options), + importer(dag, options), pull.collect((err, files) => { expect(err).to.not.exist() expect(stringifyMh(files)).to.be.eql([expected['1.2MiB.txt']]) @@ -330,7 +326,7 @@ describe('with dag-api', function () { path: 'foo-big/1.2MiB.txt', content: pull.values([bigFile]) }]), - importer(node.dag, options), + importer(dag, options), pull.collect((err, files) => { expect(err).to.not.exist() @@ -349,7 +345,7 @@ describe('with dag-api', function () { pull.values([{ path: 'empty-dir' }]), - importer(node.dag, options), + importer(dag, options), pull.collect((err, files) => { expect(err).to.not.exist() @@ -369,7 +365,7 @@ describe('with dag-api', function () { path: 'pim/1.2MiB.txt', content: pull.values([bigFile]) }]), - importer(node.dag, options), + importer(dag, options), pull.collect((err, files) => { expect(err).to.not.exist() @@ -396,7 +392,7 @@ describe('with dag-api', function () { path: 'pam/1.2MiB.txt', content: pull.values([bigFile]) }]), - importer(node.dag, options), + importer(dag, options), pull.collect((err, files) => { expect(err).to.not.exist() @@ -409,25 +405,25 @@ describe('with dag-api', function () { function eachFile (file) { if (file.path === 'pam/pum/200Bytes.txt') { - expect(file.multihash).to.be.eql(expected['200Bytes.txt'].multihash) + 
expect(file.cid).to.be.eql(expected['200Bytes.txt'].cid) expect(file.size).to.be.eql(expected['200Bytes.txt'].size) } if (file.path === 'pam/pum/1.2MiB.txt') { - expect(file.multihash).to.be.eql(expected['1.2MiB.txt'].multihash) + expect(file.cid).to.be.eql(expected['1.2MiB.txt'].cid) expect(file.size).to.be.eql(expected['1.2MiB.txt'].size) } if (file.path === 'pam/pum') { const dir = expected['pam/pum'] - expect(file.multihash).to.be.eql(dir.multihash) + expect(file.cid).to.be.eql(dir.cid) expect(file.size).to.be.eql(dir.size) } if (file.path === 'pam/1.2MiB.txt') { - expect(file.multihash).to.be.eql(expected['1.2MiB.txt'].multihash) + expect(file.cid).to.be.eql(expected['1.2MiB.txt'].cid) expect(file.size).to.be.eql(expected['1.2MiB.txt'].size) } if (file.path === 'pam') { const dir = expected.pam - expect(file.multihash).to.be.eql(dir.multihash) + expect(file.cid).to.be.eql(dir.cid) expect(file.size).to.be.eql(dir.size) } }
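
Not part of the patch: a minimal usage sketch of the new src/utils/persist.js helper introduced above, inferred only from the signature and defaults visible in this diff (persist(node, ipld, options, callback) yields { cid, node }). The require path and sample values are illustrative, not taken from the repository.

'use strict'

const IPLD = require('ipld')
const UnixFS = require('ipfs-unixfs')
const { DAGNode } = require('ipld-dag-pb')
// illustrative path; points at the src/utils/persist.js added in this diff
const persist = require('./src/utils/persist')

IPLD.inMemory((err, ipld) => {
  if (err) throw err

  // wrap some bytes in a unixfs 'file' node, as the builder does for leaves
  const file = new UnixFS('file', Buffer.from('hello world'))

  DAGNode.create(file.marshal(), [], (err, node) => {
    if (err) throw err

    // options fall back to { cidVersion: 0, hashAlg: 'sha2-256', codec: 'dag-pb' };
    // per the helper above, a raw Buffer or a non-sha2-256 hashAlg switches to CIDv1,
    // and { onlyHash: true } computes the CID without calling ipld.put
    persist(node, ipld, {}, (err, result) => {
      if (err) throw err

      console.log(result.cid.toBaseEncodedString(), result.node.size)
    })
  })
})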