diff --git a/.travis.yml b/.travis.yml index 3c3df143..ab015ea4 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,5 +1,6 @@ language: node_js cache: npm +dist: focal branches: only: diff --git a/packages/ipfs-unixfs-exporter/.aegir.js b/packages/ipfs-unixfs-exporter/.aegir.js index e67cd911..139ce023 100644 --- a/packages/ipfs-unixfs-exporter/.aegir.js +++ b/packages/ipfs-unixfs-exporter/.aegir.js @@ -23,6 +23,7 @@ const buildConfig = { /** @type {import('aegir').PartialOptions} */ module.exports = { build: { + bundlesizeMax: '34KB', config: buildConfig }, test: { diff --git a/packages/ipfs-unixfs-exporter/package.json b/packages/ipfs-unixfs-exporter/package.json index b9367685..6fb6330a 100644 --- a/packages/ipfs-unixfs-exporter/package.json +++ b/packages/ipfs-unixfs-exporter/package.json @@ -12,9 +12,9 @@ "test": "aegir test", "build": "aegir build", "clean": "rimraf ./dist", - "lint": "aegir ts --check && aegir lint", + "lint": "aegir ts -p check && aegir lint", "coverage": "nyc -s npm run test -t node && nyc report --reporter=html", - "depcheck": "aegir dep-check -i @types/mocha -i @types/sinon -i nyc -i abort-controller -i rimraf -i ipfs-core-types -i copy -i util -i crypto-browserify -i events -i readable-stream" + "depcheck": "aegir dep-check -i @types/mocha -i @types/sinon -i nyc -i abort-controller -i rimraf -i copy -i util -i crypto-browserify -i events -i readable-stream -i interface-blockstore" }, "repository": { "type": "git", @@ -36,37 +36,36 @@ "@types/mocha": "^8.2.1", "@types/sinon": "^10.0.0", "abort-controller": "^3.0.0", - "aegir": "^33.1.0", + "aegir": "^34.0.0", "copy": "^0.3.2", "crypto-browserify": "^3.12.0", "detect-node": "^2.0.4", "events": "^3.3.0", - "ipfs-core-types": "^0.3.1", "ipfs-unixfs-importer": "^7.0.3", - "ipld": "^0.29.0", - "ipld-block": "^0.11.1", - "ipld-dag-pb": "^0.22.2", - "ipld-in-memory": "^8.0.0", "it-all": "^1.0.5", "it-buffer-stream": "^2.0.0", "it-first": "^1.0.6", "merge-options": "^3.0.4", - "multicodec": "^3.0.1", + "murmurhash3js-revisited": "^3.0.0", "native-abort-controller": "^1.0.3", "nyc": "^15.0.0", "readable-stream": "^3.6.0", "rimraf": "^3.0.2", - "sinon": "^10.0.0", + "sinon": "^11.1.1", "uint8arrays": "^2.1.2", "util": "^0.12.3" }, "dependencies": { - "cids": "^1.1.5", + "@ipld/dag-cbor": "^6.0.4", + "@ipld/dag-pb": "^2.0.2", "err-code": "^3.0.1", "hamt-sharding": "^2.0.0", + "interface-blockstore": "^1.0.0", "ipfs-unixfs": "^4.0.3", "it-last": "^1.0.5", - "multihashing-async": "^2.1.0" + "multiformats": "^9.0.4", + "murmurhash3js-revisited": "^3.0.0", + "uint8arrays": "^2.1.5" }, "types": "dist/src/index.d.ts", "files": [ diff --git a/packages/ipfs-unixfs-exporter/src/index.js b/packages/ipfs-unixfs-exporter/src/index.js index e9cefe62..bc3bfa91 100644 --- a/packages/ipfs-unixfs-exporter/src/index.js +++ b/packages/ipfs-unixfs-exporter/src/index.js @@ -1,14 +1,13 @@ 'use strict' const errCode = require('err-code') -const CID = require('cids') +const { CID } = require('multiformats/cid') const resolve = require('./resolvers') const last = require('it-last') /** * @typedef {import('ipfs-unixfs').UnixFS} UnixFS - * @typedef {import('ipld-dag-pb').DAGNode} DAGNode - * @typedef {import('ipld')} IPLD + * @typedef {import('interface-blockstore').Blockstore} Blockstore * @typedef {import('./types').ExporterOptions} ExporterOptions * @typedef {import('./types').UnixFSFile} UnixFSFile * @typedef {import('./types').UnixFSDirectory} UnixFSDirectory @@ -32,14 +31,15 @@ const toPathComponents = (path = '') => { const cidAndRest = (path) 
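The package.json changes above swap the legacy ipld/cids stack for multiformats and interface-blockstore. As orientation only (this mapping is not part of the diff), the old imports correspond roughly to the new ones as follows; module paths are the published entry points of the new libraries:

```js
// const CID = require('cids')                          // old: new CID(...), CID.isCID(...)
const { CID } = require('multiformats/cid')             // new: CID.parse(), CID.decode(), CID.asCID()

// const { DAGNode, DAGLink } = require('ipld-dag-pb')  // old: class-based nodes/links
const dagPb = require('@ipld/dag-pb')                   // new: encode()/decode() of plain { Data, Links } objects

// const multihashing = require('multihashing-async')   // old: named hash algorithms
const { sha256 } = require('multiformats/hashes/sha2')  // new: hashers returning MultihashDigest objects
const { identity } = require('multiformats/hashes/identity')

// require('ipld') / require('ipld-in-memory')           // old: codec-aware get/put
// new: any interface-blockstore implementation that stores and returns raw block bytes
```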
=> { if (path instanceof Uint8Array) { return { - cid: new CID(path), + cid: CID.decode(path), toResolve: [] } } - if (CID.isCID(path)) { + const cid = CID.asCID(path) + if (cid) { return { - cid: path, + cid, toResolve: [] } } @@ -52,7 +52,7 @@ const cidAndRest = (path) => { const output = toPathComponents(path) return { - cid: new CID(output[0]), + cid: CID.parse(output[0]), toResolve: output.slice(1) } } @@ -62,10 +62,10 @@ const cidAndRest = (path) => { /** * @param {string | CID} path - * @param {IPLD} ipld + * @param {Blockstore} blockstore * @param {ExporterOptions} [options] */ -async function * walkPath (path, ipld, options = {}) { +async function * walkPath (path, blockstore, options = {}) { let { cid, toResolve @@ -75,7 +75,7 @@ async function * walkPath (path, ipld, options = {}) { const startingDepth = toResolve.length while (true) { - const result = await resolve(cid, name, entryPath, toResolve, startingDepth, ipld, options) + const result = await resolve(cid, name, entryPath, toResolve, startingDepth, blockstore, options) if (!result.entry && !result.next) { throw errCode(new Error(`Could not resolve ${path}`), 'ERR_NOT_FOUND') @@ -99,11 +99,11 @@ async function * walkPath (path, ipld, options = {}) { /** * @param {string | CID} path - * @param {IPLD} ipld + * @param {Blockstore} blockstore * @param {ExporterOptions} [options] */ -async function exporter (path, ipld, options = {}) { - const result = await last(walkPath(path, ipld, options)) +async function exporter (path, blockstore, options = {}) { + const result = await last(walkPath(path, blockstore, options)) if (!result) { throw errCode(new Error(`Could not resolve ${path}`), 'ERR_NOT_FOUND') @@ -114,11 +114,11 @@ async function exporter (path, ipld, options = {}) { /** * @param {string | CID} path - * @param {IPLD} ipld + * @param {Blockstore} blockstore * @param {ExporterOptions} [options] */ -async function * recursive (path, ipld, options = {}) { - const node = await exporter(path, ipld, options) +async function * recursive (path, blockstore, options = {}) { + const node = await exporter(path, blockstore, options) if (!node) { return diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/dag-cbor.js b/packages/ipfs-unixfs-exporter/src/resolvers/dag-cbor.js index 40f69809..6a077d3d 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/dag-cbor.js +++ b/packages/ipfs-unixfs-exporter/src/resolvers/dag-cbor.js @@ -1,7 +1,8 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const errCode = require('err-code') +const dagCbor = require('@ipld/dag-cbor') /** * @typedef {import('../types').Resolver} Resolver @@ -10,9 +11,9 @@ const errCode = require('err-code') /** * @type {Resolver} */ -const resolve = async (cid, name, path, toResolve, resolve, depth, ipld, options) => { - const object = await ipld.get(cid, options) - const block = await ipld.get(new CID(1, 'raw', cid.multihash)) +const resolve = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => { + const block = await blockstore.get(cid) + const object = dagCbor.decode(block) let subObject = object let subPath = path @@ -24,7 +25,8 @@ const resolve = async (cid, name, path, toResolve, resolve, depth, ipld, options toResolve.shift() subPath = `${subPath}/${prop}` - if (CID.isCID(subObject[prop])) { + const subObjectCid = CID.asCID(subObject[prop]) + if (subObjectCid) { return { entry: { type: 'object', @@ -39,7 +41,7 @@ const resolve = async (cid, name, path, toResolve, resolve, depth, ipld, options } }, 
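The rewritten cidAndRest above normalizes the three accepted path forms with the multiformats CID class: bytes go through CID.decode(), CID-like objects through CID.asCID() (which replaces the old CID.isCID() check), and strings through CID.parse(). A minimal standalone sketch of the same API, reusing a CID string that appears in the test fixtures further down:

```js
const { CID } = require('multiformats/cid')

// a valid CIDv1 string taken from the test fixtures below
const cid = CID.parse('bafybeidu2qqwriogfndznz32swi5r4p2wruf6ztu5k7my53tsezwhncs5y')

// the binary form round-trips through CID.decode()
const same = CID.decode(cid.bytes)

// CID.asCID() returns a CID for anything CID-like and null for everything else,
// so it doubles as the old isCID() check and as a normalizer
console.log(CID.asCID(same) !== null) // true
console.log(CID.asCID('not a cid'))   // null
console.log(same.equals(cid))         // true
```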
next: { - cid: subObject[prop], + cid: subObjectCid, name: prop, path: subPath, toResolve diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/identity.js b/packages/ipfs-unixfs-exporter/src/resolvers/identity.js index 080ee182..2c2d015a 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/identity.js +++ b/packages/ipfs-unixfs-exporter/src/resolvers/identity.js @@ -3,7 +3,7 @@ const errCode = require('err-code') const extractDataFromBlock = require('../utils/extract-data-from-block') const validateOffsetAndLength = require('../utils/validate-offset-and-length') -const mh = require('multihashing-async').multihash +const mh = require('multiformats/hashes/digest') /** * @typedef {import('../types').ExporterOptions} ExporterOptions @@ -32,12 +32,11 @@ const rawContent = (node) => { /** * @type {Resolver} */ -const resolve = async (cid, name, path, toResolve, resolve, depth, ipld, options) => { +const resolve = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => { if (toResolve.length) { throw errCode(new Error(`No link named ${path} found in raw node ${cid}`), 'ERR_NOT_FOUND') } - - const buf = await mh.decode(cid.multihash) + const buf = await mh.decode(cid.multihash.bytes) return { entry: { @@ -47,7 +46,7 @@ const resolve = async (cid, name, path, toResolve, resolve, depth, ipld, options cid, content: rawContent(buf.digest), depth, - size: buf.length, + size: buf.digest.length, node: buf.digest } } diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/index.js b/packages/ipfs-unixfs-exporter/src/resolvers/index.js index ac304a1c..1ec5a034 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/index.js +++ b/packages/ipfs-unixfs-exporter/src/resolvers/index.js @@ -2,11 +2,12 @@ const errCode = require('err-code') +const dagPb = require('@ipld/dag-pb') +const dagCbor = require('@ipld/dag-cbor') +const raw = require('multiformats/codecs/raw') +const { identity } = require('multiformats/hashes/identity') + /** - * @typedef {import('cids')} CID - * @typedef {import('ipld')} IPLD - * @typedef {import('../types').ExporterOptions} ExporterOptions - * @typedef {import('../types').UnixFSEntry} UnixFSEntry * @typedef {import('../types').Resolver} Resolver * @typedef {import('../types').Resolve} Resolve */ @@ -15,23 +16,23 @@ const errCode = require('err-code') * @type {{ [ key: string ]: Resolver }} */ const resolvers = { - 'dag-pb': require('./unixfs-v1'), - raw: require('./raw'), - 'dag-cbor': require('./dag-cbor'), - identity: require('./identity') + [dagPb.code]: require('./unixfs-v1'), + [raw.code]: require('./raw'), + [dagCbor.code]: require('./dag-cbor'), + [identity.code]: require('./identity') } /** * @type {Resolve} */ -function resolve (cid, name, path, toResolve, depth, ipld, options) { - const resolver = resolvers[cid.codec] +function resolve (cid, name, path, toResolve, depth, blockstore, options) { + const resolver = resolvers[cid.code] if (!resolver) { - throw errCode(new Error(`No resolver for codec ${cid.codec}`), 'ERR_NO_RESOLVER') + throw errCode(new Error(`No resolver for code ${cid.code}`), 'ERR_NO_RESOLVER') } - return resolver(cid, name, path, toResolve, resolve, depth, ipld, options) + return resolver(cid, name, path, toResolve, resolve, depth, blockstore, options) } module.exports = resolve diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/raw.js b/packages/ipfs-unixfs-exporter/src/resolvers/raw.js index 5ec94544..a82e511b 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/raw.js +++ 
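Two things change in the resolver plumbing above: the resolver table in resolvers/index.js is now keyed by numeric multicodec codes instead of codec name strings, so dispatch happens on cid.code, and the identity resolver recovers the inlined payload by decoding cid.multihash.bytes with multiformats/hashes/digest. A sketch of the code-keyed lookup (the pick helper is hypothetical, the codes are the registered multicodec values):

```js
const dagPb = require('@ipld/dag-pb')                        // code 0x70
const dagCbor = require('@ipld/dag-cbor')                    // code 0x71
const raw = require('multiformats/codecs/raw')               // code 0x55
const { identity } = require('multiformats/hashes/identity') // code 0x00

// resolver table keyed by numeric multicodec code rather than the old cid.codec string
const resolvers = {
  [dagPb.code]: 'unixfs-v1',
  [raw.code]: 'raw',
  [dagCbor.code]: 'dag-cbor',
  [identity.code]: 'identity'
}

/** @param {import('multiformats/cid').CID} cid */
const pick = (cid) => resolvers[cid.code] // undefined here is what the exporter turns into ERR_NO_RESOLVER
```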
b/packages/ipfs-unixfs-exporter/src/resolvers/raw.js @@ -30,12 +30,12 @@ const rawContent = (node) => { /** * @type {import('../types').Resolver} */ -const resolve = async (cid, name, path, toResolve, resolve, depth, ipld, options) => { +const resolve = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => { if (toResolve.length) { throw errCode(new Error(`No link named ${path} found in raw node ${cid}`), 'ERR_NOT_FOUND') } - const buf = await ipld.get(cid, options) + const block = await blockstore.get(cid, options) return { entry: { @@ -43,10 +43,10 @@ const resolve = async (cid, name, path, toResolve, resolve, depth, ipld, options name, path, cid, - content: rawContent(buf), + content: rawContent(block), depth, - size: buf.length, - node: buf + size: block.length, + node: block } } } diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/directory.js b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/directory.js index f30fac00..9433f0b4 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/directory.js +++ b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/directory.js @@ -9,7 +9,7 @@ /** * @type {UnixfsV1Resolver} */ -const directoryContent = (cid, node, unixfs, path, resolve, depth, ipld) => { +const directoryContent = (cid, node, unixfs, path, resolve, depth, blockstore) => { /** * @param {ExporterOptions} [options] * @returns {UnixfsV1DirectoryContent} @@ -20,7 +20,7 @@ const directoryContent = (cid, node, unixfs, path, resolve, depth, ipld) => { const links = node.Links.slice(offset, length) for (const link of links) { - const result = await resolve(link.Hash, link.Name, `${path}/${link.Name}`, [], depth + 1, ipld, options) + const result = await resolve(link.Hash, link.Name || '', `${path}/${link.Name || ''}`, [], depth + 1, blockstore, options) if (result.entry) { yield result.entry diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/file.js b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/file.js index 4bcdba4a..2ba6a5ca 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/file.js +++ b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/file.js @@ -4,23 +4,24 @@ const extractDataFromBlock = require('../../../utils/extract-data-from-block') const validateOffsetAndLength = require('../../../utils/validate-offset-and-length') const { UnixFS } = require('ipfs-unixfs') const errCode = require('err-code') +const dagPb = require('@ipld/dag-pb') +const dagCbor = require('@ipld/dag-cbor') +const raw = require('multiformats/codecs/raw') /** * @typedef {import('../../../types').ExporterOptions} ExporterOptions - * @typedef {import('ipld')} IPLD - * @typedef {import('ipld-dag-pb').DAGNode} DAGNode - */ - -/** - * @param {IPLD} ipld - * @param {DAGNode} node + * @typedef {import('interface-blockstore').Blockstore} Blockstore + * @typedef {import('@ipld/dag-pb').PBNode} PBNode + * + * @param {Blockstore} blockstore + * @param {PBNode} node * @param {number} start * @param {number} end * @param {number} streamPosition * @param {ExporterOptions} options * @returns {AsyncIterable} */ -async function * emitBytes (ipld, node, start, end, streamPosition = 0, options) { +async function * emitBytes (blockstore, node, start, end, streamPosition = 0, options) { // a `raw` node if (node instanceof Uint8Array) { const buf = extractDataFromBlock(node, streamPosition, start, end) @@ -34,6 +35,10 @@ async function * emitBytes (ipld, node, start, end, 
streamPosition = 0, options) return streamPosition } + if (node.Data == null) { + throw errCode(new Error('no data in PBNode'), 'ERR_NOT_UNIXFS') + } + let file try { @@ -63,11 +68,25 @@ async function * emitBytes (ipld, node, start, end, streamPosition = 0, options) if ((start >= childStart && start < childEnd) || // child has offset byte (end > childStart && end <= childEnd) || // child has end byte (start < childStart && end > childEnd)) { // child is between offset and end bytes - const child = await ipld.get(childLink.Hash, { + const block = await blockstore.get(childLink.Hash, { signal: options.signal }) + let child + switch (childLink.Hash.code) { + case dagPb.code: + child = await dagPb.decode(block) + break + case raw.code: + child = block + break + case dagCbor.code: + child = await dagCbor.decode(block) + break + default: + throw Error(`Unsupported codec: ${childLink.Hash.code}`) + } - for await (const buf of emitBytes(ipld, child, start, end, streamPosition, options)) { + for await (const buf of emitBytes(blockstore, child, start, end, streamPosition, options)) { streamPosition += buf.length yield buf @@ -82,7 +101,7 @@ async function * emitBytes (ipld, node, start, end, streamPosition = 0, options) /** * @type {import('../').UnixfsV1Resolver} */ -const fileContent = (cid, node, unixfs, path, resolve, depth, ipld) => { +const fileContent = (cid, node, unixfs, path, resolve, depth, blockstore) => { /** * @param {ExporterOptions} options */ @@ -101,7 +120,7 @@ const fileContent = (cid, node, unixfs, path, resolve, depth, ipld) => { const start = offset const end = offset + length - return emitBytes(ipld, node, start, end, 0, options) + return emitBytes(blockstore, node, start, end, 0, options) } return yieldFileContent diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/hamt-sharded-directory.js b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/hamt-sharded-directory.js index 25d784b5..93bf11dd 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/hamt-sharded-directory.js +++ b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/hamt-sharded-directory.js @@ -1,54 +1,57 @@ 'use strict' +const { decode } = require('@ipld/dag-pb') + /** - * @typedef {import('ipld-dag-pb').DAGNode} DAGNode - * @typedef {import('ipld')} IPLD + * @typedef {import('interface-blockstore').Blockstore} Blockstore * @typedef {import('../../../types').ExporterOptions} ExporterOptions * @typedef {import('../../../types').Resolve} Resolve * @typedef {import('../../../types').UnixfsV1DirectoryContent} UnixfsV1DirectoryContent * @typedef {import('../../../types').UnixfsV1Resolver} UnixfsV1Resolver + * @typedef {import('@ipld/dag-pb').PBNode} PBNode */ /** * @type {UnixfsV1Resolver} */ -const hamtShardedDirectoryContent = (cid, node, unixfs, path, resolve, depth, ipld) => { +const hamtShardedDirectoryContent = (cid, node, unixfs, path, resolve, depth, blockstore) => { /** * @param {ExporterOptions} options * */ function yieldHamtDirectoryContent (options = {}) { - return listDirectory(node, path, resolve, depth, ipld, options) + return listDirectory(node, path, resolve, depth, blockstore, options) } return yieldHamtDirectoryContent } /** - * @param {DAGNode} node + * @param {PBNode} node * @param {string} path * @param {Resolve} resolve * @param {number} depth - * @param {IPLD} ipld + * @param {Blockstore} blockstore * @param {ExporterOptions} options * * @returns {UnixfsV1DirectoryContent} */ -async function * listDirectory (node, path, 
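emitBytes in file.js now fetches raw block bytes from the blockstore and decodes them itself, switching on the codec code recorded in the child link's CID. The same decode step extracted into a standalone sketch (loadNode is a hypothetical helper; blockstore is any interface-blockstore implementation, and only get() is assumed here):

```js
const dagPb = require('@ipld/dag-pb')
const dagCbor = require('@ipld/dag-cbor')
const raw = require('multiformats/codecs/raw')

/**
 * Fetch a block and decode it according to the codec encoded in its CID.
 *
 * @param {import('interface-blockstore').Blockstore} blockstore
 * @param {import('multiformats/cid').CID} cid
 */
async function loadNode (blockstore, cid) {
  const bytes = await blockstore.get(cid)

  switch (cid.code) {
    case dagPb.code:   // 0x70 - dag-pb node: { Data, Links }
      return dagPb.decode(bytes)
    case raw.code:     // 0x55 - raw leaf: the bytes are the file content
      return bytes
    case dagCbor.code: // 0x71 - dag-cbor object
      return dagCbor.decode(bytes)
    default:
      throw new Error(`Unsupported codec: ${cid.code}`)
  }
}
```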
resolve, depth, ipld, options) { +async function * listDirectory (node, path, resolve, depth, blockstore, options) { const links = node.Links for (const link of links) { - const name = link.Name.substring(2) + const name = link.Name != null ? link.Name.substring(2) : null if (name) { - const result = await resolve(link.Hash, name, `${path}/${name}`, [], depth + 1, ipld, options) + const result = await resolve(link.Hash, name, `${path}/${name}`, [], depth + 1, blockstore, options) yield result.entry } else { // descend into subshard - node = await ipld.get(link.Hash) + const block = await blockstore.get(link.Hash) + node = decode(block) - for await (const file of listDirectory(node, path, resolve, depth, ipld, options)) { + for await (const file of listDirectory(node, path, resolve, depth, blockstore, options)) { yield file } } diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/raw.js b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/raw.js index 1ac4b17e..95a38870 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/raw.js +++ b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/raw.js @@ -11,7 +11,7 @@ const validateOffsetAndLength = require('../../../utils/validate-offset-and-leng /** * @type {UnixfsV1Resolver} */ -const rawContent = (cid, node, unixfs, path, resolve, depth, ipld) => { +const rawContent = (cid, node, unixfs, path, resolve, depth, blockstore) => { /** * @param {ExporterOptions} options */ diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/index.js b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/index.js index 441c17be..86b70e91 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/index.js +++ b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/index.js @@ -3,20 +3,17 @@ const errCode = require('err-code') const { UnixFS } = require('ipfs-unixfs') const findShardCid = require('../../utils/find-cid-in-shard') +const { decode } = require('@ipld/dag-pb') /** - * @typedef {import('cids')} CID - * @typedef {import('ipld')} IPLD - * @typedef {import('ipld-dag-pb').DAGNode} DAGNode - * @typedef {import('../../types').ExporterOptions} ExporterOptions - * @typedef {import('../../types').UnixFSEntry} UnixFSEntry * @typedef {import('../../types').Resolve} Resolve * @typedef {import('../../types').Resolver} Resolver * @typedef {import('../../types').UnixfsV1Resolver} UnixfsV1Resolver + * @typedef {import('@ipld/dag-pb').PBNode} PBNode */ /** - * @param {import('ipld-dag-pb').DAGNode} node + * @param {PBNode} node * @param {string} name */ const findLinkCid = (node, name) => { @@ -33,10 +30,10 @@ const contentExporters = { file: require('./content/file'), directory: require('./content/directory'), 'hamt-sharded-directory': require('./content/hamt-sharded-directory'), - metadata: (cid, node, unixfs, path, resolve, depth, ipld) => { + metadata: (cid, node, unixfs, path, resolve, depth, blockstore) => { return () => [] }, - symlink: (cid, node, unixfs, path, resolve, depth, ipld) => { + symlink: (cid, node, unixfs, path, resolve, depth, blockstore) => { return () => [] } } @@ -44,8 +41,9 @@ const contentExporters = { /** * @type {Resolver} */ -const unixFsResolver = async (cid, name, path, toResolve, resolve, depth, ipld, options) => { - const node = await ipld.get(cid, options) +const unixFsResolver = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => { + const block = await blockstore.get(cid, options) + const node = decode(block) let unixfs let next 
@@ -53,6 +51,10 @@ const unixFsResolver = async (cid, name, path, toResolve, resolve, depth, ipld, name = cid.toString() } + if (node.Data == null) { + throw errCode(new Error('no data in PBNode'), 'ERR_NOT_UNIXFS') + } + try { unixfs = UnixFS.unmarshal(node.Data) } catch (err) { @@ -69,7 +71,7 @@ const unixFsResolver = async (cid, name, path, toResolve, resolve, depth, ipld, if (unixfs && unixfs.type === 'hamt-sharded-directory') { // special case - unixfs v1 hamt shards - linkCid = await findShardCid(node, toResolve[0], ipld) + linkCid = await findShardCid(node, toResolve[0], blockstore) } else { linkCid = findLinkCid(node, toResolve[0]) } @@ -97,7 +99,7 @@ const unixFsResolver = async (cid, name, path, toResolve, resolve, depth, ipld, path, cid, // @ts-ignore - content: contentExporters[unixfs.type](cid, node, unixfs, path, resolve, depth, ipld), + content: contentExporters[unixfs.type](cid, node, unixfs, path, resolve, depth, blockstore), unixfs, depth, node, diff --git a/packages/ipfs-unixfs-exporter/src/types.d.ts b/packages/ipfs-unixfs-exporter/src/types.d.ts index 52f3bf1e..ea7e7131 100644 --- a/packages/ipfs-unixfs-exporter/src/types.d.ts +++ b/packages/ipfs-unixfs-exporter/src/types.d.ts @@ -1,6 +1,7 @@ -import CID from 'cids' -import UnixFS from 'ipfs-unixfs' -import DAGNode from 'ipld-dag-pb' +import { CID } from 'multiformats/cid' +import { UnixFS } from 'ipfs-unixfs' +import { PBNode } from '@ipld/dag-pb' +import { Blockstore } from 'interface-blockstore' interface ExporterOptions { offset?: number @@ -10,7 +11,7 @@ interface ExporterOptions { } interface Exportable { - type: 'file' | 'directory' | 'object' | 'raw' | 'identity', + type: 'file' | 'directory' | 'object' | 'raw' | 'identity' name: string path: string cid: CID @@ -22,13 +23,13 @@ interface Exportable { interface UnixFSFile extends Exportable { type: 'file' unixfs: UnixFS - node: DAGNode + node: PBNode } interface UnixFSDirectory extends Exportable { type: 'directory' unixfs: UnixFS - node: DAGNode + node: PBNode } interface ObjectNode extends Exportable { @@ -60,10 +61,10 @@ interface ResolveResult { next?: NextResult } -type Resolve = (cid: CID, name: string, path: string, toResolve: string[], depth: number, ipld: IPLD, options: ExporterOptions) => Promise -type Resolver = (cid: CID, name: string, path: string, toResolve: string[], resolve: Resolve, depth: number, ipld: IPLD, options: ExporterOptions) => Promise +interface Resolve { (cid: CID, name: string, path: string, toResolve: string[], depth: number, blockstore: Blockstore, options: ExporterOptions): Promise } +interface Resolver { (cid: CID, name: string, path: string, toResolve: string[], resolve: Resolve, depth: number, blockstore: Blockstore, options: ExporterOptions): Promise } type UnixfsV1FileContent = AsyncIterable | Iterable type UnixfsV1DirectoryContent = AsyncIterable | Iterable type UnixfsV1Content = UnixfsV1FileContent | UnixfsV1DirectoryContent -type UnixfsV1Resolver = (cid: CID, node: DAGNode, unixfs: UnixFS, path: string, resolve: Resolve, depth: number, ipld: IPLD) => (options: ExporterOptions) => UnixfsV1Content +interface UnixfsV1Resolver { (cid: CID, node: PBNode, unixfs: UnixFS, path: string, resolve: Resolve, depth: number, blockstore: Blockstore): (options: ExporterOptions) => UnixfsV1Content } diff --git a/packages/ipfs-unixfs-exporter/src/utils/find-cid-in-shard.js b/packages/ipfs-unixfs-exporter/src/utils/find-cid-in-shard.js index 9b48fede..5c92bdcd 100644 --- a/packages/ipfs-unixfs-exporter/src/utils/find-cid-in-shard.js +++ 
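unixFsResolver above now decodes the fetched block into a plain PBNode and guards against a missing Data field before unmarshalling, since PBNode.Data is optional in @ipld/dag-pb. A compact sketch of that sequence (readUnixFS is a hypothetical helper name):

```js
const { decode } = require('@ipld/dag-pb')
const { UnixFS } = require('ipfs-unixfs')

/**
 * @param {import('interface-blockstore').Blockstore} blockstore
 * @param {import('multiformats/cid').CID} cid
 */
async function readUnixFS (blockstore, cid) {
  const node = decode(await blockstore.get(cid)) // { Data?: Uint8Array, Links: PBLink[] }

  if (node.Data == null) {
    throw new Error('no data in PBNode') // the exporter raises ERR_NOT_UNIXFS here
  }

  // also throws if Data is not a valid UnixFS protobuf, which the exporter maps to ERR_NOT_UNIXFS
  const unixfs = UnixFS.unmarshal(node.Data)

  return { node, unixfs } // unixfs.type, unixfs.fileSize(), unixfs.blockSizes, ...
}
```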
b/packages/ipfs-unixfs-exporter/src/utils/find-cid-in-shard.js @@ -1,44 +1,39 @@ 'use strict' const { Bucket, createHAMT } = require('hamt-sharding') -const multihashing = require('multihashing-async') +const { decode } = require('@ipld/dag-pb') +// @ts-ignore - no types available +const mur = require('murmurhash3js-revisited') +const uint8ArrayFromString = require('uint8arrays/from-string') /** + * @typedef {import('interface-blockstore').Blockstore} Blockstore + * @typedef {import('multiformats/cid').CID} CID * @typedef {import('../types').ExporterOptions} ExporterOptions - * @typedef {import('ipld')} IPLD - * @typedef {import('cids')} CID + * @typedef {import('@ipld/dag-pb').PBNode} PBNode + * @typedef {import('@ipld/dag-pb').PBLink} PBLink */ -// FIXME: this is copy/pasted from ipfs-unixfs-importer/src/dir-sharded.js +// FIXME: this is copy/pasted from ipfs-unixfs-importer/src/options.js /** * @param {Uint8Array} buf */ const hashFn = async function (buf) { - const hash = await multihashing(buf, 'murmur3-128') - - // Multihashing inserts preamble of 2 bytes. Remove it. - // Also, murmur3 outputs 128 bit but, accidentally, IPFS Go's - // implementation only uses the first 64, so we must do the same - // for parity.. - const justHash = hash.slice(2, 10) - const length = justHash.length - const result = new Uint8Array(length) - // TODO: invert buffer because that's how Go impl does it - for (let i = 0; i < length; i++) { - result[length - i - 1] = justHash[i] - } - - return result + return uint8ArrayFromString(mur.x64.hash128(buf), 'base16').slice(0, 8).reverse() } /** - * @param {import('ipld-dag-pb').DAGLink[]} links + * @param {PBLink[]} links * @param {Bucket} bucket * @param {Bucket} rootBucket */ const addLinksToHamtBucket = (links, bucket, rootBucket) => { return Promise.all( links.map(link => { + if (link.Name == null) { + // TODO(@rvagg): what do? 
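The replacement hashFn above reproduces what ipfs-unixfs-importer uses for HAMT shards: murmur3-x64-128 of the key, keeping only the first 8 bytes and reversing them for parity with the Go implementation. A small usage sketch against hamt-sharding, assuming the same hashFn; the key and return values are illustrative only:

```js
const { createHAMT } = require('hamt-sharding')
// @ts-ignore - murmurhash3js-revisited ships no types
const mur = require('murmurhash3js-revisited')
const uint8ArrayFromString = require('uint8arrays/from-string')

/** @param {Uint8Array} buf */
const hashFn = async (buf) => {
  // x64.hash128 returns a hex string; keep the first 8 bytes, reversed for go-ipfs parity
  return uint8ArrayFromString(mur.x64.hash128(buf), 'base16').slice(0, 8).reverse()
}

async function demo () {
  // the same hashFn drives shard layout on import and shard traversal on export
  const bucket = createHAMT({ hashFn })

  await bucket.put('file-1', true)
  console.log(await bucket.get('file-1')) // true
}

demo()
```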
this is technically possible + throw new Error('Unexpected Link without a Name') + } if (link.Name.length === 2) { const pos = parseInt(link.Name, 16) @@ -88,14 +83,14 @@ const toBucketPath = (position) => { * @property {Bucket} rootBucket * @property {Bucket} lastBucket * - * @param {import('ipld-dag-pb').DAGNode} node + * @param {PBNode} node * @param {string} name - * @param {IPLD} ipld + * @param {Blockstore} blockstore * @param {ShardTraversalContext} [context] * @param {ExporterOptions} [options] * @returns {Promise} */ -const findShardCid = async (node, name, ipld, context, options) => { +const findShardCid = async (node, name, blockstore, context, options) => { if (!context) { const rootBucket = createHAMT({ hashFn @@ -121,6 +116,10 @@ const findShardCid = async (node, name, ipld, context, options) => { } const link = node.Links.find(link => { + if (link.Name == null) { + return false + } + const entryPrefix = link.Name.substring(0, 2) const entryName = link.Name.substring(2) @@ -141,15 +140,16 @@ const findShardCid = async (node, name, ipld, context, options) => { return null } - if (link.Name.substring(2) === name) { + if (link.Name != null && link.Name.substring(2) === name) { return link.Hash } context.hamtDepth++ - node = await ipld.get(link.Hash, options) + const block = await blockstore.get(link.Hash, options) + node = decode(block) - return findShardCid(node, name, ipld, context, options) + return findShardCid(node, name, blockstore, context, options) } module.exports = findShardCid diff --git a/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.js b/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.js index c4bce877..91a43275 100644 --- a/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.js +++ b/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.js @@ -2,39 +2,25 @@ 'use strict' const { expect } = require('aegir/utils/chai') -// @ts-ignore -const IPLD = require('ipld') -// @ts-ignore -const inMemory = require('ipld-in-memory') const { UnixFS } = require('ipfs-unixfs') -const mh = require('multihashing-async').multihash -const mc = require('multicodec') const all = require('it-all') const last = require('it-last') const randomBytes = require('it-buffer-stream') const { exporter, walkPath } = require('../src') const { importer } = require('ipfs-unixfs-importer') -const { - DAGLink, - DAGNode -} = require('ipld-dag-pb') +const dagPb = require('@ipld/dag-pb') const blockApi = require('./helpers/block') const uint8ArrayConcat = require('uint8arrays/concat') const asAsyncIterable = require('./helpers/as-async-iterable') - -/** - * @typedef {import('cids')} CID - */ +const { CID } = require('multiformats/cid') +const { sha256 } = require('multiformats/hashes/sha2') const SHARD_SPLIT_THRESHOLD = 10 describe('exporter sharded', function () { this.timeout(30000) - /** @type {import('ipld')} */ - let ipld - /** @type {import('ipfs-unixfs-importer/src/types').BlockAPI} */ - let block + const block = blockApi() /** * @param {number} numFiles @@ -72,11 +58,6 @@ describe('exporter sharded', function () { return result.cid } - before(async () => { - ipld = await inMemory(IPLD) - block = blockApi(ipld) - }) - it('exports a sharded directory', async () => { /** @type {{ [key: string]: { content: Uint8Array, cid?: CID }}} */ const files = {} @@ -110,14 +91,18 @@ describe('exporter sharded', function () { files[imported.path].cid = imported.cid }) - const dir = await ipld.get(dirCid) + const encodedBlock = await block.get(dirCid) + const dir = 
dagPb.decode(encodedBlock) + if (!dir.Data) { + throw Error('PBNode Data undefined') + } const dirMetadata = UnixFS.unmarshal(dir.Data) expect(dirMetadata.type).to.equal('hamt-sharded-directory') - const exported = await exporter(dirCid, ipld) + const exported = await exporter(dirCid, block) - expect(exported.cid.equals(dirCid)).to.be.true() + expect(exported.cid.toString()).to.be.equal(dirCid.toString()) if (exported.type !== 'directory') { throw new Error('Expected directory') @@ -140,7 +125,8 @@ describe('exporter sharded', function () { const data = uint8ArrayConcat(await all(dirFile.content())) // validate the CID - expect(files[dirFile.name]).to.have.property('cid').that.deep.equals(dirFile.cid) + // @ts-ignore - files[dirFile.name].cid is defined + expect(files[dirFile.name].cid.toString()).that.deep.equals(dirFile.cid.toString()) // validate the exported file content expect(files[dirFile.name].content).to.deep.equal(data) @@ -150,7 +136,7 @@ describe('exporter sharded', function () { it('exports all files from a sharded directory with subshards', async () => { const numFiles = 31 const dirCid = await createShard(numFiles) - const exported = await exporter(dirCid, ipld) + const exported = await exporter(dirCid, block) if (exported.type !== 'directory') { throw new Error('Unexpected type') @@ -172,42 +158,42 @@ describe('exporter sharded', function () { it('exports one file from a sharded directory', async () => { const dirCid = await createShard(31) - const exported = await exporter(`/ipfs/${dirCid}/file-14`, ipld) + const exported = await exporter(`/ipfs/${dirCid}/file-14`, block) expect(exported).to.have.property('name', 'file-14') }) it('exports one file from a sharded directory sub shard', async () => { const dirCid = await createShard(31) - const exported = await exporter(`/ipfs/${dirCid}/file-30`, ipld) + const exported = await exporter(`/ipfs/${dirCid}/file-30`, block) expect(exported.name).to.deep.equal('file-30') }) it('exports one file from a shard inside a shard inside a shard', async () => { const dirCid = await createShard(2568) - const exported = await exporter(`/ipfs/${dirCid}/file-2567`, ipld) + const exported = await exporter(`/ipfs/${dirCid}/file-2567`, block) expect(exported.name).to.deep.equal('file-2567') }) it('extracts a deep folder from the sharded directory', async () => { const dirCid = await createShardWithFileNames(31, (index) => `/foo/bar/baz/file-${index}`) - const exported = await exporter(`/ipfs/${dirCid}/foo/bar/baz`, ipld) + const exported = await exporter(`/ipfs/${dirCid}/foo/bar/baz`, block) expect(exported.name).to.deep.equal('baz') }) it('extracts an intermediate folder from the sharded directory', async () => { const dirCid = await createShardWithFileNames(31, (index) => `/foo/bar/baz/file-${index}`) - const exported = await exporter(`/ipfs/${dirCid}/foo/bar`, ipld) + const exported = await exporter(`/ipfs/${dirCid}/foo/bar`, block) expect(exported.name).to.deep.equal('bar') }) it('uses .path to extract all intermediate entries from the sharded directory', async () => { const dirCid = await createShardWithFileNames(31, (index) => `/foo/bar/baz/file-${index}`) - const exported = await all(walkPath(`/ipfs/${dirCid}/foo/bar/baz/file-1`, ipld)) + const exported = await all(walkPath(`/ipfs/${dirCid}/foo/bar/baz/file-1`, block)) expect(exported.length).to.equal(5) @@ -224,7 +210,7 @@ describe('exporter sharded', function () { it('uses .path to extract all intermediate entries from the sharded directory as well as the contents', async () => { const 
dirCid = await createShardWithFileNames(31, (index) => `/foo/bar/baz/file-${index}`) - const exported = await all(walkPath(`/ipfs/${dirCid}/foo/bar/baz`, ipld)) + const exported = await all(walkPath(`/ipfs/${dirCid}/foo/bar/baz`, block)) expect(exported.length).to.equal(4) @@ -252,23 +238,29 @@ describe('exporter sharded', function () { it('exports a file from a sharded directory inside a regular directory inside a sharded directory', async () => { const dirCid = await createShard(15) - const node = new DAGNode(new UnixFS({ type: 'directory' }).marshal(), [ - new DAGLink('shard', 5, dirCid) - ]) - const nodeCid = await ipld.put(node, mc.DAG_PB, { - cidVersion: 0, - hashAlg: mh.names['sha2-256'] + const nodeBlockBuf = dagPb.encode({ + Data: new UnixFS({ type: 'directory' }).marshal(), + Links: [{ + Name: 'shard', + Tsize: 5, + Hash: dirCid + }] }) - - const shardNode = new DAGNode(new UnixFS({ type: 'hamt-sharded-directory' }).marshal(), [ - new DAGLink('75normal-dir', 5, nodeCid) - ]) - const shardNodeCid = await ipld.put(shardNode, mc.DAG_PB, { - cidVersion: 1, - hashAlg: mh.names['sha2-256'] + const nodeBlockCid = CID.createV0(await sha256.digest(nodeBlockBuf)) + await block.put(nodeBlockCid, nodeBlockBuf) + + const shardNodeBuf = dagPb.encode({ + Data: new UnixFS({ type: 'hamt-sharded-directory' }).marshal(), + Links: [{ + Name: '75normal-dir', + Tsize: nodeBlockBuf.length, + Hash: nodeBlockCid + }] }) + const shardNodeCid = CID.createV0(await sha256.digest(shardNodeBuf)) + await block.put(shardNodeCid, shardNodeBuf) - const exported = await exporter(`/ipfs/${shardNodeCid}/normal-dir/shard/file-1`, ipld) + const exported = await exporter(`/ipfs/${shardNodeCid}/normal-dir/shard/file-1`, block) expect(exported.name).to.deep.equal('file-1') }) diff --git a/packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.js b/packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.js index 14a11332..6e1c6e00 100644 --- a/packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.js +++ b/packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.js @@ -2,10 +2,6 @@ 'use strict' const { expect } = require('aegir/utils/chai') -// @ts-ignore -const IPLD = require('ipld') -// @ts-ignore -const inMemory = require('ipld-in-memory') const { importer } = require('ipfs-unixfs-importer') const all = require('it-all') const last = require('it-last') @@ -19,15 +15,7 @@ const ONE_MEG = Math.pow(1024, 2) const { exporter, walkPath } = require('./../src') describe('exporter subtree', () => { - /** @type {import('ipld')} */ - let ipld - /** @type {import('ipfs-unixfs-importer/src/types').BlockAPI} */ - let block - - before(async () => { - ipld = await inMemory(IPLD) - block = blockApi(ipld) - }) + const block = blockApi() it('exports a file 2 levels down', async () => { const content = uint8ArrayConcat(await all(randomBytes(ONE_MEG))) @@ -44,7 +32,7 @@ describe('exporter subtree', () => { throw new Error('Nothing imported') } - const exported = await exporter(`${imported.cid}/level-1/200Bytes.txt`, ipld) + const exported = await exporter(`${imported.cid}/level-1/200Bytes.txt`, block) expect(exported).to.have.property('cid') expect(exported.name).to.equal('200Bytes.txt') @@ -74,7 +62,7 @@ describe('exporter subtree', () => { throw new Error('Nothing imported') } - const exported = await exporter(`${imported.cid}/level-1`, ipld) + const exported = await exporter(`${imported.cid}/level-1`, block) if (exported.type !== 'directory') { throw new Error('Unexpected type') @@ -108,7 +96,7 @@ describe('exporter subtree', () => 
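The test changes above replace ipld.put with the pattern used throughout the new suite: encode the { Data, Links } node with @ipld/dag-pb, hash the bytes with sha2-256, wrap the digest in a CID and put the bytes into the blockstore. That repeated sequence, extracted into a hypothetical helper:

```js
const dagPb = require('@ipld/dag-pb')
const { CID } = require('multiformats/cid')
const { sha256 } = require('multiformats/hashes/sha2')

/**
 * Encode a dag-pb node, store its bytes and return the resulting CID.
 * (hypothetical helper; `blockstore` is any interface-blockstore implementation)
 *
 * @param {import('interface-blockstore').Blockstore} blockstore
 * @param {import('@ipld/dag-pb').PBNode} node
 * @param {0 | 1} [version]
 */
async function putPBNode (blockstore, node, version = 0) {
  const bytes = dagPb.encode(dagPb.prepare(node)) // prepare() normalizes plain objects
  const digest = await sha256.digest(bytes)
  const cid = version === 0 ? CID.createV0(digest) : CID.createV1(dagPb.code, digest)

  await blockstore.put(cid, bytes)

  return cid
}
```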
{ } try { - await exporter(`${imported.cid}/doesnotexist`, ipld) + await exporter(`${imported.cid}/doesnotexist`, block) } catch (err) { expect(err.code).to.equal('ERR_NOT_FOUND') } @@ -134,7 +122,7 @@ describe('exporter subtree', () => { throw new Error('Nothing imported') } - const exported = await all(walkPath(`${imported.cid}/level-1/level-2/200Bytes.txt`, ipld)) + const exported = await all(walkPath(`${imported.cid}/level-1/level-2/200Bytes.txt`, block)) expect(exported.length).to.equal(4) expect(exported[0].path).to.equal(imported.cid.toString()) diff --git a/packages/ipfs-unixfs-exporter/test/exporter.spec.js b/packages/ipfs-unixfs-exporter/test/exporter.spec.js index ee295045..0b8bbcc1 100644 --- a/packages/ipfs-unixfs-exporter/test/exporter.spec.js +++ b/packages/ipfs-unixfs-exporter/test/exporter.spec.js @@ -2,18 +2,13 @@ 'use strict' const { expect } = require('aegir/utils/chai') -// @ts-ignore -const IPLD = require('ipld') -// @ts-ignore -const inMemory = require('ipld-in-memory') const { UnixFS } = require('ipfs-unixfs') -const CID = require('cids') -const { - DAGNode, - DAGLink -} = require('ipld-dag-pb') -const mh = require('multihashing-async').multihash -const mc = require('multicodec') +const { CID } = require('multiformats/cid') +const dagPb = require('@ipld/dag-pb') +const dagCbor = require('@ipld/dag-cbor') +const { sha256 } = require('multiformats/hashes/sha2') +const { identity } = require('multiformats/hashes/identity') +const raw = require('multiformats/codecs/raw') const { exporter, recursive } = require('../src') const { importer } = require('ipfs-unixfs-importer') const all = require('it-all') @@ -29,11 +24,13 @@ const asAsyncIterable = require('./helpers/as-async-iterable') const ONE_MEG = Math.pow(1024, 2) +/** + * @typedef {import('@ipld/dag-pb').PBLink} PBLink + * @typedef {import('@ipld/dag-pb').PBNode} PBNode + */ + describe('exporter', () => { - /** @type {import('ipld')} */ - let ipld - /** @type {import('ipfs-unixfs-importer/src/types').BlockAPI} */ - let block + const block = blockApi() /** @type {Uint8Array} */ let bigFile /** @type {Uint8Array} */ @@ -48,7 +45,7 @@ describe('exporter', () => { * @param {object} [options] * @param {string} [options.type='file'] * @param {Uint8Array} [options.content] - * @param {DAGLink[]} [options.links=[]] + * @param {PBLink[]} [options.links=[]] */ async function dagPut (options = {}) { options.type = options.type || 'file' @@ -59,14 +56,15 @@ describe('exporter', () => { type: options.type, data: options.content }) + const node = { + Data: file.marshal(), + Links: options.links + } + const buf = dagPb.encode(node) + const cid = CID.createV0(await sha256.digest(buf)) + await block.put(cid, buf) - const node = new DAGNode(file.marshal(), options.links) - const cid = await ipld.put(node, mc.DAG_PB, { - cidVersion: 0, - hashAlg: mh.names['sha2-256'] - }) - - return { file: file, node: node, cid: cid } + return { file: file, node: node, cid } } /** @@ -102,7 +100,7 @@ describe('exporter', () => { */ async function addAndReadTestFile ({ file, offset, length, strategy = 'balanced', path = '/foo', maxChunkSize, rawLeaves }) { const cid = await addTestFile({ file, strategy, path, maxChunkSize, rawLeaves }) - const entry = await exporter(cid, ipld) + const entry = await exporter(cid, block) if (entry.type !== 'file' && entry.type !== 'raw') { throw new Error('Unexpected type') @@ -135,49 +133,55 @@ describe('exporter', () => { } /** - * @param {import('ipld')} ipld * @param {'file' | 'directory' | 'raw'} type * @param 
{Uint8Array | ArrayLike | undefined} data - * @param {{ node: DAGNode, cid: CID }[]} children + * @param {{ node: PBNode, cid: CID }[]} children */ - async function createAndPersistNode (ipld, type, data, children) { + async function createAndPersistNode (type, data, children) { const file = new UnixFS({ type, data: data ? Uint8Array.from(data) : undefined }) const links = [] for (let i = 0; i < children.length; i++) { const child = children[i] + // @ts-ignore - we can guarantee that it's not undefined const leaf = UnixFS.unmarshal(child.node.Data) file.addBlockSize(leaf.fileSize()) - links.push(new DAGLink('', child.node.size, child.cid)) + links.push({ + Name: '', + Tsize: child.node.Data != null ? child.node.Data.length : 0, + Hash: child.cid + }) } - const node = new DAGNode(file.marshal(), links) - const cid = await ipld.put(node, mc.DAG_PB, { - cidVersion: 1, - hashAlg: mh.names['sha2-256'] - }) + const node = { + Data: file.marshal(), + Links: links + } + + const nodeBlock = dagPb.encode(node) + const nodeCid = CID.createV0(await sha256.digest(nodeBlock)) + await block.put(nodeCid, nodeBlock) return { node, - cid + cid: nodeCid } } - before(async () => { - ipld = await inMemory(IPLD) - block = blockApi(ipld) - }) - it('ensure hash inputs are sanitized', async () => { const result = await dagPut() - const node = await ipld.get(result.cid) + const encodedBlock = await block.get(result.cid) + const node = dagPb.decode(encodedBlock) + if (!node.Data) { + throw new Error('PBNode Data undefined') + } const unmarsh = UnixFS.unmarshal(node.Data) expect(unmarsh.data).to.deep.equal(result.file.data) - const file = await exporter(result.cid, ipld) + const file = await exporter(result.cid, block) expect(file).to.have.property('cid') expect(file).to.have.property('path', result.cid.toString()) @@ -200,7 +204,7 @@ describe('exporter', () => { }], block)) const path = `/ipfs/${files[1].cid}/${fileName}` - const file = await exporter(path, ipld) + const file = await exporter(path, block) expect(file.name).to.equal(fileName) expect(file.path).to.equal(`${files[1].cid}/${fileName}`) @@ -216,7 +220,7 @@ describe('exporter', () => { }], block)) const path = `/ipfs/${files[1].cid}/${fileName}` - const file = await exporter(path, ipld) + const file = await exporter(path, block) expect(file.name).to.equal(fileName) expect(file.path).to.equal(`${files[1].cid}/${fileName}`) @@ -230,14 +234,18 @@ describe('exporter', () => { content: uint8ArrayConcat(await all(randomBytes(100))) }) - const node = await ipld.get(result.cid) + const encodedBlock = await block.get(result.cid) + const node = dagPb.decode(encodedBlock) + if (!node.Data) { + throw new Error('PBNode Data undefined') + } const unmarsh = UnixFS.unmarshal(node.Data) if (!unmarsh.data) { throw new Error('Unexpected data') } - const file = await exporter(result.cid, ipld) + const file = await exporter(result.cid, block) if (file.type !== 'file') { throw new Error('Unexpected type') @@ -257,18 +265,22 @@ describe('exporter', () => { type: 'raw', data: content.slice(0, 5) }) - const chunkNode1 = new DAGNode(chunk1.marshal()) - const chunkCid1 = await ipld.put(chunkNode1, mc.DAG_PB, { - cidVersion: 0, - hashAlg: mh.names['sha2-256'] - }) + const chunkNode1 = { + Data: chunk1.marshal(), + Links: [] + } + const chunkBlock1 = dagPb.encode(chunkNode1) + const chunkCid1 = CID.createV0(await sha256.digest(chunkBlock1)) + await block.put(chunkCid1, chunkBlock1) const chunk2 = new UnixFS({ type: 'raw', data: content.slice(5) }) - const chunkNode2 = new 
DAGNode(chunk2.marshal()) - const chunkCid2 = await ipld.put(chunkNode2, mc.DAG_PB, { - cidVersion: 0, - hashAlg: mh.names['sha2-256'] - }) + const chunkNode2 = { + Data: chunk2.marshal(), + Links: [] + } + const chunkBlock2 = dagPb.encode(chunkNode2) + const chunkCid2 = CID.createV0(await sha256.digest(chunkBlock2)) + await block.put(chunkCid2, chunkBlock2) const file = new UnixFS({ type: 'file' @@ -276,16 +288,23 @@ describe('exporter', () => { file.addBlockSize(5) file.addBlockSize(5) - const fileNode = new DAGNode(file.marshal(), [ - new DAGLink('', chunkNode1.size, chunkCid1), - new DAGLink('', chunkNode2.size, chunkCid2) - ]) - const fileCid = await ipld.put(fileNode, mc.DAG_PB, { - cidVersion: 0, - hashAlg: mh.names['sha2-256'] + const fileNode = dagPb.prepare({ + Data: file.marshal(), + Links: [{ + Name: '', + Tsize: chunkNode1.Data != null ? chunkNode1.Data.length : 0, + Hash: chunkCid1.toV0() + }, { + Name: '', + Tsize: chunkNode2.Data != null ? chunkNode2.Data.length : 0, + Hash: chunkCid2.toV0() + }] }) + const fileBlock = dagPb.encode(fileNode) + const fileCid = CID.createV0(await sha256.digest(fileBlock)) + await block.put(fileCid, fileBlock) - const exported = await exporter(fileCid, ipld) + const exported = await exporter(fileCid, block) if (exported.type !== 'file') { throw new Error('Unexpected type') @@ -302,16 +321,18 @@ describe('exporter', () => { const chunk = await dagPut({ content: uint8ArrayConcat(await all(randomBytes(100))) }) const result = await dagPut({ content: uint8ArrayConcat(await all(randomBytes(100))), - links: [ - new DAGLink('', chunk.node.size, chunk.cid) - ] + links: [{ + Name: '', + Tsize: chunk.node.Data != null ? chunk.node.Data.length : 0, + Hash: chunk.cid + }] }) if (!result.file.data) { throw new Error('Expected data') } - const file = await exporter(result.cid, ipld) + const file = await exporter(result.cid, block) if (file.type !== 'file') { throw new Error('Unexpected type') @@ -332,7 +353,7 @@ describe('exporter', () => { file: uint8ArrayConcat(await all(randomBytes(ONE_MEG * 6))) }) - const file = await exporter(cid, ipld) + const file = await exporter(cid, block) if (file.type !== 'file') { throw new Error('Unexpected type') @@ -353,7 +374,7 @@ describe('exporter', () => { file: bytes }) - const file = await exporter(cid, ipld) + const file = await exporter(cid, block) expect(file).to.have.property('path', cid.toString()) if (file.type !== 'file') { @@ -419,7 +440,7 @@ describe('exporter', () => { throw new Error('Nothing imported') } - const dir = await exporter(importedDir.cid, ipld) + const dir = await exporter(importedDir.cid, block) if (dir.type !== 'directory') { throw new Error('Unexpected type') @@ -467,7 +488,7 @@ describe('exporter', () => { throw new Error('Nothing imported') } - const dir = await exporter(importedDir.cid, ipld) + const dir = await exporter(importedDir.cid, block) if (dir.type !== 'directory') { throw new Error('Unexpected type') @@ -623,7 +644,7 @@ describe('exporter', () => { throw new Error('Nothing imported') } - const file = await exporter(imported.cid, ipld) + const file = await exporter(imported.cid, block) if (file.type !== 'file') { throw new Error('Unexpected type') @@ -643,7 +664,7 @@ describe('exporter', () => { throw new Error('Nothing imported') } - const dir = await exporter(imported.cid, ipld) + const dir = await exporter(imported.cid, block) if (dir.type !== 'directory') { throw new Error('Unexpected type') @@ -762,7 +783,7 @@ describe('exporter', () => { file: bigFile, maxChunkSize: 1024 }) 
- const file = await exporter(cid, ipld) + const file = await exporter(cid, block) if (file.type !== 'file') { throw new Error('Unexpected type') @@ -800,19 +821,19 @@ describe('exporter', () => { const hash = 'bafybeidu2qqwriogfndznz32swi5r4p2wruf6ztu5k7my53tsezwhncs5y' try { - await exporter(hash, ipld) + await exporter(hash, block) } catch (err) { expect(err.code).to.equal('ERR_NOT_FOUND') } }) it('exports file with data on internal and leaf nodes', async () => { - const leaf = await createAndPersistNode(ipld, 'raw', [0x04, 0x05, 0x06, 0x07], []) - const node = await createAndPersistNode(ipld, 'file', [0x00, 0x01, 0x02, 0x03], [ + const leaf = await createAndPersistNode('raw', [0x04, 0x05, 0x06, 0x07], []) + const node = await createAndPersistNode('file', [0x00, 0x01, 0x02, 0x03], [ leaf ]) - const file = await exporter(node.cid, ipld) + const file = await exporter(node.cid, block) if (file.type !== 'file') { throw new Error('Unexpected type') @@ -835,23 +856,23 @@ describe('exporter', () => { // | \ // l l const leaves = await Promise.all([ - createAndPersistNode(ipld, 'raw', [0x00, 0x01, 0x02, 0x03], []), - createAndPersistNode(ipld, 'raw', [0x08, 0x09, 0x10, 0x11], []), - createAndPersistNode(ipld, 'raw', [0x12, 0x13, 0x14, 0x15], []) + createAndPersistNode('raw', [0x00, 0x01, 0x02, 0x03], []), + createAndPersistNode('raw', [0x08, 0x09, 0x10, 0x11], []), + createAndPersistNode('raw', [0x12, 0x13, 0x14, 0x15], []) ]) const internalNodes = await Promise.all([ - createAndPersistNode(ipld, 'raw', [0x04, 0x05, 0x06, 0x07], [leaves[1]]), - createAndPersistNode(ipld, 'raw', undefined, [leaves[2]]) + createAndPersistNode('raw', [0x04, 0x05, 0x06, 0x07], [leaves[1]]), + createAndPersistNode('raw', undefined, [leaves[2]]) ]) - const node = await createAndPersistNode(ipld, 'file', undefined, [ + const node = await createAndPersistNode('file', undefined, [ leaves[0], internalNodes[0], internalNodes[1] ]) - const file = await exporter(node.cid, ipld) + const file = await exporter(node.cid, block) if (file.type !== 'file') { throw new Error('Unexpected type') @@ -868,12 +889,12 @@ describe('exporter', () => { }) it('exports file with data on internal and leaf nodes with an offset that only fetches data from leaf nodes', async () => { - const leaf = await createAndPersistNode(ipld, 'raw', [0x04, 0x05, 0x06, 0x07], []) - const node = await createAndPersistNode(ipld, 'file', [0x00, 0x01, 0x02, 0x03], [ + const leaf = await createAndPersistNode('raw', [0x04, 0x05, 0x06, 0x07], []) + const node = await createAndPersistNode('file', [0x00, 0x01, 0x02, 0x03], [ leaf ]) - const file = await exporter(node.cid, ipld) + const file = await exporter(node.cid, block) if (file.type !== 'file') { throw new Error('Unexpected type') @@ -900,7 +921,7 @@ describe('exporter', () => { throw new Error('Nothing imported') } - const file = await exporter(imported.cid, ipld) + const file = await exporter(imported.cid, block) if (file.type !== 'file') { throw new Error('Unexpected type') @@ -925,8 +946,8 @@ describe('exporter', () => { throw new Error('Nothing imported') } - const file = await exporter(imported.cid, ipld) - expect(CID.isCID(file.cid)).to.be.true() + const file = await exporter(imported.cid, block) + expect(CID.asCID(file.cid)).to.not.be.undefined() if (file.type !== 'raw') { throw new Error('Unexpected type') @@ -937,12 +958,16 @@ describe('exporter', () => { }) it('errors when exporting a non-existent key from a cbor node', async () => { - const cborNodeCid = await ipld.put({ + const node = { foo: 'bar' - 
}, mc.DAG_CBOR) + } + + const cborBlock = dagCbor.encode(node) + const cid = CID.createV1(dagCbor.code, await sha256.digest(cborBlock)) + await block.put(cid, cborBlock) try { - await exporter(`${cborNodeCid}/baz`, ipld) + await exporter(`${cid}/baz`, block) } catch (err) { expect(err.code).to.equal('ERR_NO_PROP') } @@ -953,8 +978,10 @@ describe('exporter', () => { foo: 'bar' } - const cborNodeCid = await ipld.put(node, mc.DAG_CBOR) - const exported = await exporter(`${cborNodeCid}`, ipld) + const cborBlock = dagCbor.encode(node) + const cid = CID.createV1(dagCbor.code, await sha256.digest(cborBlock)) + await block.put(cid, cborBlock) + const exported = await exporter(`${cid}`, block) if (exported.type !== 'object') { throw new Error('Unexpected type') @@ -964,50 +991,64 @@ describe('exporter', () => { }) it('errors when exporting a node with no resolver', async () => { - const cid = new CID(1, 'git-raw', new CID('zdj7WkRPAX9o9nb9zPbXzwG7JEs78uyhwbUs8JSUayB98DWWY').multihash) + const cid = CID.create(1, 0x78, CID.parse('zdj7WkRPAX9o9nb9zPbXzwG7JEs78uyhwbUs8JSUayB98DWWY').multihash) try { - await exporter(`${cid}`, ipld) + await exporter(`${cid}`, block) } catch (err) { expect(err.code).to.equal('ERR_NO_RESOLVER') } }) it('errors if we try to export links from inside a raw node', async () => { - const cid = await ipld.put(Uint8Array.from([0, 1, 2, 3, 4]), mc.RAW) + const rawBlock = Uint8Array.from([0, 1, 2, 3, 4]) + const cid = CID.createV1(raw.code, await sha256.digest(rawBlock)) + await block.put(cid, rawBlock) try { - await exporter(`${cid}/lol`, ipld) + await exporter(`${cid}/lol`, block) } catch (err) { expect(err.code).to.equal('ERR_NOT_FOUND') } }) it('errors we export a non-unixfs dag-pb node', async () => { - const cid = await ipld.put(new DAGNode(Uint8Array.from([0, 1, 2, 3, 4])), mc.DAG_PB) + const dagpbBlock = dagPb.encode({ + Data: Uint8Array.from([0, 1, 2, 3, 4]), + Links: [] + }) + const dagpbCid = CID.createV0(await sha256.digest(dagpbBlock)) + await block.put(dagpbCid, dagpbBlock) try { - await exporter(cid, ipld) + await exporter(dagpbCid, block) } catch (err) { expect(err.code).to.equal('ERR_NOT_UNIXFS') } }) it('errors we export a unixfs node that has a non-unixfs/dag-pb child', async () => { - const cborNodeCid = await ipld.put({ - foo: 'bar' - }, mc.DAG_CBOR) + const cborBlock = await dagCbor.encode({ foo: 'bar' }) + const cborCid = CID.createV1(dagCbor.code, await sha256.digest(cborBlock)) + await block.put(cborCid, cborBlock) const file = new UnixFS({ type: 'file' }) file.addBlockSize(100) - const cid = await ipld.put(new DAGNode(file.marshal(), [ - new DAGLink('', 100, cborNodeCid) - ]), mc.DAG_PB) + const dagpbBuffer = dagPb.encode({ + Data: file.marshal(), + Links: [{ + Name: '', + Tsize: cborBlock.length, + Hash: cborCid + }] + }) + const dagpbCid = CID.createV0(await sha256.digest(dagpbBuffer)) + await block.put(dagpbCid, dagpbBuffer) - const exported = await exporter(cid, ipld) + const exported = await exporter(dagpbCid, block) if (exported.type !== 'file') { throw new Error('Unexpected type') @@ -1026,7 +1067,7 @@ describe('exporter', () => { content: asAsyncIterable(uint8ArrayFromString('hello world')) }], block)) - const exported = await exporter(imported[0].cid, ipld) + const exported = await exporter(imported[0].cid, block) expect(exported.depth).to.equal(0) }) @@ -1047,7 +1088,7 @@ describe('exporter', () => { throw new Error('Nothing imported') } - const exported = await all(recursive(dir.cid, ipld)) + const exported = await all(recursive(dir.cid, 
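The cbor fixtures above follow the same encode-hash-put pattern, except dag-cbor blocks are always CIDv1 and keyed by dagCbor.code. A sketch matching those tests (putCBOR is a hypothetical helper name):

```js
const dagCbor = require('@ipld/dag-cbor')
const { CID } = require('multiformats/cid')
const { sha256 } = require('multiformats/hashes/sha2')

/**
 * @param {import('interface-blockstore').Blockstore} blockstore
 * @param {any} obj
 */
async function putCBOR (blockstore, obj) {
  const bytes = dagCbor.encode(obj) // deterministic dag-cbor encoding
  const cid = CID.createV1(dagCbor.code, await sha256.digest(bytes))

  await blockstore.put(cid, bytes)

  return cid
}

// e.g. await putCBOR(blockstore, { foo: 'bar' }) mirrors the cbor fixtures in the tests above
```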
block)) const dirCid = dir.cid.toString() expect(exported[0].depth).to.equal(0) @@ -1072,10 +1113,10 @@ describe('exporter', () => { it('exports a CID encoded with the identity hash', async () => { const data = uint8ArrayFromString('hello world') - const hash = mh.encode(data, 'identity') - const cid = new CID(1, 'identity', hash) + const hash = await identity.digest(data) + const cid = CID.create(1, identity.code, hash) - const exported = await exporter(cid, ipld) + const exported = await exporter(cid, block) if (exported.type !== 'identity') { throw new Error('Unexpected type') @@ -1089,10 +1130,10 @@ describe('exporter', () => { it('exports a CID encoded with the identity hash with an offset', async () => { const data = uint8ArrayFromString('hello world') - const hash = mh.encode(data, 'identity') - const cid = new CID(1, 'identity', hash) + const hash = await identity.digest(data) + const cid = CID.create(1, identity.code, hash) - const exported = await exporter(cid, ipld) + const exported = await exporter(cid, block) if (exported.type !== 'identity') { throw new Error('Unexpected type') @@ -1107,10 +1148,10 @@ describe('exporter', () => { it('exports a CID encoded with the identity hash with a length', async () => { const data = uint8ArrayFromString('hello world') - const hash = mh.encode(data, 'identity') - const cid = new CID(1, 'identity', hash) + const hash = await identity.digest(data) + const cid = CID.create(1, identity.code, hash) - const exported = await exporter(cid, ipld) + const exported = await exporter(cid, block) if (exported.type !== 'identity') { throw new Error('Unexpected type') @@ -1125,10 +1166,10 @@ describe('exporter', () => { it('exports a CID encoded with the identity hash with an offset and a length', async () => { const data = uint8ArrayFromString('hello world') - const hash = mh.encode(data, 'identity') - const cid = new CID(1, 'identity', hash) + const hash = await identity.digest(data) + const cid = CID.create(1, identity.code, hash) - const exported = await exporter(cid, ipld) + const exported = await exporter(cid, block) if (exported.type !== 'identity') { throw new Error('Unexpected type') @@ -1147,8 +1188,8 @@ describe('exporter', () => { // data should not be in IPLD const data = uint8ArrayFromString(`hello world '${Math.random()}`) - const hash = mh.encode(data, 'sha2-256') - const cid = new CID(1, 'dag-pb', hash) + const hash = await sha256.digest(data) + const cid = CID.create(1, dagPb.code, hash) const message = `User aborted ${Math.random()}` setTimeout(() => { @@ -1157,7 +1198,7 @@ describe('exporter', () => { // regular test IPLD is offline-only, we need to mimic what happens when // we try to get a block from the network - const ipld = { + const customBlock = { /** * * @param {CID} cid @@ -1174,7 +1215,7 @@ describe('exporter', () => { } // @ts-ignore ipld implementation incomplete - await expect(exporter(cid, ipld, { + await expect(exporter(cid, customBlock, { signal: abortController.signal })).to.eventually.be.rejectedWith(message) }) diff --git a/packages/ipfs-unixfs-exporter/test/helpers/block.js b/packages/ipfs-unixfs-exporter/test/helpers/block.js index 64b718fd..d6aa505c 100644 --- a/packages/ipfs-unixfs-exporter/test/helpers/block.js +++ b/packages/ipfs-unixfs-exporter/test/helpers/block.js @@ -1,64 +1,7 @@ 'use strict' -const { - DAGNode, - util -} = require('ipld-dag-pb') -const multicodec = require('multicodec') -const mh = require('multihashing-async').multihash -const CID = require('cids') -const Block = require('ipld-block') 
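The identity-hash tests above work without any block.put because an identity CID inlines the content in its multihash. A short sketch of how those CIDs are built with the new API:

```js
const { CID } = require('multiformats/cid')
const { identity } = require('multiformats/hashes/identity')
const uint8ArrayFromString = require('uint8arrays/from-string')
const uint8ArrayToString = require('uint8arrays/to-string')

async function main () {
  const data = uint8ArrayFromString('hello world')

  // the "digest" of the identity hash is the data itself
  const digest = await identity.digest(data)
  const cid = CID.create(1, identity.code, digest)

  // no blockstore interaction needed: everything lives in cid.multihash.digest
  console.log(uint8ArrayToString(cid.multihash.digest)) // 'hello world'
}

main()
```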
+/** @type {() => import('interface-blockstore').Blockstore} */ +// @ts-expect-error no types for this deep import +const block = require('ipfs-unixfs-importer/test/helpers/block') -/** - * @param {import('ipld')} ipld - */ -function createBlockApi (ipld) { - // make ipld behave like the block api, some tests need to pull - // data from ipld so can't use a simple in-memory cid->block map - /** @type {import('ipfs-unixfs-importer/src/types').BlockAPI} */ - const BlockApi = { - put: async (buf, options) => { - if (!options || !options.cid) { - throw new Error('No cid passed') - } - - const cid = new CID(options.cid) - - const multihash = mh.decode(cid.multihash) - - if (Block.isBlock(buf)) { - buf = buf.data - } - - /** @type {any} */ - let obj = buf - - if (cid.codec === 'dag-pb') { - obj = util.deserialize(buf) - } - - await ipld.put(obj, cid.codec === 'dag-pb' ? multicodec.DAG_PB : multicodec.RAW, { - cidVersion: cid.version, - hashAlg: multihash.code - }) - - return new Block(buf, cid) - }, - get: async (cid, options) => { - cid = new CID(cid) - - /** @type {Uint8Array} */ - let buf = await ipld.get(cid, options) - - if (buf instanceof DAGNode) { - buf = buf.serialize() - } - - return new Block(buf, cid) - } - } - - return BlockApi -} - -module.exports = createBlockApi +module.exports = block diff --git a/packages/ipfs-unixfs-exporter/test/helpers/collect-leaf-cids.js b/packages/ipfs-unixfs-exporter/test/helpers/collect-leaf-cids.js index b8f04509..259b3133 100644 --- a/packages/ipfs-unixfs-exporter/test/helpers/collect-leaf-cids.js +++ b/packages/ipfs-unixfs-exporter/test/helpers/collect-leaf-cids.js @@ -1,15 +1,22 @@ 'use strict' +const dagPb = require('@ipld/dag-pb') + +/** + * @typedef {import('@ipld/dag-pb').PBLink} PBLink + */ + /** - * @param {import('cids')} cid - * @param {import('ipld')} ipld + * @param {import('multiformats/cid').CID} cid + * @param {import('interface-blockstore').Blockstore} blockstore */ -module.exports = function (cid, ipld) { +module.exports = function (cid, blockstore) { /** - * @param {import('cids')} cid + * @param {import('multiformats/cid').CID} cid */ async function * traverse (cid) { - const node = await ipld.get(cid) + const block = await blockstore.get(cid) + const node = dagPb.decode(block) if (node instanceof Uint8Array || !node.Links.length) { yield { @@ -20,12 +27,7 @@ module.exports = function (cid, ipld) { return } - node.Links.forEach( - /** - * @param {import('ipld-dag-pb').DAGLink} link - */ - link => traverse(link.Hash) - ) + node.Links.forEach(link => traverse(link.Hash)) } return traverse(cid) diff --git a/packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.js b/packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.js index 14cb081a..b2772159 100644 --- a/packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.js +++ b/packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.js @@ -5,10 +5,6 @@ const { importer } = require('ipfs-unixfs-importer') const { exporter } = require('../src') const { expect } = require('aegir/utils/chai') -// @ts-ignore -const IPLD = require('ipld') -// @ts-ignore -const inMemory = require('ipld-in-memory') const all = require('it-all') const last = require('it-last') const blockApi = require('./helpers/block') @@ -23,15 +19,7 @@ const asAsyncIterable = require('./helpers/as-async-iterable') */ describe('builder: directory sharding', () => { - /** @type {import('ipld')} */ - let ipld - /** @type {import('ipfs-unixfs-importer/src/types').BlockAPI} */ - let block 
- - before(async () => { - ipld = await inMemory(IPLD) - block = blockApi(ipld) - }) + const block = blockApi() describe('basic dirbuilder', () => { it('yields a non-sharded dir', async () => { @@ -48,7 +36,7 @@ describe('builder: directory sharding', () => { expect(nodes[0].path).to.equal('a/b') expect(nodes[1].path).to.equal('a') - const dirNode = await exporter(nodes[1].cid, ipld) + const dirNode = await exporter(nodes[1].cid, block) if (dirNode.type !== 'directory') { throw new Error('Unexpected type') @@ -56,7 +44,7 @@ describe('builder: directory sharding', () => { expect(dirNode.unixfs.type).to.equal('directory') - const fileNode = await exporter(nodes[0].cid, ipld) + const fileNode = await exporter(nodes[0].cid, block) if (fileNode.type !== 'file') { throw new Error('Unexpected type') @@ -83,7 +71,7 @@ describe('builder: directory sharding', () => { expect(nodes[0].path).to.equal('a/b') expect(nodes[1].path).to.equal('a') - const node = await exporter(nodes[1].cid, ipld) + const node = await exporter(nodes[1].cid, block) if (node.type !== 'directory') { throw new Error('Unexpected type') @@ -103,7 +91,7 @@ describe('builder: directory sharding', () => { const nonShardedHash = nodes[1].cid - const dir = await exporter(nonShardedHash, ipld) + const dir = await exporter(nonShardedHash, block) if (dir.type !== 'directory') { throw new Error('Unexpected type') @@ -140,7 +128,7 @@ describe('builder: directory sharding', () => { const shardedHash = nodes[1].cid - const dir = await exporter(shardedHash, ipld) + const dir = await exporter(shardedHash, block) if (dir.type !== 'directory') { throw new Error('Unexpected type') @@ -207,7 +195,7 @@ describe('builder: directory sharding', () => { expect(nodes.length).to.equal(maxDirs + 1) // files plus the containing directory - const dir = await exporter(nodes[nodes.length - 1].cid, ipld) + const dir = await exporter(nodes[nodes.length - 1].cid, block) if (dir.type !== 'directory') { throw new Error('Unexpected type') @@ -229,7 +217,7 @@ describe('builder: directory sharding', () => { const maxDirs = 2000 const maxDepth = 3 - /** @type {import('cids')} */ + /** @type {import('multiformats/cid').CID} */ let rootHash before(async () => { @@ -276,7 +264,7 @@ describe('builder: directory sharding', () => { }) it('imports a big dir', async () => { - const dir = await exporter(rootHash, ipld) + const dir = await exporter(rootHash, block) /** * @param {UnixFSEntry} node @@ -351,7 +339,7 @@ describe('builder: directory sharding', () => { } } - const dir = await exporter(rootHash, ipld) + const dir = await exporter(rootHash, block) const entries = await collectContent(dir) let index = 0 @@ -365,7 +353,7 @@ describe('builder: directory sharding', () => { it('exports a big dir with subpath', async () => { const exportHash = rootHash.toString() + '/big/big/2000' - const node = await exporter(exportHash, ipld) + const node = await exporter(exportHash, block) expect(node.path).to.equal(exportHash) if (node.type !== 'file') { diff --git a/packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.js b/packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.js index 12ad7061..c6018e3f 100644 --- a/packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.js +++ b/packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.js @@ -2,10 +2,6 @@ 'use strict' const { expect } = require('aegir/utils/chai') -// @ts-ignore -const IPLD = require('ipld') -// @ts-ignore -const inMemory = require('ipld-in-memory') const all = require('it-all') 
const { importer } = require('ipfs-unixfs-importer') const { exporter } = require('../src') @@ -17,15 +13,7 @@ const asAsyncIterable = require('./helpers/as-async-iterable') describe('import and export: directory', () => { const rootHash = 'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK' - /** @type {import('ipld')} */ - let ipld - /** @type {import('ipfs-unixfs-importer/src/types').BlockAPI} */ - let block - - before(async () => { - ipld = await inMemory(IPLD) - block = blockApi(ipld) - }) + const block = blockApi() it('imports', async function () { this.timeout(20 * 1000) @@ -76,7 +64,7 @@ describe('import and export: directory', () => { it('exports', async function () { this.timeout(20 * 1000) - const dir = await exporter(rootHash, ipld) + const dir = await exporter(rootHash, block) const files = await recursiveExport(dir, rootHash) expect(files.sort(byPath)).to.eql([{ @@ -121,7 +109,7 @@ async function recursiveExport (node, path, entries = []) { } /** - * @param {{ path?: string, cid: import('cids') }} node + * @param {{ path?: string, cid: import('multiformats/cid').CID }} node */ function normalizeNode (node) { return { diff --git a/packages/ipfs-unixfs-exporter/test/import-export.spec.js b/packages/ipfs-unixfs-exporter/test/import-export.spec.js index 245f830a..d8571ff0 100644 --- a/packages/ipfs-unixfs-exporter/test/import-export.spec.js +++ b/packages/ipfs-unixfs-exporter/test/import-export.spec.js @@ -4,10 +4,6 @@ const { expect } = require('aegir/utils/chai') // @ts-ignore -const IPLD = require('ipld') -// @ts-ignore -const inMemory = require('ipld-in-memory') -// @ts-ignore const loadFixture = require('aegir/utils/fixtures') // @ts-ignore const isNode = require('detect-node') @@ -31,15 +27,7 @@ describe('import and export', function () { const importerOptions = { strategy: strategy } describe('using builder: ' + strategy, () => { - /** @type {import('ipld')} */ - let ipld - /** @type {import('ipfs-unixfs-importer/src/types').BlockAPI} */ - let block - - before(async () => { - ipld = await inMemory(IPLD) - block = blockApi(ipld) - }) + const block = blockApi() it('imports and exports', async () => { const path = `${strategy}-big.dat` @@ -49,7 +37,7 @@ describe('import and export', function () { for await (const file of importer(values, block, importerOptions)) { expect(file.path).to.eql(path) - const result = await exporter(file.cid, ipld) + const result = await exporter(file.cid, block) if (result.type !== 'file') { throw new Error('Unexpected type') diff --git a/packages/ipfs-unixfs-exporter/test/importer.spec.js b/packages/ipfs-unixfs-exporter/test/importer.spec.js index a84d6f63..c722d082 100644 --- a/packages/ipfs-unixfs-exporter/test/importer.spec.js +++ b/packages/ipfs-unixfs-exporter/test/importer.spec.js @@ -6,10 +6,6 @@ const { exporter, recursive } = require('../src') const extend = require('merge-options') const { expect } = require('aegir/utils/chai') const sinon = require('sinon') -// @ts-ignore -const IPLD = require('ipld') -// @ts-ignore -const inMemory = require('ipld-in-memory') const { UnixFS } = require('ipfs-unixfs') const collectLeafCids = require('./helpers/collect-leaf-cids') // @ts-ignore @@ -27,13 +23,14 @@ const uint8ArrayConcat = require('uint8arrays/concat') const uint8ArrayFromString = require('uint8arrays/from-string') const asAsyncIterable = require('./helpers/as-async-iterable') const last = require('it-last') -const CID = require('cids') +const { CID } = require('multiformats/cid') +const { base58btc } = require('multiformats/bases/base58') 
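Editorial aside: with the old `cids` class a base name string was passed to `toString()`; with multiformats a base encoder object is imported and passed instead, which is why `base58btc` is now required above. A small sketch of the difference, assuming multiformats v9:

```js
const { CID } = require('multiformats/cid')
const { base58btc } = require('multiformats/bases/base58')

const cid = CID.parse('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn')

// a CIDv0 stringifies as base58btc by default...
console.log(cid.toString())
// ...and the encoder can also be passed explicitly, which is what the
// updated assertions do so that v0 and v1 CIDs compare the same way
console.log(cid.toString(base58btc))
// a CIDv1 defaults to base32 instead
console.log(cid.toV1().toString())
```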
+const { decode } = require('@ipld/dag-pb') const { parseMtime } = require('ipfs-unixfs') /** - * @typedef {import('ipld')} IPLD - * @typedef {import('ipfs-unixfs-importer/src/types').BlockAPI} BlockAPI - * @typedef {import('ipld-dag-pb').DAGNode} DAGNode + * @typedef {import('interface-blockstore').Blockstore} Blockstore + * @typedef {import('@ipld/dag-pb').PBNode} PBNode */ /** @@ -200,50 +197,61 @@ const strategyOverrides = { } /** - * @param {BlockAPI} block - * @param {IPLD} ipld + * @param {Blockstore} blockstore * @param {import('ipfs-unixfs-importer').UserImporterOptions} options * @param {*} expected */ -const checkLeafNodeTypes = async (block, ipld, options, expected) => { +const checkLeafNodeTypes = async (blockstore, options, expected) => { const file = await first(importer([{ path: 'foo', content: asAsyncIterable(new Uint8Array(262144 + 5).fill(1)) - }], block, options)) + }], blockstore, options)) if (!file) { throw new Error('Nothing imported') } - /** @type {DAGNode} */ - const node = await ipld.get(file.cid) + // @type {Block} + const fileBlock = await blockstore.get(file.cid) + /** @type {PBNode} */ + const node = decode(fileBlock) + if (!node.Data) { + throw new Error('PBNode Data undefined') + } const meta = UnixFS.unmarshal(node.Data) expect(meta.type).to.equal('file') expect(node.Links.length).to.equal(2) - const linkedNodes = await Promise.all( - node.Links.map(link => ipld.get(link.Hash)) + const linkedBlocks = await Promise.all( + node.Links.map(link => blockstore.get(link.Hash)) ) - linkedNodes.forEach(node => { + linkedBlocks.forEach(bytes => { + const node = decode(bytes) + if (!node.Data) { + throw new Error('PBNode Data undefined') + } const meta = UnixFS.unmarshal(node.Data) expect(meta.type).to.equal(expected) }) } /** - * @param {BlockAPI} block - * @param {IPLD} ipld + * @param {Blockstore} blockstore * @param {import('ipfs-unixfs-importer').UserImporterOptions} options * @param {*} expected */ -const checkNodeLinks = async (block, ipld, options, expected) => { +const checkNodeLinks = async (blockstore, options, expected) => { for await (const file of importer([{ path: 'foo', content: asAsyncIterable(new Uint8Array(100).fill(1)) - }], block, options)) { - const node = await ipld.get(file.cid) + }], blockstore, options)) { + const fileBlock = await blockstore.get(file.cid) + const node = decode(fileBlock) + if (!node.Data) { + throw new Error('PBNode Data undefined') + } const meta = UnixFS.unmarshal(node.Data) expect(meta.type).to.equal('file') @@ -341,7 +349,7 @@ strategies.forEach((strategy) => { const actualFile = actualFiles[i] expect(actualFile.path).to.equal(expectedFile.path) - expect(actualFile.cid.toString('base58btc')).to.equal(expectedFile.cid) + expect(actualFile.cid.toString(base58btc)).to.equal(expectedFile.cid.toString()) if (actualFile.unixfs) { expect(actualFile.unixfs.type).to.equal(expectedFile.type) @@ -356,10 +364,7 @@ strategies.forEach((strategy) => { describe('importer: ' + strategy, function () { this.timeout(30 * 1000) - /** @type {IPLD} */ - let ipld - /** @type {BlockAPI} */ - let block + const block = blockApi() /** @type {import('ipfs-unixfs-importer').UserImporterOptions} */ const options = { // @ts-ignore @@ -372,11 +377,6 @@ strategies.forEach((strategy) => { options.reduceSingleLeafToSelf = false } - before(async () => { - ipld = await inMemory(IPLD) - block = blockApi(ipld) - }) - it('fails on bad content', async () => { try { // @ts-expect-error bad content @@ -671,7 +671,7 @@ strategies.forEach((strategy) => { 
expect(file).to.exist() try { - await ipld.get(file.cid) + await block.get(file.cid) throw new Error('No error was thrown') } catch (err) { @@ -756,11 +756,11 @@ strategies.forEach((strategy) => { // Just check the intermediate directory can be retrieved if (!inputFile) { - await ipld.get(cid) + await block.get(cid) } // Check the imported content is correct - const node = await exporter(cid, ipld) + const node = await exporter(cid, block) if (node.type !== 'file') { throw new Error('Unexpected type') @@ -771,25 +771,25 @@ strategies.forEach((strategy) => { }) it('imports file with raw leaf nodes when specified', () => { - return checkLeafNodeTypes(block, ipld, { + return checkLeafNodeTypes(block, { leafType: 'raw' }, 'raw') }) it('imports file with file leaf nodes when specified', () => { - return checkLeafNodeTypes(block, ipld, { + return checkLeafNodeTypes(block, { leafType: 'file' }, 'file') }) it('reduces file to single node when specified', () => { - return checkNodeLinks(block, ipld, { + return checkNodeLinks(block, { reduceSingleLeafToSelf: true }, 0) }) it('does not reduce file to single node when overidden by options', () => { - return checkNodeLinks(block, ipld, { + return checkNodeLinks(block, { reduceSingleLeafToSelf: false }, 1) }) @@ -805,7 +805,7 @@ strategies.forEach((strategy) => { path: '1.2MiB.txt', content: asAsyncIterable(bigFile) }], block, options)) { - for await (const { cid } of collectLeafCids(file.cid, ipld)) { + for await (const { cid } of collectLeafCids(file.cid, block)) { expect(cid).to.have.property('codec', 'raw') expect(cid).to.have.property('version', 1) } @@ -825,7 +825,7 @@ strategies.forEach((strategy) => { content: asAsyncIterable(bigFile), mtime: parseMtime(now) }], block, options)) { - const node = await exporter(file.cid, ipld) + const node = await exporter(file.cid, block) expect(node).to.have.deep.nested.property('unixfs.mtime', dateToTimespec(now)) } @@ -841,7 +841,7 @@ strategies.forEach((strategy) => { mtime: parseMtime(now) }], block)) - const node = await exporter(entries[0].cid, ipld) + const node = await exporter(entries[0].cid, block) expect(node).to.have.deep.nested.property('unixfs.mtime', dateToTimespec(now)) }) @@ -860,7 +860,7 @@ strategies.forEach((strategy) => { content: asAsyncIterable(bigFile) }], block)) - const nodes = await all(recursive(entries[entries.length - 1].cid, ipld)) + const nodes = await all(recursive(entries[entries.length - 1].cid, block)) const node = nodes.filter(node => node.type === 'directory').pop() if (!node) { @@ -886,7 +886,7 @@ strategies.forEach((strategy) => { content: asAsyncIterable(bigFile) }], block)) - const nodes = await all(recursive(entries[entries.length - 1].cid, ipld)) + const nodes = await all(recursive(entries[entries.length - 1].cid, block)) const node = nodes.filter(node => node.type === 'directory').pop() if (!node) { @@ -917,7 +917,7 @@ strategies.forEach((strategy) => { content: asAsyncIterable(bigFile) }], block)) - const nodes = await all(recursive(entries[entries.length - 1].cid, ipld)) + const nodes = await all(recursive(entries[entries.length - 1].cid, block)) const node = nodes.filter(node => node.type === 'directory' && node.name === 'bar').pop() if (!node) { @@ -948,7 +948,7 @@ strategies.forEach((strategy) => { shardSplitThreshold: 0 })) - const nodes = await all(recursive(entries[entries.length - 1].cid, ipld)) + const nodes = await all(recursive(entries[entries.length - 1].cid, block)) const node = nodes.filter(node => node.type === 'directory' && node.unixfs.type === 
'hamt-sharded-directory').pop() if (!node) { @@ -971,7 +971,7 @@ strategies.forEach((strategy) => { content: asAsyncIterable(bigFile), mode }], block, options)) { - const node = await exporter(file.cid, ipld) + const node = await exporter(file.cid, block) expect(node).to.have.nested.property('unixfs.mode', mode) } @@ -987,7 +987,7 @@ strategies.forEach((strategy) => { mode }], block)) - const node = await exporter(entries[0].cid, ipld) + const node = await exporter(entries[0].cid, block) expect(node).to.have.nested.property('unixfs.mode', mode) }) @@ -1007,10 +1007,10 @@ strategies.forEach((strategy) => { mode: mode2 }], block)) - const node1 = await exporter(entries[0].cid, ipld) + const node1 = await exporter(entries[0].cid, block) expect(node1).to.have.nested.property('unixfs.mode', mode1) - const node2 = await exporter(entries[1].cid, ipld) + const node2 = await exporter(entries[1].cid, block) expect(node2).to.have.nested.property('unixfs.mode', mode2) }) @@ -1028,10 +1028,10 @@ strategies.forEach((strategy) => { content: asAsyncIterable(bigFile) }], block)) - const node1 = await exporter(entries[0].cid, ipld) + const node1 = await exporter(entries[0].cid, block) expect(node1).to.have.nested.property('unixfs.mode', mode) - const node2 = await exporter(entries[1].cid, ipld) + const node2 = await exporter(entries[1].cid, block) expect(node2).to.have.nested.property('unixfs.mode').that.does.not.equal(mode) }) @@ -1043,29 +1043,21 @@ strategies.forEach((strategy) => { content: asAsyncIterable(bigFile) }], block)) - const node1 = await exporter(entries[0].cid, ipld) + const node1 = await exporter(entries[0].cid, block) expect(node1).to.have.nested.property('unixfs.mode', 0o0644) - const node2 = await exporter(entries[1].cid, ipld) + const node2 = await exporter(entries[1].cid, block) expect(node2).to.have.nested.property('unixfs.mode', 0o0755) }) }) }) describe('configuration', () => { - /** @type {IPLD} */ - let ipld - /** @type {BlockAPI} */ - let block - - before(async () => { - ipld = await inMemory(IPLD) - block = blockApi(ipld) - }) + const block = blockApi() it('alllows configuring with custom dag and tree builder', async () => { let builtTree = false - const cid = new CID('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') + const cid = CID.parse('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') const unixfs = new UnixFS({ type: 'directory' }) // @ts-expect-error custom dag builder expects weird data diff --git a/packages/ipfs-unixfs-importer/.aegir.js b/packages/ipfs-unixfs-importer/.aegir.js index e67cd911..fbe2f018 100644 --- a/packages/ipfs-unixfs-importer/.aegir.js +++ b/packages/ipfs-unixfs-importer/.aegir.js @@ -23,6 +23,7 @@ const buildConfig = { /** @type {import('aegir').PartialOptions} */ module.exports = { build: { + bundlesizeMax: '44KB', config: buildConfig }, test: { diff --git a/packages/ipfs-unixfs-importer/README.md b/packages/ipfs-unixfs-importer/README.md index a9aad171..525cd8ab 100644 --- a/packages/ipfs-unixfs-importer/README.md +++ b/packages/ipfs-unixfs-importer/README.md @@ -140,8 +140,6 @@ The input's file paths and directory structure will be preserved in the [`dag-pb - `leafType` (string, defaults to `'file'`) what type of UnixFS node leaves should be - can be `'file'` or `'raw'` (ignored when `rawLeaves` is `true`) - `blockWriteConcurrency` (positive integer, defaults to 10) How many blocks to hash and write to the block store concurrently. For small numbers of large files this should be high (e.g. 50). 
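Editorial aside: the two concurrency knobs described above are plain importer options. An illustrative call, assuming `blockstore` is any interface-blockstore implementation and `files` is an iterable of import candidates:

```js
const { importer } = require('ipfs-unixfs-importer')

async function importAll (files, blockstore) {
  const options = {
    blockWriteConcurrency: 50, // a few large files: raise this
    fileImportConcurrency: 50 // many small files: raise this
  }

  for await (const entry of importer(files, blockstore, options)) {
    console.log(entry.path, entry.cid.toString())
  }
}
```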
- `fileImportConcurrency` (number, defaults to 50) How many files to import concurrently. For large numbers of small files this should be high (e.g. 50). -- `pin` (boolean, defaults to `false`) Whether to pin each block as it is created -- `preload` (boolean, defaults to `false`) Whether to preload each block as it is created ## Overriding internals diff --git a/packages/ipfs-unixfs-importer/package.json b/packages/ipfs-unixfs-importer/package.json index 9263d40c..55a20ead 100644 --- a/packages/ipfs-unixfs-importer/package.json +++ b/packages/ipfs-unixfs-importer/package.json @@ -12,7 +12,7 @@ "test": "aegir test", "build": "aegir build", "clean": "rimraf ./dist", - "lint": "aegir ts --check && aegir lint", + "lint": "aegir ts -p check && aegir lint", "coverage": "nyc -s npm run test -t node && nyc report --reporter=html", "depcheck": "aegir dep-check -i @types/mocha -i nyc -i rimraf -i copy -i util -i crypto-browserify -i events -i readable-stream -i assert" }, @@ -34,34 +34,31 @@ "homepage": "https://github.com/ipfs/js-ipfs-unixfs#readme", "devDependencies": { "@types/mocha": "^8.2.1", - "aegir": "^33.1.0", + "aegir": "^34.0.0", "assert": "^2.0.0", "copy": "^0.3.2", "crypto-browserify": "^3.12.0", "events": "^3.3.0", - "ipld": "^0.29.0", - "ipld-block": "^0.11.1", - "ipld-in-memory": "^8.0.0", "it-buffer-stream": "^2.0.0", - "multicodec": "^3.0.1", "nyc": "^15.0.0", "readable-stream": "^3.6.0", "rimraf": "^3.0.2", "util": "^0.12.3" }, "dependencies": { + "@ipld/dag-pb": "^2.0.2", "bl": "^5.0.0", - "cids": "^1.1.5", "err-code": "^3.0.1", "hamt-sharding": "^2.0.0", + "interface-blockstore": "^1.0.0", "ipfs-unixfs": "^4.0.3", - "ipld-dag-pb": "^0.22.2", "it-all": "^1.0.5", "it-batch": "^1.0.8", "it-first": "^1.0.6", "it-parallel-batch": "^1.0.9", "merge-options": "^3.0.4", - "multihashing-async": "^2.1.0", + "multiformats": "^9.0.4", + "murmurhash3js-revisited": "^3.0.0", "rabin-wasm": "^0.1.4", "uint8arrays": "^2.1.2" }, diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/dir.js b/packages/ipfs-unixfs-importer/src/dag-builder/dir.js index 36106c9e..ac59de32 100644 --- a/packages/ipfs-unixfs-importer/src/dag-builder/dir.js +++ b/packages/ipfs-unixfs-importer/src/dag-builder/dir.js @@ -2,9 +2,7 @@ const { UnixFS } = require('ipfs-unixfs') const persist = require('../utils/persist') -const { - DAGNode -} = require('ipld-dag-pb') +const { encode, prepare } = require('@ipld/dag-pb') /** * @typedef {import('../types').Directory} Directory @@ -13,15 +11,15 @@ const { /** * @type {import('../types').UnixFSV1DagBuilder} */ -const dirBuilder = async (item, block, options) => { +const dirBuilder = async (item, blockstore, options) => { const unixfs = new UnixFS({ type: 'directory', mtime: item.mtime, mode: item.mode }) - const buffer = new DAGNode(unixfs.marshal()).serialize() - const cid = await persist(buffer, block, options) + const buffer = encode(prepare({ Data: unixfs.marshal() })) + const cid = await persist(buffer, blockstore, options) const path = item.path return { diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.js b/packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.js index b8179e89..3b12a5ad 100644 --- a/packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.js +++ b/packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.js @@ -2,9 +2,8 @@ const { UnixFS } = require('ipfs-unixfs') const persist = require('../../utils/persist') -const { - DAGNode -} = require('ipld-dag-pb') +const dagPb = 
require('@ipld/dag-pb') +const raw = require('multiformats/codecs/raw') /** * @typedef {import('../../types').BufferImporter} BufferImporter @@ -21,14 +20,14 @@ async function * bufferImporter (file, block, options) { /** @type {import('../../types').PersistOptions} */ const opts = { - codec: 'dag-pb', + codec: dagPb, cidVersion: options.cidVersion, - hashAlg: options.hashAlg, + hasher: options.hasher, onlyHash: options.onlyHash } if (options.rawLeaves) { - opts.codec = 'raw' + opts.codec = raw opts.cidVersion = 1 } else { unixfs = new UnixFS({ @@ -38,7 +37,10 @@ async function * bufferImporter (file, block, options) { mode: file.mode }) - buffer = new DAGNode(unixfs.marshal()).serialize() + buffer = dagPb.encode({ + Data: unixfs.marshal(), + Links: [] + }) } return { diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/file/index.js b/packages/ipfs-unixfs-importer/src/dag-builder/file/index.js index 152dac91..302ef2cd 100644 --- a/packages/ipfs-unixfs-importer/src/dag-builder/file/index.js +++ b/packages/ipfs-unixfs-importer/src/dag-builder/file/index.js @@ -3,15 +3,13 @@ const errCode = require('err-code') const { UnixFS } = require('ipfs-unixfs') const persist = require('../../utils/persist') -const { - DAGNode, - DAGLink -} = require('ipld-dag-pb') +const { encode, prepare } = require('@ipld/dag-pb') const parallelBatch = require('it-parallel-batch') -const mh = require('multihashing-async').multihash +const rawCodec = require('multiformats/codecs/raw') +const dagPb = require('@ipld/dag-pb') /** - * @typedef {import('../../types').BlockAPI} BlockAPI + * @typedef {import('interface-blockstore').Blockstore} Blockstore * @typedef {import('../../types').File} File * @typedef {import('../../types').ImporterOptions} ImporterOptions * @typedef {import('../../types').Reducer} Reducer @@ -30,10 +28,10 @@ const dagBuilders = { /** * @param {File} file - * @param {BlockAPI} block + * @param {Blockstore} blockstore * @param {ImporterOptions} options */ -async function * buildFileBatch (file, block, options) { +async function * buildFileBatch (file, blockstore, options) { let count = -1 let previous let bufferImporter @@ -44,7 +42,7 @@ async function * buildFileBatch (file, block, options) { bufferImporter = require('./buffer-importer') } - for await (const entry of parallelBatch(bufferImporter(file, block, options), options.blockWriteConcurrency)) { + for await (const entry of parallelBatch(bufferImporter(file, blockstore, options), options.blockWriteConcurrency)) { count++ if (count === 0) { @@ -66,10 +64,10 @@ async function * buildFileBatch (file, block, options) { /** * @param {File} file - * @param {BlockAPI} block + * @param {Blockstore} blockstore * @param {ImporterOptions} options */ -const reduce = (file, block, options) => { +const reduce = (file, blockstore, options) => { /** * @type {Reducer} */ @@ -77,10 +75,10 @@ const reduce = (file, block, options) => { if (leaves.length === 1 && leaves[0].single && options.reduceSingleLeafToSelf) { const leaf = leaves[0] - if (leaf.cid.codec === 'raw' && (file.mtime !== undefined || file.mode !== undefined)) { + if (leaf.cid.code === rawCodec.code && (file.mtime !== undefined || file.mode !== undefined)) { // only one leaf node which is a buffer - we have metadata so convert it into a // UnixFS entry otherwise we'll have nowhere to store the metadata - let { data: buffer } = await block.get(leaf.cid, options) + let buffer = await blockstore.get(leaf.cid) leaf.unixfs = new UnixFS({ type: 'file', @@ -89,13 +87,31 @@ const reduce = (file, 
block, options) => { data: buffer }) - const multihash = mh.decode(leaf.cid.multihash) - buffer = new DAGNode(leaf.unixfs.marshal()).serialize() - - leaf.cid = await persist(buffer, block, { + buffer = encode(prepare({ Data: leaf.unixfs.marshal() })) + + // // TODO vmx 2021-03-26: This is what the original code does, it checks + // // the multihash of the original leaf node and uses then the same + // // hasher. i wonder if that's really needed or if we could just use + // // the hasher from `options.hasher` instead. + // const multihash = mh.decode(leaf.cid.multihash.bytes) + // let hasher + // switch multihash { + // case sha256.code { + // hasher = sha256 + // break; + // } + // //case identity.code { + // // hasher = identity + // // break; + // //} + // default: { + // throw new Error(`Unsupported hasher "${multihash}"`) + // } + // } + leaf.cid = await persist(buffer, blockstore, { ...options, - codec: 'dag-pb', - hashAlg: multihash.name, + codec: dagPb, + hasher: options.hasher, cidVersion: options.cidVersion }) leaf.size = buffer.length @@ -118,7 +134,7 @@ const reduce = (file, block, options) => { const links = leaves .filter(leaf => { - if (leaf.cid.codec === 'raw' && leaf.size) { + if (leaf.cid.code === rawCodec.code && leaf.size) { return true } @@ -129,11 +145,15 @@ const reduce = (file, block, options) => { return Boolean(leaf.unixfs && leaf.unixfs.data && leaf.unixfs.data.length) }) .map((leaf) => { - if (leaf.cid.codec === 'raw') { + if (leaf.cid.code === rawCodec.code) { // node is a leaf buffer f.addBlockSize(leaf.size) - return new DAGLink('', leaf.size, leaf.cid) + return { + Name: '', + Tsize: leaf.size, + Hash: leaf.cid + } } if (!leaf.unixfs || !leaf.unixfs.data) { @@ -144,12 +164,19 @@ const reduce = (file, block, options) => { f.addBlockSize(leaf.unixfs.data.length) } - return new DAGLink('', leaf.size, leaf.cid) + return { + Name: '', + Tsize: leaf.size, + Hash: leaf.cid + } }) - const node = new DAGNode(f.marshal(), links) - const buffer = node.serialize() - const cid = await persist(buffer, block, options) + const node = { + Data: f.marshal(), + Links: links + } + const buffer = encode(prepare(node)) + const cid = await persist(buffer, blockstore, options) return { cid, diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/file/trickle.js b/packages/ipfs-unixfs-importer/src/dag-builder/file/trickle.js index fc1c6aef..1de9c98c 100644 --- a/packages/ipfs-unixfs-importer/src/dag-builder/file/trickle.js +++ b/packages/ipfs-unixfs-importer/src/dag-builder/file/trickle.js @@ -3,7 +3,6 @@ const batch = require('it-batch') /** - * @typedef {import('cids')} CID * @typedef {import('ipfs-unixfs').UnixFS} UnixFS * @typedef {import('../../types').ImporterOptions} ImporterOptions * @typedef {import('../../types').InProgressImportResult} InProgressImportResult diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/index.js b/packages/ipfs-unixfs-importer/src/dag-builder/index.js index 5bb45d62..d52a4939 100644 --- a/packages/ipfs-unixfs-importer/src/dag-builder/index.js +++ b/packages/ipfs-unixfs-importer/src/dag-builder/index.js @@ -55,7 +55,7 @@ function contentAsAsyncIterable (content) { /** * @type {DAGBuilder} */ -async function * dagBuilder (source, block, options) { +async function * dagBuilder (source, blockstore, options) { for await (const entry of source) { if (entry.path) { if (entry.path.substring(0, 2) === './') { @@ -101,7 +101,7 @@ async function * dagBuilder (source, block, options) { content: 
chunker(chunkValidator(contentAsAsyncIterable(entry.content), options), options) } - yield () => fileBuilder(file, block, options) + yield () => fileBuilder(file, blockstore, options) } else if (entry.path) { /** @type {Directory} */ const dir = { @@ -110,7 +110,7 @@ async function * dagBuilder (source, block, options) { mode: entry.mode } - yield () => dirBuilder(dir, block, options) + yield () => dirBuilder(dir, blockstore, options) } else { throw new Error('Import candidate must have content or path or both') } diff --git a/packages/ipfs-unixfs-importer/src/dir-flat.js b/packages/ipfs-unixfs-importer/src/dir-flat.js index 0a720e4a..501478ac 100644 --- a/packages/ipfs-unixfs-importer/src/dir-flat.js +++ b/packages/ipfs-unixfs-importer/src/dir-flat.js @@ -1,9 +1,6 @@ 'use strict' -const { - DAGLink, - DAGNode -} = require('ipld-dag-pb') +const { encode, prepare } = require('@ipld/dag-pb') const { UnixFS } = require('ipfs-unixfs') const Dir = require('./dir') const persist = require('./utils/persist') @@ -12,9 +9,10 @@ const persist = require('./utils/persist') * @typedef {import('./types').ImporterOptions} ImporterOptions * @typedef {import('./types').ImportResult} ImportResult * @typedef {import('./types').InProgressImportResult} InProgressImportResult - * @typedef {import('./types').BlockAPI} BlockAPI + * @typedef {import('interface-blockstore').Blockstore} Blockstore * @typedef {import('./dir').DirProps} DirProps - * @typedef {import('cids')} CID + * @typedef {import('@ipld/dag-pb').PBNode} PBNode + * @typedef {import('@ipld/dag-pb').PBLink} PBLink */ class DirFlat extends Dir { @@ -73,7 +71,7 @@ class DirFlat extends Dir { } /** - * @param {BlockAPI} block + * @param {Blockstore} block * @returns {AsyncIterable} */ async * flush (block) { @@ -92,7 +90,11 @@ class DirFlat extends Dir { } if (child.size != null && child.cid) { - links.push(new DAGLink(children[i], child.size, child.cid)) + links.push({ + Name: children[i], + Tsize: child.size, + Hash: child.cid + }) } } @@ -102,15 +104,16 @@ class DirFlat extends Dir { mode: this.mode }) - const node = new DAGNode(unixfs.marshal(), links) - const buffer = node.serialize() + /** @type {PBNode} */ + const node = { Data: unixfs.marshal(), Links: links } + const buffer = encode(prepare(node)) const cid = await persist(buffer, block, this.options) const size = buffer.length + node.Links.reduce( /** * @param {number} acc - * @param {DAGLink} curr + * @param {PBLink} curr */ - (acc, curr) => acc + curr.Tsize, + (acc, curr) => acc + (curr.Tsize == null ? 
0 : curr.Tsize), 0) this.cid = cid diff --git a/packages/ipfs-unixfs-importer/src/dir-sharded.js b/packages/ipfs-unixfs-importer/src/dir-sharded.js index e0567838..71bc49c7 100644 --- a/packages/ipfs-unixfs-importer/src/dir-sharded.js +++ b/packages/ipfs-unixfs-importer/src/dir-sharded.js @@ -1,9 +1,6 @@ 'use strict' -const { - DAGLink, - DAGNode -} = require('ipld-dag-pb') +const { encode, prepare } = require('@ipld/dag-pb') const { UnixFS } = require('ipfs-unixfs') const Dir = require('./dir') const persist = require('./utils/persist') @@ -13,7 +10,7 @@ const { createHAMT, Bucket } = require('hamt-sharding') * @typedef {import('./types').ImporterOptions} ImporterOptions * @typedef {import('./types').ImportResult} ImportResult * @typedef {import('./types').InProgressImportResult} InProgressImportResult - * @typedef {import('./types').BlockAPI} BlockAPI + * @typedef {import('interface-blockstore').Blockstore} Blockstore */ /** @@ -72,11 +69,11 @@ class DirSharded extends Dir { } /** - * @param {BlockAPI} block + * @param {Blockstore} blockstore * @returns {AsyncIterable} */ - async * flush (block) { - for await (const entry of flush(this._bucket, block, this, this.options)) { + async * flush (blockstore) { + for await (const entry of flush(this._bucket, blockstore, this, this.options)) { yield { ...entry, path: this.path @@ -89,12 +86,12 @@ module.exports = DirSharded /** * @param {Bucket} bucket - * @param {BlockAPI} block + * @param {Blockstore} blockstore * @param {*} shardRoot * @param {ImporterOptions} options * @returns {AsyncIterable} */ -async function * flush (bucket, block, shardRoot, options) { +async function * flush (bucket, blockstore, shardRoot, options) { const children = bucket._children const links = [] let childrenSize = 0 @@ -111,7 +108,7 @@ async function * flush (bucket, block, shardRoot, options) { if (child instanceof Bucket) { let shard - for await (const subShard of await flush(child, block, null, options)) { + for await (const subShard of await flush(child, blockstore, null, options)) { shard = subShard } @@ -119,20 +116,28 @@ async function * flush (bucket, block, shardRoot, options) { throw new Error('Could not flush sharded directory, no subshard found') } - links.push(new DAGLink(labelPrefix, shard.size, shard.cid)) + links.push({ + Name: labelPrefix, + Tsize: shard.size, + Hash: shard.cid + }) childrenSize += shard.size } else if (typeof child.value.flush === 'function') { const dir = child.value let flushedDir - for await (const entry of dir.flush(block)) { + for await (const entry of dir.flush(blockstore)) { flushedDir = entry yield flushedDir } const label = labelPrefix + child.key - links.push(new DAGLink(label, flushedDir.size, flushedDir.cid)) + links.push({ + Name: label, + Tsize: flushedDir.size, + Hash: flushedDir.cid + }) childrenSize += flushedDir.size } else { @@ -145,7 +150,11 @@ async function * flush (bucket, block, shardRoot, options) { const label = labelPrefix + child.key const size = value.size - links.push(new DAGLink(label, size, value.cid)) + links.push({ + Name: label, + Tsize: size, + Hash: value.cid + }) childrenSize += size } } @@ -162,9 +171,12 @@ async function * flush (bucket, block, shardRoot, options) { mode: shardRoot && shardRoot.mode }) - const node = new DAGNode(dir.marshal(), links) - const buffer = node.serialize() - const cid = await persist(buffer, block, options) + const node = { + Data: dir.marshal(), + Links: links + } + const buffer = encode(prepare(node)) + const cid = await persist(buffer, blockstore, options) 
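Editorial aside: the directory flush code above now builds plain `{ Name, Tsize, Hash }` link objects and runs them through `prepare()`/`encode()` from @ipld/dag-pb rather than constructing `DAGLink`/`DAGNode` instances. A condensed sketch of that shape (the CID is derived inline here instead of going through the shared `persist()` helper):

```js
const dagPb = require('@ipld/dag-pb')
const { CID } = require('multiformats/cid')
const { sha256 } = require('multiformats/hashes/sha2')
const { UnixFS } = require('ipfs-unixfs')

/**
 * @param {Array<{ name: string, size: number, cid: CID }>} children
 */
async function buildDirNode (children) {
  const dir = new UnixFS({ type: 'directory' })

  const node = {
    Data: dir.marshal(),
    Links: children.map(child => ({
      Name: child.name,
      Tsize: child.size,
      Hash: child.cid
    }))
  }

  // prepare() normalises the node into the strict form encode() expects
  const buffer = dagPb.encode(dagPb.prepare(node))
  const cid = CID.createV1(dagPb.code, await sha256.digest(buffer))

  // mirror the size accounting used by flush(): serialized bytes plus children
  const size = buffer.length + children.reduce((acc, child) => acc + child.size, 0)

  return { cid, buffer, size }
}
```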
const size = buffer.length + childrenSize yield { diff --git a/packages/ipfs-unixfs-importer/src/dir.js b/packages/ipfs-unixfs-importer/src/dir.js index 072d66a6..fd8571b5 100644 --- a/packages/ipfs-unixfs-importer/src/dir.js +++ b/packages/ipfs-unixfs-importer/src/dir.js @@ -4,8 +4,8 @@ * @typedef {import('./types').ImporterOptions} ImporterOptions * @typedef {import('./types').ImportResult} ImportResult * @typedef {import('./types').InProgressImportResult} InProgressImportResult - * @typedef {import('./types').BlockAPI} BlockAPI - * @typedef {import('cids')} CID + * @typedef {import('interface-blockstore').Blockstore} Blockstore + * @typedef {import('multiformats/cid').CID} CID * @typedef {object} DirProps * @property {boolean} root * @property {boolean} dir @@ -64,10 +64,10 @@ class Dir { async * eachChildSeries () { } /** - * @param {BlockAPI} block + * @param {Blockstore} blockstore * @returns {AsyncIterable} */ - async * flush (block) { } + async * flush (blockstore) { } } module.exports = Dir diff --git a/packages/ipfs-unixfs-importer/src/index.js b/packages/ipfs-unixfs-importer/src/index.js index 8297c672..8d42d754 100644 --- a/packages/ipfs-unixfs-importer/src/index.js +++ b/packages/ipfs-unixfs-importer/src/index.js @@ -4,7 +4,7 @@ const parallelBatch = require('it-parallel-batch') const defaultOptions = require('./options') /** - * @typedef {import('./types').BlockAPI} BlockAPI + * @typedef {import('interface-blockstore').Blockstore} Blockstore * @typedef {import('./types').ImportCandidate} ImportCandidate * @typedef {import('./types').UserImporterOptions} UserImporterOptions * @typedef {import('./types').ImporterOptions} ImporterOptions @@ -23,10 +23,10 @@ const defaultOptions = require('./options') /** * @param {AsyncIterable | Iterable | ImportCandidate} source - * @param {BlockAPI} block + * @param {Blockstore} blockstore * @param {UserImporterOptions} options */ -async function * importer (source, block, options = {}) { +async function * importer (source, blockstore, options = {}) { const opts = defaultOptions(options) let dagBuilder @@ -56,7 +56,7 @@ async function * importer (source, block, options = {}) { candidates = [source] } - for await (const entry of treeBuilder(parallelBatch(dagBuilder(candidates, block, opts), opts.fileImportConcurrency), block, opts)) { + for await (const entry of treeBuilder(parallelBatch(dagBuilder(candidates, blockstore, opts), opts.fileImportConcurrency), blockstore, opts)) { yield { cid: entry.cid, path: entry.path, diff --git a/packages/ipfs-unixfs-importer/src/options.js b/packages/ipfs-unixfs-importer/src/options.js index 6e9f4a3c..43bdfb27 100644 --- a/packages/ipfs-unixfs-importer/src/options.js +++ b/packages/ipfs-unixfs-importer/src/options.js @@ -1,27 +1,22 @@ 'use strict' const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) -const multihashing = require('multihashing-async') +const { sha256 } = require('multiformats/hashes/sha2') +// @ts-ignore - no types available +const mur = require('murmurhash3js-revisited') +const uint8ArrayFromString = require('uint8arrays/from-string') /** * @param {Uint8Array} buf */ async function hamtHashFn (buf) { - const hash = await multihashing(buf, 'murmur3-128') - - // Multihashing inserts preamble of 2 bytes. Remove it. - // Also, murmur3 outputs 128 bit but, accidentally, IPFS Go's - // implementation only uses the first 64, so we must do the same - // for parity.. 
- const justHash = hash.slice(2, 10) - const length = justHash.length - const result = new Uint8Array(length) - // TODO: invert buffer because that's how Go impl does it - for (let i = 0; i < length; i++) { - result[length - i - 1] = justHash[i] - } - - return result + return uint8ArrayFromString(mur.x64.hash128(buf), 'base16') + // Murmur3 outputs 128 bit but, accidentally, IPFS Go's + // implementation only uses the first 64, so we must do the same + // for parity.. + .slice(0, 8) + // Invert buffer because that's how Go impl does it + .reverse() } /** @@ -38,7 +33,7 @@ const defaultOptions = { rawLeaves: false, onlyHash: false, reduceSingleLeafToSelf: true, - hashAlg: 'sha2-256', + hasher: sha256, leafType: 'file', // 'raw' cidVersion: 0, progress: () => () => {}, @@ -55,10 +50,8 @@ const defaultOptions = { maxChildrenPerNode: 174, layerRepeat: 4, wrapWithDirectory: false, - pin: false, recursive: false, hidden: false, - preload: false, timeout: undefined, hamtHashFn, hamtHashCode: 0x22, diff --git a/packages/ipfs-unixfs-importer/src/tree-builder.js b/packages/ipfs-unixfs-importer/src/tree-builder.js index b5c63ab8..b9321bb7 100644 --- a/packages/ipfs-unixfs-importer/src/tree-builder.js +++ b/packages/ipfs-unixfs-importer/src/tree-builder.js @@ -9,8 +9,8 @@ const toPathComponents = require('./utils/to-path-components') * @typedef {import('./types').ImportResult} ImportResult * @typedef {import('./types').InProgressImportResult} InProgressImportResult * @typedef {import('./types').ImporterOptions} ImporterOptions - * @typedef {import('./types').BlockAPI} BlockAPI - * @typedef {(source: AsyncIterable, block: BlockAPI, options: ImporterOptions) => AsyncIterable} TreeBuilder + * @typedef {import('interface-blockstore').Blockstore} Blockstore + * @typedef {(source: AsyncIterable, blockstore: Blockstore, options: ImporterOptions) => AsyncIterable} TreeBuilder */ /** @@ -65,9 +65,9 @@ async function addToTree (elem, tree, options) { /** * @param {Dir | InProgressImportResult} tree - * @param {BlockAPI} block + * @param {Blockstore} blockstore */ -async function * flushAndYield (tree, block) { +async function * flushAndYield (tree, blockstore) { if (!(tree instanceof Dir)) { if (tree && tree.unixfs && tree.unixfs.isDirectory()) { yield tree @@ -76,7 +76,7 @@ async function * flushAndYield (tree, block) { return } - yield * tree.flush(block) + yield * tree.flush(blockstore) } /** diff --git a/packages/ipfs-unixfs-importer/src/types.d.ts b/packages/ipfs-unixfs-importer/src/types.d.ts index eacedca3..d09cf955 100644 --- a/packages/ipfs-unixfs-importer/src/types.d.ts +++ b/packages/ipfs-unixfs-importer/src/types.d.ts @@ -1,8 +1,8 @@ import { UnixFS, Mtime } from 'ipfs-unixfs' -import CID, { CIDVersion } from 'cids' -import { HashName } from 'multihashes' -import Block from 'ipld-block' -import { CodecName } from 'multicodec' +import { CID, CIDVersion } from 'multiformats/cid' +import { MultihashHasher } from 'multiformats/hashes/interface' +import { BlockCodec } from 'multiformats/codecs/interface' +import { Blockstore } from 'interface-blockstore' interface ImportCandidate { path?: string @@ -36,24 +36,24 @@ interface InProgressImportResult extends ImportResult { } type ChunkerType = 'fixed' | 'rabin' -type ProgressHandler = (chunkSize: number, path?: string) => void -type HamtHashFn = (value: Uint8Array) => Promise -type Chunker = (source: AsyncIterable, options: ImporterOptions) => AsyncIterable -type DAGBuilder = (source: AsyncIterable | Iterable, block: BlockAPI, options: ImporterOptions) => 
AsyncIterable<() => Promise> -type TreeBuilder = (source: AsyncIterable, block: BlockAPI, options: ImporterOptions) => AsyncIterable -type BufferImporter = (file: File, block: BlockAPI, options: ImporterOptions) => AsyncIterable<() => Promise> -type ChunkValidator = (source: AsyncIterable, options: ImporterOptions) => AsyncIterable -type UnixFSV1DagBuilder = (item: T, block: BlockAPI, options: ImporterOptions) => Promise -type Reducer = (leaves: InProgressImportResult[]) => Promise +interface ProgressHandler { (chunkSize: number, path?: string): void } +interface HamtHashFn { (value: Uint8Array): Promise } +interface Chunker { (source: AsyncIterable, options: ImporterOptions): AsyncIterable } +interface DAGBuilder { (source: AsyncIterable | Iterable, blockstore: Blockstore, options: ImporterOptions): AsyncIterable<() => Promise> } +interface TreeBuilder { (source: AsyncIterable, blockstore: Blockstore, options: ImporterOptions): AsyncIterable } +interface BufferImporter { (file: File, blockstore: Blockstore, options: ImporterOptions): AsyncIterable<() => Promise> } +interface ChunkValidator { (source: AsyncIterable, options: ImporterOptions): AsyncIterable } +interface UnixFSV1DagBuilder { (item: T, blockstore: Blockstore, options: ImporterOptions): Promise } +interface Reducer { (leaves: InProgressImportResult[]): Promise } -type FileDAGBuilder = (source: AsyncIterable | Iterable, reducer: Reducer, options: ImporterOptions) => Promise +interface FileDAGBuilder { (source: AsyncIterable | Iterable, reducer: Reducer, options: ImporterOptions): Promise } interface UserImporterOptions { strategy?: 'balanced' | 'flat' | 'trickle' rawLeaves?: boolean onlyHash?: boolean reduceSingleLeafToSelf?: boolean - hashAlg?: HashName + hasher?: MultihashHasher leafType?: 'file' | 'raw' cidVersion?: CIDVersion progress?: ProgressHandler @@ -68,10 +68,8 @@ interface UserImporterOptions { maxChildrenPerNode?: number layerRepeat?: number wrapWithDirectory?: boolean - pin?: boolean recursive?: boolean hidden?: boolean - preload?: boolean timeout?: number hamtHashFn?: HamtHashFn hamtBucketBits?: number @@ -88,7 +86,7 @@ interface ImporterOptions { rawLeaves: boolean onlyHash: boolean reduceSingleLeafToSelf: boolean - hashAlg: HashName + hasher: MultihashHasher leafType: 'file' | 'raw' cidVersion: CIDVersion progress: ProgressHandler @@ -103,10 +101,8 @@ interface ImporterOptions { maxChildrenPerNode: number layerRepeat: number wrapWithDirectory: boolean - pin: boolean recursive: boolean hidden: boolean - preload: boolean timeout?: number hamtHashFn: HamtHashFn hamtBucketBits: number @@ -119,45 +115,21 @@ interface ImporterOptions { } export interface TrickleDagNode { - children: InProgressImportResult[], - depth: number, - maxDepth: number, - maxChildren: number, - data?: InProgressImportResult[], + children: InProgressImportResult[] + depth: number + maxDepth: number + maxChildren: number + data?: InProgressImportResult[] parent?: TrickleDagNode - cid?: CID, - size?: number, + cid?: CID + size?: number unixfs?: UnixFS } export interface PersistOptions { - codec?: string + codec?: BlockCodec + hasher: MultihashHasher cidVersion: CIDVersion - hashAlg: HashName onlyHash: boolean - preload?: boolean - timeout?: number signal?: AbortSignal } - -// TODO: remove this and get from core-ipfs-types -export interface BlockAPI { - get: (cid: CID | string | Uint8Array, options?: BlockOptions) => Promise - put: (block: Block | Uint8Array, options?: PutOptions) => Promise -} - -// TODO: remove this and get from core-ipfs-types 
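Editorial aside on the `options.js` change a little further up: the murmur3 HAMT hash is now computed with murmurhash3js-revisited instead of multihashing-async, keeping only the first 8 bytes and reversing them for parity with go-ipfs. A usage sketch that mirrors that function:

```js
// @ts-ignore - murmurhash3js-revisited ships no types
const mur = require('murmurhash3js-revisited')
const uint8ArrayFromString = require('uint8arrays/from-string')

/**
 * Mirrors the new default hamtHashFn: murmur3 x64 128-bit, truncated to
 * 8 bytes and reversed so bucket placement matches the go-ipfs sharder.
 * @param {Uint8Array} buf
 */
async function hamtHashFn (buf) {
  return uint8ArrayFromString(mur.x64.hash128(buf), 'base16')
    .slice(0, 8)
    .reverse()
}

// e.g. hashing a directory entry name to pick its HAMT bucket
hamtHashFn(uint8ArrayFromString('file.txt')).then(digest => {
  console.log(digest) // Uint8Array of length 8
})
```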
-export interface BlockOptions { - signal?: AbortSignal - timeout?: number - preload?: boolean -} - -// TODO: remove this and get from core-ipfs-types -export interface PutOptions extends BlockOptions { - cid?: CID - format?: CodecName - mhtype?: HashName - version?: CIDVersion - pin?: boolean -} diff --git a/packages/ipfs-unixfs-importer/src/utils/persist.js b/packages/ipfs-unixfs-importer/src/utils/persist.js index a4aba3be..7e1f4300 100644 --- a/packages/ipfs-unixfs-importer/src/utils/persist.js +++ b/packages/ipfs-unixfs-importer/src/utils/persist.js @@ -1,41 +1,37 @@ 'use strict' -const mh = require('multihashing-async') -const CID = require('cids') +const { CID } = require('multiformats/cid') +const dagPb = require('@ipld/dag-pb') +const { sha256 } = require('multiformats/hashes/sha2') /** * @param {Uint8Array} buffer - * @param {import('../types').BlockAPI} block + * @param {import('interface-blockstore').Blockstore} blockstore * @param {import('../types').PersistOptions} options */ -const persist = async (buffer, block, options) => { +const persist = async (buffer, blockstore, options) => { if (!options.codec) { - options.codec = 'dag-pb' + options.codec = dagPb } - if (!options.cidVersion) { - options.cidVersion = 0 + if (!options.hasher) { + options.hasher = sha256 } - if (!options.hashAlg) { - options.hashAlg = 'sha2-256' + if (options.cidVersion === undefined) { + options.cidVersion = 1 } - if (options.hashAlg !== 'sha2-256') { + if (options.codec === dagPb && options.hasher !== sha256) { options.cidVersion = 1 } - const multihash = await mh(buffer, options.hashAlg) - const cid = new CID(options.cidVersion, options.codec, multihash) + const multihash = await options.hasher.digest(buffer) + const cid = CID.create(options.cidVersion, options.codec.code, multihash) if (!options.onlyHash) { - // @ts-ignore block api takes uint8arrays or blocks but is missing from typedefs - await block.put(buffer, { - // @ts-ignore pin option is missing from block api typedefs - pin: options.pin, - preload: options.preload, - timeout: options.timeout, - cid + await blockstore.put(cid, buffer, { + signal: options.signal }) } diff --git a/packages/ipfs-unixfs-importer/test/benchmark.spec.js b/packages/ipfs-unixfs-importer/test/benchmark.spec.js index 73079e54..b787798d 100644 --- a/packages/ipfs-unixfs-importer/test/benchmark.spec.js +++ b/packages/ipfs-unixfs-importer/test/benchmark.spec.js @@ -3,12 +3,7 @@ const { importer } = require('../src') -// @ts-ignore -const IPLD = require('ipld') -// @ts-ignore -const inMemory = require('ipld-in-memory') const bufferStream = require('it-buffer-stream') -const all = require('it-all') const blockApi = require('./helpers/block') const REPEATS = 10 @@ -18,15 +13,7 @@ const CHUNK_SIZE = 65536 describe.skip('benchmark', function () { this.timeout(30 * 1000) - /** @type {import('ipld')} */ - let ipld - /** @type {import('../src').BlockAPI} */ - let block - - before(async () => { - ipld = await inMemory(IPLD) - block = blockApi(ipld) - }) + const block = blockApi() /** @type {number[]} */ const times = [] @@ -67,7 +54,7 @@ describe.skip('benchmark', function () { const buf = new Uint8Array(CHUNK_SIZE).fill(0) - await all(importer([{ + await importer([{ path: '200Bytes.txt', content: bufferStream(size, { chunkSize: CHUNK_SIZE, @@ -75,7 +62,7 @@ describe.skip('benchmark', function () { return buf } }) - }], block, options)) + }], block, options) }) } }) diff --git a/packages/ipfs-unixfs-importer/test/builder-balanced.spec.js 
b/packages/ipfs-unixfs-importer/test/builder-balanced.spec.js index 91d830b2..84c072cf 100644 --- a/packages/ipfs-unixfs-importer/test/builder-balanced.spec.js +++ b/packages/ipfs-unixfs-importer/test/builder-balanced.spec.js @@ -3,7 +3,7 @@ const { expect } = require('aegir/utils/chai') const builder = require('../src/dag-builder/file/balanced') -const CID = require('cids') +const { CID } = require('multiformats/cid') const defaultOptions = require('../src/options') /** @@ -31,7 +31,7 @@ const options = { describe('builder: balanced', () => { it('reduces one value into itself', async () => { const source = [{ - cid: new CID('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'), + cid: CID.parse('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'), size: 0 }] @@ -44,13 +44,13 @@ describe('builder: balanced', () => { it('reduces 3 values into parent', async () => { const source = [{ - cid: new CID('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'), + cid: CID.parse('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'), size: 0 }, { - cid: new CID('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'), + cid: CID.parse('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'), size: 0 }, { - cid: new CID('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'), + cid: CID.parse('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'), size: 0 }] diff --git a/packages/ipfs-unixfs-importer/test/builder-only-hash.spec.js b/packages/ipfs-unixfs-importer/test/builder-only-hash.spec.js index 42e22930..fff31ffc 100644 --- a/packages/ipfs-unixfs-importer/test/builder-only-hash.spec.js +++ b/packages/ipfs-unixfs-importer/test/builder-only-hash.spec.js @@ -2,9 +2,6 @@ 'use strict' const { expect } = require('aegir/utils/chai') -const IPLD = require('ipld') -// @ts-ignore -const inMemory = require('ipld-in-memory') const builder = require('../src/dag-builder') const all = require('it-all') const blockApi = require('./helpers/block') @@ -12,15 +9,7 @@ const defaultOptions = require('../src/options') const asAsyncIterable = require('./helpers/as-async-iterable') describe('builder: onlyHash', () => { - /** @type {IPLD} */ - let ipld - /** @type {import('../src/types').BlockAPI} */ - let block - - before(async () => { - ipld = await inMemory(IPLD) - block = blockApi(ipld) - }) + const block = blockApi() it('will only chunk and hash if passed an "onlyHash" option', async () => { const nodes = await all(builder([{ @@ -34,7 +23,7 @@ describe('builder: onlyHash', () => { expect(nodes.length).to.equal(1) try { - await ipld.get((await nodes[0]()).cid) + await block.get((await nodes[0]()).cid) throw new Error('Should have errored') } catch (err) { diff --git a/packages/ipfs-unixfs-importer/test/builder.spec.js b/packages/ipfs-unixfs-importer/test/builder.spec.js index 49a92d87..a77e742a 100644 --- a/packages/ipfs-unixfs-importer/test/builder.spec.js +++ b/packages/ipfs-unixfs-importer/test/builder.spec.js @@ -2,11 +2,9 @@ 'use strict' const { expect } = require('aegir/utils/chai') -const mh = require('multihashing-async').multihash -// @ts-ignore -const IPLD = require('ipld') -// @ts-ignore -const inMemory = require('ipld-in-memory') +const mh = require('multiformats/hashes/digest') +const { sha256, sha512 } = require('multiformats/hashes/sha2') +const { decode } = require('@ipld/dag-pb') const { UnixFS } = require('ipfs-unixfs') const builder = require('../src/dag-builder') const first = require('it-first') @@ -16,21 +14,13 @@ const defaultOptions = require('../src/options') const asAsyncIterable = require('./helpers/as-async-iterable') 
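Editorial aside: the onlyHash test above relies on the importer producing CIDs without ever writing to the blockstore, which is why the follow-up `block.get()` is expected to throw. A short usage sketch, assuming `blockstore` is any interface-blockstore implementation:

```js
const { importer } = require('ipfs-unixfs-importer')

async function hashOnly (blockstore) {
  // onlyHash still chunks and hashes the content, so CIDs are produced,
  // but no blocks are written - a later blockstore.get() will fail
  for await (const entry of importer([{
    path: 'foo.txt',
    content: new TextEncoder().encode('hello world')
  }], blockstore, { onlyHash: true })) {
    console.log(entry.cid.toString())
  }
}
```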
describe('builder', () => { - /** @type {import('ipld')} */ - let ipld - /** @type {import('../src').BlockAPI} */ - let block - - before(async () => { - ipld = await inMemory(IPLD) - block = blockApi(ipld) - }) + const block = blockApi() - const testMultihashes = Object.keys(mh.names).slice(1, 10) + const testMultihashes = [sha256, sha512] it('allows multihash hash algorithm to be specified', async () => { for (let i = 0; i < testMultihashes.length; i++) { - const hashAlg = testMultihashes[i] + const hasher = testMultihashes[i] const content = uint8ArrayFromString(String(Math.random() + Date.now())) const inputFile = { path: content + '.txt', @@ -39,8 +29,7 @@ describe('builder', () => { const result = await first(builder([inputFile], block, { ...defaultOptions(), - // @ts-ignore thinks these aren't valid hash alg names - hashAlg + hasher })) if (!result) { @@ -48,15 +37,17 @@ describe('builder', () => { } const imported = await result() - expect(imported).to.exist() - // Verify multihash has been encoded using hashAlg - expect(mh.decode(imported.cid.multihash).name).to.equal(hashAlg) - - // Fetch using hashAlg encoded multihash - const node = await ipld.get(imported.cid) + // Verify multihash has been encoded using hasher + expect(mh.decode(imported.cid.multihash.bytes).code).to.equal(hasher.code) + // Fetch using hasher encoded multihash + const importedBlock = await block.get(imported.cid) + const node = decode(importedBlock) + if (!node.Data) { + throw new Error('PBNode Data undefined') + } const fetchedContent = UnixFS.unmarshal(node.Data).data expect(fetchedContent).to.deep.equal(content) } @@ -66,7 +57,7 @@ describe('builder', () => { this.timeout(30000) for (let i = 0; i < testMultihashes.length; i++) { - const hashAlg = testMultihashes[i] + const hasher = testMultihashes[i] const content = String(Math.random() + Date.now()) const inputFile = { path: content + '.txt', @@ -76,8 +67,7 @@ describe('builder', () => { const result = await first(builder([inputFile], block, { ...defaultOptions(), - // @ts-ignore thinks these aren't valid hash alg names - hashAlg + hasher })) if (!result) { @@ -87,21 +77,20 @@ describe('builder', () => { const imported = await result() expect(imported).to.exist() - expect(mh.decode(imported.cid.multihash).name).to.equal(hashAlg) + expect(mh.decode(imported.cid.multihash.bytes).code).to.equal(hasher.code) } }) it('allows multihash hash algorithm to be specified for a directory', async () => { for (let i = 0; i < testMultihashes.length; i++) { - const hashAlg = testMultihashes[i] + const hasher = testMultihashes[i] const inputFile = { path: `${String(Math.random() + Date.now())}-dir` } const result = await first(builder([{ ...inputFile }], block, { ...defaultOptions(), - // @ts-ignore thinks these aren't valid hash alg names - hashAlg + hasher })) if (!result) { @@ -110,11 +99,15 @@ describe('builder', () => { const imported = await result() - expect(mh.decode(imported.cid.multihash).name).to.equal(hashAlg) + expect(mh.decode(imported.cid.multihash.bytes).code).to.equal(hasher.code) - // Fetch using hashAlg encoded multihash - const node = await ipld.get(imported.cid) + // Fetch using hasher encoded multihash + const importedBlock = await block.get(imported.cid) + const node = decode(importedBlock) + if (!node.Data) { + throw new Error('PBNode Data undefined') + } const meta = UnixFS.unmarshal(node.Data) expect(meta.type).to.equal('directory') } diff --git a/packages/ipfs-unixfs-importer/test/chunker-custom.spec.js 
b/packages/ipfs-unixfs-importer/test/chunker-custom.spec.js index ebb7a796..7844a1d1 100644 --- a/packages/ipfs-unixfs-importer/test/chunker-custom.spec.js +++ b/packages/ipfs-unixfs-importer/test/chunker-custom.spec.js @@ -3,11 +3,9 @@ const { importer } = require('../src') const { expect } = require('aegir/utils/chai') -// @ts-ignore -const IPLD = require('ipld') -// @ts-ignore -const inMemory = require('ipld-in-memory') -const mc = require('multicodec') +const rawCodec = require('multiformats/codecs/raw') +const { sha256 } = require('multiformats/hashes/sha2') +const Block = require('multiformats/block') const blockApi = require('./helpers/block') const uint8ArrayFromString = require('uint8arrays/from-string') const { UnixFS } = require('ipfs-unixfs') @@ -18,10 +16,7 @@ const iter = async function * () { } describe('custom chunker', function () { - /** @type {import('ipld')} */ - let ipld - /** @type {import('../src').BlockAPI} */ - let block + const block = blockApi() /** * @param {AsyncIterable} content @@ -32,9 +27,14 @@ describe('custom chunker', function () { * @param {Uint8Array} buf */ const put = async (buf) => { - const cid = await ipld.put(buf, mc.RAW) + const encodedBlock = await Block.encode({ + value: buf, + codec: rawCodec, + hasher: sha256 + }) + return { - cid, + cid: encodedBlock.cid, size: buf.length, unixfs: new UnixFS() } @@ -54,11 +54,6 @@ describe('custom chunker', function () { } } - before(async () => { - ipld = await inMemory(IPLD) - block = blockApi(ipld) - }) - it('keeps custom chunking', async () => { const content = iter() for await (const part of importer([{ path: 'test', content }], block, { diff --git a/packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.js b/packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.js index 9e67f52e..1271ed8f 100644 --- a/packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.js +++ b/packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.js @@ -4,10 +4,6 @@ const { importer } = require('../src') const { expect } = require('aegir/utils/chai') -// @ts-ignore -const IPLD = require('ipld') -// @ts-ignore -const inMemory = require('ipld-in-memory') const randomByteStream = require('./helpers/finite-pseudorandom-byte-stream') const first = require('it-first') const blockApi = require('./helpers/block') @@ -39,15 +35,7 @@ strategies.forEach(strategy => { } describe('go-ipfs interop using importer:' + strategy, () => { - /** @type {import('ipld')} */ - let ipld - /** @type {import('../src').BlockAPI} */ - let block - - before(async () => { - ipld = await inMemory(IPLD) - block = blockApi(ipld) - }) + const block = blockApi() it('yields the same tree as go-ipfs', async function () { this.timeout(100 * 1000) diff --git a/packages/ipfs-unixfs-importer/test/helpers/block.js b/packages/ipfs-unixfs-importer/test/helpers/block.js index 330d8a14..0703d2c3 100644 --- a/packages/ipfs-unixfs-importer/test/helpers/block.js +++ b/packages/ipfs-unixfs-importer/test/helpers/block.js @@ -1,64 +1,50 @@ 'use strict' -const { - DAGNode, - util -} = require('ipld-dag-pb') -const multicodec = require('multicodec') -const mh = require('multihashing-async').multihash -const CID = require('cids') -const Block = require('ipld-block') +const errCode = require('err-code') +const { BlockstoreAdapter } = require('interface-blockstore') +const { base58btc } = require('multiformats/bases/base58') /** - * @param {import('ipld')} ipld + * @typedef {import('multiformats/cid').CID} CID */ -function createBlockApi (ipld) { 
- // make ipld behave like the block api, some tests need to pull - // data from ipld so can't use a simple in-memory cid->block map - /** @type {import('../../src/types').BlockAPI} */ - const BlockApi = { - put: async (buf, options) => { - if (!options || !options.cid) { - throw new Error('No cid passed') - } - - const cid = new CID(options.cid) - - const multihash = mh.decode(cid.multihash) - - if (Block.isBlock(buf)) { - buf = buf.data - } - /** @type {any} */ - let obj = buf +function createBlockApi () { + class MockBlockstore extends BlockstoreAdapter { + constructor () { + super() - if (cid.codec === 'dag-pb') { - obj = util.deserialize(buf) - } - - await ipld.put(obj, cid.codec === 'dag-pb' ? multicodec.DAG_PB : multicodec.RAW, { - cidVersion: cid.version, - hashAlg: multihash.code - }) + /** @type {{[key: string]: Uint8Array}} */ + this._blocks = {} + } - return new Block(buf, cid) - }, - get: async (cid, options) => { - cid = new CID(cid) + /** + * @param {CID} cid + * @param {Uint8Array} block + * @param {any} [options] + */ + async put (cid, block, options = {}) { + this._blocks[base58btc.encode(cid.multihash.bytes)] = block + } - /** @type {Uint8Array} */ - let buf = await ipld.get(cid, options) + /** + * @param {CID} cid + * @param {any} [options] + */ + async get (cid, options = {}) { + const bytes = this._blocks[base58btc.encode(cid.multihash.bytes)] - if (buf instanceof DAGNode) { - buf = buf.serialize() + if (bytes === undefined) { + throw errCode(new Error(`Could not find data for CID '${cid}'`), 'ERR_NOT_FOUND') } - return new Block(buf, cid) + return bytes } } - return BlockApi + /** @type {import('interface-blockstore').Blockstore} */ + const bs = new MockBlockstore() + + return bs } module.exports = createBlockApi diff --git a/packages/ipfs-unixfs/.aegir.js b/packages/ipfs-unixfs/.aegir.js new file mode 100644 index 00000000..7297ccbd --- /dev/null +++ b/packages/ipfs-unixfs/.aegir.js @@ -0,0 +1,8 @@ +'use strict' + +/** @type {import('aegir').PartialOptions} */ +module.exports = { + build: { + bundlesizeMax: '11KB' + } +} diff --git a/packages/ipfs-unixfs/package.json b/packages/ipfs-unixfs/package.json index f46848aa..40ad2bac 100644 --- a/packages/ipfs-unixfs/package.json +++ b/packages/ipfs-unixfs/package.json @@ -15,7 +15,7 @@ "test": "aegir test", "build": "aegir build", "clean": "rimraf ./dist", - "lint": "aegir ts --check && aegir lint", + "lint": "aegir ts -p check && aegir lint", "coverage": "nyc -s aegir test -t node && nyc report --reporter=html", "depcheck": "aegir dep-check -i mkdirp -i @types/mocha -i nyc -i npm-run-all -i copy -i util" }, @@ -37,7 +37,7 @@ "homepage": "https://github.com/ipfs/js-ipfs-unixfs#readme", "devDependencies": { "@types/mocha": "^8.2.1", - "aegir": "^33.1.0", + "aegir": "^34.0.0", "copy": "^0.3.2", "mkdirp": "^1.0.4", "npm-run-all": "^4.1.5", @@ -55,6 +55,9 @@ "dist" ], "eslintConfig": { - "extends": "ipfs" + "extends": "ipfs", + "ignorePatterns": [ + "src/unixfs.d.ts" + ] } }
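The test changes above swap the cids class for the multiformats/cid implementation: construction from a string becomes CID.parse, construction from bytes becomes CID.decode, and the CID.isCID type check becomes CID.asCID, which returns the CID or null. A minimal sketch of those calls, reusing the CID string from the balanced-builder tests; it is illustrative only and not part of the patch:

'use strict'

// Sketch of the CID API this patch migrates to; not part of the diff itself.
const { CID } = require('multiformats/cid')

// CID.parse replaces `new CID('Qm...')` for string input
const cid = CID.parse('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn')

// CID.decode replaces `new CID(bytes)` for binary input
const fromBytes = CID.decode(cid.bytes)

// CID.asCID replaces CID.isCID: it returns a CID for CID-like values, or null otherwise
console.log(CID.asCID(cid) !== null, fromBytes.equals(cid)) // -> true true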
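builder.spec.js now exercises hasher objects rather than hash algorithm names: the sha2 hashers from multiformats produce a multihash digest, and multiformats/hashes/digest decodes raw multihash bytes back into a digest whose code can be compared with the hasher's. A rough sketch of that round trip, assuming only the multiformats and uint8arrays packages are available:

'use strict'

// Rough sketch of the hasher/digest round trip the builder tests rely on; not part of the diff.
const { sha256, sha512 } = require('multiformats/hashes/sha2')
const Digest = require('multiformats/hashes/digest')
const uint8ArrayFromString = require('uint8arrays/from-string')

async function main () {
  const bytes = uint8ArrayFromString('some file content')

  for (const hasher of [sha256, sha512]) {
    // hasher.digest returns a MultihashDigest with .code, .digest and .bytes
    const digest = await hasher.digest(bytes)

    // Digest.decode re-parses raw multihash bytes, as the tests do with cid.multihash.bytes
    const decoded = Digest.decode(digest.bytes)

    console.log(decoded.code === hasher.code) // -> true for sha2-256 (0x12) and sha2-512 (0x13)
  }
}

main().catch(console.error)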
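The custom-chunker test replaces ipld.put(buf, mc.RAW) with multiformats/block, which encodes the bytes with the raw codec, hashes them and derives the CID in a single call. The helper below is a hedged approximation of that put function; the in-memory Map stands in for whatever blockstore the surrounding test uses and is purely illustrative:

'use strict'

// Approximation of the custom chunker's `put` helper; the Map is a stand-in blockstore.
const Block = require('multiformats/block')
const rawCodec = require('multiformats/codecs/raw')
const { sha256 } = require('multiformats/hashes/sha2')
const uint8ArrayFromString = require('uint8arrays/from-string')

/**
 * @param {Uint8Array} buf
 * @param {Map<string, Uint8Array>} blocks
 */
async function putRaw (buf, blocks) {
  // Block.encode hashes the value and derives a CIDv1 using the raw codec
  const block = await Block.encode({ value: buf, codec: rawCodec, hasher: sha256 })

  // keep the bytes addressable by CID so a later get(cid) can return them
  blocks.set(block.cid.toString(), block.bytes)

  return { cid: block.cid, size: buf.length }
}

putRaw(uint8ArrayFromString('leaf data'), new Map())
  .then(({ cid, size }) => console.log(cid.toString(), size))
  .catch(console.error)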
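Finally, the rewritten test helper stores blocks in a plain object keyed by the base58btc-encoded multihash and throws ERR_NOT_FOUND for unknown CIDs, so it satisfies the interface-blockstore contract the importer and exporter now expect. A hedged usage sketch follows; it assumes it runs from the importer's test directory so the helper path resolves, and the file name and content are made up:

'use strict'

// Usage sketch for the in-memory blockstore helper; path, file name and content are illustrative.
const { importer } = require('ipfs-unixfs-importer')
const uint8ArrayFromString = require('uint8arrays/from-string')
const createBlockApi = require('./helpers/block') // assumes this runs next to the test helpers

async function main () {
  const blockstore = createBlockApi()

  for await (const entry of importer([{
    path: 'hello.txt',
    content: [uint8ArrayFromString('hello world')]
  }], blockstore)) {
    // each imported entry reports the CID its block was written under
    console.log(entry.path, entry.cid.toString())

    // the helper's get() returns the raw bytes, or throws ERR_NOT_FOUND for unknown CIDs
    const bytes = await blockstore.get(entry.cid)
    console.log(bytes.byteLength)
  }
}

main().catch(console.error)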