diff --git a/package.json b/package.json index 56deda257e..4b68864265 100644 --- a/package.json +++ b/package.json @@ -250,5 +250,8 @@ "Jade Meskill ", "Jacob Karlsson ", "noah the goodra " - ] + ], + "dependencies": { + "@mapbox/node-pre-gyp": "^1.0.0" + } } diff --git a/packages/interface-ipfs-core/package.json b/packages/interface-ipfs-core/package.json index 0fce3d4254..93b09f8275 100644 --- a/packages/interface-ipfs-core/package.json +++ b/packages/interface-ipfs-core/package.json @@ -39,8 +39,8 @@ "delay": "^4.4.0", "dirty-chai": "^2.0.1", "err-code": "^2.0.3", - "ipfs-unixfs": "^2.0.3", - "ipfs-unixfs-importer": "^5.0.0", + "ipfs-unixfs": "https://gitpkg.now.sh/ipfs/js-ipfs-unixfs/packages/ipfs-unixfs?js-dag-pb", + "ipfs-unixfs-importer": "https://gitpkg.now.sh/ipfs/js-ipfs-unixfs/packages/ipfs-unixfs-importer?js-dag-pb", "ipfs-utils": "^6.0.1", "ipld-block": "^0.11.0", "ipld-dag-cbor": "^0.17.0", diff --git a/packages/ipfs-cli/package.json b/packages/ipfs-cli/package.json index a1f031dc38..6ce98bd417 100644 --- a/packages/ipfs-cli/package.json +++ b/packages/ipfs-cli/package.json @@ -20,7 +20,7 @@ }, "scripts": { "lint": "aegir lint", - "prepare": "aegir build --no-bundle", + "prepare": "aegir build --no-bundle --no-types", "test": "npm run test:node", "test:node": "aegir test -t node", "coverage": "nyc --reporter=text --reporter=lcov npm run test:node", diff --git a/packages/ipfs-client/package.json b/packages/ipfs-client/package.json index b61b9b4fcf..ecd41fe82e 100644 --- a/packages/ipfs-client/package.json +++ b/packages/ipfs-client/package.json @@ -32,7 +32,7 @@ "scripts": { "test": "aegir test", "lint": "aegir lint", - "prepare": "aegir build --no-bundle", + "prepare": "aegir build --no-bundle --no-types", "prepublishOnly": "aegir build", "coverage": "npx nyc -r html npm run test:node -- --bail", "clean": "rimraf ./dist", diff --git a/packages/ipfs-core-utils/package.json b/packages/ipfs-core-utils/package.json index 65b28cf360..2272996f90 100644 --- 
a/packages/ipfs-core-utils/package.json +++ b/packages/ipfs-core-utils/package.json @@ -30,12 +30,13 @@ "test:electron-renderer": "aegir test -t electron-renderer", "test:node": "aegir test -t node", "lint": "aegir lint", - "prepare": "aegir build --no-bundle", + "prepare": "aegir build --no-bundle --no-types", "clean": "rimraf ./dist", "dep-check": "aegir dep-check -i rimraf -i ipfs-core-types" }, "license": "MIT", "dependencies": { + "multiformats": "^4.0.0", "any-signal": "^2.1.2", "blob-to-it": "^1.0.1", "browser-readablestream-to-it": "^1.0.1", diff --git a/packages/ipfs-core-utils/src/as-legacy-cid.js b/packages/ipfs-core-utils/src/as-legacy-cid.js new file mode 100644 index 0000000000..8efc0ed4aa --- /dev/null +++ b/packages/ipfs-core-utils/src/as-legacy-cid.js @@ -0,0 +1,44 @@ +'use strict' + +const legacyCID = require('cids') +// @ts-ignore +const CID = require('multiformats/cid') + +/** + * Makes sure that an object only contains js-cid style CIDs. + * + * It traverses the object recursively and changes all instances of a CID from + * a js-multiforamts style CID to a js-cid style CID (js-cid style CIDs stay + * as they are). You can also pass in a CID directly. + * + * Once js-cid is no longer used in the code base, this utility function will + * no longer be needed. + * + * @param {any} obj - The object to do the transformation on + */ +const asLegacyCid = (obj) => { + if (legacyCID.isCID(obj)) { + return obj + } + + // NOTE vmx 2021-02-22: I have no idea why TypeScript doesn't pick this up + // correctly => ignore it for now, deal with it later. 
+ // @ts-ignore + const newCID = CID.asCID(obj) + if (newCID) { + const { version, code, multihash: { bytes } } = newCID + const { buffer, byteOffset, byteLength } = bytes + const multihash = Buffer.from(buffer, byteOffset, byteLength) + return new legacyCID(version, code, multihash) + } + + if (obj && typeof obj === 'object') { + for (const [key, value] of Object.entries(obj)) { + obj[key] = asLegacyCid(value) + } + } + + return obj +} + +module.exports = asLegacyCid diff --git a/packages/ipfs-core/package.json b/packages/ipfs-core/package.json index e3c36de3d4..9824553993 100644 --- a/packages/ipfs-core/package.json +++ b/packages/ipfs-core/package.json @@ -39,7 +39,7 @@ }, "scripts": { "lint": "aegir lint", - "prepare": "aegir build --no-bundle", + "prepare": "aegir build --no-bundle --no-types", "prepublishOnly": "aegir build", "test": "aegir test", "test:node": "aegir test -t node", @@ -53,6 +53,7 @@ "dep-check": "aegir dep-check -i interface-ipfs-core -i ipfs-core-types -i abort-controller" }, "dependencies": { + "@ipld/dag-pb": "0.0.1", "abort-controller": "^3.0.0", "array-shuffle": "^2.0.0", "bignumber.js": "^9.0.0", @@ -73,9 +74,9 @@ "ipfs-core-types": "^0.3.0", "ipfs-core-utils": "^0.7.1", "ipfs-repo": "^8.0.0", - "ipfs-unixfs": "^2.0.3", - "ipfs-unixfs-exporter": "^3.0.4", - "ipfs-unixfs-importer": "^5.0.0", + "ipfs-unixfs": "https://gitpkg.now.sh/ipfs/js-ipfs-unixfs/packages/ipfs-unixfs?js-dag-pb", + "ipfs-unixfs-exporter": "https://gitpkg.now.sh/ipfs/js-ipfs-unixfs/packages/ipfs-unixfs-exporter?js-dag-pb", + "ipfs-unixfs-importer": "https://gitpkg.now.sh/ipfs/js-ipfs-unixfs/packages/ipfs-unixfs-importer?js-dag-pb", "ipfs-utils": "^6.0.1", "ipld": "^0.28.0", "ipld-block": "^0.11.0", @@ -109,6 +110,7 @@ "multiaddr-to-uri": "^6.0.0", "multibase": "^4.0.0", "multicodec": "^3.0.1", + "multiformats": "^4.0.0", "multihashing-async": "^2.0.1", "native-abort-controller": "^1.0.3", "p-queue": "^6.6.1", diff --git 
a/packages/ipfs-core/src/components/add-all/index.js b/packages/ipfs-core/src/components/add-all/index.js index 9de540a7d6..752882c560 100644 --- a/packages/ipfs-core/src/components/add-all/index.js +++ b/packages/ipfs-core/src/components/add-all/index.js @@ -6,6 +6,7 @@ const { parseChunkerString } = require('./utils') const { pipe } = require('it-pipe') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) +const asLegacyCid = require('ipfs-core-utils/src/as-legacy-cid') /** * @typedef {Object} Context @@ -97,7 +98,9 @@ module.exports = ({ block, gcLock, preload, pin, options }) => { // do not keep file totals around forever delete totals[added.path] - yield added + const legacyAdded = added + legacyAdded.cid = asLegacyCid(added.cid) + yield legacyAdded } } finally { releaseLock() diff --git a/packages/ipfs-core/src/components/add.js b/packages/ipfs-core/src/components/add.js index 15fe4ca229..6cb1ca19d3 100644 --- a/packages/ipfs-core/src/components/add.js +++ b/packages/ipfs-core/src/components/add.js @@ -1,6 +1,7 @@ 'use strict' const last = require('it-last') +const asLegacyCid = require('ipfs-core-utils/src/as-legacy-cid') /** * @typedef {Object} Context @@ -27,7 +28,10 @@ module.exports = ({ addAll }) => { throw Error('Failed to add a file, if you see this please report a bug') } - return result + let legacyResult = result + legacyResult.cid = asLegacyCid(result.cid) + + return legacyResult } return add diff --git a/packages/ipfs-core/src/components/cat.js b/packages/ipfs-core/src/components/cat.js index 8146ec4119..b0c1ba456f 100644 --- a/packages/ipfs-core/src/components/cat.js +++ b/packages/ipfs-core/src/components/cat.js @@ -6,12 +6,12 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @typedef {Object} Context - * @property {import('.').IPLD} ipld + * @property {import('.').BlockService} blockService * @property 
{import('.').Preload} preload * * @param {Context} context */ -module.exports = function ({ ipld, preload }) { +module.exports = function ({ blockService, preload }) { /** * Returns content of the file addressed by a valid IPFS Path or CID. * * @param {IPFSPath} ipfsPath - An IPFS path or CID to export * @param {Options} [options] * @returns {AsyncIterable<Uint8Array>} */ async function * cat (ipfsPath, options = {}) { ipfsPath = normalizeCidPath(ipfsPath) if (options.preload !== false) { const pathComponents = ipfsPath.split('/') preload(pathComponents[0]) } - const file = await exporter(ipfsPath, ipld, options) + const file = await exporter(ipfsPath, blockService, options) // File may not have unixfs prop if small & imported with rawLeaves true if (file.unixfs && file.unixfs.type.includes('dir')) { diff --git a/packages/ipfs-core/src/components/files/chmod.js b/packages/ipfs-core/src/components/files/chmod.js index 01735d674a..86e6df135c 100644 --- a/packages/ipfs-core/src/components/files/chmod.js +++ b/packages/ipfs-core/src/components/files/chmod.js @@ -9,7 +9,13 @@ const toTrail = require('./utils/to-trail') const addLink = require('./utils/add-link') const updateTree = require('./utils/update-tree') const updateMfsRoot = require('./utils/update-mfs-root') -const { DAGNode } = require('ipld-dag-pb') +const dagPb = require('@ipld/dag-pb') +// @ts-ignore +const { sha256 } = require('multiformats/hashes/sha2') +const Block = require('multiformats/block') +// @ts-ignore +const IpldBlock = require('ipld-block') +const CID = require('cids') const mc = require('multicodec') const mh = require('multihashing-async').multihash const { pipe } = require('it-pipe') @@ -193,10 +199,14 @@ module.exports = (context) => { // but do not reimport files, only manipulate dag-pb nodes const root = await pipe( async function * () { - for await (const entry of exporter.recursive(cid, context.ipld)) { - let node = await context.ipld.get(entry.cid) + for await (const entry of exporter.recursive(cid, context.blockService)) { + const block = await context.blockService.get(entry.cid) + let node = dagPb.decode(block.data) entry.unixfs.mode = calculateMode(mode, entry.unixfs) - node = new 
DAGNode(entry.unixfs.marshal(), node.Links) + node = dagPb.prepare({ + Data: entry.unixfs.marshal(), + Links: node.Links + }) yield { path: entry.path, @@ -234,20 +244,32 @@ module.exports = (context) => { return } - let node = await context.ipld.get(cid) + const block = await context.blockService.get(cid) + let node = dagPb.decode(block.data) const metadata = UnixFS.unmarshal(node.Data) metadata.mode = calculateMode(mode, metadata) - node = new DAGNode(metadata.marshal(), node.Links) + node = dagPb.prepare({ + Data: metadata.marshal(), + Links: node.Links + }) + - const updatedCid = await context.ipld.put(node, mc.DAG_PB, { - cidVersion: cid.version, - hashAlg: mh.names[opts.hashAlg || defaultOptions.hashAlg], - onlyHash: !opts.flush + const updatedBlock = await Block.encode({ + value: node, + codec: dagPb, + // TODO vmx 2021-02-22: Add back support for other hashing algorithms + hasher: sha256 }) + const updatedCid = updatedBlock.cid + if (opts.flush) { + const legacyCid = new CID(updatedBlock.cid.multihash.bytes) + await context.blockService.put(new IpldBlock(updatedBlock.bytes, legacyCid)) + } const trail = await toTrail(context, mfsDirectory) const parent = trail[trail.length - 1] - const parentNode = await context.ipld.get(parent.cid) + const parentBlock = await context.blockService.get(parent.cid) + const parentNode = dagPb.decode(parentBlock.data) const result = await addLink(context, { parent: parentNode, diff --git a/packages/ipfs-core/src/components/files/index.js b/packages/ipfs-core/src/components/files/index.js index bb1478e7b9..2d29cb06ad 100644 --- a/packages/ipfs-core/src/components/files/index.js +++ b/packages/ipfs-core/src/components/files/index.js @@ -51,7 +51,6 @@ const wrap = ({ const defaultOptions = { repoOwner: true, - ipld: null, repo: null } @@ -93,7 +92,6 @@ function createMfs (options) { /** * @param {Object} context - * @param {import('..').IPLD} context.ipld * @param {import('..').Block} context.block * @param {import('..').BlockService} 
context.blockService * @param {import('..').Repo} context.repo @@ -101,9 +99,8 @@ function createMfs (options) { * @param {import('..').Options} context.options * @returns {MFS} */ -module.exports = ({ ipld, block, blockService, repo, preload, options: constructorOptions }) => { +module.exports = ({ block, blockService, repo, preload, options: constructorOptions }) => { const methods = createMfs({ - ipld, block, blocks: blockService, datastore: repo.root, diff --git a/packages/ipfs-core/src/components/files/ls.js b/packages/ipfs-core/src/components/files/ls.js index 951074e86b..b82f8e1513 100644 --- a/packages/ipfs-core/src/components/files/ls.js +++ b/packages/ipfs-core/src/components/files/ls.js @@ -64,7 +64,7 @@ module.exports = (context) => { */ async function * mfsLs (path, options = {}) { const mfsPath = await toMfsPath(context, path, options) - const fsDir = await exporter(mfsPath.mfsPath, context.ipld) + const fsDir = await exporter(mfsPath.mfsPath, context.blocks) // single file/node if (!fsDir.unixfs || !fsDir.unixfs.type.includes('directory')) { diff --git a/packages/ipfs-core/src/components/files/mkdir.js b/packages/ipfs-core/src/components/files/mkdir.js index 0763dcb8f7..484c61dc54 100644 --- a/packages/ipfs-core/src/components/files/mkdir.js +++ b/packages/ipfs-core/src/components/files/mkdir.js @@ -75,7 +75,7 @@ module.exports = (context) => { const subPath = `/ipfs/${root}/${subPathComponents.join('/')}` try { - parent = await exporter(subPath, context.ipld) + parent = await exporter(subPath, context.blockService) log(`${subPath} existed`) log(`${subPath} had children ${parent.node.Links.map(link => link.Name)}`) diff --git a/packages/ipfs-core/src/components/files/read.js b/packages/ipfs-core/src/components/files/read.js index ea697c85b7..26e7f74e23 100644 --- a/packages/ipfs-core/src/components/files/read.js +++ b/packages/ipfs-core/src/components/files/read.js @@ -40,7 +40,7 @@ module.exports = (context) => { return { [Symbol.asyncIterator]: 
async function * read () { const mfsPath = await toMfsPath(context, path, options) - const result = await exporter(mfsPath.mfsPath, context.ipld) + const result = await exporter(mfsPath.mfsPath, context.blockService) if (result.unixfs.type !== 'file') { throw errCode(new Error(`${path} was not a file`), 'ERR_NOT_FILE') diff --git a/packages/ipfs-core/src/components/files/stat.js b/packages/ipfs-core/src/components/files/stat.js index e1a3690f8f..952f7014f4 100644 --- a/packages/ipfs-core/src/components/files/stat.js +++ b/packages/ipfs-core/src/components/files/stat.js @@ -5,7 +5,9 @@ const toMfsPath = require('./utils/to-mfs-path') const exporter = require('ipfs-unixfs-exporter') const log = require('debug')('ipfs:mfs:stat') const errCode = require('err-code') +const mc = require('multicodec') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') +const asLegacyCid = require('ipfs-core-utils/src/as-legacy-cid') const defaultOptions = { withLocal: false, @@ -14,7 +16,7 @@ const defaultOptions = { /** * @param {Object} context - * @param {import('..').IPLD} context.ipld + * @param {import('..').BlockService} context.blockService */ module.exports = (context) => { /** @@ -39,7 +41,7 @@ module.exports = (context) => { let file try { - file = await exporter(exportPath, context.ipld) + file = await exporter(exportPath, context.blocks) } catch (err) { if (err.code === 'ERR_NOT_FOUND') { throw errCode(new Error(`${path} does not exist`), 'ERR_NOT_FOUND') @@ -48,11 +50,11 @@ module.exports = (context) => { throw err } - if (!statters[file.cid.codec]) { - throw new Error(`Cannot stat codec ${file.cid.codec}`) + if (!statters[file.cid.code]) { + throw new Error(`Cannot stat codec ${mc.getNameFromCode(file.cid.code)}`) } - return statters[file.cid.codec](file) + return statters[file.cid.code](file) } return withTimeoutOption(mfsStat) @@ -64,9 +66,9 @@ const statters = { * @param {any} file * @returns {Stat} */ - raw: (file) => { + [mc.RAW]: (file) => { 
return { - cid: file.cid, + cid: asLegacyCid(file.cid), size: file.node.length, cumulativeSize: file.node.length, blocks: 0, @@ -80,14 +82,14 @@ const statters = { * @param {any} file * @returns {Stat} */ - 'dag-pb': (file) => { + [mc.DAG_PB]: (file) => { const blocks = file.node.Links.length const size = file.node.size const cumulativeSize = file.node.size /** @type {Stat} */ const output = { - cid: file.cid, + cid: asLegacyCid(file.cid), type: 'file', size: size, cumulativeSize: cumulativeSize, @@ -129,11 +131,11 @@ const statters = { * @param {any} file * @returns {Stat} */ - 'dag-cbor': (file) => { + [mc.DAG_CBOR]: (file) => { // @ts-ignore - This is incompatible with Stat object // @TODO - https://github.com/ipfs/js-ipfs/issues/3325 return { - cid: file.cid, + cid: asLegacyCid(file.cid), local: undefined, sizeLocal: undefined, withLocality: false @@ -143,9 +145,9 @@ const statters = { * @param {any} file * @returns {Stat} */ - identity: (file) => { + [mc.IDENTITY]: (file) => { return { - cid: file.cid, + cid: asLegacyCid(file.cid), size: file.node.digest.length, cumulativeSize: file.node.digest.length, blocks: 0, diff --git a/packages/ipfs-core/src/components/files/touch.js b/packages/ipfs-core/src/components/files/touch.js index 6c6dbc3d7d..df74e5bb16 100644 --- a/packages/ipfs-core/src/components/files/touch.js +++ b/packages/ipfs-core/src/components/files/touch.js @@ -9,9 +9,17 @@ const toTrail = require('./utils/to-trail') const addLink = require('./utils/add-link') const updateTree = require('./utils/update-tree') const updateMfsRoot = require('./utils/update-mfs-root') -const { DAGNode } = require('ipld-dag-pb') +const IpldBlock = require('ipld-block') +const dagPb = require('@ipld/dag-pb') +// @ts-ignore +const Block = require('multiformats/block') +// @ts-ignore +const { sha256 } = require('multiformats/hashes/sha2') +// NOTE vmx 2021-02-19: Not importet as CID to make the type checker happy +const CID = require('cids') const mc = require('multicodec') 
const mh = require('multihashing-async').multihash +const asLegacyCid = require('ipfs-core-utils/src/as-legacy-cid') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') const defaultOptions = { @@ -65,12 +73,18 @@ module.exports = (context) => { type: 'file', mtime: settings.mtime }) - node = new DAGNode(metadata.marshal()) - updatedCid = await context.ipld.put(node, mc.DAG_PB, { - cidVersion: settings.cidVersion, - hashAlg: mh.names['sha2-256'], - onlyHash: !settings.flush + node = dagPb.prepare({ Data: metadata.marshal() }) + const block = await Block.encode({ + value: node, + codec: dagPb, + hasher: sha256 }) + // Create an old style CID + updatedCid = block.cid + if (settings.flush) { + const legacyCid = new CID(block.cid.multihash.bytes) + await context.blockService.put(new IpldBlock(block.bytes, legacyCid)) + } } else { if (cid.codec !== 'dag-pb') { throw errCode(new Error(`${path} was not a UnixFS node`), 'ERR_NOT_UNIXFS') @@ -78,35 +92,48 @@ module.exports = (context) => { cidVersion = cid.version - node = await context.ipld.get(cid) + const block = await context.blockService.get(cid) + node = dagPb.decode(block.data) const metadata = UnixFS.unmarshal(node.Data) metadata.mtime = settings.mtime - node = new DAGNode(metadata.marshal(), node.Links) + node = dagPb.prepare({ + Data: metadata.marshal(), + Links: node.Links + }) - updatedCid = await context.ipld.put(node, mc.DAG_PB, { - cidVersion: cid.version, - hashAlg: mh.names['sha2-256'], - onlyHash: !settings.flush + const updatedBlock = await Block.encode({ + value: node, + codec: dagPb, + hasher: sha256 }) + updatedCid = updatedBlock.cid + if (settings.flush) { + const legacyCid = new CID(updatedBlock.cid.multihash.bytes) + await context.blockService.put(new IpldBlock(updatedBlock.bytes, legacyCid)) + } } const trail = await toTrail(context, mfsDirectory) const parent = trail[trail.length - 1] - const parentNode = await context.ipld.get(parent.cid) + const parentBlock = await 
context.blockService.get(asLegacyCid(parent.cid)) + const parentNode = dagPb.decode(parentBlock.data) const result = await addLink(context, { parent: parentNode, name: name, cid: updatedCid, - size: node.serialize().length, + size: dagPb.encode(node).length, flush: settings.flush, shardSplitThreshold: settings.shardSplitThreshold, + // TODO vmx 2021-02-23: Check if the hash alg is always hardcoded hashAlg: 'sha2-256', cidVersion }) + // TODO vmx 2021-02-22: If there are errors about the CID version, do the + // conversion to the correct CID version here, based on `cidVersion`. parent.cid = result.cid // update the tree with the new child diff --git a/packages/ipfs-core/src/components/files/utils/add-link.js b/packages/ipfs-core/src/components/files/utils/add-link.js index b622f64d33..cfbb024cec 100644 --- a/packages/ipfs-core/src/components/files/utils/add-link.js +++ b/packages/ipfs-core/src/components/files/utils/add-link.js @@ -1,9 +1,11 @@ 'use strict' -const { - DAGLink, - DAGNode -} = require('ipld-dag-pb') +const dagPb = require('@ipld/dag-pb') +// @ts-ignore +const { sha256 } = require('multiformats/hashes/sha2') +const Block = require('multiformats/block') +// @ts-ignore +const IpldBlock = require('ipld-block') const CID = require('cids') const log = require('debug')('ipfs:mfs:core:utils:add-link') const UnixFS = require('ipfs-unixfs') @@ -19,6 +21,7 @@ const errCode = require('err-code') const mc = require('multicodec') const mh = require('multihashing-async').multihash const last = require('it-last') +const asLegacyCid = require('ipfs-core-utils/src/as-legacy-cid') const addLink = async (context, options) => { if (!options.parentCid && !options.parent) { @@ -32,7 +35,8 @@ const addLink = async (context, options) => { if (!options.parent) { log(`Loading parent node ${options.parentCid}`) - options.parent = await context.ipld.get(options.parentCid) + const block = await context.blockService.get(options.parentCid) + options.parent = dagPb.decode(block.data) } 
if (!options.cid) { @@ -91,8 +95,15 @@ const convertToShardedDirectory = async (context, options) => { } const addToDirectory = async (context, options) => { - options.parent.rmLink(options.name) - options.parent.addLink(new DAGLink(options.name, options.size, options.cid)) + // Remove existing link if it exists + const parentLinks = options.parent.Links.filter((link) => { + return link.Name !== options.name + }) + parentLinks.push({ + Name: options.name, + Tsize: options.size, + Hash: options.cid + }) const node = UnixFS.unmarshal(options.parent.Data) @@ -100,21 +111,32 @@ const addToDirectory = async (context, options) => { // Update mtime if previously set node.mtime = new Date() - options.parent = new DAGNode(node.marshal(), options.parent.Links) + options.parent.Data = node.marshal() + } + options.parent = { + Data: options.parent.Data, + Links: parentLinks } const hashAlg = mh.names[options.hashAlg] // Persist the new parent DAGNode - const cid = await context.ipld.put(options.parent, mc.DAG_PB, { - cidVersion: options.cidVersion, - hashAlg, - onlyHash: !options.flush - }) + const block = await Block.encode({ + value: options.parent, + codec: dagPb, + // TODO vmx 2021-02-22: Add back support for other hashing algorithms + //hasher: hashAlg + hasher: sha256 + }) + // Create an old style CID + if (options.flush) { + const legacyCid = new CID(block.cid.multihash.bytes) + await context.blockService.put(new IpldBlock(block.bytes, legacyCid)) + } return { node: options.parent, - cid, + cid: block.cid, size: options.parent.size } } @@ -125,7 +147,8 @@ const addToShardedDirectory = async (context, options) => { } = await addFileToShardedDirectory(context, options) const result = await last(shard.flush('', context.block)) - const node = await context.ipld.get(result.cid) + const block = await context.blockService.get(asLegacyCid(result.cid)) + const node = dagPb.decode(block.data) // we have written out the shard, but only one sub-shard will have been written so replace it 
in the original shard const oldLink = options.parent.Links @@ -211,7 +234,8 @@ const addFileToShardedDirectory = async (context, options) => { // load sub-shard log(`Found subshard ${segment.prefix}`) - const subShard = await context.ipld.get(link.Hash) + const block = await context.blockService.get(link.Hash) + const subShard = dagPb.decode(block.data) // subshard hasn't been loaded, descend to the next level of the HAMT if (!path[index]) { diff --git a/packages/ipfs-core/src/components/files/utils/create-node.js b/packages/ipfs-core/src/components/files/utils/create-node.js index dede8f7fc6..4c2058145c 100644 --- a/packages/ipfs-core/src/components/files/utils/create-node.js +++ b/packages/ipfs-core/src/components/files/utils/create-node.js @@ -1,9 +1,14 @@ 'use strict' const UnixFS = require('ipfs-unixfs') -const { - DAGNode -} = require('ipld-dag-pb') +const IpldBlock = require('ipld-block') +const dagPb = require('@ipld/dag-pb') +// @ts-ignore +const Block = require('multiformats/block') +// @ts-ignore +const { sha256 } = require('multiformats/hashes/sha2') +// NOTE vmx 2021-02-19: Not imported as CID to make the type checker happy +const CID = require('cids') const mc = require('multicodec') const mh = require('multihashing-async').multihash @@ -15,15 +20,23 @@ const createNode = async (context, type, options) => { mtime: options.mtime }) - const node = new DAGNode(metadata.marshal()) - const cid = await context.ipld.put(node, mc.DAG_PB, { - cidVersion: options.cidVersion, - hashAlg, - onlyHash: !options.flush - }) + const node = dagPb.prepare({ Data: metadata.marshal() }) + const block = await Block.encode({ + value: node, + codec: dagPb, + // TODO vmx 2021-02-23: support any hashalg as it used to be + // hasher: hashAlg + hasher: sha256 + }) + // Create an old style CID + if (options.flush) { + // TODO vmx 2021-02-23 Check if the cid version matters (as the old code uses `options.cidVersion` + const legacyCid = new CID(block.cid.multihash.bytes) + await 
context.blockService.put(new IpldBlock(block.bytes, legacyCid)) + } return { - cid, + cid: block.cid, node } } diff --git a/packages/ipfs-core/src/components/files/utils/hamt-utils.js b/packages/ipfs-core/src/components/files/utils/hamt-utils.js index 17466eff73..5be5b868b4 100644 --- a/packages/ipfs-core/src/components/files/utils/hamt-utils.js +++ b/packages/ipfs-core/src/components/files/utils/hamt-utils.js @@ -1,8 +1,10 @@ 'use strict' -const { - DAGNode -} = require('ipld-dag-pb') +const dagPb = require('@ipld/dag-pb') +const { sha256 } = require('multiformats/hashes/sha2') +const Block = require('multiformats/block') +const IpldBlock = require('ipld-block') +const CID = require('cids') const Bucket = require('hamt-sharding/src/bucket') const DirSharded = require('ipfs-unixfs-importer/src/dir-sharded') const log = require('debug')('ipfs:mfs:core:utils:hamt-utils') @@ -24,18 +26,29 @@ const updateHamtDirectory = async (context, links, bucket, options) => { mtime: node.mtime }) - const hashAlg = mh.names[options.hashAlg] - const parent = new DAGNode(dir.marshal(), links) - const cid = await context.ipld.put(parent, mc.DAG_PB, { - cidVersion: options.cidVersion, - hashAlg, - onlyHash: !options.flush + //const hashAlg = mh.names[options.hashAlg] + const parent = dagPb.prepare({ + Data: dir.marshal(), + Links: links }) + // TODO vmx 2021-03-04: Check if the CID version matters + const parentBlock = await Block.encode({ + value: parent, + codec: dagPb, + // TODO vmx 2021-03-04: Check if support for other hash algs is needed + hasher: sha256 + }) + + if (options.flush) { + const legacyCid = new CID(parentBlock.cid.multihash.bytes) + await context.blockService.put(new IpldBlock(parentBlock.bytes, legacyCid)) + } return { node: parent, - cid, - size: parent.size + cid: parentBlock.cid, + // TODO vmx 2021-03-04: double check that it is the size we want here + size: parentBlock.bytes.length } } @@ -135,7 +148,8 @@ const generatePath = async (context, fileName, rootNode) 
=> { // found subshard log(`Found subshard ${segment.prefix}`) - const node = await context.ipld.get(link.Hash) + const block = await context.blockService.get(link.Hash) + const node = dagPb.decode(block.data) // subshard hasn't been loaded, descend to the next level of the HAMT if (!path[i + 1]) { diff --git a/packages/ipfs-core/src/components/files/utils/remove-link.js b/packages/ipfs-core/src/components/files/utils/remove-link.js index 572521bc93..5ba32db8b9 100644 --- a/packages/ipfs-core/src/components/files/utils/remove-link.js +++ b/packages/ipfs-core/src/components/files/utils/remove-link.js @@ -1,9 +1,9 @@ 'use strict' -const { - DAGNode, - DAGLink -} = require('ipld-dag-pb') +const dagPb = require('@ipld/dag-pb') +const { sha256 } = require('multiformats/hashes/sha2') +const Block = require('multiformats/block') +const IpldBlock = require('ipld-block') const CID = require('cids') const log = require('debug')('ipfs:mfs:core:utils:remove-link') const UnixFS = require('ipfs-unixfs') @@ -27,7 +27,8 @@ const removeLink = async (context, options) => { if (!options.parent) { log(`Loading parent node ${options.parentCid}`) - options.parent = await context.ipld.get(options.parentCid) + const block = await context.blockService.get(options.parentCid) + options.parent = dagPb.decode(block.data) } if (!options.name) { @@ -48,14 +49,24 @@ const removeLink = async (context, options) => { } const removeFromDirectory = async (context, options) => { - const hashAlg = mh.names[options.hashAlg] + //const hashAlg = mh.names[options.hashAlg] - options.parent.rmLink(options.name) - const cid = await context.ipld.put(options.parent, mc.DAG_PB, { - cidVersion: options.cidVersion, - hashAlg + // Remove existing link if it exists + options.parent.Links = options.parent.Links.filter((link) => { + return link.Name !== options.name + }) + // TODO vmx 2021-03-04: Check if the CID version matters + const parentBlock = await Block.encode({ + value: options.parent, + codec: dagPb, + // TODO vmx 
2021-03-04: Check if support for other hash algs is needed + hasher: sha256 }) + const legacyCid = new CID(parentBlock.cid.multihash.bytes) + await context.blockService.put(new IpldBlock(parentBlock.bytes, legacyCid)) + + const cid = parentBlock.cid log(`Updated regular directory ${cid}`) return { @@ -127,16 +138,24 @@ const updateShard = async (context, positions, child, options) => { log(`Updating shard ${prefix} with name ${newName}`) - const size = DAGNode.isDAGNode(result.node) ? result.node.size : result.node.Tsize + // TODO vmx 2021-03-04: This might be wrong, does every node have a `Tsize`? + const size = result.node.Tsize return updateShardParent(context, bucket, node, prefix, newName, size, result.cid, options) } const updateShardParent = (context, bucket, parent, oldName, newName, size, cid, options) => { - parent.rmLink(oldName) - parent.addLink(new DAGLink(newName, size, cid)) + // Remove existing link if it exists + const parentLinks = parent.Links.filter((link) => { + return link.Name !== oldName + }) + parentLinks.push({ + Name: newName, + Tsize: size, + Hash: cid + }) - return updateHamtDirectory(context, parent.Links, bucket, options) + return updateHamtDirectory(context, parentLinks, bucket, options) } module.exports = removeLink diff --git a/packages/ipfs-core/src/components/files/utils/to-mfs-path.js b/packages/ipfs-core/src/components/files/utils/to-mfs-path.js index c5a23828af..505c51ca8f 100644 --- a/packages/ipfs-core/src/components/files/utils/to-mfs-path.js +++ b/packages/ipfs-core/src/components/files/utils/to-mfs-path.js @@ -4,7 +4,7 @@ const loadMfsRoot = require('./with-mfs-root') const toPathComponents = require('./to-path-components') const exporter = require('ipfs-unixfs-exporter') const errCode = require('err-code') -const CID = require('cids') +const CID = require('multiformats/cid') const IPFS_PREFIX = 'ipfs' @@ -12,10 +12,10 @@ const toMfsPath = async (context, path, options) => { const outputArray = Array.isArray(path) let paths = 
Array.isArray(path) ? path : [path] const root = await loadMfsRoot(context, options) - paths = paths.map(path => { - if (CID.isCID(path)) { - path = `/ipfs/${path}` + const cid = CID.asCID(path) + if (cid) { + path = `/ipfs/${cid}` } path = (path || '').trim() @@ -81,7 +81,7 @@ const toMfsPath = async (context, path, options) => { const cidPath = path.type === 'mfs' ? path.mfsPath : path.path try { - const res = await exporter(cidPath, context.ipld) + const res = await exporter(cidPath, context.blocks) path.cid = res.cid path.mfsPath = `/ipfs/${res.path}` diff --git a/packages/ipfs-core/src/components/files/utils/to-trail.js b/packages/ipfs-core/src/components/files/utils/to-trail.js index 7a42d8acca..2b7320323f 100644 --- a/packages/ipfs-core/src/components/files/utils/to-trail.js +++ b/packages/ipfs-core/src/components/files/utils/to-trail.js @@ -8,7 +8,7 @@ const toTrail = async (context, path) => { const output = [] - for await (const fsEntry of exporter.path(path, context.ipld)) { + for await (const fsEntry of exporter.path(path, context.blockService)) { output.push({ name: fsEntry.name, cid: fsEntry.cid, diff --git a/packages/ipfs-core/src/components/files/utils/update-tree.js b/packages/ipfs-core/src/components/files/utils/update-tree.js index dc78800482..63e8c31fcf 100644 --- a/packages/ipfs-core/src/components/files/utils/update-tree.js +++ b/packages/ipfs-core/src/components/files/utils/update-tree.js @@ -2,6 +2,9 @@ const log = require('debug')('ipfs:mfs:utils:update-tree') const addLink = require('./add-link') +const { + decode +} = require('@ipld/dag-pb') const defaultOptions = { shardSplitThreshold: 1000 @@ -17,7 +20,8 @@ const updateTree = async (context, trail, options) => { let index = 0 let child - for await (const node of context.ipld.getMany(trail.map(node => node.cid))) { + for await (const block of context.blockService.getMany(trail.map(node => node.cid))) { + const node = decode(block.data) const cid = trail[index].cid const name = 
trail[index].name index++ @@ -26,7 +30,8 @@ const updateTree = async (context, trail, options) => { child = { cid, name, - size: node.size + // TODO vmx 2021-03-04: Check if the size should be 0 or the actual size + size: block.data.length } continue diff --git a/packages/ipfs-core/src/components/files/utils/with-mfs-root.js b/packages/ipfs-core/src/components/files/utils/with-mfs-root.js index a25b0ceb73..71bfa360e2 100644 --- a/packages/ipfs-core/src/components/files/utils/with-mfs-root.js +++ b/packages/ipfs-core/src/components/files/utils/with-mfs-root.js @@ -1,14 +1,18 @@ 'use strict' -const CID = require('cids') +const CID = require('multiformats/cid') const UnixFs = require('ipfs-unixfs') -const { - DAGNode -} = require('ipld-dag-pb') +const dagPb = require('@ipld/dag-pb') +// @ts-ignore +const { sha256 } = require('multiformats/hashes/sha2') +const Block = require('multiformats/block') +// @ts-ignore +const IpldBlock = require('ipld-block') const log = require('debug')('ipfs:mfs:utils:with-mfs-root') const mc = require('multicodec') const mh = require('multihashing-async').multihash const errCode = require('err-code') +const asLegacyCid = require('ipfs-core-utils/src/as-legacy-cid') const { MFS_ROOT_KEY @@ -28,24 +32,32 @@ const loadMfsRoot = async (context, options) => { try { const buf = await context.repo.datastore.get(MFS_ROOT_KEY) - cid = new CID(buf) + cid = CID.decode(buf) } catch (err) { if (err.code !== 'ERR_NOT_FOUND') { throw err } log('Creating new MFS root') - const node = new DAGNode(new UnixFs({ type: 'directory' }).marshal()) - cid = await context.ipld.put(node, mc.DAG_PB, { - cidVersion: 0, - hashAlg: mh.names['sha2-256'] // why can't ipld look this up? 
+ const node = dagPb.prepare({ Data: new UnixFs({ type: 'directory' }).marshal() }) + const block = await Block.encode({ + value: node, + codec: dagPb, + hasher: sha256 }) + cid = block.cid + //console.log('vmx: cid:', cid) + //const legacyCid = asLegacyCid(block.cid) + // TODO vmx 2021-02-23: Check if it needs to be a cidv0 as it used to be + // TODO vmx 2021-02-13: Call `context.blocks` more consistently, e.g. `context.blockService` + //await context.blocks.put(new IpldBlock(block.bytes, legacyCid)) + await context.blocks.put(block) if (options && options.signal && options.signal.aborted) { throw errCode(new Error('Request aborted'), 'ERR_ABORTED', { name: 'Aborted' }) } - await context.repo.datastore.put(MFS_ROOT_KEY, cid.bytes) + await context.repo.datastore.put(MFS_ROOT_KEY, block.cid.bytes) } log(`Loaded MFS root /ipfs/${cid}`) diff --git a/packages/ipfs-core/src/components/files/write.js b/packages/ipfs-core/src/components/files/write.js index 330043ca8a..5f21bfe827 100644 --- a/packages/ipfs-core/src/components/files/write.js +++ b/packages/ipfs-core/src/components/files/write.js @@ -2,6 +2,9 @@ const log = require('debug')('ipfs:mfs:write') const importer = require('ipfs-unixfs-importer') +const { + decode +} = require('@ipld/dag-pb') const stat = require('./stat') const mkdir = require('./mkdir') const addLink = require('./utils/add-link') @@ -108,7 +111,8 @@ const updateOrImport = async (context, path, source, destination, options) => { throw errCode(new Error(`cannot write to ${parent.name}: Not a directory`), 'ERR_NOT_A_DIRECTORY') } - const parentNode = await context.ipld.get(parent.cid) + const parentBlock = await context.blockService.get(parent.cid) + const parentNode = decode(parentBlock.data) const result = await addLink(context, { parent: parentNode, diff --git a/packages/ipfs-core/src/components/get.js b/packages/ipfs-core/src/components/get.js index f183dc37a2..e0e273474b 100644 --- a/packages/ipfs-core/src/components/get.js +++ 
b/packages/ipfs-core/src/components/get.js @@ -4,6 +4,7 @@ const exporter = require('ipfs-unixfs-exporter') const errCode = require('err-code') const { normalizeCidPath, mapFile } = require('../utils') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') +const asLegacyCid = require('ipfs-core-utils/src/as-legacy-cid') /** * @typedef {Object} Context @@ -12,7 +13,7 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') * * @param {Context} context */ -module.exports = function ({ ipld, preload }) { +module.exports = function ({ blockService, preload }) { /** * Fetch a file or an entire directory tree from IPFS that is addressed by a * valid IPFS Path. @@ -34,11 +35,16 @@ module.exports = function ({ ipld, preload }) { preload(pathComponents[0]) } - for await (const file of exporter.recursive(ipfsPath, ipld, options)) { - yield mapFile(file, { + for await (const file of exporter.recursive(ipfsPath, blockService, options)) { + const result = mapFile(file, { ...options, includeContent: true }) + + let legacyResult = result + legacyResult.cid = asLegacyCid(result.cid) + + yield legacyResult } } diff --git a/packages/ipfs-core/src/components/index.js b/packages/ipfs-core/src/components/index.js index 20652c29a5..6ffaccc7b4 100644 --- a/packages/ipfs-core/src/components/index.js +++ b/packages/ipfs-core/src/components/index.js @@ -4,11 +4,21 @@ const { mergeOptions } = require('../utils') const { isTest } = require('ipfs-utils/src/env') const log = require('debug')('ipfs') -const { DAGNode } = require('ipld-dag-pb') +// @ts-ignore +const dagPb = require('@ipld/dag-pb') const UnixFs = require('ipfs-unixfs') -const multicodec = require('multicodec') +//const multicodec = require('multicodec') +// @ts-ignore +const Block = require('multiformats/block') +// @ts-ignore +const { sha256 } = require('multiformats/hashes/sha2') +// NOTE vmx 2021-02-19: Not importet as CID to make the type checker happy +const cids = require('cids') 
+const CID = require('multiformats/cid') +const IpldBlock = require('ipld-block') const initAssets = require('../runtime/init-assets-nodejs') const { AlreadyInitializedError } = require('../errors') +const asLegacyCid = require('ipfs-core-utils/src/as-legacy-cid') const createStartAPI = require('./start') const createStopAPI = require('./stop') @@ -47,6 +57,54 @@ const createPingAPI = require('./ping') const createDHTAPI = require('./dht') const createPubSubAPI = require('./pubsub') + + +// Make the block service take and return new CIDs and also not return +// js-ipld-blocks, but just plain objects with a `cid` and `bytes` property +class NewBlockService { + constructor(legacyBlockService) { + this.blockService = legacyBlockService + this.setExchange = legacyBlockService.setExchange + this.unsetExchange = legacyBlockService.unsetExchange + } + + async get (cid, options) { + //console.log('vmx: blockservice new cid wrappen call: get: has cid.codec (is old cid):', 'codec' in cid) + const legacyCid = asLegacyCid(cid) + const block = await this.blockService.get(legacyCid, options) + return { + cid: CID.decode(block.cid.bytes), + bytes: block.data + } + } + + async * getMany (cids, options) { + //console.log('vmx: blockservice new cid wrappen call: getMany') + const legacyCids = cids.map(asLegacyCid) + for await (const block of this.blockService.getMany(legacyCids, options)) { + yield { + cid: CID.decode(block.cid.bytes), + bytes: block.data + } + } + } + + put (block, options) { + //console.log('vmx: blockservice new cid wrappen call: put: block:', block) + const legacyCid = asLegacyCid(block.cid) + return this.blockService.put(new IpldBlock(block.bytes, legacyCid)) + } + + putMany (blocks, options) { + //console.log('vmx: blockservice new cid wrappen call: putMany') + const legacyBlocks = blocks.map((block) => { + const legacyCid = asLegacyCid(block.cid) + return new IpldBlock(block.bytes, legacyCid) + }) + return this.blockService.putMany(legacyBlocks) + } +} + class IPFS 
{ /** * @param {Object} config @@ -60,9 +118,55 @@ class IPFS { const preload = createPreloadAPI(options.preload) + const legacyBlockService = new IPFSBlockService(storage.repo) + const blockService = new NewBlockService(legacyBlockService) /** @type {BlockService} */ - const blockService = new IPFSBlockService(storage.repo) - const ipld = createIPLD({ blockService, print, options: options.ipld }) + //const blockService = new IPFSBlockService(storage.repo) + //// Make the block service take and return new CIDs and also not return + //// js-ipld-blocks, but just plain objects with a `cid` and `bytes` property + //blockService.get = (cid, options) => { + // return async (cid, options) => { + // console.log('vmx: blockservice new cid wrappen call: get') + // const legacyCid = asLegacyCid(cid) + // const block = await blockService.get(legacyCid, options) + // return { + // cid: CID.parse(block.cid.bytes), + // bytes: block.data + // } + // } + //} + //blockService.getMany = function * (cids, options) { + // return async function * (cids, options) { + // console.log('vmx: blockservice new cid wrappen call: getMany') + // const legacyCids = cids.map(asLegacyCid) + // for await (const block of blockService.getMany(legacyCids, options)) { + // yield { + // cid: CID.parse(block.cid.bytes), + // bytes: block.data + // } + // } + // } + //} + //blockService.put = (block, options) => { + // return (block, options) => { + // console.log('vmx: blockservice new cid wrappen call: put: block:', block) + // const legacyCid = asLegacyCid(block.cid) + // return blockService.put(new IpldBlock(block.bytes, legacyCid)) + // } + //} + //blockService.putMany = (blocks, options) => { + // return (blocks, options) => { + // console.log('vmx: blockservice new cid wrappen call: putMany') + // const legacyBlocks = block.map((block) => { + // const legacyCid = asLegacyCid(block.cid) + // return new IpldBlock(block.bytes, legacyCid) + // }) + // return blockService.putMany(legacyBlocks) + // } + 
//} + + + const ipld = createIPLD({ blockService: legacyBlockService, print, options: options.ipld }) const gcLock = createGCLockAPI({ path: repo.path, @@ -87,9 +191,9 @@ class IPFS { const resolve = createResolveAPI({ ipld, name }) const pinManager = new PinManagerAPI({ repo, dagReader }) const pin = new PinAPI({ gcLock, pinManager, dagReader }) - const block = new BlockAPI({ blockService, preload, gcLock, pinManager, pin }) + const block = new BlockAPI({ blockService: legacyBlockService, preload, gcLock, pinManager, pin }) const dag = new DagAPI({ ipld, preload, gcLock, pin, dagReader }) - const refs = Object.assign(createRefsAPI({ ipld, resolve, preload }), { + const refs = Object.assign(createRefsAPI({ blockService, resolve, preload }), { local: createRefsLocalAPI({ repo: storage.repo }) }) const { add, addAll, cat, get, ls } = new RootAPI({ @@ -97,7 +201,7 @@ class IPFS { preload, pin, block, - ipld, + blockService, options: options.EXPERIMENTAL }) @@ -116,6 +220,7 @@ class IPFS { options: options.preload }) + this.blockService = blockService this.preload = preload this.name = name this.ipld = ipld @@ -248,17 +353,17 @@ const initOptions = ({ init }) => * @param {IPFS} ipfs */ const addEmptyDir = async (ipfs) => { - const node = new DAGNode(new UnixFs('directory').marshal()) - const cid = await ipfs.dag.put(node, { - version: 0, - format: multicodec.DAG_PB, - hashAlg: multicodec.SHA2_256, - preload: false + const node = dagPb.prepare({ Data: new UnixFs('directory').marshal() }) + const block = await Block.encode({ + value: node, + codec: dagPb, + hasher: sha256 }) + await ipfs.blockService.put(block) - await ipfs.pin.add(cid) + await ipfs.pin.add(asLegacyCid(block.cid)) - return cid + return block.cid } /** diff --git a/packages/ipfs-core/src/components/ls.js b/packages/ipfs-core/src/components/ls.js index c97985d9e0..ac5f2d0fb8 100644 --- a/packages/ipfs-core/src/components/ls.js +++ b/packages/ipfs-core/src/components/ls.js @@ -7,12 +7,12 @@ const 
withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @typedef {Object} Context - * @property {import('.').IPLD} ipld + * @property {import('.').BlockService} blockService * @property {import('.').Preload} preload * * @param {Context} context */ -module.exports = function ({ ipld, preload }) { +module.exports = function ({ blockService, preload }) { /** * Lists a directory from IPFS that is addressed by a valid IPFS Path. * @@ -21,6 +21,7 @@ module.exports = function ({ ipld, preload }) { * @returns {AsyncIterable} */ async function * ls (ipfsPath, options = {}) { + console.log('vmx: components: ls: ipfspath:', ipfsPath, 'codec' in ipfsPath) const path = normalizeCidPath(ipfsPath) const recursive = options.recursive const pathComponents = path.split('/') @@ -29,7 +30,7 @@ module.exports = function ({ ipld, preload }) { preload(pathComponents[0]) } - const file = await exporter(ipfsPath, ipld, options) + const file = await exporter(ipfsPath, blockService, options) if (!file.unixfs) { throw errCode(new Error('dag node was not a UnixFS node'), 'ERR_NOT_UNIXFS') @@ -42,7 +43,7 @@ module.exports = function ({ ipld, preload }) { if (file.unixfs.type.includes('dir')) { if (recursive) { - for await (const child of exporter.recursive(file.cid, ipld, options)) { + for await (const child of exporter.recursive(file.cid, blockService, options)) { if (file.cid.toBaseEncodedString() === child.cid.toBaseEncodedString()) { continue } diff --git a/packages/ipfs-core/src/components/refs/index.js b/packages/ipfs-core/src/components/refs/index.js index 438532a74d..39d3da80a5 100644 --- a/packages/ipfs-core/src/components/refs/index.js +++ b/packages/ipfs-core/src/components/refs/index.js @@ -1,13 +1,16 @@ 'use strict' const isIpfs = require('is-ipfs') -const CID = require('cids') -const { DAGNode } = require('ipld-dag-pb') +const CID = require('multiformats/cid').default +// @ts-ignore +const { decode } = require('@ipld/dag-pb') const { normalizeCidPath } = 
require('../../utils') const { Errors } = require('interface-datastore') const ERR_NOT_FOUND = Errors.notFoundError().code +const asLegacyCid = require('ipfs-core-utils/src/as-legacy-cid') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') + const Format = { default: '', edges: ' -> ' @@ -15,11 +18,11 @@ const Format = { /** * @param {Object} config - * @param {import('..').IPLD} config.ipld + * @param {import('..').BlockService} config.blockService * @param {import('..').Resolve} config.resolve * @param {import('..').Preload} config.preload */ -module.exports = function ({ ipld, resolve, preload }) { +module.exports = function ({ blockService, resolve, preload }) { /** * Get links (references) from an object * @@ -48,7 +51,7 @@ module.exports = function ({ ipld, resolve, preload }) { const paths = rawPaths.map(p => getFullPath(preload, p, options)) for (const path of paths) { - yield * refsStream(resolve, ipld, path, options) + yield * refsStream(resolve, blockService, path, options) } } @@ -76,7 +79,7 @@ function getFullPath (preload, ipfsPath, options) { } // Get a stream of refs at the given path -async function * refsStream (resolve, ipld, path, options) { +async function * refsStream (resolve, blockService, path, options) { // Resolve to the target CID of the path const resPath = await resolve(path) // path is /ipfs/ @@ -84,7 +87,7 @@ async function * refsStream (resolve, ipld, path, options) { const cid = parts[2] // Traverse the DAG, converting it into a stream - for await (const obj of objectStream(ipld, cid, options.maxDepth, options.unique)) { + for await (const obj of objectStream(blockService, cid, options.maxDepth, options.unique)) { // Root object will not have a parent if (!obj.parent) { continue @@ -112,7 +115,7 @@ function formatLink (srcCid, dstCid, linkName, format) { } // Do a depth first search of the DAG, starting from the given root cid -async function * objectStream (ipld, rootCid, maxDepth, uniqueOnly) { // 
eslint-disable-line require-await +async function * objectStream (blockService, rootCid, maxDepth, uniqueOnly) { // eslint-disable-line require-await const seen = new Set() async function * traverseLevel (parent, depth) { @@ -126,7 +129,7 @@ async function * objectStream (ipld, rootCid, maxDepth, uniqueOnly) { // eslint- // Get this object's links try { // Look at each link, parent and the new depth - for (const link of await getLinks(ipld, parent.cid)) { + for (const link of await getLinks(blockService, parent.cid)) { yield { parent: parent, node: link, @@ -151,14 +154,11 @@ async function * objectStream (ipld, rootCid, maxDepth, uniqueOnly) { // eslint- yield * traverseLevel({ cid: rootCid }, 0) } -// Fetch a node from IPLD then get all its links -async function getLinks (ipld, cid) { - const node = await ipld.get(new CID(cid)) - - if (DAGNode.isDAGNode(node)) { - return node.Links.map(({ Name, Hash }) => ({ name: Name, cid: new CID(Hash) })) - } - +// Fetch a node from the BlockService then get all its links +async function getLinks (blockService, cid) { + const block = await blockService.get(asLegacyCid(CID.parse(cid))) + //const node = asLegacyCid(decode(block.data)) + const node = decode(block.data) return getNodeLinks(node) } @@ -166,10 +166,15 @@ async function getLinks (ipld, cid) { function getNodeLinks (node, path = '') { let links = [] for (const [name, value] of Object.entries(node)) { - if (CID.isCID(value)) { + const cid = CID.asCID(value) + if (cid) { links.push({ name: path + name, - cid: value + // TODO vmx 2021-02-22: Check if `asLegacyCid` is also used on objects + // or only on CIDs. If it's only used on CIDs, its code can be + // simplified. 
+ //cid: asLegacyCid(cid) + cid }) } else if (typeof value === 'object') { links = links.concat(getNodeLinks(value, path + name + '/')) diff --git a/packages/ipfs-core/src/components/root.js b/packages/ipfs-core/src/components/root.js index 9be83d8169..8e9a5bb7ed 100644 --- a/packages/ipfs-core/src/components/root.js +++ b/packages/ipfs-core/src/components/root.js @@ -19,7 +19,7 @@ class Root { /** * @param {Context} context */ - constructor ({ preload, gcLock, pin, block, ipld, options }) { + constructor ({ preload, gcLock, pin, block, blockService, options }) { const addAll = createAddAllAPI({ preload, gcLock, @@ -30,9 +30,9 @@ class Root { this.addAll = addAll this.add = createAddAPI({ addAll }) - this.cat = createCatAPI({ ipld, preload }) - this.get = createGetAPI({ ipld, preload }) - this.ls = createLsAPI({ ipld, preload }) + this.cat = createCatAPI({ blockService, preload }) + this.get = createGetAPI({ blockService, preload }) + this.ls = createLsAPI({ blockService, preload }) } } diff --git a/packages/ipfs-core/src/mfs-preload.js b/packages/ipfs-core/src/mfs-preload.js index e800eff9f4..375021e4b6 100644 --- a/packages/ipfs-core/src/mfs-preload.js +++ b/packages/ipfs-core/src/mfs-preload.js @@ -2,6 +2,7 @@ const debug = require('debug') const { cidToString } = require('ipfs-core-utils/src/cid') +//const { base32 } = require('multiformats/bases/base32') const log = Object.assign(debug('ipfs:mfs-preload'), { error: debug('ipfs:mfs-preload:error') }) @@ -27,6 +28,7 @@ module.exports = ({ preload, files, options = {} }) => { try { const stats = await files.stat('/') const nextRootCid = cidToString(stats.cid, { base: 'base32' }) + //const nextRootCid = stats.cid.toString(base32) if (rootCid !== nextRootCid) { log(`preloading updated MFS root ${rootCid} -> ${stats.cid}`) @@ -47,6 +49,7 @@ module.exports = ({ preload, files, options = {} }) => { async start () { const stats = await files.stat('/') rootCid = cidToString(stats.cid, { base: 'base32' }) + //rootCid = 
stats.cid.toString(base32) log(`monitoring MFS root ${stats.cid}`) timeoutId = setTimeout(preloadMfs, options.interval) }, diff --git a/packages/ipfs-core/test/preload.spec.js b/packages/ipfs-core/test/preload.spec.js index f565a69822..495c2ebbc4 100644 --- a/packages/ipfs-core/test/preload.spec.js +++ b/packages/ipfs-core/test/preload.spec.js @@ -27,7 +27,8 @@ describe('preload', () => { afterEach(() => MockPreloadNode.clearPreloadCids()) it('should not preload content multiple times', async function () { - this.timeout(50 * 1000) + this.timeout(50 * 100000000) + debugger const { cid } = await ipfs.add(uint8ArrayFromString(nanoid()), { preload: false }) await all(ipfs.cat(cid)) @@ -92,7 +93,8 @@ describe('preload', () => { path: 'dir0/file2', content: uint8ArrayFromString(nanoid()) }], { wrapWithDirectory: true })) - + //console.log('vmx: preload test res:', res.map((added) => 'codec' in added.cid)) + //console.log('vmx: preload test res:', res) const wrappingDir = res.find(file => file.path === '') expect(wrappingDir).to.exist() diff --git a/packages/ipfs-core/test/utils/mock-preload-node-utils.js b/packages/ipfs-core/test/utils/mock-preload-node-utils.js index 831beff3df..8ee8dd0cc8 100644 --- a/packages/ipfs-core/test/utils/mock-preload-node-utils.js +++ b/packages/ipfs-core/test/utils/mock-preload-node-utils.js @@ -13,7 +13,9 @@ module.exports.defaultAddr = defaultAddr // Get the stored preload CIDs for the server at `addr` const getPreloadCids = async (addr) => { + //console.log('vmx: getpreloadcids: addr', `${toUri(addr || defaultAddr)}/cids`) const res = await HTTP.get(`${toUri(addr || defaultAddr)}/cids`) + //console.log('vmx: getpreloadcids: res', res) return res.json() } @@ -32,9 +34,11 @@ module.exports.waitForCids = async (cids, opts) => { cids = Array.isArray(cids) ? 
cids : [cids] cids = cids.map(cid => cid.toString()) // Allow passing CID instance + //console.log('vmx: wait for cids: cids:', cids) await waitFor(async () => { const preloadCids = await getPreloadCids(opts.addr) + //console.log('vmx: getpreloadcids:2', preloadCids) // See if our cached preloadCids includes all the cids we're looking for. const { missing, duplicates } = cids.reduce((results, cid) => { diff --git a/packages/ipfs-daemon/package.json b/packages/ipfs-daemon/package.json index b5e1f761a5..2676954467 100644 --- a/packages/ipfs-daemon/package.json +++ b/packages/ipfs-daemon/package.json @@ -20,7 +20,7 @@ }, "scripts": { "lint": "aegir lint", - "prepare": "aegir build --no-bundle", + "prepare": "aegir build --no-bundle --no-types", "test": "npm run test:node", "test:node": "aegir test -t node", "coverage": "nyc --reporter=text --reporter=lcov npm run test:node", diff --git a/packages/ipfs-grpc-client/package.json b/packages/ipfs-grpc-client/package.json index 4a2dc1e406..667679186f 100644 --- a/packages/ipfs-grpc-client/package.json +++ b/packages/ipfs-grpc-client/package.json @@ -32,7 +32,7 @@ "scripts": { "test": "aegir test", "lint": "aegir lint", - "prepare": "aegir build --no-bundle", + "prepare": "aegir build --no-bundle --no-types", "prepublishOnly": "aegir build", "coverage": "npx nyc -r html npm run test:node -- --bail", "clean": "rimraf ./dist", diff --git a/packages/ipfs-grpc-server/package.json b/packages/ipfs-grpc-server/package.json index e53a7abd9e..d763894b17 100644 --- a/packages/ipfs-grpc-server/package.json +++ b/packages/ipfs-grpc-server/package.json @@ -29,7 +29,7 @@ }, "scripts": { "lint": "aegir lint", - "prepare": "aegir build --no-bundle", + "prepare": "aegir build --no-bundle --no-types", "test": "aegir test -t node", "coverage": "nyc --reporter=text --reporter=lcov npm run test:node", "clean": "rimraf ./dist", diff --git a/packages/ipfs-http-client/package.json b/packages/ipfs-http-client/package.json index 
25a0cff9a9..48fc5a0e7d 100644 --- a/packages/ipfs-http-client/package.json +++ b/packages/ipfs-http-client/package.json @@ -45,7 +45,7 @@ "test:chrome": "aegir test -t browser -t webworker -- --browsers ChromeHeadless", "test:firefox": "aegir test -t browser -t webworker -- --browsers FirefoxHeadless", "lint": "aegir lint", - "prepare": "aegir build --no-bundle", + "prepare": "aegir build --no-bundle --no-types", "coverage": "npx nyc -r html npm run test:node -- --bail", "clean": "rimraf ./dist", "dep-check": "aegir dep-check -i ipfs-core -i rimraf -i ipfs-core-types -i abort-controller" diff --git a/packages/ipfs-http-gateway/package.json b/packages/ipfs-http-gateway/package.json index f736313f5b..1feb8cd912 100644 --- a/packages/ipfs-http-gateway/package.json +++ b/packages/ipfs-http-gateway/package.json @@ -20,7 +20,7 @@ }, "scripts": { "lint": "aegir lint", - "prepare": "aegir build --no-bundle", + "prepare": "aegir build --no-bundle --no-types", "test": "npm run test:node", "test:node": "aegir test -t node", "coverage": "nyc --reporter=text --reporter=lcov npm run test:node", diff --git a/packages/ipfs-http-server/package.json b/packages/ipfs-http-server/package.json index d92506da26..b8c8d8b762 100644 --- a/packages/ipfs-http-server/package.json +++ b/packages/ipfs-http-server/package.json @@ -20,7 +20,7 @@ }, "scripts": { "lint": "aegir lint", - "prepare": "aegir build --no-bundle", + "prepare": "aegir build --no-bundle --no-types", "test": "npm run test:node", "test:node": "aegir test -t node", "coverage": "nyc --reporter=text --reporter=lcov npm run test:node", @@ -31,6 +31,7 @@ "@hapi/boom": "^9.1.0", "@hapi/content": "^5.0.2", "@hapi/hapi": "^20.0.0", + "@ipld/dag-pb": "0.0.1", "abort-controller": "^3.0.0", "cids": "^1.1.5", "debug": "^4.1.1", @@ -39,8 +40,7 @@ "hapi-pino": "^8.3.0", "ipfs-core-utils": "^0.7.1", "ipfs-http-gateway": "^0.3.1", - "ipfs-unixfs": "^2.0.3", - "ipld-dag-pb": "^0.20.0", + "ipfs-unixfs": 
"https://gitpkg.now.sh/ipfs/js-ipfs-unixfs/packages/ipfs-unixfs?js-dag-pb", "it-all": "^1.0.4", "it-drain": "^1.0.3", "it-first": "^1.0.4", diff --git a/packages/ipfs-http-server/src/api/resources/object.js b/packages/ipfs-http-server/src/api/resources/object.js index 69290fb939..7d39ebb499 100644 --- a/packages/ipfs-http-server/src/api/resources/object.js +++ b/packages/ipfs-http-server/src/api/resources/object.js @@ -2,8 +2,6 @@ const multipart = require('../../utils/multipart-request-parser') const all = require('it-all') -const dagPB = require('ipld-dag-pb') -const { DAGLink } = dagPB const Joi = require('../../utils/joi') const multibase = require('multibase') const Boom = require('@hapi/boom') @@ -685,7 +683,12 @@ exports.patchAddLink = { signal, timeout }) - cid = await ipfs.object.patch.addLink(root, new DAGLink(name, node.size, ref), { + const link = { + Name: name, + Tsize: node.size, + Hash: ref + } + cid = await ipfs.object.patch.addLink(root, link, { enc, signal, timeout diff --git a/packages/ipfs-http-server/test/inject/dag.js b/packages/ipfs-http-server/test/inject/dag.js index ab1a273d48..204dad3e18 100644 --- a/packages/ipfs-http-server/test/inject/dag.js +++ b/packages/ipfs-http-server/test/inject/dag.js @@ -3,7 +3,6 @@ 'use strict' const { expect } = require('aegir/utils/chai') -const DAGNode = require('ipld-dag-pb').DAGNode const Readable = require('stream').Readable const FormData = require('form-data') const streamToPromise = require('stream-to-promise') @@ -74,7 +73,7 @@ describe('/dag', () => { }) it('returns value', async () => { - const node = new DAGNode(Uint8Array.from([]), []) + const node = { Data: Uint8Array.from([]) } ipfs.dag.get.withArgs(cid, defaultOptions).returns({ value: node }) const res = await http({ @@ -88,7 +87,7 @@ describe('/dag', () => { }) it('uses text encoding for data by default', async () => { - const node = new DAGNode(Uint8Array.from([0, 1, 2, 3]), []) + const node = { Data: Uint8Array.from([0, 1, 2, 3]) } 
ipfs.dag.get.withArgs(cid, defaultOptions).returns({ value: node }) const res = await http({ @@ -103,7 +102,7 @@ describe('/dag', () => { }) it('overrides data encoding', async () => { - const node = new DAGNode(Uint8Array.from([0, 1, 2, 3]), []) + const node = { Data: Uint8Array.from([0, 1, 2, 3]) } ipfs.dag.get.withArgs(cid, defaultOptions).returns({ value: node }) const res = await http({ @@ -132,7 +131,7 @@ describe('/dag', () => { }) it('returns value with a path as part of the cid for dag-pb nodes', async () => { - const node = new DAGNode(Uint8Array.from([0, 1, 2, 3]), []) + const node = { Data: Uint8Array.from([0, 1, 2, 3]) } ipfs.dag.get.withArgs(cid, { ...defaultOptions, path: '/Data' diff --git a/packages/ipfs-http-server/test/inject/object.js b/packages/ipfs-http-server/test/inject/object.js index b5911322ee..386fc7db09 100644 --- a/packages/ipfs-http-server/test/inject/object.js +++ b/packages/ipfs-http-server/test/inject/object.js @@ -14,9 +14,9 @@ const CID = require('cids') const UnixFS = require('ipfs-unixfs') const { AbortSignal } = require('native-abort-controller') const { - DAGNode, - DAGLink -} = require('ipld-dag-pb') + encode, + prepare +} = require('@ipld/dag-pb') const uint8ArrayToString = require('uint8arrays/to-string') describe('/object', () => { @@ -25,12 +25,20 @@ describe('/object', () => { const unixfs = new UnixFS({ type: 'file' }) - const fileNode = new DAGNode(unixfs.marshal(), [ - new DAGLink('', 5, cid) - ]) - const emptyDirectoryNode = new DAGNode(new UnixFS({ - type: 'directory' - }).marshal()) + const fileNode = prepare({ + Data: unixfs.marshal(), + Links: [{ + Name: '', + Tsize: 5, + Hash: cid + }] + }) + const emptyDirectoryNode = prepare({ + Data: new UnixFS({ + type: 'directory' + }).marshal() + }) + console.log('vmx: emptyDirectoryNode:', emptyDirectoryNode) let ipfs beforeEach(() => { @@ -67,6 +75,7 @@ describe('/object', () => { ...defaultOptions, template: undefined }).returns(cid) + debugger 
ipfs.object.get.withArgs(cid, defaultOptions).returns(emptyDirectoryNode) const res = await http({ @@ -74,6 +83,7 @@ describe('/object', () => { url: '/api/v0/object/new' }, { ipfs }) + console.log('vmx: new: res:', res) expect(res).to.have.property('statusCode', 200) expect(res).to.have.nested.property('result.Hash', cid.toString()) expect(res).to.have.nested.property('result.Links').that.is.empty() @@ -162,9 +172,9 @@ describe('/object', () => { expect(res).to.have.nested.property('result.Hash', cid.toString()) expect(res).to.have.nested.property('result.Links').that.is.empty() }) - }) + }) - describe('/get', () => { + describe('/get', () => { const defaultOptions = { enc: undefined, signal: sinon.match.instanceOf(AbortSignal), @@ -247,9 +257,9 @@ describe('/object', () => { expect(res).to.have.nested.property('result.Links').that.is.empty() expect(res).to.have.nested.property('result.Data', uint8ArrayToString(emptyDirectoryNode.Data, 'base64pad')) }) - }) + }) - describe('/put', () => { + describe('/put', () => { const defaultOptions = { enc: undefined, signal: sinon.match.instanceOf(AbortSignal), @@ -310,7 +320,10 @@ describe('/object', () => { } ipfs.object.put.withArgs(sinon.match.instanceOf(Buffer), defaultOptions).returns(cid) - ipfs.object.get.withArgs(cid).resolves(new DAGNode(expectedResult.Data, expectedResult.Links, expectedResult.Size - 8)) + ipfs.object.get.withArgs(cid).resolves(prepare({ + Data: expectedResult.Data, + Links: expectedResult.Links + })) const form = new FormData() const filePath = 'test/fixtures/test-data/node.json' @@ -383,7 +396,10 @@ describe('/object', () => { ipfs.object.get.withArgs(cid, { signal: sinon.match.instanceOf(AbortSignal), timeout: 1000 - }).resolves(new DAGNode(expectedResult.Data, expectedResult.Links, expectedResult.Size - 8)) + }).resolves(prepare({ + Data: expectedResult.Data, + Links: expectedResult.Links + })) const form = new FormData() const filePath = 'test/fixtures/test-data/node.json' @@ -401,9 +417,9 @@ 
describe('/object', () => { expect(res).to.have.property('statusCode', 200) expect(res).to.have.deep.property('result', expectedResult) }) - }) + }) - describe('/stat', () => { + describe('/stat', () => { const defaultOptions = { signal: sinon.match.instanceOf(AbortSignal), timeout: undefined @@ -521,30 +537,30 @@ describe('/object', () => { timeout: undefined } - it('only accepts POST', () => { - return testHttpMethod('/api/v0/object/data') - }) - - it('returns 400 for request without argument', async () => { - const res = await http({ - method: 'POST', - url: '/api/v0/object/data' - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Message').that.is.a('string') - }) - - it('returns 400 for request with invalid argument', async () => { - const res = await http({ - method: 'POST', - url: '/api/v0/object/data?arg=invalid' - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Code', 1) - expect(res).to.have.nested.property('result.Message').that.is.a('string') - }) + //it('only accepts POST', () => { + // return testHttpMethod('/api/v0/object/data') + //}) + // + //it('returns 400 for request without argument', async () => { + // const res = await http({ + // method: 'POST', + // url: '/api/v0/object/data' + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 400) + // expect(res).to.have.nested.property('result.Message').that.is.a('string') + //}) + // + //it('returns 400 for request with invalid argument', async () => { + // const res = await http({ + // method: 'POST', + // url: '/api/v0/object/data?arg=invalid' + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 400) + // expect(res).to.have.nested.property('result.Code', 1) + // expect(res).to.have.nested.property('result.Message').that.is.a('string') + //}) it('returns value', async () => { ipfs.object.data.withArgs(cid, 
defaultOptions).returns(emptyDirectoryNode.Data) @@ -555,7 +571,7 @@ describe('/object', () => { }, { ipfs }) expect(res).to.have.property('statusCode', 200) - expect(res).to.have.property('result', emptyDirectoryNode.Data) + expect(res).to.have.property('result', emptyDirectoryNode.Data.toString()) }) it('accepts a timeout', async () => { @@ -570,640 +586,640 @@ describe('/object', () => { }, { ipfs }) expect(res).to.have.property('statusCode', 200) - expect(res).to.have.property('result', emptyDirectoryNode.Data) - }) - }) - - describe('/links', () => { - const defaultOptions = { - enc: undefined, - signal: sinon.match.instanceOf(AbortSignal), - timeout: undefined - } - - it('only accepts POST', () => { - return testHttpMethod('/api/v0/object/links') - }) - - it('returns 400 for request without argument', async () => { - const res = await http({ - method: 'POST', - url: '/api/v0/object/links' - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Message').that.is.a('string') - }) - - it('returns 400 for request with invalid argument', async () => { - const res = await http({ - method: 'POST', - url: '/api/v0/object/links?arg=invalid' - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Code', 1) - expect(res).to.have.nested.property('result.Message').that.is.a('string') - }) - - it('returns value', async () => { - ipfs.object.links.withArgs(cid, defaultOptions).returns(fileNode.Links) - - const expectedResult = { - Hash: cid.toString(), - Links: [{ - Name: '', - Hash: cid.toString(), - Size: 5 - }] - } - - const res = await http({ - method: 'POST', - url: `/api/v0/object/links?arg=${cid}` - }, { ipfs }) - - expect(res).to.have.property('statusCode', 200) - expect(res).to.have.deep.property('result', expectedResult) - }) - - // TODO: unskip after switch to v1 CIDs by default - it.skip('should list object links and return a base64 encoded CID', 
async () => { - const res = await http({ - method: 'POST', - url: `/api/v0/object/links?cid-base=base64&arg=${cid}` - }, { ipfs }) - - expect(res).to.have.property('statusCode', 200) - expect(multibase.isEncoded(res.result.Hash)).to.deep.equal('base64') - expect(res).to.have.nested.property('result.Links').that.is.empty() - expect(multibase.isEncoded(res.result.Links[0].Hash)).to.deep.equal('base64') - }) - - it('should not list object links for invalid cid-base option', async () => { - const res = await http({ - method: 'POST', - url: `/api/v0/object/links?cid-base=invalid&arg=${cid}` - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Message').that.includes('Invalid request query input') - }) - - it('accepts a timeout', async () => { - ipfs.object.links.withArgs(cid, { - ...defaultOptions, - timeout: 1000 - }).returns(fileNode.Links) - - const expectedResult = { - Hash: cid.toString(), - Links: [{ - Name: '', - Hash: cid.toString(), - Size: 5 - }] - } - - const res = await http({ - method: 'POST', - url: `/api/v0/object/links?arg=${cid}&timeout=1s` - }, { ipfs }) - - expect(res).to.have.property('statusCode', 200) - expect(res).to.have.deep.property('result', expectedResult) + expect(res).to.have.property('result', emptyDirectoryNode.Data.toString()) }) }) - describe('/patch/append-data', () => { - const defaultOptions = { - enc: undefined, - signal: sinon.match.instanceOf(AbortSignal), - timeout: undefined - } - - it('only accepts POST', () => { - return testHttpMethod('/api/v0/object/patch/append-data') - }) - - it('returns 400 for request without key', async () => { - const res = await http({ - method: 'POST', - url: '/api/v0/object/patch/append-data' - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Message').that.is.a('string') - }) - - it('returns 400 if no data is provided', async () => { - const form = new FormData() - const headers 
= form.getHeaders() - - const payload = await streamToPromise(form) - const res = await http({ - method: 'POST', - url: '/api/v0/object/patch/append-data?arg=QmVLUHkjGg3duGb5w3dnwK5w2P9QWuJmtVNuDPLc9ZDjzk', - headers, - payload - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - }) - - it('returns 400 for request with invalid key', async () => { - const form = new FormData() - const filePath = 'test/fixtures/test-data/badconfig' - form.append('file', fs.createReadStream(filePath)) - const headers = form.getHeaders() - - const payload = await streamToPromise(form) - const res = await http({ - method: 'POST', - url: '/api/v0/object/patch/append-data?arg=invalid', - headers, - payload - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - }) - - it('updates value', async () => { - const data = Buffer.from('TEST' + Date.now()) - - ipfs.object.patch.appendData.withArgs(cid, data, defaultOptions).returns(cid) - ipfs.object.get.withArgs(cid).returns(emptyDirectoryNode) - - const form = new FormData() - form.append('data', data) - const headers = form.getHeaders() - const expectedResult = { - Data: emptyDirectoryNode.Data, - Hash: cid.toString(), - Links: [], - Size: 4 - } - - const payload = await streamToPromise(form) - const res = await http({ - method: 'POST', - url: `/api/v0/object/patch/append-data?arg=${cid}`, - headers, - payload - }, { ipfs }) - - expect(res).to.have.property('statusCode', 200) - expect(res.result).to.deep.equal(expectedResult) - }) - - // TODO: unskip after switch to v1 CIDs by default - it.skip('should append data to object and return a base64 encoded CID', async () => { - const form = new FormData() - form.append('data', Buffer.from('TEST' + Date.now())) - const headers = form.getHeaders() - - const payload = await streamToPromise(form) - const res = await http({ - method: 'POST', - url: `/api/v0/object/patch/append-data?cid-base=base64&arg=${cid}`, - headers, - payload - }, { ipfs }) - - 
expect(res).to.have.property('statusCode', 200) - expect(multibase.isEncoded(res.result.Hash)).to.deep.equal('base64') - }) - - it('should not append data to object for invalid cid-base option', async () => { - const form = new FormData() - form.append('data', Buffer.from('TEST' + Date.now())) - const headers = form.getHeaders() - - const payload = await streamToPromise(form) - const res = await http({ - method: 'POST', - url: `/api/v0/object/patch/append-data?cid-base=invalid&arg=${cid}`, - headers, - payload - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Message').that.includes('Invalid request query input') - }) - - it('accepts a timeout', async () => { - const data = Buffer.from('TEST' + Date.now()) - - ipfs.object.patch.appendData.withArgs(cid, data, { - ...defaultOptions, - timeout: 1000 - }).returns(cid) - ipfs.object.get.withArgs(cid).returns(emptyDirectoryNode) - - const form = new FormData() - form.append('data', data) - const headers = form.getHeaders() - const expectedResult = { - Data: emptyDirectoryNode.Data, - Hash: cid.toString(), - Links: [], - Size: 4 - } - - const payload = await streamToPromise(form) - const res = await http({ - method: 'POST', - url: `/api/v0/object/patch/append-data?arg=${cid}&timeout=1s`, - headers, - payload - }, { ipfs }) - - expect(res).to.have.property('statusCode', 200) - expect(res.result).to.deep.equal(expectedResult) - }) - }) - - describe('/patch/set-data', () => { - const defaultOptions = { - enc: undefined, - signal: sinon.match.instanceOf(AbortSignal), - timeout: undefined - } - - it('only accepts POST', () => { - return testHttpMethod('/api/v0/object/patch/set-data') - }) - - it('returns 400 for request without key', async () => { - const res = await http({ - method: 'POST', - url: '/api/v0/object/patch/set-data' - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - 
expect(res).to.have.nested.property('result.Message').that.is.a('string') - }) - - it('returns 400 if no data is provided', async () => { - const form = new FormData() - const headers = form.getHeaders() - - const payload = await streamToPromise(form) - const res = await http({ - method: 'POST', - url: '/api/v0/object/patch/set-data?arg=QmVLUHkjGg3duGb5w3dnwK5w2P9QWuJmtVNuDPLc9ZDjzk', - headers, - payload - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - }) - - it('returns 400 for request with invalid key', async () => { - const form = new FormData() - const filePath = 'test/fixtures/test-data/badconfig' - form.append('file', fs.createReadStream(filePath)) - const headers = form.getHeaders() - - const payload = await streamToPromise(form) - const res = await http({ - method: 'POST', - url: '/api/v0/object/patch/set-data?arg=invalid', - headers, - payload - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - }) - - it('updates value', async () => { - const data = Buffer.from('TEST' + Date.now()) - - ipfs.object.patch.setData.withArgs(cid, data, defaultOptions).returns(cid) - ipfs.object.get.withArgs(cid).returns(emptyDirectoryNode) - - const form = new FormData() - form.append('data', data) - const headers = form.getHeaders() - const expectedResult = { - Hash: cid.toString(), - Links: [] - } - - const payload = await streamToPromise(form) - const res = await http({ - method: 'POST', - url: `/api/v0/object/patch/set-data?arg=${cid}`, - headers, - payload - }, { ipfs }) - - expect(res).to.have.property('statusCode', 200) - expect(res.result).to.deep.equal(expectedResult) - }) - - // TODO: unskip after switch to v1 CIDs by default - it.skip('should set data for object and return a base64 encoded CID', async () => { - const form = new FormData() - form.append('data', Buffer.from('TEST' + Date.now())) - const headers = form.getHeaders() - - const payload = await streamToPromise(form) - const res = await http({ - method: 'POST', - url: 
`/api/v0/object/patch/set-data?cid-base=base64&arg=${cid}`, - headers, - payload - }, { ipfs }) - - expect(res).to.have.property('statusCode', 200) - expect(multibase.isEncoded(res.result.Hash)).to.deep.equal('base64') - }) - - it('should not set data for object for invalid cid-base option', async () => { - const form = new FormData() - form.append('data', Buffer.from('TEST' + Date.now())) - const headers = form.getHeaders() - - const payload = await streamToPromise(form) - const res = await http({ - method: 'POST', - url: `/api/v0/object/patch/set-data?cid-base=invalid&arg=${cid}`, - headers, - payload - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Message').that.includes('Invalid request query input') - }) - - it('accepts a timeout', async () => { - const data = Buffer.from('TEST' + Date.now()) - - ipfs.object.patch.setData.withArgs(cid, data, { - ...defaultOptions, - timeout: 1000 - }).returns(cid) - ipfs.object.get.withArgs(cid).returns(emptyDirectoryNode) - - const form = new FormData() - form.append('data', data) - const headers = form.getHeaders() - const expectedResult = { - Hash: cid.toString(), - Links: [] - } - - const payload = await streamToPromise(form) - const res = await http({ - method: 'POST', - url: `/api/v0/object/patch/set-data?arg=${cid}&timeout=1s`, - headers, - payload - }, { ipfs }) - - expect(res).to.have.property('statusCode', 200) - expect(res.result).to.deep.equal(expectedResult) - }) - }) - - describe('/patch/add-link', () => { - const defaultOptions = { - enc: undefined, - signal: sinon.match.instanceOf(AbortSignal), - timeout: undefined - } - - it('only accepts POST', () => { - return testHttpMethod('/api/v0/object/patch/add-link') - }) - - it('returns 400 for request without arguments', async () => { - const res = await http({ - method: 'POST', - url: '/api/v0/object/patch/add-link' - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - 
expect(res).to.have.nested.property('result.Message').that.is.a('string') - }) - - it('returns 400 for request with only one invalid argument', async () => { - const res = await http({ - method: 'POST', - url: '/api/v0/object/patch/add-link?arg=invalid' - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Message').that.is.a('string') - }) - - it('returns 400 for request with invalid first argument', async () => { - const res = await http({ - method: 'POST', - url: '/api/v0/object/patch/add-link?arg=&arg=foo&arg=QmTz3oc4gdpRMKP2sdGUPZTAGRngqjsi99BPoztyP53JMM' - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Code', 1) - expect(res).to.have.nested.property('result.Message').that.is.a('string') - }) - - it('returns 400 for request with empty second argument', async () => { - const res = await http({ - method: 'POST', - url: '/api/v0/object/patch/add-link?arg=QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn&arg=&arg=QmTz3oc4gdpRMKP2sdGUPZTAGRngqjsi99BPoztyP53JMM' - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Code', 1) - expect(res).to.have.nested.property('result.Message').that.is.a('string') - }) - - it('returns value', async () => { - const name = 'name' - - ipfs.object.patch.addLink.withArgs(cid, sinon.match({ - Name: name, - Hash: cid2 - }), defaultOptions).returns(cid) - ipfs.object.get.withArgs(cid).returns(fileNode) - ipfs.object.get.withArgs(cid2).returns(fileNode) - - const res = await http({ - method: 'POST', - url: `/api/v0/object/patch/add-link?arg=${cid}&arg=${name}&arg=${cid2}` - }, { ipfs }) - - expect(res).to.have.property('statusCode', 200) - expect(res).to.have.nested.property('result.Hash', cid.toString()) - expect(res).to.have.deep.nested.property('result.Links[0]', { - Name: '', - Hash: cid.toString(), - Size: 5 - }) - }) - - // TODO: unskip after switch to v1 
CIDs by default - it.skip('should add a link to an object and return a base64 encoded CID', async () => { - const res = await http({ - method: 'POST', - url: `/api/v0/object/patch/add-link?cid-base=base64&arg=${cid}&arg=test&arg=${cid2}` - }, { ipfs }) - - expect(res).to.have.property('statusCode', 200) - expect(multibase.isEncoded(res.result.Hash)).to.deep.equal('base64') - }) - - it('should not add a link to an object for invalid cid-base option', async () => { - const res = await http({ - method: 'POST', - url: `/api/v0/object/patch/add-link?cid-base=invalid&arg=${cid}&arg=test&arg=${cid2}` - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Message').that.includes('Invalid request query input') - }) - - it('accepts a timeout', async () => { - const name = 'name' - - ipfs.object.patch.addLink.withArgs(cid, sinon.match({ - Name: name, - Hash: cid2 - }), { - ...defaultOptions, - timeout: 1000 - }).returns(cid) - ipfs.object.get.withArgs(cid).returns(fileNode) - ipfs.object.get.withArgs(cid2).returns(fileNode) - - const res = await http({ - method: 'POST', - url: `/api/v0/object/patch/add-link?arg=${cid}&arg=${name}&arg=${cid2}&timeout=1s` - }, { ipfs }) - - expect(res).to.have.property('statusCode', 200) - expect(res).to.have.nested.property('result.Hash', cid.toString()) - expect(res).to.have.deep.nested.property('result.Links[0]', { - Name: '', - Hash: cid.toString(), - Size: 5 - }) - }) - }) - - describe('/patch/rm-link', () => { - const defaultOptions = { - enc: undefined, - signal: sinon.match.instanceOf(AbortSignal), - timeout: undefined - } - - it('only accepts POST', () => { - return testHttpMethod('/api/v0/object/patch/rm-link') - }) - - it('returns 400 for request without arguments', async () => { - const res = await http({ - method: 'POST', - url: '/api/v0/object/patch/rm-link' - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - 
expect(res).to.have.nested.property('result.Message').that.is.a('string') - }) - - it('returns 400 for request with only one invalid argument', async () => { - const res = await http({ - method: 'POST', - url: '/api/v0/object/patch/rm-link?arg=invalid' - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Message').that.is.a('string') - }) - - it('returns 400 for request with invalid first argument', async () => { - const res = await http({ - method: 'POST', - url: '/api/v0/object/patch/rm-link?arg=invalid&arg=foo' - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Code', 1) - expect(res).to.have.nested.property('result.Message').that.is.a('string') - }) - - it('returns 400 for request with invalid second argument', async () => { - const res = await http({ - method: 'POST', - url: '/api/v0/object/patch/rm-link?arg=QmZKetgwm4o3LhNaoLSHv32wBhTwj9FBwAdSchDMKyFQEx&arg=' - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Code', 1) - expect(res).to.have.nested.property('result.Message').that.is.a('string') - }) - - it('returns value', async () => { - const name = 'name' - - ipfs.object.patch.rmLink.withArgs(cid, { - ...defaultOptions, - name - }).returns(cid2) - ipfs.object.get.withArgs(cid2).returns(emptyDirectoryNode) - - const res = await http({ - method: 'POST', - url: `/api/v0/object/patch/rm-link?arg=${cid}&arg=${name}` - }, { ipfs }) - - expect(res).to.have.property('statusCode', 200) - expect(res).to.have.nested.property('result.Hash', cid2.toString()) - }) - - // TODO: unskip after switch to v1 CIDs by default - it.skip('should remove a link from an object and return a base64 encoded CID', async () => { - const name = 'name' - - const res = await http({ - method: 'POST', - url: `/api/v0/object/patch/rm-link?cid-base=base64&arg=${cid}&arg=${name}` - }, { ipfs }) - - 
expect(res).to.have.property('statusCode', 200) - expect(multibase.isEncoded(res.result.Hash)).to.deep.equal('base64') - }) - - it('should not remove a link from an object for invalid cid-base option', async () => { - const res = await http({ - method: 'POST', - url: `/api/v0/object/patch/rm-link?cid-base=invalid&arg=${cid}&arg=derp` - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Message').that.includes('Invalid request query input') - }) - - it('accepts a timeout', async () => { - const name = 'name' - - ipfs.object.patch.rmLink.withArgs(cid, { - ...defaultOptions, - name, - timeout: 1000 - }).returns(cid2) - ipfs.object.get.withArgs(cid2).returns(emptyDirectoryNode) - - const res = await http({ - method: 'POST', - url: `/api/v0/object/patch/rm-link?arg=${cid}&arg=${name}&timeout=1s` - }, { ipfs }) - - expect(res).to.have.property('statusCode', 200) - expect(res).to.have.nested.property('result.Hash', cid2.toString()) - }) - }) + //describe('/links', () => { + // const defaultOptions = { + // enc: undefined, + // signal: sinon.match.instanceOf(AbortSignal), + // timeout: undefined + // } + // + // it('only accepts POST', () => { + // return testHttpMethod('/api/v0/object/links') + // }) + // + // it('returns 400 for request without argument', async () => { + // const res = await http({ + // method: 'POST', + // url: '/api/v0/object/links' + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 400) + // expect(res).to.have.nested.property('result.Message').that.is.a('string') + // }) + // + // it('returns 400 for request with invalid argument', async () => { + // const res = await http({ + // method: 'POST', + // url: '/api/v0/object/links?arg=invalid' + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 400) + // expect(res).to.have.nested.property('result.Code', 1) + // expect(res).to.have.nested.property('result.Message').that.is.a('string') + // }) + // + // 
it('returns value', async () => { + // ipfs.object.links.withArgs(cid, defaultOptions).returns(fileNode.Links) + // + // const expectedResult = { + // Hash: cid.toString(), + // Links: [{ + // Name: '', + // Hash: cid.toString(), + // Size: 5 + // }] + // } + // + // const res = await http({ + // method: 'POST', + // url: `/api/v0/object/links?arg=${cid}` + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 200) + // expect(res).to.have.deep.property('result', expectedResult) + // }) + // + // // TODO: unskip after switch to v1 CIDs by default + // it.skip('should list object links and return a base64 encoded CID', async () => { + // const res = await http({ + // method: 'POST', + // url: `/api/v0/object/links?cid-base=base64&arg=${cid}` + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 200) + // expect(multibase.isEncoded(res.result.Hash)).to.deep.equal('base64') + // expect(res).to.have.nested.property('result.Links').that.is.empty() + // expect(multibase.isEncoded(res.result.Links[0].Hash)).to.deep.equal('base64') + // }) + // + // it('should not list object links for invalid cid-base option', async () => { + // const res = await http({ + // method: 'POST', + // url: `/api/v0/object/links?cid-base=invalid&arg=${cid}` + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 400) + // expect(res).to.have.nested.property('result.Message').that.includes('Invalid request query input') + // }) + // + // it('accepts a timeout', async () => { + // ipfs.object.links.withArgs(cid, { + // ...defaultOptions, + // timeout: 1000 + // }).returns(fileNode.Links) + // + // const expectedResult = { + // Hash: cid.toString(), + // Links: [{ + // Name: '', + // Hash: cid.toString(), + // Size: 5 + // }] + // } + // + // const res = await http({ + // method: 'POST', + // url: `/api/v0/object/links?arg=${cid}&timeout=1s` + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 200) + // 
expect(res).to.have.deep.property('result', expectedResult) + // }) + //}) + // + //describe('/patch/append-data', () => { + // const defaultOptions = { + // enc: undefined, + // signal: sinon.match.instanceOf(AbortSignal), + // timeout: undefined + // } + // + // it('only accepts POST', () => { + // return testHttpMethod('/api/v0/object/patch/append-data') + // }) + // + // it('returns 400 for request without key', async () => { + // const res = await http({ + // method: 'POST', + // url: '/api/v0/object/patch/append-data' + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 400) + // expect(res).to.have.nested.property('result.Message').that.is.a('string') + // }) + // + // it('returns 400 if no data is provided', async () => { + // const form = new FormData() + // const headers = form.getHeaders() + // + // const payload = await streamToPromise(form) + // const res = await http({ + // method: 'POST', + // url: '/api/v0/object/patch/append-data?arg=QmVLUHkjGg3duGb5w3dnwK5w2P9QWuJmtVNuDPLc9ZDjzk', + // headers, + // payload + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 400) + // }) + // + // it('returns 400 for request with invalid key', async () => { + // const form = new FormData() + // const filePath = 'test/fixtures/test-data/badconfig' + // form.append('file', fs.createReadStream(filePath)) + // const headers = form.getHeaders() + // + // const payload = await streamToPromise(form) + // const res = await http({ + // method: 'POST', + // url: '/api/v0/object/patch/append-data?arg=invalid', + // headers, + // payload + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 400) + // }) + // + // it('updates value', async () => { + // const data = Buffer.from('TEST' + Date.now()) + // + // ipfs.object.patch.appendData.withArgs(cid, data, defaultOptions).returns(cid) + // ipfs.object.get.withArgs(cid).returns(emptyDirectoryNode) + // + // const form = new FormData() + // form.append('data', data) + // const 
headers = form.getHeaders() + // const expectedResult = { + // Data: emptyDirectoryNode.Data, + // Hash: cid.toString(), + // Links: [], + // Size: 4 + // } + // + // const payload = await streamToPromise(form) + // const res = await http({ + // method: 'POST', + // url: `/api/v0/object/patch/append-data?arg=${cid}`, + // headers, + // payload + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 200) + // expect(res.result).to.deep.equal(expectedResult) + // }) + // + // // TODO: unskip after switch to v1 CIDs by default + // it.skip('should append data to object and return a base64 encoded CID', async () => { + // const form = new FormData() + // form.append('data', Buffer.from('TEST' + Date.now())) + // const headers = form.getHeaders() + // + // const payload = await streamToPromise(form) + // const res = await http({ + // method: 'POST', + // url: `/api/v0/object/patch/append-data?cid-base=base64&arg=${cid}`, + // headers, + // payload + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 200) + // expect(multibase.isEncoded(res.result.Hash)).to.deep.equal('base64') + // }) + // + // it('should not append data to object for invalid cid-base option', async () => { + // const form = new FormData() + // form.append('data', Buffer.from('TEST' + Date.now())) + // const headers = form.getHeaders() + // + // const payload = await streamToPromise(form) + // const res = await http({ + // method: 'POST', + // url: `/api/v0/object/patch/append-data?cid-base=invalid&arg=${cid}`, + // headers, + // payload + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 400) + // expect(res).to.have.nested.property('result.Message').that.includes('Invalid request query input') + // }) + // + // it('accepts a timeout', async () => { + // const data = Buffer.from('TEST' + Date.now()) + // + // ipfs.object.patch.appendData.withArgs(cid, data, { + // ...defaultOptions, + // timeout: 1000 + // }).returns(cid) + // 
ipfs.object.get.withArgs(cid).returns(emptyDirectoryNode) + // + // const form = new FormData() + // form.append('data', data) + // const headers = form.getHeaders() + // const expectedResult = { + // Data: emptyDirectoryNode.Data, + // Hash: cid.toString(), + // Links: [], + // Size: 4 + // } + // + // const payload = await streamToPromise(form) + // const res = await http({ + // method: 'POST', + // url: `/api/v0/object/patch/append-data?arg=${cid}&timeout=1s`, + // headers, + // payload + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 200) + // expect(res.result).to.deep.equal(expectedResult) + // }) + //}) + // + //describe('/patch/set-data', () => { + // const defaultOptions = { + // enc: undefined, + // signal: sinon.match.instanceOf(AbortSignal), + // timeout: undefined + // } + // + // it('only accepts POST', () => { + // return testHttpMethod('/api/v0/object/patch/set-data') + // }) + // + // it('returns 400 for request without key', async () => { + // const res = await http({ + // method: 'POST', + // url: '/api/v0/object/patch/set-data' + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 400) + // expect(res).to.have.nested.property('result.Message').that.is.a('string') + // }) + // + // it('returns 400 if no data is provided', async () => { + // const form = new FormData() + // const headers = form.getHeaders() + // + // const payload = await streamToPromise(form) + // const res = await http({ + // method: 'POST', + // url: '/api/v0/object/patch/set-data?arg=QmVLUHkjGg3duGb5w3dnwK5w2P9QWuJmtVNuDPLc9ZDjzk', + // headers, + // payload + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 400) + // }) + // + // it('returns 400 for request with invalid key', async () => { + // const form = new FormData() + // const filePath = 'test/fixtures/test-data/badconfig' + // form.append('file', fs.createReadStream(filePath)) + // const headers = form.getHeaders() + // + // const payload = await 
streamToPromise(form) + // const res = await http({ + // method: 'POST', + // url: '/api/v0/object/patch/set-data?arg=invalid', + // headers, + // payload + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 400) + // }) + // + // it('updates value', async () => { + // const data = Buffer.from('TEST' + Date.now()) + // + // ipfs.object.patch.setData.withArgs(cid, data, defaultOptions).returns(cid) + // ipfs.object.get.withArgs(cid).returns(emptyDirectoryNode) + // + // const form = new FormData() + // form.append('data', data) + // const headers = form.getHeaders() + // const expectedResult = { + // Hash: cid.toString(), + // Links: [] + // } + // + // const payload = await streamToPromise(form) + // const res = await http({ + // method: 'POST', + // url: `/api/v0/object/patch/set-data?arg=${cid}`, + // headers, + // payload + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 200) + // expect(res.result).to.deep.equal(expectedResult) + // }) + // + // // TODO: unskip after switch to v1 CIDs by default + // it.skip('should set data for object and return a base64 encoded CID', async () => { + // const form = new FormData() + // form.append('data', Buffer.from('TEST' + Date.now())) + // const headers = form.getHeaders() + // + // const payload = await streamToPromise(form) + // const res = await http({ + // method: 'POST', + // url: `/api/v0/object/patch/set-data?cid-base=base64&arg=${cid}`, + // headers, + // payload + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 200) + // expect(multibase.isEncoded(res.result.Hash)).to.deep.equal('base64') + // }) + // + // it('should not set data for object for invalid cid-base option', async () => { + // const form = new FormData() + // form.append('data', Buffer.from('TEST' + Date.now())) + // const headers = form.getHeaders() + // + // const payload = await streamToPromise(form) + // const res = await http({ + // method: 'POST', + // url: 
`/api/v0/object/patch/set-data?cid-base=invalid&arg=${cid}`, + // headers, + // payload + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 400) + // expect(res).to.have.nested.property('result.Message').that.includes('Invalid request query input') + // }) + // + // it('accepts a timeout', async () => { + // const data = Buffer.from('TEST' + Date.now()) + // + // ipfs.object.patch.setData.withArgs(cid, data, { + // ...defaultOptions, + // timeout: 1000 + // }).returns(cid) + // ipfs.object.get.withArgs(cid).returns(emptyDirectoryNode) + // + // const form = new FormData() + // form.append('data', data) + // const headers = form.getHeaders() + // const expectedResult = { + // Hash: cid.toString(), + // Links: [] + // } + // + // const payload = await streamToPromise(form) + // const res = await http({ + // method: 'POST', + // url: `/api/v0/object/patch/set-data?arg=${cid}&timeout=1s`, + // headers, + // payload + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 200) + // expect(res.result).to.deep.equal(expectedResult) + // }) + //}) + // + //describe('/patch/add-link', () => { + // const defaultOptions = { + // enc: undefined, + // signal: sinon.match.instanceOf(AbortSignal), + // timeout: undefined + // } + // + // it('only accepts POST', () => { + // return testHttpMethod('/api/v0/object/patch/add-link') + // }) + // + // it('returns 400 for request without arguments', async () => { + // const res = await http({ + // method: 'POST', + // url: '/api/v0/object/patch/add-link' + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 400) + // expect(res).to.have.nested.property('result.Message').that.is.a('string') + // }) + // + // it('returns 400 for request with only one invalid argument', async () => { + // const res = await http({ + // method: 'POST', + // url: '/api/v0/object/patch/add-link?arg=invalid' + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 400) + // 
expect(res).to.have.nested.property('result.Message').that.is.a('string') + // }) + // + // it('returns 400 for request with invalid first argument', async () => { + // const res = await http({ + // method: 'POST', + // url: '/api/v0/object/patch/add-link?arg=&arg=foo&arg=QmTz3oc4gdpRMKP2sdGUPZTAGRngqjsi99BPoztyP53JMM' + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 400) + // expect(res).to.have.nested.property('result.Code', 1) + // expect(res).to.have.nested.property('result.Message').that.is.a('string') + // }) + // + // it('returns 400 for request with empty second argument', async () => { + // const res = await http({ + // method: 'POST', + // url: '/api/v0/object/patch/add-link?arg=QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn&arg=&arg=QmTz3oc4gdpRMKP2sdGUPZTAGRngqjsi99BPoztyP53JMM' + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 400) + // expect(res).to.have.nested.property('result.Code', 1) + // expect(res).to.have.nested.property('result.Message').that.is.a('string') + // }) + // + // it('returns value', async () => { + // const name = 'name' + // + // ipfs.object.patch.addLink.withArgs(cid, sinon.match({ + // Name: name, + // Hash: cid2 + // }), defaultOptions).returns(cid) + // ipfs.object.get.withArgs(cid).returns(fileNode) + // ipfs.object.get.withArgs(cid2).returns(fileNode) + // + // const res = await http({ + // method: 'POST', + // url: `/api/v0/object/patch/add-link?arg=${cid}&arg=${name}&arg=${cid2}` + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 200) + // expect(res).to.have.nested.property('result.Hash', cid.toString()) + // expect(res).to.have.deep.nested.property('result.Links[0]', { + // Name: '', + // Hash: cid.toString(), + // Size: 5 + // }) + // }) + // + // // TODO: unskip after switch to v1 CIDs by default + // it.skip('should add a link to an object and return a base64 encoded CID', async () => { + // const res = await http({ + // method: 'POST', + // url: 
`/api/v0/object/patch/add-link?cid-base=base64&arg=${cid}&arg=test&arg=${cid2}` + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 200) + // expect(multibase.isEncoded(res.result.Hash)).to.deep.equal('base64') + // }) + // + // it('should not add a link to an object for invalid cid-base option', async () => { + // const res = await http({ + // method: 'POST', + // url: `/api/v0/object/patch/add-link?cid-base=invalid&arg=${cid}&arg=test&arg=${cid2}` + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 400) + // expect(res).to.have.nested.property('result.Message').that.includes('Invalid request query input') + // }) + // + // it('accepts a timeout', async () => { + // const name = 'name' + // + // ipfs.object.patch.addLink.withArgs(cid, sinon.match({ + // Name: name, + // Hash: cid2 + // }), { + // ...defaultOptions, + // timeout: 1000 + // }).returns(cid) + // ipfs.object.get.withArgs(cid).returns(fileNode) + // ipfs.object.get.withArgs(cid2).returns(fileNode) + // + // const res = await http({ + // method: 'POST', + // url: `/api/v0/object/patch/add-link?arg=${cid}&arg=${name}&arg=${cid2}&timeout=1s` + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 200) + // expect(res).to.have.nested.property('result.Hash', cid.toString()) + // expect(res).to.have.deep.nested.property('result.Links[0]', { + // Name: '', + // Hash: cid.toString(), + // Size: 5 + // }) + // }) + //}) + // + //describe('/patch/rm-link', () => { + // const defaultOptions = { + // enc: undefined, + // signal: sinon.match.instanceOf(AbortSignal), + // timeout: undefined + // } + // + // it('only accepts POST', () => { + // return testHttpMethod('/api/v0/object/patch/rm-link') + // }) + // + // it('returns 400 for request without arguments', async () => { + // const res = await http({ + // method: 'POST', + // url: '/api/v0/object/patch/rm-link' + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 400) + // 
expect(res).to.have.nested.property('result.Message').that.is.a('string') + // }) + // + // it('returns 400 for request with only one invalid argument', async () => { + // const res = await http({ + // method: 'POST', + // url: '/api/v0/object/patch/rm-link?arg=invalid' + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 400) + // expect(res).to.have.nested.property('result.Message').that.is.a('string') + // }) + // + // it('returns 400 for request with invalid first argument', async () => { + // const res = await http({ + // method: 'POST', + // url: '/api/v0/object/patch/rm-link?arg=invalid&arg=foo' + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 400) + // expect(res).to.have.nested.property('result.Code', 1) + // expect(res).to.have.nested.property('result.Message').that.is.a('string') + // }) + // + // it('returns 400 for request with invalid second argument', async () => { + // const res = await http({ + // method: 'POST', + // url: '/api/v0/object/patch/rm-link?arg=QmZKetgwm4o3LhNaoLSHv32wBhTwj9FBwAdSchDMKyFQEx&arg=' + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 400) + // expect(res).to.have.nested.property('result.Code', 1) + // expect(res).to.have.nested.property('result.Message').that.is.a('string') + // }) + // + // it('returns value', async () => { + // const name = 'name' + // + // ipfs.object.patch.rmLink.withArgs(cid, { + // ...defaultOptions, + // name + // }).returns(cid2) + // ipfs.object.get.withArgs(cid2).returns(emptyDirectoryNode) + // + // const res = await http({ + // method: 'POST', + // url: `/api/v0/object/patch/rm-link?arg=${cid}&arg=${name}` + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 200) + // expect(res).to.have.nested.property('result.Hash', cid2.toString()) + // }) + // + // // TODO: unskip after switch to v1 CIDs by default + // it.skip('should remove a link from an object and return a base64 encoded CID', async () => { + // const name = 
'name' + // + // const res = await http({ + // method: 'POST', + // url: `/api/v0/object/patch/rm-link?cid-base=base64&arg=${cid}&arg=${name}` + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 200) + // expect(multibase.isEncoded(res.result.Hash)).to.deep.equal('base64') + // }) + // + // it('should not remove a link from an object for invalid cid-base option', async () => { + // const res = await http({ + // method: 'POST', + // url: `/api/v0/object/patch/rm-link?cid-base=invalid&arg=${cid}&arg=derp` + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 400) + // expect(res).to.have.nested.property('result.Message').that.includes('Invalid request query input') + // }) + // + // it('accepts a timeout', async () => { + // const name = 'name' + // + // ipfs.object.patch.rmLink.withArgs(cid, { + // ...defaultOptions, + // name, + // timeout: 1000 + // }).returns(cid2) + // ipfs.object.get.withArgs(cid2).returns(emptyDirectoryNode) + // + // const res = await http({ + // method: 'POST', + // url: `/api/v0/object/patch/rm-link?arg=${cid}&arg=${name}&timeout=1s` + // }, { ipfs }) + // + // expect(res).to.have.property('statusCode', 200) + // expect(res).to.have.nested.property('result.Hash', cid2.toString()) + // }) + //}) }) diff --git a/packages/ipfs-message-port-client/package.json b/packages/ipfs-message-port-client/package.json index a1db779b95..472f49c055 100644 --- a/packages/ipfs-message-port-client/package.json +++ b/packages/ipfs-message-port-client/package.json @@ -33,7 +33,7 @@ "test": "echo 'Only interface tests live here'", "test:interface:message-port-client": "npm run prepare && aegir test -t browser -f ./test/interface-message-port-client.js", "lint": "aegir lint", - "prepare": "aegir build --no-bundle && aegir build -- --config ./test/util/webpack.config.js", + "prepare": "aegir build --no-bundle --no-types && aegir build --no-types -- --config ./test/util/webpack.config.js", "prepublishOnly": "aegir build && aegir 
build -- --config ./test/util/webpack.config.js", "coverage": "npx nyc -r html npm run test:node -- --bail", "clean": "rimraf ./dist", diff --git a/packages/ipfs-message-port-protocol/package.json b/packages/ipfs-message-port-protocol/package.json index a1e5d0bd79..32dbba0f81 100644 --- a/packages/ipfs-message-port-protocol/package.json +++ b/packages/ipfs-message-port-protocol/package.json @@ -34,7 +34,7 @@ "test:chrome": "aegir test -t browser -t webworker -- --browsers ChromeHeadless", "test:firefox": "aegir test -t browser -t webworker -- --browsers FirefoxHeadless", "lint": "aegir lint", - "prepare": "aegir build --no-bundle", + "prepare": "aegir build --no-bundle --no-types", "coverage": "npx nyc -r html npm run test:node -- --bail", "clean": "rimraf ./dist", "dep-check": "aegir dep-check -i rimraf" diff --git a/packages/ipfs-message-port-server/package.json b/packages/ipfs-message-port-server/package.json index d87e29da1f..be5b27de7d 100644 --- a/packages/ipfs-message-port-server/package.json +++ b/packages/ipfs-message-port-server/package.json @@ -36,7 +36,7 @@ "test:chrome": "aegir test -t browser -t webworker -- --browsers ChromeHeadless", "test:firefox": "aegir test -t browser -t webworker -- --browsers FirefoxHeadless", "lint": "aegir lint", - "prepare": "aegir build --no-bundle", + "prepare": "aegir build --no-bundle --no-types", "prepublishOnly": "aegir build", "coverage": "npx nyc -r html npm run test:node -- --bail", "clean": "rimraf ./dist", diff --git a/packages/ipfs/package.json b/packages/ipfs/package.json index ea39e5707c..d149946f13 100644 --- a/packages/ipfs/package.json +++ b/packages/ipfs/package.json @@ -23,7 +23,7 @@ }, "scripts": { "lint": "aegir lint", - "prepare": "aegir build --no-bundle", + "prepare": "aegir build --no-bundle --no-types", "prepublishOnly": "aegir build", "test": "echo 'Only interface tests live here'", "test:interface:core": "aegir test -f test/interface-core.js",