This repository has been archived by the owner on Mar 10, 2020. It is now read-only.

refactor: reworks resolve tests with async/await #504

Merged · 1 commit · Jul 25, 2019
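The commit replaces the callback and `async/waterfall` style used throughout these tests with `async`/`await`. Condensed from the "should resolve an IPFS hash" test in the diff below, the shape of the change is roughly:

```js
// Before: nested callbacks, manual error checks, explicit done()
it('should resolve an IPFS hash', (done) => {
  ipfs.add(content, (err, res) => {
    expect(err).to.not.exist()
    ipfs.resolve(`/ipfs/${res[0].hash}`, (err, path) => {
      expect(err).to.not.exist()
      expect(path).to.equal(`/ipfs/${res[0].hash}`)
      done()
    })
  })
})

// After: awaits run sequentially and any rejection fails the test
it('should resolve an IPFS hash', async () => {
  const [ { hash } ] = await ipfs.add(content)
  const path = await ipfs.resolve(`/ipfs/${hash}`)
  expect(path).to.equal(`/ipfs/${hash}`)
})
```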
154 changes: 45 additions & 109 deletions src/miscellaneous/resolve.js
@@ -4,10 +4,8 @@
 const isIpfs = require('is-ipfs')
 const loadFixture = require('aegir/fixtures')
 const hat = require('hat')
-const waterfall = require('async/waterfall')
 const multibase = require('multibase')
 const { spawnNodeWithId } = require('../utils/spawn')
-const { connect } = require('../utils/swarm')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
 
 module.exports = (createCommon, options) => {
@@ -16,150 +14,88 @@ module.exports = (createCommon, options) => {
   const common = createCommon()
 
   describe('.resolve', () => {
-    let factory, ipfs
+    let ipfs
+    let nodeId
 
     before(function (done) {
       // CI takes longer to instantiate the daemon, so we need to increase the
       // timeout for the before step
       this.timeout(60 * 1000)
 
-      common.setup((err, f) => {
+      common.setup((err, factory) => {
         expect(err).to.not.exist()
-        factory = f
-        factory.spawnNode((err, node) => {
+        spawnNodeWithId(factory, (err, node) => {
           expect(err).to.not.exist()
+
           ipfs = node
+          nodeId = node.peerId.id
           done()
         })
       })
     })
 
-    after(function (done) {
-      this.timeout(10 * 1000)
-      common.teardown(done)
-    })
+    after(common.teardown)
 
-    it('should resolve an IPFS hash', (done) => {
+    it('should resolve an IPFS hash', async () => {
       const content = loadFixture('test/fixtures/testfile.txt', 'interface-ipfs-core')
 
-      ipfs.add(content, (err, res) => {
-        expect(err).to.not.exist()
-        expect(isIpfs.cid(res[0].hash)).to.be.true()
-
-        ipfs.resolve(`/ipfs/${res[0].hash}`, (err, path) => {
-          expect(err).to.not.exist()
-          expect(path).to.equal(`/ipfs/${res[0].hash}`)
-          done()
-        })
-      })
+      const [ { hash } ] = await ipfs.add(content)
+      const path = await ipfs.resolve(`/ipfs/${hash}`)
+      expect(path).to.equal(`/ipfs/${hash}`)
     })
 
-    it('should resolve an IPFS hash and return a base64url encoded CID in path', (done) => {
-      const content = Buffer.from('TEST' + Date.now())
-
-      ipfs.add(content, (err, res) => {
-        expect(err).to.not.exist()
+    it('should resolve an IPFS hash and return a base64url encoded CID in path', async () => {
+      const [ { hash } ] = await ipfs.add(Buffer.from('base64url encoded'))
+      const path = await ipfs.resolve(`/ipfs/${hash}`, { cidBase: 'base64url' })
+      const [,, cid] = path.split('/')
 
-        ipfs.resolve(`/ipfs/${res[0].hash}`, { cidBase: 'base64url' }, (err, path) => {
-          expect(err).to.not.exist()
-          const cid = path.split('/')[2]
-          expect(multibase.isEncoded(cid)).to.equal('base64url')
-          done()
-        })
-      })
+      expect(multibase.isEncoded(cid)).to.equal('base64url')
     })
 
     // Test resolve turns /ipfs/QmRootHash/path/to/file into /ipfs/QmFileHash
-    it('should resolve an IPFS path link', (done) => {
+    it('should resolve an IPFS path link', async () => {
       const path = 'path/to/testfile.txt'
       const content = loadFixture('test/fixtures/testfile.txt', 'interface-ipfs-core')
+      const [{ hash: fileHash }, , , { hash: rootHash }] = await ipfs.add([{ path, content }], { wrapWithDirectory: true })
+      const resolve = await ipfs.resolve(`/ipfs/${rootHash}/${path}`)
 
-      ipfs.add([{ path, content }], { wrapWithDirectory: true }, (err, res) => {
-        expect(err).to.not.exist()
-
-        const rootHash = res.find(r => r.path === '').hash
-        const fileHash = res.find(r => r.path === path).hash
-
-        ipfs.resolve(`/ipfs/${rootHash}/${path}`, (err, path) => {
-          expect(err).to.not.exist()
-          expect(path).to.equal(`/ipfs/${fileHash}`)
-          done()
-        })
-      })
+      expect(resolve).to.equal(`/ipfs/${fileHash}`)
     })
 
-    it('should resolve up to the last node', (done) => {
+    it('should resolve up to the last node', async () => {
       const content = { path: { to: { file: hat() } } }
       const options = { format: 'dag-cbor', hashAlg: 'sha2-256' }
+      const cid = await ipfs.dag.put(content, options)
+      const path = `/ipfs/${cid}/path/to/file`
+      const resolved = await ipfs.resolve(path)
 
-      ipfs.dag.put(content, options, (err, cid) => {
-        expect(err).to.not.exist()
-
-        const path = `/ipfs/${cid}/path/to/file`
-        ipfs.resolve(path, (err, resolved) => {
-          expect(err).to.not.exist()
-          expect(resolved).to.equal(path)
-          done()
-        })
-      })
+      expect(resolved).to.equal(path)
     })
 
-    it('should resolve up to the last node across multiple nodes', (done) => {
+    it('should resolve up to the last node across multiple nodes', async () => {
       const options = { format: 'dag-cbor', hashAlg: 'sha2-256' }
+      const childCid = await ipfs.dag.put({ node: { with: { file: hat() } } }, options)
+      const parentCid = await ipfs.dag.put({ path: { to: childCid } }, options)
+      const resolved = await ipfs.resolve(`/ipfs/${parentCid}/path/to/node/with/file`)
 
-      waterfall([
-        cb => {
-          const content = { node: { with: { file: hat() } } }
-          ipfs.dag.put(content, options, cb)
-        },
-        (childCid, cb) => {
-          const content = { path: { to: childCid } }
-          ipfs.dag.put(content, options, (err, parentCid) => cb(err, { childCid, parentCid }))
-        }
-      ], (err, res) => {
-        expect(err).to.not.exist()
-
-        const path = `/ipfs/${res.parentCid}/path/to/node/with/file`
-        ipfs.resolve(path, (err, resolved) => {
-          expect(err).to.not.exist()
-          expect(resolved).to.equal(`/ipfs/${res.childCid}/node/with/file`)
-          done()
-        })
-      })
+      expect(resolved).to.equal(`/ipfs/${childCid}/node/with/file`)
     })
 
     // Test resolve turns /ipns/domain.com into /ipfs/QmHash
-    it('should resolve an IPNS DNS link', function (done) {
-      this.timeout(20 * 1000)
+    it('should resolve an IPNS DNS link', async function () {
+      this.retries(3)
+      const resolved = await ipfs.resolve('/ipns/ipfs.io')
 
-      ipfs.resolve('/ipns/ipfs.io', { r: true }, (err, path) => {
-        expect(err).to.not.exist()
-        expect(isIpfs.ipfsPath(path)).to.be.true()
-        done()
-      })
+      expect(isIpfs.ipfsPath(resolved)).to.be.true()
     })
 
     // Test resolve turns /ipns/QmPeerHash into /ipns/domain.com into /ipfs/QmHash
-    it('should resolve IPNS link recursively', function (done) {
-      this.timeout(5 * 60 * 1000)
-
-      waterfall([
-        // Ensure node has another node to publish a name to
-        (cb) => spawnNodeWithId(factory, cb),
-        (ipfsB, cb) => {
-          const addr = ipfsB.peerId.addresses.find((a) => a.includes('127.0.0.1'))
-          connect(ipfs, addr, cb)
-        },
-        (cb) => ipfs.name.publish('/ipns/ipfs.io', { resolve: false }, cb),
-        (res, cb) => {
-          ipfs.resolve(`/ipns/${res.name}`, { recursive: true }, (err, res) => {
-            expect(err).to.not.exist()
-            expect(res).to.not.equal('/ipns/ipfs.io')
-            expect(isIpfs.ipfsPath(res)).to.be.true()
-            cb()
-          })
-        }
-      ], done)
+    it('should resolve IPNS link recursively', async function () {
+      this.timeout(20 * 1000)
+
+      const [{ path }] = await ipfs.add(Buffer.from('should resolve a record recursive === true'))
+      const { id: keyId } = await ipfs.key.gen('key-name', { type: 'rsa', size: 2048 })
+
+      await ipfs.name.publish(path, { 'allow-offline': true })
+      await ipfs.name.publish(`/ipns/${nodeId}`, { 'allow-offline': true, key: 'key-name' })
+
+      return expect(await ipfs.resolve(`/ipns/${keyId}`, { recursive: true }))
+        .to.eq(`/ipfs/${path}`)
     })
   })
 }
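As the unchanged `module.exports = (createCommon, options) => {` signature shows, this file exports a parameterized test suite rather than running on its own; an implementation supplies the daemon factory via `createCommon`. A minimal wiring sketch, with a hypothetical stub factory standing in for whatever a real consumer would pass (e.g. an ipfsd-ctl factory whose `spawnNode` yields a node exposing the core API), and an assumed `{ skip: [] }` options shape:

```js
// Hypothetical wiring sketch: stubFactory only illustrates the shape the suite
// relies on (factory.spawnNode(cb) -> node); it is not part of this repository.
const resolveTests = require('interface-ipfs-core/src/miscellaneous/resolve')

const stubFactory = {
  spawnNode (cb) {
    // A real factory would spawn an IPFS daemon and call back with its API object.
    cb(new Error('replace stubFactory with a real daemon factory'))
  }
}

const createCommon = () => ({
  setup: (cb) => cb(null, stubFactory),
  teardown: (cb) => cb()
})

// options is presumably consumed by getDescribe/getIt to skip or isolate cases.
resolveTests(createCommon, { skip: [] })
```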