From 76959ac9d32143da42e4e9ba417bb6c81534ca36 Mon Sep 17 00:00:00 2001 From: greenkeeperio-bot Date: Fri, 9 Sep 2016 17:01:04 -0400 Subject: [PATCH 1/8] chore(package): update ipfs-unixfs-engine to version 0.11.3 https://greenkeeper.io/ --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 718654fcc6..d684499640 100644 --- a/package.json +++ b/package.json @@ -76,7 +76,7 @@ "ipfs-multipart": "^0.1.0", "ipfs-repo": "^0.9.0", "ipfs-unixfs": "^0.1.4", - "ipfs-unixfs-engine": "^0.11.2", + "ipfs-unixfs-engine": "^0.11.3", "isstream": "^0.1.2", "joi": "^9.0.4", "libp2p-ipfs": "^0.13.0", From 2bf49ea40cf64b1eed72465aecbc4ea890039f14 Mon Sep 17 00:00:00 2001 From: Victor Bjelkholm Date: Sat, 10 Sep 2016 18:14:50 +0200 Subject: [PATCH 2/8] fix(cli): use right argument for cli .cat Add tests for making sure .cat shows right output and fix that test by using the right argument from cli. Ref: issue #476 --- src/cli/commands/files/cat.js | 2 +- test/cli/test-files.js | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/src/cli/commands/files/cat.js b/src/cli/commands/files/cat.js index 7129aeed66..34737439ae 100644 --- a/src/cli/commands/files/cat.js +++ b/src/cli/commands/files/cat.js @@ -13,7 +13,7 @@ module.exports = { builder: {}, handler (argv) { - const path = argv.ipfsPath + const path = argv['ipfs-path'] utils.getIPFS((err, ipfs) => { if (err) { throw err diff --git a/test/cli/test-files.js b/test/cli/test-files.js index ffe0a5ae78..a9a5bf68cb 100644 --- a/test/cli/test-files.js +++ b/test/cli/test-files.js @@ -17,6 +17,7 @@ describe('files', () => { .run((err, stdout, exitcode) => { expect(err).to.not.exist expect(exitcode).to.equal(0) + expect(stdout[0]).to.equal('hello world') done() }) }) From e29f42936493af755066aa9891d0b465f15cdbe4 Mon Sep 17 00:00:00 2001 From: nginnever Date: Tue, 14 Jun 2016 23:11:20 -0700 Subject: [PATCH 3/8] feat(add): add the http endpoint for files.add --- src/core/ipfs/object.js | 1 - src/http-api/resources/files.js | 66 +++++++++++++++++++++++++++++++++ src/http-api/routes/files.js | 12 ++++++ 3 files changed, 78 insertions(+), 1 deletion(-) diff --git a/src/core/ipfs/object.js b/src/core/ipfs/object.js index 85eac9033d..8e987b4d07 100644 --- a/src/core/ipfs/object.js +++ b/src/core/ipfs/object.js @@ -155,7 +155,6 @@ module.exports = function object (self) { if (err) { return cb(err) } - cb(null, node.data) }) }), diff --git a/src/http-api/resources/files.js b/src/http-api/resources/files.js index f4b8f14dd2..5d4dbda052 100644 --- a/src/http-api/resources/files.js +++ b/src/http-api/resources/files.js @@ -1,6 +1,8 @@ 'use strict' const bs58 = require('bs58') +const ndjson = require('ndjson') +const multipart = require('ipfs-multipart') const debug = require('debug') const log = debug('http-api:files') log.error = debug('http-api:files:error') @@ -48,3 +50,67 @@ exports.cat = { }) } } + +exports.add = { + handler: (request, reply) => { + if (!request.payload) { + return reply('Array, Buffer, or String is required.').code(400).takeover() + } + + const parser = multipart.reqParser(request.payload) + + var filesParsed = false + var filesAdded = 0 + + var serialize = ndjson.serialize() + // hapi doesn't permit object streams: http://hapijs.com/api#replyerr-result + serialize._readableState.objectMode = false + + request.server.app.ipfs.files.createAddStream((err, fileAdder) => { + if (err) { + return reply({ + Message: err, + Code: 0 + }).code(500) + } + + fileAdder.on('data', (file) => { + 
serialize.write({ + Name: file.path, + Hash: bs58.encode(file.node.multihash()).toString() + }) + filesAdded++ + }) + + fileAdder.on('end', () => { + if (filesAdded === 0 && filesParsed) { + return reply({ + Message: 'Failed to add files.', + Code: 0 + }).code(500) + } else { + serialize.end() + return reply(serialize) + .header('x-chunked-output', '1') + .header('content-type', 'application/json') + } + }) + + parser.on('file', (fileName, fileStream) => { + var filePair = { + path: fileName, + content: fileStream + } + filesParsed = true + fileAdder.write(filePair) + }) + + parser.on('end', () => { + if (!filesParsed) { + return reply("File argument 'data' is required.").code(400).takeover() + } + fileAdder.end() + }) + }) + } +} diff --git a/src/http-api/routes/files.js b/src/http-api/routes/files.js index 99c47741e3..ddf722f850 100644 --- a/src/http-api/routes/files.js +++ b/src/http-api/routes/files.js @@ -15,4 +15,16 @@ module.exports = (server) => { handler: resources.files.cat.handler } }) + + api.route({ + method: '*', + path: '/api/v0/add', + config: { + payload: { + parse: false, + output: 'stream' + }, + handler: resources.files.add.handler + } + }) } From 11cb4ca47d1fd818f91a348f0d6713fe1cd401fe Mon Sep 17 00:00:00 2001 From: Victor Bjelkholm Date: Thu, 1 Sep 2016 18:06:47 +0200 Subject: [PATCH 4/8] feat(files): get interface-ipfs-core files tests pass through http-api using ipfs-api --- src/http-api/resources/files.js | 62 ++++++++++++++++++- src/http-api/routes/files.js | 14 +++++ .../test-files.js | 6 +- 3 files changed, 74 insertions(+), 8 deletions(-) diff --git a/src/http-api/resources/files.js b/src/http-api/resources/files.js index 5d4dbda052..46cff0093c 100644 --- a/src/http-api/resources/files.js +++ b/src/http-api/resources/files.js @@ -4,8 +4,10 @@ const bs58 = require('bs58') const ndjson = require('ndjson') const multipart = require('ipfs-multipart') const debug = require('debug') +const tar = require('tar-stream') const log = debug('http-api:files') log.error = debug('http-api:files:error') +const async = require('async') exports = module.exports @@ -44,8 +46,55 @@ exports.cat = { Code: 0 }).code(500) } + return reply(stream).header('X-Stream-Output', '1') + }) + } +} + +exports.get = { + // uses common parseKey method that returns a `key` + parseArgs: exports.parseKey, + + // main route handler which is called after the above `parseArgs`, but only if the args were valid + handler: (request, reply) => { + const key = request.pre.args.key + + request.server.app.ipfs.files.get(key, (err, stream) => { + if (err) { + log.error(err) + return reply({ + Message: 'Failed to get file: ' + err, + Code: 0 + }).code(500) + } + var pack = tar.pack() + const files = [] stream.on('data', (data) => { - return reply(data) + files.push(data) + }) + const processFile = (file) => { + return (callback) => { + if (!file.content) { // is directory + pack.entry({name: file.path, type: 'directory'}) + callback() + } else { // is file + const fileContents = [] + file.content.on('data', (data) => { + fileContents.push(data) + }) + file.content.on('end', () => { + pack.entry({name: file.path}, Buffer.concat(fileContents)) + callback() + }) + } + } + } + stream.on('end', () => { + const callbacks = files.map(processFile) + async.series(callbacks, () => { + pack.finalize() + reply(pack).header('X-Stream-Output', '1') + }) }) }) } @@ -75,9 +124,10 @@ exports.add = { } fileAdder.on('data', (file) => { + const filePath = file.path ? 
file.path : file.hash serialize.write({ - Name: file.path, - Hash: bs58.encode(file.node.multihash()).toString() + Name: filePath, + Hash: file.hash }) filesAdded++ }) @@ -104,6 +154,12 @@ exports.add = { filesParsed = true fileAdder.write(filePair) }) + parser.on('directory', (directory) => { + fileAdder.write({ + path: directory, + content: '' + }) + }) parser.on('end', () => { if (!filesParsed) { diff --git a/src/http-api/routes/files.js b/src/http-api/routes/files.js index ddf722f850..da57b3f2f1 100644 --- a/src/http-api/routes/files.js +++ b/src/http-api/routes/files.js @@ -6,6 +6,7 @@ module.exports = (server) => { const api = server.select('API') api.route({ + // TODO fix method method: '*', path: '/api/v0/cat', config: { @@ -17,6 +18,19 @@ module.exports = (server) => { }) api.route({ + // TODO fix method + method: '*', + path: '/api/v0/get', + config: { + pre: [ + { method: resources.files.get.parseArgs, assign: 'args' } + ], + handler: resources.files.get.handler + } + }) + + api.route({ + // TODO fix method method: '*', path: '/api/v0/add', config: { diff --git a/test/http-api/interface-ipfs-core-over-ipfs-api/test-files.js b/test/http-api/interface-ipfs-core-over-ipfs-api/test-files.js index 4a322b948e..4762a26f76 100644 --- a/test/http-api/interface-ipfs-core-over-ipfs-api/test-files.js +++ b/test/http-api/interface-ipfs-core-over-ipfs-api/test-files.js @@ -2,7 +2,6 @@ 'use strict' -/* const test = require('interface-ipfs-core') const FactoryClient = require('./../../utils/factory-http') @@ -17,8 +16,5 @@ const common = { fc.dismantle(callback) } } -*/ -// TODO -// needs: https://github.com/ipfs/js-ipfs/pull/323 -// test.files(common) +test.files(common) From 001a6eb26aae1be495b49be7ca2ec69607518fdb Mon Sep 17 00:00:00 2001 From: Victor Bjelkholm Date: Thu, 1 Sep 2016 18:06:47 +0200 Subject: [PATCH 5/8] feat(files): interface-ipfs-core tests over ipfs-api --- package.json | 1 + src/core/ipfs/files.js | 5 + src/http-api/resources/files.js | 177 +++++++++++++++++--------------- 3 files changed, 99 insertions(+), 84 deletions(-) diff --git a/package.json b/package.json index d684499640..92346aa177 100644 --- a/package.json +++ b/package.json @@ -96,6 +96,7 @@ "promisify-es6": "^1.0.1", "pull-file": "^1.0.0", "pull-paramap": "^1.1.6", + "pull-pushable": "^2.0.1", "pull-sort": "^1.0.0", "pull-stream": "^3.4.5", "pull-stream-to-stream": "^1.3.3", diff --git a/src/core/ipfs/files.js b/src/core/ipfs/files.js index 5070398b17..4f829d5269 100644 --- a/src/core/ipfs/files.js +++ b/src/core/ipfs/files.js @@ -53,6 +53,7 @@ module.exports = function files (self) { pull( pull.values([hash]), pull.asyncMap(self._dagS.get.bind(self._dagS)), + pull.take(1), pull.map((node) => { const data = UnixFS.unmarshal(node.data) if (data.type === 'directory') { @@ -81,6 +82,10 @@ module.exports = function files (self) { return file }) ))) + }), + + getPull: promisify((hash, callback) => { + callback(null, exporter(hash, self._dagS)) }) } } diff --git a/src/http-api/resources/files.js b/src/http-api/resources/files.js index 46cff0093c..38e755fd11 100644 --- a/src/http-api/resources/files.js +++ b/src/http-api/resources/files.js @@ -1,13 +1,16 @@ 'use strict' const bs58 = require('bs58') -const ndjson = require('ndjson') const multipart = require('ipfs-multipart') const debug = require('debug') const tar = require('tar-stream') const log = debug('http-api:files') log.error = debug('http-api:files:error') -const async = require('async') +const pull = require('pull-stream') +const toStream = 
require('pull-stream-to-stream') +const toPull = require('stream-to-pull-stream') +const pushable = require('pull-pushable') +const EOL = require('os').EOL exports = module.exports @@ -37,8 +40,9 @@ exports.cat = { // main route handler which is called after the above `parseArgs`, but only if the args were valid handler: (request, reply) => { const key = request.pre.args.key + const ipfs = request.server.app.ipfs - request.server.app.ipfs.files.cat(key, (err, stream) => { + ipfs.files.cat(key, (err, stream) => { if (err) { log.error(err) return reply({ @@ -46,6 +50,13 @@ exports.cat = { Code: 0 }).code(500) } + + // hapi is not very clever and throws if no + // - _read method + // - _readableState object + // are there :( + stream._read = () => {} + stream._readableState = {} return reply(stream).header('X-Stream-Output', '1') }) } @@ -58,45 +69,44 @@ exports.get = { // main route handler which is called after the above `parseArgs`, but only if the args were valid handler: (request, reply) => { const key = request.pre.args.key - - request.server.app.ipfs.files.get(key, (err, stream) => { - if (err) { - log.error(err) - return reply({ - Message: 'Failed to get file: ' + err, - Code: 0 - }).code(500) - } - var pack = tar.pack() - const files = [] - stream.on('data', (data) => { - files.push(data) - }) - const processFile = (file) => { - return (callback) => { - if (!file.content) { // is directory - pack.entry({name: file.path, type: 'directory'}) - callback() - } else { // is file - const fileContents = [] - file.content.on('data', (data) => { - fileContents.push(data) - }) - file.content.on('end', () => { - pack.entry({name: file.path}, Buffer.concat(fileContents)) - callback() - }) + const ipfs = request.server.app.ipfs + const pack = tar.pack() + + ipfs.files.getPull(key, (err, stream) => { + if (err) return handleError(err) + + pull( + stream, + pull.asyncMap((file, cb) => { + const header = {name: file.path} + + if (!file.content) { + header.type = 'directory' + pack.entry(header) + cb() + } else { + header.size = file.size + toStream.source(file.content) + .pipe(pack.entry(header, cb)) } - } - } - stream.on('end', () => { - const callbacks = files.map(processFile) - async.series(callbacks, () => { + }), + pull.onEnd((err) => { + if (err) return handleError(err) + pack.finalize() reply(pack).header('X-Stream-Output', '1') }) - }) + ) }) + + function handleError (err) { + log.error(err) + + reply({ + Message: 'Failed to get file: ' + err, + Code: 0 + }).code(500) + } } } @@ -106,67 +116,66 @@ exports.add = { return reply('Array, Buffer, or String is required.').code(400).takeover() } + const ipfs = request.server.app.ipfs + // TODO: make pull-multipart const parser = multipart.reqParser(request.payload) + let filesParsed = false - var filesParsed = false - var filesAdded = 0 + const fileAdder = pushable() - var serialize = ndjson.serialize() - // hapi doesn't permit object streams: http://hapijs.com/api#replyerr-result - serialize._readableState.objectMode = false - - request.server.app.ipfs.files.createAddStream((err, fileAdder) => { - if (err) { - return reply({ - Message: err, - Code: 0 - }).code(500) + parser.on('file', (fileName, fileStream) => { + const filePair = { + path: fileName, + content: toPull(fileStream) } + filesParsed = true + fileAdder.push(filePair) + }) - fileAdder.on('data', (file) => { - const filePath = file.path ? 
file.path : file.hash - serialize.write({ - Name: filePath, - Hash: file.hash - }) - filesAdded++ + parser.on('directory', (directory) => { + fileAdder.push({ + path: directory, + content: '' }) + }) + + parser.on('end', () => { + if (!filesParsed) { + return reply("File argument 'data' is required.") + .code(400).takeover() + } + fileAdder.end() + }) - fileAdder.on('end', () => { - if (filesAdded === 0 && filesParsed) { + pull( + fileAdder, + ipfs.files.createAddPullStream(), + pull.map((file) => { + return { + Name: file.path ? file.path : file.hash, + Hash: file.hash + } + }), + pull.map((file) => JSON.stringify(file) + EOL), + pull.collect((err, files) => { + if (err) { return reply({ - Message: 'Failed to add files.', + Message: err, Code: 0 }).code(500) - } else { - serialize.end() - return reply(serialize) - .header('x-chunked-output', '1') - .header('content-type', 'application/json') } - }) - parser.on('file', (fileName, fileStream) => { - var filePair = { - path: fileName, - content: fileStream + if (files.length === 0 && filesParsed) { + return reply({ + Message: 'Failed to add files.', + Code: 0 + }).code(500) } - filesParsed = true - fileAdder.write(filePair) - }) - parser.on('directory', (directory) => { - fileAdder.write({ - path: directory, - content: '' - }) - }) - parser.on('end', () => { - if (!filesParsed) { - return reply("File argument 'data' is required.").code(400).takeover() - } - fileAdder.end() + reply(files.join('')) + .header('x-chunked-output', '1') + .header('content-type', 'application/json') }) - }) + ) } } From cb10ab7b4504b2a8fdb4b415f76ef7fc55cca0b0 Mon Sep 17 00:00:00 2001 From: jbenet Date: Sat, 10 Sep 2016 19:37:07 -0400 Subject: [PATCH 6/8] feat(tests): waste less time generating keys --- test/core/both/test-init.js | 2 +- test/utils/factory-core/index.js | 4 ++-- test/utils/factory-http/index.js | 4 ++-- test/utils/temp-node.js | 2 +- test/utils/temp-repo.js | 2 +- 5 files changed, 7 insertions(+), 7 deletions(-) diff --git a/test/core/both/test-init.js b/test/core/both/test-init.js index fb566b670d..c7671b5183 100644 --- a/test/core/both/test-init.js +++ b/test/core/both/test-init.js @@ -13,7 +13,7 @@ describe('init', function () { const repo = createTempRepo() const ipfs = new IPFS(repo) - ipfs.init({ emptyRepo: true }, (err) => { + ipfs.init({ emptyRepo: true, bits: 128 }, (err) => { expect(err).to.not.exist repo.exists((err, res) => { diff --git a/test/utils/factory-core/index.js b/test/utils/factory-core/index.js index 03b95d89ad..f334536795 100644 --- a/test/utils/factory-core/index.js +++ b/test/utils/factory-core/index.js @@ -36,7 +36,7 @@ function Factory () { if (!config) { config = JSON.parse(JSON.stringify(defaultConfig)) - const pId = PeerId.create({ bits: 32 }).toJSON() + const pId = PeerId.create({ bits: 512 }).toJSON() config.Identity.PeerID = pId.id config.Identity.PrivKey = pId.privKey } @@ -69,7 +69,7 @@ function Factory () { // create the IPFS node const ipfs = new IPFS(repo) - ipfs.init({ emptyRepo: true }, (err) => { + ipfs.init({ emptyRepo: true, bits: 512 }, (err) => { if (err) { return callback(err) } diff --git a/test/utils/factory-http/index.js b/test/utils/factory-http/index.js index bcafd3b2a2..6582184d25 100644 --- a/test/utils/factory-http/index.js +++ b/test/utils/factory-http/index.js @@ -37,7 +37,7 @@ function Factory () { if (!config) { config = JSON.parse(JSON.stringify(defaultConfig)) - const pId = PeerId.create({ bits: 32 }).toJSON() + const pId = PeerId.create({ bits: 512 }).toJSON() config.Identity.PeerID = 
pId.id config.Identity.PrivKey = pId.privKey } @@ -53,7 +53,7 @@ function Factory () { // create the IPFS node const ipfs = new IPFS(repo) - ipfs.init({ emptyRepo: true }, (err) => { + ipfs.init({ emptyRepo: true, bits: 512 }, (err) => { if (err) { return callback(err) } diff --git a/test/utils/temp-node.js b/test/utils/temp-node.js index adabcc0155..7375d9e60b 100644 --- a/test/utils/temp-node.js +++ b/test/utils/temp-node.js @@ -31,7 +31,7 @@ function createTempNode (num, callback) { num = leftPad(num, 3, 0) series([ - (cb) => ipfs.init({ emptyRepo: true }, cb), + (cb) => ipfs.init({ emptyRepo: true, bits: 512 }, cb), (cb) => setAddresses(repo, num, cb), (cb) => ipfs.load(cb) ], (err) => { diff --git a/test/utils/temp-repo.js b/test/utils/temp-repo.js index 47d39de6e0..878ca2812d 100644 --- a/test/utils/temp-repo.js +++ b/test/utils/temp-repo.js @@ -31,7 +31,7 @@ function createTempRepo () { } var repo = new IPFSRepo(repoPath, { - bits: 64, + bits: 512, stores: store }) From b0a6db910da882bceacc6087bb43abd36cbdb87d Mon Sep 17 00:00:00 2001 From: jbenet Date: Sun, 11 Sep 2016 02:02:36 -0400 Subject: [PATCH 7/8] fix(http): get handler reads the stream --- src/http-api/resources/files.js | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/http-api/resources/files.js b/src/http-api/resources/files.js index 38e755fd11..b5309b6de6 100644 --- a/src/http-api/resources/files.js +++ b/src/http-api/resources/files.js @@ -94,9 +94,12 @@ exports.get = { if (err) return handleError(err) pack.finalize() - reply(pack).header('X-Stream-Output', '1') }) ) + + // the reply must read the tar stream, + // to pull values through + reply(pack).header('X-Stream-Output', '1') }) function handleError (err) { From 40501955a4c9e40f5d7cb6b3d2699b6ce603d358 Mon Sep 17 00:00:00 2001 From: Friedel Ziegelmayer Date: Mon, 12 Sep 2016 11:36:29 +0200 Subject: [PATCH 8/8] chore: cleanup, apply CR --- src/core/ipfs/files.js | 38 +++++++++++++++------------- src/http-api/resources/files.js | 29 +++++++++++++-------- test/http-api/ipfs-api/test-files.js | 38 ---------------------------- 3 files changed, 38 insertions(+), 67 deletions(-) delete mode 100644 test/http-api/ipfs-api/test-files.js diff --git a/src/core/ipfs/files.js b/src/core/ipfs/files.js index 4f829d5269..5525a66d62 100644 --- a/src/core/ipfs/files.js +++ b/src/core/ipfs/files.js @@ -50,24 +50,26 @@ module.exports = function files (self) { return callback(new Error('You must supply a multihash')) } - pull( - pull.values([hash]), - pull.asyncMap(self._dagS.get.bind(self._dagS)), - pull.take(1), - pull.map((node) => { - const data = UnixFS.unmarshal(node.data) - if (data.type === 'directory') { - return pull.error(new Error('This dag node is a directory')) - } - - return exporter(hash, self._dagS) - }), - pull.flatten(), - pull.collect((err, files) => { - if (err) return callback(err) - callback(null, toStream.source(files[0].content)) - }) - ) + self._dagS.get(hash, (err, node) => { + if (err) { + return callback(err) + } + + const data = UnixFS.unmarshal(node.data) + if (data.type === 'directory') { + return callback( + new Error('This dag node is a directory') + ) + } + + pull( + exporter(hash, self._dagS), + pull.collect((err, files) => { + if (err) return callback(err) + callback(null, toStream.source(files[0].content)) + }) + ) + }) }), get: promisify((hash, callback) => { diff --git a/src/http-api/resources/files.js b/src/http-api/resources/files.js index b5309b6de6..3c3cbd1e5e 100644 --- a/src/http-api/resources/files.js +++ 
b/src/http-api/resources/files.js @@ -73,7 +73,15 @@ exports.get = { const pack = tar.pack() ipfs.files.getPull(key, (err, stream) => { - if (err) return handleError(err) + if (err) { + log.error(err) + + reply({ + Message: 'Failed to get file: ' + err, + Code: 0 + }).code(500) + return + } pull( stream, @@ -91,7 +99,15 @@ exports.get = { } }), pull.onEnd((err) => { - if (err) return handleError(err) + if (err) { + log.error(err) + + reply({ + Message: 'Failed to get file: ' + err, + Code: 0 + }).code(500) + return + } pack.finalize() }) @@ -101,15 +117,6 @@ exports.get = { // to pull values through reply(pack).header('X-Stream-Output', '1') }) - - function handleError (err) { - log.error(err) - - reply({ - Message: 'Failed to get file: ' + err, - Code: 0 - }).code(500) - } } } diff --git a/test/http-api/ipfs-api/test-files.js b/test/http-api/ipfs-api/test-files.js deleted file mode 100644 index 95312de30b..0000000000 --- a/test/http-api/ipfs-api/test-files.js +++ /dev/null @@ -1,38 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const expect = require('chai').expect - -module.exports = (ctl) => { - describe('.files', () => { - describe('.add', () => {}) // TODO - - describe('.cat', () => { - it('returns error for request without argument', (done) => { - ctl.cat(null, (err, result) => { - expect(err).to.exist - done() - }) - }) - - it('returns error for request with invalid argument', (done) => { - ctl.cat('invalid', (err, result) => { - expect(err).to.exist - done() - }) - }) - - it('returns a buffer', (done) => { - ctl.cat('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o', (err, result) => { - expect(err).to.not.exist - expect(result).to.deep.equal(new Buffer('hello world' + '\n')) - done() - }) - }) - }) - - describe('.get', () => {}) // TODO - - describe('.ls', () => {}) // TODO - }) -}
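
For reference, below is a minimal client-side sketch of the flow the new /api/v0/add and /api/v0/cat routes in this series are meant to serve. It assumes a local daemon exposing the HTTP API on the default port 5001 and the callback-style `ipfs-api` client following interface-ipfs-core semantics of this era; both are illustrative assumptions, not part of the patches above.

// Illustrative only -- not part of the patch series.
// Assumes: local daemon with the HTTP API on localhost:5001 and the
// callback-style `ipfs-api` client (interface-ipfs-core semantics).
const ipfsAPI = require('ipfs-api')
const ipfs = ipfsAPI('localhost', '5001')

// Exercises POST /api/v0/add (multipart body assembled by the client).
ipfs.files.add(new Buffer('hello world\n'), (err, res) => {
  if (err) throw err
  const hash = res[0].hash

  // Exercises GET /api/v0/cat?arg=<hash> (streamed back with X-Stream-Output).
  ipfs.files.cat(hash, (err, stream) => {
    if (err) throw err
    stream.pipe(process.stdout) // prints "hello world"
  })
})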