From e1ecd325cb7f2519ed1e3cca9ca889fe8060d922 Mon Sep 17 00:00:00 2001 From: nginnever Date: Tue, 14 Jun 2016 23:11:20 -0700 Subject: [PATCH 1/9] add http resource --- src/core/ipfs/files.js | 1 + src/http-api/resources/files.js | 67 +++++++++++++++++++++++++++++++++ src/http-api/routes/files.js | 12 ++++++ 3 files changed, 80 insertions(+) diff --git a/src/core/ipfs/files.js b/src/core/ipfs/files.js index 0184147d57..cf194e7477 100644 --- a/src/core/ipfs/files.js +++ b/src/core/ipfs/files.js @@ -9,6 +9,7 @@ const isStream = require('isstream') const promisify = require('promisify-es6') const Duplex = require('stream').Duplex const multihashes = require('multihashes') +const bs58 = require('bs58') module.exports = function files (self) { return { diff --git a/src/http-api/resources/files.js b/src/http-api/resources/files.js index f4b8f14dd2..ef223c34e3 100644 --- a/src/http-api/resources/files.js +++ b/src/http-api/resources/files.js @@ -1,6 +1,9 @@ 'use strict' const bs58 = require('bs58') +const multihash = require('multihashes') +const ndjson = require('ndjson') +const multipart = require('ipfs-multipart') const debug = require('debug') const log = debug('http-api:files') log.error = debug('http-api:files:error') @@ -48,3 +51,67 @@ exports.cat = { }) } } + +exports.add = { + handler: (request, reply) => { + if (!request.payload) { + return reply('Array, Buffer, or String is required.').code(400).takeover() + } + + const parser = multipart.reqParser(request.payload) + + var filesParsed = false + var filesAdded = 0 + + var serialize = ndjson.serialize() + // hapi doesn't permit object streams: http://hapijs.com/api#replyerr-result + serialize._readableState.objectMode = false + + request.server.app.ipfs.files.createAddStream((err, fileAdder) => { + if (err) { + return reply({ + Message: err, + Code: 0 + }).code(500) + } + + fileAdder.on('data', (file) => { + serialize.write({ + Name: file.path, + Hash: bs58.encode(file.node.multihash()).toString() + }) + filesAdded++ + }) + + fileAdder.on('end', () => { + if (filesAdded === 0 && filesParsed) { + return reply({ + Message: 'Failed to add files.', + Code: 0 + }).code(500) + } else { + serialize.end() + return reply(serialize) + .header('x-chunked-output', '1') + .header('content-type', 'application/json') + } + }) + + parser.on('file', (fileName, fileStream) => { + var filePair = { + path: fileName, + content: fileStream + } + filesParsed = true + fileAdder.write(filePair) + }) + + parser.on('end', () => { + if (!filesParsed) { + return reply("File argument 'data' is required.").code(400).takeover() + } + fileAdder.end() + }) + }) + } +} diff --git a/src/http-api/routes/files.js b/src/http-api/routes/files.js index 99c47741e3..ddf722f850 100644 --- a/src/http-api/routes/files.js +++ b/src/http-api/routes/files.js @@ -15,4 +15,16 @@ module.exports = (server) => { handler: resources.files.cat.handler } }) + + api.route({ + method: '*', + path: '/api/v0/add', + config: { + payload: { + parse: false, + output: 'stream' + }, + handler: resources.files.add.handler + } + }) } From 731d1896c746fe0efb9e648bb2e001437325e7aa Mon Sep 17 00:00:00 2001 From: nginnever Date: Wed, 15 Jun 2016 11:35:54 -0700 Subject: [PATCH 2/9] http tests --- src/core/ipfs/files.js | 1 - src/http-api/resources/files.js | 1 - test/http-api/test-files.js | 162 ++++++++++++++++++++++++++++++++ 3 files changed, 162 insertions(+), 2 deletions(-) diff --git a/src/core/ipfs/files.js b/src/core/ipfs/files.js index cf194e7477..0184147d57 100644 --- a/src/core/ipfs/files.js +++ b/src/core/ipfs/files.js @@ -9,7 +9,6 @@ const isStream = require('isstream') const promisify = require('promisify-es6') const Duplex = require('stream').Duplex const multihashes = require('multihashes') -const bs58 = require('bs58') module.exports = function files (self) { return { diff --git a/src/http-api/resources/files.js b/src/http-api/resources/files.js index ef223c34e3..5d4dbda052 100644 --- a/src/http-api/resources/files.js +++ b/src/http-api/resources/files.js @@ -1,7 +1,6 @@ 'use strict' const bs58 = require('bs58') -const multihash = require('multihashes') const ndjson = require('ndjson') const multipart = require('ipfs-multipart') const debug = require('debug') diff --git a/test/http-api/test-files.js b/test/http-api/test-files.js index 29aaf9a44d..8f4ec841a6 100644 --- a/test/http-api/test-files.js +++ b/test/http-api/test-files.js @@ -3,6 +3,17 @@ const expect = require('chai').expect const APIctl = require('ipfs-api') +const FormData = require('form-data') +const fs = require('fs') +const streamToPromise = require('stream-to-promise') +const Readable = require('stream').Readable +const http = require('http') + +function singleFileServer (filename) { + return http.createServer(function (req, res) { + fs.createReadStream(filename).pipe(res) + }) +} module.exports = (httpAPI) => { describe('files', () => { @@ -83,4 +94,155 @@ module.exports = (httpAPI) => { }) }) }) + + describe('files', () => { + describe('api', () => { + let api + + it('api', () => { + api = httpAPI.server.select('API') + }) + + describe('/files/add', () => { + it('returns 400 if no tuple is provided', (done) => { + const form = new FormData() + const headers = form.getHeaders() + + streamToPromise(form).then((payload) => { + api.inject({ + method: 'POST', + url: '/api/v0/add', + headers: headers, + payload: payload + }, (res) => { + expect(res.statusCode).to.equal(400) + done() + }) + }) + }) + + it('adds a file', (done) => { + const form = new FormData() + const filePath = 'test/test-data/node.json' + form.append('file', fs.createReadStream(filePath)) + const headers = form.getHeaders() + + streamToPromise(form).then((payload) => { + api.inject({ + method: 'POST', + url: '/api/v0/add', + headers: headers, + payload: payload + }, (res) => { + expect(res.statusCode).to.equal(200) + var result = JSON.parse(res.result) + expect(result.Name).to.equal('node.json') + expect(result.Hash).to.equal('QmRRdjTN2PjyEPrW73GBxJNAZrstH5tCZzwHYFJpSTKkhe') + done() + }) + }) + }) + + it('adds multiple files', (done) => { + const form = new FormData() + const filePath = 'test/test-data/hello' + const filePath2 = 'test/test-data/otherconfig' + form.append('file', fs.createReadStream(filePath)) + form.append('file', fs.createReadStream(filePath2)) + const headers = form.getHeaders() + + streamToPromise(form).then((payload) => { + api.inject({ + method: 'POST', + url: '/api/v0/add', + headers: headers, + payload: payload + }, (res) => { + expect(res.statusCode).to.equal(200) + var results = res.result.split('\n').slice(0, -1).map(JSON.parse) + expect(results[0].Name).to.equal('hello') + expect(results[0].Hash).to.equal('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') + expect(results[1].Name).to.equal('otherconfig') + expect(results[1].Hash).to.equal('QmayedZNznnEbHtyfjeQvvt29opSLjYjLtLqwfwSWq28ds') + done() + }) + }) + }) + }) + }) + + describe('using js-ipfs-api', () => { + var ctl + + it('start IPFS API ctl', (done) => { + ctl = APIctl('/ip4/127.0.0.1/tcp/6001') + done() + }) + + describe('ipfs.add', () => { + it('adds two files under a chunk Size', (done) => { + const rs = new Readable() + const rs2 = new Readable() + var files = [] + const buffered = fs.readFileSync('test/test-data/hello') + const buffered2 = fs.readFileSync('test/test-data/otherconfig') + rs.push(buffered) + rs.push(null) + rs2.push(buffered2) + rs2.push(null) + const filePair = {path: 'hello', content: rs} + const filePair2 = {path: 'otherconfig', content: rs2} + files.push(filePair) + files.push(filePair2) + + ctl.files.add(files, (err, res) => { + expect(err).to.not.exist + expect(res[0].Name).to.equal('hello') + expect(res[0].Hash).to.equal('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') + expect(res[1].Name).to.equal('otherconfig') + expect(res[1].Hash).to.equal('QmayedZNznnEbHtyfjeQvvt29opSLjYjLtLqwfwSWq28ds') + done() + }) + }) + + it('adds a large file > a chunk', (done) => { + const rs = new Readable() + var files = [] + const buffered = fs.readFileSync('test/test-data/1.2MiB.txt') + rs.push(buffered) + rs.push(null) + const filePair = {path: '1.2MiB.txt', content: rs} + files.push(filePair) + + ctl.files.add(filePair, (err, res) => { + expect(err).to.not.exist + expect(res[0].Name).to.equal('1.2MiB.txt') + expect(res[0].Hash).to.equal('QmW7BDxEbGqxxSYVtn3peNPQgdDXbWkoQ6J1EFYAEuQV3Q') + done() + }) + }) + + it('adds a buffer', (done) => { + const buffer = new Buffer('hello world') + ctl.files.add(buffer, (err, res) => { + expect(err).to.not.exist + expect(res[0].Hash).to.equal('Qmf412jQZiuVUtdgnB36FXFX7xg5V6KEbSJ4dpQuhkLyfD') + done() + }) + }) + + it('adds a url', (done) => { + var server = singleFileServer('test/test-data/1.2MiB.txt') + server.listen(2913, function () { + ctl.files.add('http://localhost:2913/', (err, res) => { + expect(err).to.not.exist + const added = res[0] != null ? res[0] : res + expect(added).to.have.a.property('Hash', 'QmW7BDxEbGqxxSYVtn3peNPQgdDXbWkoQ6J1EFYAEuQV3Q') + done() + }) + }) + }) + }) + }) + }) } From 9f50cb815b8dbec00a823fd895f83ca00b849fd9 Mon Sep 17 00:00:00 2001 From: nginnever Date: Thu, 16 Jun 2016 19:16:57 -0700 Subject: [PATCH 3/9] update http tests --- src/core/ipfs/object.js | 1 - test/http-api/test-files.js | 52 ++++++++++++++++++++++++++++++++----- 2 files changed, 45 insertions(+), 8 deletions(-) diff --git a/src/core/ipfs/object.js b/src/core/ipfs/object.js index f1ab133a88..1649f7b8fa 100644 --- a/src/core/ipfs/object.js +++ b/src/core/ipfs/object.js @@ -155,7 +155,6 @@ module.exports = function object (self) { if (err) { return cb(err) } - cb(null, node.data) }) }), diff --git a/test/http-api/test-files.js b/test/http-api/test-files.js index 8f4ec841a6..ef90010928 100644 --- a/test/http-api/test-files.js +++ b/test/http-api/test-files.js @@ -8,6 +8,7 @@ const fs = require('fs') const streamToPromise = require('stream-to-promise') const Readable = require('stream').Readable const http = require('http') +var bs58 = require('bs58') function singleFileServer (filename) { return http.createServer(function (req, res) { @@ -197,15 +198,15 @@ module.exports = (httpAPI) => { ctl.files.add(files, (err, res) => { expect(err).to.not.exist - expect(res[0].Name).to.equal('hello') - expect(res[0].Hash).to.equal('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') - expect(res[1].Name).to.equal('otherconfig') - expect(res[1].Hash).to.equal('QmayedZNznnEbHtyfjeQvvt29opSLjYjLtLqwfwSWq28ds') + expect(res[0].path).to.equal('hello') + expect(bs58.encode(res[0].node.multihash()).toString()).to.equal('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') + expect(res[1].path).to.equal('otherconfig') + expect(bs58.encode(res[1].node.multihash()).toString()).to.equal('QmayedZNznnEbHtyfjeQvvt29opSLjYjLtLqwfwSWq28ds') done() }) }) - it('adds a large file > a chunk', (done) => { + it.skip('adds a large file > a chunk', (done) => { const rs = new Readable() var files = [] const buffered = fs.readFileSync('test/test-data/1.2MiB.txt') @@ -222,7 +223,7 @@ module.exports = (httpAPI) => { }) }) - it('adds a buffer', (done) => { + it.skip('adds a buffer', (done) => { const buffer = new Buffer('hello world') ctl.files.add(buffer, (err, res) => { expect(err).to.not.exist @@ -231,7 +232,7 @@ module.exports = (httpAPI) => { }) }) - it('adds a url', (done) => { + it.skip('adds a url', (done) => { var server = singleFileServer('test/test-data/1.2MiB.txt') server.listen(2913, function () { ctl.files.add('http://localhost:2913/', (err, res) => { @@ -243,6 +244,43 @@ module.exports = (httpAPI) => { }) }) }) + + describe('ipfs.createAddStream', () => { + it('adds two files under a chunk Size', (done) => { + const rs = new Readable() + const rs2 = new Readable() + var files = [] + const buffered = fs.readFileSync('test/test-data/hello') + const buffered2 = fs.readFileSync('test/test-data/otherconfig') + rs.push(buffered) + rs.push(null) + rs2.push(buffered2) + rs2.push(null) + const filePair = {path: 'hello', content: rs} + const filePair2 = {path: 'otherconfig', content: rs2} + files.push(filePair) + files.push(filePair2) + + const i = ctl.files.createAddStream(function (err, stream) { + expect(err).to.not.exist; + + stream.on('data', function (tuple) { + if (tuple.path === 'otherconfig') { + expect(tuple.path).to.equal('otherconfig') + expect(bs58.encode(tuple.node.multihash()).toString()).to.equal('QmayedZNznnEbHtyfjeQvvt29opSLjYjLtLqwfwSWq28ds') + } + }); + + stream.on('end', done); + + files.forEach(function (file) { + stream.write(file); + }); + + stream.end(); + }) + }) + }) }) }) } From cc6056f3f85303e8491786ce59f62a4642cfac7d Mon Sep 17 00:00:00 2001 From: nginnever Date: Thu, 16 Jun 2016 19:20:03 -0700 Subject: [PATCH 4/9] lint --- test/http-api/test-files.js | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/test/http-api/test-files.js b/test/http-api/test-files.js index ef90010928..d165c98187 100644 --- a/test/http-api/test-files.js +++ b/test/http-api/test-files.js @@ -246,7 +246,7 @@ module.exports = (httpAPI) => { }) describe('ipfs.createAddStream', () => { - it('adds two files under a chunk Size', (done) => { + it('adds two files under a chunk Size', (done) => { const rs = new Readable() const rs2 = new Readable() var files = [] @@ -261,23 +261,23 @@ module.exports = (httpAPI) => { files.push(filePair) files.push(filePair2) - const i = ctl.files.createAddStream(function (err, stream) { - expect(err).to.not.exist; + ctl.files.createAddStream(function (err, stream) { + expect(err).to.not.exist stream.on('data', function (tuple) { if (tuple.path === 'otherconfig') { expect(tuple.path).to.equal('otherconfig') expect(bs58.encode(tuple.node.multihash()).toString()).to.equal('QmayedZNznnEbHtyfjeQvvt29opSLjYjLtLqwfwSWq28ds') } - }); + }) - stream.on('end', done); + stream.on('end', done) files.forEach(function (file) { - stream.write(file); - }); + stream.write(file) + }) - stream.end(); + stream.end() }) }) }) From 1c8b23e4d85d01126ffa62a7fe91b90c85e77f13 Mon Sep 17 00:00:00 2001 From: Victor Bjelkholm Date: Thu, 1 Sep 2016 18:06:47 +0200 Subject: [PATCH 5/9] Use file hash if no path, include directories --- src/http-api/resources/files.js | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/src/http-api/resources/files.js b/src/http-api/resources/files.js index 5d4dbda052..6fe8d76bb0 100644 --- a/src/http-api/resources/files.js +++ b/src/http-api/resources/files.js @@ -75,9 +75,10 @@ exports.add = { } fileAdder.on('data', (file) => { + const filePath = file.path ? file.path : file.hash serialize.write({ - Name: file.path, - Hash: bs58.encode(file.node.multihash()).toString() + Name: filePath, + Hash: file.hash }) filesAdded++ }) @@ -104,6 +105,12 @@ exports.add = { filesParsed = true fileAdder.write(filePair) }) + parser.on('directory', (directory) => { + fileAdder.write({ + path: directory, + content: '' + }) + }) parser.on('end', () => { if (!filesParsed) { From 3c3810099644343366cdec5dab6113af77ea44db Mon Sep 17 00:00:00 2001 From: Victor Bjelkholm Date: Thu, 1 Sep 2016 18:07:16 +0200 Subject: [PATCH 6/9] Start using the interface-ipfs-core tests for files --- .../interface-ipfs-core-over-ipfs-api/test-files.js | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/test/http-api/interface-ipfs-core-over-ipfs-api/test-files.js b/test/http-api/interface-ipfs-core-over-ipfs-api/test-files.js index 4a322b948e..4762a26f76 100644 --- a/test/http-api/interface-ipfs-core-over-ipfs-api/test-files.js +++ b/test/http-api/interface-ipfs-core-over-ipfs-api/test-files.js @@ -2,7 +2,6 @@ 'use strict' -/* const test = require('interface-ipfs-core') const FactoryClient = require('./../../utils/factory-http') @@ -17,8 +16,5 @@ const common = { fc.dismantle(callback) } } -*/ -// TODO -// needs: https://github.com/ipfs/js-ipfs/pull/323 -// test.files(common) +test.files(common) From 6e492ce79f524af7edfdd9be714b75f2bcace724 Mon Sep 17 00:00:00 2001 From: Victor Bjelkholm Date: Tue, 6 Sep 2016 11:23:38 +0200 Subject: [PATCH 7/9] Reply with stream and set stream header --- src/http-api/resources/files.js | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/http-api/resources/files.js b/src/http-api/resources/files.js index 6fe8d76bb0..8f77bcee07 100644 --- a/src/http-api/resources/files.js +++ b/src/http-api/resources/files.js @@ -44,9 +44,7 @@ exports.cat = { Code: 0 }).code(500) } - stream.on('data', (data) => { - return reply(data) - }) + return reply(stream).header('X-Stream-Output', '1') }) } } From 00d9ad6fc5c312621c2fb32ac2030985757492cd Mon Sep 17 00:00:00 2001 From: Victor Bjelkholm Date: Fri, 9 Sep 2016 15:40:15 +0200 Subject: [PATCH 8/9] First hacky attempt at .get --- src/core/ipfs/files.js | 4 +-- src/http-api/resources/files.js | 54 +++++++++++++++++++++++++++++++++ src/http-api/routes/files.js | 14 +++++++++ 3 files changed, 70 insertions(+), 2 deletions(-) diff --git a/src/core/ipfs/files.js b/src/core/ipfs/files.js index 03e8689fcc..db8ea19494 100644 --- a/src/core/ipfs/files.js +++ b/src/core/ipfs/files.js @@ -127,8 +127,8 @@ module.exports = function files (self) { }), get: promisify((hash, callback) => { - const exportFile = Exporter(hash, self._dagS) - callback(null, exportFile) + const exportStream = Exporter(hash, self._dagS) + callback(null, exportStream) }) } } diff --git a/src/http-api/resources/files.js b/src/http-api/resources/files.js index 8f77bcee07..a113eb0a09 100644 --- a/src/http-api/resources/files.js +++ b/src/http-api/resources/files.js @@ -4,6 +4,7 @@ const bs58 = require('bs58') const ndjson = require('ndjson') const multipart = require('ipfs-multipart') const debug = require('debug') +const tar = require('tar-stream') const log = debug('http-api:files') log.error = debug('http-api:files:error') @@ -49,6 +50,59 @@ exports.cat = { } } +exports.get = { + // uses common parseKey method that returns a `key` + parseArgs: exports.parseKey, + + // main route handler which is called after the above `parseArgs`, but only if the args were valid + handler: (request, reply) => { + const key = request.pre.args.key + + request.server.app.ipfs.files.get(key, (err, stream) => { + if (err) { + log.error(err) + return reply({ + Message: 'Failed to get file: ' + err, + Code: 0 + }).code(500) + } + var pack = tar.pack() + const files = [] + let totalFiles = 0 + stream.on('data', (data) => { + files.push(data) + totalFiles = totalFiles + 1 + }) + let processedFiles = 0 + stream.on('end', () => { + files.forEach((file, index) => { + if (!file.content) { // is directory + // TODO want to put null here but get Uncaught error: already piping an entry + pack.entry({name: file.path}, '') + processedFiles = processedFiles + 1 + } else { // is file + const fileContents = [] + file.content.on('data', (data) => { + fileContents.push(data) + }) + file.content.on('end', () => { + pack.entry({name: file.path}, Buffer.concat(fileContents)) + processedFiles = processedFiles + 1 + }) + } + }) + }) + const interval = setInterval(() => { + if (totalFiles === processedFiles) { + clearInterval(interval) + pack.finalize() + reply(pack).header('X-Stream-Output', '1') + } + }, 500) + }) + } +} + exports.add = { handler: (request, reply) => { if (!request.payload) { diff --git a/src/http-api/routes/files.js b/src/http-api/routes/files.js index ddf722f850..da57b3f2f1 100644 --- a/src/http-api/routes/files.js +++ b/src/http-api/routes/files.js @@ -6,6 +6,7 @@ module.exports = (server) => { const api = server.select('API') api.route({ + // TODO fix method method: '*', path: '/api/v0/cat', config: { @@ -17,6 +18,19 @@ module.exports = (server) => { }) api.route({ + // TODO fix method + method: '*', + path: '/api/v0/get', + config: { + pre: [ + { method: resources.files.get.parseArgs, assign: 'args' } + ], + handler: resources.files.get.handler + } + }) + + api.route({ + // TODO fix method method: '*', path: '/api/v0/add', config: { From 24edfa876dfc36186292e02661300ae8d55b3d60 Mon Sep 17 00:00:00 2001 From: Victor Bjelkholm Date: Fri, 9 Sep 2016 18:05:35 +0200 Subject: [PATCH 9/9] Better way of handling directories and multiple files --- src/http-api/resources/files.js | 29 +++++++++++++---------------- 1 file changed, 13 insertions(+), 16 deletions(-) diff --git a/src/http-api/resources/files.js b/src/http-api/resources/files.js index a113eb0a09..46cff0093c 100644 --- a/src/http-api/resources/files.js +++ b/src/http-api/resources/files.js @@ -7,6 +7,7 @@ const debug = require('debug') const tar = require('tar-stream') const log = debug('http-api:files') log.error = debug('http-api:files:error') +const async = require('async') exports = module.exports @@ -68,18 +69,14 @@ exports.get = { } var pack = tar.pack() const files = [] - let totalFiles = 0 stream.on('data', (data) => { files.push(data) - totalFiles = totalFiles + 1 }) - let processedFiles = 0 - stream.on('end', () => { - files.forEach((file, index) => { + const processFile = (file) => { + return (callback) => { if (!file.content) { // is directory - // TODO want to put null here but get Uncaught error: already piping an entry - pack.entry({name: file.path}, '') - processedFiles = processedFiles + 1 + pack.entry({name: file.path, type: 'directory'}) + callback() } else { // is file const fileContents = [] file.content.on('data', (data) => { @@ -87,18 +84,18 @@ exports.get = { }) file.content.on('end', () => { pack.entry({name: file.path}, Buffer.concat(fileContents)) - processedFiles = processedFiles + 1 + callback() }) } - }) - }) - const interval = setInterval(() => { - if (totalFiles === processedFiles) { - clearInterval(interval) + } + } + stream.on('end', () => { + const callbacks = files.map(processFile) + async.series(callbacks, () => { pack.finalize() reply(pack).header('X-Stream-Output', '1') - } - }, 500) + }) + }) }) } }