diff --git a/package.json b/package.json index 494dc73..1d354ea 100644 --- a/package.json +++ b/package.json @@ -50,6 +50,7 @@ }, "dependencies": { "async": "^2.6.1", + "boom": "^7.3.0", "cids": "~0.5.5", "debug": "^4.1.0", "filereader-stream": "^2.0.0", @@ -57,8 +58,8 @@ "interface-datastore": "~0.6.0", "ipfs-multipart": "~0.1.0", "ipfs-unixfs": "~0.1.16", - "ipfs-unixfs-importer": "~0.38.0", "ipfs-unixfs-exporter": "~0.35.5", + "ipfs-unixfs-importer": "~0.38.0", "ipld-dag-pb": "~0.15.0", "is-pull-stream": "~0.0.0", "is-stream": "^1.1.0", diff --git a/src/http/cp.js b/src/http/cp.js index 9cdd57f..280e5f1 100644 --- a/src/http/cp.js +++ b/src/http/cp.js @@ -2,57 +2,49 @@ const Joi = require('joi') -const mfsCp = (api) => { - api.route({ - method: 'POST', - path: '/api/v0/files/cp', - config: { - handler: (request, reply) => { - const { - ipfs - } = request.server.app - const { - arg, - parents, - format, - hashAlg, - shardSplitThreshold - } = request.query +const mfsCp = { + method: 'POST', + path: '/api/v0/files/cp', + async handler (request, h) { + const { + ipfs + } = request.server.app + const { + arg, + parents, + format, + hashAlg, + shardSplitThreshold + } = request.query - const args = arg.concat({ - parents, - format, - hashAlg, - shardSplitThreshold - }) + const args = arg.concat({ + parents, + format, + hashAlg, + shardSplitThreshold + }) - return ipfs.files.cp.apply(null, args) - .then(() => reply()) - .catch(error => { - reply({ - Message: error.message, - Code: error.code || 0, - Type: 'error' - }).code(500).takeover() - }) + await ipfs.files.cp.apply(null, args) + + return h.response() + }, + options: { + validate: { + options: { + allowUnknown: true, + stripUnknown: true }, - validate: { - options: { - allowUnknown: true, - stripUnknown: true - }, - query: Joi.object().keys({ - arg: Joi.array().items(Joi.string()).min(2), - parents: Joi.boolean().default(false), - format: Joi.string().valid([ - 'dag-pb', - 'dag-cbor' - ]).default('dag-pb'), - hashAlg: Joi.string().default('sha2-256') - }) - } + query: Joi.object().keys({ + arg: Joi.array().items(Joi.string()).min(2), + parents: Joi.boolean().default(false), + format: Joi.string().valid([ + 'dag-pb', + 'dag-cbor' + ]).default('dag-pb'), + hashAlg: Joi.string().default('sha2-256') + }) } - }) + } } module.exports = mfsCp diff --git a/src/http/flush.js b/src/http/flush.js index adff3db..bda0d54 100644 --- a/src/http/flush.js +++ b/src/http/flush.js @@ -2,40 +2,32 @@ const Joi = require('joi') -const mfsFlush = (api) => { - api.route({ - method: 'POST', - path: '/api/v0/files/flush', - config: { - handler: (request, reply) => { - const { - ipfs - } = request.server.app - const { - arg - } = request.query +const mfsFlush = { + method: 'POST', + path: '/api/v0/files/flush', + async handler (request, h) { + const { + ipfs + } = request.server.app + const { + arg + } = request.query - return ipfs.files.flush.call(null, arg) - .then(() => reply()) - .catch(error => { - reply({ - Message: error.message, - Code: error.code || 0, - Type: 'error' - }).code(500).takeover() - }) + await ipfs.files.flush.call(null, arg) + + return h.response() + }, + options: { + validate: { + options: { + allowUnknown: true, + stripUnknown: true }, - validate: { - options: { - allowUnknown: true, - stripUnknown: true - }, - query: Joi.object().keys({ - arg: Joi.string().required() - }) - } + query: Joi.object().keys({ + arg: Joi.string().required() + }) } - }) + } } module.exports = mfsFlush diff --git a/src/http/index.js b/src/http/index.js index 
f271245..2ccaa9e 100644 --- a/src/http/index.js +++ b/src/http/index.js @@ -10,14 +10,14 @@ const rm = require('./rm') const stat = require('./stat') const write = require('./write') -module.exports = (api) => { - cp(api) - flush(api) - ls(api) - mkdir(api) - mv(api) - read(api) - rm(api) - stat(api) - write(api) -} +module.exports = [ + cp, + flush, + ls, + mkdir, + mv, + read, + rm, + stat, + write +] diff --git a/src/http/ls.js b/src/http/ls.js index 8886e3a..99cd329 100644 --- a/src/http/ls.js +++ b/src/http/ls.js @@ -14,103 +14,83 @@ const mapEntry = (entry) => { } } -const mfsLs = (api) => { - api.route({ - method: 'POST', - path: '/api/v0/files/ls', - config: { - handler: (request, reply) => { - const { - ipfs - } = request.server.app - const { - arg, +const mfsLs = { + method: 'POST', + path: '/api/v0/files/ls', + async handler (request, h) { + const { + ipfs + } = request.server.app + const { + arg, + long, + cidBase, + stream + } = request.query + + if (stream) { + const responseStream = await new Promise((resolve, reject) => { + const readableStream = ipfs.files.lsReadableStream(arg, { long, - cidBase, - stream - } = request.query + cidBase + }) - if (stream) { - const readableStream = ipfs.files.lsReadableStream(arg, { - long, - cidBase - }) + let passThrough - if (!readableStream._read) { - // make the stream look like a Streams2 to appease Hapi - readableStream._read = () => {} - readableStream._readableState = {} + readableStream.on('data', (entry) => { + if (!passThrough) { + passThrough = new PassThrough() + resolve(passThrough) } - let passThrough - - readableStream.on('data', (entry) => { - if (!passThrough) { - passThrough = new PassThrough() - - reply(passThrough) - .header('X-Stream-Output', '1') - } + passThrough.write(JSON.stringify(mapEntry(entry)) + '\n') + }) - passThrough.write(JSON.stringify(mapEntry(entry)) + '\n') - }) + readableStream.once('end', (entry) => { + if (passThrough) { + passThrough.end(entry ? JSON.stringify(mapEntry(entry)) + '\n' : undefined) + } + }) - readableStream.once('end', (entry) => { - if (passThrough) { - passThrough.end(entry ? 
JSON.stringify(mapEntry(entry)) + '\n' : undefined) - } - }) + readableStream.once('error', (error) => { + reject(error) + }) + }) - readableStream.once('error', (error) => { - reply({ - Message: error.message, - Code: error.code || 0, - Type: 'error' - }).code(500).takeover() - }) + return h.response(responseStream).header('X-Stream-Output', '1') + } - return - } + const files = await ipfs.files.ls(arg, { + long, + cidBase + }) - return ipfs.files.ls(arg, { - long, - cidBase - }) - .then(files => { - reply({ - Entries: files.map(mapEntry) - }) - }) - .catch(error => { - reply({ - Message: error.message, - Code: error.code || 0, - Type: 'error' - }).code(500).takeover() - }) + return h.response({ + Entries: files.map(mapEntry) + }) + }, + options: { + validate: { + options: { + allowUnknown: true, + stripUnknown: true }, - validate: { - options: { - allowUnknown: true, - stripUnknown: true - }, - query: Joi.object().keys({ - arg: Joi.string().default('/'), - long: Joi.boolean().default(false), - cidBase: Joi.string().default('base58btc'), - stream: Joi.boolean().default(false) + query: Joi.object().keys({ + arg: Joi.string().default('/'), + long: Joi.boolean().default(false), + cidBase: Joi.string().default('base58btc'), + stream: Joi.boolean().default(false) + }) + .rename('l', 'long', { + override: true, + ignoreUndefined: true + }) + .rename('s', 'stream', { + override: true, + ignoreUndefined: true }) - .rename('l', 'long', { - override: true, - ignoreUndefined: true - }) - .rename('s', 'stream', { - override: true, - ignoreUndefined: true - }) - } } - }) + } } module.exports = mfsLs diff --git a/src/http/mkdir.js b/src/http/mkdir.js index 2c4fcd4..fa4917e 100644 --- a/src/http/mkdir.js +++ b/src/http/mkdir.js @@ -2,68 +2,60 @@ const Joi = require('joi') -const mfsMkdir = (api) => { - api.route({ - method: 'POST', - path: '/api/v0/files/mkdir', - config: { - handler: (request, reply) => { - const { - ipfs - } = request.server.app - const { - arg, - parents, - format, - hashAlg, - cidVersion, - flush, - shardSplitThreshold - } = request.query +const mfsMkdir = { + method: 'POST', + path: '/api/v0/files/mkdir', + async handler (request, h) { + const { + ipfs + } = request.server.app + const { + arg, + parents, + format, + hashAlg, + cidVersion, + flush, + shardSplitThreshold + } = request.query - return ipfs.files.mkdir(arg, { - parents, - format, - hashAlg, - cidVersion, - flush, - shardSplitThreshold - }) - .then(() => reply()) - .catch(error => { - reply({ - Message: error.message, - Code: error.code || 0, - Type: 'error' - }).code(500).takeover() - }) + await ipfs.files.mkdir(arg, { + parents, + format, + hashAlg, + cidVersion, + flush, + shardSplitThreshold + }) + + return h.response() + }, + options: { + validate: { + options: { + allowUnknown: true, + stripUnknown: true }, - validate: { - options: { - allowUnknown: true, - stripUnknown: true - }, - query: Joi.object().keys({ - arg: Joi.string().required(), - parents: Joi.boolean().default(false), - format: Joi.string().valid([ - 'dag-pb', - 'dag-cbor' - ]).default('dag-pb'), - hashAlg: Joi.string().default('sha2-256'), - cidVersion: Joi.number().integer().valid([ - 0, - 1 - ]).default(0), - flush: Joi.boolean().default(true) + query: Joi.object().keys({ + arg: Joi.string().required(), + parents: Joi.boolean().default(false), + format: Joi.string().valid([ + 'dag-pb', + 'dag-cbor' + ]).default('dag-pb'), + hashAlg: Joi.string().default('sha2-256'), + cidVersion: Joi.number().integer().valid([ + 0, + 1 + ]).default(0), + flush: 
Joi.boolean().default(true) + }) + .rename('p', 'parents', { + override: true, + ignoreUndefined: true }) - .rename('p', 'parents', { - override: true, - ignoreUndefined: true - }) - } } - }) + } } module.exports = mfsMkdir diff --git a/src/http/mv.js b/src/http/mv.js index b4957a5..7f74a1b 100644 --- a/src/http/mv.js +++ b/src/http/mv.js @@ -2,57 +2,49 @@ const Joi = require('joi') -const mfsMv = (api) => { - api.route({ - method: 'POST', - path: '/api/v0/files/mv', - config: { - handler: (request, reply) => { - const { - ipfs - } = request.server.app - const { - arg, - parents, - format, - hashAlg, - shardSplitThreshold - } = request.query +const mfsMv = { + method: 'POST', + path: '/api/v0/files/mv', + async handler (request, h) { + const { + ipfs + } = request.server.app + const { + arg, + parents, + format, + hashAlg, + shardSplitThreshold + } = request.query - const args = arg.concat({ - parents, - format, - hashAlg, - shardSplitThreshold - }) + const args = arg.concat({ + parents, + format, + hashAlg, + shardSplitThreshold + }) - return ipfs.files.mv.apply(null, args) - .then(() => reply()) - .catch(error => { - reply({ - Message: error.message, - Code: error.code || 0, - Type: 'error' - }).code(500).takeover() - }) + await ipfs.files.mv.apply(null, args) + + return h.response() + }, + options: { + validate: { + options: { + allowUnknown: true, + stripUnknown: true }, - validate: { - options: { - allowUnknown: true, - stripUnknown: true - }, - query: Joi.object().keys({ - arg: Joi.array().items(Joi.string()).min(2), - parents: Joi.boolean().default(false), - format: Joi.string().valid([ - 'dag-pb', - 'dag-cbor' - ]).default('dag-pb'), - hashAlg: Joi.string().default('sha2-256') - }) - } + query: Joi.object().keys({ + arg: Joi.array().items(Joi.string()).min(2), + parents: Joi.boolean().default(false), + format: Joi.string().valid([ + 'dag-pb', + 'dag-cbor' + ]).default('dag-pb'), + hashAlg: Joi.string().default('sha2-256') + }) } - }) + } } module.exports = mfsMv diff --git a/src/http/read.js b/src/http/read.js index 2686dc6..5538121 100644 --- a/src/http/read.js +++ b/src/http/read.js @@ -5,73 +5,64 @@ const { PassThrough } = require('stream') -const mfsRead = (api) => { - api.route({ - method: 'POST', - path: '/api/v0/files/read', - config: { - handler: (request, reply) => { - const { - ipfs - } = request.server.app - const { - arg, - offset, - length, - count - } = request.query +const mfsRead = { + method: 'POST', + path: '/api/v0/files/read', + async handler (request, h) { + const { + ipfs + } = request.server.app + const { + arg, + offset, + length, + count + } = request.query - const stream = ipfs.files.readReadableStream(arg, { - offset, - length, - count - }) + const responseStream = await new Promise((resolve, reject) => { + const stream = ipfs.files.readReadableStream(arg, { + offset, + length, + count + }) - if (!stream._read) { - // make the stream look like a Streams2 to appease Hapi - stream._read = () => {} - stream._readableState = {} - } + stream.once('data', (chunk) => { + const passThrough = new PassThrough() - stream.once('data', (chunk) => { - const passThrough = new PassThrough() + resolve(passThrough) - reply(passThrough) - .header('X-Stream-Output', '1') + passThrough.write(chunk) + stream.pipe(passThrough) + }) - passThrough.write(chunk) - stream.pipe(passThrough) - }) + stream.once('error', (error) => { + reject(error) + }) + }) - stream.once('error', (error) => { - reply({ - Message: error.message, - Code: error.code || 0, - Type: 'error' - 
}).code(500).takeover() - }) + return h.response(responseStream).header('X-Stream-Output', '1') + }, + options: { + validate: { + options: { + allowUnknown: true, + stripUnknown: true }, - validate: { - options: { - allowUnknown: true, - stripUnknown: true - }, - query: Joi.object().keys({ - arg: Joi.string().required(), - offset: Joi.number().integer().min(0), - length: Joi.number().integer().min(0) + query: Joi.object().keys({ + arg: Joi.string().required(), + offset: Joi.number().integer().min(0), + length: Joi.number().integer().min(0) + }) + .rename('o', 'offset', { + override: true, + ignoreUndefined: true + }) + .rename('n', 'length', { + override: true, + ignoreUndefined: true }) - .rename('o', 'offset', { - override: true, - ignoreUndefined: true - }) - .rename('n', 'length', { - override: true, - ignoreUndefined: true - }) - } } - }) + } } module.exports = mfsRead diff --git a/src/http/rm.js b/src/http/rm.js index 70d605e..0d05693 100644 --- a/src/http/rm.js +++ b/src/http/rm.js @@ -2,48 +2,40 @@ const Joi = require('joi') -const mfsRm = (api) => { - api.route({ - method: 'POST', - path: '/api/v0/files/rm', - config: { - handler: (request, reply) => { - const { - ipfs - } = request.server.app - const { - arg, - recursive - } = request.query +const mfsRm = { + method: 'POST', + path: '/api/v0/files/rm', + async handler (request, h) { + const { + ipfs + } = request.server.app + const { + arg, + recursive + } = request.query - return ipfs.files.rm(arg, { - recursive - }) - .then(() => reply()) - .catch(error => { - reply({ - Message: error.message, - Code: error.code || 0, - Type: 'error' - }).code(500).takeover() - }) + await ipfs.files.rm(arg, { + recursive + }) + + return h.response() + }, + options: { + validate: { + options: { + allowUnknown: true, + stripUnknown: true }, - validate: { - options: { - allowUnknown: true, - stripUnknown: true - }, - query: Joi.object().keys({ - arg: Joi.string().required(), - recursive: Joi.boolean().default(false) + query: Joi.object().keys({ + arg: Joi.string().required(), + recursive: Joi.boolean().default(false) + }) + .rename('r', 'recursive', { + override: true, + ignoreUndefined: true }) - .rename('r', 'recursive', { - override: true, - ignoreUndefined: true - }) - } } - }) + } } module.exports = mfsRm diff --git a/src/http/stat.js b/src/http/stat.js index fdd3eab..8c83175 100644 --- a/src/http/stat.js +++ b/src/http/stat.js @@ -2,64 +2,54 @@ const Joi = require('joi') -const mfsStat = (api) => { - api.route({ - method: 'POST', - path: '/api/v0/files/stat', - config: { - handler: (request, reply) => { - const { - ipfs - } = request.server.app - const { - arg, - hash, - size, - withLocal, - cidBase - } = request.query +const mfsStat = { + method: 'POST', + path: '/api/v0/files/stat', + async handler (request, h) { + const { + ipfs + } = request.server.app + const { + arg, + hash, + size, + withLocal, + cidBase + } = request.query - return ipfs.files.stat(arg, { - hash, - size, - withLocal, - cidBase - }) - .then(stats => { - reply({ - Type: stats.type, - Blocks: stats.blocks, - Size: stats.size, - Hash: stats.hash, - CumulativeSize: stats.cumulativeSize, - WithLocality: stats.withLocality, - Local: stats.local, - SizeLocal: stats.sizeLocal - }) - }) - .catch(error => { - reply({ - Message: error.message, - Code: error.code || 0, - Type: 'error' - }).code(500).takeover() - }) + const stats = await ipfs.files.stat(arg, { + hash, + size, + withLocal, + cidBase + }) + + return h.response({ + Type: stats.type, + Blocks: stats.blocks, + Size: 
stats.size, + Hash: stats.hash, + CumulativeSize: stats.cumulativeSize, + WithLocality: stats.withLocality, + Local: stats.local, + SizeLocal: stats.sizeLocal + }) + }, + options: { + validate: { + options: { + allowUnknown: true, + stripUnknown: true }, - validate: { - options: { - allowUnknown: true, - stripUnknown: true - }, - query: Joi.object().keys({ - arg: Joi.string().default('/'), - hash: Joi.boolean().default(false), - size: Joi.boolean().default(false), - withLocal: Joi.boolean().default(false), - cidBase: Joi.string().default('base58btc') - }) - } + query: Joi.object().keys({ + arg: Joi.string().default('/'), + hash: Joi.boolean().default(false), + size: Joi.boolean().default(false), + withLocal: Joi.boolean().default(false), + cidBase: Joi.string().default('base58btc') + }) } - }) + } } module.exports = mfsStat diff --git a/src/http/write.js b/src/http/write.js index de32386..710f5d2 100644 --- a/src/http/write.js +++ b/src/http/write.js @@ -2,134 +2,122 @@ const Joi = require('joi') const multipart = require('ipfs-multipart') -const once = require('once') +const Boom = require('boom') -const mfsWrite = (api) => { - api.route({ - method: 'POST', - path: '/api/v0/files/write', - config: { - payload: { - parse: false, - output: 'stream' - }, - handler: (request, reply) => { - const { - ipfs - } = request.server.app - const { - arg, - offset, - length, - create, - truncate, - rawLeaves, - cidVersion, - hashAlg, - format, - parents, - progress, - strategy, - flush, - shardSplitThreshold - } = request.query +const mfsWrite = { + method: 'POST', + path: '/api/v0/files/write', + async handler (request, h) { + const { + ipfs + } = request.server.app + const { + arg, + offset, + length, + create, + truncate, + rawLeaves, + cidVersion, + hashAlg, + format, + parents, + progress, + strategy, + flush, + shardSplitThreshold + } = request.query - const parser = multipart.reqParser(request.payload) - let filesParsed = false + const fileStream = await new Promise((resolve, reject) => { + const parser = multipart.reqParser(request.payload) + let fileStream - reply = once(reply) + parser.on('file', (_, stream) => { + if (fileStream) { + return reject(Boom.badRequest('Please only send one file')) + } - parser.on('file', (_, fileStream) => { - if (filesParsed) { - return reply({ - Message: 'Please only send one file', - Code: 0, - Type: 'error' - }).code(400).takeover() - } + fileStream = stream + }) - filesParsed = true + parser.on('error', (error) => { + reject(error) + }) - ipfs.files.write(arg, fileStream, { - offset, - length, - create, - truncate, - rawLeaves, - cidVersion, - hashAlg, - format, - parents, - progress, - strategy, - flush, - shardSplitThreshold - }) - .then(() => reply()) - .catch(error => { - reply({ - Message: error.message, - Code: 0, - Type: 'error' - }).code(500).takeover() - }) - }) + parser.on('end', () => { + resolve(fileStream) + }) + }) - parser.on('error', (error) => { - reply({ - Message: error.message, - Code: 0, - Type: 'error' - }).code(500).takeover() - }) + await ipfs.files.write(arg, fileStream, { + offset, + length, + create, + truncate, + rawLeaves, + cidVersion, + hashAlg, + format, + parents, + progress, + strategy, + flush, + shardSplitThreshold + }) + + return h.response() + }, + options: { + payload: { + parse: false, + output: 'stream' + }, + validate: { + options: { + allowUnknown: true, + stripUnknown: true }, - validate: { - options: { - allowUnknown: true, - stripUnknown: true - }, - query: Joi.object().keys({ - arg: Joi.string().required(), - 
offset: Joi.number().integer().min(0), - length: Joi.number().integer().min(0), - create: Joi.boolean().default(false), - truncate: Joi.boolean().default(false), - rawLeaves: Joi.boolean().default(false), - cidVersion: Joi.number().integer().valid([ - 0, - 1 - ]).default(0), - hashAlg: Joi.string().valid([ - 'sha2-256' - ]).default('sha2-256'), - format: Joi.string().valid([ - 'dag-pb', - 'dag-cbor' - ]).default('dag-pb'), - parents: Joi.boolean().default(false), - progress: Joi.func(), - strategy: Joi.string().valid([ - 'flat', - 'balanced', - 'trickle' - ]).default('trickle'), - flush: Joi.boolean().default(true) + query: Joi.object().keys({ + arg: Joi.string().required(), + offset: Joi.number().integer().min(0), + length: Joi.number().integer().min(0), + create: Joi.boolean().default(false), + truncate: Joi.boolean().default(false), + rawLeaves: Joi.boolean().default(false), + cidVersion: Joi.number().integer().valid([ + 0, + 1 + ]).default(0), + hashAlg: Joi.string().valid([ + 'sha2-256' + ]).default('sha2-256'), + format: Joi.string().valid([ + 'dag-pb', + 'dag-cbor' + ]).default('dag-pb'), + parents: Joi.boolean().default(false), + progress: Joi.func(), + strategy: Joi.string().valid([ + 'flat', + 'balanced', + 'trickle' + ]).default('trickle'), + flush: Joi.boolean().default(true) + }) + .rename('o', 'offset', { + override: true, + ignoreUndefined: true + }) + .rename('e', 'create', { + override: true, + ignoreUndefined: true + }) + .rename('t', 'truncate', { + override: true, + ignoreUndefined: true }) - .rename('o', 'offset', { - override: true, - ignoreUndefined: true - }) - .rename('e', 'create', { - override: true, - ignoreUndefined: true - }) - .rename('t', 'truncate', { - override: true, - ignoreUndefined: true - }) - } } - }) + } } module.exports = mfsWrite
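Addendum (reviewer notes, not part of the diff):

src/http/index.js now exports an array of Hapi 17 route objects instead of a function that called api.route() itself, so registering the MFS routes becomes the consumer's job. A minimal sketch of that wiring, assuming hapi 17; the module path, host and port are illustrative:

    'use strict'

    const Hapi = require('hapi')
    const routes = require('ipfs-mfs/http') // illustrative path to src/http/index.js

    async function start (ipfs) {
      const server = Hapi.server({ host: '127.0.0.1', port: 5002 })

      // every handler reads the node from request.server.app.ipfs
      server.app.ipfs = ipfs

      // index.js exports plain route objects, so they register in one call
      server.route(routes)

      await server.start()
      return server
    }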
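Because the routes are now plain objects with async handlers, they can also be exercised without opening a socket, for example with Hapi's server.inject(). A sketch with a hand-rolled ipfs stub (the stub shape is assumed for illustration, not taken from this repo's tests):

    const Hapi = require('hapi')
    const routes = require('ipfs-mfs/http') // illustrative path

    async function statRoot () {
      const server = Hapi.server()

      // stub only what the stat handler touches
      server.app.ipfs = {
        files: {
          stat: async () => ({ type: 'directory', blocks: 0, size: 0 })
        }
      }
      server.route(routes)

      const res = await server.inject({
        method: 'POST',
        url: '/api/v0/files/stat?arg=/'
      })

      console.log(res.statusCode, res.result) // 200 { Type: 'directory', ... }
    }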
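For cp and mv the query accepts a repeated arg parameter (Joi.array().items(Joi.string()).min(2)) and the handler appends the options object before applying, so an HTTP call maps onto the programmatic API roughly like this (paths and values illustrative):

    // POST /api/v0/files/cp?arg=/source-file&arg=/destination&parents=true
    //
    // request.query.arg                -> ['/source-file', '/destination']
    // const args = arg.concat({ ... }) -> ['/source-file', '/destination', { parents: true, format: 'dag-pb', hashAlg: 'sha2-256', ... }]
    // ipfs.files.cp.apply(null, args)  -> ipfs.files.cp('/source-file', '/destination', { parents: true, ... })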
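ls (with stream=true) and read both wrap a readable stream in a promise that resolves a PassThrough on the first 'data' event, so the async handler can return h.response(stream) while an error emitted before any output still rejects and becomes an error response. The shape, reduced to its essentials (streamToResponse is illustrative, not an export of this module):

    const { PassThrough } = require('stream')

    const streamToResponse = (source) => new Promise((resolve, reject) => {
      let sink

      source.on('data', (chunk) => {
        if (!sink) {
          sink = new PassThrough()
          resolve(sink) // first chunk: hand the response stream to Hapi
        }
        sink.write(chunk)
      })

      source.once('end', () => {
        if (sink) {
          sink.end()
        }
      })

      // an error before any data rejects the promise instead of
      // leaving a half-sent response
      source.once('error', reject)
    })

    // in a handler:
    //   return h.response(await streamToResponse(readable)).header('X-Stream-Output', '1')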
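Error handling also changes shape: the old handlers caught every rejection and replied with { Message, Code, Type } plus .code(500).takeover(), while the new async handlers simply throw, so Hapi 17 answers with its default Boom payload ({ statusCode, error, message }), and write.js uses Boom.badRequest() to turn the multiple-file case into a 400. If the old go-ipfs-style body is still wanted, it could be restored in one place with an onPreResponse extension; a hedged sketch, not part of this diff:

    server.ext('onPreResponse', (request, h) => {
      const response = request.response

      if (!response.isBoom) {
        return h.continue
      }

      // mirror the shape the pre-Hapi-17 handlers produced
      return h.response({
        Message: response.message,
        Code: response.code || 0,
        Type: 'error'
      }).code(response.output.statusCode)
    })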