Skip to content
This repository has been archived by the owner on Mar 10, 2020. It is now read-only.

Commit

Permalink
refactor: hapi 18
Browse files · Browse the repository at this point in the history
This PR refactors the HTTP API for use with Hapi 18 in preparation for ipfs/js-ipfs#1844.

License: MIT
Signed-off-by: Alan Shaw <alan.shaw@protocol.ai>
  • Loading branch information
alanshaw committed Jan 28, 2019
1 parent daee4b7 commit 8d5deb9
Show file tree
Hide file tree
Showing 11 changed files with 458 additions and 548 deletions.
3 changes: 2 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -50,15 +50,16 @@
},
"dependencies": {
"async": "^2.6.1",
"boom": "^7.3.0",
"cids": "~0.5.5",
"debug": "^4.1.0",
"filereader-stream": "^2.0.0",
"hamt-sharding": "~0.0.2",
"interface-datastore": "~0.6.0",
"ipfs-multipart": "~0.1.0",
"ipfs-unixfs": "~0.1.16",
"ipfs-unixfs-importer": "~0.38.0",
"ipfs-unixfs-exporter": "~0.35.5",
"ipfs-unixfs-importer": "~0.38.0",
"ipld-dag-pb": "~0.15.0",
"is-pull-stream": "~0.0.0",
"is-stream": "^1.1.0",
Expand Down
86 changes: 39 additions & 47 deletions src/http/cp.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,57 +2,49 @@

const Joi = require('joi')

// MFS `cp` HTTP route as a Hapi 18 route-config object
// (POST /api/v0/files/cp). Exported for registration via `server.route()`.
// NOTE(review): this span contained interleaved pre/post-refactor diff lines;
// reconstructed here as the coherent Hapi 18 version.
const mfsCp = {
  method: 'POST',
  path: '/api/v0/files/cp',

  // Copies source path(s) to a destination inside MFS.
  // `arg` is an array of at least two paths — the last one is the
  // destination; the options object is appended so it becomes the final
  // argument of the variadic `ipfs.files.cp(...sources, dest, options)`.
  async handler (request, h) {
    const {
      ipfs
    } = request.server.app
    const {
      arg,
      parents,
      format,
      hashAlg,
      shardSplitThreshold
    } = request.query

    const args = arg.concat({
      parents,
      format,
      hashAlg,
      shardSplitThreshold
    })

    await ipfs.files.cp.apply(null, args)

    // Empty 200 on success; thrown errors are handled by Hapi's lifecycle.
    return h.response()
  },
  options: {
    validate: {
      options: {
        allowUnknown: true,
        stripUnknown: true
      },
      query: Joi.object().keys({
        arg: Joi.array().items(Joi.string()).min(2),
        parents: Joi.boolean().default(false),
        format: Joi.string().valid([
          'dag-pb',
          'dag-cbor'
        ]).default('dag-pb'),
        hashAlg: Joi.string().default('sha2-256')
      })
    }
  }
}

module.exports = mfsCp
54 changes: 23 additions & 31 deletions src/http/flush.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,40 +2,32 @@

const Joi = require('joi')

// MFS `flush` HTTP route as a Hapi 18 route-config object
// (POST /api/v0/files/flush). Exported for registration via `server.route()`.
// NOTE(review): this span contained interleaved pre/post-refactor diff lines;
// reconstructed here as the coherent Hapi 18 version.
const mfsFlush = {
  method: 'POST',
  path: '/api/v0/files/flush',

  // Flushes a given MFS path's changes to disk. `arg` is the path to flush
  // (required by the validator below).
  async handler (request, h) {
    const {
      ipfs
    } = request.server.app
    const {
      arg
    } = request.query

    await ipfs.files.flush.call(null, arg)

    // Empty 200 on success; thrown errors are handled by Hapi's lifecycle.
    return h.response()
  },
  options: {
    validate: {
      options: {
        allowUnknown: true,
        stripUnknown: true
      },
      query: Joi.object().keys({
        arg: Joi.string().required()
      })
    }
  }
}

module.exports = mfsFlush
22 changes: 11 additions & 11 deletions src/http/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -10,14 +10,14 @@ const rm = require('./rm')
const stat = require('./stat')
const write = require('./write')

module.exports = (api) => {
cp(api)
flush(api)
ls(api)
mkdir(api)
mv(api)
read(api)
rm(api)
stat(api)
write(api)
}
module.exports = [
cp,
flush,
ls,
mkdir,
mv,
read,
rm,
stat,
write
]
148 changes: 64 additions & 84 deletions src/http/ls.js
Original file line number Diff line number Diff line change
Expand Up @@ -14,103 +14,83 @@ const mapEntry = (entry) => {
}
}

// MFS `ls` HTTP route as a Hapi 18 route-config object
// (POST /api/v0/files/ls). Exported for registration via `server.route()`.
// NOTE(review): this span contained interleaved pre/post-refactor diff lines;
// reconstructed here as the coherent Hapi 18 version. The old Streams2
// `_read` shim (only needed to appease Hapi 17) is on the removed side of
// the diff and is intentionally not carried over — confirm against the
// upstream commit if streaming misbehaves.
const mfsLs = {
  method: 'POST',
  path: '/api/v0/files/ls',

  // Lists directory contents. With `stream=true` the entries are written as
  // newline-delimited JSON through a PassThrough; otherwise a single JSON
  // body with an `Entries` array is returned.
  async handler (request, h) {
    const {
      ipfs
    } = request.server.app
    const {
      arg,
      long,
      cidBase,
      stream
    } = request.query

    if (stream) {
      // Resolve with the PassThrough as soon as the first entry arrives so
      // Hapi can start responding; reject if the source errors first.
      const responseStream = await new Promise((resolve, reject) => {
        const readableStream = ipfs.files.lsReadableStream(arg, {
          long,
          cidBase
        })

        let passThrough

        readableStream.on('data', (entry) => {
          if (!passThrough) {
            passThrough = new PassThrough()
            resolve(passThrough)
          }

          passThrough.write(JSON.stringify(mapEntry(entry)) + '\n')
        })

        readableStream.once('end', (entry) => {
          if (passThrough) {
            passThrough.end(entry ? JSON.stringify(mapEntry(entry)) + '\n' : undefined)
          }
        })

        readableStream.once('error', (error) => {
          reject(error)
        })
      })

      return h.response(responseStream).header('X-Stream-Output', '1')
    }

    const files = await ipfs.files.ls(arg, {
      long,
      cidBase
    })

    return h.response({
      Entries: files.map(mapEntry)
    })
  },
  options: {
    validate: {
      options: {
        allowUnknown: true,
        stripUnknown: true
      },
      // `l` and `s` are accepted as short aliases for `long` and `stream`.
      query: Joi.object().keys({
        arg: Joi.string().default('/'),
        long: Joi.boolean().default(false),
        cidBase: Joi.string().default('base58btc'),
        stream: Joi.boolean().default(false)
      })
        .rename('l', 'long', {
          override: true,
          ignoreUndefined: true
        })
        .rename('s', 'stream', {
          override: true,
          ignoreUndefined: true
        })
    }
  }
}

module.exports = mfsLs
Loading

0 comments on commit 8d5deb9

Please sign in to comment.