Skip to content
This repository has been archived by the owner on Feb 12, 2024. It is now read-only.

Commit

Permalink
feat(files): interface-ipfs-core tests over ipfs-api
Browse files Browse the repository at this point in the history
  • Loading branch information
victorb authored and daviddias committed Sep 12, 2016
1 parent 11cb4ca commit 001a6eb
Show file tree
Hide file tree
Showing 3 changed files with 99 additions and 84 deletions.
1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,7 @@
"promisify-es6": "^1.0.1",
"pull-file": "^1.0.0",
"pull-paramap": "^1.1.6",
"pull-pushable": "^2.0.1",
"pull-sort": "^1.0.0",
"pull-stream": "^3.4.5",
"pull-stream-to-stream": "^1.3.3",
Expand Down
5 changes: 5 additions & 0 deletions src/core/ipfs/files.js
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@ module.exports = function files (self) {
pull(
pull.values([hash]),
pull.asyncMap(self._dagS.get.bind(self._dagS)),
pull.take(1),
pull.map((node) => {
const data = UnixFS.unmarshal(node.data)
if (data.type === 'directory') {
Expand Down Expand Up @@ -81,6 +82,10 @@ module.exports = function files (self) {
return file
})
)))
}),

// Pull-stream variant of `get`: resolves the DAG at `hash` via the exporter
// and hands the resulting pull-stream source to the callback.
// NOTE(review): the callback is invoked synchronously with the exporter's
// source stream; errors surface later on the stream itself, not here —
// confirm callers handle stream-level errors. `exporter` and `self._dagS`
// are provided by the enclosing module scope (not visible in this fragment).
getPull: promisify((hash, callback) => {
callback(null, exporter(hash, self._dagS))
})
}
}
Expand Down
177 changes: 93 additions & 84 deletions src/http-api/resources/files.js
Original file line number Diff line number Diff line change
@@ -1,13 +1,16 @@
'use strict'

const bs58 = require('bs58')
const ndjson = require('ndjson')
const multipart = require('ipfs-multipart')
const debug = require('debug')
const tar = require('tar-stream')
const log = debug('http-api:files')
log.error = debug('http-api:files:error')
const async = require('async')
const pull = require('pull-stream')
const toStream = require('pull-stream-to-stream')
const toPull = require('stream-to-pull-stream')
const pushable = require('pull-pushable')
const EOL = require('os').EOL

exports = module.exports

Expand Down Expand Up @@ -37,15 +40,23 @@ exports.cat = {
// main route handler which is called after the above `parseArgs`, but only if the args were valid
handler: (request, reply) => {
const key = request.pre.args.key
const ipfs = request.server.app.ipfs

request.server.app.ipfs.files.cat(key, (err, stream) => {
ipfs.files.cat(key, (err, stream) => {
if (err) {
log.error(err)
return reply({
Message: 'Failed to cat file: ' + err,
Code: 0
}).code(500)
}

// hapi is not very clever and throws if no
// - _read method
// - _readableState object
// are there :(
stream._read = () => {}
stream._readableState = {}
return reply(stream).header('X-Stream-Output', '1')
})
}
Expand All @@ -58,45 +69,44 @@ exports.get = {
// main route handler which is called after the above `parseArgs`, but only if the args were valid
handler: (request, reply) => {
const key = request.pre.args.key

request.server.app.ipfs.files.get(key, (err, stream) => {
if (err) {
log.error(err)
return reply({
Message: 'Failed to get file: ' + err,
Code: 0
}).code(500)
}
var pack = tar.pack()
const files = []
stream.on('data', (data) => {
files.push(data)
})
const processFile = (file) => {
return (callback) => {
if (!file.content) { // is directory
pack.entry({name: file.path, type: 'directory'})
callback()
} else { // is file
const fileContents = []
file.content.on('data', (data) => {
fileContents.push(data)
})
file.content.on('end', () => {
pack.entry({name: file.path}, Buffer.concat(fileContents))
callback()
})
const ipfs = request.server.app.ipfs
const pack = tar.pack()

ipfs.files.getPull(key, (err, stream) => {
if (err) return handleError(err)

pull(
stream,
pull.asyncMap((file, cb) => {
const header = {name: file.path}

if (!file.content) {
header.type = 'directory'
pack.entry(header)
cb()
} else {
header.size = file.size
toStream.source(file.content)
.pipe(pack.entry(header, cb))
}
}
}
stream.on('end', () => {
const callbacks = files.map(processFile)
async.series(callbacks, () => {
}),
pull.onEnd((err) => {
if (err) return handleError(err)

pack.finalize()
reply(pack).header('X-Stream-Output', '1')
})
})
)
})

// Shared error path for the `get` handler: logs the failure and replies
// with the go-ipfs-style error envelope ({ Message, Code }) and HTTP 500.
// Uses `reply` and `log` captured from the enclosing handler's closure.
// NOTE(review): `reply` must not have been called already when this runs —
// hapi throws on a double reply; presumably guaranteed by the early
// `return handleError(err)` call sites — confirm.
function handleError (err) {
log.error(err)

reply({
Message: 'Failed to get file: ' + err,
Code: 0
}).code(500)
}
}
}

Expand All @@ -106,67 +116,66 @@ exports.add = {
return reply('Array, Buffer, or String is required.').code(400).takeover()
}

const ipfs = request.server.app.ipfs
// TODO: make pull-multipart
const parser = multipart.reqParser(request.payload)
let filesParsed = false

var filesParsed = false
var filesAdded = 0
const fileAdder = pushable()

var serialize = ndjson.serialize()
// hapi doesn't permit object streams: http://hapijs.com/api#replyerr-result
serialize._readableState.objectMode = false

request.server.app.ipfs.files.createAddStream((err, fileAdder) => {
if (err) {
return reply({
Message: err,
Code: 0
}).code(500)
parser.on('file', (fileName, fileStream) => {
const filePair = {
path: fileName,
content: toPull(fileStream)
}
filesParsed = true
fileAdder.push(filePair)
})

fileAdder.on('data', (file) => {
const filePath = file.path ? file.path : file.hash
serialize.write({
Name: filePath,
Hash: file.hash
})
filesAdded++
parser.on('directory', (directory) => {
fileAdder.push({
path: directory,
content: ''
})
})

parser.on('end', () => {
if (!filesParsed) {
return reply("File argument 'data' is required.")
.code(400).takeover()
}
fileAdder.end()
})

fileAdder.on('end', () => {
if (filesAdded === 0 && filesParsed) {
pull(
fileAdder,
ipfs.files.createAddPullStream(),
pull.map((file) => {
return {
Name: file.path ? file.path : file.hash,
Hash: file.hash
}
}),
pull.map((file) => JSON.stringify(file) + EOL),
pull.collect((err, files) => {
if (err) {
return reply({
Message: 'Failed to add files.',
Message: err,
Code: 0
}).code(500)
} else {
serialize.end()
return reply(serialize)
.header('x-chunked-output', '1')
.header('content-type', 'application/json')
}
})

parser.on('file', (fileName, fileStream) => {
var filePair = {
path: fileName,
content: fileStream
if (files.length === 0 && filesParsed) {
return reply({
Message: 'Failed to add files.',
Code: 0
}).code(500)
}
filesParsed = true
fileAdder.write(filePair)
})
parser.on('directory', (directory) => {
fileAdder.write({
path: directory,
content: ''
})
})

parser.on('end', () => {
if (!filesParsed) {
return reply("File argument 'data' is required.").code(400).takeover()
}
fileAdder.end()
reply(files.join(''))
.header('x-chunked-output', '1')
.header('content-type', 'application/json')
})
})
)
}
}

0 comments on commit 001a6eb

Please sign in to comment.