This repository has been archived by the owner on Feb 12, 2024. It is now read-only.

Upgrade the files branch (#323) to work with pull-streams #469

Merged: 8 commits, Sep 12, 2016
3 changes: 2 additions & 1 deletion package.json
@@ -76,7 +76,7 @@
"ipfs-multipart": "^0.1.0",
"ipfs-repo": "^0.9.0",
"ipfs-unixfs": "^0.1.4",
"ipfs-unixfs-engine": "^0.11.2",
"ipfs-unixfs-engine": "^0.11.3",
"isstream": "^0.1.2",
"joi": "^9.0.4",
"libp2p-ipfs": "^0.13.0",
@@ -96,6 +96,7 @@
"promisify-es6": "^1.0.1",
"pull-file": "^1.0.0",
"pull-paramap": "^1.1.6",
"pull-pushable": "^2.0.1",
"pull-sort": "^1.0.0",
"pull-stream": "^3.4.5",
"pull-stream-to-stream": "^1.3.3",
2 changes: 1 addition & 1 deletion src/cli/commands/files/cat.js
@@ -13,7 +13,7 @@ module.exports = {
builder: {},

handler (argv) {
const path = argv.ipfsPath
const path = argv['ipfs-path']
utils.getIPFS((err, ipfs) => {
if (err) {
throw err
41 changes: 24 additions & 17 deletions src/core/ipfs/files.js
@@ -50,23 +50,26 @@ module.exports = function files (self) {
return callback(new Error('You must supply a multihash'))
}

pull(
pull.values([hash]),
pull.asyncMap(self._dagS.get.bind(self._dagS)),
pull.map((node) => {
const data = UnixFS.unmarshal(node.data)
if (data.type === 'directory') {
return pull.error(new Error('This dag node is a directory'))
}

return exporter(hash, self._dagS)
}),
pull.flatten(),
pull.collect((err, files) => {
if (err) return callback(err)
callback(null, toStream.source(files[0].content))
})
)
self._dagS.get(hash, (err, node) => {
if (err) {
return callback(err)
}

const data = UnixFS.unmarshal(node.data)
if (data.type === 'directory') {
return callback(
new Error('This dag node is a directory')
)
}

pull(
exporter(hash, self._dagS),
pull.collect((err, files) => {
if (err) return callback(err)
callback(null, toStream.source(files[0].content))
})
)
})
}),

get: promisify((hash, callback) => {
@@ -81,6 +84,10 @@
return file
Member: We need to pause the stream? Would it otherwise start emitting without listeners?

Member Author: What pause?

Member: Check the lines above.

Member Author: Yeah, data can be lost if I don't pause it. Oh, my love for Node streams is so great.

Member: It's weird to me that it starts flowing before a 'data' listener is attached.
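For context on the thread above: a Node readable in flowing mode emits 'data' whether or not anyone is listening, so chunks emitted before the consumer attaches are silently dropped. A minimal sketch of that behaviour with a plain PassThrough, not code from this PR:

const PassThrough = require('stream').PassThrough

// Flowing with no 'data' handler attached: the chunk is discarded.
const leaky = new PassThrough()
leaky.resume()              // switch to flowing mode, nobody listening
leaky.write('early chunk')  // lost

// Left paused (the default), the same chunk stays buffered until a consumer shows up.
const buffered = new PassThrough()
buffered.write('early chunk')
setTimeout(() => {
  buffered.on('data', (chunk) => console.log(chunk.toString())) // logs 'early chunk'
}, 100)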

})
)))
}),

getPull: promisify((hash, callback) => {
callback(null, exporter(hash, self._dagS))
})
}
}
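A rough usage sketch of the reworked core API above, assuming `ipfs` is an initialized node and `hash` a placeholder multihash: `cat` yields a Node readable converted from the pull-stream exporter, while `getPull` hands back the pull-stream source directly.

const pull = require('pull-stream')

// cat: Node readable of a single file's bytes
ipfs.files.cat(hash, (err, stream) => {
  if (err) throw err
  stream.pipe(process.stdout)
})

// getPull: pull-stream source of exported entries ({ path, content, ... })
ipfs.files.getPull(hash, (err, source) => {
  if (err) throw err
  pull(
    source,
    pull.drain((file) => console.log(file.path))
  )
})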
1 change: 0 additions & 1 deletion src/core/ipfs/object.js
@@ -155,7 +155,6 @@ module.exports = function object (self) {
if (err) {
return cb(err)
}

cb(null, node.data)
})
}),
147 changes: 144 additions & 3 deletions src/http-api/resources/files.js
@@ -1,9 +1,16 @@
'use strict'

const bs58 = require('bs58')
const multipart = require('ipfs-multipart')
const debug = require('debug')
const tar = require('tar-stream')
const log = debug('http-api:files')
log.error = debug('http-api:files:error')
const pull = require('pull-stream')
const toStream = require('pull-stream-to-stream')
const toPull = require('stream-to-pull-stream')
const pushable = require('pull-pushable')
const EOL = require('os').EOL

exports = module.exports

@@ -33,18 +40,152 @@ exports.cat = {
// main route handler which is called after the above `parseArgs`, but only if the args were valid
handler: (request, reply) => {
const key = request.pre.args.key
const ipfs = request.server.app.ipfs

request.server.app.ipfs.files.cat(key, (err, stream) => {
ipfs.files.cat(key, (err, stream) => {
Member: 👍
if (err) {
log.error(err)
return reply({
Message: 'Failed to cat file: ' + err,
Code: 0
}).code(500)
}
stream.on('data', (data) => {
return reply(data)

// hapi is not very clever and throws if no
// - _read method
// - _readableState object
// are there :(
stream._read = () => {}
stream._readableState = {}
Member: This seems like a bug in Hapi //cc @sericaia

return reply(stream).header('X-Stream-Output', '1')
})
}
}
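A minimal sketch of the workaround discussed above, assuming the same pull-stream-to-stream conversion; the two dummy properties exist only to get past hapi's readable-stream check, as in the handler. The helper name is hypothetical.

const pull = require('pull-stream')
const toStream = require('pull-stream-to-stream')

// Sketch: stream a pull-stream source out through a hapi reply.
function replyPullSource (reply, source) {
  const stream = toStream.source(source)
  // hapi refuses the value unless these readable-stream internals are present
  stream._read = () => {}
  stream._readableState = {}
  return reply(stream).header('X-Stream-Output', '1')
}

// hypothetical usage inside a handler:
// replyPullSource(reply, pull.values([Buffer.from('hello world')]))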

exports.get = {
// uses common parseKey method that returns a `key`
parseArgs: exports.parseKey,

// main route handler which is called after the above `parseArgs`, but only if the args were valid
handler: (request, reply) => {
const key = request.pre.args.key
const ipfs = request.server.app.ipfs
const pack = tar.pack()

ipfs.files.getPull(key, (err, stream) => {
if (err) {
log.error(err)

reply({
Message: 'Failed to get file: ' + err,
Code: 0
}).code(500)
return
}

pull(
stream,
pull.asyncMap((file, cb) => {
const header = {name: file.path}

if (!file.content) {
header.type = 'directory'
pack.entry(header)
cb()
} else {
header.size = file.size
toStream.source(file.content)
.pipe(pack.entry(header, cb))
}
}),
pull.onEnd((err) => {
if (err) {
log.error(err)

reply({
Message: 'Failed to get file: ' + err,
Code: 0
}).code(500)
return
}

pack.finalize()
})
)

// the reply must read the tar stream,
// to pull values through
reply(pack).header('X-Stream-Output', '1')
})
}
}
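A standalone sketch of the tar-stream pattern used by the get handler above, independent of IPFS: directories become header-only entries, file bytes are written into pack.entry, and finalize() ends the archive. The file name and contents below are made up.

const fs = require('fs')
const tar = require('tar-stream')

const pack = tar.pack()

// header-only entry for a directory
pack.entry({ name: 'a-dir', type: 'directory' })

// file entry: write the bytes, then end; the callback fires once the entry is flushed
const entry = pack.entry({ name: 'a-dir/hello.txt', size: 11 }, (err) => {
  if (err) throw err
  pack.finalize() // no more entries
})
entry.write('hello world')
entry.end()

pack.pipe(fs.createWriteStream('example.tar'))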

exports.add = {
handler: (request, reply) => {
if (!request.payload) {
return reply('Array, Buffer, or String is required.').code(400).takeover()
}

const ipfs = request.server.app.ipfs
// TODO: make pull-multipart
Member: Track this in an issue. @xicombd, you probably would like to get that working, since you did the first multipart parser :)
const parser = multipart.reqParser(request.payload)
let filesParsed = false

const fileAdder = pushable()

parser.on('file', (fileName, fileStream) => {
const filePair = {
path: fileName,
content: toPull(fileStream)
}
filesParsed = true
fileAdder.push(filePair)
})

parser.on('directory', (directory) => {
fileAdder.push({
path: directory,
content: ''
})
})

parser.on('end', () => {
if (!filesParsed) {
return reply("File argument 'data' is required.")
.code(400).takeover()
}
fileAdder.end()
})

pull(
fileAdder,
ipfs.files.createAddPullStream(),
pull.map((file) => {
return {
Name: file.path ? file.path : file.hash,
Hash: file.hash
}
}),
pull.map((file) => JSON.stringify(file) + EOL),
pull.collect((err, files) => {
if (err) {
return reply({
Message: err,
Code: 0
}).code(500)
}

if (files.length === 0 && filesParsed) {
return reply({
Message: 'Failed to add files.',
Code: 0
}).code(500)
}

reply(files.join(''))
.header('x-chunked-output', '1')
.header('content-type', 'application/json')
})
)
}
}
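The add handler above bridges an event-emitter parser into a pull-stream pipeline with pull-pushable: each parser event pushes a value into the pushable source, and end() closes it. A minimal sketch of that bridge, with a plain EventEmitter standing in for the multipart parser and made-up event names:

const EventEmitter = require('events').EventEmitter
const pull = require('pull-stream')
const pushable = require('pull-pushable')

const parser = new EventEmitter() // stands in for the multipart parser
const source = pushable()

parser.on('item', (item) => source.push(item))
parser.on('end', () => source.end())

pull(
  source,
  pull.map((item) => item.toUpperCase()),
  pull.collect((err, items) => {
    if (err) throw err
    console.log(items) // ['A', 'B']
  })
)

parser.emit('item', 'a')
parser.emit('item', 'b')
parser.emit('end')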
26 changes: 26 additions & 0 deletions src/http-api/routes/files.js
@@ -6,6 +6,7 @@ module.exports = (server) => {
const api = server.select('API')

api.route({
// TODO fix method
Member: This is an http-api-spec thing.

Member Author: Not sure who put that there. @victorbjelkholm, did you?

Member: Yeah, I put it there, mainly as a reminder that we should talk about setting the method of the endpoints in http-api-spec and change them accordingly here.

method: '*',
path: '/api/v0/cat',
config: {
Expand All @@ -15,4 +16,29 @@ module.exports = (server) => {
handler: resources.files.cat.handler
}
})

api.route({
// TODO fix method
method: '*',
path: '/api/v0/get',
config: {
pre: [
{ method: resources.files.get.parseArgs, assign: 'args' }
],
handler: resources.files.get.handler
}
})

api.route({
// TODO fix method
method: '*',
path: '/api/v0/add',
config: {
payload: {
parse: false,
output: 'stream'
},
handler: resources.files.add.handler
}
})
}
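For reference, a sketch of exercising the new routes over the HTTP API with Node's http module. The API address, query parameter, and placeholder hash are assumptions, not values from this PR.

const fs = require('fs')
const http = require('http')

const hash = 'Qm...' // placeholder multihash
const apiUrl = 'http://127.0.0.1:5002' // assumed js-ipfs API address

// stream a file's bytes through /api/v0/cat
http.get(apiUrl + '/api/v0/cat?arg=' + hash, (res) => {
  res.pipe(process.stdout)
})

// fetch a tar archive of a DAG through /api/v0/get
http.get(apiUrl + '/api/v0/get?arg=' + hash, (res) => {
  res.pipe(fs.createWriteStream('out.tar'))
})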
1 change: 1 addition & 0 deletions test/cli/test-files.js
@@ -17,6 +17,7 @@ describe('files', () => {
.run((err, stdout, exitcode) => {
expect(err).to.not.exist
expect(exitcode).to.equal(0)
expect(stdout[0]).to.equal('hello world')
done()
})
})
2 changes: 1 addition & 1 deletion test/core/both/test-init.js
@@ -13,7 +13,7 @@ describe('init', function () {
const repo = createTempRepo()
const ipfs = new IPFS(repo)

ipfs.init({ emptyRepo: true }, (err) => {
ipfs.init({ emptyRepo: true, bits: 128 }, (err) => {
expect(err).to.not.exist

repo.exists((err, res) => {
@@ -2,7 +2,6 @@

'use strict'

/*
const test = require('interface-ipfs-core')
const FactoryClient = require('./../../utils/factory-http')

@@ -17,8 +16,5 @@ const common = {
fc.dismantle(callback)
}
}
*/

// TODO
// needs: https://github.com/ipfs/js-ipfs/pull/323
// test.files(common)
test.files(common)
38 changes: 0 additions & 38 deletions test/http-api/ipfs-api/test-files.js

This file was deleted.

4 changes: 2 additions & 2 deletions test/utils/factory-core/index.js
@@ -36,7 +36,7 @@ function Factory () {

if (!config) {
config = JSON.parse(JSON.stringify(defaultConfig))
const pId = PeerId.create({ bits: 32 }).toJSON()
const pId = PeerId.create({ bits: 512 }).toJSON()
config.Identity.PeerID = pId.id
config.Identity.PrivKey = pId.privKey
}
@@ -69,7 +69,7 @@ function Factory () {

// create the IPFS node
const ipfs = new IPFS(repo)
ipfs.init({ emptyRepo: true }, (err) => {
ipfs.init({ emptyRepo: true, bits: 512 }, (err) => {
if (err) {
return callback(err)
}