Skip to content
This repository has been archived by the owner on Feb 12, 2024. It is now read-only.

Commit

Permalink
fix(files.get):
Browse files Browse the repository at this point in the history
- Move path.join call out of readStream to not mess with the AST parser
- Fix big file test
  • Loading branch information
daviddias committed Aug 10, 2016
1 parent 833f249 commit 248cd13
Show file tree
Hide file tree
Showing 5 changed files with 64 additions and 36 deletions.
3 changes: 2 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -33,8 +33,9 @@
"aegir": "^6.0.0",
"chai": "^3.5.0",
"gulp": "^3.9.1",
"interface-ipfs-core": "^0.5.0",
"interface-ipfs-core": "^0.6.0",
"ipfsd-ctl": "^0.14.0",
"passthrough-counter": "^1.0.0",
"pre-commit": "^1.1.3",
"stream-equal": "^0.1.8",
"stream-http": "^2.3.1",
Expand Down
1 change: 1 addition & 0 deletions src/add-to-dagnode-transform.js
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ module.exports = function (err, res, send, done) {
if (err) {
return done(err)
}

async.map(res, function map (entry, next) {
getDagNode(send, entry.Hash, function (err, node) {
if (err) {
Expand Down
5 changes: 5 additions & 0 deletions src/api/add-files.js
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
'use strict'

const isNode = require('detect-node')
const addToDagNodesTransform = require('../add-to-dagnode-transform')

module.exports = (send) => {
Expand All @@ -9,6 +10,10 @@ module.exports = (send) => {
opts = {}
}

if (!isNode) {
return cb(new Error('Recursive uploads are not supported in the browser'))
}

if (typeof (path) !== 'string') {
return cb(new Error('"path" must be a string'))
}
Expand Down
38 changes: 25 additions & 13 deletions src/request-api.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,20 +4,27 @@ const Wreck = require('wreck')
const Qs = require('qs')
const ndjson = require('ndjson')
const getFilesStream = require('./get-files-stream')
const Counter = require('passthrough-counter')

const isNode = require('detect-node')

// -- Internal

// Collect a chunked (newline-delimited JSON) HTTP response into an array.
// Pipes the response through a passthrough byte counter and the ndjson
// parser, accumulating every parsed object, then calls cb(null, objects)
// once the stream ends.
// NOTE(review): no 'error' handler is attached to the pipeline here —
// presumably errors surface via the caller; confirm.
function parseChunkedJson (res, cb) {
  const parsed = []
  const c = new Counter()
  res
    .pipe(c)
    .pipe(ndjson.parse())
    .on('data', (obj) => {
      parsed.push(obj)
    })
    .on('end', () => {
      cb(null, parsed)
    })
}

function onRes (buffer, cb) {
function onRes (buffer, cb, uri) {
return (err, res) => {
if (err) {
return cb(err)
Expand All @@ -42,10 +49,14 @@ function onRes (buffer, cb) {
})
}

if (stream && !buffer) return cb(null, res)
if (stream && !buffer) {
return cb(null, res)
}

if (chunkedObjects) {
if (isJson) return parseChunkedJson(res, cb)
if (isJson) {
return parseChunkedJson(res, cb)
}

return Wreck.read(res, null, cb)
}
Expand All @@ -56,6 +67,11 @@ function onRes (buffer, cb) {

function requestAPI (config, path, args, qs, files, buffer, cb) {
qs = qs || {}

if (Array.isArray(files)) {
qs.recursive = true
}

if (Array.isArray(path)) path = path.join('/')
if (args && !Array.isArray(args)) args = [args]
if (args) qs.arg = args
Expand All @@ -67,10 +83,6 @@ function requestAPI (config, path, args, qs, files, buffer, cb) {
delete qs.r
}

if (!isNode && qs.recursive && path === 'add') {
return cb(new Error('Recursive uploads are not supported in the browser'))
}

qs['stream-channels'] = true

let stream
Expand Down Expand Up @@ -104,7 +116,7 @@ function requestAPI (config, path, args, qs, files, buffer, cb) {
opts.payload = stream
}

return Wreck.request(opts.method, opts.uri, opts, onRes(buffer, cb))
return Wreck.request(opts.method, opts.uri, opts, onRes(buffer, cb, opts.uri))
}

// -- Interface
Expand All @@ -128,9 +140,9 @@ exports = module.exports = function getRequestAPI (config) {
return requestAPI(config, path, args, qs, files, buffer, cb)
}

// Wraps the 'send' function such that an asynchronous transform may be
// applied to its result before passing it on to either its callback or
// promise.
// Wraps the 'send' function such that an asynchronous
// transform may be applied to its result before
// passing it on to either its callback or promise.
send.withTransform = function (transform) {
return function (path, args, qs, files, buffer, cb) {
if (typeof buffer === 'function') {
Expand Down
53 changes: 31 additions & 22 deletions test/api/get.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -17,13 +17,15 @@ const path = require('path')
// const extract = require('tar-stream').extract

// Fixtures: the small test file is read eagerly; the 15 MB fixture is
// only opened as a stream under Node (browsers cannot read from disk).
const testfile = fs.readFileSync(path.join(__dirname, '/../testfile.txt'))

let testfileBig

if (isNode) {
  // Compute the path outside the createReadStream call so it does not
  // confuse the AST parser used by the browser build (see commit note).
  const tfbPath = path.join(__dirname, '/../15mb.random')
  testfileBig = fs.createReadStream(tfbPath, { bufferSize: 128 })
}

describe.skip('.get', () => {
describe('.get', () => {
it('get with no compression args', (done) => {
apiClients.a
.get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', (err, res) => {
Expand Down Expand Up @@ -92,35 +94,42 @@ describe.skip('.get', () => {
return done()
}

apiClients.a.get('Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq', (err, res) => {
apiClients.a.get('Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq', (err, files) => {
expect(err).to.not.exist

// Do not blow out the memory of nodejs :)
streamEqual(res, testfileBig, (err, equal) => {
expect(err).to.not.exist
expect(equal).to.be.true
done()
files.on('data', (file) => {
// Do not blow out the memory of nodejs :)
streamEqual(file.content, testfileBig, (err, equal) => {
expect(err).to.not.exist
expect(equal).to.be.true
done()
})
})
})
})

// Promise-based variant of .get: the API resolves to a stream of file
// objects; each emitted file exposes its bytes on a readable `content`
// stream (interface-ipfs-core files API).
describe('promise', () => {
  it('get', (done) => {
    apiClients.a.get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')
      .then((files) => {
        files.on('data', (file) => {
          let buf = ''
          file.content
            .on('error', (err) => {
              throw err
            })
            .on('data', (data) => {
              // Chunks arrive as Buffers; accumulate as text.
              buf += data.toString()
            })
            .on('end', () => {
              expect(buf).to.contain(testfile.toString())
              done()
            })
        })
      })
      .catch((err) => {
        expect(err).to.not.exist
      })
  })
})
})

0 comments on commit 248cd13

Please sign in to comment.