
Commit: updates and fixes

dignifiedquire committed Sep 8, 2016
1 parent bbb1cda, commit a6a8ee2

Showing 3 changed files with 27 additions and 29 deletions.
25 changes: 13 additions & 12 deletions package.json
@@ -44,13 +44,14 @@
     "buffer-loader": "0.0.1",
     "chai": "^3.5.0",
     "expose-loader": "^0.7.1",
-    "form-data": "^1.0.0-rc4",
+    "form-data": "^1.0.1",
     "fs-pull-blob-store": "^0.3.0",
     "gulp": "^3.9.1",
     "idb-plus-blob-store": "^1.1.2",
     "idb-pull-blob-store": "^0.4.0",
     "interface-ipfs-core": "^0.14.3",
+    "left-pad": "^1.1.1",
-    "lodash": "^4.14.1",
+    "lodash": "^4.15.0",
     "ncp": "^2.0.0",
     "nexpect": "^0.5.0",
     "pre-commit": "^1.1.3",
@@ -61,11 +62,11 @@
   "dependencies": {
     "babel-runtime": "^6.11.6",
     "bl": "^1.1.2",
-    "boom": "^3.2.2",
+    "boom": "^4.0.0",
     "bs58": "^3.0.0",
     "debug": "^2.2.0",
     "detect-node": "^2.0.3",
-    "glob": "^7.0.5",
+    "glob": "^7.0.6",
     "hapi": "^15.0.3",
     "ipfs-api": "^8.0.3",
     "ipfs-bitswap": "^0.6.0",
@@ -75,16 +76,16 @@
     "ipfs-multipart": "^0.1.0",
     "ipfs-repo": "^0.8.0",
     "ipfs-unixfs": "^0.1.4",
-    "ipfs-unixfs-engine": "^0.10.1",
+    "ipfs-unixfs-engine": "^0.10.2",
     "isstream": "^0.1.2",
     "joi": "^9.0.4",
     "libp2p-ipfs": "^0.12.1",
     "libp2p-ipfs-browser": "^0.12.1",
-    "lodash.get": "^4.4.1",
+    "lodash.get": "^4.4.2",
     "lodash.has": "^4.5.2",
-    "lodash.set": "^4.3.1",
-    "lodash.sortby": "^4.6.1",
-    "mafmt": "^2.1.1",
+    "lodash.set": "^4.3.2",
+    "lodash.sortby": "^4.7.0",
+    "mafmt": "^2.1.2",
     "map-limit": "0.0.1",
     "multiaddr": "^2.0.2",
     "multihashes": "^0.2.2",
@@ -95,11 +96,11 @@
     "promisify-es6": "^1.0.1",
     "pull-file": "^0.5.0",
     "pull-paramap": "^1.1.6",
-    "pull-stream": "^3.4.3",
-    "pull-stream-to-stream": "^1.3.1",
+    "pull-sort": "^1.0.0",
+    "pull-stream": "^3.4.5",
+    "pull-stream-to-stream": "^1.3.2",
     "pull-zip": "^2.0.0",
     "read-pkg-up": "^1.0.1",
-    "readable-stream": "1.1.13",
     "run-parallel": "^1.1.6",
     "run-parallel-limit": "^1.0.3",
     "run-series": "^1.1.4",
3 changes: 1 addition & 2 deletions src/core/ipfs/block.js
@@ -7,7 +7,6 @@ module.exports = function block (self) {
   return {
     get: (hash, callback) => {
       hash = cleanHash(hash)
-      console.log('block.get', hash)
       self._blockS.get(hash, callback)
     },
     put: (block, callback) => {
@@ -24,7 +23,7 @@
     },
     del: (hash, callback) => {
       hash = cleanHash(hash)
-      self._blockS.deleteBlock(hash, callback)
+      self._blockS.delete(hash, callback)
     },
     stat: (hash, callback) => {
       hash = cleanHash(hash)
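
The only behavioural change in this file is at the block-service boundary: the del handler now calls delete instead of deleteBlock on self._blockS. A minimal standalone sketch of the contract block.js now assumes; the in-memory service below is a hypothetical stand-in, not the real ipfs-block-service API:

// Hypothetical stand-in for self._blockS, illustrating the renamed method.
const blocks = {}
const blockService = {
  delete: (hash, callback) => {
    delete blocks[hash]
    callback(null)
  }
}

// Mirrors what block.del does internally after this commit.
const hash = 'QmExampleHash' // hypothetical base58-encoded multihash
blockService.delete(hash, (err) => {
  if (err) return console.error(err)
  console.log('block deleted')
})
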
28 changes: 13 additions & 15 deletions src/core/ipfs/files.js
@@ -8,6 +8,7 @@ const isStream = require('isstream')
 const promisify = require('promisify-es6')
 const multihashes = require('multihashes')
 const pull = require('pull-stream')
+const sort = require('pull-sort')
 const toStream = require('pull-stream-to-stream')
 const toPull = require('stream-to-pull-stream')

@@ -35,6 +36,11 @@ module.exports = function files (self) {
         pull.values(normalizeContent(data)),
         importer(self._dagS),
         pull.asyncMap(prepareFile.bind(null, self)),
+        sort((a, b) => {
+          if (a.path < b.path) return 1
+          if (a.path > b.path) return -1
+          return 0
+        }),
         pull.collect(callback)
       )
     }),
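
For reference, the comparator added above makes pull-sort emit entries in reverse lexicographic order of path, so files inside a directory are collected before the directory entry itself. A standalone sketch of that ordering, using only pull-stream and pull-sort from this package.json; the paths are made up:

const pull = require('pull-stream')
const sort = require('pull-sort')

pull(
  pull.values([
    { path: 'dir' },
    { path: 'dir/a.txt' },
    { path: 'dir/b.txt' }
  ]),
  sort((a, b) => {
    if (a.path < b.path) return 1
    if (a.path > b.path) return -1
    return 0
  }),
  pull.collect((err, entries) => {
    if (err) throw err
    // expected order: dir/b.txt, dir/a.txt, dir
    console.log(entries.map((e) => e.path))
  })
)
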
@@ -58,7 +64,7 @@
         pull.flatten(),
         pull.collect((err, files) => {
           if (err) return callback(err)
-          callback(null, contentToStream(files[0].content))
+          callback(null, toStream.source(files[0].content))
         })
       )
     }),
@@ -68,8 +74,13 @@
         exporter(hash, self._dagS),
         pull.map((file) => {
           if (file.content) {
-            file.content = contentToStream(file.content)
+            file.content = toStream.source(pull(
+              file.content,
+              pull.through((val) => console.log('r', file.path, val.length))
+            ))
+            file.content.pause()
           }
+
           return file
         })
       )))
@@ -125,16 +136,3 @@ function normalizeContent (content) {
     return data
   })
 }
-
-function contentToStream (content) {
-  return toStream.source(pull(
-    content,
-    pull.map((chunk) => {
-      if (Buffer.isBuffer(chunk)) {
-        return chunk
-      }
-
-      return Buffer([chunk])
-    })
-  ))
-}
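
With the contentToStream helper removed, the cat and get paths now hand pull-stream sources directly to toStream.source from pull-stream-to-stream, which wraps them as Node.js readable streams (the .pause() call in the get path presumably keeps the wrapped content from flowing before a consumer attaches). A standalone sketch of that conversion, using only modules already listed in package.json; the buffer contents are made up:

const pull = require('pull-stream')
const toStream = require('pull-stream-to-stream')

// A pull-stream source of two buffers, exposed as a Node.js readable stream.
const source = pull.values([Buffer.from('hello '), Buffer.from('world\n')])
const readable = toStream.source(source)

readable.pipe(process.stdout)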
