This repository has been archived by the owner on Aug 12, 2020. It is now read-only.

Awesome IPLD endeavour #74

Merged · 1 commit · Oct 28, 2016
README.md: 8 additions, 0 deletions
@@ -25,6 +25,14 @@ IPFS unixFS Engine
 - [Contribute](#contribute)
 - [License](#license)
 
+## BEWARE BEWARE BEWARE there might be 🐉
+
+This module has passed through several iterations and is still far from a nice, easily understandable codebase. Currently missing features:
+
+- tar importer
+- trickle dag exporter
+- sharding
+
 ## Install
 
 With [npm](https://npmjs.org/) installed, run
package.json: 11 additions, 9 deletions
@@ -34,31 +34,33 @@
   },
   "homepage": "https://github.com/ipfs/js-ipfs-unixfs-engineg#readme",
   "devDependencies": {
-    "aegir": "^8.0.1",
+    "aegir": "^8.1.2",
     "buffer-loader": "0.0.1",
     "chai": "^3.5.0",
-    "fs-pull-blob-store": "^0.3.0",
+    "fs-pull-blob-store": "^0.4.1",
     "idb-pull-blob-store": "^0.5.1",
-    "ipfs-block-service": "^0.5.0",
-    "ipfs-repo": "^0.9.0",
+    "ipfs-block-service": "^0.6.0",
+    "ipfs-repo": "^0.10.0",
     "ncp": "^2.0.0",
     "pre-commit": "^1.1.3",
-    "pull-zip": "^2.0.0",
+    "pull-zip": "^2.0.1",
     "raw-loader": "^0.5.1",
     "rimraf": "^2.5.4",
     "run-series": "^1.1.4"
   },
   "dependencies": {
-    "ipfs-merkle-dag": "^0.7.0",
+    "cids": "^0.2.0",
     "ipfs-unixfs": "^0.1.4",
-    "is-ipfs": "^0.2.0",
+    "ipld-dag-pb": "^0.1.3",
+    "ipld-resolver": "^0.1.1",
+    "is-ipfs": "^0.2.1",
     "multihashes": "^0.2.2",
     "pull-block": "^1.0.2",
-    "pull-paramap": "^1.1.6",
+    "pull-paramap": "^1.2.0",
     "pull-pushable": "^2.0.1",
     "pull-stream": "^3.4.5",
     "pull-traverse": "^1.0.3",
-    "pull-write": "^1.1.0",
+    "pull-write": "^1.1.1",
     "run-parallel": "^1.1.6"
   },
   "contributors": [
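The dependency changes are the heart of the PR: the old ipfs-merkle-dag DAG service is dropped in favour of ipld-resolver, with cids and ipld-dag-pb alongside it. The exporter diffs below all follow the same pattern: every raw multihash is wrapped in a CID before the resolver is asked for the node. A minimal sketch of that lookup pattern (the getNode name and the resolver construction are illustrative, not part of this diff):

```js
// Illustrative sketch only: wrap a raw multihash in a CID before fetching,
// the pattern this PR applies throughout src/exporter.
const CID = require('cids')

// `ipldResolver` is assumed to be an ipld-resolver instance created elsewhere
// (e.g. over an ipfs-block-service); `multihash` may be a multihash buffer or
// a base58 string.
function getNode (ipldResolver, multihash, callback) {
  ipldResolver.get(new CID(multihash), callback)
}
```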
src/chunker-fixed-size.js: 0 additions, 7 deletions

This file was deleted; its contents move to src/chunker/fixed-size.js below.

src/chunker/fixed-size.js: 7 additions, 0 deletions

@@ -0,0 +1,7 @@
+'use strict'
+
+const pullBlock = require('pull-block')
+
+module.exports = (size) => {
+  return pullBlock(size, { zeroPadding: false })
+}
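The new chunker is a thin wrapper around pull-block: it re-chunks an incoming stream of buffers into fixed-size blocks, and with zeroPadding disabled the trailing block keeps its natural, shorter length. A small usage sketch, with made-up input data and a path that assumes the repo root:

```js
const pull = require('pull-stream')
const fixedSizeChunker = require('./src/chunker/fixed-size')

pull(
  // Any stream of buffers works; a single buffer keeps the example short.
  pull.values([Buffer.from('the quick brown fox jumps over the lazy dog')]),
  fixedSizeChunker(16), // emit 16-byte blocks
  pull.collect((err, blocks) => {
    if (err) throw err
    // Expect two 16-byte blocks and one shorter trailing block.
    blocks.forEach((block) => console.log(block.length, block.toString()))
  })
)
```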
src/exporters/dir.js → src/exporter/dir.js: 4 additions, 3 deletions
@@ -3,12 +3,13 @@
 const path = require('path')
 const pull = require('pull-stream')
 const paramap = require('pull-paramap')
+const CID = require('cids')
 
 const fileExporter = require('./file')
 const switchType = require('../util').switchType
 
 // Logic to export a unixfs directory.
-module.exports = (node, name, dagService) => {
+module.exports = (node, name, ipldResolver) => {
@@ -25,15 +26,15 @@ module.exports = (node, name, dagService) => {
       path: path.join(name, link.name),
       hash: link.hash
     })),
-    paramap((item, cb) => dagService.get(item.hash, (err, n) => {
+    paramap((item, cb) => ipldResolver.get(new CID(item.hash), (err, n) => {
       if (err) {
         return cb(err)
       }
 
       cb(null, switchType(
         n,
         () => pull.values([item]),
-        () => fileExporter(n, item.path, dagService)
+        () => fileExporter(n, item.path, ipldResolver)
       ))
     })),
     pull.flatten()
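The directory exporter leans on switchType from src/util to decide whether a resolved link is itself a directory (emit the entry as-is) or a file (hand off to the file exporter). That helper is not part of this diff; a plausible sketch of its shape, assuming it unmarshals the unixfs type from the node's data:

```js
// Hypothetical sketch of the switchType helper used above; the real one lives
// in src/util.js, which this diff does not touch.
const UnixFS = require('ipfs-unixfs')

function switchType (node, dirHandler, fileHandler) {
  const data = UnixFS.unmarshal(node.data)
  return data.type === 'directory' ? dirHandler() : fileHandler()
}
```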
src/exporters/file.js → src/exporter/file.js: 3 additions, 2 deletions
@@ -2,11 +2,12 @@
 
 const traverse = require('pull-traverse')
 const UnixFS = require('ipfs-unixfs')
+const CID = require('cids')
 const pull = require('pull-stream')
 const paramap = require('pull-paramap')
 
 // Logic to export a single (possibly chunked) unixfs file.
-module.exports = (node, name, ds) => {
+module.exports = (node, name, ipldResolver) => {
   function getData (node) {
     try {
       const file = UnixFS.unmarshal(node.data)
@@ -19,7 +20,7 @@ module.exports = (node, name, ds) => {
   function visitor (node) {
     return pull(
       pull.values(node.links),
-      paramap((link, cb) => ds.get(link.hash, cb))
+      paramap((link, cb) => ipldResolver.get(new CID(link.hash), cb))
     )
   }
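The file exporter walks a (possibly chunked) file depth-first, resolving each link through the visitor above and unmarshalling the unixfs payload of every node it reaches. A condensed sketch of that pattern, not the module's full code:

```js
// Condensed sketch: depth-first over a chunked file's DAG, emitting the
// unixfs data carried by each node. `visitor` resolves links as shown above.
const traverse = require('pull-traverse')
const pull = require('pull-stream')
const UnixFS = require('ipfs-unixfs')

function fileContent (node, visitor) {
  return pull(
    traverse.depthFirst(node, visitor),
    // Intermediate nodes may carry no payload, hence the empty-buffer fallback.
    pull.map((n) => UnixFS.unmarshal(n.data).data || Buffer.alloc(0))
  )
}
```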
src/exporter.js → src/exporter/index.js: 10 additions, 9 deletions
@@ -2,37 +2,38 @@
 
 const traverse = require('pull-traverse')
 const pull = require('pull-stream')
+const CID = require('cids')
 
-const util = require('./util')
+const util = require('./../util')
 const switchType = util.switchType
 const cleanMultihash = util.cleanMultihash
 
-const dirExporter = require('./exporters/dir')
-const fileExporter = require('./exporters/file')
+const dirExporter = require('./dir')
+const fileExporter = require('./file')
 
-module.exports = (hash, dagService, options) => {
+module.exports = (hash, ipldResolver, options) => {
   hash = cleanMultihash(hash)
   options = options || {}
 
   function visitor (item) {
     return pull(
-      dagService.getStream(item.hash),
+      ipldResolver.getStream(new CID(item.hash)),
       pull.map((node) => switchType(
         node,
-        () => dirExporter(node, item.path, dagService),
-        () => fileExporter(node, item.path, dagService)
+        () => dirExporter(node, item.path, ipldResolver),
+        () => fileExporter(node, item.path, ipldResolver)
       )),
       pull.flatten()
     )
   }
 
   // Traverse the DAG
   return pull(
-    dagService.getStream(hash),
+    ipldResolver.getStream(new CID(hash)),
     pull.map((node) => switchType(
       node,
       () => traverse.widthFirst({path: hash, hash}, visitor),
-      () => fileExporter(node, hash, dagService)
+      () => fileExporter(node, hash, ipldResolver)
    )),
     pull.flatten()
   )
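With these changes the exporter's entry point takes an IPLD resolver rather than a DAG service. A minimal usage sketch; the hash and resolver are assumed to come from elsewhere:

```js
const pull = require('pull-stream')
const exporter = require('./src/exporter')

// `ipldResolver` is assumed to be an ipld-resolver instance wired to a block
// service; `hash` is any multihash accepted by cleanMultihash.
function printPaths (hash, ipldResolver) {
  pull(
    exporter(hash, ipldResolver),
    pull.drain((item) => {
      // Each exported item carries at least a path; files also expose content.
      console.log('exported:', item.path)
    })
  )
}
```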
src/importer.js: 0 additions, 155 deletions

This file was deleted.
