Skip to content

Commit

Permalink
Rework package structure for unixfs and subpackage
Browse files Browse the repository at this point in the history
  • Loading branch information
whyrusleeping committed Oct 8, 2014
1 parent 15a4701 commit 093c8fb
Show file tree
Hide file tree
Showing 19 changed files with 81 additions and 66 deletions.
2 changes: 1 addition & 1 deletion core/commands/add.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,8 @@ import (

"github.com/jbenet/go-ipfs/core"
"github.com/jbenet/go-ipfs/importer"
ft "github.com/jbenet/go-ipfs/importer/format"
dag "github.com/jbenet/go-ipfs/merkledag"
ft "github.com/jbenet/go-ipfs/unixfs"
u "github.com/jbenet/go-ipfs/util"
)

Expand Down
4 changes: 2 additions & 2 deletions core/commands/cat.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ import (
"io"

"github.com/jbenet/go-ipfs/core"
mdag "github.com/jbenet/go-ipfs/merkledag"
uio "github.com/jbenet/go-ipfs/unixfs/io"
)

func Cat(n *core.IpfsNode, args []string, opts map[string]interface{}, out io.Writer) error {
Expand All @@ -15,7 +15,7 @@ func Cat(n *core.IpfsNode, args []string, opts map[string]interface{}, out io.Wr
return fmt.Errorf("catFile error: %v", err)
}

read, err := mdag.NewDagReader(dagnode, n.DAG)
read, err := uio.NewDagReader(dagnode, n.DAG)
if err != nil {
return fmt.Errorf("cat error: %v", err)
}
Expand Down
15 changes: 8 additions & 7 deletions fuse/ipns/ipns_unix.go
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package ipns

import (
"errors"
"io/ioutil"
"os"
"path/filepath"
Expand All @@ -12,10 +13,10 @@ import (

"github.com/jbenet/go-ipfs/core"
ci "github.com/jbenet/go-ipfs/crypto"
imp "github.com/jbenet/go-ipfs/importer"
dt "github.com/jbenet/go-ipfs/importer/dagwriter"
ft "github.com/jbenet/go-ipfs/importer/format"
"github.com/jbenet/go-ipfs/importer/chunk"
mdag "github.com/jbenet/go-ipfs/merkledag"
ft "github.com/jbenet/go-ipfs/unixfs"
uio "github.com/jbenet/go-ipfs/unixfs/io"
u "github.com/jbenet/go-ipfs/util"
)

Expand Down Expand Up @@ -204,7 +205,7 @@ type Node struct {

Ipfs *core.IpfsNode
Nd *mdag.Node
dagMod *dt.DagModifier
dagMod *uio.DagModifier
cached *ft.PBData
}

Expand Down Expand Up @@ -293,7 +294,7 @@ func (s *Node) ReadDir(intr fs.Intr) ([]fuse.Dirent, fuse.Error) {
// ReadAll reads the object data as file data
func (s *Node) ReadAll(intr fs.Intr) ([]byte, fuse.Error) {
log.Debug("ipns: ReadAll [%s]", s.name)
r, err := mdag.NewDagReader(s.Nd, s.Ipfs.DAG)
r, err := uio.NewDagReader(s.Nd, s.Ipfs.DAG)
if err != nil {
return nil, err
}
Expand All @@ -312,7 +313,7 @@ func (n *Node) Write(req *fuse.WriteRequest, resp *fuse.WriteResponse, intr fs.I

if n.dagMod == nil {
// Create a DagModifier to allow us to change the existing dag node
dmod, err := dt.NewDagModifier(n.Nd, n.Ipfs.DAG, imp.DefaultSplitter)
dmod, err := uio.NewDagModifier(n.Nd, n.Ipfs.DAG, chunk.DefaultSplitter)
if err != nil {
log.Error("Error creating dag modifier: %s", err)
return err
Expand Down Expand Up @@ -541,7 +542,7 @@ func (n *Node) Rename(req *fuse.RenameRequest, newDir fs.Node, intr fs.Intr) fus
}
default:
log.Critical("Unknown node type for rename target dir!")
return err
return errors.New("Unknown fs node type!")
}
return nil
}
Expand Down
7 changes: 4 additions & 3 deletions fuse/readonly/readonly_unix.go
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,9 @@ import (
"github.com/jbenet/go-ipfs/Godeps/_workspace/src/bazil.org/fuse"
"github.com/jbenet/go-ipfs/Godeps/_workspace/src/bazil.org/fuse/fs"
core "github.com/jbenet/go-ipfs/core"
ft "github.com/jbenet/go-ipfs/importer/format"
mdag "github.com/jbenet/go-ipfs/merkledag"
ft "github.com/jbenet/go-ipfs/unixfs"
uio "github.com/jbenet/go-ipfs/unixfs/io"
u "github.com/jbenet/go-ipfs/util"
)

Expand Down Expand Up @@ -79,7 +80,7 @@ func (*Root) ReadDir(intr fs.Intr) ([]fuse.Dirent, fuse.Error) {
type Node struct {
Ipfs *core.IpfsNode
Nd *mdag.Node
fd *mdag.DagReader
fd *uio.DagReader
cached *ft.PBData
}

Expand Down Expand Up @@ -143,7 +144,7 @@ func (s *Node) ReadDir(intr fs.Intr) ([]fuse.Dirent, fuse.Error) {
// ReadAll reads the object data as file data
func (s *Node) ReadAll(intr fs.Intr) ([]byte, fuse.Error) {
u.DOut("Read node.\n")
r, err := mdag.NewDagReader(s.Nd, s.Ipfs.DAG)
r, err := uio.NewDagReader(s.Nd, s.Ipfs.DAG)
if err != nil {
return nil, err
}
Expand Down
2 changes: 1 addition & 1 deletion importer/rabin.go → importer/chunk/rabin.go
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package importer
package chunk

import (
"bufio"
Expand Down
12 changes: 10 additions & 2 deletions importer/splitting.go → importer/chunk/splitting.go
Original file line number Diff line number Diff line change
@@ -1,6 +1,14 @@
package importer
package chunk

import "io"
import (
"io"

"github.com/jbenet/go-ipfs/util"
)

var log = util.Logger("chunk")

var DefaultSplitter = &SizeSplitter{1024 * 512}

type BlockSplitter interface {
Split(r io.Reader) chan []byte
Expand Down
9 changes: 4 additions & 5 deletions importer/importer.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,9 @@ import (
"io"
"os"

ft "github.com/jbenet/go-ipfs/importer/format"
"github.com/jbenet/go-ipfs/importer/chunk"
dag "github.com/jbenet/go-ipfs/merkledag"
ft "github.com/jbenet/go-ipfs/unixfs"
"github.com/jbenet/go-ipfs/util"
)

Expand All @@ -18,18 +19,16 @@ var BlockSizeLimit = int64(1048576) // 1 MB
// ErrSizeLimitExceeded signals that a block is larger than BlockSizeLimit.
var ErrSizeLimitExceeded = fmt.Errorf("object size limit exceeded")

var DefaultSplitter = &SizeSplitter{1024 * 512}

// todo: incremental construction with an ipfs node. dumping constructed
// objects into the datastore, to avoid buffering all in memory

// NewDagFromReader constructs a Merkle DAG from the given io.Reader.
// size required for block construction.
func NewDagFromReader(r io.Reader) (*dag.Node, error) {
return NewDagFromReaderWithSplitter(r, DefaultSplitter)
return NewDagFromReaderWithSplitter(r, chunk.DefaultSplitter)
}

func NewDagFromReaderWithSplitter(r io.Reader, spl BlockSplitter) (*dag.Node, error) {
func NewDagFromReaderWithSplitter(r io.Reader, spl chunk.BlockSplitter) (*dag.Node, error) {
blkChan := spl.Split(r)
first := <-blkChan
root := &dag.Node{}
Expand Down
18 changes: 11 additions & 7 deletions importer/importer_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,13 @@ import (
"os"
"testing"

dag "github.com/jbenet/go-ipfs/merkledag"
"github.com/jbenet/go-ipfs/importer/chunk"
uio "github.com/jbenet/go-ipfs/unixfs/io"
)

// NOTE:
// These tests test a combination of unixfs/io/dagreader and importer/chunk.
// Maybe split them up somehow?
func TestBuildDag(t *testing.T) {
td := os.TempDir()
fi, err := os.Create(td + "/tmpfi")
Expand All @@ -34,9 +38,9 @@ func TestBuildDag(t *testing.T) {

//Test where calls to read are smaller than the chunk size
func TestSizeBasedSplit(t *testing.T) {
bs := &SizeSplitter{512}
bs := &chunk.SizeSplitter{512}
testFileConsistency(t, bs, 32*512)
bs = &SizeSplitter{4096}
bs = &chunk.SizeSplitter{4096}
testFileConsistency(t, bs, 32*4096)

// Uneven offset
Expand All @@ -49,15 +53,15 @@ func dup(b []byte) []byte {
return o
}

func testFileConsistency(t *testing.T, bs BlockSplitter, nbytes int) {
func testFileConsistency(t *testing.T, bs chunk.BlockSplitter, nbytes int) {
buf := new(bytes.Buffer)
io.CopyN(buf, rand.Reader, int64(nbytes))
should := dup(buf.Bytes())
nd, err := NewDagFromReaderWithSplitter(buf, bs)
if err != nil {
t.Fatal(err)
}
r, err := dag.NewDagReader(nd, nil)
r, err := uio.NewDagReader(nd, nil)
if err != nil {
t.Fatal(err)
}
Expand Down Expand Up @@ -86,14 +90,14 @@ func arrComp(a, b []byte) error {
}

func TestMaybeRabinConsistency(t *testing.T) {
testFileConsistency(t, NewMaybeRabin(4096), 256*4096)
testFileConsistency(t, chunk.NewMaybeRabin(4096), 256*4096)
}

func TestRabinBlockSize(t *testing.T) {
buf := new(bytes.Buffer)
nbytes := 1024 * 1024
io.CopyN(buf, rand.Reader, int64(nbytes))
rab := NewMaybeRabin(4096)
rab := chunk.NewMaybeRabin(4096)
blkch := rab.Split(buf)

var blocks [][]byte
Expand Down
3 changes: 2 additions & 1 deletion server/http/ipfs.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ import (
core "github.com/jbenet/go-ipfs/core"
"github.com/jbenet/go-ipfs/importer"
dag "github.com/jbenet/go-ipfs/merkledag"
uio "github.com/jbenet/go-ipfs/unixfs/io"
u "github.com/jbenet/go-ipfs/util"
)

Expand Down Expand Up @@ -33,5 +34,5 @@ func (i *ipfsHandler) AddNodeToDAG(nd *dag.Node) (u.Key, error) {
}

func (i *ipfsHandler) NewDagReader(nd *dag.Node) (io.Reader, error) {
return dag.NewDagReader(nd, i.node.DAG)
return uio.NewDagReader(nd, i.node.DAG)
}
File renamed without changes.
10 changes: 5 additions & 5 deletions importer/format/data.pb.go → unixfs/data.pb.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion importer/format/data.proto → unixfs/data.proto
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package format;
package unixfs;

message PBData {
enum DataType {
Expand Down
2 changes: 1 addition & 1 deletion importer/format/format.go → unixfs/format.go
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
// Package format implements a data format for files in the ipfs filesystem
// It is not the only format in ipfs, but it is the one that the filesystem assumes
package format
package unixfs

import (
"errors"
Expand Down
2 changes: 1 addition & 1 deletion importer/format/format_test.go → unixfs/format_test.go
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package format
package unixfs

import (
"testing"
Expand Down
12 changes: 6 additions & 6 deletions importer/dagwriter/dagmodifier.go → unixfs/io/dagmodifier.go
Original file line number Diff line number Diff line change
@@ -1,14 +1,14 @@
package dagwriter
package io

import (
"bytes"
"errors"

"github.com/jbenet/go-ipfs/Godeps/_workspace/src/code.google.com/p/goprotobuf/proto"

imp "github.com/jbenet/go-ipfs/importer"
ft "github.com/jbenet/go-ipfs/importer/format"
"github.com/jbenet/go-ipfs/importer/chunk"
mdag "github.com/jbenet/go-ipfs/merkledag"
ft "github.com/jbenet/go-ipfs/unixfs"
u "github.com/jbenet/go-ipfs/util"
)

Expand All @@ -20,10 +20,10 @@ type DagModifier struct {
curNode *mdag.Node

pbdata *ft.PBData
splitter imp.BlockSplitter
splitter chunk.BlockSplitter
}

func NewDagModifier(from *mdag.Node, serv *mdag.DAGService, spl imp.BlockSplitter) (*DagModifier, error) {
func NewDagModifier(from *mdag.Node, serv *mdag.DAGService, spl chunk.BlockSplitter) (*DagModifier, error) {
pbd, err := ft.FromBytes(from.Data)
if err != nil {
return nil, err
Expand Down Expand Up @@ -172,7 +172,7 @@ func (dm *DagModifier) WriteAt(b []byte, offset uint64) (int, error) {

// splitBytes uses a splitterFunc to turn a large array of bytes
// into many smaller arrays of bytes
func splitBytes(b []byte, spl imp.BlockSplitter) [][]byte {
func splitBytes(b []byte, spl chunk.BlockSplitter) [][]byte {
out := spl.Split(bytes.NewReader(b))
var arr [][]byte
for blk := range out {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package dagwriter
package io

import (
"fmt"
Expand All @@ -8,9 +8,9 @@ import (

"github.com/jbenet/go-ipfs/Godeps/_workspace/src/github.com/op/go-logging"
bs "github.com/jbenet/go-ipfs/blockservice"
imp "github.com/jbenet/go-ipfs/importer"
ft "github.com/jbenet/go-ipfs/importer/format"
"github.com/jbenet/go-ipfs/importer/chunk"
mdag "github.com/jbenet/go-ipfs/merkledag"
ft "github.com/jbenet/go-ipfs/unixfs"
u "github.com/jbenet/go-ipfs/util"

ds "github.com/jbenet/go-ipfs/Godeps/_workspace/src/github.com/jbenet/datastore.go"
Expand All @@ -26,7 +26,7 @@ func getMockDagServ(t *testing.T) *mdag.DAGService {
}

func getNode(t *testing.T, dserv *mdag.DAGService, size int64) ([]byte, *mdag.Node) {
dw := NewDagWriter(dserv, &imp.SizeSplitter{500})
dw := NewDagWriter(dserv, &chunk.SizeSplitter{500})

n, err := io.CopyN(dw, u.NewFastRand(), size)
if err != nil {
Expand All @@ -39,7 +39,7 @@ func getNode(t *testing.T, dserv *mdag.DAGService, size int64) ([]byte, *mdag.No
dw.Close()
node := dw.GetNode()

dr, err := mdag.NewDagReader(node, dserv)
dr, err := NewDagReader(node, dserv)
if err != nil {
t.Fatal(err)
}
Expand Down Expand Up @@ -76,7 +76,7 @@ func testModWrite(t *testing.T, beg, size uint64, orig []byte, dm *DagModifier)
t.Fatal(err)
}

rd, err := mdag.NewDagReader(nd, dm.dagserv)
rd, err := NewDagReader(nd, dm.dagserv)
if err != nil {
t.Fatal(err)
}
Expand All @@ -99,7 +99,7 @@ func TestDagModifierBasic(t *testing.T) {
dserv := getMockDagServ(t)
b, n := getNode(t, dserv, 50000)

dagmod, err := NewDagModifier(n, dserv, &imp.SizeSplitter{512})
dagmod, err := NewDagModifier(n, dserv, &chunk.SizeSplitter{512})
if err != nil {
t.Fatal(err)
}
Expand Down
Loading

1 comment on commit 093c8fb

@jbenet
Copy link
Member

@jbenet jbenet commented on 093c8fb Oct 8, 2014

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Awesome, LGTM!

Please sign in to comment.