adder: add support for specifying the hash function #3919

Merged: 2 commits, May 14, 2017

18 changes: 18 additions & 0 deletions core/commands/add.go
@@ -4,6 +4,7 @@ import (
"errors"
"fmt"
"io"
"strings"

bstore "github.com/ipfs/go-ipfs/blocks/blockstore"
blockservice "github.com/ipfs/go-ipfs/blockservice"
@@ -17,6 +18,7 @@ import (
mfs "github.com/ipfs/go-ipfs/mfs"
ft "github.com/ipfs/go-ipfs/unixfs"

mh "gx/ipfs/QmVGtdTZdTFaLsaj2RwdVG8jcjNNcp1DE914DKZ2kHmXHw/go-multihash"
u "gx/ipfs/QmWbjfz3u6HkAdPh34dgPchGbQjob6LXLhAeCGii2TX69n/go-ipfs-util"
"gx/ipfs/QmeWjRodbcZFKe5tMN7poEx3izym6osrLSnTLf9UjJZBbs/pb"
)
@@ -39,6 +41,7 @@ const (
noCopyOptionName = "nocopy"
fstoreCacheOptionName = "fscache"
cidVersionOptionName = "cid-version"
hashOptionName = "hash"
)

const adderOutChanSize = 8
@@ -90,6 +93,7 @@ You can now refer to the added file in a gateway, like so:
cmds.BoolOption(noCopyOptionName, "Add the file using filestore. (experimental)"),
cmds.BoolOption(fstoreCacheOptionName, "Check the filestore for pre-existing blocks. (experimental)"),
cmds.IntOption(cidVersionOptionName, "Cid version. Non-zero value will change default of 'raw-leaves' to true. (experimental)").Default(0),
cmds.StringOption(hashOptionName, "Hash function to use. Will set Cid version to 1 if used. (experimental)").Default("sha2-256"),
},
PreRun: func(req cmds.Request) error {
quiet, _, _ := req.Option(quietOptionName).Bool()
@@ -164,6 +168,7 @@ You can now refer to the added file in a gateway, like so:
nocopy, _, _ := req.Option(noCopyOptionName).Bool()
fscache, _, _ := req.Option(fstoreCacheOptionName).Bool()
cidVer, _, _ := req.Option(cidVersionOptionName).Int()
hashFunStr, hfset, _ := req.Option(hashOptionName).String()

if nocopy && !cfg.Experimental.FilestoreEnabled {
res.SetError(errors.New("filestore is not enabled, see https://git.io/vy4XN"),
@@ -180,6 +185,10 @@ You can now refer to the added file in a gateway, like so:
return
}

if hfset && cidVer == 0 {
cidVer = 1
}

if cidVer >= 1 && !rbset {
rawblks = true
}
@@ -190,6 +199,15 @@ You can now refer to the added file in a gateway, like so:
return
}

hashFunCode, ok := mh.Names[strings.ToLower(hashFunStr)]
if !ok {
res.SetError(fmt.Errorf("unrecognized hash function: %s", strings.ToLower(hashFunStr)), cmds.ErrNormal)
return
}

prefix.MhType = hashFunCode
prefix.MhLength = -1

if hash {
nilnode, err := core.NewNode(n.Context(), &core.BuildCfg{
//TODO: need this to be true or all files
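
For readers following along, here is a minimal, self-contained sketch of the name-to-multihash lookup and CID prefix handling the new `--hash` option relies on. It is not code from this PR: the import paths below are the github.com homes of the packages referenced above rather than the in-tree gx hashes, and the input name and data are placeholders.

```go
package main

import (
	"fmt"
	"strings"

	cid "github.com/ipfs/go-cid"
	mh "github.com/multiformats/go-multihash"
)

func main() {
	// Resolve a user-supplied name to a multihash code via mh.Names,
	// rejecting unknown names, as the command above does.
	hashFunStr := "blake2b-256"
	code, ok := mh.Names[strings.ToLower(hashFunStr)]
	if !ok {
		fmt.Printf("unrecognized hash function: %s\n", hashFunStr)
		return
	}

	// Carry the code on a CIDv1 prefix; an MhLength of -1 means
	// "use the default digest length for this hash".
	prefix := cid.Prefix{
		Version:  1,
		Codec:    cid.DagProtobuf,
		MhType:   code,
		MhLength: -1,
	}

	c, err := prefix.Sum([]byte("some block data"))
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(c) // a CIDv1 whose multihash uses blake2b-256
}
```
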
19 changes: 15 additions & 4 deletions importer/helpers/dagbuilder.go
@@ -165,10 +165,21 @@ func (db *DagBuilderHelper) GetNextDataNode() (*UnixfsNode, error) {
}

if db.rawLeaves {
- return &UnixfsNode{
- rawnode: dag.NewRawNode(data),
- raw: true,
- }, nil
+ if db.prefix == nil {
+ return &UnixfsNode{
+ rawnode: dag.NewRawNode(data),
+ raw: true,
+ }, nil
+ } else {
+ rawnode, err := dag.NewRawNodeWPrefix(data, *db.prefix)
+ if err != nil {
+ return nil, err
+ }
+ return &UnixfsNode{
+ rawnode: rawnode,
+ raw: true,
+ }, nil
+ }
} else {
blk := db.NewUnixfsBlock()
blk.SetData(data)
2 changes: 1 addition & 1 deletion merkledag/merkledag.go
@@ -116,7 +116,7 @@ func decodeBlock(b blocks.Block) (node.Node, error) {
decnd.Prefix = b.Cid().Prefix()
return decnd, nil
case cid.Raw:
- return NewRawNode(b.RawData()), nil
+ return NewRawNodeWPrefix(b.RawData(), b.Cid().Prefix())
case cid.DagCBOR:
return ipldcbor.Decode(b.RawData())
default:
20 changes: 20 additions & 0 deletions merkledag/raw.go
@@ -12,6 +12,8 @@ type RawNode struct {
blocks.Block
}

// NewRawNode creates a RawNode using the default sha2-256 hash
// function.
func NewRawNode(data []byte) *RawNode {
h := u.Hash(data)
c := cid.NewCidV1(cid.Raw, h)
@@ -20,6 +22,24 @@ func NewRawNode(data []byte) *RawNode {
return &RawNode{blk}
}

// NewRawNodeWPrefix creates a RawNode with the hash function
// specified in prefix.
func NewRawNodeWPrefix(data []byte, prefix cid.Prefix) (*RawNode, error) {
prefix.Codec = cid.Raw
if prefix.Version == 0 {
prefix.Version = 1
}
c, err := prefix.Sum(data)
if err != nil {
return nil, err
}
blk, err := blocks.NewBlockWithCid(data, c)
if err != nil {
return nil, err
}
return &RawNode{blk}, nil
}

func (rn *RawNode) Links() []*node.Link {
return nil
}
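
As a usage sketch (not part of the diff): calling the new constructor with a prefix that selects blake2b-256. Here `dag` stands for this merkledag package as it is aliased in dagbuilder.go above, and the go-cid/go-multihash imports are written with their github.com paths rather than the in-tree gx hashes.

```go
import (
	cid "github.com/ipfs/go-cid"
	mh "github.com/multiformats/go-multihash"

	dag "github.com/ipfs/go-ipfs/merkledag"
)

// rawLeafBlake2b builds a raw leaf whose CID uses blake2b-256.
// NewRawNodeWPrefix overwrites the codec with cid.Raw and bumps a
// version-0 prefix to version 1, so only the hash settings matter here.
func rawLeafBlake2b(data []byte) (*dag.RawNode, error) {
	prefix := cid.Prefix{
		Version:  1,
		Codec:    cid.Raw,
		MhType:   mh.Names["blake2b-256"],
		MhLength: -1, // default digest length for blake2b-256 (32 bytes)
	}
	return dag.NewRawNodeWPrefix(data, prefix)
}
```
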
18 changes: 18 additions & 0 deletions test/sharness/t0040-add-and-cat.sh
@@ -403,6 +403,10 @@ MARS="zdj7WWx6fGNrNGkdpkuTAxCjKbQ1pPtarqA6VQhedhLTZu34J"
VENUS="zdj7WbB1BUF8WejmVpQCmMLd1RbPnxJtvAj1Lep6eTmXRFbrz"
add_directory '--cid-version=1 --raw-leaves=false'

PLANETS="zDMZof1kqxDAx9myQbXsyWwyWP9qRPsXsWH7XuTz6isT7Rh1S6nM"
MARS="zCT5htkdz1ZBHYVQXFQn51ngPXLVqaHSWoae87V1d6e9qWpSAjXw"
VENUS="zCT5htke5JcdoMM4WhmUKXWf2QC3TnQToqGZHH1WsZERv6kPhFPg"
add_directory '--hash=blake2b-256'

test_expect_success "'ipfs add -rn' succeeds" '
mkdir -p mountdir/moons/jupiter &&
@@ -446,12 +450,26 @@ test_add_cat_5MB --cid-version=1 "zdj7WiiaedqVBXjX4SNqx3jfuZideDqdLYnDzCDJ66JDMK"
# format (i.e. not raw)
test_add_cat_5MB '--cid-version=1 --raw-leaves=false' "zdj7WfgEsj897BBZj2mcfsRLhaPZcCixPV2G7DkWgF1Wdr64P"

# note: --hash=blake2b-256 implies --cid-version=1 which implies --raw-leaves=true
# the specified hash represents the leaf nodes stored as raw leaves and
# encoded with the blake2b-256 hash function
test_add_cat_5MB '--hash=blake2b-256' "zDMZof1kuxn7ebvKyvmkYLPvocSvFYxxAWT1yQBN1wWiXXr7w5mY"
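
By way of illustration (the file name below is a placeholder), the flag interaction the comments describe looks like this on the command line:

```sh
# --hash alone switches to CIDv1, which in turn defaults raw-leaves to true
ipfs add --hash=blake2b-256 somefile.dat

# keep protobuf-framed leaf nodes while still hashing with blake2b-256
ipfs add --hash=blake2b-256 --raw-leaves=false somefile.dat
```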

# the specified hash represents the leaf nodes stored as protobuf nodes and
# encoded with the blake2b-256 hash function
test_add_cat_5MB '--hash=blake2b-256 --raw-leaves=false' "zDMZof1krz3SFTyhboRyWZyUP2qNgVdn9wjtaX211aHJ8WgeyT9v"

test_add_cat_expensive "" "QmU9SWAPPmNEKZB8umYMmjYvN7VyHqABNvdA6GUi4MMEz3"

# note: the specified hash implies that internal nodes are stored
# using CidV1 and leaves are stored using raw blocks
test_add_cat_expensive "--cid-version=1" "zdj7WcatQrtuE4WMkS4XsfsMixuQN2po4irkYhqxeJyW1wgCq"

# note: --hash=blake2b-256 implies --cid-version=1 which implies --raw-leaves=true
# the specified hash represents the leaf nodes stored as raw leaves and
# encoded with the blake2b-256 hash function
test_add_cat_expensive '--hash=blake2b-256' "zDMZof1kwndounDzQCANUHjiE3zt1mPEgx7RE3JTHoZrRRa79xcv"

test_add_named_pipe " Post http://$API_ADDR/api/v0/add?encoding=json&progress=true&r=true&stream-channels=true:"

test_add_pwd_is_symlink