local: don't calculate any hashes by default #3419
Before this change, if the caller didn't provide a hint, we would
calculate all hashes for reads and writes.

The new whirlpool hash is particularly expensive and that has become noticeable.

Now we don't calculate any hashes on upload or download unless hints are provided.

This means that some operations may run slower, and those cases will need to be discovered!

It does not affect anything calling operations.Copy, which already puts
the correct hints in.
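
For callers other than operations.Copy, hashing can still be requested explicitly by passing an fs.HashesOption hint to Open. Below is a minimal sketch of that, not taken from this commit; the github.com/rclone/rclone import path and the /tmp/src/file.txt test file are assumptions for illustration.

package main

import (
	"context"
	"fmt"
	"io"
	"io/ioutil"
	"log"

	_ "github.com/rclone/rclone/backend/local"
	"github.com/rclone/rclone/fs"
	"github.com/rclone/rclone/fs/hash"
)

func main() {
	ctx := context.Background()

	// Assumed local directory and file, purely for the sake of the example.
	f, err := fs.NewFs("/tmp/src")
	if err != nil {
		log.Fatal(err)
	}
	o, err := f.NewObject(ctx, "file.txt")
	if err != nil {
		log.Fatal(err)
	}

	// Without this hint the local backend now skips hashing entirely.
	in, err := o.Open(ctx, &fs.HashesOption{Hashes: hash.NewHashSet(hash.MD5)})
	if err != nil {
		log.Fatal(err)
	}
	if _, err := io.Copy(ioutil.Discard, in); err != nil {
		log.Fatal(err)
	}
	if err := in.Close(); err != nil {
		log.Fatal(err)
	}

	// With the hint above, the MD5 was accumulated while the data was read,
	// so this lookup should not need another pass over the file.
	md5sum, err := o.Hash(ctx, hash.MD5)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("md5:", md5sum)
}

With the hint the backend computes only the requested hashes during the read; without it, nothing is hashed.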
ncw committed Aug 14, 2019
1 parent 106cf18 commit 402aaca
Showing 1 changed file with 28 additions and 18 deletions.
46 changes: 28 additions & 18 deletions backend/local/local.go
@@ -917,15 +917,20 @@ func (o *Object) openTranslatedLink(offset, limit int64) (lrc io.ReadCloser, err
 // Open an object for read
 func (o *Object) Open(ctx context.Context, options ...fs.OpenOption) (in io.ReadCloser, err error) {
 	var offset, limit int64 = 0, -1
-	hashes := hash.Supported
+	var hasher *hash.MultiHasher
 	for _, option := range options {
 		switch x := option.(type) {
 		case *fs.SeekOption:
 			offset = x.Offset
 		case *fs.RangeOption:
 			offset, limit = x.Decode(o.size)
 		case *fs.HashesOption:
-			hashes = x.Hashes
+			if x.Hashes.Count() > 0 {
+				hasher, err = hash.NewMultiHasherTypes(x.Hashes)
+				if err != nil {
+					return nil, err
+				}
+			}
 		default:
 			if option.Mandatory() {
 				fs.Logf(o, "Unsupported mandatory option: %v", option)
@@ -949,15 +954,15 @@ func (o *Object) Open(ctx context.Context, options ...fs.OpenOption) (in io.Read
 		// don't attempt to make checksums
 		return wrappedFd, err
 	}
-	hash, err := hash.NewMultiHasherTypes(hashes)
-	if err != nil {
-		return nil, err
+	if hasher == nil {
+		// no need to wrap since we don't need checksums
+		return wrappedFd, nil
 	}
-	// Update the md5sum as we go along
+	// Update the hashes as we go along
 	in = &localOpenFile{
 		o:    o,
 		in:   wrappedFd,
-		hash: hash,
+		hash: hasher,
 		fd:   fd,
 	}
 	return in, nil
@@ -979,18 +984,23 @@ func (nwc nopWriterCloser) Close() error {
 }
 
 // Update the object from in with modTime and size
-func (o *Object) Update(ctx context.Context, in io.Reader, src fs.ObjectInfo, options ...fs.OpenOption) error {
+func (o *Object) Update(ctx context.Context, in io.Reader, src fs.ObjectInfo, options ...fs.OpenOption) (err error) {
 	var out io.WriteCloser
+	var hasher *hash.MultiHasher
 
-	hashes := hash.Supported
 	for _, option := range options {
 		switch x := option.(type) {
 		case *fs.HashesOption:
-			hashes = x.Hashes
+			if x.Hashes.Count() > 0 {
+				hasher, err = hash.NewMultiHasherTypes(x.Hashes)
+				if err != nil {
+					return err
+				}
+			}
 		}
 	}
 
-	err := o.mkdirAll()
+	err = o.mkdirAll()
 	if err != nil {
 		return err
 	}
@@ -1015,11 +1025,9 @@ func (o *Object) Update(ctx context.Context, in io.Reader, src fs.ObjectInfo, op
 	}
 
 	// Calculate the hash of the object we are reading as we go along
-	hash, err := hash.NewMultiHasherTypes(hashes)
-	if err != nil {
-		return err
+	if hasher != nil {
+		in = io.TeeReader(in, hasher)
 	}
-	in = io.TeeReader(in, hash)
 
 	_, err = io.Copy(out, in)
 	closeErr := out.Close()
@@ -1055,9 +1063,11 @@ func (o *Object) Update(ctx context.Context, in io.Reader, src fs.ObjectInfo, op
 	}
 
 	// All successful so update the hashes
-	o.fs.objectHashesMu.Lock()
-	o.hashes = hash.Sums()
-	o.fs.objectHashesMu.Unlock()
+	if hasher != nil {
+		o.fs.objectHashesMu.Lock()
+		o.hashes = hasher.Sums()
+		o.fs.objectHashesMu.Unlock()
+	}
 
 	// Set the mtime
 	err = o.SetModTime(ctx, src.ModTime(ctx))
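For reference, the mechanism that now runs only when a hint is supplied is a hash.MultiHasher fed through io.TeeReader, as in the Update hunk above. A standalone sketch of that pattern follows; it is not taken from the rclone source, and the MD5/SHA1 hash set and the in-memory input are illustrative assumptions.

package main

import (
	"bytes"
	"fmt"
	"io"
	"io/ioutil"
	"log"

	"github.com/rclone/rclone/fs/hash"
)

func main() {
	// Build a hasher for just the types the caller asked for.
	hasher, err := hash.NewMultiHasherTypes(hash.NewHashSet(hash.MD5, hash.SHA1))
	if err != nil {
		log.Fatal(err)
	}

	// The hasher taps the stream via TeeReader, so the sums are computed
	// in the same pass that moves the data.
	var in io.Reader = bytes.NewBufferString("some file contents")
	in = io.TeeReader(in, hasher)

	if _, err := io.Copy(ioutil.Discard, in); err != nil {
		log.Fatal(err)
	}
	for ty, sum := range hasher.Sums() {
		fmt.Printf("%v: %s\n", ty, sum)
	}
}

The sums fall out of the same pass that copies the data; what this commit avoids is the CPU cost of computing every supported hash (whirlpool included) when nobody asked for them.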
