Commit
Deal with open file handles if error occurs.
closes #3
deankarn committed May 21, 2017
1 parent 51aad6b commit d78f300
Showing 2 changed files with 65 additions and 13 deletions.
35 changes: 35 additions & 0 deletions cmd/goget/main.go
@@ -0,0 +1,35 @@
package main

import (
	"fmt"
	"io"
	"log"

	download "github.com/joeybloggs/go-download"
	"github.com/vbauerster/mpb"
)

func main() {

	progress := mpb.New().SetWidth(80)
	defer progress.Stop()

	options := &download.Options{
		Proxy: func(name string, download int, size int64, r io.Reader) io.Reader {
			bar := progress.AddBar(size).
				PrependName(fmt.Sprintf("%s-%d", name, download), 0, 0).
				PrependCounters("%3s / %3s", mpb.UnitBytes, 18, mpb.DwidthSync|mpb.DextraSpace).
				AppendPercentage(5, 0)

			return bar.ProxyReader(r)
		},
	}

	f, err := download.Open("https://storage.googleapis.com/golang/go1.8.1.src.tar.gz", options)
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	// f implements io.Reader, write file somewhere or do some other sort of work with it
}
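Usage note (not part of the commit): since the returned f implements io.Reader, the final comment in the example can be realized by streaming the download straight to disk. A minimal sketch continuing the main function above, assuming a hypothetical destination filename and an additional "os" import:

	// hypothetical destination file for the downloaded archive
	out, err := os.Create("go1.8.1.src.tar.gz")
	if err != nil {
		log.Fatal(err)
	}
	defer out.Close()

	// stream the downloaded bytes to disk
	if _, err := io.Copy(out, f); err != nil {
		log.Fatal(err)
	}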
43 changes: 30 additions & 13 deletions download.go
@@ -102,6 +102,7 @@ func OpenContext(ctx context.Context, url string, options *Options) (*File, erro
	}

	if err != nil {
		f.closeFileHandles()
		return nil, err
	}

@@ -266,15 +267,22 @@ FOR:

func (f *File) downloadPartial(ctx context.Context, resumeable bool, idx int, start, end int64, wg *sync.WaitGroup, ch chan<- partialResult) {

	defer wg.Done()
	var err error
	var fh *os.File

	fPath := filepath.Join(f.dir, strconv.Itoa(idx))
	defer func() {
		if err != nil && fh != nil {
			fh.Close()
		}

	var fh *os.File
	var err error
		wg.Done()
	}()

	fPath := filepath.Join(f.dir, strconv.Itoa(idx))

	if resumeable {
		var fi os.FileInfo

		fi, err = os.Stat(fPath)
		if os.IsNotExist(err) {
			fh, err = os.Create(fPath)
@@ -318,8 +326,9 @@ func (f *File) downloadPartial(ctx context.Context, resumeable bool, idx int, st
	}

	var client http.Client
	var req *http.Request

	req, err := http.NewRequest(http.MethodGet, f.url, nil)
	req, err = http.NewRequest(http.MethodGet, f.url, nil)
	if err != nil {
		select {
		case <-ctx.Done():
@@ -332,7 +341,9 @@ func (f *File) downloadPartial(ctx context.Context, resumeable bool, idx int, st

	req.Header.Add("Range", fmt.Sprintf("bytes=%d-%d", start, end))

	resp, err := client.Do(req)
	var resp *http.Response

	resp, err = client.Do(req)
	if err != nil {
		select {
		case <-ctx.Done():
@@ -343,9 +354,12 @@ func (f *File) downloadPartial(ctx context.Context, resumeable bool, idx int, st
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusPartialContent {

		err = &InvalidResponseCode{got: resp.StatusCode, expected: http.StatusPartialContent}

		select {
		case <-ctx.Done():
		case ch <- partialResult{idx: idx, err: &InvalidResponseCode{got: resp.StatusCode, expected: http.StatusPartialContent}}:
		case ch <- partialResult{idx: idx, err: err}:
		}
		return
	}
@@ -358,6 +372,7 @@ func (f *File) downloadPartial(ctx context.Context, resumeable bool, idx int, st

	_, err = io.Copy(fh, read)
	if err != nil {

		select {
		case <-ctx.Done():
		case ch <- partialResult{idx: idx, err: err}:
@@ -391,16 +406,18 @@ func (f *File) Stat() (os.FileInfo, error) {
// Close closes the File(s), rendering it unusable for I/O. It returns an error, if any.
func (f *File) Close() error {

	// close readers from Download function
	f.closeFileHandles()
	f.modTime = defaultTime

	return os.RemoveAll(f.dir)
}

func (f *File) closeFileHandles() {
	for i := 0; i < len(f.readers); i++ {
		if f.readers[i] != nil { // possible if cancelled
		if f.readers[i] != nil { // possible if cancelled or error occurred
			f.readers[i].Close()
		}
	}

	f.modTime = defaultTime

	return os.RemoveAll(f.dir)
}

func (f *File) generateHash() string {
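The shape of the fix in downloadPartial, distilled: declare err and the file handle at the top of the function, and let one deferred closure both close the partially written file when an error occurred and signal the WaitGroup. A standalone sketch of that pattern under assumed names (openPart is hypothetical, not the library's code):

package main

import (
	"fmt"
	"log"
	"os"
	"path/filepath"
	"sync"
)

// openPart stands in for one partial download. On success the handle is left
// open for the caller (as the library keeps it in f.readers); the deferred
// cleanup closes it only when an error occurred along the way.
func openPart(path string, data []byte, wg *sync.WaitGroup) (*os.File, error) {
	var err error
	var fh *os.File

	defer func() {
		if err != nil && fh != nil {
			fh.Close() // don't leak the handle on the error path
		}
		wg.Done()
	}()

	fh, err = os.Create(path)
	if err != nil {
		return nil, err
	}

	if _, err = fh.Write(data); err != nil {
		return nil, err // the deferred cleanup closes fh
	}

	return fh, nil
}

func main() {
	var wg sync.WaitGroup
	wg.Add(1)

	fh, err := openPart(filepath.Join(os.TempDir(), "part-0"), []byte("example"), &wg)
	wg.Wait()
	if err != nil {
		log.Fatal(err)
	}
	defer fh.Close()

	fmt.Println("wrote", fh.Name())
}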
