
Commit 1bf854c

Rename item info hash attribute to checksum
See #12
rafaeljusto committed May 9, 2017
1 parent 877315a · commit 1bf854c
Showing 4 changed files with 107 additions and 107 deletions.
12 changes: 6 additions & 6 deletions internal/archive/archive.go
@@ -24,9 +24,9 @@ type ItemInfoStatus string
 // ItemInfo stores all the necessary information to track the archive's item
 // state.
 type ItemInfo struct {
-	ID     string
-	Status ItemInfoStatus
-	Hash   string // TODO: Rename to Checksum?
+	ID       string
+	Status   ItemInfoStatus
+	Checksum string
 }
 
 // Info stores extra information from the archive's items for allowing
@@ -50,9 +50,9 @@ func (a Info) MergeLast(last Info) {
 	for lastFilename, lastItemInfo := range last {
 		if _, ok := a[lastFilename]; !ok && lastItemInfo.Status != ItemInfoStatusDeleted {
 			a[lastFilename] = ItemInfo{
-				ID:     lastItemInfo.ID,
-				Status: ItemInfoStatusDeleted,
-				Hash:   lastItemInfo.Hash,
+				ID:       lastItemInfo.ID,
+				Status:   ItemInfoStatusDeleted,
+				Checksum: lastItemInfo.Checksum,
 			}
 		}
 	}
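
For readers skimming the diff, here is a hypothetical test-style sketch of how MergeLast behaves after this rename, based only on the hunk above: entries present in the last archive but absent from the current one are re-added with the deleted status, carrying over their ID and Checksum. The import path, test name, and placeholder values below are assumptions for illustration, not part of this commit.

package archive_test

import (
	"testing"

	// assumed import path, inferred from the "internal/archive" directory; adjust to the real module path
	"github.com/rafaeljusto/toglacier/internal/archive"
)

func TestMergeLastSketch(t *testing.T) {
	// the current archive no longer contains "file2", which existed in the last one
	current := archive.Info{
		"/backup/file1": archive.ItemInfo{
			ID:       "reference1",
			Status:   archive.ItemInfoStatusUnmodified,
			Checksum: "checksum-1", // placeholder value
		},
	}
	last := archive.Info{
		"/backup/file1": archive.ItemInfo{
			ID:       "reference1",
			Status:   archive.ItemInfoStatusNew,
			Checksum: "checksum-1", // placeholder value
		},
		"/backup/file2": archive.ItemInfo{
			ID:       "reference2",
			Status:   archive.ItemInfoStatusNew,
			Checksum: "checksum-2", // placeholder value
		},
	}

	current.MergeLast(last)

	// "file2" should now be tracked as deleted, with ID and Checksum carried over
	item, ok := current["/backup/file2"]
	if !ok || item.Status != archive.ItemInfoStatusDeleted || item.Checksum != "checksum-2" {
		t.Errorf("unexpected merge result: %+v", item)
	}
}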
16 changes: 8 additions & 8 deletions internal/archive/tar_builder.go
@@ -189,7 +189,7 @@ func (t TARBuilder) build(lastArchiveInfo Info, tarArchive *tar.Writer, baseDir,
 }
 
 func (t TARBuilder) generateItemInfo(path string, lastArchiveInfo Info) (itemInfo ItemInfo, add bool, err error) {
-	encodedHash, err := t.fileHash(path)
+	encodedChecksum, err := t.fileChecksum(path)
 	if err != nil {
 		return itemInfo, true, errors.WithStack(err)
 	}
@@ -200,10 +200,10 @@ func (t TARBuilder) generateItemInfo(path string, lastArchiveInfo Info) (itemInf
 	if !ok {
 		add = true
 		itemInfo.Status = ItemInfoStatusNew
-		itemInfo.Hash = encodedHash
+		itemInfo.Checksum = encodedChecksum
 		t.logger.Debugf("archive: path “%s” is new since the last archive", path)
 
-	} else if encodedHash == itemInfo.Hash {
+	} else if encodedChecksum == itemInfo.Checksum {
 		add = false // don't need to add an unmodified file to the tarball
 		itemInfo.Status = ItemInfoStatusUnmodified
 		t.logger.Debugf("archive: path “%s” unmodified since the last archive", path)
@@ -212,14 +212,14 @@ func (t TARBuilder) generateItemInfo(path string, lastArchiveInfo Info) (itemInf
 		add = true
 		itemInfo.ID = ""
 		itemInfo.Status = ItemInfoStatusModified
-		itemInfo.Hash = encodedHash
+		itemInfo.Checksum = encodedChecksum
 		t.logger.Debugf("archive: path “%s” was modified since the last archive", path)
 	}
 
 	return
 }
 
-func (t TARBuilder) fileHash(filename string) (string, error) {
+func (t TARBuilder) fileChecksum(filename string) (string, error) {
 	file, err := os.Open(filename)
 	if err != nil {
 		return "", errors.WithStack(newPathError(filename, PathErrorCodeOpeningFile, err))
@@ -233,9 +233,9 @@ func (t TARBuilder) fileHash(filename string) (string, error) {
 		return "", errors.WithStack(newPathError(filename, PathErrorCodeSHA256, err))
 	}
 
-	encodedHash := base64.StdEncoding.EncodeToString(hash.Sum(nil))
-	t.logger.Debugf("archive: path “%s” hash calculated over %d bytes: %s", filename, written, encodedHash)
-	return encodedHash, nil
+	encodedChecksum := base64.StdEncoding.EncodeToString(hash.Sum(nil))
+	t.logger.Debugf("archive: path “%s” hash calculated over %d bytes: %s", filename, written, encodedChecksum)
+	return encodedChecksum, nil
 }
 
 func (t TARBuilder) addInfo(archiveInfo Info, tarArchive *tar.Writer, baseDir string) error {
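
For context, a self-contained sketch of the checksum scheme the hunks above suggest: SHA-256 over the file contents, base64-encoded. The hashing loop itself is not visible in this diff, so the sha256.New/io.Copy plumbing below is an assumption, and the real code's error wrapping (newPathError, errors.WithStack) and logger are simplified away.

package main

import (
	"crypto/sha256"
	"encoding/base64"
	"fmt"
	"io"
	"log"
	"os"
)

// fileChecksum mirrors what the diff shows: hash the file, then
// base64-encode the SHA-256 sum. Error handling is simplified here.
func fileChecksum(filename string) (string, error) {
	file, err := os.Open(filename)
	if err != nil {
		return "", err
	}
	defer file.Close()

	hash := sha256.New()
	written, err := io.Copy(hash, file) // assumed: the part elided from the diff
	if err != nil {
		return "", err
	}

	encodedChecksum := base64.StdEncoding.EncodeToString(hash.Sum(nil))
	fmt.Printf("checksum calculated over %d bytes: %s\n", written, encodedChecksum)
	return encodedChecksum, nil
}

func main() {
	if len(os.Args) != 2 {
		log.Fatal("usage: filechecksum <path>")
	}
	checksum, err := fileChecksum(os.Args[1])
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(checksum)
}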
102 changes: 51 additions & 51 deletions internal/archive/tar_builder_test.go
@@ -42,19 +42,19 @@ func TestTARBuilder_Build(t *testing.T) {
 			lastArchiveInfo: func(backupPaths []string) archive.Info {
 				return archive.Info{
 					path.Join(backupPaths[0], "file1"): {
-						ID:     "reference1",
-						Status: archive.ItemInfoStatusNew,
-						Hash:   "+pJSD0LPX/FSn3AwOnGKsCXJSMN3o9JPyWzVv4RYqpU=",
+						ID:       "reference1",
+						Status:   archive.ItemInfoStatusNew,
+						Checksum: "+pJSD0LPX/FSn3AwOnGKsCXJSMN3o9JPyWzVv4RYqpU=",
 					},
 					path.Join(backupPaths[0], "file2"): {
-						ID:     "reference2",
-						Status: archive.ItemInfoStatusNew,
-						Hash:   "abcdefghijklmnopqrstuvxz1234567890ABCDEFGHI=",
+						ID:       "reference2",
+						Status:   archive.ItemInfoStatusNew,
+						Checksum: "abcdefghijklmnopqrstuvxz1234567890ABCDEFGHI=",
 					},
 					path.Join(backupPaths[0], "file3"): {
-						ID:     "reference3",
-						Status: archive.ItemInfoStatusDeleted,
-						Hash:   "sFwN7pdLHnHZHCmTuhFWYvYTYz9g8XzISkAR1+UOS5c=",
+						ID:       "reference3",
+						Status:   archive.ItemInfoStatusDeleted,
+						Checksum: "sFwN7pdLHnHZHCmTuhFWYvYTYz9g8XzISkAR1+UOS5c=",
 					},
 				}
 			},
@@ -139,17 +139,17 @@ func TestTARBuilder_Build(t *testing.T) {
 			expectedArchiveInfo: func(backupPaths []string) archive.Info {
 				return archive.Info(map[string]archive.ItemInfo{
 					path.Join(backupPaths[0], "file1"): {
-						ID:     "reference1",
-						Status: archive.ItemInfoStatusUnmodified,
-						Hash:   "+pJSD0LPX/FSn3AwOnGKsCXJSMN3o9JPyWzVv4RYqpU=",
+						ID:       "reference1",
+						Status:   archive.ItemInfoStatusUnmodified,
+						Checksum: "+pJSD0LPX/FSn3AwOnGKsCXJSMN3o9JPyWzVv4RYqpU=",
 					},
 					path.Join(backupPaths[0], "file2"): {
-						Status: archive.ItemInfoStatusModified,
-						Hash:   "xZzITM+6yGsa9masWjGdi+yAA0DlqCzTf/1795fy5Pk=",
+						Status:   archive.ItemInfoStatusModified,
+						Checksum: "xZzITM+6yGsa9masWjGdi+yAA0DlqCzTf/1795fy5Pk=",
 					},
 					path.Join(backupPaths[0], "dir1", "file3"): {
-						Status: archive.ItemInfoStatusNew,
-						Hash:   "sFwN7pdLHnHZHCmTuhFWYvYTYz9g8XzISkAR1+UOS5c=",
+						Status:   archive.ItemInfoStatusNew,
+						Checksum: "sFwN7pdLHnHZHCmTuhFWYvYTYz9g8XzISkAR1+UOS5c=",
 					},
 				})
 			},
@@ -165,19 +165,19 @@ func TestTARBuilder_Build(t *testing.T) {
 			lastArchiveInfo: func(backupPaths []string) archive.Info {
 				return archive.Info{
 					path.Join(backupPaths[0], "file1"): {
-						ID:     "reference1",
-						Status: archive.ItemInfoStatusNew,
-						Hash:   "+pJSD0LPX/FSn3AwOnGKsCXJSMN3o9JPyWzVv4RYqpU=",
+						ID:       "reference1",
+						Status:   archive.ItemInfoStatusNew,
+						Checksum: "+pJSD0LPX/FSn3AwOnGKsCXJSMN3o9JPyWzVv4RYqpU=",
 					},
 					path.Join(backupPaths[0], "file2"): {
-						ID:     "reference2",
-						Status: archive.ItemInfoStatusNew,
-						Hash:   "xZzITM+6yGsa9masWjGdi+yAA0DlqCzTf/1795fy5Pk=",
+						ID:       "reference2",
+						Status:   archive.ItemInfoStatusNew,
+						Checksum: "xZzITM+6yGsa9masWjGdi+yAA0DlqCzTf/1795fy5Pk=",
 					},
 					path.Join(backupPaths[0], "dir1", "file3"): {
-						ID:     "reference3",
-						Status: archive.ItemInfoStatusNew,
-						Hash:   "sFwN7pdLHnHZHCmTuhFWYvYTYz9g8XzISkAR1+UOS5c=",
+						ID:       "reference3",
+						Status:   archive.ItemInfoStatusNew,
+						Checksum: "sFwN7pdLHnHZHCmTuhFWYvYTYz9g8XzISkAR1+UOS5c=",
 					},
 				}
 			},
@@ -330,32 +330,32 @@ func TestTARBuilder_Build(t *testing.T) {
 			expectedArchiveInfo: func(backupPaths []string) archive.Info {
 				return archive.Info(map[string]archive.ItemInfo{
 					path.Join(backupPaths[0], "file1"): {
-						Status: archive.ItemInfoStatusNew,
-						Hash:   "+pJSD0LPX/FSn3AwOnGKsCXJSMN3o9JPyWzVv4RYqpU=",
+						Status:   archive.ItemInfoStatusNew,
+						Checksum: "+pJSD0LPX/FSn3AwOnGKsCXJSMN3o9JPyWzVv4RYqpU=",
 					},
 					path.Join(backupPaths[0], "file2"): {
-						Status: archive.ItemInfoStatusNew,
-						Hash:   "xZzITM+6yGsa9masWjGdi+yAA0DlqCzTf/1795fy5Pk=",
+						Status:   archive.ItemInfoStatusNew,
+						Checksum: "xZzITM+6yGsa9masWjGdi+yAA0DlqCzTf/1795fy5Pk=",
 					},
 					path.Join(backupPaths[0], "dir1", "file3"): {
-						Status: archive.ItemInfoStatusNew,
-						Hash:   "sFwN7pdLHnHZHCmTuhFWYvYTYz9g8XzISkAR1+UOS5c=",
+						Status:   archive.ItemInfoStatusNew,
+						Checksum: "sFwN7pdLHnHZHCmTuhFWYvYTYz9g8XzISkAR1+UOS5c=",
 					},
 					path.Join(backupPaths[1], "file1"): {
-						Status: archive.ItemInfoStatusNew,
-						Hash:   "jtq4nMeFuT6h3DIgwFQ4sEQUlA/E9YVFlWkY5B6pxNw=",
+						Status:   archive.ItemInfoStatusNew,
+						Checksum: "jtq4nMeFuT6h3DIgwFQ4sEQUlA/E9YVFlWkY5B6pxNw=",
 					},
 					path.Join(backupPaths[1], "file4"): {
-						Status: archive.ItemInfoStatusNew,
-						Hash:   "Rk2kHsOWFY5FFhsZrR5ykkCwc9WoZCWk/hEKbGhcCac=",
+						Status:   archive.ItemInfoStatusNew,
+						Checksum: "Rk2kHsOWFY5FFhsZrR5ykkCwc9WoZCWk/hEKbGhcCac=",
 					},
 					path.Join(backupPaths[1], "file5"): {
-						Status: archive.ItemInfoStatusNew,
-						Hash:   "VR88iTpGdm/q+zl26Ko0GPkgZOtZy0R0/zdoFK6Y3Uw=",
+						Status:   archive.ItemInfoStatusNew,
+						Checksum: "VR88iTpGdm/q+zl26Ko0GPkgZOtZy0R0/zdoFK6Y3Uw=",
 					},
 					path.Join(backupPaths[1], "dir2", "file6"): {
-						Status: archive.ItemInfoStatusNew,
-						Hash:   "Js5UlbJQRd2Ve3Nmoo7wfctK38eFEcHhlOUdApQKwnQ=",
+						Status:   archive.ItemInfoStatusNew,
+						Checksum: "Js5UlbJQRd2Ve3Nmoo7wfctK38eFEcHhlOUdApQKwnQ=",
 					},
 				})
 			},
@@ -428,8 +428,8 @@ func TestTARBuilder_Build(t *testing.T) {
 			expectedArchiveInfo: func(backupPaths []string) archive.Info {
 				return archive.Info(map[string]archive.ItemInfo{
 					path.Join(backupPaths[0]): {
-						Status: archive.ItemInfoStatusNew,
-						Hash:   "ih/0rvVdKZfnQdoKwTj5gbNVE+Re3o7D+woelvakOiE=",
+						Status:   archive.ItemInfoStatusNew,
+						Checksum: "ih/0rvVdKZfnQdoKwTj5gbNVE+Re3o7D+woelvakOiE=",
 					},
 				})
 			},
@@ -730,13 +730,13 @@ func TestTARBuilder_Extract(t *testing.T) {
 
 	archiveInfo := archive.Info{
 		file1: archive.ItemInfo{
-			ID:     "AWS123456",
-			Status: archive.ItemInfoStatusModified,
-			Hash:   "34dd713af2cf182e27310b36bf26254d5c75335f76a8f9ca4e0d0428c2bbf709",
+			ID:       "AWS123456",
+			Status:   archive.ItemInfoStatusModified,
+			Checksum: "34dd713af2cf182e27310b36bf26254d5c75335f76a8f9ca4e0d0428c2bbf709",
 		},
 		file2: archive.ItemInfo{
-			Status: archive.ItemInfoStatusNew,
-			Hash:   "d650616996f255dc8ecda15eca765a490c5b52f3fe2a3f184f38b307dcd57b51",
+			Status:   archive.ItemInfoStatusNew,
+			Checksum: "d650616996f255dc8ecda15eca765a490c5b52f3fe2a3f184f38b307dcd57b51",
 		},
 	}
 
@@ -776,13 +776,13 @@ func TestTARBuilder_Extract(t *testing.T) {
 	}
 	scenario.expectedArchiveInfo = archive.Info{
 		file1: archive.ItemInfo{
-			ID:     "AWS123456",
-			Status: archive.ItemInfoStatusModified,
-			Hash:   "34dd713af2cf182e27310b36bf26254d5c75335f76a8f9ca4e0d0428c2bbf709",
+			ID:       "AWS123456",
+			Status:   archive.ItemInfoStatusModified,
+			Checksum: "34dd713af2cf182e27310b36bf26254d5c75335f76a8f9ca4e0d0428c2bbf709",
 		},
 		file2: archive.ItemInfo{
-			Status: archive.ItemInfoStatusNew,
-			Hash:   "d650616996f255dc8ecda15eca765a490c5b52f3fe2a3f184f38b307dcd57b51",
+			Status:   archive.ItemInfoStatusNew,
+			Checksum: "d650616996f255dc8ecda15eca765a490c5b52f3fe2a3f184f38b307dcd57b51",
 		},
 	}
 	scenario.clean = func() {
