backup: add flag --summary-filename #3586

Closed
wants to merge 1 commit
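
For context, based on the flag definition in the diff below, usage would presumably look something like:

    restic backup --summary-filename /path/to/summary.json /home/user

i.e. after each run the JSON summary object is appended to the named file (the path here is only an example, not taken from the PR).
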
19 changes: 18 additions & 1 deletion cmd/restic/cmd_backup.go
@@ -4,6 +4,7 @@ import (
"bufio"
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"io/ioutil"
@@ -81,6 +82,7 @@ type BackupOptions struct {
ExcludeLargerThan string
Stdin bool
StdinFilename string
SummaryFilename string
Tags restic.TagLists
Host string
FilesFrom []string
@@ -115,6 +117,7 @@ func init() {
f.StringVar(&backupOptions.ExcludeLargerThan, "exclude-larger-than", "", "max `size` of the files to be backed up (allowed suffixes: k/K, m/M, g/G, t/T)")
f.BoolVar(&backupOptions.Stdin, "stdin", false, "read backup from stdin")
f.StringVar(&backupOptions.StdinFilename, "stdin-filename", "stdin", "`filename` to use when reading from stdin")
f.StringVar(&backupOptions.SummaryFilename, "summary-filename", "", "`filename` to append summary data to")
f.Var(&backupOptions.Tags, "tag", "add `tags` for the new snapshot in the format `tag[,tag,...]` (can be specified multiple times)")

f.StringVarP(&backupOptions.Host, "host", "H", "", "set the `hostname` for the snapshot manually. To prevent an expensive rescan use the \"parent\" flag")
@@ -710,7 +713,21 @@ func runBackup(opts BackupOptions, gopts GlobalOptions, term *termstatus.Termina
if !gopts.JSON && !opts.DryRun {
progressPrinter.P("snapshot %s saved\n", id.Str())
}
if !success {
if opts.SummaryFilename != "" {
sf, err := os.OpenFile(opts.SummaryFilename, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0666)

Review comment (Member):
I'd prefer to extract that code into a separate function. runBackup is already far too long.

if err != nil {
return errors.Errorf("%s: appending to summary failed: %v", opts.SummaryFilename, err)
}
buf := new(bytes.Buffer)
if err := json.NewEncoder(buf).Encode(progressReporter.FinishSummary(id)); err != nil {
return errors.Errorf("encoding summary failed: %v", err)
}
fmt.Fprintf(sf, "%s", buf.String())

Review comment (Member):
Please use sf.Write(buf.Bytes()) instead. There's no need for format string interpolation and multiple String <-> []byte conversions.
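
That is, the write would become a plain sf.Write(buf.Bytes()), dropping the format string and the extra []byte <-> string conversions; a sketch that folds this and the extraction suggestion together follows at the end of this file's changes.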

if err := sf.Close(); err != nil {
return errors.Errorf("%s: closing file failed: %v", opts.SummaryFilename, err)
}
}
if !success {
return ErrInvalidSourceData
}
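
Picking up the two review comments above: a rough sketch of the suggested extraction could look like the following (the helper name writeSummaryFile and its exact signature are hypothetical, not part of this PR; encoding straight into the file also removes the buffer the second comment objects to):

func writeSummaryFile(filename string, summary interface{}) error {
	sf, err := os.OpenFile(filename, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0666)
	if err != nil {
		return errors.Errorf("%s: appending to summary failed: %v", filename, err)
	}
	// Encode straight into the file; json.Encoder.Encode appends a trailing
	// newline, so each backup run appends exactly one JSON object per line.
	if err := json.NewEncoder(sf).Encode(summary); err != nil {
		_ = sf.Close()
		return errors.Errorf("encoding summary failed: %v", err)
	}
	if err := sf.Close(); err != nil {
		return errors.Errorf("%s: closing file failed: %v", filename, err)
	}
	return nil
}

The block in runBackup would then reduce to something like:

if opts.SummaryFilename != "" {
	if err := writeSummaryFile(opts.SummaryFilename, progressReporter.FinishSummary(id)); err != nil {
		return err
	}
}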

26 changes: 7 additions & 19 deletions internal/ui/backup/json.go
@@ -172,7 +172,12 @@ func (b *JSONProgress) ReportTotal(item string, start time.Time, s archiver.Scan

// Finish prints the finishing messages.
func (b *JSONProgress) Finish(snapshotID restic.ID, start time.Time, summary *Summary, dryRun bool) {
b.print(summaryOutput{
b.print(b.FinishSummary(snapshotID, start, summary, dryRun))
}

// FinishSummary returns the summary as a struct
func (b *JSONProgress) FinishSummary(snapshotID restic.ID, start time.Time, summary *Summary, dryRun bool) summaryOutput {
return summaryOutput{
MessageType: "summary",
FilesNew: summary.Files.New,
FilesChanged: summary.Files.Changed,
@@ -188,7 +193,7 @@ func (b *JSONProgress) Finish(snapshotID restic.ID, start time.Time, summary *Su
TotalDuration: time.Since(start).Seconds(),
SnapshotID: snapshotID.Str(),
DryRun: dryRun,
})
}
}

// Reset no-op
@@ -225,20 +230,3 @@ type verboseUpdate struct {
TotalFiles uint `json:"total_files"`
}

type summaryOutput struct {
MessageType string `json:"message_type"` // "summary"
FilesNew uint `json:"files_new"`
FilesChanged uint `json:"files_changed"`
FilesUnmodified uint `json:"files_unmodified"`
DirsNew uint `json:"dirs_new"`
DirsChanged uint `json:"dirs_changed"`
DirsUnmodified uint `json:"dirs_unmodified"`
DataBlobs int `json:"data_blobs"`
TreeBlobs int `json:"tree_blobs"`
DataAdded uint64 `json:"data_added"`
TotalFilesProcessed uint `json:"total_files_processed"`
TotalBytesProcessed uint64 `json:"total_bytes_processed"`
TotalDuration float64 `json:"total_duration"` // in seconds
SnapshotID string `json:"snapshot_id"`
DryRun bool `json:"dry_run,omitempty"`
}
24 changes: 24 additions & 0 deletions internal/ui/backup/progress.go
@@ -19,6 +19,7 @@ type ProgressPrinter interface {
CompleteItem(messageType string, item string, previous, current *restic.Node, s archiver.ItemStats, d time.Duration)
ReportTotal(item string, start time.Time, s archiver.ScanStats)
Finish(snapshotID restic.ID, start time.Time, summary *Summary, dryRun bool)
FinishSummary(snapshotID restic.ID, start time.Time, summary *Summary, dryRun bool) summaryOutput
Reset()

// ui.StdioWrapper
@@ -50,6 +51,7 @@ type ProgressReporter interface {
Run(ctx context.Context) error
Error(item string, fi os.FileInfo, err error) error
Finish(snapshotID restic.ID)
FinishSummary(snapshotID restic.ID) summaryOutput
}

type Summary struct {
@@ -312,6 +314,11 @@ func (p *Progress) Finish(snapshotID restic.ID) {
<-p.closed
p.printer.Finish(snapshotID, p.start, p.summary, p.dry)
}
func (p *Progress) FinishSummary(snapshotID restic.ID) summaryOutput {
// wait for the status update goroutine to shut down
<-p.closed
return p.printer.FinishSummary(snapshotID, p.start, p.summary, p.dry)
}

// SetMinUpdatePause sets b.MinUpdatePause. It satisfies the
// ArchiveProgressReporter interface.
@@ -323,3 +330,20 @@ func (p *Progress) SetMinUpdatePause(d time.Duration) {
func (p *Progress) SetDryRun() {
p.dry = true
}
type summaryOutput struct {
MessageType string `json:"message_type"` // "summary"
FilesNew uint `json:"files_new"`
FilesChanged uint `json:"files_changed"`
FilesUnmodified uint `json:"files_unmodified"`
DirsNew uint `json:"dirs_new"`
DirsChanged uint `json:"dirs_changed"`
DirsUnmodified uint `json:"dirs_unmodified"`
DataBlobs int `json:"data_blobs"`
TreeBlobs int `json:"tree_blobs"`
DataAdded uint64 `json:"data_added"`
TotalFilesProcessed uint `json:"total_files_processed"`
TotalBytesProcessed uint64 `json:"total_bytes_processed"`
TotalDuration float64 `json:"total_duration"` // in seconds
SnapshotID string `json:"snapshot_id"`
DryRun bool `json:"dry_run,omitempty"`
}
22 changes: 22 additions & 0 deletions internal/ui/backup/text.go
@@ -186,3 +186,25 @@ func (b *TextProgress) Finish(snapshotID restic.ID, start time.Time, summary *Su
formatDuration(time.Since(start)),
)
}

// Return finishing stats in a struct.
func (b *TextProgress) FinishSummary(snapshotID restic.ID, start time.Time, summary *Summary, dryRun bool) summaryOutput {

Review comment (Member):
This function does not require any data stored in the TextProgress struct. Thus please extract it into a separate function and remove the duplication with the JsonProgress.

return summaryOutput{
MessageType: "summary",
FilesNew: summary.Files.New,
FilesChanged: summary.Files.Changed,
FilesUnmodified: summary.Files.Unchanged,
DirsNew: summary.Dirs.New,
DirsChanged: summary.Dirs.Changed,
DirsUnmodified: summary.Dirs.Unchanged,
DataBlobs: summary.ItemStats.DataBlobs,
TreeBlobs: summary.ItemStats.TreeBlobs,
DataAdded: summary.ItemStats.DataSize + summary.ItemStats.TreeSize,
TotalFilesProcessed: summary.Files.New + summary.Files.Changed + summary.Files.Unchanged,
TotalBytesProcessed: summary.ProcessedBytes,
TotalDuration: time.Since(start).Seconds(),
SnapshotID: snapshotID.Str(),
DryRun: dryRun,
}
}
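
Picking up the last review comment: one possible shape for that de-duplication is a plain function inside internal/ui/backup that both printers delegate to (the name newSummaryOutput is hypothetical, not part of this PR; the field assignments are taken verbatim from the diff above):

func newSummaryOutput(snapshotID restic.ID, start time.Time, summary *Summary, dryRun bool) summaryOutput {
	return summaryOutput{
		MessageType:         "summary",
		FilesNew:            summary.Files.New,
		FilesChanged:        summary.Files.Changed,
		FilesUnmodified:     summary.Files.Unchanged,
		DirsNew:             summary.Dirs.New,
		DirsChanged:         summary.Dirs.Changed,
		DirsUnmodified:      summary.Dirs.Unchanged,
		DataBlobs:           summary.ItemStats.DataBlobs,
		TreeBlobs:           summary.ItemStats.TreeBlobs,
		DataAdded:           summary.ItemStats.DataSize + summary.ItemStats.TreeSize,
		TotalFilesProcessed: summary.Files.New + summary.Files.Changed + summary.Files.Unchanged,
		TotalBytesProcessed: summary.ProcessedBytes,
		TotalDuration:       time.Since(start).Seconds(),
		SnapshotID:          snapshotID.Str(),
		DryRun:              dryRun,
	}
}

Both FinishSummary implementations would then become one-liners:

func (b *JSONProgress) FinishSummary(snapshotID restic.ID, start time.Time, summary *Summary, dryRun bool) summaryOutput {
	return newSummaryOutput(snapshotID, start, summary, dryRun)
}

func (b *TextProgress) FinishSummary(snapshotID restic.ID, start time.Time, summary *Summary, dryRun bool) summaryOutput {
	return newSummaryOutput(snapshotID, start, summary, dryRun)
}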