From 332736ded3d976403d6656cc854a72e30ec1cd77 Mon Sep 17 00:00:00 2001
From: Taylor Brennan
Date: Mon, 7 Aug 2023 16:55:03 -0400
Subject: [PATCH] new metric for length of time supervisor takes uploading snapshots to s3

---
 pkg/supervisor/archived_snapshot.go | 15 ++++++++++++++-
 1 file changed, 14 insertions(+), 1 deletion(-)

diff --git a/pkg/supervisor/archived_snapshot.go b/pkg/supervisor/archived_snapshot.go
index fbe236ec..8b2b3bc9 100644
--- a/pkg/supervisor/archived_snapshot.go
+++ b/pkg/supervisor/archived_snapshot.go
@@ -7,13 +7,15 @@ import (
 	"net/url"
 	"os"
 	"strings"
+	"time"
 
 	"github.com/aws/aws-sdk-go/aws/session"
 	"github.com/aws/aws-sdk-go/service/s3/s3manager"
 	"github.com/pkg/errors"
-	"github.com/segmentio/ctlstore/pkg/utils"
 	"github.com/segmentio/events/v2"
 	"github.com/segmentio/stats/v4"
+
+	"github.com/segmentio/ctlstore/pkg/utils"
 )
 
 type archivedSnapshot interface {
@@ -76,9 +78,13 @@ func (c *s3Snapshot) Upload(ctx context.Context, path string) error {
 		reader = gpr
 	}
 	events.Log("Uploading %{file}s (%d bytes) to %{bucket}s/%{key}s", path, size, c.Bucket, key)
+
+	start := time.Now()
 	if err = c.sendToS3(ctx, key, c.Bucket, reader); err != nil {
 		return errors.Wrap(err, "send to s3")
 	}
+	stats.Observe("ldb-upload-time", time.Since(start), stats.T("compressed", isCompressed(gpr)))
+
 	events.Log("Successfully uploaded %{file}s to %{bucket}s/%{key}s", path, c.Bucket, key)
 	if gpr != nil {
 		stats.Set("ldb-size-bytes-compressed", gpr.bytesRead)
@@ -91,6 +97,13 @@ func (c *s3Snapshot) Upload(ctx context.Context, path string) error {
 	return nil
 }
 
+func isCompressed(gpr *gzipCompressionReader) string {
+	if gpr == nil {
+		return "false"
+	}
+	return "true"
+}
+
 func (c *s3Snapshot) sendToS3(ctx context.Context, key string, bucket string, body io.Reader) error {
 	if c.sendToS3Func != nil {
 		return c.sendToS3Func(ctx, key, bucket, body)
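
For reviewers who want to see the new metric in isolation, the short Go sketch below reproduces the timing pattern the diff adds around sendToS3, using the same segmentio/stats/v4 calls. It is illustrative only and not part of the patch: fakeUpload is a hypothetical stand-in for the real upload, and the observation goes to whatever handler is registered on the default stats engine (none here, so it is effectively a no-op). Because stats.T takes string key/value pairs, the tag value is a string, which is why the patch adds the small isCompressed helper rather than passing a bool directly.

package main

import (
	"time"

	"github.com/segmentio/stats/v4"
)

// fakeUpload is a hypothetical stand-in for s3Snapshot.sendToS3; it only
// simulates some work so there is a duration to measure.
func fakeUpload() error {
	time.Sleep(50 * time.Millisecond)
	return nil
}

func main() {
	compressed := "true" // mirrors the string returned by isCompressed in the patch

	// Capture a start time, perform the upload, then report the elapsed
	// duration as an observation tagged with the compression state.
	start := time.Now()
	if err := fakeUpload(); err != nil {
		return
	}
	stats.Observe("ldb-upload-time", time.Since(start), stats.T("compressed", compressed))
}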