[exporter/datadog] Use statswriter, client aggregator is for tracer stats only (open-telemetry#31173)

**Description:**
The `ProcessStats` function receives "tracer stats" payloads. Looping through each payload and breaking it apart drastically increases the number of stats payloads being emitted. Instead, write directly to the StatsWriter, which avoids duplicated work and keeps each payload intact as a single payload.
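
For illustration, here is a minimal sketch of the new wiring (not the exact factory code). It only reuses identifiers visible in this diff (`pb.StatsPayload`, `writer.NewStatsWriter`, `telemetry.NewNoopCollector`, `config.New`, `proto.Unmarshal`, `Run`/`Stop`); the helper name `startStatsPipeline`, the channel sizes, and the shutdown sequence are illustrative assumptions.

```go
package main

import (
	"log"

	pb "github.com/DataDog/datadog-agent/pkg/proto/pbgo/trace"
	"github.com/DataDog/datadog-agent/pkg/trace/config"
	"github.com/DataDog/datadog-agent/pkg/trace/telemetry"
	"github.com/DataDog/datadog-agent/pkg/trace/writer"
	"google.golang.org/protobuf/proto"
)

// startStatsPipeline forwards serialized stats payloads straight to the
// trace-agent StatsWriter instead of splitting them into per-bucket payloads.
// It returns a stop function that shuts the writer down.
func startStatsPipeline(statsIn <-chan []byte, tracerVersion string) (stop func()) {
	acfg := config.New()

	// A single channel of whole payloads, drained by the StatsWriter.
	statsToAgent := make(chan *pb.StatsPayload, 1000)
	statsWriter := writer.NewStatsWriter(acfg, statsToAgent, telemetry.NewNoopCollector())
	go statsWriter.Run()

	go func() {
		for msg := range statsIn {
			sp := &pb.StatsPayload{}
			if err := proto.Unmarshal(msg, sp); err != nil {
				log.Printf("failed to unmarshal stats payload: %v", err)
				continue
			}
			for _, csp := range sp.Stats {
				if csp.TracerVersion == "" {
					csp.TracerVersion = tracerVersion
				}
			}
			// Old path (removed): ProcessStats was called once per client
			// bucket, which re-aggregated and multiplied the emitted payloads.
			statsToAgent <- sp // forward the payload intact
		}
		close(statsToAgent)
	}()

	return func() { statsWriter.Stop() }
}

func main() {
	statsIn := make(chan []byte, 1000)
	stop := startStatsPipeline(statsIn, "otelcol-example")
	// No payloads are fed in this demo; just shut down cleanly.
	close(statsIn)
	stop()
}
```

The important difference from the old path is that the whole `*pb.StatsPayload` is forwarded on the channel, rather than calling `ProcessStats` once per client stats bucket.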

**Link to tracking Issue:**

**Testing:**
I tested this locally with the otel example calendar app and verified that stats were calculated correctly; the logs also showed the reduction in the number of payloads being emitted.

~TODO: I will add a test to prevent future regression~ Done!

**Documentation:**

---------

Co-authored-by: Pablo Baeyens <pbaeyens31+github@gmail.com>
2 people authored and XinRanZhAWS committed Mar 13, 2024
1 parent 48d33fa commit df995a8
Showing 6 changed files with 149 additions and 49 deletions.
27 changes: 27 additions & 0 deletions .chloggen/ddog-exporter-fix-multi-resc-stats.yaml
@@ -0,0 +1,27 @@
# Use this changelog template to create an entry for release notes.

# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix'
change_type: bug_fix

# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver)
component: datadogexporter

# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`).
note: Fix bug where multiple resources would cause datadogexporter to send extraneous additional stats buckets.

# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists.
issues: [31173]

# (Optional) One or more lines of additional information to render under the primary note.
# These lines will be padded with 2 spaces and then inserted directly into the document.
# Use pipe (|) for multiline entries.
subtext:

# If your change doesn't affect end users or the exported elements of any package,
# you should instead start your pull request title with [chore] or use the "Skip Changelog" label.
# Optional: The change log or logs in which this entry should be included.
# e.g. '[user]' or '[user, api]'
# Include 'user' if the change is relevant to end users.
# Include 'api' if there is a change to a library API.
# Default: '[user]'
change_logs: ["user"]
39 changes: 27 additions & 12 deletions exporter/datadogexporter/factory.go
@@ -12,6 +12,8 @@ import (

pb "github.com/DataDog/datadog-agent/pkg/proto/pbgo/trace"
"github.com/DataDog/datadog-agent/pkg/trace/agent"
"github.com/DataDog/datadog-agent/pkg/trace/telemetry"
"github.com/DataDog/datadog-agent/pkg/trace/writer"
"github.com/DataDog/opentelemetry-mapping-go/pkg/inframetadata"
"github.com/DataDog/opentelemetry-mapping-go/pkg/otlp/attributes"
"github.com/DataDog/opentelemetry-mapping-go/pkg/otlp/attributes/source"
@@ -238,7 +240,7 @@ func checkAndCastConfig(c component.Config, logger *zap.Logger) *Config {
return cfg
}

func (f *factory) consumeStatsPayload(ctx context.Context, out chan []byte, traceagent *agent.Agent, tracerVersion string, logger *zap.Logger) {
func (f *factory) consumeStatsPayload(ctx context.Context, statsIn <-chan []byte, statsToAgent chan<- *pb.StatsPayload, tracerVersion string, logger *zap.Logger) {
for i := 0; i < runtime.NumCPU(); i++ {
f.wg.Add(1)
go func() {
@@ -247,17 +249,20 @@
select {
case <-ctx.Done():
return
case msg := <-out:
case msg := <-statsIn:
sp := &pb.StatsPayload{}

err := proto.Unmarshal(msg, sp)
if err != nil {
logger.Error("failed to unmarshal stats payload", zap.Error(err))
continue
}
for _, sc := range sp.Stats {
traceagent.ProcessStats(sc, "", tracerVersion)
for _, csp := range sp.Stats {
if csp.TracerVersion == "" {
csp.TracerVersion = tracerVersion
}
}
statsToAgent <- sp
}
}
}()
@@ -279,16 +284,22 @@ func (f *factory) createMetricsExporter(
ctx, cancel := context.WithCancel(ctx)
// cancel() runs on shutdown
var pushMetricsFn consumer.ConsumeMetricsFunc
traceagent, err := f.TraceAgent(ctx, set, cfg, hostProvider)
acfg, err := newTraceAgentConfig(ctx, set, cfg, hostProvider)
if err != nil {
cancel()
return nil, fmt.Errorf("failed to start trace-agent: %w", err)
return nil, err
}
var statsOut chan []byte
statsToAgent := make(chan *pb.StatsPayload)
statsWriter := writer.NewStatsWriter(acfg, statsToAgent, telemetry.NewNoopCollector())

set.Logger.Debug("Starting Datadog Trace-Agent StatsWriter")
go statsWriter.Run()

var statsIn chan []byte
if datadog.ConnectorPerformanceFeatureGate.IsEnabled() {
statsOut = make(chan []byte, 1000)
statsIn = make(chan []byte, 1000)
statsv := set.BuildInfo.Command + set.BuildInfo.Version
f.consumeStatsPayload(ctx, statsOut, traceagent, statsv, set.Logger)
f.consumeStatsPayload(ctx, statsIn, statsToAgent, statsv, set.Logger)
}
pcfg := newMetadataConfigfromConfig(cfg)
metadataReporter, err := f.Reporter(set, pcfg)
@@ -322,7 +333,7 @@
return nil
}
} else {
exp, metricsErr := newMetricsExporter(ctx, set, cfg, &f.onceMetadata, attrsTranslator, hostProvider, traceagent, metadataReporter, statsOut)
exp, metricsErr := newMetricsExporter(ctx, set, cfg, acfg, &f.onceMetadata, attrsTranslator, hostProvider, statsToAgent, metadataReporter, statsIn)
if metricsErr != nil {
cancel() // first cancel context
f.wg.Wait() // then wait for shutdown
@@ -346,8 +357,12 @@
exporterhelper.WithShutdown(func(context.Context) error {
cancel()
f.StopReporter()
if statsOut != nil {
close(statsOut)
statsWriter.Stop()
if statsIn != nil {
close(statsIn)
}
if statsToAgent != nil {
close(statsToAgent)
}
return nil
}),
23 changes: 20 additions & 3 deletions exporter/datadogexporter/integrationtest/integration_test.go
@@ -34,6 +34,7 @@ import (
"go.opentelemetry.io/otel"
"go.opentelemetry.io/otel/attribute"
"go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc"
"go.opentelemetry.io/otel/sdk/resource"
sdktrace "go.opentelemetry.io/otel/sdk/trace"
"google.golang.org/protobuf/proto"

@@ -104,7 +105,7 @@ func TestIntegration(t *testing.T) {
for _, chunks := range tps.Chunks {
spans = append(spans, chunks.Spans...)
for _, span := range chunks.Spans {
assert.Equal(t, span.Meta["_dd.stats_computed"], "true")
assert.Equal(t, "true", span.Meta["_dd.stats_computed"])
}
}
}
@@ -118,8 +119,8 @@ func TestIntegration(t *testing.T) {
stats = append(stats, csbs.Stats...)
for _, stat := range csbs.Stats {
assert.True(t, strings.HasPrefix(stat.Resource, "TestSpan"))
assert.Equal(t, stat.Hits, uint64(1))
assert.Equal(t, stat.TopLevelHits, uint64(1))
assert.Equal(t, uint64(1), stat.Hits)
assert.Equal(t, uint64(1), stat.TopLevelHits)
}
}
}
@@ -279,18 +280,34 @@ func sendTraces(t *testing.T) {
traceExporter, err := otlptracegrpc.New(ctx, otlptracegrpc.WithInsecure())
require.NoError(t, err)
bsp := sdktrace.NewBatchSpanProcessor(traceExporter)
r1, _ := resource.New(ctx, resource.WithAttributes(attribute.String("k8s.node.name", "aaaa")))
r2, _ := resource.New(ctx, resource.WithAttributes(attribute.String("k8s.node.name", "bbbb")))
tracerProvider := sdktrace.NewTracerProvider(
sdktrace.WithSampler(sdktrace.AlwaysSample()),
sdktrace.WithSpanProcessor(bsp),
sdktrace.WithResource(r1),
)
tracerProvider2 := sdktrace.NewTracerProvider(
sdktrace.WithSampler(sdktrace.AlwaysSample()),
sdktrace.WithSpanProcessor(bsp),
sdktrace.WithResource(r2),
)
otel.SetTracerProvider(tracerProvider)
defer func() {
require.NoError(t, tracerProvider.Shutdown(ctx))
require.NoError(t, tracerProvider2.Shutdown(ctx))
}()

tracer := otel.Tracer("test-tracer")
for i := 0; i < 10; i++ {
_, span := tracer.Start(ctx, fmt.Sprintf("TestSpan%d", i))

if i == 3 {
// Send some traces from a different resource
// This verifies that stats from different hosts don't accidentally create extraneous empty stats buckets
otel.SetTracerProvider(tracerProvider2)
tracer = otel.Tracer("test-tracer2")
}
// Only sample 5 out of the 10 spans
if i < 5 {
span.SetAttributes(attribute.Bool("sampled", true))
47 changes: 30 additions & 17 deletions exporter/datadogexporter/metrics_exporter.go
@@ -13,7 +13,7 @@ import (
"time"

pb "github.com/DataDog/datadog-agent/pkg/proto/pbgo/trace"
"github.com/DataDog/datadog-agent/pkg/trace/api"
"github.com/DataDog/datadog-agent/pkg/trace/config"
"github.com/DataDog/datadog-api-client-go/v2/api/datadogV2"
"github.com/DataDog/opentelemetry-mapping-go/pkg/inframetadata"
"github.com/DataDog/opentelemetry-mapping-go/pkg/otlp/attributes"
@@ -37,6 +37,7 @@
type metricsExporter struct {
params exporter.CreateSettings
cfg *Config
agntConfig *config.AgentConfig
ctx context.Context
client *zorkian.Client
metricsAPI *datadogV2.MetricsApi
@@ -48,8 +49,8 @@ type metricsExporter struct {
metadataReporter *inframetadata.Reporter
// getPushTime returns a Unix time in nanoseconds, representing the time pushing metrics.
// It will be overwritten in tests.
getPushTime func() uint64
apmStatsProcessor api.StatsProcessor
getPushTime func() uint64
statsToAgent chan<- *pb.StatsPayload
}

// translatorFromConfig creates a new metrics translator from the exporter
@@ -95,10 +96,11 @@ func newMetricsExporter(
ctx context.Context,
params exporter.CreateSettings,
cfg *Config,
agntConfig *config.AgentConfig,
onceMetadata *sync.Once,
attrsTranslator *attributes.Translator,
sourceProvider source.Provider,
apmStatsProcessor api.StatsProcessor,
statsToAgent chan<- *pb.StatsPayload,
metadataReporter *inframetadata.Reporter,
statsOut chan []byte,
) (*metricsExporter, error) {
@@ -109,17 +111,18 @@

scrubber := scrub.NewScrubber()
exporter := &metricsExporter{
params: params,
cfg: cfg,
ctx: ctx,
tr: tr,
scrubber: scrubber,
retrier: clientutil.NewRetrier(params.Logger, cfg.BackOffConfig, scrubber),
onceMetadata: onceMetadata,
sourceProvider: sourceProvider,
getPushTime: func() uint64 { return uint64(time.Now().UTC().UnixNano()) },
apmStatsProcessor: apmStatsProcessor,
metadataReporter: metadataReporter,
params: params,
cfg: cfg,
ctx: ctx,
agntConfig: agntConfig,
tr: tr,
scrubber: scrubber,
retrier: clientutil.NewRetrier(params.Logger, cfg.BackOffConfig, scrubber),
onceMetadata: onceMetadata,
sourceProvider: sourceProvider,
getPushTime: func() uint64 { return uint64(time.Now().UTC().UnixNano()) },
statsToAgent: statsToAgent,
metadataReporter: metadataReporter,
}
errchan := make(chan error)
if isMetricExportV2Enabled() {
@@ -260,8 +263,18 @@ func (exp *metricsExporter) PushMetricsData(ctx context.Context, md pmetric.Metr
if len(sp) > 0 {
exp.params.Logger.Debug("exporting APM stats payloads", zap.Any("stats_payloads", sp))
statsv := exp.params.BuildInfo.Command + exp.params.BuildInfo.Version
for _, p := range sp {
exp.apmStatsProcessor.ProcessStats(p, "", statsv)
for _, csp := range sp {
if csp.TracerVersion == "" {
csp.TracerVersion = statsv
}
}
exp.statsToAgent <- &pb.StatsPayload{
AgentHostname: exp.agntConfig.Hostname, // This is "dead-code". We will be removing this code path entirely
AgentEnv: exp.agntConfig.DefaultEnv,
Stats: sp,
AgentVersion: exp.agntConfig.AgentVersion,
ClientComputed: false,
SplitPayload: false,
}
}

50 changes: 35 additions & 15 deletions exporter/datadogexporter/metrics_exporter_test.go
@@ -16,6 +16,7 @@ import (

"github.com/DataDog/agent-payload/v5/gogen"
pb "github.com/DataDog/datadog-agent/pkg/proto/pbgo/trace"
traceconfig "github.com/DataDog/datadog-agent/pkg/trace/config"
"github.com/DataDog/datadog-api-client-go/v2/api/datadogV2"
"github.com/DataDog/opentelemetry-mapping-go/pkg/inframetadata"
"github.com/DataDog/opentelemetry-mapping-go/pkg/inframetadata/payload"
@@ -297,23 +298,24 @@ func Test_metricsExporter_PushMetricsData(t *testing.T) {
defer server.Close()

var (
once sync.Once
statsRecorder testutil.MockStatsProcessor
once sync.Once
)

statsToAgent := make(chan *pb.StatsPayload, 1000) // Buffer the channel to allow test to pass without go-routines
pusher := newTestPusher(t)
reporter, err := inframetadata.NewReporter(zap.NewNop(), pusher, 1*time.Second)
require.NoError(t, err)
attributesTranslator, err := attributes.NewTranslator(componenttest.NewNopTelemetrySettings())
require.NoError(t, err)
acfg := traceconfig.New()
exp, err := newMetricsExporter(
context.Background(),
exportertest.NewNopCreateSettings(),
newTestConfig(t, server.URL, tt.hostTags, tt.histogramMode),
acfg,
&once,
attributesTranslator,
&testutil.MockSourceProvider{Src: tt.source},
&statsRecorder,
statsToAgent,
reporter,
nil,
)
@@ -357,10 +359,18 @@
assert.NoError(t, err)
assert.Equal(t, expected, sketchRecorder.ByteBody)
}
if tt.expectedStats == nil {
assert.Len(t, statsRecorder.In, 0)
} else {
assert.ElementsMatch(t, statsRecorder.In, tt.expectedStats)
if tt.expectedStats != nil {
var actualStats []*pb.ClientStatsPayload
pullStats:
for len(actualStats) < len(tt.expectedStats) {
select {
case <-time.After(10 * time.Second):
break pullStats
case sp := <-statsToAgent:
actualStats = append(actualStats, sp.Stats...)
}
}
assert.ElementsMatch(t, actualStats, tt.expectedStats)
}
})
}
@@ -690,22 +700,24 @@ func Test_metricsExporter_PushMetricsData_Zorkian(t *testing.T) {
defer server.Close()

var (
once sync.Once
statsRecorder testutil.MockStatsProcessor
once sync.Once
)
statsToAgent := make(chan *pb.StatsPayload, 1000)
pusher := newTestPusher(t)
reporter, err := inframetadata.NewReporter(zap.NewNop(), pusher, 1*time.Second)
require.NoError(t, err)
attributesTranslator, err := attributes.NewTranslator(componenttest.NewNopTelemetrySettings())
require.NoError(t, err)
acfg := traceconfig.New()
exp, err := newMetricsExporter(
context.Background(),
exportertest.NewNopCreateSettings(),
newTestConfig(t, server.URL, tt.hostTags, tt.histogramMode),
acfg,
&once,
attributesTranslator,
&testutil.MockSourceProvider{Src: tt.source},
&statsRecorder,
statsToAgent,
reporter,
nil,
)
@@ -744,10 +756,18 @@
assert.NoError(t, err)
assert.Equal(t, expected, sketchRecorder.ByteBody)
}
if tt.expectedStats == nil {
assert.Len(t, statsRecorder.In, 0)
} else {
assert.ElementsMatch(t, statsRecorder.In, tt.expectedStats)
if tt.expectedStats != nil {
var actualStats []*pb.ClientStatsPayload
pullStats:
for len(actualStats) < len(tt.expectedStats) {
select {
case <-time.After(10 * time.Second):
break pullStats
case sp := <-statsToAgent:
actualStats = append(actualStats, sp.Stats...)
}
}
assert.ElementsMatch(t, actualStats, tt.expectedStats)
}
})
}
