Test BatchSpanProcessor export timeout directly (open-telemetry#1982)
Test the export timeout by waiting indefinitely for the export to time
out, instead of having a second timer, running in its own goroutine, time
out first. The algorithm this replaces fails on slow machines, where that
meta-timer is given priority to progress over the export timer actually
being tested, producing a false-negative test result.

Move the testing of the BatchSpanProcessor export timeout into its own
test function. This removes the bloat it added to the other testing
options and allows the customization needed to test the timeout in a
deterministic manner.
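
The pattern is visible in the diff below: the test exporter blocks on the export context and records the context error, so the only deadline in play is the one the BatchSpanProcessor derives from WithExportTimeout. A minimal, self-contained sketch of that idea follows; it is not the test from this commit, and the package layout, the provider construction via NewTracerProvider/WithSpanProcessor, and the use of Shutdown to force the pending export are illustrative assumptions.

package main

import (
    "context"
    "errors"
    "fmt"
    "time"

    sdktrace "go.opentelemetry.io/otel/sdk/trace"
)

// stuckExporter never finishes an export on its own: it waits for the
// export context to expire and records the resulting error.
type stuckExporter struct{ err error }

func (e *stuckExporter) ExportSpans(ctx context.Context, _ []sdktrace.ReadOnlySpan) error {
    <-ctx.Done()      // only the processor's export deadline can end this
    e.err = ctx.Err() // context.DeadlineExceeded once WithExportTimeout fires
    return e.err
}

func (e *stuckExporter) Shutdown(context.Context) error { return nil }

func main() {
    exp := &stuckExporter{}
    bsp := sdktrace.NewBatchSpanProcessor(
        exp,
        sdktrace.WithExportTimeout(time.Microsecond), // non-zero so a deadline is set
        sdktrace.WithBlocking(),
    )
    tp := sdktrace.NewTracerProvider(sdktrace.WithSpanProcessor(bsp))

    _, span := tp.Tracer("export-timeout-sketch").Start(context.Background(), "work")
    span.End()

    // Shutting down flushes the queue, forcing an export that must hit the deadline.
    _ = tp.Shutdown(context.Background())

    fmt.Println(errors.Is(exp.err, context.DeadlineExceeded)) // expected: true
}

Because the exporter cannot return until its context expires, a slow machine can only make this kind of test slower, never flip its result.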
MrAlias committed Jun 9, 2021
1 parent 7ffe284 commit 87cc1e1
Showing 1 changed file with 38 additions and 29 deletions.
67 changes: 38 additions & 29 deletions sdk/trace/batch_span_processor_test.go
@@ -36,7 +36,6 @@ type testBatchExporter struct {
     sizes         []int
     batchCount    int
     shutdownCount int
-    delay         time.Duration
     errors        []error
     droppedCount  int
     idx           int
@@ -54,8 +53,6 @@ func (t *testBatchExporter) ExportSpans(ctx context.Context, spans []sdktrace.Re
         return err
     }
 
-    time.Sleep(t.delay)
-
     select {
     case <-ctx.Done():
         t.err = ctx.Err()
@@ -109,35 +106,23 @@ func TestNewBatchSpanProcessorWithNilExporter(t *testing.T) {
 }
 
 type testOption struct {
-    name              string
-    o                 []sdktrace.BatchSpanProcessorOption
-    wantNumSpans      int
-    wantBatchCount    int
-    wantExportTimeout bool
-    genNumSpans       int
-    delayExportBy     time.Duration
-    parallel          bool
+    name           string
+    o              []sdktrace.BatchSpanProcessorOption
+    wantNumSpans   int
+    wantBatchCount int
+    genNumSpans    int
+    parallel       bool
 }
 
 func TestNewBatchSpanProcessorWithOptions(t *testing.T) {
     schDelay := 200 * time.Millisecond
-    exportTimeout := time.Millisecond
     options := []testOption{
         {
             name:           "default BatchSpanProcessorOptions",
             wantNumSpans:   2053,
             wantBatchCount: 4,
             genNumSpans:    2053,
         },
-        {
-            name: "non-default ExportTimeout",
-            o: []sdktrace.BatchSpanProcessorOption{
-                sdktrace.WithExportTimeout(exportTimeout),
-            },
-            wantExportTimeout: true,
-            genNumSpans:       2053,
-            delayExportBy:     2 * exportTimeout, // to ensure export timeout
-        },
         {
             name: "non-default BatchTimeout",
             o: []sdktrace.BatchSpanProcessorOption{
@@ -204,9 +189,7 @@ func TestNewBatchSpanProcessorWithOptions(t *testing.T) {
     }
     for _, option := range options {
         t.Run(option.name, func(t *testing.T) {
-            te := testBatchExporter{
-                delay: option.delayExportBy,
-            }
+            te := testBatchExporter{}
             tp := basicTracerProvider(t)
             ssp := createAndRegisterBatchSP(option, &te)
             if ssp == nil {
@@ -231,15 +214,41 @@ func TestNewBatchSpanProcessorWithOptions(t *testing.T) {
                     gotBatchCount, option.wantBatchCount)
                 t.Errorf("Batches %v\n", te.sizes)
             }
-
-            if option.wantExportTimeout && te.err != context.DeadlineExceeded {
-                t.Errorf("context deadline: got err %+v, want %+v\n",
-                    te.err, context.DeadlineExceeded)
-            }
         })
     }
 }
+
+type stuckExporter struct {
+    testBatchExporter
+}
+
+// ExportSpans waits for ctx to expire and returns that error.
+func (e *stuckExporter) ExportSpans(ctx context.Context, _ []sdktrace.ReadOnlySpan) error {
+    <-ctx.Done()
+    e.err = ctx.Err()
+    return ctx.Err()
+}
+
+func TestBatchSpanProcessorExportTimeout(t *testing.T) {
+    exp := new(stuckExporter)
+    bsp := sdktrace.NewBatchSpanProcessor(
+        exp,
+        // Set a non-zero export timeout so a deadline is set.
+        sdktrace.WithExportTimeout(1*time.Microsecond),
+        sdktrace.WithBlocking(),
+    )
+    tp := basicTracerProvider(t)
+    tp.RegisterSpanProcessor(bsp)
+
+    tr := tp.Tracer("BatchSpanProcessorExportTimeout")
+    generateSpan(t, false, tr, testOption{genNumSpans: 1})
+    tp.UnregisterSpanProcessor(bsp)
+
+    if exp.err != context.DeadlineExceeded {
+        t.Errorf("context deadline error not returned: got %+v", exp.err)
+    }
+}
 
 func createAndRegisterBatchSP(option testOption, te *testBatchExporter) sdktrace.SpanProcessor {
     // Always use blocking queue to avoid flaky tests.
     options := append(option.o, sdktrace.WithBlocking())
