From 37e2591f3240f1288485557e74076233e1355cf7 Mon Sep 17 00:00:00 2001
From: zengxilong
Date: Thu, 1 Sep 2022 17:02:57 +0800
Subject: [PATCH] feat: add comments

Signed-off-by: zengxilong
---
 .../src/exporter/config/agent.rs              |  3 +++
 opentelemetry-sdk/src/trace/span_processor.rs | 26 +++++++++++++++++++++-----
 2 files changed, 24 insertions(+), 5 deletions(-)

diff --git a/opentelemetry-jaeger/src/exporter/config/agent.rs b/opentelemetry-jaeger/src/exporter/config/agent.rs
index 598f79b7d6..9bc42dd994 100644
--- a/opentelemetry-jaeger/src/exporter/config/agent.rs
+++ b/opentelemetry-jaeger/src/exporter/config/agent.rs
@@ -234,6 +234,9 @@ impl AgentPipeline {
     /// Assign the batch span processor for the exporter pipeline.
     ///
+    /// If a simple span processor is used by [`install_simple`][AgentPipeline::install_simple]
+    /// or [`build_simple`][AgentPipeline::build_simple], then this config will have no effect.
+    ///
     /// # Examples
     /// Set max queue size.
     /// ```rust
diff --git a/opentelemetry-sdk/src/trace/span_processor.rs b/opentelemetry-sdk/src/trace/span_processor.rs
index 94b33ee3e3..b98059168e 100644
--- a/opentelemetry-sdk/src/trace/span_processor.rs
+++ b/opentelemetry-sdk/src/trace/span_processor.rs
@@ -555,31 +555,47 @@ impl Default for BatchConfig {
 }
 
 impl BatchConfig {
-    /// Set max queue size for batch config
+    /// Set max_queue_size for [`BatchConfig`].
+    /// It's the maximum queue size to buffer spans for delayed processing.
+    /// If the queue gets full, it will drop the spans.
+    /// The default value is [`OTEL_BSP_MAX_QUEUE_SIZE_DEFAULT`].
     pub fn with_max_queue_size(mut self, max_queue_size: usize) -> Self {
         self.max_queue_size = max_queue_size;
         self
     }
-    /// Set max export batch size for batch config
+    /// Set max_export_batch_size for [`BatchConfig`].
+    /// It's the maximum number of spans to process in a single batch. If there
+    /// is more than one batch's worth of spans, the batches are processed one
+    /// after the other without any delay. The default value
+    /// is [`OTEL_BSP_MAX_EXPORT_BATCH_SIZE_DEFAULT`].
     pub fn with_max_export_batch_size(mut self, max_export_batch_size: usize) -> Self {
         self.max_export_batch_size = max_export_batch_size;
         self
     }
-    /// Set max concurrent exports for batch config
+    /// Set max_concurrent_exports for [`BatchConfig`].
+    /// It's the maximum number of concurrent exports.
+    /// Limits the number of spawned tasks for exports and thus memory consumed by the exporter.
+    /// The default value is [`OTEL_BSP_MAX_CONCURRENT_EXPORTS_DEFAULT`].
+    /// If max_concurrent_exports is set to 1, exports are performed
+    /// synchronously on the BatchSpanProcessor task.
     pub fn with_max_concurrent_exports(mut self, max_concurrent_exports: usize) -> Self {
         self.max_concurrent_exports = max_concurrent_exports;
         self
     }
-    /// Set scheduled delay duration for batch config
+    /// Set scheduled_delay for [`BatchConfig`].
+    /// It's the delay interval between two consecutive batch processing runs.
+    /// The default value is [`OTEL_BSP_SCHEDULE_DELAY_DEFAULT`] milliseconds.
     pub fn with_scheduled_delay(mut self, scheduled_delay: Duration) -> Self {
         self.scheduled_delay = scheduled_delay;
         self
     }
-    /// Set max export timeout for batch config
+    /// Set max_export_timeout for [`BatchConfig`].
+    /// It's the maximum duration to export a batch of data.
+    /// The default value is [`OTEL_BSP_EXPORT_TIMEOUT_DEFAULT`] milliseconds.
     pub fn with_max_export_timeout(mut self, max_export_timeout: Duration) -> Self {
         self.max_export_timeout = max_export_timeout;
         self
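
For context, a minimal sketch of how the `BatchConfig` builders documented in this patch are typically wired into the Jaeger agent pipeline. It assumes the `opentelemetry` / `opentelemetry-jaeger` crates of this era with the `rt-tokio` feature and a `tokio` runtime, and that the pipeline method the first hunk documents is `with_batch_processor_config`; the service name and chosen values are illustrative only.

```rust
// Sketch, not part of the patch: tune BatchConfig and install it with the
// batch pipeline; install_simple/build_simple would ignore this config.
use std::time::Duration;

use opentelemetry::{global, runtime, sdk::trace::BatchConfig, trace::TraceError};

#[tokio::main]
async fn main() -> Result<(), TraceError> {
    // Any value not set here falls back to its OTEL_BSP_* default.
    let batch_config = BatchConfig::default()
        .with_max_queue_size(4096) // spans beyond this backlog are dropped
        .with_max_export_batch_size(512) // spans exported per batch
        .with_max_concurrent_exports(2) // 1 => export synchronously on the processor task
        .with_scheduled_delay(Duration::from_millis(5_000))
        .with_max_export_timeout(Duration::from_secs(30));

    let _tracer = opentelemetry_jaeger::new_agent_pipeline()
        .with_service_name("batch-config-demo") // hypothetical service name
        .with_batch_processor_config(batch_config)
        .install_batch(runtime::Tokio)?;

    // ... emit spans ...

    global::shutdown_tracer_provider();
    Ok(())
}
```

As the agent.rs comment added above notes, only `install_batch`/`build_batch` go through the `BatchSpanProcessor`, so the config is never consulted on the simple-processor path.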