From 9d3d0f88445eefe040b6854964a99b4e66b88e89 Mon Sep 17 00:00:00 2001
From: Aaron Abbott
Date: Thu, 22 Jun 2023 11:27:04 -0400
Subject: [PATCH 1/8] Upgrade opentelemetry-proto to 0.20 and regen (#3355)

---
 CHANGELOG.md | 2 +
 .../tests/test_metrics_encoder.py | 5 -
 .../collector/logs/v1/logs_service_pb2.py | 20 ++-
 .../collector/logs/v1/logs_service_pb2.pyi | 53 ++++++++
 .../metrics/v1/metrics_service_pb2.py | 20 ++-
 .../metrics/v1/metrics_service_pb2.pyi | 53 ++++++++
 .../collector/trace/v1/trace_service_pb2.py | 20 ++-
 .../collector/trace/v1/trace_service_pb2.pyi | 53 ++++++++
 .../proto/common/v1/common_pb2.py | 20 +--
 .../proto/common/v1/common_pb2.pyi | 36 ++---
 .../opentelemetry/proto/logs/v1/logs_pb2.py | 40 ++----
 .../opentelemetry/proto/logs/v1/logs_pb2.pyi | 125 +++++++-----------
 .../proto/metrics/v1/metrics_pb2.py | 88 ++++++------
 .../proto/metrics/v1/metrics_pb2.pyi | 116 +++++-----------
 .../proto/resource/v1/resource_pb2.py | 4 +-
 .../proto/trace/v1/trace_config_pb2.py | 68 ----------
 .../proto/trace/v1/trace_config_pb2.pyi | 123 -----------------
 .../opentelemetry/proto/trace/v1/trace_pb2.py | 48 +++----
 .../proto/trace/v1/trace_pb2.pyi | 107 +++------------
 scripts/proto_codegen.sh | 2 +-
 20 files changed, 388 insertions(+), 615 deletions(-)
 delete mode 100644 opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_config_pb2.py
 delete mode 100644 opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_config_pb2.pyi

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0f10920db7..f1332776ef 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -9,6 +9,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Add max_scale option to Exponential Bucket Histogram Aggregation
   [#3323](https://github.com/open-telemetry/opentelemetry-python/pull/3323))
 - Use BoundedAttributes instead of raw dict to extract attributes from LogRecord and Support dropped_attributes_count in LogRecord ([#3310](https://github.com/open-telemetry/opentelemetry-python/pull/3310))
+- Upgrade opentelemetry-proto to 0.20 and regen
+  [#3355](https://github.com/open-telemetry/opentelemetry-python/pull/3355))
 
 ## Version 1.18.0/0.39b0 (2023-05-04)
 
diff --git a/exporter/opentelemetry-exporter-otlp-proto-common/tests/test_metrics_encoder.py b/exporter/opentelemetry-exporter-otlp-proto-common/tests/test_metrics_encoder.py
index f7c8ceb820..69e7cda39f 100644
--- a/exporter/opentelemetry-exporter-otlp-proto-common/tests/test_metrics_encoder.py
+++ b/exporter/opentelemetry-exporter-otlp-proto-common/tests/test_metrics_encoder.py
@@ -451,7 +451,6 @@ def test_encode_histogram(self):
 bucket_counts=[1, 4],
 explicit_bounds=[10.0, 20.0],
 exemplars=[],
- flags=pb2.DataPointFlags.FLAG_NONE,
 max=18.0,
 min=8.0,
 )
@@ -554,7 +553,6 @@ def test_encode_multiple_scope_histogram(self):
 bucket_counts=[1, 4],
 explicit_bounds=[10.0, 20.0],
 exemplars=[],
- flags=pb2.DataPointFlags.FLAG_NONE,
 max=18.0,
 min=8.0,
 )
@@ -590,7 +588,6 @@ def test_encode_multiple_scope_histogram(self):
 bucket_counts=[1, 4],
 explicit_bounds=[10.0, 20.0],
 exemplars=[],
- flags=pb2.DataPointFlags.FLAG_NONE,
 max=18.0,
 min=8.0,
 )
@@ -633,7 +630,6 @@ def test_encode_multiple_scope_histogram(self):
 bucket_counts=[1, 4],
 explicit_bounds=[10.0, 20.0],
 exemplars=[],
- flags=pb2.DataPointFlags.FLAG_NONE,
 max=18.0,
 min=8.0,
 )
@@ -676,7 +672,6 @@ def test_encode_multiple_scope_histogram(self):
 bucket_counts=[1, 4],
 explicit_bounds=[10.0, 20.0],
 exemplars=[],
- flags=pb2.DataPointFlags.FLAG_NONE,
 max=18.0,
 min=8.0,
 )
diff --git
a/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.py index aef1671830..5e6ae0ef92 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.py +++ b/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.py @@ -15,12 +15,13 @@ from opentelemetry.proto.logs.v1 import logs_pb2 as opentelemetry_dot_proto_dot_logs_dot_v1_dot_logs__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n8opentelemetry/proto/collector/logs/v1/logs_service.proto\x12%opentelemetry.proto.collector.logs.v1\x1a&opentelemetry/proto/logs/v1/logs.proto\"\\\n\x18\x45xportLogsServiceRequest\x12@\n\rresource_logs\x18\x01 \x03(\x0b\x32).opentelemetry.proto.logs.v1.ResourceLogs\"\x1b\n\x19\x45xportLogsServiceResponse2\x9d\x01\n\x0bLogsService\x12\x8d\x01\n\x06\x45xport\x12?.opentelemetry.proto.collector.logs.v1.ExportLogsServiceRequest\x1a@.opentelemetry.proto.collector.logs.v1.ExportLogsServiceResponse\"\x00\x42p\n(io.opentelemetry.proto.collector.logs.v1B\x10LogsServiceProtoP\x01Z0go.opentelemetry.io/proto/otlp/collector/logs/v1b\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n8opentelemetry/proto/collector/logs/v1/logs_service.proto\x12%opentelemetry.proto.collector.logs.v1\x1a&opentelemetry/proto/logs/v1/logs.proto\"\\\n\x18\x45xportLogsServiceRequest\x12@\n\rresource_logs\x18\x01 \x03(\x0b\x32).opentelemetry.proto.logs.v1.ResourceLogs\"u\n\x19\x45xportLogsServiceResponse\x12X\n\x0fpartial_success\x18\x01 \x01(\x0b\x32?.opentelemetry.proto.collector.logs.v1.ExportLogsPartialSuccess\"O\n\x18\x45xportLogsPartialSuccess\x12\x1c\n\x14rejected_log_records\x18\x01 \x01(\x03\x12\x15\n\rerror_message\x18\x02 \x01(\t2\x9d\x01\n\x0bLogsService\x12\x8d\x01\n\x06\x45xport\x12?.opentelemetry.proto.collector.logs.v1.ExportLogsServiceRequest\x1a@.opentelemetry.proto.collector.logs.v1.ExportLogsServiceResponse\"\x00\x42\x98\x01\n(io.opentelemetry.proto.collector.logs.v1B\x10LogsServiceProtoP\x01Z0go.opentelemetry.io/proto/otlp/collector/logs/v1\xaa\x02%OpenTelemetry.Proto.Collector.Logs.V1b\x06proto3') _EXPORTLOGSSERVICEREQUEST = DESCRIPTOR.message_types_by_name['ExportLogsServiceRequest'] _EXPORTLOGSSERVICERESPONSE = DESCRIPTOR.message_types_by_name['ExportLogsServiceResponse'] +_EXPORTLOGSPARTIALSUCCESS = DESCRIPTOR.message_types_by_name['ExportLogsPartialSuccess'] ExportLogsServiceRequest = _reflection.GeneratedProtocolMessageType('ExportLogsServiceRequest', (_message.Message,), { 'DESCRIPTOR' : _EXPORTLOGSSERVICEREQUEST, '__module__' : 'opentelemetry.proto.collector.logs.v1.logs_service_pb2' @@ -35,15 +36,24 @@ }) _sym_db.RegisterMessage(ExportLogsServiceResponse) +ExportLogsPartialSuccess = _reflection.GeneratedProtocolMessageType('ExportLogsPartialSuccess', (_message.Message,), { + 'DESCRIPTOR' : _EXPORTLOGSPARTIALSUCCESS, + '__module__' : 'opentelemetry.proto.collector.logs.v1.logs_service_pb2' + # @@protoc_insertion_point(class_scope:opentelemetry.proto.collector.logs.v1.ExportLogsPartialSuccess) + }) +_sym_db.RegisterMessage(ExportLogsPartialSuccess) + _LOGSSERVICE = DESCRIPTOR.services_by_name['LogsService'] if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n(io.opentelemetry.proto.collector.logs.v1B\020LogsServiceProtoP\001Z0go.opentelemetry.io/proto/otlp/collector/logs/v1' + DESCRIPTOR._serialized_options = 
b'\n(io.opentelemetry.proto.collector.logs.v1B\020LogsServiceProtoP\001Z0go.opentelemetry.io/proto/otlp/collector/logs/v1\252\002%OpenTelemetry.Proto.Collector.Logs.V1' _EXPORTLOGSSERVICEREQUEST._serialized_start=139 _EXPORTLOGSSERVICEREQUEST._serialized_end=231 _EXPORTLOGSSERVICERESPONSE._serialized_start=233 - _EXPORTLOGSSERVICERESPONSE._serialized_end=260 - _LOGSSERVICE._serialized_start=263 - _LOGSSERVICE._serialized_end=420 + _EXPORTLOGSSERVICERESPONSE._serialized_end=350 + _EXPORTLOGSPARTIALSUCCESS._serialized_start=352 + _EXPORTLOGSPARTIALSUCCESS._serialized_end=431 + _LOGSSERVICE._serialized_start=434 + _LOGSSERVICE._serialized_end=591 # @@protoc_insertion_point(module_scope) diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.pyi index 5940c192b2..cdf57e9fa1 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.pyi +++ b/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.pyi @@ -33,6 +33,59 @@ global___ExportLogsServiceRequest = ExportLogsServiceRequest class ExportLogsServiceResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + PARTIAL_SUCCESS_FIELD_NUMBER: builtins.int + @property + def partial_success(self) -> global___ExportLogsPartialSuccess: + """The details of a partially successful export request. + + If the request is only partially accepted + (i.e. when the server accepts only parts of the data and rejects the rest) + the server MUST initialize the `partial_success` field and MUST + set the `rejected_` with the number of items it rejected. + + Servers MAY also make use of the `partial_success` field to convey + warnings/suggestions to senders even when the request was fully accepted. + In such cases, the `rejected_` MUST have a value of `0` and + the `error_message` MUST be non-empty. + + A `partial_success` message with an empty value (rejected_ = 0 and + `error_message` = "") is equivalent to it not being set/present. Senders + SHOULD interpret it the same way as in the full success case. + """ + pass def __init__(self, + *, + partial_success : typing.Optional[global___ExportLogsPartialSuccess] = ..., ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["partial_success",b"partial_success"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["partial_success",b"partial_success"]) -> None: ... global___ExportLogsServiceResponse = ExportLogsServiceResponse + +class ExportLogsPartialSuccess(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + REJECTED_LOG_RECORDS_FIELD_NUMBER: builtins.int + ERROR_MESSAGE_FIELD_NUMBER: builtins.int + rejected_log_records: builtins.int = ... + """The number of rejected log records. + + A `rejected_` field holding a `0` value indicates that the + request was fully accepted. + """ + + error_message: typing.Text = ... + """A developer-facing human-readable message in English. It should be used + either to explain why the server rejected parts of the data during a partial + success or to convey warnings/suggestions during a full success. The message + should offer guidance on how users can address such issues. + + error_message is an optional field. An error_message with an empty value + is equivalent to it not being set. 
+ """ + + def __init__(self, + *, + rejected_log_records : builtins.int = ..., + error_message : typing.Text = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["error_message",b"error_message","rejected_log_records",b"rejected_log_records"]) -> None: ... +global___ExportLogsPartialSuccess = ExportLogsPartialSuccess diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.py index ca026163b1..1d9021d702 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.py +++ b/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.py @@ -15,12 +15,13 @@ from opentelemetry.proto.metrics.v1 import metrics_pb2 as opentelemetry_dot_proto_dot_metrics_dot_v1_dot_metrics__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n>opentelemetry/proto/collector/metrics/v1/metrics_service.proto\x12(opentelemetry.proto.collector.metrics.v1\x1a,opentelemetry/proto/metrics/v1/metrics.proto\"h\n\x1b\x45xportMetricsServiceRequest\x12I\n\x10resource_metrics\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.ResourceMetrics\"\x1e\n\x1c\x45xportMetricsServiceResponse2\xac\x01\n\x0eMetricsService\x12\x99\x01\n\x06\x45xport\x12\x45.opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceRequest\x1a\x46.opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceResponse\"\x00\x42y\n+io.opentelemetry.proto.collector.metrics.v1B\x13MetricsServiceProtoP\x01Z3go.opentelemetry.io/proto/otlp/collector/metrics/v1b\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n>opentelemetry/proto/collector/metrics/v1/metrics_service.proto\x12(opentelemetry.proto.collector.metrics.v1\x1a,opentelemetry/proto/metrics/v1/metrics.proto\"h\n\x1b\x45xportMetricsServiceRequest\x12I\n\x10resource_metrics\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.ResourceMetrics\"~\n\x1c\x45xportMetricsServiceResponse\x12^\n\x0fpartial_success\x18\x01 \x01(\x0b\x32\x45.opentelemetry.proto.collector.metrics.v1.ExportMetricsPartialSuccess\"R\n\x1b\x45xportMetricsPartialSuccess\x12\x1c\n\x14rejected_data_points\x18\x01 \x01(\x03\x12\x15\n\rerror_message\x18\x02 \x01(\t2\xac\x01\n\x0eMetricsService\x12\x99\x01\n\x06\x45xport\x12\x45.opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceRequest\x1a\x46.opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceResponse\"\x00\x42\xa4\x01\n+io.opentelemetry.proto.collector.metrics.v1B\x13MetricsServiceProtoP\x01Z3go.opentelemetry.io/proto/otlp/collector/metrics/v1\xaa\x02(OpenTelemetry.Proto.Collector.Metrics.V1b\x06proto3') _EXPORTMETRICSSERVICEREQUEST = DESCRIPTOR.message_types_by_name['ExportMetricsServiceRequest'] _EXPORTMETRICSSERVICERESPONSE = DESCRIPTOR.message_types_by_name['ExportMetricsServiceResponse'] +_EXPORTMETRICSPARTIALSUCCESS = DESCRIPTOR.message_types_by_name['ExportMetricsPartialSuccess'] ExportMetricsServiceRequest = _reflection.GeneratedProtocolMessageType('ExportMetricsServiceRequest', (_message.Message,), { 'DESCRIPTOR' : _EXPORTMETRICSSERVICEREQUEST, '__module__' : 'opentelemetry.proto.collector.metrics.v1.metrics_service_pb2' @@ -35,15 +36,24 @@ }) _sym_db.RegisterMessage(ExportMetricsServiceResponse) +ExportMetricsPartialSuccess = _reflection.GeneratedProtocolMessageType('ExportMetricsPartialSuccess', (_message.Message,), { + 'DESCRIPTOR' : _EXPORTMETRICSPARTIALSUCCESS, + '__module__' : 
'opentelemetry.proto.collector.metrics.v1.metrics_service_pb2' + # @@protoc_insertion_point(class_scope:opentelemetry.proto.collector.metrics.v1.ExportMetricsPartialSuccess) + }) +_sym_db.RegisterMessage(ExportMetricsPartialSuccess) + _METRICSSERVICE = DESCRIPTOR.services_by_name['MetricsService'] if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n+io.opentelemetry.proto.collector.metrics.v1B\023MetricsServiceProtoP\001Z3go.opentelemetry.io/proto/otlp/collector/metrics/v1' + DESCRIPTOR._serialized_options = b'\n+io.opentelemetry.proto.collector.metrics.v1B\023MetricsServiceProtoP\001Z3go.opentelemetry.io/proto/otlp/collector/metrics/v1\252\002(OpenTelemetry.Proto.Collector.Metrics.V1' _EXPORTMETRICSSERVICEREQUEST._serialized_start=154 _EXPORTMETRICSSERVICEREQUEST._serialized_end=258 _EXPORTMETRICSSERVICERESPONSE._serialized_start=260 - _EXPORTMETRICSSERVICERESPONSE._serialized_end=290 - _METRICSSERVICE._serialized_start=293 - _METRICSSERVICE._serialized_end=465 + _EXPORTMETRICSSERVICERESPONSE._serialized_end=386 + _EXPORTMETRICSPARTIALSUCCESS._serialized_start=388 + _EXPORTMETRICSPARTIALSUCCESS._serialized_end=470 + _METRICSSERVICE._serialized_start=473 + _METRICSSERVICE._serialized_end=645 # @@protoc_insertion_point(module_scope) diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.pyi index 1acc1de3f3..ffd750bdf2 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.pyi +++ b/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.pyi @@ -33,6 +33,59 @@ global___ExportMetricsServiceRequest = ExportMetricsServiceRequest class ExportMetricsServiceResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + PARTIAL_SUCCESS_FIELD_NUMBER: builtins.int + @property + def partial_success(self) -> global___ExportMetricsPartialSuccess: + """The details of a partially successful export request. + + If the request is only partially accepted + (i.e. when the server accepts only parts of the data and rejects the rest) + the server MUST initialize the `partial_success` field and MUST + set the `rejected_` with the number of items it rejected. + + Servers MAY also make use of the `partial_success` field to convey + warnings/suggestions to senders even when the request was fully accepted. + In such cases, the `rejected_` MUST have a value of `0` and + the `error_message` MUST be non-empty. + + A `partial_success` message with an empty value (rejected_ = 0 and + `error_message` = "") is equivalent to it not being set/present. Senders + SHOULD interpret it the same way as in the full success case. + """ + pass def __init__(self, + *, + partial_success : typing.Optional[global___ExportMetricsPartialSuccess] = ..., ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["partial_success",b"partial_success"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["partial_success",b"partial_success"]) -> None: ... global___ExportMetricsServiceResponse = ExportMetricsServiceResponse + +class ExportMetricsPartialSuccess(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + REJECTED_DATA_POINTS_FIELD_NUMBER: builtins.int + ERROR_MESSAGE_FIELD_NUMBER: builtins.int + rejected_data_points: builtins.int = ... 
+ """The number of rejected data points. + + A `rejected_` field holding a `0` value indicates that the + request was fully accepted. + """ + + error_message: typing.Text = ... + """A developer-facing human-readable message in English. It should be used + either to explain why the server rejected parts of the data during a partial + success or to convey warnings/suggestions during a full success. The message + should offer guidance on how users can address such issues. + + error_message is an optional field. An error_message with an empty value + is equivalent to it not being set. + """ + + def __init__(self, + *, + rejected_data_points : builtins.int = ..., + error_message : typing.Text = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["error_message",b"error_message","rejected_data_points",b"rejected_data_points"]) -> None: ... +global___ExportMetricsPartialSuccess = ExportMetricsPartialSuccess diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.py index d32dc61933..fff65da1b7 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.py +++ b/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.py @@ -15,12 +15,13 @@ from opentelemetry.proto.trace.v1 import trace_pb2 as opentelemetry_dot_proto_dot_trace_dot_v1_dot_trace__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n:opentelemetry/proto/collector/trace/v1/trace_service.proto\x12&opentelemetry.proto.collector.trace.v1\x1a(opentelemetry/proto/trace/v1/trace.proto\"`\n\x19\x45xportTraceServiceRequest\x12\x43\n\x0eresource_spans\x18\x01 \x03(\x0b\x32+.opentelemetry.proto.trace.v1.ResourceSpans\"\x1c\n\x1a\x45xportTraceServiceResponse2\xa2\x01\n\x0cTraceService\x12\x91\x01\n\x06\x45xport\x12\x41.opentelemetry.proto.collector.trace.v1.ExportTraceServiceRequest\x1a\x42.opentelemetry.proto.collector.trace.v1.ExportTraceServiceResponse\"\x00\x42s\n)io.opentelemetry.proto.collector.trace.v1B\x11TraceServiceProtoP\x01Z1go.opentelemetry.io/proto/otlp/collector/trace/v1b\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n:opentelemetry/proto/collector/trace/v1/trace_service.proto\x12&opentelemetry.proto.collector.trace.v1\x1a(opentelemetry/proto/trace/v1/trace.proto\"`\n\x19\x45xportTraceServiceRequest\x12\x43\n\x0eresource_spans\x18\x01 \x03(\x0b\x32+.opentelemetry.proto.trace.v1.ResourceSpans\"x\n\x1a\x45xportTraceServiceResponse\x12Z\n\x0fpartial_success\x18\x01 \x01(\x0b\x32\x41.opentelemetry.proto.collector.trace.v1.ExportTracePartialSuccess\"J\n\x19\x45xportTracePartialSuccess\x12\x16\n\x0erejected_spans\x18\x01 \x01(\x03\x12\x15\n\rerror_message\x18\x02 \x01(\t2\xa2\x01\n\x0cTraceService\x12\x91\x01\n\x06\x45xport\x12\x41.opentelemetry.proto.collector.trace.v1.ExportTraceServiceRequest\x1a\x42.opentelemetry.proto.collector.trace.v1.ExportTraceServiceResponse\"\x00\x42\x9c\x01\n)io.opentelemetry.proto.collector.trace.v1B\x11TraceServiceProtoP\x01Z1go.opentelemetry.io/proto/otlp/collector/trace/v1\xaa\x02&OpenTelemetry.Proto.Collector.Trace.V1b\x06proto3') _EXPORTTRACESERVICEREQUEST = DESCRIPTOR.message_types_by_name['ExportTraceServiceRequest'] _EXPORTTRACESERVICERESPONSE = DESCRIPTOR.message_types_by_name['ExportTraceServiceResponse'] +_EXPORTTRACEPARTIALSUCCESS = DESCRIPTOR.message_types_by_name['ExportTracePartialSuccess'] ExportTraceServiceRequest = 
_reflection.GeneratedProtocolMessageType('ExportTraceServiceRequest', (_message.Message,), { 'DESCRIPTOR' : _EXPORTTRACESERVICEREQUEST, '__module__' : 'opentelemetry.proto.collector.trace.v1.trace_service_pb2' @@ -35,15 +36,24 @@ }) _sym_db.RegisterMessage(ExportTraceServiceResponse) +ExportTracePartialSuccess = _reflection.GeneratedProtocolMessageType('ExportTracePartialSuccess', (_message.Message,), { + 'DESCRIPTOR' : _EXPORTTRACEPARTIALSUCCESS, + '__module__' : 'opentelemetry.proto.collector.trace.v1.trace_service_pb2' + # @@protoc_insertion_point(class_scope:opentelemetry.proto.collector.trace.v1.ExportTracePartialSuccess) + }) +_sym_db.RegisterMessage(ExportTracePartialSuccess) + _TRACESERVICE = DESCRIPTOR.services_by_name['TraceService'] if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n)io.opentelemetry.proto.collector.trace.v1B\021TraceServiceProtoP\001Z1go.opentelemetry.io/proto/otlp/collector/trace/v1' + DESCRIPTOR._serialized_options = b'\n)io.opentelemetry.proto.collector.trace.v1B\021TraceServiceProtoP\001Z1go.opentelemetry.io/proto/otlp/collector/trace/v1\252\002&OpenTelemetry.Proto.Collector.Trace.V1' _EXPORTTRACESERVICEREQUEST._serialized_start=144 _EXPORTTRACESERVICEREQUEST._serialized_end=240 _EXPORTTRACESERVICERESPONSE._serialized_start=242 - _EXPORTTRACESERVICERESPONSE._serialized_end=270 - _TRACESERVICE._serialized_start=273 - _TRACESERVICE._serialized_end=435 + _EXPORTTRACESERVICERESPONSE._serialized_end=362 + _EXPORTTRACEPARTIALSUCCESS._serialized_start=364 + _EXPORTTRACEPARTIALSUCCESS._serialized_end=438 + _TRACESERVICE._serialized_start=441 + _TRACESERVICE._serialized_end=603 # @@protoc_insertion_point(module_scope) diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.pyi index 7ed93e76de..4e2d064ee7 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.pyi +++ b/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.pyi @@ -33,6 +33,59 @@ global___ExportTraceServiceRequest = ExportTraceServiceRequest class ExportTraceServiceResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + PARTIAL_SUCCESS_FIELD_NUMBER: builtins.int + @property + def partial_success(self) -> global___ExportTracePartialSuccess: + """The details of a partially successful export request. + + If the request is only partially accepted + (i.e. when the server accepts only parts of the data and rejects the rest) + the server MUST initialize the `partial_success` field and MUST + set the `rejected_` with the number of items it rejected. + + Servers MAY also make use of the `partial_success` field to convey + warnings/suggestions to senders even when the request was fully accepted. + In such cases, the `rejected_` MUST have a value of `0` and + the `error_message` MUST be non-empty. + + A `partial_success` message with an empty value (rejected_ = 0 and + `error_message` = "") is equivalent to it not being set/present. Senders + SHOULD interpret it the same way as in the full success case. + """ + pass def __init__(self, + *, + partial_success : typing.Optional[global___ExportTracePartialSuccess] = ..., ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["partial_success",b"partial_success"]) -> builtins.bool: ... 
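# --- Illustrative usage sketch (not part of the regenerated stubs above) ---
# Shows how a caller might inspect the new partial_success field introduced in
# this release, using only names defined by this patch (ExportTraceServiceResponse,
# ExportTracePartialSuccess, rejected_spans, error_message). The function name
# and the print-based reporting are assumptions made for the example.
from opentelemetry.proto.collector.trace.v1 import trace_service_pb2

def report_partial_success(response: trace_service_pb2.ExportTraceServiceResponse) -> None:
    # HasField distinguishes "partial_success not set" from a message whose
    # fields are all default; an unset/empty message means full success.
    if response.HasField("partial_success"):
        partial = response.partial_success
        if partial.rejected_spans or partial.error_message:
            print(
                f"collector rejected {partial.rejected_spans} spans: "
                f"{partial.error_message}"
            )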
+ def ClearField(self, field_name: typing_extensions.Literal["partial_success",b"partial_success"]) -> None: ... global___ExportTraceServiceResponse = ExportTraceServiceResponse + +class ExportTracePartialSuccess(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + REJECTED_SPANS_FIELD_NUMBER: builtins.int + ERROR_MESSAGE_FIELD_NUMBER: builtins.int + rejected_spans: builtins.int = ... + """The number of rejected spans. + + A `rejected_` field holding a `0` value indicates that the + request was fully accepted. + """ + + error_message: typing.Text = ... + """A developer-facing human-readable message in English. It should be used + either to explain why the server rejected parts of the data during a partial + success or to convey warnings/suggestions during a full success. The message + should offer guidance on how users can address such issues. + + error_message is an optional field. An error_message with an empty value + is equivalent to it not being set. + """ + + def __init__(self, + *, + rejected_spans : builtins.int = ..., + error_message : typing.Text = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["error_message",b"error_message","rejected_spans",b"rejected_spans"]) -> None: ... +global___ExportTracePartialSuccess = ExportTracePartialSuccess diff --git a/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.py index a38431a589..bec37ab230 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.py +++ b/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.py @@ -14,7 +14,7 @@ -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n*opentelemetry/proto/common/v1/common.proto\x12\x1dopentelemetry.proto.common.v1\"\x8c\x02\n\x08\x41nyValue\x12\x16\n\x0cstring_value\x18\x01 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x02 \x01(\x08H\x00\x12\x13\n\tint_value\x18\x03 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x04 \x01(\x01H\x00\x12@\n\x0b\x61rray_value\x18\x05 \x01(\x0b\x32).opentelemetry.proto.common.v1.ArrayValueH\x00\x12\x43\n\x0ckvlist_value\x18\x06 \x01(\x0b\x32+.opentelemetry.proto.common.v1.KeyValueListH\x00\x12\x15\n\x0b\x62ytes_value\x18\x07 \x01(\x0cH\x00\x42\x07\n\x05value\"E\n\nArrayValue\x12\x37\n\x06values\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.AnyValue\"G\n\x0cKeyValueList\x12\x37\n\x06values\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\"O\n\x08KeyValue\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x36\n\x05value\x18\x02 \x01(\x0b\x32\'.opentelemetry.proto.common.v1.AnyValue\";\n\x16InstrumentationLibrary\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t:\x02\x18\x01\"5\n\x14InstrumentationScope\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\tB[\n io.opentelemetry.proto.common.v1B\x0b\x43ommonProtoP\x01Z(go.opentelemetry.io/proto/otlp/common/v1b\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n*opentelemetry/proto/common/v1/common.proto\x12\x1dopentelemetry.proto.common.v1\"\x8c\x02\n\x08\x41nyValue\x12\x16\n\x0cstring_value\x18\x01 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x02 \x01(\x08H\x00\x12\x13\n\tint_value\x18\x03 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x04 \x01(\x01H\x00\x12@\n\x0b\x61rray_value\x18\x05 \x01(\x0b\x32).opentelemetry.proto.common.v1.ArrayValueH\x00\x12\x43\n\x0ckvlist_value\x18\x06 
\x01(\x0b\x32+.opentelemetry.proto.common.v1.KeyValueListH\x00\x12\x15\n\x0b\x62ytes_value\x18\x07 \x01(\x0cH\x00\x42\x07\n\x05value\"E\n\nArrayValue\x12\x37\n\x06values\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.AnyValue\"G\n\x0cKeyValueList\x12\x37\n\x06values\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\"O\n\x08KeyValue\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x36\n\x05value\x18\x02 \x01(\x0b\x32\'.opentelemetry.proto.common.v1.AnyValue\"\x94\x01\n\x14InstrumentationScope\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12;\n\nattributes\x18\x03 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x04 \x01(\rB{\n io.opentelemetry.proto.common.v1B\x0b\x43ommonProtoP\x01Z(go.opentelemetry.io/proto/otlp/common/v1\xaa\x02\x1dOpenTelemetry.Proto.Common.V1b\x06proto3') @@ -22,7 +22,6 @@ _ARRAYVALUE = DESCRIPTOR.message_types_by_name['ArrayValue'] _KEYVALUELIST = DESCRIPTOR.message_types_by_name['KeyValueList'] _KEYVALUE = DESCRIPTOR.message_types_by_name['KeyValue'] -_INSTRUMENTATIONLIBRARY = DESCRIPTOR.message_types_by_name['InstrumentationLibrary'] _INSTRUMENTATIONSCOPE = DESCRIPTOR.message_types_by_name['InstrumentationScope'] AnyValue = _reflection.GeneratedProtocolMessageType('AnyValue', (_message.Message,), { 'DESCRIPTOR' : _ANYVALUE, @@ -52,13 +51,6 @@ }) _sym_db.RegisterMessage(KeyValue) -InstrumentationLibrary = _reflection.GeneratedProtocolMessageType('InstrumentationLibrary', (_message.Message,), { - 'DESCRIPTOR' : _INSTRUMENTATIONLIBRARY, - '__module__' : 'opentelemetry.proto.common.v1.common_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.common.v1.InstrumentationLibrary) - }) -_sym_db.RegisterMessage(InstrumentationLibrary) - InstrumentationScope = _reflection.GeneratedProtocolMessageType('InstrumentationScope', (_message.Message,), { 'DESCRIPTOR' : _INSTRUMENTATIONSCOPE, '__module__' : 'opentelemetry.proto.common.v1.common_pb2' @@ -69,9 +61,7 @@ if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n io.opentelemetry.proto.common.v1B\013CommonProtoP\001Z(go.opentelemetry.io/proto/otlp/common/v1' - _INSTRUMENTATIONLIBRARY._options = None - _INSTRUMENTATIONLIBRARY._serialized_options = b'\030\001' + DESCRIPTOR._serialized_options = b'\n io.opentelemetry.proto.common.v1B\013CommonProtoP\001Z(go.opentelemetry.io/proto/otlp/common/v1\252\002\035OpenTelemetry.Proto.Common.V1' _ANYVALUE._serialized_start=78 _ANYVALUE._serialized_end=346 _ARRAYVALUE._serialized_start=348 @@ -80,8 +70,6 @@ _KEYVALUELIST._serialized_end=490 _KEYVALUE._serialized_start=492 _KEYVALUE._serialized_end=571 - _INSTRUMENTATIONLIBRARY._serialized_start=573 - _INSTRUMENTATIONLIBRARY._serialized_end=632 - _INSTRUMENTATIONSCOPE._serialized_start=634 - _INSTRUMENTATIONSCOPE._serialized_end=687 + _INSTRUMENTATIONSCOPE._serialized_start=574 + _INSTRUMENTATIONSCOPE._serialized_end=722 # @@protoc_insertion_point(module_scope) diff --git a/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.pyi index 0d1ff4f098..304feec5ab 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.pyi +++ b/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.pyi @@ -108,28 +108,6 @@ class KeyValue(google.protobuf.message.Message): def ClearField(self, field_name: typing_extensions.Literal["key",b"key","value",b"value"]) -> None: ... 
global___KeyValue = KeyValue -class InstrumentationLibrary(google.protobuf.message.Message): - """InstrumentationLibrary is a message representing the instrumentation library information - such as the fully qualified name and version. - InstrumentationLibrary is wire-compatible with InstrumentationScope for binary - Protobuf format. - This message is deprecated and will be removed on June 15, 2022. - """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... - NAME_FIELD_NUMBER: builtins.int - VERSION_FIELD_NUMBER: builtins.int - name: typing.Text = ... - """An empty instrumentation library name means the name is unknown.""" - - version: typing.Text = ... - def __init__(self, - *, - name : typing.Text = ..., - version : typing.Text = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["name",b"name","version",b"version"]) -> None: ... -global___InstrumentationLibrary = InstrumentationLibrary - class InstrumentationScope(google.protobuf.message.Message): """InstrumentationScope is a message representing the instrumentation scope information such as the fully qualified name and version. @@ -137,14 +115,26 @@ class InstrumentationScope(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... NAME_FIELD_NUMBER: builtins.int VERSION_FIELD_NUMBER: builtins.int + ATTRIBUTES_FIELD_NUMBER: builtins.int + DROPPED_ATTRIBUTES_COUNT_FIELD_NUMBER: builtins.int name: typing.Text = ... """An empty instrumentation scope name means the name is unknown.""" version: typing.Text = ... + @property + def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___KeyValue]: + """Additional attributes that describe the scope. [Optional]. + Attribute keys MUST be unique (it is not allowed to have more than one + attribute with the same key). + """ + pass + dropped_attributes_count: builtins.int = ... def __init__(self, *, name : typing.Text = ..., version : typing.Text = ..., + attributes : typing.Optional[typing.Iterable[global___KeyValue]] = ..., + dropped_attributes_count : builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["name",b"name","version",b"version"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["attributes",b"attributes","dropped_attributes_count",b"dropped_attributes_count","name",b"name","version",b"version"]) -> None: ... 
global___InstrumentationScope = InstrumentationScope diff --git a/opentelemetry-proto/src/opentelemetry/proto/logs/v1/logs_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/logs/v1/logs_pb2.py index 3967fa967b..90b7187155 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/logs/v1/logs_pb2.py +++ b/opentelemetry-proto/src/opentelemetry/proto/logs/v1/logs_pb2.py @@ -17,7 +17,7 @@ from opentelemetry.proto.resource.v1 import resource_pb2 as opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&opentelemetry/proto/logs/v1/logs.proto\x12\x1bopentelemetry.proto.logs.v1\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/proto/resource/v1/resource.proto\"L\n\x08LogsData\x12@\n\rresource_logs\x18\x01 \x03(\x0b\x32).opentelemetry.proto.logs.v1.ResourceLogs\"\xff\x01\n\x0cResourceLogs\x12;\n\x08resource\x18\x01 \x01(\x0b\x32).opentelemetry.proto.resource.v1.Resource\x12:\n\nscope_logs\x18\x02 \x03(\x0b\x32&.opentelemetry.proto.logs.v1.ScopeLogs\x12\x62\n\x1cinstrumentation_library_logs\x18\xe8\x07 \x03(\x0b\x32\x37.opentelemetry.proto.logs.v1.InstrumentationLibraryLogsB\x02\x18\x01\x12\x12\n\nschema_url\x18\x03 \x01(\t\"\xa0\x01\n\tScopeLogs\x12\x42\n\x05scope\x18\x01 \x01(\x0b\x32\x33.opentelemetry.proto.common.v1.InstrumentationScope\x12;\n\x0blog_records\x18\x02 \x03(\x0b\x32&.opentelemetry.proto.logs.v1.LogRecord\x12\x12\n\nschema_url\x18\x03 \x01(\t\"\xc9\x01\n\x1aInstrumentationLibraryLogs\x12V\n\x17instrumentation_library\x18\x01 \x01(\x0b\x32\x35.opentelemetry.proto.common.v1.InstrumentationLibrary\x12;\n\x0blog_records\x18\x02 \x03(\x0b\x32&.opentelemetry.proto.logs.v1.LogRecord\x12\x12\n\nschema_url\x18\x03 \x01(\t:\x02\x18\x01\"\xef\x02\n\tLogRecord\x12\x16\n\x0etime_unix_nano\x18\x01 \x01(\x06\x12\x1f\n\x17observed_time_unix_nano\x18\x0b \x01(\x06\x12\x44\n\x0fseverity_number\x18\x02 \x01(\x0e\x32+.opentelemetry.proto.logs.v1.SeverityNumber\x12\x15\n\rseverity_text\x18\x03 \x01(\t\x12\x35\n\x04\x62ody\x18\x05 \x01(\x0b\x32\'.opentelemetry.proto.common.v1.AnyValue\x12;\n\nattributes\x18\x06 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x07 \x01(\r\x12\r\n\x05\x66lags\x18\x08 \x01(\x07\x12\x10\n\x08trace_id\x18\t \x01(\x0c\x12\x0f\n\x07span_id\x18\n \x01(\x0cJ\x04\x08\x04\x10\x05*\xc3\x05\n\x0eSeverityNumber\x12\x1f\n\x1bSEVERITY_NUMBER_UNSPECIFIED\x10\x00\x12\x19\n\x15SEVERITY_NUMBER_TRACE\x10\x01\x12\x1a\n\x16SEVERITY_NUMBER_TRACE2\x10\x02\x12\x1a\n\x16SEVERITY_NUMBER_TRACE3\x10\x03\x12\x1a\n\x16SEVERITY_NUMBER_TRACE4\x10\x04\x12\x19\n\x15SEVERITY_NUMBER_DEBUG\x10\x05\x12\x1a\n\x16SEVERITY_NUMBER_DEBUG2\x10\x06\x12\x1a\n\x16SEVERITY_NUMBER_DEBUG3\x10\x07\x12\x1a\n\x16SEVERITY_NUMBER_DEBUG4\x10\x08\x12\x18\n\x14SEVERITY_NUMBER_INFO\x10\t\x12\x19\n\x15SEVERITY_NUMBER_INFO2\x10\n\x12\x19\n\x15SEVERITY_NUMBER_INFO3\x10\x0b\x12\x19\n\x15SEVERITY_NUMBER_INFO4\x10\x0c\x12\x18\n\x14SEVERITY_NUMBER_WARN\x10\r\x12\x19\n\x15SEVERITY_NUMBER_WARN2\x10\x0e\x12\x19\n\x15SEVERITY_NUMBER_WARN3\x10\x0f\x12\x19\n\x15SEVERITY_NUMBER_WARN4\x10\x10\x12\x19\n\x15SEVERITY_NUMBER_ERROR\x10\x11\x12\x1a\n\x16SEVERITY_NUMBER_ERROR2\x10\x12\x12\x1a\n\x16SEVERITY_NUMBER_ERROR3\x10\x13\x12\x1a\n\x16SEVERITY_NUMBER_ERROR4\x10\x14\x12\x19\n\x15SEVERITY_NUMBER_FATAL\x10\x15\x12\x1a\n\x16SEVERITY_NUMBER_FATAL2\x10\x16\x12\x1a\n\x16SEVERITY_NUMBER_FATAL3\x10\x17\x12\x1a\n\x16SEVERITY_NUMBER_FATAL4\x10\x18*X\n\x0eLogRecordFlags\x12\x1f\n\x1bLOG_RECORD_FLAG_UNSPECIFIED\x10\x00\x12%\n 
LOG_RECORD_FLAG_TRACE_FLAGS_MASK\x10\xff\x01\x42U\n\x1eio.opentelemetry.proto.logs.v1B\tLogsProtoP\x01Z&go.opentelemetry.io/proto/otlp/logs/v1b\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&opentelemetry/proto/logs/v1/logs.proto\x12\x1bopentelemetry.proto.logs.v1\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/proto/resource/v1/resource.proto\"L\n\x08LogsData\x12@\n\rresource_logs\x18\x01 \x03(\x0b\x32).opentelemetry.proto.logs.v1.ResourceLogs\"\xa3\x01\n\x0cResourceLogs\x12;\n\x08resource\x18\x01 \x01(\x0b\x32).opentelemetry.proto.resource.v1.Resource\x12:\n\nscope_logs\x18\x02 \x03(\x0b\x32&.opentelemetry.proto.logs.v1.ScopeLogs\x12\x12\n\nschema_url\x18\x03 \x01(\tJ\x06\x08\xe8\x07\x10\xe9\x07\"\xa0\x01\n\tScopeLogs\x12\x42\n\x05scope\x18\x01 \x01(\x0b\x32\x33.opentelemetry.proto.common.v1.InstrumentationScope\x12;\n\x0blog_records\x18\x02 \x03(\x0b\x32&.opentelemetry.proto.logs.v1.LogRecord\x12\x12\n\nschema_url\x18\x03 \x01(\t\"\xef\x02\n\tLogRecord\x12\x16\n\x0etime_unix_nano\x18\x01 \x01(\x06\x12\x1f\n\x17observed_time_unix_nano\x18\x0b \x01(\x06\x12\x44\n\x0fseverity_number\x18\x02 \x01(\x0e\x32+.opentelemetry.proto.logs.v1.SeverityNumber\x12\x15\n\rseverity_text\x18\x03 \x01(\t\x12\x35\n\x04\x62ody\x18\x05 \x01(\x0b\x32\'.opentelemetry.proto.common.v1.AnyValue\x12;\n\nattributes\x18\x06 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x07 \x01(\r\x12\r\n\x05\x66lags\x18\x08 \x01(\x07\x12\x10\n\x08trace_id\x18\t \x01(\x0c\x12\x0f\n\x07span_id\x18\n \x01(\x0cJ\x04\x08\x04\x10\x05*\xc3\x05\n\x0eSeverityNumber\x12\x1f\n\x1bSEVERITY_NUMBER_UNSPECIFIED\x10\x00\x12\x19\n\x15SEVERITY_NUMBER_TRACE\x10\x01\x12\x1a\n\x16SEVERITY_NUMBER_TRACE2\x10\x02\x12\x1a\n\x16SEVERITY_NUMBER_TRACE3\x10\x03\x12\x1a\n\x16SEVERITY_NUMBER_TRACE4\x10\x04\x12\x19\n\x15SEVERITY_NUMBER_DEBUG\x10\x05\x12\x1a\n\x16SEVERITY_NUMBER_DEBUG2\x10\x06\x12\x1a\n\x16SEVERITY_NUMBER_DEBUG3\x10\x07\x12\x1a\n\x16SEVERITY_NUMBER_DEBUG4\x10\x08\x12\x18\n\x14SEVERITY_NUMBER_INFO\x10\t\x12\x19\n\x15SEVERITY_NUMBER_INFO2\x10\n\x12\x19\n\x15SEVERITY_NUMBER_INFO3\x10\x0b\x12\x19\n\x15SEVERITY_NUMBER_INFO4\x10\x0c\x12\x18\n\x14SEVERITY_NUMBER_WARN\x10\r\x12\x19\n\x15SEVERITY_NUMBER_WARN2\x10\x0e\x12\x19\n\x15SEVERITY_NUMBER_WARN3\x10\x0f\x12\x19\n\x15SEVERITY_NUMBER_WARN4\x10\x10\x12\x19\n\x15SEVERITY_NUMBER_ERROR\x10\x11\x12\x1a\n\x16SEVERITY_NUMBER_ERROR2\x10\x12\x12\x1a\n\x16SEVERITY_NUMBER_ERROR3\x10\x13\x12\x1a\n\x16SEVERITY_NUMBER_ERROR4\x10\x14\x12\x19\n\x15SEVERITY_NUMBER_FATAL\x10\x15\x12\x1a\n\x16SEVERITY_NUMBER_FATAL2\x10\x16\x12\x1a\n\x16SEVERITY_NUMBER_FATAL3\x10\x17\x12\x1a\n\x16SEVERITY_NUMBER_FATAL4\x10\x18*Y\n\x0eLogRecordFlags\x12\x1f\n\x1bLOG_RECORD_FLAGS_DO_NOT_USE\x10\x00\x12&\n!LOG_RECORD_FLAGS_TRACE_FLAGS_MASK\x10\xff\x01\x42s\n\x1eio.opentelemetry.proto.logs.v1B\tLogsProtoP\x01Z&go.opentelemetry.io/proto/otlp/logs/v1\xaa\x02\x1bOpenTelemetry.Proto.Logs.V1b\x06proto3') _SEVERITYNUMBER = DESCRIPTOR.enum_types_by_name['SeverityNumber'] SeverityNumber = enum_type_wrapper.EnumTypeWrapper(_SEVERITYNUMBER) @@ -48,14 +48,13 @@ SEVERITY_NUMBER_FATAL2 = 22 SEVERITY_NUMBER_FATAL3 = 23 SEVERITY_NUMBER_FATAL4 = 24 -LOG_RECORD_FLAG_UNSPECIFIED = 0 -LOG_RECORD_FLAG_TRACE_FLAGS_MASK = 255 +LOG_RECORD_FLAGS_DO_NOT_USE = 0 +LOG_RECORD_FLAGS_TRACE_FLAGS_MASK = 255 _LOGSDATA = DESCRIPTOR.message_types_by_name['LogsData'] _RESOURCELOGS = DESCRIPTOR.message_types_by_name['ResourceLogs'] _SCOPELOGS = 
DESCRIPTOR.message_types_by_name['ScopeLogs'] -_INSTRUMENTATIONLIBRARYLOGS = DESCRIPTOR.message_types_by_name['InstrumentationLibraryLogs'] _LOGRECORD = DESCRIPTOR.message_types_by_name['LogRecord'] LogsData = _reflection.GeneratedProtocolMessageType('LogsData', (_message.Message,), { 'DESCRIPTOR' : _LOGSDATA, @@ -78,13 +77,6 @@ }) _sym_db.RegisterMessage(ScopeLogs) -InstrumentationLibraryLogs = _reflection.GeneratedProtocolMessageType('InstrumentationLibraryLogs', (_message.Message,), { - 'DESCRIPTOR' : _INSTRUMENTATIONLIBRARYLOGS, - '__module__' : 'opentelemetry.proto.logs.v1.logs_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.logs.v1.InstrumentationLibraryLogs) - }) -_sym_db.RegisterMessage(InstrumentationLibraryLogs) - LogRecord = _reflection.GeneratedProtocolMessageType('LogRecord', (_message.Message,), { 'DESCRIPTOR' : _LOGRECORD, '__module__' : 'opentelemetry.proto.logs.v1.logs_pb2' @@ -95,23 +87,17 @@ if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\036io.opentelemetry.proto.logs.v1B\tLogsProtoP\001Z&go.opentelemetry.io/proto/otlp/logs/v1' - _RESOURCELOGS.fields_by_name['instrumentation_library_logs']._options = None - _RESOURCELOGS.fields_by_name['instrumentation_library_logs']._serialized_options = b'\030\001' - _INSTRUMENTATIONLIBRARYLOGS._options = None - _INSTRUMENTATIONLIBRARYLOGS._serialized_options = b'\030\001' - _SEVERITYNUMBER._serialized_start=1237 - _SEVERITYNUMBER._serialized_end=1944 - _LOGRECORDFLAGS._serialized_start=1946 - _LOGRECORDFLAGS._serialized_end=2034 + DESCRIPTOR._serialized_options = b'\n\036io.opentelemetry.proto.logs.v1B\tLogsProtoP\001Z&go.opentelemetry.io/proto/otlp/logs/v1\252\002\033OpenTelemetry.Proto.Logs.V1' + _SEVERITYNUMBER._serialized_start=941 + _SEVERITYNUMBER._serialized_end=1648 + _LOGRECORDFLAGS._serialized_start=1650 + _LOGRECORDFLAGS._serialized_end=1739 _LOGSDATA._serialized_start=163 _LOGSDATA._serialized_end=239 _RESOURCELOGS._serialized_start=242 - _RESOURCELOGS._serialized_end=497 - _SCOPELOGS._serialized_start=500 - _SCOPELOGS._serialized_end=660 - _INSTRUMENTATIONLIBRARYLOGS._serialized_start=663 - _INSTRUMENTATIONLIBRARYLOGS._serialized_end=864 - _LOGRECORD._serialized_start=867 - _LOGRECORD._serialized_end=1234 + _RESOURCELOGS._serialized_end=405 + _SCOPELOGS._serialized_start=408 + _SCOPELOGS._serialized_end=568 + _LOGRECORD._serialized_start=571 + _LOGRECORD._serialized_end=938 # @@protoc_insertion_point(module_scope) diff --git a/opentelemetry-proto/src/opentelemetry/proto/logs/v1/logs_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/logs/v1/logs_pb2.pyi index db4da2afd1..98b8974390 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/logs/v1/logs_pb2.pyi +++ b/opentelemetry-proto/src/opentelemetry/proto/logs/v1/logs_pb2.pyi @@ -80,17 +80,34 @@ global___SeverityNumber = SeverityNumber class LogRecordFlags(_LogRecordFlags, metaclass=_LogRecordFlagsEnumTypeWrapper): - """Masks for LogRecord.flags field.""" + """LogRecordFlags is defined as a protobuf 'uint32' type and is to be used as + bit-fields. Each non-zero value defined in this enum is a bit-mask. + To extract the bit-field, for example, use an expression like: + + (logRecord.flags & LOG_RECORD_FLAGS_TRACE_FLAGS_MASK) + """ pass class _LogRecordFlags: V = typing.NewType('V', builtins.int) class _LogRecordFlagsEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_LogRecordFlags.V], builtins.type): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor = ... 
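# --- Illustrative usage sketch (not part of the regenerated stubs above) ---
# Demonstrates the bit-mask usage described in the LogRecordFlags comment: the
# renamed LOG_RECORD_FLAGS_TRACE_FLAGS_MASK constant extracts the 8-bit W3C
# trace flags from LogRecord.flags. The sample flags value is an assumption.
from opentelemetry.proto.logs.v1 import logs_pb2

record = logs_pb2.LogRecord(flags=0x101)  # upper 24 bits are reserved and must be masked off
trace_flags = record.flags & logs_pb2.LOG_RECORD_FLAGS_TRACE_FLAGS_MASK
assert trace_flags == 0x01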
- LOG_RECORD_FLAG_UNSPECIFIED = LogRecordFlags.V(0) - LOG_RECORD_FLAG_TRACE_FLAGS_MASK = LogRecordFlags.V(255) + LOG_RECORD_FLAGS_DO_NOT_USE = LogRecordFlags.V(0) + """The zero value for the enum. Should not be used for comparisons. + Instead use bitwise "and" with the appropriate mask as shown above. + """ + + LOG_RECORD_FLAGS_TRACE_FLAGS_MASK = LogRecordFlags.V(255) + """Bits 0-7 are used for trace flags.""" + + +LOG_RECORD_FLAGS_DO_NOT_USE = LogRecordFlags.V(0) +"""The zero value for the enum. Should not be used for comparisons. +Instead use bitwise "and" with the appropriate mask as shown above. +""" + +LOG_RECORD_FLAGS_TRACE_FLAGS_MASK = LogRecordFlags.V(255) +"""Bits 0-7 are used for trace flags.""" -LOG_RECORD_FLAG_UNSPECIFIED = LogRecordFlags.V(0) -LOG_RECORD_FLAG_TRACE_FLAGS_MASK = LogRecordFlags.V(255) global___LogRecordFlags = LogRecordFlags @@ -129,7 +146,6 @@ class ResourceLogs(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... RESOURCE_FIELD_NUMBER: builtins.int SCOPE_LOGS_FIELD_NUMBER: builtins.int - INSTRUMENTATION_LIBRARY_LOGS_FIELD_NUMBER: builtins.int SCHEMA_URL_FIELD_NUMBER: builtins.int @property def resource(self) -> opentelemetry.proto.resource.v1.resource_pb2.Resource: @@ -141,37 +157,6 @@ class ResourceLogs(google.protobuf.message.Message): def scope_logs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ScopeLogs]: """A list of ScopeLogs that originate from a resource.""" pass - @property - def instrumentation_library_logs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___InstrumentationLibraryLogs]: - """A list of InstrumentationLibraryLogs that originate from a resource. - This field is deprecated and will be removed after grace period expires on June 15, 2022. - - During the grace period the following rules SHOULD be followed: - - For Binary Protobufs - ==================== - Binary Protobuf senders SHOULD NOT set instrumentation_library_logs. Instead - scope_logs SHOULD be set. - - Binary Protobuf receivers SHOULD check if instrumentation_library_logs is set - and scope_logs is not set then the value in instrumentation_library_logs - SHOULD be used instead by converting InstrumentationLibraryLogs into ScopeLogs. - If scope_logs is set then instrumentation_library_logs SHOULD be ignored. - - For JSON - ======== - JSON senders that set instrumentation_library_logs field MAY also set - scope_logs to carry the same logs, essentially double-publishing the same data. - Such double-publishing MAY be controlled by a user-settable option. - If double-publishing is not used then the senders SHOULD set scope_logs and - SHOULD NOT set instrumentation_library_logs. - - JSON receivers SHOULD check if instrumentation_library_logs is set and - scope_logs is not set then the value in instrumentation_library_logs - SHOULD be used instead by converting InstrumentationLibraryLogs into ScopeLogs. - If scope_logs is set then instrumentation_library_logs field SHOULD be ignored. - """ - pass schema_url: typing.Text = ... """This schema_url applies to the data in the "resource" field. It does not apply to the data in the "scope_logs" field which have their own schema_url field. 
@@ -181,11 +166,10 @@ class ResourceLogs(google.protobuf.message.Message): *, resource : typing.Optional[opentelemetry.proto.resource.v1.resource_pb2.Resource] = ..., scope_logs : typing.Optional[typing.Iterable[global___ScopeLogs]] = ..., - instrumentation_library_logs : typing.Optional[typing.Iterable[global___InstrumentationLibraryLogs]] = ..., schema_url : typing.Text = ..., ) -> None: ... def HasField(self, field_name: typing_extensions.Literal["resource",b"resource"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["instrumentation_library_logs",b"instrumentation_library_logs","resource",b"resource","schema_url",b"schema_url","scope_logs",b"scope_logs"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["resource",b"resource","schema_url",b"schema_url","scope_logs",b"scope_logs"]) -> None: ... global___ResourceLogs = ResourceLogs class ScopeLogs(google.protobuf.message.Message): @@ -218,40 +202,6 @@ class ScopeLogs(google.protobuf.message.Message): def ClearField(self, field_name: typing_extensions.Literal["log_records",b"log_records","schema_url",b"schema_url","scope",b"scope"]) -> None: ... global___ScopeLogs = ScopeLogs -class InstrumentationLibraryLogs(google.protobuf.message.Message): - """A collection of Logs produced by an InstrumentationLibrary. - InstrumentationLibraryLogs is wire-compatible with ScopeLogs for binary - Protobuf format. - This message is deprecated and will be removed on June 15, 2022. - """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... - INSTRUMENTATION_LIBRARY_FIELD_NUMBER: builtins.int - LOG_RECORDS_FIELD_NUMBER: builtins.int - SCHEMA_URL_FIELD_NUMBER: builtins.int - @property - def instrumentation_library(self) -> opentelemetry.proto.common.v1.common_pb2.InstrumentationLibrary: - """The instrumentation library information for the logs in this message. - Semantically when InstrumentationLibrary isn't set, it is equivalent with - an empty instrumentation library name (unknown). - """ - pass - @property - def log_records(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___LogRecord]: - """A list of logs that originate from an instrumentation library.""" - pass - schema_url: typing.Text = ... - """This schema_url applies to all logs in the "logs" field.""" - - def __init__(self, - *, - instrumentation_library : typing.Optional[opentelemetry.proto.common.v1.common_pb2.InstrumentationLibrary] = ..., - log_records : typing.Optional[typing.Iterable[global___LogRecord]] = ..., - schema_url : typing.Text = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["instrumentation_library",b"instrumentation_library"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["instrumentation_library",b"instrumentation_library","log_records",b"log_records","schema_url",b"schema_url"]) -> None: ... -global___InstrumentationLibraryLogs = InstrumentationLibraryLogs - class LogRecord(google.protobuf.message.Message): """A log record according to OpenTelemetry Log Data Model: https://github.com/open-telemetry/oteps/blob/main/text/logs/0097-log-data-model.md @@ -321,21 +271,36 @@ class LogRecord(google.protobuf.message.Message): defined in W3C Trace Context specification. 24 most significant bits are reserved and must be set to 0. Readers must not assume that 24 most significant bits will be zero and must correctly mask the bits when reading 8-bit trace flag (use - flags & TRACE_FLAGS_MASK). [Optional]. 
+ flags & LOG_RECORD_FLAGS_TRACE_FLAGS_MASK). [Optional]. """ trace_id: builtins.bytes = ... """A unique identifier for a trace. All logs from the same trace share - the same `trace_id`. The ID is a 16-byte array. An ID with all zeroes - is considered invalid. Can be set for logs that are part of request processing - and have an assigned trace id. [Optional]. + the same `trace_id`. The ID is a 16-byte array. An ID with all zeroes OR + of length other than 16 bytes is considered invalid (empty string in OTLP/JSON + is zero-length and thus is also invalid). + + This field is optional. + + The receivers SHOULD assume that the log record is not associated with a + trace if any of the following is true: + - the field is not present, + - the field contains an invalid value. """ span_id: builtins.bytes = ... """A unique identifier for a span within a trace, assigned when the span - is created. The ID is an 8-byte array. An ID with all zeroes is considered - invalid. Can be set for logs that are part of a particular processing span. - If span_id is present trace_id SHOULD be also present. [Optional]. + is created. The ID is an 8-byte array. An ID with all zeroes OR of length + other than 8 bytes is considered invalid (empty string in OTLP/JSON + is zero-length and thus is also invalid). + + This field is optional. If the sender specifies a valid span_id then it SHOULD also + specify a valid trace_id. + + The receivers SHOULD assume that the log record is not associated with a + span if any of the following is true: + - the field is not present, + - the field contains an invalid value. """ def __init__(self, diff --git a/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.py index e94d087a45..4b938c2146 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.py +++ b/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.py @@ -17,7 +17,7 @@ from opentelemetry.proto.resource.v1 import resource_pb2 as opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n,opentelemetry/proto/metrics/v1/metrics.proto\x12\x1eopentelemetry.proto.metrics.v1\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/proto/resource/v1/resource.proto\"X\n\x0bMetricsData\x12I\n\x10resource_metrics\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.ResourceMetrics\"\x94\x02\n\x0fResourceMetrics\x12;\n\x08resource\x18\x01 \x01(\x0b\x32).opentelemetry.proto.resource.v1.Resource\x12\x43\n\rscope_metrics\x18\x02 \x03(\x0b\x32,.opentelemetry.proto.metrics.v1.ScopeMetrics\x12k\n\x1finstrumentation_library_metrics\x18\xe8\x07 \x03(\x0b\x32=.opentelemetry.proto.metrics.v1.InstrumentationLibraryMetricsB\x02\x18\x01\x12\x12\n\nschema_url\x18\x03 \x01(\t\"\x9f\x01\n\x0cScopeMetrics\x12\x42\n\x05scope\x18\x01 \x01(\x0b\x32\x33.opentelemetry.proto.common.v1.InstrumentationScope\x12\x37\n\x07metrics\x18\x02 \x03(\x0b\x32&.opentelemetry.proto.metrics.v1.Metric\x12\x12\n\nschema_url\x18\x03 \x01(\t\"\xc8\x01\n\x1dInstrumentationLibraryMetrics\x12V\n\x17instrumentation_library\x18\x01 \x01(\x0b\x32\x35.opentelemetry.proto.common.v1.InstrumentationLibrary\x12\x37\n\x07metrics\x18\x02 \x03(\x0b\x32&.opentelemetry.proto.metrics.v1.Metric\x12\x12\n\nschema_url\x18\x03 \x01(\t:\x02\x18\x01\"\x92\x03\n\x06Metric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0c\n\x04unit\x18\x03 
\x01(\t\x12\x36\n\x05gauge\x18\x05 \x01(\x0b\x32%.opentelemetry.proto.metrics.v1.GaugeH\x00\x12\x32\n\x03sum\x18\x07 \x01(\x0b\x32#.opentelemetry.proto.metrics.v1.SumH\x00\x12>\n\thistogram\x18\t \x01(\x0b\x32).opentelemetry.proto.metrics.v1.HistogramH\x00\x12U\n\x15\x65xponential_histogram\x18\n \x01(\x0b\x32\x34.opentelemetry.proto.metrics.v1.ExponentialHistogramH\x00\x12:\n\x07summary\x18\x0b \x01(\x0b\x32\'.opentelemetry.proto.metrics.v1.SummaryH\x00\x42\x06\n\x04\x64\x61taJ\x04\x08\x04\x10\x05J\x04\x08\x06\x10\x07J\x04\x08\x08\x10\t\"M\n\x05Gauge\x12\x44\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.NumberDataPoint\"\xba\x01\n\x03Sum\x12\x44\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.NumberDataPoint\x12W\n\x17\x61ggregation_temporality\x18\x02 \x01(\x0e\x32\x36.opentelemetry.proto.metrics.v1.AggregationTemporality\x12\x14\n\x0cis_monotonic\x18\x03 \x01(\x08\"\xad\x01\n\tHistogram\x12G\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32\x32.opentelemetry.proto.metrics.v1.HistogramDataPoint\x12W\n\x17\x61ggregation_temporality\x18\x02 \x01(\x0e\x32\x36.opentelemetry.proto.metrics.v1.AggregationTemporality\"\xc3\x01\n\x14\x45xponentialHistogram\x12R\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32=.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint\x12W\n\x17\x61ggregation_temporality\x18\x02 \x01(\x0e\x32\x36.opentelemetry.proto.metrics.v1.AggregationTemporality\"P\n\x07Summary\x12\x45\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32\x30.opentelemetry.proto.metrics.v1.SummaryDataPoint\"\x86\x02\n\x0fNumberDataPoint\x12;\n\nattributes\x18\x07 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\x13\n\tas_double\x18\x04 \x01(\x01H\x00\x12\x10\n\x06\x61s_int\x18\x06 \x01(\x10H\x00\x12;\n\texemplars\x18\x05 \x03(\x0b\x32(.opentelemetry.proto.metrics.v1.Exemplar\x12\r\n\x05\x66lags\x18\x08 \x01(\rB\x07\n\x05valueJ\x04\x08\x01\x10\x02\"\xe6\x02\n\x12HistogramDataPoint\x12;\n\nattributes\x18\t \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\r\n\x05\x63ount\x18\x04 \x01(\x06\x12\x10\n\x03sum\x18\x05 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\rbucket_counts\x18\x06 \x03(\x06\x12\x17\n\x0f\x65xplicit_bounds\x18\x07 \x03(\x01\x12;\n\texemplars\x18\x08 \x03(\x0b\x32(.opentelemetry.proto.metrics.v1.Exemplar\x12\r\n\x05\x66lags\x18\n \x01(\r\x12\x10\n\x03min\x18\x0b \x01(\x01H\x01\x88\x01\x01\x12\x10\n\x03max\x18\x0c \x01(\x01H\x02\x88\x01\x01\x42\x06\n\x04_sumB\x06\n\x04_minB\x06\n\x04_maxJ\x04\x08\x01\x10\x02\"\xb5\x04\n\x1d\x45xponentialHistogramDataPoint\x12;\n\nattributes\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\r\n\x05\x63ount\x18\x04 \x01(\x06\x12\x0b\n\x03sum\x18\x05 \x01(\x01\x12\r\n\x05scale\x18\x06 \x01(\x11\x12\x12\n\nzero_count\x18\x07 \x01(\x06\x12W\n\x08positive\x18\x08 \x01(\x0b\x32\x45.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets\x12W\n\x08negative\x18\t \x01(\x0b\x32\x45.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets\x12\r\n\x05\x66lags\x18\n \x01(\r\x12;\n\texemplars\x18\x0b \x03(\x0b\x32(.opentelemetry.proto.metrics.v1.Exemplar\x12\x10\n\x03min\x18\x0c \x01(\x01H\x00\x88\x01\x01\x12\x10\n\x03max\x18\r 
\x01(\x01H\x01\x88\x01\x01\x1a\x30\n\x07\x42uckets\x12\x0e\n\x06offset\x18\x01 \x01(\x11\x12\x15\n\rbucket_counts\x18\x02 \x03(\x04\x42\x06\n\x04_minB\x06\n\x04_max\"\xc5\x02\n\x10SummaryDataPoint\x12;\n\nattributes\x18\x07 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\r\n\x05\x63ount\x18\x04 \x01(\x06\x12\x0b\n\x03sum\x18\x05 \x01(\x01\x12Y\n\x0fquantile_values\x18\x06 \x03(\x0b\x32@.opentelemetry.proto.metrics.v1.SummaryDataPoint.ValueAtQuantile\x12\r\n\x05\x66lags\x18\x08 \x01(\r\x1a\x32\n\x0fValueAtQuantile\x12\x10\n\x08quantile\x18\x01 \x01(\x01\x12\r\n\x05value\x18\x02 \x01(\x01J\x04\x08\x01\x10\x02\"\xc1\x01\n\x08\x45xemplar\x12\x44\n\x13\x66iltered_attributes\x18\x07 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x16\n\x0etime_unix_nano\x18\x02 \x01(\x06\x12\x13\n\tas_double\x18\x03 \x01(\x01H\x00\x12\x10\n\x06\x61s_int\x18\x06 \x01(\x10H\x00\x12\x0f\n\x07span_id\x18\x04 \x01(\x0c\x12\x10\n\x08trace_id\x18\x05 \x01(\x0c\x42\x07\n\x05valueJ\x04\x08\x01\x10\x02*\x8c\x01\n\x16\x41ggregationTemporality\x12\'\n#AGGREGATION_TEMPORALITY_UNSPECIFIED\x10\x00\x12!\n\x1d\x41GGREGATION_TEMPORALITY_DELTA\x10\x01\x12&\n\"AGGREGATION_TEMPORALITY_CUMULATIVE\x10\x02*;\n\x0e\x44\x61taPointFlags\x12\r\n\tFLAG_NONE\x10\x00\x12\x1a\n\x16\x46LAG_NO_RECORDED_VALUE\x10\x01\x42^\n!io.opentelemetry.proto.metrics.v1B\x0cMetricsProtoP\x01Z)go.opentelemetry.io/proto/otlp/metrics/v1b\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n,opentelemetry/proto/metrics/v1/metrics.proto\x12\x1eopentelemetry.proto.metrics.v1\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/proto/resource/v1/resource.proto\"X\n\x0bMetricsData\x12I\n\x10resource_metrics\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.ResourceMetrics\"\xaf\x01\n\x0fResourceMetrics\x12;\n\x08resource\x18\x01 \x01(\x0b\x32).opentelemetry.proto.resource.v1.Resource\x12\x43\n\rscope_metrics\x18\x02 \x03(\x0b\x32,.opentelemetry.proto.metrics.v1.ScopeMetrics\x12\x12\n\nschema_url\x18\x03 \x01(\tJ\x06\x08\xe8\x07\x10\xe9\x07\"\x9f\x01\n\x0cScopeMetrics\x12\x42\n\x05scope\x18\x01 \x01(\x0b\x32\x33.opentelemetry.proto.common.v1.InstrumentationScope\x12\x37\n\x07metrics\x18\x02 \x03(\x0b\x32&.opentelemetry.proto.metrics.v1.Metric\x12\x12\n\nschema_url\x18\x03 \x01(\t\"\x92\x03\n\x06Metric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0c\n\x04unit\x18\x03 \x01(\t\x12\x36\n\x05gauge\x18\x05 \x01(\x0b\x32%.opentelemetry.proto.metrics.v1.GaugeH\x00\x12\x32\n\x03sum\x18\x07 \x01(\x0b\x32#.opentelemetry.proto.metrics.v1.SumH\x00\x12>\n\thistogram\x18\t \x01(\x0b\x32).opentelemetry.proto.metrics.v1.HistogramH\x00\x12U\n\x15\x65xponential_histogram\x18\n \x01(\x0b\x32\x34.opentelemetry.proto.metrics.v1.ExponentialHistogramH\x00\x12:\n\x07summary\x18\x0b \x01(\x0b\x32\'.opentelemetry.proto.metrics.v1.SummaryH\x00\x42\x06\n\x04\x64\x61taJ\x04\x08\x04\x10\x05J\x04\x08\x06\x10\x07J\x04\x08\x08\x10\t\"M\n\x05Gauge\x12\x44\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.NumberDataPoint\"\xba\x01\n\x03Sum\x12\x44\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.NumberDataPoint\x12W\n\x17\x61ggregation_temporality\x18\x02 \x01(\x0e\x32\x36.opentelemetry.proto.metrics.v1.AggregationTemporality\x12\x14\n\x0cis_monotonic\x18\x03 \x01(\x08\"\xad\x01\n\tHistogram\x12G\n\x0b\x64\x61ta_points\x18\x01 
\x03(\x0b\x32\x32.opentelemetry.proto.metrics.v1.HistogramDataPoint\x12W\n\x17\x61ggregation_temporality\x18\x02 \x01(\x0e\x32\x36.opentelemetry.proto.metrics.v1.AggregationTemporality\"\xc3\x01\n\x14\x45xponentialHistogram\x12R\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32=.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint\x12W\n\x17\x61ggregation_temporality\x18\x02 \x01(\x0e\x32\x36.opentelemetry.proto.metrics.v1.AggregationTemporality\"P\n\x07Summary\x12\x45\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32\x30.opentelemetry.proto.metrics.v1.SummaryDataPoint\"\x86\x02\n\x0fNumberDataPoint\x12;\n\nattributes\x18\x07 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\x13\n\tas_double\x18\x04 \x01(\x01H\x00\x12\x10\n\x06\x61s_int\x18\x06 \x01(\x10H\x00\x12;\n\texemplars\x18\x05 \x03(\x0b\x32(.opentelemetry.proto.metrics.v1.Exemplar\x12\r\n\x05\x66lags\x18\x08 \x01(\rB\x07\n\x05valueJ\x04\x08\x01\x10\x02\"\xe6\x02\n\x12HistogramDataPoint\x12;\n\nattributes\x18\t \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\r\n\x05\x63ount\x18\x04 \x01(\x06\x12\x10\n\x03sum\x18\x05 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\rbucket_counts\x18\x06 \x03(\x06\x12\x17\n\x0f\x65xplicit_bounds\x18\x07 \x03(\x01\x12;\n\texemplars\x18\x08 \x03(\x0b\x32(.opentelemetry.proto.metrics.v1.Exemplar\x12\r\n\x05\x66lags\x18\n \x01(\r\x12\x10\n\x03min\x18\x0b \x01(\x01H\x01\x88\x01\x01\x12\x10\n\x03max\x18\x0c \x01(\x01H\x02\x88\x01\x01\x42\x06\n\x04_sumB\x06\n\x04_minB\x06\n\x04_maxJ\x04\x08\x01\x10\x02\"\xda\x04\n\x1d\x45xponentialHistogramDataPoint\x12;\n\nattributes\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\r\n\x05\x63ount\x18\x04 \x01(\x06\x12\x10\n\x03sum\x18\x05 \x01(\x01H\x00\x88\x01\x01\x12\r\n\x05scale\x18\x06 \x01(\x11\x12\x12\n\nzero_count\x18\x07 \x01(\x06\x12W\n\x08positive\x18\x08 \x01(\x0b\x32\x45.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets\x12W\n\x08negative\x18\t \x01(\x0b\x32\x45.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets\x12\r\n\x05\x66lags\x18\n \x01(\r\x12;\n\texemplars\x18\x0b \x03(\x0b\x32(.opentelemetry.proto.metrics.v1.Exemplar\x12\x10\n\x03min\x18\x0c \x01(\x01H\x01\x88\x01\x01\x12\x10\n\x03max\x18\r \x01(\x01H\x02\x88\x01\x01\x12\x16\n\x0ezero_threshold\x18\x0e \x01(\x01\x1a\x30\n\x07\x42uckets\x12\x0e\n\x06offset\x18\x01 \x01(\x11\x12\x15\n\rbucket_counts\x18\x02 \x03(\x04\x42\x06\n\x04_sumB\x06\n\x04_minB\x06\n\x04_max\"\xc5\x02\n\x10SummaryDataPoint\x12;\n\nattributes\x18\x07 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\r\n\x05\x63ount\x18\x04 \x01(\x06\x12\x0b\n\x03sum\x18\x05 \x01(\x01\x12Y\n\x0fquantile_values\x18\x06 \x03(\x0b\x32@.opentelemetry.proto.metrics.v1.SummaryDataPoint.ValueAtQuantile\x12\r\n\x05\x66lags\x18\x08 \x01(\r\x1a\x32\n\x0fValueAtQuantile\x12\x10\n\x08quantile\x18\x01 \x01(\x01\x12\r\n\x05value\x18\x02 \x01(\x01J\x04\x08\x01\x10\x02\"\xc1\x01\n\x08\x45xemplar\x12\x44\n\x13\x66iltered_attributes\x18\x07 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x16\n\x0etime_unix_nano\x18\x02 \x01(\x06\x12\x13\n\tas_double\x18\x03 \x01(\x01H\x00\x12\x10\n\x06\x61s_int\x18\x06 
\x01(\x10H\x00\x12\x0f\n\x07span_id\x18\x04 \x01(\x0c\x12\x10\n\x08trace_id\x18\x05 \x01(\x0c\x42\x07\n\x05valueJ\x04\x08\x01\x10\x02*\x8c\x01\n\x16\x41ggregationTemporality\x12\'\n#AGGREGATION_TEMPORALITY_UNSPECIFIED\x10\x00\x12!\n\x1d\x41GGREGATION_TEMPORALITY_DELTA\x10\x01\x12&\n\"AGGREGATION_TEMPORALITY_CUMULATIVE\x10\x02*^\n\x0e\x44\x61taPointFlags\x12\x1f\n\x1b\x44\x41TA_POINT_FLAGS_DO_NOT_USE\x10\x00\x12+\n\'DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK\x10\x01\x42\x7f\n!io.opentelemetry.proto.metrics.v1B\x0cMetricsProtoP\x01Z)go.opentelemetry.io/proto/otlp/metrics/v1\xaa\x02\x1eOpenTelemetry.Proto.Metrics.V1b\x06proto3') _AGGREGATIONTEMPORALITY = DESCRIPTOR.enum_types_by_name['AggregationTemporality'] AggregationTemporality = enum_type_wrapper.EnumTypeWrapper(_AGGREGATIONTEMPORALITY) @@ -26,14 +26,13 @@ AGGREGATION_TEMPORALITY_UNSPECIFIED = 0 AGGREGATION_TEMPORALITY_DELTA = 1 AGGREGATION_TEMPORALITY_CUMULATIVE = 2 -FLAG_NONE = 0 -FLAG_NO_RECORDED_VALUE = 1 +DATA_POINT_FLAGS_DO_NOT_USE = 0 +DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK = 1 _METRICSDATA = DESCRIPTOR.message_types_by_name['MetricsData'] _RESOURCEMETRICS = DESCRIPTOR.message_types_by_name['ResourceMetrics'] _SCOPEMETRICS = DESCRIPTOR.message_types_by_name['ScopeMetrics'] -_INSTRUMENTATIONLIBRARYMETRICS = DESCRIPTOR.message_types_by_name['InstrumentationLibraryMetrics'] _METRIC = DESCRIPTOR.message_types_by_name['Metric'] _GAUGE = DESCRIPTOR.message_types_by_name['Gauge'] _SUM = DESCRIPTOR.message_types_by_name['Sum'] @@ -68,13 +67,6 @@ }) _sym_db.RegisterMessage(ScopeMetrics) -InstrumentationLibraryMetrics = _reflection.GeneratedProtocolMessageType('InstrumentationLibraryMetrics', (_message.Message,), { - 'DESCRIPTOR' : _INSTRUMENTATIONLIBRARYMETRICS, - '__module__' : 'opentelemetry.proto.metrics.v1.metrics_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.metrics.v1.InstrumentationLibraryMetrics) - }) -_sym_db.RegisterMessage(InstrumentationLibraryMetrics) - Metric = _reflection.GeneratedProtocolMessageType('Metric', (_message.Message,), { 'DESCRIPTOR' : _METRIC, '__module__' : 'opentelemetry.proto.metrics.v1.metrics_pb2' @@ -171,47 +163,41 @@ if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n!io.opentelemetry.proto.metrics.v1B\014MetricsProtoP\001Z)go.opentelemetry.io/proto/otlp/metrics/v1' - _RESOURCEMETRICS.fields_by_name['instrumentation_library_metrics']._options = None - _RESOURCEMETRICS.fields_by_name['instrumentation_library_metrics']._serialized_options = b'\030\001' - _INSTRUMENTATIONLIBRARYMETRICS._options = None - _INSTRUMENTATIONLIBRARYMETRICS._serialized_options = b'\030\001' - _AGGREGATIONTEMPORALITY._serialized_start=3754 - _AGGREGATIONTEMPORALITY._serialized_end=3894 - _DATAPOINTFLAGS._serialized_start=3896 - _DATAPOINTFLAGS._serialized_end=3955 + DESCRIPTOR._serialized_options = b'\n!io.opentelemetry.proto.metrics.v1B\014MetricsProtoP\001Z)go.opentelemetry.io/proto/otlp/metrics/v1\252\002\036OpenTelemetry.Proto.Metrics.V1' + _AGGREGATIONTEMPORALITY._serialized_start=3487 + _AGGREGATIONTEMPORALITY._serialized_end=3627 + _DATAPOINTFLAGS._serialized_start=3629 + _DATAPOINTFLAGS._serialized_end=3723 _METRICSDATA._serialized_start=172 _METRICSDATA._serialized_end=260 _RESOURCEMETRICS._serialized_start=263 - _RESOURCEMETRICS._serialized_end=539 - _SCOPEMETRICS._serialized_start=542 - _SCOPEMETRICS._serialized_end=701 - _INSTRUMENTATIONLIBRARYMETRICS._serialized_start=704 - _INSTRUMENTATIONLIBRARYMETRICS._serialized_end=904 - 
_METRIC._serialized_start=907 - _METRIC._serialized_end=1309 - _GAUGE._serialized_start=1311 - _GAUGE._serialized_end=1388 - _SUM._serialized_start=1391 - _SUM._serialized_end=1577 - _HISTOGRAM._serialized_start=1580 - _HISTOGRAM._serialized_end=1753 - _EXPONENTIALHISTOGRAM._serialized_start=1756 - _EXPONENTIALHISTOGRAM._serialized_end=1951 - _SUMMARY._serialized_start=1953 - _SUMMARY._serialized_end=2033 - _NUMBERDATAPOINT._serialized_start=2036 - _NUMBERDATAPOINT._serialized_end=2298 - _HISTOGRAMDATAPOINT._serialized_start=2301 - _HISTOGRAMDATAPOINT._serialized_end=2659 - _EXPONENTIALHISTOGRAMDATAPOINT._serialized_start=2662 - _EXPONENTIALHISTOGRAMDATAPOINT._serialized_end=3227 - _EXPONENTIALHISTOGRAMDATAPOINT_BUCKETS._serialized_start=3163 - _EXPONENTIALHISTOGRAMDATAPOINT_BUCKETS._serialized_end=3211 - _SUMMARYDATAPOINT._serialized_start=3230 - _SUMMARYDATAPOINT._serialized_end=3555 - _SUMMARYDATAPOINT_VALUEATQUANTILE._serialized_start=3499 - _SUMMARYDATAPOINT_VALUEATQUANTILE._serialized_end=3549 - _EXEMPLAR._serialized_start=3558 - _EXEMPLAR._serialized_end=3751 + _RESOURCEMETRICS._serialized_end=438 + _SCOPEMETRICS._serialized_start=441 + _SCOPEMETRICS._serialized_end=600 + _METRIC._serialized_start=603 + _METRIC._serialized_end=1005 + _GAUGE._serialized_start=1007 + _GAUGE._serialized_end=1084 + _SUM._serialized_start=1087 + _SUM._serialized_end=1273 + _HISTOGRAM._serialized_start=1276 + _HISTOGRAM._serialized_end=1449 + _EXPONENTIALHISTOGRAM._serialized_start=1452 + _EXPONENTIALHISTOGRAM._serialized_end=1647 + _SUMMARY._serialized_start=1649 + _SUMMARY._serialized_end=1729 + _NUMBERDATAPOINT._serialized_start=1732 + _NUMBERDATAPOINT._serialized_end=1994 + _HISTOGRAMDATAPOINT._serialized_start=1997 + _HISTOGRAMDATAPOINT._serialized_end=2355 + _EXPONENTIALHISTOGRAMDATAPOINT._serialized_start=2358 + _EXPONENTIALHISTOGRAMDATAPOINT._serialized_end=2960 + _EXPONENTIALHISTOGRAMDATAPOINT_BUCKETS._serialized_start=2888 + _EXPONENTIALHISTOGRAMDATAPOINT_BUCKETS._serialized_end=2936 + _SUMMARYDATAPOINT._serialized_start=2963 + _SUMMARYDATAPOINT._serialized_end=3288 + _SUMMARYDATAPOINT_VALUEATQUANTILE._serialized_start=3232 + _SUMMARYDATAPOINT_VALUEATQUANTILE._serialized_end=3282 + _EXEMPLAR._serialized_start=3291 + _EXEMPLAR._serialized_end=3484 # @@protoc_insertion_point(module_scope) diff --git a/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.pyi index ed1291a757..ccbbb35cfb 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.pyi +++ b/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.pyi @@ -170,23 +170,31 @@ class DataPointFlags(_DataPointFlags, metaclass=_DataPointFlagsEnumTypeWrapper): enum is a bit-mask. To test the presence of a single flag in the flags of a data point, for example, use an expression like: - (point.flags & FLAG_NO_RECORDED_VALUE) == FLAG_NO_RECORDED_VALUE + (point.flags & DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK) == DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK """ pass class _DataPointFlags: V = typing.NewType('V', builtins.int) class _DataPointFlagsEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_DataPointFlags.V], builtins.type): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor = ... - FLAG_NONE = DataPointFlags.V(0) - FLAG_NO_RECORDED_VALUE = DataPointFlags.V(1) + DATA_POINT_FLAGS_DO_NOT_USE = DataPointFlags.V(0) + """The zero value for the enum. Should not be used for comparisons. 
+ Instead use bitwise "and" with the appropriate mask as shown above. + """ + + DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK = DataPointFlags.V(1) """This DataPoint is valid but has no recorded value. This value SHOULD be used to reflect explicitly missing data in a series, as for an equivalent to the Prometheus "staleness marker". """ -FLAG_NONE = DataPointFlags.V(0) -FLAG_NO_RECORDED_VALUE = DataPointFlags.V(1) +DATA_POINT_FLAGS_DO_NOT_USE = DataPointFlags.V(0) +"""The zero value for the enum. Should not be used for comparisons. +Instead use bitwise "and" with the appropriate mask as shown above. +""" + +DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK = DataPointFlags.V(1) """This DataPoint is valid but has no recorded value. This value SHOULD be used to reflect explicitly missing data in a series, as for an equivalent to the Prometheus "staleness marker". @@ -230,7 +238,6 @@ class ResourceMetrics(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... RESOURCE_FIELD_NUMBER: builtins.int SCOPE_METRICS_FIELD_NUMBER: builtins.int - INSTRUMENTATION_LIBRARY_METRICS_FIELD_NUMBER: builtins.int SCHEMA_URL_FIELD_NUMBER: builtins.int @property def resource(self) -> opentelemetry.proto.resource.v1.resource_pb2.Resource: @@ -242,37 +249,6 @@ class ResourceMetrics(google.protobuf.message.Message): def scope_metrics(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ScopeMetrics]: """A list of metrics that originate from a resource.""" pass - @property - def instrumentation_library_metrics(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___InstrumentationLibraryMetrics]: - """A list of InstrumentationLibraryMetrics that originate from a resource. - This field is deprecated and will be removed after grace period expires on June 15, 2022. - - During the grace period the following rules SHOULD be followed: - - For Binary Protobufs - ==================== - Binary Protobuf senders SHOULD NOT set instrumentation_library_metrics. Instead - scope_metrics SHOULD be set. - - Binary Protobuf receivers SHOULD check if instrumentation_library_metrics is set - and scope_metrics is not set then the value in instrumentation_library_metrics - SHOULD be used instead by converting InstrumentationLibraryMetrics into ScopeMetrics. - If scope_metrics is set then instrumentation_library_metrics SHOULD be ignored. - - For JSON - ======== - JSON senders that set instrumentation_library_metrics field MAY also set - scope_metrics to carry the same metrics, essentially double-publishing the same data. - Such double-publishing MAY be controlled by a user-settable option. - If double-publishing is not used then the senders SHOULD set scope_metrics and - SHOULD NOT set instrumentation_library_metrics. - - JSON receivers SHOULD check if instrumentation_library_metrics is set and - scope_metrics is not set then the value in instrumentation_library_metrics - SHOULD be used instead by converting InstrumentationLibraryMetrics into ScopeMetrics. - If scope_metrics is set then instrumentation_library_metrics field SHOULD be ignored. - """ - pass schema_url: typing.Text = ... """This schema_url applies to the data in the "resource" field. It does not apply to the data in the "scope_metrics" field which have their own schema_url field. 
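A minimal sketch (not part of the regenerated stubs) of how a consumer might test the renamed flag, mirroring the mask expression shown in the DataPointFlags docstring earlier in this stub and assuming the regenerated metrics_pb2 module is importable:

    # Sketch: detect the "no recorded value" (staleness) flag on a data point.
    from opentelemetry.proto.metrics.v1 import metrics_pb2 as pb2

    point = pb2.NumberDataPoint(
        flags=pb2.DataPointFlags.DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK
    )
    no_recorded_value = (
        point.flags & pb2.DataPointFlags.DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK
    ) == pb2.DataPointFlags.DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK
    assert no_recorded_value

The enum zero value is now a reserved "do not use" sentinel, so code that previously compared equality with FLAG_NONE should switch to the bitwise test above.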
@@ -282,11 +258,10 @@ class ResourceMetrics(google.protobuf.message.Message): *, resource : typing.Optional[opentelemetry.proto.resource.v1.resource_pb2.Resource] = ..., scope_metrics : typing.Optional[typing.Iterable[global___ScopeMetrics]] = ..., - instrumentation_library_metrics : typing.Optional[typing.Iterable[global___InstrumentationLibraryMetrics]] = ..., schema_url : typing.Text = ..., ) -> None: ... def HasField(self, field_name: typing_extensions.Literal["resource",b"resource"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["instrumentation_library_metrics",b"instrumentation_library_metrics","resource",b"resource","schema_url",b"schema_url","scope_metrics",b"scope_metrics"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["resource",b"resource","schema_url",b"schema_url","scope_metrics",b"scope_metrics"]) -> None: ... global___ResourceMetrics = ResourceMetrics class ScopeMetrics(google.protobuf.message.Message): @@ -319,45 +294,11 @@ class ScopeMetrics(google.protobuf.message.Message): def ClearField(self, field_name: typing_extensions.Literal["metrics",b"metrics","schema_url",b"schema_url","scope",b"scope"]) -> None: ... global___ScopeMetrics = ScopeMetrics -class InstrumentationLibraryMetrics(google.protobuf.message.Message): - """A collection of Metrics produced by an InstrumentationLibrary. - InstrumentationLibraryMetrics is wire-compatible with ScopeMetrics for binary - Protobuf format. - This message is deprecated and will be removed on June 15, 2022. - """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... - INSTRUMENTATION_LIBRARY_FIELD_NUMBER: builtins.int - METRICS_FIELD_NUMBER: builtins.int - SCHEMA_URL_FIELD_NUMBER: builtins.int - @property - def instrumentation_library(self) -> opentelemetry.proto.common.v1.common_pb2.InstrumentationLibrary: - """The instrumentation library information for the metrics in this message. - Semantically when InstrumentationLibrary isn't set, it is equivalent with - an empty instrumentation library name (unknown). - """ - pass - @property - def metrics(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Metric]: - """A list of metrics that originate from an instrumentation library.""" - pass - schema_url: typing.Text = ... - """This schema_url applies to all metrics in the "metrics" field.""" - - def __init__(self, - *, - instrumentation_library : typing.Optional[opentelemetry.proto.common.v1.common_pb2.InstrumentationLibrary] = ..., - metrics : typing.Optional[typing.Iterable[global___Metric]] = ..., - schema_url : typing.Text = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["instrumentation_library",b"instrumentation_library"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["instrumentation_library",b"instrumentation_library","metrics",b"metrics","schema_url",b"schema_url"]) -> None: ... -global___InstrumentationLibraryMetrics = InstrumentationLibraryMetrics - class Metric(google.protobuf.message.Message): """Defines a Metric which has one or more timeseries. The following is a brief summary of the Metric data model. 
For more details, see: - https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/datamodel.md + https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/data-model.md The data model and relation between entities is shown in the @@ -816,9 +757,9 @@ class ExponentialHistogramDataPoint(google.protobuf.message.Message): @property def bucket_counts(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: - """Count is an array of counts, where count[i] carries the count - of the bucket at index (offset+i). count[i] is the count of - values greater than or equal to base^(offset+i) and less than + """bucket_counts is an array of count values, where bucket_counts[i] carries + the count of the bucket at index (offset+i). bucket_counts[i] is the count + of values greater than base^(offset+i) and less than or equal to base^(offset+i+1). Note: By contrast, the explicit HistogramDataPoint uses @@ -847,6 +788,7 @@ class ExponentialHistogramDataPoint(google.protobuf.message.Message): EXEMPLARS_FIELD_NUMBER: builtins.int MIN_FIELD_NUMBER: builtins.int MAX_FIELD_NUMBER: builtins.int + ZERO_THRESHOLD_FIELD_NUMBER: builtins.int @property def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: """The set of key/value pairs that uniquely identify the timeseries from @@ -894,8 +836,8 @@ class ExponentialHistogramDataPoint(google.protobuf.message.Message): base = (2^(2^-scale)) The histogram bucket identified by `index`, a signed integer, - contains values that are greater than or equal to (base^index) and - less than (base^(index+1)). + contains values that are greater than (base^index) and + less than or equal to (base^(index+1)). The positive and negative ranges of the histogram are expressed separately. Negative values are mapped by their absolute value @@ -941,6 +883,15 @@ class ExponentialHistogramDataPoint(google.protobuf.message.Message): max: builtins.float = ... """max is the maximum value over (start_time, end_time].""" + zero_threshold: builtins.float = ... + """ZeroThreshold may be optionally set to convey the width of the zero + region. Where the zero region is defined as the closed interval + [-ZeroThreshold, ZeroThreshold]. + When ZeroThreshold is 0, zero count bucket stores values that cannot be + expressed using the standard exponential formula as well as values that + have been rounded to zero. + """ + def __init__(self, *, attributes : typing.Optional[typing.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue]] = ..., @@ -956,13 +907,16 @@ class ExponentialHistogramDataPoint(google.protobuf.message.Message): exemplars : typing.Optional[typing.Iterable[global___Exemplar]] = ..., min : builtins.float = ..., max : builtins.float = ..., + zero_threshold : builtins.float = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["_max",b"_max","_min",b"_min","max",b"max","min",b"min","negative",b"negative","positive",b"positive"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["_max",b"_max","_min",b"_min","attributes",b"attributes","count",b"count","exemplars",b"exemplars","flags",b"flags","max",b"max","min",b"min","negative",b"negative","positive",b"positive","scale",b"scale","start_time_unix_nano",b"start_time_unix_nano","sum",b"sum","time_unix_nano",b"time_unix_nano","zero_count",b"zero_count"]) -> None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["_max",b"_max","_min",b"_min","_sum",b"_sum","max",b"max","min",b"min","negative",b"negative","positive",b"positive","sum",b"sum"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["_max",b"_max","_min",b"_min","_sum",b"_sum","attributes",b"attributes","count",b"count","exemplars",b"exemplars","flags",b"flags","max",b"max","min",b"min","negative",b"negative","positive",b"positive","scale",b"scale","start_time_unix_nano",b"start_time_unix_nano","sum",b"sum","time_unix_nano",b"time_unix_nano","zero_count",b"zero_count","zero_threshold",b"zero_threshold"]) -> None: ... @typing.overload def WhichOneof(self, oneof_group: typing_extensions.Literal["_max",b"_max"]) -> typing.Optional[typing_extensions.Literal["max"]]: ... @typing.overload def WhichOneof(self, oneof_group: typing_extensions.Literal["_min",b"_min"]) -> typing.Optional[typing_extensions.Literal["min"]]: ... + @typing.overload + def WhichOneof(self, oneof_group: typing_extensions.Literal["_sum",b"_sum"]) -> typing.Optional[typing_extensions.Literal["sum"]]: ... global___ExponentialHistogramDataPoint = ExponentialHistogramDataPoint class SummaryDataPoint(google.protobuf.message.Message): diff --git a/opentelemetry-proto/src/opentelemetry/proto/resource/v1/resource_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/resource/v1/resource_pb2.py index 2c31f9a3fa..728e9114dc 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/resource/v1/resource_pb2.py +++ b/opentelemetry-proto/src/opentelemetry/proto/resource/v1/resource_pb2.py @@ -15,7 +15,7 @@ from opentelemetry.proto.common.v1 import common_pb2 as opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n.opentelemetry/proto/resource/v1/resource.proto\x12\x1fopentelemetry.proto.resource.v1\x1a*opentelemetry/proto/common/v1/common.proto\"i\n\x08Resource\x12;\n\nattributes\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x02 \x01(\rBa\n\"io.opentelemetry.proto.resource.v1B\rResourceProtoP\x01Z*go.opentelemetry.io/proto/otlp/resource/v1b\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n.opentelemetry/proto/resource/v1/resource.proto\x12\x1fopentelemetry.proto.resource.v1\x1a*opentelemetry/proto/common/v1/common.proto\"i\n\x08Resource\x12;\n\nattributes\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x02 \x01(\rB\x83\x01\n\"io.opentelemetry.proto.resource.v1B\rResourceProtoP\x01Z*go.opentelemetry.io/proto/otlp/resource/v1\xaa\x02\x1fOpenTelemetry.Proto.Resource.V1b\x06proto3') @@ -30,7 +30,7 @@ if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\"io.opentelemetry.proto.resource.v1B\rResourceProtoP\001Z*go.opentelemetry.io/proto/otlp/resource/v1' + DESCRIPTOR._serialized_options = b'\n\"io.opentelemetry.proto.resource.v1B\rResourceProtoP\001Z*go.opentelemetry.io/proto/otlp/resource/v1\252\002\037OpenTelemetry.Proto.Resource.V1' _RESOURCE._serialized_start=127 _RESOURCE._serialized_end=232 # @@protoc_insertion_point(module_scope) diff --git a/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_config_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_config_pb2.py deleted file mode 100644 index a54a2b68d0..0000000000 --- a/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_config_pb2.py +++ /dev/null @@ 
-1,68 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: opentelemetry/proto/trace/v1/trace_config.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n/opentelemetry/proto/trace/v1/trace_config.proto\x12\x1copentelemetry.proto.trace.v1\"\xc8\x03\n\x0bTraceConfig\x12I\n\x10\x63onstant_sampler\x18\x01 \x01(\x0b\x32-.opentelemetry.proto.trace.v1.ConstantSamplerH\x00\x12O\n\x14trace_id_ratio_based\x18\x02 \x01(\x0b\x32/.opentelemetry.proto.trace.v1.TraceIdRatioBasedH\x00\x12R\n\x15rate_limiting_sampler\x18\x03 \x01(\x0b\x32\x31.opentelemetry.proto.trace.v1.RateLimitingSamplerH\x00\x12 \n\x18max_number_of_attributes\x18\x04 \x01(\x03\x12\"\n\x1amax_number_of_timed_events\x18\x05 \x01(\x03\x12\x30\n(max_number_of_attributes_per_timed_event\x18\x06 \x01(\x03\x12\x1b\n\x13max_number_of_links\x18\x07 \x01(\x03\x12)\n!max_number_of_attributes_per_link\x18\x08 \x01(\x03\x42\t\n\x07sampler\"\xa9\x01\n\x0f\x43onstantSampler\x12P\n\x08\x64\x65\x63ision\x18\x01 \x01(\x0e\x32>.opentelemetry.proto.trace.v1.ConstantSampler.ConstantDecision\"D\n\x10\x43onstantDecision\x12\x0e\n\nALWAYS_OFF\x10\x00\x12\r\n\tALWAYS_ON\x10\x01\x12\x11\n\rALWAYS_PARENT\x10\x02\"*\n\x11TraceIdRatioBased\x12\x15\n\rsamplingRatio\x18\x01 \x01(\x01\"\"\n\x13RateLimitingSampler\x12\x0b\n\x03qps\x18\x01 \x01(\x03\x42h\n\x1fio.opentelemetry.proto.trace.v1B\x10TraceConfigProtoP\x01Z1go.opentelemetry.io/proto/otlp/collector/trace/v1b\x06proto3') - - - -_TRACECONFIG = DESCRIPTOR.message_types_by_name['TraceConfig'] -_CONSTANTSAMPLER = DESCRIPTOR.message_types_by_name['ConstantSampler'] -_TRACEIDRATIOBASED = DESCRIPTOR.message_types_by_name['TraceIdRatioBased'] -_RATELIMITINGSAMPLER = DESCRIPTOR.message_types_by_name['RateLimitingSampler'] -_CONSTANTSAMPLER_CONSTANTDECISION = _CONSTANTSAMPLER.enum_types_by_name['ConstantDecision'] -TraceConfig = _reflection.GeneratedProtocolMessageType('TraceConfig', (_message.Message,), { - 'DESCRIPTOR' : _TRACECONFIG, - '__module__' : 'opentelemetry.proto.trace.v1.trace_config_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.trace.v1.TraceConfig) - }) -_sym_db.RegisterMessage(TraceConfig) - -ConstantSampler = _reflection.GeneratedProtocolMessageType('ConstantSampler', (_message.Message,), { - 'DESCRIPTOR' : _CONSTANTSAMPLER, - '__module__' : 'opentelemetry.proto.trace.v1.trace_config_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.trace.v1.ConstantSampler) - }) -_sym_db.RegisterMessage(ConstantSampler) - -TraceIdRatioBased = _reflection.GeneratedProtocolMessageType('TraceIdRatioBased', (_message.Message,), { - 'DESCRIPTOR' : _TRACEIDRATIOBASED, - '__module__' : 'opentelemetry.proto.trace.v1.trace_config_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.trace.v1.TraceIdRatioBased) - }) -_sym_db.RegisterMessage(TraceIdRatioBased) - -RateLimitingSampler = _reflection.GeneratedProtocolMessageType('RateLimitingSampler', (_message.Message,), { - 'DESCRIPTOR' : _RATELIMITINGSAMPLER, - '__module__' : 'opentelemetry.proto.trace.v1.trace_config_pb2' - # 
@@protoc_insertion_point(class_scope:opentelemetry.proto.trace.v1.RateLimitingSampler) - }) -_sym_db.RegisterMessage(RateLimitingSampler) - -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\037io.opentelemetry.proto.trace.v1B\020TraceConfigProtoP\001Z1go.opentelemetry.io/proto/otlp/collector/trace/v1' - _TRACECONFIG._serialized_start=82 - _TRACECONFIG._serialized_end=538 - _CONSTANTSAMPLER._serialized_start=541 - _CONSTANTSAMPLER._serialized_end=710 - _CONSTANTSAMPLER_CONSTANTDECISION._serialized_start=642 - _CONSTANTSAMPLER_CONSTANTDECISION._serialized_end=710 - _TRACEIDRATIOBASED._serialized_start=712 - _TRACEIDRATIOBASED._serialized_end=754 - _RATELIMITINGSAMPLER._serialized_start=756 - _RATELIMITINGSAMPLER._serialized_end=790 -# @@protoc_insertion_point(module_scope) diff --git a/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_config_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_config_pb2.pyi deleted file mode 100644 index 8290baf58e..0000000000 --- a/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_config_pb2.pyi +++ /dev/null @@ -1,123 +0,0 @@ -""" -@generated by mypy-protobuf. Do not edit manually! -isort:skip_file -""" -import builtins -import google.protobuf.descriptor -import google.protobuf.internal.enum_type_wrapper -import google.protobuf.message -import typing -import typing_extensions - -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ... - -class TraceConfig(google.protobuf.message.Message): - """Global configuration of the trace service. All fields must be specified, or - the default (zero) values will be used for each type. - """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... - CONSTANT_SAMPLER_FIELD_NUMBER: builtins.int - TRACE_ID_RATIO_BASED_FIELD_NUMBER: builtins.int - RATE_LIMITING_SAMPLER_FIELD_NUMBER: builtins.int - MAX_NUMBER_OF_ATTRIBUTES_FIELD_NUMBER: builtins.int - MAX_NUMBER_OF_TIMED_EVENTS_FIELD_NUMBER: builtins.int - MAX_NUMBER_OF_ATTRIBUTES_PER_TIMED_EVENT_FIELD_NUMBER: builtins.int - MAX_NUMBER_OF_LINKS_FIELD_NUMBER: builtins.int - MAX_NUMBER_OF_ATTRIBUTES_PER_LINK_FIELD_NUMBER: builtins.int - @property - def constant_sampler(self) -> global___ConstantSampler: ... - @property - def trace_id_ratio_based(self) -> global___TraceIdRatioBased: ... - @property - def rate_limiting_sampler(self) -> global___RateLimitingSampler: ... - max_number_of_attributes: builtins.int = ... - """The global default max number of attributes per span.""" - - max_number_of_timed_events: builtins.int = ... - """The global default max number of annotation events per span.""" - - max_number_of_attributes_per_timed_event: builtins.int = ... - """The global default max number of attributes per timed event.""" - - max_number_of_links: builtins.int = ... - """The global default max number of link entries per span.""" - - max_number_of_attributes_per_link: builtins.int = ... - """The global default max number of attributes per span.""" - - def __init__(self, - *, - constant_sampler : typing.Optional[global___ConstantSampler] = ..., - trace_id_ratio_based : typing.Optional[global___TraceIdRatioBased] = ..., - rate_limiting_sampler : typing.Optional[global___RateLimitingSampler] = ..., - max_number_of_attributes : builtins.int = ..., - max_number_of_timed_events : builtins.int = ..., - max_number_of_attributes_per_timed_event : builtins.int = ..., - max_number_of_links : builtins.int = ..., - max_number_of_attributes_per_link : builtins.int = ..., - ) -> None: ... 
- def HasField(self, field_name: typing_extensions.Literal["constant_sampler",b"constant_sampler","rate_limiting_sampler",b"rate_limiting_sampler","sampler",b"sampler","trace_id_ratio_based",b"trace_id_ratio_based"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["constant_sampler",b"constant_sampler","max_number_of_attributes",b"max_number_of_attributes","max_number_of_attributes_per_link",b"max_number_of_attributes_per_link","max_number_of_attributes_per_timed_event",b"max_number_of_attributes_per_timed_event","max_number_of_links",b"max_number_of_links","max_number_of_timed_events",b"max_number_of_timed_events","rate_limiting_sampler",b"rate_limiting_sampler","sampler",b"sampler","trace_id_ratio_based",b"trace_id_ratio_based"]) -> None: ... - def WhichOneof(self, oneof_group: typing_extensions.Literal["sampler",b"sampler"]) -> typing.Optional[typing_extensions.Literal["constant_sampler","trace_id_ratio_based","rate_limiting_sampler"]]: ... -global___TraceConfig = TraceConfig - -class ConstantSampler(google.protobuf.message.Message): - """Sampler that always makes a constant decision on span sampling.""" - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... - class ConstantDecision(_ConstantDecision, metaclass=_ConstantDecisionEnumTypeWrapper): - """How spans should be sampled: - - Always off - - Always on - - Always follow the parent Span's decision (off if no parent). - """ - pass - class _ConstantDecision: - V = typing.NewType('V', builtins.int) - class _ConstantDecisionEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_ConstantDecision.V], builtins.type): - DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor = ... - ALWAYS_OFF = ConstantSampler.ConstantDecision.V(0) - ALWAYS_ON = ConstantSampler.ConstantDecision.V(1) - ALWAYS_PARENT = ConstantSampler.ConstantDecision.V(2) - - ALWAYS_OFF = ConstantSampler.ConstantDecision.V(0) - ALWAYS_ON = ConstantSampler.ConstantDecision.V(1) - ALWAYS_PARENT = ConstantSampler.ConstantDecision.V(2) - - DECISION_FIELD_NUMBER: builtins.int - decision: global___ConstantSampler.ConstantDecision.V = ... - def __init__(self, - *, - decision : global___ConstantSampler.ConstantDecision.V = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["decision",b"decision"]) -> None: ... -global___ConstantSampler = ConstantSampler - -class TraceIdRatioBased(google.protobuf.message.Message): - """Sampler that tries to uniformly sample traces with a given ratio. - The ratio of sampling a trace is equal to that of the specified ratio. - """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... - SAMPLINGRATIO_FIELD_NUMBER: builtins.int - samplingRatio: builtins.float = ... - """The desired ratio of sampling. Must be within [0.0, 1.0].""" - - def __init__(self, - *, - samplingRatio : builtins.float = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["samplingRatio",b"samplingRatio"]) -> None: ... -global___TraceIdRatioBased = TraceIdRatioBased - -class RateLimitingSampler(google.protobuf.message.Message): - """Sampler that tries to sample with a rate per time window.""" - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... - QPS_FIELD_NUMBER: builtins.int - qps: builtins.int = ... - """Rate per second.""" - - def __init__(self, - *, - qps : builtins.int = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["qps",b"qps"]) -> None: ... 
-global___RateLimitingSampler = RateLimitingSampler diff --git a/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.py index 0827c14301..6e80acce51 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.py +++ b/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.py @@ -16,14 +16,13 @@ from opentelemetry.proto.resource.v1 import resource_pb2 as opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n(opentelemetry/proto/trace/v1/trace.proto\x12\x1copentelemetry.proto.trace.v1\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/proto/resource/v1/resource.proto\"Q\n\nTracesData\x12\x43\n\x0eresource_spans\x18\x01 \x03(\x0b\x32+.opentelemetry.proto.trace.v1.ResourceSpans\"\x86\x02\n\rResourceSpans\x12;\n\x08resource\x18\x01 \x01(\x0b\x32).opentelemetry.proto.resource.v1.Resource\x12=\n\x0bscope_spans\x18\x02 \x03(\x0b\x32(.opentelemetry.proto.trace.v1.ScopeSpans\x12\x65\n\x1dinstrumentation_library_spans\x18\xe8\x07 \x03(\x0b\x32\x39.opentelemetry.proto.trace.v1.InstrumentationLibrarySpansB\x02\x18\x01\x12\x12\n\nschema_url\x18\x03 \x01(\t\"\x97\x01\n\nScopeSpans\x12\x42\n\x05scope\x18\x01 \x01(\x0b\x32\x33.opentelemetry.proto.common.v1.InstrumentationScope\x12\x31\n\x05spans\x18\x02 \x03(\x0b\x32\".opentelemetry.proto.trace.v1.Span\x12\x12\n\nschema_url\x18\x03 \x01(\t\"\xc0\x01\n\x1bInstrumentationLibrarySpans\x12V\n\x17instrumentation_library\x18\x01 \x01(\x0b\x32\x35.opentelemetry.proto.common.v1.InstrumentationLibrary\x12\x31\n\x05spans\x18\x02 \x03(\x0b\x32\".opentelemetry.proto.trace.v1.Span\x12\x12\n\nschema_url\x18\x03 \x01(\t:\x02\x18\x01\"\xe6\x07\n\x04Span\x12\x10\n\x08trace_id\x18\x01 \x01(\x0c\x12\x0f\n\x07span_id\x18\x02 \x01(\x0c\x12\x13\n\x0btrace_state\x18\x03 \x01(\t\x12\x16\n\x0eparent_span_id\x18\x04 \x01(\x0c\x12\x0c\n\x04name\x18\x05 \x01(\t\x12\x39\n\x04kind\x18\x06 \x01(\x0e\x32+.opentelemetry.proto.trace.v1.Span.SpanKind\x12\x1c\n\x14start_time_unix_nano\x18\x07 \x01(\x06\x12\x1a\n\x12\x65nd_time_unix_nano\x18\x08 \x01(\x06\x12;\n\nattributes\x18\t \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\n \x01(\r\x12\x38\n\x06\x65vents\x18\x0b \x03(\x0b\x32(.opentelemetry.proto.trace.v1.Span.Event\x12\x1c\n\x14\x64ropped_events_count\x18\x0c \x01(\r\x12\x36\n\x05links\x18\r \x03(\x0b\x32\'.opentelemetry.proto.trace.v1.Span.Link\x12\x1b\n\x13\x64ropped_links_count\x18\x0e \x01(\r\x12\x34\n\x06status\x18\x0f \x01(\x0b\x32$.opentelemetry.proto.trace.v1.Status\x1a\x8c\x01\n\x05\x45vent\x12\x16\n\x0etime_unix_nano\x18\x01 \x01(\x06\x12\x0c\n\x04name\x18\x02 \x01(\t\x12;\n\nattributes\x18\x03 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x04 \x01(\r\x1a\x9d\x01\n\x04Link\x12\x10\n\x08trace_id\x18\x01 \x01(\x0c\x12\x0f\n\x07span_id\x18\x02 \x01(\x0c\x12\x13\n\x0btrace_state\x18\x03 \x01(\t\x12;\n\nattributes\x18\x04 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x05 \x01(\r\"\x99\x01\n\x08SpanKind\x12\x19\n\x15SPAN_KIND_UNSPECIFIED\x10\x00\x12\x16\n\x12SPAN_KIND_INTERNAL\x10\x01\x12\x14\n\x10SPAN_KIND_SERVER\x10\x02\x12\x14\n\x10SPAN_KIND_CLIENT\x10\x03\x12\x16\n\x12SPAN_KIND_PRODUCER\x10\x04\x12\x16\n\x12SPAN_KIND_CONSUMER\x10\x05\"\xae\x01\n\x06Status\x12\x0f\n\x07message\x18\x02 \x01(\t\x12=\n\x04\x63ode\x18\x03 
\x01(\x0e\x32/.opentelemetry.proto.trace.v1.Status.StatusCode\"N\n\nStatusCode\x12\x15\n\x11STATUS_CODE_UNSET\x10\x00\x12\x12\n\x0eSTATUS_CODE_OK\x10\x01\x12\x15\n\x11STATUS_CODE_ERROR\x10\x02J\x04\x08\x01\x10\x02\x42X\n\x1fio.opentelemetry.proto.trace.v1B\nTraceProtoP\x01Z\'go.opentelemetry.io/proto/otlp/trace/v1b\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n(opentelemetry/proto/trace/v1/trace.proto\x12\x1copentelemetry.proto.trace.v1\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/proto/resource/v1/resource.proto\"Q\n\nTracesData\x12\x43\n\x0eresource_spans\x18\x01 \x03(\x0b\x32+.opentelemetry.proto.trace.v1.ResourceSpans\"\xa7\x01\n\rResourceSpans\x12;\n\x08resource\x18\x01 \x01(\x0b\x32).opentelemetry.proto.resource.v1.Resource\x12=\n\x0bscope_spans\x18\x02 \x03(\x0b\x32(.opentelemetry.proto.trace.v1.ScopeSpans\x12\x12\n\nschema_url\x18\x03 \x01(\tJ\x06\x08\xe8\x07\x10\xe9\x07\"\x97\x01\n\nScopeSpans\x12\x42\n\x05scope\x18\x01 \x01(\x0b\x32\x33.opentelemetry.proto.common.v1.InstrumentationScope\x12\x31\n\x05spans\x18\x02 \x03(\x0b\x32\".opentelemetry.proto.trace.v1.Span\x12\x12\n\nschema_url\x18\x03 \x01(\t\"\xe6\x07\n\x04Span\x12\x10\n\x08trace_id\x18\x01 \x01(\x0c\x12\x0f\n\x07span_id\x18\x02 \x01(\x0c\x12\x13\n\x0btrace_state\x18\x03 \x01(\t\x12\x16\n\x0eparent_span_id\x18\x04 \x01(\x0c\x12\x0c\n\x04name\x18\x05 \x01(\t\x12\x39\n\x04kind\x18\x06 \x01(\x0e\x32+.opentelemetry.proto.trace.v1.Span.SpanKind\x12\x1c\n\x14start_time_unix_nano\x18\x07 \x01(\x06\x12\x1a\n\x12\x65nd_time_unix_nano\x18\x08 \x01(\x06\x12;\n\nattributes\x18\t \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\n \x01(\r\x12\x38\n\x06\x65vents\x18\x0b \x03(\x0b\x32(.opentelemetry.proto.trace.v1.Span.Event\x12\x1c\n\x14\x64ropped_events_count\x18\x0c \x01(\r\x12\x36\n\x05links\x18\r \x03(\x0b\x32\'.opentelemetry.proto.trace.v1.Span.Link\x12\x1b\n\x13\x64ropped_links_count\x18\x0e \x01(\r\x12\x34\n\x06status\x18\x0f \x01(\x0b\x32$.opentelemetry.proto.trace.v1.Status\x1a\x8c\x01\n\x05\x45vent\x12\x16\n\x0etime_unix_nano\x18\x01 \x01(\x06\x12\x0c\n\x04name\x18\x02 \x01(\t\x12;\n\nattributes\x18\x03 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x04 \x01(\r\x1a\x9d\x01\n\x04Link\x12\x10\n\x08trace_id\x18\x01 \x01(\x0c\x12\x0f\n\x07span_id\x18\x02 \x01(\x0c\x12\x13\n\x0btrace_state\x18\x03 \x01(\t\x12;\n\nattributes\x18\x04 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x05 \x01(\r\"\x99\x01\n\x08SpanKind\x12\x19\n\x15SPAN_KIND_UNSPECIFIED\x10\x00\x12\x16\n\x12SPAN_KIND_INTERNAL\x10\x01\x12\x14\n\x10SPAN_KIND_SERVER\x10\x02\x12\x14\n\x10SPAN_KIND_CLIENT\x10\x03\x12\x16\n\x12SPAN_KIND_PRODUCER\x10\x04\x12\x16\n\x12SPAN_KIND_CONSUMER\x10\x05\"\xae\x01\n\x06Status\x12\x0f\n\x07message\x18\x02 \x01(\t\x12=\n\x04\x63ode\x18\x03 \x01(\x0e\x32/.opentelemetry.proto.trace.v1.Status.StatusCode\"N\n\nStatusCode\x12\x15\n\x11STATUS_CODE_UNSET\x10\x00\x12\x12\n\x0eSTATUS_CODE_OK\x10\x01\x12\x15\n\x11STATUS_CODE_ERROR\x10\x02J\x04\x08\x01\x10\x02\x42w\n\x1fio.opentelemetry.proto.trace.v1B\nTraceProtoP\x01Z\'go.opentelemetry.io/proto/otlp/trace/v1\xaa\x02\x1cOpenTelemetry.Proto.Trace.V1b\x06proto3') _TRACESDATA = DESCRIPTOR.message_types_by_name['TracesData'] _RESOURCESPANS = DESCRIPTOR.message_types_by_name['ResourceSpans'] _SCOPESPANS = DESCRIPTOR.message_types_by_name['ScopeSpans'] -_INSTRUMENTATIONLIBRARYSPANS = 
DESCRIPTOR.message_types_by_name['InstrumentationLibrarySpans'] _SPAN = DESCRIPTOR.message_types_by_name['Span'] _SPAN_EVENT = _SPAN.nested_types_by_name['Event'] _SPAN_LINK = _SPAN.nested_types_by_name['Link'] @@ -51,13 +50,6 @@ }) _sym_db.RegisterMessage(ScopeSpans) -InstrumentationLibrarySpans = _reflection.GeneratedProtocolMessageType('InstrumentationLibrarySpans', (_message.Message,), { - 'DESCRIPTOR' : _INSTRUMENTATIONLIBRARYSPANS, - '__module__' : 'opentelemetry.proto.trace.v1.trace_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.trace.v1.InstrumentationLibrarySpans) - }) -_sym_db.RegisterMessage(InstrumentationLibrarySpans) - Span = _reflection.GeneratedProtocolMessageType('Span', (_message.Message,), { 'Event' : _reflection.GeneratedProtocolMessageType('Event', (_message.Message,), { @@ -91,29 +83,23 @@ if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\037io.opentelemetry.proto.trace.v1B\nTraceProtoP\001Z\'go.opentelemetry.io/proto/otlp/trace/v1' - _RESOURCESPANS.fields_by_name['instrumentation_library_spans']._options = None - _RESOURCESPANS.fields_by_name['instrumentation_library_spans']._serialized_options = b'\030\001' - _INSTRUMENTATIONLIBRARYSPANS._options = None - _INSTRUMENTATIONLIBRARYSPANS._serialized_options = b'\030\001' + DESCRIPTOR._serialized_options = b'\n\037io.opentelemetry.proto.trace.v1B\nTraceProtoP\001Z\'go.opentelemetry.io/proto/otlp/trace/v1\252\002\034OpenTelemetry.Proto.Trace.V1' _TRACESDATA._serialized_start=166 _TRACESDATA._serialized_end=247 _RESOURCESPANS._serialized_start=250 - _RESOURCESPANS._serialized_end=512 - _SCOPESPANS._serialized_start=515 - _SCOPESPANS._serialized_end=666 - _INSTRUMENTATIONLIBRARYSPANS._serialized_start=669 - _INSTRUMENTATIONLIBRARYSPANS._serialized_end=861 - _SPAN._serialized_start=864 - _SPAN._serialized_end=1862 - _SPAN_EVENT._serialized_start=1406 - _SPAN_EVENT._serialized_end=1546 - _SPAN_LINK._serialized_start=1549 - _SPAN_LINK._serialized_end=1706 - _SPAN_SPANKIND._serialized_start=1709 - _SPAN_SPANKIND._serialized_end=1862 - _STATUS._serialized_start=1865 - _STATUS._serialized_end=2039 - _STATUS_STATUSCODE._serialized_start=1955 - _STATUS_STATUSCODE._serialized_end=2033 + _RESOURCESPANS._serialized_end=417 + _SCOPESPANS._serialized_start=420 + _SCOPESPANS._serialized_end=571 + _SPAN._serialized_start=574 + _SPAN._serialized_end=1572 + _SPAN_EVENT._serialized_start=1116 + _SPAN_EVENT._serialized_end=1256 + _SPAN_LINK._serialized_start=1259 + _SPAN_LINK._serialized_end=1416 + _SPAN_SPANKIND._serialized_start=1419 + _SPAN_SPANKIND._serialized_end=1572 + _STATUS._serialized_start=1575 + _STATUS._serialized_end=1749 + _STATUS_STATUSCODE._serialized_start=1665 + _STATUS_STATUSCODE._serialized_end=1743 # @@protoc_insertion_point(module_scope) diff --git a/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.pyi index 170d66e1c5..52052ff7e9 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.pyi +++ b/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.pyi @@ -49,7 +49,6 @@ class ResourceSpans(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... 
RESOURCE_FIELD_NUMBER: builtins.int SCOPE_SPANS_FIELD_NUMBER: builtins.int - INSTRUMENTATION_LIBRARY_SPANS_FIELD_NUMBER: builtins.int SCHEMA_URL_FIELD_NUMBER: builtins.int @property def resource(self) -> opentelemetry.proto.resource.v1.resource_pb2.Resource: @@ -61,37 +60,6 @@ class ResourceSpans(google.protobuf.message.Message): def scope_spans(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ScopeSpans]: """A list of ScopeSpans that originate from a resource.""" pass - @property - def instrumentation_library_spans(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___InstrumentationLibrarySpans]: - """A list of InstrumentationLibrarySpans that originate from a resource. - This field is deprecated and will be removed after grace period expires on June 15, 2022. - - During the grace period the following rules SHOULD be followed: - - For Binary Protobufs - ==================== - Binary Protobuf senders SHOULD NOT set instrumentation_library_spans. Instead - scope_spans SHOULD be set. - - Binary Protobuf receivers SHOULD check if instrumentation_library_spans is set - and scope_spans is not set then the value in instrumentation_library_spans - SHOULD be used instead by converting InstrumentationLibrarySpans into ScopeSpans. - If scope_spans is set then instrumentation_library_spans SHOULD be ignored. - - For JSON - ======== - JSON senders that set instrumentation_library_spans field MAY also set - scope_spans to carry the same spans, essentially double-publishing the same data. - Such double-publishing MAY be controlled by a user-settable option. - If double-publishing is not used then the senders SHOULD set scope_spans and - SHOULD NOT set instrumentation_library_spans. - - JSON receivers SHOULD check if instrumentation_library_spans is set and - scope_spans is not set then the value in instrumentation_library_spans - SHOULD be used instead by converting InstrumentationLibrarySpans into ScopeSpans. - If scope_spans is set then instrumentation_library_spans field SHOULD be ignored. - """ - pass schema_url: typing.Text = ... """This schema_url applies to the data in the "resource" field. It does not apply to the data in the "scope_spans" field which have their own schema_url field. @@ -101,11 +69,10 @@ class ResourceSpans(google.protobuf.message.Message): *, resource : typing.Optional[opentelemetry.proto.resource.v1.resource_pb2.Resource] = ..., scope_spans : typing.Optional[typing.Iterable[global___ScopeSpans]] = ..., - instrumentation_library_spans : typing.Optional[typing.Iterable[global___InstrumentationLibrarySpans]] = ..., schema_url : typing.Text = ..., ) -> None: ... def HasField(self, field_name: typing_extensions.Literal["resource",b"resource"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["instrumentation_library_spans",b"instrumentation_library_spans","resource",b"resource","schema_url",b"schema_url","scope_spans",b"scope_spans"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["resource",b"resource","schema_url",b"schema_url","scope_spans",b"scope_spans"]) -> None: ... global___ResourceSpans = ResourceSpans class ScopeSpans(google.protobuf.message.Message): @@ -138,48 +105,8 @@ class ScopeSpans(google.protobuf.message.Message): def ClearField(self, field_name: typing_extensions.Literal["schema_url",b"schema_url","scope",b"scope","spans",b"spans"]) -> None: ... 
global___ScopeSpans = ScopeSpans -class InstrumentationLibrarySpans(google.protobuf.message.Message): - """A collection of Spans produced by an InstrumentationLibrary. - InstrumentationLibrarySpans is wire-compatible with ScopeSpans for binary - Protobuf format. - This message is deprecated and will be removed on June 15, 2022. - """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... - INSTRUMENTATION_LIBRARY_FIELD_NUMBER: builtins.int - SPANS_FIELD_NUMBER: builtins.int - SCHEMA_URL_FIELD_NUMBER: builtins.int - @property - def instrumentation_library(self) -> opentelemetry.proto.common.v1.common_pb2.InstrumentationLibrary: - """The instrumentation library information for the spans in this message. - Semantically when InstrumentationLibrary isn't set, it is equivalent with - an empty instrumentation library name (unknown). - """ - pass - @property - def spans(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Span]: - """A list of Spans that originate from an instrumentation library.""" - pass - schema_url: typing.Text = ... - """This schema_url applies to all spans and span events in the "spans" field.""" - - def __init__(self, - *, - instrumentation_library : typing.Optional[opentelemetry.proto.common.v1.common_pb2.InstrumentationLibrary] = ..., - spans : typing.Optional[typing.Iterable[global___Span]] = ..., - schema_url : typing.Text = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["instrumentation_library",b"instrumentation_library"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["instrumentation_library",b"instrumentation_library","schema_url",b"schema_url","spans",b"spans"]) -> None: ... -global___InstrumentationLibrarySpans = InstrumentationLibrarySpans - class Span(google.protobuf.message.Message): - """Span represents a single operation within a trace. Spans can be - nested to form a trace tree. Spans may also be linked to other spans - from the same or different trace and form graphs. Often, a trace - contains a root span that describes the end-to-end latency, and one - or more subspans for its sub-operations. A trace can also contain - multiple root spans, or none at all. Spans do not need to be - contiguous - there may be gaps or overlaps between spans in a trace. + """A Span represents a single operation performed by a single component of the system. The next available field id is 17. """ @@ -357,22 +284,18 @@ class Span(google.protobuf.message.Message): STATUS_FIELD_NUMBER: builtins.int trace_id: builtins.bytes = ... """A unique identifier for a trace. All spans from the same trace share - the same `trace_id`. The ID is a 16-byte array. An ID with all zeroes - is considered invalid. - - This field is semantically required. Receiver should generate new - random trace_id if empty or invalid trace_id was received. + the same `trace_id`. The ID is a 16-byte array. An ID with all zeroes OR + of length other than 16 bytes is considered invalid (empty string in OTLP/JSON + is zero-length and thus is also invalid). This field is required. """ span_id: builtins.bytes = ... """A unique identifier for a span within a trace, assigned when the span - is created. The ID is an 8-byte array. An ID with all zeroes is considered - invalid. - - This field is semantically required. Receiver should generate new - random span_id if empty or invalid span_id was received. + is created. The ID is an 8-byte array. 
An ID with all zeroes OR of length + other than 8 bytes is considered invalid (empty string in OTLP/JSON + is zero-length and thus is also invalid). This field is required. """ @@ -433,11 +356,11 @@ class Span(google.protobuf.message.Message): "/http/user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36" "/http/server_latency": 300 - "abc.com/myattribute": true - "abc.com/score": 10.239 + "example.com/myattribute": true + "example.com/score": 10.239 The OpenTelemetry API specification further restricts the allowed value types: - https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/common/common.md#attributes + https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/common/README.md#attribute Attribute keys MUST be unique (it is not allowed to have more than one attribute with the same key). """ @@ -514,8 +437,8 @@ class Status(google.protobuf.message.Message): """The default status.""" STATUS_CODE_OK = Status.StatusCode.V(1) - """The Span has been validated by an Application developers or Operator to have - completed successfully. + """The Span has been validated by an Application developer or Operator to + have completed successfully. """ STATUS_CODE_ERROR = Status.StatusCode.V(2) @@ -526,8 +449,8 @@ class Status(google.protobuf.message.Message): """The default status.""" STATUS_CODE_OK = Status.StatusCode.V(1) - """The Span has been validated by an Application developers or Operator to have - completed successfully. + """The Span has been validated by an Application developer or Operator to + have completed successfully. """ STATUS_CODE_ERROR = Status.StatusCode.V(2) diff --git a/scripts/proto_codegen.sh b/scripts/proto_codegen.sh index 7625848f71..53d8ad8fc7 100755 --- a/scripts/proto_codegen.sh +++ b/scripts/proto_codegen.sh @@ -12,7 +12,7 @@ # PROTO_REPO_DIR - the path to an existing checkout of the opentelemetry-proto repo # Pinned commit/branch/tag for the current version used in opentelemetry-proto python package. 
-PROTO_REPO_BRANCH_OR_COMMIT="v0.17.0" +PROTO_REPO_BRANCH_OR_COMMIT="v0.20.0" set -e From 902602721cf651223f30ccdbb0e35c87745bb210 Mon Sep 17 00:00:00 2001 From: Shalev Roda <65566801+shalevr@users.noreply.github.com> Date: Mon, 26 Jun 2023 23:58:10 +0300 Subject: [PATCH 2/8] Add unit to view instrument selection criteria (#3341) Co-authored-by: Srikanth Chekuri --- CHANGELOG.md | 2 ++ .../src/opentelemetry/sdk/metrics/_internal/view.py | 10 ++++++++++ opentelemetry-sdk/tests/metrics/test_view.py | 9 +++++++++ 3 files changed, 21 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index f1332776ef..1afd81b6eb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Add max_scale option to Exponential Bucket Histogram Aggregation [#3323](https://github.com/open-telemetry/opentelemetry-python/pull/3323)) - Use BoundedAttributes instead of raw dict to extract attributes from LogRecord and Support dropped_attributes_count in LogRecord ([#3310](https://github.com/open-telemetry/opentelemetry-python/pull/3310)) +- Add unit to view instrument selection criteria + ([#3341](https://github.com/open-telemetry/opentelemetry-python/pull/3341)) - Upgrade opentelemetry-proto to 0.20 and regen [#3355](https://github.com/open-telemetry/opentelemetry-python/pull/3355)) diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/view.py b/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/view.py index b930c7a966..28f7b4fe08 100644 --- a/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/view.py +++ b/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/view.py @@ -76,6 +76,9 @@ class View: corresponding metrics stream. If `None` an instance of `DefaultAggregation` will be used. + instrument_unit: This is an instrument matching attribute: the unit the + instrument must have to match the view. + This class is not intended to be subclassed by the user. 
""" @@ -92,10 +95,12 @@ def __init__( description: Optional[str] = None, attribute_keys: Optional[Set[str]] = None, aggregation: Optional[Aggregation] = None, + instrument_unit: Optional[str] = None, ): if ( instrument_type is instrument_name + is instrument_unit is meter_name is meter_version is meter_schema_url @@ -122,6 +127,7 @@ def __init__( self._name = name self._instrument_type = instrument_type self._instrument_name = instrument_name + self._instrument_unit = instrument_unit self._meter_name = meter_name self._meter_version = meter_version self._meter_schema_url = meter_schema_url @@ -143,6 +149,10 @@ def _match(self, instrument: Instrument) -> bool: if not fnmatch(instrument.name, self._instrument_name): return False + if self._instrument_unit is not None: + if not fnmatch(instrument.unit, self._instrument_unit): + return False + if self._meter_name is not None: if instrument.instrumentation_scope.name != self._meter_name: return False diff --git a/opentelemetry-sdk/tests/metrics/test_view.py b/opentelemetry-sdk/tests/metrics/test_view.py index 2d1fee490f..00376a0068 100644 --- a/opentelemetry-sdk/tests/metrics/test_view.py +++ b/opentelemetry-sdk/tests/metrics/test_view.py @@ -37,6 +37,15 @@ def test_instrument_name(self): View(instrument_name="instrument_name")._match(mock_instrument) ) + def test_instrument_unit(self): + + mock_instrument = Mock() + mock_instrument.configure_mock(**{"unit": "instrument_unit"}) + + self.assertTrue( + View(instrument_unit="instrument_unit")._match(mock_instrument) + ) + def test_meter_name(self): self.assertTrue( From ad2de35bb9373d18fae7de515a326ce07fb89158 Mon Sep 17 00:00:00 2001 From: Nina Stawski Date: Mon, 26 Jun 2023 14:16:35 -0700 Subject: [PATCH 3/8] Add dropped_attributes_count support in exporters (#3351) Co-authored-by: Diego Hurtado --- CHANGELOG.md | 8 ++- .../common/_internal/_log_encoder/__init__.py | 1 + .../tests/test_log_encoder.py | 54 ++++++++++++++++++- .../sdk/_logs/_internal/__init__.py | 1 + .../tests/logs/test_log_record.py | 1 + 5 files changed, 62 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1afd81b6eb..0a22a6c7c7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,8 +7,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## Unreleased -- Add max_scale option to Exponential Bucket Histogram Aggregation [#3323](https://github.com/open-telemetry/opentelemetry-python/pull/3323)) -- Use BoundedAttributes instead of raw dict to extract attributes from LogRecord and Support dropped_attributes_count in LogRecord ([#3310](https://github.com/open-telemetry/opentelemetry-python/pull/3310)) +- Add max_scale option to Exponential Bucket Histogram Aggregation + ([#3323](https://github.com/open-telemetry/opentelemetry-python/pull/3323)) +- Use BoundedAttributes instead of raw dict to extract attributes from LogRecord + ([#3310](https://github.com/open-telemetry/opentelemetry-python/pull/3310)) +- Support dropped_attributes_count in LogRecord and exporters + ([#3351](https://github.com/open-telemetry/opentelemetry-python/pull/3351)) - Add unit to view instrument selection criteria ([#3341](https://github.com/open-telemetry/opentelemetry-python/pull/3341)) - Upgrade opentelemetry-proto to 0.20 and regen diff --git a/exporter/opentelemetry-exporter-otlp-proto-common/src/opentelemetry/exporter/otlp/proto/common/_internal/_log_encoder/__init__.py 
b/exporter/opentelemetry-exporter-otlp-proto-common/src/opentelemetry/exporter/otlp/proto/common/_internal/_log_encoder/__init__.py index 7c135d90ba..47c254033b 100644 --- a/exporter/opentelemetry-exporter-otlp-proto-common/src/opentelemetry/exporter/otlp/proto/common/_internal/_log_encoder/__init__.py +++ b/exporter/opentelemetry-exporter-otlp-proto-common/src/opentelemetry/exporter/otlp/proto/common/_internal/_log_encoder/__init__.py @@ -47,6 +47,7 @@ def _encode_log(log_data: LogData) -> PB2LogRecord: body=_encode_value(log_data.log_record.body), severity_text=log_data.log_record.severity_text, attributes=_encode_attributes(log_data.log_record.attributes), + dropped_attributes_count=log_data.log_record.dropped_attributes, severity_number=log_data.log_record.severity_number.value, ) diff --git a/exporter/opentelemetry-exporter-otlp-proto-common/tests/test_log_encoder.py b/exporter/opentelemetry-exporter-otlp-proto-common/tests/test_log_encoder.py index 1cd86b2833..1fdb1977ba 100644 --- a/exporter/opentelemetry-exporter-otlp-proto-common/tests/test_log_encoder.py +++ b/exporter/opentelemetry-exporter-otlp-proto-common/tests/test_log_encoder.py @@ -39,7 +39,7 @@ from opentelemetry.proto.resource.v1.resource_pb2 import ( Resource as PB2Resource, ) -from opentelemetry.sdk._logs import LogData +from opentelemetry.sdk._logs import LogData, LogLimits from opentelemetry.sdk._logs import LogRecord as SDKLogRecord from opentelemetry.sdk.resources import Resource as SDKResource from opentelemetry.sdk.util.instrumentation import InstrumentationScope @@ -51,6 +51,19 @@ def test_encode(self): sdk_logs, expected_encoding = self.get_test_logs() self.assertEqual(encode_logs(sdk_logs), expected_encoding) + def test_dropped_attributes_count(self): + sdk_logs = self._get_test_logs_dropped_attributes() + encoded_logs = encode_logs(sdk_logs) + self.assertTrue(hasattr(sdk_logs[0].log_record, "dropped_attributes")) + self.assertEqual( + # pylint:disable=no-member + encoded_logs.resource_logs[0] + .scope_logs[0] + .log_records[0] + .dropped_attributes_count, + 2, + ) + @staticmethod def _get_sdk_log_data() -> List[LogData]: log1 = LogData( @@ -251,3 +264,42 @@ def get_test_logs( ) return sdk_logs, pb2_service_request + + @staticmethod + def _get_test_logs_dropped_attributes() -> List[LogData]: + log1 = LogData( + log_record=SDKLogRecord( + timestamp=1644650195189786880, + trace_id=89564621134313219400156819398935297684, + span_id=1312458408527513268, + trace_flags=TraceFlags(0x01), + severity_text="WARN", + severity_number=SeverityNumber.WARN, + body="Do not go gentle into that good night. 
Rage, rage against the dying of the light", + resource=SDKResource({"first_resource": "value"}), + attributes={"a": 1, "b": "c", "user_id": "B121092"}, + limits=LogLimits(max_attributes=1), + ), + instrumentation_scope=InstrumentationScope( + "first_name", "first_version" + ), + ) + + log2 = LogData( + log_record=SDKLogRecord( + timestamp=1644650249738562048, + trace_id=0, + span_id=0, + trace_flags=TraceFlags.DEFAULT, + severity_text="WARN", + severity_number=SeverityNumber.WARN, + body="Cooper, this is no time for caution!", + resource=SDKResource({"second_resource": "CASE"}), + attributes={}, + ), + instrumentation_scope=InstrumentationScope( + "second_name", "second_version" + ), + ) + + return [log1, log2] diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/_logs/_internal/__init__.py b/opentelemetry-sdk/src/opentelemetry/sdk/_logs/_internal/__init__.py index 7410138067..578ce2c391 100644 --- a/opentelemetry-sdk/src/opentelemetry/sdk/_logs/_internal/__init__.py +++ b/opentelemetry-sdk/src/opentelemetry/sdk/_logs/_internal/__init__.py @@ -203,6 +203,7 @@ def to_json(self, indent=4) -> str: "attributes": dict(self.attributes) if bool(self.attributes) else None, + "dropped_attributes": self.dropped_attributes, "timestamp": ns_to_iso_str(self.timestamp), "trace_id": f"0x{format_trace_id(self.trace_id)}" if self.trace_id is not None diff --git a/opentelemetry-sdk/tests/logs/test_log_record.py b/opentelemetry-sdk/tests/logs/test_log_record.py index a5993e5833..1f0bd785a8 100644 --- a/opentelemetry-sdk/tests/logs/test_log_record.py +++ b/opentelemetry-sdk/tests/logs/test_log_record.py @@ -27,6 +27,7 @@ def test_log_record_to_json(self): "severity_number": "None", "severity_text": None, "attributes": None, + "dropped_attributes": 0, "timestamp": "1970-01-01T00:00:00.000000Z", "trace_id": "", "span_id": "", From 251b74a2bb915dfc11fa839154980868869ea291 Mon Sep 17 00:00:00 2001 From: Shalev Roda <65566801+shalevr@users.noreply.github.com> Date: Tue, 27 Jun 2023 10:38:47 +0300 Subject: [PATCH 4/8] Add timeout for httpTestBase (#3318) --- .../opentelemetry-test-utils/src/opentelemetry/test/httptest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/opentelemetry-test-utils/src/opentelemetry/test/httptest.py b/tests/opentelemetry-test-utils/src/opentelemetry/test/httptest.py index 94964ea9f1..84591ca0f1 100644 --- a/tests/opentelemetry-test-utils/src/opentelemetry/test/httptest.py +++ b/tests/opentelemetry-test-utils/src/opentelemetry/test/httptest.py @@ -24,7 +24,7 @@ class HttpTestBase(unittest.TestCase): class Handler(BaseHTTPRequestHandler): protocol_version = "HTTP/1.1" # Support keep-alive. 
- # timeout = 3 # No timeout -- if shutdown hangs, make sure to close your connection + timeout = 3 # Seconds STATUS_RE = re.compile(r"/status/(\d+)") From f4aecdf5cee92283d44fbb2f39ee2b93cc1eb4ca Mon Sep 17 00:00:00 2001 From: Srikanth Chekuri Date: Tue, 27 Jun 2023 13:57:49 +0530 Subject: [PATCH 5/8] Show helpful message when propagator is not found (#3259) Co-authored-by: Leighton Chen Co-authored-by: Diego Hurtado --- .../src/opentelemetry/propagate/__init__.py | 9 +++++---- .../tests/propagators/test_propagators.py | 19 +++++++++---------- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/opentelemetry-api/src/opentelemetry/propagate/__init__.py b/opentelemetry-api/src/opentelemetry/propagate/__init__.py index 56f217f282..90f9e61744 100644 --- a/opentelemetry-api/src/opentelemetry/propagate/__init__.py +++ b/opentelemetry-api/src/opentelemetry/propagate/__init__.py @@ -144,11 +144,12 @@ def inject( ) ).load()() ) - - except Exception: # pylint: disable=broad-except - logger.exception( - "Failed to load configured propagator: %s", propagator + except StopIteration: + raise ValueError( + f"Propagator {propagator} not found. It is either misspelled or not installed." ) + except Exception: # pylint: disable=broad-except + logger.exception("Failed to load propagator: %s", propagator) raise diff --git a/opentelemetry-api/tests/propagators/test_propagators.py b/opentelemetry-api/tests/propagators/test_propagators.py index bb84bc4f1a..29065b8cb3 100644 --- a/opentelemetry-api/tests/propagators/test_propagators.py +++ b/opentelemetry-api/tests/propagators/test_propagators.py @@ -15,7 +15,6 @@ # type: ignore from importlib import reload -from logging import ERROR from os import environ from unittest import TestCase from unittest.mock import Mock, patch @@ -109,16 +108,16 @@ def test_propagators(propagators): ) def test_composite_propagators_error(self): - # pylint: disable=import-outside-toplevel - import opentelemetry.propagate + with self.assertRaises(ValueError) as cm: + # pylint: disable=import-outside-toplevel + import opentelemetry.propagate + + reload(opentelemetry.propagate) - with self.assertRaises(Exception): - with self.assertLogs(level=ERROR) as err: - reload(opentelemetry.propagate) - self.assertIn( - "Failed to load configured propagator `unknown`", - err.output[0], - ) + self.assertEqual( + str(cm.exception), + "Propagator unknown not found. 
It is either misspelled or not installed.", + ) class TestTraceContextTextMapPropagator(TestCase): From b15de888fee2477e371ba4908b23bc957a212b3d Mon Sep 17 00:00:00 2001 From: Diego Hurtado Date: Tue, 27 Jun 2023 10:41:39 +0200 Subject: [PATCH 6/8] Remove log messages from console (#3357) Co-authored-by: Shalev Roda <65566801+shalevr@users.noreply.github.com> --- opentelemetry-sdk/tests/logs/test_export.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/opentelemetry-sdk/tests/logs/test_export.py b/opentelemetry-sdk/tests/logs/test_export.py index aa68b09624..774df5771a 100644 --- a/opentelemetry-sdk/tests/logs/test_export.py +++ b/opentelemetry-sdk/tests/logs/test_export.py @@ -58,6 +58,7 @@ def test_simple_log_record_processor_default_level(self): ) logger = logging.getLogger("default_level") + logger.propagate = False logger.addHandler(LoggingHandler(logger_provider=logger_provider)) logger.warning("Something is wrong") @@ -79,6 +80,7 @@ def test_simple_log_record_processor_custom_level(self): ) logger = logging.getLogger("custom_level") + logger.propagate = False logger.setLevel(logging.ERROR) logger.addHandler(LoggingHandler(logger_provider=logger_provider)) @@ -111,6 +113,7 @@ def test_simple_log_record_processor_trace_correlation(self): ) logger = logging.getLogger("trace_correlation") + logger.propagate = False logger.addHandler(LoggingHandler(logger_provider=logger_provider)) logger.warning("Warning message") @@ -150,6 +153,7 @@ def test_simple_log_record_processor_shutdown(self): ) logger = logging.getLogger("shutdown") + logger.propagate = False logger.addHandler(LoggingHandler(logger_provider=logger_provider)) logger.warning("Something is wrong") @@ -176,6 +180,7 @@ def test_emit_call_log_record(self): provider.add_log_record_processor(log_record_processor) logger = logging.getLogger("emit_call") + logger.propagate = False logger.addHandler(LoggingHandler(logger_provider=provider)) logger.error("error") @@ -310,6 +315,7 @@ def test_shutdown(self): provider.add_log_record_processor(log_record_processor) logger = logging.getLogger("shutdown") + logger.propagate = False logger.addHandler(LoggingHandler(logger_provider=provider)) logger.warning("warning message: %s", "possible upcoming heatwave") @@ -342,6 +348,7 @@ def test_force_flush(self): provider.add_log_record_processor(log_record_processor) logger = logging.getLogger("force_flush") + logger.propagate = False logger.addHandler(LoggingHandler(logger_provider=provider)) logger.critical("Earth is burning") @@ -360,6 +367,7 @@ def test_log_record_processor_too_many_logs(self): provider.add_log_record_processor(log_record_processor) logger = logging.getLogger("many_logs") + logger.propagate = False logger.addHandler(LoggingHandler(logger_provider=provider)) for log_no in range(1000): @@ -377,6 +385,7 @@ def test_with_multiple_threads(self): provider.add_log_record_processor(log_record_processor) logger = logging.getLogger("threads") + logger.propagate = False logger.addHandler(LoggingHandler(logger_provider=provider)) def bulk_log_and_flush(num_logs): @@ -411,6 +420,7 @@ def test_batch_log_record_processor_fork(self): provider.add_log_record_processor(log_record_processor) logger = logging.getLogger("test-fork") + logger.propagate = False logger.addHandler(LoggingHandler(logger_provider=provider)) logger.critical("yolo") From 6559e07d0f10aa947e3debbc65640690c866991a Mon Sep 17 00:00:00 2001 From: Shalev Roda <65566801+shalevr@users.noreply.github.com> Date: Tue, 27 Jun 2023 15:30:05 +0300 Subject: [PATCH 7/8] Update 
approvers list (#3358) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index e3923e8fe5..4f42fd0bd5 100644 --- a/README.md +++ b/README.md @@ -101,6 +101,7 @@ Approvers ([@open-telemetry/python-approvers](https://github.com/orgs/open-telem - [Aaron Abbott](https://github.com/aabmass), Google - [Jeremy Voss](https://github.com/jeremydvoss), Microsoft - [Sanket Mehta](https://github.com/sanketmehta28), Cisco +- [Shalev Roda](https://github.com/shalevr), Cisco Emeritus Approvers From adbcf820f6ff24de80278113ce9629e67111ef8e Mon Sep 17 00:00:00 2001 From: nerstak <33179821+nerstak@users.noreply.github.com> Date: Wed, 28 Jun 2023 18:34:34 +0200 Subject: [PATCH 8/8] Exporting resources attributes on target_info for Prometheus Exporter (#3279) Co-authored-by: Srikanth Chekuri --- CHANGELOG.md | 2 + .../exporter/prometheus/__init__.py | 35 +++++++++-- .../tests/test_prometheus_exporter.py | 58 +++++++++++++++++-- 3 files changed, 86 insertions(+), 9 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0a22a6c7c7..d6d98b5e97 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## Unreleased + - Add max_scale option to Exponential Bucket Histogram Aggregation ([#3323](https://github.com/open-telemetry/opentelemetry-python/pull/3323)) - Use BoundedAttributes instead of raw dict to extract attributes from LogRecord @@ -35,6 +36,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Add speced out environment variables and arguments for BatchLogRecordProcessor ([#3237](https://github.com/open-telemetry/opentelemetry-python/pull/3237)) + ## Version 1.17.0/0.38b0 (2023-03-22) - Implement LowMemory temporality diff --git a/exporter/opentelemetry-exporter-prometheus/src/opentelemetry/exporter/prometheus/__init__.py b/exporter/opentelemetry-exporter-prometheus/src/opentelemetry/exporter/prometheus/__init__.py index 7442b7b242..9ece76755c 100644 --- a/exporter/opentelemetry-exporter-prometheus/src/opentelemetry/exporter/prometheus/__init__.py +++ b/exporter/opentelemetry-exporter-prometheus/src/opentelemetry/exporter/prometheus/__init__.py @@ -74,6 +74,7 @@ CounterMetricFamily, GaugeMetricFamily, HistogramMetricFamily, + InfoMetricFamily, ) from prometheus_client.core import Metric as PrometheusMetric @@ -97,6 +98,9 @@ _logger = getLogger(__name__) +_TARGET_INFO_NAME = "target" +_TARGET_INFO_DESCRIPTION = "Target metadata" + def _convert_buckets( bucket_counts: Sequence[int], explicit_bounds: Sequence[float] @@ -116,8 +120,7 @@ def _convert_buckets( class PrometheusMetricReader(MetricReader): """Prometheus metric exporter for OpenTelemetry.""" - def __init__(self) -> None: - + def __init__(self, disable_target_info: bool = False) -> None: super().__init__( preferred_temporality={ Counter: AggregationTemporality.CUMULATIVE, @@ -128,7 +131,7 @@ def __init__(self) -> None: ObservableGauge: AggregationTemporality.CUMULATIVE, } ) - self._collector = _CustomCollector() + self._collector = _CustomCollector(disable_target_info) REGISTRY.register(self._collector) self._collector._callback = self.collect @@ -153,12 +156,14 @@ class _CustomCollector: https://github.com/prometheus/client_python#custom-collectors """ - def __init__(self): + def __init__(self, disable_target_info: bool = False): self._callback = None self._metrics_datas = deque() self._non_letters_digits_underscore_re = compile( r"[^\w]", UNICODE | IGNORECASE ) + self._disable_target_info = 
disable_target_info + self._target_info = None def add_metrics_data(self, metrics_data: MetricsData) -> None: """Add metrics to Prometheus data""" @@ -175,6 +180,20 @@ def collect(self) -> None: metric_family_id_metric_family = {} + if len(self._metrics_datas): + if not self._disable_target_info: + if self._target_info is None: + attributes = {} + for res in self._metrics_datas[0].resource_metrics: + attributes = {**attributes, **res.resource.attributes} + + self._target_info = self._create_info_metric( + _TARGET_INFO_NAME, _TARGET_INFO_DESCRIPTION, attributes + ) + metric_family_id_metric_family[ + _TARGET_INFO_NAME + ] = self._target_info + while self._metrics_datas: self._translate_to_prometheus( self._metrics_datas.popleft(), metric_family_id_metric_family @@ -327,3 +346,11 @@ def _check_value(self, value: Union[int, float, str, Sequence]) -> str: if not isinstance(value, str): return dumps(value, default=str) return str(value) + + def _create_info_metric( + self, name: str, description: str, attributes: Dict[str, str] + ) -> InfoMetricFamily: + """Create an Info Metric Family with list of attributes""" + info = InfoMetricFamily(name, description, labels=attributes) + info.add_metric(labels=list(attributes.keys()), value=attributes) + return info diff --git a/exporter/opentelemetry-exporter-prometheus/tests/test_prometheus_exporter.py b/exporter/opentelemetry-exporter-prometheus/tests/test_prometheus_exporter.py index 1180fac614..c7ce1afae1 100644 --- a/exporter/opentelemetry-exporter-prometheus/tests/test_prometheus_exporter.py +++ b/exporter/opentelemetry-exporter-prometheus/tests/test_prometheus_exporter.py @@ -17,7 +17,11 @@ from unittest.mock import Mock, patch from prometheus_client import generate_latest -from prometheus_client.core import CounterMetricFamily, GaugeMetricFamily +from prometheus_client.core import ( + CounterMetricFamily, + GaugeMetricFamily, + InfoMetricFamily, +) from opentelemetry.exporter.prometheus import ( PrometheusMetricReader, @@ -33,6 +37,7 @@ ResourceMetrics, ScopeMetrics, ) +from opentelemetry.sdk.resources import Resource from opentelemetry.test.metrictestutil import ( _generate_gauge, _generate_sum, @@ -101,7 +106,7 @@ def test_histogram_to_prometheus(self): ] ) - collector = _CustomCollector() + collector = _CustomCollector(disable_target_info=True) collector.add_metrics_data(metrics_data) result_bytes = generate_latest(collector) result = result_bytes.decode("utf-8") @@ -146,7 +151,7 @@ def test_sum_to_prometheus(self): ] ) - collector = _CustomCollector() + collector = _CustomCollector(disable_target_info=True) collector.add_metrics_data(metrics_data) for prometheus_metric in collector.collect(): @@ -189,7 +194,7 @@ def test_gauge_to_prometheus(self): ] ) - collector = _CustomCollector() + collector = _CustomCollector(disable_target_info=True) collector.add_metrics_data(metrics_data) for prometheus_metric in collector.collect(): @@ -251,7 +256,7 @@ def test_list_labels(self): ) ] ) - collector = _CustomCollector() + collector = _CustomCollector(disable_target_info=True) collector.add_metrics_data(metrics_data) for prometheus_metric in collector.collect(): @@ -293,3 +298,46 @@ def test_multiple_collection_calls(self): result_2 = list(metric_reader._collector.collect()) self.assertEqual(result_0, result_1) self.assertEqual(result_1, result_2) + + def test_target_info_enabled_by_default(self): + metric_reader = PrometheusMetricReader() + provider = MeterProvider( + metric_readers=[metric_reader], + resource=Resource({"os": "Unix", "histo": 1}), + ) 
+        meter = provider.get_meter("getting-started", "0.1.2")
+        counter = meter.create_counter("counter")
+        counter.add(1)
+        result = list(metric_reader._collector.collect())
+
+        for prometheus_metric in result[:1]:
+            self.assertEqual(type(prometheus_metric), InfoMetricFamily)
+            self.assertEqual(prometheus_metric.name, "target")
+            self.assertEqual(
+                prometheus_metric.documentation, "Target metadata"
+            )
+            self.assertTrue(len(prometheus_metric.samples) == 1)
+            self.assertEqual(prometheus_metric.samples[0].value, 1)
+            self.assertTrue(len(prometheus_metric.samples[0].labels) == 2)
+            self.assertEqual(prometheus_metric.samples[0].labels["os"], "Unix")
+            self.assertEqual(prometheus_metric.samples[0].labels["histo"], "1")
+
+    def test_target_info_disabled(self):
+        metric_reader = PrometheusMetricReader(disable_target_info=True)
+        provider = MeterProvider(
+            metric_readers=[metric_reader],
+            resource=Resource({"os": "Unix", "histo": 1}),
+        )
+        meter = provider.get_meter("getting-started", "0.1.2")
+        counter = meter.create_counter("counter")
+        counter.add(1)
+        result = list(metric_reader._collector.collect())
+
+        for prometheus_metric in result:
+            self.assertNotEqual(type(prometheus_metric), InfoMetricFamily)
+            self.assertNotEqual(prometheus_metric.name, "target")
+            self.assertNotEqual(
+                prometheus_metric.documentation, "Target metadata"
+            )
+            self.assertNotIn("os", prometheus_metric.samples[0].labels)
+            self.assertNotIn("histo", prometheus_metric.samples[0].labels)
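
The snippet below is an illustrative sketch, not part of the changeset, showing how the target_info behaviour and the new disable_target_info flag added in PATCH 8/8 might be exercised end to end. It assumes opentelemetry-sdk, opentelemetry-exporter-prometheus and prometheus-client are installed; the resource attribute values, the meter name ("demo_meter") and the counter name ("requests") are placeholders, and the exposition text in the comments is only an approximation of the real scrape output.

    # Minimal usage sketch for the resource-attributes-on-target_info feature.
    from prometheus_client import REGISTRY, generate_latest

    from opentelemetry.exporter.prometheus import PrometheusMetricReader
    from opentelemetry.sdk.metrics import MeterProvider
    from opentelemetry.sdk.resources import Resource

    # Resource attributes are exported once as labels on the "target_info"
    # metric; pass disable_target_info=True to the reader to opt out.
    reader = PrometheusMetricReader()
    provider = MeterProvider(
        metric_readers=[reader],
        resource=Resource({"service_name": "demo", "deployment_env": "dev"}),
    )

    meter = provider.get_meter("demo_meter")
    requests_counter = meter.create_counter("requests")
    requests_counter.add(1)

    # The reader registers its collector on the default Prometheus REGISTRY,
    # so a scrape (or generate_latest) should include, besides the counter,
    # output roughly like:
    #   # HELP target_info Target metadata
    #   # TYPE target_info gauge
    #   target_info{service_name="demo",deployment_env="dev"} 1.0
    print(generate_latest(REGISTRY).decode("utf-8"))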