diff --git a/FullAgent.sln b/FullAgent.sln index ccbb75dde..c5fa9d65a 100644 --- a/FullAgent.sln +++ b/FullAgent.sln @@ -167,6 +167,7 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Home", "src\Agent\NewRelic\ {203A8EA4-A0F2-4139-B02E-9B0F2B39C107} = {203A8EA4-A0F2-4139-B02E-9B0F2B39C107} {22274460-3222-4474-B679-19E1F27A2CC3} = {22274460-3222-4474-B679-19E1F27A2CC3} {230CDE78-9D21-4D10-9C4B-E00C88B8B021} = {230CDE78-9D21-4D10-9C4B-E00C88B8B021} + {270A9CC8-8031-49F4-A380-1389E7517DB7} = {270A9CC8-8031-49F4-A380-1389E7517DB7} {279F8AD0-C959-476F-BD58-3581D9A33238} = {279F8AD0-C959-476F-BD58-3581D9A33238} {2E6CF650-CB50-453D-830A-D00F0540FC2C} = {2E6CF650-CB50-453D-830A-D00F0540FC2C} {2FB30555-65A4-43D7-82AA-56BF203D3A96} = {2FB30555-65A4-43D7-82AA-56BF203D3A96} @@ -215,6 +216,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "StackExchangeRedis2Plus", " EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Elasticsearch", "src\Agent\NewRelic\Agent\Extensions\Providers\Wrapper\Elasticsearch\Elasticsearch.csproj", "{D9428449-3E4B-4723-A8AA-1191315C7AAD}" EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Kafka", "src\Agent\NewRelic\Agent\Extensions\Providers\Wrapper\Kafka\Kafka.csproj", "{270A9CC8-8031-49F4-A380-1389E7517DB7}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -445,6 +448,10 @@ Global {D9428449-3E4B-4723-A8AA-1191315C7AAD}.Debug|Any CPU.Build.0 = Debug|Any CPU {D9428449-3E4B-4723-A8AA-1191315C7AAD}.Release|Any CPU.ActiveCfg = Release|Any CPU {D9428449-3E4B-4723-A8AA-1191315C7AAD}.Release|Any CPU.Build.0 = Release|Any CPU + {270A9CC8-8031-49F4-A380-1389E7517DB7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {270A9CC8-8031-49F4-A380-1389E7517DB7}.Debug|Any CPU.Build.0 = Debug|Any CPU + {270A9CC8-8031-49F4-A380-1389E7517DB7}.Release|Any CPU.ActiveCfg = Release|Any CPU + {270A9CC8-8031-49F4-A380-1389E7517DB7}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -514,10 +521,11 @@ Global {3D69B4C9-FD16-461F-95AF-6FCA6EAA914E} = {5E86E10A-C38F-48CB-ADE9-67B22BB2F50A} {EC34F023-223D-432F-9401-9C3ED1B75DE4} = {5E86E10A-C38F-48CB-ADE9-67B22BB2F50A} {D9428449-3E4B-4723-A8AA-1191315C7AAD} = {5E86E10A-C38F-48CB-ADE9-67B22BB2F50A} + {270A9CC8-8031-49F4-A380-1389E7517DB7} = {5E86E10A-C38F-48CB-ADE9-67B22BB2F50A} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution - SolutionGuid = {D8B98070-6B8E-403C-A07F-A3F2E4A3A3D0} EnterpriseLibraryConfigurationToolBinariesPath = packages\Unity.2.1.505.2\lib\NET35 + SolutionGuid = {D8B98070-6B8E-403C-A07F-A3F2E4A3A3D0} EndGlobalSection GlobalSection(TestCaseManagementSettings) = postSolution CategoryFile = FullAgent.vsmdi diff --git a/build/ArtifactBuilder/CoreAgentComponents.cs b/build/ArtifactBuilder/CoreAgentComponents.cs index 24a3f20e0..e5f4278b8 100644 --- a/build/ArtifactBuilder/CoreAgentComponents.cs +++ b/build/ArtifactBuilder/CoreAgentComponents.cs @@ -55,6 +55,7 @@ protected override void CreateAgentComponents() $@"{SourceHomeBuilderPath}\extensions\NewRelic.Providers.Wrapper.StackExchangeRedis.dll", $@"{SourceHomeBuilderPath}\extensions\NewRelic.Providers.Wrapper.StackExchangeRedis2Plus.dll", $@"{SourceHomeBuilderPath}\extensions\NewRelic.Providers.Wrapper.NServiceBus.dll", + $@"{SourceHomeBuilderPath}\extensions\NewRelic.Providers.Wrapper.Kafka.dll", }; var wrapperXmls = new[] @@ -74,6 +75,7 @@ protected override void CreateAgentComponents() 
$@"{SourceHomeBuilderPath}\extensions\NewRelic.Providers.Wrapper.StackExchangeRedis.Instrumentation.xml", $@"{SourceHomeBuilderPath}\extensions\NewRelic.Providers.Wrapper.StackExchangeRedis2Plus.Instrumentation.xml", $@"{SourceHomeBuilderPath}\extensions\NewRelic.Providers.Wrapper.NServiceBus.Instrumentation.xml", + $@"{SourceHomeBuilderPath}\extensions\NewRelic.Providers.Wrapper.Kafka.Instrumentation.xml", }; ExtensionXsd = $@"{SourceHomeBuilderPath}\extensions\extension.xsd"; diff --git a/build/ArtifactBuilder/FrameworkAgentComponents.cs b/build/ArtifactBuilder/FrameworkAgentComponents.cs index 0f00a87c9..9c4b560dc 100644 --- a/build/ArtifactBuilder/FrameworkAgentComponents.cs +++ b/build/ArtifactBuilder/FrameworkAgentComponents.cs @@ -63,7 +63,8 @@ protected override void CreateAgentComponents() $@"{SourceHomeBuilderPath}\extensions\NewRelic.Providers.Wrapper.WebOptimization.dll", $@"{SourceHomeBuilderPath}\extensions\NewRelic.Providers.Wrapper.WebServices.dll", $@"{SourceHomeBuilderPath}\extensions\NewRelic.Providers.Wrapper.AspNetCore.dll", - $@"{SourceHomeBuilderPath}\extensions\NewRelic.Providers.Wrapper.Owin.dll" + $@"{SourceHomeBuilderPath}\extensions\NewRelic.Providers.Wrapper.Owin.dll", + $@"{SourceHomeBuilderPath}\extensions\NewRelic.Providers.Wrapper.Kafka.dll", }; var wrapperXmls = new[] @@ -99,6 +100,7 @@ protected override void CreateAgentComponents() $@"{SourceHomeBuilderPath}\extensions\NewRelic.Providers.Wrapper.WebOptimization.Instrumentation.xml", $@"{SourceHomeBuilderPath}\extensions\NewRelic.Providers.Wrapper.WebServices.Instrumentation.xml", $@"{SourceHomeBuilderPath}\extensions\NewRelic.Providers.Wrapper.Misc.Instrumentation.xml", + $@"{SourceHomeBuilderPath}\extensions\NewRelic.Providers.Wrapper.Kafka.Instrumentation.xml", }; ExtensionXsd = $@"{SourceHomeBuilderPath}\extensions\extension.xsd"; diff --git a/src/Agent/MsiInstaller/Installer/Product.wxs b/src/Agent/MsiInstaller/Installer/Product.wxs index 68e5daedf..dded0cd0b 100644 --- a/src/Agent/MsiInstaller/Installer/Product.wxs +++ b/src/Agent/MsiInstaller/Installer/Product.wxs @@ -465,6 +465,10 @@ SPDX-License-Identifier: Apache-2.0 + + + + @@ -524,6 +528,9 @@ SPDX-License-Identifier: Apache-2.0 + + + @@ -629,6 +636,9 @@ SPDX-License-Identifier: Apache-2.0 + + + @@ -677,6 +687,9 @@ SPDX-License-Identifier: Apache-2.0 + + + diff --git a/src/Agent/NewRelic/Agent/Core/Agent.cs b/src/Agent/NewRelic/Agent/Core/Agent.cs index 35780e3bf..f9803b9b8 100644 --- a/src/Agent/NewRelic/Agent/Core/Agent.cs +++ b/src/Agent/NewRelic/Agent/Core/Agent.cs @@ -118,6 +118,11 @@ public ITransaction CreateTransaction(MessageBrokerDestinationType destinationTy return CreateTransaction(TransactionName.ForBrokerTransaction(destinationType, brokerVendorName, destination), true, wrapperOnCreate ?? NoOpWrapperOnCreate); } + public ITransaction CreateKafkaTransaction(MessageBrokerDestinationType destinationType, string brokerVendorName, string destination, Action wrapperOnCreate) + { + return CreateTransaction(TransactionName.ForKafkaBrokerTransaction(destinationType, brokerVendorName, destination), true, wrapperOnCreate ?? 
NoOpWrapperOnCreate); + } + public ITransaction CreateTransaction(bool isWeb, string category, string transactionDisplayName, bool doNotTrackAsUnitOfWork, Action wrapperOnCreate) { if (transactionDisplayName == null) @@ -401,11 +406,21 @@ public IStackExchangeRedisCache StackExchangeRedisCache set { _stackExchangeRedisCache = value; } } - public void RecordSupportabilityMetric(string metricName, int count) + public void RecordSupportabilityMetric(string metricName, long count = 1) { _agentHealthReporter.ReportSupportabilityCountMetric(metricName, count); } + public void RecordCountMetric(string metricName, long count = 1) + { + _agentHealthReporter.ReportCountMetric(metricName, count); + } + + public void RecordByteMetric(string metricName, long totalBytes, long? exclusiveBytes = null) + { + _agentHealthReporter.ReportByteMetric(metricName, totalBytes, exclusiveBytes); + } + public void RecordLogMessage(string frameworkName, object logEvent, Func getTimestamp, Func getLevel, Func getLogMessage, Func getLogException, Func> getContextData, string spanId, string traceId) { _agentHealthReporter.ReportLogForwardingFramework(frameworkName); diff --git a/src/Agent/NewRelic/Agent/Core/AgentHealth/AgentHealthReporter.cs b/src/Agent/NewRelic/Agent/Core/AgentHealth/AgentHealthReporter.cs index 25063afa4..bdc065684 100644 --- a/src/Agent/NewRelic/Agent/Core/AgentHealth/AgentHealthReporter.cs +++ b/src/Agent/NewRelic/Agent/Core/AgentHealth/AgentHealthReporter.cs @@ -107,6 +107,19 @@ private void ReportSupportabilityGaugeMetric(string metricName, float value) TrySend(metric); } + public void ReportCountMetric(string metricName, long count) + { + var metric = _metricBuilder.TryBuildCountMetric(metricName, count); + TrySend(metric); + } + public void ReportByteMetric(string metricName, long totalBytes, long? exclusiveBytes = null) + { + var metric = _metricBuilder.TryBuildByteMetric(metricName, totalBytes, exclusiveBytes); + TrySend(metric); + } + + + public void ReportDotnetVersion() { #if NETFRAMEWORK diff --git a/src/Agent/NewRelic/Agent/Core/AgentHealth/IAgentHealthReporter.cs b/src/Agent/NewRelic/Agent/Core/AgentHealth/IAgentHealthReporter.cs index c03cb2614..23eed86cf 100644 --- a/src/Agent/NewRelic/Agent/Core/AgentHealth/IAgentHealthReporter.cs +++ b/src/Agent/NewRelic/Agent/Core/AgentHealth/IAgentHealthReporter.cs @@ -129,6 +129,7 @@ public interface IAgentHealthReporter : IOutOfBandMetricSource void ReportAgentInfo(); void ReportSupportabilityCountMetric(string metricName, long count = 1); + void ReportCountMetric(string metricName, long count = 1); void ReportInfiniteTracingSpanResponseError(); void ReportInfiniteTracingSpanEventsSeen(long count = 1); @@ -148,6 +149,7 @@ public interface IAgentHealthReporter : IOutOfBandMetricSource void ReportLoggingEventsDropped(int droppedCount); void ReportLogForwardingFramework(string logFramework); void ReportLogForwardingEnabledWithFramework(string logFramework); + void ReportByteMetric(string metricName, long totalBytes, long? 
exclusiveBytes = null); void ReportLoggingEventsEmpty(int count = 1); } } diff --git a/src/Agent/NewRelic/Agent/Core/Metrics/MetricNames.cs b/src/Agent/NewRelic/Agent/Core/Metrics/MetricNames.cs index 39c14d92a..b30a45436 100644 --- a/src/Agent/NewRelic/Agent/Core/Metrics/MetricNames.cs +++ b/src/Agent/NewRelic/Agent/Core/Metrics/MetricNames.cs @@ -160,6 +160,7 @@ public static class MetricNames public const string OtherTransactionPrefix = "OtherTransaction"; public const string WebTransactionPrefix = "WebTransaction"; public const string SupportabilityPayloadsDroppedDueToMaxPayloadLimitPrefix = Supportability + PathSeparator + "DotNet/Collector" + PathSeparator + "MaxPayloadSizeLimit"; + public const string KafkaMessageBrokerConsume = "Consume"; public static readonly char PathSeparatorChar = PathSeparator[0]; public static readonly char[] PathSeparatorCharArray = { PathSeparatorChar }; @@ -358,6 +359,7 @@ public enum MessageBrokerAction public const string MessageBrokerNamed = "Named"; public const string MessageBrokerTemp = "Temp"; public const string Msmq = "MSMQ"; + public const string Serialization = "Serialization"; public static MetricName GetMessageBroker(MessageBrokerDestinationType type, MessageBrokerAction action, string vendor, string queueName) @@ -368,6 +370,14 @@ public enum MessageBrokerAction : MetricName.Create(MessageBrokerPrefix, vendor, normalizedType, action, MessageBrokerTemp); } + public static MetricName GetMessageBrokerSerialization(MessageBrokerDestinationType type, MessageBrokerAction action, + string vendor, string queueName, string kind) + { + var normalizedType = NormalizeMessageBrokerDestinationTypeForMetricName(type); + return MetricName.Create(MessageBrokerPrefix, vendor, normalizedType, action, MessageBrokerNamed, queueName, Serialization, kind); + } + private static MessageBrokerDestinationType NormalizeMessageBrokerDestinationTypeForMetricName( MessageBrokerDestinationType type) { @@ -384,6 +394,14 @@ public enum MessageBrokerAction return type; } + private const string KafkaTopic = "Topic"; + private const string KafkaReceived = "Received"; + private const string KafkaMessages = "Messages"; + public static MetricName GetKafkaMessagesReceivedPerConsume(string topic) + { + return MetricName.Create(Message, "Kafka", KafkaTopic, MessageBrokerNamed, topic, KafkaReceived, KafkaMessages); + } + #endregion MessageBroker #region Datastore diff --git a/src/Agent/NewRelic/Agent/Core/Segments/MessageBrokerSerializationSegmentData.cs b/src/Agent/NewRelic/Agent/Core/Segments/MessageBrokerSerializationSegmentData.cs new file mode 100644 index 000000000..92a8a56a5 --- /dev/null +++ b/src/Agent/NewRelic/Agent/Core/Segments/MessageBrokerSerializationSegmentData.cs @@ -0,0 +1,77 @@ +// Copyright 2020 New Relic, Inc. All rights reserved.
+// SPDX-License-Identifier: Apache-2.0 + +using System; +using System.Collections.Generic; +using NewRelic.Agent.Core.Aggregators; +using NewRelic.Agent.Core.Metrics; +using NewRelic.Agent.Core.Time; +using static NewRelic.Agent.Core.WireModels.MetricWireModel; +using NewRelic.Agent.Configuration; + +namespace NewRelic.Agent.Core.Segments +{ + public class MessageBrokerSerializationSegmentData : AbstractSegmentData + { + private const string TransactionGuidSegmentParameterKey = "transaction_guid"; + + public string Vendor { get; set; } + + public string Destination { get; set; } + + public MetricNames.MessageBrokerDestinationType DestinationType { get; set; } + + public MetricNames.MessageBrokerAction Action { get; set; } + + public string Kind { get; set; } + + public MessageBrokerSerializationSegmentData(string vendor, string destination, MetricNames.MessageBrokerDestinationType destinationType, MetricNames.MessageBrokerAction action, string kind) + { + Vendor = vendor; + Destination = destination; + DestinationType = destinationType; + Action = action; + Kind = kind; + } + + public override bool IsCombinableWith(AbstractSegmentData otherData) + { + var otherTypedSegment = otherData as MessageBrokerSerializationSegmentData; + if (otherTypedSegment == null) + return false; + + if (!Vendor.Equals(otherTypedSegment.Vendor)) + return false; + + if (!Destination.Equals(otherTypedSegment.Destination)) + return false; + + if (DestinationType != otherTypedSegment.DestinationType) + return false; + + if (Action != otherTypedSegment.Action) + return false; + + if (!Kind.Equals(otherTypedSegment.Kind)) + return false; + + return true; + } + + public override string GetTransactionTraceName() + { + return MetricNames.GetMessageBrokerSerialization(DestinationType, Action, Vendor, Destination, Kind).ToString(); + } + + public override void AddMetricStats(Segment segment, TimeSpan durationOfChildren, TransactionMetricStatsCollection txStats, IConfigurationService configService) + { + var duration = segment.Duration.Value; + var exclusiveDuration = TimeSpanMath.Max(TimeSpan.Zero, duration - durationOfChildren); + + MetricBuilder.TryBuildMessageBrokerSerializationSegmentMetric(Vendor, Destination, DestinationType, Action, Kind, duration, exclusiveDuration, txStats); + } + } +} diff --git a/src/Agent/NewRelic/Agent/Core/Segments/NoOpSegment.cs b/src/Agent/NewRelic/Agent/Core/Segments/NoOpSegment.cs index 4a2892d3b..5e1a5181a 100644 --- a/src/Agent/NewRelic/Agent/Core/Segments/NoOpSegment.cs +++ b/src/Agent/NewRelic/Agent/Core/Segments/NoOpSegment.cs @@ -50,6 +50,7 @@ public ISegmentExperimental MakeLeaf() } public void RemoveSegmentFromCallStack() { } + public void SetMessageBrokerDestination(string destination) { } public ISegmentExperimental SetSegmentData(ISegmentData segmentData) { diff --git a/src/Agent/NewRelic/Agent/Core/Segments/Segment.cs b/src/Agent/NewRelic/Agent/Core/Segments/Segment.cs index 5041f5e31..1c75d82df 100644 --- a/src/Agent/NewRelic/Agent/Core/Segments/Segment.cs +++ b/src/Agent/NewRelic/Agent/Core/Segments/Segment.cs @@ -210,6 +210,15 @@ public void RemoveSegmentFromCallStack() _transactionSegmentState.CallStackPop(this); } + public void SetMessageBrokerDestination(string destination) + { + if (SegmentData is MessageBrokerSegmentData messageBrokerSegmentData) + { + messageBrokerSegmentData.Destination = destination; + } + } + private const long NoEndTime = -1; internal static NoOpSegment NoOpSegment = new NoOpSegment();
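SetMessageBrokerDestination exists so the Kafka consumer wrapper can start a segment before the topic is known and rename it once Consume returns. A minimal sketch of that call pattern, using only APIs from this diff; the transaction/methodCall variables and the "unknown"/"orders" values are illustrative, not part of the change:

```csharp
// Sketch only: the consumer segment starts with a placeholder destination,
// because the topic is not known until Consume() returns a result.
var segment = transaction.StartMessageBrokerSegment(
    methodCall,
    MessageBrokerDestinationType.Topic,
    MessageBrokerAction.Consume,
    "Kafka",
    "unknown"); // placeholder destination

// ... Consume() returns and the ConsumeResult exposes the real topic ...
segment.SetMessageBrokerDestination("orders"); // a no-op unless the segment holds MessageBrokerSegmentData
segment.End();
```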
protected readonly static IEnumerable<KeyValuePair<string, object>> EmptyImmutableParameters = new KeyValuePair<string, object>[0]; @@ -410,7 +419,7 @@ public Segment CreateSimilar(TimeSpan newRelativeStartTime, TimeSpan newDuration public string ToStringForFinestLogging() { - return $"Id={UniqueId},ParentId={ParentUniqueId?.ToString() ?? "Root"},Name={Data.GetTransactionTraceName()},IsLeaf={IsLeaf},Combinable={Combinable},MethodCallData={MethodCallData}"; + return $"Id={UniqueId},ParentId={ParentUniqueId?.ToString() ?? "Root"},Name={GetTransactionTraceName()},IsLeaf={IsLeaf},Combinable={Combinable},MethodCallData={MethodCallData}"; } public ISegmentExperimental SetSegmentData(ISegmentData segmentData) diff --git a/src/Agent/NewRelic/Agent/Core/Transactions/NoOpTransaction.cs b/src/Agent/NewRelic/Agent/Core/Transactions/NoOpTransaction.cs index f6becb50b..d924c6578 100644 --- a/src/Agent/NewRelic/Agent/Core/Transactions/NoOpTransaction.cs +++ b/src/Agent/NewRelic/Agent/Core/Transactions/NoOpTransaction.cs @@ -81,6 +81,14 @@ public ISegment StartMessageBrokerSegment(MethodCall methodCall, MessageBrokerDe return Segment.NoOpSegment; } + public ISegment StartMessageBrokerSerializationSegment(MethodCall methodCall, MessageBrokerDestinationType destinationType, MessageBrokerAction operation, string brokerVendorName, string destinationName, string kind) + { +#if DEBUG + Log.Finest("Skipping StartMessageBrokerSerializationSegment outside of a transaction"); +#endif + return Segment.NoOpSegment; + } + public ISegment StartMethodSegment(MethodCall methodCall, string typeName, string methodName, bool isLeaf = false) { #if DEBUG @@ -175,6 +183,11 @@ public void SetMessageBrokerTransactionName(MessageBrokerDestinationType destina } + public void SetKafkaMessageBrokerTransactionName(MessageBrokerDestinationType destinationType, string brokerVendorName, string destination = null, TransactionNamePriority priority = TransactionNamePriority.Uri) + { + } + public void SetOtherTransactionName(string category, string name, TransactionNamePriority priority = TransactionNamePriority.Uri) { diff --git a/src/Agent/NewRelic/Agent/Core/Transactions/Transaction.cs b/src/Agent/NewRelic/Agent/Core/Transactions/Transaction.cs index 14fdbf243..aadf3123a 100644 --- a/src/Agent/NewRelic/Agent/Core/Transactions/Transaction.cs +++ b/src/Agent/NewRelic/Agent/Core/Transactions/Transaction.cs @@ -257,6 +257,26 @@ public ISegment StartMessageBrokerSegment(MethodCall methodCall, MessageBrokerDe return segment; } + public ISegment StartMessageBrokerSerializationSegment(MethodCall methodCall, MessageBrokerDestinationType destinationType, MessageBrokerAction operation, string brokerVendorName, string destinationName, string kind) + { + if (Ignored) + return Segment.NoOpSegment; + if (brokerVendorName == null) + throw new ArgumentNullException("brokerVendorName"); + if (string.IsNullOrEmpty(kind)) + throw new ArgumentException("kind cannot be null or empty", "kind"); + + var segment = StartSegmentImpl(methodCall); + var messageBrokerSegmentData = CreateMessageBrokerSerializationSegmentData(destinationType, operation, brokerVendorName, destinationName, kind); + + segment.SetSegmentData(messageBrokerSegmentData); + + if (Log.IsFinestEnabled) LogFinest($"Segment start {{{segment.ToStringForFinestLogging()}}}"); + + return segment; + } + public AbstractSegmentData CreateMessageBrokerSegmentData(MessageBrokerDestinationType destinationType, MessageBrokerAction operation, string brokerVendorName, string destinationName) { if (brokerVendorName == null) @@ -268,6 +288,17 @@ public AbstractSegmentData
CreateMessageBrokerSegmentData(MessageBrokerDestinati return new MessageBrokerSegmentData(brokerVendorName, destinationName, destType, action); } + public AbstractSegmentData CreateMessageBrokerSerializationSegmentData(MessageBrokerDestinationType destinationType, MessageBrokerAction operation, string brokerVendorName, string destinationName, string kind) + { + if (brokerVendorName == null) + throw new ArgumentNullException("brokerVendorName"); + + var action = AgentWrapperApiEnumToMetricNamesEnum(operation); + var destType = AgentWrapperApiEnumToMetricNamesEnum(destinationType); + + return new MessageBrokerSerializationSegmentData(brokerVendorName, destinationName, destType, action, kind); + } + /// /// This creates a Datastore segment based on data gathered using the built-in StackExchange.Redis profiling system. /// @@ -786,6 +817,12 @@ public void SetMessageBrokerTransactionName(MessageBrokerDestinationType destina SetTransactionName(trxName, priority); } + public void SetKafkaMessageBrokerTransactionName(MessageBrokerDestinationType destinationType, string brokerVendorName, string destination = null, TransactionNamePriority priority = TransactionNamePriority.Uri) + { + var trxName = TransactionName.ForKafkaBrokerTransaction(destinationType, brokerVendorName, destination); + SetTransactionName(trxName, priority); + } + public void SetOtherTransactionName(string category, string name, TransactionNamePriority priority = TransactionNamePriority.Uri) { var trxName = TransactionName.ForOtherTransaction(category, name); diff --git a/src/Agent/NewRelic/Agent/Core/Transactions/TransactionName.cs b/src/Agent/NewRelic/Agent/Core/Transactions/TransactionName.cs index a2262a0a2..29edf873f 100644 --- a/src/Agent/NewRelic/Agent/Core/Transactions/TransactionName.cs +++ b/src/Agent/NewRelic/Agent/Core/Transactions/TransactionName.cs @@ -66,6 +66,7 @@ public static TransactionName ForUriTransaction(string normalizedUri) var trxName = new TransactionName(true, MetricNames.Uri, normalizedUri); return trxName; } + public static TransactionName ForBrokerTransaction(MessageBrokerDestinationType type, string vendor, string destination) { var trxName = new StringBuilder(vendor) @@ -87,6 +88,29 @@ public static TransactionName ForBrokerTransaction(MessageBrokerDestinationType return new TransactionName(false, MetricNames.Message, trxName.ToString()); } + /// + /// Builds a transaction name that conforms to the Kafka spec + /// + /// + /// + /// + /// + public static TransactionName ForKafkaBrokerTransaction(MessageBrokerDestinationType type, string vendor, string destination) + { + var trxName = new StringBuilder(vendor) + .Append(MetricNames.PathSeparator) + .Append(EnumNameCache.GetName(type)) + .Append(MetricNames.PathSeparator) + .Append(MetricNames.KafkaMessageBrokerConsume) + .Append(MetricNames.PathSeparator) + .Append(MetricNames.MessageBrokerNamed) + .Append(MetricNames.PathSeparator); + + trxName.Append(string.IsNullOrWhiteSpace(destination) ? MetricNames.MessageBrokerTemp : destination); + + return new TransactionName(false, MetricNames.Message, trxName.ToString()); + } + public static TransactionName ForCustomTransaction(bool isWeb, string name, int maxLength) { // Note: In our public docs to tells users that they must prefix their metric names with "Custom/", but there's no mechanism that actually enforces this restriction, so there's no way to know whether it'll be there or not. For consistency, we'll just strip off "Custom/" if there's at all and then we know it's consistently not there. 
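The shape of the name ForKafkaBrokerTransaction builds matters downstream: TransactionTransformer (next file) pattern-matches on it to emit the per-topic messages-received metric. A sketch of the output, where the "orders" topic is an illustrative value:

```csharp
// Illustrative: "orders" is an example topic name, not a value from the diff.
var trxName = TransactionName.ForKafkaBrokerTransaction(
    MessageBrokerDestinationType.Topic, "Kafka", "orders");

// Name built above: "Kafka/Topic/Consume/Named/orders" under the "Message" category,
// which is the "Message/Kafka/Topic/Consume/Named" prefix that
// TransactionTransformer.GenerateAndCollectMetrics checks for below.
// A null or whitespace destination appends the "Temp" token instead of a topic name.
```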
diff --git a/src/Agent/NewRelic/Agent/Core/Transformers/TransactionTransformer/TransactionTransformer.cs b/src/Agent/NewRelic/Agent/Core/Transformers/TransactionTransformer/TransactionTransformer.cs index 926c2d282..ffa6e4d8b 100644 --- a/src/Agent/NewRelic/Agent/Core/Transformers/TransactionTransformer/TransactionTransformer.cs +++ b/src/Agent/NewRelic/Agent/Core/Transformers/TransactionTransformer/TransactionTransformer.cs @@ -291,6 +291,13 @@ private void GenerateAndCollectMetrics(ImmutableTransaction immutableTransaction MetricBuilder.TryBuildClientApplicationMetric(referrerCrossProcessId, catResponseTime, catResponseTime, txStats); } + // Capture required Kafka metrics + if (immutableTransaction.TransactionName.Name.StartsWith("Message/Kafka/Topic/Consume/Named")) + { + + MetricBuilder.TryBuildKafkaMessagesReceivedMetric(immutableTransaction.TransactionName.Name, 1, txStats); + } + using (_agentTimerService.StartNew("CollectMetrics")) { _metricAggregator.Collect(txStats); diff --git a/src/Agent/NewRelic/Agent/Core/Utilities/ExtensionsLoader.cs b/src/Agent/NewRelic/Agent/Core/Utilities/ExtensionsLoader.cs index 6a120e7e0..375fa841b 100644 --- a/src/Agent/NewRelic/Agent/Core/Utilities/ExtensionsLoader.cs +++ b/src/Agent/NewRelic/Agent/Core/Utilities/ExtensionsLoader.cs @@ -69,7 +69,12 @@ public static void Initialize(string installPathExtensionsDirectory) //The NewRelic.Providers.Wrapper.SerilogLogging.dll depends on the Serilog.dll; therefore, it should //only be loaded by the agent when Serilog is used otherwise assembly load exception will occur. { "SerilogCreateLoggerWrapper", Path.Combine(_installPathExtensionsDirectory, "NewRelic.Providers.Wrapper.SerilogLogging.dll") }, - { "SerilogDispatchWrapper", Path.Combine(_installPathExtensionsDirectory, "NewRelic.Providers.Wrapper.SerilogLogging.dll") } + { "SerilogDispatchWrapper", Path.Combine(_installPathExtensionsDirectory, "NewRelic.Providers.Wrapper.SerilogLogging.dll") }, + + // Kafka + { "KafkaProducerWrapper", Path.Combine(_installPathExtensionsDirectory, "NewRelic.Providers.Wrapper.Kafka.dll") }, + { "KafkaSerializerWrapper", Path.Combine(_installPathExtensionsDirectory, "NewRelic.Providers.Wrapper.Kafka.dll") }, + { "KafkaConsumerWrapper", Path.Combine(_installPathExtensionsDirectory, "NewRelic.Providers.Wrapper.Kafka.dll") } }; var nonAutoReflectedAssemblies = _dynamicLoadWrapperAssemblies.Values.Distinct().ToList(); diff --git a/src/Agent/NewRelic/Agent/Core/WireModels/IMetricBuilder.cs b/src/Agent/NewRelic/Agent/Core/WireModels/IMetricBuilder.cs index dfae95964..1cac52661 100644 --- a/src/Agent/NewRelic/Agent/Core/WireModels/IMetricBuilder.cs +++ b/src/Agent/NewRelic/Agent/Core/WireModels/IMetricBuilder.cs @@ -205,5 +205,8 @@ public interface IMetricBuilder MetricWireModel TryBuildSupportabilityLoggingEventsSentMetric(int loggingEventCount); MetricWireModel TryBuildSupportabilityLoggingEventsDroppedMetric(int droppedCount); + + MetricWireModel TryBuildCountMetric(string metricName, long count); + MetricWireModel TryBuildByteMetric(string metricName, long totalBytes, long? 
exclusiveBytes = null); } } diff --git a/src/Agent/NewRelic/Agent/Core/WireModels/MetricWireModel.cs b/src/Agent/NewRelic/Agent/Core/WireModels/MetricWireModel.cs index f0e7d1e18..697fd299f 100644 --- a/src/Agent/NewRelic/Agent/Core/WireModels/MetricWireModel.cs +++ b/src/Agent/NewRelic/Agent/Core/WireModels/MetricWireModel.cs @@ -298,6 +298,19 @@ public static void TryBuildErrorsMetrics(bool isWebTransaction, TransactionMetri #endregion + #region Kafka metrics + + public static void TryBuildKafkaMessagesReceivedMetric(string transactionName, int count, TransactionMetricStatsCollection txStats) + { + var parts = transactionName.Split('/'); + var proposedName = MetricNames.GetKafkaMessagesReceivedPerConsume(parts.Last()); + var data = MetricDataWireModel.BuildCountData(count); + txStats.MergeUnscopedStats(proposedName, data); + txStats.MergeScopedStats(proposedName, data); + } + + #endregion Kafka metrics + #endregion Transaction builders #region Segment builders @@ -339,6 +352,16 @@ public static void TryBuildErrorsMetrics(bool isWebTransaction, TransactionMetri txStats.MergeUnscopedStats(proposedName, data); } + public static void TryBuildMessageBrokerSerializationSegmentMetric(string vendor, string destination, + MetricNames.MessageBrokerDestinationType destinationType, MetricNames.MessageBrokerAction action, string kind, + TimeSpan totalTime, TimeSpan totalExclusiveTime, TransactionMetricStatsCollection txStats) + { + var proposedName = MetricNames.GetMessageBrokerSerialization(destinationType, action, vendor, destination, kind); + var data = MetricDataWireModel.BuildTimingData(totalTime, totalExclusiveTime); + txStats.MergeScopedStats(proposedName, data); + txStats.MergeUnscopedStats(proposedName, data); + } + public static void TryBuildExternalSegmentMetric(string host, string method, TimeSpan totalTime, TimeSpan totalExclusiveTime, TransactionMetricStatsCollection txStats, bool unscopedOnly) { @@ -987,6 +1010,18 @@ public MetricWireModel TryBuildSupportabilityLoggingEventsDroppedMetric(int drop return BuildMetric(_metricNameService, proposedName, null, data); } + public MetricWireModel TryBuildCountMetric(string metricName, long count) + { + var data = MetricDataWireModel.BuildCountData(count); + return BuildMetric(_metricNameService, metricName, null, data); + } + + public MetricWireModel TryBuildByteMetric(string metricName, long totalBytes, long? 
exclusiveBytes) + { + var data = MetricDataWireModel.BuildByteData(totalBytes, exclusiveBytes); + return BuildMetric(_metricNameService, metricName, null, data); + } + #endregion } } diff --git a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Api/Experimental/IAgentExperimental.cs b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Api/Experimental/IAgentExperimental.cs index 5040635d7..4feb82492 100644 --- a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Api/Experimental/IAgentExperimental.cs +++ b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Api/Experimental/IAgentExperimental.cs @@ -13,11 +13,25 @@ namespace NewRelic.Agent.Api.Experimental public interface IAgentExperimental { /// - /// Records a supportability metrics + /// Records a supportability metric /// /// - /// Defaults to 1.0f - void RecordSupportabilityMetric(string metricName, int count = 1); + /// Defaults to 1 + void RecordSupportabilityMetric(string metricName, long count = 1); + + /// + /// Records a count metric with the given name + /// + /// + /// + void RecordCountMetric(string metricName, long count = 1); + + /// + /// Records a byte count metric with the given name + /// + /// + /// + /// + void RecordByteMetric(string metricName, long totalBytes, long? exclusiveBytes = null); /// /// Records the log message in the transaction to later be forwarded if log forwarding is enabled. diff --git a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Api/IAgent.cs b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Api/IAgent.cs index 5e6dfefd1..2496e4243 100644 --- a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Api/IAgent.cs +++ b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Api/IAgent.cs @@ -38,6 +38,17 @@ public interface IAgent : IAgentExperimental /// ITransaction CreateTransaction(MessageBrokerDestinationType destinationType, string brokerVendorName, string destination = null, Action wrapperOnCreate = null); + + /// + /// Create a new transaction for processing a request, conforming to the naming requirements of the Kafka spec. + /// + /// + /// The name of the message broker vendor. Must not be null. + /// The destination queue of the message being handled. Can be null. + /// A callback that is called if a transaction is created. Can be null. + /// + ITransaction CreateKafkaTransaction(MessageBrokerDestinationType destinationType, string brokerVendorName, string destination = null, Action wrapperOnCreate = null); + /// /// Create a new transaction for processing a request. /// diff --git a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Api/ISegment.cs b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Api/ISegment.cs index 2f1bc1a24..2a7d01ce6 100644 --- a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Api/ISegment.cs +++ b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Api/ISegment.cs @@ -68,5 +68,11 @@ public interface ISegment : ISpan /// Removes this segment from the top of the agent's internal call stack. Should only be used for asynchronous methods. Calling EndSegment is sufficient for synchronous methods.
/// void RemoveSegmentFromCallStack(); + + /// + /// Sets the Destination on SegmentData, if the data is of type MessageBrokerSegmentData + /// + /// + void SetMessageBrokerDestination(string destination); } } diff --git a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Api/ITransaction.cs b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Api/ITransaction.cs index fdd97e92c..30c705cb1 100644 --- a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Api/ITransaction.cs +++ b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Api/ITransaction.cs @@ -91,6 +91,18 @@ public interface ITransaction /// an opaque object that will be needed when you want to end the segment. ISegment StartMessageBrokerSegment(MethodCall methodCall, MessageBrokerDestinationType destinationType, MessageBrokerAction operation, string brokerVendorName, string destinationName = null); + /// + /// Creates a segment for serializing a key or value in a message brokering system. + /// + /// The method call that is responsible for starting this segment. + /// + /// + /// Must not be null. + /// Can be null. + /// + /// an opaque object that will be needed when you want to end the segment. + ISegment StartMessageBrokerSerializationSegment(MethodCall methodCall, MessageBrokerDestinationType destinationType, MessageBrokerAction operation, string brokerVendorName, string destinationName, string kind); + /// /// Starts a transaction segment. Does nothing if there is no current transaction. /// @@ -193,6 +205,17 @@ public interface ITransaction /// void SetMessageBrokerTransactionName(MessageBrokerDestinationType destinationType, string brokerVendorName, string destination = null, TransactionNamePriority priority = TransactionNamePriority.Uri); + /// + /// Sets the name of the current transaction to a name in the OtherTransaction namespace which is derived from some message broker details, + /// conforming to the naming requirements of the Kafka spec. Does nothing if there is no current transaction. + /// + /// + /// The name of the message broker vendor. Must not be null. + /// The destination queue of the message being handled. Can be null. + /// The priority of the name being set. Higher priority names override lower priority names. + /// + void SetKafkaMessageBrokerTransactionName(MessageBrokerDestinationType destinationType, string brokerVendorName, string destination = null, TransactionNamePriority priority = TransactionNamePriority.Uri); + /// /// Sets the name of the current transaction to a custom name in the OtherTransaction namespace. Does nothing if there is no current transaction.
/// diff --git a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/Kafka/Instrumentation.xml b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/Kafka/Instrumentation.xml new file mode 100644 index 000000000..e7a14a76c --- /dev/null +++ b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/Kafka/Instrumentation.xml @@ -0,0 +1,48 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/Kafka/Kafka.csproj b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/Kafka/Kafka.csproj new file mode 100644 index 000000000..89537e6b6 --- /dev/null +++ b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/Kafka/Kafka.csproj @@ -0,0 +1,22 @@ + + + + net462;netstandard2.0 + NewRelic.Providers.Wrapper.Kafka + NewRelic.Providers.Wrapper.Kafka + Kafka Wrapper Provider for New Relic .NET Agent + + + + Always + + + + + + + + + + + diff --git a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/Kafka/KafkaConsumerWrapper.cs b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/Kafka/KafkaConsumerWrapper.cs new file mode 100644 index 000000000..752f55656 --- /dev/null +++ b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/Kafka/KafkaConsumerWrapper.cs @@ -0,0 +1,156 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. +// SPDX-License-Identifier: Apache-2.0 + +using System; +using System.Collections.Concurrent; +using System.Text; +using Confluent.Kafka; +using NewRelic.Agent.Api; +using NewRelic.Agent.Api.Experimental; +using NewRelic.Agent.Extensions.Providers.Wrapper; +using NewRelic.Reflection; + +namespace NewRelic.Providers.Wrapper.Kafka +{ + public class KafkaConsumerWrapper : IWrapper + { + private const string WrapperName = "KafkaConsumerWrapper"; + private const string BrokerVendorName = "Kafka"; + + public bool IsTransactionRequired => false; + + private static readonly ConcurrentDictionary> TopicAccessorDictionary = + new ConcurrentDictionary>(); + + private static readonly ConcurrentDictionary> MessageAccessorDictionary = + new ConcurrentDictionary>(); + private static readonly ConcurrentDictionary> KeyAccessorDictionary = + new ConcurrentDictionary>(); + private static readonly ConcurrentDictionary> ValueAccessorDictionary = + new ConcurrentDictionary>(); + + public CanWrapResponse CanWrap(InstrumentedMethodInfo methodInfo) + { + return new CanWrapResponse(WrapperName.Equals(methodInfo.RequestedWrapperName)); + } + + public AfterWrappedMethodDelegate BeforeWrappedMethod(InstrumentedMethodCall instrumentedMethodCall, IAgent agent, ITransaction transaction) + { + transaction = agent.CreateKafkaTransaction( + destinationType: MessageBrokerDestinationType.Topic, + brokerVendorName: BrokerVendorName, + destination: "unknown"); // placeholder since the topic name is unknown at this point + + var segment = transaction.StartMessageBrokerSegment(instrumentedMethodCall.MethodCall, MessageBrokerDestinationType.Topic, MessageBrokerAction.Consume, BrokerVendorName, "unknown"); + + return Delegates.GetDelegateFor(onSuccess: (resultAsObject) => + { + try + { + if (resultAsObject == null) // null is a valid return value, so we have to handle it. 
+ { + transaction.Ignore(); + return; + } + + // result is actually ConsumeResult - but, because of the generic parameters, + // we have to reference it as object so we can use VisibilityBypasser on it + var type = resultAsObject.GetType(); + + // get the topic + var topicAccessor = TopicAccessorDictionary.GetOrAdd(type, GetTopicAccessorFunc); + string topic = topicAccessor(resultAsObject); + + // set the segment and transaction name + segment.SetMessageBrokerDestination(topic); + transaction.SetKafkaMessageBrokerTransactionName(MessageBrokerDestinationType.Topic, BrokerVendorName, topic); + + // get the Message.Headers property and add distributed trace headers + var messageAccessor = MessageAccessorDictionary.GetOrAdd(type, GetMessageAccessorFunc); + var messageAsObject = messageAccessor(resultAsObject); + + var headersSize = 0L; + if (messageAsObject is MessageMetadata messageMetaData) + { + headersSize = GetHeadersSize(messageMetaData.Headers); + + transaction.InsertDistributedTraceHeaders(messageMetaData.Headers, DistributedTraceHeadersSetter); + } + + ReportSizeMetrics(agent, transaction, topic, headersSize, messageAsObject); + } + finally + { + // need to guarantee that the segment and transaction are terminated + segment.End(); + transaction.End(); + } + }); + } + + private static long GetHeadersSize(Headers headers) + { + var headersSize = 0L; + if (headers != null) + { + foreach (var header in headers) + { + headersSize += Encoding.UTF8.GetByteCount(header.Key); + headersSize += header.GetValueBytes().Length; + } + } + return headersSize; + } + + private static void ReportSizeMetrics(IAgent agent, ITransaction transaction, string topic, long headersSize, object messageAsObject) + { + // get the message Key and Value properties so we can try to get their size + var messageType = messageAsObject.GetType(); + var keyAccessor = KeyAccessorDictionary.GetOrAdd(messageType, GetKeyAccessorFunc); + var valueAccessor = ValueAccessorDictionary.GetOrAdd(messageType, GetValueAccessorFunc); + + var keyAsObject = keyAccessor(messageAsObject); + var valueAsObject = valueAccessor(messageAsObject); + + var totalSize = headersSize + TryGetSize(keyAsObject) + TryGetSize(valueAsObject); + + if (totalSize > 0) + { + transaction.AddCustomAttribute("kafka.consume.byteCount", totalSize); + } + + // Add metrics for bytes received and messages received + var agentExp = agent.GetExperimentalApi(); + agentExp.RecordCountMetric($"Message/Kafka/Topic/Named/{topic}/Received/Messages", 1); + agentExp.RecordByteMetric($"Message/Kafka/Topic/Named/{topic}/Received/Bytes", totalSize); + } + + private static Func GetTopicAccessorFunc(Type t) => + VisibilityBypasser.Instance.GeneratePropertyAccessor(t, "Topic"); + private static Func GetMessageAccessorFunc(Type t) => + VisibilityBypasser.Instance.GeneratePropertyAccessor(t, "Message"); + private static Func GetKeyAccessorFunc(Type t) => + VisibilityBypasser.Instance.GeneratePropertyAccessor(t, "Key"); + private static Func GetValueAccessorFunc(Type t) => + VisibilityBypasser.Instance.GeneratePropertyAccessor(t, "Value"); + + private static void DistributedTraceHeadersSetter(Headers carrier, string key, string value) + { + carrier ??= new Headers(); + carrier.Add(key, Encoding.ASCII.GetBytes(value)); + } + + private static long TryGetSize(object obj) + { + if (obj == null) + return 0; + + // get the UTF8 byte count if it's a string, + // the array length if it's a byte array + // or zero if it's something else + return obj is string str ? 
Encoding.UTF8.GetByteCount(str) : + obj is byte[] bytes ? bytes.Length : + 0; + } + } +} diff --git a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/Kafka/KafkaProducerWrapper.cs b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/Kafka/KafkaProducerWrapper.cs new file mode 100644 index 000000000..d454838a4 --- /dev/null +++ b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/Kafka/KafkaProducerWrapper.cs @@ -0,0 +1,44 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. +// SPDX-License-Identifier: Apache-2.0 + +using System.Text; +using System.Threading.Tasks; +using Confluent.Kafka; +using NewRelic.Agent.Api; +using NewRelic.Agent.Extensions.Providers.Wrapper; +using NewRelic.SystemExtensions; + +namespace NewRelic.Providers.Wrapper.Kafka +{ + public class KafkaProducerWrapper : IWrapper + { + private const string WrapperName = "KafkaProducerWrapper"; + private const string BrokerVendorName = "Kafka"; + + public bool IsTransactionRequired => true; + + public CanWrapResponse CanWrap(InstrumentedMethodInfo methodInfo) + { + return new CanWrapResponse(WrapperName.Equals(methodInfo.RequestedWrapperName)); + } + + public AfterWrappedMethodDelegate BeforeWrappedMethod(InstrumentedMethodCall instrumentedMethodCall, IAgent agent, ITransaction transaction) + { + var topicPartition = instrumentedMethodCall.MethodCall.MethodArguments.ExtractNotNullAs(0); + var messageMetadata = instrumentedMethodCall.MethodCall.MethodArguments.ExtractNotNullAs(1); + + var segment = transaction.StartMessageBrokerSegment(instrumentedMethodCall.MethodCall, MessageBrokerDestinationType.Topic, MessageBrokerAction.Produce, BrokerVendorName, topicPartition.Topic); + + transaction.InsertDistributedTraceHeaders(messageMetadata.Headers, DistributedTraceHeadersSetter); + + return instrumentedMethodCall.MethodCall.Method.MethodName == "Produce" ? Delegates.GetDelegateFor(segment) : Delegates.GetAsyncDelegateFor(agent, segment); + } + + private static void DistributedTraceHeadersSetter(Headers carrier, string key, string value) + { + carrier ??= new Headers(); + carrier.Add(key, Encoding.ASCII.GetBytes(value)); + } + + } +} diff --git a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/Kafka/KafkaSerializerWrapper.cs b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/Kafka/KafkaSerializerWrapper.cs new file mode 100644 index 000000000..18eb3e383 --- /dev/null +++ b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/Kafka/KafkaSerializerWrapper.cs @@ -0,0 +1,34 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. +// SPDX-License-Identifier: Apache-2.0 + +using Confluent.Kafka; +using NewRelic.Agent.Api; +using NewRelic.Agent.Extensions.Providers.Wrapper; + +namespace NewRelic.Providers.Wrapper.Kafka +{ + public class KafkaSerializerWrapper : IWrapper + { + private const string WrapperName = "KafkaSerializerWrapper"; + + public bool IsTransactionRequired => true; + + public CanWrapResponse CanWrap(InstrumentedMethodInfo methodInfo) + { + return new CanWrapResponse(WrapperName.Equals(methodInfo.RequestedWrapperName)); + } + + public AfterWrappedMethodDelegate BeforeWrappedMethod(InstrumentedMethodCall instrumentedMethodCall, IAgent agent, ITransaction transaction) + { + // Serialize has 2 args, Deserialize has 3 + var context = instrumentedMethodCall.MethodCall.MethodArguments.Length == 2 + ? 
(SerializationContext)instrumentedMethodCall.MethodCall.MethodArguments[1] + : (SerializationContext)instrumentedMethodCall.MethodCall.MethodArguments[2]; + + // e.g. MessageBroker/Kafka/Topic/Produce/Named/{topic_name}/Serialization/Value + var segment = transaction.StartMessageBrokerSerializationSegment(instrumentedMethodCall.MethodCall, MessageBrokerDestinationType.Topic, MessageBrokerAction.Produce, "Kafka", context.Topic, context.Component.ToString()); + + return Delegates.GetDelegateFor(segment); + } + } +} diff --git a/tests/Agent/IntegrationTests/.dockerignore b/tests/Agent/IntegrationTests/.dockerignore new file mode 100644 index 000000000..3729ff0cd --- /dev/null +++ b/tests/Agent/IntegrationTests/.dockerignore @@ -0,0 +1,25 @@ +**/.classpath +**/.dockerignore +**/.env +**/.git +**/.gitignore +**/.project +**/.settings +**/.toolstarget +**/.vs +**/.vscode +**/*.*proj.user +**/*.dbmdl +**/*.jfm +**/azds.yaml +**/bin +**/charts +**/docker-compose* +**/Dockerfile* +**/node_modules +**/npm-debug.log +**/obj +**/secrets.dev.yaml +**/values.dev.yaml +LICENSE +README.md \ No newline at end of file diff --git a/tests/Agent/IntegrationTests/ContainerApplications/KafkaTestApp/Consumer.cs b/tests/Agent/IntegrationTests/ContainerApplications/KafkaTestApp/Consumer.cs new file mode 100644 index 000000000..4023cf48f --- /dev/null +++ b/tests/Agent/IntegrationTests/ContainerApplications/KafkaTestApp/Consumer.cs @@ -0,0 +1,54 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. +// SPDX-License-Identifier: Apache-2.0 + +using System; +using System.Threading.Tasks; +using Confluent.Kafka; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.Logging; + +namespace KafkaTestApp +{ + public class Consumer + { + private readonly string _topic; + private readonly IConfiguration _configuration; + private readonly ILogger _logger; + + public Consumer(IConfiguration configuration, string topic, ILogger logger) + { + _topic = topic; + _configuration = configuration; + _logger = logger; + } + + public Task StartConsuming() + { + using (var consumer = new ConsumerBuilder<string, string>(_configuration.AsEnumerable()).Build()) + { + consumer.Subscribe(_topic); + try + { + while (true) + { + _ = consumer.Consume(120 * 1000); + } + } + catch (OperationCanceledException) + { + _logger.LogInformation("Consume operation canceled."); + } + catch (Exception ex) + { + _logger.LogError(ex, "Consumer error"); + } + finally + { + consumer.Close(); + } + } + + return Task.CompletedTask; + } + } +} diff --git a/tests/Agent/IntegrationTests/ContainerApplications/KafkaTestApp/Controllers/KafkaController.cs b/tests/Agent/IntegrationTests/ContainerApplications/KafkaTestApp/Controllers/KafkaController.cs new file mode 100644 index 000000000..5e6dc76de --- /dev/null +++ b/tests/Agent/IntegrationTests/ContainerApplications/KafkaTestApp/Controllers/KafkaController.cs @@ -0,0 +1,37 @@ +// Copyright 2020 New Relic, Inc. All rights reserved.
+// SPDX-License-Identifier: Apache-2.0 + +using System.Threading.Tasks; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Extensions.Logging; + +namespace KafkaTestApp.Controllers +{ + [ApiController] + [Route("kafka")] + public class KafkaController : ControllerBase + { + private readonly ILogger _logger; + + public KafkaController(ILogger logger) + { + _logger = logger; + } + + [HttpGet] + [Route("produce")] + public async Task Produce() + { + await Program.Producer.Produce(); + return "Complete"; + } + + [HttpGet] + [Route("produceasync")] + public async Task ProduceAsync() + { + await Program.Producer.ProduceAsync(); + return "Complete"; + } + } +} diff --git a/tests/Agent/IntegrationTests/ContainerApplications/KafkaTestApp/Dockerfile b/tests/Agent/IntegrationTests/ContainerApplications/KafkaTestApp/Dockerfile new file mode 100644 index 000000000..a3ecc61e5 --- /dev/null +++ b/tests/Agent/IntegrationTests/ContainerApplications/KafkaTestApp/Dockerfile @@ -0,0 +1,57 @@ +ARG DISTRO_TAG +FROM --platform=amd64 mcr.microsoft.com/dotnet/aspnet:7.0-bullseye-slim AS base +WORKDIR /app +EXPOSE 80 +RUN apt-get update \ + && apt-get install -y wget \ + && wget https://packages.microsoft.com/config/debian/11/packages-microsoft-prod.deb -O packages-microsoft-prod.deb \ + && dpkg -i packages-microsoft-prod.deb \ + && rm packages-microsoft-prod.deb \ + && apt-get update \ + && apt-get install -y aspnetcore-runtime-6.0 + + +# build image is always amd64 (to match the runner architecture), even though the target architecture may be arm64 +FROM --platform=amd64 mcr.microsoft.com/dotnet/sdk:7.0-bullseye-slim AS build +RUN apt-get update \ + && apt-get install -y wget \ + && wget https://packages.microsoft.com/config/debian/11/packages-microsoft-prod.deb -O packages-microsoft-prod.deb \ + && dpkg -i packages-microsoft-prod.deb \ + && rm packages-microsoft-prod.deb \ + && apt-get update \ + && apt-get install -y dotnet-sdk-6.0 +WORKDIR /src +COPY ["KafkaTestApp/KafkaTestApp.csproj", "KafkaTestApp/"] +ARG APP_DOTNET_VERSION +RUN dotnet restore "KafkaTestApp/KafkaTestApp.csproj" -p:TargetFramework=net${APP_DOTNET_VERSION} + +COPY . . +WORKDIR "/src/KafkaTestApp" +RUN dotnet build "KafkaTestApp.csproj" -c Release -o /app/build --os linux --framework net${APP_DOTNET_VERSION} + + +FROM build AS publish +ARG APP_DOTNET_VERSION +RUN dotnet publish "KafkaTestApp.csproj" -c Release -o /app/publish /p:UseAppHost=false --os linux --framework net${APP_DOTNET_VERSION} + + +FROM base AS final + +# Enable the agent +ARG NEW_RELIC_HOST +ARG NEW_RELIC_LICENSE_KEY +ARG NEW_RELIC_APP_NAME + +ENV CORECLR_ENABLE_PROFILING=1 \ +CORECLR_PROFILER={36032161-FFC0-4B61-B559-F6C5D41BAE5A} \ +CORECLR_NEWRELIC_HOME=/usr/local/newrelic-dotnet-agent \ +CORECLR_PROFILER_PATH=/usr/local/newrelic-dotnet-agent/libNewRelicProfiler.so \ +NEW_RELIC_HOST=${NEW_RELIC_HOST} \ +NEW_RELIC_LICENSE_KEY=${NEW_RELIC_LICENSE_KEY} \ +NEW_RELIC_APP_NAME=${NEW_RELIC_APP_NAME} \ +NEWRELIC_LOG_DIRECTORY=/app/logs + +WORKDIR /app +COPY --from=publish /app/publish . + +ENTRYPOINT ["dotnet", "KafkaTestApp.dll"] diff --git a/tests/Agent/IntegrationTests/ContainerApplications/KafkaTestApp/KafkaTestApp.csproj b/tests/Agent/IntegrationTests/ContainerApplications/KafkaTestApp/KafkaTestApp.csproj new file mode 100644 index 000000000..546ccc3a5 --- /dev/null +++ b/tests/Agent/IntegrationTests/ContainerApplications/KafkaTestApp/KafkaTestApp.csproj @@ -0,0 +1,19 @@ + + + + net6.0;net7.0 + Linux + . 
+ latest + + + + + + + + + + + + + diff --git a/tests/Agent/IntegrationTests/ContainerApplications/KafkaTestApp/Producer.cs b/tests/Agent/IntegrationTests/ContainerApplications/KafkaTestApp/Producer.cs new file mode 100644 index 000000000..d2db1a266 --- /dev/null +++ b/tests/Agent/IntegrationTests/ContainerApplications/KafkaTestApp/Producer.cs @@ -0,0 +1,71 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. +// SPDX-License-Identifier: Apache-2.0 + +using System.Runtime.CompilerServices; +using System.Threading.Tasks; +using Confluent.Kafka; +using Confluent.Kafka.Admin; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.Logging; + +namespace KafkaTestApp +{ + public class Producer + { + private readonly string _topic; + private readonly IProducer<string, string> _producer; + private readonly ILogger _logger; + + public Producer(IConfiguration configuration, string topic, ILogger logger) + { + _topic = topic; + _producer = new ProducerBuilder<string, string>(configuration.AsEnumerable()).Build(); + _logger = logger; + } + + public void CreateTopic(IConfiguration configuration) + { + using (var adminClient = new AdminClientBuilder(configuration.AsEnumerable()).Build()) + { + try + { + adminClient.CreateTopicsAsync(new TopicSpecification[] { + new TopicSpecification { Name = _topic, ReplicationFactor = 1, NumPartitions = 1 } + }).Wait(10 * 1000); + } + catch (CreateTopicsException e) + { + _logger.LogInformation($"An error occurred creating topic {e.Results[0].Topic}: {e.Results[0].Error.Reason}"); + } + } + } + + [MethodImpl(MethodImplOptions.NoInlining)] + public async Task Produce() + { + var user = "syncTestUser"; + var item = "syncTestItem"; + + _producer.Produce(_topic, new Message<string, string> { Key = user, Value = item }, + (deliveryReport) => + { + if (deliveryReport.Error.Code != ErrorCode.NoError) + { + _logger.LogInformation($"Failed to deliver message: {deliveryReport.Error.Reason}"); + } + }); + + await Task.CompletedTask; + } + + [MethodImpl(MethodImplOptions.NoInlining)] + public async Task ProduceAsync() + { + var user = "asyncTestUser"; + var item = "asyncTestItem"; + + _ = _producer.ProduceAsync(_topic, new Message<string, string> { Key = user, Value = item }).Result; + await Task.CompletedTask; + } + } +} diff --git a/tests/Agent/IntegrationTests/ContainerApplications/KafkaTestApp/Program.cs b/tests/Agent/IntegrationTests/ContainerApplications/KafkaTestApp/Program.cs new file mode 100644 index 000000000..506fda287 --- /dev/null +++ b/tests/Agent/IntegrationTests/ContainerApplications/KafkaTestApp/Program.cs @@ -0,0 +1,98 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. +// SPDX-License-Identifier: Apache-2.0 + +using System; +using System.IO; +using System.Net; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.AspNetCore.Builder; +using Microsoft.AspNetCore.Hosting; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; + +namespace KafkaTestApp +{ + public class Program + { + private const int TopicNameLength = 15; + private static string _topic; + public static Producer Producer; + public static Consumer Consumer; + + public static async Task Main(string[] args) + { + var builder = WebApplication.CreateBuilder(args); + + // Add services to the container.
+ builder.Services.AddControllers(); + + // listen to any ip on port 80 for http + IPEndPoint ipEndPointHttp = new IPEndPoint(IPAddress.Any, 80); + builder.WebHost.UseUrls($"http://{ipEndPointHttp}"); + + var app = builder.Build(); + + SetupKafka(app.Logger); + var cTask = Task.Run(() => Consumer.StartConsuming()); + + // Configure the HTTP request pipeline. + app.UseAuthorization(); + app.MapControllers(); + + await app.StartAsync(); + + CreatePidFile(); + + await app.WaitForShutdownAsync(); + } + + public static void SetupKafka(ILogger logger) + { + Thread.Sleep(15 * 1000); // Waiting for Kafka to get ready + + var broker = Environment.GetEnvironmentVariable("NEW_RELIC_KAFKA_CONTAINER_NAME"); + var kafkaConfig = new ConfigurationBuilder().AddInMemoryCollection().Build(); + kafkaConfig["bootstrap.servers"] = $"{broker}:9092"; + kafkaConfig["group.id"] = "kafka-dotnet-getting-started"; + kafkaConfig["auto.offset.reset"] = "earliest"; + kafkaConfig["dotnet.cancellation.delay.max.ms"] = "10000"; + + _topic = GenerateTopic(); + Producer = new Producer(kafkaConfig, _topic, logger); + Producer.CreateTopic(kafkaConfig); + Consumer = new Consumer(kafkaConfig, _topic, logger); + } + + public static void CreatePidFile() + { + var pidFileNameAndPath = Path.Combine(Environment.GetEnvironmentVariable("NEWRELIC_LOG_DIRECTORY"), "containerizedapp.pid"); + var pid = Environment.ProcessId; + using var file = File.CreateText(pidFileNameAndPath); + file.WriteLine(pid); + } + + private static string GenerateTopic() + { + var providedTopic = Environment.GetEnvironmentVariable("NEW_RELIC_KAFKA_TOPIC"); + if (!string.IsNullOrEmpty(providedTopic)) + { + return providedTopic; + } + + var builder = new StringBuilder(); + var rnd = new Random(); + + for (int i = 0; i < TopicNameLength; i++) + { + var shifter = Convert.ToInt32(Math.Floor(25 * rnd.NextDouble())); + builder.Append(Convert.ToChar(shifter + 65)); + } + + return builder.ToString(); + } + } +} diff --git a/tests/Agent/IntegrationTests/ContainerApplications/KafkaTestApp/appsettings.Development.json b/tests/Agent/IntegrationTests/ContainerApplications/KafkaTestApp/appsettings.Development.json new file mode 100644 index 000000000..0c208ae91 --- /dev/null +++ b/tests/Agent/IntegrationTests/ContainerApplications/KafkaTestApp/appsettings.Development.json @@ -0,0 +1,8 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" + } + } +} diff --git a/tests/Agent/IntegrationTests/ContainerApplications/KafkaTestApp/appsettings.json b/tests/Agent/IntegrationTests/ContainerApplications/KafkaTestApp/appsettings.json new file mode 100644 index 000000000..10f68b8c8 --- /dev/null +++ b/tests/Agent/IntegrationTests/ContainerApplications/KafkaTestApp/appsettings.json @@ -0,0 +1,9 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" + } + }, + "AllowedHosts": "*" +} diff --git a/tests/Agent/IntegrationTests/ContainerApplications/docker-compose.yml b/tests/Agent/IntegrationTests/ContainerApplications/docker-compose.yml index 5a7db2a18..384212fa1 100644 --- a/tests/Agent/IntegrationTests/ContainerApplications/docker-compose.yml +++ b/tests/Agent/IntegrationTests/ContainerApplications/docker-compose.yml @@ -11,7 +11,8 @@ version: "3" # PLATFORM The platform that the service runs on -- linux/amd64 or linux/arm64/v8 # DOTNET_VERSION The dotnet version number to use (7.0, 8.0, etc) # NETWORK_NAME The network name to use for containers in this app. 
Should be unique among all running instances. -# +# TEST_DOCKERFILE The path and dockerfile to use for the service. +# # and the usual suspects: # NEW_RELIC_LICENSE_KEY # NEW_RELIC_HOST @@ -28,7 +29,7 @@ services: platform: ${PLATFORM} build: context: . - dockerfile: SmokeTestApp/Dockerfile + dockerfile: ${TEST_DOCKERFILE} args: DISTRO_TAG: ${DISTRO_TAG} TARGET_ARCH: ${TARGET_ARCH} @@ -36,6 +37,7 @@ services: NEW_RELIC_APP_NAME: ${NEW_RELIC_APP_NAME} NEW_RELIC_HOST: ${NEW_RELIC_HOST} DOTNET_VERSION: ${DOTNET_VERSION} + APP_DOTNET_VERSION: ${APP_DOTNET_VERSION} ports: - "${PORT}:80" volumes: @@ -76,8 +78,49 @@ services: service: smoketestapp build: dockerfile: SmokeTestApp/Dockerfile.centos + + UbuntuX64Kafka1TestApp: + extends: + service: smoketestapp + depends_on: + - kafka-broker + environment: + - NEW_RELIC_KAFKA_TOPIC=${NEW_RELIC_KAFKA_TOPIC} + - NEW_RELIC_KAFKA_CONTAINER_NAME=${NEW_RELIC_KAFKA_CONTAINER_NAME} + + UbuntuX64Kafka2TestApp: + extends: + service: smoketestapp + depends_on: + - kafka-broker + environment: + - NEW_RELIC_KAFKA_TOPIC=${NEW_RELIC_KAFKA_TOPIC} + - NEW_RELIC_KAFKA_CONTAINER_NAME=${NEW_RELIC_KAFKA_CONTAINER_NAME} + + kafka-broker: + image: confluentinc/cp-kafka:7.5.0 + container_name: ${NEW_RELIC_KAFKA_CONTAINER_NAME} + environment: + KAFKA_BROKER_ID: 1 + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT + KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://${NEW_RELIC_KAFKA_CONTAINER_NAME}:29092,PLAINTEXT_HOST://${NEW_RELIC_KAFKA_CONTAINER_NAME}:9092 + KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 + KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 + KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 + KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 + KAFKA_PROCESS_ROLES: broker,controller + KAFKA_NODE_ID: 1 + KAFKA_CONTROLLER_QUORUM_VOTERS: 1@${NEW_RELIC_KAFKA_CONTAINER_NAME}:29093 + KAFKA_LISTENERS: PLAINTEXT://${NEW_RELIC_KAFKA_CONTAINER_NAME}:29092,CONTROLLER://${NEW_RELIC_KAFKA_CONTAINER_NAME}:29093,PLAINTEXT_HOST://:9092 + KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT + KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER + KAFKA_LOG_DIRS: /tmp/kraft-combined-logs + CLUSTER_ID: MkU3OEVBNTcwNTJENDM2Qk + networks: default: name: ${NETWORK_NAME} driver: bridge + driver_opts: + com.docker.network.bridge.enable_icc: "true" diff --git a/tests/Agent/IntegrationTests/ContainerIntegrationTests.sln b/tests/Agent/IntegrationTests/ContainerIntegrationTests.sln index 6c4d3d817..bd86b6585 100644 --- a/tests/Agent/IntegrationTests/ContainerIntegrationTests.sln +++ b/tests/Agent/IntegrationTests/ContainerIntegrationTests.sln @@ -20,6 +20,10 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "_docker", "_docker", "{FB10 ContainerApplications\docker-compose.yml = ContainerApplications\docker-compose.yml EndProjectSection EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaTestApp", "ContainerApplications\KafkaTestApp\KafkaTestApp.csproj", "{1F7402D8-E345-480C-BBA6-6313A1DEEB23}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "NewRelic.Testing.Assertions", "..\NewRelic.Testing.Assertions\NewRelic.Testing.Assertions.csproj", "{C0ADF41E-F8B8-4ECA-828F-F578E09B17A9}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -46,6 +50,14 @@ Global {F7EFC853-3F97-4D91-9A6F-37ADAAC739C1}.Debug|Any CPU.Build.0 = Debug|Any CPU {F7EFC853-3F97-4D91-9A6F-37ADAAC739C1}.Release|Any CPU.ActiveCfg = Release|Any CPU {F7EFC853-3F97-4D91-9A6F-37ADAAC739C1}.Release|Any CPU.Build.0 = 
Release|Any CPU
+		{1F7402D8-E345-480C-BBA6-6313A1DEEB23}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{1F7402D8-E345-480C-BBA6-6313A1DEEB23}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{1F7402D8-E345-480C-BBA6-6313A1DEEB23}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{1F7402D8-E345-480C-BBA6-6313A1DEEB23}.Release|Any CPU.Build.0 = Release|Any CPU
+		{C0ADF41E-F8B8-4ECA-828F-F578E09B17A9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{C0ADF41E-F8B8-4ECA-828F-F578E09B17A9}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{C0ADF41E-F8B8-4ECA-828F-F578E09B17A9}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{C0ADF41E-F8B8-4ECA-828F-F578E09B17A9}.Release|Any CPU.Build.0 = Release|Any CPU
 	EndGlobalSection
 	GlobalSection(SolutionProperties) = preSolution
 		HideSolutionNode = FALSE
@@ -53,6 +65,7 @@ Global
 	GlobalSection(NestedProjects) = preSolution
 		{FBA07795-8066-4641-88E5-05DD272D333A} = {84D70574-4AC7-4EA7-AE52-832C3531E082}
 		{FB10922F-3CC6-4497-AF53-DF6808380258} = {84D70574-4AC7-4EA7-AE52-832C3531E082}
+		{1F7402D8-E345-480C-BBA6-6313A1DEEB23} = {84D70574-4AC7-4EA7-AE52-832C3531E082}
 	EndGlobalSection
 	GlobalSection(ExtensibilityGlobals) = postSolution
 		SolutionGuid = {BB230433-D05D-4A1F-951B-CC14F47BBF42}
diff --git a/tests/Agent/IntegrationTests/ContainerIntegrationTests/ContainerFixtures/ContainerApplication.cs b/tests/Agent/IntegrationTests/ContainerIntegrationTests/ContainerFixtures/ContainerApplication.cs
index c107cab81..7042f1627 100644
--- a/tests/Agent/IntegrationTests/ContainerIntegrationTests/ContainerFixtures/ContainerApplication.cs
+++ b/tests/Agent/IntegrationTests/ContainerIntegrationTests/ContainerFixtures/ContainerApplication.cs
@@ -1,4 +1,4 @@
-// Copyright 2020 New Relic, Inc. All rights reserved.
+// Copyright 2020 New Relic, Inc. All rights reserved.
 // SPDX-License-Identifier: Apache-2.0
 
 using System;
@@ -15,7 +15,7 @@ namespace NewRelic.Agent.ContainerIntegrationTests.ContainerFixtures;
 
 public class ContainerApplication : RemoteApplication
 {
-
+    private readonly string _dockerfile;
     private readonly string _dotnetVersion;
     private readonly string _distroTag;
     private readonly string _targetArch;
@@ -23,6 +23,9 @@ public class ContainerApplication
     private readonly string _containerPlatform;
     private readonly string _dockerComposeServiceName;
 
+    // Used for handling dependent containers started automatically for services
+    public readonly List<string> DockerDependencies;
+
     protected override string ApplicationDirectoryName { get; }
 
     protected override string SourceApplicationDirectoryPath
@@ -33,12 +36,14 @@ protected override string SourceApplicationDirectoryPath
         }
     }
 
-    public ContainerApplication(string applicationDirectoryName, string distroTag, Architecture containerArchitecture, string dotnetVersion) : base(applicationType: ApplicationType.Container, isCoreApp: true)
+    public ContainerApplication(string applicationDirectoryName, string distroTag, Architecture containerArchitecture, string dotnetVersion, string dockerfile) : base(applicationType: ApplicationType.Container, isCoreApp: true)
     {
         ApplicationDirectoryName = applicationDirectoryName;
        _dockerComposeServiceName = applicationDirectoryName;
         _distroTag = distroTag;
         _dotnetVersion = dotnetVersion;
+        _dockerfile = dockerfile;
+        DockerDependencies = new List<string>();
 
         switch (containerArchitecture)
         {
@@ -57,7 +62,7 @@ public ContainerApplication(string applicationDirectoryName, string distroTag, A
 
     public override string AppName => $"ContainerApplication: {_dotnetVersion}-{_distroTag}_{_targetArch}";
 
-    private string ContainerName => $"smoketestapp_{_dotnetVersion}-{_distroTag}_{_targetArch}".ToLower(); // must be lowercase
$"smoketestapp_{_dotnetVersion}-{_distroTag}_{_targetArch}".ToLower(); // must be lowercase + private string ContainerName => $"{_dockerComposeServiceName}_{_dotnetVersion}-{_distroTag}_{_targetArch}".ToLower(); // must be lowercase public override void CopyToRemote() { @@ -70,7 +75,7 @@ public override void Start(string commandLineArguments, Dictionary 0) + { + foreach (var dep in DockerDependencies) + { + Process.Start("docker", $"container rm --force {dep}"); + } + } + + // Cleanup the networks with no attached containers. Mainly for testings on dev laptops - they can build up and block runs. + Process.Start("docker", "network prune -f"); } protected virtual void WaitForAppServerToStartListening(Process process, bool captureStandardOutput) diff --git a/tests/Agent/IntegrationTests/ContainerIntegrationTests/ContainerFixtures/ContainerFixture.cs b/tests/Agent/IntegrationTests/ContainerIntegrationTests/ContainerFixtures/ContainerFixture.cs index 4e1173687..9a6401a3d 100644 --- a/tests/Agent/IntegrationTests/ContainerIntegrationTests/ContainerFixtures/ContainerFixture.cs +++ b/tests/Agent/IntegrationTests/ContainerIntegrationTests/ContainerFixtures/ContainerFixture.cs @@ -1,4 +1,4 @@ -// Copyright 2020 New Relic, Inc. All rights reserved. +// Copyright 2020 New Relic, Inc. All rights reserved. // SPDX-License-Identifier: Apache-2.0 using NewRelic.Agent.IntegrationTestHelpers.RemoteServiceFixtures; diff --git a/tests/Agent/IntegrationTests/ContainerIntegrationTests/ContainerFixtures/LinuxKafkaTestFixtures.cs b/tests/Agent/IntegrationTests/ContainerIntegrationTests/ContainerFixtures/LinuxKafkaTestFixtures.cs new file mode 100644 index 000000000..54c02e5c5 --- /dev/null +++ b/tests/Agent/IntegrationTests/ContainerIntegrationTests/ContainerFixtures/LinuxKafkaTestFixtures.cs @@ -0,0 +1,54 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. 
diff --git a/tests/Agent/IntegrationTests/ContainerIntegrationTests/ContainerFixtures/LinuxKafkaTestFixtures.cs b/tests/Agent/IntegrationTests/ContainerIntegrationTests/ContainerFixtures/LinuxKafkaTestFixtures.cs
new file mode 100644
index 000000000..54c02e5c5
--- /dev/null
+++ b/tests/Agent/IntegrationTests/ContainerIntegrationTests/ContainerFixtures/LinuxKafkaTestFixtures.cs
@@ -0,0 +1,54 @@
+// Copyright 2020 New Relic, Inc. All rights reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+using System;
+using System.Threading.Tasks;
+
+namespace NewRelic.Agent.ContainerIntegrationTests.ContainerFixtures;
+
+public abstract class LinuxKafkaTestFixtureBase : ContainerFixture
+{
+    protected LinuxKafkaTestFixtureBase(
+        string applicationDirectoryName,
+        string distroTag,
+        ContainerApplication.Architecture containerArchitecture,
+        string dockerfile,
+        string dotnetVersion) :
+        base(new ContainerApplication(applicationDirectoryName, distroTag, containerArchitecture, dotnetVersion, dockerfile))
+    {
+    }
+
+    public virtual void ExerciseApplication()
+    {
+        var address = $"http://localhost:{Port}/kafka/";
+        GetAndAssertStatusCode(address + "produce", System.Net.HttpStatusCode.OK);
+        GetAndAssertStatusCode(address + "produceasync", System.Net.HttpStatusCode.OK);
+    }
+
+    public void Delay(int seconds)
+    {
+        Task.Delay(TimeSpan.FromSeconds(seconds)).GetAwaiter().GetResult();
+    }
+}
+
+public class UbuntuX64Kafka1TestFixture : LinuxKafkaTestFixtureBase
+{
+    private static readonly string Dockerfile = "KafkaTestApp/Dockerfile";
+    private static readonly string ApplicationDirectoryName = "UbuntuX64Kafka1TestApp";
+    private static readonly ContainerApplication.Architecture Architecture = ContainerApplication.Architecture.X64;
+    private static readonly string DistroTag = "bullseye-slim";
+    private static readonly string DotnetVersion = "6.0";
+
+    public UbuntuX64Kafka1TestFixture() : base(ApplicationDirectoryName, DistroTag, Architecture, Dockerfile, DotnetVersion) { }
+}
+
+public class UbuntuX64Kafka2TestFixture : LinuxKafkaTestFixtureBase
+{
+    private static readonly string Dockerfile = "KafkaTestApp/Dockerfile";
+    private static readonly string ApplicationDirectoryName = "UbuntuX64Kafka2TestApp";
+    private static readonly ContainerApplication.Architecture Architecture = ContainerApplication.Architecture.X64;
+    private static readonly string DistroTag = "bullseye-slim";
+    private static readonly string DotnetVersion = "7.0";
+
+    public UbuntuX64Kafka2TestFixture() : base(ApplicationDirectoryName, DistroTag, Architecture, Dockerfile, DotnetVersion) { }
+}
diff --git a/tests/Agent/IntegrationTests/ContainerIntegrationTests/ContainerFixtures/LinuxSmokeTestFixtures.cs b/tests/Agent/IntegrationTests/ContainerIntegrationTests/ContainerFixtures/LinuxSmokeTestFixtures.cs
index d8e2aef9c..6d8e471e2 100644
--- a/tests/Agent/IntegrationTests/ContainerIntegrationTests/ContainerFixtures/LinuxSmokeTestFixtures.cs
+++ b/tests/Agent/IntegrationTests/ContainerIntegrationTests/ContainerFixtures/LinuxSmokeTestFixtures.cs
@@ -11,8 +11,8 @@ public abstract class LinuxSmokeTestFixtureBase : ContainerFixture
 {
     private static readonly string DotnetVersion = "7.0";
 
-    protected LinuxSmokeTestFixtureBase(string applicationDirectoryName, string distroTag, ContainerApplication.Architecture containerArchitecture) :
-        base(new ContainerApplication(applicationDirectoryName, distroTag, containerArchitecture, DotnetVersion))
+    protected LinuxSmokeTestFixtureBase(string applicationDirectoryName, string distroTag, ContainerApplication.Architecture containerArchitecture, string dockerfile) :
+        base(new ContainerApplication(applicationDirectoryName, distroTag, containerArchitecture, DotnetVersion, dockerfile))
     {
     }
 
@@ -30,78 +30,87 @@ public void Delay(int seconds)
 
 public class DebianX64SmokeTestFixture : LinuxSmokeTestFixtureBase
 {
+    private static readonly string Dockerfile = "SmokeTestApp/Dockerfile";
     private static readonly string ApplicationDirectoryName =
"DebianX64SmokeTestApp"; private static readonly ContainerApplication.Architecture Architecture = ContainerApplication.Architecture.X64; private static readonly string DistroTag = "jammy"; - public DebianX64SmokeTestFixture() : base(ApplicationDirectoryName, DistroTag, Architecture) { } + public DebianX64SmokeTestFixture() : base(ApplicationDirectoryName, DistroTag, Architecture, Dockerfile) { } } public class UbuntuX64SmokeTestFixture : LinuxSmokeTestFixtureBase { + private static readonly string Dockerfile = "SmokeTestApp/Dockerfile"; private static readonly string ApplicationDirectoryName = "UbuntuX64SmokeTestApp"; private static readonly ContainerApplication.Architecture Architecture = ContainerApplication.Architecture.X64; private static readonly string DistroTag = "bullseye-slim"; - public UbuntuX64SmokeTestFixture() : base(ApplicationDirectoryName, DistroTag, Architecture) { } + public UbuntuX64SmokeTestFixture() : base(ApplicationDirectoryName, DistroTag, Architecture, Dockerfile) { } } public class AlpineX64SmokeTestFixture : LinuxSmokeTestFixtureBase { + private static readonly string Dockerfile = "SmokeTestApp/Dockerfile"; private static readonly string ApplicationDirectoryName = "AlpineX64SmokeTestApp"; private static readonly ContainerApplication.Architecture Architecture = ContainerApplication.Architecture.X64; private static readonly string DistroTag = "alpine"; - public AlpineX64SmokeTestFixture() : base(ApplicationDirectoryName, DistroTag, Architecture) { } + public AlpineX64SmokeTestFixture() : base(ApplicationDirectoryName, DistroTag, Architecture, Dockerfile) { } } public class DebianArm64SmokeTestFixture : LinuxSmokeTestFixtureBase { + private static readonly string Dockerfile = "SmokeTestApp/Dockerfile"; private static readonly string ApplicationDirectoryName = "DebianArm64SmokeTestApp"; private static readonly ContainerApplication.Architecture Architecture = ContainerApplication.Architecture.Arm64; private static readonly string DistroTag = "jammy"; - public DebianArm64SmokeTestFixture() : base(ApplicationDirectoryName, DistroTag, Architecture) { } + public DebianArm64SmokeTestFixture() : base(ApplicationDirectoryName, DistroTag, Architecture, Dockerfile) { } } public class UbuntuArm64SmokeTestFixture : LinuxSmokeTestFixtureBase { + private static readonly string Dockerfile = "SmokeTestApp/Dockerfile"; private static readonly string ApplicationDirectoryName = "UbuntuArm64SmokeTestApp"; private static readonly ContainerApplication.Architecture Architecture = ContainerApplication.Architecture.Arm64; private static readonly string DistroTag = "bullseye-slim"; - public UbuntuArm64SmokeTestFixture() : base(ApplicationDirectoryName, DistroTag, Architecture) { } + public UbuntuArm64SmokeTestFixture() : base(ApplicationDirectoryName, DistroTag, Architecture, Dockerfile) { } } public class CentosX64SmokeTestFixture : LinuxSmokeTestFixtureBase { + private static readonly string Dockerfile = "SmokeTestApp/Dockerfile"; private static readonly string ApplicationDirectoryName = "CentosX64SmokeTestApp"; private static readonly ContainerApplication.Architecture Architecture = ContainerApplication.Architecture.X64; private static readonly string DistroTag = "centos"; - public CentosX64SmokeTestFixture() : base(ApplicationDirectoryName, DistroTag, Architecture) { } + public CentosX64SmokeTestFixture() : base(ApplicationDirectoryName, DistroTag, Architecture, Dockerfile) { } } public class CentosArm64SmokeTestFixture : LinuxSmokeTestFixtureBase { + private static readonly string 
Dockerfile = "SmokeTestApp/Dockerfile"; private static readonly string ApplicationDirectoryName = "CentosArm64SmokeTestApp"; private static readonly ContainerApplication.Architecture Architecture = ContainerApplication.Architecture.Arm64; private static readonly string DistroTag = "centos"; - public CentosArm64SmokeTestFixture() : base(ApplicationDirectoryName, DistroTag, Architecture) { } + public CentosArm64SmokeTestFixture() : base(ApplicationDirectoryName, DistroTag, Architecture, Dockerfile) { } } public class AmazonX64SmokeTestFixture : LinuxSmokeTestFixtureBase { + private static readonly string Dockerfile = "SmokeTestApp/Dockerfile"; private static readonly string ApplicationDirectoryName = "AmazonX64SmokeTestApp"; private static readonly ContainerApplication.Architecture Architecture = ContainerApplication.Architecture.X64; private static readonly string DistroTag = "amazonlinux"; - public AmazonX64SmokeTestFixture() : base(ApplicationDirectoryName, DistroTag, Architecture) { } + public AmazonX64SmokeTestFixture() : base(ApplicationDirectoryName, DistroTag, Architecture, Dockerfile) { } } public class AmazonArm64SmokeTestFixture : LinuxSmokeTestFixtureBase { + private static readonly string Dockerfile = "SmokeTestApp/Dockerfile"; private static readonly string ApplicationDirectoryName = "AmazonArm64SmokeTestApp"; private static readonly ContainerApplication.Architecture Architecture = ContainerApplication.Architecture.Arm64; private static readonly string DistroTag = "amazonlinux"; - public AmazonArm64SmokeTestFixture() : base(ApplicationDirectoryName, DistroTag, Architecture) { } + public AmazonArm64SmokeTestFixture() : base(ApplicationDirectoryName, DistroTag, Architecture, Dockerfile) { } } diff --git a/tests/Agent/IntegrationTests/ContainerIntegrationTests/ContainerIntegrationTests.csproj b/tests/Agent/IntegrationTests/ContainerIntegrationTests/ContainerIntegrationTests.csproj index 7302ff737..10cbb47b6 100644 --- a/tests/Agent/IntegrationTests/ContainerIntegrationTests/ContainerIntegrationTests.csproj +++ b/tests/Agent/IntegrationTests/ContainerIntegrationTests/ContainerIntegrationTests.csproj @@ -23,6 +23,7 @@ + diff --git a/tests/Agent/IntegrationTests/ContainerIntegrationTests/LinuxKafkaTests.cs b/tests/Agent/IntegrationTests/ContainerIntegrationTests/LinuxKafkaTests.cs new file mode 100644 index 000000000..5cdba5e56 --- /dev/null +++ b/tests/Agent/IntegrationTests/ContainerIntegrationTests/LinuxKafkaTests.cs @@ -0,0 +1,128 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. 
diff --git a/tests/Agent/IntegrationTests/ContainerIntegrationTests/LinuxKafkaTests.cs b/tests/Agent/IntegrationTests/ContainerIntegrationTests/LinuxKafkaTests.cs
new file mode 100644
index 000000000..5cdba5e56
--- /dev/null
+++ b/tests/Agent/IntegrationTests/ContainerIntegrationTests/LinuxKafkaTests.cs
@@ -0,0 +1,128 @@
+// Copyright 2020 New Relic, Inc. All rights reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using NewRelic.Agent.ContainerIntegrationTests.ContainerFixtures;
+using NewRelic.Agent.IntegrationTestHelpers;
+using NewRelic.Testing.Assertions;
+using Xunit;
+using Xunit.Abstractions;
+
+namespace ContainerIntegrationTests;
+
+public abstract class LinuxKafkaTest<T> : NewRelicIntegrationTest<T> where T : LinuxKafkaTestFixtureBase
+{
+    private const int TopicNameLength = 15;
+
+    internal string _topicName;
+    private readonly T _fixture;
+    private static readonly Random _rnd = new();
+
+    protected LinuxKafkaTest(T fixture, ITestOutputHelper output) : base(fixture)
+    {
+        _fixture = fixture;
+        _fixture.TestLogger = output;
+
+        _topicName = GenerateTopic();
+        var brokerName = "broker" + _topicName;
+        ((ContainerApplication)_fixture.RemoteApplication).DockerDependencies.Add(brokerName);
+
+        _fixture.Actions(setupConfiguration: () =>
+            {
+                var configModifier = new NewRelicConfigModifier(_fixture.DestinationNewRelicConfigFilePath);
+                configModifier.SetLogLevel("debug");
+                configModifier.ConfigureFasterMetricsHarvestCycle(10);
+                configModifier.LogToConsole();
+
+                _fixture.RemoteApplication.SetAdditionalEnvironmentVariable("NEW_RELIC_KAFKA_TOPIC", _topicName);
+                _fixture.RemoteApplication.SetAdditionalEnvironmentVariable("NEW_RELIC_KAFKA_CONTAINER_NAME", brokerName);
+            },
+            exerciseApplication: () =>
+            {
+                _fixture.Delay(15); // wait long enough to ensure kafka and app are ready
+                _fixture.ExerciseApplication();
+
+                _fixture.Delay(11); // wait long enough to ensure a metric harvest occurs after we exercise the app
+                _fixture.AgentLog.WaitForLogLine(AgentLogBase.HarvestFinishedLogLineRegex, TimeSpan.FromSeconds(11));
+
+                // shut down the container and wait for the agent log to see it
+                _fixture.ShutdownRemoteApplication();
+                _fixture.AgentLog.WaitForLogLine(AgentLogBase.ShutdownLogLineRegex, TimeSpan.FromSeconds(10));
+            });
+
+        _fixture.Initialize();
+    }
+
+    [Fact]
+    public void Test()
+    {
+        var messageBrokerProduce = "MessageBroker/Kafka/Topic/Produce/Named/" + _topicName;
+        var messageBrokerProduceSerializationKey = messageBrokerProduce + "/Serialization/Key";
+        var messageBrokerProduceSerializationValue = messageBrokerProduce + "/Serialization/Value";
+
+        var messageBrokerConsume = "MessageBroker/Kafka/Topic/Consume/Named/" + _topicName;
+
+        var consumeTransactionName = @"OtherTransaction/Message/Kafka/Topic/Consume/Named/" + _topicName;
+        var produceWebTransactionName = @"WebTransaction/MVC/Kafka/Produce";
+
+        var metrics = _fixture.AgentLog.GetMetrics();
+        var spans = _fixture.AgentLog.GetSpanEvents();
+        var produceSpan = spans.FirstOrDefault(s => s.IntrinsicAttributes["name"].Equals(messageBrokerProduce));
+        var consumeTxnSpan = spans.FirstOrDefault(s => s.IntrinsicAttributes["name"].Equals(consumeTransactionName));
+
+        var expectedMetrics = new List<Assertions.ExpectedMetric>
+        {
+            new Assertions.ExpectedMetric { metricName = produceWebTransactionName, callCount = 2 }, // includes sync and async actions
+            new Assertions.ExpectedMetric { metricName = messageBrokerProduce, callCount = 2 },
+            new Assertions.ExpectedMetric { metricName = messageBrokerProduce, metricScope = produceWebTransactionName, callCount = 2 },
+            new Assertions.ExpectedMetric { metricName = messageBrokerProduceSerializationKey, callCount = 2 },
+            new Assertions.ExpectedMetric { metricName = messageBrokerProduceSerializationKey, metricScope = produceWebTransactionName, callCount = 2 },
+            new Assertions.ExpectedMetric { metricName = messageBrokerProduceSerializationValue, callCount = 2 },
+            new Assertions.ExpectedMetric { metricName = messageBrokerProduceSerializationValue, metricScope = produceWebTransactionName, callCount = 2 },
+
+            new Assertions.ExpectedMetric { metricName = consumeTransactionName, callCount = 2 },
+            new Assertions.ExpectedMetric { metricName = messageBrokerConsume, callCount = 2 },
+            new Assertions.ExpectedMetric { metricName = messageBrokerConsume, metricScope = consumeTransactionName, callCount = 2 },
+        };
+
+        NrAssert.Multiple(
+            () => Assertions.MetricsExist(expectedMetrics, metrics),
+            () => Assert.True(produceSpan.IntrinsicAttributes.ContainsKey("traceId")),
+            () => Assert.True(produceSpan.IntrinsicAttributes.ContainsKey("parentId")),
+            () => Assert.NotNull(consumeTxnSpan),
+            () => Assert.True(consumeTxnSpan.UserAttributes.ContainsKey("kafka.consume.byteCount")),
+            () => Assert.InRange((long)consumeTxnSpan.UserAttributes["kafka.consume.byteCount"], 20, 30), // usually is 24 - 26
+            () => Assert.True(consumeTxnSpan.IntrinsicAttributes.ContainsKey("traceId")),
+            () => Assert.False(consumeTxnSpan.IntrinsicAttributes.ContainsKey("parentId"))
+        );
+    }
+
+    internal static string GenerateTopic()
+    {
+        var builder = new StringBuilder();
+        for (int i = 0; i < TopicNameLength; i++)
+        {
+            var shifter = Convert.ToInt32(Math.Floor(25 * _rnd.NextDouble()));
+            builder.Append(Convert.ToChar(shifter + 65));
+        }
+
+        return builder.ToString();
+    }
+}
+
+public class UbuntuX64Kafka1Test : LinuxKafkaTest<UbuntuX64Kafka1TestFixture>
+{
+    public UbuntuX64Kafka1Test(UbuntuX64Kafka1TestFixture fixture, ITestOutputHelper output) : base(fixture, output)
+    {
+    }
+}
+
+public class UbuntuX64Kafka2Test : LinuxKafkaTest<UbuntuX64Kafka2TestFixture>
+{
+    public UbuntuX64Kafka2Test(UbuntuX64Kafka2TestFixture fixture, ITestOutputHelper output) : base(fixture, output)
+    {
+    }
+}
diff --git a/tests/Agent/IntegrationTests/ContainerIntegrationTests/LinuxSmokeTests.cs b/tests/Agent/IntegrationTests/ContainerIntegrationTests/LinuxSmokeTests.cs
index 606be501f..8618ca533 100644
--- a/tests/Agent/IntegrationTests/ContainerIntegrationTests/LinuxSmokeTests.cs
+++ b/tests/Agent/IntegrationTests/ContainerIntegrationTests/LinuxSmokeTests.cs
@@ -1,4 +1,4 @@
-// Copyright 2020 New Relic, Inc. All rights reserved.
+// Copyright 2020 New Relic, Inc. All rights reserved.
 // SPDX-License-Identifier: Apache-2.0
 
 using System;
diff --git a/tests/Agent/UnitTests/CompositeTests/TransactionNameTests.cs b/tests/Agent/UnitTests/CompositeTests/TransactionNameTests.cs
index 265cd8b67..becb569a7 100644
--- a/tests/Agent/UnitTests/CompositeTests/TransactionNameTests.cs
+++ b/tests/Agent/UnitTests/CompositeTests/TransactionNameTests.cs
@@ -177,6 +177,33 @@ public void SetMessageBrokerTransactionName_UpdatesTransactionNameCorrectly()
             NrAssert.Multiple(
                 () => MetricAssertions.MetricsExist(expectedMetrics, actualMetrics),
                 () => Assert.AreEqual("OtherTransaction/Message/vendor/Queue/Named/dest", transactionTrace.TransactionMetricName)
+            );
+        }
+
+        [Test]
+        public void SetKafkaMessageBrokerTransactionName_UpdatesTransactionNameCorrectly()
+        {
+            var transaction = _agent.CreateTransaction(
+                isWeb: true,
+                category: EnumNameCache<WebTransactionType>.GetName(WebTransactionType.Action),
+                transactionDisplayName: "name",
+                doNotTrackAsUnitOfWork: true);
+            transaction.SetKafkaMessageBrokerTransactionName(MessageBrokerDestinationType.Topic, "vendor", "dest", TransactionNamePriority.Route);
+            var segment = _agent.StartTransactionSegmentOrThrow("simpleName");
+            segment.End();
+            transaction.End();
+
+            _compositeTestAgent.Harvest();
+
+            var expectedMetrics = new[]
+            {
+                new ExpectedMetric {Name = "OtherTransaction/Message/vendor/Topic/Consume/Named/dest"}
+            };
+            var actualMetrics = _compositeTestAgent.Metrics.ToList();
+            var transactionTrace = _compositeTestAgent.TransactionTraces.First();
+            NrAssert.Multiple(
+                () => MetricAssertions.MetricsExist(expectedMetrics, actualMetrics),
+                () => Assert.AreEqual("OtherTransaction/Message/vendor/Topic/Consume/Named/dest", transactionTrace.TransactionMetricName)
             );
         }
 
diff --git a/tests/Agent/UnitTests/Core.UnitTest/AgentHealth/AgentHealthReporterTests.cs b/tests/Agent/UnitTests/Core.UnitTest/AgentHealth/AgentHealthReporterTests.cs
index d3c047115..dc5f2eb83 100644
--- a/tests/Agent/UnitTests/Core.UnitTest/AgentHealth/AgentHealthReporterTests.cs
+++ b/tests/Agent/UnitTests/Core.UnitTest/AgentHealth/AgentHealthReporterTests.cs
@@ -136,6 +136,45 @@ public void ReportSupportabilityCountMetric_SuppliedCount()
             );
         }
 
+        [Test]
+        public void ReportCountMetric()
+        {
+            const string MetricName = "Some/Metric/Name";
+            _agentHealthReporter.ReportCountMetric(MetricName, 2);
+            Assert.AreEqual(1, _publishedMetrics.Count);
+            NrAssert.Multiple(
+                () => Assert.AreEqual(MetricName, _publishedMetrics[0].MetricName.Name),
+                () => Assert.AreEqual(2, _publishedMetrics[0].Data.Value0)
+            );
+        }
+
+        [Test]
+        public void ReportByteMetric()
+        {
+            const string MetricName = "Some/Metric/Name";
+            const long totalBytes = 1024 * 1024 * 1024;
+            _agentHealthReporter.ReportByteMetric(MetricName, totalBytes);
+            Assert.AreEqual(1, _publishedMetrics.Count);
+            NrAssert.Multiple(
+                () => Assert.AreEqual(MetricName, _publishedMetrics[0].MetricName.Name),
+                () => Assert.AreEqual(MetricDataWireModel.BuildByteData(totalBytes), _publishedMetrics[0].Data)
+            );
+        }
+
+        [Test]
+        public void ReportByteMetric_WithExclusiveBytes()
+        {
+            const string MetricName = "Some/Metric/Name";
+            const long totalBytes = 1024 * 1024 * 1024;
+            const long exclusiveBytes = 1024 * 1024 * 64;
+            _agentHealthReporter.ReportByteMetric(MetricName, totalBytes, exclusiveBytes);
+            Assert.AreEqual(1, _publishedMetrics.Count);
+            NrAssert.Multiple(
+                () => Assert.AreEqual(MetricName, _publishedMetrics[0].MetricName.Name),
+                () => Assert.AreEqual(MetricDataWireModel.BuildByteData(totalBytes, exclusiveBytes), _publishedMetrics[0].Data)
+            );
+        }
+
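
The ReportCountMetric and ReportByteMetric tests above pin down the reporter plumbing that consume-side Kafka instrumentation can use for payload sizes, such as the kafka.consume.byteCount attribute asserted earlier. A minimal sketch of a plausible call site (the stub interface and the metric name are assumptions for illustration, not this PR's actual wiring):

// Sketch only: a plausible consume-side call site for the new byte-metric API.
// IAgentHealthReporterStub is a stand-in for the real reporter interface.
public interface IAgentHealthReporterStub
{
    void ReportByteMetric(string metricName, long totalBytes);
}

public class KafkaConsumeByteReporting
{
    private readonly IAgentHealthReporterStub _healthReporter;

    public KafkaConsumeByteReporting(IAgentHealthReporterStub healthReporter)
        => _healthReporter = healthReporter;

    public void OnMessageConsumed(string topic, byte[] key, byte[] value)
    {
        // Key plus value payload size, mirroring the kafka.consume.byteCount
        // range asserted in LinuxKafkaTests.
        long byteCount = (key?.Length ?? 0) + (value?.Length ?? 0);

        // The metric name shape here is assumed for the sketch.
        _healthReporter.ReportByteMetric($"Message/Kafka/Topic/Named/{topic}/Received/Bytes", byteCount);
    }
}
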
[Test] public void CollectMetrics_ReportsAgentVersion() { diff --git a/tests/Agent/UnitTests/Core.UnitTest/Segments/SegmentTests.cs b/tests/Agent/UnitTests/Core.UnitTest/Segments/SegmentTests.cs index d5f2a9bb8..7fbadda62 100644 --- a/tests/Agent/UnitTests/Core.UnitTest/Segments/SegmentTests.cs +++ b/tests/Agent/UnitTests/Core.UnitTest/Segments/SegmentTests.cs @@ -5,6 +5,7 @@ using NewRelic.Agent.Core.Wrapper.AgentWrapperApi.Data; using NUnit.Framework; using System; +using NewRelic.Agent.Core.Metrics; namespace NewRelic.Agent.Core.Segments.Tests { @@ -22,5 +23,17 @@ public void End_WithException_HasErrorData() Assert.AreEqual("System.Exception", segment.ErrorData.ErrorTypeName); Assert.AreEqual("Unhandled exception", segment.ErrorData.ErrorMessage); } + + [Test] + public void SetMessageBrokerDestination_SetsDestination_IfSegmentData_IsMessageBrokerSegmentData() + { + var segment = new Segment(TransactionSegmentStateHelpers.GetItransactionSegmentState(), new MethodCallData("Type", "Method", 1)); + var messageBrokerSegmentData = new MessageBrokerSegmentData("broker", "unknown", MetricNames.MessageBrokerDestinationType.Topic, MetricNames.MessageBrokerAction.Consume); + segment.SetSegmentData(messageBrokerSegmentData); + + segment.SetMessageBrokerDestination("destination"); + + Assert.AreEqual("destination", ((MessageBrokerSegmentData)segment.SegmentData).Destination ); + } } } diff --git a/tests/Agent/UnitTests/Core.UnitTest/Transformers/TransactionTransformer/TransactionMetricNameMakerTests.cs b/tests/Agent/UnitTests/Core.UnitTest/Transformers/TransactionTransformer/TransactionMetricNameMakerTests.cs index 080ead795..56dee2c85 100644 --- a/tests/Agent/UnitTests/Core.UnitTest/Transformers/TransactionTransformer/TransactionMetricNameMakerTests.cs +++ b/tests/Agent/UnitTests/Core.UnitTest/Transformers/TransactionTransformer/TransactionMetricNameMakerTests.cs @@ -118,5 +118,27 @@ public void BuiltTransactionName_RunsThroughMetricNameService() Assert.IsFalse(builtName.ShouldIgnore); Assert.AreEqual("WebTransaction/NewName", builtName.PrefixedName); } + + [Test] + public void BuiltTransactionName_BuildsKafkaMessageBrokerTransactionMetricNameWithQueueName() + { + var transactionName = TransactionName.ForKafkaBrokerTransaction(Extensions.Providers.Wrapper.MessageBrokerDestinationType.Queue, "bar", "baz"); + + var builtName = _transactionMetricNameMaker.GetTransactionMetricName(transactionName); + + Assert.IsFalse(builtName.ShouldIgnore); + Assert.AreEqual("OtherTransaction/Message/bar/Queue/Consume/Named/baz", builtName.PrefixedName); + } + + [Test] + public void BuiltTransactionName_BuildsKafkaMessageBrokerTransactionMetricNameWithTemp_IfEmptyDestinationSpecified() + { + var transactionName = TransactionName.ForKafkaBrokerTransaction(Extensions.Providers.Wrapper.MessageBrokerDestinationType.Queue, "bar", ""); + + var builtName = _transactionMetricNameMaker.GetTransactionMetricName(transactionName); + + Assert.IsFalse(builtName.ShouldIgnore); + Assert.AreEqual("OtherTransaction/Message/bar/Queue/Consume/Named/Temp", builtName.PrefixedName); + } } } diff --git a/tests/Agent/UnitTests/Core.UnitTest/WireModels/MetricWireModelTests.cs b/tests/Agent/UnitTests/Core.UnitTest/WireModels/MetricWireModelTests.cs index 12e2c8b93..6ee017e9e 100644 --- a/tests/Agent/UnitTests/Core.UnitTest/WireModels/MetricWireModelTests.cs +++ b/tests/Agent/UnitTests/Core.UnitTest/WireModels/MetricWireModelTests.cs @@ -715,6 +715,52 @@ public void BuildSupportabilityLoggingEventsDroppedMetric() ); } + [Test] + public void 
BuildCountMetric() + { + const string metricName = "Some/Metric/Name"; + const int count = 999; + + var actual = _metricBuilder.TryBuildCountMetric(metricName, count); + + NrAssert.Multiple( + () => Assert.AreEqual(metricName, actual.MetricName.Name), + () => Assert.IsNull(actual.MetricName.Scope), + () => Assert.AreEqual(count, actual.Data.Value0) + ); + } + + [Test] + public void BuildByteMetric() + { + const string metricName = "Some/Metric/Name"; + const long byteCount = 1024 * 1024 * 1024; + + var actual = _metricBuilder.TryBuildByteMetric(metricName, byteCount); + + NrAssert.Multiple( + () => Assert.AreEqual(metricName, actual.MetricName.Name), + () => Assert.IsNull(actual.MetricName.Scope), + () => Assert.AreEqual(MetricDataWireModel.BuildByteData(byteCount), actual.Data) + ); + } + + [Test] + public void BuildByteMetric_WithExclusiveBytes() + { + const string metricName = "Some/Metric/Name"; + const long totalBytes = 1024 * 1024 * 1024; + const long exclusiveBytes = 1024 * 1024 * 128; + + var actual = _metricBuilder.TryBuildByteMetric(metricName, totalBytes, exclusiveBytes); + + NrAssert.Multiple( + () => Assert.AreEqual(metricName, actual.MetricName.Name), + () => Assert.IsNull(actual.MetricName.Scope), + () => Assert.AreEqual(MetricDataWireModel.BuildByteData(totalBytes, exclusiveBytes), actual.Data) + ); + } + #endregion #region DistributedTracing diff --git a/tests/Agent/UnitTests/Core.UnitTest/Wrapper/AgentWrapperApi/AgentWrapperApiTests.cs b/tests/Agent/UnitTests/Core.UnitTest/Wrapper/AgentWrapperApi/AgentWrapperApiTests.cs index 391787770..3b9370aa0 100644 --- a/tests/Agent/UnitTests/Core.UnitTest/Wrapper/AgentWrapperApi/AgentWrapperApiTests.cs +++ b/tests/Agent/UnitTests/Core.UnitTest/Wrapper/AgentWrapperApi/AgentWrapperApiTests.cs @@ -290,6 +290,19 @@ public void SetMessageBrokerTransactionName_SetsMessageBrokerTransactionName() Assert.AreEqual(false, addedTransactionName.IsWeb); } + [Test] + public void SetKafkaMessageBrokerTransactionName_SetsKafkaMessageBrokerTransactionName() + { + const TransactionNamePriority priority = TransactionNamePriority.FrameworkHigh; + SetupTransaction(); + + _agent.CurrentTransaction.SetKafkaMessageBrokerTransactionName(MessageBrokerDestinationType.Topic, "broker", "dest", priority); + + var addedTransactionName = _transaction.CandidateTransactionName.CurrentTransactionName; + Assert.AreEqual("Message/broker/Topic/Consume/Named/dest", addedTransactionName.UnprefixedName); + Assert.AreEqual(false, addedTransactionName.IsWeb); + } + [Test] public void SetOtherTransactionName_SetsOtherTransactionName() {