From f24a5daa2cc6117a73eac96f2a45d738335d063a Mon Sep 17 00:00:00 2001 From: Marty T <120425148+tippmar-nr@users.noreply.github.com> Date: Thu, 24 Oct 2024 16:23:39 -0500 Subject: [PATCH 01/19] feat: New Garbage Collection Metrics Sampler for .NET 6+ (#2838) --- .../Core/AgentHealth/AgentHealthReporter.cs | 11 + src/Agent/NewRelic/Agent/Core/AgentManager.cs | 4 +- .../Core/Config/BootstrapConfiguration.cs | 36 ++++ .../Configuration/DefaultConfiguration.cs | 6 +- .../Configuration/ReportedConfiguration.cs | 2 + .../Core/DependencyInjection/AgentServices.cs | 29 ++- .../Agent/Core/Metrics/MetricNames.cs | 15 ++ .../Agent/Core/Samplers/GCSampleType.cs | 90 +++++++++ .../Agent/Core/Samplers/GCSamplerV2.cs | 75 +++++++ .../Samplers/GCSamplerV2ReflectionHelper.cs | 112 +++++++++++ .../NewRelic/Agent/Core/Samplers/GcSampler.cs | 24 +-- .../Agent/Core/Samplers/ImmutableGCSample.cs | 67 +++++++ .../Transformers/GCSampleTransformerV2.cs | 129 ++++++++++++ .../Configuration/IConfiguration.cs | 2 + .../Reflection/VisibilityBypasser.cs | 34 ++++ .../NewRelicConfigModifier.cs | 9 + .../AgentMetrics/DotNetPerfMetricsTests.cs | 61 +++++- .../CompositeTests/CompositeTestAgent.cs | 6 +- .../AgentHealth/AgentHealthReporterTests.cs | 9 + .../Config/BootstrapConfigurationTests.cs | 67 +++++++ .../DataTransport/AgentSettingsTests.cs | 3 +- .../DataTransport/ConnectModelTests.cs | 3 +- .../ExhaustiveTestConfiguration.cs | 4 +- .../DependencyInjection/AgentServicesTests.cs | 65 +++++- .../Core.UnitTest/Metrics/MetricNamesTests.cs | 14 ++ .../Samplers/GCSamplerV2Tests.cs | 105 ++++++++++ .../Samplers/ImmutableGCSampleTests.cs | 67 +++++++ .../GCSampleTransformerV2Tests.cs | 188 ++++++++++++++++++ .../GCStatsSampleTransformerTests.cs | 7 +- .../Transformers/MetricTestHelpers.cs | 17 +- .../Reflection/VisibilityBypasserTests.cs | 67 ++++++- 31 files changed, 1272 insertions(+), 56 deletions(-) create mode 100644 src/Agent/NewRelic/Agent/Core/Samplers/GCSampleType.cs create mode 100644 src/Agent/NewRelic/Agent/Core/Samplers/GCSamplerV2.cs create mode 100644 src/Agent/NewRelic/Agent/Core/Samplers/GCSamplerV2ReflectionHelper.cs create mode 100644 src/Agent/NewRelic/Agent/Core/Samplers/ImmutableGCSample.cs create mode 100644 src/Agent/NewRelic/Agent/Core/Transformers/GCSampleTransformerV2.cs create mode 100644 tests/Agent/UnitTests/Core.UnitTest/Samplers/GCSamplerV2Tests.cs create mode 100644 tests/Agent/UnitTests/Core.UnitTest/Samplers/ImmutableGCSampleTests.cs create mode 100644 tests/Agent/UnitTests/Core.UnitTest/Transformers/GCSampleTransformerV2Tests.cs diff --git a/src/Agent/NewRelic/Agent/Core/AgentHealth/AgentHealthReporter.cs b/src/Agent/NewRelic/Agent/Core/AgentHealth/AgentHealthReporter.cs index 15dc94c244..53fce6b123 100644 --- a/src/Agent/NewRelic/Agent/Core/AgentHealth/AgentHealthReporter.cs +++ b/src/Agent/NewRelic/Agent/Core/AgentHealth/AgentHealthReporter.cs @@ -683,6 +683,7 @@ private void CollectOneTimeMetrics() ReportInfiniteTracingOneTimeMetrics(); ReportIfLoggingDisabled(); ReportIfInstrumentationIsDisabled(); + ReportIfGCSamplerV2IsEnabled(); } public void CollectMetrics() @@ -838,5 +839,15 @@ private void ReportIfInstrumentationIsDisabled() ReportSupportabilityGaugeMetric(MetricNames.SupportabilityIgnoredInstrumentation, ignoredCount); } } + + private void ReportIfGCSamplerV2IsEnabled() + { + if (_configuration.GCSamplerV2Enabled) + { + ReportSupportabilityCountMetric(MetricNames.SupportabilityGCSamplerV2Enabled); + } + + } + } } diff --git a/src/Agent/NewRelic/Agent/Core/AgentManager.cs 
b/src/Agent/NewRelic/Agent/Core/AgentManager.cs index 718fc0896c..00d9f31666 100644 --- a/src/Agent/NewRelic/Agent/Core/AgentManager.cs +++ b/src/Agent/NewRelic/Agent/Core/AgentManager.cs @@ -115,7 +115,7 @@ private AgentManager() } _container = AgentServices.GetContainer(); - AgentServices.RegisterServices(_container, bootstrapConfig.ServerlessModeEnabled); + AgentServices.RegisterServices(_container, bootstrapConfig.ServerlessModeEnabled, bootstrapConfig.GCSamplerV2Enabled); // Resolve IConfigurationService (so that it starts listening to config change events) and then publish the serialized event _container.Resolve(); @@ -162,7 +162,7 @@ private AgentManager() Log.Info("The New Relic agent is operating in serverless mode."); } - AgentServices.StartServices(_container, bootstrapConfig.ServerlessModeEnabled); + AgentServices.StartServices(_container, bootstrapConfig.ServerlessModeEnabled, bootstrapConfig.GCSamplerV2Enabled); // Setup the internal API first so that AgentApi can use it. InternalApi.SetAgentApiImplementation(agentApi); diff --git a/src/Agent/NewRelic/Agent/Core/Config/BootstrapConfiguration.cs b/src/Agent/NewRelic/Agent/Core/Config/BootstrapConfiguration.cs index 627945ef39..677896d980 100644 --- a/src/Agent/NewRelic/Agent/Core/Config/BootstrapConfiguration.cs +++ b/src/Agent/NewRelic/Agent/Core/Config/BootstrapConfiguration.cs @@ -2,11 +2,14 @@ // SPDX-License-Identifier: Apache-2.0 using System; +using System.Collections.Generic; using System.IO; +using System.Linq; using NewRelic.Agent.Core.Configuration; using NewRelic.Agent.Core.Utilities; using NewRelic.Agent.Extensions.Logging; using NewRelic.Agent.Core.SharedInterfaces; +using NewRelic.Agent.Extensions.SystemExtensions.Collections.Generic; namespace NewRelic.Agent.Core.Config { @@ -21,6 +24,7 @@ public interface IBootstrapConfiguration string ServerlessFunctionName { get; } string ServerlessFunctionVersion { get; } bool AzureFunctionModeDetected { get; } + bool GCSamplerV2Enabled { get; } } /// @@ -64,6 +68,7 @@ public BootstrapConfiguration(configuration localConfiguration, string configura public BootstrapConfiguration(configuration localConfiguration, string configurationFileName, Func> getWebConfigSettingWithProvenance, IConfigurationManagerStatic configurationManagerStatic, IProcessStatic processStatic, Predicate checkDirectoryExists, Func getFullPath) { ServerlessModeEnabled = CheckServerlessModeEnabled(localConfiguration); + GCSamplerV2Enabled = CheckGCSamplerV2Enabled(TryGetAppSettingAsBoolWithDefault(localConfiguration, "GCSamplerV2Enabled", false)); DebugStartupDelaySeconds = localConfiguration.debugStartupDelaySeconds; ConfigurationFileName = configurationFileName; LogConfig = new BootstrapLogConfig(localConfiguration.log, processStatic, checkDirectoryExists, getFullPath); @@ -133,6 +138,8 @@ public string AgentEnabledAt public bool AzureFunctionModeDetected => ConfigLoaderHelpers.GetEnvironmentVar("FUNCTIONS_WORKER_RUNTIME") != null; + public bool GCSamplerV2Enabled { get; private set;} + private bool CheckServerlessModeEnabled(configuration localConfiguration) { // We may need these later even if we don't use it now. 
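For reference, the new bootstrap flag is effectively resolved as in the sketch below. This is an illustrative restatement of CheckGCSamplerV2Enabled and TryGetAppSettingAsBoolWithDefault from the following hunks, not additional agent code.

// Sketch only: either the local appSettings value or the environment variable can enable the sampler.
private bool ResolveGCSamplerV2Enabled(configuration localConfiguration)
{
    // <appSettings><add key="GCSamplerV2Enabled" value="true" /></appSettings> in newrelic.config
    var fromAppSettings = TryGetAppSettingAsBoolWithDefault(localConfiguration, "GCSamplerV2Enabled", false);

    // NEW_RELIC_GC_SAMPLER_V2_ENABLED, parsed with the same TryToBoolean extension used below
    var fromEnvironment = ConfigLoaderHelpers.GetEnvironmentVar("NEW_RELIC_GC_SAMPLER_V2_ENABLED")
        .TryToBoolean(out var parsed) && parsed;

    return fromAppSettings || fromEnvironment;
}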
@@ -154,6 +161,11 @@ private bool CheckServerlessModeEnabled(configuration localConfiguration) return localConfiguration.serverlessModeEnabled; } + private bool CheckGCSamplerV2Enabled(bool localConfigurationGcSamplerV2Enabled) + { + return localConfigurationGcSamplerV2Enabled || (ConfigLoaderHelpers.GetEnvironmentVar("NEW_RELIC_GC_SAMPLER_V2_ENABLED").TryToBoolean(out var enabledViaEnvVariable) && enabledViaEnvVariable); + } + private void SetAgentEnabledValues() { _agentEnabledWithProvenance = TryGetAgentEnabledFromWebConfig(); @@ -204,6 +216,30 @@ private ValueWithProvenance TryGetAgentEnabledSetting(Func TransformAppSettings(configuration localConfiguration) + { + if (localConfiguration.appSettings == null) + return new Dictionary(); + + return localConfiguration.appSettings + .Where(setting => setting != null) + .Select(setting => new KeyValuePair(setting.key, setting.value)) + .ToDictionary(IEnumerableExtensions.DuplicateKeyBehavior.KeepFirst); + } + + private bool TryGetAppSettingAsBoolWithDefault(configuration localConfiguration, string key, bool defaultValue) + { + var value = TransformAppSettings(localConfiguration).GetValueOrDefault(key); + + bool parsedBool; + var parsedSuccessfully = bool.TryParse(value, out parsedBool); + if (!parsedSuccessfully) + return defaultValue; + + return parsedBool; + } + + private class BootstrapLogConfig : ILogConfig { private readonly string _directoryFromLocalConfig; diff --git a/src/Agent/NewRelic/Agent/Core/Configuration/DefaultConfiguration.cs b/src/Agent/NewRelic/Agent/Core/Configuration/DefaultConfiguration.cs index 0db186bbfb..e87e7b1c80 100644 --- a/src/Agent/NewRelic/Agent/Core/Configuration/DefaultConfiguration.cs +++ b/src/Agent/NewRelic/Agent/Core/Configuration/DefaultConfiguration.cs @@ -1912,7 +1912,6 @@ public bool UtilizationDetectAzureFunction } } - public int? 
UtilizationLogicalProcessors { get @@ -2163,7 +2162,8 @@ public string AzureFunctionResourceIdWithFunctionName(string functionName) return string.Empty; } - return $"{AzureFunctionResourceId}/functions/{functionName}"; } + return $"{AzureFunctionResourceId}/functions/{functionName}"; + } public string AzureFunctionResourceGroupName { @@ -2466,6 +2466,8 @@ public TimeSpan StackExchangeRedisCleanupCycle } } + public bool GCSamplerV2Enabled => _bootstrapConfiguration.GCSamplerV2Enabled; + #endregion #region Helpers diff --git a/src/Agent/NewRelic/Agent/Core/Configuration/ReportedConfiguration.cs b/src/Agent/NewRelic/Agent/Core/Configuration/ReportedConfiguration.cs index ec4c150f0a..c60251e4a8 100644 --- a/src/Agent/NewRelic/Agent/Core/Configuration/ReportedConfiguration.cs +++ b/src/Agent/NewRelic/Agent/Core/Configuration/ReportedConfiguration.cs @@ -711,6 +711,8 @@ public ReportedConfiguration(IConfiguration configuration) public string AzureFunctionResourceIdWithFunctionName(string functionName) => _configuration.AzureFunctionResourceIdWithFunctionName(functionName); + [JsonProperty("gc_sampler_v2.enabled")] + public bool GCSamplerV2Enabled => _configuration.GCSamplerV2Enabled; public IReadOnlyDictionary GetAppSettings() { diff --git a/src/Agent/NewRelic/Agent/Core/DependencyInjection/AgentServices.cs b/src/Agent/NewRelic/Agent/Core/DependencyInjection/AgentServices.cs index a5a6bc9f69..0b424e3434 100644 --- a/src/Agent/NewRelic/Agent/Core/DependencyInjection/AgentServices.cs +++ b/src/Agent/NewRelic/Agent/Core/DependencyInjection/AgentServices.cs @@ -3,7 +3,9 @@ using System; using System.Collections.Generic; +#if NETFRAMEWORK using System.Threading; +#endif using NewRelic.Agent.Api; using NewRelic.Agent.Configuration; using NewRelic.Agent.Core.AgentHealth; @@ -58,7 +60,8 @@ public static IContainer GetContainer() /// /// /// - public static void RegisterServices(IContainer container, bool serverlessModeEnabled) + /// + public static void RegisterServices(IContainer container, bool serverlessModeEnabled, bool gcSamplerV2Enabled) { // we register this factory instead of just loading the storage contexts here because deferring the logic gives us a logger container.RegisterFactory>(ExtensionsLoader.LoadContextStorageFactories); @@ -91,9 +94,18 @@ public static void RegisterServices(IContainer container, bool serverlessModeEna container.Register(); container.Register(); #else - container.RegisterInstance>>>(() => new GCEventsListener()); - container.RegisterInstance>(GCSamplerNetCore.FXsamplerIsApplicableToFrameworkDefault); - container.Register(); + if (gcSamplerV2Enabled) + { + container.Register(); + container.Register(); + container.Register(); + } + else + { + container.RegisterInstance>>>(() => new GCEventsListener()); + container.RegisterInstance>(GCSamplerNetCore.FXsamplerIsApplicableToFrameworkDefault); + container.Register(); + } #endif container.Register(); @@ -225,7 +237,7 @@ public static void RegisterServices(IContainer container, bool serverlessModeEna /// /// Starts all of the services needed by resolving them. 
/// - public static void StartServices(IContainer container, bool serverlessModeEnabled) + public static void StartServices(IContainer container, bool serverlessModeEnabled, bool gcSamplerV2Enabled) { if (!serverlessModeEnabled) container.Resolve(); @@ -242,7 +254,12 @@ public static void StartServices(IContainer container, bool serverlessModeEnable samplerStartThread.Start(); #else if (!serverlessModeEnabled) - container.Resolve().Start(); + { + if (!gcSamplerV2Enabled) + container.Resolve().Start(); + else + container.Resolve().Start(); + } #endif if (!serverlessModeEnabled) { diff --git a/src/Agent/NewRelic/Agent/Core/Metrics/MetricNames.cs b/src/Agent/NewRelic/Agent/Core/Metrics/MetricNames.cs index 43851cf6f8..2ea94148d3 100644 --- a/src/Agent/NewRelic/Agent/Core/Metrics/MetricNames.cs +++ b/src/Agent/NewRelic/Agent/Core/Metrics/MetricNames.cs @@ -837,6 +837,7 @@ public static string GetSupportabilityInstallType(string installType) public const string SupportabilityLoggingFatalError = "Supportability/DotNET/AgentLogging/DisabledDueToError"; public const string SupportabilityIgnoredInstrumentation = SupportabilityDotnetPs + "IgnoredInstrumentation"; + public const string SupportabilityGCSamplerV2Enabled = SupportabilityDotnetPs + "GCSamplerV2/Enabled"; #endregion Supportability @@ -1034,6 +1035,20 @@ public static string GetThreadpoolThroughputStatsName(ThreadpoolThroughputStatsT { GCSampleType.LOHSize , "GC/LOH/Size" }, { GCSampleType.LOHSurvived, "GC/LOH/Survived" }, + + { GCSampleType.LOHCollectionCount, "GC/LOH/Collections" }, + { GCSampleType.POHCollectionCount, "GC/POH/Collections" }, + + { GCSampleType.TotalHeapMemory, "GC/Heap/Total" }, + { GCSampleType.TotalCommittedMemory, "GC/Heap/Committed" }, + { GCSampleType.TotalAllocatedMemory, "GC/Heap/Allocated" }, + + { GCSampleType.Gen0FragmentationSize, "GC/Gen0/Fragmentation" }, + { GCSampleType.Gen1FragmentationSize, "GC/Gen1/Fragmentation" }, + { GCSampleType.Gen2FragmentationSize, "GC/Gen2/Fragmentation" }, + { GCSampleType.LOHFragmentationSize, "GC/LOH/Fragmentation" }, + { GCSampleType.POHFragmentationSize, "GC/POH/Fragmentation" }, + { GCSampleType.POHSize, "GC/POH/Size" } }; public static string GetGCMetricName(GCSampleType sampleType) diff --git a/src/Agent/NewRelic/Agent/Core/Samplers/GCSampleType.cs b/src/Agent/NewRelic/Agent/Core/Samplers/GCSampleType.cs new file mode 100644 index 0000000000..26b9701f80 --- /dev/null +++ b/src/Agent/NewRelic/Agent/Core/Samplers/GCSampleType.cs @@ -0,0 +1,90 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. 
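As a quick reference for the metric-name additions above, the new GCSampleType values resolve through the existing GetGCMetricName lookup; a minimal usage sketch, with names taken from the dictionary entries in that hunk:

// Sketch only: how the new sample types map to their reported metric names.
var pohSize        = MetricNames.GetGCMetricName(GCSampleType.POHSize);              // "GC/POH/Size"
var lohCollections = MetricNames.GetGCMetricName(GCSampleType.LOHCollectionCount);   // "GC/LOH/Collections"
var heapCommitted  = MetricNames.GetGCMetricName(GCSampleType.TotalCommittedMemory); // "GC/Heap/Committed"
var heapAllocated  = MetricNames.GetGCMetricName(GCSampleType.TotalAllocatedMemory); // "GC/Heap/Allocated"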
+// SPDX-License-Identifier: Apache-2.0 + +namespace NewRelic.Agent.Core.Samplers +{ + public enum GCSampleType + { + /// + /// Gen 0 heap size as of the current sample + /// + Gen0Size, + Gen0Promoted, + /// + /// Gen 1 heap size as of the current sample + /// + Gen1Size, + Gen1Promoted, + /// + /// Gen 2 heap size as of the current sample + /// + Gen2Size, + Gen2Survived, + /// + /// Large object heap size as of the current sample + /// + LOHSize, + LOHSurvived, + HandlesCount, + InducedCount, + PercentTimeInGc, + /// + /// Gen 0 heap collection count since the last sample + /// + Gen0CollectionCount, + /// + /// Gen 1 heap collection count since the last sample + /// + Gen1CollectionCount, + /// + /// Gen 2 heap collection count since the last sample + /// + Gen2CollectionCount, + + // the following are supported by GCSamplerV2 only + /// + /// Pinned object heap size + /// + POHSize, + /// + /// Large object heap collection count since the last sample + /// + LOHCollectionCount, + /// + /// Pinned object heap collection count since the last sample + /// + POHCollectionCount, + /// + /// Total heap memory in use as of the current sample + /// + TotalHeapMemory, + /// + /// Total committed memory in use as of the current sample + /// + TotalCommittedMemory, + /// + /// Total heap memory allocated since the last sample + /// + TotalAllocatedMemory, + /// + /// Fragmentation of the Gen 0 heap as of the current sample + /// + Gen0FragmentationSize, + /// + /// Fragmentation of the Gen 1 heap as of the current sample + /// + Gen1FragmentationSize, + /// + /// Fragmentation of the Gen 2 heap as of the current sample + /// + Gen2FragmentationSize, + /// + /// Fragmentation of the Large Object heap as of the current sample + /// + LOHFragmentationSize, + /// + /// Fragmentation of the Pinned Object heap as of the current sample + /// + POHFragmentationSize, + } +} diff --git a/src/Agent/NewRelic/Agent/Core/Samplers/GCSamplerV2.cs b/src/Agent/NewRelic/Agent/Core/Samplers/GCSamplerV2.cs new file mode 100644 index 0000000000..70921a8bea --- /dev/null +++ b/src/Agent/NewRelic/Agent/Core/Samplers/GCSamplerV2.cs @@ -0,0 +1,75 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. +// SPDX-License-Identifier: Apache-2.0 + +#if NETSTANDARD + +using System; +using NewRelic.Agent.Core.Time; +using NewRelic.Agent.Core.Transformers; +using NewRelic.Agent.Extensions.Logging; + +namespace NewRelic.Agent.Core.Samplers +{ + public class GCSamplerV2 : AbstractSampler + { + private readonly IGCSampleTransformerV2 _transformer; + private DateTime _lastSampleTime; + + private IGCSamplerV2ReflectionHelper _gCSamplerV2ReflectionHelper; + private bool _hasGCOccurred; + + private const int GCSamplerV2IntervalSeconds = 60; + + public GCSamplerV2(IScheduler scheduler, IGCSampleTransformerV2 transformer, IGCSamplerV2ReflectionHelper gCSamplerV2ReflectionHelper) + : base(scheduler, TimeSpan.FromSeconds(GCSamplerV2IntervalSeconds)) + { + _transformer = transformer; + _gCSamplerV2ReflectionHelper = gCSamplerV2ReflectionHelper; + _lastSampleTime = DateTime.UtcNow; + } + + public override void Sample() + { + if (_gCSamplerV2ReflectionHelper.ReflectionFailed) + { + Stop(); + Log.Error($"Unable to get GC sample due to reflection error. 
No GC metrics will be reported."); + return; + } + + _hasGCOccurred |= _gCSamplerV2ReflectionHelper.HasGCOccurred; + + if (!_hasGCOccurred) // don't do anything until at least one GC has completed + return; + + dynamic gcMemoryInfo = _gCSamplerV2ReflectionHelper.GCGetMemoryInfo_Invoker(0); // GCKind.Any + dynamic generationInfo = _gCSamplerV2ReflectionHelper.GetGenerationInfo(gcMemoryInfo); + + var genInfoLength = generationInfo.Length; + var heapSizesBytes = new long[genInfoLength]; + var fragmentationSizesBytes = new long[genInfoLength]; + var collectionCounts = new int[genInfoLength]; + + var index = 0; + foreach (var generation in generationInfo) + { + var generationIndex = index++; + heapSizesBytes[generationIndex] = generation.SizeAfterBytes; + fragmentationSizesBytes[generationIndex] = generation.FragmentationAfterBytes; + + collectionCounts[generationIndex] = GC.CollectionCount(generationIndex); + } + + var totalMemoryBytes = GC.GetTotalMemory(false); + var totalAllocatedBytes = (long)_gCSamplerV2ReflectionHelper.GCGetTotalAllocatedBytes_Invoker(false); + var totalCommittedBytes = gcMemoryInfo.TotalCommittedBytes; + + var currentSampleTime = DateTime.UtcNow; + + var sample = new ImmutableGCSample(currentSampleTime, _lastSampleTime, totalMemoryBytes, totalAllocatedBytes, totalCommittedBytes, heapSizesBytes, collectionCounts, fragmentationSizesBytes); + _transformer.Transform(sample); + _lastSampleTime = currentSampleTime; + } + } +} +#endif diff --git a/src/Agent/NewRelic/Agent/Core/Samplers/GCSamplerV2ReflectionHelper.cs b/src/Agent/NewRelic/Agent/Core/Samplers/GCSamplerV2ReflectionHelper.cs new file mode 100644 index 0000000000..9352304e60 --- /dev/null +++ b/src/Agent/NewRelic/Agent/Core/Samplers/GCSamplerV2ReflectionHelper.cs @@ -0,0 +1,112 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. 
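For readers less familiar with the runtime APIs being reached via reflection: Sample() above gathers the same data that the following sketch gathers directly on .NET 6+. The sketch is illustrative only; the agent goes through GCSamplerV2ReflectionHelper because this assembly is compiled for netstandard, where GCMemoryInfo and GC.GetTotalAllocatedBytes are not available at compile time.

// Sketch only: direct .NET 6+ equivalent of the data collected by GCSamplerV2.Sample().
var memoryInfo  = GC.GetGCMemoryInfo(GCKind.Any);
var generations = memoryInfo.GenerationInfo; // gen0, gen1, gen2, LOH, POH

for (var i = 0; i < generations.Length; i++)
{
    long sizeAfterBytes     = generations[i].SizeAfterBytes;          // heap size per generation
    long fragmentationBytes = generations[i].FragmentationAfterBytes; // fragmentation per generation
    int  collectionCount    = GC.CollectionCount(i);                  // cumulative count, as in Sample()
}

long totalHeapBytes      = GC.GetTotalMemory(forceFullCollection: false);
long totalAllocatedBytes = GC.GetTotalAllocatedBytes(precise: false);
long totalCommittedBytes = memoryInfo.TotalCommittedBytes;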
+// SPDX-License-Identifier: Apache-2.0 + +using System; +using System.Linq.Expressions; +using System.Reflection; +using NewRelic.Agent.Extensions.Logging; +using NewRelic.Reflection; + +namespace NewRelic.Agent.Core.Samplers +{ + // to allow for unit testing + public interface IGCSamplerV2ReflectionHelper + { + Func GetGenerationInfo { get; } + bool ReflectionFailed { get; } + Func GCGetMemoryInfo_Invoker { get; } + Func GCGetTotalAllocatedBytes_Invoker { get; } + bool HasGCOccurred { get; } + } + + public class GCSamplerV2ReflectionHelper : IGCSamplerV2ReflectionHelper + { + public Func GetGenerationInfo { get; private set; } + public bool ReflectionFailed { get; private set; } + public Func GCGetMemoryInfo_Invoker { get; private set; } + public Func GCGetTotalAllocatedBytes_Invoker { get; private set; } + + public GCSamplerV2ReflectionHelper() + { + try + { + var assembly = Assembly.Load("System.Runtime"); + var gcType = assembly.GetType("System.GC"); + var paramType = assembly.GetType("System.GCKind"); + var returnType = assembly.GetType("System.GCMemoryInfo"); + + if (!VisibilityBypasser.Instance.TryGenerateOneParameterStaticMethodCaller(gcType, "GetGCMemoryInfo", paramType, returnType, out var accessor)) + { + ReflectionFailed = true; + } + else + GCGetMemoryInfo_Invoker = accessor; + + if (!ReflectionFailed) + { + paramType = typeof(bool); + returnType = typeof(long); + if (!VisibilityBypasser.Instance.TryGenerateOneParameterStaticMethodCaller(gcType, "GetTotalAllocatedBytes", paramType, returnType, out var accessor1)) + { + ReflectionFailed = true; + } + else + GCGetTotalAllocatedBytes_Invoker = accessor1; + } + + if (!ReflectionFailed) + GetGenerationInfo = GCMemoryInfoHelper.GenerateGetMemoryInfoMethod(); + } + catch (Exception e) + { + Log.Warn(e, $"Failed to initialize GCSamplerV2ReflectionHelper."); + ReflectionFailed = true; + } + } + + public bool HasGCOccurred => GC.CollectionCount(0) > 0; + } + + internal static class GCMemoryInfoHelper + { + /// + /// Generate a function that takes a GCMemoryInfo instance as an input parameter and + /// returns an array of GCGenerationInfo instances. 
+ /// + /// Essentially builds the equivalent of + /// object Foo(object input) => ((GCMemoryInfo)input).GenerationInfo.ToArray(); + /// + public static Func GenerateGetMemoryInfoMethod() + { + var assembly = Assembly.Load("System.Runtime"); + var gcMemoryInfoType = assembly.GetType("System.GCMemoryInfo"); + + // Define a parameter expression for the input object + var inputParameter = Expression.Parameter(typeof(object), "input"); + + // Cast the input parameter to GCMemoryInfo + var gcMemoryInfoParameter = Expression.Convert(inputParameter, gcMemoryInfoType); + + // Get the GenerationInfo property + var generationInfoProperty = gcMemoryInfoType.GetProperty("GenerationInfo"); + + // Access the GenerationInfo property + var accessGenerationInfo = Expression.Property(gcMemoryInfoParameter, generationInfoProperty); + + // Get the ReadOnlySpan type using the full type name + var readOnlySpanType = assembly.GetType("System.ReadOnlySpan`1[[System.GCGenerationInfo, System.Private.CoreLib]]"); + + // Get the ToArray method of ReadOnlySpan + var toArrayMethod = readOnlySpanType.GetMethod("ToArray", BindingFlags.Public | BindingFlags.Instance); + + // Call ToArray() on GenerationInfo + var callToArray = Expression.Call(accessGenerationInfo, toArrayMethod); + + // Create a lambda expression + var lambda = Expression.Lambda>(Expression.Convert(callToArray, typeof(object)), inputParameter); + + // Compile the lambda expression into a delegate + return lambda.Compile(); + } + } +} diff --git a/src/Agent/NewRelic/Agent/Core/Samplers/GcSampler.cs b/src/Agent/NewRelic/Agent/Core/Samplers/GcSampler.cs index 5e6e5e8760..10305cd719 100644 --- a/src/Agent/NewRelic/Agent/Core/Samplers/GcSampler.cs +++ b/src/Agent/NewRelic/Agent/Core/Samplers/GcSampler.cs @@ -1,6 +1,8 @@ // Copyright 2020 New Relic, Inc. All rights reserved. // SPDX-License-Identifier: Apache-2.0 +#if NETFRAMEWORK + using System; using System.Collections.Generic; using System.Linq; @@ -12,26 +14,6 @@ namespace NewRelic.Agent.Core.Samplers { - public enum GCSampleType - { - Gen0Size, - Gen0Promoted, - Gen1Size, - Gen1Promoted, - Gen2Size, - Gen2Survived, - LOHSize, - LOHSurvived, - HandlesCount, - InducedCount, - PercentTimeInGc, - Gen0CollectionCount, - Gen1CollectionCount, - Gen2CollectionCount - } - -#if NETFRAMEWORK - public class GcSampler : AbstractSampler { private const string GCPerfCounterCategoryName = ".NET CLR Memory"; @@ -345,5 +327,5 @@ public override void Sample() } } } -#endif } +#endif diff --git a/src/Agent/NewRelic/Agent/Core/Samplers/ImmutableGCSample.cs b/src/Agent/NewRelic/Agent/Core/Samplers/ImmutableGCSample.cs new file mode 100644 index 0000000000..eb7aed3a3b --- /dev/null +++ b/src/Agent/NewRelic/Agent/Core/Samplers/ImmutableGCSample.cs @@ -0,0 +1,67 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. 
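Put plainly, the expression tree built above compiles to the equivalent of the one-liner below; it is written out here only for clarity and would only compile where GCMemoryInfo is visible at compile time.

// Sketch only: what GenerateGetMemoryInfoMethod()'s compiled delegate does.
// ToArray() copies the ReadOnlySpan<GCGenerationInfo> into an array so the result can be
// returned as object and enumerated via dynamic in GCSamplerV2.Sample().
object GetGenerationInfo(object input) => ((GCMemoryInfo)input).GenerationInfo.ToArray();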
+// SPDX-License-Identifier: Apache-2.0 + +using System; + +namespace NewRelic.Agent.Core.Samplers +{ + public class ImmutableGCSample + { + public readonly DateTime LastSampleTime; + public readonly DateTime CurrentSampleTime; + + public readonly long TotalMemoryBytes; // In-use memory on the GC heap as of current GC + public readonly long TotalAllocatedBytes; // total memory allocated on GC heap since process start + public readonly long TotalCommittedBytes;// committed virtual memory as of current GC + + public readonly long[] GCHeapSizesBytes; // heap sizes as of current GC + public readonly int[] GCCollectionCounts; // number of collections since last sample + public readonly long[] GCFragmentationSizesBytes; // heap fragmentation size as of current GC + + public ImmutableGCSample() + { + LastSampleTime = CurrentSampleTime = DateTime.MinValue; + GCHeapSizesBytes = new long[5]; + GCCollectionCounts = new int[5]; + GCFragmentationSizesBytes = new long[5]; + } + + public ImmutableGCSample(DateTime lastSampleTime, DateTime currentSampleTime, long totalMemoryBytes, long totalAllocatedBytes, long totalCommittedBytes, long[] heapSizesBytes, int[] rawCollectionCounts, long[] fragmentationSizesBytes) + { + LastSampleTime = lastSampleTime; + CurrentSampleTime = currentSampleTime; + + TotalMemoryBytes = totalMemoryBytes; + + TotalAllocatedBytes = totalAllocatedBytes; + TotalCommittedBytes = totalCommittedBytes; + + GCHeapSizesBytes = heapSizesBytes; + GCFragmentationSizesBytes = fragmentationSizesBytes; + + // should always be 5, but handle smaller just in case + var collectionLength = rawCollectionCounts.Length; + GCCollectionCounts = new int[5]; // we always report 5 samples + + // Gen 0 + GCCollectionCounts[0] = rawCollectionCounts[0] - rawCollectionCounts[1]; + // Gen 1 + GCCollectionCounts[1] = rawCollectionCounts[1] - rawCollectionCounts[2]; + + // Gen 2 + if (collectionLength > 3) + GCCollectionCounts[2] = rawCollectionCounts[2] - rawCollectionCounts[3]; + else + GCCollectionCounts[2] = rawCollectionCounts[2]; + + // LOH & POH + if (collectionLength == 4) + GCCollectionCounts[3] = rawCollectionCounts[3]; + if (collectionLength > 4) + { + GCCollectionCounts[3] = rawCollectionCounts[3] - rawCollectionCounts[4]; + GCCollectionCounts[4] = rawCollectionCounts[4]; + } + } + } +} diff --git a/src/Agent/NewRelic/Agent/Core/Transformers/GCSampleTransformerV2.cs b/src/Agent/NewRelic/Agent/Core/Transformers/GCSampleTransformerV2.cs new file mode 100644 index 0000000000..4159951a9d --- /dev/null +++ b/src/Agent/NewRelic/Agent/Core/Transformers/GCSampleTransformerV2.cs @@ -0,0 +1,129 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. 
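A worked example may help with the collection-count adjustment in the constructor above. GC.CollectionCount(n) is cumulative and inclusive: a gen 2 collection also increments the gen 0 and gen 1 counters, so exclusive per-generation counts are obtained by subtraction (the constructor applies the same subtraction to the LOH and POH slots). The numbers below match the five-element case in ImmutableGCSampleTests later in this patch.

// Sketch only: rawCollectionCounts = { GC.CollectionCount(0), ..., GC.CollectionCount(4) }
int[] raw = { 5, 4, 3, 2, 1 };
int gen0 = raw[0] - raw[1]; // 1 -- collections that stopped at gen 0
int gen1 = raw[1] - raw[2]; // 1
int gen2 = raw[2] - raw[3]; // 1
int loh  = raw[3] - raw[4]; // 1
int poh  = raw[4];          // 1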
+// SPDX-License-Identifier: Apache-2.0 + +using System.Collections.Generic; +using NewRelic.Agent.Core.Aggregators; +using NewRelic.Agent.Core.Samplers; +using NewRelic.Agent.Core.WireModels; + +namespace NewRelic.Agent.Core.Transformers +{ + public interface IGCSampleTransformerV2 + { + void Transform(ImmutableGCSample sample); + } + + public class GCSampleTransformerV2 : IGCSampleTransformerV2 + { + private readonly IMetricBuilder _metricBuilder; + private readonly IMetricAggregator _metricAggregator; + + // public for testing purposes only + public ImmutableGCSample PreviousSample { get; private set; } + public ImmutableGCSample CurrentSample {get; private set;} = new(); + + public GCSampleTransformerV2(IMetricBuilder metricBuilder, IMetricAggregator metricAggregator) + { + _metricBuilder = metricBuilder; + _metricAggregator = metricAggregator; + } + + public void Transform(ImmutableGCSample sample) + { + PreviousSample = CurrentSample; + CurrentSample = sample; + + var metrics = BuildMetrics(); + RecordMetrics(metrics); + + } + + private List BuildMetrics() + { + var metrics = new List + { + CreateMetric_ByteData(GCSampleType.Gen0Size, CurrentSample.GCHeapSizesBytes[0]), + CreateMetric_Count(GCSampleType.Gen0CollectionCount, PreviousSample.GCCollectionCounts[0], CurrentSample.GCCollectionCounts[0]), + CreateMetric_ByteData(GCSampleType.Gen0FragmentationSize, CurrentSample.GCFragmentationSizesBytes[0]), + + CreateMetric_ByteData(GCSampleType.Gen1Size, CurrentSample.GCHeapSizesBytes[1]), + CreateMetric_Count(GCSampleType.Gen1CollectionCount, PreviousSample.GCCollectionCounts[1], CurrentSample.GCCollectionCounts[1]), + CreateMetric_ByteData(GCSampleType.Gen1FragmentationSize, CurrentSample.GCFragmentationSizesBytes[1]), + + CreateMetric_ByteData(GCSampleType.Gen2Size, CurrentSample.GCHeapSizesBytes[2]), + CreateMetric_Count(GCSampleType.Gen2CollectionCount, PreviousSample.GCCollectionCounts[2], CurrentSample.GCCollectionCounts[2]), + CreateMetric_ByteData(GCSampleType.Gen2FragmentationSize, CurrentSample.GCFragmentationSizesBytes[2]), + + CreateMetric_ByteData(GCSampleType.LOHSize, CurrentSample.GCHeapSizesBytes[3]), + CreateMetric_Count(GCSampleType.LOHCollectionCount, PreviousSample.GCCollectionCounts[3], CurrentSample.GCCollectionCounts[3]), + CreateMetric_ByteData(GCSampleType.LOHFragmentationSize, CurrentSample.GCFragmentationSizesBytes[3]), + + CreateMetric_ByteData(GCSampleType.POHSize, CurrentSample.GCHeapSizesBytes[4]), + CreateMetric_Count(GCSampleType.POHCollectionCount, PreviousSample.GCCollectionCounts[4], CurrentSample.GCCollectionCounts[4]), + CreateMetric_ByteData(GCSampleType.POHFragmentationSize, CurrentSample.GCFragmentationSizesBytes[4]), + + CreateMetric_ByteData(GCSampleType.TotalHeapMemory, CurrentSample.TotalMemoryBytes), + CreateMetric_ByteData(GCSampleType.TotalCommittedMemory, CurrentSample.TotalCommittedBytes), + + CreateMetric_ByteDataDelta(GCSampleType.TotalAllocatedMemory, PreviousSample.TotalAllocatedBytes, CurrentSample.TotalAllocatedBytes), + }; + + return metrics; + } + + private void RecordMetrics(List metrics) + { + foreach (var metric in metrics) + { + _metricAggregator.Collect(metric); + } + } + + /// + /// Create a byte data metric representing the current value + /// + /// + /// + /// + private MetricWireModel CreateMetric_ByteData(GCSampleType sampleType, long currentValueBytes) + { + return _metricBuilder.TryBuildGCBytesMetric(sampleType, currentValueBytes); + } + + /// + /// Create a byte data metric that is the difference between the current 
value and previous value + /// + /// + /// + /// + /// + private MetricWireModel CreateMetric_ByteDataDelta(GCSampleType sampleType, long previousValueBytes, long currentValueBytes) + { + var sampleValueBytes = currentValueBytes - previousValueBytes; + if (sampleValueBytes < 0) + { + sampleValueBytes = 0; + } + return _metricBuilder.TryBuildGCBytesMetric(sampleType, sampleValueBytes); + } + + /// + /// Create a count metric that is the difference between the current value and previous value + /// + /// + /// + /// + /// + private MetricWireModel CreateMetric_Count(GCSampleType sampleType, long previousValue, long currentValue) + { + var sampleValue = currentValue - previousValue; + if (sampleValue < 0) + { + sampleValue = 0; + } + + return _metricBuilder.TryBuildGCCountMetric(sampleType, (int)sampleValue); + } + + } +} diff --git a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Configuration/IConfiguration.cs b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Configuration/IConfiguration.cs index 320b7d6dbd..2c70b0d8f7 100644 --- a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Configuration/IConfiguration.cs +++ b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Configuration/IConfiguration.cs @@ -231,5 +231,7 @@ public interface IConfiguration string AzureFunctionResourceIdWithFunctionName(string functionName); bool UtilizationDetectAzureFunction { get; } + + bool GCSamplerV2Enabled { get; } } } diff --git a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Reflection/VisibilityBypasser.cs b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Reflection/VisibilityBypasser.cs index a9c61d1dbb..064c7b3fc5 100644 --- a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Reflection/VisibilityBypasser.cs +++ b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Reflection/VisibilityBypasser.cs @@ -297,6 +297,27 @@ private static Func GenerateMethodCallerInternal(Type ownerType, return GenerateMethodCallerInternal(resultType, methodInfo); } + public bool TryGenerateOneParameterStaticMethodCaller(Type ownerType, string methodName, Type paramType, Type returnType, out Func accessor) + { + try + { + var methodInfo = ownerType.GetMethod(methodName, BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance | BindingFlags.Static, null, new Type[] { paramType }, null); + if (methodInfo == null) + { + accessor = null; + return false; + } + + accessor = (object param) => methodInfo.Invoke(null, new object[] { param }); + return true; + } + catch + { + accessor = null; + return false; + } + } + private static Func GenerateMethodCallerInternal(Type ownerType, Type resultType, Type parameterType, string methodName) { var methodInfo = GetMethodInfo(ownerType, methodName); @@ -657,6 +678,19 @@ public Func GenerateParameterlessStaticMethodCaller(string ass return (Func)methodInfo.CreateDelegate(typeof(Func)); } + public bool TryGenerateParameterlessStaticMethodCaller(Type ownerType, string methodName, out Func accessor) + { + var methodInfo = ownerType.GetMethod(methodName, BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance | BindingFlags.Static); + if (methodInfo == null) + { + accessor = null; + return false; + } + + accessor = (Func)methodInfo.CreateDelegate(typeof(Func)); + return true; + } + private static PropertyInfo GetPropertyInfo(Type type, string propertyName) { var propertyInfo = type.GetProperty(propertyName, BindingFlags.Instance | BindingFlags.Static | 
BindingFlags.NonPublic | BindingFlags.Public); diff --git a/tests/Agent/IntegrationTests/IntegrationTestHelpers/NewRelicConfigModifier.cs b/tests/Agent/IntegrationTests/IntegrationTestHelpers/NewRelicConfigModifier.cs index 4a603a7b1d..230d2fffeb 100644 --- a/tests/Agent/IntegrationTests/IntegrationTestHelpers/NewRelicConfigModifier.cs +++ b/tests/Agent/IntegrationTests/IntegrationTestHelpers/NewRelicConfigModifier.cs @@ -503,5 +503,14 @@ public NewRelicConfigModifier SetDisableFileSystemWatcher(bool enabled = true) CommonUtils.ModifyOrCreateXmlAttributeInNewRelicConfig(_configFilePath, new[] { "configuration", "service" }, "disableFileSystemWatcher", enabled.ToString().ToLower()); return this; } + + public NewRelicConfigModifier EnableGCSamplerV2(bool enabled) + { + CommonUtils.ModifyOrCreateXmlNodeInNewRelicConfig(_configFilePath, new[] { "configuration" }, "appSettings", string.Empty); + CommonUtils.ModifyOrCreateXmlNodeInNewRelicConfig(_configFilePath, new[] { "configuration", "appSettings" }, "add", string.Empty); + CommonUtils.ModifyOrCreateXmlAttributeInNewRelicConfig(_configFilePath, new[] { "configuration", "appSettings", "add"}, "key", "GCSamplerV2Enabled"); + CommonUtils.ModifyOrCreateXmlAttributeInNewRelicConfig(_configFilePath, new[] { "configuration", "appSettings", "add"}, "value", $"{enabled}"); + return this; + } } } diff --git a/tests/Agent/IntegrationTests/IntegrationTests/AgentMetrics/DotNetPerfMetricsTests.cs b/tests/Agent/IntegrationTests/IntegrationTests/AgentMetrics/DotNetPerfMetricsTests.cs index 883ed350df..e482ce98ac 100644 --- a/tests/Agent/IntegrationTests/IntegrationTests/AgentMetrics/DotNetPerfMetricsTests.cs +++ b/tests/Agent/IntegrationTests/IntegrationTests/AgentMetrics/DotNetPerfMetricsTests.cs @@ -16,7 +16,7 @@ namespace NewRelic.Agent.IntegrationTests.AgentMetrics public class DotNetPerfMetricsTestsFW : DotNetPerfMetricsTests { public DotNetPerfMetricsTestsFW(ConsoleDynamicMethodFixtureFWLatest fixture, ITestOutputHelper output) - : base(fixture, output) + : base(fixture, output, false) { } @@ -27,7 +27,7 @@ public DotNetPerfMetricsTestsFW(ConsoleDynamicMethodFixtureFWLatest fixture, ITe public class DotNetPerfMetricsTestsCoreOldest : DotNetPerfMetricsTests { public DotNetPerfMetricsTestsCoreOldest(ConsoleDynamicMethodFixtureCoreOldest fixture, ITestOutputHelper output) - : base(fixture, output) + : base(fixture, output, false) { } @@ -38,13 +38,34 @@ public DotNetPerfMetricsTestsCoreOldest(ConsoleDynamicMethodFixtureCoreOldest fi public class DotNetPerfMetricsTestsCoreLatest : DotNetPerfMetricsTests { public DotNetPerfMetricsTestsCoreLatest(ConsoleDynamicMethodFixtureCoreLatest fixture, ITestOutputHelper output) - : base(fixture, output) + : base(fixture, output, false) { } protected override string[] ExpectedMetricNames_GC => ExpectedMetricNames_GC_NetCore; } + [NetCoreTest] + public class DotNetPerfMetricsTestsGCSamplerV2CoreOldest : DotNetPerfMetricsTests + { + public DotNetPerfMetricsTestsGCSamplerV2CoreOldest(ConsoleDynamicMethodFixtureCoreOldest fixture, ITestOutputHelper output) + : base(fixture, output, true) + { + } + + protected override string[] ExpectedMetricNames_GC => ExpectedMetricNames_GC_V2; + } + + [NetCoreTest] + public class DotNetPerfMetricsTestsGCSamplerV2CoreLatest : DotNetPerfMetricsTests + { + public DotNetPerfMetricsTestsGCSamplerV2CoreLatest(ConsoleDynamicMethodFixtureCoreLatest fixture, ITestOutputHelper output) + : base(fixture, output, true) + { + } + + protected override string[] ExpectedMetricNames_GC => 
ExpectedMetricNames_GC_V2; + } public abstract class DotNetPerfMetricsTests : NewRelicIntegrationTest where TFixture : ConsoleDynamicMethodFixture { @@ -58,8 +79,9 @@ public abstract class DotNetPerfMetricsTests : NewRelicIntegrationTest protected const string METRICNAME_THREADPOOL_COMPLETION_INUSE = "Threadpool/Completion/InUse"; protected readonly TFixture Fixture; + private readonly bool _gcSamplerV2Enabled; - protected abstract string[] ExpectedMetricNames_GC { get; } + protected abstract string[] ExpectedMetricNames_GC { get;} protected string[] ExpectedMetricNames_GC_NetFramework => new string[] { "GC/Gen0/Size", @@ -91,6 +113,29 @@ public abstract class DotNetPerfMetricsTests : NewRelicIntegrationTest "GC/Gen1/Collections", "GC/Gen2/Collections" }; + + protected string[] ExpectedMetricNames_GC_V2 => new string[] + { + "GC/Gen0/Size", + "GC/Gen0/Fragmentation", + "GC/Gen1/Size", + "GC/Gen1/Fragmentation", + "GC/Gen2/Size", + "GC/Gen2/Fragmentation", + "GC/LOH/Size", + "GC/LOH/Fragmentation", + "GC/POH/Size", + "GC/POH/Fragmentation", + "GC/Gen0/Collections", + "GC/Gen1/Collections", + "GC/Gen2/Collections", + "GC/LOH/Collections", + "GC/POH/Collections", + "GC/Heap/Total", + "GC/Heap/Committed", + "GC/Heap/Allocated" + }; + protected string[] ExpectedMetricNames_Memory => new string[] { "Memory/Physical", @@ -113,9 +158,12 @@ public abstract class DotNetPerfMetricsTests : NewRelicIntegrationTest "Threadpool/Throughput/QueueLength" }; - public DotNetPerfMetricsTests(TFixture fixture, ITestOutputHelper output) : base(fixture) + public DotNetPerfMetricsTests(TFixture fixture, ITestOutputHelper output, bool gcSamplerV2Enabled) : base(fixture) { Fixture = fixture; + + _gcSamplerV2Enabled = gcSamplerV2Enabled; + Fixture.TestLogger = output; Fixture.AddCommand($"PerformanceMetrics Test {THREADPOOL_WORKER_MAX} {THREADPOOL_COMPLETION_MAX}"); @@ -127,6 +175,9 @@ public DotNetPerfMetricsTests(TFixture fixture, ITestOutputHelper output) : base Fixture.RemoteApplication.NewRelicConfig.SetLogLevel("finest"); Fixture.RemoteApplication.AddAppSetting("NewRelic.EventListenerSamplersEnabled", "true"); Fixture.RemoteApplication.NewRelicConfig.ConfigureFasterMetricsHarvestCycle(10); + + if (_gcSamplerV2Enabled) + Fixture.RemoteApplication.NewRelicConfig.EnableGCSamplerV2(true); } ); diff --git a/tests/Agent/UnitTests/CompositeTests/CompositeTestAgent.cs b/tests/Agent/UnitTests/CompositeTests/CompositeTestAgent.cs index 8a9dc96cd3..934f18bbe5 100644 --- a/tests/Agent/UnitTests/CompositeTests/CompositeTestAgent.cs +++ b/tests/Agent/UnitTests/CompositeTests/CompositeTestAgent.cs @@ -133,7 +133,7 @@ public CompositeTestAgent(bool enableServerlessMode = false) : this(shouldAllowT { } - public CompositeTestAgent(bool shouldAllowThreads, bool includeAsyncLocalStorage, bool enableServerlessMode = false) + public CompositeTestAgent(bool shouldAllowThreads, bool includeAsyncLocalStorage, bool enableServerlessMode = false, bool enableGCSamplerV2 = false) { Log.Initialize(new Logger()); @@ -179,7 +179,7 @@ public CompositeTestAgent(bool shouldAllowThreads, bool includeAsyncLocalStorage // Construct services _container = AgentServices.GetContainer(); - AgentServices.RegisterServices(_container, enableServerlessMode); + AgentServices.RegisterServices(_container, enableServerlessMode, enableGCSamplerV2); // Replace existing registrations with mocks before resolving any services _container.ReplaceInstanceRegistration(mockEnvironment); @@ -220,7 +220,7 @@ public CompositeTestAgent(bool shouldAllowThreads, bool 
includeAsyncLocalStorage InstrumentationService = _container.Resolve(); InstrumentationWatcher = _container.Resolve(); - AgentServices.StartServices(_container, false); + AgentServices.StartServices(_container, false, enableGCSamplerV2); DisableAgentInitializer(); InternalApi.SetAgentApiImplementation(_container.Resolve()); diff --git a/tests/Agent/UnitTests/Core.UnitTest/AgentHealth/AgentHealthReporterTests.cs b/tests/Agent/UnitTests/Core.UnitTest/AgentHealth/AgentHealthReporterTests.cs index 37d91e2fd3..b25163cee6 100644 --- a/tests/Agent/UnitTests/Core.UnitTest/AgentHealth/AgentHealthReporterTests.cs +++ b/tests/Agent/UnitTests/Core.UnitTest/AgentHealth/AgentHealthReporterTests.cs @@ -60,6 +60,8 @@ private IConfiguration GetDefaultConfiguration() Mock.Arrange(() => configuration.InfiniteTracingCompression).Returns(true); Mock.Arrange(() => configuration.LoggingEnabled).Returns(() => _enableLogging); Mock.Arrange(() => configuration.IgnoredInstrumentation).Returns(() => _ignoredInstrumentation); + Mock.Arrange(() => configuration.GCSamplerV2Enabled).Returns(true); + return configuration; } @@ -522,5 +524,12 @@ public void IgnoredInstrumentationSupportabiltyMetricMissing() Assert.That(_publishedMetrics.Any(x => x.MetricNameModel.Name == "Supportability/Dotnet/IgnoredInstrumentation"), Is.False); } + + [Test] + public void GCSamplerV2EnabledSupportabiliityMetricPresent() + { + _agentHealthReporter.CollectMetrics(); + Assert.That(_publishedMetrics.Any(x => x.MetricNameModel.Name == "Supportability/Dotnet/GCSamplerV2/Enabled"), Is.True); + } } } diff --git a/tests/Agent/UnitTests/Core.UnitTest/Config/BootstrapConfigurationTests.cs b/tests/Agent/UnitTests/Core.UnitTest/Config/BootstrapConfigurationTests.cs index f737daa7f5..20a8bcdda1 100644 --- a/tests/Agent/UnitTests/Core.UnitTest/Config/BootstrapConfigurationTests.cs +++ b/tests/Agent/UnitTests/Core.UnitTest/Config/BootstrapConfigurationTests.cs @@ -2,11 +2,13 @@ // SPDX-License-Identifier: Apache-2.0 using System; +using System.Collections.Generic; using System.IO; using NewRelic.Agent.Core.Configuration; using NewRelic.Agent.Core.SharedInterfaces; using NUnit.Framework; using Telerik.JustMock; +using Telerik.JustMock.Helpers; namespace NewRelic.Agent.Core.Config { @@ -35,6 +37,7 @@ public void TestDefaultBootstrapConfiguration() Assert.That(config.ServerlessModeEnabled, Is.False); Assert.That(config.ServerlessFunctionName, Is.Null); Assert.That(config.ServerlessFunctionVersion, Is.Null); + Assert.That(config.GCSamplerV2Enabled, Is.False); }); } @@ -152,6 +155,54 @@ public void DoesNotThrowWhenExceptionOccursWhileReadingAppSettings() Assert.That(config.AgentEnabled, Is.True); } + [Test] + public void GCSamplerV2_DisabledByDefault() + { + var config = CreateBootstrapConfiguration(); + + Assert.That(config.GCSamplerV2Enabled, Is.False); + } + [Test] + public void GCSamplerV2_EnabledViaLocalConfig() + { + _localConfiguration.appSettings.Add(new configurationAdd { key = "GCSamplerV2Enabled", value = "true" }); + + var config = CreateBootstrapConfiguration(); + + Assert.Multiple(() => + { + Assert.That(config.GCSamplerV2Enabled, Is.True); + }); + } + [Test] + public void GCSamplerV2_EnabledViaEnvironmentVariable() + { + _originalEnvironment = ConfigLoaderHelpers.EnvironmentVariableProxy; + try + { + + var environmentMock = Mock.Create(); + Mock.Arrange(() => environmentMock.GetEnvironmentVariable(Arg.IsAny())).Returns(MockGetEnvironmentVar); + ConfigLoaderHelpers.EnvironmentVariableProxy = environmentMock; + + 
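                // The appSettings value is deliberately "false" here: the assertion below verifies that
                // NEW_RELIC_GC_SAMPLER_V2_ENABLED=1, supplied through the mocked environment variable proxy,
                // takes precedence and still enables the sampler.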
_localConfiguration.appSettings.Add(new configurationAdd { key = "GCSamplerV2Enabled", value = "false" }); + + SetEnvironmentVar("NEW_RELIC_GC_SAMPLER_V2_ENABLED", "1"); + + var config = CreateBootstrapConfiguration(); + + Assert.Multiple(() => + { + Assert.That(config.GCSamplerV2Enabled, Is.True); + }); + + } + finally + { + ConfigLoaderHelpers.EnvironmentVariableProxy = _originalEnvironment; + } + } + private BootstrapConfiguration CreateBootstrapConfiguration() { return new BootstrapConfiguration(_localConfiguration, TestFileName, _ => _webConfigValueWithProvenance, _configurationManagerStatic, new ProcessStatic(), Directory.Exists, Path.GetFullPath); @@ -163,5 +214,21 @@ private BootstrapConfiguration CreateBootstrapConfiguration() private IConfigurationManagerStatic _configurationManagerStatic; private const string TestWebConfigProvenance = "web.config"; private const string TestAppSettingProvenance = "app setting"; + + private IEnvironment _originalEnvironment; + private Dictionary _envVars = new Dictionary(); + private void SetEnvironmentVar(string name, string value) + { + _envVars[name] = value; + } + + private void ClearEnvironmentVars() => _envVars.Clear(); + + private string MockGetEnvironmentVar(string name) + { + if (_envVars.TryGetValue(name, out var value)) return value; + return null; + } + } } diff --git a/tests/Agent/UnitTests/Core.UnitTest/DataTransport/AgentSettingsTests.cs b/tests/Agent/UnitTests/Core.UnitTest/DataTransport/AgentSettingsTests.cs index af0e69da57..05d96011d0 100644 --- a/tests/Agent/UnitTests/Core.UnitTest/DataTransport/AgentSettingsTests.cs +++ b/tests/Agent/UnitTests/Core.UnitTest/DataTransport/AgentSettingsTests.cs @@ -341,7 +341,8 @@ public void serializes_correctly() "agent.disable_file_system_watcher": false, "ai_monitoring.enabled": true, "ai_monitoring.streaming.enabled": true, - "ai_monitoring.record_content.enabled": true + "ai_monitoring.record_content.enabled": true, + "gc_sampler_v2.enabled": true } """; diff --git a/tests/Agent/UnitTests/Core.UnitTest/DataTransport/ConnectModelTests.cs b/tests/Agent/UnitTests/Core.UnitTest/DataTransport/ConnectModelTests.cs index 5ae72cf47c..74a4d8a364 100644 --- a/tests/Agent/UnitTests/Core.UnitTest/DataTransport/ConnectModelTests.cs +++ b/tests/Agent/UnitTests/Core.UnitTest/DataTransport/ConnectModelTests.cs @@ -412,7 +412,8 @@ public void serializes_correctly() "agent.disable_file_system_watcher": false, "ai_monitoring.enabled": true, "ai_monitoring.streaming.enabled": true, - "ai_monitoring.record_content.enabled": true + "ai_monitoring.record_content.enabled": true, + "gc_sampler_v2.enabled": true }, "metadata": { "hello": "there" diff --git a/tests/Agent/UnitTests/Core.UnitTest/DataTransport/ExhaustiveTestConfiguration.cs b/tests/Agent/UnitTests/Core.UnitTest/DataTransport/ExhaustiveTestConfiguration.cs index 90345085d5..3cf8cab6ca 100644 --- a/tests/Agent/UnitTests/Core.UnitTest/DataTransport/ExhaustiveTestConfiguration.cs +++ b/tests/Agent/UnitTests/Core.UnitTest/DataTransport/ExhaustiveTestConfiguration.cs @@ -378,7 +378,6 @@ public class ExhaustiveTestConfiguration : IConfiguration public bool UtilizationDetectKubernetes => true; public bool UtilizationDetectAzureFunction => true; - public int? UtilizationLogicalProcessors => 22; public int? 
UtilizationTotalRamMib => 33; @@ -493,5 +492,8 @@ public IReadOnlyDictionary GetAppSettings() public string AzureFunctionResourceIdWithFunctionName(string functionName) => $"AzureFunctionResourceId/{functionName}"; public string LoggingLevel => "info"; + + public bool GCSamplerV2Enabled => true; + } } diff --git a/tests/Agent/UnitTests/Core.UnitTest/DependencyInjection/AgentServicesTests.cs b/tests/Agent/UnitTests/Core.UnitTest/DependencyInjection/AgentServicesTests.cs index aeadafac2d..b4783bbcf2 100644 --- a/tests/Agent/UnitTests/Core.UnitTest/DependencyInjection/AgentServicesTests.cs +++ b/tests/Agent/UnitTests/Core.UnitTest/DependencyInjection/AgentServicesTests.cs @@ -1,6 +1,13 @@ // Copyright 2020 New Relic, Inc. All rights reserved. // SPDX-License-Identifier: Apache-2.0 +#if NET +using System; +using System.Collections.Generic; +using NewRelic.Agent.Core.Samplers; +using NewRelic.Agent.Core.Transformers; +#endif + using Autofac.Core.Registration; using NewRelic.Agent.Configuration; using NewRelic.Agent.Core.Commands; @@ -26,7 +33,7 @@ public void ConfigurationServiceCanFullyResolve() using (new ConfigurationAutoResponder(configuration)) using (var container = AgentServices.GetContainer()) { - AgentServices.RegisterServices(container, false); + AgentServices.RegisterServices(container, false, false); Assert.DoesNotThrow(() => container.Resolve()); } } @@ -44,7 +51,7 @@ public void AllServicesCanFullyResolve() using (new ConfigurationAutoResponder(configuration)) using (var container = AgentServices.GetContainer()) { - AgentServices.RegisterServices(container, false); + AgentServices.RegisterServices(container, false, false); container.ReplaceInstanceRegistration(configurationService); #if NET @@ -52,7 +59,7 @@ public void AllServicesCanFullyResolve() #endif Assert.DoesNotThrow(() => container.Resolve()); - Assert.DoesNotThrow(() => AgentServices.StartServices(container, false)); + Assert.DoesNotThrow(() => AgentServices.StartServices(container, false, false)); } } @@ -72,7 +79,7 @@ public void CorrectServicesAreRegistered_BasedOnServerlessMode(bool serverlessMo using (new ConfigurationAutoResponder(configuration)) using (var container = AgentServices.GetContainer()) { - AgentServices.RegisterServices(container, serverlessModeEnabled); + AgentServices.RegisterServices(container, serverlessModeEnabled, false); container.ReplaceInstanceRegistration(configurationService); #if NET @@ -80,7 +87,7 @@ public void CorrectServicesAreRegistered_BasedOnServerlessMode(bool serverlessMo #endif // Assert Assert.DoesNotThrow(() => container.Resolve()); - Assert.DoesNotThrow(() => AgentServices.StartServices(container, true)); + Assert.DoesNotThrow(() => AgentServices.StartServices(container, true, false)); // ensure dependent services are registered if (serverlessModeEnabled) @@ -112,5 +119,53 @@ public void CorrectServicesAreRegistered_BasedOnServerlessMode(bool serverlessMo } } } + +#if NET + [TestCase(true)] + [TestCase(false)] + public void CorrectServicesAreRegistered_BasedOnGCSamplerV2EnabledMode(bool gcSamplerV2Enabled) + { + // Arrange + var configuration = Mock.Create(); + Mock.Arrange(() => configuration.AutoStartAgent).Returns(false); + Mock.Arrange(() => configuration.NewRelicConfigFilePath).Returns("c:\\"); + var configurationService = Mock.Create(); + Mock.Arrange(() => configurationService.Configuration).Returns(configuration); + + // Act + using (new ConfigurationAutoResponder(configuration)) + using (var container = AgentServices.GetContainer()) + { + 
AgentServices.RegisterServices(container, false, gcSamplerV2Enabled); + + container.ReplaceInstanceRegistration(configurationService); + container.ReplaceRegistrations(); // creates a new scope, registering the replacement instances from all .ReplaceRegistration() calls above + // Assert + Assert.DoesNotThrow(() => container.Resolve()); + Assert.DoesNotThrow(() => AgentServices.StartServices(container, false, gcSamplerV2Enabled)); + + // ensure dependent services are registered + if (gcSamplerV2Enabled) + { + Assert.DoesNotThrow(() => container.Resolve()); + Assert.DoesNotThrow(() => container.Resolve()); + + Assert.Throws(() => container.Resolve>>>()); + Assert.Throws(() => container.Resolve>()); + Assert.Throws(() => container.Resolve()); + + } + else + { + Assert.DoesNotThrow(() => container.Resolve>>>()); + Assert.DoesNotThrow(() => container.Resolve>()); + Assert.DoesNotThrow(() => container.Resolve()); + + Assert.Throws(() => container.Resolve()); + Assert.Throws(() => container.Resolve()); + } + } + } +#endif } } diff --git a/tests/Agent/UnitTests/Core.UnitTest/Metrics/MetricNamesTests.cs b/tests/Agent/UnitTests/Core.UnitTest/Metrics/MetricNamesTests.cs index 22cd05d110..c1412d1f2b 100644 --- a/tests/Agent/UnitTests/Core.UnitTest/Metrics/MetricNamesTests.cs +++ b/tests/Agent/UnitTests/Core.UnitTest/Metrics/MetricNamesTests.cs @@ -328,6 +328,20 @@ public static void MetricNamesTest_GetGCMetricName() { GCSampleType.LOHSize, "GC/LOH/Size" }, { GCSampleType.LOHSurvived, "GC/LOH/Survived" }, + + { GCSampleType.LOHCollectionCount, "GC/LOH/Collections" }, + { GCSampleType.POHCollectionCount, "GC/POH/Collections" }, + + { GCSampleType.TotalHeapMemory, "GC/Heap/Total" }, + { GCSampleType.TotalCommittedMemory, "GC/Heap/Committed" }, + { GCSampleType.TotalAllocatedMemory, "GC/Heap/Allocated" }, + + { GCSampleType.Gen0FragmentationSize, "GC/Gen0/Fragmentation" }, + { GCSampleType.Gen1FragmentationSize, "GC/Gen1/Fragmentation" }, + { GCSampleType.Gen2FragmentationSize, "GC/Gen2/Fragmentation" }, + { GCSampleType.LOHFragmentationSize, "GC/LOH/Fragmentation" }, + { GCSampleType.POHFragmentationSize, "GC/POH/Fragmentation" }, + { GCSampleType.POHSize, "GC/POH/Size" } }; //Ensure that we have covered all sample types with our tests diff --git a/tests/Agent/UnitTests/Core.UnitTest/Samplers/GCSamplerV2Tests.cs b/tests/Agent/UnitTests/Core.UnitTest/Samplers/GCSamplerV2Tests.cs new file mode 100644 index 0000000000..f81d1560f8 --- /dev/null +++ b/tests/Agent/UnitTests/Core.UnitTest/Samplers/GCSamplerV2Tests.cs @@ -0,0 +1,105 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. 
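For context, the CorrectServicesAreRegistered_BasedOnGCSamplerV2EnabledMode test above exercises the same switch the runtime startup path takes; a condensed sketch of that path, using the signatures introduced in this patch (configuration resolution and registration replacement are elided):

// Sketch only: how the agent wires the V2 sampler when the bootstrap flag is set.
var container = AgentServices.GetContainer();
AgentServices.RegisterServices(container, serverlessModeEnabled: false, gcSamplerV2Enabled: true);
// ... configuration service resolution and other startup steps omitted ...
AgentServices.StartServices(container, serverlessModeEnabled: false, gcSamplerV2Enabled: true);
// With gcSamplerV2Enabled == true, StartServices resolves and starts GCSamplerV2;
// otherwise it starts the event-listener-based GCSamplerNetCore.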
+// SPDX-License-Identifier: Apache-2.0 +#if NET + +using System; +using NewRelic.Agent.Core.Time; +using NewRelic.Agent.Core.Transformers; +using NUnit.Framework; +using Telerik.JustMock; + +namespace NewRelic.Agent.Core.Samplers +{ + [TestFixture] + public class GCSamplerV2Tests + { + private IScheduler _scheduler; + private IGCSampleTransformerV2 _transformer; + private IGCSamplerV2ReflectionHelper _reflectionHelper; + private GCSamplerV2 _gcSamplerV2; + + [SetUp] + public void SetUp() + { + _scheduler = Mock.Create(); + _transformer = Mock.Create(); + _reflectionHelper = Mock.Create(); + + _gcSamplerV2 = new GCSamplerV2(_scheduler, _transformer, _reflectionHelper); + } + + [TearDown] + public void TearDown() + { + _gcSamplerV2.Dispose(); + } + + [Test] + public void Sample_ShouldStop_WhenReflectionFails() + { + // Arrange + Mock.Arrange(() => _reflectionHelper.ReflectionFailed).Returns(true); + + // Act + _gcSamplerV2.Sample(); + + // Assert + Mock.Assert(() => _scheduler.StopExecuting(Arg.IsAny(), Arg.IsAny()), Occurs.Once()); + } + + [Test] + public void Sample_ShouldNotTransform_WhenNoGCOccurred() + { + // Arrange + Mock.Arrange(() => _reflectionHelper.ReflectionFailed).Returns(false); + Mock.Arrange(() => _reflectionHelper.HasGCOccurred).Returns(false); + + // Act + _gcSamplerV2.Sample(); + + // Assert + Mock.Assert(() => _transformer.Transform(Arg.IsAny()), Occurs.Never()); + } + + [Test] + public void Sample_Transforms_WhenGCHasOccurred() + { + // Arrange + Mock.Arrange(() => _reflectionHelper.ReflectionFailed).Returns(false); + Mock.Arrange(() => _reflectionHelper.HasGCOccurred).Returns(true); + + var gcMemoryInfo = new GCMemoryInfo { TotalCommittedBytes = 4096L }; + var generationInfo = new[] + { + new GenerationInfo { SizeAfterBytes = 100L, FragmentationAfterBytes = 10L }, + new GenerationInfo { SizeAfterBytes = 200L, FragmentationAfterBytes = 20L }, + new GenerationInfo { SizeAfterBytes = 300L, FragmentationAfterBytes = 30L }, + new GenerationInfo { SizeAfterBytes = 400L, FragmentationAfterBytes = 40L }, + new GenerationInfo { SizeAfterBytes = 500L, FragmentationAfterBytes = 50L } + }; + + Mock.Arrange(() => _reflectionHelper.GCGetMemoryInfo_Invoker(Arg.IsAny())).Returns(gcMemoryInfo); + Mock.Arrange(() => _reflectionHelper.GetGenerationInfo(Arg.IsAny())).Returns(generationInfo); + Mock.Arrange(() => _reflectionHelper.GCGetTotalAllocatedBytes_Invoker(Arg.IsAny())).Returns(2048L); + + // Act + _gcSamplerV2.Sample(); + + // Assert + Mock.Assert(() => _transformer.Transform(Arg.IsAny()), Occurs.Once()); + } + + // Mock classes to replace anonymous types + public class GCMemoryInfo + { + public long TotalCommittedBytes { get; set; } + } + + public class GenerationInfo + { + public long SizeAfterBytes { get; set; } + public long FragmentationAfterBytes { get; set; } + } + } +} +#endif diff --git a/tests/Agent/UnitTests/Core.UnitTest/Samplers/ImmutableGCSampleTests.cs b/tests/Agent/UnitTests/Core.UnitTest/Samplers/ImmutableGCSampleTests.cs new file mode 100644 index 0000000000..d31666f83d --- /dev/null +++ b/tests/Agent/UnitTests/Core.UnitTest/Samplers/ImmutableGCSampleTests.cs @@ -0,0 +1,67 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +using System; +using NewRelic.Agent.Core.Samplers; +using NUnit.Framework; + +namespace NewRelic.Agent.Core.Tests.Samplers +{ + [TestFixture] + public class ImmutableGCSampleTests + { + [Test] + public void Constructor_Default_ShouldInitializeFields() + { + // Act + var sample = new ImmutableGCSample(); + + // Assert + Assert.Multiple(() => + { + Assert.That(sample.LastSampleTime, Is.EqualTo(DateTime.MinValue)); + Assert.That(sample.CurrentSampleTime, Is.EqualTo(DateTime.MinValue)); + Assert.That(sample.GCHeapSizesBytes.Length, Is.EqualTo(5)); + Assert.That(sample.GCCollectionCounts.Length, Is.EqualTo(5)); + Assert.That(sample.GCFragmentationSizesBytes.Length, Is.EqualTo(5)); + }); + } + + [Test] + [TestCase(3, new long[] { 100, 200, 300 }, new[] { 5, 4, 3 }, new long[] { 10, 20, 30 }, new[] { 1, 1, 3, 0, 0 })] + [TestCase(4, new long[] { 100, 200, 300, 400 }, new[] { 5, 4, 3, 2 }, new long[] { 10, 20, 30, 40 }, new[] { 1, 1, 1, 2, 0})] + [TestCase(5, new long[] { 100, 200, 300, 400, 500 }, new[] { 5, 4, 3, 2, 1 }, new long[] { 10, 20, 30, 40, 50 }, new[] { 1, 1, 1, 1, 1 })] + public void Constructor_WithParameters_ShouldInitializeFields(int collectionLength, long[] heapSizesBytes, int[] rawCollectionCounts, long[] fragmentationSizesBytes, int[] expectedCollectionCounts) + { + // Arrange + var lastSampleTime = DateTime.UtcNow.AddMinutes(-1); + var currentSampleTime = DateTime.UtcNow; + var totalMemoryBytes = 1024L; + var totalAllocatedBytes = 2048L; + var totalCommittedBytes = 4096L; + + // Act + var sample = new ImmutableGCSample(lastSampleTime, currentSampleTime, totalMemoryBytes, totalAllocatedBytes, totalCommittedBytes, heapSizesBytes, rawCollectionCounts, fragmentationSizesBytes); + + // Assert + Assert.Multiple(() => + { + Assert.That(sample.LastSampleTime, Is.EqualTo(lastSampleTime)); + Assert.That(sample.CurrentSampleTime, Is.EqualTo(currentSampleTime)); + Assert.That(sample.TotalMemoryBytes, Is.EqualTo(totalMemoryBytes)); + Assert.That(sample.TotalAllocatedBytes, Is.EqualTo(totalAllocatedBytes)); + Assert.That(sample.TotalCommittedBytes, Is.EqualTo(totalCommittedBytes)); + Assert.That(sample.GCHeapSizesBytes, Is.EqualTo(heapSizesBytes)); + Assert.That(sample.GCFragmentationSizesBytes, Is.EqualTo(fragmentationSizesBytes)); + + // Verify GCCollectionCounts + Assert.That(sample.GCCollectionCounts.Length, Is.EqualTo(5)); + Assert.That(sample.GCCollectionCounts[0], Is.EqualTo(expectedCollectionCounts[0])); // Gen 0 + Assert.That(sample.GCCollectionCounts[1], Is.EqualTo(expectedCollectionCounts[1])); // Gen 1 + Assert.That(sample.GCCollectionCounts[2], Is.EqualTo(expectedCollectionCounts[2])); // Gen 2 + Assert.That(sample.GCCollectionCounts[3], Is.EqualTo(expectedCollectionCounts[3])); // LOH + Assert.That(sample.GCCollectionCounts[4], Is.EqualTo(expectedCollectionCounts[4])); // POH + }); + } + } +} diff --git a/tests/Agent/UnitTests/Core.UnitTest/Transformers/GCSampleTransformerV2Tests.cs b/tests/Agent/UnitTests/Core.UnitTest/Transformers/GCSampleTransformerV2Tests.cs new file mode 100644 index 0000000000..b6dc9c96ad --- /dev/null +++ b/tests/Agent/UnitTests/Core.UnitTest/Transformers/GCSampleTransformerV2Tests.cs @@ -0,0 +1,188 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. 
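The parameterized cases above encode how the raw, cumulative per-generation collection counts are turned into per-generation figures: each generation's count has the next generation's count subtracted from it (collecting generation N also counts as a collection of every lower generation), the highest generation supplied keeps its raw value, and the result is padded out to five slots (gen 0, gen 1, gen 2, LOH, POH). A small stand-alone sketch of that arithmetic, written to match the expected values in the test cases rather than copied from the agent's implementation, is:

```csharp
using System;

public static class CollectionCountMath
{
    // Derive per-generation collection counts from cumulative raw counts
    // (raw[0] >= raw[1] >= raw[2] ..., because collecting gen N also collects lower gens).
    // The result is always padded to five slots: gen0, gen1, gen2, LOH, POH.
    public static int[] Derive(int[] rawCumulativeCounts)
    {
        var result = new int[5];
        for (var i = 0; i < rawCumulativeCounts.Length && i < 5; i++)
        {
            var next = i + 1 < rawCumulativeCounts.Length ? rawCumulativeCounts[i + 1] : 0;
            result[i] = rawCumulativeCounts[i] - next;
        }
        return result;
    }

    public static void Main()
    {
        // Mirrors the first TestCase above: [5, 4, 3] => [1, 1, 3, 0, 0]
        Console.WriteLine(string.Join(", ", Derive(new[] { 5, 4, 3 })));
    }
}
```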
+// SPDX-License-Identifier: Apache-2.0 + +using System.Collections.Generic; +using NewRelic.Agent.Core.Aggregators; +using NewRelic.Agent.Core.Metrics; +using NewRelic.Agent.Core.Samplers; +using NewRelic.Agent.Core.WireModels; +using NUnit.Framework; +using Telerik.JustMock; + +namespace NewRelic.Agent.Core.Transformers +{ + [TestFixture] + public class GCSampleTransformerV2Tests + { + private IMetricBuilder _metricBuilder; + private IMetricAggregator _metricAggregator; + private GCSampleTransformerV2 _transformer; + + [SetUp] + public void SetUp() + { + _metricBuilder = new MetricWireModel.MetricBuilder(new MetricNameService()); + + _metricAggregator = Mock.Create(); + + _transformer = new GCSampleTransformerV2(_metricBuilder, _metricAggregator); + } + + [Test] + public void Transform_ShouldUpdateCurrentAndPreviousSamples() + { + // Arrange + var sample = CreateSample(); + Mock.Arrange(() => _metricAggregator.Collect(Arg.IsAny())); + + // Act + _transformer.Transform(sample); + + // Assert + + Assert.Multiple(() => + { + Assert.That(_transformer.PreviousSample, Is.Not.Null); + Assert.That(_transformer.CurrentSample, Is.EqualTo(sample)); + }); + } + + [Test] + public void Transform_ShouldBuildAndRecordMetrics() + { + // Arrange + var sample = CreateSample(); + + var generatedMetrics = new Dictionary(); + + Mock.Arrange(() => _metricAggregator.Collect(Arg.IsAny())).DoInstead(m => generatedMetrics.Add(m.MetricNameModel.Name, m.DataModel)); + + // Act + _transformer.Transform(sample); + + // Assert + const float bytesPerMb = 1048576f; + Assert.Multiple(() => + { + Assert.That(generatedMetrics, Has.Count.EqualTo(18)); + MetricTestHelpers.CompareMetric(generatedMetrics, MetricNames.GetGCMetricName(GCSampleType.TotalAllocatedMemory), sample.TotalAllocatedBytes / bytesPerMb); + MetricTestHelpers.CompareMetric(generatedMetrics, MetricNames.GetGCMetricName(GCSampleType.TotalCommittedMemory), sample.TotalCommittedBytes / bytesPerMb); + MetricTestHelpers.CompareMetric(generatedMetrics, MetricNames.GetGCMetricName(GCSampleType.TotalHeapMemory), sample.TotalMemoryBytes / bytesPerMb); + MetricTestHelpers.CompareMetric(generatedMetrics, MetricNames.GetGCMetricName(GCSampleType.Gen0Size), sample.GCHeapSizesBytes[0] / bytesPerMb); + MetricTestHelpers.CompareMetric(generatedMetrics, MetricNames.GetGCMetricName(GCSampleType.Gen1Size), sample.GCHeapSizesBytes[1] / bytesPerMb); + MetricTestHelpers.CompareMetric(generatedMetrics, MetricNames.GetGCMetricName(GCSampleType.Gen2Size), sample.GCHeapSizesBytes[2] / bytesPerMb); + MetricTestHelpers.CompareMetric(generatedMetrics, MetricNames.GetGCMetricName(GCSampleType.LOHSize), sample.GCHeapSizesBytes[3] / bytesPerMb); + MetricTestHelpers.CompareMetric(generatedMetrics, MetricNames.GetGCMetricName(GCSampleType.POHSize), sample.GCHeapSizesBytes[4] / bytesPerMb); + MetricTestHelpers.CompareMetric(generatedMetrics, MetricNames.GetGCMetricName(GCSampleType.Gen0FragmentationSize), sample.GCFragmentationSizesBytes[0] / bytesPerMb); + MetricTestHelpers.CompareMetric(generatedMetrics, MetricNames.GetGCMetricName(GCSampleType.Gen1FragmentationSize), sample.GCFragmentationSizesBytes[1] / bytesPerMb); + MetricTestHelpers.CompareMetric(generatedMetrics, MetricNames.GetGCMetricName(GCSampleType.Gen2FragmentationSize), sample.GCFragmentationSizesBytes[2] / bytesPerMb); + MetricTestHelpers.CompareMetric(generatedMetrics, MetricNames.GetGCMetricName(GCSampleType.LOHFragmentationSize), sample.GCFragmentationSizesBytes[3] / bytesPerMb); + 
MetricTestHelpers.CompareMetric(generatedMetrics, MetricNames.GetGCMetricName(GCSampleType.POHFragmentationSize), sample.GCFragmentationSizesBytes[4] / bytesPerMb); + MetricTestHelpers.CompareCountMetric(generatedMetrics, MetricNames.GetGCMetricName(GCSampleType.Gen0CollectionCount), sample.GCCollectionCounts[0]); + MetricTestHelpers.CompareCountMetric(generatedMetrics, MetricNames.GetGCMetricName(GCSampleType.Gen1CollectionCount), sample.GCCollectionCounts[1]); + MetricTestHelpers.CompareCountMetric(generatedMetrics, MetricNames.GetGCMetricName(GCSampleType.Gen2CollectionCount), sample.GCCollectionCounts[2]); + MetricTestHelpers.CompareCountMetric(generatedMetrics, MetricNames.GetGCMetricName(GCSampleType.LOHCollectionCount), sample.GCCollectionCounts[3]); + MetricTestHelpers.CompareCountMetric(generatedMetrics, MetricNames.GetGCMetricName(GCSampleType.POHCollectionCount), sample.GCCollectionCounts[4]); + }); + } + + [Test] + public void Transform_ShouldRecordZeroMetric_WhenCurrentValueIsLessThanPreviousValue() + { + // Arrange + var previousSample = CreateSampleWithCollectionCounts([3, 3, 3, 3, 3]); + var currentSample = CreateSampleWithCollectionCounts([1, 1, 1, 1, 1]); + + var generatedMetrics = new Dictionary(); + + Mock.Arrange(() => _metricAggregator.Collect(Arg.IsAny())).DoInstead(m => generatedMetrics.Add(m.MetricNameModel.Name, m.DataModel)); + + // Act + _transformer.Transform(previousSample); + + generatedMetrics.Clear(); + + _transformer.Transform(currentSample); + + // Assert + Assert.Multiple(() => + { + Assert.That(generatedMetrics, Has.Count.EqualTo(18)); + MetricTestHelpers.CompareCountMetric(generatedMetrics, MetricNames.GetGCMetricName(GCSampleType.Gen0CollectionCount), 0); + MetricTestHelpers.CompareCountMetric(generatedMetrics, MetricNames.GetGCMetricName(GCSampleType.Gen1CollectionCount), 0); + MetricTestHelpers.CompareCountMetric(generatedMetrics, MetricNames.GetGCMetricName(GCSampleType.Gen2CollectionCount), 0); + MetricTestHelpers.CompareCountMetric(generatedMetrics, MetricNames.GetGCMetricName(GCSampleType.LOHCollectionCount), 0); + MetricTestHelpers.CompareCountMetric(generatedMetrics, MetricNames.GetGCMetricName(GCSampleType.POHCollectionCount), 0); + }); + } + + + private ImmutableGCSample CreateSample() + { + return new ImmutableGCSample( + lastSampleTime: System.DateTime.UtcNow.AddMinutes(-1), + currentSampleTime: System.DateTime.UtcNow, + totalMemoryBytes: 1024L, + totalAllocatedBytes: 2048L, + totalCommittedBytes: 4096L, + heapSizesBytes: [100, 200, 300, 400, 500], + rawCollectionCounts: [5, 4, 3, 2, 1], + fragmentationSizesBytes: [10, 20, 30, 40, 50] + ); + } + + private ImmutableGCSample CreateSampleWithCollectionCounts(int[] collectionCounts) + { + return new ImmutableGCSample( + lastSampleTime: System.DateTime.UtcNow.AddMinutes(-1), + currentSampleTime: System.DateTime.UtcNow, + totalMemoryBytes: 1024L, + totalAllocatedBytes: 2048L, + totalCommittedBytes: 4096L, + heapSizesBytes: [100, 200, 300, 400, 500], + rawCollectionCounts: collectionCounts, + fragmentationSizesBytes: [10, 20, 30, 40, 50] + ); + } + + [Test] + public void Transform_ShouldRecordZeroMetric_WhenCurrentAllocatedMemoryIsLessThanPreviousAllocatedMemory() + { + // Arrange + var previousSample = CreateSampleWithAllocatedBytes(2048L); + var currentSample = CreateSampleWithAllocatedBytes(1024L); + + var generatedMetrics = new Dictionary(); + + Mock.Arrange(() => _metricAggregator.Collect(Arg.IsAny())).DoInstead(m => generatedMetrics.Add(m.MetricNameModel.Name, m.DataModel)); + + // Act + 
_transformer.Transform(previousSample); + + generatedMetrics.Clear(); + + _transformer.Transform(currentSample); + + // Assert + Assert.Multiple(() => + { + Assert.That(generatedMetrics, Has.Count.EqualTo(18)); + MetricTestHelpers.CompareMetric(generatedMetrics, MetricNames.GetGCMetricName(GCSampleType.TotalAllocatedMemory), 0); + }); + } + + private ImmutableGCSample CreateSampleWithAllocatedBytes(long allocatedBytes) + { + return new ImmutableGCSample( + lastSampleTime: System.DateTime.UtcNow.AddMinutes(-1), + currentSampleTime: System.DateTime.UtcNow, + totalMemoryBytes: 1024L, + totalAllocatedBytes: allocatedBytes, + totalCommittedBytes: 4096L, + heapSizesBytes: [100, 200, 300, 400, 500], + rawCollectionCounts: [5, 4, 3, 2, 1], + fragmentationSizesBytes: [10, 20, 30, 40, 50] + ); + } + } +} diff --git a/tests/Agent/UnitTests/Core.UnitTest/Transformers/GCStatsSampleTransformerTests.cs b/tests/Agent/UnitTests/Core.UnitTest/Transformers/GCStatsSampleTransformerTests.cs index 74e66a7f55..db0320125f 100644 --- a/tests/Agent/UnitTests/Core.UnitTest/Transformers/GCStatsSampleTransformerTests.cs +++ b/tests/Agent/UnitTests/Core.UnitTest/Transformers/GCStatsSampleTransformerTests.cs @@ -41,7 +41,12 @@ public void Setup() //Build example sample data var sampleValue = 0f; _sampleData = new Dictionary(); - foreach (var val in Enum.GetValues(typeof(GCSampleType))) + + var values = Enum.GetValues(typeof(GCSampleType)); + // only consider "old" GCSampleType enum members for this test + values = values.Cast().Where(x => x < GCSampleType.POHSize).ToArray(); + + foreach (var val in values) { _sampleData.Add((GCSampleType)val, sampleValue++); } diff --git a/tests/Agent/UnitTests/Core.UnitTest/Transformers/MetricTestHelpers.cs b/tests/Agent/UnitTests/Core.UnitTest/Transformers/MetricTestHelpers.cs index 78ac38e8d2..94c052b9af 100644 --- a/tests/Agent/UnitTests/Core.UnitTest/Transformers/MetricTestHelpers.cs +++ b/tests/Agent/UnitTests/Core.UnitTest/Transformers/MetricTestHelpers.cs @@ -13,13 +13,18 @@ public static class MetricTestHelpers public static void CompareMetric(Dictionary generatedMetrics, string metricName, float expectedValue) { NrAssert.Multiple( - () => Assert.That(generatedMetrics[metricName].Value0, Is.EqualTo(1)), - () => Assert.That(generatedMetrics[metricName].Value1, Is.EqualTo(expectedValue)), - () => Assert.That(generatedMetrics[metricName].Value2, Is.EqualTo(expectedValue)), - () => Assert.That(generatedMetrics[metricName].Value3, Is.EqualTo(expectedValue)), - () => Assert.That(generatedMetrics[metricName].Value4, Is.EqualTo(expectedValue)), - () => Assert.That(generatedMetrics[metricName].Value5, Is.EqualTo(expectedValue * expectedValue)) + () => Assert.That(generatedMetrics[metricName].Value0, Is.EqualTo(1), message: $"{metricName}.Value0"), + () => Assert.That(generatedMetrics[metricName].Value1, Is.EqualTo(expectedValue), message: $"{metricName}.Value1"), + () => Assert.That(generatedMetrics[metricName].Value2, Is.EqualTo(expectedValue), message: $"{metricName}.Value2"), + () => Assert.That(generatedMetrics[metricName].Value3, Is.EqualTo(expectedValue), message: $"{metricName}.Value3"), + () => Assert.That(generatedMetrics[metricName].Value4, Is.EqualTo(expectedValue), message: $"{metricName}.Value4"), + () => Assert.That(generatedMetrics[metricName].Value5, Is.EqualTo(expectedValue * expectedValue), message: $"{metricName}.Value5") ); } + + public static void CompareCountMetric(Dictionary generatedMetrics, string metricName, float expectedValue) + { + 
Assert.That(generatedMetrics[metricName].Value0, Is.EqualTo(expectedValue), message: $"{metricName}.Value0"); + } } } diff --git a/tests/Agent/UnitTests/NewRelic.Agent.Extensions.Tests/Reflection/VisibilityBypasserTests.cs b/tests/Agent/UnitTests/NewRelic.Agent.Extensions.Tests/Reflection/VisibilityBypasserTests.cs index f25521058f..f9cc2811d4 100644 --- a/tests/Agent/UnitTests/NewRelic.Agent.Extensions.Tests/Reflection/VisibilityBypasserTests.cs +++ b/tests/Agent/UnitTests/NewRelic.Agent.Extensions.Tests/Reflection/VisibilityBypasserTests.cs @@ -54,6 +54,8 @@ private class PrivateInner public string GetWritableStringField { get { return _writableStringField; } } private int _writeableIntField = 7; public int GetWriteableIntField { get { return _writeableIntField; } } + + public static int StaticMethodWithOneParameter(int param) { return param;} } public static class PublicStatic @@ -628,7 +630,7 @@ public void test_input_validation() public class StaticMethodTests { [Test] - public void test_static_generator() + public void generate_parameterless_static_method_caller() { var assemblyName = Assembly.GetExecutingAssembly().FullName; var typeName = "NewRelic.Reflection.UnitTests.PublicStatic"; @@ -640,5 +642,66 @@ public void test_static_generator() Assert.Throws(() => VisibilityBypasser.Instance.GenerateParameterlessStaticMethodCaller(assemblyName, typeName, "NoSuchMethod")); } - } + [Test] + public void try_generate_one_parameter_static_method_caller() + { + var methodName = "StaticMethodWithOneParameter"; + var expectedValue = 5; + + var success = VisibilityBypasser.Instance.TryGenerateOneParameterStaticMethodCaller(typeof(PublicOuter), methodName, typeof(int), typeof(int), out var accessor ); + Assert.That(success, Is.True); + + var actualValue = accessor(5); + + Assert.That(actualValue, Is.EqualTo(expectedValue)); + } + + [Test] + public void try_generate_one_parameter_static_method_caller_failure() + { + // Arrange + var ownerType = typeof(PublicStatic); + var methodName = "NonExistentMethod"; + var paramType = typeof(int); + var returnType = typeof(int); + Func accessor; + + // Act + var result = VisibilityBypasser.Instance.TryGenerateOneParameterStaticMethodCaller(ownerType, methodName, paramType, returnType, out accessor); + + // Assert + Assert.That(result, Is.False); + Assert.That(accessor, Is.Null); + } + + + [Test] + public void try_generate_parameterless_static_method_caller() + { + var methodName = "GetANumber"; + var expectedValue = 3; + + var success = VisibilityBypasser.Instance.TryGenerateParameterlessStaticMethodCaller(typeof(PublicStatic), methodName, out var accessor); + Assert.That(success, Is.True); + + var actualValue = accessor(); + + Assert.That(actualValue, Is.EqualTo(expectedValue)); + } + + [Test] + public void try_generate_parameterless_static_method_caller_failure() + { + // Arrange + var ownerType = typeof(PublicStatic); + var methodName = "NonExistentMethod"; + Func accessor; + + // Act + var success = VisibilityBypasser.Instance.TryGenerateParameterlessStaticMethodCaller(ownerType, methodName, out accessor); + + // Assert + Assert.That(success, Is.False); + Assert.That(accessor, Is.Null); + } } } From 936b6f63c837dd03222281870ae5db40574693ff Mon Sep 17 00:00:00 2001 From: Chris Hynes <111462425+chynesNR@users.noreply.github.com> Date: Fri, 25 Oct 2024 10:44:12 -0700 Subject: [PATCH 02/19] feat: Add config option for providing AWS account ID for linking (#2851) --- .../Agent/Core/Config/Configuration.cs | 88 +++++++++++++++++++ 
.../Agent/Core/Config/Configuration.xsd | 24 +++++ .../Configuration/DefaultConfiguration.cs | 17 ++++ .../Configuration/ReportedConfiguration.cs | 3 + .../Configuration/IConfiguration.cs | 1 + .../DefaultConfigurationTests.cs | 84 ++++++++++++++++++ .../DataTransport/AgentSettingsTests.cs | 1 + .../ExhaustiveTestConfiguration.cs | 2 +- 8 files changed, 219 insertions(+), 1 deletion(-) diff --git a/src/Agent/NewRelic/Agent/Core/Config/Configuration.cs b/src/Agent/NewRelic/Agent/Core/Config/Configuration.cs index 6395264aaa..7b7ac990d5 100644 --- a/src/Agent/NewRelic/Agent/Core/Config/Configuration.cs +++ b/src/Agent/NewRelic/Agent/Core/Config/Configuration.cs @@ -98,6 +98,8 @@ public partial class configuration private configurationCodeLevelMetrics codeLevelMetricsField; + private configurationCloud cloudField; + private bool agentEnabledField; private bool rootAgentEnabledField; @@ -121,6 +123,7 @@ public partial class configuration /// public configuration() { + this.cloudField = new configurationCloud(); this.codeLevelMetricsField = new configurationCodeLevelMetrics(); this.processHostField = new configurationProcessHost(); this.utilizationField = new configurationUtilization(); @@ -600,6 +603,18 @@ public configurationCodeLevelMetrics codeLevelMetrics } } + public configurationCloud cloud + { + get + { + return this.cloudField; + } + set + { + this.cloudField = value; + } + } + /// /// Set this to true to enable the agent. /// @@ -6013,6 +6028,79 @@ public virtual configurationCodeLevelMetrics Clone() #endregion } + [System.CodeDom.Compiler.GeneratedCodeAttribute("Xsd2Code", "3.6.0.20097")] + [System.SerializableAttribute()] + [System.ComponentModel.DesignerCategoryAttribute("code")] + [System.Xml.Serialization.XmlTypeAttribute(AnonymousType=true, Namespace="urn:newrelic-config")] + public partial class configurationCloud + { + + private configurationCloudAws awsField; + + /// + /// configurationCloud class constructor + /// + public configurationCloud() + { + this.awsField = new configurationCloudAws(); + } + + public configurationCloudAws aws + { + get + { + return this.awsField; + } + set + { + this.awsField = value; + } + } + + #region Clone method + /// + /// Create a clone of this configurationCloud object + /// + public virtual configurationCloud Clone() + { + return ((configurationCloud)(this.MemberwiseClone())); + } + #endregion + } + + [System.CodeDom.Compiler.GeneratedCodeAttribute("Xsd2Code", "3.6.0.20097")] + [System.SerializableAttribute()] + [System.ComponentModel.DesignerCategoryAttribute("code")] + [System.Xml.Serialization.XmlTypeAttribute(AnonymousType=true, Namespace="urn:newrelic-config")] + public partial class configurationCloudAws + { + + private string accountIdField; + + [System.Xml.Serialization.XmlAttributeAttribute()] + public string accountId + { + get + { + return this.accountIdField; + } + set + { + this.accountIdField = value; + } + } + + #region Clone method + /// + /// Create a clone of this configurationCloudAws object + /// + public virtual configurationCloudAws Clone() + { + return ((configurationCloudAws)(this.MemberwiseClone())); + } + #endregion + } + [System.CodeDom.Compiler.GeneratedCodeAttribute("Xsd2Code", "3.6.0.20097")] [System.SerializableAttribute()] [System.Xml.Serialization.XmlTypeAttribute(AnonymousType=true, Namespace="urn:newrelic-config")] diff --git a/src/Agent/NewRelic/Agent/Core/Config/Configuration.xsd b/src/Agent/NewRelic/Agent/Core/Config/Configuration.xsd index 79e85781f1..33f046677d 100644 --- 
a/src/Agent/NewRelic/Agent/Core/Config/Configuration.xsd +++ b/src/Agent/NewRelic/Agent/Core/Config/Configuration.xsd @@ -2025,6 +2025,30 @@ + + + + + Provide linking data needed for certain cloud services. + + + + + + + + + + + The AWS Account ID used by this application. + + + + + + + + diff --git a/src/Agent/NewRelic/Agent/Core/Configuration/DefaultConfiguration.cs b/src/Agent/NewRelic/Agent/Core/Configuration/DefaultConfiguration.cs index e87e7b1c80..986e88bf7d 100644 --- a/src/Agent/NewRelic/Agent/Core/Configuration/DefaultConfiguration.cs +++ b/src/Agent/NewRelic/Agent/Core/Configuration/DefaultConfiguration.cs @@ -2978,6 +2978,23 @@ public bool CodeLevelMetricsEnabled #endregion + #region Cloud + private string _awsAccountId; + public string AwsAccountId + { + get + { + if (_awsAccountId != null) + { + return _awsAccountId; + } + _awsAccountId = EnvironmentOverrides(_localConfiguration.cloud.aws.accountId, "NEW_RELIC_CLOUD_AWS_ACCOUNT_ID"); + + return _awsAccountId; + } + } + #endregion + public static bool GetLoggingEnabledValue(IEnvironment environment, configurationLog localLogConfiguration) { return EnvironmentOverrides(environment, localLogConfiguration.enabled, "NEW_RELIC_LOG_ENABLED"); diff --git a/src/Agent/NewRelic/Agent/Core/Configuration/ReportedConfiguration.cs b/src/Agent/NewRelic/Agent/Core/Configuration/ReportedConfiguration.cs index c60251e4a8..c0f9e586eb 100644 --- a/src/Agent/NewRelic/Agent/Core/Configuration/ReportedConfiguration.cs +++ b/src/Agent/NewRelic/Agent/Core/Configuration/ReportedConfiguration.cs @@ -711,6 +711,9 @@ public ReportedConfiguration(IConfiguration configuration) public string AzureFunctionResourceIdWithFunctionName(string functionName) => _configuration.AzureFunctionResourceIdWithFunctionName(functionName); + [JsonIgnore] + public string AwsAccountId => _configuration.AwsAccountId; + [JsonProperty("gc_sampler_v2.enabled")] public bool GCSamplerV2Enabled => _configuration.GCSamplerV2Enabled; diff --git a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Configuration/IConfiguration.cs b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Configuration/IConfiguration.cs index 2c70b0d8f7..809871b591 100644 --- a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Configuration/IConfiguration.cs +++ b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Configuration/IConfiguration.cs @@ -232,6 +232,7 @@ public interface IConfiguration bool UtilizationDetectAzureFunction { get; } + string AwsAccountId { get; } bool GCSamplerV2Enabled { get; } } } diff --git a/tests/Agent/UnitTests/Core.UnitTest/Configuration/DefaultConfigurationTests.cs b/tests/Agent/UnitTests/Core.UnitTest/Configuration/DefaultConfigurationTests.cs index ac591dfb48..d6559d45ff 100644 --- a/tests/Agent/UnitTests/Core.UnitTest/Configuration/DefaultConfigurationTests.cs +++ b/tests/Agent/UnitTests/Core.UnitTest/Configuration/DefaultConfigurationTests.cs @@ -4438,9 +4438,93 @@ public void AzureFunctionServiceName_ShouldReturnServiceName_WhenEnvironmentVari // Assert Assert.That(result, Is.EqualTo("some-service-name")); } + #endregion + + #region Cloud + [Test] + public void Cloud_Section_Parsing_And_Override() + { + string xmlString = """ + + + + + + + """; + var config = GenerateConfigFromXml(xmlString); + + Assert.That(config.AwsAccountId, Is.EqualTo("123456789012")); + + xmlString = """ + + + + + + + """; + config = GenerateConfigFromXml(xmlString); + + Assert.That(config.AwsAccountId, Is.Null); + + xmlString = """ + + + + + + """; + 
config = GenerateConfigFromXml(xmlString); + + Assert.That(config.AwsAccountId, Is.Null); + + xmlString = """ + + + + """; + config = GenerateConfigFromXml(xmlString); + + Assert.That(config.AwsAccountId, Is.Null); + + // null from the last test, but env override should work + Mock.Arrange(() => _environment.GetEnvironmentVariableFromList("NEW_RELIC_CLOUD_AWS_ACCOUNT_ID")).Returns("444488881212"); + + Assert.That(config.AwsAccountId, Is.EqualTo("444488881212")); + + // A second call should use the cached value + Assert.That(config.AwsAccountId, Is.EqualTo("444488881212")); + + // If it exists in the config, the env variable should still override + xmlString = """ + + + + + + + """; + config = GenerateConfigFromXml(xmlString); + Assert.That(config.AwsAccountId, Is.EqualTo("444488881212")); + } #endregion + private DefaultConfiguration GenerateConfigFromXml(string xml) + { + var root = new XmlRootAttribute { ElementName = "configuration", Namespace = "urn:newrelic-config" }; + var serializer = new XmlSerializer(typeof(configuration), root); + + configuration localConfiguration; + using (var reader = new StringReader(xml)) + { + localConfiguration = serializer.Deserialize(reader) as configuration; + } + + return new TestableDefaultConfiguration(_environment, localConfiguration, _serverConfig, _runTimeConfig, _securityPoliciesConfiguration, _bootstrapConfiguration, _processStatic, _httpRuntimeStatic, _configurationManagerStatic, _dnsStatic); + } + private void CreateDefaultConfiguration() { _defaultConfig = new TestableDefaultConfiguration(_environment, _localConfig, _serverConfig, _runTimeConfig, _securityPoliciesConfiguration, _bootstrapConfiguration, _processStatic, _httpRuntimeStatic, _configurationManagerStatic, _dnsStatic); diff --git a/tests/Agent/UnitTests/Core.UnitTest/DataTransport/AgentSettingsTests.cs b/tests/Agent/UnitTests/Core.UnitTest/DataTransport/AgentSettingsTests.cs index 05d96011d0..433d4fc458 100644 --- a/tests/Agent/UnitTests/Core.UnitTest/DataTransport/AgentSettingsTests.cs +++ b/tests/Agent/UnitTests/Core.UnitTest/DataTransport/AgentSettingsTests.cs @@ -360,6 +360,7 @@ public void serializes_correctly() Assert.That(agentSettings.ServerlessFunctionName, Is.Null); Assert.That(agentSettings.ServerlessFunctionVersion, Is.Null); Assert.That(json, Is.EqualTo(expectedJson.Condense())); + Assert.That(agentSettings.AwsAccountId, Is.Empty); }); } } diff --git a/tests/Agent/UnitTests/Core.UnitTest/DataTransport/ExhaustiveTestConfiguration.cs b/tests/Agent/UnitTests/Core.UnitTest/DataTransport/ExhaustiveTestConfiguration.cs index 3cf8cab6ca..7c217d442e 100644 --- a/tests/Agent/UnitTests/Core.UnitTest/DataTransport/ExhaustiveTestConfiguration.cs +++ b/tests/Agent/UnitTests/Core.UnitTest/DataTransport/ExhaustiveTestConfiguration.cs @@ -493,7 +493,7 @@ public IReadOnlyDictionary GetAppSettings() public string LoggingLevel => "info"; + public string AwsAccountId => ""; public bool GCSamplerV2Enabled => true; - } } From e77683b7e60afa502b6e700a51945c757530a47b Mon Sep 17 00:00:00 2001 From: Marty T <120425148+tippmar-nr@users.noreply.github.com> Date: Fri, 25 Oct 2024 12:58:29 -0500 Subject: [PATCH 03/19] fix: Revert environment variable name change in installers and scripts (#2852) --- build/ArtifactBuilder/Artifacts/MsiInstaller.cs | 2 +- build/Linux/build/common/run.sh | 2 +- build/Linux/build/common/setenv.sh | 8 +++++--- build/Linux/build/deb/postinst | 4 ++-- .../Linux/build/rpm/newrelic-dotnet-agent.spec | 4 ++-- .../Content/applicationHost.xdt | 4 ++-- 
.../AzureSiteExtension/Content/install.ps1 | 4 ++-- .../tools/NewRelicHelper.psm1 | 6 +++--- .../NugetAzureWebSites-x64/tools/install.ps1 | 4 ++-- .../NugetAzureWebSites-x86/tools/install.ps1 | 4 ++-- docs/development.md | 8 ++++---- src/Agent/Miscellaneous/azure-app-services.md | 2 +- src/Agent/MsiInstaller/Installer/Product.wxs | 17 ++++++++--------- .../InstallerActions/CustomActions.cs | 4 +++- .../Extensions/Providers/Wrapper/README.md | 2 +- src/Agent/NewRelic/Profiler/README.md | 2 +- 16 files changed, 40 insertions(+), 37 deletions(-) diff --git a/build/ArtifactBuilder/Artifacts/MsiInstaller.cs b/build/ArtifactBuilder/Artifacts/MsiInstaller.cs index 2dc8976c7a..724fd69cf6 100644 --- a/build/ArtifactBuilder/Artifacts/MsiInstaller.cs +++ b/build/ArtifactBuilder/Artifacts/MsiInstaller.cs @@ -17,7 +17,7 @@ class MsiInstaller : Artifact private readonly string[] _coreIISRegistryValues = new string[] { "CORECLR_ENABLE_PROFILING=1", "CORECLR_PROFILER={36032161-FFC0-4B61-B559-F6C5D41BAE5A}", - "CORECLR_NEW_RELIC_HOME=[NETAGENTCOMMONFOLDER]" }; + "CORECLR_NEWRELIC_HOME=[NETAGENTCOMMONFOLDER]" }; private readonly AgentComponents _frameworkAgentComponents; private readonly AgentComponents _coreAgentComponents; diff --git a/build/Linux/build/common/run.sh b/build/Linux/build/common/run.sh index f2f2577653..c000d28f52 100644 --- a/build/Linux/build/common/run.sh +++ b/build/Linux/build/common/run.sh @@ -2,4 +2,4 @@ # This script can be used to run a dotnet application with New Relic monitoring. -CORECLR_NEW_RELIC_HOME=${CORECLR_NEW_RELIC_HOME:-/usr/local/newrelic-dotnet-agent} CORECLR_ENABLE_PROFILING=1 CORECLR_PROFILER={36032161-FFC0-4B61-B559-F6C5D41BAE5A} CORECLR_PROFILER_PATH=$CORECLR_NEW_RELIC_HOME/libNewRelicProfiler.so $@ +NRHOME=${CORECLR_NEWRELIC_HOME:-${CORECLR_NEW_RELIC_HOME:-/usr/local/newrelic-dotnet-agent}} CORECLR_ENABLE_PROFILING=1 CORECLR_PROFILER={36032161-FFC0-4B61-B559-F6C5D41BAE5A} CORECLR_PROFILER_PATH=$NRHOME/libNewRelicProfiler.so $@ diff --git a/build/Linux/build/common/setenv.sh b/build/Linux/build/common/setenv.sh index 578e885e76..e96da8bc78 100644 --- a/build/Linux/build/common/setenv.sh +++ b/build/Linux/build/common/setenv.sh @@ -1,9 +1,11 @@ #!/bin/bash -if [ -z "$CORECLR_NEW_RELIC_HOME" ]; then - echo "CORECLR_NEW_RELIC_HOME is undefined" +if [ -z "$CORECLR_NEWRELIC_HOME" ]; && [ -z "$CORECLR_NEW_RELIC_HOME" ]; then + echo "CORECLR_NEWRELIC_HOME is undefined" else + NRHOME=${CORECLR_NEWRELIC_HOME:-${CORECLR_NEW_RELIC_HOME}} + export CORECLR_ENABLE_PROFILING=1 export CORECLR_PROFILER={36032161-FFC0-4B61-B559-F6C5D41BAE5A} - export CORECLR_PROFILER_PATH=$CORECLR_NEW_RELIC_HOME/libNewRelicProfiler.so + export CORECLR_PROFILER_PATH=$NRHOME/libNewRelicProfiler.so fi \ No newline at end of file diff --git a/build/Linux/build/deb/postinst b/build/Linux/build/deb/postinst index 4216750650..fa3231ca3e 100644 --- a/build/Linux/build/deb/postinst +++ b/build/Linux/build/deb/postinst @@ -45,7 +45,7 @@ fi rm -f $NEWRELIC_HOME/extensions/NewRelic.Providers.Wrapper.Logging.Instrumentation.xml 2> /dev/null rm -f $NEWRELIC_HOME/extensions/NewRelic.Providers.Wrapper.Logging.dll 2> /dev/null -echo "export CORECLR_NEW_RELIC_HOME=${NEWRELIC_HOME}" > /etc/profile.d/${PACKAGE_NAME}-path.sh +echo "export CORECLR_NEWRELIC_HOME=${NEWRELIC_HOME}" > /etc/profile.d/${PACKAGE_NAME}-path.sh source /etc/profile.d/${PACKAGE_NAME}-path.sh chmod o+w $NEWRELIC_HOME/logs @@ -53,4 +53,4 @@ chmod +x $NEWRELIC_HOME/*.sh 2> /dev/null printf "Initialize the New Relic .NET Agent environment variables by 
running:\n" printf "\t\033[1msource /etc/profile.d/${PACKAGE_NAME}-path.sh\033[0m\n" -printf "\t\033[1msource $CORECLR_NEW_RELIC_HOME/setenv.sh\033[0m\n" +printf "\t\033[1msource $CORECLR_NEWRELIC_HOME/setenv.sh\033[0m\n" diff --git a/build/Linux/build/rpm/newrelic-dotnet-agent.spec b/build/Linux/build/rpm/newrelic-dotnet-agent.spec index 0fa7abe334..08681a1ec2 100644 --- a/build/Linux/build/rpm/newrelic-dotnet-agent.spec +++ b/build/Linux/build/rpm/newrelic-dotnet-agent.spec @@ -88,7 +88,7 @@ fi rm -f $NEWRELIC_HOME/extensions/NewRelic.Providers.Wrapper.Logging.Instrumentation.xml 2> /dev/null rm -f $NEWRELIC_HOME/extensions/NewRelic.Providers.Wrapper.Logging.dll 2> /dev/null -echo "export CORECLR_NEW_RELIC_HOME=${NEWRELIC_HOME}" > /etc/profile.d/%{name}-path.sh +echo "export CORECLR_NEWRELIC_HOME=${NEWRELIC_HOME}" > /etc/profile.d/%{name}-path.sh source /etc/profile.d/%{name}-path.sh chmod o+w $NEWRELIC_HOME/logs @@ -96,4 +96,4 @@ chmod +x $NEWRELIC_HOME/*.sh 2> /dev/null printf "Initialize the New Relic .NET Agent environment variables by running:\n" printf "\t\033[1msource /etc/profile.d/%{name}-path.sh\033[0m\n" -printf "\t\033[1msource $CORECLR_NEW_RELIC_HOME/setenv.sh\033[0m\n" +printf "\t\033[1msource $CORECLR_NEWRELIC_HOME/setenv.sh\033[0m\n" diff --git a/build/Packaging/AzureSiteExtension/Content/applicationHost.xdt b/build/Packaging/AzureSiteExtension/Content/applicationHost.xdt index 78da167eb3..5d42192866 100644 --- a/build/Packaging/AzureSiteExtension/Content/applicationHost.xdt +++ b/build/Packaging/AzureSiteExtension/Content/applicationHost.xdt @@ -8,12 +8,12 @@ - + - + diff --git a/build/Packaging/AzureSiteExtension/Content/install.ps1 b/build/Packaging/AzureSiteExtension/Content/install.ps1 index 0beec29618..69c63cde5d 100644 --- a/build/Packaging/AzureSiteExtension/Content/install.ps1 +++ b/build/Packaging/AzureSiteExtension/Content/install.ps1 @@ -320,7 +320,7 @@ try if ($env:NEWRELIC_LICENSEKEY -eq $null -and $env:NEW_RELIC_LICENSE_KEY -eq $null) { - WriteToInstallLog "The environment variable NEW_RELIC_LICENSE_KEY must be set. Please make sure to add it." + WriteToInstallLog "The environment variable NEWRELIC_LICENSE_KEY must be set. Please make sure to add it." } RemoveNewRelicInstallArtifacts "." 
@@ -337,7 +337,7 @@ try "/configuration/system.webServer/runtime/environmentVariables/add[@name='CORECLR_PROFILER']", "/configuration/system.webServer/runtime/environmentVariables/add[@name='CORECLR_PROFILER_PATH_32']", "/configuration/system.webServer/runtime/environmentVariables/add[@name='CORECLR_PROFILER_PATH_64']", - "/configuration/system.webServer/runtime/environmentVariables/add[@name='CORECLR_NEW_RELIC_HOME']") + "/configuration/system.webServer/runtime/environmentVariables/add[@name='CORECLR_NEWRELIC_HOME']") $file = resolve-path(".\applicationHost.xdt") RemoveXmlElements $file $xPaths } diff --git a/build/Packaging/NugetAzureCloudServices/tools/NewRelicHelper.psm1 b/build/Packaging/NugetAzureCloudServices/tools/NewRelicHelper.psm1 index 1b771a6315..233eec09f0 100644 --- a/build/Packaging/NugetAzureCloudServices/tools/NewRelicHelper.psm1 +++ b/build/Packaging/NugetAzureCloudServices/tools/NewRelicHelper.psm1 @@ -244,12 +244,12 @@ function update_azure_service_definition([System.__ComObject] $project){ #Helps Azure Workers find the newrelic.config $variableNHNode = $xml.CreateElement('Variable','http://schemas.microsoft.com/ServiceHosting/2008/10/ServiceDefinition') - $variableNHNode.SetAttribute('name','NEW_RELIC_HOME') + $variableNHNode.SetAttribute('name','NEWRELIC_HOME') $variableNHNode.SetAttribute('value','D:\ProgramData\New Relic\.NET Agent\') #Helps Azure Workers find the NewRelic.Agent.Core.dll $variableNIPNode = $xml.CreateElement('Variable','http://schemas.microsoft.com/ServiceHosting/2008/10/ServiceDefinition') - $variableNIPNode.SetAttribute('name','NEW_RELIC_INSTALL_PATH') + $variableNIPNode.SetAttribute('name','NEWRELIC_INSTALL_PATH') $variableNIPNode.SetAttribute('value','D:\Program Files\New Relic\.NET Agent\') $runtimeEnvironmentNode.AppendChild($variableCEPNode) @@ -390,7 +390,7 @@ function cleanup_azure_service_definition([System.__ComObject] $project){ $runtimeNode = $modified.Runtime if($runtimeNode -ne $null -and $runtimeNode.ChildNodes.Count -gt 0){ - $variableNodes = $runtimeNode.Environment.Variable | where { $_.name -eq "COR_ENABLE_PROFILING" -or $_.name -eq "COR_PROFILER" -or $_.name -eq "NEW_RELIC_HOME" -or $_.name -eq "NEW_RELIC_INSTALL_PATH" } + $variableNodes = $runtimeNode.Environment.Variable | where { $_.name -eq "COR_ENABLE_PROFILING" -or $_.name -eq "COR_PROFILER" -or $_.name -eq "NEW_RELIC_HOME" -or $_.name -eq "NEW_RELIC_INSTALL_PATH" -or $_.name -eq "NEWRELIC_HOME" -or $_.name -eq "NEWRELIC_INSTALL_PATH" } if($variableNodes -ne $null -and $variableNodes.Count -gt 0){ foreach($varNode in $variableNodes){ [Void]$varNode.ParentNode.RemoveChild($varNode) diff --git a/build/Packaging/NugetAzureWebSites-x64/tools/install.ps1 b/build/Packaging/NugetAzureWebSites-x64/tools/install.ps1 index 3154d92952..332e1861e4 100644 --- a/build/Packaging/NugetAzureWebSites-x64/tools/install.ps1 +++ b/build/Packaging/NugetAzureWebSites-x64/tools/install.ps1 @@ -27,5 +27,5 @@ Write-Host "---------------------------------------" Write-Host "COR_ENABLE_PROFILING 1" Write-Host "COR_PROFILER {71DA0A04-7777-4EC6-9643-7D28B46A8A41}" Write-Host "COR_PROFILER_PATH D:\Home\site\wwwroot\newrelic\NewRelic.Profiler.dll" -Write-Host "NEW_RELIC_HOME D:\Home\site\wwwroot\newrelic" -Write-Host "NEW_RELIC_LICENSE_KEY [REPLACE WITH YOUR LICENSE KEY]" +Write-Host "NEWRELIC_HOME D:\Home\site\wwwroot\newrelic" +Write-Host "NEWRELIC_LICENSE_KEY [REPLACE WITH YOUR LICENSE KEY]" diff --git a/build/Packaging/NugetAzureWebSites-x86/tools/install.ps1 
b/build/Packaging/NugetAzureWebSites-x86/tools/install.ps1 index 3154d92952..332e1861e4 100644 --- a/build/Packaging/NugetAzureWebSites-x86/tools/install.ps1 +++ b/build/Packaging/NugetAzureWebSites-x86/tools/install.ps1 @@ -27,5 +27,5 @@ Write-Host "---------------------------------------" Write-Host "COR_ENABLE_PROFILING 1" Write-Host "COR_PROFILER {71DA0A04-7777-4EC6-9643-7D28B46A8A41}" Write-Host "COR_PROFILER_PATH D:\Home\site\wwwroot\newrelic\NewRelic.Profiler.dll" -Write-Host "NEW_RELIC_HOME D:\Home\site\wwwroot\newrelic" -Write-Host "NEW_RELIC_LICENSE_KEY [REPLACE WITH YOUR LICENSE KEY]" +Write-Host "NEWRELIC_HOME D:\Home\site\wwwroot\newrelic" +Write-Host "NEWRELIC_LICENSE_KEY [REPLACE WITH YOUR LICENSE KEY]" diff --git a/docs/development.md b/docs/development.md index abd89c66ff..96da6c9225 100644 --- a/docs/development.md +++ b/docs/development.md @@ -33,8 +33,8 @@ You need to configure the following environment variables for the agent to attac #### Environment variables for .NET Framework ```bash -NEW_RELIC_LICENSE_KEY= -NEW_RELIC_HOME=path\to\home\directory +NEWRELIC_LICENSE_KEY= +NEWRELIC_HOME=path\to\home\directory COR_ENABLE_PROFILING=1 COR_PROFILER={71DA0A04-7777-4EC6-9643-7D28B46A8A41} COR_PROFILER_PATH=path\to\home\directory\NewRelic.Profiler.dll @@ -42,8 +42,8 @@ COR_PROFILER_PATH=path\to\home\directory\NewRelic.Profiler.dll #### Environment variables for .NET Core ```bash -NEW_RELIC_LICENSE_KEY= -CORECLR_NEW_RELIC_HOME=path\to\home\directory +NEWRELIC_LICENSE_KEY= +CORECLR_NEWRELIC_HOME=path\to\home\directory CORECLR_ENABLE_PROFILING=1 CORECLR_PROFILER={36032161-FFC0-4B61-B559-F6C5D41BAE5A} CORECLR_PROFILER_PATH=path\to\home\directory\NewRelic.Profiler.dll diff --git a/src/Agent/Miscellaneous/azure-app-services.md b/src/Agent/Miscellaneous/azure-app-services.md index f63816021e..6e6af1354b 100644 --- a/src/Agent/Miscellaneous/azure-app-services.md +++ b/src/Agent/Miscellaneous/azure-app-services.md @@ -29,7 +29,7 @@ An example of this configuration inside a `.csproj` file looks something like: CORECLR_ENABLE_PROFILING=1 CORECLR_PROFILER={36032161-FFC0-4B61-B559-F6C5D41BAE5A} CORECLR_PROFILER_PATH=D:\Home\site\wwwroot\newrelic\NewRelic.Profiler.dll -CORECLR_NEW_RELIC_HOME=D:\Home\site\wwwroot\newrelic +CORECLR_NEWRELIC_HOME=D:\Home\site\wwwroot\newrelic ``` Note: environment variables for Azure App Services are set in the `Application settings` configuration page for your app, under `App settings`. You can verify the environment variables for your app by browsing to the `Environment` tab of your app's Kudu diagnostic console and then jumping to the `Environment Variables` section, e.g. 
`https://myappname.scm.azurewebsites.net/Env.cshtml#envVariables` diff --git a/src/Agent/MsiInstaller/Installer/Product.wxs b/src/Agent/MsiInstaller/Installer/Product.wxs index 6597abb078..83adb71f46 100644 --- a/src/Agent/MsiInstaller/Installer/Product.wxs +++ b/src/Agent/MsiInstaller/Installer/Product.wxs @@ -761,8 +761,7 @@ SPDX-License-Identifier: Apache-2.0 - - + @@ -798,7 +797,7 @@ SPDX-License-Identifier: Apache-2.0 - + @@ -809,7 +808,7 @@ SPDX-License-Identifier: Apache-2.0 - + @@ -819,7 +818,7 @@ SPDX-License-Identifier: Apache-2.0 - + @@ -832,8 +831,8 @@ SPDX-License-Identifier: Apache-2.0 - - + + @@ -843,8 +842,8 @@ SPDX-License-Identifier: Apache-2.0 - - + + diff --git a/src/Agent/MsiInstaller/InstallerActions/CustomActions.cs b/src/Agent/MsiInstaller/InstallerActions/CustomActions.cs index f80977700b..55e7b419c2 100644 --- a/src/Agent/MsiInstaller/InstallerActions/CustomActions.cs +++ b/src/Agent/MsiInstaller/InstallerActions/CustomActions.cs @@ -444,7 +444,9 @@ public ActionResult ValidateFilesAndFolders() DeleteFile(@"C:\Program Files\New Relic\.NET Agent\netframework\Extensions\NewRelic.Providers.Wrapper.Asp35.dll"); - String newrelicHomePath = Environment.GetEnvironmentVariable("NEW_RELIC_HOME"); + String newrelicHomePath = Environment.GetEnvironmentVariable("NEWRELIC_HOME"); + if (newrelicHomePath != null) DeleteFile(newrelicHomePath + @"\newrelic.xml"); + newrelicHomePath = Environment.GetEnvironmentVariable("NEW_RELIC_HOME"); if (newrelicHomePath != null) DeleteFile(newrelicHomePath + @"\newrelic.xml"); return ActionResult.Success; diff --git a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/README.md b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/README.md index d712402181..a30b0f30a4 100644 --- a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/README.md +++ b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/README.md @@ -204,7 +204,7 @@ You should use `AgentWrapperApi.HandleWrapperException` to deal with exceptions ## Notes on dynamic wrapper assembly loading ## -Wrapper assemblies are loaded dynamically at agent startup. All assemblies found in **[NEW_RELIC_HOME]/Extensions** will be loaded, and all `IWrapper`s found in the assembly will be instantiated and passed to the `WrapperService` (contained inside a `LazyMap`). +Wrapper assemblies are loaded dynamically at agent startup. All assemblies found in **[NEWRELIC_HOME]/Extensions** will be loaded, and all `IWrapper`s found in the assembly will be instantiated and passed to the `WrapperService` (contained inside a `LazyMap`). When an instrumented method is hit, `WrapperService` will ask the `LazyMap` for an appropriate `IWrapper`. The map will return the first wrapper it finds that returns **true** from `IWrapper.CanWrap`, or null if they all return **false**. The resulting wrapper (or null) will be cached in the map which is keyed on the fully qualified method signature. If the wrapper loader returns null, the agent will fall back to using tracers. diff --git a/src/Agent/NewRelic/Profiler/README.md b/src/Agent/NewRelic/Profiler/README.md index 0a82d3de0e..9c376c4a26 100644 --- a/src/Agent/NewRelic/Profiler/README.md +++ b/src/Agent/NewRelic/Profiler/README.md @@ -97,7 +97,7 @@ When instrumentation in the `extensions` directory changes on disk, the profiler The Microsoft defined environment variables like `COR_ENABLE_PROFILING` and `COR_PROFILER_PATH` used to attach the profiler to a process. 
On CoreCLR all of those environment variables are prefixed with `CORECLR` instead of `COR`, ie `CORECLR_ENABLE_PROFILING`. -After the profiler attaches it uses custom environment variables to determine the path to `NewRelic.Agent.Core.dll`. It tries to find the dll in the path specified by `NEW_RELIC_INSTALL_PATH`, or if that is undefined then `NEW_RELIC_HOME`. On CoreCLR, those environment variables are prefixed with `CORECLR_` so that the agent does not try to use the .NET Framework managed agent in a CoreCLR process. +After the profiler attaches it uses custom environment variables to determine the path to `NewRelic.Agent.Core.dll`. It tries to find the dll in the path specified by `NEWRELIC_INSTALL_PATH`, or if that is undefined then `NEWRELIC_HOME`. On CoreCLR, those environment variables are prefixed with `CORECLR_` so that the agent does not try to use the .NET Framework managed agent in a CoreCLR process. ## FAQ From ae1d4220e684192525f13c670436dbf4400012bd Mon Sep 17 00:00:00 2001 From: Marty T <120425148+tippmar-nr@users.noreply.github.com> Date: Mon, 28 Oct 2024 10:43:35 -0500 Subject: [PATCH 04/19] fix: Remove usage of non-thread safe HashSet in AwsSdk pipeline wrappers. Thanks, @gjunge! (#2855) (#2857) --- .../Providers/Wrapper/AwsSdk/AwsSdkPipelineWrapper.cs | 8 ++++++-- .../Providers/Wrapper/AwsSdk/SQSRequestHandler.cs | 8 ++++++-- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AwsSdk/AwsSdkPipelineWrapper.cs b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AwsSdk/AwsSdkPipelineWrapper.cs index e6472ab852..14ede37d8f 100644 --- a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AwsSdk/AwsSdkPipelineWrapper.cs +++ b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AwsSdk/AwsSdkPipelineWrapper.cs @@ -3,6 +3,7 @@ using System.Collections.Generic; using NewRelic.Agent.Api; +using NewRelic.Agent.Extensions.Collections; using NewRelic.Agent.Extensions.Providers.Wrapper; namespace NewRelic.Providers.Wrapper.AwsSdk @@ -12,7 +13,7 @@ public class AwsSdkPipelineWrapper : IWrapper public bool IsTransactionRequired => true; private const string WrapperName = "AwsSdkPipelineWrapper"; - private static HashSet _unsupportedRequestTypes = new(); + private static ConcurrentHashSet _unsupportedRequestTypes = new(); public CanWrapResponse CanWrap(InstrumentedMethodInfo methodInfo) { @@ -54,8 +55,11 @@ public AfterWrappedMethodDelegate BeforeWrappedMethod(InstrumentedMethodCall ins return SQSRequestHandler.HandleSQSRequest(instrumentedMethodCall, agent, transaction, request, isAsync, executionContext); } - if (_unsupportedRequestTypes.Add(requestType)) // log once per unsupported request type + if (!_unsupportedRequestTypes.Contains(requestType)) // log once per unsupported request type + { agent.Logger.Debug($"AwsSdkPipelineWrapper: Unsupported request type: {requestType}. 
Returning NoOp delegate."); + _unsupportedRequestTypes.Add(requestType); + } return Delegates.NoOp; } diff --git a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AwsSdk/SQSRequestHandler.cs b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AwsSdk/SQSRequestHandler.cs index c11701e2c3..45f4fecffb 100644 --- a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AwsSdk/SQSRequestHandler.cs +++ b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AwsSdk/SQSRequestHandler.cs @@ -7,6 +7,7 @@ using System.Threading.Tasks; using NewRelic.Agent.Api; using NewRelic.Agent.Extensions.AwsSdk; +using NewRelic.Agent.Extensions.Collections; using NewRelic.Agent.Extensions.Providers.Wrapper; using NewRelic.Reflection; @@ -15,7 +16,7 @@ namespace NewRelic.Providers.Wrapper.AwsSdk internal static class SQSRequestHandler { private static readonly ConcurrentDictionary> _getRequestResponseFromGeneric = new(); - private static readonly HashSet _unsupportedSQSRequestTypes = []; + private static readonly ConcurrentHashSet _unsupportedSQSRequestTypes = []; public static AfterWrappedMethodDelegate HandleSQSRequest(InstrumentedMethodCall instrumentedMethodCall, IAgent agent, ITransaction transaction, dynamic request, bool isAsync, dynamic executionContext) { @@ -35,8 +36,11 @@ public static AfterWrappedMethodDelegate HandleSQSRequest(InstrumentedMethodCall action = MessageBrokerAction.Purge; break; default: - if (_unsupportedSQSRequestTypes.Add(requestType)) // log once per unsupported request type + if (!_unsupportedSQSRequestTypes.Contains(requestType)) // log once per unsupported request type + { agent.Logger.Debug($"AwsSdkPipelineWrapper: SQS Request type {requestType} is not supported. Returning NoOp delegate."); + _unsupportedSQSRequestTypes.Add(requestType); + } return Delegates.NoOp; } From 2460527c83c0b196329a2e2b61e435cd20cd6dbd Mon Sep 17 00:00:00 2001 From: Alex Hemsath <57361211+nr-ahemsath@users.noreply.github.com> Date: Tue, 29 Oct 2024 09:48:36 -0700 Subject: [PATCH 05/19] feat: Add AWSSDK.DynamoDBv2 instrumentation (#2858) * Bring initial POC to feature branch (#2836) * First pass at DynamoDB support * db.system attribute was not being set correctly --------- Co-authored-by: chynesNR * Clean up POC (#2839) * Remove datastore vendor name from ConnectionInfo in MemcachedHelpers * Get operation name from request type by converting PascalCaseRequest to snake_case * Make operation name cache thread safe (#2841) * Make operation name cache thread safe * Cleaner implementation based on PR feedback * Update src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AwsSdk/DynamoDbRequestHandler.cs Co-authored-by: Chris Ventura <45495992+nrcventura@users.noreply.github.com> --------- Co-authored-by: Chris Ventura <45495992+nrcventura@users.noreply.github.com> * DynamoDB integration tests (#2854) * Rename things in preparation to add another AWS SDK test type * Add project dependencies * More renaming * Initial plumbing created * More cleanup * Forklift exerciser methods from standalone test app * Working tests * Cleanup * Fix port assignment issue seen in CI * Only wait up to two minutes for table to become active * List ports in use in container tests host For temporary troubleshooting * Update tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/AwsSdkExercisers/AwsSdkDynamoDBExerciser.cs Co-authored-by: Marty T <120425148+tippmar-nr@users.noreply.github.com> * Fix port conflict issue plus PR feedback --------- Co-authored-by: Marty T <120425148+tippmar-nr@users.noreply.github.com> 
* Add unit tests for ToSnakeCase() (#2859) Unit tests for ToSnakeCase() --------- Co-authored-by: chynesNR Co-authored-by: Chris Ventura <45495992+nrcventura@users.noreply.github.com> Co-authored-by: Marty T <120425148+tippmar-nr@users.noreply.github.com> --- .../workflows/run_linux_container_tests.yml | 2 +- .../Agent/Core/Api/TransactionBridgeApi.cs | 2 +- .../Core/Segments/DatastoreSegmentData.cs | 3 +- .../IbmDb2ConnectionStringParser.cs | 2 +- .../MsSqlConnectionStringParser.cs | 2 +- .../MySqlConnectionStringParser.cs | 4 +- .../OracleConnectionStringParser.cs | 4 +- .../PostgresConnectionStringParser.cs | 2 +- ...tackExchangeRedisConnectionStringParser.cs | 2 +- .../Parsing/IConnectionInfo.cs | 7 +- .../Parsing/StringsHelper.cs | 29 ++ .../Providers/Wrapper/Constants.cs | 1 + .../Wrapper/AwsSdk/AwsSdkPipelineWrapper.cs | 4 + .../Wrapper/AwsSdk/DynamoDbRequestHandler.cs | 44 +++ .../CosmosDb/ExecuteItemQueryAsyncWrapper.cs | 4 +- .../CosmosDb/RequestInvokerHandlerWrapper.cs | 4 +- .../Wrapper/Elasticsearch/RequestWrapper.cs | 4 +- .../Wrapper/Memcached/MemcachedHelpers.cs | 7 +- .../Wrapper/MongoDb26/MongoDbHelper.cs | 4 +- .../ServiceStackRedis/SendCommandWrapper.cs | 2 +- .../Wrapper/StackExchangeRedis/Common.cs | 2 +- .../StackExchangeRedis2Plus/SessionCache.cs | 4 +- .../AwsSdkExerciser/AwsSdkTestType.cs | 12 - .../AwsSdkDynamoDBExerciser.cs | 250 ++++++++++++++++++ .../AwsSdkSQSExerciser.cs} | 25 +- .../AwsSdkTestApp/AwsSdkTestApp.csproj | 1 + .../Controllers/AwsSdkDynamoDBController.cs | 122 +++++++++ ...dkController.cs => AwsSdkSQSController.cs} | 40 +-- .../SQSReceiverService.cs | 8 +- .../docker-compose-awssdk.yml | 8 + .../ContainerIntegrationTests.sln | 14 +- .../Fixtures/AwsSdkContainerTestFixtures.cs | 79 ++++-- .../Tests/AwsSdk/AwsSdkDynamoDBTest.cs | 120 +++++++++ .../Tests/AwsSdk/AwsSdkSQSTest.cs | 4 +- .../AgentWrapperApiExtensions.cs | 4 +- .../Spans/SpanEventMakerTests.cs | 4 +- .../DatastoreSegmentTransformerTests.cs | 2 +- .../SqlTraceMakerTests.cs | 2 +- .../TestTransactions.cs | 2 +- .../TransactionTraceMakerTests.cs | 2 +- .../Parsing/StringsHelperTest.cs | 26 ++ 41 files changed, 752 insertions(+), 112 deletions(-) create mode 100644 src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AwsSdk/DynamoDbRequestHandler.cs delete mode 100644 tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/AwsSdkExerciser/AwsSdkTestType.cs create mode 100644 tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/AwsSdkExercisers/AwsSdkDynamoDBExerciser.cs rename tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/{AwsSdkExerciser/AwsSdkExerciser.cs => AwsSdkExercisers/AwsSdkSQSExerciser.cs} (93%) create mode 100644 tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/Controllers/AwsSdkDynamoDBController.cs rename tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/Controllers/{AwsSdkController.cs => AwsSdkSQSController.cs} (70%) create mode 100644 tests/Agent/IntegrationTests/ContainerIntegrationTests/Tests/AwsSdk/AwsSdkDynamoDBTest.cs diff --git a/.github/workflows/run_linux_container_tests.yml b/.github/workflows/run_linux_container_tests.yml index d711ed9e61..cd4de53d51 100644 --- a/.github/workflows/run_linux_container_tests.yml +++ b/.github/workflows/run_linux_container_tests.yml @@ -76,6 +76,6 @@ jobs: INTEGRATION_TEST_SECRETS: ${{ secrets.TEST_SECRETS }} run: | echo $INTEGRATION_TEST_SECRETS | dotnet user-secrets set --project ${{ env.integration_tests_shared_project }} - + - name: Build & Run Linux Container 
Integration Tests run: dotnet test ./tests/Agent/IntegrationTests/ContainerIntegrationTests/ContainerIntegrationTests.csproj --framework net8.0 diff --git a/src/Agent/NewRelic/Agent/Core/Api/TransactionBridgeApi.cs b/src/Agent/NewRelic/Agent/Core/Api/TransactionBridgeApi.cs index 78d8e8cb4b..915ff60bb6 100644 --- a/src/Agent/NewRelic/Agent/Core/Api/TransactionBridgeApi.cs +++ b/src/Agent/NewRelic/Agent/Core/Api/TransactionBridgeApi.cs @@ -226,7 +226,7 @@ public ISegment StartDatastoreSegment(string vendor, string model, string operat var method = new Method(typeof(object), "StartDatastoreSegment", string.Empty); var methodCall = new MethodCall(method, null, null, false); var parsedSqlStatement = new ParsedSqlStatement(DatastoreVendor.Other, model, operation); - var connectionInfo = new ConnectionInfo(vendor.ToLower(), host, portPathOrID, databaseName); + var connectionInfo = new ConnectionInfo(host, portPathOrID, databaseName); return _transaction.StartDatastoreSegment( methodCall: methodCall, parsedSqlStatement: parsedSqlStatement, diff --git a/src/Agent/NewRelic/Agent/Core/Segments/DatastoreSegmentData.cs b/src/Agent/NewRelic/Agent/Core/Segments/DatastoreSegmentData.cs index 8a445aca87..a1afd2162e 100644 --- a/src/Agent/NewRelic/Agent/Core/Segments/DatastoreSegmentData.cs +++ b/src/Agent/NewRelic/Agent/Core/Segments/DatastoreSegmentData.cs @@ -31,7 +31,6 @@ public class DatastoreSegmentData : AbstractSegmentData, IDatastoreSegmentData public DatastoreVendor DatastoreVendorName => _parsedSqlStatement.DatastoreVendor; public string Model => _parsedSqlStatement.Model; public string CommandText { get; set; } - public string Vendor => _connectionInfo.Vendor; public string Host => _connectionInfo.Host; public int? Port => _connectionInfo.Port; public string PathOrId => _connectionInfo.PathOrId; @@ -222,7 +221,7 @@ public override void SetSpanTypeSpecificAttributes(SpanAttributeValueCollection AttribDefs.DbCollection.TrySetValue(attribVals, _parsedSqlStatement.Model); } - AttribDefs.DbSystem.TrySetValue(attribVals, Vendor); + AttribDefs.DbSystem.TrySetValue(attribVals, DatastoreVendorName.ToKnownName()); AttribDefs.DbInstance.TrySetValue(attribVals, DatabaseName); AttribDefs.DbOperation.TrySetValue(attribVals, Operation); AttribDefs.PeerAddress.TrySetValue(attribVals, $"{Host}:{PortPathOrId}"); diff --git a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/ConnectionString/IbmDb2ConnectionStringParser.cs b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/ConnectionString/IbmDb2ConnectionStringParser.cs index 8cf3314aaf..9bca8cc810 100644 --- a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/ConnectionString/IbmDb2ConnectionStringParser.cs +++ b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/ConnectionString/IbmDb2ConnectionStringParser.cs @@ -27,7 +27,7 @@ public ConnectionInfo GetConnectionInfo(string utilizationHostName) var portPathOrId = ParsePortPathOrId(); var databaseName = ConnectionStringParserHelper.GetKeyValuePair(_connectionStringBuilder, _databaseNameKeys)?.Value; - return new ConnectionInfo(DatastoreVendor.IBMDB2.ToKnownName(), host, portPathOrId, databaseName); + return new ConnectionInfo(host, portPathOrId, databaseName); } private string ParseHost() diff --git a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/ConnectionString/MsSqlConnectionStringParser.cs 
b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/ConnectionString/MsSqlConnectionStringParser.cs index 953acdce3d..0dd99fb99c 100644 --- a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/ConnectionString/MsSqlConnectionStringParser.cs +++ b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/ConnectionString/MsSqlConnectionStringParser.cs @@ -27,7 +27,7 @@ public ConnectionInfo GetConnectionInfo(string utilizationHostName) var portPathOrId = ParsePortPathOrId(); var databaseName = ConnectionStringParserHelper.GetKeyValuePair(_connectionStringBuilder, _databaseNameKeys)?.Value; var instanceName = ParseInstanceName(); - return new ConnectionInfo(DatastoreVendor.MySQL.ToKnownName(), host, portPathOrId, databaseName, instanceName); + return new ConnectionInfo(host, portPathOrId, databaseName, instanceName); } private string ParseHost() diff --git a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/ConnectionString/MySqlConnectionStringParser.cs b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/ConnectionString/MySqlConnectionStringParser.cs index 9eac69b940..39079ea679 100644 --- a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/ConnectionString/MySqlConnectionStringParser.cs +++ b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/ConnectionString/MySqlConnectionStringParser.cs @@ -35,7 +35,7 @@ public ConnectionInfo GetConnectionInfo(string utilizationHostName) var port = ConnectionStringParserHelper.GetKeyValuePair(_connectionStringBuilder, _portKeys)?.Value; if (port == null && host != null) { - return new ConnectionInfo(DatastoreVendor.MySQL.ToKnownName(), host, "default", databaseName); + return new ConnectionInfo(host, "default", databaseName); } else { @@ -44,7 +44,7 @@ public ConnectionInfo GetConnectionInfo(string utilizationHostName) { portNum = -1; } - return new ConnectionInfo(DatastoreVendor.MySQL.ToKnownName(), host, portNum, databaseName); + return new ConnectionInfo(host, portNum, databaseName); } diff --git a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/ConnectionString/OracleConnectionStringParser.cs b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/ConnectionString/OracleConnectionStringParser.cs index 7d85b66da7..76294180a0 100644 --- a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/ConnectionString/OracleConnectionStringParser.cs +++ b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/ConnectionString/OracleConnectionStringParser.cs @@ -29,14 +29,14 @@ public ConnectionInfo GetConnectionInfo(string utilizationHostName) var portStr = ParsePortString(); if (string.IsNullOrEmpty(portStr)) { - return new ConnectionInfo(DatastoreVendor.Oracle.ToKnownName(), host, "default", null); + return new ConnectionInfo(host, "default", null); } int port; if (!int.TryParse(portStr, out port)) { port = -1; } - return new ConnectionInfo(DatastoreVendor.Oracle.ToKnownName(), host, port, null); + return new ConnectionInfo(host, port, null); } private string ParseHost() diff --git a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/ConnectionString/PostgresConnectionStringParser.cs b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/ConnectionString/PostgresConnectionStringParser.cs index e50094fde5..f9029981e1 100644 --- 
a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/ConnectionString/PostgresConnectionStringParser.cs +++ b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/ConnectionString/PostgresConnectionStringParser.cs @@ -35,7 +35,7 @@ public ConnectionInfo GetConnectionInfo(string utilizationHostName) portNum = -1; } - return new ConnectionInfo(DatastoreVendor.Postgres.ToKnownName(), host, portNum, databaseName); + return new ConnectionInfo(host, portNum, databaseName); } } } diff --git a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/ConnectionString/StackExchangeRedisConnectionStringParser.cs b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/ConnectionString/StackExchangeRedisConnectionStringParser.cs index bb79178666..cdae23860d 100644 --- a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/ConnectionString/StackExchangeRedisConnectionStringParser.cs +++ b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/ConnectionString/StackExchangeRedisConnectionStringParser.cs @@ -44,7 +44,7 @@ public ConnectionInfo GetConnectionInfo(string utilizationHostName) { portNum = -1; } - return new ConnectionInfo(DatastoreVendor.Redis.ToKnownName(), ConnectionStringParserHelper.NormalizeHostname(hostPortPair[0], utilizationHostName), portNum, null); + return new ConnectionInfo(ConnectionStringParserHelper.NormalizeHostname(hostPortPair[0], utilizationHostName), portNum, null); } return new ConnectionInfo(null, null, null, null); diff --git a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/IConnectionInfo.cs b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/IConnectionInfo.cs index 7b1a4bbc3e..27062a25df 100644 --- a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/IConnectionInfo.cs +++ b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/IConnectionInfo.cs @@ -5,9 +5,8 @@ namespace NewRelic.Agent.Extensions.Parsing { public class ConnectionInfo { - public ConnectionInfo(string vendor, string host, int port, string databaseName, string instanceName = null) + public ConnectionInfo(string host, int port, string databaseName, string instanceName = null) { - Vendor = vendor; Host = ValueOrUnknown(host); if (port >= 0) { @@ -18,9 +17,8 @@ public ConnectionInfo(string vendor, string host, int port, string databaseName, InstanceName = instanceName; } - public ConnectionInfo(string vendor, string host, string pathOrId, string databaseName, string instanceName = null) + public ConnectionInfo(string host, string pathOrId, string databaseName, string instanceName = null) { - Vendor = vendor; Host = ValueOrUnknown(host); Port = null; PathOrId = ValueOrUnknown(pathOrId); @@ -33,7 +31,6 @@ private static string ValueOrUnknown(string value) return string.IsNullOrEmpty(value) ? "unknown" : value; } - public string Vendor { get; private set; } public string Host { get; private set; } public string PortPathOrId { get => (Port != null) ? Port.ToString() : PathOrId; } public int? 
Port { get; private set; } = null; diff --git a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/StringsHelper.cs b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/StringsHelper.cs index 30d632680e..83e3dff3ec 100644 --- a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/StringsHelper.cs +++ b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Parsing/StringsHelper.cs @@ -119,5 +119,34 @@ public static string RemoveBracketsQuotesParenthesis(string value) return value; } + + public static string ToSnakeCase(this string text) + { + if (text == null) + { + throw new ArgumentNullException(nameof(text)); + } + if (text.Length < 2) + { + return text.ToLowerInvariant(); + } + var sb = new StringBuilder(); + sb.Append(char.ToLowerInvariant(text[0])); + for (int i = 1; i < text.Length; ++i) + { + char c = text[i]; + if (char.IsUpper(c)) + { + sb.Append('_'); + sb.Append(char.ToLowerInvariant(c)); + } + else + { + sb.Append(c); + } + } + return sb.ToString(); + } + } } diff --git a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Providers/Wrapper/Constants.cs b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Providers/Wrapper/Constants.cs index dbdbc5e7e4..116c412f80 100644 --- a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Providers/Wrapper/Constants.cs +++ b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Providers/Wrapper/Constants.cs @@ -80,6 +80,7 @@ public enum DatastoreVendor //SQLite, CosmosDB, Elasticsearch, + DynamoDB, Other } diff --git a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AwsSdk/AwsSdkPipelineWrapper.cs b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AwsSdk/AwsSdkPipelineWrapper.cs index 14ede37d8f..33c19c9514 100644 --- a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AwsSdk/AwsSdkPipelineWrapper.cs +++ b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AwsSdk/AwsSdkPipelineWrapper.cs @@ -54,6 +54,10 @@ public AfterWrappedMethodDelegate BeforeWrappedMethod(InstrumentedMethodCall ins { return SQSRequestHandler.HandleSQSRequest(instrumentedMethodCall, agent, transaction, request, isAsync, executionContext); } + else if (requestType.StartsWith("Amazon.DynamoDBv2")) + { + return DynamoDbRequestHandler.HandleDynamoDbRequest(instrumentedMethodCall, agent, transaction, request, isAsync, executionContext); + } if (!_unsupportedRequestTypes.Contains(requestType)) // log once per unsupported request type { diff --git a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AwsSdk/DynamoDbRequestHandler.cs b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AwsSdk/DynamoDbRequestHandler.cs new file mode 100644 index 0000000000..023ee2281e --- /dev/null +++ b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AwsSdk/DynamoDbRequestHandler.cs @@ -0,0 +1,44 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +using System.Collections.Concurrent; +using System.Threading.Tasks; +using NewRelic.Agent.Api; +using NewRelic.Agent.Extensions.Parsing; +using NewRelic.Agent.Extensions.Providers.Wrapper; + +namespace NewRelic.Providers.Wrapper.AwsSdk +{ + internal static class DynamoDbRequestHandler + { + + private static ConcurrentDictionary _operationNameCache = new ConcurrentDictionary(); + + public static AfterWrappedMethodDelegate HandleDynamoDbRequest(InstrumentedMethodCall instrumentedMethodCall, IAgent agent, ITransaction transaction, dynamic request, bool isAsync, dynamic executionContext) + { + var requestType = request.GetType().Name as string; + + string model; + string operation; + + // PutItemRequest => put_item, + // CreateTableRequest => create_table, etc. + operation = _operationNameCache.GetOrAdd(requestType, GetOperationNameFromRequestType); + + // Even though there is no common interface they all implement, every Request type I checked + // has a TableName property + model = request.TableName; + + var segment = transaction.StartDatastoreSegment(instrumentedMethodCall.MethodCall, new ParsedSqlStatement(DatastoreVendor.DynamoDB, model, operation), isLeaf: true); + return isAsync ? + Delegates.GetAsyncDelegateFor(agent, segment) + : + Delegates.GetDelegateFor(segment); + } + + private static string GetOperationNameFromRequestType(string requestType) + { + return requestType.Replace("Request", string.Empty).ToSnakeCase(); + } + } +} diff --git a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/CosmosDb/ExecuteItemQueryAsyncWrapper.cs b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/CosmosDb/ExecuteItemQueryAsyncWrapper.cs index d93ab837ad..57d38b613e 100644 --- a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/CosmosDb/ExecuteItemQueryAsyncWrapper.cs +++ b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/CosmosDb/ExecuteItemQueryAsyncWrapper.cs @@ -1,4 +1,4 @@ -// Copyright 2020 New Relic, Inc. All rights reserved. +// Copyright 2020 New Relic, Inc. All rights reserved. // SPDX-License-Identifier: Apache-2.0 using System; @@ -78,7 +78,7 @@ public AfterWrappedMethodDelegate BeforeWrappedMethod(InstrumentedMethodCall ins var segment = transaction.StartDatastoreSegment( instrumentedMethodCall.MethodCall, new ParsedSqlStatement(DatastoreVendor.CosmosDB, model, operation), - connectionInfo: endpoint != null ? new ConnectionInfo(DatastoreVendor.CosmosDB.ToKnownName(), endpoint.Host, endpoint.Port, databaseName) : new ConnectionInfo(string.Empty, string.Empty, string.Empty, databaseName), + connectionInfo: endpoint != null ? new ConnectionInfo(endpoint.Host, endpoint.Port, databaseName) : new ConnectionInfo(string.Empty, string.Empty, string.Empty, databaseName), commandText : querySpec != null ? _queryGetter.Invoke(querySpec) : string.Empty, isLeaf: true); diff --git a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/CosmosDb/RequestInvokerHandlerWrapper.cs b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/CosmosDb/RequestInvokerHandlerWrapper.cs index 2f081674c1..8ae4c9e4d7 100644 --- a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/CosmosDb/RequestInvokerHandlerWrapper.cs +++ b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/CosmosDb/RequestInvokerHandlerWrapper.cs @@ -1,4 +1,4 @@ -// Copyright 2020 New Relic, Inc. All rights reserved. +// Copyright 2020 New Relic, Inc. All rights reserved. 
// SPDX-License-Identifier: Apache-2.0 using System; @@ -67,7 +67,7 @@ public AfterWrappedMethodDelegate BeforeWrappedMethod(InstrumentedMethodCall ins var segment = transaction.StartDatastoreSegment( instrumentedMethodCall.MethodCall, new ParsedSqlStatement(DatastoreVendor.CosmosDB, model, operation), - connectionInfo: endpoint != null ? new ConnectionInfo(DatastoreVendor.CosmosDB.ToKnownName(), endpoint.Host, endpoint.Port, databaseName) : new ConnectionInfo(string.Empty, string.Empty, string.Empty, databaseName), + connectionInfo: endpoint != null ? new ConnectionInfo(endpoint.Host, endpoint.Port, databaseName) : new ConnectionInfo(string.Empty, string.Empty, string.Empty, databaseName), isLeaf: true); return Delegates.GetAsyncDelegateFor(agent, segment); diff --git a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/Elasticsearch/RequestWrapper.cs b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/Elasticsearch/RequestWrapper.cs index f44c1a7fd7..4ab8296e8b 100644 --- a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/Elasticsearch/RequestWrapper.cs +++ b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/Elasticsearch/RequestWrapper.cs @@ -68,7 +68,7 @@ public AfterWrappedMethodDelegate BeforeWrappedMethod(InstrumentedMethodCall ins } var transactionExperimental = transaction.GetExperimentalApi(); - var datastoreSegmentData = transactionExperimental.CreateDatastoreSegmentData(new ParsedSqlStatement(DatastoreVendor.Elasticsearch, model, operation), new ConnectionInfo(DatastoreVendor.Elasticsearch.ToKnownName(), string.Empty, string.Empty, string.Empty), string.Empty, null); + var datastoreSegmentData = transactionExperimental.CreateDatastoreSegmentData(new ParsedSqlStatement(DatastoreVendor.Elasticsearch, model, operation), new ConnectionInfo(string.Empty, string.Empty, string.Empty), string.Empty, null); var segment = transactionExperimental.StartSegment(instrumentedMethodCall.MethodCall); segment.GetExperimentalApi().SetSegmentData(datastoreSegmentData).MakeLeaf(); @@ -271,7 +271,7 @@ private static void SetUriOnDatastoreSegment(ISegment segment, Uri uri) { var segmentExperimentalApi = segment.GetExperimentalApi(); var data = segmentExperimentalApi.SegmentData as IDatastoreSegmentData; - data.SetConnectionInfo(new ConnectionInfo(DatastoreVendor.Elasticsearch.ToKnownName(), uri.Host, uri.Port, string.Empty)); + data.SetConnectionInfo(new ConnectionInfo(uri.Host, uri.Port, string.Empty)); segmentExperimentalApi.SetSegmentData(data); } diff --git a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/Memcached/MemcachedHelpers.cs b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/Memcached/MemcachedHelpers.cs index 1411d438f4..2252777742 100644 --- a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/Memcached/MemcachedHelpers.cs +++ b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/Memcached/MemcachedHelpers.cs @@ -4,7 +4,6 @@ using System; using NewRelic.Agent.Api; using NewRelic.Agent.Extensions.Parsing; -using NewRelic.Agent.Extensions.Providers.Wrapper; using NewRelic.Reflection; namespace NewRelic.Providers.Wrapper.Memcached @@ -27,7 +26,7 @@ public static ConnectionInfo GetConnectionInfo(string key, object target, IAgent { if (_hasGetServerFailed) { - return new ConnectionInfo(DatastoreVendor.Memcached.ToKnownName(), null, -1, null); + return new ConnectionInfo(null, -1, null); } try @@ -67,13 +66,13 @@ public static ConnectionInfo GetConnectionInfo(string key, object target, IAgent _portGetter ??= 
VisibilityBypasser.Instance.GeneratePropertyAccessor(endpointType, "Port"); int? port = _portGetter(endpoint); - return new ConnectionInfo(DatastoreVendor.Memcached.ToKnownName(), address, port.HasValue ? port.Value : -1, null); + return new ConnectionInfo(address, port.HasValue ? port.Value : -1, null); } catch (Exception exception) { agent.Logger.Warn(exception, "Unable to get Memcached server address/port, likely to due to type differences. Server address/port will not be available."); _hasGetServerFailed = true; - return new ConnectionInfo(DatastoreVendor.Memcached.ToKnownName(), null, -1, null); + return new ConnectionInfo(null, -1, null); } } } diff --git a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/MongoDb26/MongoDbHelper.cs b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/MongoDb26/MongoDbHelper.cs index fda21adc02..d7a24943c1 100644 --- a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/MongoDb26/MongoDbHelper.cs +++ b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/MongoDb26/MongoDbHelper.cs @@ -128,7 +128,7 @@ public static ConnectionInfo GetConnectionInfoFromCursor(object asyncCursor, obj var databaseName = GetDatabaseNameFromCollectionNamespace(collectionNamespace); - return new ConnectionInfo(DatastoreVendor.MongoDB.ToKnownName(), host, port, databaseName); + return new ConnectionInfo(host, port, databaseName); } public static ConnectionInfo GetConnectionInfoFromDatabase(object database, string utilizationHostName) @@ -146,7 +146,7 @@ public static ConnectionInfo GetConnectionInfoFromDatabase(object database, stri host = ConnectionStringParserHelper.NormalizeHostname(rawHost, utilizationHostName); } - return new ConnectionInfo(DatastoreVendor.MongoDB.ToKnownName(), host, port, databaseName); + return new ConnectionInfo(host, port, databaseName); } private static IList GetServersFromDatabase(object database) diff --git a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/ServiceStackRedis/SendCommandWrapper.cs b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/ServiceStackRedis/SendCommandWrapper.cs index c51be5c3b4..6ec86114c9 100644 --- a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/ServiceStackRedis/SendCommandWrapper.cs +++ b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/ServiceStackRedis/SendCommandWrapper.cs @@ -82,7 +82,7 @@ public AfterWrappedMethodDelegate BeforeWrappedMethod(InstrumentedMethodCall ins portNum = -1; } var databaseName = TryGetPropertyName(PropertyDatabaseName, contextObject); - var connectionInfo = new ConnectionInfo(DatastoreVendor.Redis.ToKnownName(), host, portNum, databaseName); + var connectionInfo = new ConnectionInfo(host, portNum, databaseName); var segment = transaction.StartDatastoreSegment(instrumentedMethodCall.MethodCall, ParsedSqlStatement.FromOperation(DatastoreVendor.Redis, operation), connectionInfo); diff --git a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/StackExchangeRedis/Common.cs b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/StackExchangeRedis/Common.cs index b840c9064b..fc9a1ba478 100644 --- a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/StackExchangeRedis/Common.cs +++ b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/StackExchangeRedis/Common.cs @@ -122,7 +122,7 @@ public static ConnectionInfo GetConnectionInfoFromConnectionMultiplexer(MethodCa return null; } - return new ConnectionInfo(DatastoreVendor.Redis.ToKnownName(), host, port, null, null); + return new ConnectionInfo(host, port, null, null); } private static string 
GetCommandNameFromEnumValue(Enum commandValue) diff --git a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/StackExchangeRedis2Plus/SessionCache.cs b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/StackExchangeRedis2Plus/SessionCache.cs index 7e37c1a373..91f8109119 100644 --- a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/StackExchangeRedis2Plus/SessionCache.cs +++ b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/StackExchangeRedis2Plus/SessionCache.cs @@ -121,14 +121,14 @@ private ConnectionInfo GetConnectionInfo(EndPoint endpoint) { var port = dnsEndpoint.Port; var host = ConnectionStringParserHelper.NormalizeHostname(dnsEndpoint.Host, _agent.Configuration.UtilizationHostName); - return new ConnectionInfo(DatastoreVendor.Redis.ToKnownName(), host, port, null, null); + return new ConnectionInfo(host, port, null, null); } if (endpoint is IPEndPoint ipEndpoint) { var port = ipEndpoint.Port; var host = ConnectionStringParserHelper.NormalizeHostname(ipEndpoint.Address.ToString(), _agent.Configuration.UtilizationHostName); - return new ConnectionInfo(DatastoreVendor.Redis.ToKnownName(), host, port, null, null); + return new ConnectionInfo(host, port, null, null); } return null; diff --git a/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/AwsSdkExerciser/AwsSdkTestType.cs b/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/AwsSdkExerciser/AwsSdkTestType.cs deleted file mode 100644 index 847a2a9581..0000000000 --- a/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/AwsSdkExerciser/AwsSdkTestType.cs +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2020 New Relic, Inc. All rights reserved. -// SPDX-License-Identifier: Apache-2.0 - -namespace AwsSdkTestApp.AwsSdkExerciser; - -public enum AwsSdkTestType -{ - SQS, - SNS, - SES, - // etc -} diff --git a/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/AwsSdkExercisers/AwsSdkDynamoDBExerciser.cs b/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/AwsSdkExercisers/AwsSdkDynamoDBExerciser.cs new file mode 100644 index 0000000000..e07fe0aa84 --- /dev/null +++ b/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/AwsSdkExercisers/AwsSdkDynamoDBExerciser.cs @@ -0,0 +1,250 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +using Amazon.DynamoDBv2.Model; +using Amazon.DynamoDBv2; +using System.Runtime.CompilerServices; +using System.Threading.Tasks; +using System; +using System.Collections.Generic; +using Amazon.Runtime; +using System.Threading; + +namespace AwsSdkTestApp.AwsSdkExercisers +{ + public class AwsSdkDynamoDBExerciser : IDisposable + { + private readonly AmazonDynamoDBClient _amazonDynamoDBClient; + + public AwsSdkDynamoDBExerciser() + { + _amazonDynamoDBClient = GetDynamoDBClient(); + } + + + private AmazonDynamoDBClient GetDynamoDBClient() + { + + AmazonDynamoDBConfig clientConfig = new AmazonDynamoDBConfig(); + // Set the endpoint URL + clientConfig.ServiceURL = "http://dynamodb:8000"; // port must match what is set in docker compose + clientConfig.AuthenticationRegion = "us-west-2"; + var creds = new BasicAWSCredentials("xxx", "xxx"); + AmazonDynamoDBClient client = new AmazonDynamoDBClient(creds, clientConfig); + + return client; + } + + #region Table Operations + [MethodImpl(MethodImplOptions.NoOptimization | MethodImplOptions.NoInlining)] + public async Task CreateTableAsync(string name) + { + var response = await _amazonDynamoDBClient.CreateTableAsync(new CreateTableRequest + { + TableName = name, + AttributeDefinitions = new List() + { + new AttributeDefinition + { + AttributeName = "title", + AttributeType = ScalarAttributeType.S, + }, + new AttributeDefinition + { + AttributeName = "year", + AttributeType = ScalarAttributeType.N, + }, + }, + KeySchema = new List() + { + new KeySchemaElement + { + AttributeName = "year", + KeyType = KeyType.HASH, + }, + new KeySchemaElement + { + AttributeName = "title", + KeyType = KeyType.RANGE, + }, + }, + ProvisionedThroughput = new ProvisionedThroughput + { + ReadCapacityUnits = 5, + WriteCapacityUnits = 5, + }, + }); + if (response.HttpStatusCode != System.Net.HttpStatusCode.OK) + { + Console.WriteLine($"Got bad http status code: {response.HttpStatusCode}"); + } + + // Wait until the table is ACTIVE and then report success. 
+ Console.Write("Waiting for table to become active..."); + + var request = new DescribeTableRequest + { + TableName = response.TableDescription.TableName, + }; + + TableStatus status; + + int sleepDuration = 2000; + + var startTime = DateTime.Now; + do + { + await Task.Delay(sleepDuration); + + var describeTableResponse = await _amazonDynamoDBClient.DescribeTableAsync(request); + status = describeTableResponse.Table.TableStatus; + + Console.Write("."); + } + while (status != "ACTIVE" && DateTime.Now - startTime < TimeSpan.FromMinutes(2)); + + return status == TableStatus.ACTIVE; + } + + [MethodImpl(MethodImplOptions.NoOptimization | MethodImplOptions.NoInlining)] + public async Task DeleteTableAsync(string tableName) + { + var request = new DeleteTableRequest + { + TableName = tableName + }; + var response = await _amazonDynamoDBClient.DeleteTableAsync(request); + + return response.HttpStatusCode == System.Net.HttpStatusCode.OK; + } + #endregion + + #region CRUD operations + [MethodImpl(MethodImplOptions.NoOptimization | MethodImplOptions.NoInlining)] + public async Task PutItemAsync(string tableName, string title, string year) + { + var item = new Dictionary + { + ["title"] = new AttributeValue { S = title }, + ["year"] = new AttributeValue { N = year }, + }; + + var request = new PutItemRequest + { + TableName = tableName, + Item = item, + }; + + var response = await _amazonDynamoDBClient.PutItemAsync(request); + return response.HttpStatusCode == System.Net.HttpStatusCode.OK; + } + + [MethodImpl(MethodImplOptions.NoOptimization | MethodImplOptions.NoInlining)] + public async Task GetItemAsync(string tableName, string title, string year) + { + var request = new GetItemRequest + { + TableName = tableName, + Key = new Dictionary() + { { "title", new AttributeValue { S = title } }, + { "year", new AttributeValue { N = year } } + } + }; + var response = await _amazonDynamoDBClient.GetItemAsync(request); + + // Check the response. 
+ var result = response.Item; + + Console.WriteLine($"GetItemAsync: response.Item['year'] == {result["year"]}"); + return response.HttpStatusCode == System.Net.HttpStatusCode.OK; + } + + [MethodImpl(MethodImplOptions.NoOptimization | MethodImplOptions.NoInlining)] + public async Task UpdateItemAsync(string tableName, string title, string year) + { + var request = new UpdateItemRequest + { + TableName = tableName, + Key = new Dictionary() + { { "title", new AttributeValue { S = title } }, + { "year", new AttributeValue { N = year } } + }, + ExpressionAttributeNames = new Dictionary + { + {"#NA", "Rating" } + }, + ExpressionAttributeValues = new Dictionary() + { + { ":new", new AttributeValue { N = "5" } } + }, + UpdateExpression = "SET #NA = :new" + }; + var response = await _amazonDynamoDBClient.UpdateItemAsync(request); + + return response.HttpStatusCode == System.Net.HttpStatusCode.OK; + } + + [MethodImpl(MethodImplOptions.NoOptimization | MethodImplOptions.NoInlining)] + public async Task DeleteItemAsync(string tableName, string title, string year) + { + var request = new DeleteItemRequest + { + TableName = tableName, + Key = new Dictionary() + { { "title", new AttributeValue { S = title } }, + { "year", new AttributeValue { N = year } } + }, + }; + var response = await _amazonDynamoDBClient.DeleteItemAsync(request); + + return response.HttpStatusCode == System.Net.HttpStatusCode.OK; + } + #endregion + + #region Query Operations + [MethodImpl(MethodImplOptions.NoOptimization | MethodImplOptions.NoInlining)] + public async Task QueryAsync(string tableName, string title, string year) + { + var request = new QueryRequest + { + TableName = tableName, + KeyConditionExpression = "#title = :title and #year = :year", + ExpressionAttributeNames = new Dictionary() + { + {"#title", "title" }, + {"#year", "year" } + }, + ExpressionAttributeValues = new Dictionary() + { + {":title", new AttributeValue { S = title } }, + {":year" , new AttributeValue { N = year } } + } + }; + var response = await _amazonDynamoDBClient.QueryAsync(request); + + Console.WriteLine($"QueryAsync: number of item returned = {response.Items.Count}"); + return response.HttpStatusCode == System.Net.HttpStatusCode.OK; + } + + [MethodImpl(MethodImplOptions.NoOptimization | MethodImplOptions.NoInlining)] + public async Task ScanAsync(string tableName) + { + var request = new ScanRequest + { + TableName = tableName, + Limit = 10 + + }; + var response = await _amazonDynamoDBClient.ScanAsync(request); + + Console.WriteLine($"ScanAsync: number of item returned = {response.Items.Count}"); + return response.HttpStatusCode == System.Net.HttpStatusCode.OK; + } + #endregion + + public void Dispose() + { + _amazonDynamoDBClient?.Dispose(); + } + } +} diff --git a/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/AwsSdkExerciser/AwsSdkExerciser.cs b/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/AwsSdkExercisers/AwsSdkSQSExerciser.cs similarity index 93% rename from tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/AwsSdkExerciser/AwsSdkExerciser.cs rename to tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/AwsSdkExercisers/AwsSdkSQSExerciser.cs index e9b626eefc..ffef934382 100644 --- a/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/AwsSdkExerciser/AwsSdkExerciser.cs +++ b/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/AwsSdkExercisers/AwsSdkSQSExerciser.cs @@ -9,26 +9,19 @@ using System.Linq; using System.Collections.Generic; -namespace 
AwsSdkTestApp.AwsSdkExerciser +namespace AwsSdkTestApp.AwsSdkExercisers { - public class AwsSdkExerciser : IDisposable + public class AwsSdkSQSExerciser : IDisposable { - public AwsSdkExerciser(AwsSdkTestType testType) - { - switch (testType) - { - case AwsSdkTestType.SQS: - _amazonSqsClient = GetSqsClient(); - break; - default: - throw new ArgumentException("Invalid test type"); - } - } - #region SQS - private readonly AmazonSQSClient _amazonSqsClient; private string _sqsQueueUrl = null; + public AwsSdkSQSExerciser() + { + _amazonSqsClient = GetSqsClient(); + } + + private AmazonSQSClient GetSqsClient() { // configure the client to use LocalStack @@ -190,8 +183,6 @@ public void SQS_SetQueueUrl(string messageQueueUrl) _sqsQueueUrl = messageQueueUrl; } - #endregion - public void Dispose() { _amazonSqsClient?.Dispose(); diff --git a/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/AwsSdkTestApp.csproj b/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/AwsSdkTestApp.csproj index 78f2f4f55e..0b57c9eb29 100644 --- a/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/AwsSdkTestApp.csproj +++ b/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/AwsSdkTestApp.csproj @@ -15,6 +15,7 @@ runtime; build; native; contentfiles; analyzers; buildtransitive + diff --git a/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/Controllers/AwsSdkDynamoDBController.cs b/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/Controllers/AwsSdkDynamoDBController.cs new file mode 100644 index 0000000000..8ec8329568 --- /dev/null +++ b/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/Controllers/AwsSdkDynamoDBController.cs @@ -0,0 +1,122 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +using System.ComponentModel.DataAnnotations; +using System.Threading.Tasks; +using AwsSdkTestApp.AwsSdkExercisers; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Extensions.Logging; + +namespace AwsSdkTestApp.Controllers +{ + [ApiController] + [Route("[controller]")] + public class AwsSdkDynamoDBController : ControllerBase + { + private readonly ILogger _logger; + + public AwsSdkDynamoDBController(ILogger logger) + { + _logger = logger; + + _logger.LogInformation("Created AwsSdkDynamoDBController"); + } + + // GET: /AwsSdkDynamoDB/CreateTableAsync?tableName=tableName + [HttpGet("CreateTableAsync")] + public async Task CreateTableAsync([Required] string tableName) + { + _logger.LogInformation("Starting DynamoDB CreateTableAsync {tableName}", tableName); + + using var awsSdkDynamoDBExerciser = new AwsSdkDynamoDBExerciser(); + + await awsSdkDynamoDBExerciser.CreateTableAsync(tableName); + _logger.LogInformation("Finished CreateTableAsync for {tableName}", tableName); + } + + // GET: /AwsSdkDynamoDB/DeleteTableAsync?tableName=tableName + [HttpGet("DeleteTableAsync")] + public async Task DeleteTableAsync([Required] string tableName) + { + _logger.LogInformation("Starting DynamoDB DeleteTableAsync {tableName}", tableName); + + using var awsSdkDynamoDBExerciser = new AwsSdkDynamoDBExerciser(); + + await awsSdkDynamoDBExerciser.DeleteTableAsync(tableName); + _logger.LogInformation("Finished DeleteTableAsync for {tableName}", tableName); + } + + // GET: /AwsSdkDynamoDB/PutItemAsync?tableName=tableName&title=title&year=year + [HttpGet("PutItemAsync")] + public async Task PutItemAsync([Required] string tableName, string title, string year) + { + _logger.LogInformation("Starting DynamoDB PutItemAsync {tableName} {title} {year}", tableName, title, year); + + using var awsSdkDynamoDBExerciser = new AwsSdkDynamoDBExerciser(); + + await awsSdkDynamoDBExerciser.PutItemAsync(tableName, title, year); + _logger.LogInformation("Finished PutItemAsync for {title} {year}", title, year); + } + + // GET: /AwsSdkDynamoDB/GetItemAsync?tableName=tableName&title=title&year=year + [HttpGet("GetItemAsync")] + public async Task GetItemAsync([Required] string tableName, string title, string year) + { + _logger.LogInformation("Starting DynamoDB GetItemAsync {tableName} {title} {year}", tableName, title, year); + + using var awsSdkDynamoDBExerciser = new AwsSdkDynamoDBExerciser(); + + await awsSdkDynamoDBExerciser.GetItemAsync(tableName, title, year); + _logger.LogInformation("Finished GetItemAsync for {title} {year}", title, year); + } + + // GET: /AwsSdkDynamoDB/UpdateItemAsync?tableName=tableName&title=title&year=year + [HttpGet("UpdateItemAsync")] + public async Task UpdateItemAsync([Required] string tableName, string title, string year) + { + _logger.LogInformation("Starting DynamoDB UpdateItemAsync {tableName} {title} {year}", tableName, title, year); + + using var awsSdkDynamoDBExerciser = new AwsSdkDynamoDBExerciser(); + + await awsSdkDynamoDBExerciser.UpdateItemAsync(tableName, title, year); + _logger.LogInformation("Finished UpdateItemAsync for {title} {year}", title, year); + } + + // GET: /AwsSdkDynamoDB/DeleteItemAsync?tableName=tableName&title=title&year=year + [HttpGet("DeleteItemAsync")] + public async Task DeleteItemAsync([Required] string tableName, string title, string year) + { + _logger.LogInformation("Starting DynamoDB DeleteItemAsync {tableName} {title} {year}", tableName, title, year); + + using var awsSdkDynamoDBExerciser = new AwsSdkDynamoDBExerciser(); 
+ + await awsSdkDynamoDBExerciser.DeleteItemAsync(tableName, title, year); + _logger.LogInformation("Finished DeleteItemAsync for {title} {year}", title, year); + } + + // GET: /AwsSdkDynamoDB/QueryAsync?tableName=tableName&title=title&year=year + [HttpGet("QueryAsync")] + public async Task QueryAsync([Required] string tableName, string title, string year) + { + _logger.LogInformation("Starting DynamoDB QueryAsync {tableName} {title} {year}", tableName, title, year); + + using var awsSdkDynamoDBExerciser = new AwsSdkDynamoDBExerciser(); + + await awsSdkDynamoDBExerciser.QueryAsync(tableName, title, year); + _logger.LogInformation("Finished QueryAsync for {title} {year}", title, year); + } + + // GET: /AwsSdkDynamoDB/ScanAsync?tableName=tableName + [HttpGet("ScanAsync")] + public async Task ScanAsync([Required] string tableName) + { + _logger.LogInformation("Starting DynamoDB ScanAsync {tableName}", tableName); + + using var awsSdkDynamoDBExerciser = new AwsSdkDynamoDBExerciser(); + + await awsSdkDynamoDBExerciser.ScanAsync(tableName); + _logger.LogInformation("Finished ScanAsync for {tableName}", tableName); + } + + } +} diff --git a/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/Controllers/AwsSdkController.cs b/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/Controllers/AwsSdkSQSController.cs similarity index 70% rename from tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/Controllers/AwsSdkController.cs rename to tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/Controllers/AwsSdkSQSController.cs index 9fab3c712f..fce3d02d2d 100644 --- a/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/Controllers/AwsSdkController.cs +++ b/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/Controllers/AwsSdkSQSController.cs @@ -6,7 +6,7 @@ using System.Threading; using System.Threading.Tasks; using Amazon.SQS.Model; -using AwsSdkTestApp.AwsSdkExerciser; +using AwsSdkTestApp.AwsSdkExercisers; using AwsSdkTestApp.SQSBackgroundService; using Microsoft.AspNetCore.Mvc; using Microsoft.Extensions.Logging; @@ -15,13 +15,13 @@ namespace AwsSdkTestApp.Controllers { [ApiController] [Route("[controller]")] - public class AwsSdkController : ControllerBase + public class AwsSdkSQSController : ControllerBase { - private readonly ILogger _logger; + private readonly ILogger _logger; private readonly ISQSRequestQueue _requestQueue; private readonly ISQSResponseQueue _responseQueue; - public AwsSdkController(ILogger logger, ISQSRequestQueue requestQueue, ISQSResponseQueue responseQueue) + public AwsSdkSQSController(ILogger logger, ISQSRequestQueue requestQueue, ISQSResponseQueue responseQueue) { _logger = logger; _requestQueue = requestQueue; @@ -36,20 +36,20 @@ public async Task SQS_SendReceivePurgeAsync([Required]string queueName) { _logger.LogInformation("Starting SQS_SendReceivePurge for {Queue}", queueName); - using var awsSdkExerciser = new AwsSdkExerciser.AwsSdkExerciser(AwsSdkTestType.SQS); + using var awsSdkSQSExerciser = new AwsSdkSQSExerciser(); - await awsSdkExerciser.SQS_InitializeAsync(queueName); + await awsSdkSQSExerciser.SQS_InitializeAsync(queueName); - await awsSdkExerciser.SQS_SendMessageAsync("Hello World!"); - await awsSdkExerciser.SQS_ReceiveMessageAsync(); + await awsSdkSQSExerciser.SQS_SendMessageAsync("Hello World!"); + await awsSdkSQSExerciser.SQS_ReceiveMessageAsync(); var messages = new[] { "Hello", "World" }; - await awsSdkExerciser.SQS_SendMessageBatchAsync(messages); - await 
awsSdkExerciser.SQS_ReceiveMessageAsync(messages.Length); + await awsSdkSQSExerciser.SQS_SendMessageBatchAsync(messages); + await awsSdkSQSExerciser.SQS_ReceiveMessageAsync(messages.Length); - await awsSdkExerciser.SQS_PurgeQueueAsync(); + await awsSdkSQSExerciser.SQS_PurgeQueueAsync(); - await awsSdkExerciser.SQS_TeardownAsync(); + await awsSdkSQSExerciser.SQS_TeardownAsync(); _logger.LogInformation("Finished SQS_SendReceivePurge for {Queue}", queueName); } @@ -64,8 +64,8 @@ public async Task SQS_SendReceivePurgeAsync([Required]string queueName) public async Task SQS_InitializeQueueAsync([Required]string queueName) { _logger.LogInformation("Initializing queue {Queue}", queueName); - using var awsSdkExerciser = new AwsSdkExerciser.AwsSdkExerciser(AwsSdkTestType.SQS); - var queueUrl = await awsSdkExerciser.SQS_InitializeAsync(queueName); + using var awsSdkSQSExerciser = new AwsSdkSQSExerciser(); + var queueUrl = await awsSdkSQSExerciser.SQS_InitializeAsync(queueName); _logger.LogInformation("Queue {Queue} initialized with URL {QueueUrl}", queueName, queueUrl); return queueUrl; } @@ -75,10 +75,10 @@ public async Task SQS_InitializeQueueAsync([Required]string queueName) public async Task SQS_SendMessageToQueueAsync([Required]string message, [Required]string messageQueueUrl) { _logger.LogInformation("Sending message {Message} to {Queue}", message, messageQueueUrl); - using var awsSdkExerciser = new AwsSdkExerciser.AwsSdkExerciser(AwsSdkTestType.SQS); - awsSdkExerciser.SQS_SetQueueUrl(messageQueueUrl); + using var awsSdkSQSExerciser = new AwsSdkSQSExerciser(); + awsSdkSQSExerciser.SQS_SetQueueUrl(messageQueueUrl); - await awsSdkExerciser.SQS_SendMessageAsync(message); + await awsSdkSQSExerciser.SQS_SendMessageAsync(message); _logger.LogInformation("Message {Message} sent to {Queue}", message, messageQueueUrl); } @@ -99,10 +99,10 @@ public async Task> SQS_ReceiveMessageFromQueueAsync([Requir public async Task SQS_DeleteQueueAsync([Required]string messageQueueUrl) { _logger.LogInformation("Deleting queue {Queue}", messageQueueUrl); - using var awsSdkExerciser = new AwsSdkExerciser.AwsSdkExerciser(AwsSdkTestType.SQS); - awsSdkExerciser.SQS_SetQueueUrl(messageQueueUrl); + using var awsSdkSQSExerciser = new AwsSdkSQSExerciser(); + awsSdkSQSExerciser.SQS_SetQueueUrl(messageQueueUrl); - await awsSdkExerciser.SQS_TeardownAsync(); + await awsSdkSQSExerciser.SQS_TeardownAsync(); _logger.LogInformation("Queue {Queue} deleted", messageQueueUrl); } } diff --git a/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/SQSBackgroundService/SQSReceiverService.cs b/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/SQSBackgroundService/SQSReceiverService.cs index 5939beccdc..7e0ec60ce8 100644 --- a/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/SQSBackgroundService/SQSReceiverService.cs +++ b/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/SQSBackgroundService/SQSReceiverService.cs @@ -6,7 +6,7 @@ using System.Threading; using System.Threading.Tasks; using Amazon.SQS.Model; -using AwsSdkTestApp.AwsSdkExerciser; +using AwsSdkTestApp.AwsSdkExercisers; using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Logging; using NewRelic.Api.Agent; @@ -56,10 +56,10 @@ protected override async Task ExecuteAsync(CancellationToken stoppingToken) private async Task> ProcessRequestAsync(string queueUrl) { _logger.LogInformation("Received a request to receive a message from {Queue}", queueUrl); - using var awsSdkExerciser = new 
AwsSdkExerciser.AwsSdkExerciser(AwsSdkTestType.SQS); - awsSdkExerciser.SQS_SetQueueUrl(queueUrl); + using var awsSdkSQSExerciser = new AwsSdkSQSExerciser(); + awsSdkSQSExerciser.SQS_SetQueueUrl(queueUrl); _logger.LogInformation("Receiving a message from {Queue}", queueUrl); - var messages = await awsSdkExerciser.SQS_ReceiveMessageAsync(); + var messages = await awsSdkSQSExerciser.SQS_ReceiveMessageAsync(); _logger.LogInformation("Received a message from {Queue}; queuing a response", queueUrl); await _responseQueue.QueueResponseAsync(messages); _logger.LogInformation("Finished processing request for {Queue}", queueUrl); diff --git a/tests/Agent/IntegrationTests/ContainerApplications/docker-compose-awssdk.yml b/tests/Agent/IntegrationTests/ContainerApplications/docker-compose-awssdk.yml index 06ebdb1a0d..a3873cf8d0 100644 --- a/tests/Agent/IntegrationTests/ContainerApplications/docker-compose-awssdk.yml +++ b/tests/Agent/IntegrationTests/ContainerApplications/docker-compose-awssdk.yml @@ -33,10 +33,18 @@ services: volumes: - "${LOCALSTACK_VOLUME_DIR:-./volume}:/var/lib/localstack" - "/var/run/docker.sock:/var/run/docker.sock" + + dynamodb: + command: "-jar DynamoDBLocal.jar -inMemory" + image: "amazon/dynamodb-local:latest" + expose: # ports are only available internal to the service, not external so there's no chance for conflicts + - "8000" + working_dir: /home/dynamodblocal awssdktestapp: depends_on: - localstack + - dynamodb container_name: ${CONTAINER_NAME} image: ${CONTAINER_NAME} platform: ${PLATFORM} diff --git a/tests/Agent/IntegrationTests/ContainerIntegrationTests.sln b/tests/Agent/IntegrationTests/ContainerIntegrationTests.sln index 6125d5c40b..4bfa52e5a2 100644 --- a/tests/Agent/IntegrationTests/ContainerIntegrationTests.sln +++ b/tests/Agent/IntegrationTests/ContainerIntegrationTests.sln @@ -35,7 +35,11 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "NewRelic.Testing.Assertions EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AwsSdkTestApp", "ContainerApplications\AwsSdkTestApp\AwsSdkTestApp.csproj", "{70731828-AFC8-4262-9076-3FB39E224D10}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MemcachedTestApp", "ContainerApplications\MemcachedTestApp\MemcachedTestApp.csproj", "{3D46F286-A19A-4942-8E3F-8999E953A6F2}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "MemcachedTestApp", "ContainerApplications\MemcachedTestApp\MemcachedTestApp.csproj", "{3D46F286-A19A-4942-8E3F-8999E953A6F2}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "TestSerializationHelpers", "..\Shared\TestSerializationHelpers\TestSerializationHelpers.csproj", "{C8DF0638-A57D-4F5F-9ED2-E54879685DBC}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "TestSerializationHelpers.Test", "..\Shared\TestSerializationHelpers.Test\TestSerializationHelpers.Test.csproj", "{76A0FABB-B5B2-46B4-8C81-239F1C7E056A}" EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution @@ -79,6 +83,14 @@ Global {3D46F286-A19A-4942-8E3F-8999E953A6F2}.Debug|Any CPU.Build.0 = Debug|Any CPU {3D46F286-A19A-4942-8E3F-8999E953A6F2}.Release|Any CPU.ActiveCfg = Release|Any CPU {3D46F286-A19A-4942-8E3F-8999E953A6F2}.Release|Any CPU.Build.0 = Release|Any CPU + {C8DF0638-A57D-4F5F-9ED2-E54879685DBC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C8DF0638-A57D-4F5F-9ED2-E54879685DBC}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C8DF0638-A57D-4F5F-9ED2-E54879685DBC}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C8DF0638-A57D-4F5F-9ED2-E54879685DBC}.Release|Any 
CPU.Build.0 = Release|Any CPU + {76A0FABB-B5B2-46B4-8C81-239F1C7E056A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {76A0FABB-B5B2-46B4-8C81-239F1C7E056A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {76A0FABB-B5B2-46B4-8C81-239F1C7E056A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {76A0FABB-B5B2-46B4-8C81-239F1C7E056A}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE diff --git a/tests/Agent/IntegrationTests/ContainerIntegrationTests/Fixtures/AwsSdkContainerTestFixtures.cs b/tests/Agent/IntegrationTests/ContainerIntegrationTests/Fixtures/AwsSdkContainerTestFixtures.cs index 5de25d69b5..b70159f279 100644 --- a/tests/Agent/IntegrationTests/ContainerIntegrationTests/Fixtures/AwsSdkContainerTestFixtures.cs +++ b/tests/Agent/IntegrationTests/ContainerIntegrationTests/Fixtures/AwsSdkContainerTestFixtures.cs @@ -34,43 +34,92 @@ public class AwsSdkContainerSQSTestFixture : AwsSdkContainerTestFixtureBase private const ContainerApplication.Architecture Architecture = ContainerApplication.Architecture.X64; private const string DistroTag = "jammy"; - public AwsSdkContainerSQSTestFixture() : base(DistroTag, Architecture, Dockerfile) { } + private readonly string BaseUrl; - public void ExerciseSQS_SendReceivePurge(string queueName) + public AwsSdkContainerSQSTestFixture() : base(DistroTag, Architecture, Dockerfile) { - var address = $"http://localhost:{Port}/awssdk"; + BaseUrl = $"http://localhost:{Port}/awssdksqs"; + } + public void ExerciseSQS_SendReceivePurge(string queueName) + { // The exerciser will return a 500 error if the `RequestMessage.MessageAttributeNames` collection is modified by our instrumentation. // See https://github.com/newrelic/newrelic-dotnet-agent/pull/2646 - GetAndAssertStatusCode($"{address}/SQS_SendReceivePurge?queueName={queueName}", System.Net.HttpStatusCode.OK); + GetAndAssertStatusCode($"{BaseUrl}/SQS_SendReceivePurge?queueName={queueName}", System.Net.HttpStatusCode.OK); } public string ExerciseSQS_SendAndReceiveInSeparateTransactions(string queueName) { - var address = $"http://localhost:{Port}/awssdk"; - - var queueUrl = GetString($"{address}/SQS_InitializeQueue?queueName={queueName}"); + var queueUrl = GetString($"{BaseUrl}/SQS_InitializeQueue?queueName={queueName}"); - GetAndAssertStatusCode($"{address}/SQS_SendMessageToQueue?message=Hello&messageQueueUrl={queueUrl}", System.Net.HttpStatusCode.OK); + GetAndAssertStatusCode($"{BaseUrl}/SQS_SendMessageToQueue?message=Hello&messageQueueUrl={queueUrl}", System.Net.HttpStatusCode.OK); - var messagesJson = GetString($"{address}/SQS_ReceiveMessageFromQueue?messageQueueUrl={queueUrl}"); + var messagesJson = GetString($"{BaseUrl}/SQS_ReceiveMessageFromQueue?messageQueueUrl={queueUrl}"); - GetAndAssertStatusCode($"{address}/SQS_DeleteQueue?messageQueueUrl={queueUrl}", System.Net.HttpStatusCode.OK); + GetAndAssertStatusCode($"{BaseUrl}/SQS_DeleteQueue?messageQueueUrl={queueUrl}", System.Net.HttpStatusCode.OK); return messagesJson; } public string ExerciseSQS_ReceiveEmptyMessage(string queueName) { - var address = $"http://localhost:{Port}/awssdk"; + var queueUrl = GetString($"{BaseUrl}/SQS_InitializeQueue?queueName={queueName}"); - var queueUrl = GetString($"{address}/SQS_InitializeQueue?queueName={queueName}"); + var messagesJson = GetString($"{BaseUrl}/SQS_ReceiveMessageFromQueue?messageQueueUrl={queueUrl}"); - var messagesJson = GetString($"{address}/SQS_ReceiveMessageFromQueue?messageQueueUrl={queueUrl}"); - - 
GetAndAssertStatusCode($"{address}/SQS_DeleteQueue?messageQueueUrl={queueUrl}", System.Net.HttpStatusCode.OK); + GetAndAssertStatusCode($"{BaseUrl}/SQS_DeleteQueue?messageQueueUrl={queueUrl}", System.Net.HttpStatusCode.OK); return messagesJson; } } + +public class AwsSdkContainerDynamoDBTestFixture : AwsSdkContainerTestFixtureBase +{ + private const string Dockerfile = "AwsSdkTestApp/Dockerfile"; + private const ContainerApplication.Architecture Architecture = ContainerApplication.Architecture.X64; + private const string DistroTag = "jammy"; + + private readonly string BaseUrl; + + public AwsSdkContainerDynamoDBTestFixture() : base(DistroTag, Architecture, Dockerfile) + { + BaseUrl = $"http://localhost:{Port}/awssdkdynamodb"; + } + + public void CreateTableAsync(string tableName) + { + GetAndAssertStatusCode($"{BaseUrl}/CreateTableAsync?tableName={tableName}", System.Net.HttpStatusCode.OK); + } + public void DeleteTableAsync(string tableName) + { + GetAndAssertStatusCode($"{BaseUrl}/DeleteTableAsync?tableName={tableName}", System.Net.HttpStatusCode.OK); + } + + public void PutItemAsync(string tableName, string title, string year) + { + GetAndAssertStatusCode($"{BaseUrl}/PutItemAsync?tableName={tableName}&title={title}&year={year}", System.Net.HttpStatusCode.OK); + } + public void GetItemAsync(string tableName, string title, string year) + { + GetAndAssertStatusCode($"{BaseUrl}/GetItemAsync?tableName={tableName}&title={title}&year={year}", System.Net.HttpStatusCode.OK); + } + public void UpdateItemAsync(string tableName, string title, string year) + { + GetAndAssertStatusCode($"{BaseUrl}/UpdateItemAsync?tableName={tableName}&title={title}&year={year}", System.Net.HttpStatusCode.OK); + } + + public void DeleteItemAsync(string tableName, string title, string year) + { + GetAndAssertStatusCode($"{BaseUrl}/DeleteItemAsync?tableName={tableName}&title={title}&year={year}", System.Net.HttpStatusCode.OK); + } + public void QueryAsync(string tableName, string title, string year) + { + GetAndAssertStatusCode($"{BaseUrl}/QueryAsync?tableName={tableName}&title={title}&year={year}", System.Net.HttpStatusCode.OK); + } + public void ScanAsync(string tableName) + { + GetAndAssertStatusCode($"{BaseUrl}/ScanAsync?tableName={tableName}", System.Net.HttpStatusCode.OK); + } + +} diff --git a/tests/Agent/IntegrationTests/ContainerIntegrationTests/Tests/AwsSdk/AwsSdkDynamoDBTest.cs b/tests/Agent/IntegrationTests/ContainerIntegrationTests/Tests/AwsSdk/AwsSdkDynamoDBTest.cs new file mode 100644 index 0000000000..c136e83bf7 --- /dev/null +++ b/tests/Agent/IntegrationTests/ContainerIntegrationTests/Tests/AwsSdk/AwsSdkDynamoDBTest.cs @@ -0,0 +1,120 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +using System; +using System.Collections.Generic; +using System.Linq; +using NewRelic.Agent.IntegrationTestHelpers; +using Xunit; +using Xunit.Abstractions; + +namespace NewRelic.Agent.ContainerIntegrationTests.Tests.AwsSdk; + +public abstract class AwsSdkDynamoDBTestBase : NewRelicIntegrationTest +{ + private readonly AwsSdkContainerDynamoDBTestFixture _fixture; + + private readonly string _tableName = $"TestTable-{Guid.NewGuid()}"; + private readonly string _title = "Ghost"; + private readonly string _year = "1990"; + + protected AwsSdkDynamoDBTestBase(AwsSdkContainerDynamoDBTestFixture fixture, ITestOutputHelper output) : base(fixture) + { + _fixture = fixture; + _fixture.TestLogger = output; + + _fixture.Actions(setupConfiguration: () => + { + var configModifier = new NewRelicConfigModifier(_fixture.DestinationNewRelicConfigFilePath); + configModifier.SetLogLevel("finest"); + configModifier.ForceTransactionTraces(); + configModifier.EnableDistributedTrace(); + configModifier.ConfigureFasterMetricsHarvestCycle(15); + configModifier.ConfigureFasterSpanEventsHarvestCycle(15); + configModifier.ConfigureFasterTransactionTracesHarvestCycle(15); + configModifier.LogToConsole(); + + }, + exerciseApplication: () => + { + _fixture.Delay(5); + + _fixture.CreateTableAsync(_tableName); + + _fixture.PutItemAsync(_tableName, _title, _year); + _fixture.GetItemAsync(_tableName, _title, _year); + _fixture.UpdateItemAsync(_tableName, _title, _year); + + _fixture.QueryAsync(_tableName, _title, _year); + _fixture.ScanAsync(_tableName); + + _fixture.DeleteItemAsync(_tableName, _title, _year); + _fixture.DeleteTableAsync(_tableName); + + _fixture.AgentLog.WaitForLogLine(AgentLogBase.MetricDataLogLineRegex, TimeSpan.FromMinutes(2)); + _fixture.AgentLog.WaitForLogLine(AgentLogBase.TransactionTransformCompletedLogLineRegex, TimeSpan.FromMinutes(2)); + + // shut down the container and wait for the agent log to see it + _fixture.ShutdownRemoteApplication(); + _fixture.AgentLog.WaitForLogLine(AgentLogBase.ShutdownLogLineRegex, TimeSpan.FromSeconds(10)); + }); + + _fixture.Initialize(); + } + + + [Fact] + public void Test() + { + Assert.Equal(0, _fixture.AgentLog.GetWrapperExceptionLineCount()); + Assert.Equal(0, _fixture.AgentLog.GetApplicationErrorLineCount()); + + var metrics = _fixture.AgentLog.GetMetrics().ToList(); + + var metricScopeBase = "WebTransaction/MVC/AwsSdkDynamoDB/"; + var createTableScope = metricScopeBase + "CreateTable/{tableName}"; + var scanScope = metricScopeBase + "Scan/{tableName}"; + var deleteTableScope = metricScopeBase + "DeleteTable/{tableName}"; + var putItemScope = metricScopeBase + "PutItem/{tableName}/{title}/{year}"; + var getItemScope = metricScopeBase + "GetItem/{tableName}/{title}/{year}"; + var updateItemScope = metricScopeBase + "UpdateItem/{tableName}/{title}/{year}"; + var deleteItemScope = metricScopeBase + "DeleteItem/{tableName}/{title}/{year}"; + var queryScope = metricScopeBase + "Query/{tableName}/{title}/{year}"; + + var expectedMetrics = new List + { + new() { metricName = $"Datastore/statement/DynamoDB/{_tableName}/create_table", callCount = 1}, + new() { metricName = $"Datastore/statement/DynamoDB/{_tableName}/create_table", callCount = 1, metricScope = createTableScope}, + new() { metricName = $"Datastore/statement/DynamoDB/{_tableName}/describe_table", callCount = 1}, + new() { metricName = $"Datastore/statement/DynamoDB/{_tableName}/describe_table", callCount = 1, metricScope = createTableScope}, + new() { metricName = 
$"Datastore/statement/DynamoDB/{_tableName}/put_item", callCount = 1}, + new() { metricName = $"Datastore/statement/DynamoDB/{_tableName}/put_item", callCount = 1, metricScope = putItemScope}, + new() { metricName = $"Datastore/statement/DynamoDB/{_tableName}/get_item", callCount = 1}, + new() { metricName = $"Datastore/statement/DynamoDB/{_tableName}/get_item", callCount = 1, metricScope = getItemScope}, + new() { metricName = $"Datastore/statement/DynamoDB/{_tableName}/update_item", callCount = 1}, + new() { metricName = $"Datastore/statement/DynamoDB/{_tableName}/update_item", callCount = 1, metricScope = updateItemScope}, + new() { metricName = $"Datastore/statement/DynamoDB/{_tableName}/delete_item", callCount = 1}, + new() { metricName = $"Datastore/statement/DynamoDB/{_tableName}/delete_item", callCount = 1, metricScope = deleteItemScope}, + new() { metricName = $"Datastore/statement/DynamoDB/{_tableName}/query", callCount = 1}, + new() { metricName = $"Datastore/statement/DynamoDB/{_tableName}/query", callCount = 1, metricScope = queryScope}, + new() { metricName = $"Datastore/statement/DynamoDB/{_tableName}/scan", callCount = 1}, + new() { metricName = $"Datastore/statement/DynamoDB/{_tableName}/scan", callCount = 1, metricScope = scanScope}, + new() { metricName = $"Datastore/statement/DynamoDB/{_tableName}/delete_table", callCount = 1}, + new() { metricName = $"Datastore/statement/DynamoDB/{_tableName}/delete_table", callCount = 1, metricScope = deleteTableScope}, + + }; + + Assertions.MetricsExist(expectedMetrics, metrics); + } +} + +// Base class with derived classes pattern copied from another tests file +// but we currently don't need to use it for anything + +public class AwsSdkDynamoDBTest : AwsSdkDynamoDBTestBase +{ + public AwsSdkDynamoDBTest(AwsSdkContainerDynamoDBTestFixture fixture, ITestOutputHelper output) : base(fixture, output) + { + } +} + diff --git a/tests/Agent/IntegrationTests/ContainerIntegrationTests/Tests/AwsSdk/AwsSdkSQSTest.cs b/tests/Agent/IntegrationTests/ContainerIntegrationTests/Tests/AwsSdk/AwsSdkSQSTest.cs index 73b5a4b13e..edd06af3fb 100644 --- a/tests/Agent/IntegrationTests/ContainerIntegrationTests/Tests/AwsSdk/AwsSdkSQSTest.cs +++ b/tests/Agent/IntegrationTests/ContainerIntegrationTests/Tests/AwsSdk/AwsSdkSQSTest.cs @@ -18,8 +18,8 @@ public abstract class AwsSdkSQSTestBase : NewRelicIntegrationTest queryParameters = null) { methodCall = methodCall ?? GetDefaultMethodCall(agent); - var segment = agent.CurrentTransaction.StartDatastoreSegment(methodCall, new ParsedSqlStatement(vendor, model, operation), new ConnectionInfo(vendor.ToKnownName(), host, portPathOrId, databaseName), commandText, queryParameters); + var segment = agent.CurrentTransaction.StartDatastoreSegment(methodCall, new ParsedSqlStatement(vendor, model, operation), new ConnectionInfo(host, portPathOrId, databaseName), commandText, queryParameters); if (segment == null) throw new NullReferenceException("segment"); @@ -67,7 +67,7 @@ public static ISegment StartStackExchangeRedisDatastoreRequestSegmentOrThrow(thi { methodCall = methodCall ?? 
GetDefaultMethodCall(agent); var xTransaction = (ITransactionExperimental)agent.CurrentTransaction; - var segment = xTransaction.StartStackExchangeRedisSegment(RuntimeHelpers.GetHashCode(methodCall), ParsedSqlStatement.FromOperation(vendor, operation), new ConnectionInfo(vendor.ToKnownName(), host, portPathOrId, databaseName), relativeStartTime, relativeEndTime); + var segment = xTransaction.StartStackExchangeRedisSegment(RuntimeHelpers.GetHashCode(methodCall), ParsedSqlStatement.FromOperation(vendor, operation), new ConnectionInfo(host, portPathOrId, databaseName), relativeStartTime, relativeEndTime); if (segment == null) throw new NullReferenceException("segment"); diff --git a/tests/Agent/UnitTests/Core.UnitTest/Spans/SpanEventMakerTests.cs b/tests/Agent/UnitTests/Core.UnitTest/Spans/SpanEventMakerTests.cs index 28c8161eff..0fd6e1e378 100644 --- a/tests/Agent/UnitTests/Core.UnitTest/Spans/SpanEventMakerTests.cs +++ b/tests/Agent/UnitTests/Core.UnitTest/Spans/SpanEventMakerTests.cs @@ -177,7 +177,7 @@ public void SetUp() _childGenericSegment.SetSegmentData(new SimpleSegmentData(SegmentName)); // Datastore Segments - _connectionInfo = new ConnectionInfo(DatastoreVendor.MSSQL.ToKnownName(), "localhost", 1234, "default", "maininstance"); + _connectionInfo = new ConnectionInfo("localhost", 1234, "default", "maininstance"); _parsedSqlStatement = SqlParser.GetParsedDatabaseStatement(DatastoreVendor.MSSQL, System.Data.CommandType.Text, ShortQuery); _obfuscatedSql = _databaseService.GetObfuscatedSql(ShortQuery, DatastoreVendor.MSSQL); @@ -648,7 +648,7 @@ public void Do_Not_Generate_DbCollection_Attribute_When_Model_IsNullOrEmpty() var testSegment = new Segment(CreateTransactionSegmentState(3, null, 777), new MethodCallData(MethodCallType, MethodCallMethod, 1)); testSegment.SetSegmentData(new DatastoreSegmentData(_databaseService, parsedSqlStatement: new ParsedSqlStatement(DatastoreVendor.CosmosDB, string.Empty, "ReadDatabase"), - connectionInfo: new ConnectionInfo("none", "localhost", "1234", "default", "maininstance"))); + connectionInfo: new ConnectionInfo("localhost", "1234", "default", "maininstance"))); // ARRANGE var segments = new List() diff --git a/tests/Agent/UnitTests/Core.UnitTest/Transformers/DatastoreSegmentTransformerTests.cs b/tests/Agent/UnitTests/Core.UnitTest/Transformers/DatastoreSegmentTransformerTests.cs index f1f2c4f83e..614740b2fc 100644 --- a/tests/Agent/UnitTests/Core.UnitTest/Transformers/DatastoreSegmentTransformerTests.cs +++ b/tests/Agent/UnitTests/Core.UnitTest/Transformers/DatastoreSegmentTransformerTests.cs @@ -340,7 +340,7 @@ private Segment GetSegment(DatastoreVendor vendor, string operation, string mode private Segment GetSegment(DatastoreVendor vendor, string operation, string model, double duration, CrossApplicationResponseData catResponseData = null, string host = null, string portPathOrId = null) { var methodCallData = new MethodCallData("foo", "bar", 1); - var data = new DatastoreSegmentData(_databaseService, new ParsedSqlStatement(vendor, model, operation), null, new ConnectionInfo("none", host, portPathOrId, null)); + var data = new DatastoreSegmentData(_databaseService, new ParsedSqlStatement(vendor, model, operation), null, new ConnectionInfo(host, portPathOrId, null)); var segment = new Segment(TransactionSegmentStateHelpers.GetItransactionSegmentState(), methodCallData); segment.SetSegmentData(data); diff --git a/tests/Agent/UnitTests/Core.UnitTest/Transformers/TransactionTransformer/SqlTraceMakerTests.cs 
b/tests/Agent/UnitTests/Core.UnitTest/Transformers/TransactionTransformer/SqlTraceMakerTests.cs index 2b764f87e6..3f48f38ae6 100644 --- a/tests/Agent/UnitTests/Core.UnitTest/Transformers/TransactionTransformer/SqlTraceMakerTests.cs +++ b/tests/Agent/UnitTests/Core.UnitTest/Transformers/TransactionTransformer/SqlTraceMakerTests.cs @@ -165,7 +165,7 @@ private ImmutableTransaction BuildTestTransaction(string uri = null, string guid private Segment BuildSegment(DatastoreVendor vendor, string model, string commandText, TimeSpan startTime = new TimeSpan(), TimeSpan? duration = null, string name = "", MethodCallData methodCallData = null, IEnumerable> parameters = null, string host = null, string portPathOrId = null, string databaseName = null) { var data = new DatastoreSegmentData(_databaseService, new ParsedSqlStatement(vendor, model, null), commandText, - new ConnectionInfo("none", host, portPathOrId, databaseName)); + new ConnectionInfo(host, portPathOrId, databaseName)); methodCallData = methodCallData ?? new MethodCallData("typeName", "methodName", 1); var segment = new Segment(TransactionSegmentStateHelpers.GetItransactionSegmentState(), methodCallData); diff --git a/tests/Agent/UnitTests/Core.UnitTest/Transformers/TransactionTransformer/TestTransactions.cs b/tests/Agent/UnitTests/Core.UnitTest/Transformers/TransactionTransformer/TestTransactions.cs index cc98f9f3f6..2598380bb9 100644 --- a/tests/Agent/UnitTests/Core.UnitTest/Transformers/TransactionTransformer/TestTransactions.cs +++ b/tests/Agent/UnitTests/Core.UnitTest/Transformers/TransactionTransformer/TestTransactions.cs @@ -118,7 +118,7 @@ public static ImmutableTransaction CreateTestTransactionWithSegments(IEnumerable txSegmentState = TransactionSegmentStateHelpers.GetItransactionSegmentState(); methodCallData = methodCallData ?? 
new MethodCallData("typeName", "methodName", 1); - var data = new DatastoreSegmentData(_databaseService, new ParsedSqlStatement(vendor, model, null), commandText, new ConnectionInfo("none", host, portPathOrId, databaseName)); + var data = new DatastoreSegmentData(_databaseService, new ParsedSqlStatement(vendor, model, null), commandText, new ConnectionInfo(host, portPathOrId, databaseName)); var segment = new Segment(txSegmentState, methodCallData); segment.SetSegmentData(data); diff --git a/tests/Agent/UnitTests/Core.UnitTest/Transformers/TransactionTransformer/TransactionTraceMakerTests.cs b/tests/Agent/UnitTests/Core.UnitTest/Transformers/TransactionTransformer/TransactionTraceMakerTests.cs index 1df4d45378..6f46dd5733 100644 --- a/tests/Agent/UnitTests/Core.UnitTest/Transformers/TransactionTransformer/TransactionTraceMakerTests.cs +++ b/tests/Agent/UnitTests/Core.UnitTest/Transformers/TransactionTransformer/TransactionTraceMakerTests.cs @@ -354,7 +354,7 @@ private static ImmutableSegmentTreeNode BuildNode(ImmutableTransaction transacti var data = new DatastoreSegmentData(_databaseService, new ParsedSqlStatement(DatastoreVendor.MSSQL, "test_table", "SELECT"), "SELECT * FROM test_table", - new ConnectionInfo("My Vendor", "My Host", "My Port", "My Database")); + new ConnectionInfo("My Host", "My Port", "My Database")); var segment = new Segment(TransactionSegmentStateHelpers.GetItransactionSegmentState(), methodCallData); segment.SetSegmentData(data); diff --git a/tests/Agent/UnitTests/NewRelic.Agent.Extensions.Tests/Parsing/StringsHelperTest.cs b/tests/Agent/UnitTests/NewRelic.Agent.Extensions.Tests/Parsing/StringsHelperTest.cs index 72ff4eead0..25deb83b80 100644 --- a/tests/Agent/UnitTests/NewRelic.Agent.Extensions.Tests/Parsing/StringsHelperTest.cs +++ b/tests/Agent/UnitTests/NewRelic.Agent.Extensions.Tests/Parsing/StringsHelperTest.cs @@ -135,6 +135,32 @@ public void validate_CleanUri_String_Version(string uri, string expected) Assert.That(actual, Is.EqualTo(expected)); } + [TestCase("AlphaBeta", "alpha_beta")] + [TestCase("Alpha", "alpha")] + [TestCase("alpha", "alpha")] + [TestCase("alpha_beta", "alpha_beta")] + [TestCase("alphaBeta", "alpha_beta")] + [TestCase("AlphaBetaGamma", "alpha_beta_gamma")] + [TestCase("A", "a")] + [TestCase("", "")] + public void validate_ToSnakeCase(string input, string expected) + { + var actual = input.ToSnakeCase(); + Assert.That(actual, Is.EqualTo(expected)); + } + + [Test] + public void validate_ToSnakeCase_nullInput_throwsException() + { + Assert.Throws(TryNullSnakeCase); + } + + private void TryNullSnakeCase() + { + string input = null; + input.ToSnakeCase(); + } + #if NET6_0_OR_GREATER [Test] public void validate_CleanUri_handles_invalidoperationexception() From 7f07c4de5d0b5641e953980f8c19214ec2cad2cf Mon Sep 17 00:00:00 2001 From: Chris Hynes <111462425+chynesNR@users.noreply.github.com> Date: Wed, 30 Oct 2024 09:36:33 -0700 Subject: [PATCH 06/19] test: Force terminate helper processes still running at dispose time (#2860) --- .../Applications/ContainerApplication.cs | 2 +- .../RemoteServiceFixtures/AzureFuncTool.cs | 2 +- .../RemoteServiceFixtures/DotnetTool.cs | 2 +- .../RemoteServiceFixtures/RemoteApplication.cs | 15 ++++++++++++++- .../RemoteApplicationFixture.cs | 2 +- .../RemoteConsoleApplication.cs | 2 +- .../RemoteServiceFixtures/MockNewRelicFixture.cs | 2 +- .../RemoteServiceFixtures/OwinRemotingFixture.cs | 2 +- 8 files changed, 21 insertions(+), 8 deletions(-) diff --git 
a/tests/Agent/IntegrationTests/ContainerIntegrationTests/Applications/ContainerApplication.cs b/tests/Agent/IntegrationTests/ContainerIntegrationTests/Applications/ContainerApplication.cs index 563f85f3ef..d0d5b89ad5 100644 --- a/tests/Agent/IntegrationTests/ContainerIntegrationTests/Applications/ContainerApplication.cs +++ b/tests/Agent/IntegrationTests/ContainerIntegrationTests/Applications/ContainerApplication.cs @@ -182,7 +182,7 @@ public override void Start(string commandLineArguments, Dictionary Date: Wed, 30 Oct 2024 12:45:22 -0700 Subject: [PATCH 07/19] test: Disable tests using deprecated AI models (#2861) --- .../Agent/IntegrationTests/IntegrationTests/LLM/BedrockTests.cs | 1 - .../IntegrationTests/LLM/LLMAccountDisabledTests.cs | 2 +- .../Agent/IntegrationTests/IntegrationTests/LLM/LLMApiTests.cs | 2 +- 3 files changed, 2 insertions(+), 3 deletions(-) diff --git a/tests/Agent/IntegrationTests/IntegrationTests/LLM/BedrockTests.cs b/tests/Agent/IntegrationTests/IntegrationTests/LLM/BedrockTests.cs index 720e9033cf..74b39c791d 100644 --- a/tests/Agent/IntegrationTests/IntegrationTests/LLM/BedrockTests.cs +++ b/tests/Agent/IntegrationTests/IntegrationTests/LLM/BedrockTests.cs @@ -21,7 +21,6 @@ public abstract class BedrockTestsBase : NewRelicIntegrationTest _bedrockModelsToTest = new List { - "meta13", "amazonembed", "amazonexpress", "cohere", diff --git a/tests/Agent/IntegrationTests/IntegrationTests/LLM/LLMAccountDisabledTests.cs b/tests/Agent/IntegrationTests/IntegrationTests/LLM/LLMAccountDisabledTests.cs index 407252edd8..6c01db09fc 100644 --- a/tests/Agent/IntegrationTests/IntegrationTests/LLM/LLMAccountDisabledTests.cs +++ b/tests/Agent/IntegrationTests/IntegrationTests/LLM/LLMAccountDisabledTests.cs @@ -42,7 +42,7 @@ public LlmAccountDisabledTestsBase(TFixture fixture, ITestOutputHelper output) : _fixture.Initialize(); } - [Fact] + //[Fact] // The model we were using that was marked as disabled is deprecated, so the test no longer works public void BedrockDisabledTest() { // Make sure it actually got called diff --git a/tests/Agent/IntegrationTests/IntegrationTests/LLM/LLMApiTests.cs b/tests/Agent/IntegrationTests/IntegrationTests/LLM/LLMApiTests.cs index de1b8c56e3..8375faa86b 100644 --- a/tests/Agent/IntegrationTests/IntegrationTests/LLM/LLMApiTests.cs +++ b/tests/Agent/IntegrationTests/IntegrationTests/LLM/LLMApiTests.cs @@ -50,7 +50,7 @@ public LlmApiTestsBase(TFixture fixture, ITestOutputHelper output) : base(fixtur _fixture.Initialize(); } - [Fact] + //[Fact] The Meta Llama models have been deprecated, so these tests need to be reworked public void BedrockApiTest() { bool found = false; From d630c9c5158e0e5c9ad90a342e2b2fc2f7d42dbb Mon Sep 17 00:00:00 2001 From: dotnet-agent-team-bot <141066016+dotnet-agent-team-bot@users.noreply.github.com> Date: Wed, 30 Oct 2024 13:38:35 -0700 Subject: [PATCH 08/19] chore(main): release 10.33.0 (#2843) --- release-please/.release-please-manifest.json | 2 +- src/Agent/CHANGELOG.md | 16 ++++++++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/release-please/.release-please-manifest.json b/release-please/.release-please-manifest.json index 1c1366d31d..8238793e63 100644 --- a/release-please/.release-please-manifest.json +++ b/release-please/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "10.32.0" + ".": "10.33.0" } \ No newline at end of file diff --git a/src/Agent/CHANGELOG.md b/src/Agent/CHANGELOG.md index ba714f5de4..4dedc4bb8d 100644 --- a/src/Agent/CHANGELOG.md +++ b/src/Agent/CHANGELOG.md @@ -4,6 +4,22 
@@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [10.33.0](https://github.com/newrelic/newrelic-dotnet-agent/compare/v10.32.0...v10.33.0) (2024-10-30) + + +### New features + +* Add AWSSDK.DynamoDBv2 instrumentation. Versions 3.5 through 4.0.0-preview.4 are supported. Note: previous agent versions would instrument DynamoDB calls as Externals; now DynamoDB calls will show up as Database operations in the New Relic UI. ([2460527](https://github.com/newrelic/newrelic-dotnet-agent/commit/2460527c83c0b196329a2e2b61e435cd20cd6dbd)) +* Add config option for providing AWS account ID for linking ([#2851](https://github.com/newrelic/newrelic-dotnet-agent/issues/2851)) ([936b6f6](https://github.com/newrelic/newrelic-dotnet-agent/commit/936b6f63c837dd03222281870ae5db40574693ff)) +* New Garbage Collection Metrics Sampler for .NET 6+ ([#2838](https://github.com/newrelic/newrelic-dotnet-agent/issues/2838)) ([f24a5da](https://github.com/newrelic/newrelic-dotnet-agent/commit/f24a5daa2cc6117a73eac96f2a45d738335d063a)) + + +### Fixes + +* Fix potential race condition in AWS SDK, AWS Bedrock, and Elastisearch that could lead to an orphaned Transaction. ([#2842](https://github.com/newrelic/newrelic-dotnet-agent/issues/2842)) ([3afa15f](https://github.com/newrelic/newrelic-dotnet-agent/commit/3afa15f3cf70d4bda5cecbd728c377ad7cb4ff28)) +* Remove usage of non-thread safe HashSet in AwsSdk pipeline wrappers. Thanks, [@gjunge](https://github.com/gjunge)! ([#2855](https://github.com/newrelic/newrelic-dotnet-agent/issues/2855)) ([#2857](https://github.com/newrelic/newrelic-dotnet-agent/issues/2857)) ([ae1d422](https://github.com/newrelic/newrelic-dotnet-agent/commit/ae1d4220e684192525f13c670436dbf4400012bd)) +* Revert environment variable name change in installers and scripts ([#2852](https://github.com/newrelic/newrelic-dotnet-agent/issues/2852)) ([e77683b](https://github.com/newrelic/newrelic-dotnet-agent/commit/e77683b7e60afa502b6e700a51945c757530a47b)) + ## [10.32.0](https://github.com/newrelic/newrelic-dotnet-agent/compare/v10.31.0...v10.32.0) (2024-10-15) From a629138e0e0b00a52f513d64c661576376e64455 Mon Sep 17 00:00:00 2001 From: Marty T <120425148+tippmar-nr@users.noreply.github.com> Date: Thu, 31 Oct 2024 16:35:29 -0500 Subject: [PATCH 09/19] fix: Fix syntax error in `setenv.sh` (#2864) Fixes a syntax error that was introduced in #2852. 
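For context, the stray `;` ahead of `&&` ended the `if` condition list early, leaving `&&` with nothing to chain from, so bash rejected the line with a syntax error at the unexpected `&&` token. A minimal standalone sketch of the corrected guard (illustration only, not the shipped script, which also goes on to export the profiler variables):

#!/bin/bash
# Chain the two emptiness checks directly with '&&'; putting ';' between them
# would end the condition list and turn '&&' into a syntax error.
if [ -z "$CORECLR_NEWRELIC_HOME" ] && [ -z "$CORECLR_NEW_RELIC_HOME" ]; then
    echo "CORECLR_NEWRELIC_HOME is undefined"
fi
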
Resolves #2863 --- build/Linux/build/common/setenv.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build/Linux/build/common/setenv.sh b/build/Linux/build/common/setenv.sh index e96da8bc78..3d127a3ea0 100644 --- a/build/Linux/build/common/setenv.sh +++ b/build/Linux/build/common/setenv.sh @@ -1,6 +1,6 @@ #!/bin/bash -if [ -z "$CORECLR_NEWRELIC_HOME" ]; && [ -z "$CORECLR_NEW_RELIC_HOME" ]; then +if [ -z "$CORECLR_NEWRELIC_HOME" ] && [ -z "$CORECLR_NEW_RELIC_HOME" ]; then echo "CORECLR_NEWRELIC_HOME is undefined" else NRHOME=${CORECLR_NEWRELIC_HOME:-${CORECLR_NEW_RELIC_HOME}} @@ -8,4 +8,4 @@ else export CORECLR_ENABLE_PROFILING=1 export CORECLR_PROFILER={36032161-FFC0-4B61-B559-F6C5D41BAE5A} export CORECLR_PROFILER_PATH=$NRHOME/libNewRelicProfiler.so -fi \ No newline at end of file +fi From 111d1a9c132f200de23786291e3ea85f4206a1ab Mon Sep 17 00:00:00 2001 From: dotnet-agent-team-bot <141066016+dotnet-agent-team-bot@users.noreply.github.com> Date: Thu, 31 Oct 2024 15:21:45 -0700 Subject: [PATCH 10/19] chore(main): release 10.33.1 (#2865) --- release-please/.release-please-manifest.json | 2 +- src/Agent/CHANGELOG.md | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/release-please/.release-please-manifest.json b/release-please/.release-please-manifest.json index 8238793e63..ee26c1132e 100644 --- a/release-please/.release-please-manifest.json +++ b/release-please/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "10.33.0" + ".": "10.33.1" } \ No newline at end of file diff --git a/src/Agent/CHANGELOG.md b/src/Agent/CHANGELOG.md index 4dedc4bb8d..f5ba5cdd39 100644 --- a/src/Agent/CHANGELOG.md +++ b/src/Agent/CHANGELOG.md @@ -4,6 +4,13 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+## [10.33.1](https://github.com/newrelic/newrelic-dotnet-agent/compare/v10.33.0...v10.33.1) (2024-10-31) + + +### Fixes + +* Fix syntax error in `setenv.sh` ([#2864](https://github.com/newrelic/newrelic-dotnet-agent/issues/2864)) ([a629138](https://github.com/newrelic/newrelic-dotnet-agent/commit/a629138e0e0b00a52f513d64c661576376e64455)), closes [#2863](https://github.com/newrelic/newrelic-dotnet-agent/issues/2863) + ## [10.33.0](https://github.com/newrelic/newrelic-dotnet-agent/compare/v10.32.0...v10.33.0) (2024-10-30) From d01a9611bb814ffcc02cbfe13b655f7153e3ee35 Mon Sep 17 00:00:00 2001 From: Marty T <120425148+tippmar-nr@users.noreply.github.com> Date: Mon, 4 Nov 2024 09:13:03 -0600 Subject: [PATCH 11/19] chore: Add ignoreMajor config to Dotty (#2867) --- .../scripts/nugetSlackNotifications/PackageInfo.cs | 4 +++- .../scripts/nugetSlackNotifications/Program.cs | 10 +++++++++- .../scripts/nugetSlackNotifications/packageInfo.json | 12 ++++++++++-- 3 files changed, 22 insertions(+), 4 deletions(-) diff --git a/.github/workflows/scripts/nugetSlackNotifications/PackageInfo.cs b/.github/workflows/scripts/nugetSlackNotifications/PackageInfo.cs index 56f2509b89..dc0c93556d 100644 --- a/.github/workflows/scripts/nugetSlackNotifications/PackageInfo.cs +++ b/.github/workflows/scripts/nugetSlackNotifications/PackageInfo.cs @@ -1,4 +1,4 @@ -using System.Text.Json.Serialization; +using System.Text.Json.Serialization; namespace nugetSlackNotifications { @@ -10,6 +10,8 @@ public class PackageInfo public bool IgnorePatch { get; set; } [JsonPropertyName("ignoreMinor")] public bool IgnoreMinor { get; set; } + [JsonPropertyName("ignoreMajor")] + public bool IgnoreMajor { get; set; } [JsonPropertyName("ignoreReason")] public string IgnoreReason {get; set;} } diff --git a/.github/workflows/scripts/nugetSlackNotifications/Program.cs b/.github/workflows/scripts/nugetSlackNotifications/Program.cs index f11272d1ea..432088134c 100644 --- a/.github/workflows/scripts/nugetSlackNotifications/Program.cs +++ b/.github/workflows/scripts/nugetSlackNotifications/Program.cs @@ -131,7 +131,7 @@ static async Task CheckPackage(PackageInfo package, PackageMetadataResource meta // check publish date if (latest.Published >= searchTime) { - if (previous != null && (package.IgnorePatch || package.IgnoreMinor)) + if (previous != null && (package.IgnorePatch || package.IgnoreMinor || package.IgnoreMajor)) { var previousVersion = previous.Identity.Version; var latestVersion = latest.Identity.Version; @@ -154,6 +154,14 @@ static async Task CheckPackage(PackageInfo package, PackageMetadataResource meta return; } } + if (package.IgnoreMajor) + { + if (previousVersion.Major != latestVersion.Major) + { + Log.Information($"Package {packageName} ignores Major version updates."); + return; + } + } } var previousVersionDescription = previous?.Identity.Version.ToNormalizedString() ?? "Unknown"; diff --git a/.github/workflows/scripts/nugetSlackNotifications/packageInfo.json b/.github/workflows/scripts/nugetSlackNotifications/packageInfo.json index 442b659029..a0b9618f48 100644 --- a/.github/workflows/scripts/nugetSlackNotifications/packageInfo.json +++ b/.github/workflows/scripts/nugetSlackNotifications/packageInfo.json @@ -54,7 +54,11 @@ "ignoreReason": "frequent patch releases create too much noise" }, { - "packageName": "log4net" + "packageName": "log4net", + "ignorePatch": true, + "ignoreMinor": true, + "ignoreMajor": true, + "ignoreReason": "Breaking major update. 
See https://github.com/newrelic/newrelic-dotnet-agent/issues/2764" }, { "packageName": "microsoft.extensions.logging" @@ -66,7 +70,11 @@ "packageName": "microsoft.net.http" }, { - "packageName": "mongodb.driver" + "packageName": "mongodb.driver", + "ignorePatch": true, + "ignoreMinor": true, + "ignoreMajor": true, + "ignoreReason": "Breaking since at least 2.25.0. See https://new-relic.atlassian.net/browse/NR-281915" }, { "packageName": "mysql.data" From 77bbe387d9efedb140dc6dbc7b4561d45db8e1fb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 4 Nov 2024 09:17:24 -0600 Subject: [PATCH 12/19] chore(deps): bump actions/setup-dotnet from 4.0.1 to 4.1.0 in the github-actions group (#2869) chore(deps): bump actions/setup-dotnet in the github-actions group Bumps the github-actions group with 1 update: [actions/setup-dotnet](https://github.com/actions/setup-dotnet). Updates `actions/setup-dotnet` from 4.0.1 to 4.1.0 - [Release notes](https://github.com/actions/setup-dotnet/releases) - [Commits](https://github.com/actions/setup-dotnet/compare/6bd8b7f7774af54e05809fcc5431931b3eb1ddee...3e891b0cb619bf60e2c25674b222b8940e2c1c25) --- updated-dependencies: - dependency-name: actions/setup-dotnet dependency-type: direct:production update-type: version-update:semver-minor dependency-group: github-actions ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/multiverse_run.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/multiverse_run.yml b/.github/workflows/multiverse_run.yml index aa69b97aa7..eebf2e3ed1 100644 --- a/.github/workflows/multiverse_run.yml +++ b/.github/workflows/multiverse_run.yml @@ -62,7 +62,7 @@ jobs: shell: bash - name: Setup .NET Core 3.1.100 - uses: actions/setup-dotnet@6bd8b7f7774af54e05809fcc5431931b3eb1ddee # v4.0.1 + uses: actions/setup-dotnet@3e891b0cb619bf60e2c25674b222b8940e2c1c25 # v4.1.0 with: dotnet-version: '3.1.100' @@ -107,7 +107,7 @@ jobs: egress-policy: audit - name: Setup .NET Core 3.1.100 - uses: actions/setup-dotnet@6bd8b7f7774af54e05809fcc5431931b3eb1ddee # v4.0.1 + uses: actions/setup-dotnet@3e891b0cb619bf60e2c25674b222b8940e2c1c25 # v4.1.0 with: dotnet-version: '3.1.100' From ed5ba088794a70402977b781180659902b0b3b70 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 4 Nov 2024 09:23:58 -0600 Subject: [PATCH 13/19] chore(deps): bump the nuget-tests group across 1 directory with 3 updates (#2870) Bumps the nuget-tests group with 3 updates in the / directory: [Verify.NUnit](https://github.com/VerifyTests/Verify), [System.Collections.Immutable](https://github.com/dotnet/runtime) and System.Net.Http. 
Updates `Verify.NUnit` from 27.0.1 to 28.1.2 - [Release notes](https://github.com/VerifyTests/Verify/releases) - [Commits](https://github.com/VerifyTests/Verify/compare/27.0.1...28.1.2) Updates `System.Collections.Immutable` from 1.1.36 to 8.0.0 - [Release notes](https://github.com/dotnet/runtime/releases) - [Commits](https://github.com/dotnet/runtime/commits/v8.0.0) Updates `System.Net.Http` from 2.0.20710.0 to 4.3.0 --- updated-dependencies: - dependency-name: Verify.NUnit dependency-type: direct:production update-type: version-update:semver-major dependency-group: nuget-tests - dependency-name: System.Collections.Immutable dependency-type: direct:production update-type: version-update:semver-major dependency-group: nuget-tests - dependency-name: System.Net.Http dependency-type: direct:production update-type: version-update:semver-major dependency-group: nuget-tests ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .../UnitTests/PublicApiChangeTests/PublicApiChangeTests.csproj | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/Agent/UnitTests/PublicApiChangeTests/PublicApiChangeTests.csproj b/tests/Agent/UnitTests/PublicApiChangeTests/PublicApiChangeTests.csproj index 188a0c4c2f..1beee142e3 100644 --- a/tests/Agent/UnitTests/PublicApiChangeTests/PublicApiChangeTests.csproj +++ b/tests/Agent/UnitTests/PublicApiChangeTests/PublicApiChangeTests.csproj @@ -11,7 +11,7 @@ - + all runtime; build; native; contentfiles; analyzers; buildtransitive From 7ed945b659be8a670d27c5a266738ffd5a712a41 Mon Sep 17 00:00:00 2001 From: Marty T <120425148+tippmar-nr@users.noreply.github.com> Date: Mon, 4 Nov 2024 14:05:39 -0600 Subject: [PATCH 14/19] fix: Update excluded processes for Azure Linux App Service (#2873) * Exclude `kuduagent.dll` from instrumentation. Resolves #2871 * Add an exclusion for `./DiagServer` commandline --- src/Agent/NewRelic/Home/Home.csproj | 2 +- .../NewRelic/Profiler/Configuration/Configuration.h | 9 +++++++-- .../Profiler/ConfigurationTest/ShouldInstrumentTest.cpp | 2 ++ 3 files changed, 10 insertions(+), 3 deletions(-) diff --git a/src/Agent/NewRelic/Home/Home.csproj b/src/Agent/NewRelic/Home/Home.csproj index 953b989675..80cf0d7f7d 100644 --- a/src/Agent/NewRelic/Home/Home.csproj +++ b/src/Agent/NewRelic/Home/Home.csproj @@ -13,7 +13,7 @@ - + diff --git a/src/Agent/NewRelic/Profiler/Configuration/Configuration.h b/src/Agent/NewRelic/Profiler/Configuration/Configuration.h index 894eae7e7d..fa4005dfef 100644 --- a/src/Agent/NewRelic/Profiler/Configuration/Configuration.h +++ b/src/Agent/NewRelic/Profiler/Configuration/Configuration.h @@ -563,7 +563,12 @@ namespace NewRelic { namespace Profiler { namespace Configuration { { //If it contains MsBuild, it is a build command and should not be profiled. bool isMsBuildInvocation = NewRelic::Profiler::Strings::ContainsCaseInsensitive(commandLine, _X("MSBuild.dll")); - bool isKudu = NewRelic::Profiler::Strings::ContainsCaseInsensitive(commandLine, _X("Kudu.Services.Web")); + bool isKudu = NewRelic::Profiler::Strings::ContainsCaseInsensitive(commandLine, _X("Kudu.Services.Web")) || + // kuduagent.dll is a new version of kudu (maybe KuduLite) in recent versions of Linux Azure App Services + NewRelic::Profiler::Strings::ContainsCaseInsensitive(commandLine, _X("kuduagent.dll")); + // DiagServer is a short-lived process that seems to be invoked every 5 minutes in recent versions of Linux Azure App Services. 
+ bool isDiagServer = NewRelic::Profiler::Strings::ContainsCaseInsensitive(commandLine, _X("./DiagServer")); + std::vector out; Tokenize(commandLine, out); @@ -600,7 +605,7 @@ namespace NewRelic { namespace Profiler { namespace Configuration { } } - if (isMsBuildInvocation || isKudu) { + if (isMsBuildInvocation || isKudu || isDiagServer) { LogInfo(L"This process will not be instrumented. Command line identified as invalid invocation for instrumentation"); return false; } diff --git a/src/Agent/NewRelic/Profiler/ConfigurationTest/ShouldInstrumentTest.cpp b/src/Agent/NewRelic/Profiler/ConfigurationTest/ShouldInstrumentTest.cpp index 3d4e755d62..221cb90131 100644 --- a/src/Agent/NewRelic/Profiler/ConfigurationTest/ShouldInstrumentTest.cpp +++ b/src/Agent/NewRelic/Profiler/ConfigurationTest/ShouldInstrumentTest.cpp @@ -41,6 +41,8 @@ namespace NewRelic { namespace Profiler { namespace Configuration { namespace Te Assert::IsFalse(configuration.ShouldInstrument(processPath, L"", appPoolId, _X("app1.exe | dotnet run"), isCoreClr)); Assert::IsFalse(configuration.ShouldInstrument(processPath, L"", appPoolId, _X("dotnet Kudu.Services.Web.dll"), isCoreClr)); Assert::IsFalse(configuration.ShouldInstrument(processPath, L"", appPoolId, _X("/opt/Kudu/Kudu.Services.Web"), isCoreClr)); + Assert::IsFalse(configuration.ShouldInstrument(processPath, L"", appPoolId, _X("/appservice/dotnet/dotnet /appservice/kuduagent/kuduagent.dll"), isCoreClr)); + Assert::IsFalse(configuration.ShouldInstrument(processPath, L"", appPoolId, _X("./DiagServer"), isCoreClr)); Assert::IsTrue(configuration.ShouldInstrument(processPath, L"", appPoolId, _X("dotnetXexe restore"), isCoreClr)); Assert::IsTrue(configuration.ShouldInstrument(processPath, L"", appPoolId, _X("\"c:\\program files\\dotnet.exe\"run"), isCoreClr)); From 11eedcbae89c4b3b4456039f6c984dc757a551fb Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 4 Nov 2024 13:37:14 -0800 Subject: [PATCH 15/19] test:Dotty instrumentation library updates for 2024-Nov-01 (#2866) --- .../Common/MFALatestPackages/MFALatestPackages.csproj | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/Agent/IntegrationTests/SharedApplications/Common/MFALatestPackages/MFALatestPackages.csproj b/tests/Agent/IntegrationTests/SharedApplications/Common/MFALatestPackages/MFALatestPackages.csproj index e713bdde6e..b72937d49d 100644 --- a/tests/Agent/IntegrationTests/SharedApplications/Common/MFALatestPackages/MFALatestPackages.csproj +++ b/tests/Agent/IntegrationTests/SharedApplications/Common/MFALatestPackages/MFALatestPackages.csproj @@ -81,8 +81,8 @@ - - + + @@ -99,4 +99,4 @@ - + \ No newline at end of file From 8a241f82e5da3bae8b8437b0b9240c2ae6e94f70 Mon Sep 17 00:00:00 2001 From: Marty T <120425148+tippmar-nr@users.noreply.github.com> Date: Mon, 4 Nov 2024 15:37:31 -0600 Subject: [PATCH 16/19] ci: Add shellcheck job (#2876) --- .github/workflows/all_solutions.yml | 22 +- build/Linux/build/common/run.sh | 2 +- build/Linux/test/scripts/osht.sh | 935 +++++++++--------- .../cosmosdb/documentation.start.sh | 3 +- .../couchbase/configure-server.sh | 2 + 5 files changed, 493 insertions(+), 471 deletions(-) diff --git a/.github/workflows/all_solutions.yml b/.github/workflows/all_solutions.yml index 2736c7150e..481015bb8d 100644 --- a/.github/workflows/all_solutions.yml +++ b/.github/workflows/all_solutions.yml @@ -34,12 +34,32 @@ jobs: contents: read pull-requests: read + shellcheck: + name: Validate shell scripts 
+ needs: check-modified-files + runs-on: ubuntu-latest + steps: + - name: Harden Runner + uses: step-security/harden-runner@91182cccc01eb5e619899d80e4e971d6181294a7 # v2.10.1 + with: + disable-sudo: true + egress-policy: audit + + - name: Checkout + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + fetch-depth: 0 + - name: Run Shellcheck + run: | + find ${{ github.workspace }} -name "*.sh" -exec shellcheck --severity=error {} + # This builds both FullAgent and MSIInstaller since MSIInstaller requires FullAgent artifacts. build-fullagent-msi: name: Build FullAgent and MSIInstaller runs-on: windows-2022 - needs: check-modified-files + needs: + - check-modified-files + - shellcheck # don't run this job if triggered by Dependabot, will cause all other jobs to be skipped as well # run this job if non-workflow files were modified, or if triggered by a release, a manual execution or schedule if: github.actor != 'dependabot[bot]' && (needs.check-modified-files.outputs.non-workflow-files-changed == 'true' || github.event.release || github.event_name == 'workflow_dispatch' || github.event_name == 'schedule') diff --git a/build/Linux/build/common/run.sh b/build/Linux/build/common/run.sh index c000d28f52..de233e2151 100644 --- a/build/Linux/build/common/run.sh +++ b/build/Linux/build/common/run.sh @@ -2,4 +2,4 @@ # This script can be used to run a dotnet application with New Relic monitoring. -NRHOME=${CORECLR_NEWRELIC_HOME:-${CORECLR_NEW_RELIC_HOME:-/usr/local/newrelic-dotnet-agent}} CORECLR_ENABLE_PROFILING=1 CORECLR_PROFILER={36032161-FFC0-4B61-B559-F6C5D41BAE5A} CORECLR_PROFILER_PATH=$NRHOME/libNewRelicProfiler.so $@ +NRHOME="${CORECLR_NEWRELIC_HOME:-${CORECLR_NEW_RELIC_HOME:-/usr/local/newrelic-dotnet-agent}}" CORECLR_ENABLE_PROFILING=1 CORECLR_PROFILER={36032161-FFC0-4B61-B559-F6C5D41BAE5A} CORECLR_PROFILER_PATH="$NRHOME/libNewRelicProfiler.so" "$@" \ No newline at end of file diff --git a/build/Linux/test/scripts/osht.sh b/build/Linux/test/scripts/osht.sh index 7411298548..0163258f68 100644 --- a/build/Linux/test/scripts/osht.sh +++ b/build/Linux/test/scripts/osht.sh @@ -1,469 +1,470 @@ -############################################################################### -# Copyright 2016 Cory Bennett -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-############################################################################### -: ${OSHT_MKTEMP=mktemp -t osht.XXXXXX} -: ${OSHT_ABORT=} -: ${OSHT_DIFF=diff -u} -: ${OSHT_JUNIT=} -: ${OSHT_JUNIT_OUTPUT="$(cd "$(dirname "$0")"; pwd)/$(basename "$0")-tests.xml"} -: ${OSHT_STDOUT=$($OSHT_MKTEMP)} -: ${OSHT_STDERR=$($OSHT_MKTEMP)} -: ${OSHT_STDIO=$($OSHT_MKTEMP)} -: ${OSHT_VERBOSE=} -: ${OSHT_WATCH=} - -: ${_OSHT_CURRENT_TEST_FILE=$($OSHT_MKTEMP)} -: ${_OSHT_CURRENT_TEST=0} -: ${_OSHT_DEPTH=2} -: ${_OSHT_RUNNER=$($OSHT_MKTEMP)} -: ${_OSHT_DIFFOUT=$($OSHT_MKTEMP)} -: ${_OSHT_FAILED_FILE=$($OSHT_MKTEMP)} -: ${_OSHT_INITPATH=$(pwd)} -: ${_OSHT_JUNIT=$($OSHT_MKTEMP)} -: ${_OSHT_LAPSE=} -: ${_OSHT_PLANNED_TESTS=} -: ${_OSHT_SKIP=} -: ${_OSHT_START=} -: ${_OSHT_TESTING=} -: ${_OSHT_TODO=} - -export OSHT_VERSION=1.0.0 - -declare -a _OSHT_ARGS - -function _osht_usage { - [ -n "${1:-}" ] && echo -e "Error: $1\n" >&2 - cat <] [--junit] [--verbose] [--abort] -Options: --a|--abort On the first error abort the test execution --h|--help This help message --j|--junit Enable JUnit xml writing --o|--output= Location to write JUnit xml file [default: $OSHT_JUNIT_OUTPUT] --v|--verbose Print extra output for debugging tests --w|--watch Print output to stdout to allow watching progress on long-running tests -EOF - exit 0 -} - - -while true; do - [[ $# == 0 ]] && break - case $1 in - -a | --abort) OSHT_ABORT=1; shift;; - -h | --help) _osht_usage;; - -j | --junit) OSHT_JUNIT=1; shift ;; - -o | --output) OSHT_JUNIT_OUTPUT=$2; shift 2 ;; - -v | --verbose) OSHT_VERBOSE=1; shift ;; - -w | --watch) OSHT_WATCH=1; shift ;; - -- ) shift; break ;; - -* ) (_osht_usage "Invalid argument $1") >&2 && exit 1;; - * ) break ;; - esac -done - - -function _osht_cleanup { - local rv=$? - if [ -z "$_OSHT_PLANNED_TESTS" ]; then - _OSHT_PLANNED_TESTS=$_OSHT_CURRENT_TEST - echo "1..$_OSHT_PLANNED_TESTS" - fi - if [[ -n $OSHT_JUNIT ]]; then - _osht_init_junit > $OSHT_JUNIT_OUTPUT - cat $_OSHT_JUNIT >> $OSHT_JUNIT_OUTPUT - _osht_end_junit >> $OSHT_JUNIT_OUTPUT - fi - local failed=$(_osht_failed) - rm -f $OSHT_STDOUT $OSHT_STDERR $OSHT_STDIO $_OSHT_CURRENT_TEST_FILE $_OSHT_JUNIT $_OSHT_FAILED_FILE $_OSHT_DIFFOUT $_OSHT_RUNNER - if [[ $_OSHT_PLANNED_TESTS != $_OSHT_CURRENT_TEST ]]; then - echo "Looks like you planned $_OSHT_PLANNED_TESTS tests but ran $_OSHT_CURRENT_TEST." >&2 - rv=255 - fi - if [[ $failed > 0 ]]; then - echo "Looks like you failed $failed test of $_OSHT_CURRENT_TEST." 
>&2 - rv=$failed - fi - - exit $rv -} - -trap _osht_cleanup INT TERM EXIT - -function _osht_xmlencode { - sed -e 's/\&/\&/g' -e 's/\"/\"/g' -e 's//\>/g' -} - -function _osht_strip_terminal_escape { - sed -e $'s/\x1B\[[0-9]*;[0-9]*[m|K|G|A]//g' -e $'s/\x1B\[[0-9]*[m|K|G|A]//g' -} - -function _osht_timestamp { - if [ -n "$_OSHT_TESTING" ]; then - echo "2016-01-01T08:00:00" - else - date "+%Y-%m-%dT%H:%M:%S" - fi -} - -function _osht_init_junit { - cat < - -EOF -} - -function _osht_add_junit { - if [[ -z $OSHT_JUNIT ]]; then - return - fi - failure= - if [[ $# != 0 ]]; then - failure=" - " - fi - local stdout=$(cat $OSHT_STDOUT | _osht_strip_terminal_escape) - local stderr=$(cat $OSHT_STDERR | _osht_strip_terminal_escape) - local _OSHT_DEPTH=$(($_OSHT_DEPTH+1)) - cat <> $_OSHT_JUNIT - - $failure - - -EOF -} - -function _osht_end_junit { - cat < -EOF -} - -function _osht_source { - local parts=($(caller $_OSHT_DEPTH)) - local fn=$(basename ${parts[2]}) - echo ${fn%.*} -} - -function _osht_get_line { - local parts=($(caller $_OSHT_DEPTH)) - (cd $_OSHT_INITPATH && sed "${parts[0]}q;d" ${parts[2]}) -} - -function _osht_source_file { - local parts=($(caller $_OSHT_DEPTH)) - echo "${parts[2]}" -} - -function _osht_source_linenum { - local parts=($(caller $_OSHT_DEPTH)) - echo "${parts[0]}" -} - -function _osht_increment_test { - _OSHT_CURRENT_TEST=$(cat $_OSHT_CURRENT_TEST_FILE) - let _OSHT_CURRENT_TEST=_OSHT_CURRENT_TEST+1 - echo $_OSHT_CURRENT_TEST > $_OSHT_CURRENT_TEST_FILE - _osht_start -} - -function _osht_increment_failed { - local _FAILED=$(_osht_failed) - let _FAILED=_FAILED+1 - echo $_FAILED > $_OSHT_FAILED_FILE -} - -function _osht_failed { - [[ -s $_OSHT_FAILED_FILE ]] && cat $_OSHT_FAILED_FILE || echo "0" -} - -function _osht_start { - _OSHT_START=$(date +%s) -} - -function _osht_stop { - local _now=$(date +%s) - _OSHT_LAPSE=$(($_now - $_OSHT_START)) -} - -function _osht_ok { - _osht_stop - _osht_debug - echo -n "ok $_OSHT_CURRENT_TEST - $(_osht_get_line)" - if [ -n "$_OSHT_TODO" ]; then - echo " # TODO Test Know to fail" - else - echo - fi - _osht_add_junit -} - -function _osht_nok { - _osht_stop - if [ -z "$_OSHT_TODO" ]; then - echo "# ERROR: $(_osht_source_file) at line $(_osht_source_linenum)" - fi - _osht_debug - echo -n "not ok $_OSHT_CURRENT_TEST - $(_osht_get_line)" - if [ -n "$_OSHT_TODO" ]; then - echo " # TODO Test Know to fail" - else - _osht_increment_failed - echo - fi - _osht_add_junit "${_OSHT_ARGS[@]}" - if [ -n "$OSHT_ABORT" ]; then - exit 1 - fi -} - -function _osht_run { - # reset STDIO files - : >$OSHT_STDOUT - : >$OSHT_STDERR - : >$OSHT_STDIO - - cat < $_OSHT_RUNNER #!/bin/bash -set -o monitor -exec 1> >(tee -a -- $OSHT_STDOUT $OSHT_STDIO) -exec 2> >(tee -a -- $OSHT_STDERR $OSHT_STDIO >&2) -function cleanup { - rv=\$? 
- platform=\$(uname -s) - if [[ \$platform == Darwin ]]; then - PGRP=\$(ps -p \$\$ -o pgid=) - else - PGRP=\$(ps -p \$\$ --no-header -o pgrp) - fi - - if [[ \$platform == Darwin ]]; then - PIDS=\$(ps -o pgid=,ppid=,pid=,comm= | awk "\\\$1 == \$PGRP && \\\$4 == \"tee\" {print \\\$2\" \"\\\$3}") - else - PIDS=\$(ps --no-headers -o pgrp,ppid,pid,cmd | awk "\\\$1 == \$PGRP && \\\$4 == \"tee\" {print \\\$2\" \"\\\$3}") - fi - if [[ -n "\$PIDS" ]]; then - kill \$PIDS >/dev/null 2>&1 - fi - return \$rv -} -trap cleanup INT TERM EXIT -"\$@" -EOF - chmod 755 $_OSHT_RUNNER - - set +e - if [[ -n "$OSHT_WATCH" ]]; then - SEDBUFOPT=-u - if [ $(uname -s) == "Darwin" ]; then - SEDBUFOPT=-l - fi - $_OSHT_RUNNER "$@" 2>&1 | sed $SEDBUFOPT 's/^/# /' - OSHT_STATUS=${PIPESTATUS[0]} - else - $_OSHT_RUNNER "$@" >/dev/null 2>&1 - OSHT_STATUS=$? - fi - set -e -} - -function _osht_qq { - declare -a out - local p - for p in "$@"; do - out+=($(printf %q "$p")) - done - local IFS=" " - echo -n "${out[*]}" -} +# ############################################################################### +# # Copyright 2016 Cory Bennett +# # +# # Licensed under the Apache License, Version 2.0 (the "License"); +# # you may not use this file except in compliance with the License. +# # You may obtain a copy of the License at +# # +# # http://www.apache.org/licenses/LICENSE-2.0 +# # +# # Unless required by applicable law or agreed to in writing, software +# # distributed under the License is distributed on an "AS IS" BASIS, +# # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# # See the License for the specific language governing permissions and +# # limitations under the License. +# ############################################################################### +# : ${OSHT_MKTEMP=mktemp -t osht.XXXXXX} +# : ${OSHT_ABORT=} +# : ${OSHT_DIFF=diff -u} +# : ${OSHT_JUNIT=} +# : ${OSHT_JUNIT_OUTPUT="$(cd "$(dirname "$0")"; pwd)/$(basename "$0")-tests.xml"} +# : ${OSHT_STDOUT=$($OSHT_MKTEMP)} +# : ${OSHT_STDERR=$($OSHT_MKTEMP)} +# : ${OSHT_STDIO=$($OSHT_MKTEMP)} +# : ${OSHT_VERBOSE=} +# : ${OSHT_WATCH=} + +# : ${_OSHT_CURRENT_TEST_FILE=$($OSHT_MKTEMP)} +# : ${_OSHT_CURRENT_TEST=0} +# : ${_OSHT_DEPTH=2} +# : ${_OSHT_RUNNER=$($OSHT_MKTEMP)} +# : ${_OSHT_DIFFOUT=$($OSHT_MKTEMP)} +# : ${_OSHT_FAILED_FILE=$($OSHT_MKTEMP)} +# : ${_OSHT_INITPATH=$(pwd)} +# : ${_OSHT_JUNIT=$($OSHT_MKTEMP)} +# : ${_OSHT_LAPSE=} +# : ${_OSHT_PLANNED_TESTS=} +# : ${_OSHT_SKIP=} +# : ${_OSHT_START=} +# : ${_OSHT_TESTING=} +# : ${_OSHT_TODO=} + +# export OSHT_VERSION=1.0.0 + +# declare -a _OSHT_ARGS + +# function _osht_usage { +# [ -n "${1:-}" ] && echo -e "Error: $1\n" >&2 +# cat <] [--junit] [--verbose] [--abort] +# Options: +# -a|--abort On the first error abort the test execution +# -h|--help This help message +# -j|--junit Enable JUnit xml writing +# -o|--output= Location to write JUnit xml file [default: $OSHT_JUNIT_OUTPUT] +# -v|--verbose Print extra output for debugging tests +# -w|--watch Print output to stdout to allow watching progress on long-running tests +# EOF +# exit 0 +# } + + +# while true; do +# [[ $# == 0 ]] && break +# case $1 in +# -a | --abort) OSHT_ABORT=1; shift;; +# -h | --help) _osht_usage;; +# -j | --junit) OSHT_JUNIT=1; shift ;; +# -o | --output) OSHT_JUNIT_OUTPUT=$2; shift 2 ;; +# -v | --verbose) OSHT_VERBOSE=1; shift ;; +# -w | --watch) OSHT_WATCH=1; shift ;; +# -- ) shift; break ;; +# -* ) (_osht_usage "Invalid argument $1") >&2 && exit 1;; +# * ) break ;; +# esac +# done + + +# function _osht_cleanup { +# local 
rv=$? +# if [ -z "$_OSHT_PLANNED_TESTS" ]; then +# _OSHT_PLANNED_TESTS=$_OSHT_CURRENT_TEST +# echo "1..$_OSHT_PLANNED_TESTS" +# fi +# if [[ -n $OSHT_JUNIT ]]; then +# _osht_init_junit > $OSHT_JUNIT_OUTPUT +# cat $_OSHT_JUNIT >> $OSHT_JUNIT_OUTPUT +# _osht_end_junit >> $OSHT_JUNIT_OUTPUT +# fi +# local failed=$(_osht_failed) +# rm -f $OSHT_STDOUT $OSHT_STDERR $OSHT_STDIO $_OSHT_CURRENT_TEST_FILE $_OSHT_JUNIT $_OSHT_FAILED_FILE $_OSHT_DIFFOUT $_OSHT_RUNNER +# if [[ $_OSHT_PLANNED_TESTS != $_OSHT_CURRENT_TEST ]]; then +# echo "Looks like you planned $_OSHT_PLANNED_TESTS tests but ran $_OSHT_CURRENT_TEST." >&2 +# rv=255 +# fi +# if [[ $failed > 0 ]]; then +# echo "Looks like you failed $failed test of $_OSHT_CURRENT_TEST." >&2 +# rv=$failed +# fi + +# exit $rv +# } + +# trap _osht_cleanup INT TERM EXIT + +# function _osht_xmlencode { +# sed -e 's/\&/\&/g' -e 's/\"/\"/g' -e 's//\>/g' +# } + +# function _osht_strip_terminal_escape { +# sed -e $'s/\x1B\[[0-9]*;[0-9]*[m|K|G|A]//g' -e $'s/\x1B\[[0-9]*[m|K|G|A]//g' +# } + +# function _osht_timestamp { +# if [ -n "$_OSHT_TESTING" ]; then +# echo "2016-01-01T08:00:00" +# else +# date "+%Y-%m-%dT%H:%M:%S" +# fi +# } + +# function _osht_init_junit { +# cat < +# +# EOF +# } + +# function _osht_add_junit { +# if [[ -z $OSHT_JUNIT ]]; then +# return +# fi +# failure= +# if [[ $# != 0 ]]; then +# failure=" +# " +# fi +# local stdout=$(cat $OSHT_STDOUT | _osht_strip_terminal_escape) +# local stderr=$(cat $OSHT_STDERR | _osht_strip_terminal_escape) +# local _OSHT_DEPTH=$(($_OSHT_DEPTH+1)) +# cat <> $_OSHT_JUNIT +# +# $failure +# +# +# EOF +# } + +# function _osht_end_junit { +# cat < +# EOF +# } + +# function _osht_source { +# local parts=($(caller $_OSHT_DEPTH)) +# local fn=$(basename ${parts[2]}) +# echo ${fn%.*} +# } + +# function _osht_get_line { +# local parts=($(caller $_OSHT_DEPTH)) +# (cd $_OSHT_INITPATH && sed "${parts[0]}q;d" ${parts[2]}) +# } + +# function _osht_source_file { +# local parts=($(caller $_OSHT_DEPTH)) +# echo "${parts[2]}" +# } + +# function _osht_source_linenum { +# local parts=($(caller $_OSHT_DEPTH)) +# echo "${parts[0]}" +# } + +# function _osht_increment_test { +# _OSHT_CURRENT_TEST=$(cat $_OSHT_CURRENT_TEST_FILE) +# let _OSHT_CURRENT_TEST=_OSHT_CURRENT_TEST+1 +# echo $_OSHT_CURRENT_TEST > $_OSHT_CURRENT_TEST_FILE +# _osht_start +# } + +# function _osht_increment_failed { +# local _FAILED=$(_osht_failed) +# let _FAILED=_FAILED+1 +# echo $_FAILED > $_OSHT_FAILED_FILE +# } + +# function _osht_failed { +# [[ -s $_OSHT_FAILED_FILE ]] && cat $_OSHT_FAILED_FILE || echo "0" +# } + +# function _osht_start { +# _OSHT_START=$(date +%s) +# } + +# function _osht_stop { +# local _now=$(date +%s) +# _OSHT_LAPSE=$(($_now - $_OSHT_START)) +# } + +# function _osht_ok { +# _osht_stop +# _osht_debug +# echo -n "ok $_OSHT_CURRENT_TEST - $(_osht_get_line)" +# if [ -n "$_OSHT_TODO" ]; then +# echo " # TODO Test Know to fail" +# else +# echo +# fi +# _osht_add_junit +# } + +# function _osht_nok { +# _osht_stop +# if [ -z "$_OSHT_TODO" ]; then +# echo "# ERROR: $(_osht_source_file) at line $(_osht_source_linenum)" +# fi +# _osht_debug +# echo -n "not ok $_OSHT_CURRENT_TEST - $(_osht_get_line)" +# if [ -n "$_OSHT_TODO" ]; then +# echo " # TODO Test Know to fail" +# else +# _osht_increment_failed +# echo +# fi +# _osht_add_junit "${_OSHT_ARGS[@]}" +# if [ -n "$OSHT_ABORT" ]; then +# exit 1 +# fi +# } + +# function _osht_run { +# # reset STDIO files +# : >$OSHT_STDOUT +# : >$OSHT_STDERR +# : >$OSHT_STDIO + +# cat < $_OSHT_RUNNER +# #!/bin/bash +# set -o 
monitor +# exec 1> >(tee -a -- $OSHT_STDOUT $OSHT_STDIO) +# exec 2> >(tee -a -- $OSHT_STDERR $OSHT_STDIO >&2) +# function cleanup { +# rv=\$? +# platform=\$(uname -s) +# if [[ \$platform == Darwin ]]; then +# PGRP=\$(ps -p \$\$ -o pgid=) +# else +# PGRP=\$(ps -p \$\$ --no-header -o pgrp) +# fi + +# if [[ \$platform == Darwin ]]; then +# PIDS=\$(ps -o pgid=,ppid=,pid=,comm= | awk "\\\$1 == \$PGRP && \\\$4 == \"tee\" {print \\\$2\" \"\\\$3}") +# else +# PIDS=\$(ps --no-headers -o pgrp,ppid,pid,cmd | awk "\\\$1 == \$PGRP && \\\$4 == \"tee\" {print \\\$2\" \"\\\$3}") +# fi +# if [[ -n "\$PIDS" ]]; then +# kill \$PIDS >/dev/null 2>&1 +# fi +# return \$rv +# } +# trap cleanup INT TERM EXIT +# "\$@" +# EOF +# chmod 755 $_OSHT_RUNNER + +# set +e +# if [[ -n "$OSHT_WATCH" ]]; then +# SEDBUFOPT=-u +# if [ $(uname -s) == "Darwin" ]; then +# SEDBUFOPT=-l +# fi +# $_OSHT_RUNNER "$@" 2>&1 | sed $SEDBUFOPT 's/^/# /' +# OSHT_STATUS=${PIPESTATUS[0]} +# else +# $_OSHT_RUNNER "$@" >/dev/null 2>&1 +# OSHT_STATUS=$? +# fi +# set -e +# } + +# function _osht_qq { +# declare -a out +# local p +# for p in "$@"; do +# out+=($(printf %q "$p")) +# done +# local IFS=" " +# echo -n "${out[*]}" +# } -function _osht_debug { - if [[ -n $OSHT_VERBOSE ]]; then - _osht_debugmsg | sed 's/^/# /g' - fi -} - -function _osht_debugmsg { - local parts=($(caller $_OSHT_DEPTH)) - local op=${parts[1]} - if [[ ${parts[1]} == "TODO" ]]; then - parts=($(caller $(($_OSHT_DEPTH-1)))) - op=${parts[1]} - fi - case $op in - IS) - _osht_qq "${_OSHT_ARGS[@]}"; echo;; - ISNT) - _osht_qq \! "${_OSHT_ARGS[@]}"; echo;; - OK) - _osht_qq test "${_OSHT_ARGS[@]}"; echo;; - NOK) - _osht_qq test \! "${_OSHT_ARGS[@]}"; echo;; - NRUNS|RUNS) - echo "RUNNING: $(_osht_qq "${_OSHT_ARGS[@]}")" - echo "STATUS: $OSHT_STATUS" - echo "STDIO < $tmpfile - $OSHT_DIFF $tmpfile $OSHT_STDIO | tee $_OSHT_DIFFOUT | sed 's/^/# /g' - local status=${PIPESTATUS[0]} - rm $tmpfile - [[ $status == 0 ]] && _osht_ok || _osht_nok -} - -function ODIFF { - _osht_args $OSHT_DIFF - $OSHT_STDOUT - _osht_increment_test - tmpfile=$($OSHT_MKTEMP) - cat - > $tmpfile - $OSHT_DIFF $tmpfile $OSHT_STDOUT | tee $_OSHT_DIFFOUT | sed 's/^/# /g' - local status=${PIPESTATUS[0]} - rm $tmpfile - [[ $status == 0 ]] && _osht_ok || _osht_nok -} - -function EDIFF { - _osht_args $OSHT_DIFF - $OSHT_STDERR - _osht_increment_test - tmpfile=$($OSHT_MKTEMP) - cat - > $tmpfile - $OSHT_DIFF $tmpfile $OSHT_STDERR | tee $_OSHT_DIFFOUT | sed 's/^/# /g' - local status=${PIPESTATUS[0]} - rm $tmpfile - [[ $status == 0 ]] && _osht_ok || _osht_nok -} - -function TODO { - local _OSHT_TODO=1 - local _OSHT_DEPTH=$(($_OSHT_DEPTH+1)) - "$@" -} +# function _osht_debug { +# if [[ -n $OSHT_VERBOSE ]]; then +# _osht_debugmsg | sed 's/^/# /g' +# fi +# } + +# function _osht_debugmsg { +# local parts=($(caller $_OSHT_DEPTH)) +# local op=${parts[1]} +# if [[ ${parts[1]} == "TODO" ]]; then +# parts=($(caller $(($_OSHT_DEPTH-1)))) +# op=${parts[1]} +# fi +# case $op in +# IS) +# _osht_qq "${_OSHT_ARGS[@]}"; echo;; +# ISNT) +# _osht_qq \! "${_OSHT_ARGS[@]}"; echo;; +# OK) +# _osht_qq test "${_OSHT_ARGS[@]}"; echo;; +# NOK) +# _osht_qq test \! 
"${_OSHT_ARGS[@]}"; echo;; +# NRUNS|RUNS) +# echo "RUNNING: $(_osht_qq "${_OSHT_ARGS[@]}")" +# echo "STATUS: $OSHT_STATUS" +# echo "STDIO < $tmpfile +# $OSHT_DIFF $tmpfile $OSHT_STDIO | tee $_OSHT_DIFFOUT | sed 's/^/# /g' +# local status=${PIPESTATUS[0]} +# rm $tmpfile +# [[ $status == 0 ]] && _osht_ok || _osht_nok +# } + +# function ODIFF { +# _osht_args $OSHT_DIFF - $OSHT_STDOUT +# _osht_increment_test +# tmpfile=$($OSHT_MKTEMP) +# cat - > $tmpfile +# $OSHT_DIFF $tmpfile $OSHT_STDOUT | tee $_OSHT_DIFFOUT | sed 's/^/# /g' +# local status=${PIPESTATUS[0]} +# rm $tmpfile +# [[ $status == 0 ]] && _osht_ok || _osht_nok +# } + +# function EDIFF { +# _osht_args $OSHT_DIFF - $OSHT_STDERR +# _osht_increment_test +# tmpfile=$($OSHT_MKTEMP) +# cat - > $tmpfile +# $OSHT_DIFF $tmpfile $OSHT_STDERR | tee $_OSHT_DIFFOUT | sed 's/^/# /g' +# local status=${PIPESTATUS[0]} +# rm $tmpfile +# [[ $status == 0 ]] && _osht_ok || _osht_nok +# } + +# function TODO { +# local _OSHT_TODO=1 +# local _OSHT_DEPTH=$(($_OSHT_DEPTH+1)) +# "$@" +# } diff --git a/tests/Agent/IntegrationTests/UnboundedServices/cosmosdb/documentation.start.sh b/tests/Agent/IntegrationTests/UnboundedServices/cosmosdb/documentation.start.sh index 065904b0ed..fa95c465f3 100644 --- a/tests/Agent/IntegrationTests/UnboundedServices/cosmosdb/documentation.start.sh +++ b/tests/Agent/IntegrationTests/UnboundedServices/cosmosdb/documentation.start.sh @@ -1,6 +1,5 @@ -# Dump of /usr/local/bin/cosmos/start.sh in the docker image... useful for discovering undocumented env vars, unless it changes! :) - #!/bin/sh +# Dump of /usr/local/bin/cosmos/start.sh in the docker image... useful for discovering undocumented env vars, unless it changes! :) export PAL_NO_DEFAULT_PACKAGES=1 export PAL_LOADER_SNAPS=1 diff --git a/tests/Agent/IntegrationTests/UnboundedServices/couchbase/configure-server.sh b/tests/Agent/IntegrationTests/UnboundedServices/couchbase/configure-server.sh index 9549f40d35..9de5af19ae 100644 --- a/tests/Agent/IntegrationTests/UnboundedServices/couchbase/configure-server.sh +++ b/tests/Agent/IntegrationTests/UnboundedServices/couchbase/configure-server.sh @@ -1,3 +1,5 @@ +#!/bin/sh + set -m /entrypoint.sh couchbase-server & From 95477b92053bdc55c0388e399cc05741bd629c5e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 5 Nov 2024 09:36:15 -0600 Subject: [PATCH 17/19] chore(deps): bump NewRelic.Agent.Api from 10.32.0 to 10.33.1 in /build in the nuget-agent group across 1 directory (#2877) chore(deps): bump NewRelic.Agent.Api Bumps the nuget-agent group with 1 update in the /build directory: [NewRelic.Agent.Api](https://github.com/newrelic/newrelic-dotnet-agent). Updates `NewRelic.Agent.Api` from 10.32.0 to 10.33.1 - [Release notes](https://github.com/newrelic/newrelic-dotnet-agent/releases) - [Commits](https://github.com/newrelic/newrelic-dotnet-agent/compare/v10.32.0...v10.33.1) --- updated-dependencies: - dependency-name: NewRelic.Agent.Api dependency-type: direct:production update-type: version-update:semver-minor dependency-group: nuget-agent ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .../nugetSlackNotifications/nugetSlackNotifications.csproj | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/scripts/nugetSlackNotifications/nugetSlackNotifications.csproj b/.github/workflows/scripts/nugetSlackNotifications/nugetSlackNotifications.csproj index 04a7b913d2..2e92fcd341 100644 --- a/.github/workflows/scripts/nugetSlackNotifications/nugetSlackNotifications.csproj +++ b/.github/workflows/scripts/nugetSlackNotifications/nugetSlackNotifications.csproj @@ -6,7 +6,7 @@ - + From 75d4d140f54b1d71f8ee146e933494077891c268 Mon Sep 17 00:00:00 2001 From: Marty T <120425148+tippmar-nr@users.noreply.github.com> Date: Tue, 5 Nov 2024 14:28:17 -0600 Subject: [PATCH 18/19] test: Remove deprecated `cohere` LLM model from testing (#2878) --- FullAgent.sln | 4 ++-- .../IntegrationTests/IntegrationTests/LLM/BedrockTests.cs | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/FullAgent.sln b/FullAgent.sln index 45d2bc0a90..113ac8211a 100644 --- a/FullAgent.sln +++ b/FullAgent.sln @@ -219,7 +219,7 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AzureFunction", "src\Agent\ EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "PublicApiChangeTests", "tests\Agent\UnitTests\PublicApiChangeTests\PublicApiChangeTests.csproj", "{A8F6EFEA-1C31-4461-A7B4-25C30D954EE2}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Memcached", "src\Agent\NewRelic\Agent\Extensions\Providers\Wrapper\Memcached\Memcached.csproj", "{5D74E5C5-9BA3-423B-86F7-14C2D1A14661}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Memcached", "src\Agent\NewRelic\Agent\Extensions\Providers\Wrapper\Memcached\Memcached.csproj", "{5D74E5C5-9BA3-423B-86F7-14C2D1A14661}" EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution @@ -540,8 +540,8 @@ Global {5D74E5C5-9BA3-423B-86F7-14C2D1A14661} = {5E86E10A-C38F-48CB-ADE9-67B22BB2F50A} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution - EnterpriseLibraryConfigurationToolBinariesPath = packages\Unity.2.1.505.2\lib\NET35 SolutionGuid = {D8B98070-6B8E-403C-A07F-A3F2E4A3A3D0} + EnterpriseLibraryConfigurationToolBinariesPath = packages\Unity.2.1.505.2\lib\NET35 EndGlobalSection GlobalSection(TestCaseManagementSettings) = postSolution CategoryFile = FullAgent.vsmdi diff --git a/tests/Agent/IntegrationTests/IntegrationTests/LLM/BedrockTests.cs b/tests/Agent/IntegrationTests/IntegrationTests/LLM/BedrockTests.cs index 74b39c791d..0a0e0dcb34 100644 --- a/tests/Agent/IntegrationTests/IntegrationTests/LLM/BedrockTests.cs +++ b/tests/Agent/IntegrationTests/IntegrationTests/LLM/BedrockTests.cs @@ -23,7 +23,6 @@ public abstract class BedrockTestsBase : NewRelicIntegrationTest Date: Tue, 5 Nov 2024 13:48:19 -0800 Subject: [PATCH 19/19] feat: Add Distributed Tracing support for Azure Functions HTTPTrigger. 
(#2868) --- .../Helpers/DictionaryHelpers.cs | 33 +++ .../AzureFunction/AzureFunction.csproj | 1 + .../Wrapper/AzureFunction/FunctionDetails.cs | 204 ++++++++++++++++++ .../FunctionsHttpProxyingMiddlewareWrapper.cs | 14 ++ .../InvokeFunctionAsyncWrapper.cs | 191 +--------------- .../AzureFunctionHttpTriggerTests.cs | 66 ++++++ .../AzureFunctionApplicationFixture.cs | 25 ++- .../Helpers/DictionaryHelpersTests.cs | 30 +++ 8 files changed, 379 insertions(+), 185 deletions(-) create mode 100644 src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Helpers/DictionaryHelpers.cs create mode 100644 src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AzureFunction/FunctionDetails.cs create mode 100644 tests/Agent/UnitTests/NewRelic.Agent.Extensions.Tests/Helpers/DictionaryHelpersTests.cs diff --git a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Helpers/DictionaryHelpers.cs b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Helpers/DictionaryHelpers.cs new file mode 100644 index 0000000000..1b8ef65740 --- /dev/null +++ b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Helpers/DictionaryHelpers.cs @@ -0,0 +1,33 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. +// SPDX-License-Identifier: Apache-2.0 + +using System.Collections.Generic; +using Newtonsoft.Json; + +namespace NewRelic.Agent.Extensions.Helpers +{ + public static class DictionaryHelpers + { + /// + /// Converts a JSON string to a dictionary. Will always return a dictionary, even if the JSON is invalid. + /// + /// + /// IReadOnlyDictionary + public static IReadOnlyDictionary FromJson(string json) + { + if (string.IsNullOrEmpty(json)) + { + return new Dictionary(); + } + + try + { + return JsonConvert.DeserializeObject>(json); + } + catch + { + return new Dictionary(); + } + } + } +} diff --git a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AzureFunction/AzureFunction.csproj b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AzureFunction/AzureFunction.csproj index 070dd10f1a..14cb6749d5 100644 --- a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AzureFunction/AzureFunction.csproj +++ b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AzureFunction/AzureFunction.csproj @@ -14,6 +14,7 @@ + diff --git a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AzureFunction/FunctionDetails.cs b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AzureFunction/FunctionDetails.cs new file mode 100644 index 0000000000..d4d3eabb24 --- /dev/null +++ b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AzureFunction/FunctionDetails.cs @@ -0,0 +1,204 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. +// SPDX-License-Identifier: Apache-2.0 + +using System; +using System.Collections; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using NewRelic.Agent.Api; +using NewRelic.Agent.Extensions.Helpers; +using NewRelic.Reflection; + +namespace NewRelic.Providers.Wrapper.AzureFunction; + +internal class FunctionDetails +{ + private static MethodInfo _bindFunctionInputAsync; + private static MethodInfo _genericFunctionInputBindingFeatureGetter; + private static bool? 
_hasAspNetCoreExtensionsReference; + + private static readonly ConcurrentDictionary _functionTriggerCache = new(); + private static Func _functionDefinitionGetter; + private static Func _parametersGetter; + private static Func> _propertiesGetter; + + private const string AspNetCoreExtensionsAssemblyName = "Microsoft.Azure.Functions.Worker.Extensions.Http.AspNetCore"; + private const string IFunctionInputBindingFeatureTypeName = "Microsoft.Azure.Functions.Worker.Context.Features.IFunctionInputBindingFeature"; + + public FunctionDetails(dynamic functionContext, IAgent agent) + { + try + { + FunctionName = functionContext.FunctionDefinition.Name; + InvocationId = functionContext.InvocationId; + + // cache the trigger by function name + if (!_functionTriggerCache.TryGetValue(FunctionName, out string trigger)) + { + // functionContext.FunctionDefinition.Parameters is an ImmutableArray + var funcAsObj = (object)functionContext; + _functionDefinitionGetter ??= VisibilityBypasser.Instance.GeneratePropertyAccessor(funcAsObj.GetType(), "FunctionDefinition"); + var functionDefinition = _functionDefinitionGetter(funcAsObj); + + _parametersGetter ??= VisibilityBypasser.Instance.GeneratePropertyAccessor(functionDefinition.GetType(), "Parameters"); + var parameters = _parametersGetter(functionDefinition) as IEnumerable; + + // Trigger is normally the first parameter, but we'll check all parameters to be sure. + var foundTrigger = false; + foreach (var parameter in parameters) + { + // Properties is an IReadOnlyDictionary + _propertiesGetter ??= VisibilityBypasser.Instance.GeneratePropertyAccessor>(parameter.GetType(), "Properties"); + var properties = _propertiesGetter(parameter); + if (properties == null || properties.Count == 0) + { + continue; + } + + if (!properties.TryGetValue("bindingAttribute", out var triggerAttribute)) + { + foreach (var propVal in properties.Values) + { + if (propVal.GetType().Name.Contains("Trigger")) + { + triggerAttribute = propVal; + break; + } + } + + if (triggerAttribute == null) + { + continue; + } + } + + var triggerTypeName = triggerAttribute.GetType().Name; + Trigger = triggerTypeName.ResolveTriggerType(); + foundTrigger = true; + break; + } + + // shouldn't happen, as all functions are required to have a trigger + if (!foundTrigger) + { + agent.Logger.Debug($"Function {FunctionName} does not have a trigger, defaulting to 'other'"); + Trigger = "other"; + } + + _functionTriggerCache[FunctionName] = Trigger; + } + else + { + Trigger = trigger; + } + + if (IsWebTrigger) + { + ParseHttpTriggerParameters(agent, functionContext); + } + } + catch (Exception ex) + { + agent.Logger.Error(ex, "Error getting Azure Function details."); + throw; + } + } + + private void ParseHttpTriggerParameters(IAgent agent, dynamic functionContext) + { + if (!_hasAspNetCoreExtensionsReference.HasValue) + { + // see if the Microsoft.Azure.Functions.Worker.Extensions.Http.AspNetCore assembly is in the list of loaded assemblies + var loadedAssemblies = AppDomain.CurrentDomain.GetAssemblies(); + var assembly = loadedAssemblies.FirstOrDefault(a => a.GetName().Name == AspNetCoreExtensionsAssemblyName); + + _hasAspNetCoreExtensionsReference = assembly != null; + + if (_hasAspNetCoreExtensionsReference.Value) + agent.Logger.Debug($"{AspNetCoreExtensionsAssemblyName} assembly is loaded; InvokeFunctionAsyncWrapper will defer HttpTrigger parameter parsing to FunctionsHttpProxyingMiddlewareWrapper."); + } + + // don't parse request parameters here if the 
Microsoft.Azure.Functions.Worker.Extensions.Http.AspNetCore assembly is loaded. + // If it is loaded, parsing occurs over in FunctionsHttpProxyingMiddlewareWrapper + if (_hasAspNetCoreExtensionsReference.Value) + { + return; + } + + object features = functionContext.Features; + + if (_genericFunctionInputBindingFeatureGetter == null) // cache the methodinfo lookups for performance + { + var get = features.GetType().GetMethod("Get"); + if (get != null) + { + _genericFunctionInputBindingFeatureGetter = get.MakeGenericMethod(features.GetType().Assembly.GetType(IFunctionInputBindingFeatureTypeName)); + } + else + { + agent.Logger.Debug("Unable to find FunctionContext.Features.Get method; unable to parse request parameters."); + return; + } + + var bindFunctionInputType = features.GetType().Assembly.GetType(IFunctionInputBindingFeatureTypeName); + if (bindFunctionInputType == null) + { + agent.Logger.Debug("Unable to find IFunctionInputBindingFeature type; unable to parse request parameters."); + return; + } + _bindFunctionInputAsync = bindFunctionInputType.GetMethod("BindFunctionInputAsync"); + if (_bindFunctionInputAsync == null) + { + agent.Logger.Debug("Unable to find BindFunctionInputAsync method; unable to parse request parameters."); + return; + } + } + + if (_genericFunctionInputBindingFeatureGetter != null) + { + // Get the input binding feature and bind the input from the function context + var inputBindingFeature = _genericFunctionInputBindingFeatureGetter.Invoke(features, []); + dynamic valueTask = _bindFunctionInputAsync.Invoke(inputBindingFeature, [functionContext]); + + valueTask.AsTask().Wait(); // BindFunctionInputAsync returns a ValueTask, so we need to convert it to a Task to wait on it + + object[] inputArguments = valueTask.Result.Values; + + if (inputArguments is { Length: > 0 }) + { + var reqData = (dynamic)inputArguments[0]; + + if (reqData != null && reqData.GetType().Name == "GrpcHttpRequestData" && !string.IsNullOrEmpty(reqData.Method)) + { + RequestMethod = reqData.Method; + Uri uri = reqData.Url; + RequestPath = $"/{uri.GetComponents(UriComponents.Path, UriFormat.Unescaped)}"; // has to start with a slash + } + } + } + + if (functionContext?.BindingContext?.BindingData is IReadOnlyDictionary bindingData && bindingData.ContainsKey("Headers")) + { + // The headers are stored as a JSON blob. + var headersJson = bindingData["Headers"].ToString(); + Headers = DictionaryHelpers.FromJson(headersJson); + } + } + + public bool IsValid() + { + return !string.IsNullOrEmpty(FunctionName) && !string.IsNullOrEmpty(Trigger) && !string.IsNullOrEmpty(InvocationId); + } + + public string FunctionName { get; } + + public string Trigger { get; } + public string InvocationId { get; } + public bool IsWebTrigger => Trigger == "http"; + public string RequestMethod { get; private set; } + public string RequestPath { get; private set; } + public IReadOnlyDictionary Headers { get; private set; } + public bool? 
HasAspNetCoreExtensionReference => _hasAspNetCoreExtensionsReference; +} diff --git a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AzureFunction/FunctionsHttpProxyingMiddlewareWrapper.cs b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AzureFunction/FunctionsHttpProxyingMiddlewareWrapper.cs index 4c32e63e70..5d9e8fe857 100644 --- a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AzureFunction/FunctionsHttpProxyingMiddlewareWrapper.cs +++ b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AzureFunction/FunctionsHttpProxyingMiddlewareWrapper.cs @@ -1,6 +1,8 @@ // Copyright 2020 New Relic, Inc. All rights reserved. // SPDX-License-Identifier: Apache-2.0 +using System.Collections.Generic; +using Microsoft.Extensions.Primitives; using NewRelic.Agent.Api; using NewRelic.Agent.Extensions.Providers.Wrapper; @@ -33,6 +35,10 @@ public AfterWrappedMethodDelegate BeforeWrappedMethod(InstrumentedMethodCall ins agent.CurrentTransaction.SetRequestMethod(httpContext.Request.Method); agent.CurrentTransaction.SetUri(httpContext.Request.Path); + + // Only need to accept DT headers from incoming request. + var headers = httpContext.Request.Headers as IDictionary; + transaction.AcceptDistributedTraceHeaders(headers, GetHeaderValue, TransportType.HTTP); break; case "TryHandleHttpResult": if (!agent.CurrentTransaction.HasHttpResponseStatusCode) // these handlers seem to get called more than once; only set the status code one time @@ -52,5 +58,13 @@ public AfterWrappedMethodDelegate BeforeWrappedMethod(InstrumentedMethodCall ins } return Delegates.NoOp; + + static IEnumerable GetHeaderValue(IDictionary headers, string key) + { + if (!headers.ContainsKey(key)) + return []; + + return headers[key].ToArray(); + } } } diff --git a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AzureFunction/InvokeFunctionAsyncWrapper.cs b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AzureFunction/InvokeFunctionAsyncWrapper.cs index 457219d2a8..e241d32e3f 100644 --- a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AzureFunction/InvokeFunctionAsyncWrapper.cs +++ b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AzureFunction/InvokeFunctionAsyncWrapper.cs @@ -2,15 +2,12 @@ // SPDX-License-Identifier: Apache-2.0 using System; -using System.Collections; -using System.Collections.Concurrent; using System.Collections.Generic; using System.Linq; using System.Reflection; using System.Threading.Tasks; using NewRelic.Agent.Api; using NewRelic.Agent.Extensions.Providers.Wrapper; -using NewRelic.Reflection; namespace NewRelic.Providers.Wrapper.AzureFunction; @@ -87,6 +84,11 @@ public AfterWrappedMethodDelegate BeforeWrappedMethod(InstrumentedMethodCall ins { transaction.SetRequestMethod(functionDetails.RequestMethod); transaction.SetUri(functionDetails.RequestPath); + + if (functionDetails.Headers?.Count != 0) + { + transaction.AcceptDistributedTraceHeaders(functionDetails.Headers, GetHeaderValue, TransportType.HTTP); + } } var segment = transaction.StartTransactionSegment(instrumentedMethodCall.MethodCall, functionDetails.FunctionName); @@ -140,188 +142,13 @@ void InvokeFunctionAsyncResponse(Task responseTask) transaction.End(); } } - } -} - -internal class FunctionDetails -{ - private static MethodInfo _bindFunctionInputAsync; - private static MethodInfo _genericFunctionInputBindingFeatureGetter; - private static bool? 
_hasAspNetCoreExtensionsReference; - - private static readonly ConcurrentDictionary _functionTriggerCache = new(); - private static Func _functionDefinitionGetter; - private static Func _parametersGetter; - private static Func> _propertiesGetter; - - private const string AspNetCoreExtensionsAssemblyName = "Microsoft.Azure.Functions.Worker.Extensions.Http.AspNetCore"; - private const string IFunctionInputBindingFeatureTypeName = "Microsoft.Azure.Functions.Worker.Context.Features.IFunctionInputBindingFeature"; - - public FunctionDetails(dynamic functionContext, IAgent agent) - { - try - { - FunctionName = functionContext.FunctionDefinition.Name; - InvocationId = functionContext.InvocationId; - - // cache the trigger by function name - if (!_functionTriggerCache.TryGetValue(FunctionName, out string trigger)) - { - // functionContext.FunctionDefinition.Parameters is an ImmutableArray - var funcAsObj = (object)functionContext; - _functionDefinitionGetter ??= VisibilityBypasser.Instance.GeneratePropertyAccessor(funcAsObj.GetType(), "FunctionDefinition"); - var functionDefinition = _functionDefinitionGetter(funcAsObj); - - _parametersGetter ??= VisibilityBypasser.Instance.GeneratePropertyAccessor(functionDefinition.GetType(), "Parameters"); - var parameters = _parametersGetter(functionDefinition) as IEnumerable; - - // Trigger is normally the first parameter, but we'll check all parameters to be sure. - var foundTrigger = false; - foreach (var parameter in parameters) - { - // Properties is an IReadOnlyDictionary - _propertiesGetter ??= VisibilityBypasser.Instance.GeneratePropertyAccessor>(parameter.GetType(), "Properties"); - var properties = _propertiesGetter(parameter); - if (properties == null || properties.Count == 0) - { - continue; - } - - if (!properties.TryGetValue("bindingAttribute", out var triggerAttribute)) - { - foreach (var propVal in properties.Values) - { - if (propVal.GetType().Name.Contains("Trigger")) - { - triggerAttribute = propVal; - break; - } - } - - if (triggerAttribute == null) - { - continue; - } - } - - var triggerTypeName = triggerAttribute.GetType().Name; - Trigger = triggerTypeName.ResolveTriggerType(); - foundTrigger = true; - break; - } - - // shouldn't happen, as all functions are required to have a trigger - if (!foundTrigger) - { - agent.Logger.Debug($"Function {FunctionName} does not have a trigger, defaulting to 'other'"); - Trigger = "other"; - } - - _functionTriggerCache[FunctionName] = Trigger; - } - else - { - Trigger = trigger; - } - - if (IsWebTrigger) - { - ParseHttpTriggerParameters(agent, functionContext); - } - } - catch (Exception ex) - { - agent.Logger.Error(ex, "Error getting Azure Function details."); - throw; - } - } - - private void ParseHttpTriggerParameters(IAgent agent, dynamic functionContext) - { - if (!_hasAspNetCoreExtensionsReference.HasValue) - { - // see if the Microsoft.Azure.Functions.Worker.Extensions.Http.AspNetCore assembly is in the list of loaded assemblies - var loadedAssemblies = AppDomain.CurrentDomain.GetAssemblies(); - var assembly = loadedAssemblies.FirstOrDefault(a => a.GetName().Name == AspNetCoreExtensionsAssemblyName); - - _hasAspNetCoreExtensionsReference = assembly != null; - - if (_hasAspNetCoreExtensionsReference.Value) - agent.Logger.Debug($"{AspNetCoreExtensionsAssemblyName} assembly is loaded; InvokeFunctionAsyncWrapper will defer HttpTrigger parameter parsing to FunctionsHttpProxyingMiddlewareWrapper."); - } - - // don't parse request parameters here if the 
Microsoft.Azure.Functions.Worker.Extensions.Http.AspNetCore assembly is loaded. - // If it is loaded, parsing occurs over in FunctionsHttpProxyingMiddlewareWrapper - if (_hasAspNetCoreExtensionsReference.Value) - { - return; - } - - object features = functionContext.Features; - - if (_genericFunctionInputBindingFeatureGetter == null) // cache the methodinfo lookups for performance - { - var get = features.GetType().GetMethod("Get"); - if (get != null) - { - _genericFunctionInputBindingFeatureGetter = get.MakeGenericMethod(features.GetType().Assembly.GetType(IFunctionInputBindingFeatureTypeName)); - } - else - { - agent.Logger.Debug("Unable to find FunctionContext.Features.Get method; unable to parse request parameters."); - return; - } - - var bindFunctionInputType = features.GetType().Assembly.GetType(IFunctionInputBindingFeatureTypeName); - if (bindFunctionInputType == null) - { - agent.Logger.Debug("Unable to find IFunctionInputBindingFeature type; unable to parse request parameters."); - return; - } - _bindFunctionInputAsync = bindFunctionInputType.GetMethod("BindFunctionInputAsync"); - if (_bindFunctionInputAsync == null) - { - agent.Logger.Debug("Unable to find BindFunctionInputAsync method; unable to parse request parameters."); - return; - } - } - if (_genericFunctionInputBindingFeatureGetter != null) + IEnumerable GetHeaderValue(IReadOnlyDictionary headers, string key) { - // Get the input binding feature and bind the input from the function context - var inputBindingFeature = _genericFunctionInputBindingFeatureGetter.Invoke(features, []); - dynamic valueTask = _bindFunctionInputAsync.Invoke(inputBindingFeature, [functionContext]); - - valueTask.AsTask().Wait(); // BindFunctionInputAsync returns a ValueTask, so we need to convert it to a Task to wait on it - - object[] inputArguments = valueTask.Result.Values; - - if (inputArguments is { Length: > 0 }) - { - var reqData = (dynamic)inputArguments[0]; + if (!headers.ContainsKey(key)) + return []; - if (reqData != null && reqData.GetType().Name == "GrpcHttpRequestData" && !string.IsNullOrEmpty(reqData.Method)) - { - RequestMethod = reqData.Method; - Uri uri = reqData.Url; - RequestPath = $"/{uri.GetComponents(UriComponents.Path, UriFormat.Unescaped)}"; // has to start with a slash - } - } + return [headers[key].ToString()]; } } - - public bool IsValid() - { - return !string.IsNullOrEmpty(FunctionName) && !string.IsNullOrEmpty(Trigger) && !string.IsNullOrEmpty(InvocationId); - } - - public string FunctionName { get; } - - public string Trigger { get; } - public string InvocationId { get; } - public bool IsWebTrigger => Trigger == "http"; - public string RequestMethod { get; private set; } - public string RequestPath { get; private set; } - - public bool? 
HasAspNetCoreExtensionReference => _hasAspNetCoreExtensionsReference; } diff --git a/tests/Agent/IntegrationTests/IntegrationTests/AzureFunction/AzureFunctionHttpTriggerTests.cs b/tests/Agent/IntegrationTests/IntegrationTests/AzureFunction/AzureFunctionHttpTriggerTests.cs index 33921c0535..fcaba6e4e8 100644 --- a/tests/Agent/IntegrationTests/IntegrationTests/AzureFunction/AzureFunctionHttpTriggerTests.cs +++ b/tests/Agent/IntegrationTests/IntegrationTests/AzureFunction/AzureFunctionHttpTriggerTests.cs @@ -16,9 +16,18 @@ public enum AzureFunctionHttpTriggerTestMode AspNetCorePipeline, SimpleInvocation } + public abstract class AzureFunctionHttpTriggerTestsBase : NewRelicIntegrationTest where TFixture : AzureFunctionApplicationFixture { + const string TestTraceId = "12345678901234567890123456789012"; + const string AccountId = "1"; + const string ParentType = "App"; + const string AppId = "5043"; + const string TransactionId = "5569065a5b1313bd"; + const bool Sampled = true; + const string Priority = "1.23456"; + private readonly TFixture _fixture; private readonly AzureFunctionHttpTriggerTestMode _testMode; @@ -32,6 +41,7 @@ protected AzureFunctionHttpTriggerTestsBase(TFixture fixture, ITestOutputHelper setupConfiguration: () => { var configModifier = new NewRelicConfigModifier(fixture.DestinationNewRelicConfigFilePath); + configModifier.SetOrDeleteSpanEventsEnabled(true); configModifier .ForceTransactionTraces() .ConfigureFasterTransactionTracesHarvestCycle(20) @@ -141,6 +151,34 @@ public void Test_SimpleInvocationMode() Assert.Equal("HttpTriggerFunctionUsingSimpleInvocation", faasNameValue); Assert.True(firstTransaction.IntrinsicAttributes.TryGetValue("faas.trigger", out var faasTriggerValue)); Assert.Equal("http", faasTriggerValue); + + Assert.True(firstTransaction.IntrinsicAttributes.TryGetValue("parent.type", out var parentType)); + Assert.Equal(ParentType, parentType); + + Assert.True(firstTransaction.IntrinsicAttributes.TryGetValue("parent.app", out var appId)); + Assert.Equal(AppId, appId); + + Assert.True(firstTransaction.IntrinsicAttributes.TryGetValue("parent.account", out var accountId)); + Assert.Equal(AccountId, accountId); + + Assert.True(firstTransaction.IntrinsicAttributes.TryGetValue("parent.transportType", out var transportType)); + Assert.Equal("HTTP", transportType); + + Assert.True(firstTransaction.IntrinsicAttributes.TryGetValue("traceId", out var traceId)); + Assert.Equal(TestTraceId, traceId); + + Assert.True(firstTransaction.IntrinsicAttributes.TryGetValue("priority", out var priority)); + Assert.Equal(Priority, priority.ToString().Substring(0, 7)); // keep the values the same length + + Assert.True(firstTransaction.IntrinsicAttributes.TryGetValue("sampled", out var sampled)); + Assert.Equal(Sampled, sampled); + + Assert.True(firstTransaction.IntrinsicAttributes.TryGetValue("parentId", out var traceParent)); + Assert.Equal(TransactionId, traceParent); + + // changes - just make sure it is present. 
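+            // parent.transportDuration and guid vary with each invocation, so the assertions below only verify that the attributes exist.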
+ Assert.True(firstTransaction.IntrinsicAttributes.TryGetValue("parent.transportDuration", out var transportDuration)); + Assert.True(firstTransaction.IntrinsicAttributes.TryGetValue("guid", out var guid)); } else { @@ -248,6 +286,34 @@ public void Test_PipelineMode() Assert.Equal("HttpTriggerFunctionUsingAspNetCorePipeline", faasNameValue); Assert.True(firstTransaction.IntrinsicAttributes.TryGetValue("faas.trigger", out var faasTriggerValue)); Assert.Equal("http", faasTriggerValue); + + Assert.True(firstTransaction.IntrinsicAttributes.TryGetValue("parent.type", out var parentType)); + Assert.Equal(ParentType, parentType); + + Assert.True(firstTransaction.IntrinsicAttributes.TryGetValue("parent.app", out var appId)); + Assert.Equal(AppId, appId); + + Assert.True(firstTransaction.IntrinsicAttributes.TryGetValue("parent.account", out var accountId)); + Assert.Equal(AccountId, accountId); + + Assert.True(firstTransaction.IntrinsicAttributes.TryGetValue("parent.transportType", out var transportType)); + Assert.Equal("HTTP", transportType); + + Assert.True(firstTransaction.IntrinsicAttributes.TryGetValue("traceId", out var traceId)); + Assert.Equal(TestTraceId, traceId); + + Assert.True(firstTransaction.IntrinsicAttributes.TryGetValue("priority", out var priority)); + Assert.Equal(Priority, priority.ToString().Substring(0, 7)); // keep the values the same length + + Assert.True(firstTransaction.IntrinsicAttributes.TryGetValue("sampled", out var sampled)); + Assert.Equal(Sampled, sampled); + + Assert.True(firstTransaction.IntrinsicAttributes.TryGetValue("parentId", out var traceParent)); + Assert.Equal(TransactionId, traceParent); + + // changes - just make sure it is present. + Assert.True(firstTransaction.IntrinsicAttributes.TryGetValue("parent.transportDuration", out var transportDuration)); + Assert.True(firstTransaction.IntrinsicAttributes.TryGetValue("guid", out var guid)); } else { diff --git a/tests/Agent/IntegrationTests/IntegrationTests/RemoteServiceFixtures/AzureFunctionApplicationFixture.cs b/tests/Agent/IntegrationTests/IntegrationTests/RemoteServiceFixtures/AzureFunctionApplicationFixture.cs index 5596656374..0724329d96 100644 --- a/tests/Agent/IntegrationTests/IntegrationTests/RemoteServiceFixtures/AzureFunctionApplicationFixture.cs +++ b/tests/Agent/IntegrationTests/IntegrationTests/RemoteServiceFixtures/AzureFunctionApplicationFixture.cs @@ -1,6 +1,7 @@ // Copyright 2020 New Relic, Inc. All rights reserved. 
// SPDX-License-Identifier: Apache-2.0 +using System.Collections.Generic; using NewRelic.Agent.IntegrationTestHelpers.RemoteServiceFixtures; namespace NewRelic.Agent.IntegrationTests.RemoteServiceFixtures; @@ -8,6 +9,19 @@ namespace NewRelic.Agent.IntegrationTests.RemoteServiceFixtures; public abstract class AzureFunctionApplicationFixture : RemoteApplicationFixture { private const string ApplicationDirectoryName = @"AzureFunctionApplication"; + private const string TestTraceId = "12345678901234567890123456789012"; + private const string TestTraceParent = "1234567890123456"; + private const string TestTracingVendors = "rojo,congo"; + private const string TestOtherVendorEntries = "rojo=1,congo=2"; + private const string AccountId = "1"; + private const string Version = "0"; + private const int ParentType = 0; + private const string AppId = "5043"; + private const string SpanId = "27ddd2d8890283b4"; + private const string TransactionId = "5569065a5b1313bd"; + private const string Sampled = "1"; + private const string Priority = "1.23456"; + private const string Timestamp = "1518469636025"; protected AzureFunctionApplicationFixture(string functionNames, string targetFramework, bool enableAzureFunctionMode) : base(new AzureFuncTool(ApplicationDirectoryName, targetFramework, ApplicationType.Bounded, true, true, true, enableAzureFunctionMode)) @@ -23,11 +37,16 @@ protected AzureFunctionApplicationFixture(string functionNames, string targetFra } - public string Get(string endpoint) + public void Get(string endpoint) { var address = $"http://{DestinationServerName}:{Port}/{endpoint}"; - - return GetString(address); + var headers = new List> + { + new KeyValuePair ("traceparent", $"00-{TestTraceId}-{TestTraceParent}-00"), + new KeyValuePair ("tracestate", $"{AccountId}@nr={Version}-{ParentType}-{AccountId}-{AppId}-{SpanId}-{TransactionId}-{Sampled}-" + Priority + $"-{Timestamp},{TestOtherVendorEntries}") + }; + + GetStringAndIgnoreResult(address, headers); } public void PostToAzureFuncTool(string triggerName, string payload) diff --git a/tests/Agent/UnitTests/NewRelic.Agent.Extensions.Tests/Helpers/DictionaryHelpersTests.cs b/tests/Agent/UnitTests/NewRelic.Agent.Extensions.Tests/Helpers/DictionaryHelpersTests.cs new file mode 100644 index 0000000000..dbdc156633 --- /dev/null +++ b/tests/Agent/UnitTests/NewRelic.Agent.Extensions.Tests/Helpers/DictionaryHelpersTests.cs @@ -0,0 +1,30 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. +// SPDX-License-Identifier: Apache-2.0 + +using NewRelic.Agent.Extensions.Helpers; +using NUnit.Framework; + +namespace Agent.Extensions.Tests.Helpers +{ + [TestFixture] + public class DictionaryHelpersTests + { + // Example: "{\"Host\":\"localhost:7071\",\"traceparent\":\"00-8141368177692588f683b7e7ce8db2a7-bba89a27c8c69cb0-00\"}" + [TestCase("{\"Host\":\"localhost:7071\",\"traceparent\":\"00-8141368177692588f683b7e7ce8db2a7-bba89a27c8c69cb0-00\"}")] + [TestCase("{\"Host\":42,\"traceparent\":{\"Test\":\"Code\"}}")] + [TestCase("")] + [TestCase(null)] + [TestCase("42")] + public void Successfuly_Returns_Dictionary_No_Errors(string test) + { + var result = DictionaryHelpers.FromJson(test); + + Assert.That(result, Is.Not.Null); + + if (!string.IsNullOrEmpty(test) && test.Contains("traceparent")) + { + Assert.That(result["traceparent"], Is.Not.Null); + } + } + } +}
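
For reference, a minimal sketch of how the pieces introduced by this patch fit together at a call site: DictionaryHelpers.FromJson parses the "Headers" JSON blob pulled from the function's binding data, and the resulting dictionary is handed to AcceptDistributedTraceHeaders with a simple accessor, mirroring what InvokeFunctionAsyncWrapper does above. This is a sketch, not part of the change: it assumes the wrapper-facing ITransaction type used by the other wrappers in this patch, assumes FromJson returns IReadOnlyDictionary<string, object> (the generic parameters are not visible in this rendering of the diff), and AcceptHeadersExample is a hypothetical name.

using System.Collections.Generic;
using NewRelic.Agent.Api;
using NewRelic.Agent.Extensions.Helpers;
using NewRelic.Agent.Extensions.Providers.Wrapper;

internal static class AcceptHeadersExample
{
    public static void Accept(ITransaction transaction, string headersJson)
    {
        // FromJson never throws; null, empty, or invalid JSON yields an empty dictionary.
        IReadOnlyDictionary<string, object> headers = DictionaryHelpers.FromJson(headersJson);

        if (headers.Count != 0)
        {
            // Same accessor shape the wrapper uses: look the key up and return it as a single-element sequence.
            transaction.AcceptDistributedTraceHeaders(headers, GetHeaderValue, TransportType.HTTP);
        }

        static IEnumerable<string> GetHeaderValue(IReadOnlyDictionary<string, object> carrier, string key)
        {
            if (!carrier.ContainsKey(key))
                return [];

            return [carrier[key].ToString()];
        }
    }
}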