public void ThrowExceptionsInParallel(string appName, string framework, string appAssembly)
{
    // Total exceptions thrown by the scenario: presumably 4 threads x 1000 exceptions each
    // (matches Scenario2 in the sample app — confirm against Samples.ExceptionGenerator).
    const int ThrownExceptionCount = 4 * 1000;

    // The expected stack differs only by the module hosting ThreadHelper:
    // mscorlib on .NET Framework 4.5, System.Private.CoreLib on .NET (Core).
    StackTrace expectedStack;
    if (framework == "net45")
    {
        expectedStack = new StackTrace(
            new StackFrame("|lm:Samples.ExceptionGenerator |ns:Samples.ExceptionGenerator |ct:ParallelExceptionsScenario |fn:ThrowExceptions"),
            new StackFrame("|lm:mscorlib |ns:System.Threading |ct:ThreadHelper |fn:ThreadStart"));
    }
    else
    {
        expectedStack = new StackTrace(
            new StackFrame("|lm:Samples.ExceptionGenerator |ns:Samples.ExceptionGenerator |ct:ParallelExceptionsScenario |fn:ThrowExceptions"),
            new StackFrame("|lm:System.Private.CoreLib |ns:System.Threading |ct:ThreadHelper |fn:ThreadStart"));
    }

    var runner = new TestApplicationRunner(appName, framework, appAssembly, _output, commandLine: Scenario2);
    runner.Environment.SetVariable(EnvironmentVariables.ExceptionProfilerEnabled, "1");
    runner.Environment.SetVariable(EnvironmentVariables.ExceptionSampleLimit, "10000");

    using var agent = new MockDatadogAgent(_output);
    runner.Run(agent);
    Assert.True(agent.NbCallsOnProfilingEndpoint > 0);

    var exceptionSamples = ExtractExceptionSamples(runner.Environment.PprofDir).ToArray();

    // Every sample must be the bare System.Exception with the expected two-frame stack;
    // accumulate the counts so we can compare against the number actually thrown.
    long total = 0;
    foreach (var sample in exceptionSamples)
    {
        total += sample.Count;
        sample.Type.Should().Be("System.Exception");
        sample.Message.Should().BeEmpty();
        sample.Stacktrace.Should().Be(expectedStack);
    }

    // Enumerate the log directory once: print everything for diagnostics,
    // then locate the native profiler log from the same snapshot.
    var logFiles = Directory.GetFiles(runner.Environment.LogDir);
    foreach (var file in logFiles)
    {
        _output.WriteLine($"Log file: {file}");
    }

    // Ordinal: this is a non-linguistic filename-prefix match (CA1310).
    var logFile = logFiles.Single(f => Path.GetFileName(f).StartsWith("DD-DotNet-Profiler-Native-", StringComparison.Ordinal));

    // Stackwalk will fail if the walltime profiler tries to inspect the thread at the same time as the exception profiler.
    // This is expected so we remove those from the expected count.
    var missedExceptions = File.ReadLines(logFile)
        .Count(l => l.Contains("Failed to walk stack for thrown exception: CORPROF_E_STACKSNAPSHOT_UNSAFE (80131360)"));

    int expectedExceptionCount = ThrownExceptionCount - missedExceptions;
    expectedExceptionCount.Should().BeGreaterThan(0, "only a few exceptions should be missed");
    total.Should().Be(expectedExceptionCount);
}
public void RunAndCheck()
{
    // Drive the test application against a mock agent, then dump run info for diagnostics.
    using var mockAgent = new MockDatadogAgent(_output);
    RunTest(mockAgent.Port);
    PrintTestInfo();

    // Avoid CI flakiness: checking pprof files is enough
    // RunChecks(mockAgent);
}
public void ExplicitlyDisableExceptionProfiler(string appName, string framework, string appAssembly)
{
    // Run the exception scenario with the exception profiler explicitly turned off.
    var runner = new TestApplicationRunner(appName, framework, appAssembly, _output, commandLine: Scenario1);
    runner.Environment.SetVariable(EnvironmentVariables.ExceptionProfilerEnabled, "0");

    using var mockAgent = new MockDatadogAgent(_output);
    runner.Run(mockAgent);

    // On alpine, this check is flaky.
    // Disable it on alpine for now
    if (!EnvironmentHelper.IsAlpine)
    {
        Assert.True(mockAgent.NbCallsOnProfilingEndpoint > 0);
    }

    // With the profiler disabled, no exception samples may appear in the pprof output.
    ExtractExceptionSamples(runner.Environment.PprofDir).Should().BeEmpty();
}
private void CheckCpuProfiles(TestApplicationRunner runner, bool isEnabled)
{
    // Run the app against a mock agent and make sure at least one profile was uploaded.
    using var agent = new MockDatadogAgent(_output);
    runner.Run(agent);
    Assert.True(agent.NbCallsOnProfilingEndpoint > 0);

    // Sum the CPU samples across every pprof file produced by the run.
    var pprofFiles = Directory.EnumerateFiles(runner.Environment.PprofDir, "*.pprof", SearchOption.AllDirectories);
    var cpuSamplesCount = pprofFiles.Sum(GetCpuSamplesCount);

    // CPU samples must be present exactly when the CPU profiler is enabled.
    if (isEnabled)
    {
        Assert.True(cpuSamplesCount > 0);
    }
    else
    {
        Assert.Equal(0, cpuSamplesCount);
    }
}
private void CheckAgent(MockDatadogAgent agent)
{
    // The agent must have received at least the minimum expected number of pprof uploads.
    var receivedEnoughCalls = agent.NbReceivedCalls >= _minimumExpectedPprofsCount;
    Assert.True(
        receivedEnoughCalls,
        $"The number of calls to the agent was not greater than or equal to {_minimumExpectedPprofsCount}. Actual value {agent.NbReceivedCalls}");
}
// Runs the full post-run verification suite: local log files, local pprof output,
// then the agent-side received-call count.
private void RunChecks(MockDatadogAgent agent) { CheckLogFiles(); CheckPprofFiles(); CheckAgent(agent); }
// Executes the test application against the given mock agent's port,
// then prints run information for diagnostics.
public void Run(MockDatadogAgent agent) { RunTest(agent.Port); PrintTestInfo(); }