/// <summary>
/// Builds an evaluation context whose ETW callbacks are the named instance
/// handler methods (<c>BenchmarkIterationStart</c>, <c>BenchmarkIterationStop</c>,
/// <c>ProcessStart</c>, <c>ProcessStop</c>) defined elsewhere in this class,
/// and creates one evaluator per metric of each supplied test.
/// </summary>
/// <param name="logPath">Path of the ETW log backing this context.</param>
/// <param name="traceEventSource">Trace event source to subscribe to.</param>
/// <param name="testInfo">Tests whose metrics need evaluators.</param>
/// <param name="runid">Identifier of the benchmark run being evaluated.</param>
internal EtwPerformanceMetricEvaluationContext(string logPath, TraceEventSource traceEventSource, IEnumerable<PerformanceTestInfo> testInfo, string runid)
{
    LogPath = logPath;
    _traceEventSource = traceEventSource;
    _runid = runid;

    // Route benchmark-iteration and kernel process events to the instance
    // handler methods (these are methods, not the locally-defined delegates
    // used by the public constructor overload).
    var parser = new MicrosoftXunitBenchmarkTraceEventParser(traceEventSource);
    parser.BenchmarkIterationStart += BenchmarkIterationStart;
    parser.BenchmarkIterationStop += BenchmarkIterationStop;
    traceEventSource.Kernel.ProcessStart += ProcessStart;
    traceEventSource.Kernel.ProcessStop += ProcessStop;

    // One (metric, evaluator) pair per metric, keyed by the test case's
    // display name.
    foreach (var info in testInfo)
    {
        var pairs = new List<KeyValuePair<PerformanceMetric, PerformanceMetricEvaluator>>();
        foreach (var metric in info.Metrics.Cast<PerformanceMetric>())
        {
            pairs.Add(new KeyValuePair<PerformanceMetric, PerformanceMetricEvaluator>(metric, metric.CreateEvaluator(this)));
        }
        _evaluators[info.TestCase.DisplayName] = pairs;
    }
}
/// <summary>
/// Builds an evaluation context that tracks benchmark iterations for a single
/// run id. Iteration start/stop events delimit the current test case; kernel
/// process start/stop events maintain the set of process IDs attributable to
/// the running iteration; metric values are collected per test case, per
/// iteration, keyed by metric id.
/// </summary>
/// <param name="logPath">Path of the ETW log backing this context.</param>
/// <param name="traceEventSource">Trace event source to subscribe to.</param>
/// <param name="testInfo">Tests whose metrics need evaluators.</param>
/// <param name="runid">Identifier of the benchmark run; events from other runs are ignored.</param>
public EtwPerformanceMetricEvaluationContext(
    string logPath,
    TraceEventSource traceEventSource,
    IEnumerable<PerformanceTestMessage> testInfo,
    string runid)
{
    LogPath = logPath;
    _evaluators = new Dictionary<string, List<KeyValuePair<PerformanceMetric, PerformanceMetricEvaluator>>>();
    _metricValues = new Dictionary<string, List<Dictionary<string, double>>>();
    _traceEventSource = traceEventSource;
    _currentProcesses = new HashSet<int>();
    _runid = runid;

    var benchmarkParser = new MicrosoftXunitBenchmarkTraceEventParser(_traceEventSource);

    benchmarkParser.BenchmarkIterationStart += delegate (BenchmarkIterationStartArgs args)
    {
        if (args.RunId != _runid)
        {
            return;
        }

        // Overlapping iterations violate the event protocol; the timestamp in
        // the message pinpoints where in the trace it happened.
        if (_currentTestCase != null)
        {
            throw new InvalidOperationException(args.TimeStampRelativeMSec.ToString());
        }

        _currentTestCase = args.BenchmarkName;
        _currentIteration = args.Iteration;
        _currentProcesses.Add(args.ProcessID);

        var evaluators = _evaluators.GetOrDefault(_currentTestCase);
        if (evaluators != null)
        {
            // Iterate the local list instead of re-indexing _evaluators
            // (avoids a redundant second dictionary lookup).
            foreach (var evaluator in evaluators)
            {
                evaluator.Value.BeginIteration(args);
            }
        }
    };

    benchmarkParser.BenchmarkIterationStop += delegate (BenchmarkIterationStopArgs args)
    {
        if (args.RunId != _runid)
        {
            return;
        }

        if (_currentTestCase != args.BenchmarkName || _currentIteration != args.Iteration)
        {
            throw new InvalidOperationException();
        }

        var evaluators = _evaluators.GetOrDefault(_currentTestCase);
        if (evaluators != null)
        {
            var allValues = _metricValues.GetOrAdd(_currentTestCase);

            // Pad with nulls so this iteration's values land at
            // index == args.Iteration even if earlier iterations were skipped.
            while (allValues.Count < args.Iteration)
            {
                allValues.Add(null);
            }

            var values = new Dictionary<string, double>();
            allValues.Add(values);

            // Iterate the local list instead of re-indexing _evaluators
            // (avoids a redundant second dictionary lookup).
            foreach (var evaluator in evaluators)
            {
                values[evaluator.Key.Id] = evaluator.Value.EndIteration(args);
            }
        }

        _currentTestCase = null;
        _currentProcesses.Clear();
    };

    // The current process was already running before the etl tracing even started.
    benchmarkParser.Source.Kernel.ProcessStart += delegate (ProcessTraceData args)
    {
        // BUGFIX: track processes spawned by a process we are already tracking.
        // The previous check tested args.ProcessID, which made the Add a no-op
        // (the id was, by the check itself, already in the set) and never
        // attributed child processes to the current iteration. The parent id
        // is the correct key: a new process belongs to the iteration when its
        // parent does.
        if (_currentProcesses.Contains(args.ParentID))
        {
            _currentProcesses.Add(args.ProcessID);
        }
    };

    benchmarkParser.Source.Kernel.ProcessStop += delegate (ProcessTraceData args)
    {
        _currentProcesses.Remove(args.ProcessID);
    };

    // One (metric, evaluator) pair per metric, keyed by the test case's
    // display name.
    foreach (var info in testInfo)
    {
        _evaluators[info.TestCase.DisplayName] = info.Metrics
            .Cast<PerformanceMetric>()
            .Select(m => new KeyValuePair<PerformanceMetric, PerformanceMetricEvaluator>(m, m.CreateEvaluator(this)))
            .ToList();
    }
}