/// <summary>
/// Aggregates raw per-iteration results into per-run, per-test summaries.
/// </summary>
/// <param name="allIterations">The flat stream of individual iteration results.</param>
/// <returns>A map of run ID to (test name to aggregated <see cref="TestResult"/>).</returns>
private static Dictionary<string, Dictionary<string, TestResult>> SummarizeTestResults(IEnumerable<TestIterationResult> allIterations)
{
    var summary = new Dictionary<string, Dictionary<string, TestResult>>();

    foreach (var iteration in allIterations)
    {
        // Find or create the bucket of results for this run ID.
        Dictionary<string, TestResult> resultsForRun;
        if (!summary.TryGetValue(iteration.RunId, out resultsForRun))
        {
            resultsForRun = new Dictionary<string, TestResult>();
            summary[iteration.RunId] = resultsForRun;
        }

        // Find or create the aggregate entry for this test within the run.
        TestResult aggregate;
        if (!resultsForRun.TryGetValue(iteration.TestName, out aggregate))
        {
            aggregate = new TestResult();
            aggregate.RunId = iteration.RunId;
            aggregate.TestName = iteration.TestName;
            resultsForRun[iteration.TestName] = aggregate;
        }

        // Fold every metric value from this iteration into the test's running statistics.
        foreach (var metric in iteration.MetricValues)
        {
            RunningStatistics metricStats;
            if (!aggregate.Stats.TryGetValue(metric.Key, out metricStats))
            {
                metricStats = new RunningStatistics();
                aggregate.Stats[metric.Key] = metricStats;
            }
            metricStats.Push(metric.Value);
        }

        aggregate.Iterations.Add(iteration);
    }

    return summary;
}
/// <summary>
/// Writes per-run, per-test summary statistics and cross-run comparison results
/// as XML to <see cref="OutputStream"/>.
/// </summary>
protected override void WriteIndividualResults()
{
    // FIX: the original formatted numbers with ToString("G3"), which uses the
    // current culture. In locales with a comma decimal separator that produces
    // attribute values like "1,23" that downstream tooling cannot parse.
    // Machine-readable output must use the invariant culture.
    var invariant = System.Globalization.CultureInfo.InvariantCulture;

    var resultElem = new XElement("results");
    var xmlDoc = new XDocument(resultElem);

    foreach (var run in TestResults)
    {
        var runIdElem = new XElement("run", new XAttribute("id", run.Key));
        resultElem.Add(runIdElem);

        foreach (var result in run.Value.Values)
        {
            var testElem = new XElement("test", new XAttribute("name", result.TestName));
            runIdElem.Add(testElem);

            var summaryElem = new XElement("summary");
            testElem.Add(summaryElem);

            // One child element per metric, carrying the summary statistics as attributes.
            foreach (var stat in result.Stats)
            {
                RunningStatistics runningStats = stat.Value.RunningStatistics;
                summaryElem.Add(new XElement(stat.Key,
                    new XAttribute("min", runningStats.Minimum.ToString("G3", invariant)),
                    new XAttribute("mean", runningStats.Mean.ToString("G3", invariant)),
                    new XAttribute("max", runningStats.Maximum.ToString("G3", invariant)),
                    new XAttribute("marginOfError", runningStats.MarginOfError(Properties.ErrorConfidence).ToString("G3", invariant)),
                    new XAttribute("stddev", runningStats.StandardDeviation.ToString("G3", invariant))));
            }
        }
    }

    //
    // Write comparison results
    //
    foreach (var metricName in ComparisonResults.Keys)
    {
        var comparisonResults = ComparisonResults[metricName];
        foreach (var comparison in comparisonResults)
        {
            var comparisonElem = new XElement("comparison",
                new XAttribute("test", comparison.TestName),
                new XAttribute("baselineId", comparison.BaselineResult.RunId),
                new XAttribute("comparisonId", comparison.ComparisonResult.RunId));
            resultElem.Add(comparisonElem);

            comparisonElem.Add(
                new XElement(metricName,
                    new XAttribute("changeRatio", comparison.PercentChange.ToString("G3", invariant)),
                    new XAttribute("changeRatioError", comparison.PercentChangeError.ToString("G3", invariant))));
        }
    }

    xmlDoc.Save(this.OutputStream);
}
/// <summary>
/// Calculates a confidence interval as a percentage of the mean.
/// </summary>
/// <remarks>
/// This assumes a roughly normal distribution in the sample data.
/// </remarks>
/// <param name="stats">A <see cref="RunningStatistics"/> object pre-populated with the sample data.</param>
/// <param name="confidence">The desired confidence in the resulting interval.</param>
/// <returns>The confidence interval, as a percentage of the mean.</returns>
public static double MarginOfError(this RunningStatistics stats, double confidence)
{
    // A confidence interval is undefined for fewer than two samples.
    if (stats.Count < 2)
    {
        return double.NaN;
    }

    var degreesOfFreedom = (int)stats.Count - 1;
    var standardError = stats.StandardDeviation / Math.Sqrt(stats.Count);

    // Scale the standard error by the two-tailed Student's t value for the
    // requested confidence level, then express the interval relative to the mean.
    var tValue = TInv(1.0 - confidence, degreesOfFreedom);
    return (tValue * standardError) / stats.Mean;
}
/// <summary>
/// Writes a CSV summary of duration statistics (one row per test per run)
/// to <paramref name="analyzeOutputPath"/>.
/// </summary>
/// <param name="testResults">Map of run ID to (test name to aggregated result).</param>
/// <param name="analyzeOutputPath">Destination file path for the CSV output.</param>
private static void WriteStatisticsCSV(Dictionary<string, Dictionary<string, TestResult>> testResults, string analyzeOutputPath)
{
    // FIX: the original relied on the current culture when formatting the numeric
    // fields. In locales whose decimal separator is a comma, that corrupts the
    // comma-separated output even inside quoted fields ("1,23" vs "1.23").
    var invariant = System.Globalization.CultureInfo.InvariantCulture;

    using (var writer = new StreamWriter(analyzeOutputPath))
    {
        writer.WriteLine("Test, Iterations, Duration Min, Duration Max, Duration Average, Duration Stdev, Metrics");
        foreach (var run in testResults)
        {
            foreach (var result in run.Value.Values)
            {
                // NOTE(review): this indexer assumes every test recorded the duration
                // metric; a test without it would throw KeyNotFoundException — confirm
                // that invariant holds for all collectors.
                RunningStatistics durationStats = result.Stats[DurationMetricName];
                writer.WriteLine(string.Format(
                    invariant,
                    "\"{0}\",\"{1}\",\"{2}\",\"{3}\",\"{4}\",\"{5}\",\"{6}\"",
                    EscapeCsvString(result.TestName),
                    durationStats.Count,
                    durationStats.Minimum,
                    durationStats.Maximum,
                    durationStats.Mean,
                    durationStats.StandardDeviation,
                    EscapeCsvString(GetMetricsString(result.Stats.Keys))));
            }
        }
    }
}
/// <summary>
/// Compares each (baseline, comparison) run pair, metric by metric, producing the
/// relative change in mean and its error bound for every test present in both runs.
/// </summary>
/// <param name="allComparisonIds">Pairs of (baseline run ID, comparison run ID).</param>
/// <param name="testResults">Map of run ID to (test name to aggregated result).</param>
/// <returns>A map of metric name to the list of per-test comparison results.</returns>
private static Dictionary<string, List<TestResultComparison>> DoComparisons(List<Tuple<string, string>> allComparisonIds, Dictionary<string, Dictionary<string, TestResult>> testResults)
{
    var comparisonMatrix = new Dictionary<string, List<TestResultComparison>>();
    foreach (var comparisonIds in allComparisonIds)
    {
        var baseline = testResults[comparisonIds.Item1];
        var comparison = testResults[comparisonIds.Item2];

        //
        // Analyze results metric-by-metric
        //
        foreach (var metricName in Properties.AllMetrics.Keys)
        {
            // BUG FIX: the original unconditionally called comparisonMatrix.Add here,
            // which throws ArgumentException (duplicate key) as soon as more than one
            // comparison pair is processed. Reuse the existing per-metric list instead.
            List<TestResultComparison> comparisonResults;
            if (!comparisonMatrix.TryGetValue(metricName, out comparisonResults))
            {
                comparisonResults = new List<TestResultComparison>();
                comparisonMatrix.Add(metricName, comparisonResults);
            }

            foreach (var comparisonTest in comparison.Values)
            {
                // BUG FIX: skip tests with no counterpart in the baseline run; the
                // original indexer threw KeyNotFoundException, which is inconsistent
                // with the skip-on-missing-data checks below.
                TestResult baselineTest;
                if (!baseline.TryGetValue(comparisonTest.TestName, out baselineTest))
                {
                    continue;
                }

                var baselineCount = baselineTest.Iterations.Count;
                var comparisonCount = comparisonTest.Iterations.Count;
                if (baselineCount <= 0 || comparisonCount <= 0)
                {
                    continue;
                }

                // Both sides must have recorded this metric (checked via the first iteration).
                if (!baselineTest.Iterations[0].MetricValues.ContainsKey(metricName) ||
                    !comparisonTest.Iterations[0].MetricValues.ContainsKey(metricName))
                {
                    continue;
                }

                // Compute the standard error in the difference of the two means.
                var baselineSum = baselineTest.Iterations.Sum(iteration => iteration.MetricValues[metricName]);
                var baselineSumSquared = baselineSum * baselineSum;
                var baselineSumOfSquares = baselineTest.Iterations.Sum(iteration => iteration.MetricValues[metricName] * iteration.MetricValues[metricName]);

                var comparisonSum = comparisonTest.Iterations.Sum(iteration => iteration.MetricValues[metricName]);
                var comparisonSumSquared = comparisonSum * comparisonSum;
                var comparisonSumOfSquares = comparisonTest.Iterations.Sum(iteration => iteration.MetricValues[metricName] * iteration.MetricValues[metricName]);

                // NOTE(review): the divisor here is (n1 + n2 - 1), while the t lookup
                // below uses (n1 + n2 - 2) degrees of freedom; a textbook pooled-variance
                // estimate would divide by (n1 + n2 - 2) as well — confirm intent before
                // changing, as it alters reported error bounds.
                var stdErrorDiff = Math.Sqrt(
                    (baselineSumOfSquares - (baselineSumSquared / baselineCount) +
                     comparisonSumOfSquares - (comparisonSumSquared / comparisonCount)) *
                    (1.0 / baselineCount + 1.0 / comparisonCount) /
                    (baselineCount + comparisonCount - 1));

                // Two-tailed t interval at the configured confidence level.
                var interval = stdErrorDiff * MathNet.Numerics.ExcelFunctions.TInv(1.0 - Properties.ErrorConfidence, baselineCount + comparisonCount - 2);

                RunningStatistics comparisonStats = comparisonTest.Stats[metricName].RunningStatistics;
                RunningStatistics baselineStats = baselineTest.Stats[metricName].RunningStatistics;

                var comparisonResult = new TestResultComparison();
                comparisonResult.BaselineResult = baselineTest;
                comparisonResult.ComparisonResult = comparisonTest;
                comparisonResult.TestName = comparisonTest.TestName;
                comparisonResult.PercentChange = (comparisonStats.Mean - baselineStats.Mean) / baselineStats.Mean;
                comparisonResult.PercentChangeError = interval / baselineStats.Mean;

                comparisonResults.Add(comparisonResult);
            }
        }
    }
    return comparisonMatrix;
}