Пример #1
0
        /// <summary>
        /// Produces the complete collection of static, non-interactive run reports
        /// used for offline analysis of a test run.
        /// </summary>
        internal static void GenerateXmlReport(TestRecords tests, RunInfo runInfo, DirectoryInfo reportRoot, DirectoryInfo testBinRoot)
        {
            Profiler.StartMethod();

            // Start from a clean report directory, then stage the style sheets next to the reports.
            ReportingUtilities.CreateScorchedDirectory(reportRoot);
            CopyStyleSheets(reportRoot, testBinRoot);

            // Split the run into tests that actually executed and tests that were filtered out.
            TestRecords ranTests;
            TestRecords excludedTests;
            SeparateExecutedTests(tests, out ranTests, out excludedTests);

            // Note: the Summary Report may well be superseded by the more elaborate Run Report...
            SummaryReportGenerator.Generate(ranTests, reportRoot);
            MachineSummaryReportGenerator.Generate(ranTests, reportRoot);
            VariationReportGenerator.Generate(ranTests, reportRoot);
            DrtReportGenerator.Generate(ranTests, reportRoot);
            InfraTrackingReportGenerator.Generate(ranTests, reportRoot);
            RunReportGenerator.Generate(ranTests, runInfo, reportRoot);
            XUnitReportGenerator.Generate(ranTests, reportRoot);

            // Filtered-out tests get their own report so exclusions stay visible.
            FilteringReportGenerator.Generate(excludedTests, reportRoot);

            Profiler.EndMethod();
        }
Пример #2
0
        /// <summary>
        /// Writes a "Summary" element containing one "MachineSummary" child per machine,
        /// carrying variation counts and pass-rate statistics.
        /// </summary>
        internal static void WriteSummaryReport(XmlTableWriter tableWriter, TestRecords tests)
        {
            Dictionary <string, MachineSummaryEntry> summaries = ProduceMachineSummaries(tests);

            tableWriter.WriteStartElement("Summary");
            foreach (MachineSummaryEntry entry in summaries.Values)
            {
                float passRate         = 0;
                float adjustedPassRate = 0;
                if (entry.TotalVariations > 0)
                {
                    int passing = entry.TotalVariations - entry.FailedVariations;
                    passRate = (passing / (float)entry.TotalVariations) * 100;
                    //Failures on tests with known bugs can be treated as passing, but we make clear this is not the actual pass rate.
                    adjustedPassRate = ((passing + entry.FailedVariationsWithBugs) / (float)entry.TotalVariations) * 100;
                }

                tableWriter.WriteStartElement("MachineSummary");
                tableWriter.WriteAttributeString("MachineName", entry.Name);
                tableWriter.WriteAttributeString("FailingVariations", entry.FailedVariations.ToString(CultureInfo.InvariantCulture));
                tableWriter.WriteAttributeString("TotalVariations", entry.TotalVariations.ToString(CultureInfo.InvariantCulture));
                tableWriter.WriteAttributeString("TestsWithoutVariation", entry.TestsWithoutVariation.ToString(CultureInfo.InvariantCulture));
                tableWriter.WriteAttributeString("PassRate", passRate.ToString("0.00", CultureInfo.InvariantCulture));
                tableWriter.WriteAttributeString("AdjustedPassRate", adjustedPassRate.ToString("0.00", CultureInfo.InvariantCulture));
                tableWriter.WriteAttributeString("TestExecutionTime", ReportingUtilities.FormatTimeSpanAsHms(entry.TestExecutionTime));
                tableWriter.WriteEndElement();
            }
            tableWriter.WriteEndElement();
        }
Пример #3
0
        /// <summary>
        /// Write a Test node under Test, to hold information for the Test: log, test result, duration, Test info, which
        /// won't show for every Variation in Xml viewer.
        /// </summary>
        /// <param name="tableWriter">Writer</param>
        /// <param name="test">test</param>
        /// <param name="testInfo">testInfo (used to detect known bugs)</param>
        /// <param name="testLogPath">test log path; an overlong log is saved here when truncated</param>
        private static void WriteChildTestNode(XmlTableWriter tableWriter, TestRecord test, TestInfo testInfo, string testLogPath)
        {
            bool logTruncated = false;

            tableWriter.WriteStartElement("Test");
            tableWriter.WriteAttributeString("Duration", ReportingUtilities.FormatTimeSpanAsSeconds(ReportingUtilities.GetTestDuration(test))); //Total test execution Time
            tableWriter.WriteAttributeString("Result", ReportingUtilities.InterpretTestOutcome(test).ToString());
            tableWriter.WriteAttributeString("Log", ReportingUtilities.ProcessLongLog(test.Log, testLogPath, ref logTruncated));
            if (logTruncated)
            {
                //Point at the saved full log when the inline copy had to be truncated.
                tableWriter.WriteAttributeString("LogPath", Path.Combine(ReportingUtilities.TestLogsDir, Path.GetFileName(testLogPath)));
            }
            tableWriter.WriteAttributeString("LogDir", ReportingUtilities.ReportPaths(test.LoggedFiles));

            int  failed      = 0;
            int  failedOnBug = 0;
            bool hasBugs     = ReportingUtilities.TestHasBugs(testInfo);

            foreach (VariationRecord variation in test.Variations)
            {
                failed      += ReportingUtilities.OneForFail(variation.Result);
                failedOnBug += ReportingUtilities.OneForFailOnBug(variation.Result, hasBugs);
            }
            //Known-bug failures are excluded from the reported failure count.
            //Fix: use InvariantCulture for machine-readable numbers, consistent with the other writers in this file (CA1305);
            //the previous string.Format had no culture and would use the host locale.
            tableWriter.WriteAttributeString("Failures", (failed - failedOnBug).ToString(CultureInfo.InvariantCulture));
            tableWriter.WriteAttributeString("Total", test.Variations.Count.ToString(CultureInfo.InvariantCulture));
            tableWriter.WriteEndElement();
        }
Пример #4
0
        /// <summary>
        /// Aggregates per-area variation statistics (counts, failure and known-bug tallies,
        /// execution times and wall-clock span) across all tests and execution groups.
        /// </summary>
        /// <param name="tests">The run's test records, including execution group records.</param>
        /// <returns>Area name to summary entry, ordered case-insensitively by area name.</returns>
        internal static SortedDictionary <string, AreaSummaryEntry> ProduceAreaSummaries(TestRecords tests)
        {
            SortedDictionary <string, AreaSummaryEntry> SummaryTable = new SortedDictionary <string, AreaSummaryEntry>(StringComparer.OrdinalIgnoreCase);

            //Go through VariationRecords in each test to build up SummaryStats
            foreach (TestRecord test in tests.TestCollection)
            {
                AreaSummaryEntry entry;
                string           area = test.TestInfo.Area;

                //Create Entry if area doesn't exist yet, else load it up (single lookup via TryGetValue)
                if (!SummaryTable.TryGetValue(area, out entry))
                {
                    entry = new AreaSummaryEntry();
                    entry.TestExecutionTime = TimeSpan.Zero;
                    entry.Name            = area;
                    entry.AssociatedTests = new TestCollection();
                    SummaryTable.Add(area, entry);
                }
                entry.AssociatedTests.Add(test);
                bool hasBugs = (test.TestInfo.Bugs != null && test.TestInfo.Bugs.Count > 0);
                foreach (VariationRecord variation in test.Variations)
                {
                    entry.TotalVariations          += ReportingUtilities.OneForCountable(variation.Result);
                    entry.FailedVariations         += ReportingUtilities.OneForFail(variation.Result);
                    entry.IgnoredVariations        += ReportingUtilities.OneForIgnore(variation.Result);
                    entry.FailedVariationsWithBugs += ReportingUtilities.OneForFailOnBug(variation.Result, hasBugs);
                }
                entry.TestExecutionTime += test.Duration;
            }

            foreach (ExecutionGroupRecord group in tests.ExecutionGroupRecords)
            {
                //Assumption - all areas have been defined by the list of tests scanned in above loop
                AreaSummaryEntry entry = SummaryTable[group.Area];

                entry.TotalExecutionTime += ReportingUtilities.GetGroupDuration(group);

                // Take the earliest start time as the start of the entire area.
                // BUG FIX: the old comparison (entry.StartTime < group.StartTime) kept the
                // LATEST start; since a fresh entry's StartTime is default(DateTime), treat
                // that default as "unset" and otherwise keep the minimum.
                if (entry.StartTime == default(DateTime) || group.StartTime.DateTime < entry.StartTime)
                {
                    entry.StartTime = group.StartTime.DateTime;
                }

                // Take the last end time as the end of the entire area
                if (group.EndTime.DateTime > entry.EndTime)
                {
                    entry.EndTime = group.EndTime.DateTime;
                }
            }

            return(SummaryTable);
        }
Пример #5
0
        /// <summary>
        /// Generates one report per area under an "AreaReports" directory beneath the report root.
        /// </summary>
        internal static void Generate(TestRecords tests, DirectoryInfo ReportRoot)
        {
            string areasPath = Path.Combine(ReportRoot.FullName, "AreaReports");
            DirectoryInfo areasDirectory = new DirectoryInfo(areasPath);

            ReportingUtilities.CreateAreaReportsDirectories(areasDirectory);

            // Bucket the tests by area, then emit a report for each bucket.
            Dictionary <string, Dictionary <string, TestCollection> > areas = ReportingUtilities.GroupByArea(tests);

            foreach (KeyValuePair <string, Dictionary <string, TestCollection> > bucket in areas)
            {
                Generate(bucket.Value, areasDirectory.FullName, bucket.Key);
            }
        }
Пример #6
0
        /// <summary>
        /// Writes a single "Variation" element carrying the variation's identity,
        /// timing, result, and log information.
        /// </summary>
        private static void WriteVariationNode(XmlTableWriter tableWriter, VariationRecord variation, string variationLogPath)
        {
            bool wasTruncated = false;
            string inlineLog  = ReportingUtilities.ProcessLongLog(variation.Log, variationLogPath, ref wasTruncated);

            tableWriter.WriteStartElement("Variation");
            tableWriter.WriteAttributeString("Variation", variation.VariationName);
            tableWriter.WriteAttributeString("VariationId", variation.VariationId.ToString(CultureInfo.InvariantCulture));
            tableWriter.WriteAttributeString("Duration", ReportingUtilities.FormatTimeSpanAsSeconds(ReportingUtilities.GetVariationDuration(variation)));
            tableWriter.WriteAttributeString("Result", variation.Result.ToString());
            tableWriter.WriteAttributeString("Log", inlineLog);
            if (wasTruncated)
            {
                // The full log was saved to disk; record where it can be found.
                tableWriter.WriteAttributeString("LogPath", Path.Combine(ReportingUtilities.TestLogsDir, Path.GetFileName(variationLogPath)));
            }
            tableWriter.WriteAttributeString("LogDir", ReportingUtilities.ReportPaths(variation.LoggedFiles));
            tableWriter.WriteEndElement();
        }
Пример #7
0
        /// <summary>
        ///  Write one test: a "Test" element with one child node per variation plus a
        ///  trailing test-level summary node.
        /// </summary>
        /// <param name="tableWriter">Writer</param>
        /// <param name="areaReportsPath">Area path</param>
        /// <param name="area">Area name</param>
        /// <param name="test">TestRecord</param>
        /// <param name="testIndex">Index of the test</param>
        private static void WriteTestNode(XmlTableWriter tableWriter, string areaReportsPath, string area, TestRecord test, int testIndex)
        {
            TestInfo info = test.TestInfo;

            string logsDirectory  = Path.Combine(areaReportsPath, ReportingUtilities.TestLogsDir);
            string infosDirectory = Path.Combine(areaReportsPath, ReportingUtilities.TestInfosDir);

            //Test Layer with attribute needed for all variation.
            tableWriter.WriteStartElement("Test");
            tableWriter.WriteAttributeString("Name", Escape(info.Name));
            tableWriter.WriteAttributeString("KnownBugs", test.TestInfo.Bugs.ToCommaSeparatedList());
            tableWriter.WriteAttributeString("Priority", test.TestInfo.Priority.ToString());
            tableWriter.WriteAttributeString("Machine", ReportingUtilities.ReportMachine(test.Machine));

            string infoPath = Path.Combine(infosDirectory, String.Format("{0}_{1}.xml", area, testIndex));

            //Only failing tests get their TestInfo persisted and linked.
            if (ReportingUtilities.InterpretTestOutcome(test) == Result.Fail)
            {
                ReportingUtilities.SaveTestInfo(infoPath, info);
                tableWriter.WriteAttributeString("TestInfo", Path.Combine(ReportingUtilities.TestInfosDir, Path.GetFileName(infoPath)));
            }

            for (int i = 0; i < test.Variations.Count; i++)
            {
                string variationLogPath = Path.Combine(logsDirectory, String.Format("{0}_{1}_{2}.log", area, testIndex, i));
                WriteVariationNode(tableWriter, test.Variations[i], variationLogPath);
            }

            //SEMI HACK: Create a node to hold information for test, that are not needed for most variation.
            // This solves three problems:
            //           1 - Allows us to provide Test level information in tabular form
            //           2 - Allows us to include Test level execution logs in these reports
            // Note - To nest these things attribute in parent Test node, while less hacky, would create a different semantic from the perspective of any XML viewer, which would be bad. very bad.
            string testLogPath = Path.Combine(logsDirectory, String.Format("{0}_{1}.log", area, testIndex));

            WriteChildTestNode(tableWriter, test, info, testLogPath);
            tableWriter.WriteEndElement();
        }
Пример #8
0
        /// <summary>
        /// Aggregates per-machine variation statistics (totals, failures, bug-excused
        /// failures, execution time) across all tests.
        /// </summary>
        private static Dictionary <string, MachineSummaryEntry> ProduceMachineSummaries(TestRecords tests)
        {
            Dictionary <string, MachineSummaryEntry> summaries = new Dictionary <string, MachineSummaryEntry>(StringComparer.OrdinalIgnoreCase);

            //Walk every variation of every test, accumulating stats per machine.
            foreach (TestRecord test in tests.TestCollection)
            {
                string machineName = ReportingUtilities.ReportMachine(test.Machine);

                //Create Entry if machine doesn't exist yet, else load it up (single lookup).
                MachineSummaryEntry entry;
                if (!summaries.TryGetValue(machineName, out entry))
                {
                    entry = new MachineSummaryEntry();
                    entry.TestExecutionTime = TimeSpan.Zero;
                    entry.Name = machineName;
                    summaries.Add(machineName, entry);
                }

                bool hasBugs = (test.TestInfo.Bugs != null && test.TestInfo.Bugs.Count > 0);
                foreach (VariationRecord variation in test.Variations)
                {
                    entry.TotalVariations          += ReportingUtilities.OneForCountable(variation.Result);
                    entry.FailedVariations         += ReportingUtilities.OneForFail(variation.Result);
                    entry.FailedVariationsWithBugs += ReportingUtilities.OneForFailOnBug(variation.Result, hasBugs);
                }
                entry.TestExecutionTime += ReportingUtilities.GetTestDuration(test);

                //Tests that reported no variations at all are tracked separately.
                if (test.Variations.Count == 0)
                {
                    entry.TestsWithoutVariation++;
                }
            }
            return summaries;
        }
Пример #9
0
        /// <summary>
        /// Writes the DRT report: for each non-passing test, one "Variation" row with a
        /// test-level summary followed by one row per variation.
        /// </summary>
        private static void Generate(TestRecords records, string path)
        {
            using (XmlTableWriter tableWriter = new XmlTableWriter(path))
            {
                tableWriter.AddXsl(@"DrtReport.xsl");
                tableWriter.WriteStartElement("Variations");
                tableWriter.WriteAttributeString("PassRate", ReportingUtilities.CalculatePassRate(records));

                foreach (TestRecord test in FilterNonPassingTests(records))
                {
                    TestInfo info = test.TestInfo;

                    //Test-level summary row.
                    tableWriter.WriteStartElement("Variation");
                    tableWriter.WriteAttributeString("Area", info.Area);
                    tableWriter.WriteAttributeString("TestName", info.Name);
                    tableWriter.WriteAttributeString("Variation", "Test Level Summary");
                    tableWriter.WriteAttributeString("Duration", ReportingUtilities.FormatTimeSpanAsSeconds(ReportingUtilities.GetTestDuration(test))); //Total test execution Time
                    tableWriter.WriteAttributeString("Result", ReportingUtilities.InterpretTestOutcome(test).ToString());
                    tableWriter.WriteAttributeString("Log", test.Log);
                    tableWriter.WriteAttributeString("LogDir", ReportingUtilities.ReportPaths(test.LoggedFiles));
                    tableWriter.WriteEndElement();

                    //One row per variation of the non-passing test.
                    foreach (VariationRecord variation in test.Variations)
                    {
                        tableWriter.WriteStartElement("Variation");
                        tableWriter.WriteAttributeString("Area", info.Area);
                        tableWriter.WriteAttributeString("TestName", info.Name);
                        tableWriter.WriteAttributeString("Variation", variation.VariationName);
                        tableWriter.WriteAttributeString("Duration", ReportingUtilities.FormatTimeSpanAsSeconds(ReportingUtilities.GetVariationDuration(variation)));
                        tableWriter.WriteAttributeString("Result", variation.Result.ToString());
                        tableWriter.WriteAttributeString("Log", variation.Log);
                        tableWriter.WriteAttributeString("LogDir", ReportingUtilities.ReportPaths(variation.LoggedFiles));
                        tableWriter.WriteEndElement();
                    }
                }
                tableWriter.WriteEndElement();
            }
        }
Пример #10
0
        /// <summary>
        /// Generates an xUnit v2 style results file (testResults.xml) under the report root:
        /// one "assembly" element per area, one "collection" per test, one "test" per variation.
        /// </summary>
        internal static void Generate(TestRecords records, DirectoryInfo ReportRoot)
        {
            // See https://xunit.net/docs/format-xml-v2.html for the documentation on how to format xunit logs
            var root = new XElement("assemblies");

            // We run our tests by Area in Helix. The SummaryReportGenerator does a nice job of aggregating the TestRecords
            // by area already, so we'll reuse that. This ensures we aren't making too many assumptions (like that all records are in the same Area)
            // that could break later.
            var resultsByArea = SummaryReportGenerator.ProduceAreaSummaries(records);

            foreach (SummaryReportGenerator.AreaSummaryEntry areaEntry in resultsByArea.Values)
            {
                string assemblyName = areaEntry.AssociatedTests.FirstOrDefault()?.TestInfo.DriverParameters["exe"];
                var    assembly     = new XElement("assembly");
                assembly.SetAttributeValue("name", assemblyName);
                assembly.SetAttributeValue("test-framework", "QualityVault");
                // BUG FIX: the format was "yyyy-mm-dd" — "mm" means minutes; "MM" is the month.
                // InvariantCulture keeps machine-readable date/time text stable across host locales.
                assembly.SetAttributeValue("run-date", DateTime.Now.ToString("yyyy-MM-dd", CultureInfo.InvariantCulture));
                assembly.SetAttributeValue("run-time", areaEntry.StartTime.ToString(@"hh\:mm\:ss", CultureInfo.InvariantCulture));
                assembly.SetAttributeValue("time", (areaEntry.EndTime - areaEntry.StartTime).TotalSeconds);
                assembly.SetAttributeValue("total", areaEntry.TotalVariations);
                assembly.SetAttributeValue("passed", areaEntry.TotalVariations - areaEntry.FailedVariations - areaEntry.IgnoredVariations);
                assembly.SetAttributeValue("failed", areaEntry.FailedVariations);
                assembly.SetAttributeValue("skipped", areaEntry.IgnoredVariations);
                assembly.SetAttributeValue("errors", 0);
                root.Add(assembly);

                foreach (TestRecord testRecord in areaEntry.AssociatedTests)
                {
                    var collection       = new XElement("collection");
                    int testPassedCount  = testRecord.Variations.Where(variation => variation.Result == Result.Pass).Count();
                    int testFailedCount  = testRecord.Variations.Where(variation => variation.Result == Result.Fail).Count();
                    int testSkippedCount = testRecord.Variations.Where(variation => variation.Result == Result.Ignore).Count();

                    collection.SetAttributeValue("total", testRecord.Variations.Count);
                    collection.SetAttributeValue("passed", testPassedCount);
                    collection.SetAttributeValue("failed", testFailedCount);
                    collection.SetAttributeValue("skipped", testSkippedCount);
                    collection.SetAttributeValue("name", testRecord.TestInfo.Name);
                    collection.SetAttributeValue("time", ReportingUtilities.FormatTimeSpanAsSeconds(ReportingUtilities.GetTestDuration(testRecord)));
                    assembly.Add(collection);

                    foreach (VariationRecord variation in testRecord.Variations)
                    {
                        var    test       = new XElement("test");
                        string className  = testRecord.TestInfo.DriverParameters["class"];
                        string methodName = testRecord.TestInfo.DriverParameters["method"];

                        test.SetAttributeValue("type", className);
                        test.SetAttributeValue("method", methodName);

                        test.SetAttributeValue("name", variation.VariationName);
                        // NOTE(review): this records the whole test's duration on every variation,
                        // and the failure message below uses the test-level log rather than
                        // variation.Log — per-variation values may be intended; confirm before changing.
                        test.SetAttributeValue("time", testRecord.Duration.TotalSeconds);
                        test.SetAttributeValue("result", variation.Result == Result.Ignore ? "Skip" : variation.Result.ToString());

                        if (variation.Result != Result.Pass)
                        {
                            var failure = new XElement("failure");
                            failure.SetAttributeValue("exception-type", "Exception");

                            var message = new XElement("message");

                            StringBuilder errorMessage = new StringBuilder();

                            errorMessage.AppendLine("Error Log: ");
                            errorMessage.AppendLine(testRecord.Log);

                            message.Add(new XCData(errorMessage.ToString()));
                            failure.Add(message);

                            test.Add(failure);
                        }
                        collection.Add(test);
                    }
                }
            }


            string xunitOutputPath = Path.Combine(ReportRoot.FullName, "testResults.xml");

            File.WriteAllText(xunitOutputPath, root.ToString());
        }
Пример #11
0
        /// <summary>
        /// Produces a simple console summary report: overall pass/fail/ignore tallies,
        /// and (when built with REPORT_VERSIONS) a per-version breakdown with samples.
        /// </summary>
        internal static void WriteSummaryToConsole(TestRecords results)
        {
            Console.WriteLine();
            Console.WriteLine("A total of {0} test Infos were processed, with the following results.", results.TestCollection.Count);
            if (results.TestCollection.Count > 0)
            {
                // Running tallies. A failing test with known bugs is counted in both Fail
                // and FailWithBugID, so "need to analyze" below is Fail - FailWithBugID.
                int Pass          = 0;
                int Fail          = 0;
                int FailWithBugID = 0;
                int Ignore        = 0;

                #if REPORT_VERSIONS
                // Optional build flavor: maps a version key to (count, up to 5 sample TestInfos).
                Dictionary <String, Tuple <int, List <TestInfo> > > dict = new Dictionary <String, Tuple <int, List <TestInfo> > >();
                #endif

                foreach (TestRecord test in results.TestCollection)
                {
                    TestInfo testInfo = test.TestInfo;

                    // Classify each test by its overall interpreted outcome.
                    switch (ReportingUtilities.InterpretTestOutcome(test))
                    {
                    case Result.Ignore:
                        Ignore++;
                        break;

                    case Result.Pass:
                        Pass++;
                        break;

                    case Result.Fail:
                        Fail++;
                        // Failures already associated with bug IDs are reported separately.
                        if (ReportingUtilities.TestHasBugs(testInfo))
                        {
                            FailWithBugID++;
                        }
                        break;
                    }

                    #if REPORT_VERSIONS
                    // Key the version bucket by the comma-joined Versions list (empty when unset).
                    String key = (testInfo.Versions == null)
                        ? String.Empty
                        : String.Join(",", ToArray(testInfo.Versions));

                    if (!dict.ContainsKey(key))
                    {
                        dict.Add(key, new Tuple <int, List <TestInfo> >(0, new List <TestInfo>()));
                    }

                    Tuple <int, List <TestInfo> > tuple = dict[key];
                    List <TestInfo> list = tuple.Item2;
                    // Keep at most five example tests per version group to keep output short.
                    if (list.Count < 5)
                    {
                        list.Add(testInfo);
                    }
                    dict[key] = new Tuple <int, List <TestInfo> >(tuple.Item1 + 1, list);
                    #endif
                }

                Console.WriteLine(" Passed: {0}", Pass);
                Console.WriteLine(" Failed (need to analyze): {0}", Fail - FailWithBugID);
                Console.WriteLine(" Failed (with BugIDs): {0}", FailWithBugID);
                Console.WriteLine(" Ignore: {0}", Ignore);
                Console.WriteLine();

                #if REPORT_VERSIONS
                // Per-version breakdown: total count plus the sampled tests for each group.
                foreach (KeyValuePair <string, Tuple <int, List <TestInfo> > > kvp in dict)
                {
                    Tuple <int, List <TestInfo> > tuple = kvp.Value;
                    Console.WriteLine("{0} tests with versions '{1}'", tuple.Item1, kvp.Key);
                    foreach (TestInfo testInfo in tuple.Item2)
                    {
                        Console.WriteLine("   /Area={0} /SubArea={1} /Name={2}",
                                          testInfo.Area, testInfo.SubArea, testInfo.Name);
                    }
                }
                #endif
            }
        }
Пример #12
0
        /// <summary>
        /// Returns the subset of tests whose overall interpreted outcome is anything other than Pass.
        /// </summary>
        internal static TestCollection FilterNonPassingTests(TestRecords tests)
        {
            IEnumerable <TestRecord> nonPassing =
                tests.TestCollection.Where(t => ReportingUtilities.InterpretTestOutcome(t) != Result.Pass);

            return new TestCollection(nonPassing);
        }