Example #1
        static async Task <int> RunTests(string[] original_args)
        {
            Console.WriteLine("Running tests");
            var options = ApplicationOptions.Current;

            // we generate the logs in two different ways depending if the generate xml flag was
            // provided. If it was, we will write the xml file to the tcp writer if present, else
            // we will write the normal console output using the LogWriter
            var logger = new LogWriter(Console.Out);

            logger.MinimumLogLevel = MinimumLogLevel.Info;
            var testAssemblies = GetTestAssemblies();
            var runner         = RegisterType.IsXUnit ? (TestRunner) new XUnitTestRunner(logger) : new NUnitTestRunner(logger);
            var categories     = IgnoreFileParser.ParseTraitsContentFile(NSBundle.MainBundle.ResourcePath, RegisterType.IsXUnit);

            runner.SkipCategories(categories);
            var skippedTests = IgnoreFileParser.ParseContentFiles(NSBundle.MainBundle.ResourcePath);

            if (skippedTests.Any())
            {
                // ensure that we skip those tests that have been passed via the ignore files
                runner.SkipTests(skippedTests);
            }
            await runner.Run(testAssemblies).ConfigureAwait(false);

            if (options.ResultFile != null)
            {
                using (var writer = new StreamWriter(options.ResultFile))
                {
                    runner.WriteResultsToFile(writer, TestRunner.Jargon.NUnitV3);
                }
                logger.Info($"Xml result can be found {options.ResultFile}");
            }

            logger.Info($"Tests run: {runner.TotalTests} Passed: {runner.PassedTests} Inconclusive: {runner.InconclusiveTests} Failed: {runner.FailedTests} Ignored: {runner.FilteredTests + runner.SkippedTests}");

            return(runner.FailedTests != 0 ? 1 : 0);
        }
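A minimal sketch of how this entry point could be consumed; the Main wrapper below is an assumption for illustration and not part of the example above:

        // Sketch only (assumed caller): forward the command-line arguments and
        // surface the result of RunTests as the process exit code
        // (1 = at least one test failed, 0 = all passed).
        static async Task<int> Main(string[] args)
        {
            return await RunTests(args).ConfigureAwait(false);
        }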
Example #2

        void RunTests()
        {
            var        options = ApplicationOptions.Current;
            TextWriter writer  = null;

            if (!string.IsNullOrEmpty(options.HostName) && string.IsNullOrEmpty(options.LogFile))
            {
                http_writer = new HttpTextWriter()
                {
                    HostName = options.HostName.Split(',')[0], Port = options.HostPort
                };
                Console.WriteLine("Sending results to {0}:{1} using HTTP", http_writer.HostName, http_writer.Port);
                http_writer.Open();
                writer = http_writer;
            }
            else if (!string.IsNullOrEmpty(options.LogFile))
            {
                writer = new StreamWriter(options.LogFile);
            }

            // we generate the logs in two different ways depending if the generate xml flag was
            // provided. If it was, we will write the xml file to the tcp writer if present, else
            // we will write the normal console output using the LogWriter
            var logger = (writer == null || options.EnableXml) ? new LogWriter() : new LogWriter(writer);

            logger.MinimumLogLevel = MinimumLogLevel.Info;
            var testAssemblies = GetTestAssemblies();

            runner = RegisterType.IsXUnit ? (Xamarin.iOS.UnitTests.TestRunner) new XUnitTestRunner(logger) : new NUnitTestRunner(logger);
            var categories = IgnoreFileParser.ParseTraitsContentFile(NSBundle.MainBundle.BundlePath, RegisterType.IsXUnit);

            // add category filters if they have been added
            runner.SkipCategories(categories);

            // if we have ignore files, ignore those tests
            var skippedTests = IgnoreFileParser.ParseContentFiles(NSBundle.MainBundle.BundlePath);

            if (skippedTests.Any())
            {
                // ensure that we skip those tests that have been passed via the ignore files
                runner.SkipTests(skippedTests);
            }

            ThreadPool.QueueUserWorkItem((v) =>
            {
                BeginInvokeOnMainThread(async() =>
                {
                    lblStatus.SetText(string.Format("{0} tests", runner.TotalTests));
                    await runner.Run(testAssemblies).ConfigureAwait(false);
                    RenderResults();
                    cmdRun.SetEnabled(true);
                    cmdRun.SetHidden(false);
                    if (options.EnableXml)
                    {
                        runner.WriteResultsToFile(writer);
                        logger.Info("Xml file was written to the http listener.");
                    }
                    else
                    {
                        string resultsFilePath = runner.WriteResultsToFile();
                        logger.Info($"Xml result can be found {resultsFilePath}");
                    }
                    logger.Info($"Tests run: {runner.TotalTests} Passed: {runner.PassedTests} Inconclusive: {runner.InconclusiveTests} Failed: {runner.FailedTests} Ignored: {runner.FilteredTests}");
                    if (options.TerminateAfterExecution)
                    {
                        var writer_finished_task = http_writer?.FinishedTask;
                        http_writer?.Close();
                        Task.Run(async() => {
                            if (writer_finished_task != null)
                            {
                                await writer_finished_task;
                            }
                            TerminateWithSuccess();
                        });
                    }
                });
            });
        }
Example #3
        void RunTests()
        {
            var        options = ApplicationOptions.Current;
            TextWriter writer  = null;

            if (!string.IsNullOrEmpty(options.HostName) && string.IsNullOrEmpty(options.LogFile))
            {
                writer = new HttpTextWriter()
                {
                    HostName = options.HostName, Port = options.HostPort
                };
            }
            if (!string.IsNullOrEmpty(options.LogFile))
            {
                writer = new StreamWriter(options.LogFile);
            }

            // we generate the logs in two different ways depending if the generate xml flag was
            // provided. If it was, we will write the xml file to the tcp writer if present, else
            // we will write the normal console output using the LogWriter
            var logger = (writer == null || options.EnableXml) ? new LogWriter() : new LogWriter(writer);

            logger.MinimumLogLevel = MinimumLogLevel.Info;
            var testAssemblies = GetTestAssemblies();

            runner = RegisterType.IsXUnit ? (Xamarin.iOS.UnitTests.TestRunner) new XUnitTestRunner(logger) : new NUnitTestRunner(logger);
            var categories = RegisterType.IsXUnit
                ? new List <string> {
                    "failing",
                    "nonmonotests",
                    "outerloop",
                    "nonosxtests"
                }
                : new List <string> {
                    "MobileNotWorking",
                    "NotOnMac",
                    "NotWorking",
                    "ValueAdd",
                    "CAS",
                    "InetAccess",
                    "NotWorkingLinqInterpreter",
                    "BitcodeNotSupported",
                };

            if (RegisterType.IsXUnit)
            {
                // special case when we are using the xunit runner,
                // there is a trait we are not interested in which is
                // the Benchmark one
                var xunitRunner = runner as XUnitTestRunner;
                xunitRunner.AddFilter(XUnitFilter.CreateTraitFilter("Benchmark", "true", true));
            }
            // add category filters if they have been added
            runner.SkipCategories(categories);

            // if we have ignore files, ignore those tests
            var skippedTests = IgnoreFileParser.ParseContentFiles(NSBundle.MainBundle.BundlePath);

            if (skippedTests.Any())
            {
                // ensure that we skip those tests that have been passed via the ignore files
                runner.SkipTests(skippedTests);
            }

            ThreadPool.QueueUserWorkItem((v) =>
            {
                BeginInvokeOnMainThread(() =>
                {
                    lblStatus.SetText(string.Format("{0} tests", runner.TotalTests));
                    runner.Run((IList <TestAssemblyInfo>)testAssemblies);
                    RenderResults();
                    cmdRun.SetEnabled(true);
                    cmdRun.SetHidden(false);
                    if (options.EnableXml)
                    {
                        runner.WriteResultsToFile(writer);
                        logger.Info("Xml file was written to the http listener.");
                    }
                    else
                    {
                        string resultsFilePath = runner.WriteResultsToFile();
                        logger.Info($"Xml result can be found {resultsFilePath}");
                    }
                    logger.Info($"Tests run: {runner.TotalTests} Passed: {runner.PassedTests} Inconclusive: {runner.InconclusiveTests} Failed: {runner.FailedTests} Ignored: {runner.FilteredTests}");
                    if (options.TerminateAfterExecution)
                    {
                        TerminateWithSuccess();
                    }
                });
            });
        }

        void RenderResults()
        {
            var options = ApplicationOptions.Current;

            if (runner.TotalTests == 0)
            {
                return;
            }

            lblSuccess.SetText(string.Format("P: {0}/{1} {2}%", runner.PassedTests, runner.TotalTests, 100 * runner.PassedTests / runner.TotalTests));
            lblFailed.SetText(string.Format("F: {0}/{1} {2}%", runner.FailedTests, runner.TotalTests, 100 * runner.FailedTests / runner.TotalTests));
            lblIgnInc.SetText(string.Format("I: {0}/{1} {2}%", (runner.SkippedTests + runner.InconclusiveTests), runner.TotalTests, 100 * (runner.SkippedTests + runner.InconclusiveTests) / runner.TotalTests));

            if (running == false && runner.PassedTests > 0)
            {
                if (runner.FailedTests == 0)
                {
                    lblSuccess.SetTextColor(UIKit.UIColor.Green);
                    lblStatus.SetTextColor(UIKit.UIColor.Green);
                    lblStatus.SetText("Success");
                }
                if (runner.FailedTests > 0)
                {
                    lblFailed.SetTextColor(UIKit.UIColor.Red);
                    lblStatus.SetTextColor(UIKit.UIColor.Red);
                    lblStatus.SetText("Failed");
                }
            }
        }

        partial void RunTests(NSObject obj)
        {
            RunTests();
        }
    }
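The Benchmark exclusion in the example above goes through the runner's own XUnitFilter type. As a standalone illustration of the same trait-exclusion idea, here is a hypothetical helper; it is not the XUnitFilter API, and the flat string-to-string trait dictionary is a simplification:

        // Hypothetical helper, for illustration only: a test would be skipped when
        // its traits contain the excluded name/value pair (case-insensitive match).
        static bool HasExcludedTrait(IDictionary<string, string> traits, string name, string value)
        {
            return traits != null
                && traits.TryGetValue(name, out var actual)
                && string.Equals(actual, value, StringComparison.OrdinalIgnoreCase);
        }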
        static int RunTests(string [] original_args)
        {
            Console.WriteLine("Running tests");
            var options = ApplicationOptions.Current;

            // we generate the logs in two different ways depending if the generate xml flag was
            // provided. If it was, we will write the xml file to the tcp writer if present, else
            // we will write the normal console output using the LogWriter
            var logger = new LogWriter(Console.Out);

            logger.MinimumLogLevel = MinimumLogLevel.Info;
            var testAssemblies = GetTestAssemblies();
            var runner         = RegisterType.IsXUnit ? (TestRunner) new XUnitTestRunner(logger) : new NUnitTestRunner(logger);
            var categories     = RegisterType.IsXUnit
                ? new List <string> {
                    "failing",
                    "nonmonotests",
                    "outerloop",
                    "nonosxtests"
                }
                : new List <string> {
                    "MacNotWorking",
                    "MobileNotWorking",
                    "NotOnMac",
                    "NotWorking",
                    "ValueAdd",
                    "CAS",
                    "InetAccess",
                    "NotWorkingLinqInterpreter"
                };

            if (RegisterType.IsXUnit)
            {
                // special case when we are using the xunit runner,
                // there is a trait we are not interested in which is
                // the Benchmark one
                var xunitRunner = runner as XUnitTestRunner;
                xunitRunner.AddFilter(XUnitFilter.CreateTraitFilter("Benchmark", "true", true));
            }

            runner.SkipCategories(categories);
            var skippedTests = IgnoreFileParser.ParseContentFiles(NSBundle.MainBundle.ResourcePath);

            if (skippedTests.Any())
            {
                // ensure that we skip those tests that have been passed via the ignore files
                runner.SkipTests(skippedTests);
            }
            runner.Run(testAssemblies.ToList());

            if (options.ResultFile != null)
            {
                using (var writer = new StreamWriter(options.ResultFile)) {
                    runner.WriteResultsToFile(writer);
                }
                logger.Info($"Xml result can be found {options.ResultFile}");
            }

            logger.Info($"Tests run: {runner.TotalTests} Passed: {runner.PassedTests} Inconclusive: {runner.InconclusiveTests} Failed: {runner.FailedTests} Ignored: {runner.FilteredTests}");
            return(runner.FailedTests != 0 ? 1 : 0);
        }
    public void WidgetPerformUpdate(Action <NCUpdateResult> completionHandler)
    {
        var        options = ApplicationOptions.Current;
        TextWriter writer  = null;

        if (!string.IsNullOrEmpty(options.LogFile))
        {
            writer = new StreamWriter(options.LogFile);
        }

        // we generate the logs in two different ways depending if the generate xml flag was
        // provided. If it was, we will write the xml file to the tcp writer if present, else
        // we will write the normal console output using the LogWriter
        var logger = (writer == null || options.EnableXml) ? new LogWriter() : new LogWriter(writer);

        logger.MinimumLogLevel = MinimumLogLevel.Info;
        var testAssemblies = GetTestAssemblies();

        runner = RegisterType.IsXUnit ? (TestRunner) new XUnitTestRunner(logger) : new NUnitTestRunner(logger);
        var categories = RegisterType.IsXUnit
            ? new List <string> {
                "failing",
                "nonmonotests",
                "outerloop",
                "nonosxtests"
            }
            : new List <string> {
                "MobileNotWorking",
                "NotOnMac",
                "NotWorking",
                "ValueAdd",
                "CAS",
                "InetAccess",
                "NotWorkingLinqInterpreter",
                "BitcodeNotSupported",
            };

        if (RegisterType.IsXUnit)
        {
            // special case when we are using the xunit runner,
            // there is a trait we are not interested in which is
            // the Benchmark one
            var xunitRunner = runner as XUnitTestRunner;
            xunitRunner.AddFilter(XUnitFilter.CreateTraitFilter("Benchmark", "true", true));
        }
        // add category filters if they have been added
        runner.SkipCategories(categories);

        // if we have ignore files, ignore those tests
        var skippedTests = IgnoreFileParser.ParseContentFiles(NSBundle.MainBundle.BundlePath);

        if (skippedTests.Any())
        {
            // ensure that we skip those tests that have been passed via the ignore files
            runner.SkipTests(skippedTests);
        }

        ThreadPool.QueueUserWorkItem((v) =>
        {
            BeginInvokeOnMainThread(() =>
            {
                runner.Run((IList <TestAssemblyInfo>)testAssemblies);
            });
        });

        completionHandler(NCUpdateResult.NewData);
    }
        void RunTests()
        {
            var        options = ApplicationOptions.Current;
            TextWriter writer  = null;

            if (!string.IsNullOrEmpty(options.HostName) && string.IsNullOrEmpty(options.LogFile))
            {
                writer = new HttpTextWriter()
                {
                    HostName = options.HostName, Port = options.HostPort
                };
            }
            if (!string.IsNullOrEmpty(options.LogFile))
            {
                writer = new StreamWriter(options.LogFile);
            }

            // we generate the logs in two different ways depending if the generate xml flag was
            // provided. If it was, we will write the xml file to the tcp writer if present, else
            // we will write the normal console output using the LogWriter
            var logger = (writer == null || options.EnableXml) ? new LogWriter() : new LogWriter(writer);

            logger.MinimumLogLevel = MinimumLogLevel.Info;
            var testAssemblies = GetTestAssemblies();

            if (RegisterType.IsXUnit)
            {
                runner = new XUnitTestRunner(logger);
            }
            else
            {
                runner = new NUnitTestRunner(logger);
            }

            var skippedTests = IgnoreFileParser.ParseContentFiles(NSBundle.MainBundle.BundlePath);

            if (skippedTests.Any())
            {
                // ensure that we skip those tests that have been passed via the ignore files
                runner.SkipTests(skippedTests);
            }

            ThreadPool.QueueUserWorkItem((v) =>
            {
                BeginInvokeOnMainThread(() =>
                {
                    lblStatus.SetText(string.Format("{0} tests", runner.TotalTests));
                    runner.Run((IList <TestAssemblyInfo>)testAssemblies);
                    RenderResults();
                    cmdRun.SetEnabled(true);
                    cmdRun.SetHidden(false);
                    if (options.EnableXml)
                    {
                        runner.WriteResultsToFile(writer);
                        logger.Info("Xml file was written to the http listener.");
                    }
                    else
                    {
                        string resultsFilePath = runner.WriteResultsToFile();
                        logger.Info($"Xml result can be found {resultsFilePath}");
                    }
                    logger.Info($"Tests run: {runner.TotalTests} Passed: {runner.PassedTests} Inconclusive: {runner.InconclusiveTests} Failed: {runner.FailedTests} Ignored: {runner.SkippedTests}");
                    if (options.TerminateAfterExecution)
                    {
                        TerminateWithSuccess();
                    }
                });
            });
        }

        void RenderResults()
        {
            var options = ApplicationOptions.Current;

            if (runner.TotalTests == 0)
            {
                return;
            }

            lblSuccess.SetText(string.Format("P: {0}/{1} {2}%", runner.PassedTests, runner.TotalTests, 100 * runner.PassedTests / runner.TotalTests));
            lblFailed.SetText(string.Format("F: {0}/{1} {2}%", runner.FailedTests, runner.TotalTests, 100 * runner.FailedTests / runner.TotalTests));
            lblIgnInc.SetText(string.Format("I: {0}/{1} {2}%", (runner.SkippedTests + runner.InconclusiveTests), runner.TotalTests, 100 * (runner.SkippedTests + runner.InconclusiveTests) / runner.TotalTests));

            if (running == false && runner.PassedTests > 0)
            {
                if (runner.FailedTests == 0)
                {
                    lblSuccess.SetTextColor(UIKit.UIColor.Green);
                    lblStatus.SetTextColor(UIKit.UIColor.Green);
                    lblStatus.SetText("Success");
                }
                if (runner.FailedTests > 0)
                {
                    lblFailed.SetTextColor(UIKit.UIColor.Red);
                    lblStatus.SetTextColor(UIKit.UIColor.Red);
                    lblStatus.SetText("Failed");
                }
            }
        }

        partial void RunTests(NSObject obj)
        {
            RunTests();
        }
    }