public void TestCustomLogger()
{
    // Capture all engine output in memory so it can be compared to the oracle below.
    InMemoryLogger log = new InMemoryLogger();
    var config = this.GetConfiguration().WithVerbosityEnabled().WithTestingIterations(3);

    // FIX: TestingEngine is IDisposable (it is disposed with a using declaration in the
    // other tests in this file); dispose it here too so the log/trace resources are released.
    using TestingEngine engine = TestingEngine.Create(config, (ICoyoteRuntime runtime) =>
    {
        runtime.Logger.WriteLine("Hi mom!");
    });

    engine.Logger = log;
    engine.Run();

    var result = log.ToString();
    result = result.RemoveNonDeterministicValues();

    // Expected output for 3 iterations; the seed is scrubbed by RemoveNonDeterministicValues.
    var expected = @"... Task 0 is using 'random' strategy (seed:4005173804).
..... Iteration #1
<TestLog> Running test.
Hi mom!
..... Iteration #2
<TestLog> Running test.
Hi mom!
..... Iteration #3
<TestLog> Running test.
Hi mom!
";
    expected = expected.RemoveNonDeterministicValues();
    Assert.Equal(expected, result);
}
/// <summary>
/// Replays the schedule file specified in the configuration, tracking telemetry
/// for the replay and (when not debugging) its elapsed time.
/// </summary>
private static void ReplayTest()
{
    TelemetryClient = new CoyoteTelemetryClient(Configuration);
    // NOTE(review): blocking .Wait() on the telemetry tasks is intentional here —
    // this is a synchronous CLI entry path, not library code.
    TelemetryClient.TrackEventAsync("replay").Wait();
    if (Debugger.IsAttached)
    {
        TelemetryClient.TrackEventAsync("replay-debug").Wait();
    }

    Stopwatch watch = new Stopwatch();
    watch.Start();

    // Set some replay specific options.
    Configuration.SchedulingStrategy = "replay";
    Configuration.EnableColoredConsoleOutput = true;
    Configuration.DisableEnvironmentExit = false;

    Console.WriteLine($". Replaying {Configuration.ScheduleFile}");
    // FIX: dispose the engine (TestingEngine is IDisposable; see the other
    // command-line paths that use a using declaration).
    using TestingEngine engine = TestingEngine.Create(Configuration);
    engine.Run();

    Console.WriteLine(engine.GetReport());

    watch.Stop();
    if (!Debugger.IsAttached)
    {
        TelemetryClient.TrackMetricAsync("replay-time", watch.Elapsed.TotalSeconds).Wait();
    }
}
/// <summary>
/// Convenience overload: builds a testing engine for the given controlled-runtime
/// test delegate and forwards to the engine-based <c>RunTest</c>.
/// </summary>
private static void RunTest(Func<ICoyoteRuntime, Task> test, Configuration configuration, string testName, params string[] expectedBugs)
{
    TestingEngine testingEngine = TestingEngine.Create(configuration, test);
    RunTest(testingEngine, testName, expectedBugs);
}
/// <summary>
/// Convenience overload: builds a testing engine for the given actor-runtime
/// test delegate and forwards to the engine-based <c>RunTest</c>.
/// </summary>
private static void RunTest(Action<IActorRuntime> test, Configuration configuration, string testName, params string[] expectedBugs)
{
    TestingEngine testingEngine = TestingEngine.Create(configuration, test);
    RunTest(testingEngine, testName, expectedBugs);
}
// Verifies that a custom IActorRuntimeLog receives the expected callback sequence
// (two machine creations and two state transitions) while the test runs.
public void TestCustomActorRuntimeLogFormatter()
{
    var logger = new CustomActorRuntimeLog();

    // Test workload: register the custom log and create machine M
    // (which in turn creates machine N — see the expected trace below).
    Action<IActorRuntime> test = r =>
    {
        r.RegisterLog(logger);
        r.CreateActor(typeof(M));
    };

    // DFS exploration makes the schedule deterministic so the trace can be compared exactly.
    TestingEngine engine = TestingEngine.Create(GetConfiguration().WithDFSStrategy(), test);
    try
    {
        engine.Run();

        // Exactly one bug (the test assertion in M.Act) is expected.
        var numErrors = engine.TestReport.NumOfFoundBugs;
        Assert.True(numErrors == 1, GetBugReport(engine));
        Assert.True(engine.ReadableTrace != null, "Readable trace is null.");
        Assert.True(engine.ReadableTrace.Length > 0, "Readable trace is empty.");

        string expected = @"CreateStateMachine
StateTransition
CreateStateMachine
StateTransition
";
        string actual = RemoveNonDeterministicValuesFromReport(logger.ToString());
        Assert.Equal(expected, actual);
    }
    catch (Exception ex)
    {
        // Surface any unexpected engine failure as a test failure with its stack trace.
        Assert.False(true, ex.Message + "\n" + ex.StackTrace);
    }
}
public static void Main(string[] args)
{
    // Run 100 testing iterations, each capped at 100 scheduling steps.
    // Optional: increases verbosity level to see the Coyote runtime log.
    Configuration configuration = Configuration.Create().WithTestingIterations(100);
    configuration.WithMaxSchedulingSteps(100);

    TestingEngine engine = TestingEngine.Create(configuration, DefaultImpl.Execute);
    engine.Run();

    // Exit with a non-zero code when the tester found a bug.
    var bug = engine.TestReport.BugReports.FirstOrDefault();
    if (bug is null)
    {
        Environment.Exit(0);
    }

    Console.WriteLine(bug);
    Environment.Exit(1);

    // For debugging: replay a recorded buggy schedule and single-step through it.
    /*
     * Configuration configuration = Configuration.Create();
     * configuration.WithVerbosityEnabled(true);
     * // update the path to the schedule file.
     * configuration.WithReplayStrategy("AfterNewUpdate.schedule");
     * TestingEngine engine = TestingEngine.Create(configuration, DefaultImpl.Execute);
     * engine.Run();
     * string bug = engine.TestReport.BugReports.FirstOrDefault();
     * if (bug != null)
     * {
     *     Console.WriteLine(bug);
     * }
     */
}
// Benchmark: measures raw iteration throughput of task-interleaving exploration.
// Output is discarded via NullLogger so logging cost does not skew the measurement.
public void MeasureTaskInterleavingsThroughput()
{
    var configuration = Configuration.Create().WithTestingIterations(100);
    // FIX: TestingEngine is IDisposable; dispose it so each benchmark run
    // releases its resources (matches the using pattern used elsewhere).
    using var testingEngine = TestingEngine.Create(configuration, this.RunTaskInterleavings);
    testingEngine.Logger = new IO.NullLogger();
    testingEngine.Run();
}
// Verifies that the XML log formatter emits the expected structured trace
// for the M/N actor scenario under deterministic DFS exploration.
public void TestActorRuntimeXmlLogFormatter()
{
    // The XML formatter writes into this builder; compared against the oracle below.
    StringBuilder builder = new StringBuilder();
    var logger = new ActorRuntimeLogXmlFormatter(XmlWriter.Create(builder, new XmlWriterSettings() { Indent = true, IndentChars = " " }));

    // Test workload: register the XML log and create machine M (which creates N).
    Action<IActorRuntime> test = r =>
    {
        r.RegisterLog(logger);
        r.CreateActor(typeof(M));
    };

    // DFS exploration makes the schedule deterministic so the XML can be compared exactly.
    TestingEngine engine = TestingEngine.Create(GetConfiguration().WithDFSStrategy(), test);
    try
    {
        engine.Run();

        // Exactly one bug (the test assertion in M.Act) is expected.
        var numErrors = engine.TestReport.NumOfFoundBugs;
        Assert.True(numErrors == 1, GetBugReport(engine));

        string expected = @"<?xml version='1.0' encoding='utf-16'?>
<Log>
 <CreateStateMachine id='M()' creatorName='' creatorType='task' />
 <State id='M()' state='Init' isEntry='True' />
 <Action id='M()' state='Init' action='InitOnEntry' />
 <CreateStateMachine id='N()' creatorName='M()' creatorType='M' />
 <Send target='N()' senderName='M()' senderType='M' senderState='Init' event='E' isTargetHalted='False' />
 <EnqueueEvent id='N()' event='E' />
 <State id='N()' state='Init' isEntry='True' />
 <DequeueEvent id='N()' state='Init' event='E' />
 <Action id='N()' state='Init' action='Act' />
 <Send target='M()' senderName='N()' senderType='N' senderState='Init' event='E' isTargetHalted='False' />
 <EnqueueEvent id='M()' event='E' />
 <DequeueEvent id='M()' state='Init' event='E' />
 <Action id='M()' state='Init' action='Act' />
 <AssertionFailure><ErrorLog> Reached test assertion.</AssertionFailure>
 <AssertionFailure>StackTrace:
at Microsoft.Coyote.SystematicTesting.Tests.Runtime.CustomActorRuntimeLogTests.M.Act()
</AssertionFailure>
 <Strategy strategy='dfs'>dfs</Strategy>
</Log>
";
        // The XmlWriter emits double quotes; normalize to single quotes to match the oracle,
        // then scrub non-deterministic values and the machine-specific stack trace.
        string actual = builder.ToString().Replace("\"", "'");
        actual = RemoveNonDeterministicValuesFromReport(actual);
        actual = RemoveStackTraceFromXmlReport(actual);
        Assert.Equal(expected, actual);
    }
    catch (Exception ex)
    {
        // Surface any unexpected engine failure as a test failure with its stack trace.
        Assert.False(true, ex.Message + "\n" + ex.StackTrace);
    }
}
/// <summary>
/// Replays the schedule file specified in the configuration and prints the report.
/// </summary>
private static void ReplayTest()
{
    // Set some replay specific options.
    Configuration.SchedulingStrategy = "replay";
    Configuration.EnableColoredConsoleOutput = true;
    Configuration.DisableEnvironmentExit = false;

    Console.WriteLine($". Replaying {Configuration.ScheduleFile}");
    // FIX: dispose the engine (TestingEngine is IDisposable; the testing path
    // in this tool already uses a using declaration).
    using TestingEngine engine = TestingEngine.Create(Configuration);
    engine.Run();

    Console.WriteLine(engine.GetReport());
}
// Verifies that a custom text formatter subclass produces the expected readable
// trace for the M/N actor scenario under deterministic DFS exploration.
public void TestCustomActorRuntimeLogTextFormatter()
{
    var logger = new CustomActorRuntimeLogSubclass();

    // Test workload: register the custom log and create machine M (which creates N).
    Action<IActorRuntime> test = r =>
    {
        r.RegisterLog(logger);
        r.CreateActor(typeof(M));
    };

    // DFS exploration makes the schedule deterministic so the trace can be compared exactly.
    TestingEngine engine = TestingEngine.Create(GetConfiguration().WithDFSStrategy(), test);
    try
    {
        engine.Run();

        // Exactly one bug (the test assertion in M.Act) is expected.
        var numErrors = engine.TestReport.NumOfFoundBugs;
        Assert.True(numErrors == 1, GetBugReport(engine));
        Assert.True(engine.ReadableTrace != null, "Readable trace is null.");
        Assert.True(engine.ReadableTrace.Length > 0, "Readable trace is empty.");

        string expected = @"<TestLog> Running test.
<CreateLog> M() was created by task ''.
<StateLog>.
<ActionLog> M() invoked action 'InitOnEntry' in state 'Init'.
<CreateLog> N() was created by M().
<StateLog>.
<DequeueLog> N() dequeued event 'E' in state 'Init'.
<ActionLog> N() invoked action 'Act' in state 'Init'.
<DequeueLog> M() dequeued event 'E' in state 'Init'.
<ActionLog> M() invoked action 'Act' in state 'Init'.
<ErrorLog> Reached test assertion.
<StackTrace> 
at Microsoft.Coyote.SystematicTesting.Tests.Runtime.CustomActorRuntimeLogTests.M.Act()
<StrategyLog> Found bug using 'dfs' strategy.
<StrategyLog> Testing statistics:
<StrategyLog> Found 1 bug.
<StrategyLog> Scheduling statistics:
<StrategyLog> Explored 1 schedule: 0 fair and 1 unfair.
<StrategyLog> Found 100.00% buggy schedules.";
        // Scrub the stack trace and non-deterministic values on both sides before comparing.
        string actual = engine.ReadableTrace.ToString();
        actual = RemoveStackTraceFromReport(actual);
        actual = RemoveNonDeterministicValuesFromReport(actual);
        expected = RemoveNonDeterministicValuesFromReport(expected);
        Assert.Equal(expected, actual);
    }
    catch (Exception ex)
    {
        // Surface any unexpected engine failure as a test failure with its stack trace.
        Assert.False(true, ex.Message + "\n" + ex.StackTrace);
    }
}
/// <summary>
/// Runs the test specified in the configuration.
/// </summary>
/// <param name="configuration">Configuration naming the assembly to analyze and test options.</param>
/// <returns>An exit code derived from the test report, or <c>ExitCode.Error</c> on failure.</returns>
private static ExitCode RunTest(Configuration configuration)
{
    try
    {
        Console.WriteLine($". Testing {configuration.AssemblyToBeAnalyzed}.");
        using TestingEngine engine = TestingEngine.Create(configuration);
        engine.Run();

        // Resolve where the report files for this assembly should be written.
        string directory = OutputFileManager.CreateOutputDirectory(configuration);
        string fileName = OutputFileManager.GetResolvedFileName(configuration.AssemblyToBeAnalyzed, directory);

        // Emit the test reports.
        Console.WriteLine($"... Emitting trace-related reports:");
        if (engine.TryEmitReports(directory, fileName, out IEnumerable<string> reportPaths))
        {
            foreach (var path in reportPaths)
            {
                Console.WriteLine($"..... Writing {path}.");
            }
        }
        else
        {
            Console.WriteLine($"..... No test reports available.");
        }

        // Emit the coverage reports (reuses the reportPaths out-variable from above).
        Console.WriteLine($"... Emitting coverage reports:");
        if (engine.TryEmitCoverageReports(directory, fileName, out reportPaths))
        {
            foreach (var path in reportPaths)
            {
                Console.WriteLine($"..... Writing {path}.");
            }
        }
        else
        {
            Console.WriteLine($"..... No coverage reports available.");
        }

        // Print the summary and timing, then map the report onto the process exit code.
        Console.WriteLine(engine.TestReport.GetText(configuration, "..."));
        Console.WriteLine($"... Elapsed {engine.Profiler.Results()} sec.");
        return (GetExitCodeFromTestReport(engine.TestReport));
    }
    catch (Exception ex)
    {
        // Failures are logged to the debug channel only; the caller sees a generic error code.
        IO.Debug.WriteLine(ex.Message);
        IO.Debug.WriteLine(ex.StackTrace);
        return (ExitCode.Error);
    }
}
public static void Main(string[] args)
{
    // Run 10 testing iterations of Test0.Execute.
    // Optional: increases verbosity level to see the Coyote runtime log.
    var configuration = Configuration.Create().WithTestingIterations(10);
    var engine = TestingEngine.Create(configuration, Test0.Execute);
    engine.Run();

    // Print the first bug report, if the tester found any.
    var bug = engine.TestReport.BugReports.FirstOrDefault();
    if (bug is not null)
    {
        Console.WriteLine(bug);
    }
}
/// <summary>
/// Replays an execution that is specified in the configuration.
/// </summary>
private static void ReplayTest()
{
    // Set some replay specific options.
    Configuration.SchedulingStrategy = "replay";
    Configuration.EnableColoredConsoleOutput = true;
    Configuration.DisableEnvironmentExit = false;

    // Load the configuration of the assembly to be replayed.
    LoadAssemblyConfiguration(Configuration.AssemblyToBeAnalyzed);

    Console.WriteLine($". Replaying {Configuration.ScheduleFile}");
    // FIX: dispose the engine (TestingEngine is IDisposable; the testing path
    // in this tool already uses a using declaration).
    using TestingEngine engine = TestingEngine.Create(Configuration);
    engine.Run();

    Console.WriteLine(engine.GetReport());
}
// Runs the given actor test with PCT exploration and activity coverage enabled,
// then emits the resulting trace files into the given output directory.
private static void RunTest(Action<IActorRuntime> test, string outdir)
{
    Directory.CreateDirectory(outdir);
    Configuration conf = Configuration.Create().WithTestingIterations(10).WithPCTStrategy(false, 10).WithActivityCoverageEnabled();

    // FIX: dispose the engine (TestingEngine is IDisposable); a using declaration
    // keeps it alive through the trace emission below and disposes it on exit.
    using TestingEngine engine = TestingEngine.Create(conf, test);
    engine.Run();

    // NOTE(review): presumably gives background trace/coverage writers time to
    // finish before emission — confirm whether this delay is still required.
    System.Threading.Thread.Sleep(2000);

    foreach (var name in engine.TryEmitTraces(outdir, "foo"))
    {
        Console.WriteLine(name);
    }

    Console.WriteLine("Test complete");
}
/// <summary>
/// Invoke the Coyote systematic testing engine to run the specified test multiple iterations,
/// each iteration exploring potentially different interleavings using some underlying program
/// exploration strategy (by default a uniform probabilistic strategy).
/// </summary>
/// <param name="test">The concurrency unit test to explore.</param>
/// <param name="reproducibleScheduleFilePath">Optional path to a recorded schedule; when given,
/// the engine replays that exact interleaving instead of exploring.</param>
/// <remarks>
/// Learn more in our documentation: https://microsoft.github.io/coyote/how-to/unit-testing
/// </remarks>
private static void RunCoyoteTest(Func<Task> test, string reproducibleScheduleFilePath = null)
{
    // Configuration for how to run a concurrency unit test with Coyote.
    // This configuration will run the test 1000 times exploring different paths each time.
    var config = Configuration.Create().WithTestingIterations(1000);

    if (reproducibleScheduleFilePath != null)
    {
        // Replay mode: drive the scheduler with the previously recorded trace.
        var trace = File.ReadAllText(reproducibleScheduleFilePath);
        config = config.WithReplayStrategy(trace);
    }

    var testingEngine = TestingEngine.Create(config, test);

    try
    {
        testingEngine.Run();

        // Build the failure message up front: report text, the seed (needed to
        // reproduce the run), and every bug report found.
        string assertionText = testingEngine.TestReport.GetText(config);
        assertionText += $"{Environment.NewLine} Random Generator Seed: " +
            $"{testingEngine.TestReport.Configuration.RandomGeneratorSeed}{Environment.NewLine}";
        foreach (var bugReport in testingEngine.TestReport.BugReports)
        {
            assertionText += $"{Environment.NewLine}" + "Bug Report: " + bugReport.ToString(CultureInfo.InvariantCulture);
        }

        if (testingEngine.TestReport.NumOfFoundBugs > 0)
        {
            // Persist the reproducible trace so the bug can be replayed later,
            // and point at it in the assertion message.
            var timeStamp = DateTime.UtcNow.ToString("yyyy-MM-ddTHH-mm-ssZ", CultureInfo.InvariantCulture);
            var reproducibleTraceFileName = $"buggy-{timeStamp}.schedule";
            assertionText += Environment.NewLine + "Reproducible trace which leads to the bug can be found at " +
                $"{Path.Combine(Directory.GetCurrentDirectory(), reproducibleTraceFileName)}";

            File.WriteAllText(reproducibleTraceFileName, testingEngine.ReproducibleTrace);
        }

        Assert.True(testingEngine.TestReport.NumOfFoundBugs == 0, assertionText);
    }
    finally
    {
        // Always stop the engine, even when the assertion above throws.
        testingEngine.Stop();
    }
}
/// <summary>
/// Invoke the Coyote systematic testing engine to run the specified test multiple iterations,
/// each iteration exploring potentially different interleavings using some underlying program
/// exploration strategy (by default a uniform probabilistic strategy).
/// </summary>
/// <remarks>
/// Learn more in our documentation: https://microsoft.github.io/coyote/how-to/unit-testing
/// </remarks>
private static void RunSystematicTest(Func<Task> test, string testName)
{
    Console.WriteLine($"Starting systematic test...");

    // Change WithTestingIterations to tweak the number of iterations.
    var configuration = Configuration.Create()
        .WithTestingIterations(1000)
        .WithVerbosityEnabled();

    var testingEngine = TestingEngine.Create(configuration, test);
    testingEngine.Run();

    Console.WriteLine($"Done testing. Found {testingEngine.TestReport.NumOfFoundBugs} bugs.");

    var report = testingEngine.TestReport;
    if (report.NumOfFoundBugs > 0)
    {
        // Persist the reproducible trace and fail with replay instructions.
        var firstBug = report.BugReports.First();
        var tracePath = WriteReproducibleTrace(testingEngine.ReproducibleTrace, testName);
        Assert.Fail(
            "Found bug: {0}\n Replay trace using Coyote by running:\n TraceReplayer.exe {1} {2}",
            firstBug,
            testName,
            tracePath);
    }
}
// Verifies that a state machine's finalizer runs once the testing engine
// has released all references to it and a full GC has completed.
public void TestStateMachineFinalizerInvoked()
{
    var gcTracker = new GCTracker();

    var configuration = this.GetConfiguration().WithTestingIterations(2);
    TestingEngine engine = TestingEngine.Create(configuration, (IActorRuntime runtime) =>
    {
        runtime.CreateActor(typeof(M), new SetupEvent(gcTracker));
    });

    engine.Run();

    // Force a full GC so pending finalizers (including M's) are executed.
    GC.Collect(2);
    GC.WaitForFullGCComplete();
    GC.WaitForPendingFinalizers();

    Assert.True(gcTracker.IsFinalized, "Finalizer was not called.");
}
// Verifies the readable trace produced for a buggy task-based test under the
// 'random' strategy with a fixed seed (seed 0 keeps the schedule reproducible).
public void TestCustomTaskRuntimeLog()
{
    var config = this.GetConfiguration().WithRandomGeneratorSeed(0);
    using TestingEngine engine = TestingEngine.Create(config, this.RunAsync);
    try
    {
        engine.Run();

        // Exactly one bug (the test assertion in RunAsync) is expected.
        var numErrors = engine.TestReport.NumOfFoundBugs;
        Assert.True(numErrors is 1, GetBugReport(engine));
        Assert.True(engine.ReadableTrace != null, "Readable trace is null.");
        Assert.True(engine.ReadableTrace.Length > 0, "Readable trace is empty.");

        string expected = @"<TestLog> Runtime '' started test on thread ''.
Task '' is running.
Task '' completed.
Task '' is running.
Task '' completed.
<ErrorLog> Reached test assertion.
<TestLog> Exploration finished [found a bug using the 'random' strategy].
<StrategyLog> Testing statistics:
<StrategyLog> Found 1 bug.
<StrategyLog> Scheduling statistics:
<StrategyLog> Explored 1 schedule: 1 fair and 0 unfair.
<StrategyLog> Found 100.00% buggy schedules.
<StrategyLog> Controlled 3 operations: 3 (), 3 (), 3 ().
<StrategyLog> Degree of concurrency: 2 (), 2 (), 2 ().
<StrategyLog> Number of scheduling decisions in fair terminating schedules: 4 (), 4 (), 4 ().";
        // Scrub non-deterministic values (ids, thread names) on both sides before comparing.
        string actual = engine.ReadableTrace.ToString();
        actual = actual.RemoveNonDeterministicValues();
        expected = expected.RemoveNonDeterministicValues();
        Assert.Equal(expected, actual);
    }
    catch (Exception ex)
    {
        // Surface any unexpected engine failure as a test failure with its stack trace.
        Assert.False(true, ex.Message + "\n" + ex.StackTrace);
    }
}
// Runs the given test under Coyote for 10 iterations and fails the unit test
// if the systematic exploration finds any bug.
private void RunSystematicTest(Func<Task> test, string testName)
{
    _output.WriteLine("Start testing " + testName);

    var configuration = Configuration.Create()
        .WithTestingIterations(10)
        .WithVerbosityEnabled();

    var testingEngine = TestingEngine.Create(configuration, test);
    testingEngine.Run();

    Console.WriteLine($"Done testing. Found {testingEngine.TestReport.NumOfFoundBugs} bugs.");

    var report = testingEngine.TestReport;
    if (report.NumOfFoundBugs > 0)
    {
        var error = report.BugReports.First();
        Assert.True(false, $"Found bug: {error}");
    }
}
// Verifies the readable trace produced for a buggy task-based test under the
// 'random' strategy with a fixed seed (seed 0 keeps the schedule reproducible).
public void TestCustomTaskRuntimeLog()
{
    var config = GetConfiguration().WithRandomGeneratorSeed(0);
    TestingEngine engine = TestingEngine.Create(config, this.RunAsync);
    try
    {
        engine.Run();

        // Exactly one bug (the test assertion in RunAsync) is expected.
        var numErrors = engine.TestReport.NumOfFoundBugs;
        Assert.True(numErrors == 1, GetBugReport(engine));
        Assert.True(engine.ReadableTrace != null, "Readable trace is null.");
        Assert.True(engine.ReadableTrace.Length > 0, "Readable trace is empty.");

        string expected = @"<TestLog> Running test.
Task '' is running.
Task '' completed.
Task '' is running.
Task '' completed.
<ErrorLog> Reached test assertion.
<StackTrace> 
<StrategyLog> Found bug using 'random' strategy.
<StrategyLog> Testing statistics:
<StrategyLog> Found 1 bug.
<StrategyLog> Scheduling statistics:
<StrategyLog> Explored 1 schedule: 1 fair and 0 unfair.
<StrategyLog> Found 100.00% buggy schedules.
<StrategyLog> Number of scheduling points in fair terminating schedules: 9 (), 9 (), 9 ().";
        // Strip the stack trace (everything up to the <StrategyLog> marker) and
        // scrub non-deterministic values on both sides before comparing.
        string actual = engine.ReadableTrace.ToString();
        actual = actual.RemoveStackTrace("<StrategyLog>");
        actual = actual.RemoveNonDeterministicValues();
        expected = expected.RemoveNonDeterministicValues();
        Assert.Equal(expected, actual);
    }
    catch (Exception ex)
    {
        // Surface any unexpected engine failure as a test failure with its stack trace.
        Assert.False(true, ex.Message + "\n" + ex.StackTrace);
    }
}
// Verifies the readable trace produced for a buggy task-based test under
// deterministic DFS exploration.
public void TestCustomTaskRuntimeLog()
{
    TestingEngine engine = TestingEngine.Create(GetConfiguration().WithDFSStrategy(), this.RunAsync);
    try
    {
        engine.Run();

        // Exactly one bug (the test assertion in RunAsync) is expected.
        var numErrors = engine.TestReport.NumOfFoundBugs;
        Assert.True(numErrors == 1, GetBugReport(engine));
        Assert.True(engine.ReadableTrace != null, "Readable trace is null.");
        Assert.True(engine.ReadableTrace.Length > 0, "Readable trace is empty.");

        string expected = @"<TestLog> Running test.
Task '' is running.
Task '' completed.
Task '' is running.
Task '' completed.
<ErrorLog> Reached test assertion.
<StackTrace> 
<StrategyLog> Found bug using 'dfs' strategy.
<StrategyLog> Testing statistics:
<StrategyLog> Found 1 bug.
<StrategyLog> Scheduling statistics:
<StrategyLog> Explored 1 schedule: 0 fair and 1 unfair.
<StrategyLog> Found 100.00% buggy schedules.";
        // Strip the stack trace (everything up to the <StrategyLog> marker) and
        // scrub non-deterministic values on both sides before comparing.
        string actual = engine.ReadableTrace.ToString();
        actual = RemoveStackTraceFromReport(actual, "<StrategyLog>");
        actual = RemoveNonDeterministicValuesFromReport(actual);
        expected = RemoveNonDeterministicValuesFromReport(expected);
        Assert.Equal(expected, actual);
    }
    catch (Exception ex)
    {
        // Surface any unexpected engine failure as a test failure with its stack trace.
        Assert.False(true, ex.Message + "\n" + ex.StackTrace);
    }
}
// Verifies that output written by the test and the engine is routed to a
// custom logger installed on the engine (three fixed-seed iterations).
public void TestCustomLogger()
{
    // Capture all engine output in memory so it can be compared to the oracle below.
    InMemoryLogger log = new InMemoryLogger();

    var config = this.GetConfiguration().WithTestingIterations(3).WithRandomGeneratorSeed(0);
    using TestingEngine engine = TestingEngine.Create(config, (ICoyoteRuntime runtime) =>
    {
        runtime.Logger.WriteLine("Hello world!");
    });

    engine.Logger = log;
    engine.Run();

    var result = log.ToString();
    result = result.RemoveNonDeterministicValues();

    // Expected output for 3 iterations; runtime ids and thread names are scrubbed
    // by RemoveNonDeterministicValues on both sides.
    var expected = @"... Setting up the test:
..... Using the random[seed:0] exploration strategy.
... Running test iterations:
..... Iteration #1
<TestLog> Runtime '' started test on thread ''.
Hello world!
<TestLog> Exploration finished [reached the end of the test method].
..... Iteration #2
<TestLog> Runtime '' started test on thread ''.
Hello world!
<TestLog> Exploration finished [reached the end of the test method].
..... Iteration #3
<TestLog> Runtime '' started test on thread ''.
Hello world!
<TestLog> Exploration finished [reached the end of the test method].
";
    expected = expected.RemoveNonDeterministicValues();
    Assert.Equal(expected, result);
}
// Runs the given async test under Coyote's random exploration strategy for
// 50 iterations and fails with a combined report if any bugs are found.
public static void RunTestInCoyote(Func<Task> toRun)
{
    var configuration = Configuration.Create().WithTestingIterations(50).WithRandomStrategy();
    var engine = TestingEngine.Create(configuration, toRun);
    engine.Run();

    var report = engine.TestReport;
    int bugCount = report.BugReports.Count;
    if (bugCount > 0)
    {
        // Copy the bug descriptions out of the report set and join them into one message.
        var reports = new string[bugCount];
        report.BugReports.CopyTo(reports);

        var message = $"Test failed. Found {bugCount} bugs. Errors: {string.Join(",", reports)}" + "\n";
        message += report.GetText(configuration);
        Assert.True(false, message);
    }

    Console.WriteLine("Test passed");
}