public void TestWithMarkers()
{
    // Verifies that a Begin/Mark/Mark/End sequence produces exactly the expected
    // log lines, with cumulative timings at each Mark and at End.
    Profiler.Reset();
    const int timeToWait = 10;

    var watcher = ExpectLog(new List<MatchMessageDelegate>()
    {
        MessageStartWith("[Profiler] #0: Begin: TestProfiler.Test"),
        MessageStartWith("[Profiler] #0: Mark: TestProfiler.Test", timeToWait),
        MessageStartWith("[Profiler] #0: Mark: TestProfiler.Test", timeToWait * 2),
        MessageStartWith("[Profiler] #0: End: TestProfiler.Test", timeToWait * 2),
    });

    Profiler.EnableAll();
    using (var scope = Profiler.Begin(TestKey))
    {
        Utilities.Sleep(timeToWait);
        scope.Mark();
        Utilities.Sleep(timeToWait);
        scope.Mark();
    }

    // Explicit Finish() (rather than a using block) so the expectations are
    // checked after the profiling scope has ended.
    watcher.Finish();
}
public void AddMediumMulti()
{
    // Records one sample, waits a short interval, then records another.
    // Expects: first bucket ends with 1 sample, second with 2, third with 0.
    var profiler = new Profiler(Intervals);
    profiler.Add("Test");
    profiler.Reset();

    //****************************************
    using (profiler.Profile("Test"))
    {
        Thread.Yield();
    }

    WaitFor(ShortInterval);

    using (profiler.Profile("Test"))
    {
        Thread.Yield();
    }
    //****************************************

    var results = profiler.Get("Test");
    Assert.AreEqual(1, results[0].Samples);
    Assert.AreEqual(2, results[1].Samples);
    Assert.AreEqual(0, results[2].Samples);
}
public void AllTime2()
{
    // With a single TimeSpan.Zero interval, both recorded samples are expected
    // to accumulate into the one and only bucket.
    var profiler = new Profiler(new[] { TimeSpan.Zero });
    profiler.Add("Test");
    profiler.Reset();

    //****************************************
    using (profiler.Profile("Test"))
    {
        Thread.Yield();
    }
    using (profiler.Profile("Test"))
    {
        Thread.Yield();
    }
    //****************************************

    var onlyBucket = profiler.Get("Test")[0];
    Assert.AreEqual(2, onlyBucket.Samples);
}
public void AddLong()
{
    // Records a single sample, then waits long enough that it only shows up in
    // the last (long-interval) bucket; the shorter buckets must report zero.
    var Profile = new Profiler(Intervals);
    Profile.Add("Test");
    Profile.Reset();
    //****************************************
    using (Profile.Profile("Test"))
    {
        Thread.Yield();
    }
    // Wait for the long interval (plus a short-interval margin) to tick over.
    // NOTE(review): the original comment said "40ms for the medium interval",
    // which contradicts the LongInterval used here — presumably a stale comment.
    WaitFor(LongInterval + ShortInterval);
    //****************************************
    var Results = Profile.Get("Test");
    Assert.AreEqual(0, Results[0].Samples);
    Assert.AreEqual(0, Results[1].Samples);
    Assert.AreEqual(1, Results[2].Samples);
}
public void TestSimpleNotEnabled()
{
    // With no profiling key enabled, a Begin/End pair must emit no log output —
    // the expectation list is intentionally empty.
    Profiler.Reset();
    using (ExpectLog(new List<MatchMessageDelegate>()))
    using (Profiler.Begin(TestKey))
    {
        Utilities.Sleep(100);
    }
}
public Position AI()
{
    // Runs a NegaMax search over the full depth and logs wall-clock cost plus
    // per-function timings/counts collected by the custom profiler.
    profiler.Reset();
    cut_count = 0;
    search_count = 0;

    float startTime = Time.realtimeSinceStartup;
    NegaMax(true, DEPTH, -99999999, 99999999);
    float elapsed = Time.realtimeSinceStartup - startTime;

    Debug.LogFormat("cost time {0} s", elapsed);
    Debug.LogFormat("get blank list {0} s, count {1}",
        profiler.profiler[(int)ProfilerFunction.GET_BLANK_LIST],
        profiler.count[(int)ProfilerFunction.GET_BLANK_LIST]);
    Debug.LogFormat("cal score {0} s, count {1}",
        profiler.profiler[(int)ProfilerFunction.CAL_SCORE],
        profiler.count[(int)ProfilerFunction.CAL_SCORE]);
    Debug.LogFormat("evaluate cost {0} s, count {1}",
        profiler.profiler[(int)ProfilerFunction.EVALUATE],
        profiler.count[(int)ProfilerFunction.EVALUATE]);

    // next_point is presumably set by NegaMax as a side effect — confirm against its implementation.
    return next_point;
}
public void TestSimpleEnabled()
{
    // With the key enabled, a plain Begin/End pair must emit exactly one Begin
    // message and one End message carrying the elapsed time.
    Profiler.Reset();
    const int timeToWait = 200;

    var expectedMessages = new List<MatchMessageDelegate>()
    {
        MessageStartWith("[Profiler] #0: Begin: TestProfiler.Test"),
        MessageStartWith("[Profiler] #0: End: TestProfiler.Test", timeToWait),
    };

    using (ExpectLog(expectedMessages))
    {
        Profiler.Enable(TestKey);
        using (Profiler.Begin(TestKey))
        {
            Utilities.Sleep(timeToWait);
        }
    }
}
public void TestEvolution()
{
    // Runs 1000 generations of elitist evolution on a seeded RNG, logging
    // per-generation stats to stats.csv and frame profiles for later analysis.
    Profiler.Reset();

    var random = new SystemRandom(42);
    var configuration = new ZeldaConfiguration(random);
    var factory = new ZeldaGenomeFactory(configuration);

    var initialVariables = new VariableAssignment();
    var environment = new ZeldaEnvironment(new[] { initialVariables }, 10000);

    var writer = File.CreateText("stats.csv");
    try
    {
        var evolution = new ElitistEvolution(200, 100, factory, environment, ZeldaIndividual.NumAttributes, writer);
        var genomes = evolution.Initialize();

        // TODO Jonas: replace fixed weight multirank optimization by dynamic randomized weighting
        // i.e. in some generations prefer some attribute over others

        // evolve
        for (int i = 0; i < 1000; i++)
        {
            Profiler.BeginFrame();
            Console.WriteLine("gen " + i);
            genomes = evolution.Evolve(genomes, random);
            Profiler.EndFrame();
        }

        var best = (ZeldaIndividual)evolution.GetBest(new TestComparer());
        Console.WriteLine(best);
        var crawler = best.Crawler;
        var builder = new DotBuilder();
        crawler.Express(builder);
    }
    finally
    {
        // FIX: the original only called writer.Close() on the success path, so an
        // exception during the evolve loop leaked the StreamWriter (and could
        // leave stats.csv unflushed). try/finally guarantees disposal.
        writer.Close();
    }

    // TODO: output genome to puzzle unit test (puzzle building statements)
    Profiler.ExportToUnityProfileAnalyzer("w:\\EvolutionTest-TestEvolution.pdata");
}
public void CreateIndividual()
{
    // Builds one genome from a seeded RNG, evaluates it in a fresh environment,
    // and exports the captured frame profile for analysis.
    Profiler.Reset();
    Profiler.BeginFrame();

    var random = new SystemRandom(42);
    var configuration = new ZeldaConfiguration(random);
    var factory = new ZeldaGenomeFactory(configuration);

    var initialVariables = new VariableAssignment();
    var environment = new ZeldaEnvironment(new[] { initialVariables }, 10000);

    var genome = factory.CreateGenome();
    var individual = environment.Evaluate(genome);
    Console.WriteLine("test: " + individual);

    Profiler.EndFrame();
    Profiler.ExportToUnityProfileAnalyzer("w:\\EvolutionTest-CreateIndividual.pdata");
}
// NOTE(review): method name has a typo ("Wit" → "With") — kept unchanged so
// existing references and test discovery are not broken.
public void TestWitAttributes()
{
    // Verifies that an attribute set via SetAttribute shows up in the Mark log
    // message, and that Begin/End still report the elapsed time.
    Profiler.Reset();
    const int timeToWait = 100;

    var expectedMessages = new List<MatchMessageDelegate>()
    {
        MessageStartWith("[Profiler] #0: Begin: TestProfiler.Test"),
        MessageStartWith("[Profiler] #0: Mark: TestProfiler.Test", message => message.Contains("MyAttribute")),
        MessageStartWith("[Profiler] #0: End: TestProfiler.Test", timeToWait),
    };

    using (ExpectLog(expectedMessages))
    {
        Profiler.EnableAll();
        using (var scope = Profiler.Begin(TestKey))
        {
            scope.SetAttribute("MyAttribute", 5);
            Utilities.Sleep(timeToWait);
            scope.Mark();
        }
    }
}
public void CompareProfilingWaysCheapOperation()
{
    // Compares four ways of timing a cheap (no-op) call: PerformanceTest without
    // warm-up, PerformanceTest with warm-up, direct Stopwatch measurement, and
    // the Profiler.Measure API. The GC.Collect sequences between sections keep
    // collector noise out of each measurement.
    const int iterations = 10000000;

    // 1. Test without warm up
    var test = new PerformanceTest<int>
        {
            TestName = "DoTest performance without warm up",
            Iterations = iterations,
            WarmUp = false,
            Repeat = 5,
        }
        .AddCase(DoNothing, nameof(DoNothing));
    test.DoTest().DumpResults(Console.Out);

    // 2. Test with warm up
    // NOTE(review): the original comment said "without warm up" here, but the
    // code sets WarmUp = true — the comment was stale.
    test.TestName = "DoTest performance with warm up";
    test.WarmUp = true;
    test.DoTest().DumpResults(Console.Out);

    // 3. Direct test
    Console.WriteLine("===========Direct measurement test===============");
    GC.Collect();
    GC.WaitForPendingFinalizers();
    GC.Collect();

    // warming up
    for (int i = 0; i < iterations; i++)
    {
        DoNothing();
    }

    Stopwatch watch = new Stopwatch();
    watch.Start();
    for (int i = 0; i < iterations; i++)
    {
        DoNothing();
    }
    watch.Stop();
    Console.WriteLine("Total Time: {0:N2} ms", watch.Elapsed.TotalMilliseconds);
    Console.WriteLine();
    Console.WriteLine("The difference of DoTest and Direct measurement is the overhead cost of using a delegate in DoTest.");
    Console.WriteLine();

    // 4. Profiler test
    Console.WriteLine("===========Profiler test===============");
    Profiler.Reset();
    Profiler.AutoSaveResults = false;

    GC.Collect();
    GC.WaitForPendingFinalizers();
    GC.Collect();
    // Warm-up pass through the Measure machinery itself.
    for (int i = 0; i < iterations; i++)
    {
        using (Profiler.Measure("ProfilerTest", "WarmingUp")) { }
    }

    GC.Collect();
    GC.WaitForPendingFinalizers();
    GC.Collect();
    // Empty measure bodies: captures the per-call overhead of Measure alone.
    for (int i = 0; i < iterations; i++)
    {
        using (Profiler.Measure("ProfilerTest", "SelfCostWithoutOp")) { }
    }

    GC.Collect();
    GC.WaitForPendingFinalizers();
    GC.Collect();
    // Outer measure wrapping per-call measures: total including profiler cost.
    using (Profiler.Measure("ProfilerTest", "TotalWithSubMeasures"))
    {
        for (int i = 0; i < iterations; i++)
        {
            using (Profiler.Measure("ProfilerTest", "DoNothingCall"))
                DoNothing();
        }
    }

    GC.Collect();
    GC.WaitForPendingFinalizers();
    GC.Collect();
    // Single measure around the bare loop: comparable to the direct Stopwatch run.
    using (Profiler.Measure("ProfilerTest", "PureTotal"))
    {
        for (int i = 0; i < iterations; i++)
        {
            DoNothing();
        }
    }

    foreach (IMeasureItem item in Profiler.GetMeasurementResults("ProfilerTest"))
    {
        Console.WriteLine("{0}: {1:N2} ms", item.Operation, item.TotalTime.TotalMilliseconds);
    }
    Console.WriteLine();
    Console.WriteLine("PureTotal should be nearly the same as Direct measurement, and PureTotal should be DoNothingCall - SelfCostWithoutOp");
    Console.WriteLine("TotalWithSubMeasures - DoNothingCall = the cost of the Profiler itself (SelfCostWithoutOp does not contain the administration costs of the results)");
}
public void CompareProfilingWaysExpensiveOperation()
{
    // Same four-way timing comparison as the cheap-operation test, but with a
    // costly DoSomething() call and fewer iterations, to show that measurement
    // overhead becomes negligible when the measured operation dominates.
    const int iterations = 10000;

    var test = new PerformanceTest<int>
        {
            TestName = "DoTest performance without warm up",
            Iterations = iterations,
            WarmUp = false,
            Repeat = 5,
        }
        .AddCase(DoSomething, nameof(DoSomething));

    // 1. DoTest without warmup
    test.DoTest().DumpResults(Console.Out);

    test.TestName = "DoTest performance with warmup";
    test.WarmUp = true;

    // 2. DoTest with warmup
    test.DoTest().DumpResults(Console.Out);

    // 3. Direct test
    Console.WriteLine("===========Direct measurement test===============");
    GC.Collect();
    GC.WaitForPendingFinalizers();
    GC.Collect();

    // warming up
    for (int i = 0; i < iterations; i++)
    {
        DoSomething();
    }

    Stopwatch watch = new Stopwatch();
    watch.Start();
    for (int i = 0; i < iterations; i++)
    {
        DoSomething();
    }
    watch.Stop();
    Console.WriteLine("Total Time: {0:N2} ms", watch.Elapsed.TotalMilliseconds);
    Console.WriteLine();

    // 4. Profiler test
    Console.WriteLine("===========Profiler test===============");
    Profiler.AutoSaveResults = false;
    Profiler.Reset();

    GC.Collect();
    GC.WaitForPendingFinalizers();
    GC.Collect();
    // Warm-up pass through the Measure machinery itself.
    for (int i = 0; i < iterations; i++)
    {
        using (Profiler.Measure("ProfilerTest", "WarmingUp")) { }
    }

    GC.Collect();
    GC.WaitForPendingFinalizers();
    GC.Collect();
    // Empty measure bodies: captures the per-call overhead of Measure alone.
    for (int i = 0; i < iterations; i++)
    {
        using (Profiler.Measure("ProfilerTest", "SelfCostWithoutOp")) { }
    }

    GC.Collect();
    GC.WaitForPendingFinalizers();
    GC.Collect();
    // Outer measure wrapping per-call measures: total including profiler cost.
    using (Profiler.Measure("ProfilerTest", "TotalWithSubMeasures"))
    {
        for (int i = 0; i < iterations; i++)
        {
            using (Profiler.Measure("ProfilerTest", "DoSomethingCall"))
                DoSomething();
        }
    }

    GC.Collect();
    GC.WaitForPendingFinalizers();
    GC.Collect();
    // Single measure around the bare loop: comparable to the direct Stopwatch run.
    using (Profiler.Measure("ProfilerTest", "PureTotal"))
    {
        for (int i = 0; i < iterations; i++)
        {
            DoSomething();
        }
    }

    foreach (IMeasureItem item in Profiler.GetMeasurementResults("ProfilerTest"))
    {
        Console.WriteLine("{0}: {1:N2} ms", item.Operation, item.TotalTime.TotalMilliseconds);
    }
    Console.WriteLine();
    Console.WriteLine("In case of a costly operation the DirectTotal < PureTotal < DoTest < DoSomethingCall < TotalWithSubMeasures should have nearly the same value.");
}