Example #1
        static void OutputResults(object sender, BenchmarkOptions benchmarkOptions, RecordReaderBenchmarkArguments benchmarkArgs, long iterationIndex, BenchmarkResult result)
        {
            var     fi   = new System.IO.FileInfo(benchmarkArgs.Path);
            decimal rate = ((decimal)fi.Length / 1024 / 1024) / ((decimal)result.Timer.ElapsedMilliseconds / 1000);

            Console.WriteLine("{0}: {1,25}: {2,10} ticks, {3,10} bytes, {4,4} gc0, {5,4} gc1, {6,4} gc2, {7,6:F} MB/s", iterationIndex, result.Name, result.Timer.ElapsedTicks, result.UsedMemory, result.GC0, result.GC1, result.GC2, rate);
            Trace.WriteLine(string.Format("{0},{1},{2},{3},{4},{5},{6},{7:F}", iterationIndex, result.Name, result.Timer.ElapsedTicks, result.UsedMemory, result.GC0, result.GC1, result.GC2, rate));
        }
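The rate printed above is simply the file size in megabytes divided by elapsed seconds. A minimal standalone sketch of that calculation, assuming only the file path and a Stopwatch (the helper name is illustrative, not part of the benchmark code):

        // Sketch: throughput in MB/s from a file size and a stopwatch,
        // mirroring the rate expression used by OutputResults above.
        static decimal ComputeRateMBPerSecond(string path, System.Diagnostics.Stopwatch timer)
        {
            var     fi        = new System.IO.FileInfo(path);
            decimal megabytes = (decimal)fi.Length / 1024 / 1024;
            decimal seconds   = (decimal)timer.ElapsedMilliseconds / 1000;
            return seconds > 0 ? megabytes / seconds : 0;
        }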
Example #2
        public static void DoTest()
        {
            var option = new BenchmarkOptions {
                DllDir = "c://", TxNumber = 300, GroupRange = new[] { 1, 8 }
            };

            Console.WriteLine(option.Print());
        }
Example #3
        private static BenchmarkResultHandler CreateResultHandler(BenchmarkOptions options)
        {
            BenchmarkResultHandler handler = new ConsoleResultHandler(options.DisplayRawData);

            if (options.XmlFile != null)
            {
                var xmlHandler = new XmlResultHandler(options.XmlFile);
                handler = new CompositeResultHandler(new[] { handler, xmlHandler });
            }
            return(handler);
        }
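CreateResultHandler wraps the console handler and an optional XML handler behind a single CompositeResultHandler. A hypothetical sketch of that composite pattern, using invented names (IResultSink and Report are not the project's actual API):

        // Hypothetical composite sketch; the real BenchmarkResultHandler API may differ.
        interface IResultSink
        {
            void Report(string message);
        }

        sealed class CompositeSink : IResultSink
        {
            private readonly IResultSink[] _sinks;

            public CompositeSink(params IResultSink[] sinks)
            {
                _sinks = sinks;
            }

            // Fan the same report out to every wrapped sink.
            public void Report(string message)
            {
                foreach (var sink in _sinks)
                {
                    sink.Report(message);
                }
            }
        }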
Example #4
        IEnumerable <BenchmarkTaskBase> AddPIETests(FileReference InProjectFile, BenchmarkOptions InOptions)
        {
            if (InProjectFile == null)
            {
                return(Enumerable.Empty <BenchmarkTaskBase>());
            }

            List <BenchmarkTaskBase> NewTasks = new List <BenchmarkTaskBase>();

            string DefaultExtraArgs = InOptions.PIEArgs;

            if (!string.IsNullOrEmpty(InOptions.MapName))
            {
                DefaultExtraArgs += " -map=" + InOptions.MapName;
            }

            if (InOptions.DoPIETests)
            {
                // if no options assume warm
                if (InOptions.DDCOptions == DDCTaskOptions.None || InOptions.DDCOptions.HasFlag(DDCTaskOptions.WarmDDC))
                {
                    NewTasks.Add(new BenchmarkRunEditorTask(InProjectFile, DDCTaskOptions.WarmDDC, DefaultExtraArgs));
                }

                // hot ddc
                if (InOptions.DDCOptions.HasFlag(DDCTaskOptions.HotDDC))
                {
                    NewTasks.Add(new BenchmarkRunEditorTask(InProjectFile, DDCTaskOptions.HotDDC, DefaultExtraArgs));
                }

                // cold ddc
                if (InOptions.DDCOptions.HasFlag(DDCTaskOptions.ColdDDC))
                {
                    NewTasks.Add(new BenchmarkRunEditorTask(InProjectFile, DDCTaskOptions.ColdDDC, DefaultExtraArgs));
                }

                // no shaders in the ddc
                if (InOptions.DDCOptions.HasFlag(DDCTaskOptions.NoShaderDDC))
                {
                    NewTasks.Add(new BenchmarkRunEditorTask(InProjectFile, DDCTaskOptions.NoShaderDDC, DefaultExtraArgs));
                }

                // no ddc!
                if (InOptions.DDCOptions.HasFlag(DDCTaskOptions.NoSharedDDC))
                {
                    NewTasks.Add(new BenchmarkRunEditorTask(InProjectFile, DDCTaskOptions.NoSharedDDC, DefaultExtraArgs));
                }
            }

            return(NewTasks);
        }
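The per-flag if blocks above can also be expressed as a loop over the DDC variants. A hedged alternative sketch, reusing the names from the method above and assuming DDCTaskOptions is a [Flags] enum (which the HasFlag calls imply); the None/WarmDDC default case would still need its separate check:

            // Sketch: data-driven variant of the per-flag checks above.
            var ddcVariants = new[]
            {
                DDCTaskOptions.HotDDC, DDCTaskOptions.ColdDDC,
                DDCTaskOptions.NoShaderDDC, DDCTaskOptions.NoSharedDDC
            };

            foreach (var variant in ddcVariants)
            {
                if (InOptions.DDCOptions.HasFlag(variant))
                {
                    NewTasks.Add(new BenchmarkRunEditorTask(InProjectFile, variant, DefaultExtraArgs));
                }
            }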
Example #5
        private static int Main(string[] args)
        {
            var options = BenchmarkOptions.FromCommandLine(args);

            // Help screen / error
            if (options == null)
            {
                return(1);
            }
            var handler = CreateResultHandler(options);
            var runner  = new BenchmarkRunner(options, handler);

            runner.RunTests();
            return(0);
        }
Example #6
        protected override void Run()
        {
            var graphs  = GraphLoader.GetRandomGraphsVariety(10);
            var options = new BenchmarkOptions()
            {
                EarlyStopTime = 5000,
            };

            var scenarios = new List <BenchmarkScenario <int> >()
            {
                // GetNonCorridorScenario(graphs),
                GetCorridorScenario(graphs),
            };

            var scenarioGroup = new MinimumDistanceScenario().GetScenario(graphs, new MinimumDistanceScenario.Options());

            var generators = new List <ILevelGeneratorFactory <int> >()
            {
                GetNewGenerator <int>(options),
                GetOldGenerator <int>(options),
                GetNewGenerator <int>(options),
                GetOldGenerator <int>(options),
                //GetNewGenerator<int>(options, true),
                //GetOldGenerator<int>(options, true),

                //GetNewGenerator<int>(options),
                //GetNewGenerator<int>(options, optimizeCorridorConstraints: true, name: "CorCons"),
                //GetNewGenerator<int>(options),
                //GetNewGenerator<int>(options, optimizeCorridorConstraints: true, name: "CorCons"),

                // GetNewGenerator<int>(options),
                // GetBeforeMasterThesisGenerator<int>(options),
                // GetOldGenerator<int>(options),
                // GetOldGenerator<int>(options, true),
                // GetNewGenerator<int>(options),
            };

            // LoadFromFolder<int>();
            RunBenchmark(scenarios, generators);
            // RunBenchmark(scenarioGroup, generators);
        }
Example #7
        protected override void Run()
        {
            var graphs  = GraphLoader.GetRandomGraphsVariety(20);
            var options = new BenchmarkOptions()
            {
                EarlyStopTime = 5000,
            };

            var scenarios = new List <BenchmarkScenario <int> >()
            {
                GetDeformedScenario(graphs),
                GetNormalScenario(graphs),
            };

            var generators = new List <ILevelGeneratorFactory <int> >()
            {
                GetOldGenerator <int>(options),
                GetNewGenerator <int>(options),
            };

            RunBenchmark(scenarios, generators);
        }
Example #8
        internal static void DoBenchmark(BenchmarkOptions options)
        {
            Dictionary <string, double> checksumTimes = new Dictionary <string, double>();

            Core.Benchmark.InitProgressEvent   += Progress.InitProgress;
            Core.Benchmark.UpdateProgressEvent += Progress.UpdateProgress;
            Core.Benchmark.EndProgressEvent    += Progress.EndProgress;

            BenchmarkResults results = Core.Benchmark.Do(options.BufferSize * 1024 * 1024, options.BlockSize);

            DicConsole.WriteLine("Took {0} seconds to fill buffer, {1:F3} MiB/sec.", results.FillTime,
                                 results.FillSpeed);
            DicConsole.WriteLine("Took {0} seconds to read buffer, {1:F3} MiB/sec.", results.ReadTime,
                                 results.ReadSpeed);
            DicConsole.WriteLine("Took {0} seconds to entropy buffer, {1:F3} MiB/sec.", results.EntropyTime,
                                 results.EntropySpeed);

            foreach (KeyValuePair <string, BenchmarkEntry> entry in results.Entries)
            {
                checksumTimes.Add(entry.Key, entry.Value.TimeSpan);
                DicConsole.WriteLine("Took {0} seconds to {1} buffer, {2:F3} MiB/sec.", entry.Value.TimeSpan, entry.Key,
                                     entry.Value.Speed);
            }

            DicConsole.WriteLine("Took {0} seconds to do all algorithms at the same time, {1} MiB/sec.",
                                 results.TotalTime, results.TotalSpeed);
            DicConsole.WriteLine("Took {0} seconds to do all algorithms sequentially, {1} MiB/sec.",
                                 results.SeparateTime, results.SeparateSpeed);

            DicConsole.WriteLine();
            DicConsole.WriteLine("Max memory used is {0} bytes", results.MaxMemory);
            DicConsole.WriteLine("Min memory used is {0} bytes", results.MinMemory);

            Core.Statistics.AddCommand("benchmark");
            Core.Statistics.AddBenchmark(checksumTimes, results.EntropyTime, results.TotalTime, results.SeparateTime,
                                         results.MaxMemory, results.MinMemory);
        }
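The MiB/s figures above come from dividing the buffer size by the elapsed seconds of each pass. A minimal illustration of that calculation (the helper name is invented, not part of the library):

        // Illustrative helper: MiB/s for a buffer of `bufferSizeMiB` mebibytes
        // processed in `seconds` (matching the options.BufferSize * 1024 * 1024 call above).
        static double MibPerSecond(int bufferSizeMiB, double seconds)
        {
            return seconds > 0 ? bufferSizeMiB / seconds : 0;
        }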
Example #9
 public EcsliteV1BouncyShooterBenchmark(BenchmarkOptions options)
 {
     _options = (BouncyShooterOptions)options;
 }
Example #10
 public LeoEcsResizeBouncyShooterBenchmark(BenchmarkOptions options)
 {
     _options     = (BouncyShooterOptions)options;
     _isRendering = this._options.IsRendering;
 }
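Both constructors above hard-cast the shared BenchmarkOptions argument to BouncyShooterOptions. A hedged alternative for the same constructor body, using a pattern match so a mismatched options type fails with an explicit message instead of an InvalidCastException (sketch only):

 public LeoEcsResizeBouncyShooterBenchmark(BenchmarkOptions options)
 {
     // Fail fast with a descriptive error if the wrong options type is supplied.
     if (options is not BouncyShooterOptions shooterOptions)
     {
         throw new ArgumentException("Expected BouncyShooterOptions", nameof(options));
     }

     _options     = shooterOptions;
     _isRendering = _options.IsRendering;
 }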
Example #11
        //TODO: for now, the main program code is tightly tied to IO benchmarks. Needs refactoring to make it more modular.

        static void Main(string[] args)
        {
            AppDomain.CurrentDomain.UnhandledException += new UnhandledExceptionEventHandler(CurrentDomain_UnhandledException);

            var benchmarkOptions = new BenchmarkOptions {
                ActionIterationCount = 1, BenchmarkIterationCount = 3, ConcurrencyLevel = 1
            };

            var  tests     = BenchmarkTests.All;
            bool profiling = false;

            var converter = new StringValueConverter();

            if (args.Length > 0)
            {
                tests = (BenchmarkTests)converter.ConvertTo(args[0], TrimmingOptions.Both, typeof(BenchmarkTests), BenchmarkTests.All);

                if (args.Length > 1)
                {
                    benchmarkOptions.BenchmarkIterationCount = converter.ConvertToInt64(args[1], TrimmingOptions.Both, benchmarkOptions.BenchmarkIterationCount, null);
                }

                if (args.Length > 2)
                {
                    profiling = converter.ConvertToBoolean(args[2], TrimmingOptions.Both, false, null);
                }
            }

            #region FixedWidthReader

            if (tests.HasFlag(BenchmarkTests.FixedWidthReader))
            {
                var files = profiling
                                        ? new string[] { @"IO\Text\files\fixed.txt" }
                                        : new string[] { @"IO\Text\files\fixed.txt", @"IO\Text\files\test1.csv", @"IO\Text\files\test2.csv" };

                foreach (var file in files)
                {
                    Console.WriteLine("--- FixedWidthReader - {0} ---", file);

                    var benchmarkArgs = new FixedRecordReaderBenchmarkArguments();
                    benchmarkArgs.Path = file;

                    Benchmark.Execute("NLight", benchmarkOptions, benchmarkArgs, OutputResults, FixedRecordReaderBenchmarks.ReadAll);
                    Benchmark.Execute("DataStreams", benchmarkOptions, benchmarkArgs, OutputResults, FixedRecordReaderBenchmarks.ReadAll_DataStreams);
                }
            }

            #endregion

            #region DelimitedReader

            if (tests.HasFlag(BenchmarkTests.DelimitedReader))
            {
                var files = profiling
                                        ? new string[] { @"IO\Text\files\test1.csv" }
                                        : new string[] { @"IO\Text\files\test1.csv", @"IO\Text\files\test2.csv", @"IO\Text\files\test3.csv", @"IO\Text\files\test4.csv", @"IO\Text\files\test5.csv" };

                foreach (var file in files)
                {
                    Console.WriteLine("--- DelimitedReader - {0} ---", file);

                    var benchmarkArgs = new DelimitedRecordReaderBenchmarkArguments();
                    benchmarkArgs.Path            = file;
                    benchmarkArgs.TrimWhiteSpaces = true;

                    Benchmark.Execute("LumenWorks", benchmarkOptions, benchmarkArgs, OutputResults, DelimitedRecordReaderBenchmarks.ReadAll_LumenWorks);
                    Benchmark.Execute("NLight", benchmarkOptions, benchmarkArgs, OutputResults, DelimitedRecordReaderBenchmarks.ReadAll);
                    Benchmark.Execute("DataStreams", benchmarkOptions, benchmarkArgs, OutputResults, DelimitedRecordReaderBenchmarks.ReadAll_DataStreams);
                    Benchmark.Execute("CsvHelper", benchmarkOptions, benchmarkArgs, OutputResults, DelimitedRecordReaderBenchmarks.ReadAll_CsvHelper);
                    //Benchmark.Execute("OleDb", benchmarkOptions, benchmarkArgs, OutputResults, DelimitedRecordReaderBenchmarks.ReadAll_OleDb);
                    //Benchmark.Execute("Regex", benchmarkOptions, benchmarkArgs, OutputResults, DelimitedRecordReaderBenchmarks.ReadAll_Regex);
                }
            }

            #endregion

            #region DelimitedReaderAdvancedEscaping

            if (tests.HasFlag(BenchmarkTests.DelimitedReaderAdvancedEscaping))
            {
                var files = profiling
                                        ? new string[] { @"IO\Text\files\test4.csv" }
                                        : new string[] { @"IO\Text\files\test3.csv", @"IO\Text\files\test4.csv" };

                foreach (var file in files)
                {
                    Console.WriteLine("--- DelimitedReader with advanced escaping - {0} ---", file);

                    var benchmarkArgs = new DelimitedRecordReaderBenchmarkArguments();
                    benchmarkArgs.Path                    = file;
                    benchmarkArgs.TrimWhiteSpaces         = true;
                    benchmarkArgs.AdvancedEscapingEnabled = true;

                    Benchmark.Execute("NLight", benchmarkOptions, benchmarkArgs, OutputResults, DelimitedRecordReaderBenchmarks.ReadAll);
                    Benchmark.Execute("DataStreams", benchmarkOptions, benchmarkArgs, OutputResults, DelimitedRecordReaderBenchmarks.ReadAll_DataStreams);
                }
            }

            #endregion

            Console.WriteLine("\nDone");
            Console.ReadLine();
        }
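The HasFlag checks in Main imply that BenchmarkTests is a [Flags] enum whose All value combines the individual reader tests. A hypothetical declaration consistent with that usage (the real definition may differ):

        // Hypothetical [Flags] declaration matching the HasFlag usage above.
        [Flags]
        enum BenchmarkTests
        {
            None                            = 0,
            FixedWidthReader                = 1 << 0,
            DelimitedReader                 = 1 << 1,
            DelimitedReaderAdvancedEscaping = 1 << 2,
            All = FixedWidthReader | DelimitedReader | DelimitedReaderAdvancedEscaping
        }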
Example #12
        IEnumerable <BenchmarkTaskBase> AddCookTests(FileReference InProjectFile, UnrealTargetPlatform InPlatform, BenchmarkOptions InOptions)
        {
            if (InProjectFile == null)
            {
                return(Enumerable.Empty <BenchmarkTaskBase>());
            }

            List <BenchmarkTaskBase> NewTasks = new List <BenchmarkTaskBase>();

            // Cook a client if the project supports it
            bool CookClient = ProjectSupportsClientBuild(InProjectFile);

            if (InOptions.DoCookTests)
            {
                // no/warm options
                if (InOptions.DDCOptions == DDCTaskOptions.None || InOptions.DDCOptions.HasFlag(DDCTaskOptions.WarmDDC))
                {
                    NewTasks.Add(new BenchmarkCookTask(InProjectFile, InPlatform, CookClient, DDCTaskOptions.WarmDDC, InOptions.CookArgs));
                }

                if (InOptions.DDCOptions.HasFlag(DDCTaskOptions.HotDDC))
                {
                    NewTasks.Add(new BenchmarkCookTask(InProjectFile, InPlatform, CookClient, DDCTaskOptions.HotDDC, InOptions.CookArgs));
                }

                if (InOptions.DDCOptions.HasFlag(DDCTaskOptions.ColdDDC))
                {
                    NewTasks.Add(new BenchmarkCookTask(InProjectFile, InPlatform, CookClient, DDCTaskOptions.ColdDDC, InOptions.CookArgs));
                }

                if (InOptions.DDCOptions.HasFlag(DDCTaskOptions.NoShaderDDC))
                {
                    NewTasks.Add(new BenchmarkCookTask(InProjectFile, InPlatform, CookClient, DDCTaskOptions.NoShaderDDC, InOptions.CookArgs));
                }

                if (InOptions.DDCOptions.HasFlag(DDCTaskOptions.NoSharedDDC))
                {
                    NewTasks.Add(new BenchmarkCookTask(InProjectFile, InPlatform, CookClient, DDCTaskOptions.NoSharedDDC, InOptions.CookArgs));
                }
            }

            return(NewTasks);
        }
Example #13
        IEnumerable <BenchmarkTaskBase> AddBuildTests(FileReference InProjectFile, UnrealTargetPlatform InPlatform, string InTargetName, BenchmarkOptions InOptions)
        {
            BuildOptions CleanFlag = InOptions.NoClean ? BuildOptions.None : BuildOptions.Clean;

            BuildOptions NoAndSingleCompileOptions = BuildOptions.None;

            List <BenchmarkTaskBase> NewTasks = new List <BenchmarkTaskBase>();

            if (InOptions.DoAcceleratedCompileTests)
            {
                NewTasks.Add(new BenchmarkBuildTask(InProjectFile, InTargetName, InPlatform, CleanFlag));
            }

            if (InOptions.DoNoAcceleratedCompileTests)
            {
                foreach (int ProcessorCount in InOptions.CoresForLocalJobs)
                {
                    NewTasks.Add(new BenchmarkBuildTask(InProjectFile, InTargetName, InPlatform, CleanFlag | BuildOptions.NoAcceleration, "", ProcessorCount));
                }
                // do single compilation with these results
                NoAndSingleCompileOptions |= BuildOptions.NoAcceleration;
            }

            if (InOptions.DoNoCompileTests)
            {
                // note, don't clean since we build normally then build a single file
                NewTasks.Add(new BenchmarkNopCompileTask(InProjectFile, InTargetName, InPlatform, NoAndSingleCompileOptions));
            }

            if (InOptions.DoSingleCompileTests)
            {
                FileReference SourceFile = FindProjectSourceFile(InProjectFile);

                // note, don't clean since we build normally then build again
                NewTasks.Add(new BenchmarkSingleCompileTask(InProjectFile, InTargetName, InPlatform, SourceFile, NoAndSingleCompileOptions));
            }

            return(NewTasks);
        }
Example #14
        public override ExitCode Execute()
        {
            BenchmarkOptions Options = new BenchmarkOptions();

            Options.ParseParams(this.Params);

            List <BenchmarkTaskBase> Tasks = new List <BenchmarkTaskBase>();

            Dictionary <BenchmarkTaskBase, List <TimeSpan> > Results = new Dictionary <BenchmarkTaskBase, List <TimeSpan> >();

            for (int ProjectIndex = 0; ProjectIndex < Options.ProjectsToTest.Count(); ProjectIndex++)
            {
                string Project = Options.ProjectsToTest.ElementAt(ProjectIndex);

                FileReference ProjectFile = ProjectUtils.FindProjectFileFromName(Project);

                if (ProjectFile == null && !Project.Equals("UE4", StringComparison.OrdinalIgnoreCase))
                {
                    throw new AutomationException("Could not find project file for {0}", Project);
                }

                if (Options.DoBuildEditorTests)
                {
                    Tasks.AddRange(AddBuildTests(ProjectFile, BuildHostPlatform.Current.Platform, "Editor", Options));
                }

                // do startup tests
                if (Options.DoPIETests)
                {
                    Tasks.AddRange(AddPIETests(ProjectFile, Options));
                }

                foreach (var ClientPlatform in Options.PlatformsToTest)
                {
                    // build a client if the project supports it
                    string TargetName = ProjectSupportsClientBuild(ProjectFile) ? "Client" : "Game";

                    if (Options.DoBuildClientTests)
                    {
                        // do build tests
                        Tasks.AddRange(AddBuildTests(ProjectFile, ClientPlatform, TargetName, Options));
                    }

                    // do cook tests
                    if (Options.DoCookTests)
                    {
                        Tasks.AddRange(AddCookTests(ProjectFile, ClientPlatform, Options));
                    }
                }
            }

            Log.TraceInformation("Will execute tests:");

            foreach (var Task in Tasks)
            {
                Log.TraceInformation("{0}", Task.GetFullTaskName());
            }

            if (!Options.Preview)
            {
                // create results lists
                foreach (var Task in Tasks)
                {
                    Results.Add(Task, new List <TimeSpan>());
                }

                DateTime StartTime = DateTime.Now;

                for (int i = 0; i < Options.Iterations; i++)
                {
                    foreach (var Task in Tasks)
                    {
                        Log.TraceInformation("Starting task {0} (Pass {1})", Task.GetFullTaskName(), i + 1);

                        Task.Run();

                        Log.TraceInformation("Task {0} took {1}", Task.GetFullTaskName(), Task.TaskTime.ToString(@"hh\:mm\:ss"));

                        Results[Task].Add(Task.TaskTime);

                        // write results so far
                        WriteCSVResults(Options.FileName, Tasks, Results);

                        Log.TraceInformation("Waiting {0} secs until next task", Options.TimeBetweenTasks);
                        Thread.Sleep(Options.TimeBetweenTasks * 1000);
                    }
                }

                Log.TraceInformation("**********************************************************************");
                Log.TraceInformation("Test Results:");
                foreach (var Task in Tasks)
                {
                    string TimeString = "";

                    IEnumerable <TimeSpan> TaskTimes = Results[Task];

                    foreach (var TaskTime in TaskTimes)
                    {
                        if (TimeString.Length > 0)
                        {
                            TimeString += ", ";
                        }

                        if (TaskTime == TimeSpan.Zero)
                        {
                            TimeString += "Failed";
                        }
                        else
                        {
                            TimeString += TaskTime.ToString(@"hh\:mm\:ss");
                        }
                    }

                    var AvgTimeString = "";

                    if (TaskTimes.Count() > 1)
                    {
                        var AvgTime = new TimeSpan(TaskTimes.Sum(T => T.Ticks) / TaskTimes.Count());

                        AvgTimeString = string.Format(" (Avg: {0})", AvgTime.ToString(@"hh\:mm\:ss"));
                    }

                    Log.TraceInformation("Task {0}:\t{1}{2}", Task.GetFullTaskName(), TimeString, AvgTimeString);
                }
                Log.TraceInformation("**********************************************************************");

                TimeSpan Elapsed = DateTime.Now - StartTime;

                Log.TraceInformation("Total benchmark time: {0}", Elapsed.ToString(@"hh\:mm\:ss"));

                WriteCSVResults(Options.FileName, Tasks, Results);
            }

            return(ExitCode.Success);
        }
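The average shown in the results above is built by summing ticks and constructing a new TimeSpan. The same idea as a small standalone helper (sketch; System.Linq is assumed):

        // Sketch: average a set of task timings the way Execute() does,
        // by summing ticks and dividing by the sample count.
        static TimeSpan AverageTime(IEnumerable <TimeSpan> times)
        {
            var list = times.ToList();

            return list.Count == 0
                ? TimeSpan.Zero
                : new TimeSpan(list.Sum(T => T.Ticks) / list.Count);
        }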
Example #15
        int RunBenchmark(BenchmarkOptions options)
        {
            InitializeOutput(options.MaxToolsPerRow);
            _logger.Info("Starting Benchmark");
            var toolsInfo     = JsonConvert.DeserializeObject <ToolsJson>(File.ReadAllText("./Tools/tools.json"));
            var testFilesDir  = "./TestFiles";
            var workingDir    = "./temp";
            var testFilesInfo =
                JsonConvert.DeserializeObject <TestFilesJson>(File.ReadAllText($"{testFilesDir}/testFiles.json"));

            var benchmarkResults = new List <IBenchmarkResult>();

            foreach (var testFile in testFilesInfo.TestFiles)
            {
                var testFilePath = Path.GetFullPath($"{testFilesDir}/{testFile.Directory}/lib.js");
                if (!File.Exists(testFilePath))
                {
                    _logger.Warn($"test file: {testFilePath} Was not found!");
                    continue;
                }

                _logger.Info($"Starting benchmark suite of {testFile.Name}@{testFile.Version}");

                var originalContent = File.ReadAllText(testFilePath);
                var result          = new BenchmarkResult($"{testFile.Name}@{testFile.Version}", originalContent.Utf8Length(), originalContent.GZipLength());
                foreach (var tool in toolsInfo.Tools)
                {
                    _logger.Info($"Starting benchmark with tool {tool.Name}");
                    var toolDirPath = $"{workingDir}/{tool.Name.Replace(' ', '_')}{(tool.Npm == null ? "" : "/node_modules/.bin")}{(tool.ExecDir == null ? "" : $"/{tool.ExecDir}")}";

                    var execCommand   = tool.ExecCommand;
                    var execArguments = tool.ExecArguments.Replace("%INPUT_FILE%", testFilePath);
                    var isScript      = tool.ExecCommand.StartsWith("./");

                    if (isScript)
                    {
                        execArguments = $"{(IsWindows ? $"/C {execCommand.Substring(2)}" : $"-c \"{execCommand}")} {execArguments}{(IsWindows ? "" : "\"")}";
                        execCommand   = ShellExecutable;
                    }

                    var startInfo = new ProcessStartInfo
                    {
                        FileName               = execCommand,
                        Arguments              = execArguments,
                        UseShellExecute        = false,
                        CreateNoWindow         = true,
                        WorkingDirectory       = Path.GetFullPath(toolDirPath),
                        RedirectStandardOutput = true,
                        RedirectStandardError  = true
                    };

                    var processResult = startInfo.RunAndMeasureProcess();
                    if (processResult.IsTimeoutExpired)
                    {
                        _logger.Warn("Benchmark has timeouted!");
                    }
                    else
                    {
                        _logger.Info("Benchmark finished");
                    }

                    result.ExecutionResults.Add(new ExecutionResult
                    {
                        ToolName         = tool.Name,
                        ExecutionTime    = processResult.ExecutionTime,
                        Result           = processResult.StdOut,
                        Error            = processResult.StdErr,
                        ExitCode         = processResult.ExitCode,
                        IsTimeoutExpired = processResult.IsTimeoutExpired
                    });
                }
Example #16
 protected static BenchmarkEngine CreateEngine <T>(BenchmarkOptions options)
 {
     return(new BenchmarkEngine(options, new Type[] { typeof(T) }));
 }
Example #17
 public BenchmarkHandler(BenchmarkOptions options)
 {
     _options = options;
 }
Example #18
        IEnumerable <BenchmarkTaskBase> AddPIETests(FileReference InProjectFile, BenchmarkOptions InOptions)
        {
            if (InProjectFile == null || !InOptions.DoPIETests)
            {
                return(Enumerable.Empty <BenchmarkTaskBase>());
            }

            List <BenchmarkTaskBase> NewTasks = new List <BenchmarkTaskBase>();

            string DefaultExtraArgs = InOptions.PIEArgs;

            List <string> MapsToTest = InOptions.MapList.ToList();

            if (!MapsToTest.Any())
            {
                MapsToTest.Add("");
            }

            List <string> BackendsToTest = InOptions.DCCTypes.ToList();

            if (!BackendsToTest.Any())
            {
                BackendsToTest.Add("");
            }

            foreach (var Map in MapsToTest)
            {
                foreach (string Backend in BackendsToTest)
                {
                    string FinalArgs = DefaultExtraArgs;

                    if (!string.IsNullOrEmpty(Map))
                    {
                        FinalArgs += " -map=" + Map;
                    }

                    if (!string.IsNullOrEmpty(Backend))
                    {
                        FinalArgs += string.Format(" -ddc={0}", Backend);
                    }

                    // if no options assume warm
                    if (InOptions.DDCOptions == DDCTaskOptions.None || InOptions.DDCOptions.HasFlag(DDCTaskOptions.WarmDDC))
                    {
                        NewTasks.Add(new BenchmarkRunEditorTask(InProjectFile, DDCTaskOptions.WarmDDC, FinalArgs));
                    }

                    // hot ddc
                    if (InOptions.DDCOptions.HasFlag(DDCTaskOptions.HotDDC))
                    {
                        NewTasks.Add(new BenchmarkRunEditorTask(InProjectFile, DDCTaskOptions.HotDDC, FinalArgs));
                    }

                    // cold ddc
                    if (InOptions.DDCOptions.HasFlag(DDCTaskOptions.ColdDDC))
                    {
                        NewTasks.Add(new BenchmarkRunEditorTask(InProjectFile, DDCTaskOptions.ColdDDC, FinalArgs));
                    }

                    // no shaders in the ddc
                    if (InOptions.DDCOptions.HasFlag(DDCTaskOptions.NoShaderDDC))
                    {
                        NewTasks.Add(new BenchmarkRunEditorTask(InProjectFile, DDCTaskOptions.NoShaderDDC, FinalArgs));
                    }

                    // no ddc!
                    if (InOptions.DDCOptions.HasFlag(DDCTaskOptions.NoSharedDDC))
                    {
                        NewTasks.Add(new BenchmarkRunEditorTask(InProjectFile, DDCTaskOptions.NoSharedDDC, FinalArgs));
                    }
                }
            }

            return(NewTasks);
        }