public static void Simple()
{
    var simpleDemo = new SimpleDemo();
    simpleDemo.Initialize(new DemoRenderer.Camera(1, 1, 1, 1));
    for (int i = 0; i < 128; ++i)
    {
        simpleDemo.Update(null, 1 / 60f);
    }
    double time = 0;
    const int frameCount = 1000;
    int largestOverlapCount = 0;
    for (int i = 0; i < frameCount; ++i)
    {
        CacheBlaster.Blast();
        var start = Stopwatch.GetTimestamp();
        simpleDemo.Update(null, 1 / 60f);
        var end = Stopwatch.GetTimestamp();
        time += (end - start) / (double)Stopwatch.Frequency;
        var overlapCount = simpleDemo.Simulation.NarrowPhase.PairCache.Mapping.Count;
        if (overlapCount > largestOverlapCount)
        {
            largestOverlapCount = overlapCount;
        }
        //Console.WriteLine($"FRAME {i}, time (us): {1e6 * simpleDemo.Simulation.Timings[simpleDemo.Simulation.NarrowPhase]}");
        Console.WriteLine($"FRAME {i}");
    }
    Console.WriteLine($"Time per frame (us): {1e6 * time / frameCount}, maximum overlap count: {largestOverlapCount}");
    simpleDemo.Dispose();
}
public static void Test<T>(ContentArchive content, int runCount, int warmUpFrames, int frameCount) where T : Demo, new()
{
    var runFrameTimes = new double[runCount];
    for (int runIndex = 0; runIndex < runCount; ++runIndex)
    {
        var demo = new T();
        demo.Initialize(content, new DemoRenderer.Camera(1, 1, 1, 1));
        GC.Collect(3, GCCollectionMode.Forced, true, true);
        //Warm up the simulation and JIT before taking any timings.
        for (int i = 0; i < warmUpFrames; ++i)
        {
            demo.Update(null, 1 / 60f);
        }
        Console.WriteLine($"Warmup {runIndex} complete");
        double time = 0;
        int largestOverlapCount = 0;
        Console.Write("Completed frames: ");
        for (int i = 0; i < frameCount; ++i)
        {
            CacheBlaster.Blast();
            var start = Stopwatch.GetTimestamp();
            demo.Update(null, 1 / 60f);
            var end = Stopwatch.GetTimestamp();
            time += (end - start) / (double)Stopwatch.Frequency;
            //Track the largest narrow phase overlap count observed during the run.
            var overlapCount = demo.Simulation.NarrowPhase.PairCache.Mapping.Count;
            if (overlapCount > largestOverlapCount)
            {
                largestOverlapCount = overlapCount;
            }
            if (i % 32 == 0)
            {
                Console.Write($"{i}, ");
            }
        }
        Console.WriteLine();
        var frameTime = time / frameCount;
        Console.WriteLine($"Time per frame (ms): {1e3 * frameTime}, maximum overlap count: {largestOverlapCount}");
        runFrameTimes[runIndex] = frameTime;
        demo.Dispose();
    }
    var min = double.MaxValue;
    var max = double.MinValue;
    var sum = 0.0;
    var sumOfSquares = 0.0;
    for (int runIndex = 0; runIndex < runCount; ++runIndex)
    {
        var time = runFrameTimes[runIndex];
        min = Math.Min(time, min);
        max = Math.Max(time, max);
        sum += time;
        sumOfSquares += time * time;
    }
    var average = sum / runCount;
    var stdDev = Math.Sqrt(sumOfSquares / runCount - average * average);
    Console.WriteLine($"Average (ms): {average * 1e3}");
    Console.WriteLine($"Min, max (ms): {min * 1e3}, {max * 1e3}");
    Console.WriteLine($"Std Dev (ms): {stdDev * 1e3}");
}
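//A hedged usage sketch, not part of the original benchmark: the demo type and the counts below are
//assumptions chosen only to illustrate how the multi-run Test<T> above might be driven. Any Demo
//subtype with a parameterless constructor would work; "content" is assumed to be an already-loaded ContentArchive.
//Test<SomeBenchmarkDemo>(content, runCount: 3, warmUpFrames: 64, frameCount: 1000);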
public static void Test<T>(int warmUpFrames, int frameCount) where T : Demo, new()
{
    var demo = new T();
    demo.Initialize(new DemoRenderer.Camera(1, 1, 1, 1));
    for (int i = 0; i < warmUpFrames; ++i)
    {
        demo.Update(null, 1 / 60f);
    }
    double time = 0;
    int largestOverlapCount = 0;
    for (int i = 0; i < frameCount; ++i)
    {
        CacheBlaster.Blast();
        var start = Stopwatch.GetTimestamp();
        demo.Update(null, 1 / 60f);
        var end = Stopwatch.GetTimestamp();
        time += (end - start) / (double)Stopwatch.Frequency;
        //Track the largest narrow phase overlap count observed during the run.
        var overlapCount = demo.Simulation.NarrowPhase.PairCache.Mapping.Count;
        if (overlapCount > largestOverlapCount)
        {
            largestOverlapCount = overlapCount;
        }
        Console.WriteLine($"FRAME {i}");
    }
    Console.WriteLine($"Time per frame (us): {1e6 * time / frameCount}, maximum overlap count: {largestOverlapCount}");
    demo.Dispose();
}
public static double Time<TDataLayout>(int iterationCount, int flagCount, IThreadDispatcher dispatcher) where TDataLayout : IDataLayout, new()
{
    CacheBlaster.Blast();
    var dataLayout = new TDataLayout();
    dataLayout.Initialize();
    dataLayout.InitializeIteration(flagCount);
    //Workers pull job indices from a shared atomic counter until every job has been consumed.
    Action<int> executeFunction = workerIndex =>
    {
        int jobIndex;
        while ((jobIndex = Interlocked.Increment(ref globalJobCounter) - 1) < jobs.Length)
        {
            dataLayout.Execute(jobs[jobIndex]);
        }
    };
    globalJobCounter = 0;
    dispatcher.DispatchWorkers(executeFunction); //jit warmup
    dataLayout.Validate(flagCount);
    long time = 0;
    for (int i = 0; i < iterationCount; ++i)
    {
        //Note that individual executions of each approach do not reuse the same memory. The goal is to force cache misses.
        dataLayout.InitializeIteration(flagCount);
        globalJobCounter = 0;
        var start = Stopwatch.GetTimestamp();
        dispatcher.DispatchWorkers(executeFunction);
        var end = Stopwatch.GetTimestamp();
        time += end - start;
        dataLayout.Validate(flagCount);
    }
    dataLayout.Dispose();
    GC.Collect(3, GCCollectionMode.Forced, true);
    return time / (iterationCount * (double)Stopwatch.Frequency);
}
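//A hedged usage sketch, not part of the original benchmark: the layout type, dispatcher, and counts
//below are assumptions. Any IDataLayout implementation with a parameterless constructor and any
//IThreadDispatcher can be plugged in; the return value is average seconds per iteration.
//var secondsPerIteration = Time<SomeDataLayout>(iterationCount: 256, flagCount: 1 << 16, dispatcher);
//Console.WriteLine($"Time per iteration (ms): {secondsPerIteration * 1e3}");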
public static SimulationTimeSamples Solve<TBodyBuilder, TConstraintBuilder, TConstraint>(
    TBodyBuilder bodyBuilder, TConstraintBuilder constraintBuilder,
    int width, int height, int length, int frameCount, int threadCount,
    IThreadDispatcher initializationThreadPool, IThreadDispatcher threadDispatcher)
    where TBodyBuilder : IBodyBuilder
    where TConstraintBuilder : IConstraintBuilder
    where TConstraint : IConstraintDescription<TConstraint>
{
    //const int bodyCount = 8;
    //SimulationSetup.BuildStackOfBodiesOnGround(bodyCount, false, true, out var bodies, out var solver, out var graph, out var bodyHandles, out var constraintHandles);
    GC.Collect(3, GCCollectionMode.Forced, true);
    SimulationSetup.BuildLattice(
        bodyBuilder, constraintBuilder,
        width, height, length,
        out var simulation, out var bodyHandles, out var constraintHandles);
    //Scramble the memory layout and churn adds/removes so the compressor and layout optimizers have realistic work to do.
    SimulationScrambling.ScrambleBodies(simulation);
    SimulationScrambling.ScrambleConstraints(simulation.Solver);
    SimulationScrambling.ScrambleBodyConstraintLists(simulation);
    SimulationScrambling.AddRemoveChurn<TConstraint>(simulation, bodyHandles.Length * 2, bodyHandles, constraintHandles);
    const int batchCompressionIterations = 1000;
    simulation.SolverBatchCompressor.TargetCandidateFraction = .005f;
    simulation.SolverBatchCompressor.MaximumCompressionFraction = 0.0005f;
    for (int i = 0; i < batchCompressionIterations; ++i)
    {
        simulation.SolverBatchCompressor.Compress(simulation.BufferPool, initializationThreadPool);
    }
    //Attempt cache optimization.
    int bodyOptimizationIterations = bodyHandles.Length / 4;
    simulation.BodyLayoutOptimizer.OptimizationFraction = 0.005f;
    for (int i = 0; i < bodyOptimizationIterations; ++i)
    {
        simulation.BodyLayoutOptimizer.IncrementalOptimize(simulation.BufferPool, initializationThreadPool);
    }
    simulation.ConstraintLayoutOptimizer.OptimizationFraction = 0.044f;
    int constraintOptimizationIterations = 1024;
    for (int i = 0; i < constraintOptimizationIterations; ++i)
    {
        simulation.ConstraintLayoutOptimizer.Update(simulation.BufferPool, initializationThreadPool);
    }
    var simulationTimeSamples = new SimulationTimeSamples(frameCount);
    const float dt = 1 / 60f;
    const int iterationCount = 8;
    simulation.Solver.IterationCount = iterationCount;
    for (int frameIndex = 0; frameIndex < frameCount; ++frameIndex)
    {
        CacheBlaster.Blast();
        simulation.Timestep(dt, threadDispatcher);
        simulationTimeSamples.RecordFrame(simulation);
    }
    simulation.Dispose();
    simulation.BufferPool.Clear();
    return simulationTimeSamples;
}
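//A hedged usage sketch, not part of the original benchmark: the builder types, lattice dimensions, and
//dispatchers below are assumptions. Any IBodyBuilder/IConstraintBuilder pair understood by
//SimulationSetup.BuildLattice, together with a matching IConstraintDescription type, could be substituted.
//var samples = Solve<SomeBodyBuilder, SomeConstraintBuilder, BallSocket>(
//    new SomeBodyBuilder(), new SomeConstraintBuilder(),
//    width: 32, height: 32, length: 32, frameCount: 512, threadCount: Environment.ProcessorCount,
//    initializationThreadPool: dispatcher, threadDispatcher: dispatcher);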
public static void Test()
{
    //var random = new Random(5);
    //var comparer = new CollidablePairComparer();
    //for (int i = 0; i < 10000; ++i)
    //{
    //    var a = new CollidableReference((CollidableMobility)random.Next(3), random.Next(1 << 30));
    //    var b = new CollidableReference((CollidableMobility)random.Next(3), random.Next(1 << 30));
    //    var pair1 = new CollidablePair(a, b);
    //    var pair2 = new CollidablePair(b, a);
    //    Debug.Assert(comparer.Hash(ref pair1) == comparer.Hash(ref pair2));
    //    Debug.Assert(comparer.Equals(ref pair1, ref pair2));
    //}
    //for (int i = 0; i < 10000; ++i)
    //{
    //    var a = new CollidableReference((CollidableMobility)random.Next(3), random.Next(1 << 30));
    //    var b = new CollidableReference((CollidableMobility)random.Next(3), random.Next(1 << 30));
    //    var pair1 = new CollidablePair(a, b);
    //    CollidablePair pair2;
    //    do
    //    {
    //        var a2 = new CollidableReference((CollidableMobility)random.Next(3), random.Next(1 << 30));
    //        var b2 = new CollidableReference((CollidableMobility)random.Next(3), random.Next(1 << 30));
    //        pair2 = new CollidablePair(a2, b2);
    //    } while (
    //        (pair2.A.Packed == pair1.A.Packed && pair2.B.Packed == pair1.B.Packed) ||
    //        (pair2.B.Packed == pair1.A.Packed && pair2.A.Packed == pair1.B.Packed));
    //    Debug.Assert(!comparer.Equals(ref pair1, ref pair2));
    //}
    const int iterationCount = 1000;
    const int perLayerCollidableCount = 900;
    const int layerCount = 10;
    int[] creationRemap = new int[perLayerCollidableCount * (layerCount - 1)];
    int[] lookupRemap = new int[creationRemap.Length];
    for (int i = 0; i < creationRemap.Length; ++i)
    {
        creationRemap[i] = i;
        lookupRemap[i] = i;
    }
    BufferPool pool = new BufferPool();
    var dictionary = new QuickDictionary<CollidablePair, int, CollidablePairComparer>(creationRemap.Length, 1, pool);
    var random = new Random(5);
    //Shuffle both remaps (Fisher-Yates) so insertion and lookup visit the pairs in different orders.
    for (int i = 0; i < creationRemap.Length - 1; ++i)
    {
        {
            var temp = creationRemap[i];
            var swapTarget = random.Next(i + 1, creationRemap.Length);
            creationRemap[i] = creationRemap[swapTarget];
            creationRemap[swapTarget] = temp;
        }
        {
            var temp = lookupRemap[i];
            var swapTarget = random.Next(i + 1, lookupRemap.Length);
            lookupRemap[i] = lookupRemap[swapTarget];
            lookupRemap[swapTarget] = temp;
        }
    }
    int accumulator = 0;
    double totalTime = 0;
    const int warmupIterations = 128;
    for (int iterationIndex = 0; iterationIndex < iterationCount + warmupIterations; ++iterationIndex)
    {
        dictionary.Clear();
        for (int i = 0; i < creationRemap.Length; ++i)
        {
            var index = creationRemap[i];
            var pair = new CollidablePair
            {
                A = new CollidableReference(CollidableMobility.Kinematic, index),
                B = new CollidableReference(CollidableMobility.Dynamic, index + perLayerCollidableCount)
            };
            dictionary.AddUnsafely(ref pair, index);
        }
        CacheBlaster.Blast();
        //Prewarm the remap into cache to more closely mirror the behavior in the narrow phase.
        for (int i = 0; i < lookupRemap.Length; ++i)
        {
            accumulator += lookupRemap[i];
        }
        var start = Stopwatch.GetTimestamp();
        for (int i = 0; i < lookupRemap.Length; ++i)
        {
            var collidableIndex = lookupRemap[i];
            var pair = new CollidablePair
            {
                A = new CollidableReference(CollidableMobility.Kinematic, collidableIndex),
                B = new CollidableReference(CollidableMobility.Dynamic, collidableIndex + perLayerCollidableCount)
            };
            var dictionaryIndex = dictionary.IndexOf(ref pair);
            accumulator += dictionaryIndex;
        }
        var end = Stopwatch.GetTimestamp();
        //Only accumulate timings once the warmup iterations have completed.
        if (iterationIndex >= warmupIterations)
        {
            totalTime += (end - start) / (double)Stopwatch.Frequency;
        }
    }
    Console.WriteLine($"Time per lookup (ns): {1e9 * totalTime / (iterationCount * creationRemap.Length)}, acc{accumulator}");
    pool.Clear();
}