// Verifies that a SampleGroup carries the expected name, unit, and comparison direction.
// NUnit's Assert.AreEqual signature is (expected, actual); the original call passed the
// actual value first, which produces misleading "expected X but was Y" failure messages.
private static void AssertDefinition(SampleGroup sampleGroup, string name, SampleUnit sampleUnit, bool increaseIsBetter)
{
    Assert.AreEqual(name, sampleGroup.Name);
    Assert.AreEqual(sampleUnit, sampleGroup.Unit);
    Assert.AreEqual(increaseIsBetter, sampleGroup.IncreaseIsBetter);
}
/// <summary>
/// Converts <paramref name="value"/> from one sample unit to another by scaling with
/// the relative factors of the two units.
/// </summary>
/// <param name="from">Unit the value is currently expressed in.</param>
/// <param name="to">Unit to convert the value into.</param>
/// <param name="value">The measurement to convert.</param>
/// <returns>The value expressed in the target unit.</returns>
public static double ConvertSample(SampleUnit from, SampleUnit to, double value)
{
    double sourceScale = RelativeSampleUnit(from);
    double targetScale = RelativeSampleUnit(to);
    // Same grouping as the original: value * (target / source).
    return value * (targetScale / sourceScale);
}
/// <summary>
/// Returns the multiplicative conversion ratio between two sample units:
/// the relative scale of <paramref name="from"/> divided by that of <paramref name="to"/>.
/// </summary>
public static double GetRatio(SampleUnit from, SampleUnit to)
{
    return RelativeSampleUnit(from) / RelativeSampleUnit(to);
}
/// <summary>
/// Defines the sample group for frame measurements using common defaults
/// (millisecond unit, median aggregation, 10% threshold, lower is better).
/// </summary>
/// <returns>This measurement, for fluent chaining.</returns>
public FramesMeasurement Definition(string name = "Time", SampleUnit sampleUnit = SampleUnit.Millisecond, AggregationType aggregationType = AggregationType.Median, double threshold = 0.1D, bool increaseIsBetter = false, bool failOnBaseline = true)
{
    var definition = new SampleGroupDefinition(name, sampleUnit, aggregationType, threshold, increaseIsBetter, failOnBaseline);
    return Definition(definition);
}
/// <summary>
/// Returns the scale of a unit relative to the base unit of its family:
/// nanoseconds for time units, bytes for size units.
/// </summary>
/// <exception cref="PerformanceTestException">Thrown for an unrecognized unit.</exception>
public static double RelativeSampleUnit(SampleUnit unit)
{
    switch (unit)
    {
        // Base units of each family.
        case SampleUnit.Nanosecond:
        case SampleUnit.Byte:
            return 1;
        case SampleUnit.Microsecond:
        case SampleUnit.Kilobyte:
            return 1000;
        case SampleUnit.Millisecond:
        case SampleUnit.Megabyte:
            return 1000000;
        case SampleUnit.Second:
        case SampleUnit.Gigabyte:
            return 1000000000;
        default:
            throw new PerformanceTestException("Wrong SampleUnit type used.");
    }
}
/// <summary>
/// Converts <paramref name="value"/> between sample units, returning it unchanged
/// when the source and target units are identical.
/// </summary>
public static double ConvertSample(SampleUnit from, SampleUnit to, double value)
{
    // No work needed when the units already match.
    if (from.Equals(to))
        return value;

    return value * GetRatio(from, to);
}
/// <summary>
/// Creates a sample group with the given name, measurement unit, and comparison direction.
/// </summary>
/// <param name="name">Display name of the sample group; must be non-empty.</param>
/// <param name="unit">Unit the samples are expressed in.</param>
/// <param name="increaseIsBetter">Whether larger sample values indicate an improvement.</param>
/// <exception cref="PerformanceTestException">Thrown when <paramref name="name"/> is null or empty.</exception>
public SampleGroup(string name, SampleUnit unit, bool increaseIsBetter)
{
    // Fail fast: validate the argument before assigning any state.
    // (The original assigned the properties first and threw afterwards.)
    if (string.IsNullOrEmpty(name))
    {
        throw new PerformanceTestException("Sample group name is empty. Please assign a valid name.");
    }

    Name = name;
    Unit = unit;
    IncreaseIsBetter = increaseIsBetter;
}
/// <summary>
/// Defines a sample group with common defaults: millisecond unit, median aggregation,
/// a 15% regression threshold, and lower-is-better semantics.
/// </summary>
public SampleGroupDefinition(string name = "Time", SampleUnit sampleUnit = SampleUnit.Millisecond, AggregationType aggregationType = AggregationType.Median, double threshold = 0.15D, bool increaseIsBetter = false, bool failOnBaseline = true)
{
    Name = name;
    SampleUnit = sampleUnit;
    AggregationType = aggregationType;
    Threshold = threshold;
    IncreaseIsBetter = increaseIsBetter;
    FailOnBaseline = failOnBaseline;
    // This overload does not use percentile aggregation.
    Percentile = 0;
}
/// <summary>
/// Defines a sample group that aggregates samples by percentile.
/// </summary>
/// <param name="percentile">Percentile to aggregate by, in the range [0, 1].</param>
/// <exception cref="PerformanceTestException">Thrown when <paramref name="percentile"/> is outside [0, 1].</exception>
public SampleGroupDefinition(string name, SampleUnit sampleUnit, AggregationType aggregationType, double percentile, double threshold = 0.15D, bool increaseIsBetter = false, bool failOnBaseline = true)
{
    // Fail fast: validate the argument before assigning any state.
    // (The original assigned everything first and then validated the Percentile property.)
    if (percentile > 1D || percentile < 0D)
    {
        throw new PerformanceTestException("Percentile has to be defined in range [0:1].");
    }

    Name = name;
    SampleUnit = sampleUnit;
    AggregationType = aggregationType;
    Percentile = percentile;
    Threshold = threshold;
    IncreaseIsBetter = increaseIsBetter;
    FailOnBaseline = failOnBaseline;
}
/// <summary>
/// Returns the scale of a time unit relative to milliseconds
/// (e.g. there are 1,000,000 nanoseconds per millisecond).
/// </summary>
/// <exception cref="PerformanceTestException">
/// Thrown for non-time units, since time/size conversions are meaningless.
/// </exception>
public static double RelativeSampleUnit(SampleUnit unit)
{
    switch (unit)
    {
        case SampleUnit.Nanosecond: return 1000000;
        case SampleUnit.Microsecond: return 1000;
        case SampleUnit.Millisecond: return 1;
        case SampleUnit.Second: return 0.001;
    }

    throw new PerformanceTestException("Wrong SampleUnit type used. Are you trying to convert between time and size units?");
}
/// <summary>
/// Benchmarks six sweep-and-prune implementations against the same randomly generated,
/// sorted set of AABBs, then logs a few pair counts for sanity checking.
/// Fix: removed the unused local `SampleUnit unit` that was computed but never read.
/// </summary>
/// <param name="count">Number of AABBs to generate.</param>
/// <param name="seed">Seed for deterministic AABB generation.</param>
/// <param name="preallocate">Initial capacity of each overlap-pair list.</param>
public void SweepPerformanceTests(int count, uint seed, int preallocate = 1)
{
    Random random = new Random(seed);
    random.InitState(seed);

    // Generate and sort the shared input set once; every sweep sees identical data.
    NativeArray<AabbEntity> randomAabbs = new NativeArray<AabbEntity>(count, Allocator.TempJob);
    var jh = new GenerateRandomAabbs { random = random, aabbs = randomAabbs }.Schedule();
    jh = randomAabbs.SortJob(jh);
    jh.Complete();

    NativeList<EntityPair> pairsNaive = new NativeList<EntityPair>(preallocate, Allocator.TempJob);
    NativeList<EntityPair> pairsBool4 = new NativeList<EntityPair>(preallocate, Allocator.TempJob);
    NativeList<EntityPair> pairsLessNaive = new NativeList<EntityPair>(preallocate, Allocator.TempJob);
    NativeList<EntityPair> pairsFunny = new NativeList<EntityPair>(preallocate, Allocator.TempJob);
    NativeList<EntityPair> pairsBetter = new NativeList<EntityPair>(preallocate, Allocator.TempJob);
    NativeList<EntityPair> pairsNew = new NativeList<EntityPair>(preallocate, Allocator.TempJob);

    // Single cold measurement per implementation (no warmup) so runs are comparable.
    Measure.Method(() => { new NaiveSweep { aabbs = randomAabbs, overlaps = pairsNaive }.Run(); })
    .SampleGroup("NaiveSweep")
    .WarmupCount(0)
    .MeasurementCount(1)
    .Run();

    Measure.Method(() => { new Bool4Sweep { aabbs = randomAabbs, overlaps = pairsBool4 }.Run(); })
    .SampleGroup("Bool4Sweep")
    .WarmupCount(0)
    .MeasurementCount(1)
    .Run();

    Measure.Method(() => { new LessNaiveSweep { aabbs = randomAabbs, overlaps = pairsLessNaive }.Run(); })
    .SampleGroup("LessNaiveSweep")
    .WarmupCount(0)
    .MeasurementCount(1)
    .Run();

    Measure.Method(() => { new FunnySweep { aabbs = randomAabbs, overlaps = pairsFunny }.Run(); })
    .SampleGroup("FunnySweep")
    .WarmupCount(0)
    .MeasurementCount(1)
    .Run();

    Measure.Method(() => { new BetterSweep { aabbs = randomAabbs, overlaps = pairsBetter }.Run(); })
    .SampleGroup("BetterSweep")
    .WarmupCount(0)
    .MeasurementCount(1)
    .Run();

    Measure.Method(() => { new NewSweep { aabbs = randomAabbs, overlaps = pairsNew }.Run(); })
    .SampleGroup("NewSweep")
    .WarmupCount(0)
    .MeasurementCount(1)
    .Run();

    // Spot-check that the naive baseline and the two newest sweeps agree on pair counts.
    UnityEngine.Debug.Log("Pairs: " + pairsNaive.Length);
    UnityEngine.Debug.Log("Pairs: " + pairsBetter.Length);
    UnityEngine.Debug.Log("Pairs: " + pairsNew.Length);

    randomAabbs.Dispose();
    pairsNaive.Dispose();
    pairsBool4.Dispose();
    pairsLessNaive.Dispose();
    pairsFunny.Dispose();
    pairsBetter.Dispose();
    pairsNew.Dispose();
}
/// <summary>
/// Creates a sample group, defaulting to milliseconds with lower-is-better semantics.
/// Delegates assignment (and any validation the base performs) to the base constructor.
/// </summary>
/// <param name="name">Display name of the sample group.</param>
/// <param name="unit">Unit the samples are expressed in; defaults to milliseconds.</param>
/// <param name="increaseIsBetter">Whether larger sample values indicate an improvement.</param>
public SampleGroup(string name, SampleUnit unit = SampleUnit.Millisecond, bool increaseIsBetter = false) : base(name, unit, increaseIsBetter) { }
/// <summary>
/// Defines the sample group for frame measurements using percentile aggregation.
/// </summary>
/// <returns>This measurement, for fluent chaining.</returns>
public FramesMeasurement Definition(string name, SampleUnit sampleUnit, AggregationType aggregationType, double percentile, double threshold = 0.1D, bool increaseIsBetter = false, bool failOnBaseline = true)
{
    var definition = new SampleGroupDefinition(name, sampleUnit, aggregationType, percentile, threshold, increaseIsBetter, failOnBaseline);
    return Definition(definition);
}
/// <summary>
/// Benchmarks the individual phases of collision-layer construction, including three
/// variants of the Part 4 bucket sort.
/// Fixes: Part4JobNew previously ran on <c>layerBetter</c> (already sorted by
/// Part4JobBetter) while <c>layerNew</c> was allocated but never used — it now runs on
/// its own copy; the TempJob arrays (layerIndices, aabbs, transforms) are now disposed;
/// removed the unused local `SampleUnit unit`.
/// </summary>
public void BuildLayerPerformanceTests(int count, uint seed, CollisionLayerSettings settings)
{
    Random random = new Random(seed);
    random.InitState(seed);

    World world = new World("Test World");
    var system = world.CreateSystem<FixedSimulationSystemGroup>();
    var eq = world.EntityManager.CreateEntityQuery(typeof(Translation), typeof(Rotation), typeof(Collider), typeof(LocalToWorld));
    var archetype = world.EntityManager.CreateArchetype(typeof(Translation), typeof(Rotation), typeof(Collider), typeof(LocalToWorld));
    world.EntityManager.CreateEntity(archetype, count, Allocator.Temp);
    new GenerateJob { random = new Random(seed), aabb = settings.worldAABB }.Run(eq);

    var typeGroup = BuildCollisionLayerP4.BuildLayerChunkTypeGroup(system);
    var layer = new TestCollisionLayer(count, settings, Allocator.TempJob);
    var layerIndices = new NativeArray<int>(count, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
    var aabbs = new NativeArray<Aabb>(count, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
    var transforms = new NativeArray<RigidTransform>(count, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);

    Measure.Method(() =>
    {
        new BuildCollisionLayerP4.Part1FromQueryJob { layer = layer, aabbs = aabbs, typeGroup = typeGroup, layerIndices = layerIndices, rigidTransforms = transforms }.Run(eq);
    })
    .SampleGroup("Part1")
    .WarmupCount(0)
    .MeasurementCount(1)
    .Run();

    Measure.Method(() =>
    {
        new BuildCollisionLayerP4.Part2Job { layer = layer, layerIndices = layerIndices }.Run();
    })
    .SampleGroup("Part2")
    .WarmupCount(0)
    .MeasurementCount(1)
    .Run();

    Measure.Method(() =>
    {
        new BuildCollisionLayerP4.Part3FromQueryJob { layer = layer, aabbs = aabbs, layerIndices = layerIndices, rigidTranforms = transforms, typeGroup = typeGroup }.Run(eq);
    })
    .SampleGroup("Part3")
    .WarmupCount(0)
    .MeasurementCount(1)
    .Run();

    // Copy the unsorted layer so each Part 4 variant sorts identical input.
    var layerBetter = new TestCollisionLayer(layer, Allocator.TempJob);
    var layerNew = new TestCollisionLayer(layer, Allocator.TempJob);

    Measure.Method(() =>
    {
        new BuildCollisionLayerP4.Part4Job { layer = layer }.Run(layer.BucketCount);
        //}.Schedule(layer.BucketCount, 1).Complete();
    })
    .SampleGroup("Part4Original")
    .WarmupCount(0)
    .MeasurementCount(1)
    .Run();

    Measure.Method(() =>
    {
        new BuildCollisionLayerP4.Part4JobBetter { layer = layerBetter }.Run(layerBetter.BucketCount);
        //}.Schedule(layer.BucketCount, 1).Complete();
    })
    .SampleGroup("Part4Better")
    .WarmupCount(0)
    .MeasurementCount(1)
    .Run();

    Measure.Method(() =>
    {
        // Bug fix: this previously ran on layerBetter (already sorted above),
        // leaving layerNew unused and skewing the Part4New measurement.
        new BuildCollisionLayerP4.Part4JobNew { layer = layerNew }.Run(layerNew.BucketCount);
        //}.Schedule(layer.BucketCount, 1).Complete();
    })
    .SampleGroup("Part4New")
    .WarmupCount(0)
    .MeasurementCount(1)
    .Run();

    // Dispose everything allocated with TempJob (the arrays previously leaked).
    transforms.Dispose();
    aabbs.Dispose();
    layerIndices.Dispose();
    layerNew.Dispose();
    layerBetter.Dispose();
    layer.Dispose();
    world.Dispose();
}
/// <summary>
/// Converts a group name into a <see cref="SampleGroup"/>, defaulting to an
/// undefined unit with lower-is-better semantics.
/// </summary>
public static SampleGroup ToSampleGroup(this string groupName, SampleUnit unit = SampleUnit.Undefined, bool increaseIsBetter = false)
{
    return new SampleGroup(groupName, unit, increaseIsBetter);
}
// Verifies that a SampleGroupDefinition carries the expected values.
// Fixes: NUnit's Assert.AreEqual is (expected, actual) - the original passed them
// reversed; also corrected the 'threshhold' parameter-name typo (private helper,
// so renaming is safe). Threshold is compared with a 0.001 tolerance since it is a double.
private static void AssertDefinition(SampleGroupDefinition definition, string name, SampleUnit sampleUnit, AggregationType aggregationType, double percentile, double threshold, bool increaseIsBetter)
{
    Assert.AreEqual(name, definition.Name);
    Assert.AreEqual(sampleUnit, definition.SampleUnit);
    Assert.AreEqual(aggregationType, definition.AggregationType);
    Assert.AreEqual(percentile, definition.Percentile);
    Assert.AreEqual(threshold, definition.Threshold, 0.001D);
    Assert.AreEqual(increaseIsBetter, definition.IncreaseIsBetter);
}
/// <summary>
/// Converts <paramref name="value"/> between sample units.
/// </summary>
/// <returns>The value expressed in the target unit.</returns>
public static double ConvertSample(SampleUnit from, SampleUnit to, double value)
{
    // Fast path added for consistency with the sibling overload: when the units
    // match, GetRatio would return exactly 1.0 and value * 1.0 == value, so this
    // short-circuit cannot change any result.
    if (from.Equals(to))
    {
        return value;
    }

    var ratio = GetRatio(from, to);
    return value * ratio;
}
/// <summary>
/// Benchmarks the full collision-layer build pipeline end-to-end: the old P4 pipeline,
/// the old pipeline with the improved Part 4, the new pipeline, and the new pipeline
/// with Unity's Part 4 sort. Each variant builds into a freshly allocated layer.
/// Fixes: every Persistent-allocated NativeArray is now disposed (layerIndices, aabbs,
/// transforms, xmins, and aos previously leaked); removed four unused `SampleUnit unit`
/// locals.
/// </summary>
public void BuildLayerPerformanceTests(int count, uint seed, CollisionLayerSettings settings)
{
    Random random = new Random(seed);
    random.InitState(seed);

    World world = new World("Test World");
    var system = world.CreateSystem<FixedSimulationSystemGroup>();
    var eq = world.EntityManager.CreateEntityQuery(typeof(Translation), typeof(Rotation), typeof(Collider), typeof(LocalToWorld));
    var archetype = world.EntityManager.CreateArchetype(typeof(Translation), typeof(Rotation), typeof(Collider), typeof(LocalToWorld));
    var e = world.EntityManager.CreateEntity(archetype, count, Allocator.Persistent);
    e.Dispose();
    new GenerateJob { random = new Random(seed), aabb = settings.worldAABB }.Run(eq);

    // OldVersion: original P4 pipeline.
    {
        var typeGroup = BuildCollisionLayerP4.BuildLayerChunkTypeGroup(system);
        var layer = new TestCollisionLayer(count, settings, Allocator.Persistent);
        var layerIndices = new NativeArray<int>(count, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
        var aabbs = new NativeArray<Aabb>(count, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
        var transforms = new NativeArray<RigidTransform>(count, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);

        Measure.Method(() =>
        {
            new BuildCollisionLayerP4.Part1FromQueryJob { layer = layer, aabbs = aabbs, typeGroup = typeGroup, layerIndices = layerIndices, rigidTransforms = transforms }.Run(eq);
            new BuildCollisionLayerP4.Part2Job { layer = layer, layerIndices = layerIndices }.Run();
            new BuildCollisionLayerP4.Part3FromQueryJob { layer = layer, aabbs = aabbs, layerIndices = layerIndices, rigidTranforms = transforms, typeGroup = typeGroup }.Run(eq);
            new BuildCollisionLayerP4.Part4Job { layer = layer }.Run(layer.BucketCount);
        })
        .SampleGroup("OldVersion")
        .WarmupCount(0)
        .MeasurementCount(1)
        .Run();

        // Dispose all Persistent allocations for this variant (arrays previously leaked).
        transforms.Dispose();
        aabbs.Dispose();
        layerIndices.Dispose();
        layer.Dispose();
    }

    // OldVersionBetter: old pipeline with the improved Part 4 sort.
    {
        var typeGroup = BuildCollisionLayerP4.BuildLayerChunkTypeGroup(system);
        var layer = new TestCollisionLayer(count, settings, Allocator.Persistent);
        var layerIndices = new NativeArray<int>(count, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
        var aabbs = new NativeArray<Aabb>(count, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
        var transforms = new NativeArray<RigidTransform>(count, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);

        Measure.Method(() =>
        {
            new BuildCollisionLayerP4.Part1FromQueryJob { layer = layer, aabbs = aabbs, typeGroup = typeGroup, layerIndices = layerIndices, rigidTransforms = transforms }.Run(eq);
            new BuildCollisionLayerP4.Part2Job { layer = layer, layerIndices = layerIndices }.Run();
            new BuildCollisionLayerP4.Part3FromQueryJob { layer = layer, aabbs = aabbs, layerIndices = layerIndices, rigidTranforms = transforms, typeGroup = typeGroup }.Run(eq);
            new BuildCollisionLayerP4.Part4JobBetter { layer = layer }.Run(layer.BucketCount);
        })
        .SampleGroup("OldVersionBetter")
        .WarmupCount(0)
        .MeasurementCount(1)
        .Run();

        transforms.Dispose();
        aabbs.Dispose();
        layerIndices.Dispose();
        layer.Dispose();
    }

    // NewVersion: five-part pipeline with AoS collider data and a dedicated sort.
    {
        var typeGroup = BuildCollisionLayerOldVsNew.BuildLayerChunkTypeGroup(system);
        var layer = new TestCollisionLayer(count, settings, Allocator.Persistent);
        var layerIndices = new NativeArray<int>(count, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
        var remapSrcArray = new NativeArray<int>(count, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
        var xmins = new NativeArray<float>(count, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
        var aos = new NativeArray<BuildCollisionLayerOldVsNew.ColliderAoSData>(count, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);

        Measure.Method(() =>
        {
            new BuildCollisionLayerOldVsNew.Part1FromQueryJob { layer = layer, typeGroup = typeGroup, layerIndices = layerIndices, xmins = xmins, colliderAoS = aos }.Run(eq);
            new BuildCollisionLayerOldVsNew.Part2Job { layer = layer, layerIndices = layerIndices }.Run();
            new BuildCollisionLayerOldVsNew.Part3Job { layerIndices = layerIndices, unsortedSrcIndices = remapSrcArray }.Run(count);
            new BuildCollisionLayerOldVsNew.Part4Job { unsortedSrcIndices = remapSrcArray, xmins = xmins, bucketStartAndCounts = layer.bucketStartsAndCounts }.Run(layer.BucketCount);
            new BuildCollisionLayerOldVsNew.Part5Job { layer = layer, colliderAoS = aos, remapSrcIndices = remapSrcArray }.Run(count);
        })
        .SampleGroup("NewVersion")
        .WarmupCount(0)
        .MeasurementCount(1)
        .Run();

        aos.Dispose();
        xmins.Dispose();
        remapSrcArray.Dispose();
        layerIndices.Dispose();
        layer.Dispose();
    }

    // NewVersionUnity: same as NewVersion but with Unity's sort in Part 4.
    {
        var typeGroup = BuildCollisionLayerOldVsNew.BuildLayerChunkTypeGroup(system);
        var layer = new TestCollisionLayer(count, settings, Allocator.Persistent);
        var layerIndices = new NativeArray<int>(count, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
        var remapSrcArray = new NativeArray<int>(count, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
        var xmins = new NativeArray<float>(count, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
        var aos = new NativeArray<BuildCollisionLayerOldVsNew.ColliderAoSData>(count, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);

        Measure.Method(() =>
        {
            new BuildCollisionLayerOldVsNew.Part1FromQueryJob { layer = layer, typeGroup = typeGroup, layerIndices = layerIndices, xmins = xmins, colliderAoS = aos }.Run(eq);
            new BuildCollisionLayerOldVsNew.Part2Job { layer = layer, layerIndices = layerIndices }.Run();
            new BuildCollisionLayerOldVsNew.Part3Job { layerIndices = layerIndices, unsortedSrcIndices = remapSrcArray }.Run(count);
            new BuildCollisionLayerOldVsNew.Part4JobUnity { unsortedSrcIndices = remapSrcArray, xmins = xmins, bucketStartAndCounts = layer.bucketStartsAndCounts }.Run(layer.BucketCount);
            new BuildCollisionLayerOldVsNew.Part5Job { layer = layer, colliderAoS = aos, remapSrcIndices = remapSrcArray }.Run(count);
        })
        .SampleGroup("NewVersionUnity")
        .WarmupCount(0)
        .MeasurementCount(1)
        .Run();

        aos.Dispose();
        xmins.Dispose();
        remapSrcArray.Dispose();
        layerIndices.Dispose();
        layer.Dispose();
    }

    world.Dispose();
}
/// <summary>
/// Creates a sample group with the given name, measurement unit, and comparison
/// direction. This variant performs no validation of its arguments.
/// </summary>
public SampleGroup(string name, SampleUnit unit, bool increaseIsBetter)
{
    // Plain field initialization; assignments are independent so order is immaterial.
    IncreaseIsBetter = increaseIsBetter;
    Unit = unit;
    Name = name;
}