public IEnumerator TestPerformanceOnLargeGraphBudget10()
{
    var planGraph = PlanGraphUtility.BuildLattice(midLatticeDepth: 10);
    var nodeCount = planGraph.Size;
    var depthMap = new NativeHashMap<int, int>(nodeCount, Allocator.TempJob);
    var queue = new NativeQueue<StateHorizonPair<int>>(Allocator.TempJob);
    planGraph.GetExpandedDepthMap(0, depthMap, queue);

    var selectedUnexpandedStates = new NativeList<int>(1, Allocator.Persistent);
    var allExpandedStates = new NativeMultiHashMap<int, int>(1, Allocator.Persistent);

    yield return null;

    // Set up performance test
    Measure.Method(() =>
    {
        var selectJob = new SelectionJob<int, int>()
        {
            StateExpansionBudget = 10,
            RootStateKey = 0,
            StateDepthLookup = depthMap,
            StateInfoLookup = planGraph.StateInfoLookup,
            ActionLookup = planGraph.ActionLookup,
            ActionInfoLookup = planGraph.ActionInfoLookup,
            ResultingStateLookup = planGraph.ResultingStateLookup,
            StateTransitionInfoLookup = planGraph.StateTransitionInfoLookup,
            SelectedUnexpandedStates = selectedUnexpandedStates,
            AllSelectedStates = allExpandedStates
        };

        selectJob.Schedule().Complete();
    }).WarmupCount(1).MeasurementCount(1).IterationsPerMeasurement(1).CleanUp(() =>
    {
        depthMap.Clear();
        queue.Clear();
        planGraph.GetExpandedDepthMap(0, depthMap, queue);

        selectedUnexpandedStates.Clear();
        allExpandedStates.Clear();
    }).Run();

    queue.Dispose();
    depthMap.Dispose();
    planGraph.Dispose();
    selectedUnexpandedStates.Dispose();
    allExpandedStates.Dispose();

    // Check performance times
    PerformanceUtility.AssertRange(0.00, 5);
}
public IEnumerator TestPerformanceOnLargeGraphBudget10Parallel()
{
    var planGraph = PlanGraphUtility.BuildLattice(10);
    var nodeCount = planGraph.Size;
    var depthMap = new NativeHashMap<int, int>(nodeCount, Allocator.TempJob);
    var queue = new NativeQueue<StateHorizonPair<int>>(Allocator.TempJob);
    planGraph.GetExpandedDepthMap(0, depthMap, queue);

    int budget = 10;
    int size = math.min(budget, depthMap.Count());
    UnityEngine.Assertions.Assert.IsTrue(size > 0);

    var inputStates = new NativeList<int>(size, Allocator.TempJob);
    inputStates.Add(0);
    var inputBudgets = new NativeList<int>(size, Allocator.TempJob);
    inputBudgets.Add(budget);
    var outputStateBudgets = new NativeMultiHashMap<int, int>(size, Allocator.TempJob);
    var m_SelectedStateHorizons = new NativeMultiHashMap<int, int>(size, Allocator.TempJob);
    var m_SelectedUnexpandedStates = new NativeHashMap<int, byte>(size, Allocator.TempJob);

    // Determine max number of job iterations
    int maxDepth = 0;
    using (var depths = depthMap.GetValueArray(Allocator.Temp))
    {
        for (int i = 0; i < depths.Length; i++)
        {
            maxDepth = math.max(maxDepth, depths[i]);
        }
    }

    yield return null;

    // Set up performance test
    Measure.Method(() =>
    {
        JobHandle lastHandle = default;
        for (int iteration = 0; iteration <= maxDepth; iteration++)
        {
            // Selection job
            lastHandle = new ParallelSelectionJob<int, int>
            {
                StateDepthLookup = depthMap,
                StateInfoLookup = planGraph.StateInfoLookup,
                ActionInfoLookup = planGraph.ActionInfoLookup,
                ActionLookup = planGraph.ActionLookup,
                ResultingStateLookup = planGraph.ResultingStateLookup,
                StateTransitionInfoLookup = planGraph.StateTransitionInfoLookup,

                Horizon = iteration,
                InputStates = inputStates.AsDeferredJobArray(),
                InputBudgets = inputBudgets.AsDeferredJobArray(),

                OutputStateBudgets = outputStateBudgets.AsParallelWriter(),
                SelectedStateHorizons = m_SelectedStateHorizons.AsParallelWriter(),
                SelectedUnexpandedStates = m_SelectedUnexpandedStates.AsParallelWriter(),
            }.Schedule(inputStates, default, lastHandle);
public void MatchManyExistingStates()
{
    const int kRootState = 0;
    const int kActionCount = 1000;

    PlanGraph<int, StateInfo, int, ActionInfo, StateTransitionInfo> planGraph = default;
    NativeMultiHashMap<int, int> binnedStateKeys = default;
    NativeQueue<int> newStatesQueue = default;
    NativeList<StateTransitionInfoPair<int, int, StateTransitionInfo>> statesToProcess = default;
    NativeQueue<int> newStatesToDestroy = default;

    Measure.Method(() =>
    {
        var stateTransitionInfoLookup = planGraph.StateTransitionInfoLookup;
        var resultingStateLookup = planGraph.ResultingStateLookup;
        var expansionJob = new GraphExpansionJob<int, int, TestStateDataContext, int>
        {
            BinnedStateKeys = binnedStateKeys,
            NewStateTransitionInfoPairs = statesToProcess.AsDeferredJobArray(),
            ActionLookup = planGraph.ActionLookup.AsParallelWriter(),
            ActionInfoLookup = planGraph.ActionInfoLookup.AsParallelWriter(),
            StateTransitionInfoLookup = stateTransitionInfoLookup.AsParallelWriter(),
            ResultingStateLookup = resultingStateLookup.AsParallelWriter(),
            NewStates = newStatesQueue.AsParallelWriter(),
            PredecessorGraph = planGraph.PredecessorGraph.AsParallelWriter(),
            StateDataContext = new TestStateDataContext(),
            StatesToDestroy = newStatesToDestroy.AsParallelWriter(),
        };

        expansionJob.Schedule(statesToProcess, default).Complete();
    }).SetUp(() =>
    {
        // One root node and all children nodes of a single depth
        planGraph = PlanGraphUtility.BuildTree(kActionCount, 1, 1);
        planGraph.ExpandBy(kActionCount, kActionCount);

        newStatesQueue = new NativeQueue<int>(Allocator.TempJob);
        newStatesToDestroy = new NativeQueue<int>(Allocator.TempJob);

        // Extend graph by one depth with the same number of actions / resulting states that loop back on themselves
        statesToProcess = new NativeList<StateTransitionInfoPair<int, int, StateTransitionInfo>>(kActionCount, Allocator.TempJob);
        for (var i = 0; i < kActionCount; i++)
        {
            statesToProcess.Add(new StateTransitionInfoPair<int, int, StateTransitionInfo>(kRootState, i, i,
                new StateTransitionInfo() { Probability = 1, TransitionUtilityValue = 1 }));
        }

        binnedStateKeys = GetBinnedStateKeys(planGraph);
    }).CleanUp(() =>
    {
        planGraph.Dispose();
        newStatesQueue.Dispose();
        statesToProcess.Dispose();
        binnedStateKeys.Dispose();
        newStatesToDestroy.Dispose();
    }).WarmupCount(1).MeasurementCount(30).IterationsPerMeasurement(1).Run();

    PerformanceUtility.AssertRange(4.3, 6.25);
}
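// The SetUp step in MatchManyExistingStates relies on a GetBinnedStateKeys helper that is not
// shown in this excerpt. Below is a minimal sketch of such a helper, assuming state keys are
// binned by their hash code into a NativeMultiHashMap<int, int> (hash -> state key) so the
// expansion job can match candidate states against existing ones. The name
// GetBinnedStateKeysSketch and the bin-by-hash strategy are illustrative assumptions, not the
// package's actual helper.
static NativeMultiHashMap<int, int> GetBinnedStateKeysSketch(
    PlanGraph<int, StateInfo, int, ActionInfo, StateTransitionInfo> planGraph)
{
    var binnedStateKeys = new NativeMultiHashMap<int, int>(planGraph.Size, Allocator.TempJob);

    // Bin every known state key by its hash so duplicate candidate states can be looked up quickly.
    using (var stateKeys = planGraph.StateInfoLookup.GetKeyArray(Allocator.Temp))
    {
        for (var i = 0; i < stateKeys.Length; i++)
            binnedStateKeys.Add(stateKeys[i].GetHashCode(), stateKeys[i]);
    }

    return binnedStateKeys;
}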