/// <summary>
/// Fuzz test comparing QuickQueue against the BCL Queue through a long randomized sequence of
/// enqueues, dequeues, capacity expansions, and compactions, then verifies FIFO order matches.
/// </summary>
/// <param name="pool">Pool providing the QuickQueue's backing memory; the queue is disposed back into it.</param>
public static void TestQueueResizing(IUnmanagedMemoryPool pool)
{
    // Fixed seed keeps the test deterministic and reproducible.
    Random random = new Random(5);
    var queue = new QuickQueue<int>(4, pool);
    Queue<int> controlQueue = new Queue<int>();
    for (int iterationIndex = 0; iterationIndex < 1000000; ++iterationIndex)
    {
        if (random.NextDouble() < 0.7)
        {
            queue.Enqueue(iterationIndex, pool);
            controlQueue.Enqueue(iterationIndex);
        }
        // Evaluate the random roll first so the RNG draw sequence is identical to the unguarded
        // version; the Count guard prevents dequeuing from an empty queue, which would throw
        // InvalidOperationException on the control queue instead of a meaningful assert failure.
        if (random.NextDouble() < 0.2 && controlQueue.Count > 0)
        {
            queue.Dequeue();
            controlQueue.Dequeue();
        }
        // Periodically stress resizing in both directions.
        if (iterationIndex % 1000 == 0)
        {
            queue.EnsureCapacity(queue.Count * 3, pool);
        }
        else if (iterationIndex % 7777 == 0)
        {
            queue.Compact(pool);
        }
    }
    Debug.Assert(queue.Count == controlQueue.Count, "Queue and control queue counts should match after the randomized operations.");
    while (queue.Count > 0)
    {
        var a = queue.Dequeue();
        var b = controlQueue.Dequeue();
        Debug.Assert(a == b, "Dequeued elements should match the control queue's FIFO order.");
        Debug.Assert(queue.Count == controlQueue.Count, "Counts should stay in sync while draining.");
    }
    queue.Dispose(pool);
}
/// <summary>
/// Gathers the treelet rooted at the given node: visited internal nodes are enqueued into
/// internalNodes, and the children at the recursion frontier become the subtrees list.
/// Negative child indices are leaves (see the >= 0 test below) and go straight to subtrees.
/// </summary>
/// <param name="nodeIndex">Index of the treelet root node.</param>
/// <param name="remainingDepth">How many levels below this node may still be recursed into.</param>
/// <param name="subtrees">Accumulates the indices of collected subtree roots and leaves.</param>
/// <param name="internalNodes">Accumulates every internal node visited by the collection.</param>
/// <param name="treeletCost">Sum of bounds metrics of the internal children visited within the depth limit.</param>
unsafe void CollectSubtreesForNodeDirect(int nodeIndex, int remainingDepth, ref QuickList<int> subtrees, ref QuickQueue<int> internalNodes, out float treeletCost)
{
    internalNodes.Enqueue(nodeIndex);
    treeletCost = 0;
    var node = nodes + nodeIndex;
    // Children and bounds are laid out contiguously starting at ChildA/A, so pointers to the
    // first member act as arrays indexed by child slot.
    var children = &node->ChildA;
    var bounds = &node->A;
    --remainingDepth;
    if (remainingDepth >= 0)
    {
        for (int i = 0; i < node->ChildCount; ++i)
        {
            if (children[i] >= 0)
            {
                // Internal child: its bounds contribute to the treelet cost and it is recursed into.
                treeletCost += ComputeBoundsMetric(ref bounds[i]);
                float childCost;
                CollectSubtreesForNodeDirect(children[i], remainingDepth, ref subtrees, ref internalNodes, out childCost);
                treeletCost += childCost;
            }
            else
            {
                //It's a leaf, immediately add it to subtrees.
                subtrees.Add(children[i]);
            }
        }
    }
    else
    {
        //Recursion has bottomed out. Add every child.
        //Once again, note that the treelet costs of these nodes are not considered, even if they are internal.
        //That's because the subtree internal nodes cannot change size due to the refinement.
        for (int i = 0; i < node->ChildCount; ++i)
        {
            subtrees.Add(children[i]);
        }
    }
}
/// <summary>
/// Collects the treelet rooted at nodeIndex: every internal node visited is enqueued into
/// internalNodes, and the children at the recursion frontier are appended to subtrees.
/// A negative child index denotes a leaf (per the >= 0 check) and is added to subtrees directly.
/// </summary>
/// <param name="nodeIndex">Index of the treelet root node.</param>
/// <param name="remainingDepth">Remaining levels of recursion allowed below this node.</param>
/// <param name="subtrees">Output list of collected subtree roots and leaves.</param>
/// <param name="internalNodes">Output queue of internal nodes visited during collection.</param>
/// <param name="treeletCost">Sum of the bounds metrics of internal children visited within the depth limit.</param>
unsafe void CollectSubtreesForNodeDirect(int nodeIndex, int remainingDepth, ref QuickList<int> subtrees, ref QuickQueue<int> internalNodes, out float treeletCost)
{
    internalNodes.Enqueue(nodeIndex);
    treeletCost = 0;
    var node = nodes + nodeIndex;
    // ChildA/A are the first members of contiguous child/bounds sequences, so these pointers
    // behave as per-child-slot arrays.
    var children = &node->ChildA;
    var bounds = &node->A;
    --remainingDepth;
    if (remainingDepth >= 0)
    {
        for (int i = 0; i < node->ChildCount; ++i)
        {
            if (children[i] >= 0)
            {
                // Internal child: contributes its bounds metric to the treelet cost, then recurse.
                treeletCost += ComputeBoundsMetric(ref bounds[i]);
                float childCost;
                CollectSubtreesForNodeDirect(children[i], remainingDepth, ref subtrees, ref internalNodes, out childCost);
                treeletCost += childCost;
            }
            else
            {
                //It's a leaf, immediately add it to subtrees.
                subtrees.Add(children[i]);
            }
        }
    }
    else
    {
        //Recursion has bottomed out. Add every child.
        //Once again, note that the treelet costs of these nodes are not considered, even if they are internal.
        //That's because the subtree internal nodes cannot change size due to the refinement.
        for (int i = 0; i < node->ChildCount; ++i)
        {
            subtrees.Add(children[i]);
        }
    }
}
/// <summary>
/// Fuzz test comparing QuickQueue against the BCL Queue through a long randomized sequence of
/// enqueues, dequeues, capacity expansions, and compactions, then verifies FIFO order matches.
/// </summary>
public static void TestQueueResizing()
{
    // Fixed seed keeps the test deterministic and reproducible.
    Random random = new Random(5);
    UnsafeBufferPool<int> pool = new UnsafeBufferPool<int>();
    QuickQueue<int> queue = new QuickQueue<int>(pool, 2);
    Queue<int> controlQueue = new Queue<int>();
    for (int iterationIndex = 0; iterationIndex < 1000000; ++iterationIndex)
    {
        if (random.NextDouble() < 0.7)
        {
            queue.Enqueue(iterationIndex);
            controlQueue.Enqueue(iterationIndex);
        }
        // Roll the RNG first so the draw sequence matches the unguarded version; the Count guard
        // prevents dequeuing from an empty queue, which would throw InvalidOperationException on
        // the control queue rather than reporting a meaningful assertion failure.
        if (random.NextDouble() < 0.2 && controlQueue.Count > 0)
        {
            queue.Dequeue();
            controlQueue.Dequeue();
        }
        // Periodically stress resizing in both directions.
        if (iterationIndex % 1000 == 0)
        {
            queue.EnsureCapacity(queue.Count * 3);
        }
        else if (iterationIndex % 7777 == 0)
        {
            queue.Compact();
        }
    }
    // AreEqual reports both expected and actual values on failure, unlike IsTrue(a == b).
    Assert.AreEqual(controlQueue.Count, queue.Count);
    while (queue.Count > 0)
    {
        var a = queue.Dequeue();
        var b = controlQueue.Dequeue();
        Assert.AreEqual(b, a);
        Assert.AreEqual(controlQueue.Count, queue.Count);
    }
}
/// <summary>
/// Handles the READ SUBCODE Q CD command: queues one audio-status byte followed by the nine
/// subchannel Q bytes (status, track, index, relative MSF, absolute MSF) and enters the data-in phase.
/// </summary>
void CommandReadSubcodeQ()
{
    // While audio is playing or paused, report the audio playback position; otherwise the data read position.
    bool playing = pce.CDAudio.Mode != CDAudio.CDAudioMode_Stopped;
    int sectorNum = playing ? pce.CDAudio.CurrentSector : CurrentReadingSector;
    DataIn.Clear();
    // First response byte: audio status code (0 = playing, 2 = paused, 3 = stopped).
    switch (pce.CDAudio.Mode)
    {
        case CDAudio.CDAudioMode_Playing: DataIn.Enqueue(0); break;
        case CDAudio.CDAudioMode_Paused: DataIn.Enqueue(2); break;
        case CDAudio.CDAudioMode_Stopped: DataIn.Enqueue(3); break;
    }
    // Pull the subchannel Q data for the chosen sector, then queue its fields in protocol order.
    DiscSectorReader.ReadLBA_SubQ(sectorNum, out subchannelQ);
    DataIn.Enqueue(subchannelQ.q_status); //status (control and q-mode; control is useful to know if it's a data or audio track)
    DataIn.Enqueue(subchannelQ.q_tno.BCDValue); // track //zero 03-jul-2015 - did I adapt this right>
    DataIn.Enqueue(subchannelQ.q_index.BCDValue); // index //zero 03-jul-2015 - did I adapt this right>
    DataIn.Enqueue(subchannelQ.min.BCDValue); // M(rel)
    DataIn.Enqueue(subchannelQ.sec.BCDValue); // S(rel)
    DataIn.Enqueue(subchannelQ.frame.BCDValue); // F(rel)
    DataIn.Enqueue(subchannelQ.ap_min.BCDValue); // M(abs)
    DataIn.Enqueue(subchannelQ.ap_sec.BCDValue); // S(abs)
    DataIn.Enqueue(subchannelQ.ap_frame.BCDValue); // F(abs)
    SetPhase(BusPhase_DataIn);
}
/// <summary>
/// Handles the READ SUBCODE Q CD command: queues one audio-status byte followed by the nine
/// subchannel Q bytes (status, track, index, relative MSF, absolute MSF) and enters the data-in phase.
/// </summary>
void CommandReadSubcodeQ()
{
    // While audio is playing or paused, report the audio playback position; otherwise the data read position.
    bool playing = pce.CDAudio.Mode != CDAudio.CDAudioMode_Stopped;
    var sectorEntry = disc.ReadLBA_SectorEntry(playing ? pce.CDAudio.CurrentSector : CurrentReadingSector);
    DataIn.Clear();
    // First response byte: audio status code (0 = playing, 2 = paused, 3 = stopped).
    switch (pce.CDAudio.Mode)
    {
        case CDAudio.CDAudioMode_Playing: DataIn.Enqueue(0); break;
        case CDAudio.CDAudioMode_Paused: DataIn.Enqueue(2); break;
        case CDAudio.CDAudioMode_Stopped: DataIn.Enqueue(3); break;
    }
    // Queue the sector's subchannel Q fields in protocol order.
    DataIn.Enqueue(sectorEntry.q_status); // I do not know what status is
    DataIn.Enqueue(sectorEntry.q_tno.BCDValue); // track
    DataIn.Enqueue(sectorEntry.q_index.BCDValue); // index
    DataIn.Enqueue(sectorEntry.q_min.BCDValue); // M(rel)
    DataIn.Enqueue(sectorEntry.q_sec.BCDValue); // S(rel)
    DataIn.Enqueue(sectorEntry.q_frame.BCDValue); // F(rel)
    DataIn.Enqueue(sectorEntry.q_amin.BCDValue); // M(abs)
    DataIn.Enqueue(sectorEntry.q_asec.BCDValue); // S(abs)
    DataIn.Enqueue(sectorEntry.q_aframe.BCDValue); // F(abs)
    SetPhase(BusPhase_DataIn);
}
/// <summary>
/// Handles the READ SUBCODE Q CD command: queues one audio-status byte followed by the nine
/// subchannel Q bytes (status, track, index, relative MSF, absolute MSF) and enters the data-in phase.
/// </summary>
void CommandReadSubcodeQ()
{
    // While audio is playing or paused, report the audio playback position; otherwise the data read position.
    bool playing = pce.CDAudio.Mode != CDAudio.CDAudioMode_Stopped;
    int sectorNum = playing ? pce.CDAudio.CurrentSector : CurrentReadingSector;
    DataIn.Clear();
    // First response byte: audio status code (0 = playing, 2 = paused, 3 = stopped).
    switch (pce.CDAudio.Mode)
    {
        case CDAudio.CDAudioMode_Playing: DataIn.Enqueue(0); break;
        case CDAudio.CDAudioMode_Paused: DataIn.Enqueue(2); break;
        case CDAudio.CDAudioMode_Stopped: DataIn.Enqueue(3); break;
    }
    subcodeReader.ReadLBA_SubchannelQ(sectorNum, ref subchannelQ);
    DataIn.Enqueue(subchannelQ.q_status); // I do not know what status is
    // NOTE(review): q_tno and q_index are enqueued raw here while the MSF fields below use
    // .BCDValue; confirm these two fields are already BCD-encoded in this struct variant.
    DataIn.Enqueue(subchannelQ.q_tno); // track
    DataIn.Enqueue(subchannelQ.q_index); // index
    DataIn.Enqueue(subchannelQ.min.BCDValue); // M(rel)
    DataIn.Enqueue(subchannelQ.sec.BCDValue); // S(rel)
    DataIn.Enqueue(subchannelQ.frame.BCDValue); // F(rel)
    DataIn.Enqueue(subchannelQ.ap_min.BCDValue); // M(abs)
    DataIn.Enqueue(subchannelQ.ap_sec.BCDValue); // S(abs)
    DataIn.Enqueue(subchannelQ.ap_frame.BCDValue); // F(abs)
    SetPhase(BusPhase_DataIn);
}
/// <summary>
/// Randomized stress test: mirrors every QuickQueue operation on a BCL Queue while repeatedly
/// growing and compacting the QuickQueue, then drains both and checks the FIFO contents agree.
/// </summary>
public static void TestQueueResizing()
{
    var rng = new Random(5); // fixed seed: deterministic run
    var pool = new UnsafeBufferPool<int>();
    var testQueue = new QuickQueue<int>(pool, 2);
    var reference = new Queue<int>();
    int i = 0;
    while (i < 1000000)
    {
        // ~70% of iterations enqueue into both queues.
        if (rng.NextDouble() < 0.7)
        {
            testQueue.Enqueue(i);
            reference.Enqueue(i);
        }
        // ~20% of iterations dequeue from both queues.
        if (rng.NextDouble() < 0.2)
        {
            testQueue.Dequeue();
            reference.Dequeue();
        }
        // Alternate resize stressors: oversize every 1000 iterations, compact every 7777.
        if (i % 1000 == 0)
        {
            testQueue.EnsureCapacity(testQueue.Count * 3);
        }
        else if (i % 7777 == 0)
        {
            testQueue.Compact();
        }
        ++i;
    }
    Assert.IsTrue(testQueue.Count == reference.Count);
    // Drain both queues in lockstep; order and counts must match at every step.
    while (testQueue.Count > 0)
    {
        var fromTest = testQueue.Dequeue();
        var fromReference = reference.Dequeue();
        Assert.IsTrue(fromTest == fromReference);
        Assert.IsTrue(testQueue.Count == reference.Count);
    }
}
/// <summary>
/// Finds all leaf-leaf overlaps in the tree using streaming queries: batches of leaves
/// (StreamingLeafGroups of Vector&lt;int&gt;.Count leaves) descend the tree together, splitting
/// into new targets per intersected child. Small targets fall back to per-leaf recursive queries.
/// Each overlap is reported once, with A &lt; B by leaf index.
/// </summary>
/// <param name="results">Receives one Overlap per intersecting leaf pair.</param>
public unsafe void GetSelfOverlapsViaStreamingQueries<TResultList>(ref TResultList results) where TResultList : IList<Overlap>
{
    //var startTime = Stopwatch.GetTimestamp();
    // Seed a root target containing every leaf in the tree, packed into SIMD-width groups.
    var rootTarget = new StreamingTarget { LeafGroups = new QuickList<StreamingLeafGroup>(BufferPools<StreamingLeafGroup>.Locking, BufferPool<StreamingLeafGroup>.GetPoolIndex(LeafCount)) };
    rootTarget.LeafGroups.Add(new StreamingLeafGroup());
    for (int i = 0; i < LeafCount; ++i)
    {
        BoundingBoxWide leafWide;
        BoundingBoxWide.GetBoundingBox(ref Levels[leaves[i].LevelIndex].Nodes[leaves[i].NodeIndex].BoundingBoxes, leaves[i].ChildIndex, out leafWide);
        var leafIndexWide = new Vector<int>(i);
        rootTarget.Add(ref leafIndexWide, ref leafWide, singleMasks);
    }
    //var endTime = Stopwatch.GetTimestamp();
    //Console.WriteLine($"Initial target construction time: {(endTime - startTime) / (double)Stopwatch.Frequency}");
    QuickQueue<StreamingTarget> targets = new QuickQueue<StreamingTarget>(BufferPools<StreamingTarget>.Locking, BufferPool<StreamingLeafGroup>.GetPoolIndex(LeafCount));
    targets.Enqueue(ref rootTarget);
    // Scratch list reused by the fallback recursive queries; Count is reset after each use.
    QuickList<int> fallbackResults = new QuickList<int>(BufferPools<int>.Locking);
    StreamingTarget target;
    // LIFO processing (TryDequeueLast) keeps the working set of targets small.
    while (targets.TryDequeueLast(out target))
    {
        const int GroupFallbackThreshold = 2; //unfortunately, this should be as high as possible right now because the regular query is faster, period.
        if (target.LeafGroups.Count <= GroupFallbackThreshold)
        {
            // Fallback path: run an ordinary recursive query for each leaf in the target.
            // Full groups first; the possibly-partial last group is handled separately below.
            var max = target.LastLeavesCount == Vector<int>.Count ? target.LeafGroups.Count : target.LeafGroups.Count - 1;
            for (int leafGroupIndex = 0; leafGroupIndex < max; ++leafGroupIndex)
            {
                for (int leafInGroupIndex = 0; leafInGroupIndex < Vector<int>.Count; ++leafInGroupIndex)
                {
                    BoundingBoxWide leafWide;
                    BoundingBoxWide.GetBoundingBox(ref target.LeafGroups.Elements[leafGroupIndex].BoundingBoxes, leafInGroupIndex, out leafWide);
                    TestRecursive(target.LevelIndex, target.NodeIndex, ref leafWide, ref fallbackResults);
                    for (int resultIndex = 0; resultIndex < fallbackResults.Count; ++resultIndex)
                    {
                        var queryLeafIndex = target.LeafGroups.Elements[leafGroupIndex].Leaves[leafInGroupIndex];
                        // Only report pairs once: the lower-index leaf owns the overlap.
                        if (queryLeafIndex < fallbackResults.Elements[resultIndex])
                        {
                            results.Add(new Overlap { A = queryLeafIndex, B = fallbackResults.Elements[resultIndex] });
                        }
                    }
                    fallbackResults.Count = 0;
                }
            }
            if (target.LastLeavesCount < Vector<int>.Count)
            {
                // Partial final group: same query, limited to LastLeavesCount occupied lanes.
                var leafGroupIndex = target.LeafGroups.Count - 1;
                for (int leafInGroupIndex = 0; leafInGroupIndex < target.LastLeavesCount; ++leafInGroupIndex)
                {
                    BoundingBoxWide leafWide;
                    BoundingBoxWide.GetBoundingBox(ref target.LeafGroups.Elements[leafGroupIndex].BoundingBoxes, leafInGroupIndex, out leafWide);
                    TestRecursive(target.LevelIndex, target.NodeIndex, ref leafWide, ref fallbackResults);
                    for (int resultIndex = 0; resultIndex < fallbackResults.Count; ++resultIndex)
                    {
                        var queryLeafIndex = target.LeafGroups.Elements[leafGroupIndex].Leaves[leafInGroupIndex];
                        if (queryLeafIndex < fallbackResults.Elements[resultIndex])
                        {
                            results.Add(new Overlap { A = queryLeafIndex, B = fallbackResults.Elements[resultIndex] });
                        }
                    }
                    fallbackResults.Count = 0;
                }
            }
        }
        else
        {
            var node = Levels[target.LevelIndex].Nodes[target.NodeIndex];
            //Test each node child against all of the leaves for this node.
            for (int nodeChildIndex = 0; nodeChildIndex < Vector<int>.Count; ++nodeChildIndex)
            {
                // -1 marks an empty child slot.
                if (node.Children[nodeChildIndex] == -1)
                {
                    continue;
                }
                BoundingBoxWide nodeChildWide;
                BoundingBoxWide.GetBoundingBox(ref node.BoundingBoxes, nodeChildIndex, out nodeChildWide);
                if (node.Children[nodeChildIndex] >= 0)
                {
                    //Internal node. Can spawn more targets.
                    StreamingTarget newTarget = new StreamingTarget { LevelIndex = target.LevelIndex + 1, NodeIndex = node.Children[nodeChildIndex], LeafGroups = new QuickList<StreamingLeafGroup>(BufferPools<StreamingLeafGroup>.Locking, BufferPool<StreamingLeafGroup>.GetPoolIndex(target.LeafGroups.Count)) };
                    newTarget.LeafGroups.Add(new StreamingLeafGroup());
                    // Carry forward only the leaves whose bounds intersect this child.
                    for (int leafGroupIndex = 0; leafGroupIndex < target.LeafGroups.Count; ++leafGroupIndex)
                    {
                        Vector<int> intersectionMask;
                        BoundingBoxWide.Intersects(ref nodeChildWide, ref target.LeafGroups.Elements[leafGroupIndex].BoundingBoxes, out intersectionMask);
                        int leafCountInGroup = leafGroupIndex == target.LeafGroups.Count - 1 ? target.LastLeavesCount : Vector<int>.Count;
                        for (int leafIndexInGroup = 0; leafIndexInGroup < leafCountInGroup; ++leafIndexInGroup)
                        {
                            // A negative mask lane signals intersection for that leaf.
                            if (intersectionMask[leafIndexInGroup] < 0)
                            {
                                newTarget.Add(ref target, leafGroupIndex, leafIndexInGroup, singleMasks);
                            }
                        }
                    }
                    targets.Enqueue(ref newTarget);
                }
                else
                {
                    //Leaf node.
                    var nodeLeafIndex = Encode(node.Children[nodeChildIndex]);
                    for (int leafGroupIndex = 0; leafGroupIndex < target.LeafGroups.Count; ++leafGroupIndex)
                    {
                        Vector<int> intersectionMask;
                        BoundingBoxWide.Intersects(ref nodeChildWide, ref target.LeafGroups.Elements[leafGroupIndex].BoundingBoxes, out intersectionMask);
                        int leafCountInGroup = leafGroupIndex == target.LeafGroups.Count - 1 ? target.LastLeavesCount : Vector<int>.Count;
                        for (int leafIndexInGroup = 0; leafIndexInGroup < leafCountInGroup; ++leafIndexInGroup)
                        {
                            if (intersectionMask[leafIndexInGroup] < 0)
                            {
                                var leafIndex = target.LeafGroups[leafGroupIndex].Leaves[leafIndexInGroup];
                                if (leafIndex < nodeLeafIndex) //The other leaf will also find a collision!
                                {
                                    results.Add(new Overlap { A = leafIndex, B = nodeLeafIndex });
                                }
                            }
                        }
                    }
                }
            }
        }
        target.LeafGroups.Count = 0; //Don't bother forcing a clear on these. TODO: buffer safety check disable
        target.LeafGroups.Dispose();
    }
    targets.Dispose();
    fallbackResults.Dispose();
    //Console.WriteLine("Streaming Query based results:");
    //for (int i = 0; i < results.Count; ++i)
    //{
    //    Console.WriteLine($"{results[i].A}, {results[i].B}");
    //}
}