static void time_dynamic_pq(int MAXID, int MAXCOUNT, int mod, int rounds, LocalProfiler profiler)
{
    profiler.Start("dynam_all");

    profiler.Start("dynam_initialize");
    DynamicPriorityQueue<TestDynamicNode> PQ_Dynamic = new DynamicPriorityQueue<TestDynamicNode>();
    MemoryPool<TestDynamicNode> Dynamic_Pool = new MemoryPool<TestDynamicNode>();
    SparseObjectList<TestDynamicNode> IDMap = new SparseObjectList<TestDynamicNode>(MAXID, MAXCOUNT);
    profiler.StopAndAccumulate("dynam_initialize");

    for (int ri = 0; ri < rounds; ++ri) {

        profiler.Start("dynam_push");
        int count = 0;
        int id = 0;
        while (count < MAXCOUNT) {
            id = (id + mod) % MAXID;
            TestDynamicNode node = new TestDynamicNode();   //Dynamic_Pool.Allocate();
            //node.Initialize(id);
            node.id = id;
            PQ_Dynamic.Enqueue(node, count);
            IDMap[id] = node;
            count++;
        }
        profiler.StopAndAccumulate("dynam_push");

        profiler.Start("dynam_update");
        Random r = new Random(31337);
        id = 0;
        count = 0;
        while (count++ < MAXCOUNT) {
            id = (id + mod) % MAXID;
            float new_p = count + ((r.Next() % 1000) - 1000);
            PQ_Dynamic.Update(IDMap[id], new_p);
        }
        profiler.StopAndAccumulate("dynam_update");

        profiler.Start("dynam_pop");
        while (PQ_Dynamic.Count > 0) {
            TestDynamicNode node = PQ_Dynamic.Dequeue();
            //if (rounds > 1)
            //    Dynamic_Pool.Return(node);
        }
        profiler.StopAndAccumulate("dynam_pop");
    }

    //Dynamic_Pool.FreeAll();
    profiler.StopAndAccumulate("dynam_all");
}
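
// Hedged usage sketch (not part of the original tests): one way the timing harness above
// might be driven. It assumes LocalProfiler can be default-constructed; the parameter
// values below are illustrative assumptions, not values taken from the source.
static void run_dynamic_pq_timing_example()
{
    LocalProfiler profiler = new LocalProfiler();
    // 1M id space, 100k inserted nodes per round, prime-ish stride, 5 rounds
    time_dynamic_pq(1000000, 100000, 31337, 5, profiler);
    // accumulated timings are now stored in the profiler under the "dynam_*" keys used above
}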
static void check_same(IndexPriorityQueue PQ_Index, DynamicPriorityQueue<TestDynamicNode> PQ_Dynamic)
{
    List<int> indices = new List<int>(PQ_Index);
    List<TestDynamicNode> nodes = new List<TestDynamicNode>(PQ_Dynamic);
    Util.gDevAssert(indices.Count == nodes.Count);
    for (int i = 0; i < indices.Count; ++i)
        Util.gDevAssert(indices[i] == nodes[i].id);
}
public virtual bool Apply()
{
    merge_r2 = MergeDistance * MergeDistance;

    // construct hash table for edge midpoints
    MeshBoundaryEdgeMidpoints pointset = new MeshBoundaryEdgeMidpoints(this.Mesh);
    PointSetHashtable hash = new PointSetHashtable(pointset);
    int hashN = 64;
    if (Mesh.TriangleCount > 100000)
        hashN = 128;
    if (Mesh.TriangleCount > 1000000)
        hashN = 256;
    hash.Build(hashN);

    Vector3d a = Vector3d.Zero, b = Vector3d.Zero;
    Vector3d c = Vector3d.Zero, d = Vector3d.Zero;

    // find edge equivalence sets. First we find all other edges with same
    // midpoint, and then we check if endpoints are the same in second loop
    int[] buffer = new int[1024];
    List<int>[] EquivSets = new List<int>[Mesh.MaxEdgeID];
    HashSet<int> remaining = new HashSet<int>();
    foreach (int eid in Mesh.BoundaryEdgeIndices()) {
        Vector3d midpt = Mesh.GetEdgePoint(eid, 0.5);
        int N;
        while (hash.FindInBall(midpt, MergeDistance, buffer, out N) == false)
            buffer = new int[buffer.Length * 2];    // buffer too small; grow and retry
        if (N == 1 && buffer[0] != eid)
            throw new Exception("MergeCoincidentEdges.Apply: how could this happen?!");
        if (N <= 1)
            continue;       // unique edge

        Mesh.GetEdgeV(eid, ref a, ref b);

        // if same endpoints, add to equivalence set
        List<int> equiv = new List<int>(N - 1);
        for (int i = 0; i < N; ++i) {
            if (buffer[i] != eid) {
                Mesh.GetEdgeV(buffer[i], ref c, ref d);
                if (is_same_edge(ref a, ref b, ref c, ref d))
                    equiv.Add(buffer[i]);
            }
        }
        if (equiv.Count > 0) {
            EquivSets[eid] = equiv;
            remaining.Add(eid);
        }
    }

    // [TODO] could replace remaining hashset w/ PQ, and use conservative count?

    // add potential duplicate edges to priority queue, sorted by
    // number of possible matches.
    // [TODO] Does this need to be a PQ? Not updating PQ below anyway...
    DynamicPriorityQueue<DuplicateEdge> Q = new DynamicPriorityQueue<DuplicateEdge>();
    foreach (int i in remaining) {
        if (OnlyUniquePairs) {
            // only enqueue mutually-unique pairs: this edge has exactly one
            // candidate, and that candidate's only candidate is this edge
            if (EquivSets[i].Count != 1)
                continue;
            int j = EquivSets[i][0];
            if (EquivSets[j] == null || EquivSets[j].Count != 1 || EquivSets[j][0] != i)
                continue;
        }
        Q.Enqueue(new DuplicateEdge() { eid = i }, EquivSets[i].Count);
    }

    while (Q.Count > 0) {
        DuplicateEdge e = Q.Dequeue();
        if (Mesh.IsEdge(e.eid) == false || EquivSets[e.eid] == null || remaining.Contains(e.eid) == false)
            continue;       // dealt with this edge already
        if (Mesh.IsBoundaryEdge(e.eid) == false)
            continue;

        List<int> equiv = EquivSets[e.eid];

        // find viable match
        // [TODO] how to make good decisions here? prefer planarity?
        bool merged = false;
        int failed = 0;
        for (int i = 0; i < equiv.Count && merged == false; ++i) {
            int other_eid = equiv[i];
            if (Mesh.IsEdge(other_eid) == false || Mesh.IsBoundaryEdge(other_eid) == false)
                continue;

            DMesh3.MergeEdgesInfo info;
            MeshResult result = Mesh.MergeEdges(e.eid, other_eid, out info);
            if (result != MeshResult.Ok) {
                // merge failed; drop this candidate from both equivalence sets
                equiv.RemoveAt(i);
                i--;
                EquivSets[other_eid].Remove(e.eid);
                //Q.UpdatePriority(...);  // would need ref to queue node to do this...??
                //   maybe equiv set is queue node??
                failed++;
            } else {
                // ok we merged, other edge is no longer free
                merged = true;
                EquivSets[other_eid] = null;
                remaining.Remove(other_eid);
            }
        }

        if (merged) {
            EquivSets[e.eid] = null;
            remaining.Remove(e.eid);
        } else {
            // should we do something else here? doesn't make sense to put
            // back into Q, as it should be at the top, right?
            EquivSets[e.eid] = null;
            remaining.Remove(e.eid);
        }
    }

    return true;
}
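
// Hedged usage sketch (not part of the original source): one way Apply() above might be
// driven. It assumes the enclosing class can be constructed from a DMesh3 and that
// MergeDistance / OnlyUniquePairs are settable, as their use above suggests; the tolerance
// value is an illustrative assumption, not a library default.
public static bool weld_boundary_edges_example(DMesh3 mesh)
{
    MergeCoincidentEdges merge = new MergeCoincidentEdges(mesh) {
        MergeDistance = 1e-6,       // assumed tolerance; pick one appropriate for your units
        OnlyUniquePairs = false     // allow ambiguous matches to be resolved greedily
    };
    return merge.Apply();
}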
public static void test_pq_debuggable()
{
    int MAXID = 10;
    IndexPriorityQueue QIndex = new IndexPriorityQueue(MAXID);
    DynamicPriorityQueue<TestDynamicNode> QDynamic = new DynamicPriorityQueue<TestDynamicNode>();
    TestDynamicNode[] dyn_nodes = new TestDynamicNode[MAXID];

    bool verbose = false;

    //int n = 1;
    for (int i = 0; i < MAXID; ++i) {
        //n = (n + 17) % 17;
        int id = i;
        float priority = 1.0f - (float)i / 10.0f;

        QIndex.Enqueue(id, priority);
        if (verbose) System.Console.WriteLine("i = {0}", i);
        QIndex.DebugPrint();
        if (verbose) System.Console.WriteLine("---");

        dyn_nodes[i] = new TestDynamicNode() { id = id };
        QDynamic.Enqueue(dyn_nodes[i], priority);
        QDynamic.DebugPrint();
    }

    System.Console.WriteLine("Dequeuing...");

    for (int i = 0; i < MAXID; ++i) {
        float newp = (float)((i + MAXID / 2) % MAXID) / 10.0f;
        QIndex.Update(i, newp);
        QDynamic.Update(dyn_nodes[i], newp);
        //System.Console.WriteLine("UPDATE {0} {1}", QIndex.First, QDynamic.First.id);
        //QIndex.DebugPrint();
        //System.Console.WriteLine("---");
        //QDynamic.DebugPrint();
        Util.gDevAssert(QIndex.First == QDynamic.First.id);
    }

    for (int i = 0; i < MAXID; ++i) {
        int id = QIndex.Dequeue();
        var node = QDynamic.Dequeue();
        Util.gDevAssert(id == node.id);
        if (verbose) System.Console.WriteLine("DEQUEUE {0} {1}", id, node.id);
        if (verbose) QIndex.DebugPrint();
        if (verbose) System.Console.WriteLine("---");
        if (verbose) QDynamic.DebugPrint();
    }
}
public static void test_pq()
{
    System.Console.WriteLine("testing priority queues...");

    int MAXID = 1000000;
    int MAXCOUNT = 100;
    int mod = 31337;

    for (int kk = 0; kk < 3; ++kk) {
        if (kk == 1)
            MAXCOUNT = MAXID / 10;
        else if (kk == 2)
            MAXCOUNT = MAXID;

        IndexPriorityQueue PQ_Index = new IndexPriorityQueue(MAXID);
        DynamicPriorityQueue<TestDynamicNode> PQ_Dynamic = new DynamicPriorityQueue<TestDynamicNode>();
        MemoryPool<TestDynamicNode> Dynamic_Pool = new MemoryPool<TestDynamicNode>();
        Dictionary<int, TestDynamicNode> DynamicNodes = new Dictionary<int, TestDynamicNode>();

        System.Console.WriteLine("inserting {0} of {1}", MAXCOUNT, MAXID);

        int count = 0;
        int id = 0;
        while (count < MAXCOUNT) {
            id = (id + mod) % MAXID;
            PQ_Index.Enqueue(id, count);

            TestDynamicNode node = Dynamic_Pool.Allocate();
            node.Initialize(id);
            PQ_Dynamic.Enqueue(node, count);
            DynamicNodes[id] = node;

            count++;
        }

        Util.gDevAssert(PQ_Index.IsValidQueue());
        Util.gDevAssert(PQ_Dynamic.IsValidQueue());
        Util.gDevAssert(PQ_Index.Count == PQ_Dynamic.Count);
        Util.gDevAssert(PQ_Index.First == PQ_Dynamic.First.id);
        check_same(PQ_Index, PQ_Dynamic);

        Random r = new Random(31337);

        System.Console.WriteLine("updating...");

        id = 0;
        count = 0;
        while (count++ < MAXCOUNT) {
            id = (id + mod) % MAXID;
            float new_p = count + ((r.Next() % 1000) - 1000);
            PQ_Index.Update(id, new_p);
            PQ_Dynamic.Update(DynamicNodes[id], new_p);
        }

        Util.gDevAssert(PQ_Index.IsValidQueue());
        Util.gDevAssert(PQ_Dynamic.IsValidQueue());
        check_same(PQ_Index, PQ_Dynamic);

        System.Console.WriteLine("removing...");

        while (PQ_Index.Count > 0) {
            int index_id = PQ_Index.Dequeue();
            TestDynamicNode node = PQ_Dynamic.Dequeue();
            Util.gDevAssert(index_id == node.id);
        }
    }
}
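
// Hedged sketch (the real definition lives elsewhere in the test code): a minimal
// TestDynamicNode compatible with the usage above. Assumes DynamicPriorityQueue<T>
// constrains T to a DynamicPriorityQueueNode base class that carries the queue's
// internal bookkeeping; only the id field and Initialize() are required by the tests.
public class TestDynamicNode : DynamicPriorityQueueNode
{
    public int id;

    public void Initialize(int id)
    {
        this.id = id;
    }
}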