/// <summary>
/// Collects a limited set of subtrees hanging from the specified node and performs a local treelet rebuild using a bottom-up agglomerative approach.
/// </summary>
/// <param name="nodeIndex">Root of the refinement treelet.</param>
/// <param name="spareNodes">List of unused internal node indices; any treelet nodes left over by the refinement are returned to this list for later reuse.</param>
/// <param name="nodesInvalidated">True if the refinement process invalidated node pointers, false otherwise.</param>
public unsafe void AgglomerativeRefine(int nodeIndex, ref QuickList<int> spareNodes, out bool nodesInvalidated)
{
    var maximumSubtrees = ChildrenCapacity * ChildrenCapacity;
    var poolIndex = BufferPool<int>.GetPoolIndex(maximumSubtrees);
    var subtrees = new QuickList<int>(BufferPools<int>.Thread, poolIndex);
    var treeletInternalNodes = new QuickList<int>(BufferPools<int>.Thread, poolIndex);
    float originalTreeletCost;
    var entries = stackalloc SubtreeHeapEntry[maximumSubtrees];
    CollectSubtrees(nodeIndex, maximumSubtrees, entries, ref subtrees, ref treeletInternalNodes, out originalTreeletCost);

    //We're going to create a little binary tree via agglomeration, and then we'll collapse it into an n-ary tree.
    //Note the size: we first put every possible subtree in, so subtrees.Count.
    //Then, we add up subtrees.Count - 1 internal nodes without removing earlier slots.
    int tempNodesCapacity = subtrees.Count * 2 - 1;
    var tempNodes = stackalloc TempNode[tempNodesCapacity];
    int tempNodeCount = subtrees.Count;
    int remainingNodesCapacity = subtrees.Count;
    var remainingNodes = stackalloc int[remainingNodesCapacity];
    int remainingNodesCount = subtrees.Count;
    for (int i = 0; i < subtrees.Count; ++i)
    {
        var tempNode = tempNodes + i;
        tempNode->A = Encode(i);
        if (subtrees.Elements[i] >= 0)
        {
            //It's an internal node, so look at the parent.
            var subtreeNode = nodes + subtrees.Elements[i];
            tempNode->BoundingBox = (&nodes[subtreeNode->Parent].A)[subtreeNode->IndexInParent];
            tempNode->LeafCount = (&nodes[subtreeNode->Parent].LeafCountA)[subtreeNode->IndexInParent];
        }
        else
        {
            //It's a leaf node, so grab the bounding box from the owning node.
            var leafIndex = Encode(subtrees.Elements[i]);
            var leaf = leaves + leafIndex;
            var parentNode = nodes + leaf->NodeIndex;
            tempNode->BoundingBox = (&parentNode->A)[leaf->ChildIndex];
            tempNode->LeafCount = 1;
        }

        //Add a reference to the remaining list.
        remainingNodes[i] = i;
    }

    while (remainingNodesCount >= 2)
    {
        //Determine which pair of subtrees has the smallest cost.
        //(Smallest absolute cost is used instead of *increase* in cost because absolute tends to move bigger objects up the tree, which is desirable.)
        float bestCost = float.MaxValue;
        int bestA = 0, bestB = 0;
        for (int i = 0; i < remainingNodesCount; ++i)
        {
            for (int j = i + 1; j < remainingNodesCount; ++j)
            {
                var nodeIndexA = remainingNodes[i];
                var nodeIndexB = remainingNodes[j];
                BoundingBox merged;
                BoundingBox.Merge(ref tempNodes[nodeIndexA].BoundingBox, ref tempNodes[nodeIndexB].BoundingBox, out merged);
                var cost = ComputeBoundsMetric(ref merged);
                if (cost < bestCost)
                {
                    bestCost = cost;
                    bestA = i;
                    bestB = j;
                }
            }
        }
        {
            //Create a new temp node based on the best pair.
            TempNode newTempNode;
            newTempNode.A = remainingNodes[bestA];
            newTempNode.B = remainingNodes[bestB];
            //Remerging here may or may not be faster than repeatedly caching 'best' candidates from above. It is a really, really cheap operation, after all, apart from cache issues.
            BoundingBox.Merge(ref tempNodes[newTempNode.A].BoundingBox, ref tempNodes[newTempNode.B].BoundingBox, out newTempNode.BoundingBox);
            newTempNode.LeafCount = tempNodes[newTempNode.A].LeafCount + tempNodes[newTempNode.B].LeafCount;

            //Remove the best options from the list.
            //bestA is always lower than bestB, so remove bestB first to avoid corrupting bestA's index.
            TempNode.FastRemoveAt(bestB, remainingNodes, ref remainingNodesCount);
            TempNode.FastRemoveAt(bestA, remainingNodes, ref remainingNodesCount);

            //Add the reference to the new node.
            var newIndex = TempNode.Add(ref newTempNode, tempNodes, ref tempNodeCount);
            remainingNodes[remainingNodesCount++] = newIndex;
        }
    }

    //The 2-ary proto-treelet is ready.
    //Collapse it into an n-ary tree.
    const int collapseCount = ChildrenCapacity == 32 ? 4 :
        ChildrenCapacity == 16 ? 3 :
        ChildrenCapacity == 8 ? 2 :
        ChildrenCapacity == 4 ? 1 : 0;

    //Remember: all positive indices in the tempNodes array refer to other temp nodes; they are internal references. Encoded references point back to indices in the subtrees list.
    Debug.Assert(remainingNodesCount == 1);
    int parent = nodes[nodeIndex].Parent;
    int indexInParent = nodes[nodeIndex].IndexInParent;

    var stagingNodeCapacity = maximumSubtrees - 1;
    var stagingNodes = stackalloc Node[maximumSubtrees - 1];
    int stagingNodeCount = 0;
    float newTreeletCost;
    var stagingRootIndex = BuildStagingChild(parent, indexInParent, tempNodes, tempNodeCount - 1, collapseCount, stagingNodes, ref stagingNodeCount, out newTreeletCost);
    Debug.Assert(stagingNodeCount < stagingNodeCapacity);

    if (newTreeletCost < originalTreeletCost)
    {
        //The refinement is an actual improvement.
        //Apply the staged nodes to real nodes!
        int nextInternalNodeIndexToUse = 0;
        ReifyStagingNodes(nodeIndex, stagingNodes, ref subtrees, ref treeletInternalNodes, ref nextInternalNodeIndexToUse, ref spareNodes, out nodesInvalidated);

        //If any nodes are left over, put them into the spares list for later reuse.
        for (int i = nextInternalNodeIndexToUse; i < treeletInternalNodes.Count; ++i)
        {
            spareNodes.Add(treeletInternalNodes.Elements[i]);
        }
    }
    else
    {
        nodesInvalidated = false;
    }

    subtrees.Dispose();
    treeletInternalNodes.Dispose();
}
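
//A minimal, self-contained sketch of the greedy agglomeration loop above, using managed
//collections instead of the tree's pointer-based TempNode storage. The Aabb struct, its
//SurfaceArea metric, the Cluster class, and AgglomerationSketch itself are illustrative
//stand-ins (assumptions), not part of this tree's API; they only demonstrate the
//"merge the cheapest pair until one cluster remains" strategy used to build the binary
//proto-treelet. Requires: using System; using System.Collections.Generic;
internal static class AgglomerationSketch
{
    internal struct Aabb
    {
        public float MinX, MinY, MinZ, MaxX, MaxY, MaxZ;

        public static Aabb Merge(Aabb a, Aabb b)
        {
            return new Aabb
            {
                MinX = Math.Min(a.MinX, b.MinX), MinY = Math.Min(a.MinY, b.MinY), MinZ = Math.Min(a.MinZ, b.MinZ),
                MaxX = Math.Max(a.MaxX, b.MaxX), MaxY = Math.Max(a.MaxY, b.MaxY), MaxZ = Math.Max(a.MaxZ, b.MaxZ)
            };
        }

        //Surface area is the usual SAH-style bounds metric; the real tree's ComputeBoundsMetric may differ.
        public float SurfaceArea()
        {
            float x = MaxX - MinX, y = MaxY - MinY, z = MaxZ - MinZ;
            return 2 * (x * y + y * z + z * x);
        }
    }

    internal sealed class Cluster
    {
        public Aabb Bounds;
        public Cluster Left, Right; //Null for the initial single-subtree clusters.
    }

    //Greedily merges the pair with the smallest absolute merged metric until a single root cluster remains.
    //Assumes the list contains at least one cluster; the list is consumed in place.
    internal static Cluster Agglomerate(List<Cluster> remaining)
    {
        while (remaining.Count >= 2)
        {
            float bestCost = float.MaxValue;
            int bestA = 0, bestB = 1;
            for (int i = 0; i < remaining.Count; ++i)
            {
                for (int j = i + 1; j < remaining.Count; ++j)
                {
                    var merged = Aabb.Merge(remaining[i].Bounds, remaining[j].Bounds);
                    var cost = merged.SurfaceArea();
                    if (cost < bestCost)
                    {
                        bestCost = cost;
                        bestA = i;
                        bestB = j;
                    }
                }
            }
            var parent = new Cluster
            {
                Bounds = Aabb.Merge(remaining[bestA].Bounds, remaining[bestB].Bounds),
                Left = remaining[bestA],
                Right = remaining[bestB]
            };
            //Remove the higher index first so the lower one stays valid, mirroring the FastRemoveAt order above.
            remaining.RemoveAt(bestB);
            remaining.RemoveAt(bestA);
            remaining.Add(parent);
        }
        return remaining[0];
    }
}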
/// <summary>
/// Analyzes a node and adds the analysis results to the specified cache node.
/// </summary>
/// <param name="node"></param>
/// <param name="memberName"></param>
/// <param name="path"></param>
/// <param name="temp"></param>
private void Analysis(XmlNode node, string memberName, string path, TempNode temp)
{
    if (node.ChildNodes.Count == 0)
    {
        return;
    }
    else if (node.ChildNodes.Count == 1)
    {
        XmlNode line = node.FirstChild;
        if (line.NodeType == XmlNodeType.Text)
        {
            temp.Add(Combine(path, 0), line);
        }
        else if (Enum.TryParse(line.Name, out ElementType type))
        {
            if (ElementType.Skip.HasFlag(type) || ElementType.Insert.HasFlag(type))
            {
                return;
            }
            else if (ElementType.Separate.HasFlag(type))
            {
                Analysis(line, memberName, Combine(path, 0), temp);
            }
        }
    }
    else
    {
        int index = 0;
        StringBuilder textBuilder = new StringBuilder();
        bool check = false;
        string nodeValue, nodeCheck;
        int startIndex = -1;
        int length = 0;
        for (int i = 0; i < node.ChildNodes.Count; i++)
        {
            if (node.ChildNodes[i].NodeType == XmlNodeType.Text)
            {
                if (startIndex == -1)
                {
                    startIndex = i;
                }
                length++;
                nodeValue = node.ChildNodes[i].Value;
                nodeCheck = nodeValue;
                textBuilder.Append(node.ChildNodes[i].Value);
                check = check || !string.IsNullOrWhiteSpace(nodeCheck);
            }
            else if (Enum.TryParse(node.ChildNodes[i].Name, out ElementType type))
            {
                if (ElementType.Insert.HasFlag(type))
                {
                    if (startIndex == -1)
                    {
                        startIndex = i;
                    }
                    length++;
                    nodeValue = node.ChildNodes[i].OuterXml;
                    nodeCheck = node.ChildNodes[i].InnerText;
                    textBuilder.Append(nodeValue);
                    check = check || !string.IsNullOrWhiteSpace(nodeCheck);
                }
                else if (ElementType.Skip.HasFlag(type))
                {
                    if (check)
                    {
                        temp.Add(Combine(path, index), node, startIndex, length);
                    }
                    startIndex = -1;
                    length = 0;
                    textBuilder.Clear();
                    check = false;
                    index++;
                }
                else if (ElementType.Separate.HasFlag(type))
                {
                    if (check)
                    {
                        temp.Add(Combine(path, index++), node, startIndex, length);
                    }
                    startIndex = -1;
                    length = 0;
                    textBuilder.Clear();
                    check = false;
                    Analysis(node.ChildNodes[i], memberName, Combine(path, index++), temp);
                }
            }
        }
        if (check)
        {
            temp.Add(Combine(path, index++), node, startIndex, length);
        }
    }
}
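
//A self-contained sketch of the segmentation idea used by Analysis above: walk a node's
//children, buffer text and inline elements into one segment, and start a new segment
//(recursing) at block-level elements. The DocCommentSegmentationSketch class, the
//InlineElements/BlockElements sets, and the slash-separated key scheme are illustrative
//assumptions; the real ElementType flags, TempNode cache, and Combine path format are
//defined elsewhere in this project.
//Requires: using System.Collections.Generic; using System.Text; using System.Xml;
internal static class DocCommentSegmentationSketch
{
    //Hypothetical classifications standing in for ElementType.Insert and ElementType.Separate.
    private static readonly HashSet<string> InlineElements = new HashSet<string> { "see", "paramref", "typeparamref", "c" };
    private static readonly HashSet<string> BlockElements = new HashSet<string> { "para", "code", "list" };

    //Returns text segments keyed by a slash-separated path, e.g. "summary/0".
    internal static Dictionary<string, string> Segment(XmlNode node, string path)
    {
        var result = new Dictionary<string, string>();
        var builder = new StringBuilder();
        int index = 0;
        foreach (XmlNode child in node.ChildNodes)
        {
            if (child.NodeType == XmlNodeType.Text)
            {
                builder.Append(child.Value);
            }
            else if (InlineElements.Contains(child.Name))
            {
                //Inline elements stay embedded in the surrounding text segment.
                builder.Append(child.OuterXml);
            }
            else if (BlockElements.Contains(child.Name))
            {
                //Block elements end the current segment and are segmented recursively under their own path.
                Flush(result, path, ref index, builder);
                foreach (var pair in Segment(child, path + "/" + index++))
                {
                    result[pair.Key] = pair.Value;
                }
            }
        }
        Flush(result, path, ref index, builder);
        return result;
    }

    //Stores the buffered segment if it contains anything beyond whitespace, then resets the buffer.
    private static void Flush(Dictionary<string, string> result, string path, ref int index, StringBuilder builder)
    {
        if (builder.Length > 0 && !string.IsNullOrWhiteSpace(builder.ToString()))
        {
            result[path + "/" + index++] = builder.ToString();
        }
        builder.Clear();
    }
}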