/// <summary> /// Inserts a new value into the Min Heap. /// </summary> /// <param name="keyValue">The new key-value pair to be inserted in the tree.</param> /// <param name="heapArrayLength">The length of the heap array. </param> public override void Insert(KeyValuePair <TKey, TValue> keyValue, int heapArrayLength) { HeapArray.Add(keyValue); int index = heapArrayLength; BubbleUp_Recursively(index, heapArrayLength + 1); }
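The insert above defers the sift-up to a BubbleUp_Recursively helper that is not shown in this snippet. Below is a minimal hedged sketch of what such a helper could look like for a list-backed min heap, assuming the usual parent formula (index - 1) / 2 and TKey : IComparable<TKey>; the actual helper in the class may differ.

// Hypothetical sketch of a recursive sift-up for a list-backed min heap.
private void BubbleUp_Recursively(int index, int heapArrayLength)
{
    if (index <= 0)
    {
        return; // reached the root, nothing left to compare against
    }
    int parentIndex = (index - 1) / 2; // parent of node i in an array-backed heap
    if (HeapArray[index].Key.CompareTo(HeapArray[parentIndex].Key) < 0)
    {
        // the child is smaller than its parent: swap them and continue from the parent's slot
        KeyValuePair<TKey, TValue> temp = HeapArray[parentIndex];
        HeapArray[parentIndex] = HeapArray[index];
        HeapArray[index] = temp;
        BubbleUp_Recursively(parentIndex, heapArrayLength);
    }
}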
/// <summary>Runs the Greedy search algorithm on a graph.</summary> /// <param name="start">The node to start at.</param> /// <param name="neighbors">Step function for all neighbors of a given node.</param> /// <param name="heuristic">Computes the heuristic value of a given node in a graph.</param> /// <param name="goal">Predicate for determining if we have reached the goal node.</param> /// <returns>Stepper of the path found or null if no path exists.</returns> public static Stepper <T> Greedy(T start, Neighbors neighbors, Heuristic heuristic, Goal goal) { // using a heap (aka priority queue) to store nodes based on their computed heuristic value Heap <Greedy_Node> fringe = new HeapArray <Greedy_Node>( // NOTE: I just reversed the order of left and right because smaller values are higher priority (Greedy_Node left, Greedy_Node right) => { return(Compute <Math> .Compare(right.Priority, left.Priority)); }); // push starting node Greedy_Node start_node = new Greedy_Node(null, start, default(Math)); fringe.Enqueue(start_node); // run the algorithm while (fringe.Count != 0) { Greedy_Node current = fringe.Dequeue(); if (goal(current.Value)) { return(Greedy_BuildPath(current)); } else { neighbors(current.Value, (T neighbor) => { Greedy_Node newNode = new Greedy_Node(current, neighbor, heuristic(neighbor)); fringe.Enqueue(newNode); }); } } return(null); // goal node was not reached (no path exists) }
internal HeapArray(HeapArray <T> heap) { _compare = heap._compare; _heap = (T[])heap._heap.Clone(); _minimumCapacity = heap._minimumCapacity; _count = heap._count; }
/// <summary> /// Removes the min element from the heap. /// </summary> /// <param name="keyValue">If the operation is successful, contains the minimum element in the array.</param> /// <param name="heapArrayLength">The length of the heap array. </param> /// <returns>True in case of success, and false otherwise.</returns> public override bool TryRemoveRoot(out KeyValuePair <TKey, TValue> keyValue, int heapArrayLength) { keyValue = new KeyValuePair <TKey, TValue>((TKey)typeof(TKey).GetField("MinValue").GetValue(null), default(TValue)); // T.MinValue; /* If the array is empty, return false. */ if (heapArrayLength == 0) { return(false); } /* If the array has only one element left, it is the minimum value; return it and clear the array. */ if (heapArrayLength == 1) { keyValue = HeapArray[0]; HeapArray.Clear(); return(true); } /* If the array has more than one element, the following instructions are executed. */ keyValue = HeapArray[0]; /* In a min heap the minimum value is always in the root, which is at index 0.*/ HeapArray[0] = HeapArray[heapArrayLength - 1]; /* Move the last element to the place of the root, and then bubble down. */ HeapArray.RemoveAt(heapArrayLength - 1); /* Removing the last element, as it is now placed in the root's position, and needs to be bubbled down.*/ BubbleDown_Recursively(0, heapArrayLength - 1); /* Call this method to bubble down the (new) root.*/ /* Also notice that the array is shorter by one value now, thus the new array length is one smaller. */ return(true); }
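TryRemoveRoot above delegates the sift-down to a BubbleDown_Recursively helper that is not shown. A minimal hedged sketch for a list-backed min heap, assuming child indices 2i + 1 and 2i + 2 and TKey : IComparable<TKey>; the real helper may differ.

// Hypothetical sketch of a recursive sift-down for a list-backed min heap.
private void BubbleDown_Recursively(int index, int heapArrayLength)
{
    int leftChildIndex = 2 * index + 1;   // children of node i in an array-backed heap
    int rightChildIndex = 2 * index + 2;
    int smallestIndex = index;
    if (leftChildIndex < heapArrayLength && HeapArray[leftChildIndex].Key.CompareTo(HeapArray[smallestIndex].Key) < 0)
    {
        smallestIndex = leftChildIndex;
    }
    if (rightChildIndex < heapArrayLength && HeapArray[rightChildIndex].Key.CompareTo(HeapArray[smallestIndex].Key) < 0)
    {
        smallestIndex = rightChildIndex;
    }
    if (smallestIndex != index)
    {
        // swap the node with its smaller child and keep sinking it
        KeyValuePair<TKey, TValue> temp = HeapArray[index];
        HeapArray[index] = HeapArray[smallestIndex];
        HeapArray[smallestIndex] = temp;
        BubbleDown_Recursively(smallestIndex, heapArrayLength);
    }
}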
/* * Time: O((V+E) log V), Space: O(V) * Cannot handle negative edge weights; returns the shortest distances from vertex 0 to all other vertices. * 1. Initialize the distance of vertex 0 to 0 and all others to INF. * 2. Add all vertices to a minimum heap keyed by their distances from vertex 0. * 3. Loop until the heap is empty: extract the min from the heap and update the distances of all its adjacent vertices. */ int[] Dijkstra(int n, IList <int[]>[] adj) { var dist = new int[n]; for (int i = 1; i < n; i++) { dist[i] = INF; } var heap = new HeapArray(dist); var prev = new int[n]; // used to recover the path for (int i = 0; i < n - 1; i++) { var min = heap.ExtractMin(); foreach (var e in adj[min]) { int src = e[0], dst = e[1], w = e[2]; if (dist[dst] > dist[src] + w) { heap.Update(dst, dist[src] + w); dist[dst] = dist[src] + w; prev[dst] = src; } } } return(dist); }
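A hedged usage sketch for the Dijkstra snippet above on a small hand-built graph. The adjacency layout (adj[v] holds edges as { src, dst, weight } arrays) is inferred from how the loop reads e[0], e[1], e[2]; INF and the distance-keyed HeapArray with ExtractMin/Update are assumed to be defined as in the surrounding class, and the usual using System.Collections.Generic is assumed.

// Hypothetical driver; graph and expected output are made up for illustration.
// Directed edges: 0->1 (w=1), 0->2 (w=4), 1->2 (w=2), 2->3 (w=1).
int n = 4;
IList<int[]>[] adj = new IList<int[]>[n];
for (int v = 0; v < n; v++) { adj[v] = new List<int[]>(); }
adj[0].Add(new[] { 0, 1, 1 });
adj[0].Add(new[] { 0, 2, 4 });
adj[1].Add(new[] { 1, 2, 2 });
adj[2].Add(new[] { 2, 3, 1 });
int[] dist = Dijkstra(n, adj);
// Expected distances from vertex 0: dist[0] = 0, dist[1] = 1, dist[2] = 3, dist[3] = 4.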
internal HeapArray(HeapArray <T> heap) { _compare = heap._compare; _heap = new T[heap._heap.Length]; heap._heap.CopyTo(_heap, 0); _minimumCapacity = heap._minimumCapacity; _count = heap._count; }
/// <summary> /// Inserts a new value into the Max Heap. /// </summary> /// <param name="value">The new value to be inserted in the tree.</param> /// <param name="heapArrayLength">The length/size of the heap array. </param> public override void Insert(KeyValuePair <TKey, TValue> value, int heapArrayLength) { HeapArray.Add(value); // The new value gets added to the end of the list. // Bubble up this element/node int nodeIndex = heapArrayLength; BubbleUp_Iteratively(nodeIndex, heapArrayLength + 1); // Notice that the size of the array has grown by one now. }
public void Add() { IHeap <Person> heap = new HeapArray <Person>(); foreach (Person person in RandomTestData) { heap.Enqueue(person); } }
/// <summary> /// Inserts a new value into the Min Heap. /// </summary> /// <param name="newValue">The new key-value pair to be inserted in the tree.</param> /// <param name="heapArrayLength">The length of the heap array. </param> public override void Insert(KeyValuePair <TKey, TValue> newValue, int heapArrayLength) { /* Add the new value to the end of the array. List is a dynamic array and grows in size automatically. */ HeapArray.Add(newValue); /* Bubble up the new value, and stop when the parent is no longer bigger than the new value, or when new value is bubbled up to the root's position. */ int nodeIndex = heapArrayLength; BubbleUp_Iteratively(nodeIndex, heapArrayLength + 1); }
public void Enqueue() { IHeap <int> heap = new HeapArray <int>(); int enqueueCount = EnqueueCount; for (int i = 0; i < enqueueCount; i++) { heap.Enqueue(i); } }
/// <summary> /// This method is for finding the root of the heap, without removing it. /// </summary> /// <param name="keyValue">The key-value of the root.</param> /// <param name="heapArrayLength">The length of the heap array. </param> /// <returns>True in case of success, and false in case of failure.</returns> public override bool TryFindRoot(out KeyValuePair <TKey, TValue> keyValue, int heapArrayLength) { if (HeapArray.Any()) { keyValue = HeapArray[0]; return(true); } keyValue = new KeyValuePair <TKey, TValue>((TKey)typeof(TKey).GetField("MaxValue").GetValue(null), default(TValue)); return(false); }
public void InsertKey(T key) { if (HeapSize == HeapArray.Length) { T[] newHeap = new T[HeapArray.Length * 2]; HeapArray.CopyTo(newHeap, 0); HeapArray = newHeap; } HeapArray[HeapSize] = key; FixUp(HeapSize++); }
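FixUp is not shown in the InsertKey snippet above. Assuming this is a min heap over T : IComparable with the root at index 0, a minimal sketch of the sift-up might look like the following; the real implementation may differ.

// Hypothetical sketch: sift the newly inserted key up toward the root.
private void FixUp(int index)
{
    while (index > 0)
    {
        int parentIndex = (index - 1) / 2;
        if (HeapArray[index].CompareTo(HeapArray[parentIndex]) >= 0)
        {
            break; // heap property holds, stop sifting
        }
        // swap the new key with its parent and keep climbing toward the root
        T temp = HeapArray[parentIndex];
        HeapArray[parentIndex] = HeapArray[index];
        HeapArray[index] = temp;
        index = parentIndex;
    }
}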
/// <summary>Runs the A* search algorithm on a graph.</summary> /// <param name="start">The node to start at.</param> /// <param name="neighbors">Step function for all neighbors of a given node.</param> /// <param name="heuristic">Computes the heuristic value of a given node in a graph.</param> /// <param name="cost">Computes the cost of moving from the current node to a specific neighbor.</param> /// <param name="goal">Predicate for determining if we have reached the goal node.</param> /// <returns>Stepper of the shortest path or null if no path exists.</returns> public static Stepper <T> Astar(T start, Neighbors neighbors, Heuristic heuristic, Cost cost, Goal goal) { // using a heap (aka priority queue) to store nodes based on their computed A* f(n) value Heap <Astar_Node> fringe = new HeapArray <Astar_Node>( // NOTE: Typical A* implementations prioritize smaller values (Astar_Node left, Astar_Node right) => { Comparison comparison = Compute.Compare <Math>(right.Priority, left.Priority); return(comparison); }); // using a map (aka dictionary) to store costs from start to current nodes Map <Math, Astar_Node> computed_costs = new MapHashArray <Math, Astar_Node>(); // construct the f(n) for this A* execution Astar_function function = (T node, Astar_Node previous) => { Math previousCost = computed_costs.Get(previous); Math currentCost = cost(previous.Value, node); Math costFromStart = Compute.Add <Math>(previousCost, currentCost); Math heuristicValue = heuristic(node); return(Compute.Add <Math>(costFromStart, heuristicValue)); }; // push starting node Astar_Node start_node = new Astar_Node(null, start, default(Math)); fringe.Enqueue(start_node); computed_costs.Add(start_node, default(Math)); // run the algorithm while (fringe.Count != 0) { Astar_Node current = fringe.Dequeue(); if (goal(current.Value)) { return(Astar_BuildPath(current)); } else { neighbors(current.Value, (T neighbor) => { Astar_Node newNode = new Astar_Node(current, neighbor, function(neighbor, current)); Math costValue = Compute.Add <Math>(computed_costs.Get(current), cost(current.Value, neighbor)); computed_costs.Add(newNode, costValue); fringe.Enqueue(newNode); }); } } return(null); // goal node was not reached (no path exists) }
/// <summary>Runs the A* search algorithm on a graph.</summary> /// <typeparam name="Node">The node type of the graph being searched.</typeparam> /// <typeparam name="Numeric">The numeric to use when performing calculations.</typeparam> /// <param name="start">The node to start at.</param> /// <param name="neighbors">Step function for all neighbors of a given node.</param> /// <param name="heuristic">Computes the heuristic value of a given node in a graph.</param> /// <param name="cost">Computes the cost of moving from the current node to a specific neighbor.</param> /// <param name="goal">Predicate for determining if we have reached the goal node.</param> /// <returns>Stepper of the shortest path or null if no path exists.</returns> public static Stepper <Node> Graph <Node, Numeric>(Node start, Neighbors <Node> neighbors, Heuristic <Node, Numeric> heuristic, Cost <Node, Numeric> cost, Goal <Node> goal) { // using a heap (aka priority queue) to store nodes based on their computed A* f(n) value IHeap <AstarNode <Node, Numeric> > fringe = new HeapArray <AstarNode <Node, Numeric> >( // NOTE: Typical A* implementations prioritize smaller values (a, b) => Compute.Compare(b.Priority, a.Priority)); // push starting node fringe.Enqueue( new AstarNode <Node, Numeric>() { Previous = null, Value = start, Priority = default,
public static void HeapSort <T>(T[] data, int size) where T : IComparable { var heapArray = new HeapArray <T>(size); heapArray.BulidHeapBottomUp(data, size); while (size > 1) { var maxValue = data[1]; data[1] = data[size]; data[size] = maxValue; size--; RestoreDown(1, data, size); } }
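A hedged usage sketch for the HeapSort above. The routine indexes data from 1 through size (index 0 is unused), so the input array needs one extra slot; BulidHeapBottomUp is assumed to build a max heap and RestoreDown to follow the same 1-based convention, which is what the swap loop implies.

// Hypothetical driver for the 1-based HeapSort above; index 0 is left unused.
int[] data = { 0 /* unused slot */, 5, 2, 9, 1, 7 };
HeapSort(data, 5);
for (int i = 1; i <= 5; i++)
{
    Console.Write(data[i] + " "); // expected (ascending): 1 2 5 7 9
}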
[TestMethod] public void Dequeue_Testing() { void Test <T>(T[] values, Compare <T> compare) { T[] clonedValues = (T[])values.Clone(); Towel.Sort.Shuffle(clonedValues); IHeap <T> heap = new HeapArray <T>(compare); clonedValues.Stepper(x => heap.Enqueue(x)); foreach (T value in values) { T dequeue = heap.Dequeue(); Assert.IsTrue(value.Equals(dequeue)); } } { // int compare const int count = 100; int[] values = new int[count]; Stepper.Iterate(count, i => values[i] = i); Array.Sort(values, (a, b) => - a.CompareTo(b)); Test(values, (a, b) => Compare.Wrap(a.CompareTo(b))); } { // string compare const int count = 100; string[] values = new string[count]; Stepper.Iterate(count, i => values[i] = i.ToString()); Array.Sort(values, (a, b) => - a.CompareTo(b)); Test(values, (a, b) => Compare.Wrap(a.CompareTo(b))); } { // int reverse compare const int count = 100; int[] values = new int[count]; Stepper.Iterate(count, i => values[i] = i); Array.Sort(values); Test(values, (a, b) => Compare.Wrap(-a.CompareTo(b))); } { // string reverse compare const int count = 100; string[] values = new string[count]; Stepper.Iterate(count, i => values[i] = i.ToString()); Array.Sort(values); Test(values, (a, b) => Compare.Wrap(-a.CompareTo(b))); } }
/// <summary> /// Removes the min element from the heap. /// </summary> /// <param name="keyValue">If the operation is successful, contains the minimum element in the array.</param> /// <param name="heapArrayLength">The length of the heap array. </param> /// <returns>True in case of success, and false otherwise.</returns> public override bool TryRemoveRoot(out KeyValuePair <TKey, TValue> keyValue, int heapArrayLength) { keyValue = new KeyValuePair <TKey, TValue>((TKey)typeof(TKey).GetField("MinValue").GetValue(null), default(TValue)); if (heapArrayLength == 0) { return(false); } if (heapArrayLength == 1) { keyValue = HeapArray[0]; HeapArray.Clear(); return(true); } keyValue = HeapArray[0]; HeapArray[0] = HeapArray[heapArrayLength - 1]; HeapArray.RemoveAt(heapArrayLength - 1); BubbleDownMin_Recursively(0, heapArrayLength - 1); /* Calling this method because this is a min-max heap and index 0 is expected to be on a min level.*/ /* Also notice that the array is shorter by one value now, thus the new array length is one smaller. */ return(true); }
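As a side note on the min-max heap comment above: whether an index sits on a min level or a max level follows from the node's depth, which can be computed from the array index. A small hedged helper illustrating the usual convention (root at depth 0 is a min level, odd depths are max levels); the name IsOnMinLevel is made up for illustration and is not part of the snippet's class.

// Hypothetical helper: in a min-max heap, even depths (root = depth 0) are min levels
// and odd depths are max levels, which is why index 0 is bubbled down with the "min" routine.
private static bool IsOnMinLevel(int index)
{
    int depth = 0;
    for (int i = index + 1; i > 1; i >>= 1)
    {
        depth++; // computes floor(log2(index + 1)), the depth of the node
    }
    return depth % 2 == 0;
}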
/// <summary>Runs the A* search algorithm on a graph.</summary> /// <param name="start">The node to start at.</param> /// <param name="neighbors">Step function for all neighbors of a given node.</param> /// <param name="heuristic">Computes the heuristic value of a given node in a graph.</param> /// <param name="cost">Computes the cost of moving from the current node to a specific neighbor.</param> /// <param name="goal">Predicate for determining if we have reached the goal node.</param> /// <returns>Stepper of the shortest path or null if no path exists.</returns> public static Stepper <NODE> Graph <NODE, NUMERIC>(NODE start, Neighbors <NODE> neighbors, Heuristic <NODE, NUMERIC> heuristic, Cost <NODE, NUMERIC> cost, Goal <NODE> goal) { // using a heap (aka priority queue) to store nodes based on their computed A* f(n) value IHeap <AstarNode <NODE, NUMERIC> > fringe = new HeapArray <AstarNode <NODE, NUMERIC> >( // NOTE: Typical A* implementations prioritize smaller values (a, b) => Compute.Compare(b.Priority, a.Priority)); // push starting node fringe.Enqueue( new AstarNode <NODE, NUMERIC>( null, start, default(NUMERIC), Constant <NUMERIC> .Zero)); // run the algorithm while (fringe.Count != 0) { AstarNode <NODE, NUMERIC> current = fringe.Dequeue(); if (goal(current.Value)) { return(BuildPath(current)); } else { neighbors(current.Value, (NODE neighbor) => { NUMERIC costValue = Compute.Add(current.Cost, cost(current.Value, neighbor)); fringe.Enqueue( new AstarNode <NODE, NUMERIC>( current, neighbor, Compute.Add(heuristic(neighbor), costValue), costValue)); }); } } return(null); // goal node was not reached (no path exists) }
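To make the call pattern concrete, here is a hedged usage sketch for the generic A* above on a tiny hard-coded graph. The delegate shapes (Neighbors<Node> taking a node plus a step callback, Heuristic/Cost returning the numeric type, Goal returning bool) are inferred from how the snippet invokes them, and the Graph method is assumed to be in scope (for example via a using static for its containing class); details may differ from the library's actual definitions.

// Hypothetical 4-node graph: 0->1, 0->2, 1->3, 2->3, every edge costing 1, searching from 0 to 3.
Stepper<int> path = Graph<int, int>(
    start: 0,
    neighbors: (node, step) =>
    {
        if (node == 0) { step(1); step(2); }
        if (node == 1 || node == 2) { step(3); }
    },
    heuristic: node => 3 - node, // rough guess of remaining cost; admissible for this toy graph
    cost: (current, neighbor) => 1,
    goal: node => node == 3);
if (path != null)
{
    path(node => Console.Write(node + " ")); // expected to print a start-to-goal path such as 0 2 3
}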
/// <summary> /// Removes the max element from the heap. /// </summary> /// <param name="keyValue">If the operation is successful, contains the maximum element in the array.</param> /// <param name="heapArrayLength">The length of the heap array. </param> /// <returns>True in case of success, and false otherwise</returns> public override bool TryRemoveRoot(out KeyValuePair <TKey, TValue> keyValue, int heapArrayLength) { keyValue = new KeyValuePair <TKey, TValue>((TKey)typeof(TKey).GetField("MaxValue").GetValue(null), default(TValue)); if (heapArrayLength == 0) { return(false); } if (heapArrayLength == 1) { keyValue = HeapArray[0]; HeapArray.Clear(); return(true); } keyValue = HeapArray[0]; HeapArray[0] = HeapArray[heapArrayLength - 1]; HeapArray.RemoveAt(heapArrayLength - 1); BubbleDown_Recursively(0, heapArrayLength - 1); /* notice that the array is shorter by one value now, thus the new array length is one smaller. */ return(true); }
public void Dequeue_Testing() { void Test <T>(T[] values, Func <T, T, CompareResult> compare) { T[] clonedValues = (T[])values.Clone(); Shuffle <T>(clonedValues); IHeap <T> heap = HeapArray.New <T>(compare); clonedValues.Stepper(x => heap.Enqueue(x)); foreach (T value in values) { T dequeue = heap.Dequeue(); Assert.IsTrue(value !.Equals(dequeue)); } } { // int compare int[] values = (..100).ToArray(); Array.Sort(values, (a, b) => - a.CompareTo(b)); Test(values, Compare); } { // string compare string[] values = (..100).ToArray(i => i.ToString()); Array.Sort(values, (a, b) => - a.CompareTo(b)); Test(values, Compare); } { // int reverse compare int[] values = (..100).ToArray(); Array.Sort(values); Test(values, (a, b) => Compare(b, a)); } { // string reverse compare string[] values = (..100).ToArray(i => i.ToString()); Array.Sort(values); Test(values, (a, b) => Compare(b, a)); } }
/// <summary>Runs the Greedy search algorithm on a graph.</summary> /// <param name="start">The node to start at.</param> /// <param name="neighbors">Step function for all neighbors of a given node.</param> /// <param name="heuristic">Computes the heuristic value of a given node in a graph.</param> /// <param name="goal">Predicate for determining if we have reached the goal node.</param> /// <returns>Stepper of the path found or null if no path exists.</returns> public static Stepper <NODE> Graph <NODE, NUMERIC>(NODE start, Neighbors <NODE> neighbors, Heuristic <NODE, NUMERIC> heuristic, Goal <NODE> goal) { // using a heap (aka priority queue) to store nodes based on their computed heuristic value IHeap <GreedyNode <NODE, NUMERIC> > fringe = new HeapArray <GreedyNode <NODE, NUMERIC> >( // NOTE: Typical graph search implementations prioritize smaller values (a, b) => Compute.Compare(b.Priority, a.Priority)); // push starting node fringe.Enqueue( new GreedyNode <NODE, NUMERIC>( null, start, default(NUMERIC))); // run the algorithm while (fringe.Count != 0) { GreedyNode <NODE, NUMERIC> current = fringe.Dequeue(); if (goal(current.Value)) { return(BuildPath(current)); } else { neighbors(current.Value, (NODE neighbor) => { fringe.Enqueue( new GreedyNode <NODE, NUMERIC>( current, neighbor, heuristic(neighbor))); }); } } return(null); // goal node was not reached (no path exists) }
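The greedy overload can be driven the same way, minus the cost delegate. A hedged sketch mirroring the A* usage above, with the same caveats about the inferred delegate shapes and the containing class.

// Hypothetical greedy run over the same toy graph used for the A* sketch, guided by the heuristic alone.
Stepper<int> greedyPath = Graph<int, int>(
    start: 0,
    neighbors: (node, step) =>
    {
        if (node == 0) { step(1); step(2); }
        if (node == 1 || node == 2) { step(3); }
    },
    heuristic: node => 3 - node,
    goal: node => node == 3);
greedyPath?.Invoke(node => Console.Write(node + " ")); // expected to print a path such as 0 2 3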
static void Main(string[] args) { Random random = new Random(); int test = 10; Console.WriteLine("You are running the Data Structures example."); Console.WriteLine("======================================================"); Console.WriteLine(); #region Link (aka Tuple) Console.WriteLine(" Link------------------------------------"); Console.WriteLine(); Console.WriteLine(" A \"Link\" is like a System.Tuple that implements"); Console.WriteLine(" Towel.DataStructures.DataStructure. A Link/Tuple is"); Console.WriteLine(" used when you have a small, known-sized set of objects"); Console.WriteLine(" that you want to bundle together without making a"); Console.WriteLine(" custom class."); Console.WriteLine(); Link link = new Link <int, int, int, int, int, int>(0, 1, 2, 3, 4, 5); Console.Write(" Traversal: "); link.Stepper(i => Console.Write(i)); Console.WriteLine(); Console.WriteLine(" Size: " + link.Size); Console.WriteLine(); #endregion #region Indexed (aka Array) Console.WriteLine(" Indexed---------------------------------"); Console.WriteLine(); Console.WriteLine(" An \"Indexed\" is just a wrapper for arrays that implements"); Console.WriteLine(" Towel.DataStructures.DataStructure. An array is used when"); Console.WriteLine(" dealing with static-sized, known-sized sets of data. Arrays"); Console.WriteLine(" can be sorted along one dimension for binary searching algorithms."); Console.WriteLine(); IIndexed <int> indexed = new IndexedArray <int>(test); Console.Write(" Filling in (0-" + (test - 1) + ")..."); for (int i = 0; i < test; i++) { indexed[i] = i; } Console.WriteLine(); Console.Write(" Traversal: "); indexed.Stepper(i => Console.Write(i)); Console.WriteLine(); Console.WriteLine(" Length: " + indexed.Length); Console.WriteLine(); #endregion #region Addable (aka List) Console.WriteLine(" Addable---------------------------------"); Console.WriteLine(); Console.WriteLine(" An \"Addable\" is like an IList that implements"); Console.WriteLine(" Towel.DataStructures.DataStructure. \"AddableArray\" is"); Console.WriteLine(" the array implementation while \"AddableLinked\" is the"); Console.WriteLine(" linked-list implementation. An Addable/List is used"); Console.WriteLine(" when dealing with an unknown quantity of data that you"); Console.WriteLine(" will likely have to enumerate/step through. 
The"); Console.WriteLine(" AddableArray shares the properties of an Indexed/Array in"); Console.WriteLine(" that it can be relateively quickly sorted along 1 dimensions"); Console.WriteLine(" for binary search algorithms."); Console.WriteLine(); // AddableArray --------------------------------------- IAddable <int> addableArray = new AddableArray <int>(test); Console.Write(" [AddableArray] Adding (0-" + (test - 1) + ")..."); for (int i = 0; i < test; i++) { addableArray.Add(i); } Console.WriteLine(); Console.Write(" [AddableArray] Traversal: "); addableArray.Stepper(i => Console.Write(i)); Console.WriteLine(); Console.WriteLine(" [AddableArray] Count: " + addableArray.Count); addableArray.Clear(); // Clears the addable Console.WriteLine(); // AddableLinked --------------------------------------- IAddable <int> addableLinked = new AddableLinked <int>(); Console.Write(" [AddableLinked] Adding (0-" + (test - 1) + ")..."); for (int i = 0; i < test; i++) { addableLinked.Add(i); } Console.WriteLine(); Console.Write(" [AddableLinked] Traversal: "); addableLinked.Stepper(i => Console.Write(i)); Console.WriteLine(); Console.WriteLine(" [AddableLinked] Count: " + addableLinked.Count); addableLinked.Clear(); // Clears the addable Console.WriteLine(); #endregion #region FirstInLastOut (aka stack) { Console.WriteLine(" FirstInLastOut---------------------------------"); Console.WriteLine(); Console.WriteLine(" An \"FirstInLastOut\" is a Stack that implements"); Console.WriteLine(" Towel.DataStructures.DataStructure. \"FirstInLastOutArray\" is"); Console.WriteLine(" the array implementation while \"FirstInLastOutLinked\" is the"); Console.WriteLine(" the linked-list implementation. A FirstInLastOut/Stack is used"); Console.WriteLine(" specifically when you need the algorithm provided by the Push"); Console.WriteLine(" and Pop functions."); Console.WriteLine(); IFirstInLastOut <int> firstInLastOutArray = new FirstInLastOutArray <int>(); Console.Write(" [FirstInLastOutArray] Pushing (0-" + (test - 1) + ")..."); for (int i = 0; i < test; i++) { firstInLastOutArray.Push(i); } Console.WriteLine(); Console.Write(" [FirstInLastOutArray] Traversal: "); firstInLastOutArray.Stepper(i => Console.Write(i)); Console.WriteLine(); Console.WriteLine(" [FirstInLastOutArray] Pop: " + firstInLastOutArray.Pop()); Console.WriteLine(" [FirstInLastOutArray] Pop: " + firstInLastOutArray.Pop()); Console.WriteLine(" [FirstInLastOutArray] Peek: " + firstInLastOutArray.Peek()); Console.WriteLine(" [FirstInLastOutArray] Pop: " + firstInLastOutArray.Pop()); Console.WriteLine(" [FirstInLastOutArray] Count: " + firstInLastOutArray.Count); firstInLastOutArray.Clear(); // Clears the firstInLastOut Console.WriteLine(); IFirstInLastOut <int> firstInLastOutLinked = new FirstInLastOutLinked <int>(); Console.Write(" [FirstInLastOutLinked] Pushing (0-" + (test - 1) + ")..."); for (int i = 0; i < test; i++) { firstInLastOutLinked.Push(i); } Console.WriteLine(); Console.Write(" [FirstInLastOutLinked] Traversal: "); firstInLastOutLinked.Stepper(i => Console.Write(i)); Console.WriteLine(); Console.WriteLine(" [FirstInLastOutLinked] Pop: " + firstInLastOutLinked.Pop()); Console.WriteLine(" [FirstInLastOutLinked] Pop: " + firstInLastOutLinked.Pop()); Console.WriteLine(" [FirstInLastOutLinked] Peek: " + firstInLastOutLinked.Peek()); Console.WriteLine(" [FirstInLastOutLinked] Pop: " + firstInLastOutLinked.Pop()); Console.WriteLine(" [FirstInLastOutLinked] Count: " + firstInLastOutLinked.Count); firstInLastOutLinked.Clear(); // Clears the 
firstInLastOut Console.WriteLine(); } #endregion #region FirstInFirstOut (aka Queue) { Console.WriteLine(" FirstInFirstOut---------------------------------"); Console.WriteLine(); Console.WriteLine(" A \"FirstInFirstOut\" is a Queue that implements"); Console.WriteLine(" Towel.DataStructures.DataStructure. \"FirstInFirstOutArray\" is"); Console.WriteLine(" the array implementation while \"FirstInFirstOutLinked\" is the"); Console.WriteLine(" linked-list implementation. A FirstInFirstOut/Queue is used"); Console.WriteLine(" specifically when you need the algorithm provided by the Enqueue"); Console.WriteLine(" and Dequeue functions."); Console.WriteLine(); IFirstInFirstOut <int> firstInFirstOutArray = new FirstInFirstOutArray <int>(); Console.Write(" [FirstInFirstOutArray] Enqueuing (0-" + (test - 1) + ")..."); for (int i = 0; i < test; i++) { firstInFirstOutArray.Enqueue(i); } Console.WriteLine(); Console.Write(" [FirstInFirstOutArray] Traversal: "); firstInFirstOutArray.Stepper(i => Console.Write(i)); Console.WriteLine(); Console.WriteLine(" [FirstInFirstOutArray] Dequeue: " + firstInFirstOutArray.Dequeue()); Console.WriteLine(" [FirstInFirstOutArray] Dequeue: " + firstInFirstOutArray.Dequeue()); Console.WriteLine(" [FirstInFirstOutArray] Peek: " + firstInFirstOutArray.Peek()); Console.WriteLine(" [FirstInFirstOutArray] Dequeue: " + firstInFirstOutArray.Dequeue()); Console.WriteLine(" [FirstInFirstOutArray] Count: " + firstInFirstOutArray.Count); firstInFirstOutArray.Clear(); // Clears the firstInFirstOut Console.WriteLine(); IFirstInFirstOut <int> firstInFirstOutLinked = new FirstInFirstOutLinked <int>(); Console.Write(" [FirstInFirstOutLinked] Enqueuing (0-" + (test - 1) + ")..."); for (int i = 0; i < test; i++) { firstInFirstOutLinked.Enqueue(i); } Console.WriteLine(); Console.Write(" [FirstInFirstOutLinked] Traversal: "); firstInFirstOutLinked.Stepper(i => Console.Write(i)); Console.WriteLine(); Console.WriteLine(" [FirstInFirstOutLinked] Dequeue: " + firstInFirstOutLinked.Dequeue()); Console.WriteLine(" [FirstInFirstOutLinked] Dequeue: " + firstInFirstOutLinked.Dequeue()); Console.WriteLine(" [FirstInFirstOutLinked] Peek: " + firstInFirstOutLinked.Peek()); Console.WriteLine(" [FirstInFirstOutLinked] Dequeue: " + firstInFirstOutLinked.Dequeue()); Console.WriteLine(" [FirstInFirstOutLinked] Count: " + firstInFirstOutLinked.Count); firstInFirstOutLinked.Clear(); // Clears the firstInFirstOut Console.WriteLine(); } #endregion #region Heap { Console.WriteLine(" Heap---------------------------------"); Console.WriteLine(); Console.WriteLine(" A \"Heap\" is a binary tree that stores items based on priorities."); Console.WriteLine(" It implements Towel.DataStructures.DataStructure like the others."); Console.WriteLine(" It uses sifting algorithms to move nodes vertically through itself."); Console.WriteLine(" It is often the best data structure for standard priority queues."); Console.WriteLine(" \"HeapArray\" is an implementation where the tree has been flattened"); Console.WriteLine(" into an array."); Console.WriteLine(); Console.WriteLine(" Let's say the priority is how close a number is to \"5\"."); Console.WriteLine(" So \"Dequeue\" will give us the next closest value to \"5\"."); Comparison Priority(int a, int b) { int _a = Compute.AbsoluteValue(a - 5); int _b = Compute.AbsoluteValue(b - 5); Comparison comparison = Compare.Wrap(_b.CompareTo(_a)); return(comparison); } Console.WriteLine(); IHeap <int> heapArray = new HeapArray <int>(Priority); Console.Write(" [HeapArray] Enqueuing (0-" + (test - 
1) + ")..."); for (int i = 0; i < test; i++) { heapArray.Enqueue(i); } Console.WriteLine(); Console.WriteLine(" [HeapArray] Dequeue: " + heapArray.Dequeue()); Console.WriteLine(" [HeapArray] Dequeue: " + heapArray.Dequeue()); Console.WriteLine(" [HeapArray] Peek: " + heapArray.Peek()); Console.WriteLine(" [HeapArray] Dequeue: " + heapArray.Dequeue()); Console.WriteLine(" [HeapArray] Count: " + heapArray.Count); heapArray.Clear(); // Clears the heapArray Console.WriteLine(); } #endregion #region Tree //Console.WriteLine(" Tree-----------------------------"); //Tree<int> tree_Map = new TreeMap<int>(0, Compute.Equal, Hash.Default); //for (int i = 1; i < test; i++) //{ // tree_Map.Add(i, i / Compute.SquareRoot(i)); //} //Console.Write(" Children of 0 (root): "); //tree_Map.Children(0, (int i) => { Console.Write(i + " "); }); //Console.WriteLine(); //Console.Write(" Children of " + ((int)System.Math.Sqrt(test) - 1) + " (root): "); //tree_Map.Children(((int)System.Math.Sqrt(test) - 1), (int i) => { Console.Write(i + " "); }); //Console.WriteLine(); //Console.Write(" Traversal: "); //tree_Map.Stepper((int i) => { Console.Write(i + " "); }); //Console.WriteLine(); //Console.WriteLine(); #endregion #region AVL Tree { Console.WriteLine(" AvlTree------------------------------------------------"); Console.WriteLine(); Console.WriteLine(" An AVL Tree is a sorted binary tree."); Console.WriteLine(" It implements Towel.DataStructures.DataStructure like the others."); Console.WriteLine(" It allows for very fast 1D ranged queries/traversals."); Console.WriteLine(" It is very similar to an Red Black tree, but uses a different sorting algorithm."); Console.WriteLine(); IAvlTree <int> avlTree = new AvlTreeLinked <int>(); Console.Write(" Adding (0-" + (test - 1) + ")..."); for (int i = 0; i < test; i++) { avlTree.Add(i); } Console.WriteLine(); Console.Write(" Traversal: "); avlTree.Stepper(i => Console.Write(i)); Console.WriteLine(); //// Note: Because the nodes in AVL Tree linked do not have //// a parent pointer, the IEnumerable "foreach" iteration //// is extremely slow and should be avoided. It requires //// a stack for it's iteration. 
// //Console.Write(" Traversal Foreach: "); //foreach (int i in avlTree) //{ // Console.Write(i); //} //Console.WriteLine(); int minimum = random.Next(1, test / 2); int maximum = random.Next(1, test / 2) + test / 2; Console.Write(" Ranged Traversal [" + minimum + "-" + maximum + "]: "); avlTree.Stepper(i => Console.Write(i), minimum, maximum); Console.WriteLine(); int removal = random.Next(0, test); Console.Write(" Remove(" + removal + "): "); avlTree.Remove(removal); avlTree.Stepper(i => Console.Write(i)); Console.WriteLine(); int contains = random.Next(0, test); Console.WriteLine(" Contains(" + contains + "): " + avlTree.Contains(contains)); Console.WriteLine(" Current Least: " + avlTree.CurrentLeast); Console.WriteLine(" Current Greatest: " + avlTree.CurrentGreatest); Console.WriteLine(" Count: " + avlTree.Count); avlTree.Clear(); // Clears the AVL tree Console.WriteLine(); } #endregion #region Red-Black Tree { Console.WriteLine(" Red-Black Tree------------------------------------------------"); Console.WriteLine(); Console.WriteLine(" A Red-Black Tree is a sorted binary tree."); Console.WriteLine(" It implements Towel.DataStructures.DataStructure like the others."); Console.WriteLine(" It allows for very fast 1D ranged queries/traversals."); Console.WriteLine(" It is very similar to an AVL tree, but uses a different balancing algorithm."); Console.WriteLine(); IRedBlackTree <int> redBlackTree = new RedBlackTreeLinked <int>(); Console.Write(" Adding (0-" + (test - 1) + ")..."); for (int i = 0; i < test; i++) { redBlackTree.Add(i); } Console.WriteLine(); Console.Write(" Traversal: "); redBlackTree.Stepper(i => Console.Write(i)); Console.WriteLine(); int minimum = random.Next(1, test / 2); int maximum = random.Next(1, test / 2) + test / 2; Console.Write(" Ranged Traversal [" + minimum + "-" + maximum + "]: "); redBlackTree.Stepper(i => Console.Write(i), minimum, maximum); Console.WriteLine(); int removal = random.Next(0, test); Console.Write(" Remove(" + removal + "): "); redBlackTree.Remove(removal); redBlackTree.Stepper(i => Console.Write(i)); Console.WriteLine(); int contains = random.Next(0, test); Console.WriteLine(" Contains(" + contains + "): " + redBlackTree.Contains(contains)); Console.WriteLine(" Current Least: " + redBlackTree.CurrentLeast); Console.WriteLine(" Current Greatest: " + redBlackTree.CurrentGreatest); Console.WriteLine(" Count: " + redBlackTree.Count); redBlackTree.Clear(); // Clears the Red Black tree Console.WriteLine(); } #endregion #region BTree { Console.WriteLine(" B Tree------------------------------------------------"); Console.WriteLine(); Console.WriteLine(" A B Tree is a sorted tree that allows multiple values to"); Console.WriteLine(" be stored per node. This makes it sort of a hybrid between a"); Console.WriteLine(" binary tree and an array. Because multiple values are stored "); Console.WriteLine(" per node, it means fewer nodes must be traversed to completely"); Console.WriteLine(" traverse the values in the B tree."); Console.WriteLine(); Console.WriteLine(" The generic B Tree in Towel is still in development."); Console.WriteLine(); } #endregion #region Set { Console.WriteLine(" Set------------------------------------------------"); Console.WriteLine(); Console.WriteLine(" A Set is like an Addable/List, but it does not allow duplicates. Sets are"); Console.WriteLine(" usually implemented using hash codes. 
Implementations with hash codes"); Console.WriteLine(" usually have very fast \"Contains\" checks to see if a value has already"); Console.WriteLine(" been added to the set."); Console.WriteLine(); ISet <int> setHashLinked = new SetHashLinked <int>(); Console.Write(" Adding (0-" + (test - 1) + ")..."); for (int i = 0; i < test; i++) { setHashLinked.Add(i); } Console.WriteLine(); Console.Write(" Traversal: "); setHashLinked.Stepper(i => Console.Write(i)); Console.WriteLine(); int a = random.Next(0, test); setHashLinked.Remove(a); Console.Write(" Remove(" + a + "): "); setHashLinked.Stepper(i => Console.Write(i)); Console.WriteLine(); int b = random.Next(0, test); Console.WriteLine(" Contains(" + b + "): " + setHashLinked.Contains(b)); Console.WriteLine(" Count: " + setHashLinked.Count); Console.WriteLine(); } #endregion #region Map (aka Dictionary) { Console.WriteLine(" Map------------------------------------------------"); Console.WriteLine(); Console.WriteLine(" A Map (aka Dictionary) is similar to a Set, but it stores two values (a "); Console.WriteLine(" key and a value). Maps do not allow duplicate keys much like Sets don't"); Console.WriteLine(" allow duplicate values. When provided with the key, the Map uses that key"); Console.WriteLine(" to look up the value that it is associated with. Thus, it allows you to "); Console.WriteLine(" \"map\" one object to another. As with Sets, Maps are usually implemented"); Console.WriteLine(" using hash codes."); Console.WriteLine(); // Note: the first generic is the value, the second is the key IMap <string, int> mapHashLinked = new MapHashLinked <string, int>(); Console.WriteLine(" Let's map each int to its word representation (ex 1 -> One)."); Console.Write(" Adding (0-" + (test - 1) + ")..."); for (int i = 0; i < test; i++) { mapHashLinked.Add(i, ((decimal)i).ToEnglishWords()); } Console.WriteLine(); Console.WriteLine(" Traversal: "); mapHashLinked.Keys(i => Console.WriteLine(" " + i + "->" + mapHashLinked[i])); Console.WriteLine(); int a = random.Next(0, test); mapHashLinked.Remove(a); Console.Write(" Remove(" + a + "): "); mapHashLinked.Keys(i => Console.Write(i)); Console.WriteLine(); int b = random.Next(0, test); Console.WriteLine(" Contains(" + b + "): " + mapHashLinked.Contains(b)); Console.WriteLine(" Count: " + mapHashLinked.Count); Console.WriteLine(); } #endregion #region OmnitreePoints { Console.WriteLine(" OmnitreePoints--------------------------------------"); Console.WriteLine(); Console.WriteLine(" An Omnitree is an ND SPT that allows for"); Console.WriteLine(" multidimensional sorting. Any time you need to look"); Console.WriteLine(" items up based on multiple fields/properties, then"); Console.WriteLine(" you might want to use an Omnitree. If you need to"); Console.WriteLine(" perform ranged queries on multiple dimensions, then"); Console.WriteLine(" the Omnitree is the data structure for you."); Console.WriteLine(); Console.WriteLine(" The \"OmnitreePoints\" stores individual points (vectors),"); Console.WriteLine(" and the \"OmnitreeBounds\" stores bounded objects (spaces)."); Console.WriteLine(); IOmnitreePoints <int, double, string, decimal> omnitree = new OmnitreePointsLinked <int, double, string, decimal>( // This is a location delegate. 
(how to locate the item along each dimension) (int index, out double a, out string b, out decimal c) => { a = index; b = index.ToString(); c = index; }); Console.Write(" Adding (0-" + (test - 1) + ")..."); for (int i = 0; i < test; i++) { omnitree.Add(i); } Console.WriteLine(); Console.Write(" Traversal: "); omnitree.Stepper(i => Console.Write(i)); Console.WriteLine(); int minimumXZ = random.Next(1, test / 2); int maximumXZ = random.Next(1, test / 2) + test / 2; string minimumY = minimumXZ.ToString(); string maximumY = maximumXZ.ToString(); Console.Write(" Spacial Traversal [" + "(" + minimumXZ + ", \"" + minimumY + "\", " + minimumXZ + ")->" + "(" + maximumXZ + ", \"" + maximumY + "\", " + maximumXZ + ")]: "); omnitree.Stepper(i => Console.Write(i), minimumXZ, maximumXZ, minimumY, maximumY, minimumXZ, maximumXZ); Console.WriteLine(); // Note: this "look up" is just a very narrow spacial query that (since we know the data) // will only give us one result. int lookUp = random.Next(0, test); string lookUpToString = lookUp.ToString(); Console.Write(" Look Up (" + lookUp + ", \"" + lookUpToString + "\", " + lookUp + "): "); omnitree.Stepper(i => Console.Write(i), lookUp, lookUp, lookUp.ToString(), lookUp.ToString(), lookUp, lookUp); Console.WriteLine(); // Ignoring dimensions on traversals example. // If you want to ignore a dimension on a traversal, you can do so like this: omnitree.Stepper(i => { /*Do Nothing*/ }, lookUp, lookUp, Omnitree.Bound <string> .None, Omnitree.Bound <string> .None, Omnitree.Bound <decimal> .None, Omnitree.Bound <decimal> .None); Console.Write(" Counting Items In a Space [" + "(" + minimumXZ + ", \"" + minimumY + "\", " + minimumXZ + ")->" + "(" + maximumXZ + ", \"" + maximumY + "\", " + maximumXZ + ")]: "); omnitree.CountSubSpace( minimumXZ, maximumXZ, minimumY, maximumY, minimumXZ, maximumXZ); Console.WriteLine(); int removalMinimum = random.Next(1, test / 2); int removalMaximum = random.Next(1, test / 2) + test / 2; string removalMinimumY = removalMinimum.ToString(); string removalMaximumY = removalMaximum.ToString(); Console.Write(" Remove (" + removalMinimum + "-" + removalMaximum + "): "); omnitree.Remove( removalMinimum, removalMaximum, removalMinimumY, removalMaximumY, removalMinimum, removalMaximum); omnitree.Stepper(i => Console.Write(i)); Console.WriteLine(); Console.WriteLine(" Dimensions: " + omnitree.Dimensions); Console.WriteLine(" Count: " + omnitree.Count); omnitree.Clear(); // Clears the Omnitree Console.WriteLine(); } #endregion #region OmnitreeBounds { Console.WriteLine(" OmnitreeBounds--------------------------------------"); Console.WriteLine(); Console.WriteLine(" An Omnitree is an ND SPT that allows for"); Console.WriteLine(" multidimensional sorting. Any time you need to look"); Console.WriteLine(" items up based on multiple fields/properties, then"); Console.WriteLine(" you might want to use an Omnitree. If you need to"); Console.WriteLine(" perform ranged queries on multiple dimensions, then"); Console.WriteLine(" the Omnitree is the data structure for you."); Console.WriteLine(); Console.WriteLine(" The \"OmnitreePoints\" stores individual points (vectors),"); Console.WriteLine(" and the \"OmnitreeBounds\" stores bounded objects (spaces)."); Console.WriteLine(); IOmnitreeBounds <int, double, string, decimal> omnitree = new OmnitreeBoundsLinked <int, double, string, decimal>( // This is a location delegate. 
(how to locate the item along each dimension) (int index, out double min1, out double max1, out string min2, out string max2, out decimal min3, out decimal max3) => { string indexToString = index.ToString(); min1 = index; max1 = index; min2 = indexToString; max2 = indexToString; min3 = index; max3 = index; }); Console.Write(" Adding (0-" + (test - 1) + ")..."); for (int i = 0; i < test; i++) { omnitree.Add(i); } Console.WriteLine(); Console.Write(" Traversal: "); omnitree.Stepper(i => Console.Write(i)); Console.WriteLine(); int minimumXZ = random.Next(1, test / 2); int maximumXZ = random.Next(1, test / 2) + test / 2; string minimumY = minimumXZ.ToString(); string maximumY = maximumXZ.ToString(); Console.Write(" Spacial Traversal [" + "(" + minimumXZ + ", \"" + minimumY + "\", " + minimumXZ + ")->" + "(" + maximumXZ + ", \"" + maximumY + "\", " + maximumXZ + ")]: "); omnitree.StepperOverlapped(i => Console.Write(i), minimumXZ, maximumXZ, minimumY, maximumY, minimumXZ, maximumXZ); Console.WriteLine(); // Note: this "look up" is just a very narrow spacial query that (since we know the data) // will only give us one result. int lookUpXZ = random.Next(0, test); string lookUpY = lookUpXZ.ToString(); Console.Write(" Look Up (" + lookUpXZ + ", \"" + lookUpY + "\", " + lookUpXZ + "): "); omnitree.StepperOverlapped(i => Console.Write(i), lookUpXZ, lookUpXZ, lookUpY, lookUpY, lookUpXZ, lookUpXZ); Console.WriteLine(); // Ignoring dimensions on traversals example. // If you want to ignore a dimension on a traversal, you can do so like this: omnitree.StepperOverlapped(i => { /*Do Nothing*/ }, lookUpXZ, lookUpXZ, // The "None" means there is no bound, so all values are valid Omnitree.Bound <string> .None, Omnitree.Bound <string> .None, Omnitree.Bound <decimal> .None, Omnitree.Bound <decimal> .None); Console.Write(" Counting Items In a Space [" + "(" + minimumXZ + ", \"" + minimumY + "\", " + minimumXZ + ")->" + "(" + maximumXZ + ", \"" + maximumY + "\", " + maximumXZ + ")]: " + omnitree.CountSubSpaceOverlapped( minimumXZ, maximumXZ, minimumY, maximumY, minimumXZ, maximumXZ)); Console.WriteLine(); int removalMinimumXZ = random.Next(1, test / 2); int removalMaximumXZ = random.Next(1, test / 2) + test / 2; string removalMinimumY = removalMinimumXZ.ToString(); string removalMaximumY = removalMaximumXZ.ToString(); Console.Write(" Remove (" + removalMinimumXZ + "-" + removalMaximumXZ + "): "); omnitree.RemoveOverlapped( removalMinimumXZ, removalMaximumXZ, removalMinimumY, removalMaximumY, removalMinimumXZ, removalMaximumXZ); omnitree.Stepper(i => Console.Write(i)); Console.WriteLine(); Console.WriteLine(" Dimensions: " + omnitree.Dimensions); Console.WriteLine(" Count: " + omnitree.Count); omnitree.Clear(); // Clears the Omnitree Console.WriteLine(); } #endregion #region KD Tree { Console.WriteLine(" KD Tree------------------------------------------------"); Console.WriteLine(); Console.WriteLine(" A KD Tree is a binary tree that stores points sorted along an"); Console.WriteLine(" arbitrary number of dimensions. 
So it performs multidimensional"); Console.WriteLine(" sorting similar to the Omnitree (Quadtree/Octree) in Towel, but"); Console.WriteLine(" it uses a completely different algorithm and format."); Console.WriteLine(); Console.WriteLine(" The generic KD Tree in Towel is still in development."); Console.WriteLine(); } #endregion #region Graph { Console.WriteLine(" Graph------------------------------------------------"); Console.WriteLine(); Console.WriteLine(" A Graph is a data structure of nodes and edges. Nodes are values"); Console.WriteLine(" and edges are connections between those values. Graphs are often"); Console.WriteLine(" used to model real world data such as maps, and are often used in"); Console.WriteLine(" path finding algoritms. See the \"Algorithms\" example for path"); Console.WriteLine(" finding examples. This is just an example of how to make a graph."); Console.WriteLine(" A \"GraphSetOmnitree\" is an implementation where nodes are stored."); Console.WriteLine(" in a Set and edges are stored in an Omnitree (aka Quadtree)."); Console.WriteLine(); IGraph <int> graphSetOmnitree = new GraphSetOmnitree <int>(); Console.WriteLine(" Adding Nodes (0-" + (test - 1) + ")..."); for (int i = 0; i < test; i++) { graphSetOmnitree.Add(i); } int edgesPerNode = 3; Console.WriteLine(" Adding Random Edges (0-3 per node)..."); for (int i = 0; i < test; i++) { // lets use a heap to randomize the edges using random priorities IHeap <(int, int)> heap = new HeapArray <(int, int)>((x, y) => Compare.Wrap(x.Item2.CompareTo(y.Item2))); for (int j = 0; j < test; j++) { if (j != i) { heap.Enqueue((j, random.Next())); } } // dequeue some random edges from the heap and add them to the graph int randomEdgeCount = random.Next(edgesPerNode + 1); for (int j = 0; j < randomEdgeCount; j++) { graphSetOmnitree.Add(i, heap.Dequeue().Item1); } } Console.Write(" Nodes (Traversal): "); graphSetOmnitree.Stepper(i => Console.Write(i)); Console.WriteLine(); Console.WriteLine(" Edges (Traversal): "); graphSetOmnitree.Stepper((from, to) => Console.WriteLine(" " + from + "->" + to)); Console.WriteLine(); int a = random.Next(0, test); Console.Write(" Neighbors (" + a + "):"); graphSetOmnitree.Neighbors(a, i => Console.Write(" " + i)); Console.WriteLine(); int b = random.Next(0, test / 2); int c = random.Next(test / 2, test); Console.WriteLine(" Are Adjacent (" + b + ", " + c + "): " + graphSetOmnitree.Adjacent(b, c)); Console.WriteLine(" Node Count: " + graphSetOmnitree.NodeCount); Console.WriteLine(" Edge Count: " + graphSetOmnitree.EdgeCount); graphSetOmnitree.Clear(); // Clears the graph Console.WriteLine(); } #endregion #region Trie { Console.WriteLine(" Trie------------------------------------------------"); Console.WriteLine(); Console.WriteLine(" A Trie is a tree where portions of the data are stored in each node"); Console.WriteLine(" such that when you traverse the tree to a leaf, you have read the contents"); Console.WriteLine(" of that leaf along the way. Because of this, a Trie allows for its values"); Console.WriteLine(" to share data, which is a form of compression. So a Trie may be used to save"); Console.WriteLine(" memory. 
A trie may also be a very useful tool in pattern matching, because it"); Console.WriteLine(" allows for culling based on portions of the data."); Console.WriteLine(); Console.WriteLine(" The generic Trie in Towel is still in development."); Console.WriteLine(); } #endregion Console.WriteLine("============================================"); Console.WriteLine("Examples Complete..."); Console.ReadLine(); }
static void Main(string[] args) { Random random = new Random(); int test = 10; Console.WriteLine("You are runnning the Data Structures example."); Console.WriteLine("======================================================"); Console.WriteLine(); #region Link Console.WriteLine(" Testing Link-------------------------------"); Console.WriteLine(" Size: 6"); Link link = new Link <int, int, int, int, int, int>(0, 1, 2, 3, 4, 5); Console.Write(" Traversal: "); link.Stepper((dynamic current) => { Console.Write(current); }); Console.WriteLine(); // Saving to a file //string linklink_file = "link." + ToExtension(link.GetType()); //Console.WriteLine(" File: \"" + linklink_file + "\""); //Console.WriteLine(" Serialized: " + Serialize(linklink_file, link)); //Link<int, int, int, int, int, int> deserialized_linklink; //Console.WriteLine(" Deserialized: " + Deserialize(linklink_file, out deserialized_linklink)); Console.WriteLine(); #endregion #region Array Console.WriteLine(" Testing Array_Array<int>-------------------"); Indexed <int> array = new IndexedArray <int>(test); for (int i = 0; i < test; i++) { array[i] = i; } Console.Write(" Traversal: "); array.Stepper((int current) => { Console.Write(current); }); Console.WriteLine(); // Saving to a file //string arrayarray_file = "array." + ToExtension(array.GetType()); //Console.WriteLine(" File: \"" + arrayarray_file + "\""); //Console.WriteLine(" Serialized: " + Serialize(arrayarray_file, array)); //ArrayArray<int> deserialized_arrayarray; //Console.WriteLine(" Deserialized: " + Deserialize(arrayarray_file, out deserialized_arrayarray)); Console.WriteLine(); #endregion #region List Console.WriteLine(" Testing List_Array<int>--------------------"); Addable <int> list_array = new AddableArray <int>(test); for (int i = 0; i < test; i++) { list_array.Add(i); } Console.Write(" Traversal: "); list_array.Stepper((int current) => { Console.Write(current); }); Console.WriteLine(); //string list_array_serialization = (list_array as ListArray<int>).Serialize(x => x.ToString()); //using (StreamWriter writer = new StreamWriter("ListArray.ListArray")) //{ // writer.WriteLine(list_array_serialization); //} //using (StreamReader reader = new StreamReader("ListArray.ListArray")) //{ // list_array = ListArray<int>.Deserialize(reader.ReadToEnd(), x => Int16.Parse(x.Trim())); //} //Console.Write(" Serialization/Deserialization is possible."); list_array.Add(11); list_array.Remove(7); Console.WriteLine(); Console.WriteLine(); //ListArray<ListArray<int>> list_array2 = new ListArray<ListArray<int>>(test); //for (int i = 0; i < test; i++) //{ // ListArray<int> nested_list = new ListArray<int>(); // for (int j = 0; j < test; j++) // { // nested_list.Add(j); // } // list_array2.Add(nested_list); //} //string list_array2_serialization = list_array2.Serialize(x => x.Serialize(y => y.ToString())); //using (StreamWriter writer = new StreamWriter("ListArray2.ListArray")) //{ // writer.WriteLine(list_array2_serialization); //} //using (StreamReader reader = new StreamReader("ListArray2.ListArray")) //{ // list_array2 = ListArray<ListArray<int>>.Deserialize(reader.ReadToEnd(), x => ListArray<int>.Deserialize(x, y => Int16.Parse(y.Trim()))); //} Console.WriteLine(" Testing List_Linked<int>-------------------"); Addable <int> list_linked = new AddableLinked <int>(); for (int i = 0; i < test; i++) { list_linked.Add(i); } Console.Write(" Traversal: "); list_linked.Stepper((int current) => { Console.Write(current); }); Console.WriteLine(); // Saving to a file //string listlinked_file = 
"list_linked." + ToExtension(list_linked.GetType()); //Console.WriteLine(" File: \"" + listlinked_file + "\""); //Console.WriteLine(" Serialized: " + Serialize(listlinked_file, list_linked)); //ListLinked<int> deserialized_listlinked; //Console.WriteLine(" Deserialized: " + Deserialize(listlinked_file, out deserialized_listlinked)); Console.WriteLine(); #endregion #region Stack Console.WriteLine(" Testing Stack_Linked<int>------------------"); FirstInLastOut <int> stack_linked = new FirstInLastOutLinked <int>(); for (int i = 0; i < test; i++) { stack_linked.Push(i); } Console.Write(" Traversal: "); stack_linked.Stepper((int current) => { Console.Write(current); }); Console.WriteLine(); // Saving to a file //string stacklinked_file = "stack_linked." + ToExtension(stack_linked.GetType()); //Console.WriteLine(" File: \"" + stacklinked_file + "\""); //Console.WriteLine(" Serialized: " + Serialize(stacklinked_file, stack_linked)); //StackLinked<int> deserialized_stacklinked; //Console.WriteLine(" Deserialized: " + Deserialize(stacklinked_file, out deserialized_stacklinked)); Console.WriteLine(); #endregion #region Queue Console.WriteLine(" Testing Queue_Linked<int>------------------"); FirstInFirstOut <int> queue_linked = new FirstInFirstOutLinked <int>(); for (int i = 0; i < test; i++) { queue_linked.Enqueue(i); } Console.Write(" Traversal: "); queue_linked.Stepper((int current) => { Console.Write(current); }); Console.WriteLine(); // Saving to a file //string queuelinked_file = "queue_linked." + ToExtension(queue_linked.GetType()); //Console.WriteLine(" File: \"" + queuelinked_file + "\""); //Console.WriteLine(" Serialized: " + Serialize(queuelinked_file, queue_linked)); //QueueLinked<int> deserialized_queuelinked; //Console.WriteLine(" Deserialized: " + Deserialize(queuelinked_file, out deserialized_queuelinked)); Console.WriteLine(); #endregion #region Heap Console.WriteLine(" Testing Heap_Array<int>--------------------"); Heap <int> heap_array = new HeapArray <int>(Compute.Compare); for (int i = 0; i < test; i++) { heap_array.Enqueue(i); } Console.Write(" Delegate: "); heap_array.Stepper((int current) => { Console.Write(current); }); Console.WriteLine(); // Saving to a file //string heaplinked_file = "heap_array." + ToExtension(heap_array.GetType()); //Console.WriteLine(" File: \"" + heaplinked_file + "\""); //Console.WriteLine(" Serialized: " + Serialize(heaplinked_file, heap_array)); //HeapArray<int> deserialized_heaplinked; //Console.WriteLine(" Deserialized: " + Deserialize(heaplinked_file, out deserialized_heaplinked)); Console.WriteLine(); #endregion #region Tree Console.WriteLine(" Testing Tree_Map<int>----------------------"); Tree <int> tree_Map = new TreeMap <int>(0, Compute.Equal, Hash.Default); for (int i = 1; i < test; i++) { tree_Map.Add(i, i / (int)System.Math.Sqrt(test)); } Console.Write(" Children of 0 (root): "); tree_Map.Children(0, (int i) => { Console.Write(i + " "); }); Console.WriteLine(); Console.Write(" Children of " + ((int)System.Math.Sqrt(test) - 1) + " (root): "); tree_Map.Children(((int)System.Math.Sqrt(test) - 1), (int i) => { Console.Write(i + " "); }); Console.WriteLine(); Console.Write(" Traversal: "); tree_Map.Stepper((int i) => { Console.Write(i + " "); }); Console.WriteLine(); // Saving to a file //string treelinked_file = "tree_Map." 
+ ToExtension(tree_Map.GetType()); //Console.WriteLine(" File: \"" + treelinked_file + "\""); //Console.WriteLine(" Serialized: " + Serialize(treelinked_file, tree_Map)); //TreeMap<int> deserialized_treelinked; //Console.WriteLine(" Deserialized: " + Deserialize(treelinked_file, out deserialized_treelinked)); Console.WriteLine(); #endregion #region AVL Tree //Console.WriteLine(" Testing AvlTree_Linked<int>----------------"); //// Construction //AvlTree<int> avlTree_linked = new AvlTree_Linked<int>(Logic.compare); //// Adding Items //Console.Write(" Adding (0-" + test + ")..."); //for (int i = 0; i < test; i++) // avlTree_linked.Add(i); //Console.WriteLine(); //// Iteration //Console.Write(" Traversal: "); //avlTree_linked.Stepper((int current) => { Console.Write(current); }); //Console.WriteLine(); //// Removal //int avl_tree_linked_removal = random.Next(0, test); //avlTree_linked.Remove(avl_tree_linked_removal); //Console.Write(" Remove(" + avl_tree_linked_removal + "): "); //avlTree_linked.Stepper((int current) => { Console.Write(current); }); //Console.WriteLine(); //// Look Up Items //int avl_tree_linked_lookup = random.Next(0, test); //while (avl_tree_linked_lookup == avl_tree_linked_removal) // avl_tree_linked_lookup = random.Next(0, test); //Console.WriteLine(" Look Up (" + avl_tree_linked_lookup + "): " + avlTree_linked.TryGet(avl_tree_linked_lookup, Logic.compare, out temp)); //Console.WriteLine(" Look Up (" + avl_tree_linked_removal + "): " + avlTree_linked.TryGet(avl_tree_linked_removal, Logic.compare, out temp)); //avlTree_linked.Get(avl_tree_linked_lookup, Logic.compare); //// Current Min-Max Values //Console.WriteLine(" Least: " + avlTree_linked.CurrentLeast + " Greatest: " + avlTree_linked.CurrentGreatest); //// Saving to a file //string avltreelinked_file = "avlTree_linked." + ToExtension(avlTree_linked.GetType()); //Console.WriteLine(" File: \"" + avltreelinked_file + "\""); //Console.WriteLine(" Serialized: " + Serialize(avltreelinked_file, avlTree_linked)); //AvlTree_Linked<int> deserialized_avltreelinked; //Console.WriteLine(" Deserialized: " + Deserialize(avltreelinked_file, out deserialized_avltreelinked)); //Console.WriteLine(); #endregion #region Red-Black Tree Console.WriteLine(" Testing RedBlack_Linked<int>---------------"); RedBlackTree <int> redBlackTree_linked = new RedBlackTreeLinked <int>(Compute.Compare); for (int i = 0; i < test; i++) { redBlackTree_linked.Add(i); } Console.Write(" Traversal: "); redBlackTree_linked.Stepper((int current) => { Console.Write(current); }); Console.WriteLine(); // Saving to a file //string redblacktreelinked_file = "redBlackTree_linked." 
    + ToExtension(redBlackTree_linked.GetType());
//Console.WriteLine(" File: \"" + redblacktreelinked_file + "\"");
//Console.WriteLine(" Serialized: " + Serialize(redblacktreelinked_file, redBlackTree_linked));
//RedBlackTreeLinked<int> deserialized_redblacktreelinked;
//Console.WriteLine(" Deserialized: " + Deserialize(redblacktreelinked_file, out deserialized_redblacktreelinked));
Console.WriteLine();
#endregion

#region BTree
//Console.WriteLine(" Testing BTree_LinkedArray<int>-------------");
//BTree<int> btree_linked = new BTree_LinkedArray<int>(Logic.compare, 3);
//for (int i = 0; i < test; i++)
//    btree_linked.Add(i);
//Console.Write(" Delegate: ");
//btree_linked.Stepper((int current) => { Console.Write(current); });
//Console.WriteLine();
//Console.Write(" IEnumerator: ");
//foreach (int current in btree_linked)
//    Console.Write(current);
//Console.WriteLine();
//Console.WriteLine(" Press Enter to continue...");
//string maplinked_file = "maplinked.quad";
//Console.WriteLine(" File: \"" + maplinked_file + "\"");
//Console.WriteLine(" Serialized: " + Serialize(maplinked_file, hashTable_linked));
//Omnitree_LinkedLinkedLists<int, double> deserialized_maplinked;
//Console.WriteLine(" Deserialized: " + Deserialize(maplinked_file, out deserialized_maplinked));
//Console.ReadLine();
//Console.WriteLine();
#endregion

#region Set
Console.WriteLine(" Testing Set_Hash<int>----------------------");
Set<int> set_linked = new SetHashList<int>(Compute.Equal, Hash.Default);
for (int i = 0; i < test; i++) { set_linked.Add(i); }
// Traversal
Console.Write(" Traversal: ");
set_linked.Stepper((int current) => { Console.Write(current); });
Console.WriteLine();
Console.Write(" Table Size: " + (set_linked as SetHashList<int>).TableSize);
Console.WriteLine();
Console.WriteLine();
#endregion

#region Map
Console.WriteLine(" Testing MapHashList<int, int>--------------");
Map<int, int> map_sethash = new MapHashLinked<int, int>(Compute.Equal, Hash.Default);
for (int i = 0; i < test; i++) { map_sethash.Add(i, i); }
Console.Write(" Look Ups: ");
for (int i = 0; i < test; i++) { Console.Write(map_sethash[i]); }
Console.WriteLine();
// Traversal
Console.Write(" Traversal: ");
map_sethash.Stepper((int current) => { Console.Write(current); });
Console.WriteLine();
Console.Write(" Table Size: " + (map_sethash as MapHashLinked<int, int>).TableSize);
Console.WriteLine();
Console.WriteLine();
#endregion

#region OmnitreePoints
{
    Console.WriteLine(" Testing OmnitreeLinkedLinked<int, double>-------");

    // Construction
    OmnitreePoints<int, double, double, double> omnitree_linked = new OmnitreePointsLinked<int, double, double, double>(
        (int index, out double a, out double b, out double c) => { a = index; b = index; c = index; }); // locate function

    // Properties
    Console.WriteLine(" Dimensions: " + omnitree_linked.Dimensions);
    Console.WriteLine(" Count: " + omnitree_linked.Count);

    // Addition
    Console.Write(" Adding 0-" + test + ": ");
    for (int i = 0; i < test; i++) { omnitree_linked.Add(i); }
    omnitree_linked.Stepper((int current) => { Console.Write(current); });
    Console.WriteLine();
    Console.WriteLine(" Count: " + omnitree_linked.Count);

    // Traversal
    Console.Write(" Traversal [ALL]: ");
    omnitree_linked.Stepper((int current) => { Console.Write(current); });
    Console.WriteLine();

    // Look Up 1
    Console.Write(" Traversal [(" + (test / 2) + ", " + (test / 2) + ", " + (test / 2) + ")->(" + test + ", " + test + ", " + test + ")]: ");
    omnitree_linked.Stepper((int current) => { Console.Write(current); },
        test / 2, test,
        test / 2, test,
        test / 2, test);
    Console.WriteLine();

    // Look Up 2
    Console.Write(" Look Up [" + (test / 3) + ", " + (test / 3) + ", " + (test / 3) + "]: ");
    omnitree_linked[(test / 3), (test / 3), (test / 3)]((int current) => { Console.Write(current); });
    Console.WriteLine();

    // Removal
    Console.Write(" Remove 0-" + test / 3 + ": ");
    omnitree_linked.Remove(
        0, test / 3,
        0, test / 3,
        0, test / 3);
    omnitree_linked.Stepper((int current) => { Console.Write(current); });
    Console.WriteLine();
    Console.WriteLine(" Count: " + omnitree_linked.Count);

    // Clear
    Console.Write(" Clear: ");
    omnitree_linked.Clear();
    omnitree_linked.Stepper((int current) => { Console.Write(current); });
    Console.WriteLine();
    Console.WriteLine(" Count: " + omnitree_linked.Count);

    // Saving to a file
    //string omnitreelinked_file = "omnitree_linkedlinkedlists." + ToExtension(omnitree_linked.GetType());
    //Console.WriteLine(" File: \"" + omnitreelinked_file + "\"");
    //Console.WriteLine(" Serialized: " + Serialize(omnitreelinked_file, omnitree_linked));
    //OmnitreeLinkedLinkedLists<int, double> deserialized_omnitreeLinked;
    //Console.WriteLine(" Deserialized: " + Deserialize(omnitreelinked_file, out deserialized_omnitreeLinked));
    Console.WriteLine();

    //Console.WriteLine(" Testing Omnitree_LinkedArrayLists<int, double>--------");
    //// Construction
    //Omnitree<int, double> omnitree_array = new OmnitreeLinkedArray<int, double>(
    //    new double[] { -test - 1, -test - 1, -test - 1 }, // minimum dimensions of the omnitree
    //    new double[] { test + 1, test + 1, test + 1 }, // maximum dimensions of the omnitree
    //    (int index) => { return Accessor.Get(new double[] { index, index, index }); }, // "N-D" location function
    //    Compute<double>.Compare, // comparison function
    //    (double a, double b) => { return (a + b) / 2; }); // average function
    //// Properties
    //Console.WriteLine(" Origin: [" + omnitree_array.Origin(0) + ", " + omnitree_array.Origin(1) + ", " + omnitree_array.Origin(2) + "]");
    //Console.WriteLine(" Minimum: [" + omnitree_array.Min(0) + ", " + omnitree_array.Min(1) + ", " + omnitree_array.Min(2) + "]");
    //Console.WriteLine(" Maximum: [" + omnitree_array.Max(0) + ", " + omnitree_array.Max(1) + ", " + omnitree_array.Max(2) + "]");
    //Console.WriteLine(" Dimensions: " + omnitree_array.Dimensions);
    //Console.WriteLine(" Count: " + omnitree_array.Count);
    //// Addition
    //Console.Write(" Adding 0-" + test + ": ");
    //for (int i = 0; i < test; i++)
    //    omnitree_array.Add(i);
    //omnitree_array.Stepper((int current) => { Console.Write(current); });
    //Console.WriteLine();
    //Console.WriteLine(" Count: " + omnitree_array.Count);
    //// Traversal
    //Console.Write(" Traversal [ALL]: ");
    //omnitree_array.Stepper((int current) => { Console.Write(current); });
    //Console.WriteLine();
    //// Look Up
    //Console.Write(" Traversal [" + (test / 2) + "-" + test + "]: ");
    //omnitree_array.Stepper((int current) => { Console.Write(current); },
    //    new double[] { test / 2, test / 2, test / 2 },
    //    new double[] { test, test, test });
    //Console.WriteLine();
    //// Removal
    //Console.Write(" Remove 0-" + test / 3 + ": ");
    //omnitree_array.Remove(
    //    new double[] { 0, 0, 0 },
    //    new double[] { test / 3, test / 3, test / 3 });
    //omnitree_array.Stepper((int current) => { Console.Write(current); });
    //Console.WriteLine();
    //Console.WriteLine(" Count: " + omnitree_array.Count);
    //// Clear
    //Console.Write(" Clear: ");
    //omnitree_array.Clear();
    //omnitree_array.Stepper((int current) => { Console.Write(current); });
    //Console.WriteLine();
    //Console.WriteLine(" Count: " + omnitree_array.Count);
    //// Saving to a file
    ////string omnitreearray_file = "omnitree_linkedarraylists." + ToExtension(omnitree_array.GetType());
    ////Console.WriteLine(" File: \"" + omnitreearray_file + "\"");
    ////Console.WriteLine(" Serialized: " + Serialize(omnitreearray_file, omnitree_array));
    ////OmnitreeLinkedLinkedLists<int, double> deserialized_omnitreearray;
    ////Console.WriteLine(" Deserialized: " + Deserialize(omnitreearray_file, out deserialized_omnitreearray));
    //Console.WriteLine();
}
#endregion

#region OmnitreeBounds
{
    Console.WriteLine(" Testing OmnitreeBoundsLinked<int, double>-------");

    // Construction
    OmnitreeBounds<int, double, double, double> omnitreeBounds_linked = new OmnitreeBoundsLinked<int, double, double, double>(
        (int index,
            out double min1, out double max1,
            out double min2, out double max2,
            out double min3, out double max3) =>
        {
            min1 = index; max1 = index;
            min2 = index; max2 = index;
            min3 = index; max3 = index;
        });

    // Properties
    Console.WriteLine(" Dimensions: " + omnitreeBounds_linked.Dimensions);
    Console.WriteLine(" Count: " + omnitreeBounds_linked.Count);

    // Addition
    Console.Write(" Adding 0-" + test + ": ");
    for (int i = 0; i < test; i++) { omnitreeBounds_linked.Add(i); }
    omnitreeBounds_linked.Stepper((int current) => { Console.Write(current); });
    Console.WriteLine();
    Console.WriteLine(" Count: " + omnitreeBounds_linked.Count);

    // Traversal
    Console.Write(" Traversal [ALL]: ");
    omnitreeBounds_linked.Stepper((int current) => { Console.Write(current); });
    Console.WriteLine();

    // Look Up 1
    //Console.Write(" Traversal [(" + (test / 2) + ", " + (test / 2) + ", " + (test / 2) + ")->(" + test + ", " + test + ", " + test + ")]: ");
    //omnitreeBounds_linked.Stepper((int current) => { Console.Write(current); },
    //    test / 2, test,
    //    test / 2, test,
    //    test / 2, test);
    //Console.WriteLine();

    // Removal
    Console.Write(" Remove 0-" + test / 3 + ": ");
    omnitreeBounds_linked.RemoveOverlapped(
        0, test / 3,
        0, test / 3,
        0, test / 3);
    omnitreeBounds_linked.Stepper((int current) => { Console.Write(current); });
    Console.WriteLine();
    Console.WriteLine(" Count: " + omnitreeBounds_linked.Count);

    // Clear
    Console.Write(" Clear: ");
    omnitreeBounds_linked.Clear();
    omnitreeBounds_linked.Stepper((int current) => { Console.Write(current); });
    Console.WriteLine();
    Console.WriteLine(" Count: " + omnitreeBounds_linked.Count);
    Console.WriteLine();
}
#endregion

#region KD Tree
////List<KdTreeNode<float, string>> testNodes = new List_Linked<KdTreeNode<float, string>>();
//KdTree_Linked<string, float> tree = new KdTree_Linked<string, float>(
//    2,
//    Logic.compare,
//    float.MinValue,
//    float.MaxValue,
//    0,
//    Arithmetic.Add,
//    Arithmetic.Subtract,
//    Arithmetic.Multiply);
//List<KdTree_Linked<string, float>.Node> testNodes =
//    new List_Linked<KdTree_Linked<string, float>.Node>
//{
//    new KdTree_Linked<string, float>.Node(new float[] { 5, 5 }, "Root"),
//    new KdTree_Linked<string, float>.Node(new float[] { 2.5f, 2.5f }, "Root-Left"),
//    new KdTree_Linked<string, float>.Node(new float[] { 7.5f, 7.5f }, "Root-Right"),
//    new KdTree_Linked<string, float>.Node(new float[] { 1, 10 }, "Root-Left-Left"),
//    new KdTree_Linked<string, float>.Node(new float[] { 10, 10 }, "Root-Right-Right")
//};
//foreach (var node in testNodes)
//    if (!tree.Add(node.Point, node.Value))
//        throw new Exception("Failed to add node to tree");
//var nodesToRemove = new KdTreeNode<float, string>[] {
//    testNodes[1], // Root-Left
//    testNodes[0]  // Root
//};
//foreach (var nodeToRemove in nodesToRemove)
//{
//    tree.RemoveAt(nodeToRemove.Point);
//    testNodes.Remove(nodeToRemove);
//    Assert.IsNull(tree.FindValue(nodeToRemove.Value));
//    Assert.IsNull(tree.FindValueAt(nodeToRemove.Point));
//    foreach (var testNode in testNodes)
//    {
//        Assert.AreEqual(testNode.Value, tree.FindValueAt(testNode.Point));
//        Assert.AreEqual(testNode.Point, tree.FindValue(testNode.Value));
//    }
//    Assert.AreEqual(testNodes.Count, tree.Count);
//}
#endregion

#region Graph
Console.WriteLine(" Testing Graph_SetOmnitree<int>-------------");
Graph<int> graph = new GraphSetOmnitree<int>(Compute.Equal, Compute.Compare, Hash.Default);
// add nodes
for (int i = 0; i < test; i++) { graph.Add(i); }
// add edges
for (int i = 0; i < test - 1; i++) { graph.Add(i, i + 1); }
Console.Write(" Traversal: ");
graph.Stepper((int current) => { Console.Write(current); });
Console.WriteLine();
Console.WriteLine(" Edges: ");
//((Graph_SetQuadtree<int>)graph)._edges.Foreach((Graph_SetQuadtree<int>.Edge e) => { Console.WriteLine(" " + e.Start + " " + e.End); });
graph.Stepper(
    (int current) =>
    {
        Console.Write(" " + current + ": ");
        graph.Neighbors(current, (int a) => { Console.Write(a); });
        Console.WriteLine();
    });
Console.WriteLine();
#endregion

Console.WriteLine("============================================");
Console.WriteLine("Examples Complete...");
Console.ReadLine();
}
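Every region above traverses its collection through a Stepper delegate (a callback invoked once per element) rather than through IEnumerable. The snippet below is a minimal, self-contained sketch of that pattern; the Step delegate and StepperListSketch type are illustrative stand-ins, not the library's actual definitions.

using System;
using System.Collections.Generic;

// Hypothetical callback type mirroring the Stepper style used in the examples above.
public delegate void Step<T>(T value);

public class StepperListSketch<T>
{
    private readonly List<T> _items = new List<T>();

    public void Add(T item) { _items.Add(item); }

    // Traversal via callback, mirroring set_linked.Stepper(...) in the demo.
    public void Stepper(Step<T> step)
    {
        foreach (T item in _items) { step(item); }
    }
}

public static class StepperSketchDemo
{
    public static void Main()
    {
        StepperListSketch<int> list = new StepperListSketch<int>();
        for (int i = 0; i < 10; i++) { list.Add(i); }
        list.Stepper((int current) => Console.Write(current)); // prints 0123456789
    }
}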
IEnumerator Search(Vector3 beginning, Vector3 end)
{
    // A*-style grid search: the open set is a priority queue of states ordered by cost.
    HeapArray<State> openSet = new HeapArray<State>(gridMap.LargestSize);
    State goal = gridMap.RetrieveState(end);
    State start = gridMap.RetrieveState(beginning);
    openSet.Enqueue(start);
    HashSet<State> cameFrom = new HashSet<State>(); // closed set: states already expanded
    Vector3[] intermediates = new Vector3[0];
    bool foundPath = false;
    while (openSet.Count > 0)
    {
        State present = openSet.Dequeue();
        cameFrom.Add(present);
        if (present == goal)
        {
            foundPath = true;
            intermediates = ReconstructPath(start, goal);
            break;
        }
        else
        {
            List<State> adjacents = gridMap.RetrieveAdjacentStates(present);
            foreach (State s in adjacents)
            {
                // Skip neighbors that are already expanded or blocked.
                if (cameFrom.Contains(s) || !s.unblocked) { continue; }
                else
                {
                    // Move cost: straight steps cost 10, diagonal steps cost 14 (~10 * sqrt(2)).
                    int yWeight = Mathf.Abs(present.yCoordinate - s.yCoordinate);
                    int xWeight = Mathf.Abs(present.xCoordinate - s.xCoordinate);
                    int edgeWeight = 0;
                    if (xWeight < yWeight) { edgeWeight = 10 * (yWeight - xWeight) + 14 * xWeight; }
                    else { edgeWeight = 10 * (xWeight - yWeight) + 14 * yWeight; }
                    int tentativeCost = present.gOfN + edgeWeight;
                    // Relax the neighbor if it is new or a cheaper path to it was found.
                    if (!openSet.Contains(s) || tentativeCost < s.gOfN)
                    {
                        s.parent = present;
                        s.gOfN = tentativeCost;
                        // Heuristic: same 10/14 weighting from the neighbor to the goal.
                        yWeight = Mathf.Abs(s.yCoordinate - goal.yCoordinate);
                        xWeight = Mathf.Abs(s.xCoordinate - goal.xCoordinate);
                        if (xWeight < yWeight) { s.hOfN = 10 * (yWeight - xWeight) + 14 * xWeight; }
                        else { s.hOfN = 10 * (xWeight - yWeight) + 14 * yWeight; }
                        if (!openSet.Contains(s)) { openSet.Enqueue(s); }
                        else { openSet.UpdateState(s); } // re-order the heap with the improved cost
                    }
                }
            }
        }
    }
    yield return null;
    searchHandler.FinishedPresentSearch(intermediates, foundPath);
}
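Both the edge weight and the heuristic in Search use the same weighting: straight moves cost 10 and diagonal moves cost 14 (roughly 10 * sqrt(2)), which is the octile distance for an 8-connected grid. Below is a small sketch of that computation factored out as a standalone helper; the class and method names are hypothetical and not part of the code above.

using System;

public static class OctileSketch
{
    // Cost of the cheapest 8-connected path between two grid cells when a
    // straight step costs 10 and a diagonal step costs 14 (~10 * sqrt(2)).
    public static int OctileDistance(int x1, int y1, int x2, int y2)
    {
        int dx = Math.Abs(x1 - x2);
        int dy = Math.Abs(y1 - y2);
        int straight = Math.Abs(dx - dy); // steps that must be taken in a straight line
        int diagonal = Math.Min(dx, dy);  // steps that can be taken diagonally
        return 10 * straight + 14 * diagonal;
    }
}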
public override Structuring BuildStructuring()
{
    try
    {
        if (Set == null) throw new NullReferenceException();
        int _current = 1;
        int _max = Set.ElementsCount;
        if (IContainerProgressBar != null)
        {
            IContainerProgressBar.ResetProgressBar(1, _max, true);
            IContainerProgressBar.UpdateProgressBar(1, "Running Hierarchical agglomerative algorithm with Lifetime...", true);
        }

        double AlfaI = 0, AlfaJ = 0, Beta = 0, Gamma = 0;

        // Initially, each element is its own cluster.
        double[,] DMatrix = new double[Set.Elements.Count, Set.Elements.Count];
        List<Cluster> clusters = new List<Cluster>();
        List<Cluster> best_clusters = null;
        List<double> distances = new List<double>();
        bool[] des = new bool[Set.Elements.Count]; // true if that element is no longer representative
        for (int i = 0; i < Set.Elements.Count; i++)
        {
            List<Element> l = new List<Element>();
            l.Add(Set[i]);
            clusters.Add(new Cluster("C-" + i, l));
            distances.Add(0);
        }

        // Build, for each cluster, a priority queue holding its dissimilarities to every
        // other cluster, along with the full dissimilarity matrix: O(n^2 * log n).
        List<HeapArray<Container>> lh = new List<HeapArray<Container>>();
        for (int i = 0; i < Set.Elements.Count; i++)
        {
            lh.Add(new HeapArray<Container>(Set.Elements.Count - 1)); // the dissimilarity of an element with itself is not stored
        }
        for (int i = 0; i < Set.Elements.Count; i++)
        {
            for (int j = i + 1; j < Set.Elements.Count; j++)
            {
                double temp_diss = Proximity.CalculateProximity(Set.Elements[i], Set.Elements[j]);
                DMatrix[i, j] = temp_diss;
                DMatrix[j, i] = temp_diss;
                lh[i].Add(new Container { Rank = temp_diss, Name = i, Cluster = j });
                lh[j].Add(new Container { Rank = temp_diss, Name = j, Cluster = i });
            }
        }

        // Stopping-rule variables.
        double step = Set.ElementsCount - 1, bestST = double.MinValue;
        double stoprule = 1;

        // Main algorithm: O(n^2 * log n).
        for (int i = Set.Elements.Count; i > 2; i--)
        {
            if (IContainerProgressBar != null)
                IContainerProgressBar.UpdateProgressBar(_current++, "Running Hierarchical agglomerative algorithm with Lifetime...", false);

            // Select the two most similar clusters: O(n).
            double min = double.MaxValue;
            int cluster_i = 0, cluster_j = 0;
            int pos_cluster_i = 0, pos_cluster_j = 0;
            for (int j = lh.Count - 1; j >= 0; j--)
            {
                if (lh[j].First.Rank < min)
                {
                    min = lh[j].First.Rank;
                    cluster_i = lh[j].First.Name;
                    cluster_j = lh[j].First.Cluster;
                    pos_cluster_i = j;
                }
            }
            for (int j = 0; j < lh.Count; j++)
            {
                if (lh[j].First != null && lh[j].First.Name == cluster_j)
                {
                    pos_cluster_j = j;
                    break;
                }
            }

            // Compute which position to erase and which to keep.
            int erase_pos = 0, final_pos = 0;
            erase_pos = pos_cluster_i > pos_cluster_j ? pos_cluster_i : pos_cluster_j;
            final_pos = pos_cluster_i < pos_cluster_j ? pos_cluster_i : pos_cluster_j;
            lh.RemoveAt(erase_pos);

            // Update the AlfaI, AlfaJ, Beta and Gamma parameters.
            double cluster_i_count = clusters[erase_pos].ElementsCount;
            double cluster_j_count = clusters[final_pos].ElementsCount;
            AlfaI = UpdateAlfaI(cluster_i_count, cluster_j_count);
            AlfaJ = UpdateAlfaJ(cluster_i_count, cluster_j_count);
            Beta = UpdateBeta(cluster_i_count, cluster_j_count);
            Gamma = UpdateGamma(cluster_i_count, cluster_j_count);

            // Invoke the stopping rule.
            stoprule = LifeTimeStoppingRule(distances, final_pos, erase_pos, min);
            //stoprule = CHStoppingRule(clusters, step, final_pos);
            if (stoprule > bestST)
            {
                bestST = stoprule;
                best_clusters = new List<Cluster>();
                foreach (Cluster item in clusters)
                {
                    Cluster temp = new Cluster(item.Name);
                    foreach (var e in item.Elements) { temp.Elements.Add(e); }
                    best_clusters.Add(temp);
                }
            }
            step--;

            // Merge the two clusters.
            foreach (Element item in clusters[erase_pos].Elements) { clusters[final_pos].AddElement(item); }
            clusters.RemoveAt(erase_pos);

            // Update DMatrix with the dissimilarity between the new cluster and every
            // remaining cluster: O(n). The formula used depends on the algorithm.
            // The position of cluster i in DMatrix is cluster_i.
            // The position of cluster j in DMatrix is cluster_j.
            int pos_h = -1;
            int pos_i = cluster_i;
            int pos_j = cluster_j;
            if (erase_pos == pos_cluster_i)
            {
                des[cluster_i] = true;
                pos_h = cluster_j;
            }
            else
            {
                des[cluster_j] = true;
                pos_h = cluster_i;
            }
            for (int k = 0; k < DMatrix.GetLength(0); k++)
            {
                DMatrix[pos_h, k] = AlfaI * DMatrix[pos_i, k] + AlfaJ * DMatrix[pos_j, k] + Beta * DMatrix[pos_i, pos_j] + Gamma * Math.Abs(DMatrix[pos_i, k] - DMatrix[pos_j, k]);
                DMatrix[k, pos_h] = DMatrix[pos_h, k];
            }

            // Rebuild the array of heaps with the dissimilarities between the new cluster
            // and every remaining cluster: O(n * log n).
            lh[final_pos] = new HeapArray<Container>(Set.Elements.Count - 1);
            for (int j = 0; j < lh.Count; j++)
            {
                Container[] lc = lh[j].ToArray;
                lh[j] = new HeapArray<Container>(Set.Elements.Count - 1);
                for (int k = 1; k < lc.Length; k++)
                {
                    if (lc[k] == null) break;
                    if (lc[k].Cluster != cluster_i && lc[k].Cluster != cluster_j) lh[j].Add(lc[k]);
                }
                if (j != final_pos && lh[j].First != null)
                    lh[j].Add(new Container { Rank = DMatrix[pos_h, lh[j].First.Name], Name = lh[j].First.Name, Cluster = pos_h });
            }
            for (int j = 0; j < DMatrix.GetLength(0); j++)
            {
                if (pos_h != j && pos_j != j && pos_i != j && !des[j])
                    lh[final_pos].Add(new Container { Rank = DMatrix[pos_h, j], Name = pos_h, Cluster = j });
            }
        }

        // Create a Dictionary<string, Cluster> to build the partition.
        Dictionary<string, Cluster> dic_clusters = new Dictionary<string, Cluster>();
        int cont = 0;
        for (int i = 0; i < best_clusters.Count; i++)
        {
            if (best_clusters[i] != null)
            {
                best_clusters[i].Name = "C-" + cont;
                dic_clusters.Add(best_clusters[i].Name, best_clusters[i]);
                cont++;
            }
        }
        Structuring = new Partition() { Clusters = dic_clusters, Proximity = Proximity };
        if (IContainerProgressBar != null) IContainerProgressBar.FinishProgressBar();
        return Structuring;
    }
    catch
    {
        if (IContainerProgressBar != null)
            IContainerProgressBar.ShowError("Error occurred in Hierarchical agglomerative algorithm with Lifetime.");
        return null;
    }
}
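The DMatrix update in the loop above has the shape of the Lance-Williams recurrence: when clusters C_i and C_j are merged into C_h, the dissimilarity to any other cluster C_k is recomputed from existing matrix entries only. Written out in LaTeX (the concrete coefficients come from the UpdateAlfaI/UpdateAlfaJ/UpdateBeta/UpdateGamma overrides, which are not shown here; the single-, complete-, and average-linkage values below are standard examples, not necessarily what this class uses):

\[
d(C_h, C_k) = \alpha_i\, d(C_i, C_k) + \alpha_j\, d(C_j, C_k) + \beta\, d(C_i, C_j) + \gamma\, \lvert d(C_i, C_k) - d(C_j, C_k) \rvert
\]

Single linkage: \(\alpha_i = \alpha_j = \tfrac{1}{2},\ \beta = 0,\ \gamma = -\tfrac{1}{2}\). Complete linkage: \(\alpha_i = \alpha_j = \tfrac{1}{2},\ \beta = 0,\ \gamma = +\tfrac{1}{2}\). Average linkage (UPGMA): \(\alpha_i = \tfrac{n_i}{n_i + n_j},\ \alpha_j = \tfrac{n_j}{n_i + n_j},\ \beta = \gamma = 0\), where \(n_i\) and \(n_j\) are the cluster sizes.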
public override Structuring BuildStructuring()
{
    try
    {
        if (Set == null) { throw new NullReferenceException(); }
        int _current = 1;
        int _max = Set.ElementsCount;
        if (IContainerProgressBar != null)
        {
            IContainerProgressBar.ResetProgressBar(1, _max, true);
            IContainerProgressBar.UpdateProgressBar(1, "Running Hierarchical agglomerative algorithm...", true);
        }

        if (ClustersCount > Set.ElementsCount) { ClustersCount = Set.ElementsCount; }
        if (ClustersCount <= 0) { throw new Exception("The number of clusters must be greater than zero"); }
        if (ClustersCount == 1)
        {
            // Trivial case: a single cluster containing every element.
            Dictionary<string, Cluster> dic_clus = new Dictionary<string, Cluster>();
            string name = "C-0";
            List<Element> temp = new List<Element>();
            for (int i = 0; i < Set.ElementsCount; i++)
            {
                if (IContainerProgressBar != null)
                {
                    IContainerProgressBar.UpdateProgressBar(_current++, "Running Hierarchical agglomerative algorithm...", false);
                }
                temp.Add(Set[i]);
            }
            dic_clus.Add(name, new Cluster(name) { Elements = temp });
            Structuring = new Partition() { Clusters = dic_clus, Proximity = Proximity };
            if (IContainerProgressBar != null) { IContainerProgressBar.FinishProgressBar(); }
            return Structuring;
        }

        double AlfaI = 0, AlfaJ = 0, Beta = 0, Gamma = 0;

        // Initially, each element is its own cluster.
        double[,] DMatrix = new double[Set.Elements.Count, Set.Elements.Count];
        List<Cluster> clusters = new List<Cluster>();
        bool[] des = new bool[Set.Elements.Count]; // true if that element is no longer representative
        for (int i = 0; i < Set.Elements.Count; i++)
        {
            List<Element> l = new List<Element>();
            l.Add(Set[i]);
            clusters.Add(new Cluster("C-" + i, l));
        }

        // Build, for each cluster, a priority queue holding its dissimilarities to every
        // other cluster, along with the full dissimilarity matrix: O(n^2 * log n).
        List<HeapArray<Container>> lh = new List<HeapArray<Container>>();
        for (int i = 0; i < Set.Elements.Count; i++)
        {
            lh.Add(new HeapArray<Container>(Set.Elements.Count - 1)); // the dissimilarity of an element with itself is not stored
        }
        for (int i = 0; i < Set.Elements.Count; i++)
        {
            for (int j = i + 1; j < Set.Elements.Count; j++)
            {
                double temp_diss = Proximity.CalculateProximity(Set.Elements[i], Set.Elements[j]);
                DMatrix[i, j] = temp_diss;
                DMatrix[j, i] = temp_diss;
                lh[i].Add(new Container { Rank = temp_diss, Name = i, Cluster = j });
                lh[j].Add(new Container { Rank = temp_diss, Name = j, Cluster = i });
            }
        }

        // Main algorithm: O(n^2 * log n). Merge until only ClustersCount clusters remain.
        for (int i = Set.Elements.Count; i > ClustersCount; i--)
        {
            if (IContainerProgressBar != null)
            {
                IContainerProgressBar.UpdateProgressBar(_current++, "Running Hierarchical agglomerative algorithm...", false);
            }

            // Select the two most similar clusters: O(n).
            double min = double.MaxValue;
            int cluster_i = 0, cluster_j = 0;
            int pos_cluster_i = 0, pos_cluster_j = 0;
            for (int j = lh.Count - 1; j >= 0; j--)
            {
                if (lh[j].First.Rank < min)
                {
                    min = lh[j].First.Rank;
                    cluster_i = lh[j].First.Name;
                    cluster_j = lh[j].First.Cluster;
                    pos_cluster_i = j;
                }
            }
            for (int j = 0; j < lh.Count; j++)
            {
                if (lh[j].First != null && lh[j].First.Name == cluster_j)
                {
                    pos_cluster_j = j;
                    break;
                }
            }

            // Compute which position to erase and which to keep.
            int erase_pos = 0, final_pos = 0;
            erase_pos = pos_cluster_i > pos_cluster_j ? pos_cluster_i : pos_cluster_j;
            final_pos = pos_cluster_i < pos_cluster_j ? pos_cluster_i : pos_cluster_j;
            lh.RemoveAt(erase_pos);

            // Update the AlfaI, AlfaJ, Beta and Gamma parameters.
            double cluster_i_count = clusters[erase_pos].ElementsCount;
            double cluster_j_count = clusters[final_pos].ElementsCount;
            AlfaI = UpdateAlfaI(cluster_i_count, cluster_j_count);
            AlfaJ = UpdateAlfaJ(cluster_i_count, cluster_j_count);
            Beta = UpdateBeta(cluster_i_count, cluster_j_count);
            Gamma = UpdateGamma(cluster_i_count, cluster_j_count);

            // Merge the two clusters.
            foreach (Element item in clusters[erase_pos].Elements) { clusters[final_pos].AddElement(item); }
            clusters.RemoveAt(erase_pos);

            // Update DMatrix with the dissimilarity between the new cluster and every
            // remaining cluster: O(n). The formula used depends on the algorithm.
            // The position of cluster i in DMatrix is cluster_i.
            // The position of cluster j in DMatrix is cluster_j.
            int pos_h = -1;
            int pos_i = cluster_i;
            int pos_j = cluster_j;
            if (erase_pos == pos_cluster_i)
            {
                des[cluster_i] = true;
                pos_h = cluster_j;
            }
            else
            {
                des[cluster_j] = true;
                pos_h = cluster_i;
            }
            for (int k = 0; k < DMatrix.GetLength(0); k++)
            {
                DMatrix[pos_h, k] = AlfaI * DMatrix[pos_i, k] + AlfaJ * DMatrix[pos_j, k] + Beta * DMatrix[pos_i, pos_j] + Gamma * Math.Abs(DMatrix[pos_i, k] - DMatrix[pos_j, k]);
                DMatrix[k, pos_h] = DMatrix[pos_h, k];
            }

            // Rebuild the array of heaps with the dissimilarities between the new cluster
            // and every remaining cluster: O(n * log n).
            lh[final_pos] = new HeapArray<Container>(Set.Elements.Count - 1);
            for (int j = 0; j < lh.Count; j++)
            {
                Container[] lc = lh[j].ToArray;
                lh[j] = new HeapArray<Container>(Set.Elements.Count - 1);
                for (int k = 1; k < lc.Length; k++)
                {
                    if (lc[k] == null) { break; }
                    if (lc[k].Cluster != cluster_i && lc[k].Cluster != cluster_j) { lh[j].Add(lc[k]); }
                }
                if (j != final_pos && lh[j].First != null)
                {
                    lh[j].Add(new Container { Rank = DMatrix[pos_h, lh[j].First.Name], Name = lh[j].First.Name, Cluster = pos_h });
                }
            }
            for (int j = 0; j < DMatrix.GetLength(0); j++)
            {
                if (pos_h != j && pos_j != j && pos_i != j && !des[j])
                {
                    lh[final_pos].Add(new Container { Rank = DMatrix[pos_h, j], Name = pos_h, Cluster = j });
                }
            }
        }

        // Create a Dictionary<string, Cluster> to build the partition.
        Dictionary<string, Cluster> dic_clusters = new Dictionary<string, Cluster>();
        int cont = 0;
        for (int i = 0; i < clusters.Count; i++)
        {
            if (clusters[i] != null)
            {
                clusters[i].Name = "C-" + cont;
                dic_clusters.Add(clusters[i].Name, clusters[i]);
                cont++;
            }
        }
        Structuring = new Partition() { Clusters = dic_clusters, Proximity = Proximity };
        if (IContainerProgressBar != null) { IContainerProgressBar.FinishProgressBar(); }
        return Structuring;
    }
    catch
    {
        if (IContainerProgressBar != null)
        {
            IContainerProgressBar.ShowError("Error occurred in Hierarchical agglomerative algorithm.");
        }
        return null;
    }
}
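Both BuildStructuring variants assume a Container type whose heap ordering puts the smallest dissimilarity at HeapArray&lt;Container&gt;.First. That type is not defined in this file, so the following is only a hypothetical sketch of one shape it could take; all names and members here are assumptions for illustration.

using System;

// Hypothetical sketch of the entries stored in each HeapArray<Container> above.
public class Container : IComparable<Container>
{
    public double Rank { get; set; }   // dissimilarity between the two clusters
    public int Name { get; set; }      // index of the cluster that owns the heap
    public int Cluster { get; set; }   // index of the other cluster in the pair

    // Ordering by Rank so that a min-heap's First yields the most similar
    // (smallest-dissimilarity) pair, as both BuildStructuring variants expect.
    public int CompareTo(Container other) { return Rank.CompareTo(other.Rank); }
}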