/// <summary>
/// Returns the estimated cab travel durations from each active driver to the rider's location.
/// </summary>
/// <param name="json">A JSON array of key/value pairs containing "latitude", "longitude" and "driverId".</param>
/// <returns>The travel duration for each candidate cab.</returns>
// [Authorize]
public IEnumerable<CabDuration> GetCabDurations(string json)
{
    List<KeyValuePair> model = JsonConvert.DeserializeObject<List<KeyValuePair>>(json);
    DistanceMatrix distanceService = new DistanceMatrix();
    IEnumerable<CabDuration> cabDurations = distanceService.GetCabDurations(
        model.First(x => x.Key.Equals("latitude", StringComparison.OrdinalIgnoreCase)).Value,
        model.First(x => x.Key.Equals("longitude", StringComparison.OrdinalIgnoreCase)).Value,
        Int32.Parse(model.First(x => x.Key.Equals("driverId", StringComparison.OrdinalIgnoreCase)).Value),
        this.driverService.GetAllActiveDrivers().ToList());
    return cabDurations;
}
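// A minimal sketch of the request body this endpoint expects, inferred from the keys
// the method reads ("latitude", "longitude", "driverId"). The KeyValuePair model is
// assumed to expose string Key and Value properties, and "controller" is a
// hypothetical instance of the declaring controller.
var payload = JsonConvert.SerializeObject(new[]
{
    new { Key = "latitude", Value = "40.7128" },
    new { Key = "longitude", Value = "-74.0060" },
    new { Key = "driverId", Value = "42" }
});
IEnumerable<CabDuration> durations = controller.GetCabDurations(payload);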
public static void MyClassInitialize(TestContext testContext)
{
    random = new MersenneTwister();
    coordinates = new DoubleMatrix(ProblemSize, 2);
    distances = new DistanceMatrix(ProblemSize, ProblemSize);
    for (int i = 0; i < ProblemSize; i++)
    {
        coordinates[i, 0] = random.Next(ProblemSize * 10);
        coordinates[i, 1] = random.Next(ProblemSize * 10);
    }
    for (int i = 0; i < ProblemSize - 1; i++)
    {
        for (int j = i + 1; j < ProblemSize; j++)
        {
            distances[i, j] = Math.Round(Math.Sqrt(Math.Pow(coordinates[i, 0] - coordinates[j, 0], 2) + Math.Pow(coordinates[i, 1] - coordinates[j, 1], 2)));
            distances[j, i] = distances[i, j];
        }
    }
    tour = new Permutation(PermutationTypes.RelativeUndirected, ProblemSize, random);
}
public static void MyClassInitialize(TestContext testContext)
{
    random = new MersenneTwister();
    coordinates = new DoubleMatrix(ProblemSize, 2);
    distances = new DistanceMatrix(ProblemSize, ProblemSize);
    for (var i = 0; i < ProblemSize; i++)
    {
        coordinates[i, 0] = random.Next(ProblemSize * 10);
        coordinates[i, 1] = random.Next(ProblemSize * 10);
    }
    for (var i = 0; i < ProblemSize - 1; i++)
    {
        for (var j = i + 1; j < ProblemSize; j++)
        {
            distances[i, j] = Math.Round(Math.Sqrt(Math.Pow(coordinates[i, 0] - coordinates[j, 0], 2) + Math.Pow(coordinates[i, 1] - coordinates[j, 1], 2)));
            distances[j, i] = distances[i, j];
        }
    }
    probabilities = new DoubleArray(ProblemSize);
    for (var i = 0; i < ProblemSize; i++)
    {
        probabilities[i] = random.NextDouble();
    }
    realizations = new ItemList<BoolArray>(RealizationsSize);
    for (var i = 0; i < RealizationsSize; i++)
    {
        var countOnes = 0;
        var newRealization = new BoolArray(ProblemSize);
        while (countOnes < 4)
        {
            // Only generate realizations with at least 4 cities visited.
            countOnes = 0;
            for (var j = 0; j < ProblemSize; j++)
            {
                newRealization[j] = random.NextDouble() < probabilities[j];
                if (newRealization[j]) countOnes++;
            }
        }
        realizations.Add(newRealization);
    }
    tour = new Permutation(PermutationTypes.RelativeUndirected, ProblemSize, random);
}
/// <summary>
/// Creates a random graph, based on an MST.
/// </summary>
/// <param name="d">Distance matrix used to construct the graph.</param>
/// <param name="alpha">The resulting average degree of the graph.</param>
/// <param name="expP">The probability of adding an edge depends on its distance: 1/d^expP.</param>
/// <returns>The randomly generated graph.</returns>
public static LightWeightGraph GetRandomGraph(DistanceMatrix d, int alpha, double expP)
{
    int numNodes = d.Count;
    var nodes = new LightWeightGraph.LightWeightNode[numNodes];

    // Make an array to hold all possible edges, less the edges in the MST.
    oneNode[] myDistances = new oneNode[numNodes * (numNodes - 1) / 2 - (numNodes - 1)];
    LightWeightGraph mst = LightWeightGraph.GetStackedMST(d, 1);
    LightWeightGraph.LightWeightNode[] mstNodes = mst.Nodes;
    int myDistancesIndex = 0;
    double myDistancesTotal = 0;

    // Create lists to hold edge values.
    List<int>[] edges = new List<int>[numNodes];
    List<double>[] weights = new List<double>[numNodes];
    for (int i = 0; i < numNodes; i++)
    {
        edges[i] = new List<int>();
        weights[i] = new List<double>(); // previously left null; the graph is unweighted, so these lists stay empty
    }

    // Add edges from the MST to the edges list, to facilitate adding additional edges later on.
    for (int i = 0; i < numNodes; i++)
    {
        for (int j = 0; j < mstNodes[i].Edge.Length; j++)
        {
            edges[i].Add(mstNodes[i].Edge[j]);
        }
    }

    // Cycle through each possible edge.
    // If the edge exists in the MST, skip it.
    // Otherwise, add the edge to the distances array, and add its probability mass to the cumulative total.
    for (int i = 0; i < numNodes - 1; i++)
    {
        for (int j = i + 1; j < numNodes; j++)
        {
            if (mstNodes[i].Edge.Contains(j))
            {
                continue;
            }
            double addlProb = 1.0 / Math.Pow(d[i, j], expP);
            myDistancesTotal += addlProb;
            oneNode nd = new oneNode { prob = myDistancesTotal, fromNode = i, toNode = j, alreadyExists = false };
            myDistances[myDistancesIndex] = nd;
            myDistancesIndex++;
        }
    }

    // How many edges do we want to add?
    int desiredNewEdges = (alpha * numNodes) - (numNodes - 1);
    Random rnd = Utility.Util.Rng;

    // Add edges randomly until we have added the desired number of edges.
    while (desiredNewEdges > 0)
    {
        // Generate a random number between 0 and myDistancesTotal.
        double rand = rnd.NextDouble() * myDistancesTotal;

        // Walk through the array until we find the random number.
        for (int m = 0; m < myDistances.Length; m++)
        {
            if (myDistances[m].prob > rand)
            {
                // We have found the edge to add; add it if it does not already exist.
                if (!myDistances[m].alreadyExists)
                {
                    edges[myDistances[m].fromNode].Add(myDistances[m].toNode);
                    edges[myDistances[m].toNode].Add(myDistances[m].fromNode);
                    myDistances[m].alreadyExists = true;
                    desiredNewEdges--;
                    break;
                }
            }
        }
    }

    for (int i = 0; i < numNodes; i++)
    {
        nodes[i] = new LightWeightGraph.LightWeightNode(i, true, edges[i], weights[i]);
    }
    return new LightWeightGraph(nodes, true);
}
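// A minimal usage sketch, assuming this DistanceMatrix exposes the (rows, cols)
// constructor and settable indexer seen in the test fixtures above; if it does not,
// build the matrix however the library provides. The alpha and expP values are illustrative.
int n = 5;
var d = new DistanceMatrix(n, n);
var rng = new Random(1);
for (int i = 0; i < n - 1; i++)
{
    for (int j = i + 1; j < n; j++)
    {
        d[i, j] = d[j, i] = rng.Next(1, 100);
    }
}

// Average degree of 3; edge probability decays as 1/distance^2.
LightWeightGraph g = GetRandomGraph(d, 3, 2.0);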
public ResultClusterer ExecuteAlgorithm(Core core, int isPreview, bool doNotCluster, ArgsClusterer args, ConfigurationClusterer tag, ProgressReporter prog, out IntensityMatrix vmatrixOut, out DistanceMatrix dmatrixOut)
{
    IReadOnlyList<Peak> peaks;

    if (isPreview > 0 && isPreview < core.Peaks.Count)
    {
        List<Peak> p = core.Peaks.ToList();
        p.Shuffle();
        p = p.GetRange(0, Math.Min(isPreview, p.Count)).ToList();

        // Make sure any seed peaks are in the list
        foreach (Peak peak in tag.Args.Parameters.OfType<WeakReference<Peak>>().Select(par => (par).GetTargetOrThrow()))
        {
            p.Insert(0, peak);
            p.RemoveAt(p.Count - 1);
        }

        peaks = p;
    }
    else
    {
        peaks = core.Peaks;
    }

    // FILTER PEAKS
    PeakFilter pfilter = args.PeakFilter ?? PeakFilter.Empty;
    IntensityMatrix src = args.SourceMatrix;
    Filter<Peak>.Results filter = pfilter.Test(peaks);
    Cluster insigs;

    if (filter.Failed.Count == 0)
    {
        insigs = null;
    }
    else
    {
        insigs = new Cluster("Insig", tag);
        insigs.States |= Session.Main.Cluster.EStates.Insignificants;

        // We still need the vmatrix for plotting later
        IntensityMatrix operational = src.Subset(args.PeakFilter, args.ObsFilter, ESubsetFlags.InvertPeakFilter);

        if (args.SplitGroups)
        {
            operational = operational.SplitGroups();
        }

        for (int index = 0; index < operational.NumRows; index++)
        {
            Vector p = new Vector(operational, index);
            insigs.Assignments.Add(new Assignment(p, insigs, double.NaN));
        }
    }

    // CREATE VMATRIX AND FILTER OBSERVATIONS
    PeakFilter temp = new PeakFilter("filtered in", null, new[] { new PeakFilter.ConditionPeak(Filter.ELogicOperator.And, false, filter.Failed, Filter.EElementOperator.IsNot) });
    IntensityMatrix vmatrix = src.Subset(args.PeakFilter, args.ObsFilter, ESubsetFlags.None);

    if (args.SplitGroups)
    {
        vmatrix = vmatrix.SplitGroups();
    }

    prog.Enter("Creating distance matrix");
    DistanceMatrix dmatrix = RequiresDistanceMatrix ? DistanceMatrix.Create(core, vmatrix, args.Distance, prog) : null;
    prog.Leave();

    IEnumerable<Cluster> clusters;

    if (doNotCluster)
    {
        vmatrixOut = vmatrix;
        dmatrixOut = dmatrix;
        return null;
    }

    // CLUSTER USING VMATRIX OR DMATRIX
    prog.Enter("Clustering");
    clusters = Cluster(vmatrix, dmatrix, args, tag, prog);
    prog.Leave();

    vmatrixOut = vmatrix;
    dmatrixOut = dmatrix;

    List<Cluster> result = new List<Cluster>();

    if (insigs != null)
    {
        result.Add(insigs);
    }

    result.AddRange(clusters);
    return new ResultClusterer(result);
}
/// <summary>
/// Generates a random graph using the defined parameters.
/// </summary>
/// <param name="d">The DistanceMatrix used for construction.</param>
/// <returns>The generated graph.</returns>
public LightWeightGraph GenerateGraph(DistanceMatrix d)
{
    return GetRandomGraph(d, _alpha, _expP);
}
public static bool KNNGraphIsConnected(DistanceMatrix distances, int neighbors, double percentage)
{
    return GetKNNGraph(distances, neighbors).isConnected(percentage);
}
public DirectionViewModel(Step[] steps, DistanceMatrix distance)
{
    this.steps = steps;
    this.distance = distance;
}
public int QuadraticMinKNN(DistanceMatrix distance)
{
    int min = 1;
    int max = distance.Count - 1;
    int highestDisconnected = min;
    int lowestConnected = max;
    int numNodes = distance.Count;

    bool[] exclusion = new bool[numNodes];
    int countExcluded = 0;
    while (countExcluded < _numSkip)
    {
        int index = Utility.Util.Rng.Next(0, numNodes);
        if (!exclusion[index])
        {
            exclusion[index] = true;
            countExcluded++;
        }
    }

    // This dictionary is used to store results so recalculations are not needed.
    Dictionary<int, bool> connectedDict = new Dictionary<int, bool>();
    int inc = 1;
    for (int i = min; i < max; i += (inc * inc))
    {
        if (i > lowestConnected)
        {
            inc = 1;
            i = highestDisconnected;
            continue;
        }
        if (connectedDict.ContainsKey(i))
        {
            continue;
        }

        bool isConn = GetLOOGraph(distance, i, exclusion).isConnected();
        connectedDict.Add(i, isConn);

        if (isConn && i < lowestConnected)
        {
            lowestConnected = i;
            if (connectedDict.ContainsKey(lowestConnected - 1) && !connectedDict[lowestConnected - 1])
            {
                return lowestConnected;
            }
        }
        else if (!isConn && i > highestDisconnected)
        {
            highestDisconnected = i;
            if (connectedDict.ContainsKey(highestDisconnected + 1) && connectedDict[highestDisconnected + 1])
            {
                return highestDisconnected + 1;
            }
        }
        inc++;
    }
    return lowestConnected;
}
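// The probing pattern above (quadratic jumps forward, then a restart from the highest
// known-disconnected k) is easier to read in isolation. A self-contained sketch of the
// same idea, assuming the predicate is monotone in k (once connected, stays connected)
// and memoising results exactly as QuadraticMinKNN does with its dictionary:
static int QuadraticMinSearch(int min, int max, Func<int, bool> isConnected)
{
    var cache = new Dictionary<int, bool>();
    Func<int, bool> test = k => cache.TryGetValue(k, out bool v) ? v : (cache[k] = isConnected(k));

    int highestDisconnected = min, lowestConnected = max, inc = 1;
    for (int i = min; i < max; i += inc * inc)
    {
        if (i > lowestConnected) { inc = 1; i = highestDisconnected; continue; }

        if (test(i))
        {
            if (i < lowestConnected) lowestConnected = i;
            // k-1 already known disconnected => k is the minimum.
            if (cache.TryGetValue(i - 1, out bool below) && !below) return i;
        }
        else if (i > highestDisconnected)
        {
            highestDisconnected = i;
            // k+1 already known connected => k+1 is the minimum.
            if (cache.TryGetValue(i + 1, out bool above) && above) return i + 1;
        }
        inc++;
    }
    return lowestConnected;
}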
public static bool GeoGraphIsConnected(DistanceMatrix distances, double threshold)
{
    return GetGeometricGraph(distances, threshold).isConnected();
}
private static int FindShortestPathRecursive(string city1, List<string> cities, DistanceMatrix<string, string> distances)
{
    var shortestLength = int.MaxValue;

    // This algorithm is not extremely efficient in either memory or processing time.
    var citiesCopy = cities.ToList();
    citiesCopy.Remove(city1);

    if (citiesCopy.Count == 1)
    {
        if (distances.ContainsPair(city1, citiesCopy.First()))
        {
            shortestLength = distances[city1, citiesCopy.First()];
        }
    }
    else
    {
        // Loop to find the shortest.
        foreach (var city2 in citiesCopy)
        {
            if (!distances.ContainsPair(city1, city2))
            {
                // Not an option.
                continue;
            }

            var length = distances[city1, city2] + FindShortestPathRecursive(city2, citiesCopy, distances);

            // Negative values would indicate we added MaxValue.
            if (length > 0 && length < shortestLength)
            {
                shortestLength = length;
            }
        }
    }
    return shortestLength;
}
public void MtreeBuildTest()
{
    var points = new double[][]
    {
        new double[] { 1, 3 },
        new double[] { 2, 1 },
        new double[] { 4, 2 },
        new double[] { 10, 10 },
        new double[] { 9, 9 },
        new double[] { 8, 9 },
        new double[] { 9.5, 9.5 },
        new double[] { 9.75, 9.75 }
    };

    var mtree = new MTree<double[]>(Metrics.L2Norm, 3, points);

    /*
     * 1. Ensure all node values are in the right place
     *
     * The M-Tree node entry values should look like the following:
     *
     *                          +-------------+
     *                          |(1,3),(10,10)|
     *                          +--+-------+--+
     *                             |       |
     *             +---------------+       +---------------+
     *             |                                       |
     *       +-----+-----+                         +-------+-----+
     *       |(1,3),(4,2)|                         |(10,10),(9,9)|
     *       +--+-----+--+                         +---+------+--+
     *          |     |                                |      |
     *   +------+     +----+                  +--------+      +------+
     *   |                 |                  |                      |
     * +-+---------+ +-----+-----+ +----------+--------+ +-----------+---+
     * |(1,3),(2,1)| |(4,2),(8,9)| |(10,10),(9.75,9.75)| |(9,9),(9.5,9.5)|
     * +-----------+ +-----------+ +-------------------+ +---------------+
     */
    var rootEntries = mtree.Root.Entries;
    var middleEntries = rootEntries[0].ChildNode.Entries.Concat(rootEntries[1].ChildNode.Entries).ToArray();
    var leafEntries = new List<MNodeEntry<int>>();
    foreach (var entry in middleEntries)
    {
        leafEntries.AddRange(entry.ChildNode.Entries);
    }

    // Check node entry values
    // Test root entries
    Assert.That(mtree[rootEntries[0].Value], Is.EqualTo(points[0]));
    Assert.That(mtree[rootEntries[1].Value], Is.EqualTo(points[3]));

    // Test mid entries
    Assert.That(mtree[middleEntries[0].Value], Is.EqualTo(points[0]));
    Assert.That(mtree[middleEntries[1].Value], Is.EqualTo(points[2]));
    Assert.That(mtree[middleEntries[2].Value], Is.EqualTo(points[3]));
    Assert.That(mtree[middleEntries[3].Value], Is.EqualTo(points[4]));

    // Test leaf entries
    Assert.That(mtree[leafEntries[0].Value], Is.EqualTo(points[0]));
    Assert.That(mtree[leafEntries[1].Value], Is.EqualTo(points[1]));
    Assert.That(mtree[leafEntries[2].Value], Is.EqualTo(points[2]));
    Assert.That(mtree[leafEntries[3].Value], Is.EqualTo(points[5]));
    Assert.That(mtree[leafEntries[4].Value], Is.EqualTo(points[3]));
    Assert.That(mtree[leafEntries[5].Value], Is.EqualTo(points[7]));
    Assert.That(mtree[leafEntries[6].Value], Is.EqualTo(points[4]));
    Assert.That(mtree[leafEntries[7].Value], Is.EqualTo(points[6]));

    // 2. Ensure all node properties are correct at each level
    // Ensure that root node has no parent entry
    Assert.That(mtree.Root.ParentEntry, Is.Null);

    // Check node entry properties
    foreach (var entry in rootEntries)
    {
        Assert.That(entry.EnclosingNode, Is.EqualTo(mtree.Root));
        Assert.That(entry.ChildNode, Is.Not.Null);
        Assert.That(entry.DistanceFromParent, Is.EqualTo(-1));
        Assert.That(entry.CoveringRadius, Is.GreaterThan(0));
        Assert.That(entry.EnclosingNode.IsInternalNode, Is.True);
    }

    foreach (var entry in middleEntries)
    {
        Assert.That(entry.EnclosingNode.ParentEntry.EnclosingNode, Is.EqualTo(mtree.Root));
        Assert.That(entry.ChildNode, Is.Not.Null);
        Assert.That(entry.DistanceFromParent, Is.GreaterThanOrEqualTo(0));
        Assert.That(entry.CoveringRadius, Is.GreaterThan(0));
        Assert.That(entry.EnclosingNode.IsInternalNode, Is.True);
    }

    foreach (var entry in leafEntries)
    {
        Assert.That(entry.EnclosingNode.ParentEntry.EnclosingNode.ParentEntry.EnclosingNode, Is.EqualTo(mtree.Root));
        Assert.That(entry.ChildNode, Is.Null);
        Assert.That(entry.DistanceFromParent, Is.GreaterThanOrEqualTo(0));
        Assert.That(entry.CoveringRadius, Is.EqualTo(-1));
        Assert.That(entry.EnclosingNode.IsInternalNode, Is.False);
    }

    // 3. Ensure all distances from parents are correct
    // Root entries have a distance from parent of -1, so they are not checked here
    var distanceMatrix = new DistanceMatrix<double[]>(points, Metrics.L2Norm);

    Assert.That(middleEntries[0].DistanceFromParent, Is.EqualTo(distanceMatrix[0, 0]));
    Assert.That(middleEntries[1].DistanceFromParent, Is.EqualTo(distanceMatrix[0, 2]));
    Assert.That(middleEntries[2].DistanceFromParent, Is.EqualTo(distanceMatrix[3, 3]));
    Assert.That(middleEntries[3].DistanceFromParent, Is.EqualTo(distanceMatrix[3, 4]));

    Assert.That(leafEntries[0].DistanceFromParent, Is.EqualTo(distanceMatrix[0, 0]));
    Assert.That(leafEntries[1].DistanceFromParent, Is.EqualTo(distanceMatrix[0, 1]));
    Assert.That(leafEntries[2].DistanceFromParent, Is.EqualTo(distanceMatrix[2, 2]));
    Assert.That(leafEntries[3].DistanceFromParent, Is.EqualTo(distanceMatrix[2, 5]));
    Assert.That(leafEntries[4].DistanceFromParent, Is.EqualTo(distanceMatrix[3, 3]));
    Assert.That(leafEntries[5].DistanceFromParent, Is.EqualTo(distanceMatrix[3, 7]));
    Assert.That(leafEntries[6].DistanceFromParent, Is.EqualTo(distanceMatrix[4, 4]));
    Assert.That(leafEntries[7].DistanceFromParent, Is.EqualTo(distanceMatrix[4, 6]));
}
public SearchResult(ComplexCodeBlock[] blocks)
{
    this.blocks = new List<ComplexCodeBlock>(blocks);
    this.matrix = new DistanceMatrix(blocks);
    this.CalculateDistances();
}
private static int FindLongestPathRecursive(string city1, List<string> cities, DistanceMatrix<string, string> distances)
{
    var longestLength = 0;

    // This algorithm is not extremely efficient in either memory or processing time.
    var citiesCopy = cities.ToList();
    citiesCopy.Remove(city1);

    if (citiesCopy.Count == 1)
    {
        if (distances.ContainsPair(city1, citiesCopy.First()))
        {
            longestLength = distances[city1, citiesCopy.First()];
        }
    }
    else
    {
        // Loop to find the longest.
        foreach (var city2 in citiesCopy)
        {
            if (!distances.ContainsPair(city1, city2))
            {
                // Not an option.
                continue;
            }

            var length = distances[city1, city2] + FindLongestPathRecursive(city2, citiesCopy, distances);

            if (length > longestLength)
            {
                longestLength = length;
            }
        }
    }
    return longestLength;
}
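// A hypothetical worked example for the two recursive helpers above (they are private,
// so this would run inside the declaring class). Only the getter indexer and
// ContainsPair appear in this code, so the indexer-based assignment and the symmetry
// of ContainsPair are assumptions.
var distances = new DistanceMatrix<string, string>();
distances["A", "B"] = 5;
distances["B", "C"] = 3;
distances["A", "C"] = 9;

var cities = new List<string> { "A", "B", "C" };
int shortest = FindShortestPathRecursive("A", cities, distances); // A -> B -> C: 5 + 3 = 8
int longest = FindLongestPathRecursive("A", cities, distances);   // A -> C -> B: 9 + 3 = 12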
/// <summary>
/// Solves the travelling salesman problem.
/// </summary>
/// <param name="matrix">A precalculated distance matrix (n x n).</param>
/// <param name="tspOptimization">The metric in which to base the TSP algorithm.</param>
/// <returns>An efficient path between all waypoints based on time or distance.</returns>
public static async Task<TspResult> Solve(DistanceMatrix matrix, TspOptimizationType tspOptimization)
{
    return await GetTspAlgorithm(matrix.Origins).Solve(matrix, tspOptimization).ConfigureAwait(false);
}
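// A usage sketch for the static entry point above. The enclosing class name
// (TspSolver) is a stand-in, and the SimpleWaypoint(lat, lon) constructor is assumed;
// CreateStraightLineNxNMatrix is the helper used later in this section, so no routing
// calls are needed for the straight-line metric.
var waypoints = new List<SimpleWaypoint>
{
    new SimpleWaypoint(47.6062, -122.3321), // Seattle
    new SimpleWaypoint(45.5152, -122.6784), // Portland
    new SimpleWaypoint(49.2827, -123.1207)  // Vancouver
};

DistanceMatrix matrix = await DistanceMatrix.CreateStraightLineNxNMatrix(waypoints, DistanceUnitType.Kilometers, bingMapsKey);
TspResult result = await TspSolver.Solve(matrix, TspOptimizationType.StraightLineDistance);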
/// <summary>
/// Calculates an efficient path between all waypoints based on time or distance.
/// </summary>
/// <param name="waypoints">The waypoints to calculate a path through.</param>
/// <param name="travelMode">The mode of transportation.</param>
/// <param name="tspOptimization">The metric in which to base the TSP algorithm.</param>
/// <param name="departureTime">The departure time for which to consider predictive traffic. Only used when the travel mode is driving and the TSP optimization is based on travel time or travel distance. Ignored if there are more than 10 waypoints, as that would exceed the limits of a distance matrix call.</param>
/// <param name="bingMapsKey">A Bing Maps key.</param>
/// <returns>An efficient path between all waypoints based on time or distance.</returns>
public async Task<TspResult> Solve(List<SimpleWaypoint> waypoints, TravelModeType? travelMode, TspOptimizationType? tspOptimization, DateTime? departureTime, string bingMapsKey)
{
    if (waypoints != null && waypoints.Count > 0) // was "waypoints == null && ...", which made this block unreachable
    {
        // Ensure that only unique waypoints are in the list. This reduces the number of cells generated in the distance matrix, thus lowering cost.
        waypoints = waypoints.Distinct().ToList();
    }

    if (!tspOptimization.HasValue)
    {
        tspOptimization = TspOptimizationType.TravelTime;
    }

    if (!travelMode.HasValue)
    {
        // Default to driving if not specified (hoisted out of the else branch below, since the result assignment also needs a value).
        travelMode = TravelModeType.Driving;
    }

    DistanceMatrix dm = null;

    if (tspOptimization.Value == TspOptimizationType.StraightLineDistance)
    {
        // Calculate a distance matrix based on straight-line distances (haversine).
        dm = await DistanceMatrix.CreateStraightLineNxNMatrix(waypoints, DistanceUnitType.Kilometers, bingMapsKey);
    }
    else
    {
        var distanceMatrixRequest = new DistanceMatrixRequest()
        {
            TravelMode = travelMode.Value,
            BingMapsKey = bingMapsKey
        };

        if (departureTime.HasValue && (distanceMatrixRequest.TravelMode == TravelModeType.Driving && waypoints.Count <= 10 || distanceMatrixRequest.TravelMode == TravelModeType.Truck))
        {
            distanceMatrixRequest.StartTime = departureTime.Value;
        }

        distanceMatrixRequest.Origins = waypoints;

        var r = await distanceMatrixRequest.Execute();

        if (r != null)
        {
            if (r.ErrorDetails != null && r.ErrorDetails.Length > 0)
            {
                throw new Exception(String.Join("", r.ErrorDetails));
            }

            if (r.ResourceSets != null && r.ResourceSets.Length > 0 && r.ResourceSets[0] != null
                && r.ResourceSets[0].Resources != null && r.ResourceSets[0].Resources.Length > 0 // was a repeated ResourceSets.Length check
                && r.ResourceSets[0].Resources[0] != null && r.ResourceSets[0].Resources[0] is DistanceMatrix)
            {
                dm = r.ResourceSets[0].Resources[0] as DistanceMatrix;
            }
        }
    }

    if (dm != null)
    {
        var solution = await Solve(dm, tspOptimization.Value);
        solution.TspOptimization = tspOptimization.Value;
        solution.TravelMode = travelMode.Value;
        return solution;
    }

    throw new Exception("Unable to calculate distance matrix.");
}
/// <summary>
/// Calculates an efficient path between all waypoints based on time or distance.
/// </summary>
/// <param name="matrix">A precalculated distance matrix (n x n).</param>
/// <param name="tspOptimization">The metric in which to base the TSP algorithm.</param>
/// <returns>An efficient path between all waypoints based on time or distance.</returns>
#pragma warning disable 1998
public virtual async Task<TspResult> Solve(DistanceMatrix matrix, TspOptimizationType tspOptimization)
{
    throw new NotImplementedException();
}
/// <summary>
/// GetGPartition is different from GetPartition in 2 ways:
/// 1. It does not require a connected graph.
/// 2. If there are too many clusters, it combines them such that the desired number of clusters is returned.
/// </summary>
/// <returns>A partitioning of the graph.</returns>
public Partition GetGPartition()
{
    DistanceMatrix mat = null;
    if (_data.Type == AbstractDataset.DataType.DistanceMatrix)
    {
        mat = (DistanceMatrix)_data;
    }
    else if (_data.Type == AbstractDataset.DataType.PointSet)
    {
        mat = ((PointSet)_data).GetDistanceMatrix();
    }

    // Get the actual partition (the graph is not necessarily connected).
    Partition partition = Partition.GetPartition((LightWeightGraph)_data);

    // Dictionary to hold VAT
    var vatMap = new Dictionary<int, VAT>();
    // Dictionary to hold subset array
    var subsetMap = new Dictionary<int, int[]>();

    while (partition.Clusters.Count < _minK)
    {
        Console.WriteLine("Count = " + partition.Clusters.Count);
        Console.WriteLine("mink = " + _minK);

        // Calculate the VAT for all values
        foreach (var c in partition.Clusters.Where(c => !vatMap.ContainsKey(c.ClusterId)))
        {
            // We must calculate a graph for this subset of data
            List<int> clusterSubset = c.Points.Select(p => p.Id).ToList();

            // Now calculate VAT
            LightWeightGraph lwg;
            if (_data.Type == AbstractDataset.DataType.Graph)
            {
                bool[] exclusion = new bool[_data.Count];
                for (int i = 0; i < _data.Count; i++)
                {
                    exclusion[i] = true;
                }
                foreach (var p in c.Points)
                {
                    exclusion[p.Id] = false;
                }
                lwg = new LightWeightGraph((LightWeightGraph)_data, exclusion);
            }
            else // Distance matrix or PointSet
            {
                Debug.Assert(mat != null, "mat != null");
                var subMatrix = mat.GetReducedDataSet(clusterSubset);
                // Generate our graph
                lwg = _graphGen.GenerateGraph(subMatrix.Mat);
            }

            subsetMap.Add(c.ClusterId, clusterSubset.ToArray());
            lwg.IsWeighted = _weighted;
            VAT v = new VAT(lwg, _reassignNodes, _alpha, _beta);
            _vatNodeRemovalOrder = v.NodeRemovalOrder;
            _vatNumNodesRemoved = v.NumNodesRemoved;
            if (_hillClimb)
            {
                v.HillClimb();
            }
            vatMap.Add(c.ClusterId, v);
            Console.WriteLine("Calculated VAT for cluster " + c.ClusterId);
        }

        meta.AppendLine("All calculated VATs:");

        // Now find the minimum VAT value
        int minVatCluster = 0;
        double minVatValue = double.MaxValue;
        foreach (var c in vatMap)
        {
            meta.Append(String.Format("{0} ", c.Value.MinVat));
            if (c.Value.MinVat < minVatValue)
            {
                minVatCluster = c.Key;
                minVatValue = c.Value.MinVat;
            }
        }
        meta.AppendLine();

        // Now merge the sub-partition into the cluster
        var minVAT = vatMap[minVatCluster];
        var subPartition = minVAT.GetPartition();
        var nodeIndexMap = subsetMap[minVatCluster];

        meta.AppendFormat("Vat: MinVat={0}\r\n", minVAT.MinVat);
        meta.AppendFormat("Removed Count:{0}\r\n", minVAT.NumNodesRemoved);
        meta.AppendLine(String.Join(",", minVAT.NodeRemovalOrder.GetRange(0, minVAT.NumNodesRemoved).Select(c => nodeIndexMap[c])));

        partition.MergeSubPartition(subPartition, nodeIndexMap, minVatCluster);
        vatMap.Remove(minVatCluster);
        subsetMap.Remove(minVatCluster);
        Console.WriteLine("Found min cluster");
        Console.WriteLine(meta);
    }

    partition.MetaData = meta.ToString();

    // Now that we have partitions, combine them so that partition.Clusters.Count == _minK.
    if (partition.Clusters.Count > _minK)
    {
        combineClusters(partition, _minK);
    }
    return partition;
}
/// <summary>
/// Recalculates the statistics.
/// </summary>
/// <param name="core">Core</param>
/// <param name="metric">Metric for statistics</param>
/// <param name="statistics">What to calculate</param>
/// <param name="prog">Report progress to</param>
/// <param name="vmatrix">Value matrix</param>
/// <param name="dmatrix">Distance matrix (optional - if not present it will be calculated if necessary)</param>
internal void RecalculateStatistics(Core core, ConfigurationMetric metric, IntensityMatrix vmatrix, DistanceMatrix dmatrix, EClustererStatistics statistics, ProgressReporter prog)
{
    // Add basics
    ClustererStatistics[STAT_NUM_VECTORS] = vmatrix.NumRows;
    ClustererStatistics[STAT_LENGTH_OF_VECTORS] = vmatrix.NumCols;

    // Don't calculate metrics?
    if (statistics == EClustererStatistics.None)
    {
        return;
    }

    // Get the non-insig clusters
    Cluster[] realClusters = RealClusters.ToArray();

    // If we don't have a DMatrix we should calculate the silhouette width manually.
    // The DMatrix might be too big to pass to R, so it's better just to avoid it.
    prog.Enter("Calculating statistics");
    List<ObsFilter> groupFilters = new List<ObsFilter>();

    // No filter
    groupFilters.Add(null);

    if (!vmatrix.HasSplitGroups)
    {
        // Defined filters
        if (statistics.HasFlag(EClustererStatistics.IncludePartialVectorsForFilters))
        {
            groupFilters.AddRange(core.ObsFilters);
        }

        // Group filters (if not already)
        if (statistics.HasFlag(EClustererStatistics.IncludePartialVectorsForGroups))
        {
            AllGroupsFilters(core, groupFilters);
        }
    }

    List<ForStat> needsCalculating = new List<ForStat>();

    prog.Enter("Input vectors");
    ProgressParallelHandler progP = prog.CreateParallelHandler(groupFilters.Count);
    ProgressParallelHandler closure1 = progP;
    Parallel.ForEach(groupFilters, obsFilter => Thread_AddFilterToCalculationList(core, metric, vmatrix, dmatrix, statistics, realClusters, obsFilter, needsCalculating, closure1));
    prog.Leave();

    // ASSIGNMENT STATS
    prog.Enter("Assignments");
    progP = prog.CreateParallelHandler(needsCalculating.Count);
    ProgressParallelHandler closure2 = progP;
    Parallel.ForEach(needsCalculating, z => Thread_CalculateAssignmentStatistics(statistics, z, realClusters, metric, closure2));
    prog.Leave();

    // CLUSTER STATS
    prog.Enter("Clusters");
    progP = prog.CreateParallelHandler(this.Clusters.Length);
    Parallel.ForEach(this.Clusters, z => Thread_CalculateClusterStatistics(core, statistics, z, progP));
    prog.Leave();

    // SUMMARY STATS
    prog.Enter("Summary");
    CalculateSummaryStatistics(core, statistics, realClusters);
    prog.Leave();

    prog.Leave();
}
public Partition GetPartition()
{
    DistanceMatrix mat = null;
    if (_data.Type == AbstractDataset.DataType.DistanceMatrix)
    {
        mat = (DistanceMatrix)_data;
    }
    else if (_data.Type == AbstractDataset.DataType.PointSet)
    {
        mat = ((PointSet)_data).GetDistanceMatrix();
    }

    // Set up our partition with a single cluster containing all points.
    List<Cluster> clusterList = new List<Cluster> { new Cluster(0, Enumerable.Range(0, _data.Count).ToList()) };
    Partition partition = new Partition(clusterList, _data);

    // Dictionary to hold Integrity values
    var vatMap = new Dictionary<int, Integrity>();
    // Dictionary to hold subset array
    var subsetMap = new Dictionary<int, int[]>();

    while (clusterList.Count < _minK)
    {
        // Calculate the Integrity for all values
        foreach (var c in partition.Clusters.Where(c => !vatMap.ContainsKey(c.ClusterId)))
        {
            // We must calculate a graph for this subset of data
            List<int> clusterSubset = c.Points.Select(p => p.Id).ToList();

            // Now calculate Integrity
            LightWeightGraph lwg;
            if (_data.Type == AbstractDataset.DataType.Graph)
            {
                bool[] exclusion = new bool[_data.Count];
                for (int i = 0; i < _data.Count; i++)
                {
                    exclusion[i] = true;
                }
                foreach (var p in c.Points)
                {
                    exclusion[p.Id] = false;
                }
                lwg = new LightWeightGraph((LightWeightGraph)_data, exclusion);
            }
            else // Distance matrix or PointSet
            {
                Debug.Assert(mat != null, "mat != null");
                var subMatrix = mat.GetReducedDataSet(clusterSubset);
                // Generate our graph
                lwg = _graphGen.GenerateGraph(subMatrix.Mat);
            }

            subsetMap.Add(c.ClusterId, clusterSubset.ToArray());
            lwg.IsWeighted = _weighted;
            Integrity v = new Integrity(lwg, _reassignNodes, _alpha, _beta);
            _vatNodeRemovalOrder = v.NodeRemovalOrder;
            _vatNumNodesRemoved = v.NumNodesRemoved;
            if (_hillClimb)
            {
                v.HillClimb();
            }
            vatMap.Add(c.ClusterId, v);
        }

        meta.AppendLine("All calculated Integrity values:");

        // Now find the minimum Integrity value
        int minVatCluster = 0;
        double minVatValue = double.MaxValue;
        foreach (var c in vatMap)
        {
            meta.Append(String.Format("{0} ", c.Value.MinVat));
            if (c.Value.MinVat < minVatValue)
            {
                minVatCluster = c.Key;
                minVatValue = c.Value.MinVat;
            }
        }
        meta.AppendLine();

        // Now merge the sub-partition into the cluster
        var minVAT = vatMap[minVatCluster];
        var subPartition = minVAT.GetPartition();
        var nodeIndexMap = subsetMap[minVatCluster];

        meta.AppendFormat("Integrity: MinIntegrity={0}\n", minVAT.MinVat);
        meta.AppendFormat("Removed Count:{0} \n", minVAT.NumNodesRemoved);
        meta.AppendLine(String.Join(",", minVAT.NodeRemovalOrder.GetRange(0, minVAT.NumNodesRemoved).Select(c => nodeIndexMap[c])));

        partition.MergeSubPartition(subPartition, nodeIndexMap, minVatCluster);
        vatMap.Remove(minVatCluster);
        subsetMap.Remove(minVatCluster);
    }

    partition.MetaData = meta.ToString();
    return partition;
}
// TODO: If we are willing to take a performance hit, we could abstract both the promote and partition methods.
// TODO: Some partition methods actually DEPEND on the promotion method.
/// <summary>
/// Chooses two <see cref="MNodeEntry{T}"/>s to be promoted up the tree. The two nodes are chosen
/// according to the mM_RAD split policy with balanced partitions defined in reference [1] pg. 431.
/// </summary>
/// <param name="entries">The entries from which the two nodes will be chosen.</param>
/// <param name="isInternalNode">Specifies if the <paramref name="entries"/> list parameter comes from an internal node.</param>
/// <returns>The indexes of the element pair which are the two objects to promote.</returns>
private PromotionResult<int> Promote(MNodeEntry<int>[] entries, bool isInternalNode)
{
    // We only store the indexes of the pairs.
    var uniquePairs = Utilities.UniquePairs(entries.Length);
    var distanceMatrix = new DistanceMatrix<T>(entries.Select(e => this.internalArray[e.Value]).ToArray(), this.Metric);

    /*
     * 2. mM_RAD Promotion and Balanced Partitioning
     * Part of performing the mM_RAD promotion algorithm is
     * implicitly calculating all possible partitions.
     * For each pair of objects we calculate a balanced partition.
     * The pair for which the maximum of the two covering radii is the smallest
     * is the pair of objects we promote.
     * In the iterations below, everything is index-based to keep it as fast as possible.
     */

    // The minimum values which will be traced throughout the mM_RAD algorithm
    var minPair = new Tuple<int, int>(-1, -1);
    var minMaxRadius = double.MaxValue;
    var minFirstPartition = new List<int>();
    var minSecondPartition = new List<int>();
    var minFirstPromotedObject = new MNodeEntry<int>();
    var minSecondPromotedObject = new MNodeEntry<int>();

    // We iterate through each pair, performing a balanced partition of the remaining points.
    foreach (var pair in uniquePairs)
    {
        // Get the indexes of the points not in the current pair
        var pointsNotInPair = Enumerable.Range(0, entries.Length).Except(new[] { pair.Item1, pair.Item2 }).ToList();

        // TODO: Optimize
        var partitions = this.BalancedPartition(pair, pointsNotInPair, distanceMatrix);
        var localFirstPartition = partitions.Item1;
        var localSecondPartition = partitions.Item2;

        /*
         * As specified in reference [1] pg. 430, if we are splitting a leaf node,
         * then the covering radius of promoted object O_1 with partition P_1 is
         *     coveringRadius_O_1 = max{ distance(O_1, O_i) | where O_i in P_1 }
         * If we are splitting an internal node, then the covering radius
         * of promoted object O_1 with partition P_1 is
         *     coveringRadius_O_1 = max{ distance(O_1, O_i) + CoveringRadius of O_i | where O_i in P_1 }
         */
        var firstPromotedObjectCoveringRadius = localFirstPartition.MaxDistanceFromFirst(distanceMatrix);
        var secondPromotedObjectCoveringRadius = localSecondPartition.MaxDistanceFromFirst(distanceMatrix);
        var localMinMaxRadius = Math.Max(firstPromotedObjectCoveringRadius, secondPromotedObjectCoveringRadius);

        if (isInternalNode)
        {
            firstPromotedObjectCoveringRadius = this.CalculateCoveringRadius(pair.Item1, localFirstPartition, distanceMatrix, entries);
            secondPromotedObjectCoveringRadius = this.CalculateCoveringRadius(pair.Item2, localSecondPartition, distanceMatrix, entries);
        }

        if (localMinMaxRadius < minMaxRadius)
        {
            minMaxRadius = localMinMaxRadius;
            minPair = pair;
            minFirstPromotedObject.CoveringRadius = firstPromotedObjectCoveringRadius;
            minFirstPartition = localFirstPartition;
            minSecondPromotedObject.CoveringRadius = secondPromotedObjectCoveringRadius;
            minSecondPartition = localSecondPartition;
        }
    }

    /*
     * 3. Creating the MNodeEntry Objects
     * Now that we have correctly identified the objects to be promoted and each partition,
     * we start setting and/or calculating some of the properties on the node entries.
     */

    // Set values of promoted objects
    var firstPartition = new List<MNodeEntry<int>>();
    var secondPartition = new List<MNodeEntry<int>>();
    minFirstPromotedObject.Value = entries[minPair.Item1].Value;
    minSecondPromotedObject.Value = entries[minPair.Item2].Value;

    // TODO: Set distance from parent in partition
    firstPartition.AddRange(entries.WithIndexes(minFirstPartition));
    for (int i = 0; i < firstPartition.Count; i++)
    {
        firstPartition[i].DistanceFromParent = distanceMatrix[minFirstPartition[0], minFirstPartition[i]];
    }

    secondPartition.AddRange(entries.WithIndexes(minSecondPartition));
    for (int i = 0; i < secondPartition.Count; i++)
    {
        secondPartition[i].DistanceFromParent = distanceMatrix[minSecondPartition[0], minSecondPartition[i]];
    }

    var promotionResult = new PromotionResult<int>
    {
        FirstPromotionObject = minFirstPromotedObject,
        SecondPromotionObject = minSecondPromotedObject,
        FirstPartition = firstPartition,
        SecondPartition = secondPartition
    };

    // TODO: This method is called from the split method. In the split method we call both promote and partition in one method.
    return promotionResult;
}
public LightWeightGraph GetLOOGraph(DistanceMatrix distances, int numNeighbors, bool[] exclusion = null, int numToLeaveOut = 0)
{
    int numNodes = distances.Count;
    var nodes = new LightWeightGraph.LightWeightNode[numNodes];
    List<int>[] edgeLists = new List<int>[numNodes];
    List<double>[] edgeWeights = new List<double>[numNodes];
    for (int i = 0; i < numNodes; i++)
    {
        edgeLists[i] = new List<int>();
        edgeWeights[i] = new List<double>();
    }

    // Prevent redundant edges
    HashSet<Tuple<int, int>> addedEdges = new HashSet<Tuple<int, int>>();

    // Our comparator
    MinHeapPriorityQueue<Tuple<int, double>>.isGreaterThan comp = ((x, y) => x.Item2 > y.Item2);

    // Deal with the _skipLast choice
    int lastNeighbor = (_skipLast) ? numNodes - 1 : numNodes;

    // Add edges
    for (int i = 0; i < lastNeighbor; i++)
    {
        if (exclusion != null && exclusion[i])
        {
            continue;
        }

        // Deal with the leave-one-out
        double probability = (double)numToLeaveOut / numNodes;
        double rollDie = rnd.NextDouble();
        if (rollDie < probability)
        {
            _numLeftOut++;
            continue;
        }

        // Get the list of candidate edges
        List<Tuple<int, double>> edges = new List<Tuple<int, double>>();
        for (int j = 0; j < numNodes; j++)
        {
            // Make sure we don't load our heap with self-edges
            if (i != j)
            {
                edges.Add(new Tuple<int, double>(j, distances[i, j]));
            }
        }

        // Build the heap
        MinHeapPriorityQueue<Tuple<int, double>> heap = new MinHeapPriorityQueue<Tuple<int, double>>(comp);
        heap.addAll(edges);

        // Now add all of the neighbors
        for (int edgeNum = 0; edgeNum < numNeighbors; edgeNum++)
        {
            if (heap.isEmpty())
            {
                break;
            }

            Tuple<int, double> e = heap.extractMin();
            Tuple<int, int> edgeNodePair = (e.Item1 < i) ? new Tuple<int, int>(e.Item1, i) : new Tuple<int, int>(i, e.Item1);

            if (!addedEdges.Contains(edgeNodePair))
            {
                // Make sure we don't add this edge again in the future
                addedEdges.Add(edgeNodePair);

                // Add the double edge now
                edgeLists[i].Add(e.Item1);
                edgeLists[e.Item1].Add(i);
                edgeWeights[i].Add(e.Item2);
                edgeWeights[e.Item1].Add(e.Item2);
            }
        }
    }

    for (int i = 0; i < numNodes; i++)
    {
        nodes[i] = new LightWeightGraph.LightWeightNode(i, true, edgeLists[i], edgeWeights[i]);
    }
    return new LightWeightGraph(nodes, true);
}
/// <summary>
/// Calculates an efficient path between all waypoints based on time or distance.
/// </summary>
/// <param name="matrix">A precalculated distance matrix (n x n).</param>
/// <param name="tspOptimization">The metric in which to base the TSP algorithm.</param>
/// <returns>An efficient path between all waypoints based on time or distance.</returns>
public override async Task<TspResult> Solve(DistanceMatrix matrix, TspOptimizationType tspOptimization)
{
    return await Task<TspResult>.Run<TspResult>(() =>
    {
        int population = matrix.Origins.Count;
        double[] weight = new double[population];
        var minTour = new int[population];
        int[,] chromosome = new int[population, population];
        double minWeight = double.MaxValue;

        // Generate an initial population of random tours.
        for (int p = 0; p < population; p++)
        {
            bool[] used = new bool[population];
            int[] currentOrder = new int[population];

            for (int n = 0; n < population; n++)
            {
                used[n] = false;
            }

            for (int n = 0; n < population; n++)
            {
                int i;
                do
                {
                    i = random.Next(population);
                } while (used[i]);

                used[i] = true;
                currentOrder[n] = i;
            }

            for (int n = 0; n < population; n++)
            {
                chromosome[p, n] = currentOrder[n];
            }

            if (tspOptimization == TspOptimizationType.TravelTime)
            {
                weight[p] = matrix.GetEdgeTime(currentOrder, true);
            }
            else
            {
                weight[p] = matrix.GetEdgeDistance(currentOrder, true);
            }

            if (weight[p] < minWeight)
            {
                minWeight = weight[p];
                for (int n = 0; n < population; n++)
                {
                    minTour[n] = chromosome[p, n];
                }
            }
        }

        for (int g = 0; g < Generations; g++)
        {
            if (random.NextDouble() < MutationRate)
            {
                // Crossover branch: select two parents by binary tournament and recombine.
                int i, j, parent1, parent2;
                int[] p1 = new int[population];
                int[] p2 = new int[population];
                int[] o1 = new int[population];
                int[] o2 = new int[population];

                i = random.Next(population);
                j = random.Next(population);
                parent1 = (weight[i] < weight[j]) ? i : j;

                i = random.Next(population);
                j = random.Next(population);
                parent2 = (weight[i] < weight[j]) ? i : j;

                for (i = 0; i < population; i++)
                {
                    p1[i] = chromosome[parent1, i];
                    p2[i] = chromosome[parent2, i];
                }

                int cp1 = -1, cp2 = -1;
                do
                {
                    cp1 = random.Next(population);
                    cp2 = random.Next(population);
                } while (cp1 == cp2 || cp1 > cp2);

                Crossover(cp1, cp2, p1, p2, o1, o2, population, random);

                double o1Fitness;
                if (tspOptimization == TspOptimizationType.TravelTime)
                {
                    o1Fitness = matrix.GetEdgeTime(o1, true);
                }
                else
                {
                    o1Fitness = matrix.GetEdgeDistance(o1, true);
                }

                if (o1Fitness < weight[parent1])
                {
                    for (i = 0; i < population; i++)
                    {
                        chromosome[parent1, i] = o1[i];
                    }
                }

                double o2Fitness;
                if (tspOptimization == TspOptimizationType.TravelTime)
                {
                    o2Fitness = matrix.GetEdgeTime(o2, true);
                }
                else
                {
                    o2Fitness = matrix.GetEdgeDistance(o2, true);
                }

                if (o2Fitness < weight[parent2])
                {
                    for (i = 0; i < population; i++)
                    {
                        chromosome[parent2, i] = o2[i];
                    }
                }

                for (int p = 0; p < population; p++)
                {
                    if (weight[p] < minWeight)
                    {
                        minWeight = weight[p];
                        for (int n = 0; n < population; n++)
                        {
                            minTour[n] = chromosome[p, n];
                        }
                    }
                }
            }
            else
            {
                // Mutation branch: copy a tournament-selected tour, swap two cities, and replace a worst tour if the child is better.
                int i, j, p;
                int[] child = new int[population];

                i = random.Next(population);
                j = random.Next(population);
                p = (weight[i] < weight[j]) ? i : j;

                double childWeight;
                for (int n = 0; n < population; n++)
                {
                    child[n] = chromosome[p, n];
                }

                do
                {
                    i = random.Next(population);
                    j = random.Next(population);
                } while (i == j);

                int t = child[i];
                child[i] = child[j];
                child[j] = t;

                if (tspOptimization == TspOptimizationType.TravelTime)
                {
                    childWeight = matrix.GetEdgeTime(child, true);
                }
                else
                {
                    childWeight = matrix.GetEdgeDistance(child, true);
                }

                int maxIndex = int.MaxValue;
                double maxD = double.MinValue;
                for (int q = 0; q < population; q++)
                {
                    if (weight[q] >= maxD)
                    {
                        maxIndex = q;
                        maxD = weight[q];
                    }
                }

                int[] index = new int[population];
                int count = 0;
                for (int q = 0; q < population; q++)
                {
                    if (weight[q] == maxD)
                    {
                        index[count++] = q;
                    }
                }

                maxIndex = index[random.Next(count)];

                if (childWeight < weight[maxIndex])
                {
                    weight[maxIndex] = childWeight;
                    for (int n = 0; n < population; n++)
                    {
                        chromosome[maxIndex, n] = child[n];
                    }

                    if (childWeight < minWeight)
                    {
                        minWeight = childWeight;
                        for (int n = 0; n < population; n++)
                        {
                            minTour[n] = child[n];
                        }
                    }
                }
            }
        }

        // Ensure the first point is the starting point. Indices form a cycle, so we just need to shift.
        if (minTour[0] != 0)
        {
            var minTourList = minTour.ToList();
            var startIdx = minTourList.IndexOf(0);
            var order = new List<int>();
            order.AddRange(minTourList.GetRange(startIdx, minTourList.Count - startIdx));
            order.AddRange(minTourList.GetRange(0, startIdx));
            minTour = order.ToArray();
        }

        return new TspResult()
        {
            DistanceMatrix = matrix,
            OptimizedWeight = minWeight,
            OptimizedWaypoints = GetOptimizedWaypoints(matrix.Origins, minTour)
        };
    }).ConfigureAwait(false);
}
public static bool KNNGraphIsConnected(DistanceMatrix distances, int neighbors)
{
    return GetKNNGraph(distances, neighbors).isConnected();
}
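// For comparison with the quadratic probing in QuadraticMinKNN, the smallest connected
// neighbour count can also be found with a plain binary search over the static helper
// above, assuming connectivity is monotone in k (a k-NN graph's edges are a subset of
// the (k+1)-NN graph's). This helper is hypothetical, not part of the library.
static int MinConnectedK(DistanceMatrix distances)
{
    int lo = 1, hi = distances.Count - 1;
    while (lo < hi)
    {
        int mid = lo + (hi - lo) / 2;
        if (KNNGraphIsConnected(distances, mid))
        {
            hi = mid;     // Connected: try fewer neighbours.
        }
        else
        {
            lo = mid + 1; // Disconnected: need more neighbours.
        }
    }
    return lo;
}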
/// <summary>
/// Clustering.
///
/// If the clusterer doesn't make use of the distance matrix OR the distance metric, it should flag itself with DoesNotSupportDistanceMetrics.
/// </summary>
protected abstract IEnumerable<Cluster> Cluster(IntensityMatrix vmatrix, DistanceMatrix dmatrix, ArgsClusterer args, ConfigurationClusterer tag, ProgressReporter prog);
/// <summary>
/// Determines what needs calculating.
/// </summary>
private void Thread_AddFilterToCalculationList([Const] Core core, [Const] ConfigurationMetric metric, [Const] IntensityMatrix vmatrix, [Const] DistanceMatrix dmatrix, [Const] EClustererStatistics statistics, [Const] Cluster[] realClusters, [Const] ObsFilter obsFilter, [MutableUnsafe] List<ForStat> needsCalculating, [MutableSafe] ProgressParallelHandler progP)
{
    progP.SafeIncrement();

    IntensityMatrix vmatFiltered;
    DistanceMatrix dmatFiltered;
    int[] filteredIndices;

    if (obsFilter == null)
    {
        vmatFiltered = vmatrix;
        dmatFiltered = dmatrix;
        filteredIndices = null;
    }
    else
    {
        filteredIndices = vmatrix.Columns.Which(z => obsFilter.Test(z.Observation)).ToArray(); // TODO: Multiple iteration
        vmatFiltered = vmatrix.Subset(null, obsFilter, ESubsetFlags.None);
        dmatFiltered = null;
    }

    Dictionary<Cluster, IReadOnlyList<double>> centreVectors = new Dictionary<Cluster, IReadOnlyList<double>>();

    foreach (Cluster cluster in realClusters)
    {
        /////////////////////
        // ASSIGNMENT STATS
        var centre = cluster.GetCentre(ECentreMode.Average, ECandidateMode.Assignments);
        IReadOnlyList<double> centreVector = centre.Count != 0 ? centre[0] : null;

        if (filteredIndices != null)
        {
            centreVector = centreVector.Extract(filteredIndices);
        }

        centreVectors.Add(cluster, centreVector);
    }

    foreach (Assignment ass in Assignments)
    {
        ForStat f = new ForStat();
        f.Assignment = ass;
        f.ObsFilter = obsFilter;

        if (filteredIndices != null)
        {
            f.AssignmentVector = vmatFiltered.Vectors[ass.Vector.Index];
        }
        else
        {
            f.AssignmentVector = ass.Vector;
        }

        f.ClusterVector = centreVectors[ass.Cluster];

        if (statistics.HasFlag(EClustererStatistics.SilhouetteWidth))
        {
            if (dmatFiltered == null)
            {
                dmatFiltered = DistanceMatrix.Create(core, vmatrix, metric, ProgressReporter.GetEmpty());
            }
        }

        f.DistanceMatrix = dmatFiltered;

        lock (needsCalculating)
        {
            needsCalculating.Add(f);
        }
    }
}
/// <summary>
/// Calculates a distance matrix based on truck routing attributes.
/// </summary>
/// <param name="request">The distance matrix request to base the request on.</param>
/// <param name="remainingTimeCallback">A callback function in which the estimated remaining time in seconds is sent.</param>
/// <returns>A response which contains the truck routing based distance matrix.</returns>
public async Task<Response> Calculate(DistanceMatrixRequest request, Action<int> remainingTimeCallback)
{
    if (request.Destinations == null || request.Destinations.Count == 0)
    {
        request.Destinations = request.Origins;
    }

    int numIntervals = 1;
    int intervalMin = 0;
    double numBatches = 0;

    if (request.StartTime != null && request.StartTime.HasValue)
    {
        TimeIntervals = new List<DateTime>() { request.StartTime.Value };

        if (request.EndTime != null && request.EndTime.HasValue)
        {
            intervalMin = request.Resolution * 15;
            numIntervals = (int)Math.Floor((request.EndTime.Value - request.StartTime.Value).TotalMinutes / intervalMin);
        }

        numBatches = Math.Ceiling((double)(request.Destinations.Count * request.Origins.Count * numIntervals) / (double)ServiceManager.QpsLimit);
    }
    else
    {
        TimeIntervals = null;
        numBatches = Math.Ceiling((double)(request.Destinations.Count * request.Origins.Count) / (double)ServiceManager.QpsLimit);
    }

    // Assume an average processing time of 2 seconds per batch.
    remainingTimeCallback?.Invoke((int)Math.Round(numBatches * 2));

    MatrixCells = new List<DistanceMatrixCell>();

    // Calculate the first cell on its own to verify that the request can be made. If it fails, do not proceed.
    var firstResponse = await CalculateTruckRoute(request.Origins[0], request.Destinations[0], 0, request);

    if (firstResponse != null && firstResponse.ErrorDetails != null && firstResponse.ErrorDetails.Length > 0)
    {
        return firstResponse;
    }
    else if (firstResponse == null || firstResponse.ResourceSets == null || firstResponse.ResourceSets.Length == 0 || firstResponse.ResourceSets[0].Resources == null || firstResponse.ResourceSets[0].Resources.Length == 0)
    {
        return new Response()
        {
            ErrorDetails = new string[] { "Unable to calculate distance matrix." },
            StatusCode = 400,
            StatusDescription = "Bad request"
        };
    }

    var truckRoute = firstResponse.ResourceSets[0].Resources[0] as Route;

    MatrixCells.Add(new DistanceMatrixCell()
    {
        OriginIndex = 0,
        DestinationIndex = 0,
        HasError = false,
        TravelDistance = truckRoute.TravelDistance,
        TravelDuration = (request.TimeUnits == TimeUnitType.Minute) ? truckRoute.TravelDuration * 60 : truckRoute.TravelDuration
    });

    var cellTasks = new List<Task>();

    if (request.StartTime != null && request.StartTime.HasValue)
    {
        TimeIntervals.Clear();

        for (var k = 0; k < numIntervals; k++)
        {
            TimeIntervals.Add(request.StartTime.Value.AddMinutes(k * intervalMin));

            for (var i = 0; i < request.Origins.Count; i++)
            {
                for (var j = 0; j < request.Destinations.Count; j++)
                {
                    // Skip the first cell as we already calculated it.
                    if (!(k == 0 && i == 0 && j == 0))
                    {
                        cellTasks.Add(CalculateCell(i, j, k, request));
                    }
                }
            }
        }
    }
    else
    {
        for (var i = 0; i < request.Origins.Count; i++)
        {
            for (var j = 0; j < request.Destinations.Count; j++)
            {
                // Skip the first cell as we already calculated it.
                if (!(i == 0 && j == 0))
                {
                    cellTasks.Add(CalculateCell(i, j, -1, request));
                }
            }
        }
    }

    await ServiceHelper.WhenAllTaskLimiter(cellTasks);

    var dm = new DistanceMatrix()
    {
        Origins = request.Origins,
        Destinations = request.Destinations,
        Results = MatrixCells.ToArray(),
        TimeIntervals = TimeIntervals
    };

    return new Response()
    {
        StatusCode = firstResponse.StatusCode,
        StatusDescription = firstResponse.StatusDescription,
        TraceId = firstResponse.TraceId,
        AuthenticationResultCode = firstResponse.AuthenticationResultCode,
        BrandLogoUri = firstResponse.BrandLogoUri,
        Copyright = firstResponse.Copyright,
        ResourceSets = new ResourceSet[]
        {
            new ResourceSet()
            {
                Resources = new Resource[] { dm }
            }
        }
    };
}
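// A usage sketch for the truck-based calculator above. The class name
// (TruckDistanceMatrixCalculator) is a stand-in for the declaring type, and
// "waypoints" and "bingMapsKey" are supplied by the caller; the request properties
// mirror those used by the TSP solver earlier in this section.
var request = new DistanceMatrixRequest()
{
    Origins = waypoints,
    TravelMode = TravelModeType.Truck,
    BingMapsKey = bingMapsKey
};

var calculator = new TruckDistanceMatrixCalculator();
Response response = await calculator.Calculate(request, secondsLeft =>
{
    Console.WriteLine("Estimated time remaining: " + secondsLeft + "s");
});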
/// <summary>
/// Action completed - calculate statistics.
/// </summary>
internal void FinalizeResults(Core core, ConfigurationMetric metric, IntensityMatrix vmatrix, DistanceMatrix dmatrix, EClustererStatistics statistics, ProgressReporter prog)
{
    UiControls.Assert(Assignments.IsEmpty(), "FinalizeResults on ClusterResults already called.");

    // Get ALL the assignments
    foreach (Cluster cluster in RealClusters)
    {
        Assignments.AddRange(cluster.Assignments.List);
    }

    RecalculateStatistics(core, metric, vmatrix, dmatrix, statistics, prog);
}
private List<int> ExtractENNSPopulation(List<T> mixedPopulation, List<int> pool, int desiredEliteSize)
{
    int poolSize = pool.Count;
    int mixedSize = mixedPopulation.Count;

    // Individuals with infinite diversity are always kept.
    var filtered = pool.Where(index => float.IsPositiveInfinity(mixedPopulation[index].Diversity)).Distinct().ToList();
    int numInf = filtered.Count;

    if (desiredEliteSize <= numInf)
    {
        return filtered.Take(desiredEliteSize).ToList();
    }

    var elite = pool.Distinct().ToList();
    pool.Clear();

    if (desiredEliteSize >= elite.Count)
    {
        return elite;
    }

    var distance = new float[poolSize, poolSize];
    var indexArray = new int[poolSize];
    var originalArray = new int[mixedSize];

    for (int i = 0; i < mixedSize; ++i)
    {
        originalArray[i] = -1;
    }

    int counter = 0;
    foreach (int index in elite)
    {
        indexArray[counter] = index;
        originalArray[indexArray[counter]] = counter++;
    }

    // Build and sort all pairwise fitness distances.
    var distArray = new List<DistanceMatrix>();
    for (int i = 0; i < poolSize; ++i)
    {
        for (int j = i + 1; j < poolSize; ++j)
        {
            var distMatrix = new DistanceMatrix { index1 = indexArray[i], index2 = indexArray[j] };
            distance[j, i] = distance[i, j] = distMatrix.distance = Math.Abs(mixedPopulation[distMatrix.index1].Fitness - mixedPopulation[distMatrix.index2].Fitness);
            distArray.Add(distMatrix);
        }
    }

    distArray.Sort();

    // Repeatedly drop the less diverse member of the closest remaining pair.
    int idx = 0;
    while (elite.Count > desiredEliteSize && idx < distArray.Count)
    {
        int index1, index2;
        do
        {
            var temp = distArray[idx++];
            index1 = temp.index1;
            index2 = temp.index2;
        } while ((originalArray[index1] == -1 || originalArray[index2] == -1) && idx < distArray.Count);

        if (idx >= distArray.Count)
        {
            break;
        }

        if (float.IsPositiveInfinity(mixedPopulation[index1].Diversity) && float.IsPositiveInfinity(mixedPopulation[index2].Diversity))
        {
            continue;
        }

        if (float.IsPositiveInfinity(mixedPopulation[index1].Diversity))
        {
            elite.Remove(index2);
            pool.Add(index2);
            originalArray[index2] = -1;
        }
        else if (float.IsPositiveInfinity(mixedPopulation[index2].Diversity))
        {
            elite.Remove(index1);
            pool.Add(index1);
            originalArray[index1] = -1;
        }
        else
        {
            var dist1 = float.PositiveInfinity;
            foreach (int index in elite)
            {
                if (index != index1 && index != index2)
                {
                    if (dist1 > distance[originalArray[index1], originalArray[index]])
                    {
                        dist1 = distance[originalArray[index1], originalArray[index]];
                    }
                }
            }

            var dist2 = float.PositiveInfinity;
            foreach (int index in elite)
            {
                if (index != index1 && index != index2)
                {
                    if (dist2 > distance[originalArray[index2], originalArray[index]])
                    {
                        dist2 = distance[originalArray[index2], originalArray[index]];
                    }
                }
            }

            if (dist1 < dist2)
            {
                elite.Remove(index1);
                pool.Add(index1);
                originalArray[index1] = -1;
            }
            else
            {
                elite.Remove(index2);
                pool.Add(index2);
                originalArray[index2] = -1;
            }
        }
    }

    while (elite.Count > desiredEliteSize)
    {
        var temp = elite[0];
        pool.Add(temp);
        elite.Remove(temp);
    }

    return elite;
}