Example #1
		public static void For(int start, int stop, int stepLength, ForLoopBody loopBody, bool close)
		{
			// get an instance of the parallel computation manager
			Parallel instance = new Parallel();
			instance.Initialize();
			instance.ForLoop(start, stop, stepLength, loopBody, close);
		}
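A minimal usage sketch of this wrapper, assuming ForLoopBody is a void delegate over the loop index (as in AForge.NET) and that close releases the worker threads afterwards; the call site below is hypothetical:

		// Hypothetical call site: process indices 0, 2, 4, ..., 98 on worker
		// threads, then shut the workers down because close is true.
		For(0, 100, 2, index =>
		{
			Console.WriteLine(index);
		}, true);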
Example #2
        /// <summary>
        /// Responsible for population migration
        /// </summary>
        /// <param name="iteration"></param>
        private void MigratePopulation(int iteration)
        {
            switch (iteration)
            {
            case 0:
                // Migrate values between populations
                Parallel.For(0, 5,
                             migrationIteration =>
                             _populationArray[migrationIteration].Migrate(_populationArray[5 + migrationIteration],
                                                                          15, new EliteSelection()));
                break;

            case 1:
                // Migrate values between populations
                for (int i = 0; i < 10; i += 2)
                {
                    _populationArray[i].Migrate(_populationArray[i + 1], 15, new EliteSelection());
                }
                break;
                //case 2:
                //    // Migrate values between populations
                //    Parallel.For(0, 5,
                //                 migrationIteration =>
                //                 _populationArray[migrationIteration].Migrate(_populationArray[9 - migrationIteration],
                //                                                              15, new EliteSelection()));
                //    break;
            }
        }
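The parallel branch is safe because each of the five lambda invocations touches a disjoint pair of populations (index i and 5 + i), so no two iterations share state. A sketch of the same pattern with an explicit degree-of-parallelism cap (the MaxDegreeOfParallelism value is illustrative):

            // Hypothetical variant: cap worker threads while keeping the
            // disjoint-pair invariant that makes the loop lock-free.
            Parallel.For(0, 5, new ParallelOptions { MaxDegreeOfParallelism = 2 }, i =>
                _populationArray[i].Migrate(_populationArray[5 + i], 15, new EliteSelection()));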
Example #3
 // Run every dependency of the computation whose ExecuteBefore flag matches
 // the requested phase; the dependencies are handled in parallel.
 private static void CallDependencies(Computation c, bool executeBefore)
 {
     TaskParallel.ForEach(c.TransformationRule.Dependencies, requirement =>
     {
         if (requirement.ExecuteBefore == executeBefore)
         {
             requirement.HandleDependency(c);
         }
     });
 }
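TaskParallel here appears to be a project-local helper; assuming it mirrors the BCL's Parallel.ForEach, the equivalent call with the standard scheduler would read:

     // Equivalent with the TPL (assumption: TaskParallel mirrors Parallel.ForEach):
     System.Threading.Tasks.Parallel.ForEach(c.TransformationRule.Dependencies, requirement =>
     {
         if (requirement.ExecuteBefore == executeBefore)
             requirement.HandleDependency(c);
     });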
Example #4
        public static void Handle(List<A> aArray)
        {
            int    incorrectCount = 0;
            object o = new object();

            // Note: the lambda must not be 'async' here. Parallel.ForEach does not
            // await async-void bodies, so the count below could otherwise be
            // printed before the loop bodies finish. Nothing inside is awaited,
            // so the modifier was simply dropped.
            Par.ForEach(aArray, a =>
            {
                if (a.Incorrect)
                {
                    lock (o)
                    {
                        incorrectCount++;
                    }
                }
                else
                {
                    Thread.Sleep(a.I * 1000);
                    Console.WriteLine(a.I);
                }
            });
            Console.WriteLine($"Total incorrect {incorrectCount}");
        }
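Since the lock only guards a single integer increment, a lighter-weight variant uses Interlocked; a sketch of the same loop with that change (types as above assumed):

            // Lock-free alternative for the counter.
            Par.ForEach(aArray, a =>
            {
                if (a.Incorrect)
                    Interlocked.Increment(ref incorrectCount);
            });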
Example #5
        private void split(DecisionNode root, double[][] input, int[] output)
        {
            // 2. If all examples are for the same class, return the single-node
            //    tree with the output label corresponding to this common class.
            double entropy = Statistics.Tools.Entropy(output, outputClasses);

            if (entropy == 0)
            {
                if (output.Length > 0)
                    root.Output = output[0];
                return;
            }

            // 3. If the set of predicting attributes is empty (or the maximum
            //    height has been reached along this path), return the single-node
            //    tree with the output label corresponding to the most common value
            //    of the target attribute in the examples.
            int predictors = attributes.Count(x => x == false);

            // Note: used attributes = attributes.Length - predictors, so this
            // check fires once maxHeight attributes have been consumed.
            if (predictors <= attributes.Length - maxHeight)
            {
                root.Output = Statistics.Tools.Mode(output);
                return;
            }


            // 4. Otherwise, try to select the attribute which
            //    best explains the data sample subset.

            double[] scores = new double[predictors];
            double[] entropies = new double[predictors];
            double[] thresholds = new double[predictors];
            int[][][] partitions = new int[predictors][][];

            // Retrieve candidate attribute indices
            int[] candidates = new int[predictors];
            for (int i = 0, k = 0; i < attributes.Length; i++)
                if (!attributes[i]) candidates[k++] = i;


            // For each candidate attribute, compute its gain ratio. Under the
            // SERIAL build flag this runs as a plain loop; otherwise the same
            // body is dispatched through Parallel.For.
#if SERIAL
            for (int i = 0; i < scores.Length; i++)
#else
            Parallel.For(0, scores.Length, i =>
#endif
            {
                scores[i] = computeGainRatio(input, output, candidates[i],
                    entropy, out partitions[i], out thresholds[i]);
            }
#if !SERIAL
);
#endif

            // Select the attribute with maximum gain ratio
            int maxGainIndex; scores.Max(out maxGainIndex);
            var maxGainPartition = partitions[maxGainIndex];
            var maxGainEntropy = entropies[maxGainIndex];
            var maxGainAttribute = candidates[maxGainIndex];
            var maxGainRange = inputRanges[maxGainAttribute];
            var maxGainThreshold = thresholds[maxGainIndex];

            // Mark this attribute as already used
            attributes[maxGainAttribute] = true;

            double[][] inputSubset;
            int[] outputSubset;

            // Now, create next nodes and pass those partitions as their responsibilities. 
            if (tree.Attributes[maxGainAttribute].Nature == DecisionVariableKind.Discrete)
            {
                // This is a discrete nature attribute. We will branch at each
                // possible value for the discrete variable and call recursion.
                DecisionNode[] children = new DecisionNode[maxGainPartition.Length];

                // Create a branch for each possible value
                for (int i = 0; i < children.Length; i++)
                {
                    children[i] = new DecisionNode(tree)
                    {
                        Parent = root,
                        Value = i + maxGainRange.Min,
                        Comparison = ComparisonKind.Equal,
                    };

                    inputSubset = input.Submatrix(maxGainPartition[i]);
                    outputSubset = output.Submatrix(maxGainPartition[i]);
                    split(children[i], inputSubset, outputSubset); // recursion
                }

                root.Branches.AttributeIndex = maxGainAttribute;
                root.Branches.AddRange(children);
            }

            else if (maxGainPartition.Length > 1)
            {
                // This is a continuous nature attribute, and we achieved two partitions
                // using the partitioning scheme. We will branch on two possible settings:
                // either the value is greater than the currently detected optimal
                // threshold, or it is less than or equal to it.

                DecisionNode[] children = 
                {
                    new DecisionNode(tree) 
                    {
                        Parent = root, Value = maxGainThreshold,
                        Comparison = ComparisonKind.LessThanOrEqual 
                    },

                    new DecisionNode(tree)
                    {
                        Parent = root, Value = maxGainThreshold,
                        Comparison = ComparisonKind.GreaterThan
                    }
                };

                // Create a branch for lower values
                inputSubset = input.Submatrix(maxGainPartition[0]);
                outputSubset = output.Submatrix(maxGainPartition[0]);
                split(children[0], inputSubset, outputSubset);

                // Create a branch for higher values
                inputSubset = input.Submatrix(maxGainPartition[1]);
                outputSubset = output.Submatrix(maxGainPartition[1]);
                split(children[1], inputSubset, outputSubset);

                root.Branches.AttributeIndex = maxGainAttribute;
                root.Branches.AddRange(children);
            }
            else
            {
                // This is a continuous nature attribute, but all variables are equal
                // to a constant. If there is only a constant value as the predictor 
                // and there are multiple output labels associated with this constant
                // value, there isn't much we can do. This node will be a leaf.

                // We will set the class label for this node as the
                // majority of the currently selected output classes.

                outputSubset = output.Submatrix(maxGainPartition[0]);
                root.Output = Statistics.Tools.Mode(outputSubset);
            }

            attributes[maxGainAttribute] = false;
        }
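computeGainRatio itself is not part of this listing. For reference, the score it produces is information gain divided by split information; a minimal, self-contained sketch of that formula (the helper name and signature are illustrative, not the library's API):

        // Hypothetical helper: gain ratio of one candidate split.
        // entropyBefore = H(S); subsetEntropies[v] = H(S_v); subsetSizes[v] = |S_v|.
        static double GainRatio(double entropyBefore, double[] subsetEntropies, int[] subsetSizes)
        {
            int total = 0;
            for (int v = 0; v < subsetSizes.Length; v++)
                total += subsetSizes[v];

            double gain = entropyBefore;   // gain = H(S) - sum_v p_v * H(S_v)
            double splitInfo = 0;          // splitInfo = -sum_v p_v * log2(p_v)
            for (int v = 0; v < subsetSizes.Length; v++)
            {
                double p = (double)subsetSizes[v] / total;
                gain -= p * subsetEntropies[v];
                if (p > 0)
                    splitInfo -= p * Math.Log(p, 2);
            }

            return splitInfo == 0 ? 0 : gain / splitInfo;
        }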
Example #6
        private void split(DecisionNode root, double[][] input, int[] output, int height)
        {
            // 2. If all examples are for the same class, return the single-node
            //    tree with the output label corresponding to this common class.
            double entropy = Measures.Entropy(output, outputClasses);

            if (entropy == 0)
            {
                if (output.Length > 0)
                {
                    root.Output = output[0];
                }
                return;
            }

            // 3. If the set of predicting attributes is empty, then return the single-node
            //    tree with the output label corresponding to the most common value of
            //    the target attribute in the examples.

            // indices of the variables that have been used fewer times than the limit
            int[] candidates = Matrix.Find(attributeUsageCount, x => x < join);

            if (candidates.Length == 0 || (maxHeight > 0 && height == maxHeight))
            {
                root.Output = Measures.Mode(output);
                return;
            }


            // 4. Otherwise, try to select the attribute which
            //    best explains the data sample subset. If the tree
            //    is part of a random forest, only consider a percentage
            //    of the candidate attributes at each split point

            if (MaxVariables > 0)
            {
                candidates = Vector.Sample(candidates, MaxVariables);
            }

            var scores     = new double[candidates.Length];
            var thresholds = new double[candidates.Length];
            var partitions = new int[candidates.Length][][];

            // For each attribute in the data set
            Parallel.For(0, scores.Length, ParallelOptions, i =>
            {
                scores[i] = computeGainRatio(input, output, candidates[i],
                                             entropy, out partitions[i], out thresholds[i]);
            });

            // Select the attribute with maximum gain ratio
            int maxGainIndex; scores.Max(out maxGainIndex);
            var maxGainPartition = partitions[maxGainIndex];
            var maxGainAttribute = candidates[maxGainIndex];
            var maxGainRange     = inputRanges[maxGainAttribute];
            var maxGainThreshold = thresholds[maxGainIndex];

            // Mark this attribute as already used
            attributeUsageCount[maxGainAttribute]++;

            double[][] inputSubset;
            int[]      outputSubset;

            // Now, create next nodes and pass those partitions as their responsibilities.
            if (tree.Attributes[maxGainAttribute].Nature == DecisionVariableKind.Discrete)
            {
                // This is a discrete nature attribute. We will branch at each
                // possible value for the discrete variable and call recursion.
                DecisionNode[] children = new DecisionNode[maxGainPartition.Length];

                // Create a branch for each possible value
                for (int i = 0; i < children.Length; i++)
                {
                    children[i] = new DecisionNode(tree)
                    {
                        Parent     = root,
                        Value      = i + maxGainRange.Min,
                        Comparison = ComparisonKind.Equal,
                    };

                    inputSubset  = input.Get(maxGainPartition[i]);
                    outputSubset = output.Get(maxGainPartition[i]);
                    split(children[i], inputSubset, outputSubset, height + 1); // recursion
                }

                root.Branches.AttributeIndex = maxGainAttribute;
                root.Branches.AddRange(children);
            }

            else if (maxGainPartition.Length > 1)
            {
                // This is a continuous nature attribute, and we achieved two partitions
                // using the partitioning scheme. We will branch on two possible settings:
                // either the value is greater than a currently detected optimal threshold
                // or it is less.

                DecisionNode[] children =
                {
                    new DecisionNode(tree)
                    {
                        Parent     = root, Value = maxGainThreshold,
                        Comparison = ComparisonKind.LessThanOrEqual
                    },

                    new DecisionNode(tree)
                    {
                        Parent     = root, Value = maxGainThreshold,
                        Comparison = ComparisonKind.GreaterThan
                    }
                };

                // Create a branch for lower values
                inputSubset  = input.Get(maxGainPartition[0]);
                outputSubset = output.Get(maxGainPartition[0]);
                split(children[0], inputSubset, outputSubset, height + 1);

                // Create a branch for higher values
                inputSubset  = input.Get(maxGainPartition[1]);
                outputSubset = output.Get(maxGainPartition[1]);
                split(children[1], inputSubset, outputSubset, height + 1);

                root.Branches.AttributeIndex = maxGainAttribute;
                root.Branches.AddRange(children);
            }
            else
            {
                // This is a continuous nature attribute, but all variables are equal
                // to a constant. If there is only a constant value as the predictor
                // and there are multiple output labels associated with this constant
                // value, there isn't much we can do. This node will be a leaf.

                // We will set the class label for this node as the
                // majority of the currently selected output classes.

                outputSubset = output.Get(maxGainPartition[0]);
                root.Output  = Measures.Mode(outputSubset);
            }

            attributeUsageCount[maxGainAttribute]--;
        }
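Unlike Example #5, this variant tracks usage counts (incremented before recursion, decremented after) so an attribute may be reused up to the join limit on other branches. Vector.Sample then draws a random subset of the candidate indices, which is what gives a random forest its per-split feature subsampling. A rough sketch of sampling without replacement (a hypothetical stand-in, not the library's implementation):

        // Hypothetical stand-in for Vector.Sample: k distinct indices, uniformly at random.
        static int[] SampleWithoutReplacement(int[] candidates, int k, Random rng)
        {
            int[] pool = (int[])candidates.Clone();
            for (int i = 0; i < k; i++)
            {
                // Partial Fisher-Yates shuffle: fix position i with a random pick.
                int j = i + rng.Next(pool.Length - i);
                int tmp = pool[i]; pool[i] = pool[j]; pool[j] = tmp;
            }
            int[] result = new int[k];
            Array.Copy(pool, result, k);
            return result;
        }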
Example #7
        private void split(DecisionNode root, double[][] input, int[] output)
        {
            // 2. If all examples are for the same class, return the single-node
            //    tree with the output label corresponding to this common class.
            double entropy = Statistics.Tools.Entropy(output, outputClasses);

            if (entropy == 0)
            {
                if (output.Length > 0)
                {
                    root.Output = output[0];
                }
                return;
            }

            // 3. If the set of predicting attributes is empty, then return the single-node
            //    tree with the output label corresponding to the most common value of
            //    the target attribute in the examples.
            int predictors = attributes.Count(x => x == false);

            if (predictors == 0)
            {
                root.Output = Statistics.Tools.Mode(output);
                return;
            }


            // 4. Otherwise, try to select the attribute which
            //    best explains the data sample subset.

            double[]  scores     = new double[predictors];
            double[]  entropies  = new double[predictors];
            double[]  thresholds = new double[predictors];
            int[][][] partitions = new int[predictors][][];

            // Retrieve candidate attribute indices
            int[] candidates = new int[predictors];
            for (int i = 0, k = 0; i < attributes.Length; i++)
            {
                if (!attributes[i])
                {
                    candidates[k++] = i;
                }
            }


            // For each attribute in the data set
            Parallel.For(0, scores.Length, i =>
            {
                scores[i] = computeGainRatio(input, output, candidates[i],
                                             entropy, out partitions[i], out thresholds[i]);
            });

            // Select the attribute with maximum gain ratio
            int maxGainIndex; scores.Max(out maxGainIndex);
            var maxGainPartition = partitions[maxGainIndex];
            var maxGainEntropy   = entropies[maxGainIndex];
            var maxGainAttribute = candidates[maxGainIndex];
            var maxGainRange     = inputRanges[maxGainAttribute];
            var maxGainThreshold = thresholds[maxGainIndex];

            // Mark this attribute as already used
            attributes[maxGainAttribute] = true;

            double[][] inputSubset;
            int[]      outputSubset;

            // Now, create the next nodes and pass those partitions as their responsibilities.
            if (tree.Attributes[maxGainAttribute].Nature == DecisionAttributeKind.Discrete)
            {
                // Discrete attribute: create one branch per possible value.
                DecisionNode[] children = new DecisionNode[maxGainPartition.Length];

                for (int i = 0; i < children.Length; i++)
                {
                    children[i] = new DecisionNode(tree)
                    {
                        Parent     = root,
                        Value      = i + maxGainRange.Min,
                        Comparison = ComparisonKind.Equal,
                    };

                    inputSubset  = input.Submatrix(maxGainPartition[i]);
                    outputSubset = output.Submatrix(maxGainPartition[i]);

                    split(children[i], inputSubset, outputSubset); // recursion
                }

                root.Branches = new DecisionBranchNodeCollection(maxGainAttribute, children);
            }
            else
            {
                // Continuous attribute: branch on the detected optimal threshold.
                DecisionNode[] children =
                {
                    new DecisionNode(tree)
                    {
                        Parent     = root, Value = maxGainThreshold,
                        Comparison = ComparisonKind.LessThanOrEqual
                    },

                    new DecisionNode(tree)
                    {
                        Parent     = root, Value = maxGainThreshold,
                        Comparison = ComparisonKind.GreaterThan
                    }
                };

                inputSubset  = input.Submatrix(maxGainPartition[0]);
                outputSubset = output.Submatrix(maxGainPartition[0]);
                split(children[0], inputSubset, outputSubset);

                inputSubset  = input.Submatrix(maxGainPartition[1]);
                outputSubset = output.Submatrix(maxGainPartition[1]);
                split(children[1], inputSubset, outputSubset);

                root.Branches = new DecisionBranchNodeCollection(maxGainAttribute, children);
            }

            attributes[maxGainAttribute] = false;
        }
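All three split variants rely on the same invariant: each Parallel.For iteration writes only to its own slots of scores, partitions, and thresholds, so the shared arrays need no locking. The pattern in isolation (Compute and n are placeholders, not names from the listing):

            // Safe pattern: every iteration writes a distinct array element.
            double[] results = new double[n];
            Parallel.For(0, n, i =>
            {
                results[i] = Compute(i);   // no two iterations touch the same slot
            });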
Example #8
        void InitializePCpu()
        {
            int         N             = cc.c.N;
            const float DistanceScale = 100.0f;
            const float eps           = 2.22e-16f;
            int         bandSize      = Math.Min(N, MaxGroupNumberHyp * GroupSizeHyp);

            PBuf  = gpu.CreateBufferRW(bandSize * N, 4, 1);
            P2Buf = gpu.CreateBufferDynamic(bandSize * N, 4, 7); // dynamic buffer for fast uploading. Linked to Pcpu[] on HLSL.
            int blockSize = 128;                                 // Calculate so many rows per dispatch.

            cpuP = new float[N][];
            for (int i = 0; i < N; i++)
            {
                cpuP[i] = new float[N];
            }

            using (var distanceBuf = gpu.CreateBufferRW(blockSize * N, 4, 0))
            using (var stagingBuf = gpu.CreateStagingBuffer(distanceBuf))
            using (var sd = gpu.LoadShader("TsneDx.PartialDistance2.cso"))
            {
                gpu.SetShader(sd);
                for (int iBlock = 0; iBlock < N; iBlock += blockSize)
                {
                    cc.c.blockIdx = iBlock;
                    cc.Upload();
                    gpu.Run(blockSize);

                    // Rows [iBlock, iBlock2) of the lower-triangular distance
                    // matrix were produced by this dispatch; copy them back
                    // row by row (row r holds r entries).
                    int     iBlock2  = Math.Min(iBlock + blockSize, N);
                    int     blockLen = (iBlock2 * (iBlock2 - 1) - iBlock * (iBlock - 1)) / 2;
                    float[] ret      = gpu.ReadRange<float>(stagingBuf, distanceBuf, blockLen);
                    int     idx      = 0;
                    for (int row = iBlock; row < iBlock2; row++)
                    {
                        Array.Copy(ret, idx, cpuP[row], 0, row);
                        idx += row;
                    }
                }
            }

            double distanceFactor = double.MinValue;

            MT.For(1, N, i => {
                float maxV = cpuP[i].Max();
                lock (this)
                    distanceFactor = Math.Max(distanceFactor, maxV);
            });

            if (distanceFactor == 0)
            {
                throw new System.Exception("Distance metric degenerated: all components are zero.");
            }

            // Scale the distances to the manageable range [0, 100.0] to avoid
            // degradation in the exp function.
            distanceFactor = DistanceScale / distanceFactor;
            MT.For(1, N, i => {
                for (int j = 0; j < i; j++)
                {
                    cpuP[i][j] = (float)(cpuP[i][j] * distanceFactor);
                }
            });

            MT.For(0, N, i => {
                for (int j = 0; j < i; j++)
                {
                    cpuP[j][i] = cpuP[i][j];
                }
                cpuP[i][i] = 0;
            });

            int bSize = MaxGroupNumberHyp * GroupSizeHyp;

            using (var sd = gpu.LoadShader("TsneDx.Dist2Affinity.cso"))
            using (var stagingBuf = gpu.CreateStagingBuffer(PBuf))
            {
                gpu.SetShader(sd);
                for (int iBlock = 0; iBlock < N; iBlock += bSize)
                {
                    cc.c.blockIdx = iBlock;
                    cc.Upload();
                    int iBlock2 = Math.Min(N, iBlock + bSize);

                    // Upload one band of rows, convert distances to affinities
                    // on the GPU, then read the band back.
                    using (var ws = gpu.NewWriteStream(PBuf))
                    {
                        for (int row = iBlock; row < iBlock2; row++)
                            ws.WriteRange(cpuP[row]);
                    }
                    gpu.Run(MaxGroupNumberHyp);
                    using (var rs = gpu.NewReadStream(stagingBuf, PBuf))
                    {
                        for (int row = iBlock; row < iBlock2; row++)
                            rs.ReadRange(cpuP[row], 0, N);
                    }
                }
            }

            double sum = 0;

            MT.For(0, N, i => {
                double sum2 = 0.0;
                for (int j = i + 1; j < N; j++)
                {
                    cpuP[i][j] += cpuP[j][i];
                    sum2       += cpuP[i][j];
                }
                lock (this)
                    sum += sum2;
            });

            if (sum == 0)
            {
                throw new System.Exception("Perplexity too small!");
            }

            sum *= 2;
            MT.For(0, N, i => {
                for (int j = i + 1; j < N; j++)
                {
                    cpuP[i][j] = (float)Math.Max(cpuP[i][j] / sum, eps);
                    cpuP[j][i] = cpuP[i][j];
                }
                cpuP[i][i] = 1.0f;
            });
        }
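MT.For is this project's own multithreading helper rather than a BCL type. A minimal sketch of what such a wrapper typically looks like (an assumption, not the actual TsneDx implementation):

        // Hypothetical MT helper: a thin wrapper over the TPL scheduler.
        static class MT
        {
            public static void For(int fromInclusive, int toExclusive, Action<int> body)
            {
                System.Threading.Tasks.Parallel.For(fromInclusive, toExclusive, body);
            }
        }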