Example #1
 /// <summary>
 /// Executes a for loop with 64-bit indexes and thread-local data in which iterations
 /// may run in parallel, loop options can be configured, and the state of the loop
 /// can be monitored and manipulated.
 /// </summary>
 public static SystemParallelLoopResult For<TLocal>(long fromInclusive, long toExclusive,
                                                    SystemParallelOptions parallelOptions, Func<TLocal> localInit,
                                                    Func<long, SystemParallelLoopState, TLocal, TLocal> body, Action<TLocal> localFinally)
 {
     ExceptionProvider.ThrowUncontrolledInvocationException(nameof(SystemParallel.For));
     return SystemParallel.For(fromInclusive, toExclusive, parallelOptions, localInit, body, localFinally);
 }
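
This wrapper flags the call as uncontrolled and then forwards to the standard System.Threading.Tasks.Parallel.For<TLocal> overload. A minimal usage sketch of that underlying overload, summing a range with per-task partial sums (the range and variable names are illustrative):

using System;
using System.Threading;
using System.Threading.Tasks;

long total = 0;
Parallel.For<long>(0L, 1_000_000L, new ParallelOptions(),
    () => 0L,                                    // localInit: per-task partial sum
    (i, state, local) => local + i,              // body: accumulate without locking
    local => Interlocked.Add(ref total, local)); // localFinally: merge each partial once
Console.WriteLine(total);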
Example #2
        /// <summary>
        /// Executes a for loop in which iterations may run in parallel.
        /// </summary>
        public static SystemParallelLoopResult For(int fromInclusive, int toExclusive, Action<int> body)
        {
            if (CoyoteRuntime.IsExecutionControlled)
            {
                return For(fromInclusive, toExclusive, new SystemParallelOptions(), body);
            }

            return SystemParallel.For(fromInclusive, toExclusive, body);
        }
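
When CoyoteRuntime.IsExecutionControlled is set, the call is rerouted through the options-taking overload so the test scheduler can control the interleavings; otherwise it falls straight through to System.Threading.Tasks.Parallel. A plain usage sketch of the wrapped API (the array is illustrative):

using System;
using System.Threading.Tasks;

double[] values = { 1, 2, 3, 4 };
Parallel.For(0, values.Length, i => values[i] *= values[i]); // square each element in place
Console.WriteLine(string.Join(", ", values));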
Example #3
 public void Run(int matrixDimension, Action<int, int> loopAction)
 {
     Tpl.For(0, matrixDimension, (int i) =>
     {
         Tpl.For(i + 1, matrixDimension, (int j) =>
         {
             loopAction(i, j);
         });
     });
 }
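
Since the outer Tpl.For already saturates the available cores for CPU-bound work, nesting a second parallel loop inside it rarely helps and adds scheduling overhead. A sketch of a common alternative that keeps the inner loop sequential, assuming Tpl is an alias for System.Threading.Tasks.Parallel:

public void Run(int matrixDimension, Action<int, int> loopAction)
{
    // Visit each unordered pair (i, j) with i < j exactly once; only the
    // outer loop is parallel, which usually suffices for CPU-bound work.
    Tpl.For(0, matrixDimension, (int i) =>
    {
        for (int j = i + 1; j < matrixDimension; j++)
        {
            loopAction(i, j);
        }
    });
}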
Example #4
        /// <summary>
        /// Executes a for loop in which iterations may run in parallel and loop options
        /// can be configured.
        /// </summary>
        public static SystemParallelLoopResult For(int fromInclusive, int toExclusive,
                                                   SystemParallelOptions parallelOptions, Action<int> body)
        {
            if (CoyoteRuntime.IsExecutionControlled)
            {
                return SystemParallel.For(fromInclusive, toExclusive, new SystemParallelOptions()
                {
                    CancellationToken = parallelOptions.CancellationToken,
                    MaxDegreeOfParallelism = MaxDegreeOfParallelism,
                    TaskScheduler = CoyoteRuntime.Current.ControlledTaskScheduler
                }, body);
            }

            return SystemParallel.For(fromInclusive, toExclusive, parallelOptions, body);
        }
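
The controlled branch preserves the caller's CancellationToken but overrides MaxDegreeOfParallelism and the TaskScheduler so Coyote can serialize the interleavings. For comparison, a hedged sketch of how a caller typically populates ParallelOptions (the values are illustrative):

using System;
using System.Threading;
using System.Threading.Tasks;

var cts = new CancellationTokenSource();
var options = new ParallelOptions
{
    MaxDegreeOfParallelism = 2,    // cap the number of concurrent workers
    CancellationToken = cts.Token  // lets the loop observe cancellation
};

// Throws OperationCanceledException if cts is cancelled while the loop runs.
Parallel.For(0, 100, options, i => Console.Write('.'));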
Example #5
        private bool IsWhiteRectangle(Bitmap image, IReadOnlyList<IntPoint> rectanglePoints)
        {
            BitmapData data =
                image.LockBits(
                    new Rectangle(rectanglePoints[0].X, rectanglePoints[0].Y,
                                  rectanglePoints[1].X - rectanglePoints[0].X,
                                  rectanglePoints[3].Y - rectanglePoints[0].Y),
                    ImageLockMode.ReadOnly, image.PixelFormat);

            var bytesPerPixel    = Image.GetPixelFormatSize(image.PixelFormat) / 8;
            var isWhiteRectangle = true;

            unsafe
            {
                // Number of pixels per line * bytesPerPixel
                int   width      = data.Width * bytesPerPixel;
                byte *firstPixel = (byte *)data.Scan0;
                Parallel.For(0, data.Height, (y, state) =>
                {
                    byte *currentLine = firstPixel + y * data.Stride;
                    for (int x = 0; x < width; x += bytesPerPixel)
                    {
                        // A pixel is white only when all three channels are 255,
                        // so any channel below 255 disqualifies the rectangle.
                        if (currentLine[x] != 255 || currentLine[x + 1] != 255 || currentLine[x + 2] != 255)
                        {
                            isWhiteRectangle = false;
                            state.Break();
                        }
                    }
                });
            }
            image.UnlockBits(data);
            return isWhiteRectangle;
        }
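
state.Break() guarantees that iterations below the breaking index still complete, which matters for loops that must respect ordering; for a pure "does any pixel fail?" predicate like this one, state.Stop() abandons all remaining iterations and is usually cheaper. The flag write is a benign race because every writer stores the same value. A self-contained sketch of the Stop variant (RowIsWhite and rowCount are hypothetical):

using System;
using System.Threading.Tasks;

// Hypothetical per-row predicate and row count, for illustration only.
static bool RowIsWhite(int y) => y < 700;
int rowCount = 1000;

bool allWhite = true;
Parallel.For(0, rowCount, (y, state) =>
{
    if (!RowIsWhite(y))
    {
        allWhite = false;   // benign race: every writer stores the same value
        state.Stop();       // abandon all remaining iterations
    }
});
Console.WriteLine(allWhite);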
Example #6
        private void split(DecisionNode root, double[][] input, int[] output, int height)
        {

            // 2. If all examples are for the same class, return the single-node
            //    tree with the output label corresponding to this common class.
            double entropy = Statistics.Tools.Entropy(output, outputClasses);

            if (entropy == 0)
            {
                if (output.Length > 0)
                    root.Output = output[0];
                return;
            }

            // 3. If the set of predicting attributes is empty, then return the single-node
            //    tree with the output label corresponding to the most common value of
            //    the target attribute in the examples.

            // how many variables have been used less than the limit
            int candidateCount = attributeUsageCount.Count(x => x < join);

            if (candidateCount == 0 || (maxHeight > 0 && height == maxHeight))
            {
                root.Output = Statistics.Tools.Mode(output);
                return;
            }


            // 4. Otherwise, try to select the attribute which
            //    best explains the data sample subset.

            double[] scores = new double[candidateCount];
            double[] thresholds = new double[candidateCount];
            int[][][] partitions = new int[candidateCount][][];

            // Retrieve candidate attribute indices
            int[] candidates = new int[candidateCount];
            for (int i = 0, k = 0; i < attributeUsageCount.Length; i++)
            {
                if (attributeUsageCount[i] < join)
                    candidates[k++] = i;
            }


            // For each attribute in the data set
#if SERIAL
            for (int i = 0; i < scores.Length; i++)
#else
            Parallel.For(0, scores.Length, i =>
#endif
            {
                scores[i] = computeGainRatio(input, output, candidates[i],
                    entropy, out partitions[i], out thresholds[i]);
            }
#if !SERIAL
);
#endif

            // Select the attribute with maximum gain ratio
            int maxGainIndex; scores.Max(out maxGainIndex);
            var maxGainPartition = partitions[maxGainIndex];
            var maxGainAttribute = candidates[maxGainIndex];
            var maxGainRange = inputRanges[maxGainAttribute];
            var maxGainThreshold = thresholds[maxGainIndex];

            // Mark this attribute as already used
            attributeUsageCount[maxGainAttribute]++;

            double[][] inputSubset;
            int[] outputSubset;

            // Now, create next nodes and pass those partitions as their responsibilities. 
            if (tree.Attributes[maxGainAttribute].Nature == DecisionVariableKind.Discrete)
            {
                // This is a discrete nature attribute. We will branch at each
                // possible value for the discrete variable and call recursion.
                DecisionNode[] children = new DecisionNode[maxGainPartition.Length];

                // Create a branch for each possible value
                for (int i = 0; i < children.Length; i++)
                {
                    children[i] = new DecisionNode(tree)
                    {
                        Parent = root,
                        Value = i + maxGainRange.Min,
                        Comparison = ComparisonKind.Equal,
                    };

                    inputSubset = input.Submatrix(maxGainPartition[i]);
                    outputSubset = output.Submatrix(maxGainPartition[i]);
                    split(children[i], inputSubset, outputSubset, height + 1); // recursion
                }

                root.Branches.AttributeIndex = maxGainAttribute;
                root.Branches.AddRange(children);
            }

            else if (maxGainPartition.Length > 1)
            {
                // This is a continuous nature attribute, and we achieved two partitions
                // using the partitioning scheme. We will branch on two possible settings:
                // either the value is greater than a currently detected optimal threshold 
                // or it is less.

                DecisionNode[] children = 
                {
                    new DecisionNode(tree) 
                    {
                        Parent = root, Value = maxGainThreshold,
                        Comparison = ComparisonKind.LessThanOrEqual 
                    },

                    new DecisionNode(tree)
                    {
                        Parent = root, Value = maxGainThreshold,
                        Comparison = ComparisonKind.GreaterThan
                    }
                };

                // Create a branch for lower values
                inputSubset = input.Submatrix(maxGainPartition[0]);
                outputSubset = output.Submatrix(maxGainPartition[0]);
                split(children[0], inputSubset, outputSubset, height + 1);

                // Create a branch for higher values
                inputSubset = input.Submatrix(maxGainPartition[1]);
                outputSubset = output.Submatrix(maxGainPartition[1]);
                split(children[1], inputSubset, outputSubset, height + 1);

                root.Branches.AttributeIndex = maxGainAttribute;
                root.Branches.AddRange(children);
            }
            else
            {
                // This is a continuous nature attribute, but all variables are equal
                // to a constant. If there is only a constant value as the predictor 
                // and there are multiple output labels associated with this constant
                // value, there isn't much we can do. This node will be a leaf.

                // We will set the class label for this node as the
                // majority of the currently selected output classes.

                outputSubset = output.Submatrix(maxGainPartition[0]);
                root.Output = Statistics.Tools.Mode(outputSubset);
            }

            attributeUsageCount[maxGainAttribute]--;
        }
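
The Parallel.For above needs no locks because iteration i writes only to its own slots scores[i], thresholds[i], and partitions[i]. A minimal self-contained sketch of this scatter-into-own-slot pattern (candidates and Evaluate are hypothetical):

using System;
using System.Threading.Tasks;

// Hypothetical candidate indices and scoring function, for illustration only.
int[] candidates = { 3, 7, 11 };
static double Evaluate(int attribute) => 1.0 / (attribute + 1);

double[] scores = new double[candidates.Length];
Parallel.For(0, scores.Length, i =>
{
    // Each iteration owns exactly one slot, so no synchronization is needed.
    scores[i] = Evaluate(candidates[i]);
});
Console.WriteLine(string.Join(", ", scores));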
Example #7
        private void split(DecisionNode root, int[][] input, int[] output)
        {

            // 2. If all examples are for the same class, return the single-node
            //    tree with the output label corresponding to this common class.
            double entropy = Statistics.Tools.Entropy(output, outputClasses);

            if (entropy == 0)
            {
                if (output.Length > 0)
                    root.Output = output[0];
                return;
            }

            // 3. If the set of predicting attributes is empty, then return the single-node
            //    tree with the output label corresponding to the most common value of
            //    the target attribute in the examples.
            int predictors = attributes.Count(x => x == false);

            if (predictors <= attributes.Length - maxHeight)
            {
                root.Output = Statistics.Tools.Mode(output);
                return;
            }


            // 4. Otherwise, try to select the attribute which
            //    best explains the data sample subset.

            double[] scores = new double[predictors];
            double[] entropies = new double[predictors];
            int[][][] partitions = new int[predictors][][];

            // Retrieve candidate attribute indices
            int[] candidates = new int[predictors];
            for (int i = 0, k = 0; i < attributes.Length; i++)
                if (!attributes[i]) candidates[k++] = i;


            // For each attribute in the data set
#if SERIAL
            for (int i = 0; i < scores.Length; i++)
#else
            Parallel.For(0, scores.Length, i =>
#endif
            {
                scores[i] = computeGainRatio(input, output, candidates[i],
                    entropy, out partitions[i]);
            }
#if !SERIAL
);
#endif

            // Select the attribute with maximum gain ratio
            int maxGainIndex; scores.Max(out maxGainIndex);
            var maxGainPartition = partitions[maxGainIndex];
            var maxGainEntropy = entropies[maxGainIndex];
            var maxGainAttribute = candidates[maxGainIndex];
            var maxGainRange = inputRanges[maxGainAttribute];

            attributes[maxGainAttribute] = true;

            // Now, create next nodes and pass those partitions as their responsibilities.
            DecisionNode[] children = new DecisionNode[maxGainPartition.Length];

            for (int i = 0; i < children.Length; i++)
            {
                children[i] = new DecisionNode(tree);
                children[i].Parent = root;
                children[i].Comparison = ComparisonKind.Equal;
                children[i].Value = i + maxGainRange.Min;


                int[][] inputSubset = input.Submatrix(maxGainPartition[i]);
                int[] outputSubset = output.Submatrix(maxGainPartition[i]);

                split(children[i], inputSubset, outputSubset); // recursion

                if (children[i].IsLeaf)
                {
                    // If the resulting node is a leaf, and it has not
                    // been assigned a value because there were no available
                    // output samples in this category, we will be assigning
                    // the most common label for the current node to it.
                    if (!Rejection && !children[i].Output.HasValue)
                        children[i].Output = Statistics.Tools.Mode(output);
                }
            }


            attributes[maxGainAttribute] = false;

            root.Branches.AttributeIndex = maxGainAttribute;
            root.Branches.AddRange(children);
        }
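
For reference, assuming computeGainRatio follows the standard C4.5 definition, the score maximized above is

    \mathrm{GainRatio}(S, A) = \frac{H(S) - \sum_{v} \frac{|S_v|}{|S|} H(S_v)}{-\sum_{v} \frac{|S_v|}{|S|} \log_2 \frac{|S_v|}{|S|}}, \qquad H(S) = -\sum_{c} p_c \log_2 p_c,

where S_v is the subset of samples taking value v on attribute A and p_c is the fraction of samples in class c; the numerator is the information gain and the denominator the split information, which penalizes attributes with many values.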
Example #8
        private void split(DecisionNode root, int[][] input, int[] output)
        {
            // 2. If all examples are for the same class, return the single-node
            //    tree with the output label corresponding to this common class.
            double entropy = Statistics.Tools.Entropy(output, outputClasses);

            if (entropy == 0)
            {
                if (output.Length > 0)
                {
                    root.Output = output[0];
                }
                return;
            }

            // 3. If the set of predicting attributes is empty, then return the single-node
            //    tree with the output label corresponding to the most common value of
            //    the target attribute in the examples.
            int predictors = attributes.Count(x => x == false);

            if (predictors == 0)
            {
                root.Output = Statistics.Tools.Mode(output);
                return;
            }


            // 4. Otherwise, try to select the attribute which
            //    best explains the data sample subset.

            double[]  scores     = new double[predictors];
            double[]  entropies  = new double[predictors];
            int[][][] partitions = new int[predictors][][];

            // Retrieve candidate attribute indices
            int[] candidates = new int[predictors];
            for (int i = 0, k = 0; i < attributes.Length; i++)
            {
                if (!attributes[i])
                {
                    candidates[k++] = i;
                }
            }


            // For each attribute in the data set
            Parallel.For(0, scores.Length, i =>
            {
                scores[i] = computeGainRatio(input, output, candidates[i],
                                             entropy, out partitions[i]);
            });

            // Select the attribute with maximum gain ratio
            int maxGainIndex; scores.Max(out maxGainIndex);
            var maxGainPartition = partitions[maxGainIndex];
            var maxGainEntropy   = entropies[maxGainIndex];
            var maxGainAttribute = candidates[maxGainIndex];
            var maxGainRange     = inputRanges[maxGainAttribute];

            attributes[maxGainAttribute] = true;

            // Now, create next nodes and pass those partitions as their responsibilities.
            DecisionNode[] children = new DecisionNode[maxGainPartition.Length];

            for (int i = 0; i < children.Length; i++)
            {
                children[i]            = new DecisionNode(tree);
                children[i].Parent     = root;
                children[i].Comparison = ComparisonKind.Equal;
                children[i].Value      = i + maxGainRange.Min;


                int[][] inputSubset  = input.Submatrix(maxGainPartition[i]);
                int[]   outputSubset = output.Submatrix(maxGainPartition[i]);

                split(children[i], inputSubset, outputSubset); // recursion
            }

            attributes[maxGainAttribute] = false;

            root.Branches = new DecisionBranchNodeCollection(maxGainAttribute, children);
        }
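
scores.Max(out maxGainIndex) appears to be an Accord.NET extension that returns the maximum value while reporting its position through the out parameter. A plain equivalent without the library dependency, for clarity:

// Find the index of the largest score with an ordinary scan.
int maxGainIndex = 0;
for (int i = 1; i < scores.Length; i++)
{
    if (scores[i] > scores[maxGainIndex])
        maxGainIndex = i;
}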
Example #9
 /// <summary>
 /// Executes a for loop in which iterations may run in parallel, loop options can
 /// be configured, and the state of the loop can be monitored and manipulated.
 /// </summary>
 public static SystemParallelLoopResult For(int fromInclusive, int toExclusive,
                                            SystemParallelOptions parallelOptions, Action<int, SystemParallelLoopState> body)
 {
     ExceptionProvider.ThrowUncontrolledInvocationException(nameof(SystemParallel.For));
     return SystemParallel.For(fromInclusive, toExclusive, parallelOptions, body);
 }
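
This overload is flagged as uncontrolled and simply forwards the call. For reference, a sketch of how the loop state and the returned ParallelLoopResult interact in the underlying API (the range and break index are illustrative):

using System;
using System.Threading.Tasks;

ParallelLoopResult result = Parallel.For(0, 1000, new ParallelOptions(),
    (i, state) =>
    {
        if (i == 500)
        {
            state.Break(); // iterations below 500 still complete; higher ones are skipped
        }
    });

// IsCompleted is false after a Break; LowestBreakIteration reports the index.
Console.WriteLine($"{result.IsCompleted}, broke at {result.LowestBreakIteration}");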
Example #10
 /// <summary>
 /// Executes a for loop with 64-bit indexes in which iterations may run in parallel
 /// and the state of the loop can be monitored and manipulated.
 /// </summary>
 public static SystemParallelLoopResult For(long fromInclusive, long toExclusive,
                                            Action<long, SystemParallelLoopState> body)
 {
     ExceptionProvider.ThrowUncontrolledInvocationException(nameof(SystemParallel.For));
     return SystemParallel.For(fromInclusive, toExclusive, body);
 }
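
The 64-bit index overload matters when the iteration range can exceed int.MaxValue, for example when walking byte offsets of very large data in fixed-size chunks. A small self-contained sketch (chunk size and total length are illustrative):

using System.Threading.Tasks;

const long chunkSize = 4096;
const long totalBytes = 5_000_000_000; // deliberately larger than int.MaxValue
Parallel.For(0L, totalBytes / chunkSize, (i, state) =>
{
    long offset = i * chunkSize;
    // process the chunk starting at byte offset `offset` (sketch only)
});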