Example #1
        private void resetBoxEstimation(CvMat input, ref CvMat hue, ref CvMat normalize)
        {
            boxEstimationType = BoxEstimationType.NONE;
            // this logic lives here (and not somewhere else) so that we don't have to calculate hue/normalize twice in a single frame
            // that's because hue/normalize are also needed at a later stage of frame processing

            // this check looks redundant right after the reset above, but it lets me easily swap the priority between hue and normalize
            // in the end, one of the two will stay in first place and the check will be removed...
            if (boxEstimationType == BoxEstimationType.NONE)
            {
                hue = MatOps.BGRtoHue(input);
                //MatOps.NewWindowShow( hue, "HUE-processed" );
                if (estimateBoxHint(hue, ref floodHueTolerance))
                {
                    boxEstimationType = BoxEstimationType.HUE;
                }
            }

            if (boxEstimationType == BoxEstimationType.NONE)
            {
                normalize = MatOps.MyNormalize(input);
                MatOps.NewWindowShow(normalize, "NORMALIZE-processed");
                if (estimateBoxHint(normalize, ref floodNormTolerance))
                {
                    boxEstimationType = BoxEstimationType.NORMALIZE;
                }
            }
        }
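
For context, hue and normalize are passed by ref so that the caller can compute them here once and reuse them for the rest of the frame, as the first comment explains. The snippet below is a minimal sketch of how a caller might wire the two methods together; the BoxEstimationType member names and the processFrame method are assumptions for illustration, and the fields used above (boxEstimationType, boxEstimatedValue, floodHueTolerance, floodNormTolerance) are assumed to be declared elsewhere on the same class, since their declarations are not part of this excerpt.

        // Hypothetical context, not taken from the original source: the enum used above
        // and a caller that reuses the hue/normalize mats for the rest of the frame.
        private enum BoxEstimationType { NONE, HUE, NORMALIZE }

        private void processFrame(CvMat input)
        {
            CvMat hue       = null;
            CvMat normalize = null;

            // compute hue/normalize at most once per frame...
            resetBoxEstimation(input, ref hue, ref normalize);

            // ...and reuse whichever of them was actually created
            CvMat roi = detectROI(input, ref hue, ref normalize);

            // further frame processing would continue from roi here
        }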
Example #2
        // => hue and normalize, if null, may be created and assigned
        // <= ROI
        private CvMat detectROI(CvMat input, ref CvMat hue, ref CvMat normalize)
        {
            // TODO : Like I said above, if I get the minimum/maximum values, I have an accurate lowerBound/upperBound pair to work with!!!
            CvMat    roi;
            CvScalar lowerBound;
            CvScalar upperBound;

            // IDEA 3:
            // Determine whether I should check for "features" in the "thresholded" image, or in a cropped grayscale version of the original one.
            // For now, let's search the thresholded one...
            if (boxEstimationType == BoxEstimationType.HUE)
            {
                roi        = MatOps.CopySize(input, MatrixType.U8C1);
                lowerBound = boxEstimatedValue - floodHueTolerance;                     // TODO : these bounds should wrap around at the hue maximum (+-(MAX VALUE)); see the sketch after this example
                upperBound = boxEstimatedValue + floodHueTolerance;
                if (hue == null)
                {
                    hue = MatOps.BGRtoHue(input);
                }
                hue.InRangeS(lowerBound, upperBound, roi);
            }
            else if (boxEstimationType == BoxEstimationType.NORMALIZE)
            {
                // TODO : must investigate, the range check doesn't return any pixels
                roi        = MatOps.CopySize(input, MatrixType.U8C1);
                lowerBound = boxEstimatedValue - floodNormTolerance;
                upperBound = boxEstimatedValue + floodNormTolerance;
                if (normalize == null)
                {
                    normalize = MatOps.MyNormalize(input);
                }
                normalize.InRangeS(lowerBound, upperBound, roi);
            }
            else
            {
                // Couldn't estimate either way? We are off to a bad start, but let's try to see whether features can be extracted anyway.
                roi = MatOps.ConvertChannels(input);                   // we are already losing valuable info here!!
            }

            return roi;
        }
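
Regarding the TODO on the hue bounds: an 8-bit OpenCV hue channel runs 0..179, so a fixed tolerance band around boxEstimatedValue can spill past either end of that range, and a single InRangeS call then misses the wrapped part. The helper below is a hedged sketch of one way to handle the wrap-around, assuming the estimated value and tolerance are plain doubles and that the tolerance stays below 90; inRangeHueWrapped is a hypothetical name rather than something from the original code, and Cv.Or is OpenCvSharp's wrapper for cvOr.

        // Hypothetical helper (assumption, not from the original code): builds a hue mask
        // that respects wrap-around at the 0/180 boundary of an 8-bit OpenCV hue channel.
        private void inRangeHueWrapped(CvMat hue, double center, double tolerance, CvMat mask)
        {
            double lo = center - tolerance;
            double hi = center + tolerance;

            if (lo >= 0 && hi <= 179)
            {
                // band fits entirely inside the hue range: a single InRangeS is enough
                hue.InRangeS(new CvScalar(lo), new CvScalar(hi), mask);
            }
            else
            {
                // band spills over one end: mask the two partial bands separately and OR them
                // (InRangeS treats the upper bound as exclusive, hence 180 for the top band)
                CvMat maskWrapLow  = MatOps.CopySize(hue, MatrixType.U8C1);
                CvMat maskWrapHigh = MatOps.CopySize(hue, MatrixType.U8C1);
                hue.InRangeS(new CvScalar(0), new CvScalar(hi > 179 ? hi - 180 : hi), maskWrapLow);
                hue.InRangeS(new CvScalar(lo < 0 ? lo + 180 : lo), new CvScalar(180), maskWrapHigh);
                Cv.Or(maskWrapLow, maskWrapHigh, mask);
            }
        }

With such a helper, the HUE branch in detectROI could call inRangeHueWrapped(hue, estimated value, floodHueTolerance, roi) instead of building lowerBound/upperBound by hand.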