// ---- Beispiel #1 (Example 1; "0" was an aggregator vote count) ----
        /// <summary>
        /// Evens out local contrast of a grayscale image using CLAHE
        /// (clip limit 4, 16x16 tile grid).
        /// </summary>
        /// <param name="img">8-bit grayscale input image</param>
        /// <returns>new Mat holding the contrast-equalized image</returns>
        public static Mat ContrastAlignment(Image <Gray, Byte> img)
        {
            var equalized = new Mat();

            CvInvoke.CLAHE(img, 4, new Size(16, 16), equalized);
            return equalized;
        }
        /// <summary>
        /// Temporal-averages the incoming grayscale frame with the most recent
        /// frames, then applies CLAHE (clip limit 40, 8x8 tiles). The enhanced
        /// buffer is handed back through <paramref name="processedResizedFrame"/>.
        /// </summary>
        /// <param name="processedResizedFrame">frame to enhance; must be Image&lt;Gray, byte&gt;</param>
        public void ClaheAndAvrgImprove(ref IImage processedResizedFrame)
        {
            // Fix: the original had an empty branch for a failed cast and then
            // fell through to CvInvoke.CLAHE with a possibly-null 'result' and
            // a non-grayscale frame. Bail out early instead.
            Image <Gray, byte> grayFrame = processedResizedFrame as Image <Gray, byte>;

            if (grayFrame == null)
            {
                return;
            }

            // Lazily allocate the reusable accumulator, or reset it to zero.
            if (result == null)
            {
                result = new Image <Gray, byte>(processedResizedFrame.Size);
            }
            else
            {
                result.SetValue(new Gray(0));
            }

            // Ring buffer of the most recent frames used for averaging.
            imgsForAverage[indxForAverage] = grayFrame;
            indxForAverage = (indxForAverage + 1) % imgsForAverage.Length;

            // NOTE(review): the divisor is NumberOfFramesForAvrg even while the
            // buffer is not yet full, which darkens the first few frames —
            // presumably an intentional warm-up; confirm.
            for (int i = 0; i < imgsForAverage.Length; i++)
            {
                if (imgsForAverage[i] != null)
                {
                    result += imgsForAverage[i] / SettingsHolder.Instance.NumberOfFramesForAvrg;
                }
            }
            processedResizedFrame = result;

            // Contrast-limited adaptive equalization on the averaged frame.
            CvInvoke.CLAHE(processedResizedFrame, 40, new Size(8, 8), result);
            processedResizedFrame = result;
        }
        /// <summary>
        /// Applies CLAHE to the lightness channel of <paramref name="src"/> in
        /// Lab space and returns the result as a BGR image.
        /// NOTE: disposes <paramref name="src"/> (kept from the original contract).
        /// </summary>
        /// <param name="src">BGR input image; consumed by this call</param>
        /// <param name="clipLimit">CLAHE contrast clip limit</param>
        /// <returns>new BGR image with equalized lightness</returns>
        public static Image <Bgr, Byte> ClaheCorrection(Mat src, double clipLimit)
        {
            Mat lab = new Mat();

            CvInvoke.CvtColor(src, lab, ColorConversion.Bgr2Lab);
            src.Dispose();

            VectorOfMat channels = new VectorOfMat();

            CvInvoke.Split(lab, channels);
            Mat  dst  = new Mat();
            Size size = new Size(8, 8);

            // Fix: the clipLimit parameter was ignored (hard-coded 0.5).
            CvInvoke.CLAHE(channels[0], clipLimit, size, dst);

            dst.CopyTo(channels[0]);
            dst.Dispose();
            CvInvoke.Merge(channels, lab);
            channels.Dispose(); // fix: vector was leaked

            Mat clahe = new Mat();

            CvInvoke.CvtColor(lab, clahe, ColorConversion.Lab2Bgr);
            lab.Dispose();

            Image <Bgr, Byte> image = clahe.ToImage <Bgr, Byte>();

            clahe.Dispose();

            return(image);
        }
// ---- Beispiel #4 (Example 4; "0" was an aggregator vote count) ----
        /// <summary>
        /// Converts the image to YCrCb, runs CLAHE (clip limit 2, 8x8 tiles)
        /// on each channel, merges back and displays the BGR result.
        /// </summary>
        /// <param name="img">BGR input; grayscale images are rejected with a warning dialog</param>
        private void toAdaptiveYCrCbEqualization(Mat img)
        {
            if (img.NumberOfChannels < 3)
            {
                // Korean: "Grayscale images cannot be converted." / "Warning"
                // fix: unused MessageBoxResult local removed.
                MessageBox.Show("흑백 영상은 변환할 수 없습니다.", "경고");
                return;
            }

            CvInvoke.CvtColor(img, CurrentMat, ColorConversion.Bgr2YCrCb);
            var channels = CurrentMat.Split(); // split into Y, Cr, Cb planes

            // NOTE(review): CLAHE is applied to the chroma planes (Cr, Cb) as
            // well, which shifts colors; typical practice touches only Y —
            // kept as-is, confirm this is intended.
            CvInvoke.CLAHE(channels[0], 2, new System.Drawing.Size(8, 8), channels[0]);
            CvInvoke.CLAHE(channels[1], 2, new System.Drawing.Size(8, 8), channels[1]);
            CvInvoke.CLAHE(channels[2], 2, new System.Drawing.Size(8, 8), channels[2]);

            using (VectorOfMat temp = new VectorOfMat()) // fix: vector was leaked
            {
                temp.Push(channels[0]);
                temp.Push(channels[1]);
                temp.Push(channels[2]);

                CvInvoke.Merge(temp, CurrentMat);
            }

            // fix: split planes were leaked (Merge copies, so safe to dispose here).
            foreach (var channel in channels)
            {
                channel.Dispose();
            }

            CvInvoke.CvtColor(CurrentMat, CurrentMat, ColorConversion.YCrCb2Bgr);
            showImg(CurrentMat);
        }
        /// <summary>
        /// Reads an image from disk, applies CLAHE to the lightness channel in
        /// Lab space and saves the result next to the input as "&lt;name&gt;_clahe.jpg".
        /// </summary>
        /// <param name="inpath">path of the image to process</param>
        /// <param name="clipLimit">CLAHE contrast clip limit</param>
        public static void ClaheCorrection(String inpath, double clipLimit)
        {
            // fix: removed a Stopwatch that was started/stopped but never read.
            Mat src = CvInvoke.Imread(inpath, ImreadModes.AnyColor);
            Mat lab = new Mat();

            CvInvoke.CvtColor(src, lab, ColorConversion.Bgr2Lab);
            src.Dispose();

            VectorOfMat channels = new VectorOfMat();

            CvInvoke.Split(lab, channels);
            Mat  dst  = new Mat();
            Size size = new Size(8, 8);

            CvInvoke.CLAHE(channels[0], clipLimit, size, dst);

            dst.CopyTo(channels[0]);
            dst.Dispose();
            CvInvoke.Merge(channels, lab);
            channels.Dispose(); // fix: vector was leaked

            Mat clahe = new Mat();

            CvInvoke.CvtColor(lab, clahe, ColorConversion.Lab2Bgr);
            lab.Dispose();

            // fix: Path.Combine instead of manual "\\" concatenation.
            String outpath = Path.Combine(Path.GetDirectoryName(inpath),
                                          Path.GetFileNameWithoutExtension(inpath) + "_clahe.jpg");

            clahe.Save(outpath);

            clahe.Dispose();
        }
        /// <summary>
        /// Pre-processes an image for recognition, selecting the contrast
        /// strategy by <paramref name="mode"/>:
        /// 0 = contrast boost only, 1 = mild CLAHE + lighter contrast boost,
        /// 2 = stronger CLAHE only, anything else = no treatment.
        /// Modes 1 and 2 modify <paramref name="gray"/> in place.
        /// </summary>
        private static Image <Bgr, Byte> PreTreatment(Mat gray, int mode)
        {
            // fix: unreachable 'break' statements after 'return' removed (CS0162).
            switch (mode)
            {
            case 0:
                var contrastedBitmap2 = gray.Bitmap.Contrast(80);
                return(new Image <Bgr, byte>(contrastedBitmap2));

            case 1:
                CvInvoke.CLAHE(gray, 2, new Size(8, 8), gray);
                var contrastedBitmap = gray.Bitmap.Contrast(30);
                return(new Image <Bgr, byte>(contrastedBitmap));

            case 2:
                CvInvoke.CLAHE(gray, 6, new Size(8, 8), gray);
                return(new Image <Bgr, byte>(gray.Bitmap));

            default:
                return(new Image <Bgr, byte>(gray.Bitmap));
            }
        }
// ---- Beispiel #7 (Example 7; "0" was an aggregator vote count) ----
        /// <summary>
        /// Menu handler: converts the current PictureBox image to grayscale,
        /// applies CLAHE (clip limit 50, 8x8 tiles) and shows the result.
        /// </summary>
        private void ClaSheToolStripMenuItem_Click(object sender, EventArgs e)
        {
            // fix: the Bitmap copy, the grayscale image and the output Mat were
            // all leaked; dispose them once the PictureBox owns the new bitmap.
            using (var bmp = new Bitmap(pictureBox2.Image))
            using (var img = bmp.ToImage <Gray, byte>())
            using (var output = new Mat())
            {
                CvInvoke.CLAHE(img, 50, new Size(8, 8), output);
                pictureBox2.Image = output.ToBitmap();
            }
        }
        /// <summary>
        /// Applies CLAHE (clip limit 40, 8x8 tiles) to the frame, reusing a
        /// lazily-created output buffer, and hands the enhanced buffer back
        /// through <paramref name="processedResizedFrame"/>.
        /// </summary>
        public void ClaheImprove(ref IImage processedResizedFrame)
        {
            // Allocate the reusable destination image on first use only.
            result = result ?? new Image <Gray, byte>(processedResizedFrame.Size);

            CvInvoke.CLAHE(processedResizedFrame, 40, new Size(8, 8), result);
            processedResizedFrame = result;
        }
// ---- Beispiel #9 (Example 9; "0" was an aggregator vote count) ----
 /// <summary>
 /// Menu handler: grayscales the loaded source image, applies CLAHE
 /// (clip limit 20, 32x32 tiles) and shows the result in imageBox1.
 /// </summary>
 private void CLAHEToolStripMenuItem_Click(object sender, EventArgs e)
 {
     // Nothing to do until an image has been loaded.
     if (src.IsEmpty)
     {
         return;
     }

     im   = src.ToImage <Bgr, Byte>(); // colour working copy
     tmp  = im.Convert <Gray, Byte>(); // grayscale CLAHE input
     gray = tmp.Clone();               // destination, same size/type
     CvInvoke.CLAHE(tmp, 20, new Size(32, 32), gray);
     imageBox1.Image = gray;
 }
        /// <summary>
        /// Equalizes image brightness in place, either with CLAHE or plain
        /// global histogram equalization. Colour images are processed on the
        /// first channel of a YCrCb conversion; single-channel images directly.
        /// </summary>
        /// <param name="image">input Mat; modified in place and returned</param>
        /// <param name="useCLAHE">true = CLAHE with configured parameters, false = EqualizeHist</param>
        /// <returns>the (modified) input Mat</returns>
        private Mat HistogramEqualization(Mat image, bool useCLAHE)
        {
            bool isColor  = image.NumberOfChannels > 1;
            var  channels = new VectorOfMat();

            if (isColor)
            {
                CvInvoke.CvtColor(image, image, ColorConversion.Bgr2YCrCb);
                CvInvoke.Split(image, channels);
            }

            // Work on the luma plane for colour images, the image itself otherwise.
            Mat target = isColor ? channels[0] : image;

            if (useCLAHE)
            {
                // Clamp configured CLAHE parameters to sane minimums.
                CvInvoke.CLAHE(target,
                               (_configurations.CLAEH_CLIP_LIMIT < 1 ? 1 : _configurations.CLAEH_CLIP_LIMIT),
                               new Size((_configurations.CLAEH_TILE_X_SIZE < 1 ? 1 : _configurations.CLAEH_TILE_X_SIZE),
                                        (_configurations.CLAEH_TILE_Y_SIZE < 1 ? 1 : _configurations.CLAEH_TILE_Y_SIZE)),
                               target);
            }
            else
            {
                // fix: original always equalized channels[0] here, which threw
                // for single-channel images (the vector was never populated).
                CvInvoke.EqualizeHist(target, target);
            }

            if (isColor)
            {
                CvInvoke.Merge(channels, image);
                CvInvoke.CvtColor(image, image, ColorConversion.YCrCb2Bgr);
            }

            channels.Dispose(); // fix: vector was leaked

            return(image);
        }
// ---- Beispiel #11 (Example 11; "0" was an aggregator vote count) ----
        /// <summary>
        /// Enhance contrast of an image given as UMat by applying CLAHE to the
        /// L channel of a Lab conversion.
        /// NOTE(review): the merged result is returned still in Lab space — no
        /// Lab2Bgr conversion is performed here; confirm callers expect that.
        /// </summary>
        /// <param name="input">input image (BGR)</param>
        /// <param name="clipLimit">clip limit (default is 5)</param>
        /// <param name="tileGridSize">square tile size (default is 8)</param>
        /// <returns>image with enhanced contrast</returns>
        public static UMat IncreaseContrast(UMat input, double clipLimit = 5.0, int tileGridSize = 8)
        {
            UMat lab = new UMat(), cl = new UMat(), result = new UMat();

            CvInvoke.CvtColor(input, lab, ColorConversion.Bgr2Lab);
            UMat[] splitLAB = lab.Split();
            CvInvoke.CLAHE(splitLAB[0], clipLimit, new Size(tileGridSize, tileGridSize), cl);

            // fix: the VectorOfUMat and the split planes were leaked.
            using (var merged = new VectorOfUMat(cl, splitLAB[1], splitLAB[2]))
            {
                CvInvoke.Merge(merged, result);
            }

            foreach (UMat plane in splitLAB)
            {
                plane.Dispose();
            }
            lab.Dispose();
            cl.Dispose();
            return(result);
        }
// ---- Beispiel #12 (Example 12; "0" was an aggregator vote count) ----
 /// <summary>
 /// Equalizes the shared result image in place. True selects adaptive
 /// (CLAHE, 16x16 tiles, clip limit 0) equalization and sets
 /// thresholdAddition to 40; null or false selects plain global
 /// histogram equalization and sets thresholdAddition to 60.
 /// </summary>
 public static void EqualizeHistogram(bool?adaptiveEqualization)
 {
     if (adaptiveEqualization == true)
     {
         // NOTE(review): clip limit 0 — confirm a disabled clip is intended.
         CvInvoke.CLAHE(resultMatImage, 0d, new System.Drawing.Size(16, 16), resultMatImage);
         thresholdAddition = 40;
         return;
     }

     CvInvoke.EqualizeHist(resultMatImage, resultMatImage);
     thresholdAddition = 60;
 }
 /// <summary>
 /// Asynchronously equalizes perceived lightness: converts to Lab, runs
 /// CLAHE (clip limit 40, 4x4 tiles) on the L plane only, merges and
 /// converts back to BGR so colour is preserved.
 /// </summary>
 /// <param name="rgb">BGR input image (not disposed)</param>
 /// <returns>task producing a new lightness-corrected BGR image</returns>
 public static Task <Image <Bgr, Byte> > CorrectLightness(Image <Bgr, Byte> rgb)
 {
     return(Task.Run(delegate() {
         Image <Lab, Byte> lab = rgb.Convert <Lab, Byte>();
         Image <Gray, Byte>[] lab_planes = lab.Split();
         Image <Gray, Byte> lightness = new Image <Gray, byte>(rgb.Size);
         CvInvoke.CLAHE(lab_planes[0], 40, new Size(4, 4), lightness);
         VectorOfMat vm = new VectorOfMat(lightness.Mat, lab_planes[1].Mat, lab_planes[2].Mat);
         CvInvoke.Merge(vm, lab);
         Image <Bgr, Byte> dst = lab.Convert <Bgr, Byte>();
         vm.Dispose();
         lab.Dispose();
         // fix: the split planes were only null-ed before, never disposed.
         foreach (var plane in lab_planes)
         {
             plane.Dispose();
         }
         lightness.Dispose();
         return dst;
     }));
 }
        /// <summary>
        /// Builds a mask of the bright structures in <paramref name="input"/>:
        /// CLAHE (clip limit 100, 8x8 tiles) → threshold-to-zero at 154 →
        /// global equalization → binary noise mask cleaned by erode+dilate →
        /// masked copy of the equalized image.
        /// </summary>
        /// <param name="input">8-bit grayscale input (not modified)</param>
        /// <returns>new masked grayscale image</returns>
        public Image <Gray, byte> GenerateMask(Image <Gray, byte> input)
        {
            // fix: the original allocated TMP/TMP2 and immediately overwrote
            // them with clones (leaking the allocations); the clone of 'input'
            // was also redundant since CLAHE rewrites every destination pixel.
            Image <Gray, byte> enhanced = new Image <Gray, byte>(input.Size);
            CvInvoke.CLAHE(input, 100, new System.Drawing.Size(8, 8), enhanced);

            // Keep only pixels brighter than MaxEl, then stretch what remains.
            double MaxEl = 154;
            Image <Gray, byte> thresholded = enhanced.ThresholdToZero(new Gray(MaxEl));
            enhanced.Dispose();
            thresholded._EqualizeHist();

            // Binary mask with small speckles removed (erode then dilate).
            // fix: every intermediate image below was leaked in the original.
            Image <Gray, Byte> binary = thresholded.ThresholdBinary(new Gray(10), new Gray(255));
            Image <Gray, Byte> eroded = binary.Erode(1);
            binary.Dispose();
            Image <Gray, Byte> noiseMask = eroded.Dilate(1);
            eroded.Dispose();

            Image <Gray, byte> masked = thresholded.Copy(noiseMask);
            thresholded.Dispose();
            noiseMask.Dispose();

            return(masked);
        }