Example #1
File: LUT.cs Project: thegodi/Gygax
        public static void ApplyColormap(ref double[,] source, out Emgu.CV.Image<Bgr, Byte> destination, ColorMapType colorMapType)
        {
            // source is treated as [row, column], i.e. [height, width]
            double max = 0;

            for (int y = 0; y < source.GetLength(0); y++)
            {
                for (int x = 0; x < source.GetLength(1); x++)
                {
                    if (source[y, x] > max)
                    {
                        max = source[y, x];
                    }
                }
            }

            if (max == 0)
            {
                max = 1; // avoid dividing by zero for an all-zero source
            }

            // Image<,> constructors take (width, height)
            Image<Gray, byte> buffer = new Image<Gray, Byte>(source.GetLength(1), source.GetLength(0));

            for (int y = 0; y < source.GetLength(0); y++)
            {
                for (int x = 0; x < source.GetLength(1); x++)
                {
                    // scale to the 0-255 range expected by ApplyColorMap
                    buffer[y, x] = new Gray(source[y, x] * 255 / max);
                }
            }

            destination = new Image<Bgr, Byte>(source.GetLength(1), source.GetLength(0));
            CvInvoke.ApplyColorMap(buffer, destination, colorMapType);
        }
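A minimal call-site sketch for the helper above, assuming the usual Emgu CV namespaces (Emgu.CV, Emgu.CV.Structure, Emgu.CV.CvEnum) are imported and that the method lives on a static LUT class, as the file name suggests; the heat-map values and the output file name are made up for illustration:

        // hypothetical heat map with values outside the 0-255 range
        double[,] heat = new double[4, 4];
        heat[1, 2] = 3.7;
        heat[2, 1] = 9.2;

        Image<Bgr, byte> colored;
        LUT.ApplyColormap(ref heat, out colored, ColorMapType.Jet);
        colored.Save("heatmap.png");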
Example #2
        private void ProcessImage(List<string> lines)
        {
            Mat imageOriginal = CvInvoke.Imread(ImageRecievedName, LoadImageType.AnyColor);

            var imageWithHitsBgr = CreateHitImage(imageOriginal.Size, lines);

            // create a mask that has white circles wherever hits exist and is black everywhere else
            var mask = new Mat();

            CvInvoke.Threshold(imageWithHitsBgr, mask, 1, 255, ThresholdType.Binary);
            var inverseMask = new Mat();

            CvInvoke.BitwiseNot(mask, inverseMask);

            // mapping level of gray to ColorMap
            CvInvoke.ApplyColorMap(imageWithHitsBgr, imageWithHitsBgr, ColorMapType.Jet);
            // from mapped image remove everything except hits
            var imageWithHitsWithoutBackground = new Mat();

            CvInvoke.BitwiseAnd(imageWithHitsBgr, imageWithHitsBgr, imageWithHitsWithoutBackground, mask);

            // from the original image remove only the parts where hits happened
            var imageOriginalWithoutHits = new Mat();

            CvInvoke.BitwiseAnd(imageOriginal, imageOriginal, imageOriginalWithoutHits, inverseMask);
            // the result combines the original image without hits and the hit image mapped to the chosen ColorMap
            var result = new Mat();

            CvInvoke.Add(imageOriginalWithoutHits, imageWithHitsWithoutBackground, result);
            result.Save(ImageProcessedName);
        }
Example #3
        private static Image<Bgr, byte> Map(IImage image, ColorMapType palette)
        {
            var output = new Image<Bgr, byte>(image.Size);

            CvInvoke.ApplyColorMap(image, output, palette);

            return output;
        }
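The helper above colours any IImage that ApplyColorMap accepts (8-bit single-channel or BGR). A hedged call-site sketch, where grayImage stands in for whatever image the caller already has:

        // grayImage is assumed to exist already; created empty here only so the sketch compiles
        Image<Gray, byte> grayImage = new Image<Gray, byte>(640, 480);
        Image<Bgr, byte> jet = Map(grayImage, ColorMapType.Jet);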
Example #4
        public virtual void dispImg(UMat inImg)
        {
            // draw into whichever of the two display buffers is not currently shown
            UMat toUpdate = (imageBox1.Image == disp_1) ? disp_2 : disp_1;

            if (!showColorMap)
            {
                inImg.CopyTo(toUpdate);
            }
            else
            {
                CvInvoke.ApplyColorMap(inImg, toUpdate, ColorMapType.Jet);
            }
            if (run1disp_1 == 0 || run1disp_2 == 0)
            {
                run1disp_1 = disp_1.Bytes[0];
                run1disp_2 = disp_2.Bytes[0];
            }
            imageBox1.Image = toUpdate;
        }
Example #5
File: LUT.cs Project: thegodi/Gygax
        public static void ApplyColormap(ref Emgu.CV.Image<Gray, double> source, out Emgu.CV.Image<Bgr, Byte> destination, ColorMapType colorMapType, bool invert = false)
        {
            destination = new Image<Bgr, Byte>(source.Width, source.Height);

            var max = 0.0;

            for (int y = 0; y < source.Height; y++)
            {
                for (int x = 0; x < source.Width; x++)
                {
                    if (source[y, x].Intensity > max)
                    {
                        max = source[y, x].Intensity;
                    }
                }
            }

            if (max == 0)
            {
                max = 1; // avoid dividing by zero for an all-black source
            }

            Image<Gray, byte> bufferImage = new Image<Gray, Byte>(source.Width, source.Height);

            for (int y = 0; y < source.Height; y++)
            {
                for (int x = 0; x < source.Width; x++)
                {
                    if (invert)
                    {
                        bufferImage[y, x] = new Gray(255 - source[y, x].Intensity * 255 / max);
                    }
                    else
                    {
                        bufferImage[y, x] = new Gray(source[y, x].Intensity * 255 / max);
                    }
                }
            }

            CvInvoke.ApplyColorMap(bufferImage, destination, colorMapType);
        }
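A short sketch of calling the Image<Gray, double> overload above, including the invert flag; it assumes the same static LUT class, and depthImage is a hypothetical floating-point depth map:

        // hypothetical depth map; with invert = true the largest values map to the low end of the palette
        var depthImage = new Image<Gray, double>(320, 240);
        Image<Bgr, byte> visualised;
        LUT.ApplyColormap(ref depthImage, out visualised, ColorMapType.Jet, invert: true);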
Example #6
        private void ProcessFrame(object sender, EventArgs arg)
        {
            Mat frame = new Mat();

            _capture.Retrieve(frame, 0);

            Mat newFrame = new Mat();

            //CvInvoke.EdgePreservingFilter(frame, newFrame);
            //CvInvoke.Dct(frame, newFrame, DctType.Forward);
            //CvInvoke.CornerHarris(frame, newFrame, 4);
            //CvInvoke.ConvertPointsToHomogeneous(frame, newFrame);
            //CvInvoke.ConvertPointsFromHomogeneous(frame, newFrame);
            //CvInvoke.Blur(frame, newFrame, new System.Drawing.Size(100,100), new System.Drawing.Point(100,50));
            //CvInvoke.BoxFilter(frame, newFrame, DepthType.Default, new System.Drawing.Size(100, 100), new System.Drawing.Point(100, 50));
            //this.ImgMainZm.Image = newFrame;
            //return;

            if (this.CurFilterType == FilterType.ftNormal)
            {
                this.ImgMainZm.Image = frame;
            }
            else if (this.CurFilterType == FilterType.ftGray)
            {
                newFrame = new Mat();
                CvInvoke.CvtColor(frame, newFrame, ColorConversion.Bgr2Gray);
                this.ImgMainZm.Image = newFrame;
            }
            else if (this.CurFilterType == FilterType.ftSmallGray)
            {
                Mat grayFrame = new Mat();
                CvInvoke.CvtColor(frame, grayFrame, ColorConversion.Bgr2Gray);

                Mat smallGrayFrame = new Mat();
                CvInvoke.PyrDown(grayFrame, smallGrayFrame);

                this.ImgMainZm.Image = smallGrayFrame;
            }
            else if (this.CurFilterType == FilterType.ftSmoothGray)
            {
                Mat grayFrame = new Mat();
                CvInvoke.CvtColor(frame, grayFrame, ColorConversion.Bgr2Gray);

                Mat smallGrayFrame = new Mat();
                CvInvoke.PyrDown(grayFrame, smallGrayFrame);

                Mat smoothedGrayFrame = new Mat();
                CvInvoke.PyrUp(smallGrayFrame, smoothedGrayFrame);

                this.ImgMainZm.Image = smoothedGrayFrame;
            }
            else if (this.CurFilterType == FilterType.ftCanny)
            {
                Mat grayFrame = new Mat();
                CvInvoke.CvtColor(frame, grayFrame, ColorConversion.Bgr2Gray);

                Mat smallGrayFrame = new Mat();
                CvInvoke.PyrDown(grayFrame, smallGrayFrame);

                Mat smoothedGrayFrame = new Mat();
                CvInvoke.PyrUp(smallGrayFrame, smoothedGrayFrame);

                Mat cannyFrame = new Mat();
                CvInvoke.Canny(smoothedGrayFrame, cannyFrame, 100, 60);

                this.ImgMainZm.Image = cannyFrame;
            }
            else if (this.CurFilterType == FilterType.ftAutumn)
            {
                newFrame = new Mat();
                CvInvoke.ApplyColorMap(frame, newFrame, ColorMapType.Autumn);
                this.ImgMainZm.Image = newFrame;
            }
            else if (this.CurFilterType == FilterType.ftCool)
            {
                newFrame = new Mat();
                CvInvoke.ApplyColorMap(frame, newFrame, ColorMapType.Cool);
                this.ImgMainZm.Image = newFrame;
            }
            else if (this.CurFilterType == FilterType.ftBone)
            {
                newFrame = new Mat();
                CvInvoke.ApplyColorMap(frame, newFrame, ColorMapType.Bone);
                this.ImgMainZm.Image = newFrame;
            }
            else if (this.CurFilterType == FilterType.ftHot)
            {
                newFrame = new Mat();
                CvInvoke.ApplyColorMap(frame, newFrame, ColorMapType.Hot);
                this.ImgMainZm.Image = newFrame;
            }
            else if (this.CurFilterType == FilterType.ftHsv)
            {
                newFrame = new Mat();
                CvInvoke.ApplyColorMap(frame, newFrame, ColorMapType.Hsv);
                this.ImgMainZm.Image = newFrame;
            }
            else if (this.CurFilterType == FilterType.ftJet)
            {
                newFrame = new Mat();
                CvInvoke.ApplyColorMap(frame, newFrame, ColorMapType.Jet);
                this.ImgMainZm.Image = newFrame;
            }
            else if (this.CurFilterType == FilterType.ftOcean)
            {
                newFrame = new Mat();
                CvInvoke.ApplyColorMap(frame, newFrame, ColorMapType.Ocean);
                this.ImgMainZm.Image = newFrame;
            }
            else if (this.CurFilterType == FilterType.ftPink)
            {
                newFrame = new Mat();
                CvInvoke.ApplyColorMap(frame, newFrame, ColorMapType.Pink);
                this.ImgMainZm.Image = newFrame;
            }
            else if (this.CurFilterType == FilterType.ftRainbow)
            {
                newFrame = new Mat();
                CvInvoke.ApplyColorMap(frame, newFrame, ColorMapType.Rainbow);
                this.ImgMainZm.Image = newFrame;
            }
            else if (this.CurFilterType == FilterType.ftSpring)
            {
                newFrame = new Mat();
                CvInvoke.ApplyColorMap(frame, newFrame, ColorMapType.Spring);
                this.ImgMainZm.Image = newFrame;
            }
            else if (this.CurFilterType == FilterType.ftSummer)
            {
                newFrame = new Mat();
                CvInvoke.ApplyColorMap(frame, newFrame, ColorMapType.Summer);
                this.ImgMainZm.Image = newFrame;
            }
            else if (this.CurFilterType == FilterType.ftWinter)
            {
                newFrame = new Mat();
                CvInvoke.ApplyColorMap(frame, newFrame, ColorMapType.Winter);
                this.ImgMainZm.Image = newFrame;
            }
            else if (this.CurFilterType == FilterType.ftBitwiseNot)
            {
                newFrame = new Mat();
                CvInvoke.BitwiseNot(frame, newFrame);
                this.ImgMainZm.Image = newFrame;
            }
            else if (this.CurFilterType == FilterType.ftEdgePreservingFilter)
            {
                newFrame = new Mat();
                CvInvoke.EdgePreservingFilter(frame, newFrame);
                this.ImgMainZm.Image = newFrame;
            }
        }
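The colour-map branches in Example #6 differ only in the ColorMapType they pass to ApplyColorMap, so they could be collapsed into a lookup table. A sketch of that alternative shape, assuming the same FilterType enum and a System.Collections.Generic using directive; this is a refactoring suggestion, not the original author's code:

        // maps each colour-map filter to its palette; the remaining filters keep their own branches
        private static readonly Dictionary<FilterType, ColorMapType> ColorMapFilters =
            new Dictionary<FilterType, ColorMapType>
            {
                { FilterType.ftAutumn, ColorMapType.Autumn },
                { FilterType.ftJet, ColorMapType.Jet },
                { FilterType.ftWinter, ColorMapType.Winter },
                // ...and so on for the other colour-map filters
            };

        // inside ProcessFrame:
        if (ColorMapFilters.TryGetValue(this.CurFilterType, out ColorMapType mapType))
        {
            newFrame = new Mat();
            CvInvoke.ApplyColorMap(frame, newFrame, mapType);
            this.ImgMainZm.Image = newFrame;
        }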
Example #7
        private void ProcessImages(object sender, EventArgs e)
        {
            StereoImage<Bgr, byte> image = null;

            if (Model.Mode == SourceMode.Camera)
            {
                if (Controller.CameraIsEnabled())
                {
                    //image = Controller.GeatedStereoImage();
                }
            }
            else
            {
                if (Model.Image != null)
                {
                    image = Model.Image;
                }
            }

            if (image == null)
            {
                return;
            }

            SourceImage.ViewModel.Image = image;
            var temp = new StereoImage<Bgr, byte>();

            if (ResultImage.ViewModel.ShowLeft)
            {
                var model = (StereoSgbmModel)_viewModel.SgbmModel.Clone();
                model.Image1 = image.LeftImage.Convert<Gray, byte>();
                model.Image2 = image.RightImage.Convert<Gray, byte>();
                var map  = Stereo.Compute(model);
                var test = map.Convert<Bgr, byte>();
                CvInvoke.ApplyColorMap(test, test, ColorMapType.Rainbow);
                temp.LeftImage  = test;
                test            = test.SmoothGaussian(15);
                temp.RightImage = test;
            }
            if (ResultImage.ViewModel.ShowRight)
            {
                var model = (OpticalFlowModel)_viewModel.FlowModel.Clone();
                model.Image1 = image.LeftImage.Convert<Gray, byte>();
                model.Image2 = image.RightImage.Convert<Gray, byte>();
                var map = OpticalFlow.Compute(model);
                temp.RightImage = map.Convert<Bgr, byte>();
                if (temp.LeftImage == null)
                {
                    temp.LeftImage = temp.RightImage;
                }
            }
            ResultImage.ViewModel.Image = temp.Convert<Bgr, byte>();

            if (_viewModel.SaveImage)
            {
                if (_viewModel.AllowGrabImage)
                {
                    _viewModel.Images.Add(image);
                    ImageListBox.Items.Refresh();
                }
                if (_viewModel.AllowSaveImages)
                {
                    Controller.SaveImages(_viewModel.Images, DateTime.Now.Ticks.ToString());
                    _viewModel.Images.Clear();
                    ImageListBox.Items.Refresh();
                }
            }
        }
Example #8
        /// <summary>
        /// Main method for processing the image
        /// </summary>
        /// <param name="input"></param>
        private void ProcessFrame(Image<Bgr, Byte> input)
        {
            //input._EqualizeHist();

            if (prevgrayframe == null)
            {
                prevgrayframe = input.Convert<Gray, Byte>();
                preFeatures   = prevgrayframe.GoodFeaturesToTrack(1000, 0.05, 5.0, 3);
                prevgrayframe.FindCornerSubPix(preFeatures, new Size(5, 5), new Size(-1, -1), new MCvTermCriteria(25, 1.5d)); //This just increases the accuracy of the points
                //mixChannel_src = input.PyrDown().PyrDown().Convert<Hsv, float>()[0];
                return;
            }

            grayframe = input.Convert<Gray, Byte>();
            //apply the Optical flow
            Emgu.CV.OpticalFlow.PyrLK(prevgrayframe, grayframe, preFeatures[0], new Size(10, 10), 3, criteria, out curFeatures, out status, out error);
            Image<Gray, float> FlowX      = new Image<Gray, float>(grayframe.Size),
                               FlowY      = new Image<Gray, float>(grayframe.Size),
                               FlowAngle  = new Image<Gray, float>(grayframe.Size),
                               FlowLength = new Image<Gray, float>(grayframe.Size),
                               FlowResult = new Image<Gray, float>(grayframe.Size);

            #region Farneback method to display movement in colour intensity
            //Same as the CvInvoke method below but a bit simpler
            Emgu.CV.OpticalFlow.Farneback(prevgrayframe, grayframe, FlowX, FlowY, 0.5, 1, 10, 2, 5, 1.1, OPTICALFLOW_FARNEBACK_FLAG.USE_INITIAL_FLOW);
            //CvInvoke.cvShowImage("FlowX", FlowX); //Uncomment to see in external window
            //CvInvoke.cvShowImage("FlowY", FlowY);//Uncomment to see in external window
            //CvInvoke.cvWaitKey(1); //Uncomment to see in external window (NOTE: You only need this line once)

            //CvInvoke Method
            //IntPtr Flow = CvInvoke.cvCreateImage(grayframe.Size, Emgu.CV.CvEnum.IPL_DEPTH.IPL_DEPTH_32F, 2);
            //CvInvoke.cvCalcOpticalFlowFarneback(prevgrayframe, grayframe, Flow, 0.5, 1, 10, 2, 5, 1.1, OPTICALFLOW_FARNEBACK_FLAG.USE_INITIAL_FLOW);
            //CvInvoke.cvSplit(Flow, FlowX, FlowY, IntPtr.Zero, IntPtr.Zero);
            //CvInvoke.cvShowImage("FlowFX", FlowX); //Uncomment to see in external window
            //CvInvoke.cvShowImage("FlowFY", FlowY); //Uncomment to see in external window
            //CvInvoke.cvWaitKey(1); //Uncomment to see in external window (NOTE: You only need this line once)

            #region All this is accomplished in the region below
            // for (int i = 0; i < FlowX.Width; i++)
            // {

            //     for (int j = 0; j < FlowX.Height; j++)
            //     {
            //         FlowLength.Data[j, i, 0] = (float)(Math.Sqrt((FlowX.Data[j, i, 0] * FlowX.Data[j, i, 0]) + (FlowY.Data[j, i, 0] * FlowY.Data[j, i, 0]))); //Gradient
            //         if (FlowLength.Data[j, i, 0] < 0)
            //         {
            //             FlowAngle.Data[j, i, 0] = (float)(Math.Atan2(FlowY.Data[j, i, 0], FlowX.Data[j, i, 0]) * 180 / Math.PI);
            //         }
            //         else
            //         {
            //             FlowAngle.Data[j, i, 0] = (float)(Math.Atan2(FlowY.Data[j, i, 0], (FlowX.Data[j, i, 0] * -1)) * 180 / Math.PI);
            //         }

            //         //FlowResult.Data[j, i, 0] = FlowAngle.Data[j, i, 0] * FlowLength.Data[j, i, 0];
            //         FlowResult.Data[j, i, 0] = FlowLength.Data[j, i, 0] * 5;

            //     }
            // }
            // Image<Bgr, Byte> Result = new Image<Bgr, Byte>(grayframe.Size);
            // CvInvoke.ApplyColorMap(FlowResult.Convert<Gray, Byte>(), Result, ColorMapType.Hot);
            //// CvInvoke.cvShowImage("Flow Angle", FlowAngle.Convert<Gray,Byte>());//Uncomment to see in external window
            //// CvInvoke.cvShowImage("Flow Length", FlowLength.Convert<Gray, Byte>());//Uncomment to see in external window
            // CvInvoke.cvShowImage("Flow Angle Colour", Result);//Uncomment to see in external window
            #endregion

            #region This code is much simpler
            //Find the length for the whole array
            FlowY      = FlowY.Mul(FlowY);           //Y*Y
            FlowX      = FlowX.Mul(FlowX);           //X*X
            FlowResult = FlowX + FlowY;              //X^2 + Y^2
            CvInvoke.cvSqrt(FlowResult, FlowResult); //SQRT(X^2 + Y^2)

            //Apply a colour map.
            Image<Bgr, Byte> Result = new Image<Bgr, Byte>(grayframe.Size);                          //store the result
            CvInvoke.ApplyColorMap(FlowResult.Convert<Gray, Byte>() * 5, Result, ColorMapType.Hot);  //Scale the FlowResult by a factor of 5 for a better visual difference
            CvInvoke.cvShowImage("Flow Angle Colour II", Result);                                    //shows the result in an external window
            CvInvoke.cvWaitKey(1);                                                                   //NOTE: You only need this line once

            #endregion
            #endregion


            prevgrayframe = grayframe.Copy(); //copy current frame to previous

            //Image<Gray, float> mixCahnnel_dest2 = Histo.BackProjectPatch<float>(new Image<Gray, float>[] { input.PyrDown().PyrDown().Convert<Hsv, float>()[0] }, new Size(1, 1), HISTOGRAM_COMP_METHOD.CV_COMP_BHATTACHARYYA, 1.0);
            //CvInvoke.cvShowImage("BackProjection", mixCahnnel_dest2);
            //CvInvoke.cvWaitKey(1); //Uncomment to see in external window (NOTE: You only need this line once)

            for (int i = 0; i < curFeatures.Length; i++)
            {
                LineSegment2DF line = new LineSegment2DF(preFeatures[0][i], curFeatures[i]);


                double dx = Math.Abs(line.P1.X - line.P2.X);
                double dy = Math.Abs(line.P1.Y - line.P2.Y);
                double l  = Math.Sqrt(dx * dx + dy * dy);

                double spinSize = 0.1 * l;
                if (l > 5 && l < 100)
                {
                    frame.Draw(line, new Bgr(Color.Red), 2);

                    double         angle = Math.Atan2((double)line.P1.Y - line.P2.Y, (double)line.P1.X - line.P2.X);
                    Point          Tip1  = new Point((int)(line.P2.X + spinSize * Math.Cos(angle + 3.1416 / 4)), (int)(line.P2.Y + spinSize * Math.Sin(angle + 3.1416 / 4)));
                    Point          Tip2  = new Point((int)(line.P2.X + spinSize * Math.Cos(angle - 3.1416 / 4)), (int)(line.P2.Y + spinSize * Math.Sin(angle - 3.1416 / 4)));
                    LineSegment2DF line1 = new LineSegment2DF(Tip1, curFeatures[i]);
                    LineSegment2DF line2 = new LineSegment2DF(Tip2, curFeatures[i]);
                    frame.Draw(line1, new Bgr(Color.Blue), 2);
                    frame.Draw(line2, new Bgr(Color.Blue), 2);
                }

                //int range = 20;
                //if (preFeatures[0][i].X > curFeatures[i].X - range && preFeatures[0][i].X < curFeatures[i].X + range) preFeatures[0][i].X = curFeatures[i].X;
                //if (preFeatures[0][i].Y > curFeatures[i].Y - range && preFeatures[0][i].Y < curFeatures[i].Y + range) preFeatures[0][i].Y = curFeatures[i].Y;
            }

            preFeatures = prevgrayframe.GoodFeaturesToTrack(1000, 0.05, 5.0, 3);
            prevgrayframe.FindCornerSubPix(preFeatures, new Size(5, 5), new Size(-1, -1), new MCvTermCriteria(25, 1.5d)); //This just increases the accuracy of the points

            /*---------------------------------------------*/
            DisplayImage(input.ToBitmap(), PCBX_Image); //thread-safe display to avoid cross-thread errors from the camera thread
        }
Example #9
        public override void Process(ref Image<Bgr, byte> image)
        {
            CvInvoke.ApplyColorMap(image, image, _colorMapType);
        }
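Example #9 relies on ApplyColorMap accepting the same image as both source and destination, so the recolouring happens in place. A minimal stand-alone sketch of the same idea outside the (unspecified) processing pipeline the override belongs to; the file names are placeholders:

        // load a frame and recolour it in place
        Image<Bgr, byte> image = new Image<Bgr, byte>("frame.png");
        CvInvoke.ApplyColorMap(image, image, ColorMapType.Hot);
        image.Save("frame_hot.png");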