Example #1
        // Output: the input image with the matching contour drawn, and the contour area
        public Tuple <Image <Bgr, byte>, double> DrawContourAndAreaSize_B(Image <Bgr, byte> input)
        {
            var gdata = BGRtoGray(input.Data, 1);
            var rdata = BGRtoGray(input.Data, 2);
            var gimg  = new Image <Gray, byte>(gdata);
            var rimg  = new Image <Gray, byte>(rdata);
            Image <Gray, byte> workingImg = gimg + rimg / 2;

            #region Processing
            CvInvoke.MedianBlur(workingImg, workingImg, 5);

            workingImg = workingImg.Convolution(new ConvolutionKernelF(CreateKernel()))
                         .Convert <Gray, byte>();
            workingImg._GammaCorrect(2.0);

            CvInvoke.MedianBlur(workingImg, workingImg, 5);

            workingImg = workingImg.Mul(255 / 200.0);
            workingImg._GammaCorrect(2.0);
            workingImg = workingImg.Mul(255 / 200.0);
            workingImg._GammaCorrect(2.0);
            workingImg = TriFilter(workingImg);
            workingImg._GammaCorrect(2.0);
            workingImg = workingImg.Add(new Gray(50));
            workingImg = workingImg.ThresholdToZero(new Gray(100));
            workingImg = workingImg.Add(new Gray(50));
            workingImg = workingImg.ThresholdBinary(new Gray(120), new Gray(255));

            #endregion

            #region contour
            // Chip Size Range
            int up = 7500;
            int dw = 4500;

            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
            CvInvoke.FindContours(workingImg, contours, null, RetrType.External, ChainApproxMethod.ChainApproxNone);


            for (int i = 0; i < contours.Size; i++)
            {
                double areaSize = CvInvoke.ContourArea(contours[i], false);  //  Find the area of contour
                Console.WriteLine(areaSize);
                if (areaSize >= dw && areaSize <= up)
                {
                    CvInvoke.DrawContours(input, contours, i, new MCvScalar(14, 200, 40), 2);
                    return(Tuple.Create(input, areaSize));
                }
            }
            return(null);

            #endregion
        }
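A minimal caller sketch for Example #1 (not from the original project): it assumes EmguCV is referenced and DrawContourAndAreaSize_B is in scope; the helper name and file name are purely illustrative.

        // Hedged caller sketch: run the contour/area routine above on a BGR image and
        // report the area if a contour in the 4500-7500 range was found.
        public void RunChipAreaCheck()
        {
            using (var source = new Image <Bgr, byte>("chip.png"))
            {
                Tuple <Image <Bgr, byte>, double> hit = DrawContourAndAreaSize_B(source);
                if (hit != null)
                {
                    Console.WriteLine($"Chip contour found, area = {hit.Item2:F1}");
                }
                else
                {
                    Console.WriteLine("No contour in the expected size range.");
                }
            }
        }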
Example #2
        private Image <Rgb, byte> CreateDepthBumper(Image <Rgb, byte> ocvDepth)
        {
            Image <Rgb, byte> ocvDepthBumper = ocvDepth.Copy(_slidingWindowCompact);

            ocvDepthBumper = ocvDepthBumper.Add(new Rgb(_brightness, _brightness, _brightness));
            return(ocvDepthBumper);
        }
Example #3
        public Bitmap edgeDetection(Bitmap bmp, int thresh, int linkSize)
        {
            var imageCV = bitmapToImageCV(bmp);

            frameBuffer.Add(imageCV);
            if (frameBuffer.Count > 10)
            {
                frameBuffer.RemoveAt(0);
            }



            var edges = imageCV.Canny(thresh, linkSize);

            if (blurEdges)
            {
                var blurred = edges.Convolution(Convolve.gauss5pxkernel); // local renamed to avoid shadowing the blurEdges flag
                edges = blurred.Convert <Gray, byte>();
            }

            var edgeChannels = new Image <Gray, byte>[] { edges, edges, edges };
            var rgbEdges     = new Image <Bgr, byte>(edgeChannels);
            var mashed       = rgbEdges.Add(imageCV);
            var bitmap       = mashed.ToBitmap();
            var g            = Graphics.FromImage(bitmap);
            var pen          = new Pen(Color.Black);

            g.DrawArc(pen, 10, 10, 50, 50, 0, 360);
            var sharpened = imageCV.Convolution(Convolve.sharpen3pxkernel);

            return(sharpened.ToBitmap());
        }
Example #4
        public void Render(Image image)
        {
            try
            {
                var imagePath = GetValue <string>("image_path");
                var frame     = Image.Load(RunMode.Interactive, imagePath, imagePath);
                var newLayer  = new Layer(frame.ActiveLayer, image)
                {
                    Visible = true
                };

                image.UndoGroupStart();

                image.Add(newLayer, -1);
                image.ActiveLayer = newLayer;

                image.UndoGroupEnd();

                frame.Delete();
            }
            catch (Exception ex)
            {
                throw new GimpSharpException();
            }
        }
Example #5
        internal void CrateImage()
        {
            Tile tile  = Grid[0][0];
            int  tileY = tile.Fields.Count;
            int  tileX = tile.Fields[0].Count;

            int height = Grid.Count * (tileY - 2);
            int width  = Grid[0].Count * (tileX - 2);

            for (int i = 0; i < height; i++)
            {
                Image.Add(new List <char>());
            }

            int lineNum = 0;

            for (int i = 0; i < Grid.Count; i++)
            {
                for (int j = 0; j < Grid[0].Count; j++)
                {
                    lineNum = i * (tileY - 2);

                    for (int iNew = 1; iNew < tileY - 1; iNew++)
                    {
                        for (int jNew = 1; jNew < tileX - 1; jNew++)
                        {
                            Image[lineNum + iNew - 1].Add(Grid[i][j].Fields[iNew][jNew]);
                        }
                    }
                }
            }
            string imageString = PrintImage();

            ImageTile = new Tile($"Tile 0:\r\n{imageString}");
        }
Example #6
        public void GetBody(Image <Bgr, Byte> frame, out PointF centroid)
        {
            using (Image <Gray, Byte> origGray = frame.Convert <Gray, Byte>())
                using (Image <Gray, Byte> filteredImage = origGray.SmoothMedian(13))
                    using (Image <Gray, Byte> binary = filteredImage.ThresholdBinary(new Gray(ThresholdValue), new Gray(255)))
                        using (Image <Gray, Byte> backgroundNot = BinaryBackground.Not())
                            using (Image <Gray, Byte> finalImage = binary.Add(backgroundNot))
                                using (Image <Gray, Byte> subbed = finalImage.Not())
                                {
                                    centroid = PointF.Empty;
                                    CvBlobs blobs = new CvBlobs();
                                    BlobDetector.Detect(subbed, blobs);

                                    CvBlob mouseBlob = null;
                                    double maxArea   = -1;
                                    foreach (var blob in blobs.Values)
                                    {
                                        if (blob.Area > maxArea)
                                        {
                                            mouseBlob = blob;
                                            maxArea   = blob.Area;
                                        }
                                    }

                                    if (mouseBlob != null)
                                    {
                                        centroid = mouseBlob.Centroid;
                                    }
                                }
        }
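Several of these examples (GetBody above, and FindBody and ProcessFrame further down) share the same masking idiom: OR the thresholded frame with the inverted background via a saturating Add, then invert the result so only the foreground remains. A minimal sketch of that step, assuming two binary images of equal size; the helper name is illustrative and not taken from any of the quoted projects.

        // Hedged sketch: on 0/255 images Add saturates at 255 and therefore acts as a bitwise OR.
        // The final Not() keeps only pixels that are dark in the frame but bright in the background,
        // i.e. the moving foreground object.
        static Image <Gray, byte> MaskForeground(Image <Gray, byte> binaryFrame, Image <Gray, byte> binaryBackground)
        {
            using (Image <Gray, byte> backgroundNot = binaryBackground.Not())
            using (Image <Gray, byte> combined = binaryFrame.Add(backgroundNot))
            {
                return combined.Not();
            }
        }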
Example #7
        public override ImageData GetResult(ImageForm_Service service, Bitmap argBitmap, List <int> args)
        {
            if (service.data.LastData() == null)
            {
                return(null);
            }

            if (service.data.LastData().Bitmap == null)
            {
                return(null);
            }

            if (argBitmap == null)
            {
                return(null);
            }

            Image <Bgra, byte> image = new Image <Bgra, byte>(service.data.LastData().Bitmap);
            Image <Bgra, byte> image2;

            if ((image.Bitmap.Width != argBitmap.Width) ||
                (image.Bitmap.Height != argBitmap.Height))
            {
                //Console.WriteLine("Przeliczam");

                Bitmap tmpbitmap = new Bitmap(image.Size.Width, image.Size.Height);

                for (int w = 0; w < tmpbitmap.Width; ++w)
                {
                    for (int h = 0; h < tmpbitmap.Height; ++h)
                    {
                        if (argBitmap.Width <= w || argBitmap.Height <= h)
                        {
                            tmpbitmap.SetPixel(w, h, Color.White);
                        }
                        else
                        {
                            tmpbitmap.SetPixel(w, h, argBitmap.GetPixel(w, h));
                        }
                    }
                }
                image2 = new Image <Bgra, byte>(tmpbitmap);
            }
            else
            {
                image2 = new Image <Bgra, byte>(argBitmap);
            }
            //Image<Bgra, byte> image2 = new Image<Bgra, byte>(tmpbitmap);

            try
            {
                Image <Bgra, byte> result = image.Add(image2);

                return(new ImageData(result.Bitmap, service.data.LastData().ID));
            }
            catch
            {
                return(null);
            }
        }
Example #8
        public void GetSuperpixelImages(Image <Gray, Byte> image, string dir, string imageName, int regionSize = 20, float ratio = 10.0f)
        {
            var pixelator = new Emgu.CV.XImgproc.SupperpixelSLIC(image, Emgu.CV.XImgproc.SupperpixelSLIC.Algorithm.SLICO, regionSize, ratio);

            pixelator.Iterate();
            var labels = new Mat();

            pixelator.GetLabels(labels);
            var labelsArray = (int[, ])labels.GetData();
            var superpixelColors = new int[pixelator.NumberOfSuperpixels];
            var pixelCount       = new int[pixelator.NumberOfSuperpixels];
            var imageArray = image.Data;

            for (int i = 0; i < labels.Rows; i++)
            {
                for (int j = 0; j < labels.Cols; j++)
                {
                    var label = labelsArray[i, j];
                    superpixelColors[label] += imageArray[i, j, 0];
                    pixelCount[label]       += 1;
                }
            }
            for (int i = 0; i < superpixelColors.Length; i++)
            {
                if (pixelCount[i] != 0)
                {
                    superpixelColors[i] /= pixelCount[i];
                }
            }
            var maskedImage = new Image <Bgr, Byte>(image.Width, image.Height);

            // var boundaries = new Image<Bgr, Byte>(image.Width, image.Height);
            //image.CopyTo(maskedImage);
            CvInvoke.CvtColor(image, maskedImage, Emgu.CV.CvEnum.ColorConversion.Gray2Bgr);
            var mask      = new Image <Gray, Byte>(image.Width, image.Height);
            var colorMask = new Image <Bgr, Byte>(image.Width, image.Height);

            colorMask = colorMask.Add(new Bgr(Color.Red));
            pixelator.GetLabelContourMask(mask);
            colorMask   = colorMask.And(colorMask, mask);
            maskedImage = maskedImage.And(maskedImage, mask.Not());
            maskedImage = maskedImage.Add(colorMask);
            maskedImage.Save(dir + @"\" + imageName + "_borders.png");

            var superpixelImage = new Image <Gray, Byte>(image.Width, image.Height);

            // labelsArray and the Image indexer are both [row, column], so iterate rows (height) then columns (width)
            for (int i = 0; i < image.Height; i++)
            {
                for (int j = 0; j < image.Width; j++)
                {
                    var pixelNum = labelsArray[i, j];
                    superpixelImage[i, j] = new Gray(superpixelColors[pixelNum]);
                }
            }

            superpixelImage.Save(dir + @"\" + imageName + "_meanColors.png");
        }
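A minimal caller sketch for the superpixel routine above, assuming it is called from the same class; the helper name, image path and output directory are illustrative only.

        // Hedged caller sketch: segment a grayscale image into SLIC superpixels and write the
        // two diagnostic images produced by GetSuperpixelImages.
        public void RunSuperpixelDemo()
        {
            using (var gray = new Image <Gray, byte>("input.png"))
            {
                GetSuperpixelImages(gray, @"C:\temp", "input", regionSize: 25, ratio: 10.0f);
                // writes C:\temp\input_borders.png and C:\temp\input_meanColors.png
            }
        }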
Example #9
        private void addToolStripMenuItem_Click(object sender, EventArgs e)
        {
            Mat dstimg = new Mat();

            if (pictureBoxA.Image == null)
            {
                MessageBox.Show("Select picture A.");
                return;
            }
            if (pictureBoxB.Image == null)
            {
                MessageBox.Show("Select picture B.");
                return;
            }
            Image <Bgra, byte> resultImage   = new Image <Bgra, byte>(ResultPictureBox.Size.Width, ResultPictureBox.Size.Height);
            Image <Bgra, byte> imageBresized = new Image <Bgra, byte>(PictureA.Size.Width, PictureA.Size.Height);

            CvInvoke.Resize(PictureB, imageBresized, PictureA.Size);
            resultImage            = PictureA.Add(imageBresized, null);
            ResultPictureBox.Image = resultImage.ToBitmap();
            chartResult.Series.Clear();
            saveResultHistogramToolStripMenuItem.Enabled = true;
            Histogram_ResultPicture();
            chartResult.Visible = true;
            label3.Visible      = true;
            saveResultToolStripMenuItem.Enabled = true;
            return;
        }
Example #10
 public ActionResult Add(string Title = null, string Category = null, string Description = null, string ResizeForDevices = null, HttpPostedFileBase Data = null)
 {
     if (Data != null)
     {
         Image temp = Models.Image.AddTemp(Data.InputStream.ToBytes(), Data.FileName.Split('.').Last());
         Session.AddSafe("sessionTempImage", temp);
         ViewData.AddSafe("viewTempImage", temp);
     }
     if (!String.IsNullOrEmpty(Title))
     {
         var sessionImage = (Models.Image)Session["sessionTempImage"];
         if (sessionImage != null)
         {
             sessionImage.Title            = Title;
             sessionImage.Description      = Description;
             sessionImage.Category         = Category;
             sessionImage.ResizeForDevices = (ResizeForDevices == "on");
             sessionImage.Active           = true;
             sessionImage.TargetDevice     = Image.Device.GetDefault().ID;
             List <Image> newImages = Image.Add(sessionImage).ToList();
             Session.Remove("sessionTempImage");
             return(Redirect("~/images/" + sessionImage.Name));
         }
     }
     return(View());
 }
Example #11
        public void FindBody(Image <Gray, Byte> filteredImage, out double waistLength, out double waistVolume, out double waistVolume2, out double waistVolume3, out double waistVolume4, out PointF centroid)
        {
            using (Image <Gray, Byte> binary = filteredImage.ThresholdBinary(new Gray(ThresholdValue), new Gray(255)))
                using (Image <Gray, Byte> backgroundNot = BinaryBackground.Not())
                    using (Image <Gray, Byte> finalImage = binary.Add(backgroundNot))
                        using (Image <Gray, Byte> subbed = finalImage.Not())
                        {
                            CvBlobs blobs = new CvBlobs();
                            BlobDetector.Detect(subbed, blobs);

                            CvBlob mouseBlob = null;
                            double maxArea   = -1;
                            foreach (var blob in blobs.Values)
                            {
                                if (blob.Area > maxArea)
                                {
                                    mouseBlob = blob;
                                    maxArea   = blob.Area;
                                }
                            }

                            double gapDistance = 50;
                            RBSK.Settings.GapDistance = gapDistance;

                            centroid = mouseBlob.Centroid;

                            waistLength  = -1;
                            waistVolume  = -1;
                            waistVolume2 = -1;
                            waistVolume3 = -1;
                            waistVolume4 = -1;
                        }
        }
Example #12
        private void button2_Click(object sender, EventArgs e)
        {
            if (srcImg == null)
            {
                MessageBox.Show("Please select the source image first!");
                return;
            }
            Image <Bgr, Byte> dstImg  = srcImg.CopyBlank();
            Image <Bgr, Byte> dstImg2 = srcImg.CopyBlank();

            CvInvoke.Laplacian(srcImg, dstImg, DepthType.Default);
            imageBox2.Image = dstImg;                        // gradient (Laplacian) image
            CvInvoke.ConvertScaleAbs(dstImg, dstImg2, 1, 0); // same as below, just trying out this approach
            dstImg2         = srcImg.Add(dstImg2);
            imageBox3.Image = dstImg2;
        }
Example #13
        public void AddImageProduct(List <FileInfo> ImagesFileList, string avartar)
        {
            Image image = new Image();

            image.ImageUri = $"/Resource/Images/Products/{avartar}";
            image.Add();
            for (int i = 1; i < ImagesFileList.Count; i++)
            {
                if (ImagesFileList[i].Name != null)
                {
                    avartar = $"{Guid.NewGuid()}{ImagesFileList[i].Extension}";
                    ImagesFileList[i].CopyTo($"{folderfile}Resource\\Images\\Products\\{avartar}");
                    image.ImageUri = $"/Resource/Images/Products/{avartar}";
                    image.Add();
                }
            }
        }
Example #14
        public Image <Bgr, Byte> ColoredMask(Image <Gray, Byte> BlackWhiteImage, Image <Bgr, Byte> RGBImage, Bgr color)
        {
            Image <Bgr, Byte> temp = new Image <Bgr, Byte>(BlackWhiteImage.Bitmap);
            Image <Bgr, Byte> mask = new Image <Bgr, Byte>(temp.Width, temp.Height, color);

            temp = temp.And(mask);
            temp = temp.Add(RGBImage);
            return(temp);
        }
Example #15
 public void AddImageFITSKeyword(string name, string value, string comment = "")
 {
     if (Image == null)
     {
         throw new InvalidOperationException("No Image component available to add FITS Keyword!");
     }
     Image.Add(new XElement(xmlns + "FITSKeyword",
                            new XAttribute("name", name),
                            new XAttribute("value", RemoveInvalidXMLChars(value)),
                            new XAttribute("comment", comment)));
 }
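In this example (and in Example #17 below) Image is not an OpenCV image but a System.Xml.Linq.XElement, so Add appends child elements. A minimal LINQ-to-XML sketch of that usage; the helper name, namespace URI and keyword values are invented for illustration, and it assumes using System.Xml.Linq.

 // Hedged LINQ-to-XML sketch: "image" here is an XElement container, so Add appends child elements.
 public static void BuildImageElementDemo()
 {
     XNamespace xmlns = "http://example.invalid/metadata";
     var image = new XElement(xmlns + "Image");
     image.Add(new XElement(xmlns + "FITSKeyword",
                            new XAttribute("name", "EXPTIME"),
                            new XAttribute("value", "120"),
                            new XAttribute("comment", "exposure time in seconds")));
     Console.WriteLine(image);   // prints the Image element with its FITSKeyword child
 }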
Example #16
 public void DrawDiagonals(ref Image <Rgb, byte> result, int channel)
 {
     // List<Image<Rgb, byte>> ls = new List<Image<Rgb, byte>>();
     for (int posNeg = 0; posNeg < 2; posNeg++)
     {
         LineSegment2D[] aux = Diagonals[channel, posNeg];
         result = result.Add(DrawLines(channel, ref aux)).Clone();
         //   segmentBox.Image = result.Bitmap;
         // ls.Add(result);
     }
 }
Example #17
 public void AddCfaAttribute(string cfaPattern, int cfaWidth, int cfaHeight)
 {
     if (Image == null)
     {
         throw new InvalidOperationException("No Image component available to add CFA attribute!");
     }
     Image.Add(new XElement(xmlns + "ColorFilterArray",
                            new XAttribute("pattern", cfaPattern),
                            new XAttribute("width", cfaWidth),
                            new XAttribute("height", cfaHeight),
                            new XAttribute("name", cfaPattern + " Bayer Filter")));
 }
Example #18
        private void ThresholdColorizePupilFullImage(Color color)
        {
            // Threshold (whole image)
            pupilImage = gray.ThresholdBinaryInv(new Gray(GTSettings.Current.Processing.PupilThreshold), new Gray(255));

            // Convert thresholded to color and add
            var pupilThresholdImage = new Image <Bgr, byte>(width, height,
                                                            new Bgr(GTSettings.Current.Visualization.PupilThresholdColor));

            pupilThresholdImage = pupilThresholdImage.And(pupilImage.Convert <Bgr, byte>());
            processed           = processed.Add(pupilThresholdImage);
        }
Example #19
 public void DrawUDLR(ref Image <Rgb, byte> final)
 {
     for (int channel = 0; channel < 3; channel++)
     {
         Image <Rgb, byte> result = null;
         result = raw.CopyBlank();
          // iterator i is the up/down/left/right segment
         DrawUDLR(ref result, channel);
         final = final.Add(result).Clone();
         //   segmentBox.Image = result.Bitmap;
     }
 }
Example #20
        public void DrawDiagonals(ref Image <Rgb, byte> final)
        {
            for (int channel = 0; channel < 3; channel++)
            {
                Image <Rgb, byte> result = null;

                //print
                result = raw.CopyBlank();
                DrawDiagonals(ref result, channel);
                final = final.Add(result).Clone();
            }
        }
Example #21
        private PointF[] ProcessFrame(Image <Bgr, Byte> image, AutomatedRodentTracker.Services.RBSK.RBSK rbsk, out double waist, out double waistArea, out double waistArea2, out double waistArea3, out double waistArea4, out PointF centroid, bool useBackground = false)
        {
            //Rectangle roi = Rectangle.Empty;

            //if (image.IsROISet)
            //{
            //    roi = image.ROI;
            //    image.ROI = Rectangle.Empty;
            //}

            if (BackgroundImage != null && useBackground)
            {
                using (Image <Gray, Byte> grayImage = image.Convert <Gray, Byte>())
                    //using (Image<Gray, Byte> filteredImage = grayImage.SmoothMedian(rbsk.Settings.FilterLevel))
                    using (Image <Gray, Byte> binaryImage = grayImage.ThresholdBinary(new Gray(rbsk.Settings.BinaryThreshold), new Gray(255)))
                        using (Image <Gray, Byte> backgroundNot = BackgroundImage.Not())
                            using (Image <Gray, Byte> finalImage = binaryImage.Add(backgroundNot))
                                using (Image <Gray, Byte> filteredImage = finalImage.SmoothMedian(rbsk.Settings.FilterLevel))
                                {
                                    //ImageViewer.Show(finalImage);
                                    PointF[] result = RBSKService.RBSK(filteredImage, rbsk);

                                    IBodyDetection bodyDetection = ModelResolver.Resolve <IBodyDetection>();
                                    bodyDetection.BinaryBackground = BackgroundImage;
                                    //bodyDetection.ThresholdValue = ThresholdValue;

                                    if (result == null)
                                    {
                                        waist      = -1;
                                        waistArea  = -1;
                                        waistArea2 = -1;
                                        waistArea3 = -1;
                                        waistArea4 = -1;
                                        bodyDetection.GetBody(grayImage, out centroid);
                                        return(null);
                                    }

                                    bodyDetection.FindBody(grayImage, out waist, out waistArea, out waistArea2, out waistArea3, out waistArea4, out centroid);

                                    return(result);
                                }
            }

            waist      = -1;
            waistArea  = -1;
            waistArea2 = -1;
            waistArea3 = -1;
            waistArea4 = -1;
            centroid   = PointF.Empty;
            return(RBSKService.RBSK(image, rbsk));
        }
Example #22
 private void SobelToolStripMenuItem_Click(object sender, EventArgs e)
 {
     if (!src.IsEmpty)
     {
         im   = src.ToImage <Bgr, Byte>();
         tmp  = im.Convert <Gray, Byte>();
         tmp1 = tmp.Clone();
         gray = tmp.Clone();
         CvInvoke.Sobel(tmp, gray, DepthType.Cv8U, 1, 0, 3);
         CvInvoke.Sobel(tmp, tmp1, DepthType.Cv8U, 0, 1, 3);
         gray            = gray.Add(tmp1);
         imageBox1.Image = gray;
     }
 }
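The Sobel handler above combines the two gradient directions with a saturating Add. A common alternative, sketched here under the assumption that a single-channel source Mat is available (this is not the author's code, and the helper name and output file name are illustrative), blends the X and Y responses with CvInvoke.AddWeighted, which saturates less often.

     // Hedged alternative sketch: equal-weight blend of the X and Y Sobel responses
     // instead of a saturating Add.
     private static void SaveSobelBlend(Mat grayInput)
     {
         Mat gradX = new Mat(), gradY = new Mat(), combined = new Mat();
         CvInvoke.Sobel(grayInput, gradX, DepthType.Cv8U, 1, 0, 3);    // horizontal derivative
         CvInvoke.Sobel(grayInput, gradY, DepthType.Cv8U, 0, 1, 3);    // vertical derivative
         CvInvoke.AddWeighted(gradX, 0.5, gradY, 0.5, 0, combined);    // 0.5*X + 0.5*Y
         CvInvoke.Imwrite("sobel_combined.png", combined);
     }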
Example #23
        private void ThresholdColorizePupilFullImage()
        {
            // Threshold (whole image)
            Image <Gray, byte> pupilImage = gray.ThresholdBinaryInv(new Gray(Settings.Instance.Processing.PupilThreshold), new Gray(255));

            // Convert thresholded to color and add
            Image <Bgr, byte> pupilThresholdImage = new Image <Bgr, byte>(width, height, new Bgr(Settings.Instance.Visualization.PupilThresholdColor));

            pupilThresholdImage = pupilThresholdImage.And(pupilImage.Convert <Bgr, byte>());
            processed           = processed.Add(pupilThresholdImage);

            pupilThresholdImage.Dispose();
            pupilImage.Dispose();
        }
Example #24
        private void printSteps(ref Image <Rgb, byte> final, ref Image <Rgb, byte>[] arr)
        {
            this.rgbbox.Image = arr[0].Bitmap;
            //   MessageBox.Show(".0");
            this.rgbbox.Image = arr[1].Bitmap;
            //
            //      MessageBox.Show(".1");
            ///
            this.rgbbox.Image = arr[2].Bitmap;

            // MessageBox.Show(".2");

            final             = final.Add(arr[0].Add(arr[1]).Add(arr[2])).Clone();
            this.rgbbox.Image = final.Bitmap;
        }
Example #25
        public void DrawUDLR(ref Image <Rgb, byte> result, int channel)
        {
            for (int q = 0; q < 4; q++)
            {
                for (int type = 0; type < 3; type++)
                {
                    LineSegment2D[]   aux         = chUDLR_HVO[channel, q, type];
                    Image <Rgb, byte> nuevoResult = DrawLines(channel, ref aux);
                    result = result.Add(nuevoResult).Clone();
                    // segmentBox.Image = result.Bitmap;

                    //  MessageBox.Show(channel.ToString() + " " +q.ToString() + " " + type.ToString());
                }
            }
        }
Example #26
 public Response <IEnumerable <Image> > AddConfirm([FromBody()] Image values)
 {
     if (values != null)
     {
         var ret = Image.Add(values);
         if (ret.IsEmpty())
         {
             return(new Response <IEnumerable <Image> >("Failed To Add Image", ret, false));
         }
         else
         {
             return(new Response <IEnumerable <Image> >(ret.FirstOrDefault().Name, ret, true));
         }
     }
     return(new Response <IEnumerable <Image> >("Request Values should not be NULL", null, false));
 }
Example #27
        async void UpdateMask()
        {
            if (!IsInitialized)
            {
                return;
            }

            bool  DoNew          = (bool)RadioMaskNew.IsChecked;
            bool  UseHalfmap2    = (bool)RadioHalfmap2File.IsChecked && Halfmap2 != null;
            float Threshold      = (float)MaskThreshold;
            float LowpassNyquist = (float)HalfmapPixelSize * 2 / (float)MaskLowpass;

            ProgressMask.Visibility = Visibility.Visible;

            await Task.Run(() =>
            {
                MaskFinal?.Dispose();
                MaskFinal = null;

                if (!DoNew && Mask != null)
                {
                    MaskFinal = Mask.GetCopyGPU();
                    MaskFinal.Binarize(Threshold);
                }
                else if (DoNew)
                {
                    MaskFinal = Halfmap1.GetCopyGPU();
                    if (UseHalfmap2)
                    {
                        MaskFinal.Add(Halfmap2);
                        MaskFinal.Multiply(0.5f);
                    }

                    MaskFinal.Bandpass(0, LowpassNyquist, true);
                    MaskFinal.Binarize(Threshold);
                }

                Dispatcher.Invoke(() =>
                {
                    RendererMask.Volume = MaskFinal;
                });
            });

            ProgressMask.Visibility = Visibility.Hidden;
            RevalidateTab();
        }
Example #28
        /// <summary>
        /// Takes an HSV image, compares it against the given reference color (within the dHue,
        /// dSaturation and dValue tolerance fields) and returns a binary grayscale image.
        /// </summary>
        /// <param name="inImg">Input HSV image</param>
        /// <param name="pcolor">Reference color</param>
        /// <returns>Binary mask: white where the pixel matches the reference color</returns>
        private Emgu.CV.Image <Gray, byte> thresholdHSVtoGray(Emgu.CV.Image <Hsv, byte> inImg, Hsv pcolor)
        {
            double lowH, lowS, lowV, highH, highS, highV;

            if (pcolor.Value < 15)
            {
                lowH = 0; highH = 179;
                lowS = 0; highS = 255;
            }
            else
            {
                lowH  = pcolor.Hue - dHue; //Math.Max(pcolor.Hue - dHue, 0);
                lowS  = Math.Max(pcolor.Satuation - dSaturation, 0);
                highH = pcolor.Hue + dHue; // Math.Min(pcolor.Hue + dHue, 179);
                highS = Math.Min(pcolor.Satuation + dSaturation, 255);
            }

            lowV  = Math.Max(pcolor.Value - dValue, 0);
            highV = Math.Min(pcolor.Value + dValue, 255);

            Image <Gray, byte> gray = new Image <Gray, byte>(inImg.Size);

            if (highH >= 180)
            {
                highH -= 180;
            }
            if (lowH < 0)
            {
                lowH += 180;
            }
            if (highH <= lowH)
            {
                Image <Gray, byte> gray1 = inImg.InRange(new Hsv(lowH, lowS, lowV), new Hsv(179, highS, highV));
                Image <Gray, byte> gray2 = inImg.InRange(new Hsv(0, lowS, lowV), new Hsv(highH, highS, highV));
                gray = gray1.Add(gray2);
            }
            else
            {
                gray = inImg.InRange(new Hsv(lowH, lowS, lowV), new Hsv(highH, highS, highV));
            }
            // Image cleanup: SmoothMedian returns a new image rather than filtering in place
            gray = gray.SmoothMedian(mediansize);

            return(gray);
        }
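A minimal caller sketch for the HSV threshold above; it assumes the hosting class and its tolerance fields are set up, and the helper name, file names and reference color are illustrative. Note how the method handles hue wrap-around by splitting the range into two InRange calls and OR-ing the results with Add.

        // Hedged caller sketch: convert a BGR frame to HSV and build a binary mask of the pixels
        // close to a red-ish reference color (values are illustrative only).
        private void RunColorMaskDemo()
        {
            using (var frame = new Image <Bgr, byte>("frame.png"))
            using (Image <Hsv, byte> hsv = frame.Convert <Hsv, byte>())
            using (Image <Gray, byte> mask = thresholdHSVtoGray(hsv, new Hsv(5, 200, 200)))
            {
                mask.Save("mask.png");   // white where the frame matches the reference color
            }
        }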
Example #29
 public Response <IEnumerable <Image> > AddConfirm([FromBody()] Image values)
 {
     if (values != null)
     {
         var ret = Image.Add(values);
         if (ret.IsEmpty())
         {
             System.Web.HttpContext.Current.Response.StatusCode = 501;
             return(new Response <IEnumerable <Image> >("Failed To Add Image", ret, false));
         }
         else
         {
             return(new Response <IEnumerable <Image> >(ret.FirstOrDefault().Name, ret, true));
         }
     }
     System.Web.HttpContext.Current.Response.StatusCode = 400;
     return(new Response <IEnumerable <Image> >("Request Values should not be NULL", null, false));
 }
Example #30
        private PointF[] ProcessFrame(Image <Bgr, Byte> image, RBSK rbsk, bool useBackground = false)
        {
            if (BinaryBackground != null && useBackground)
            {
                using (Image <Gray, Byte> grayImage = image.Convert <Gray, Byte>())
                    using (Image <Gray, Byte> filteredImage = grayImage.SmoothMedian(rbsk.Settings.FilterLevel))
                        using (Image <Gray, Byte> binaryImage = filteredImage.ThresholdBinary(new Gray(rbsk.Settings.BinaryThreshold), new Gray(255)))
                            using (Image <Gray, Byte> backgroundNot = BinaryBackground.Not())
                                using (Image <Gray, Byte> finalImage = binaryImage.Add(backgroundNot))
                                {
                                    PointF[] result = RBSKService.RBSK(finalImage, rbsk);
                                    return(result);
                                }
            }


            return(RBSKService.RBSK(image, rbsk));
        }
Example #31
        /// <summary>
        /// Convert a normal image to a 3D cyan/red anaglyph image.
        /// </summary>
        /// <param name="targetImage">Target image.</param>
        /// <param name="shift">Shift - the offset between left and right buffers.</param>
        /// <returns>The 3D image.</returns>
        public static Bitmap Make3DPopIn(Bitmap targetImage, int shift)
        {
            // This would be easier if we had COI support for cv.Set, but it doesn't
            // work that way.
            // OpenCV uses BGR order (even if input image is greyscale):
            // http://www.cs.iit.edu/~agam/cs512/lect-notes/opencv-intro/opencv-intro.html
            // red goes on the left, cyan on the right:
            // # http://en.wikipedia.org/wiki/Anaglyph_image

            // Create base image.
            Image<Bgr, Byte> inputImage = new Image<Bgr, byte>(targetImage);

            // Get the size.
            Size inputImageSize = inputImage.Size;

            // Output image.
            Image<Bgr, Byte> anaglyph;// = new Image<Bgr, byte>(inputImageSize);

            // Divide the image into channels.
            // Image<Gray, Byte> blueImage = new Image<Gray, byte>(inputImage[0].ToBitmap());
            // Image<Gray, Byte> greenImage = new Image<Gray, byte>(inputImage[1].ToBitmap());
            // Image<Gray, Byte> redImage = new Image<Gray, byte>(inputImage[2].ToBitmap());

            Image<Gray, Byte> blueImage  = inputImage[0];
            Image<Gray, Byte> greenImage = inputImage[1];
            Image<Gray, Byte> redImage   = inputImage[2];

            // Empty image.
            Image<Gray, byte> zeros = new Image<Gray, byte>(inputImageSize);

            // Create the output images.
            Image<Bgr, Byte> leftImage = new Image<Bgr, byte>(new Image<Gray, byte>[] { zeros, zeros, redImage });
            Image<Bgr, Byte> rightImage = new Image<Bgr, byte>(new Image<Gray, byte>[] { blueImage, greenImage, zeros });

            // Set the shift and ROI (view).
            leftImage.ROI  = new Rectangle(new Point(shift, 0), new Size(inputImageSize.Width - shift, inputImageSize.Height));
            rightImage.ROI = new Rectangle(new Point(0,     0), new Size(inputImageSize.Width - shift, inputImageSize.Height));

            // Generate output image.
            anaglyph = leftImage.Add(rightImage);

            return anaglyph.ToBitmap(); //resizeImage(anaglyph.ToBitmap(), outputSize);
        }
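A minimal usage sketch for Make3DPopIn; the helper name, file names and the 12-pixel shift are illustrative.

        // Hedged usage sketch: build a red/cyan anaglyph from a photo and save it.
        public static void SaveAnaglyphDemo()
        {
            using (var source = new Bitmap("photo.jpg"))
            using (Bitmap anaglyph = Make3DPopIn(source, 12))
            {
                anaglyph.Save("photo_anaglyph.png");
            }
        }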
Example #32
      public void TestGenericOperation()
      {
         Image<Gray, Single> img1 = new Image<Gray, float>(50, 20);
         img1.ROI = new Rectangle(10, 1, 50 - 10, 19 - 1);
         img1.SetValue(5.0);

         Image<Gray, Single> img2 = new Image<Gray, float>(50, 20);
         img2.ROI = new Rectangle(0, 2, 40, 20 - 2);
         img2.SetValue(new Gray(2.0));

         EmguAssert.IsTrue(img1.Width == img2.Width);
         EmguAssert.IsTrue(img1.Height == img2.Height);

         Stopwatch watch = Stopwatch.StartNew();
         Image<Gray, Single> img3 = img1.Add(img2);
         long cvAddTime = watch.ElapsedMilliseconds;

         watch.Reset();
         watch.Start();
         Image<Gray, Single> img4 = img1.Convert<Single, Single>(img2, delegate(Single v1, Single v2)
         {
            return v1 + v2;
         });
         long genericAddTime = watch.ElapsedMilliseconds;

         Image<Gray, Single> img5 = img3.AbsDiff(img4);

         watch.Reset();
         watch.Start();
         double sum1 = img5.GetSum().Intensity;
         long cvSumTime = watch.ElapsedMilliseconds;

         watch.Reset();
         watch.Start();
         Single sum2 = 0.0f;
         img5.Action(delegate(Single v)
         {
            sum2 += v;
         });
         long genericSumTime = watch.ElapsedMilliseconds;

         EmguAssert.WriteLine(String.Format("CV Add     : {0} milliseconds", cvAddTime));
         EmguAssert.WriteLine(String.Format("Generic Add: {0} milliseconds", genericAddTime));
         EmguAssert.WriteLine(String.Format("CV Sum     : {0} milliseconds", cvSumTime));
         EmguAssert.WriteLine(String.Format("Generic Sum: {0} milliseconds", genericSumTime));
         EmguAssert.WriteLine(String.Format("Abs Diff = {0}", sum1));
         EmguAssert.WriteLine(String.Format("Abs Diff = {0}", sum2));
         EmguAssert.IsTrue(sum1 == sum2);

         img3.Dispose();
         img4.Dispose();
         img5.Dispose();

         DateTime t1 = DateTime.Now;
         img3 = img1.Mul(2.0);
         DateTime t2 = DateTime.Now;
         img4 = img1.Convert<Single>(delegate(Single v1)
         {
            return v1 * 2.0f;
         });
         DateTime t3 = DateTime.Now;

         /*
         ts1 = t2.Subtract(t1);
         ts2 = t3.Subtract(t2);
         Trace.WriteLine(String.Format("CV Mul     : {0} milliseconds", ts1.TotalMilliseconds));
         Trace.WriteLine(String.Format("Generic Mul: {0} milliseconds", ts2.TotalMilliseconds));
         */

         EmguAssert.IsTrue(img3.Equals(img4));
         img3.Dispose();
         img4.Dispose();

         t1 = DateTime.Now;
         img3 = img1.Add(img1);
         img4 = img3.Add(img1);
         t2 = DateTime.Now;
         img5 = img1.Convert<Single, Single, Single>(img1, img1, delegate(Single v1, Single v2, Single v3)
         {
            return v1 + v2 + v3;
         });
         t3 = DateTime.Now;

         /*
         ts1 = t2.Subtract(t1);
         ts2 = t3.Subtract(t2);
         Trace.WriteLine(String.Format("CV Sum (3 images)     : {0} milliseconds", ts1.TotalMilliseconds));
         Trace.WriteLine(String.Format("Generic Sum (3 images): {0} milliseconds", ts2.TotalMilliseconds));
         */
         EmguAssert.IsTrue(img5.Equals(img4));
         img3.Dispose();
         img4.Dispose();
         img5.Dispose();

         img1.Dispose();
         img2.Dispose();

         Image<Gray, Byte> gimg1 = new Image<Gray, Byte>(400, 300, new Gray(30));
         Image<Gray, Byte> gimg2 = gimg1.Convert<Byte>(delegate(Byte b)
         {
            return (Byte)(255 - b);
         });
         gimg1.Dispose();
         gimg2.Dispose();
      }
Example #33
 private Image<Rgb, byte> BrigthenUpImage(Image<Rgb, byte> ocvImage)
 {
     return ocvImage.Add(new Rgb(_brightness, _brightness, _brightness));
 }
Example #34
 void  videoStream_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
 {
     currentFrame = currentFrame.Add(findTargetsOverlay);
     currentFrame = currentFrame.Add(trackingOverlay);
     imgDisplay.Image = currentFrame;
     videoFramesCount++;
     if (!pauseVideo)
         videoStream.RunWorkerAsync();
     else
     {
         video.Dispose();
         video=new Capture(cameraSelectionBox.SelectedIndex);
         pauseVideo = !pauseVideo;
         videoStream.RunWorkerAsync();
     }
 }