Example #1
        public static void ApplyColormap(ref double[,] source, out Emgu.CV.Image <Bgr, Byte> destination, ColorMapType colorMapType)
        {
            double max = 0;

            // Dimension 0 of the source array is treated as rows (y), dimension 1 as columns (x),
            // so that the loop bounds, the indexing and the image constructors stay consistent.
            for (int y = 0; y < source.GetLength(0); y++)
            {
                for (int x = 0; x < source.GetLength(1); x++)
                {
                    if (source[y, x] > max)
                    {
                        max = source[y, x];
                    }
                }
            }

            Image <Gray, byte> buffer = new Image <Gray, Byte>(source.GetLength(1), source.GetLength(0));

            for (int y = 0; y < source.GetLength(0); y++)
            {
                for (int x = 0; x < source.GetLength(1); x++)
                {
                    buffer[y, x] = new Gray(source[y, x] * 255 / max);
                }
            }

            destination = new Image <Bgr, Byte>(source.GetLength(1), source.GetLength(0));
            CvInvoke.ApplyColorMap(buffer, destination, colorMapType);
        }
        public void FloodFillTest()
        {
            // Create a Square
            Point[] square = new Point[4];
            square[0] = new Point(25, 25);
            square[1] = new Point(75, 25);
            square[2] = new Point(75, 75);
            square[3] = new Point(25, 75);

            // Create an Original Image
            var original = new Image<Bgr, Byte>(100, 100, new Bgr(255, 0, 0));
            original.FillConvexPoly(square, new Bgr(Color.Green));

            // Create an Expected Output Image
            var expected = new Emgu.CV.Image<Bgr, Byte>(100, 100, new Bgr(Preprocessing.MASK_COLOR));
            expected.FillConvexPoly(square, new Bgr(Color.White));

            // Perform the Flood fill
            Console.WriteLine("Perform Flood Fill ... ");
            var actual = new Emgu.CV.Image<Bgr, Byte>(Preprocessing.FloodFill(original.ToBitmap(), 0, 0, 1));

            bool identical = true;
            for (int ii = 0; ii < expected.Width; ii++)
            {
                for (int jj = 0; jj < expected.Height; jj++)
                {
                    identical = identical && (Utility.IsEqual(expected[jj, ii], actual[jj, ii]));
                }
            }

            Assert.IsTrue(identical);
        }
        private void DoScript()
        {
            // To be designed
            // Intention to run a "flexible" sequence of operations
            // For now hard code
            Mat outImg = new Mat();
            Mat blank  = new Mat(m_RawImage.Height, m_RawImage.Width, DepthType.Cv8S, 1);
            Mat blank2 = new Mat(m_RawImage.Height, m_RawImage.Width, DepthType.Cv8S, 1);

            Stopwatch sw = new Stopwatch();

            sw.Start();
            CvInvoke.Canny(m_RawImage, outImg, 120, 60);
            //var c = CvInvoke.HoughCircles(m_RawImage, HoughType.Gradient, 1, 20, 95, 20, 20, 150);
            DispImage.Source = BitmapSourceConvert.ToBitmapSource(outImg);
            sw.Stop();
            long t = sw.ElapsedMilliseconds;

            MainWnd.Title += " " + t + "ms";
            //m_ResultImage = outImg.ToImage<Gray, byte>();
            Image <Gray, byte>[] data = new Image <Gray, byte> [3];
            data[1]       = outImg.ToImage <Gray, byte>();
            data[2]       = blank.ToImage <Gray, byte>();
            data[0]       = blank.ToImage <Gray, byte>();
            m_ResultImage = new Image <Rgb, byte>(data);
            m_bShowOver   = true;
        }
Example #4
        internal void Process(int hueAfter, byte satAfter, int hueMid, int hueWidth)
        {
            byte afterSat = 0;

            BeforeImage = new Image<Bgr, byte>(BeforeImagePath).Resize(440, 320, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC, false);
            DebugImage = BeforeImage.Convert<Hsv, byte>();

            hueMid = FindHuePeak();
            int hueStart = (180 + hueMid - (hueWidth / 2)) % 180;
            int hueEnd = (180 + hueMid + (hueWidth / 2)) % 180;

            for (int i = 0; i < DebugImage.Width; i++)
                for (int j = 0; j < DebugImage.Height; j++)
                {
                    int hue = DebugImage.Data[j, i, 0];
                    int sat = DebugImage.Data[j, i, 1];
                    int val = DebugImage.Data[j, i, 2];

                    if ((hueStart < hueEnd) && (hue < hueEnd && hue > hueStart)
                        || (hueStart > hueEnd) && (hue < hueEnd || hue > hueStart))
                    {
                        if (sat > 30)
                        {
                            DebugImage.Data[j, i, 0] = (byte)hueAfter;
                            //DebugImage.Data[j, i, 1] = satAfter;
                        }
                    }
                }

            AfterImage = DebugImage.Convert<Bgr, byte>();
        }
Example #5
        public static Bitmap GetBitmap(Emgu.CV.Image <Bgr, byte> image)
        {
            Emgu.CV.Image <Gray, byte> gray = image.Convert <Gray, byte>();
            int [] k = new int[gray.Width];
            for (int i = 0; i < gray.Width; i++)
            {
                for (int j = 0; j < gray.Height; j++)
                {
                    if (gray[j, i].Intensity <= 200)
                    {
                        k[i]++;
                    }
                }
            }

            Bitmap image2 = new Bitmap(gray.Width, gray.Height);

            System.Drawing.Graphics p = System.Drawing.Graphics.FromImage(image2);
            p.Clear(Color.Black);
            for (int i = 0; i < k.Length - 3; i++)
            {
                PointF p1 = new PointF(i, k[i]);
                PointF p2 = new PointF(i + 1, k[i + 1]);
                PointF p3 = new PointF(i + 2, k[i + 2]);
                PointF p4 = new PointF(i + 3, k[i + 3]);
                p.DrawBezier(new Pen(Brushes.White), p1, p2, p3, p4);
            }

            return(image2);
        }
Example #6
        public void Resize(string outputPath, int newX_size, int height, string rootDirectoryName = "")
        {
            Image <Bgr, Byte> img1 = new Emgu.CV.Image <Bgr, Byte>(this.ImagePath);

            var oldX = img1.Width;
            var oldY = img1.Height;

            float ratio = (float)oldX / (float)newX_size;
            int   newY  = (int)Math.Round((double)((float)oldY / (float)ratio));
            int   newX  = (int)Math.Round((double)((float)oldX / (float)ratio));

            img1 = img1.Resize(newX, newY, Emgu.CV.CvEnum.Inter.LinearExact);

            var delta_w = newX_size - newX;
            var delta_h = height - newY;

            var top    = delta_h / 2;
            var bottom = delta_h - top;

            var left  = delta_w / 2;
            var right = delta_w - left;

            //img1.Save(@"C:\Users\lkathke\Desktop\EmguTest\resized.jpg");

            Mat newImage = new Mat();

            CvInvoke.CopyMakeBorder(img1, newImage, top, bottom, left, right, Emgu.CV.CvEnum.BorderType.Constant);

            newImage.Save(System.IO.Path.Combine(outputPath, System.IO.Path.GetFileName(this.ImagePath)));
            ResizeAnnotations(this.XMLPath, newX_size, height, newX, newY, oldX, oldY, top, left, outputPath, rootDirectoryName);
        }
Example #7
        public void FloodFillTest()
        {
            // Create a Square
            Point[] square = new Point[4];
            square[0] = new Point(25, 25);
            square[1] = new Point(75, 25);
            square[2] = new Point(75, 75);
            square[3] = new Point(25, 75);

            // Create an Original Image
            var original = new Image <Bgr, Byte>(100, 100, new Bgr(255, 0, 0));

            original.FillConvexPoly(square, new Bgr(Color.Green));

            // Create an Expected Output Image
            var expected = new Emgu.CV.Image <Bgr, Byte>(100, 100, new Bgr(Preprocessing.MASK_COLOR));

            expected.FillConvexPoly(square, new Bgr(Color.White));

            // Perform the Flood fill
            Console.WriteLine("Perform Flood Fill ... ");
            var actual = new Emgu.CV.Image <Bgr, Byte>(Preprocessing.FloodFill(original.ToBitmap(), 0, 0, 1, new Bgr(255, 0, 0)));

            bool identical = true;

            for (int ii = 0; ii < expected.Width; ii++)
            {
                for (int jj = 0; jj < expected.Height; jj++)
                {
                    identical = identical && (Utility.IsEqual(expected[jj, ii], actual[jj, ii]));
                }
            }

            Assert.IsTrue(identical);
        }
Example #8
        public static unsafe Emgu.CV.Image <Bgra, byte> GetImage(this SwapChain swapchain)
        {
            var bb = swapchain.GetBackBuffer <Texture2D>(0);

            SharpDX.Direct3D11.Device dev = bb.Device;

            var origDesc = bb.Description;

            // Create Staging texture CPU-accessible
            var textureDesc = new Texture2DDescription
            {
                CpuAccessFlags    = CpuAccessFlags.Read,
                BindFlags         = BindFlags.None,
                Format            = Format.B8G8R8A8_UNorm,
                Width             = origDesc.Width,
                Height            = origDesc.Height,
                OptionFlags       = ResourceOptionFlags.None,
                MipLevels         = 1,
                ArraySize         = 1,
                SampleDescription = { Count = 1, Quality = 0 },
                Usage             = ResourceUsage.Staging
            };

            var screenTexture = new Texture2D(dev, textureDesc);

            dev.ImmediateContext.CopyResource(bb, screenTexture);

            var mapSource = dev.ImmediateContext.MapSubresource(screenTexture, 0, MapMode.Read, 0);

            // Create bitmap
            var bitmap = new Emgu.CV.Image <Bgra, byte>(origDesc.Width, origDesc.Height);

            var sourcePtr = mapSource.DataPointer;

            int height = origDesc.Height;
            int width  = origDesc.Width;

            int stride = bitmap.MIplImage.WidthStep;
            var data   = bitmap.Data;

            fixed(byte *pData = data)
            {
                IntPtr destPtr = new IntPtr(pData);

                for (int y = 0; y < height; y++)
                {
                    // Copy a single line
                    Utilities.CopyMemory(destPtr, sourcePtr, stride);

                    // Advance pointers
                    sourcePtr = IntPtr.Add(sourcePtr, mapSource.RowPitch);
                    destPtr   = IntPtr.Add(destPtr, stride);
                }
            }

            dev.ImmediateContext.UnmapSubresource(screenTexture, 0);
            screenTexture.Dispose();

            return(bitmap);
        }
Example #9
        // The method takes a grayscale image and a threshold
        public static Emgu.CV.Image <Gray, Byte> ManualThreshold(int t, Emgu.CV.Image <Gray, Byte> oldImg)
        {
            Emgu.CV.Image <Gray, Byte> newImg = oldImg.Copy(); // work on a copy so the input image is not modified in place
            int w = oldImg.Width;
            int h = oldImg.Height;

            for (int i = 0; i < h; i++)
            {
                for (int j = 0; j < w; j++)
                {
                    Gray   a         = newImg[i, j];
                    double intensity = a.Intensity;
                    if (intensity > t)
                    {
                        Gray n = new Gray(255);
                        newImg[i, j] = n;
                    }
                    else
                    {
                        Gray n = new Gray(0);
                        newImg[i, j] = n;
                    }
                }
            }
            return(newImg);
        }
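For comparison, the same binarization can be done without the per-pixel loop by using Emgu CV's built-in ThresholdBinary; this is a minimal sketch added for illustration (ThresholdBinary returns a new image, leaving the input untouched):

        public static Emgu.CV.Image <Gray, Byte> ManualThresholdBuiltIn(int t, Emgu.CV.Image <Gray, Byte> oldImg)
        {
            // Pixels with intensity above t become 255, all others become 0.
            return oldImg.ThresholdBinary(new Gray(t), new Gray(255));
        }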
Example #10
        public static Emgu.CV.Image <Bgr, Byte> RemovePictures(string path, List <CropperViewModel> CropperList)
        {
            var image1   = new Emgu.CV.Image <Bgr, Byte>(path);
            var newImage = image1.Copy();

            if (CropperList != null)
            {
                foreach (var cropper in CropperList)
                {
                    //setPixelsWhite(ref image1, cropper);
                    for (int v = cropper.Y; v < cropper.Height + cropper.Y; v++)
                    {
                        for (int u = cropper.X; u < cropper.Width + cropper.X; u++)
                        {
                            newImage.Data[v, u, 0] = 0; //Set Pixel Color | fast way
                            newImage.Data[v, u, 1] = 0; //Set Pixel Color | fast way
                            newImage.Data[v, u, 2] = 0; //Set Pixel Color | fast way
                        }
                    }
                }
                return(newImage);
            }
            else
            {
                return(image1);
            }
        }
Example #11
        public static Emgu.CV.Image <Gray, Byte> Niblack(Emgu.CV.Image <Gray, Byte> oldImg)
        {
            Emgu.CV.Image <Gray, Byte> afterBinarization = new Emgu.CV.Image <Gray, Byte>(oldImg.Width, oldImg.Height, new Gray(0));
            XImgprocInvoke.NiBlackThreshold(oldImg, afterBinarization, 255, 0, 71, -1.5);
            MessageBox.Show("Wykonano.");

            return(afterBinarization);
        }
Example #12
 public static Emgu.CV.Image<Bgr, byte> ConvertPixbufToCVImage(Pixbuf pixbuf)
 {
     MemoryStream  stream = new MemoryStream();
     PixbufUtils.Save(pixbuf, stream, "jpeg", new string [] {"quality" }, new string [] { "90" });
     stream.Position = 0; // rewind before decoding; Bitmap reads from the current stream position
     System.Drawing.Bitmap bmp = new System.Drawing.Bitmap(stream);
     Emgu.CV.Image<Bgr,byte> cvimg = new Emgu.CV.Image<Bgr, byte>(bmp);
     return cvimg;
 }
        public Image <Bgr, Byte> imageToEmguImage(System.Drawing.Image imageIn)
        {
            Bitmap bmpImage = new Bitmap(imageIn);

            Emgu.CV.Image <Bgr, Byte> imageOut = new Emgu.CV.Image <Bgr, Byte>(bmpImage);

            return(imageOut);
        }
Example #14
 /// <summary>
 /// Update the background model
 /// </summary>
 /// <param name="image">The image that is used to update the background model</param>
 /// <param name="learningRate">Use -1 for default</param>
 public void Update(Image <Bgr, Byte> image, double learningRate)
 {
     if (_fgMask == null)
     {
         _fgMask = new Image <Gray, byte>(image.Size);
     }
     CvInvoke.CvBackgroundSubtractorUpdate(_ptr, image, _fgMask, learningRate);
 }
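A minimal usage sketch for the Update method above. The driver below is an assumption for illustration only: BackgroundModel is a placeholder name for whatever class owns Update(), and the older Emgu CV capture API (Capture.QueryFrame() returning Image<Bgr, Byte>, as used in Example #16 below) is assumed.

 public static void RunBackgroundSubtraction(BackgroundModel model, Capture capture, int frameCount)
 {
     // Feed each grabbed frame into the background model; -1 keeps the default learning rate.
     for (int i = 0; i < frameCount; i++)
     {
         Image <Bgr, Byte> frame = capture.QueryFrame();
         if (frame == null)
         {
             break; // end of stream
         }
         model.Update(frame, -1);
     }
 }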
Example #15
 private void InitNewSourceFile()
 {
     MainWnd.Title = System.IO.Path.GetFileName(m_Files[m_FileIndex]);
     m_RawImage    = new Image <Gray, byte>(m_Files[m_FileIndex]);
     m_bShowOver   = false;
     m_ResultImage = null;
     PrepareSource();
 }
Example #16
        private void ProcessFrame(object sender, EventArgs arg)
        {
            Image <Bgr, Byte> ImageFrame = capture.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            inputimByte = ImageFrame.Convert <Gray, Byte>();

            DrawMatche drawmatch = new DrawMatche();

            Stopwatch watch = Stopwatch.StartNew();



            if (state == 0)
            {
                Emgu.CV.Image <Bgr, byte> returnimByte = DrawMatche.Draw(baseimByte, inputimByte, state, out time, out mpoint);
                imageBox2.Image = returnimByte;
                if (mpoint > 7) //3
                {
                    isMatch         = true;
                    imageBox1.Image = returnimByte;
                    state           = 1;
                    label2.Text     = "matching : " + mpoint.ToString();
                    matchingTime   += time;
                    label6.Text     = matchingTime.ToString();
                }
            }

            else
            {
                Emgu.CV.Image <Bgr, byte> returnimByte = DrawMatche.Draw(baseimByte2, inputimByte, state, out time, out mpoint);
                imageBox2.Image = returnimByte;
                if (mpoint > 7) // 3
                {
                    isMatch         = true;
                    imageBox1.Image = returnimByte;
                    state           = 0;
                    label2.Text     = "matching : " + mpoint.ToString();
                    matchingTime   += time;

                    label3.Text  = "all match time = " + matchingTime.ToString();
                    matchingTime = 0;
                    isMatch      = false;
                    label6.Text  = matchingTime.ToString();
                }
            }

            watch.Stop();

            if (isMatch)
            {
                matchingTime += watch.ElapsedMilliseconds;
            }

            label1.Text = "matching : " + mpoint.ToString() + " ->" + isMatch.ToString();
        }
Example #17
        public Emgu.CV.Image <Bgra, Byte> GetImage()
        {
            Bitmap src = null;

            src = new Bitmap(baseFolder + $"{imgCount}_OUTPUT_IMG_HEIGHT_NG.bmp"); // Filename format: serialNumber_specifiedString_IMG_imageCategory*_overallVerdict.bmp
            Emgu.CV.Image <Bgra, Byte> img = src.ToImage <Bgra, Byte>();
            addLog("Succeeded in GetImage()", LogType.Info);
            return(img);
        }
Example #18
        public static Emgu.CV.Image <Gray, Byte> Otsu(Emgu.CV.Image <Gray, Byte> oldImg)
        {
            Emgu.CV.Image <Gray, Byte> afterBinarization = new Emgu.CV.Image <Gray, Byte>(oldImg.Width, oldImg.Height, new Gray(0));
            double th = CvInvoke.Threshold(oldImg, afterBinarization, 500, 255, Emgu.CV.CvEnum.ThresholdType.Otsu);

            MessageBox.Show("Wykonano.");

            return(afterBinarization);
        }
Example #19
        public static Image <Gray, byte> GetImage(List <int> hist)
        {
            var img = new Emgu.CV.Image <Gray, byte>(400, 200, new Gray(255));

            for (int i = 0; i < hist.Count; i++)
            {
                CvInvoke.Line(img, new Point(i, hist.Max() + 40), new Point(i, hist.Max() + 40 - hist[i]), new MCvScalar(0, 0, 0));
            }
            return(img);
        }
Example #20
        // CTOR
        public ImageHandler(string path)
        {
            Mat mat = CvInvoke.Imread(path, Emgu.CV.CvEnum.ImreadModes.Grayscale);

            _img   = mat.ToImage <Gray, Byte>();
            _rows  = _img.Rows;
            _cols  = _img.Cols;
            _path  = path;
            Matrix = LoadImageMatrix();
        }
Example #21
 private void openToolStripMenuItem_Click(object sender, EventArgs e)
 {
     if (openFileDialog.ShowDialog(this) != DialogResult.Cancel)
     {
         Emgu.CV.Image <Gray, Byte> Img = new Emgu.CV.Image <Gray, Byte>(openFileDialog.FileName);
         imgBox.Image = Img;
         this.Text    = openFileDialog.FileName;
         // Get the image data that is stored in the first pixels of the image?
     }
 }
Example #22
        private unsafe void ReaderThread()
        {
            IRMetaData irMD = new IRMetaData();

            while (this.shouldRun)
            {
                try
                {
                    this.context.WaitOneUpdateAll(this.ir);
                }
                catch (Exception)
                {
                }

                this.ir.GetMetaData(irMD);
                MapData <ushort> irMap = this.ir.GetIRMap();

                if (isRoiSet == false)
                {
                    defaultWidth  = irMap.XRes;
                    defaultHeight = irMap.YRes;
                }

                int w = irMap.XRes;
                int h = irMap.YRes;

                lock (this)
                {
                    int stride = w * 2;

                    if (stride % 2 != 0)
                    {
                        stride += (2 - (stride % 2));
                    }


                    if (roi.Size.Width != 0)
                    {
                        Emgu.CV.Image <Gray, UInt16> tmp = new Emgu.CV.Image <Gray, UInt16>(irMap.XRes, irMap.YRes, stride, ir.GetIRMapPtr());
                        tmp.ROI   = new Rectangle(roi.X, roi.Y, roi.Width, roi.Height);
                        grayImage = tmp.Copy();
                    }
                    else
                    {
                        grayImage = new Emgu.CV.Image <Gray, UInt16>(w, h, stride, ir.GetIRMapPtr());
                    }
                }

                if (FrameCaptureComplete != null)
                {
                    FrameCaptureComplete();
                }
            }
        }
Example #23
 public void drawOnImg(ref Emgu.CV.Image <Hsv, byte> img)
 {
     foreach (LineSegment2D segment in lineSegments)
     {
         img.Draw(segment, new Hsv((int)type, 240, 240), 2);
     }
     if (previousPosition != null)// does not work, previousPosition always has a value
     {
         img.Draw(new LineSegment2D(previousPosition, pos), new Hsv((int)type, 255, 255), 2);
     }
 }
Example #24
        public static void CropAndNormalizeObjects()
        {
            List <PSM4TxSample> samples = LoadSamples(@"\users\jie\projects\Intel\data\PSM4-Tx\20160722\Original");

            for (int i = 0; i < samples.Count; i++)
            {
                PSM4TxSample sample = samples[i];

                Emgu.CV.Image <Gray, byte> image = new Emgu.CV.Image <Gray, byte>(sample.imageFile);

                //{   //Aperture
                //    //Extends in X direction to make width 240 pixels
                //    double diff_x = APERTURE_ORIGINAL_WIDTH - sample.apertureW;
                //    //Extends in Y direction to make 2000 pixels
                //    double diff_y = APERTURE_ORIGINAL_HEIGHT - sample.apertureH;

                //    Rectangle rect = new Rectangle((int)(sample.apertureX - diff_x / 2 + 0.5), (int)(sample.apertureY - diff_y / 2 + 0.5), APERTURE_ORIGINAL_WIDTH, APERTURE_ORIGINAL_HEIGHT);
                //    image.ROI = rect;
                //    Emgu.CV.Image<Gray, byte> normalized = image.Resize(APERTURE_HOG_WIDTH, APERTURE_HOG_HEIGHT, Inter.Linear);

                //    string cropped_file = sample.imageFile.Replace("original", "Aperture");
                //    normalized.Save(cropped_file);
                //}

                //{   //arrayblock
                //    //Extends in X direction to make width 1200 pixels
                //    double diff_x = ARRAYBLOCK_ORIGINAL_WIDTH - sample.arrayblockW;
                //    //Extends in Y direction to make 1600 pixels
                //    double diff_y = ARRAYBLOCK_ORIGINAL_HEIGHT - sample.arrayblockH;

                //    Rectangle rect = new Rectangle((int)(sample.arrayblockX - diff_x / 2 + 0.5), (int)(sample.arrayblockY - diff_y / 2 + 0.5), ARRAYBLOCK_ORIGINAL_WIDTH, ARRAYBLOCK_ORIGINAL_HEIGHT);
                //    image.ROI = rect;
                //    Emgu.CV.Image<Gray, byte> normalized = image.Resize(ARRAYBLOCK_HOG_WIDTH, ARRAYBLOCK_HOG_HEIGHT, Inter.Linear);

                //    string cropped_file = sample.imageFile.Replace("original", "Arrayblock");
                //    normalized.Save(cropped_file);
                //}

                {   //Isolator
                    //Extends in X direction to make width 1000 pixels
                    double diff_x = ISOLATOR_ORIGINAL_WIDTH - sample.isolatorW;
                    //Extends in Y direction to make 1600 pixels
                    double diff_y = ISOLATOR_ORIGINAL_HEIGHT - sample.isolatorH;

                    Rectangle rect = new Rectangle((int)(sample.isolatorX - diff_x / 2 + 0.5), (int)(sample.isolatorY - diff_y / 2 + 0.5), ISOLATOR_ORIGINAL_WIDTH, ISOLATOR_ORIGINAL_HEIGHT);
                    image.ROI = rect;
                    Emgu.CV.Image <Gray, byte> normalized = image.Resize(ISOLATOR_HOG_WIDTH, ISOLATOR_HOG_HEIGHT, Inter.Linear);

                    string cropped_file = sample.imageFile.Replace("original", "Isolator");
                    normalized.Save(cropped_file);
                }
            }
        }
Example #25
 private static void setPixelsWhite(ref Emgu.CV.Image <Bgr, Byte> image, CropperViewModel cropper)
 {
     for (int v = cropper.Y; v < cropper.Y + cropper.Height; v++)
     {
         for (int u = cropper.X; u < cropper.X + cropper.Width; u++)
         {
             image.Data[v, u, 0] = 0; //Set Pixel Color | fast way
             image.Data[v, u, 1] = 0; //Set Pixel Color | fast way
             image.Data[v, u, 2] = 0; //Set Pixel Color | fast way
         }
     }
 }
Example #26
        public static void SaveAsJpg(this Emgu.CV.Image <Bgr, Byte> img, string filename, double quality = 85)
        {
            var encoderParams = new EncoderParameters(1);

            encoderParams.Param[0] = new EncoderParameter(System.Drawing.Imaging.Encoder.Quality, (long)quality);

            var jpegCodec = (from codec in ImageCodecInfo.GetImageEncoders()
                             where codec.MimeType == "image/jpeg"
                             select codec).Single();

            img.Bitmap.Save(filename, jpegCodec, encoderParams);
        }
Example #27
        /// <summary>
        /// Process rocks in the image.
        /// </summary>
        private void ProcessSand()
        {
            // TODO: Make image be saved in MyDocs/Pictures.
            if (this.inputImage == null)
            {
                return;
            }

            Thread workerThread = new Thread(() =>
            {
                /*
                 * string path = @"C:\Users\POLYGONTeam\Documents\GitHub\Androbot\Androbot\Androbot\bin\Debug\Images\2D_20160728170358.PNG";
                 */
                //Emgu.CV.Image<Bgr, byte> inpImg = new Emgu.CV.Image<Bgr, byte>(this.inputImage);
                Emgu.CV.Image <Bgr, byte> inpImg = new Emgu.CV.Image <Bgr, byte>(inputImage);

                Emgu.CV.Image <Gray, byte> water = inpImg.InRange(new Bgr(0, 100, 0), new Bgr(255, 255, 255));
                //TODO: Check whether we need a mask.
                //water = water.Add(mask);
                //water._Dilate(1);

                // Create the blobs.
                Emgu.CV.Cvb.CvBlobs blobs = new Emgu.CV.Cvb.CvBlobs();
                // Create blob detector.
                Emgu.CV.Cvb.CvBlobDetector dtk = new Emgu.CV.Cvb.CvBlobDetector();
                // Detect blobs.
                uint state = dtk.Detect(water, blobs);

                foreach (Emgu.CV.Cvb.CvBlob blob in blobs.Values)
                {
                    //Console.WriteLine("Center: X:{0:F3} Y:{1:F3}", blob.Centroid.X, blob.Centroid.Y);
                    //Console.WriteLine("{0}", blob.Area);
                    if (blob.Area >= 4500 && blob.Area < 34465)
                    {
                        //Console.WriteLine("{0}", blob.Area);
                        inpImg.Draw(new CircleF(blob.Centroid, 5), new Bgr(Color.Red), 2);
                        inpImg.Draw(blob.BoundingBox, new Bgr(Color.Blue), 2);
                    }
                }

                if (this.outputImage != null)
                {
                    this.outputImage.Dispose();
                }
                // Dump the image.
                this.outputImage = inpImg.ToBitmap();
                // Show the new image.
                this.pbMain.Image = this.FitImage(this.outputImage, this.pbMain.Size);
            });

            workerThread.Start();
        }
Example #28
 public static Emgu.CV.Image <Bgr, byte> FromFile()
 {
     Emgu.CV.Image <Bgr, byte> loadedImage = null;
     FileOp.LoadFromFile((s, path) =>
     {
         var mat = Emgu.CV.CvInvoke.Imread(path, Emgu.CV.CvEnum.ImreadModes.Color);
         if (mat != null && !mat.IsEmpty) // Imread returns an empty Mat when the file cannot be read
         {
             loadedImage = mat.ToImage <Bgr, byte>();
         }
     });
     return(loadedImage);
 }
Example #29
 private void Capture_ImageGrabbed(object sender, EventArgs e)
 {
     try
     {
         ImageBox img = new ImageBox();
         Emgu.CV.Image <Bgr, Byte> imagez = capture.QueryFrame().ToImage <Bgr, Byte>();
         //    img.Image = imagez.ToBitmap();
         //    img.Image = Emgu.CV.IImage(imagez);
     }
     catch (Exception)
     {
     }
 }
Example #30
        public Bitmap FindEdges(double cannyThreshold)
        {
            Bitmap bmp;

            Emgu.CV.Image <Gray, byte> edgeImage = new Emgu.CV.Image <Gray, byte>(1280, 1024);
            CvInvoke.Canny(img, edgeImage, cannyThreshold, cannyThreshold / 2.0);

            imgEdges = edgeImage;

            bmp = imgEdges.Bitmap;

            return(bmp);
        }
Example #31
        public void bilinearfillHoles(Form1 form1)
        {
            Emgu.CV.Image <Gray, ushort> filledsurface = sc.image;

            for (int i = 1; i < sc.image.Height - 1; i++)
            {
                form1.progressBar1.Value = (i * 100) / filledsurface.Height; // scale to 0-100; plain i / Height is integer division and stays at 0
                for (int j = 1; j < sc.image.Width - 1; j++)
                {
                    if (sc.image.Data[i, j, 0] == 0)
                    {
                        if (sc.image.Data[i + 1, j + 1, 0] != 0 && sc.image.Data[i - 1, j - 1, 0] != 0 && sc.image.Data[i + 1, j - 1, 0] != 0 && sc.image.Data[i - 1, j + 1, 0] != 0)
                        {
                            ushort a11 = sc.image.Data[i - 1, j - 1, 0];
                            ushort a12 = sc.image.Data[i + 1, j - 1, 0];
                            ushort a21 = sc.image.Data[i - 1, j + 1, 0];
                            ushort a22 = sc.image.Data[i + 1, j + 1, 0];

                            float r1 = a11 / 2 + a12 / 2;
                            float r2 = a21 / 2 + a22 / 2;
                            float p  = r1 / 2 + r2 / 2;
                            filledsurface.Data[i, j, 0] = (ushort)p;
                        }
                    }
                }
                for (int j = 1; j < sc.image.Width - 1; j++)
                {
                    form1.progressBar1.Value = (i * 100) / filledsurface.Height; // scale to 0-100 as above
                    if (sc.image.Data[i, j, 0] == 0)
                    {
                        if (sc.image.Data[i + 1, j, 0] != 0 && sc.image.Data[i - 1, j, 0] != 0 && sc.image.Data[i, j - 1, 0] != 0 && sc.image.Data[i, j + 1, 0] != 0)
                        {
                            ushort a11 = sc.image.Data[i - 1, j, 0];
                            ushort a12 = sc.image.Data[i, j + 1, 0];
                            ushort a21 = sc.image.Data[i, j - 1, 0];
                            ushort a22 = sc.image.Data[i + 1, j, 0];

                            float r1 = a11 / 2 + a12 / 2;
                            float r2 = a21 / 2 + a22 / 2;
                            float p  = r1 / 2 + r2 / 2;
                            filledsurface.Data[i, j, 0] = (ushort)p;
                        }
                    }
                }
            }
            form1.progressBar1.Value = 100;
            filledsurface.Save(form1.SavePath + "\\" + "surface_bilin_filled.png");
            filledsurface.Save(form1.SavePath + "\\" + "surface.png");
            form1.pictureBox1.Image = filledsurface.ToBitmap();
            sc.image = filledsurface;
        }
Example #32
        public static Emgu.CV.Image <Rgb, byte> DrawGrid(Emgu.CV.Image <Rgb, byte> source)
        {
            Image <Rgb, byte> imageToShow = source.Copy();

            for (int i = 0; i < source.Width; i += 32)
            {
                for (int j = 0; j < source.Height; j += 32)
                {
                    Rectangle match = new Rectangle(i, j, 32, 32);
                    imageToShow.Draw(match, new Rgb(Color.White), 1);
                }
            }
            return(imageToShow);
        }
Example #33
        public static void ApplyLut(ref double[,] source, out Emgu.CV.Image <Bgr, Byte> destination)
        {
            // As in ApplyColormap above: dimension 0 of the array is rows (y), dimension 1 is columns (x).
            destination = new Image <Bgr, Byte>(source.GetLength(1), source.GetLength(0));


            for (int y = 0; y < source.GetLength(0); y++)
            {
                for (int x = 0; x < source.GetLength(1); x++)
                {
                    var cId = Convert.ToInt32(source[y, x]);
                    destination[y, x] = new Bgr(Colors[cId, 0], Colors[cId, 1], Colors[cId, 2]);
                }
            }
        }
Example #34
        public Form3()
        {
            InitializeComponent();

            fetch_GeoData(dateTimePicker1.Value);
            fetch_ChartDate("Harian");
            comboBox1.Text = "Harian";
            Bitmap chosenLogoBmp = new Bitmap(Image.FromStream(System.Reflection.Assembly.GetEntryAssembly().GetManifestResourceStream("LogoDetectionFANET45.Resources.91.jpg")));
            Emgu.CV.Image<Bgr, Byte> chosenLogoEmgu = new Emgu.CV.Image<Bgr, Byte>(chosenLogoBmp);
            imageBox1.Image = chosenLogoEmgu;
            imageBox1.Size = new Size(106, 106);
            imageBox1.SizeMode = PictureBoxSizeMode.StretchImage;

            imageBox2.Image = chosenLogoEmgu;
            imageBox2.Size = new Size(106, 106);
            imageBox2.SizeMode = PictureBoxSizeMode.StretchImage;
        }
    public MarkerDetetection() {
      Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte> m = new Image<Bgr, byte>("marker2.bmp");
      Emgu.CV.Image<Gray, byte> gray = m.Convert<Gray, byte>();
      gray = gray.Resize(30, 30, INTER.CV_INTER_LINEAR);
      gray._ThresholdBinary(new Gray(50), new Gray(255.0));
      _marker = gray;
      _marker_size = _marker.Width;

      _dest = new PointF[] { 
            new PointF(0, 0),
            new PointF(0, _marker_size),
            new PointF(_marker_size, _marker_size),
            new PointF(_marker_size, 0)
            };

      _roi = new Image<Gray,byte>(_marker_size, _marker_size);
      _tmp = new Image<Gray, byte>(_marker_size, _marker_size);
    }
Example #36
        private void btnDiabloDetect_Click(object sender, EventArgs e)
        {
            StopSignDetector.MaskHueLow = (int)numericUpDown1.Value;
            StopSignDetector.MaskHueHigh = (int)numericUpDown2.Value;

            diabloMap = new Image<Bgr, byte>(new Bitmap(picbDiabloMap.Image));

            diabloModels = new Image<Bgr, byte>[3];
            diabloModels[0] = new Image<Bgr, byte>(new Bitmap(picbModel1.Image));
            diabloModels[1] = new Image<Bgr, byte>(new Bitmap(picbModel2.Image));
            diabloModels[2] = new Image<Bgr, byte>(new Bitmap(picbModel3.Image));

            List<Rectangle> detectedRectangles = new List<Rectangle>();
            List<Image<Gray, byte>> detectedImages = new List<Image<Gray, byte>>();

            for (var i = 0; i < diabloModels.Length; i++)
            {
                StopSignDetector detector = new StopSignDetector(diabloModels[2 - i]);

                detector.DetectStopSign(
                    diabloMap,
                    detectedImages,
                    detectedRectangles
                );

                picbDiabloMapMask.Image = StopSignDetector.GetRedPixelMask(diabloMap).ToBitmap();
            }

            using (Graphics gr = Graphics.FromImage(picbDiabloResult.Image))
            {
                Pen borderPen = new Pen(Color.Red, 1);

                for (var i = 0; i < detectedRectangles.Count; i++)
                {
                    gr.DrawImage(
                        detectedImages[i].ToBitmap(),
                        detectedRectangles[i].Location
                    );
                    gr.DrawRectangle(borderPen, detectedRectangles[i]);
                }
            }

            picbDiabloResult.Invalidate();
        }
Example #37
        public ImageProcessor(Emgu.CV.Image<Bgr, byte> source)
        {
            sourceImage = source;
            reducedNoiseImage = new Emgu.CV.Image<Bgr, byte>(sourceImage.Size);
            grayscaleImage = new Emgu.CV.Image<Gray, byte>(sourceImage.Size);
            normalizedImage = new Emgu.CV.Image<Gray, byte>(sourceImage.Size);

            // Blur the image to reduce noise
            Emgu.CV.CvInvoke.cvSmooth(sourceImage.Ptr, reducedNoiseImage.Ptr,
                Emgu.CV.CvEnum.SMOOTH_TYPE.CV_MEDIAN, 3, 0, 0, 0);

            // Convert the image to grayscale
            Emgu.CV.CvInvoke.cvCvtColor(reducedNoiseImage.Ptr, grayscaleImage.Ptr,
                Emgu.CV.CvEnum.COLOR_CONVERSION.CV_BGR2GRAY);

            // Normalize the image
            Emgu.CV.CvInvoke.cvNormalize(grayscaleImage.Ptr, normalizedImage.Ptr,
                0, 200, Emgu.CV.CvEnum.NORM_TYPE.CV_MINMAX, IntPtr.Zero);
        }
        public void ComputeFeatures(Segment s)
        {
            //Add the relative size
            NamedFeature f = new NamedFeature("RelativeSize");
            f.values.Add(s.points.Count() / (double)(imageWidth * imageHeight));
            s.features.Add(f);

            // Relative centroid
            PointF c = NormalizedCentroid(s);
            s.features.Add(new NamedFeature("RelativeCentroid", new List<double>{c.X, c.Y}));

            // One interior point
            PointF np = RandomNormalizedInteriorPoint(s);
            s.features.Add(new NamedFeature("OneInteriorPoint", new List<double> { np.X, np.Y }));

            //Radial distance
            s.features.Add(new NamedFeature("RadialDistance", new List<double>{Math.Sqrt(c.X*c.X+c.Y*c.Y)}));

            //Normalized Discrete Compactness http://www.m-hikari.com/imf-password2009/25-28-2009/bribiescaIMF25-28-2009.pdf
            //Find the segment id
            Point sp = s.points.First();
            int sidx = assignments[sp.X, sp.Y];

            //count number of perimeter edges
            int perimeter = 0;
            foreach (Point p in s.points)
            {
                for (int i = -1; i <= 1; i++)
                {
                    for (int j = -1; j <= 1; j++)
                    {
                        if (Math.Abs(i) == Math.Abs(j))
                            continue;
                        if (Util.InBounds(p.X + i, p.Y + j, imageWidth, imageHeight) && assignments[p.X + i, p.Y + j] != sidx)
                            perimeter++;
                        else if (!Util.InBounds(p.X + i, p.Y + j, imageWidth, imageHeight)) //edge pixels should be considered perimeter too
                            perimeter++;
                    }
                }
            }
            int n = s.points.Count();
            double CD = (4.0 * n - perimeter) / 2;
            double CDmin = n - 1;
            double CDmax = (4 * n - 4 * Math.Sqrt(n)) / 2;
            double CDN = (CD - CDmin) / Math.Max(1,(CDmax - CDmin));
            s.features.Add(new NamedFeature("NormalizedDiscreteCompactness", new List<double> { CDN }));

            //Add elongation (width/length normalized between 0-square to 1-long http://hal.archives-ouvertes.fr/docs/00/44/60/37/PDF/ARS-Journal-SurveyPatternRecognition.pdf
            PointF[] points = s.points.Select<Point, PointF>(p => new PointF(p.X, p.Y)).ToArray<PointF>();
            Emgu.CV.Structure.MCvBox2D box = Emgu.CV.PointCollection.MinAreaRect(points);

            PointF[] vertices = box.GetVertices();
            double elongation = 1 - Math.Min(box.size.Width + 1, box.size.Height + 1) / Math.Max(box.size.Width + 1, box.size.Height + 1);
            s.features.Add(new NamedFeature("Elongation", new List<double>{elongation}));

            //Add Hu shape moments, invariant to translation, scale, and rotation (not sure what each measure refers to intuitively though, or if there is an intuitive analog)
            //They may also do badly on noisy data however. See: http://hal.archives-ouvertes.fr/docs/00/44/60/37/PDF/ARS-Journal-SurveyPatternRecognition.pdf (called Invariant Moments)

            Bitmap regionBitmap = new Bitmap(imageWidth, imageHeight);
            Graphics g = Graphics.FromImage(regionBitmap);
            g.FillRectangle(new SolidBrush(Color.Black), 0, 0, imageWidth, imageHeight);
            foreach (Point p in s.points)
            {
                regionBitmap.SetPixel(p.X, p.Y, Color.White);
            }

            Emgu.CV.Image<Gray, byte> region = new Emgu.CV.Image<Gray, byte>(regionBitmap);

            MCvMoments moment = region.GetMoments(true);
            MCvHuMoments hu = moment.GetHuMoment();
            s.features.Add(new NamedFeature("HuMoments", new List<double> {hu.hu1, hu.hu2, hu.hu3,hu.hu4,hu.hu5, hu.hu6, hu.hu7 }));
            region.Dispose();
            regionBitmap.Dispose();
        }
        private void previewBtn_Click(object sender, RoutedEventArgs e)
        {
            if (previewBtn.Content.ToString() == "Preview Stream")
            {
                if (kinect_sensor != null)
                {
                    // disable all other buttons
                    DeactivateReplay();
                    gestureCaptureBtn.IsEnabled = false;
                    gestureRecognitionBtn.IsEnabled = false;
                    gestureReplayBtn.IsEnabled = false;
                    previewBtn.Content = "Stop Stream";
                    isStreaming = true;
                    kinect_data_manager.ifShowJointStatus = true;

                    frame_rec_buffer.Clear();

                    kinect_sensor.Start();
                }
            }
            else
            {
                if(kinect_sensor != null)
                {
                    kinect_sensor.Stop();

                    gestureCaptureBtn.IsEnabled = true;
                    gestureReplayBtn.IsEnabled = true;
                    gestureRecognitionBtn.IsEnabled = true;

                    isStreaming = false;
                    kinect_data_manager.ifShowJointStatus = false;

                    // save recorded frame to disk
                    if (frame_rec_buffer != null && saveVideoCheckBox.IsChecked.Value)
                    {
                        // create video writer
                        int fwidth = (int)groupBox3.Width + 20;
                        int fheight = (int)groupBox3.Height + 20;

                        SaveFileDialog saveDialog = new SaveFileDialog();
                        saveDialog.Filter = "avi files (*.avi)|*.avi";
                        saveDialog.FilterIndex = 2;
                        saveDialog.RestoreDirectory = true;

                        if (saveDialog.ShowDialog().Value)
                        {
                            statusbarLabel.Content = "Saving video...";

                            string videofile = saveDialog.FileName.ToString();
                            VideoWriter videoWriter = new VideoWriter(videofile, CvInvoke.CV_FOURCC('M', 'J', 'P', 'G'), 15,
                                fwidth, fheight, true);

                            if (videoWriter == null)
                                MessageBox.Show("Fail to save video. Check if codec has been installed.");
                            else
                            {
                                for (int i = 0; i < frame_rec_buffer.Count; i++)
                                {
                                    // write to video file
                                    Emgu.CV.Image<Bgr, byte> cvImg =
                                        new Emgu.CV.Image<Bgr, byte>(frame_rec_buffer[i] as Bitmap);

                                    videoWriter.WriteFrame<Bgr, byte>(cvImg);
                                }

                                videoWriter.Dispose();

                                statusbarLabel.Content = "Video saved to " + videofile;
                            }
                        }

                    }

                    frame_rec_buffer.Clear();

                    previewBtn.Content = "Preview Stream";

                    // save tracked elbow speed
                    //FileStream file = File.Open("d:\\temp\\test.txt", FileMode.Create);
                    //StreamWriter writer = new StreamWriter(file);
                    //for (int i = 0; i < motion_assessor.jointStatusSeq.Count; i++)
                    //    writer.WriteLine(motion_assessor.jointStatusSeq[i][JointType.HandRight].abs_speed);
                    //writer.Close();
                }
            }
        }
Example #40
        private unsafe void ReaderThread()
		{
            IRMetaData irMD = new IRMetaData();

			while (this.shouldRun)
			{
				try
				{
					this.context.WaitOneUpdateAll(this.ir);
				}
				catch (Exception)
				{
				}

                this.ir.GetMetaData(irMD);
                MapData<ushort> irMap = this.ir.GetIRMap();

                if(isRoiSet == false)
                {
                    defaultWidth = irMap.XRes;
                    defaultHeight = irMap.YRes;
                }

			    int w = irMap.XRes;
			    int h = irMap.YRes;

                lock (this)
                {
                    int stride = w*2;

                    if (stride%2 != 0)
                        stride += (2 - (stride%2));


                    if(roi.Size.Width != 0)
                    {
                        Emgu.CV.Image<Gray, UInt16> tmp = new Emgu.CV.Image<Gray, UInt16>(irMap.XRes, irMap.YRes, stride, ir.GetIRMapPtr());
                        tmp.ROI = new Rectangle(roi.X, roi.Y, roi.Width, roi.Height);
                        grayImage = tmp.Copy();
                    }
                    else
                    {
                        grayImage = new Emgu.CV.Image<Gray, UInt16>(w, h, stride, ir.GetIRMapPtr());
                    }
                }

               if (FrameCaptureComplete != null)
                   FrameCaptureComplete();
			}
        }
Example #41
 private void detenido()
 {
     conmutar(true);
     temporizador.Stop();
     controlImagen.Image = null;
     imagenCapturada = null;
     capturador.Dispose();
 }
Example #42
        public Emgu.CV.Image<Bgr, Byte> GetBitmap(BitmapSource transformedBitmapSource)
        {
            //System.Windows.Forms.PictureBox picture = _pictureBox;
            //Stream stm = File.Open("Waterfall.jpg", FileMode.Open, FileAccess.Read))
            //// Since we're not specifying a System.Windows.Media.Imaging.BitmapCacheOption, the pixel format
            //// will be System.Windows.Media.PixelFormats.Pbgra32.
            //System.Windows.Media.Imaging.BitmapSource bitmapSource = System.Windows.Media.Imaging.BitmapFrame.Create(
            //    stm, 
            //    System.Windows.Media.Imaging.BitmapCreateOptions.None, 
            //    System.Windows.Media.Imaging.BitmapCacheOption.OnLoad);

            //System.Windows.Media.Imaging.BitmapSource transformedBitmapSource = b;

            // Scale the image so that it will display similarly to the WPF Image.
            //double newWidthRatio = picture.Width / (double)bitmapSource.PixelWidth;
            //double newHeightRatio = ((picture.Width * bitmapSource.PixelHeight) / (double)bitmapSource.PixelWidth) / (double)bitmapSource.PixelHeight;

            //System.Windows.Media.Imaging.BitmapSource transformedBitmapSource = new System.Windows.Media.Imaging.TransformedBitmap(
            //    bitmapSource,
            //    new System.Windows.Media.ScaleTransform(newWidthRatio, newHeightRatio));

            int width = transformedBitmapSource.PixelWidth;
            int height = transformedBitmapSource.PixelHeight;
            int stride = width * PixelFormats.Bgr32.BitsPerPixel / 8;

            byte[] bits = new byte[height * stride];

            transformedBitmapSource.CopyPixels(bits, stride, 0);
            // The pixel buffer was copied with a Bgr32 (4 bytes per pixel) stride, so load it into a
            // 4-channel image first; a 3-channel Image<Bgr, Byte> cannot take this buffer directly.
            Emgu.CV.Image<Bgra, Byte> bitmap = new Emgu.CV.Image<Bgra, Byte>(width, height);
            bitmap.Bytes = bits;

            return bitmap.Convert<Bgr, Byte>();
            //unsafe
            //{
            //    fixed (byte* pBits = bits)
            //    {
            //        IntPtr ptr = new IntPtr(pBits);

            //        //System.Drawing.Bitmap bitmap = new System.Drawing.Bitmap(
            //        //    width,
            //        //    height,
            //        //    stride,
            //        //    System.Drawing.Imaging.PixelFormat.Format32bppPArgb,
            //        //    ptr);
                    
            //    }
            //}
        }
Example #43
        private void OnImageDone(object o, Gst.GLib.SignalArgs args)
        {
            pipeline.SetState (Gst.State.Null);

            Emgu.CV.Image <Bgr, byte> sourceImage =
            new Emgu.CV.Image<Bgr, byte> ("snapshot.png");

            // Image conversion
            ImageProcessor processor = new ImageProcessor (sourceImage);

            // Face detection
            var detector = new FaceDetector ("/usr/local/share/OpenCV/haarcascades/haarcascade_frontalface_alt2.xml",
            processor.NormalizedImage);

            Image<Gray, byte> grayFace = processor.GrayscaleImage;

            System.Drawing.Rectangle rect = new System.Drawing.Rectangle ();
            if (detector.processImage (grayFace, out rect)) {
                Title = "Face found";
                Photo = grayFace.GetSubRect(rect);
                //PreviewResult();
            } else {
                Title = "Face not found";
            }
        }
Example #44
        private void button1_Click_1(object sender, EventArgs e)
        {
            ObservedList.Clear();
            foreach (string dir in db.images) {

                Bitmap bmpImage = new Bitmap(Image.FromStream(System.Reflection.Assembly.GetEntryAssembly().GetManifestResourceStream(dir)));
                Emgu.CV.Image<Gray, byte> imageOut = new Emgu.CV.Image<Gray, byte>(bmpImage);
                ObservedList.Add(imageOut);
            }
            List<ImageBox> boxes =
                new List<ImageBox> {
                    imageBox10,
                    imageBox9,
                    imageBox8,
                    imageBox7,
                    imageBox6,
                    imageBox15,
                    imageBox14,
                    imageBox13,
                    imageBox12,
                    imageBox11,
                    imageBox20,
                    imageBox19,
                    imageBox18,
                    imageBox17,
                    imageBox16,
                    imageBox25,
                    imageBox24,
                    imageBox23,
                    imageBox22,
                    imageBox21,
                    imageBox30,
                    imageBox29,
                    imageBox28,
                    imageBox27,
                    imageBox26,
                    imageBox35,
                    imageBox34,
                    imageBox33,
                    imageBox32,
                    imageBox31,
                    imageBox40,
                    imageBox39,
                    imageBox38,
                    imageBox37,
                    imageBox36,
                    imageBox45,
                    imageBox44,
                    imageBox43,
                    imageBox42,
                    imageBox41,
                    imageBox50,
                    imageBox49,
                    imageBox48,
                    imageBox47,
                    imageBox46
                };
            for (int i = 0; i < db.images.Count; i++) {
                Bitmap bmpImage = new Bitmap(Image.FromStream(System.Reflection.Assembly.GetEntryAssembly().GetManifestResourceStream(db.images[i])));
                boxes[i].Image = new Emgu.CV.Image<Rgba, Byte>(bmpImage);
                boxes[i].SizeMode = PictureBoxSizeMode.StretchImage;
            }
        }
Example #45
        private void OnImageDone(object o, Gst.GLib.SignalArgs args)
        {
            Emgu.CV.Image <Bgr, byte> sourceImage =
            new Emgu.CV.Image<Bgr, byte> ("snapshot.png");

            // Image conversion
            ImageProcessor processor = new ImageProcessor (sourceImage);

            // Face detection
            var detector = new FaceDetector ("/usr/local/share/OpenCV/haarcascades/haarcascade_frontalface_alt2.xml",
            processor.NormalizedImage);

            Image<Gray, byte> drawedFace = processor.GrayscaleImage;

            System.Drawing.Rectangle rect = new System.Drawing.Rectangle();
            if (detector.processImage (drawedFace, out rect)) {
                Title = "Лицо найдено. Данные отправляются на сервер";
                var binding = new BasicHttpBinding ();
                var address = new EndpointAddress ("http://" + entryHost.Text + ":" + entryPort.Text);
                client = new CommandClient (binding, address);
                Console.WriteLine (client.authenticate (UserInfoManager.SerializeImage (processor.NormalizedImage.GetSubRect(rect).Clone())));
            //	Console.WriteLine (client.executeCommand ("dmesg", ""));

            } else {
                Title = "В видоискателе нет лица";
                authButton.Sensitive = true;
            }
            /*
            using (Image<Bgr, byte> img = new Image<Bgr, byte>(400, 200, new Bgr(255, 0, 0))) {
            MCvFont f = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_COMPLEX, 1.0, 1.0);
            Emgu.CV.CvInvoke.cvNamedWindow("w1");
            CvInvoke.cvShowImage("w1", img.Ptr);
            CvInvoke.cvWaitKey (0);
            //Destory the window
            CvInvoke.cvDestroyWindow("w1");
            } */
        }
Example #46
        public bool Initialize()
        {
            string labelFileName = _path + trainLabelFilename;
            string imageFileName = _path + trainImageFilename;
            int itemsCount;

            using (BinaryReader br = new BinaryReader(File.Open(labelFileName, FileMode.Open)))
            {
                int pos = 0;
                int length = (int)br.BaseStream.Length;
                int magicNumber = (br.ReadByte() << 24) + (br.ReadByte() << 16) + (br.ReadByte() << 8) + (br.ReadByte() << 0);
                if (magicNumber != 2049)
                    throw new Exception("Invalid format.");
                pos += 4;
                itemsCount = (br.ReadByte() << 24) + (br.ReadByte() << 16) + (br.ReadByte() << 8) + (br.ReadByte() << 0);
                if (_itemsToLoad > 0)
                    itemsCount = Math.Min(_itemsToLoad, itemsCount);
                pos += 4;
                _mnists = new MNistData[itemsCount];
                int index = 0;
                for (int item = 0; item < itemsCount; item++)
                {
                    _mnists[index].Label = br.ReadByte();
                    pos += 1;
                    if (pos > length)
                        throw new Exception("Unexpected end of file.");
                    index++;
                }
            }

            using (BinaryReader br = new BinaryReader(File.Open(imageFileName, FileMode.Open)))
            {
                int pos = 0;
                int length = (int)br.BaseStream.Length;
                int magicNumber = (br.ReadByte() << 24) + (br.ReadByte() << 16) + (br.ReadByte() << 8) + (br.ReadByte() << 0);
                if (magicNumber != 2051)
                    throw new Exception("Invalid format.");
                pos += 4;
                int imagesCount = (br.ReadByte() << 24) + (br.ReadByte() << 16) + (br.ReadByte() << 8) + (br.ReadByte() << 0);
                pos += 4;
                int rowsCount = (br.ReadByte() << 24) + (br.ReadByte() << 16) + (br.ReadByte() << 8) + (br.ReadByte() << 0);
                pos += 4;
                int columnsCount = (br.ReadByte() << 24) + (br.ReadByte() << 16) + (br.ReadByte() << 8) + (br.ReadByte() << 0);
                pos += 4;
                int index = 0;
                Emgu.CV.Image<Bgr, byte> image;
                byte pixel;

                for (int item = 0; item < itemsCount; item++)
                {
                    image = new Emgu.CV.Image<Bgr, byte>(columnsCount, rowsCount);
                    byte[, ,] arr = (byte[, ,])image.ManagedArray;
                    for (int row = 0; row < rowsCount; row++)
                    {
                        for (int column = 0; column < columnsCount; column++)
                        {
                            if (_reversePixel)
                                pixel = (byte)(255 - br.ReadByte());
                            else
                                pixel = br.ReadByte();
                            arr[row, column, 0] = pixel;
                            arr[row, column, 1] = pixel;
                            arr[row, column, 2] = pixel;
                            pos += 1;
                        }
                    }
                    _mnists[index].Image = image;
                    index++;
                }

                scrollBar1.Minimum = 1;
                scrollBar1.Maximum = itemsCount;
                scrollBar1.SmallChange = 1;
                scrollBar1.LargeChange = Math.Max(1, itemsCount / 10);
                scrollBar1.Value = 1;
                //pictureBox1.Width = columnsCount;
                //pictureBox1.Height = rowsCount;
                ItemIndex = 0;
            }
            return true;
        }
Example #47
 public FaceImagePos(Emgu.CV.Image<Bgr, byte> image, uint leftX, uint topY,uint width)
 {
     this.image = image;
     this.leftX = leftX;
     this.topY = topY;
     this.width = width;
 }
        public double findCosineSimilarity(string layer1Fn, int ln, string layer2Fn, int rn)
        {
            Emgu.CV.Image<Bgr, int> imgLayer1 = new Emgu.CV.Image<Bgr, int>(layer1Fn);
            Emgu.CV.Image<Bgr, int> imgLayer2 = new Emgu.CV.Image<Bgr, int>(layer2Fn);

            // int ratio = 25; // (int)Math.Max(imgLayer1.Width, imgLayer1.Height) / 100;
            int ratio = 25;

            if (dic == null)
            {
                dic = new Dictionary<int, Matrix<float>>();
            }

            Matrix<float> l;
            if (dic.ContainsKey(ln))
            {
                l = dic[ln];
            }
            else
            {
                l = imageToMatrix(imgLayer1, ratio, ln);
                dic.Add(ln, l);
            }

            Matrix<float> r;
            if (dic.ContainsKey(rn))
            {
                r = dic[rn];
            }
            else
            {
                r = imageToMatrix(imgLayer2, ratio, rn);
                dic.Add(rn, r);
            }

            // System.Diagnostics.Debug.WriteLine(ln + "," + rn + "->" + unWightedHammingCompareFeatureVectors(l, r) + Environment.NewLine);
            // System.Diagnostics.Debug.WriteLine(ln + "," + rn + "->" + WightedHammingCompareFeatureVectors(l, r) + Environment.NewLine);

            // Combine cosine similarity with the weighted Hamming distance; the +1 keeps the divisor non-zero.
            double result = CosineCompareFeatureVectors(l, r) / (WightedHammingCompareFeatureVectors(l, r) + 1);
            return result; // *-1 + 1;
            // return EMDCompare(l, r);

            // Unsuccessful attempt at earth mover's distance:
            //double compareResult = 0;
            //try
            //{
            //    compareResult = CvInvoke.cvCalcEMD2(l.Ptr, r.Ptr, Emgu.CV.CvEnum.DIST_TYPE.CV_DIST_L2, null, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero);
            //}
            //catch (Exception ex)
            //{
            //    int k = 0;
            //}
            //return compareResult;
        }
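CosineCompareFeatureVectors and the Hamming-based comparers are defined elsewhere and not shown in this example. For reference, a minimal cosine-similarity sketch over two equally-sized Matrix<float> feature vectors could look like the following (an illustration, not the original implementation):

        private static double CosineCompareFeatureVectors(Matrix<float> a, Matrix<float> b)
        {
            // cos(a, b) = dot(a, b) / (|a| * |b|), accumulated over every matrix element.
            double dot = 0.0, normA = 0.0, normB = 0.0;
            for (int i = 0; i < a.Rows; i++)
            {
                for (int j = 0; j < a.Cols; j++)
                {
                    dot   += a[i, j] * b[i, j];
                    normA += a[i, j] * a[i, j];
                    normB += b[i, j] * b[i, j];
                }
            }
            return dot / (Math.Sqrt(normA) * Math.Sqrt(normB) + 1e-12); // epsilon guards against all-zero vectors
        }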
Beispiel #49
0
        private void workspaceViewer1_MouseClick(object sender, MouseEventArgs e)
        {
            List<strokes> strokesForOneWord = new List<strokes>();
            if (e.Button == System.Windows.Forms.MouseButtons.Left)
            {
                oneStroke.listPoints.Add(new Point((int)(e.X / this.workspaceViewer1.Zoom), (int)(e.Y / this.workspaceViewer1.Zoom)));
                DrawLineOnImage(Color.Red);
                toolStripStatusLabel1.Text = "开始画笔画";
                nFirstLeftClick = true;
            }
            else if (e.Button == System.Windows.Forms.MouseButtons.Right && !IsControlDown()
                && !nFirstLeftClick)
            {
                MessageBox.Show("请选择笔画");
            }
            else if (e.Button == System.Windows.Forms.MouseButtons.Right && !IsControlDown()
                && nFirstLeftClick)
            {
                //Point point = new Point(e.X, e.Y);
                // prepare to select the property
                SelectProperty property = new SelectProperty();
                property.FormClosed += property_FormClosed;
                Rectangle rec = new Rectangle(e.X, e.Y, 150, 300);
                property.Show(dockPanel1, rec);

                //contextMenuStrip1.Show(point);
                //oneStroke.strProperty = property
                toolStripStatusLabel1.Text = "正在画笔画中……";

            }
            else if (IsControlDown() && e.Button == System.Windows.Forms.MouseButtons.Right
            && !nSecondRightClick)
            {
                MessageBox.Show("请选择笔画属性");
            }
            else if (IsControlDown() && e.Button == System.Windows.Forms.MouseButtons.Right
                && nSecondRightClick)
            {
                Emgu.CV.Image<Bgr, Byte> img_lines = new Emgu.CV.Image<Bgr, byte>(this.workspaceViewer1.Image.ToBitmap());
                Emgu.CV.Image<Bgr, Byte> img = new Emgu.CV.Image<Bgr, byte>(m_fileName);
                oneHanzi.nXScrollOffset = nScrollXOffset;
                oneHanzi.nYScrollOffset = nScrollYOffset;
                //Rectangle rec = oneHanzi.getBoundingRectangle();
                Rectangle rec = new Rectangle(0, 0, img.Width, img.Height);
                img_lines.ROI = rec;
                img.ROI = rec;
                Emgu.CV.Image<Bgr, Byte> crop = new Emgu.CV.Image<Bgr, Byte>(rec.Width, rec.Height);
                Emgu.CV.Image<Bgr, Byte> crop2 = new Emgu.CV.Image<Bgr, Byte>(rec.Width, rec.Height);
                CvInvoke.cvCopy(img, crop, IntPtr.Zero);
                CvInvoke.cvCopy(img_lines, crop2, IntPtr.Zero);
                oneHanzi.wordimage = crop;
                oneHanzi.wordimageWithLines = crop2;
                oneHanzi.save(m_fileName);
                oneHanzi.clear();
                toolStripStatusLabel1.Text = "存储完毕";
                nFirstLeftClick = false;
                nSecondRightClick = false;
                if (File.Exists(".\\Temp.txt"))
                {
                    File.Delete(".\\Temp.txt");
                }
                imageBackup = (AtalaImage)this.workspaceViewer1.Image.Clone();
                //crop.Save("E:\\ww.jpg");
            }
            this.workspaceViewer1.Refresh();
        }
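In the Control + right-click branch above, setting Image.ROI before CvInvoke.cvCopy restricts the copy to that rectangle (here the full image, though the commented-out getBoundingRectangle call suggests a tighter crop was intended). With Emgu's generic Image API the same crop can be written more compactly; a short sketch, not part of the original handler:

        // img is an Image<Bgr, byte>, rec a Rectangle inside it.
        img.ROI = rec;
        Image<Bgr, byte> crop = img.Copy(); // Copy() honours the current ROI, so crop has rec's size
        img.ROI = Rectangle.Empty;          // reset the ROI when done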
Beispiel #50
0
 public void clear()
 {
     this.wordimage = null;
     this.listStrokes.Clear();
     this.nXOffset = 0;
     this.nYOffset = 0;
 }
Beispiel #51
0
        private void btnDiabloInit_Click_1(object sender, EventArgs e)
        {
            StopSignDetector.MaskHueLow = (int)numericUpDown1.Value;
            StopSignDetector.MaskHueHigh = (int)numericUpDown2.Value;

            diabloModels = new Image<Bgr, byte>[3];
            diabloModels[0] = new Image<Bgr, byte>(new Bitmap(picbModel1.Image));
            diabloModels[1] = new Image<Bgr, byte>(new Bitmap(picbModel2.Image));
            diabloModels[2] = new Image<Bgr, byte>(new Bitmap(picbModel3.Image));

            picbModelMask1.Image = StopSignDetector.GetRedPixelMask(diabloModels[0]).ToBitmap();
            picbModelMask2.Image = StopSignDetector.GetRedPixelMask(diabloModels[1]).ToBitmap();
            picbModelMask3.Image = StopSignDetector.GetRedPixelMask(diabloModels[2]).ToBitmap();
        }
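StopSignDetector.GetRedPixelMask is not shown in this example; since the handler configures MaskHueLow and MaskHueHigh first, it presumably thresholds the hue channel of the model images. A rough sketch of such a mask, under that assumption and with an arbitrary saturation cut-off:

        public static Image<Gray, byte> GetRedPixelMask(Image<Bgr, byte> image)
        {
            using (Image<Hsv, byte> hsv = image.Convert<Hsv, byte>())
            {
                Image<Gray, byte>[] channels = hsv.Split(); // [0] = hue, [1] = saturation, [2] = value
                // Keep pixels whose hue lies inside the configured band and which are saturated enough to read as red.
                Image<Gray, byte> hueMask = channels[0].InRange(new Gray(MaskHueLow), new Gray(MaskHueHigh));
                Image<Gray, byte> satMask = channels[1].ThresholdBinary(new Gray(80), new Gray(255));
                return hueMask.And(satMask);
            }
        }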
Beispiel #52
0
 private void btnDiabloInit_Click(object sender, EventArgs e)
 {
     diabloModels = new Image<Bgr, byte>[3];
     diabloModels[0] = new Image<Bgr, byte>(new Bitmap(picbModel1.Image));
     diabloModels[1] = new Image<Bgr, byte>(new Bitmap(picbModel2.Image));
     diabloModels[2] = new Image<Bgr, byte>(new Bitmap(picbModel3.Image));
 }
Beispiel #53
0
 private void tabpDiabloDetection_Click(object sender, EventArgs e)
 {
     diabloMap = new Image<Bgr, byte>(new Bitmap(picbDiabloMap.Image));
     List<Rectangle> diabloDetectedRectangles = new List<Rectangle>();
     List<Image<Gray, byte>> diabloDetectedImages = new List<Image<Gray, byte>>();
 }
        public int apply(string fileName, string output)
        {
            int counter = 0;

            Emgu.CV.Image<Bgr, Byte> imgS = new Emgu.CV.Image<Bgr, Byte>(fileName);

            Emgu.CV.Image<Gray, Byte> img = new Emgu.CV.Image<Gray, Byte>(fileName);

            //Emgu.CV.Image<Gray, Byte> imgGray = new Image<Gray, byte>(img.Width, img.Height);
            //CvInvoke.cvCvtColor(img, imgGray, COLOR_CONVERSION.BGR2GRAY);

            int thresh = 1;
            int max_thresh = 255;
            img = img.ThresholdBinary(new Gray(thresh), new Gray(max_thresh));

            img.Save(output.Replace(".", "_binary."));

            Contour<Point> contur = img.FindContours(Emgu.CV.CvEnum.CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE, Emgu.CV.CvEnum.RETR_TYPE.CV_RETR_CCOMP);
            Emgu.CV.CvInvoke.cvDrawContours(imgS, contur, new MCvScalar(0, 0, 255), new MCvScalar(0, 0, 255), 1, 1, LINE_TYPE.EIGHT_CONNECTED, new Point(0, 0));

            contur = img.FindContours(Emgu.CV.CvEnum.CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE, Emgu.CV.CvEnum.RETR_TYPE.CV_RETR_CCOMP);

            // Walk the top-level contour chain via HNext to count the contours
            // (note: if exactly one contour is found, the loop never runs and counter stays 0).
            while (contur != null && contur.HNext != null)
            {
                if (counter == 0) { counter++; }

                contur = contur.HNext;
                counter++;
            }

            MCvFont font = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_SIMPLEX, 0.8f, 0.8f);
            MCvScalar color = new MCvScalar(255, 255, 255);

            CvInvoke.cvPutText(imgS, "counter:" + counter, new Point(10, 20), ref font, color);

            imgS.Save(output);

            return counter;
        }
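The example above uses the Emgu CV 2.x API (Contour<Point>, cvDrawContours, MCvFont). For reference, the counting and annotation steps could be sketched with the 3.x+ wrapper roughly as follows; the enum and class names belong to the newer API, and contours.Size counts every contour returned rather than only the chain walked above:

        // Assumes: using Emgu.CV.Util; using Emgu.CV.CvEnum;
        using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
        {
            CvInvoke.FindContours(img, contours, null, RetrType.Ccomp, ChainApproxMethod.ChainApproxSimple);
            int count = contours.Size;
            CvInvoke.DrawContours(imgS, contours, -1, new MCvScalar(0, 0, 255));
            CvInvoke.PutText(imgS, "counter:" + count, new Point(10, 20),
                             FontFace.HersheySimplex, 0.8, new MCvScalar(255, 255, 255));
        }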
Beispiel #55
-1
 // Timer tick: grab a frame from the capture device and save it with a zero-padded sequence number.
 private void temporizador_Tick(object sender, EventArgs e)
 {
     string sufijo = "";
     string cadAux = "";
     sufijo = id.ToString();
     cadAux = sufijo.PadLeft(10, '0');                  // pad the id to 10 digits, e.g. "0000000042"
     imagenCapturada = capturador.QueryFrame();         // grab the next frame
     imagenCapturada.Save(rutaAlmacenamiento + "\\" + nombre + cadAux + ".jpg");
     id++;
     controlImagen.Image = imagenCapturada;             // show the captured frame in the UI
 }
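The handler assumes a capture device and a timer have been wired up elsewhere. A minimal sketch of that setup, with an arbitrary camera index and interval (the field names are taken from the snippet above):

 capturador = new Capture(0);            // open the default camera (Emgu.CV.Capture, 2.x API)
 temporizador.Interval = 500;            // grab and save a frame every 500 ms
 temporizador.Tick += temporizador_Tick;
 temporizador.Start();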
        public void generateOutput(List<KeyValuePair<int, int>> pairList, int bestK, string dir, string outPutDir,string fileFormat)
        {
            // Find mutual pairs: entries (i, j) for which the list also contains (j, i).
            List<KeyValuePair<int, int>> blendList = new List<KeyValuePair<int, int>>(bestK);
            List<int> lonelyList = new List<int>(bestK);

            for (int i = 0; i < pairList.Count; i++)
            {
                for (int j = 0; j < pairList.Count; j++)
                {
                    if (pairList[i].Key == pairList[j].Value && pairList[j].Key == pairList[i].Value)
                    {
                        if (!(blendList.Contains(pairList[i]) || blendList.Contains(pairList[j])))
                        {
                            blendList.Add(pairList[i]);
                        }
                    }
                }
            }

            for (int i = 0; i < pairList.Count; i++)
            {
                bool key = true;
                for (int j = 0; j < blendList.Count; j++)
                {
                    if (pairList[i].Key == blendList[j].Key || pairList[i].Key == blendList[j].Value)
                    {
                        key = false;
                    }
                }

                if (key)
                {
                    if (!lonelyList.Contains(pairList[i].Key))
                    {
                        lonelyList.Add(pairList[i].Key);
                    }
                }
            }

            // Copy layers that have no mutual partner to the output directory unchanged.
            for (int i = 0; i < lonelyList.Count; i++)
            {
                string fileName = String.Format(fileFormat, lonelyList[i]);
                System.IO.File.Copy(dir + fileName, outPutDir + fileName, true);
            }

            // Blend each mutual pair by pixel-wise addition and write the result to the output directory.
            for (int j = 0; j < blendList.Count; j++)
            {
                string fileName1 = String.Format(fileFormat, blendList[j].Key);
                Emgu.CV.Image<Bgr, int> img1 = new Emgu.CV.Image<Bgr, int>(dir + fileName1);

                string fileName2 = String.Format(fileFormat, blendList[j].Value);
                Emgu.CV.Image<Bgr, int> img2 = new Emgu.CV.Image<Bgr, int>(dir + fileName2);

                string fileName3 = String.Format(fileFormat, blendList[j].Key + "_" + blendList[j].Value);

                Emgu.CV.Image<Bgr, int> img3 = new Emgu.CV.Image<Bgr, int>(img1.Width, img1.Height);
                Emgu.CV.CvInvoke.cvAdd(img1, img2, img3, IntPtr.Zero);
                img3.Save(outPutDir + fileName3);

            }
        }
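A hypothetical call, assuming pairList holds (layer index, best-match index) pairs produced earlier and fileFormat is a composite format string such as "layer_{0}.png" (all values here are illustrative):

        var pairs = new List<KeyValuePair<int, int>>
        {
            new KeyValuePair<int, int>(0, 1), // layer 0's best match is layer 1...
            new KeyValuePair<int, int>(1, 0), // ...and vice versa, so layers 0 and 1 are blended
            new KeyValuePair<int, int>(2, 0), // layer 2 has no mutual partner, so it is copied unchanged
        };
        generateOutput(pairs, 2, @"C:\layers\", @"C:\out\", "layer_{0}.png");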