private void ImportPersonEnter_DragDrop(object sender, DragEventArgs e)
        {
            // Handles a drag-and-drop onto the import control.
            // A dropped .jpg file is shown in picTargetPerson and scanned for a
            // face (the first detected face pre-seeds drawRectangle); a dropped
            // directory is opened via OpenExisted. Anything else is ignored.
            if (!e.Data.GetDataPresent(DataFormats.FileDrop))
            {
                return;
            }

            string[] files     = (string[])e.Data.GetData(DataFormats.FileDrop);
            string   extension = System.IO.Path.GetExtension(files[0]);

            // Fix: compare the extension case-insensitively so ".JPG"/".Jpg"
            // are accepted too (the original used a case-sensitive compare).
            if (string.Equals(extension, ".jpg", StringComparison.OrdinalIgnoreCase))
            {
                // Fix: the original called InitCotrol(true) twice; once suffices.
                InitCotrol(true);

                Image img = Damany.Util.Extensions.MiscHelper.FromFileBuffered(files[0]);
                img.Tag = files[0];
                this.picTargetPerson.Image = img;

                using (var ipl = IplImage.FromFile(files[0]))
                {
                    var facesRect = ipl.LocateFaces(searcher);

                    if (facesRect.Length > 0)
                    {
                        // Pre-select the first detected face for the user.
                        drawRectangle = facesRect[0].ToRectangle();
                    }
                }
            }
            else if (System.IO.Directory.Exists(files[0]))
            {
                this.OpenExisted(files[0]);
            }
        }
Ejemplo n.º 2
0
 public IplImage GetIpl()
 {
     // Lazily loads the image from ImageFilePath and restricts it to the
     // face region. Subsequent calls return the cached instance unchanged.
     if (_ipl != null)
     {
         return _ipl;
     }

     _ipl     = IplImage.FromFile(ImageFilePath);
     _ipl.ROI = FaceRect;
     return _ipl;
 }
Ejemplo n.º 3
0
 private static void readImage3()
 {
     // Loads the sample image without conversion (this load path uses
     // `System.Drawing.Bitmap` behind the scene) and displays it in a
     // window until a key is pressed.
     using (var img = IplImage.FromFile(@"..\..\images\ocv02.jpg", LoadMode.Unchanged))
     using (var window = new CvWindow("window"))
     {
         window.Image = img;
         Cv.WaitKey();
     }
 }
        private void btnOpenMappingImage_Click(object sender, EventArgs e)
        {
            // Lets the user pick a mapping image, loads it preserving its
            // colour format, and shows it in pictureBox1.
            // Fix: the OpenFileDialog is IDisposable and was never disposed.
            using (OpenFileDialog dialog = new OpenFileDialog())
            {
                if (dialog.ShowDialog() != System.Windows.Forms.DialogResult.OK)
                {
                    return;
                }

                //ColorImage.SetZero();
                mappingimage = IplImage.FromFile(dialog.FileName, LoadMode.AnyColor);

                pictureBox1.Image = mappingimage.ToBitmap();
            }
        }
Ejemplo n.º 5
0
        static void Main(string[] args)
        {
            // Text-region detection demo: grayscale the source, thicken the
            // (dark) glyphs, binarize with Otsu, then box every connected
            // component of plausible text proportions on the original image.

            // Load the source image.
            var src = IplImage.FromFile("source.jpg");

            // Convert to a single-channel grayscale image.
            var gray = Cv.CreateImage(src.Size, BitDepth.U8, 1);

            Cv.CvtColor(src, gray, ColorConversion.BgrToGray);

            // Erode the white background with an asymmetric 5x2 kernel — this
            // effectively dilates the dark text, with different growth in x/y.
            var kernel = Cv.CreateStructuringElementEx(5, 2, 1, 1, ElementShape.Rect);

            Cv.Erode(gray, gray, kernel, 2);

            // Binarize (inverted) with an automatically chosen Otsu threshold.
            Cv.Threshold(gray, gray, 0, 255, ThresholdType.BinaryInv | ThresholdType.Otsu);

            // Find connected components; FindContours yields only the first
            // contour — the rest are reached through the HNext chain below.
            var             storage = Cv.CreateMemStorage();
            CvSeq<CvPoint>  contour = null;

            Cv.FindContours(gray, storage, out contour, CvContour.SizeOf, ContourRetrieval.CComp, ContourChain.ApproxSimple);
            var color = new CvScalar(0, 0, 255);

            while (contour != null)
            {
                // Bounding rectangle of this connected region.
                var rect = Cv.BoundingRect(contour);

                // Reject regions that are too short or too narrow to be text;
                // draw the surviving rectangles onto the source image.
                if (rect.Height > 10 && (rect.Width * 1.0 / rect.Height) > 0.2)
                {
                    Cv.DrawRect(src, rect, color);
                }

                // Advance to the next connected component.
                contour = contour.HNext;
            }
            Cv.ReleaseMemStorage(storage);

            // Show the result.
            Cv.ShowImage("Result", src);
            Cv.WaitKey();
            Cv.DestroyAllWindows();

            // Fix: release the native images (the original leaked src and gray).
            Cv.ReleaseImage(gray);
            Cv.ReleaseImage(src);
        }
Ejemplo n.º 6
0
        public void Test()
        {
            // Rough benchmark: run face detection over every .jpg in G:\pic
            // and report the average wall-clock time per picture.
            var faceSearcher = new FaceSearchWrapper.FaceSearch();

            int count = 0;
            var timer = new System.Diagnostics.Stopwatch();

            timer.Start();

            foreach (var file in System.IO.Directory.EnumerateFiles(@"G:\pic", "*.jpg"))
            {
                // Fix: dispose each IplImage — the original leaked native
                // memory for every file processed.
                using (var img = IplImage.FromFile(file))
                {
                    var rect  = new CvRect(0, 0, img.Width, img.Height);
                    var faces = faceSearcher.SearchFace(img, rect);
                    System.Diagnostics.Debug.WriteLine(faces.Length);
                }
                count++;
            }

            timer.Stop();

            // Fix: guard against an empty directory — the original divided by
            // zero when no .jpg files were found.
            if (count == 0)
            {
                System.Diagnostics.Debug.WriteLine("no pictures found");
                return;
            }

            var msPerPic = timer.ElapsedMilliseconds / count;

            System.Diagnostics.Debug.WriteLine("millisecond per picture: " + msPerPic);
        }
Ejemplo n.º 7
0
 public static IplImage GetImage2()
 {
     // Loads the second sample image from the working directory.
     var image = IplImage.FromFile("2.jpg");
     return image;
 }
Ejemplo n.º 8
0
        private double CalculateHistogramDistance(string jpg_path1, string jpg_path2)
        {
            // Computes a histogram distance between two JPEG files: each
            // channel is histogrammed into 256 bins, normalized to 10000, and
            // compared channel-by-channel with the Bhattacharyya metric; the
            // per-channel distances are combined as sqrt(sum of squares).
            // An OpenCV error during the comparison phase is logged and the
            // distance accumulated so far (possibly 0.0) is returned.
            int i, sch = 0;

            float[]   range_0 = { 0, 256 };
            float[][] ranges = { range_0 };
            double    tmp, dist = 0;
            IplImage  src_img1 = null, src_img2 = null;

            IplImage[] dst_img1 = new IplImage[4];
            IplImage[] dst_img2 = new IplImage[4];

            CvHistogram[] hist1 = new CvHistogram[4];
            CvHistogram   hist2 = null;

            try
            {
                src_img1 = IplImage.FromFile(jpg_path1, LoadMode.AnyDepth | LoadMode.AnyColor);

                // Allocate one single-channel work image per channel.
                sch = src_img1.NChannels;
                for (i = 0; i < sch; i++)
                {
                    dst_img1[i] = Cv.CreateImage(Cv.Size(src_img1.Width, src_img1.Height), src_img1.Depth, 1);
                }

                // Allocate the histogram structure (one dimension, 256 bins).
                int[] nHisSize = new int[1];
                nHisSize[0] = 256;
                hist1[0]    = Cv.CreateHist(nHisSize, HistogramFormat.Array, ranges, true);

                // Split a multi-channel input into per-channel planes.
                if (sch == 1)
                {
                    Cv.Copy(src_img1, dst_img1[0]);
                }
                else
                {
                    Cv.Split(src_img1, dst_img1[0], dst_img1[1], dst_img1[2], dst_img1[3]);
                }

                for (i = 0; i < sch; i++)
                {
                    Cv.CalcHist(dst_img1[i], hist1[i], false);
                    Cv.NormalizeHist(hist1[i], 10000);
                    // CopyHist allocates the next slot's histogram structure;
                    // its contents are overwritten by CalcHist next iteration.
                    if (i < 3)
                    {
                        Cv.CopyHist(hist1[i], ref hist1[i + 1]);
                    }
                }

                Cv.ReleaseImage(src_img1);
                src_img1 = null;   // mark released so the finally block skips it

                src_img2 = IplImage.FromFile(jpg_path2, LoadMode.AnyDepth | LoadMode.AnyColor);

                // Per-channel work images for the second input.
                for (i = 0; i < sch; i++)
                {
                    dst_img2[i] = Cv.CreateImage(Cv.Size(src_img2.Width, src_img2.Height), src_img2.Depth, 1);
                }

                // Allocate the comparison histogram (same layout as hist1).
                nHisSize[0] = 256;
                hist2       = Cv.CreateHist(nHisSize, HistogramFormat.Array, ranges, true);

                // Split a multi-channel input into per-channel planes.
                if (sch == 1)
                {
                    Cv.Copy(src_img2, dst_img2[0]);
                }
                else
                {
                    Cv.Split(src_img2, dst_img2[0], dst_img2[1], dst_img2[2], dst_img2[3]);
                }

                // Inner try keeps the original catch scope: only errors in the
                // comparison phase are swallowed; load/setup errors propagate.
                try
                {
                    dist = 0.0;

                    // Histogram each channel of the second image, normalize,
                    // and accumulate the squared Bhattacharyya distances.
                    for (i = 0; i < sch; i++)
                    {
                        Cv.CalcHist(dst_img2[i], hist2, false);
                        Cv.NormalizeHist(hist2, 10000);
                        tmp   = Cv.CompareHist(hist1[i], hist2, HistogramComparison.Bhattacharyya);
                        dist += tmp * tmp;
                    }
                    dist = Math.Sqrt(dist);
                }
                catch (OpenCVException ex)
                {
                    Console.WriteLine("Error : " + ex.Message);
                }
            }
            finally
            {
                // Fix: release every native resource. The original leaked all
                // per-channel images and the hist1 histograms on every call,
                // plus hist2 and src_img2 whenever the comparison threw.
                for (i = 0; i < 4; i++)
                {
                    if (dst_img1[i] != null) Cv.ReleaseImage(dst_img1[i]);
                    if (dst_img2[i] != null) Cv.ReleaseImage(dst_img2[i]);
                    if (hist1[i] != null)    Cv.ReleaseHist(hist1[i]);
                }
                if (hist2 != null)    Cv.ReleaseHist(hist2);
                if (src_img1 != null) Cv.ReleaseImage(src_img1);
                if (src_img2 != null) Cv.ReleaseImage(src_img2);
            }

            return(dist);
        }
Ejemplo n.º 9
0
 public static IplImage LoadIntoIpl(this string path)
 {
     // Extension helper: opens the file at `path` as an IplImage.
     var loaded = IplImage.FromFile(path);
     return loaded;
 }
Ejemplo n.º 10
0
        static void Main(string[] args)
        {
            // Background-subtraction demo: each video frame is converted to
            // grayscale and diffed against a static background image; the
            // difference is thresholded and eroded, blobs of plausible area
            // are boxed in red, and both views are shown at the video's FPS.
            CvCapture videoCapture = null;

            try
            {
                videoCapture = CvCapture.FromFile(FILE_NAME);
            }
            catch (Exception e)
            {
                Console.WriteLine("Unable to open file {0}", FILE_NAME);
                Console.WriteLine(e.ToString());
                Console.ReadKey();
                // Fix: the original fell through here and dereferenced the
                // null capture below, crashing with NullReferenceException.
                return;
            }

            Cv.NamedWindow(MAIN_WINDOW_NAME, WindowMode.AutoSize);

            // Derive the inter-frame delay from the stream's frame rate.
            double rate  = videoCapture.GetCaptureProperty(CvConst.CV_CAP_PROP_FPS);
            int    delay = (int)(1000 / rate);

            IplImage backgroundImage     = IplImage.FromFile(BACKDROUND_IMAGE_FILE_NAME);
            IplImage grayBackgroundImage = Cv.CreateImage(backgroundImage.Size, backgroundImage.Depth, 1);

            Cv.CvtColor(backgroundImage, grayBackgroundImage, ColorConversion.RgbToGray);

            Console.WriteLine("NChannels = " + backgroundImage.NChannels);
            Console.ReadKey();

            while (true)
            {
                // NOTE: frames returned by QueryFrame are owned by the capture
                // and must not be released by the caller.
                var currentOriginalFrame = videoCapture.QueryFrame();
                if (currentOriginalFrame == null)
                {
                    break;   // end of stream
                }

                IplImage grayOriginalFrame = Cv.CreateImage(currentOriginalFrame.Size, currentOriginalFrame.Depth, 1);
                Cv.CvtColor(currentOriginalFrame, grayOriginalFrame, ColorConversion.RgbToGray);
                IplImage differenceBetweenFrames = Cv.CreateImage(grayOriginalFrame.Size, grayOriginalFrame.Depth, 1);

                // Absolute difference against the background, then binarize
                // and erode once to suppress single-pixel noise.
                Cv.AbsDiff(grayOriginalFrame, grayBackgroundImage, differenceBetweenFrames);
                Cv.Threshold(differenceBetweenFrames, differenceBetweenFrames, 50, 255, ThresholdType.Binary);
                Cv.Erode(differenceBetweenFrames, differenceBetweenFrames);

                // Find blobs and keep only those within a plausible area range.
                CvBlobs blobs = new CvBlobs(differenceBetweenFrames);
                blobs.FilterByArea(300, 10000);

                var currentFrameWithRedRects = Cv.CreateImage(currentOriginalFrame.Size, currentOriginalFrame.Depth, currentOriginalFrame.NChannels);
                currentOriginalFrame.Copy(currentFrameWithRedRects);
                foreach (var cvBlob in blobs)
                {
                    Cv.Rectangle(currentFrameWithRedRects, cvBlob.Value.Rect, CvColor.Red, 4);
                }

                Console.WriteLine(blobs.Count);

                Cv.ShowImage(MAIN_WINDOW_NAME, currentFrameWithRedRects);
                Cv.ShowImage("Result", differenceBetweenFrames);
                Cv.WaitKey(delay);

                // Fix: release the per-frame images — the original leaked
                // three native images on every loop iteration.
                Cv.ReleaseImage(currentFrameWithRedRects);
                Cv.ReleaseImage(differenceBetweenFrames);
                Cv.ReleaseImage(grayOriginalFrame);
            }

            // Fix: release remaining native resources before exiting.
            Cv.ReleaseImage(grayBackgroundImage);
            Cv.ReleaseImage(backgroundImage);
            videoCapture.Dispose();
            Cv.DestroyAllWindows();
        }