Example No. 1

public frmPruebaMano()
        {
            InitializeComponent();

            detector = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);

            YCrCb_min = new Ycc(0, 131, 80);
            YCrCb_max = new Ycc(255, 185, 135);

            imgCaja = new ImageBox();
            imgCaja.Height = 306;
            imgCaja.Width = 430;
            imgCaja.Location = new System.Drawing.Point(12, 42);
            imgCaja.SizeMode = PictureBoxSizeMode.StretchImage;

            imgCaja2 = new ImageBox();
            imgCaja2.Height = 306;
            imgCaja2.Width = 430;
            imgCaja2.Location = new System.Drawing.Point(478, 42);
            imgCaja2.SizeMode = PictureBoxSizeMode.StretchImage;

            imgCaja.BorderStyle = BorderStyle.FixedSingle;
            imgCaja2.BorderStyle = BorderStyle.FixedSingle;

            this.Controls.Add(imgCaja);
            this.Controls.Add(imgCaja2);

            imgCaja.Show();
        }
Example No. 2
        public Form1()
        {
            InitializeComponent();
            //Y
            trackBar4.Maximum = 255;
            trackBar4.Minimum = 0;
            trackBar1.Maximum = 255;
            trackBar1.Minimum = 0;

            //Cb
            trackBar2.Maximum = 255;
            trackBar2.Minimum = 0;
            trackBar5.Maximum = 255;
            trackBar5.Minimum = 0;
            //Cr
            trackBar3.Maximum = 255;
            trackBar3.Minimum = 0;
            trackBar6.Maximum = 255;
            trackBar6.Minimum = 0;


            //detector = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);
            hsv_min   = new Hsv(0, 45, 0);
            hsv_max   = new Hsv(20, 255, 255);
            YCrCb_min = new Ycc(80, 133, 80);
            YCrCb_max = new Ycc(255, 173, 158);
            box       = new RotatedRect();

            DrawBox.MouseClick += new MouseEventHandler(Form1_MouseClick);
        }
Example No. 3
        public HandGeastureWindows()
        {
            InitializeComponent();
            //grabber = new Emgu.CV.Capture(@".\..\..\..\M2U00253.MPG");
            grabber = new Emgu.CV.Capture();
            grabber.QueryFrame();
            frameWidth  = grabber.Width;
            frameHeight = grabber.Height;
            detector    = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);
            hsv_min     = new Hsv(0, 45, 0);
            hsv_max     = new Hsv(20, 255, 255);
            YCrCb_min   = new Ycc(0, 131, 80);
            YCrCb_max   = new Ycc(255, 185, 135);
            box         = new MCvBox2D();
            ellip       = new Emgu.CV.Structure.Ellipse();

            //Application.Idle += new EventHandler(FrameGrabber);

            worker         = new BackgroundWorker();
            worker.DoWork += FrameGrabber;
            // Subscribe before the first run so the first completion is not missed;
            // the handler restarts the worker so frames are processed continuously.
            worker.RunWorkerCompleted += (object sender, RunWorkerCompletedEventArgs e) =>
            {
                worker.RunWorkerAsync();
            };
            worker.RunWorkerAsync();
        }
Example No. 4
        public HandRecognition()
        {
            InitializeComponent();
            //  grabber = new Emgu.CV.Capture(@"E:\HandGestureRecognition\M2U00253.mpg");
            try
            {
                grabber = new Emgu.CV.Capture();
            }
            catch (Exception e)
            {
                grabber = new Emgu.CV.Capture(@"E:\HandGestureRecognition\M2U00253.mpg");
            }
            grabber.QuerySmallFrame();
            frameWidth  = grabber.Width;
            frameHeight = grabber.Height;
            detector    = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);
            hsv_min     = new Hsv(0, 45, 0);
            hsv_max     = new Hsv(20, 255, 255);
            YCrCb_min   = new Ycc(0, 131, 80);
            YCrCb_max   = new Ycc(255, 185, 135);
            box         = new MCvBox2D();
            ellip       = new Ellipse();

            Application.Idle += new EventHandler(FrameGrabber);
        }
Example No. 5

        public HandPosture()
        {
            InitializeComponent();
            skinDetector = new YCrCbSkinDetector();
            camera       = new Capture();//@"D:\Working\XNA\STREAM\00005_hand.MTS");

            hsv_min   = new Hsv(0, 45, 0);
            hsv_max   = new Hsv(20, 255, 255);
            YCrCb_min = new Ycc(0, 131, 80);
            YCrCb_max = new Ycc(255, 185, 135);
            template  = new Image <Gray, byte>(@"C:\Users\ahmed nady\Desktop\hand1.jpg");
            ////_binary_template = skinDetector.DetectSkin(template, YCrCb_min, YCrCb_max);

            template1 = new Image <Gray, byte>(@"C:\Users\ahmed nady\Desktop\zoomOut.jpg");
            // _binary_template1 = skinDetector.DetectSkin(template1, YCrCb_min, YCrCb_max);

            template2 = new Image <Gray, byte>(@"C:\Users\ahmed nady\Desktop\rotateLeft.jpg");
            // _binary_template2 = skinDetector.DetectSkin(template2, YCrCb_min, YCrCb_max);

            template3 = new Image <Gray, byte>(@"C:\Users\ahmed nady\Desktop\rotateRight.jpg");
            // _binary_template3 = skinDetector.DetectSkin(template3, YCrCb_min, YCrCb_max);

            testImg = new Image <Bgr, byte>(@"C:\Users\Public\Pictures\Sample Pictures\Tulips.jpg");

            combiningGesture = (template.ConcateHorizontal(template1)).ConcateHorizontal(template2.ConcateHorizontal(template3));
        }
Example No. 6
 public ConvexHull()
 {
     InitializeComponent();
     //@"C:\Users\ahmed nady\Videos\Debut\test.avi" 00023.MTS MOV_0016 @"D:\Working\STREAM\ahmednady.asf"
     camera             = new Capture(/*@"C:\Users\ahmed nady\Videos\Debut\5.avi");*/ @"F:\Working\Final phase\DataSet\sequence.avi");
     fingerTipDetection = new FingerTip();
     skinDetector       = new YCrCbSkinDetector();
     candidateTips      = new List <Point>();
     fingerTracker      = new List <Tracks>();
     // adjust path to find your XML file
     //haar = new HaarCascade("FingerTips.xml");
     hsv_min   = new Hsv(0, 45, 0);
     hsv_max   = new Hsv(20, 255, 255);
     YCrCb_min = new Ycc(0, 131, 80);
     YCrCb_max = new Ycc(255, 185, 135);
     //$N
     rec = new GeometricRecognizer();
     rec.LoadGesture(@"C:\Users\ahmed nady\Documents\TranslateLeft.xml");
     rec.LoadGesture(@"C:\Users\ahmed nady\Documents\TranslateRight.xml");
     rec.LoadGesture(@"C:\Users\ahmed nady\Documents\RotateLeft.xml");
     rec.LoadGesture(@"C:\Users\ahmed nady\Documents\RotateRight.xml");
     rec.LoadGesture(@"C:\Users\ahmed nady\Documents\ZoomIn.xml");
     rec.LoadGesture(@"C:\Users\ahmed nady\Documents\ZoomOut.xml");
     rec.LoadGesture(@"C:\Users\ahmed nady\Documents\KZoomIn.xml");
     rec.LoadGesture(@"C:\Users\ahmed nady\Documents\KZoomOut.xml");
 }
Example No. 7
        public Form1()
        {
            InitializeComponent();

            grabber = new Emgu.CV.Capture("C:/Users/L33549.CITI/Desktop/a.avi");
            grabber.QueryFrame();
            frameWidth = grabber.Width;
            frameHeight = grabber.Height;
            //detector = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);
            hsv_min = new Hsv(0, 45, 0);
            hsv_max = new Hsv(20, 255, 255);
            YCrCb_min = new Ycc(0, 129, 40);
            YCrCb_max = new Ycc(255, 185, 135);
            box = new MCvBox2D();
            ellip = new Ellipse();

            contourStorage = new MemStorage();
            approxStorage = new MemStorage();
            hullStorage = new MemStorage();
            defectsStorage = new MemStorage();

            tipPts = new Point[MAX_POINTS];   // coords of the finger tips
            foldPts = new Point[MAX_POINTS];  // coords of the skin folds between fingers
            depths = new float[MAX_POINTS];   // distances from tips to folds
            cogPt = new Point();
            fingerTips = new List<Point>();
            face = new CascadeClassifier("C:/Users/L33549.CITI/Desktop/AbuseAnalysis/HandGestureRecognition/HandGestureRecognition/HandGestureRecognition/haar/Original/haarcascade_hand.xml");

            Application.Idle += new EventHandler(FrameGrabber);

            /*foreach (var potentialSensor in KinectSensor.KinectSensors)
            {
                if (potentialSensor.Status == KinectStatus.Connected)
                {
                    this.sensor = potentialSensor;
                    break;
                }
            }

            if (null != this.sensor)
            {
                // Turn on the color stream to receive color frames
                this.sensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);

                // Allocate space to put the pixels we'll receive
                this.colorPixels = new byte[this.sensor.ColorStream.FramePixelDataLength];

                // This is the bitmap we'll display on-screen
                this.colorBitmap = new WriteableBitmap(this.sensor.ColorStream.FrameWidth, this.sensor.ColorStream.FrameHeight, 96.0, 96.0, PixelFormats.Bgr32, null);

                // Set the image we display to point to the bitmap where we'll put the image data
                //this.Image.Source = this.colorBitmap;

                // Add an event handler to be called whenever there is new color frame data
                this.sensor.ColorFrameReady += this.SensorColorFrameReady;

                // Start the sensor!
                this.sensor.Start();
            }*/
        }
Example No. 9
        public K_Curvature()
        {
            InitializeComponent();
            //  x = new List<HandTracking>(2);
            handCandiate  = new List <Contour <Point> >();
            detected_hand = new List <Contour <Point> >();

            hand_centers = new Dictionary <int, PointF>(2);
            //C:\Users\ahmed nady\Videos\Debut\Untitled 5.avi  D:\Working\XNA\rotateRight.avi G:\PRIVATE\AVCHD\BDMV\STREAM\00017.mts
            grabber = new Emgu.CV.Capture();//@"D:\Working\STREAM\00003.mts");//@"D:\Working\XNA\rotateRight3.avi");

            // template = new Image<Gray, byte>(@"C:\Users\ahmed nady\Desktop\hand1.jpg");
            hsv_min   = new Hsv(0, 45, 0);
            hsv_max   = new Hsv(20, 255, 255);
            YCrCb_min = new Ycc(0, 131, 80);
            YCrCb_max = new Ycc(255, 185, 135);
            //$N
            rec = new GeometricRecognizer();

            rec.LoadGesture(@"E:\Documents\ScaleUp.xml");
            rec.LoadGesture(@"E:\Documents\ZoomIn.xml");
            rec.LoadGesture(@"E:\Documents\ZoomOut.xml");
            rec.LoadGesture(@"E:\Documents\rotateLeft.xml");
            rec.LoadGesture(@"E:\Documents\rotateRight.xml");



            fingerTracker = new List <Tracks>();
        }
Example No. 10
 public gesture()
 {
     InitializeComponent();
     fow_prop = new gesture_recog();
     fow_prop.Show();
     //CvInvoke.UseOpenCL = false;
     try
     {
         grabber = global.capture;
     }
     catch (NullReferenceException excpt)
     {
         MessageBox.Show(excpt.Message);
     }
     grabber.QueryFrame();
     frameWidth  = grabber.Width;
     frameHeight = grabber.Height;
     //   detector = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);
     hsv_min = new Hsv(0, 45, 0);
     hsv_max = new Hsv(20, 254, 254);
     // YCrCb_min = new Ycc(0, 131, 80);
     //YCrCb_max = new Ycc(255, 185, 135);
     YCrCb_min = new Ycc(0, 130, 80);
     YCrCb_max = new Ycc(255, 185, 135);
     index     = 0;
     for (int i = 0; i < 10; i++)
     {
         na[i] = 1;
     }
     fgDetector        = new BackgroundSubtractorMOG2();
     blobDetector      = new Emgu.CV.Cvb.CvBlobDetector();
     Application.Idle += new EventHandler(FrameGrabber);
 }
Example No. 11

        //\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\
        //Skin Color filtering/////////////////////////////////////////////////////////////////////
        //-----------------------------------------------------------------------------------------
        public Image <Gray, byte> DetectSkinYCC(Image <Bgr, byte> Img, Ycc YCC_min, Ycc YCC_max)
        {
            Image <Ycc, Byte> currentYCrCbFrame = Img.Convert <Ycc, Byte>();
            // InRange allocates the mask itself, so no separate allocation is needed.
            Image <Gray, byte> skin = currentYCrCbFrame.InRange(YCC_min, YCC_max);

            return(skin);
        }
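
A minimal usage sketch for the helper above; it is hypothetical and assumes an Emgu 2.x Capture named grabber and an ImageBox named imgCaja, as in the earlier examples:

        // Hypothetical call site: grab a frame, mask the skin with the Cr/Cb bounds
        // used throughout these examples, and display the result.
        private void ProcessFrame(object sender, EventArgs e)
        {
            Image <Bgr, Byte> frame = grabber.QueryFrame();
            if (frame == null)
            {
                return;
            }
            Image <Gray, byte> skinMask = DetectSkinYCC(frame, new Ycc(0, 131, 80), new Ycc(255, 185, 135));
            imgCaja.Image = skinMask;
        }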
Example No. 12
        public FingerTip()
        {
            tips = new List <Point>();

            hsv_min   = new Hsv(0, 45, 0);
            hsv_max   = new Hsv(20, 255, 255);
            YCrCb_min = new Ycc(0, 131, 80);
            YCrCb_max = new Ycc(255, 185, 135);
        }
Example No. 13

 public Form1()
 {
     InitializeComponent();
     hsv_min = new Hsv(0, 45, 0);
     hsv_max = new Hsv(20, 255, 255);
     YCrCb_min = new Ycc(0, 131, 80);
     YCrCb_max = new Ycc(255, 185, 135);
     mv = new MCvMoments();
 }
Example No. 14

 public Form1()
 {
     InitializeComponent();
     hsv_min = new Hsv(0, 45, 0);
     hsv_max = new Hsv(20, 255, 255);
     YCrCb_min = new Ycc(0, 131, 80);
     YCrCb_max = new Ycc(255, 185, 135);
     mv = new MCvMoments();
     pt.X = loaded.Width / 2;
     pt.Y = loaded.Height / 2;
 }
Example No. 15
        public Form1()
        {
            InitializeComponent();
            detector  = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);
            hsv_min   = new Hsv(0, 45, 0);
            hsv_max   = new Hsv(20, 255, 255);
            YCrCb_min = new Ycc(0, 131, 80);
            YCrCb_max = new Ycc(255, 185, 135);
            float flot = 15;

            bgs = new BackgroundSubtractorMOG2(30, flot, false);
            mv  = new MCvMoments();
        }
Example No. 16
        // Fill the defect contours
        private Image <Bgr, byte> ContourFilling3(Image <Bgr, byte> pic)
        {
            Image <Bgr, byte> outpic = new Image <Bgr, byte>(pic.Size);
            Image <Ycc, byte> ycc    = pic.Convert <Ycc, byte>();

            for (int i = 0; i < ycc.Height; i++)
            {
                for (int j = 0; j < ycc.Width; j++)
                {
                    if (ycc[i, j].Cr > 35 && ycc[i, j].Cr < 148 &&
                        ycc[i, j].Cb > 48 && ycc[i, j].Cb < 141)
                    {
                        ycc[i, j] = new Ycc(0, 0, 0);
                    }
                    else
                    {
                        ycc[i, j] = new Ycc(255, 255, 255);
                    }
                }
            }
            Image <Gray, byte> gray = ycc.Convert <Gray, byte>();

            gray = gray.ThresholdBinary(new Gray(100), new Gray(255));
            gray = gray.Canny(100, 60);
            Image <Gray, byte>    outcon = new Image <Gray, byte>(pic.Size);
            VectorOfVectorOfPoint con    = new VectorOfVectorOfPoint();

            CvInvoke.FindContours(gray, con, outcon, RetrType.External, ChainApproxMethod.ChainApproxNone);
            int n = 0;

            for (int i = 0; i < con.Size; i++)
            {
                if (CvInvoke.ContourArea(con[i]) > 0)
                {
                    n++;
                }
            }
            textBox1.Text = "共" + n.ToString() + "个缺陷" + "      " + "\n";
            n             = 0;
            for (int i = 0; i < con.Size; i++)
            {
                if (CvInvoke.ContourArea(con[i]) > 0)
                {
                    CvInvoke.DrawContours(outpic, con, i, new MCvScalar(0, 255, 0), 5);
                    textBox1.Text = textBox1.Text + "第" + (++n).ToString() + "个缺陷的面积为" + CvInvoke.ContourArea(con[i]) + "    \n";
                }
            }
            CvInvoke.AddWeighted(outpic, 0.5, picture, 0.5, 0, outpic);
            return(outpic);
        }
Example No. 17

        public void IdentifyContours(Bitmap colorImage, int minPerimeter, int maxPerimeter, out List <RecognitionType> detectedObj)
        {
            detectedObj = new List <RecognitionType>();

            #region Conversion To grayscale

            colorImage.RotateFlip(RotateFlipType.RotateNoneFlipX);
            Image <Gray, byte> grayImage = new Image <Gray, byte>(colorImage);
            Image <Bgr, byte>  color     = new Image <Bgr, byte>(colorImage);


            IColorSkinDetector skinDetection;

            Ycc YCrCb_min = new Ycc(0, 131, 80);
            Ycc YCrCb_max = new Ycc(255, 185, 135);

            #endregion



            skinDetection = new YCrCbSkinDetector();
            Image <Gray, byte> skin = skinDetection.DetectSkin(color, YCrCb_min, YCrCb_max);


            using (MemStorage storage = new MemStorage())
            {
                for (Contour <Point> contours = skin.FindContours(Emgu.CV.CvEnum.CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE,
                                                                  Emgu.CV.CvEnum.RETR_TYPE.CV_RETR_TREE, storage); contours != null; contours = contours.HNext)
                {
                    Contour <Point> currentContour = contours.ApproxPoly(contours.Perimeter * 0.015, storage);

                    if (currentContour.BoundingRectangle.Width > 20)
                    {
                        if (contours.Perimeter > minPerimeter && contours.Perimeter < maxPerimeter)
                        {
                            CvInvoke.cvDrawContours(skin, contours, new MCvScalar(255), new MCvScalar(255),
                                                    -1, 2, Emgu.CV.CvEnum.LINE_TYPE.EIGHT_CONNECTED, new Point(0, 0));
                            color.Draw(currentContour.BoundingRectangle, new Bgr(0, 255, 0), 1);

                            detectedObj.Add(new RecognitionType()
                            {
                                GesturePosition = currentContour.BoundingRectangle,
                                GestureImage    = skin.ToBitmap().Clone(currentContour.BoundingRectangle, skin.ToBitmap().PixelFormat)
                            });
                        }
                    }
                }
            }
        }
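
For illustration, a hedged sketch of driving IdentifyContours; the perimeter bounds and the use of GesturePosition are assumptions read off the signature and body above:

        // Hypothetical caller: run the detector on a snapshot and report each region found.
        private void RunRecognition(Bitmap snapshot)
        {
            List <RecognitionType> detected;
            IdentifyContours(snapshot, 100, 1000, out detected);
            foreach (RecognitionType obj in detected)
            {
                Console.WriteLine("Gesture at " + obj.GesturePosition);
            }
        }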
Example No. 18

        public GestureRecognitionClass()
        {
            frameWidth  = grabber.Width;
            frameHeight = grabber.Height;
            detector    = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);
            hsv_min     = new Hsv(0, 45, 0);
            hsv_max     = new Hsv(20, 255, 255);
            YCrCb_min   = new Ycc(0, 131, 80);
            YCrCb_max   = new Ycc(255, 185, 135);
            box         = new MCvBox2D();
            ellipse     = new Ellipse();

            mainProcess = new Thread(MainProcess);
            mainProcess.Start();
        }
Example No. 19
        // constructor that initializes the values
        public Form1()
        {
            InitializeComponent();
            Run();
            detector = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);
            // set the lower and upper thresholds of the HSV and YCrCb color spaces
            hsv_min   = new Hsv(0, 45, 0);
            hsv_max   = new Hsv(20, 255, 255);
            YCrCb_min = new Ycc(0, 131, 80);
            YCrCb_max = new Ycc(255, 185, 135);
            box       = new MCvBox2D();


            // attach the event handler that gives access to the HSV and YCrCb frames
            Application.Idle += new EventHandler(FrameGrabber);
        }
Example No. 20
 public Form1()
 {
     InitializeComponent();
     grabber = new Emgu.CV.Capture(0);
     grabber.QueryFrame();
     frameWidth        = grabber.Width;
     frameHeight       = grabber.Height;
     detector          = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);
     hsv_min           = new Hsv(0, 45, 0);
     hsv_max           = new Hsv(20, 255, 255);
     YCrCb_min         = new Ycc(0, 131, 80);
     YCrCb_max         = new Ycc(255, 185, 135);
     box               = new MCvBox2D();
     ellip             = new Ellipse();
     Application.Idle += new EventHandler(FrameGrabber);
 }
Example No. 21
        public Form1()
        {
            InitializeComponent();
            grabber = new Emgu.CV.Capture(@".\..\..\..\M2U00253.MPG");
            grabber.QueryFrame();
            frameWidth = grabber.Width;
            frameHeight = grabber.Height;
            detector = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);
            hsv_min = new Hsv(0, 45, 0);
            hsv_max = new Hsv(20, 255, 255);
            YCrCb_min = new Ycc(0, 131, 80);
            YCrCb_max = new Ycc(255, 185, 135);
            box = new MCvBox2D();
            ellip = new Ellipse();

            Application.Idle += new EventHandler(FrameGrabber);
        }
Example No. 22
        public void setColorRange(string color)
        {
            if (color.Equals("Black"))
            {
                minRange = new Ycc(0, 96, 96);
                maxRange = new Ycc(32, 160, 160);
            }

            else if (color.Equals("Blue"))
            {
                minRange = new Ycc(9, 208, 78);
                maxRange = new Ycc(73, 255, 142);   // Cr clamped to 255 (the original 272 exceeds the 8-bit range)
            }

            else if (color.Equals("Green"))
            {
                minRange = new Ycc(113, 22, 2);
                maxRange = new Ycc(177, 86, 66);
            }

            else if (color.Equals("Grey"))
            {
                minRange = new Ycc(149, 96, 96);
                maxRange = new Ycc(213, 160, 160);
            }

            else if (color.Equals("Red"))
            {
                minRange = new Ycc(50, 58, 208);
                maxRange = new Ycc(114, 122, 255);  // Cb clamped to 255 (the original 272 exceeds the 8-bit range)
            }

            else if (color.Equals("Yellow"))
            {
                minRange = new Ycc(178, 0, 114);
                maxRange = new Ycc(242, 48, 178);
            }

            else if (color.Equals("default"))
            {
                minRange = new Ycc(0, 131, 80);
                maxRange = new Ycc(255, 185, 135);
            }
        }
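
A short sketch of how setColorRange might be combined with InRange to build a mask (assuming minRange and maxRange are the Ycc fields assigned above):

        // Hypothetical usage: pick a named range, then threshold a frame with it.
        public Image <Gray, byte> MaskForColor(Image <Bgr, byte> frame, string color)
        {
            setColorRange(color);                    // e.g. "default" selects the skin range Ycc(0,131,80)..Ycc(255,185,135)
            Image <Ycc, byte> ycc = frame.Convert <Ycc, byte>();
            return(ycc.InRange(minRange, maxRange)); // white wherever the pixel falls inside the range
        }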
Example No. 23
        // Check whether the obstacle color matches
        public int obstacle_YccColorCheck(Image <Bgr, Byte> iamge, int totalPicxel, int pos_x, int pos_y, int img_width, int img_height, int min1, int min2, int min3, int max1, int max2, int max3)
        {
            int pixCount = 0;

            Image <Ycc, Byte>  YCrCbFrame   = iamge.Convert <Ycc, Byte>();                                 // convert to YCrCb
            Image <Gray, byte> colorSetting = new Image <Gray, byte>(YCrCbFrame.Width, YCrCbFrame.Height); // gray mask of the Ycc range, so it can be eroded/dilated

            Ycc YCrCb_min = new Ycc(min1, min2, min3);
            Ycc YCrCb_max = new Ycc(max1, max2, max3);                         // blue color range

            colorSetting = YCrCbFrame.InRange((Ycc)YCrCb_min, (Ycc)YCrCb_max); // apply the color range

            StructuringElementEx rect_12 = new StructuringElementEx(12, 12, 6, 6, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            CvInvoke.cvErode(colorSetting, colorSetting, rect_12, 1);
            StructuringElementEx rect_6 = new StructuringElementEx(6, 6, 3, 3, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            CvInvoke.cvDilate(colorSetting, colorSetting, rect_6, 2);          // erode, then dilate

            Image <Bgr, Byte> colorCount = colorSetting.Convert <Bgr, Byte>(); // for counting pixels

            for (int x = pos_x; x < pos_x + img_width; x++)
            {
                for (int y = pos_y; y < pos_y + img_height; y++)
                {
                    if (colorCount[y, x].Equals(new Bgr(255, 255, 255)))
                    {
                        pixCount++;
                        if (totalPicxel / 10 <= pixCount) // enough matching pixels: report a match and stop
                        {
                            return(1);
                        }

                        if (x > pos_x / 5 + x && y > pos_y / 5 + y) // past 1/5 of the coordinate: treat as absent (note: this condition can never be true for non-negative pos_x/pos_y)
                        {
                            return(-1);
                        }
                    }
                }
            }

            return(-1);
        }
Example No. 24
 public Form1()
 {
     InitializeComponent();
     grabber = new Emgu.CV.Capture();
     grabber.QueryFrame();
     frameWidth  = grabber.Width;
     frameHeight = grabber.Height;
     detector    = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);
     hsv_min     = new Hsv(0, 45, 0);
     hsv_max     = new Hsv(20, 255, 255);
     YCrCb_min   = new Ycc(0, 131, 80);
     YCrCb_max   = new Ycc(255, 185, 135);
     box         = new MCvBox2D();
     // ellip = new Ellipse();
     _face = new HaarCascade("haarcascade_frontalface_alt_tree.xml");
     eyes  = new HaarCascade("haarcascade_mcs_eyepair_big.xml");
     reye  = new HaarCascade("haarcascade_mcs_lefteye.xml");
     leye  = new HaarCascade("haarcascade_mcs_righteye.xml");
     label1.Hide();
 }
Example No. 26
        MCvBox2D box; // a rotated rectangle (bounding box)


        // constructor that initializes the values
        public video()
        {
            InitializeComponent();
            grabber = new Emgu.CV.Capture(@".\..\..\..\hao.mpg");                                // có được từ video file nhờ sử dụng biến grabber
            grabber.QueryFrame();                                                                // nhận khung hình từ video file
            frameWidth  = grabber.Width;                                                         // thiết lập kích thước khung từ kích thước video file đã có
            frameHeight = grabber.Height;
            detector    = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE); //class nhận diện da
            // xác định ngưỡng trên và ngưỡng dưới của hsv and YCrCB color space
            // có thể điều chỉnh để phù hơp với video file
            hsv_min   = new Hsv(0, 45, 0);
            hsv_max   = new Hsv(20, 255, 255);
            YCrCb_min = new Ycc(0, 131, 80);
            YCrCb_max = new Ycc(255, 185, 135);
            box       = new MCvBox2D();


            // attach the event handler that gives access to the HSV and YCrCb frames
            Application.Idle += new EventHandler(FrameGrabber);
        }
Example No. 27
        MCvBox2D box; // a rotated rectangle (bounding box)


        // constructor that initializes the values
        public Form1()
        {
            InitializeComponent();
            grabber = new Emgu.CV.Capture(@".\..\..\..\hao.mpg");                                // có được từ video file nhờ sử dụng biến grabber
            grabber.QueryFrame();                                                                // nhận khung hình từ video file
            frameWidth  = grabber.Width;                                                         // thiet lap kich thuoc cua khung lay tu kich thuoc cua video file da co
            frameHeight = grabber.Height;
            detector    = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE); // nhận diện skin

            /* lower and upper thresholds of the HSV and YCrCb color spaces;
             * they can be tuned to fit the video file  */
            hsv_min   = new Hsv(0, 45, 0);
            hsv_max   = new Hsv(20, 255, 255);
            YCrCb_min = new Ycc(0, 131, 80);
            YCrCb_max = new Ycc(255, 185, 135);
            box       = new MCvBox2D();


            // attach FrameGrabber to the event handler to access the HSV and YCrCb frames
            Application.Idle += new EventHandler(FrameGrabber);
        }
Example No. 28

        public hand_detection()
        {
            InitializeComponent();

            //  x = new List<HandTracking>(2);
            handCandiate  = new List <Contour <Point> >();
            detected_hand = new List <Contour <Point> >();

            hand_centers = new Dictionary <int, PointF>(2);

            grabber = new Emgu.CV.Capture(@"D:\Working\STREAM\test.avi");

            hsv_min   = new Hsv(0, 45, 0);
            hsv_max   = new Hsv(20, 255, 255);
            YCrCb_min = new Ycc(0, 131, 80);
            YCrCb_max = new Ycc(255, 185, 135);


            fingerTracker = new List <Tracks>();
            // y = null;


            //height = (int)grabber.GetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FRAME_HEIGHT);
            //width = (int)grabber.GetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FRAME_WIDTH);



            //detector = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);


            //tempImage = new Image<Gray, Byte>(width, height);

            //current_image = new Image<Gray, byte>(width, height);

            //newImageG = new Image<Gray, byte>(width, height);

            //sw = new System.Diagnostics.Stopwatch();

            //Application.Idle += FrameGrabber;
        }
Example No. 29
        Ellipse ellip;      // an ellipse instance

        // constructor that initializes the values
        public Form1()
        {
            InitializeComponent();
            Run();
            //grabber = new Capture();
            //grabber.QueryFrame(); // grab a frame from the video file
            //frameWidth = grabber.Width;    // set the frame size from the video's dimensions
            //frameHeight = grabber.Height;
            detector = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE); // skin detection

            /* lower and upper thresholds of the HSV and YCrCb color spaces;
             * they can be tuned to fit the video file  */
            hsv_min   = new Hsv(0, 45, 0);
            hsv_max   = new Hsv(20, 255, 255);
            YCrCb_min = new Ycc(0, 131, 80);
            YCrCb_max = new Ycc(255, 185, 135);
            box       = new MCvBox2D();
            ellip     = new Ellipse();

            // attach FrameGrabber to the event handler to access the HSV and YCrCb frames
            Application.Idle += new EventHandler(FrameGrabber);
        }
Example No. 30
        public FingerTipsTracking()
        {
            InitializeComponent();
            //D:\Working\STREAM\00003.MTS
            camera = new Capture(@"D:\Working\STREAM\00003.MTS");

            hsv_min   = new Hsv(0, 45, 0);
            hsv_max   = new Hsv(20, 255, 255);
            YCrCb_min = new Ycc(0, 131, 80);
            YCrCb_max = new Ycc(255, 185, 135);

            //$N
            rec = new GeometricRecognizer();


            rec.LoadGesture(@"E:\Documents\ZoomIn.xml");
            rec.LoadGesture(@"E:\Documents\ZoomOut.xml");

            fingerTracker = new List <Tracks>();

            testImage = new Image <Bgr, byte>(@"C:\Users\Public\Pictures\Sample Pictures\Tulips.jpg");
            eImg      = new Image <Gray, byte>(@"C:\Users\ahmed nady\Desktop\eImg1.jpg");
        }
Example No. 31

        private void MainWindow_Loaded(object sender, RoutedEventArgs e)
        {
            heightHand = (int)imageConvexHull.Height;
            widhtHand = (int)imageConvexHull.Width;
            nui.Initialize(RuntimeOptions.UseColor | RuntimeOptions.UseSkeletalTracking);
            nui.VideoFrameReady += Nui_VideoFrameReady;
            nui.VideoStream.Open(ImageStreamType.Video, 2, ImageResolution.Resolution1280x1024, ImageType.Color);

            #region SmoothTransform
            nui.SkeletonEngine.TransformSmooth = true;
            var parameters = new TransformSmoothParameters { Smoothing = 0.75f, Correction = 0.0f,
                Prediction = 0.0f, JitterRadius = 0.05f, MaxDeviationRadius = 0.04f };
            nui.SkeletonEngine.SmoothParameters = parameters;
            #endregion

            nui.SkeletonFrameReady += Nui_skeleton_SkeletonFrameReady;

            #region HandRecognitionInit
            detector = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);

            hsv_min = new Hsv(10, 45, 50);
            hsv_max = new Hsv(20, 255, 255);
            YCrCb_min = new Ycc(0, 131, 80);
            YCrCb_max = new Ycc(255, 185, 135);
            box = new MCvBox2D();
            ellip = new Emgu.CV.Structure.Ellipse();
            #endregion
            haarCascade = new HaarCascade(rootXML + xmlName);
            if (haarCascade == null)
                Console.WriteLine("Haar cascade is null.");
        }
Example No. 32
        protected void Page_Load(object sender, EventArgs e)
        {
            bool result = Convert.ToBoolean(Session["result"]);

            faceHaar = new CascadeClassifier("C:/Emgu/opencv_attic/opencv/data/haarcascades/haarcascade_frontalface_default.xml");

            hsv_min = new Hsv(0, 45, 0);
            hsv_max = new Hsv(20, 255, 255);
            //YCrCb_min = new Ycc(0, 131, 80);
            YCrCb_min = new Ycc(0, 140, 0);
            YCrCb_max = new Ycc(255, 185, 135);

            currentFrameList = new List<Image<Bgr, byte>>();
            grayFrameList = new List<Image<Gray, byte>>();
            handMotionList = new List<Image<Gray, byte>>();

            contourStorage = new MemStorage();
            box = new MCvBox2D();
            fingers = new List<LineSegment2D>();
            motionPoints = new List<PointF>();
            gestures = new List<string>();
            runTimes = new List<long>();
            faceEmotions = new List<string>();
        }
Example No. 33
        Image <Bgr, Byte> KuwaharaFilter(Image <Bgr, Byte> imgToFilter, int size)
        {
            Image <Ycc, Byte> imgYccToFilter = new Image <Ycc, byte>(imgToFilter.Size),
           imgYccFiltered = new Image <Ycc, byte>(imgToFilter.Size);
            Image <Bgr, Byte> imgFiltered = new Image <Bgr, byte>(imgToFilter.Size);

            CvInvoke.CvtColor(imgToFilter, imgYccToFilter, ColorConversion.Bgr2YCrCb);

            int[] apetureMinX = { -(size / 2), 0, -(size / 2), 0 };
            int[] apetureMaxX = { 0, (size / 2), 0, (size / 2) };
            int[] apetureMinY = { -(size / 2), -(size / 2), 0, 0 };
            int[] apetureMaxY = { 0, 0, (size / 2), (size / 2) };

            for (int x = 0; x < imgYccToFilter.Height; ++x)
            {
                for (int y = 0; y < imgYccToFilter.Width; ++y)
                {
                    double[] yValues   = { 0, 0, 0, 0 };
                    int[]    numPixels = { 0, 0, 0, 0 };
                    int[]    maxYValue = { 0, 0, 0, 0 };
                    int[]    minYValue = { 255, 255, 255, 255 };

                    for (int i = 0; i < 4; ++i)
                    {
                        for (int x2 = apetureMinX[i]; x2 < apetureMaxX[i]; ++x2)
                        {
                            int tempX = x + x2;
                            if (tempX >= 0 && tempX < imgYccToFilter.Height)
                            {
                                for (int y2 = apetureMinY[i]; y2 < apetureMaxY[i]; ++y2)
                                {
                                    int tempY = y + y2;
                                    if (tempY >= 0 && tempY < imgYccToFilter.Width)
                                    {
                                        Ycc tempColor = imgYccToFilter[tempX, tempY];
                                        yValues[i] += tempColor.Y;

                                        if (tempColor.Y > maxYValue[i])
                                        {
                                            maxYValue[i] = (int)tempColor.Y;
                                        }
                                        else if (tempColor.Y < minYValue[i])
                                        {
                                            minYValue[i] = (int)tempColor.Y;
                                        }

                                        ++numPixels[i];
                                    }
                                }
                            }
                        }
                    }

                    int j             = 0;
                    int minDifference = 10000;

                    for (int i = 0; i < 4; ++i)
                    {
                        int currentDifference = maxYValue[i] - minYValue[i];
                        if (currentDifference < minDifference && numPixels[i] > 0)
                        {
                            j             = i;
                            minDifference = currentDifference;
                        }
                    }

                    Ycc filteredPixel = new Ycc(yValues[j] / numPixels[j], imgYccToFilter[x, y].Cr, imgYccToFilter[x, y].Cb);
                    imgYccFiltered[x, y] = filteredPixel;
                }
            }

            CvInvoke.CvtColor(imgYccFiltered, imgFiltered, ColorConversion.YCrCb2Bgr);
            imageBoxFiltered.Image = imgFiltered;

            return(imgFiltered);
        }
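
Usage is a single call; a sketch, assuming imgOriginal holds the source frame and 5 is the window size. The filter splits the window into four overlapping quadrants and assigns each pixel the mean luminance of the quadrant whose Y-range is smallest, which is what preserves edges while smoothing:

        // Hypothetical call site for the filter above.
        Image <Bgr, Byte> imgSmoothed = KuwaharaFilter(imgOriginal, 5);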
Example No. 34

 public Form1()
 {
     InitializeComponent();
     YCrCb_min = new Ycc(0, 139, 0);                             //139 or 145
     YCrCb_max = new Ycc(193, 255, 193);
 }
Example No. 35
        private void RefreshBackground(Image <Bgr, byte> imgCurrentFrame, double refreshHysteresis)
        {
            Image <Gray, byte> imgGrayCurrent = imgCurrentFrame.Convert <Gray, byte>();


            Image <Hsv, byte> imgHsvCurrent           = imgCurrentFrame.Convert <Hsv, byte>();                         // new HSV image of the current frame to compare with the background
            Image <Hsv, byte> imgHsvCurrentBackground = imgback.Convert <Hsv, byte>();                                 // new HSV image of the current background frame to compare with the current frame

            double VvalueCurrent    = imgHsvCurrent.GetSum().Value / (imgCurrentFrame.Width * imgCurrentFrame.Height); // get the luminance
            double VvalueBackground = imgHsvCurrentBackground.GetSum().Value / (imgHsvCurrentBackground.Width * imgHsvCurrentBackground.Height);


            double brightness_imgGrayCurrent = imgGrayCurrent.GetAverage().Intensity;



            double LuminanceDiff = Math.Abs(VvalueCurrent - VvalueBackground);


            YCrCb_min = new Ycc(0, VvalueBackground * (6.0d / 90.0d) + 122.31, 76);

            //__SaftyInterface.InvokeEx(f => f.WriteLogInfo("LuminanceDiff = " + LuminanceDiff));

            // __SaftyInterface.InvokeEx(f => f.WriteLogInfo("Luminance = " + VvalueCurrent));

            //__SaftyInterface.InvokeEx(f => f.WriteLogInfo("CrMin = " + (VvalueBackground * (6.0d / 90.0d) + 122).ToString()));

            if (CurrentState == ProcessState.Safe)
            {
                if (imgbackbuffer.Count == 0)
                {
                    imgbackbuffer.Add(Convert.ToInt32(VvalueBackground), imgback);
                    highest = Convert.ToInt32(VvalueBackground);
                    lowest  = Convert.ToInt32(VvalueBackground);
                }

                if (imgbackbuffer.Count < 200)
                {
                    if (!imgbackbuffer.ContainsKey(Convert.ToInt32(VvalueCurrent)))
                    {
                        imgbackbuffer.Add(Convert.ToInt32(VvalueCurrent), imgCurrentFrame);

                        foreach (KeyValuePair <int, Image <Bgr, byte> > entry in imgbackbuffer)
                        {
                            if (highest < entry.Key)
                            {
                                highest = entry.Key;
                            }

                            if (lowest > entry.Key)
                            {
                                lowest = entry.Key;
                            }
                            // do something with entry.Value or entry.Key
                        }
                    }
                    else
                    {
                        imgbackbuffer[Convert.ToInt32(VvalueCurrent)] = imgCurrentFrame;
                    }
                }
            }
            else if (CurrentState == ProcessState.Init)
            {
                /*
                 * for (int i = 1; i <= 78; i++)
                 * {
                 *  Image<Bgr, byte> imgInput = new Image<Bgr, byte>(@"C:\DHBW\Studienarbeit\T3200\Software\SA2_SafetyModule\SafetyModule\Resources\back\back" + i + ".png");
                 *
                 *  imgHsvCurrent = imgInput.Convert<Hsv, byte>();
                 *
                 *  VvalueCurrent = imgHsvCurrent.GetSum().Value / (imgInput.Width * imgInput.Height);
                 *
                 *
                 *
                 *
                 *  if (!imgbackbuffer.ContainsKey(Convert.ToInt32(VvalueCurrent)))
                 *  {
                 *      imgbackbuffer.Add(Convert.ToInt32(VvalueCurrent), imgInput);
                 *  }
                 *
                 * }
                 */
                /*
                 *  for (int i = 1; i < 5; i++)
                 *  {
                 *      //continue;
                 *      Image<Gray, Byte>[] channels;
                 *      channels = imgHsvCurrent.Split();
                 *      channels[2]._GammaCorrect(1 + (i / 10));
                 *      //channels[2]._GammaCorrect(3);
                 *
                 *      Image<Hsv, byte> imgEqualizedLuminance = imgHsvCurrent;
                 *
                 *
                 *      VectorOfMat arrChannels = new VectorOfMat(channels[0].Mat, channels[1].Mat, channels[2].Mat);
                 *      CvInvoke.Merge(arrChannels, imgEqualizedLuminance);
                 *
                 *      Image<Bgr, byte> imgEqualized = imgEqualizedLuminance.Convert<Bgr, byte>();
                 *
                 *
                 *      imgHsvCurrent = imgEqualized.Convert<Hsv, byte>();
                 *
                 *      VvalueCurrent = imgHsvCurrent.GetSum().Value / (imgEqualized.Width * imgEqualized.Height);
                 *
                 *      //VvalueCurrent = channels[2].GetSum().value;
                 *
                 *      VvalueCurrent = VvalueCurrent - (i * 10);
                 *
                 *
                 *      //imgEqualized.Save("C:\\temp\\Bilder" + "\\" + "row_image" + i);
                 *
                 *
                 *
                 *      if (!imgbackbuffer.ContainsKey(Convert.ToInt32(VvalueCurrent)))
                 *      {
                 *          imgbackbuffer.Add(Convert.ToInt32(VvalueCurrent), imgEqualized);
                 *      }
                 *
                 * }
                 */
            }



            if (LuminanceDiff > refreshHysteresis)
            {
                if (CurrentState == ProcessState.Safe)
                {
                    MoveNext(Command.Pause);
                }
                else if (CurrentState == ProcessState.NotSafe)
                {
                    for (int i = 0; i < 100; i++)
                    {
                        if (imgbackbuffer.ContainsKey(Convert.ToInt32(VvalueCurrent) - i))
                        {
                            imgback = imgbackbuffer[Convert.ToInt32(VvalueCurrent) - i];
                            return;
                        }

                        if (imgbackbuffer.ContainsKey(Convert.ToInt32(VvalueCurrent) + i))
                        {
                            imgback = imgbackbuffer[Convert.ToInt32(VvalueCurrent) + i];
                            return;
                        }
                    }
                }

                if (CurrentState == ProcessState.Paused)
                {
                    for (int i = 0; i < 100; i++)
                    {
                        if (imgbackbuffer.ContainsKey(Convert.ToInt32(VvalueCurrent) - i))
                        {
                            imgback = imgbackbuffer[Convert.ToInt32(VvalueCurrent) - i];
                            return;
                        }

                        if (imgbackbuffer.ContainsKey(Convert.ToInt32(VvalueCurrent) + i))
                        {
                            imgback = imgbackbuffer[Convert.ToInt32(VvalueCurrent) + i];
                            return;
                        }
                    }
                }
            }
            else if (CurrentState == ProcessState.Paused)
            {
                MoveNext(Command.Resume);
                refreshBackCounter++;
            }
            return;

            // NOTE: the early return above makes everything below unreachable;
            // it appears to be an earlier revision of the refresh logic kept for reference.
            if (LuminanceDiff > refreshHysteresis)
            {
                if (CurrentState == ProcessState.Safe)
                {
                    MoveNext(Command.Pause);
                }


                if (Math.Abs(prevLuminance - LuminanceDiff) > 1)
                {
                    prevLuminance = LuminanceDiff;
                    //imgback = imgCurrentFrame;
                    if (VvalueCurrent > VvalueBackground)
                    {
                        //  imgback._GammaCorrect(3);

                        //imgback = imgback.Not();
                    }
                    //imgback = imgCurrentFrame;
                }
                else
                {
                    //imgback = imgCurrentFrame;



                    if (imgbackbuffer.ContainsKey(Convert.ToInt32(VvalueCurrent)))
                    {
                        imgback = imgbackbuffer[Convert.ToInt32(VvalueCurrent)];
                    }


                    MoveNext(Command.Resume);
                    refreshBackCounter++;
                    prevLuminance = 0;
                }
            }
            else
            {
                if (CurrentState == ProcessState.Paused)
                {
                    MoveNext(Command.Resume);
                }
            }
        }
Example No. 36

 private void ClickRisePointButton(object sender, EventArgs e)
 {
     Image<Ycc,Byte> img = _sourceImage.Clone().Convert<Ycc,Byte>();
     int width=img.Width;
     int height=img.Height;
     for (int i = 0; i < height; i++)
     {
         for (int j = 0; j < width; j++)
         {
              // the pixel is skin-colored
             if (img[i, j].Cr >= 138 && img[i, j].Cr <= 173 && img[i, j].Cb >= 77 && img[i, j].Cb <= 127)
             {
                 img[i, j] = new Ycc(16,128,128);
             }
         }
     }
     _outputPictureBox.Image = img.ToBitmap();
 }
Example No. 37
        // Extract color information using the Ycc color model
        public void YccColorCheck(int index, int min1, int min2, int min3, int max1, int max2, int max3)
        {
            int pixCount = 0;

            Image <Ycc, Byte>  YCrCbFrame   = colorCheckImage.Convert <Ycc, Byte>();                       // convert to YCrCb
            Image <Gray, byte> colorSetting = new Image <Gray, byte>(YCrCbFrame.Width, YCrCbFrame.Height); // gray mask of the Ycc range, so it can be eroded/dilated

            Ycc YCrCb_min = new Ycc(min1, min2, min3);
            Ycc YCrCb_max = new Ycc(max1, max2, max3);                         // color range

            colorSetting = YCrCbFrame.InRange((Ycc)YCrCb_min, (Ycc)YCrCb_max); // apply the color range

            StructuringElementEx rect_12 = new StructuringElementEx(6, 6, 3, 3, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            CvInvoke.cvErode(colorSetting, colorSetting, rect_12, 1);
            StructuringElementEx rect_6 = new StructuringElementEx(6, 6, 3, 3, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            CvInvoke.cvDilate(colorSetting, colorSetting, rect_6, 2);          // erode, then dilate

            Image <Bgr, Byte> colorCount = colorSetting.Convert <Bgr, Byte>(); // for counting pixels

            // clamp when the ROI falls outside the image
            if (pos_x < 0)
            {
                pos_x = 0;
            }
            if (pos_y < 0)
            {
                pos_y = 0;
            }

            if (pos_x + img_width > colorCheckImage.Width)
            {
                pos_x = colorCheckImage.Width - img_width;
            }
            if (pos_y + img_height > colorCheckImage.Height)
            {
                pos_y = colorCheckImage.Height - img_height;
            }

            for (int x = pos_x; x < pos_x + img_width; x++)
            {
                for (int y = pos_y; y < pos_y + img_height; y++)
                {
                    if (!colorCount[y, x].Equals(new Bgr(0, 0, 0)))
                    {
                        pixCount++;

                        if (totalPicxel / 3 <= pixCount) // enough matching pixels: update the color array and stop
                        {
                            color[index]       = true;
                            color_ROI[index].X = x;
                            color_ROI[index].Y = y;
                            color_count++;
                            change_check[index] = false;

                            int margin = 0;
                            ugvList.Add(new UGV("A" + index, glo.TemplateWidth - margin, glo.TemplateHeight - margin, x + 30, y + 30, colorStr[index]));
                            return;
                        }
                    }
                }
            }
        }
Example No. 38
        public Form1()
        {
            InitializeComponent();

            faceHaar = new CascadeClassifier(@"..\..\haar\haarcascade_frontalface.xml");

            hsv_min = new Hsv(0, 45, 0);
            hsv_max = new Hsv(20, 255, 255);
            //YCrCb_min = new Ycc(0, 131, 80);
            YCrCb_min = new Ycc(0, 140, 0);
            YCrCb_max = new Ycc(255, 185, 135);

            currentFrameList = new List<Image<Bgr, byte>>();
            grayFrameList = new List<Image<Gray, byte>>();
            handMotionList = new List<Image<Gray, byte>>();

            contourStorage = new MemStorage();
            box = new MCvBox2D();
            fingers = new List<LineSegment2D>();
            motionPoints = new List<PointF>();
            gestures = new List<string>();
            runTimes = new List<long>();
            faceEmotions = new List<string>();
        }
Example No. 39
    // Detects Skin. Takes an image and returns a binary image with the background removed.
    // Skin is white and everything else will be black.
    public static Image <Bgr, Byte> GetSkin(Image <Bgr, Byte> image)
    {
        // allocate the result matrix
        Image <Bgr, Byte> dst = image.Clone();
        Bgr cwhite            = new Bgr(255, 255, 255);                                 //Vec3b::all(255);
        Bgr cblack            = new Bgr(0, 0, 0);                                       //Vec3b::all(0);

        Image <Ycc, Byte>   src_ycrcb = new Image <Ycc, Byte>(dst.Width, dst.Height);   // = new Mat();
        Image <Hsv, Single> src_hsv   = new Image <Hsv, Single>(dst.Width, dst.Height); // = new Mat();

        // OpenCV scales the YCrCb components, so that they
        // cover the whole value range of [0,255], so there's
        // no need to scale the values:
        Emgu.CV.CvInvoke.CvtColor(image, src_ycrcb, Emgu.CV.CvEnum.ColorConversion.Bgr2YCrCb);
        // OpenCV scales the Hue Channel to [0,180] for
        // 8bit images, so make sure we are operating on
        // the full spectrum from [0,360] by using floating
        // point precision:
        //image.ConvertTo(src_hsv, Emgu.CV.CvEnum.DepthType.Cv32F);
        Emgu.CV.CvInvoke.CvtColor(image, src_hsv, Emgu.CV.CvEnum.ColorConversion.Bgr2Hsv); // src_hsv to image
        // Now scale the values between [0,255]:
        Emgu.CV.CvInvoke.Normalize(src_hsv, src_hsv, 0.0, 255.0, Emgu.CV.CvEnum.NormType.MinMax, Emgu.CV.CvEnum.DepthType.Cv32F);

        for (int i = 0; i < image.Rows; i++)
        {
            for (int j = 0; j < image.Cols; j++)
            {
                //Vec3b pix_bgr = src.ptr<Vec3b>(i)[j];
                Bgr pix_bgr = image[i, j];
                int B       = (int)pix_bgr.Blue;
                int G       = (int)pix_bgr.Green;
                int R       = (int)pix_bgr.Red;
                // apply rgb rule
                bool a = R1(R, G, B);

                //Vec3b pix_ycrcb = src_ycrcb.ptr<Vec3b>(i)[j];
                Ycc   pix_ycrcb = src_ycrcb[i, j];
                float Y         = (float)pix_ycrcb.Y;
                float Cr        = (float)pix_ycrcb.Cr;
                float Cb        = (float)pix_ycrcb.Cb;
                // apply ycrcb rule
                bool b = R2(Y, Cr, Cb);

                Hsv   pix_hsv = src_hsv[i, j];
                float H       = (float)pix_hsv.Hue;
                float S       = (float)pix_hsv.Satuation;
                float V       = (float)pix_hsv.Value;
                // apply hsv rule
                bool c = R3(H, S, V);

                if (!(a && b && c))
                {
                    dst[i, j] = cblack;
                }
                else
                {
                    dst[i, j] = cwhite; // Make skin white
                }
            }
        }
        return(dst);
    }
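
The R1/R2/R3 predicates are not defined in this snippet; a plausible sketch, assuming they are the standard per-pixel RGB / YCrCb / HSV skin rules this routine is usually paired with:

    // Hypothetical helpers: classic skin-color rules matching the channel values passed above.
    static bool R1(int R, int G, int B)
    {
        // RGB rule for uniform daylight illumination.
        return(R > 95 && G > 40 && B > 20 &&
               (Math.Max(R, Math.Max(G, B)) - Math.Min(R, Math.Min(G, B))) > 15 &&
               Math.Abs(R - G) > 15 && R > G && R > B);
    }

    static bool R2(float Y, float Cr, float Cb)
    {
        // Bounding polygon in the Cr/Cb plane.
        return(Cr <= 1.5862 * Cb + 20 &&
               Cr >= 0.3448 * Cb + 76.2069 &&
               Cr >= -4.5652 * Cb + 234.5652 &&
               Cr <= -1.15 * Cb + 301.75 &&
               Cr <= -2.2857 * Cb + 432.85);
    }

    static bool R3(float H, float S, float V)
    {
        // Hue cutoff on the hue channel rescaled to [0,255].
        return(H < 25 || H > 230);
    }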
Example No. 40
        // Track each color and build its rect
        public void color_traking(int index, int min1, int min2, int min3, int max1, int max2, int max3, Image <Bgr, Byte> iamge, Rectangle[] rect)
        {
            int pixCount = 0, small_pixCount = 0;

            Image <Ycc, Byte>  YCrCbFrame = iamge.Convert <Ycc, Byte>(); // convert to YCrCb
            Image <Gray, byte> colorSetting = new Image <Gray, byte>(YCrCbFrame.Width, YCrCbFrame.Height);

            Ycc YCrCb_min = new Ycc(min1, min2, min3);
            Ycc YCrCb_max = new Ycc(max1, max2, max3);                         // blue color range

            colorSetting = YCrCbFrame.InRange((Ycc)YCrCb_min, (Ycc)YCrCb_max); // apply the color range

            StructuringElementEx rect_12 = new StructuringElementEx(6, 6, 3, 3, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            CvInvoke.cvErode(colorSetting, colorSetting, rect_12, 1);
            StructuringElementEx rect_6 = new StructuringElementEx(6, 6, 3, 3, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            CvInvoke.cvDilate(colorSetting, colorSetting, rect_6, 2);          // erode, then dilate

            Image <Bgr, Byte> colorCount = colorSetting.Convert <Bgr, Byte>(); // for counting pixels

            // find the small circle
            //YCrCb_min = new Ycc(0, 0, 0);
            //YCrCb_max = new Ycc(255, 146, 100);   // large-circle yellow color range

            YCrCb_min = new Ycc(0, 0, 0);
            //YCrCb_max = new Ycc(255, 150, 114);   // small-circle yellow color range
            YCrCb_max = new Ycc(255, 150, 120);   // small-circle yellow color range


            colorSetting = YCrCbFrame.InRange((Ycc)YCrCb_min, (Ycc)YCrCb_max); // apply the color range

            rect_12 = new StructuringElementEx(6, 6, 3, 3, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);
            CvInvoke.cvErode(colorSetting, colorSetting, rect_12, 1);
            rect_6 = new StructuringElementEx(6, 6, 3, 3, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);
            CvInvoke.cvDilate(colorSetting, colorSetting, rect_6, 2);                // erode, then dilate

            Image <Bgr, Byte> small_colorCount = colorSetting.Convert <Bgr, Byte>(); // for counting pixels

            int x_p = 0, y_p = 0;                                                    // pixel accumulators for the large circle
            int small_x = 0, small_y = 0;                                            // pixel accumulators for the small circle

            // clamp when the ROI falls outside the image
            if (color_ROI[index].X < 0)
            {
                color_ROI[index].X = 0;
            }
            if (color_ROI[index].Y < 0)
            {
                color_ROI[index].Y = 0;
            }

            if (color_ROI[index].X + img_width > image.Width)
            {
                color_ROI[index].X = image.Width - img_width;
            }
            if (color_ROI[index].Y + img_height > image.Height)
            {
                color_ROI[index].Y = image.Height - img_height;
            }

            // Count the marker pixels inside the ROI and accumulate their coordinates
            for (int x = color_ROI[index].X; x < color_ROI[index].X + img_width; x++)
            {
                for (int y = color_ROI[index].Y; y < color_ROI[index].Y + img_height; y++)
                {
                    if (!colorCount[y, x].Equals(new Bgr(0, 0, 0)))
                    {
                        pixCount++;
                        x_p += x;
                        y_p += y;
                    }
                }
            }

            // Decide based on how many marker pixels were found
            if (pixCount >= 10) // enough pixels: the marker is visible, so update the ROI
            {
                int big_center_x = x_p / pixCount;
                int big_center_y = y_p / pixCount; // centroid of the large circle

                // To detect disappearance: coordinates can come out negative, so negative
                // values are deliberately clamped to 0, and set to -1 once the marker is gone.
                int tmp_x = big_center_x - glo.TemplateWidth / 2;
                int tmp_y = big_center_y - glo.TemplateHeight / 2;

                int tmp_width  = glo.TemplateWidth;
                int tmp_height = glo.TemplateHeight;

                if (tmp_x < 0)
                {
                    tmp_width += tmp_x;
                    tmp_x      = 0;
                }
                if (tmp_y < 0)
                {
                    tmp_height += tmp_y;
                    tmp_y       = 0;
                }

                if (tmp_x + img_width > glo.rect_width)
                {
                    tmp_width = img_width - (tmp_x + img_width - glo.rect_width);
                }

                if (tmp_y + img_height > glo.rect_height)
                {
                    tmp_height = img_height - (tmp_y + img_height - glo.rect_height);
                }

                int x_end = tmp_x + img_width;
                int y_end = tmp_y + img_height;

                if (x_end > glo.rect_width)
                {
                    x_end = glo.rect_width;
                }
                if (y_end > glo.rect_height)
                {
                    y_end = glo.rect_height;
                }


                for (int x = tmp_x; x < x_end; x++)
                {
                    for (int y = tmp_y; y < y_end; y++)
                    {
                        if (!small_colorCount[y, x].Equals(new Bgr(0, 0, 0)))
                        {
                            small_pixCount++;
                            small_x += x;
                            small_y += y;
                        }
                    }
                }

                rect[index]        = new Rectangle(tmp_x, tmp_y, tmp_width, tmp_height); // top-left corner plus clamped size
                color_ROI[index].X = big_center_x - glo.TemplateWidth / 2;
                color_ROI[index].Y = big_center_y - glo.TemplateHeight / 2;

                if (small_pixCount != 0)
                {
                    int small_center_x = small_x / small_pixCount;
                    int small_center_y = small_y / small_pixCount;

                    int C = big_center_x - small_center_x;
                    int D = big_center_y - small_center_y;

                    double E      = Math.Atan2(D, C);
                    double result = E * (180.0 / Math.PI); // radians to degrees

                    if (result < 0)
                    {
                        result = 360 + result; // normalise to [0, 360)
                    }

                    // glo.mapObstacleLock.EnterWriteLock(); //critical section start

                    double ref_angle = 45; // sector width in degrees
                    double margin    = 13; // tolerance around each sector centre


                    if (ref_angle * 2 - margin <= result && result <= ref_angle * 2 + margin)
                    {
                        glo.direction[index] = 0;
                    }
                    else if (ref_angle * 3 - margin <= result && result <= ref_angle * 3 + margin)
                    {
                        glo.direction[index] = 1;
                    }
                    else if (ref_angle * 4 - margin <= result && result <= ref_angle * 4 + margin)
                    {
                        glo.direction[index] = 2;
                    }
                    else if (ref_angle * 5 - margin <= result && result <= ref_angle * 5 + margin)
                    {
                        glo.direction[index] = 3;
                    }
                    else if (ref_angle * 6 - margin <= result && result <= ref_angle * 6 + margin)
                    {
                        glo.direction[index] = 4;
                    }
                    else if (ref_angle * 7 - margin <= result && result <= ref_angle * 7 + margin)
                    {
                        glo.direction[index] = 5;
                    }
                    else if ((ref_angle * 0 <= result && result <= ref_angle * 0 + margin) || (ref_angle * 8 - margin <= result && result <= ref_angle * 8))
                    {
                        glo.direction[index] = 6;
                    }
                    else if (ref_angle * 1 - margin <= result && result <= ref_angle * 1 + margin)
                    {
                        glo.direction[index] = 7;
                    }
                    else
                    {
                        glo.direction[index] = -1;
                    }

                    /*
                     * if (index == 0)
                     * {
                     *   if (glo.direction[index] != -1)
                     *   {
                     *       Console.WriteLine("result = " + result);
                     *       Console.WriteLine("globals.direction[i] = " + glo.direction[index]);
                     *   }
                     *
                     * }
                     */

                    //glo.mapObstacleLock.ExitWriteLock(); //critical section end

                    /*
                     * if (index == 0)
                     * {
                     *  if (glo.direction[index] == -1)
                     *      Console.WriteLine("i = " + index + " direction[index] = " + glo.direction[index] + " unknown angle" + " result = " + result);
                     *  else
                     *      Console.WriteLine("i = " + index + " direction[index] = " + glo.direction[index] + " result = " + result);
                     * }
                     */
                    // if (index == 3)
                    //   Console.WriteLine("");

                    /*
                     * if (glo.direction[index] == -1)
                     * Console.WriteLine("i = " + index + " direction[index] = " + glo.direction[index] + " result = " + result);
                     */
                }
                else
                {
                    rect[index]         = new Rectangle(0, 0, 0, 0); // small circle lost: zero the rectangle
                    color[index]        = false;
                    change_check[index] = true;
                    color_count--;

                    color_ROI[index].X = 0;
                    color_ROI[index].Y = 0;
                    Console.WriteLine("yellow circle disappeared");
                }
            }
            else
            {
                rect[index]         = new Rectangle(0, 0, 0, 0); // marker lost: zero the rectangle and reset the ROI
                color[index]        = false;
                change_check[index] = true;
                color_count--;
                color_ROI[index].X = 0;
                color_ROI[index].Y = 0;
                Console.WriteLine("vehicle disappeared");
            }
        }
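
The eight-branch chain above maps an angle in degrees to a direction index: sectors are centred on multiples of 45 degrees, direction 0 sits at 90 degrees, and -1 means the angle falls outside every sector's margin. Assuming the same ref_angle/margin convention, the mapping could be condensed into a hypothetical helper like AngleToDirection below, so the chain reduces to glo.direction[index] = AngleToDirection(result);

        // Hypothetical compact equivalent of the direction chain above.
        // Assumes degrees is already normalised to [0, 360).
        private static int AngleToDirection(double degrees, double refAngle = 45, double margin = 13)
        {
            int nearest = (int)Math.Round(degrees / refAngle); // nearest sector centre, 0..8
            if (Math.Abs(degrees - nearest * refAngle) > margin)
            {
                return -1; // angle falls between sectors
            }
            // The sector at 2 * refAngle (90 degrees) is direction 0, and 360
            // wraps to the same sector as 0, which the chain labels 6.
            return ((nearest % 8) + 6) % 8;
        }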