/// <summary>
/// Sets up the capture source, detectors, tracking state, skin-colour
/// thresholds and the $N geometric recogniser with its gesture templates.
/// </summary>
public ConvexHull()
{
    InitializeComponent();

    // Video source used for evaluation (other test clips were listed in
    // earlier revisions of this file).
    camera = new Capture(@"F:\Working\Final phase\DataSet\sequence.avi");

    fingerTipDetection = new FingerTip();
    skinDetector = new YCrCbSkinDetector();
    candidateTips = new List<Point>();
    fingerTracker = new List<Tracks>();

    // Skin-colour segmentation thresholds in HSV and YCrCb colour spaces.
    hsv_min = new Hsv(0, 45, 0);
    hsv_max = new Hsv(20, 255, 255);
    YCrCb_min = new Ycc(0, 131, 80);
    YCrCb_max = new Ycc(255, 185, 135);

    // $N recogniser: one stored template per supported gesture.
    rec = new GeometricRecognizer();
    foreach (string gestureName in new[]
    {
        "TranslateLeft", "TranslateRight",
        "RotateLeft", "RotateRight",
        "ZoomIn", "ZoomOut",
        "KZoomIn", "KZoomOut"
    })
    {
        rec.LoadGesture(@"C:\Users\ahmed nady\Documents\" + gestureName + ".xml");
    }
}
/// <summary>
/// Initialises the webcam capture, skin detector, colour-space thresholds
/// and the four grayscale posture templates, then builds a horizontal strip
/// combining all templates for display.
/// </summary>
public HandPosture()
{
    InitializeComponent();

    skinDetector = new YCrCbSkinDetector();
    camera = new Capture(); // default webcam

    // Skin-colour segmentation thresholds in HSV and YCrCb colour spaces.
    hsv_min = new Hsv(0, 45, 0);
    hsv_max = new Hsv(20, 255, 255);
    YCrCb_min = new Ycc(0, 131, 80);
    YCrCb_max = new Ycc(255, 185, 135);

    // Grayscale reference images, one per recognised hand posture.
    template = new Image<Gray, byte>(@"C:\Users\ahmed nady\Desktop\hand1.jpg");
    template1 = new Image<Gray, byte>(@"C:\Users\ahmed nady\Desktop\zoomOut.jpg");
    template2 = new Image<Gray, byte>(@"C:\Users\ahmed nady\Desktop\rotateLeft.jpg");
    template3 = new Image<Gray, byte>(@"C:\Users\ahmed nady\Desktop\rotateRight.jpg");

    testImg = new Image<Bgr, byte>(@"C:\Users\Public\Pictures\Sample Pictures\Tulips.jpg");

    // Side-by-side strip of all four templates (debug/visualisation aid).
    combiningGesture = (template.ConcateHorizontal(template1))
        .ConcateHorizontal(template2.ConcateHorizontal(template3));
}
/// <summary>
/// Per-frame callback: grabs the next frame as a Mat, converts it to a
/// Bgr image, runs YCrCb skin detection, and hands the binary skin mask
/// to the blob extraction / cropping stage.
/// </summary>
void FrameGrabber(object sender, EventArgs e)
{
    // BUGFIX: QueryFrame() can return null (e.g. end of a video file).
    // The original converted the Mat to Image<Bgr,byte> BEFORE its null
    // check, which would throw on a null frame. It also allocated a Mat
    // that was immediately overwritten; both issues fixed here.
    Mat x = grabber.QueryFrame();
    if (x == null)
    {
        return;
    }

    currentFrame = x.ToImage<Bgr, byte>();
    if (currentFrame != null)
    {
        currentFrameCopy = currentFrame.Copy();

        // NOTE(review): a fresh detector is allocated every frame; it
        // appears stateless, so it could likely be hoisted to a field.
        skinDetector = new YCrCbSkinDetector();
        Image<Gray, Byte> skin = skinDetector.DetectSkin(currentFrameCopy, YCrCb_min, YCrCb_max);

        ExtractBlobAndCrop(skin);
    }
}
/// <summary>
/// Per-frame callback: optionally records raw frames to disk ("movie"
/// mode, capped at 500 frames), runs skin detection, finger counting and
/// gesture execution, updates the UI, and optionally writes a one-shot
/// snapshot pair (binary mask + colour frame).
/// </summary>
void FrameGrabber(object sender, EventArgs e)
{
    currentFrame = grabber.QueryFrame();

    // BUGFIX: the original aliased currentFrame into capturedImage and
    // saved it inside the movie branch BEFORE the null check, so a dropped
    // frame or end-of-stream caused a NullReferenceException. Bail out first.
    if (currentFrame == null)
    {
        return;
    }

    Image<Bgr, byte> capturedImage = currentFrame;

    // FILM: dump up to 500 consecutive frames as numbered JPEGs.
    if (takeMovie && nextFrame < 500)
    {
        capturedImage.Save(@".\..\..\..\MOVIES\" + nextFrame + ".jpg");
        nextFrame++;
    }

    currentFrameCopy = currentFrame.Copy();

    skinDetector = new YCrCbSkinDetector();
    Image<Gray, Byte> skin = skinDetector.DetectSkin(currentFrameCopy, YCrCb_min, YCrCb_max);

    ExtractContourAndHull(skin);
    fingerNum = DrawAndComputeFingersNum();
    GestureExecution(fingerNum);

    imageBoxSkin.Image = skin;
    imageBoxFrameGrabber.Image = currentFrame;

    // One-shot snapshot: save the binary mask and the colour frame, then
    // clear the flag so exactly one pair is written per request.
    if (takeSnapshot)
    {
        skin.Save(@".\..\..\..\PICTURES\" + nextPicture + "binary.jpg");
        capturedImage.Save(@".\..\..\..\PICTURES\" + nextPicture + ".jpg");
        nextPicture++;
        takeSnapshot = !takeSnapshot;
    }
}
/// <summary>
/// Runs YCrCb skin detection on the loaded image, extracts the observed
/// feature vector from the skin mask, builds the first hypothesis from the
/// second image, and refreshes both picture boxes.
/// </summary>
private void procesarImagen()
{
    skinDetector = new YCrCbSkinDetector();
    Image<Gray, Byte> piel = skinDetector.DetectSkin(imagen, YCrCb_min, YCrCb_max);

    observedImageVector = ObservedImageFunctions.ExtractFeatures(piel, imagen);
    hypothesisImageVector = HypothesisImageFunctions.createFirstHypothesis(imagen2);

    imgCaja.Refresh();
    imgCaja2.Refresh();
}
/// <summary>
/// Per-frame callback: skin-segments the current frame, extracts the hand
/// contour and convex hull, counts fingers, and updates both image boxes.
/// </summary>
void FrameGrabber(object sender, EventArgs e)
{
    currentFrame = grabber.QueryFrame();
    if (currentFrame == null)
    {
        return; // guard clause instead of wrapping the body in the null test
    }

    currentFrameCopy = currentFrame.Copy();

    skinDetector = new YCrCbSkinDetector();
    Image<Gray, Byte> skinMask = skinDetector.DetectSkin(currentFrameCopy, YCrCb_min, YCrCb_max);

    ExtractContourAndHull(skinMask);
    DrawAndComputeFingersNum();

    imageBoxSkin.Image = skinMask;
    imageBoxFrameGrabber.Image = currentFrame;
}
/// <summary>
/// Frame-grab callback for the video file: segments skin in YCrCb space,
/// extracts the hand contour and hull, counts fingers, and updates the UI.
/// (Original comments were in Vietnamese; translated to English.)
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
void FrameGrabber(object sender, EventArgs e)
{
    currentFrame = grabber.QueryFrame();
    if (currentFrame != null)
    {
        // Work on a copy so the displayed frame is left untouched.
        currentFrameCopy = currentFrame.Copy();

        // Use YCrCbSkinDetector to obtain the binary hand mask.
        skinDetector = new YCrCbSkinDetector();
        Image<Gray, Byte> skin = skinDetector.DetectSkin(currentFrameCopy, YCrCb_min, YCrCb_max);

        ExtractContourAndHull(skin);
        DrawAndComputeFingersNum();

        imageBoxSkin.Image = skin;
        imageBoxFrameGrabber.Image = currentFrame;
    }
}
// Frame-grab callback for the video file. (Original comments were
// encoding-mangled Vietnamese; translated to English.) Produces a
// black-and-white hand mask via the YCrCb skin detector, then extracts the
// contour/hull and counts fingers.
void FrameGrabber(object sender, EventArgs e)
{
    currentFrame = grabber.QueryFrame();
    if (currentFrame == null)
    {
        return;
    }

    currentFrameCopy = currentFrame.Copy();

    skinDetector = new YCrCbSkinDetector();
    Image<Gray, Byte> handMask = skinDetector.DetectSkin(currentFrameCopy, YCrCb_min, YCrCb_max);

    ExtractContourAndHull(handMask);
    DrawAndComputeFingersNum();

    imageBoxSkin.Image = handMask;
    imageBoxFrameGrabber.Image = currentFrame;
}
/// <summary>
/// Per-frame callback using the reduced-size frame: skin-segments it,
/// extracts the hand contour and convex hull, and updates the image boxes.
/// </summary>
void FrameGrabber(object sender, EventArgs e)
{
    currentFrame = grabber.QuerySmallFrame();
    if (currentFrame == null)
    {
        return;
    }

    currentFrameCopy = currentFrame.Copy();

    skinDetector = new YCrCbSkinDetector();
    Image<Gray, Byte> skinMask = skinDetector.DetectSkin(currentFrameCopy, YCrCb_min, YCrCb_max);

    ExtractContourAndHull(skinMask);

    imageBoxSkin.Image = skinMask;
    imageBoxFrameGrabber.Image = currentFrame;
}
/// <summary>
/// WPF per-frame callback: mirrors the frame horizontally, skin-segments
/// it, extracts the contour/hull and draws the finger count, then marshals
/// both bitmaps onto the UI thread via the Dispatcher.
/// </summary>
void FrameGrabber(object sender, EventArgs e)
{
    // BUGFIX: the original called .Flip() on QueryFrame()'s result before
    // any null check (making the later check useless), so end-of-stream
    // crashed with a NullReferenceException.
    var grabbed = grabber.QueryFrame();
    if (grabbed == null)
    {
        return;
    }

    currentFrame = grabbed.Flip(Emgu.CV.CvEnum.FLIP.HORIZONTAL);
    currentFrameCopy = currentFrame.Copy();

    skinDetector = new YCrCbSkinDetector();
    Image<Gray, Byte> skin = skinDetector.DetectSkin(currentFrameCopy, YCrCb_min, YCrCb_max);

    ExtractContourAndHull(skin);

    try
    {
        if (defects != null)
        {
            DrawAndComputeFingersNum();
        }
    }
    catch (IndexOutOfRangeException) // unused variable removed
    {
        // Known sporadic failure inside the defect analysis; log and continue.
        Console.WriteLine("{0}", defectArray.Count() == 0 ? 0 : defectArray.Count());
    }

    // UI elements must be updated on the dispatcher thread.
    Dispatcher.Invoke(DispatcherPriority.Render, new Action(
        delegate()
        {
            imageBoxSkin.Source = skin.ToBitmapSource();
            imageBoxFrameGrabber.Source = currentFrame.ToBitmapSource();
        }
    ));
}
/// <summary>
/// Per-frame callback: grabs both a Bgr frame and the raw frame, runs
/// YCrCb skin detection, smooths the mask, extracts the contour/hull and
/// draws the finger count. Any failure is reported in the gesture textbox
/// instead of crashing the UI loop.
/// </summary>
void processFrameAndUpdateGUI(object sender, EventArgs e)
{
    try
    {
        imgOriginal = capwebcam.RetrieveBgrFrame();
        currentFrame = capwebcam.QueryFrame();

        // BUGFIX: the two frames come from separate calls and can be null
        // independently; the original checked only imgOriginal and then
        // dereferenced currentFrame.
        if (imgOriginal == null || currentFrame == null)
        {
            return;
        }

        skinDetector = new YCrCbSkinDetector();
        Image<Gray, Byte> skin = skinDetector.DetectSkin(currentFrame, YCrCb_min, YCrCb_max);

        // Gaussian blur smooths ragged edges of the binary mask.
        imgProcessed = skin.SmoothGaussian(9);
        tbImageName.Text = i.ToString() + ".jpg";

        ExtractContourAndHull(imgProcessed);
        if (defects != null)
        {
            DrawAndComputeFingersNum();
        }

        ibOriginal.Image = currentFrame;
    }
    catch (Exception ex)
    {
        tbGesture.Text = "Exception found!" + ex;
    }
}
/// <summary>
/// Per-frame callback: skin-segments the current frame in YCrCb space,
/// smooths the mask, extracts the hand contour/hull and draws the finger
/// count, then updates both picture boxes. Large blocks of abandoned
/// experiments (foreground detectors, frame differencing, circle drawing)
/// have been removed; see source-control history if they are needed.
/// </summary>
void processFrameAndUpdateGUI(object sender, EventArgs e)
{
    imgOriginal = capwebcam.RetrieveBgrFrame();
    currentFrame = capwebcam.QueryFrame();

    // BUGFIX: the original checked only imgOriginal for null and then
    // passed a possibly-null currentFrame to DetectSkin; guard both.
    if (imgOriginal == null || currentFrame == null)
    {
        return;
    }

    skinDetector = new YCrCbSkinDetector();
    Image<Gray, Byte> skin = skinDetector.DetectSkin(currentFrame, YCrCb_min, YCrCb_max);

    // Gaussian blur knits small holes in the binary mask before contouring.
    imgProcessed = skin.SmoothGaussian(9);

    ExtractContourAndHull(imgProcessed);
    if (defects != null)
    {
        DrawAndComputeFingersNum();
    }

    ibProcessed.Image = imgProcessed;
    ibOriginal.Image = currentFrame;
}
/// <summary>
/// Per-frame callback: smooths the captured frame, builds a cleaned binary
/// hand mask, locates candidate fingertips via the distance-transform
/// detector, and marks them on the displayed frame. (Abandoned
/// commented-out contour/hull and CamShift experiments removed.)
/// </summary>
private void processImage(object sender, EventArgs e)
{
    Image<Bgr, byte> frame = camera.QueryFrame();
    CaptureImageBox.Image = frame;
    if (frame == null)
    {
        return;
    }

    // Light blur suppresses sensor noise before colour thresholding.
    frame._SmoothGaussian(3);

    skinDetector = new YCrCbSkinDetector();
    Image<Gray, Byte> handMask = skinDetector.DetectSkin(frame, YCrCb_min, YCrCb_max);

    // Erode then dilate (morphological open) removes speckle from the mask.
    handMask._Erode(3);
    handMask._Dilate(3);

    // Fingertip candidates; the frame is mutated in place, and since it is
    // the same reference already assigned to the image box, the circles
    // appear on the displayed image.
    List<Point> tips = distanceTransform(handMask, frame);
    foreach (Point tip in tips)
    {
        frame.Draw(new CircleF((PointF)tip, 10), new Bgr(Color.Blue), 2);
    }
}
/// <summary>
/// Main per-frame entry point: displays the grabbed frame, builds a binary
/// skin mask, runs hand detection to populate the fingertip list, and
/// marks each detected fingertip on the frame. (Abandoned commented-out
/// filtering and multi-hand tracking experiments have been removed.)
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
void FrameGrabber(object sender, EventArgs e)
{
    newImage = grabber.QueryFrame();

    // BUGFIX: QueryFrame() returns null at end-of-stream; the original
    // passed it straight to DetectSkin and crashed.
    if (newImage == null)
    {
        return;
    }

    CaptureImageBox.Image = newImage;

    skinDetector = new YCrCbSkinDetector();
    Image<Gray, Byte> BinaryHandImage = skinDetector.DetectSkin(newImage, YCrCb_min, YCrCb_max);

    // HandDetection fills the `finger` field with fingertip candidates.
    finger = new List<Point>();
    HandDetection(BinaryHandImage);

    foreach (Point tip in finger)
    {
        newImage.Draw(new Emgu.CV.Structure.CircleF(tip, 5), new Bgr(Color.Blue), 6);
    }
}
/// <summary>
/// Main per-frame entry point: skin-segments and erodes the frame, runs
/// hand detection for fingertip candidates, then uses a thresholded
/// distance transform of the mask (bright = deep inside the hand) to
/// discard candidates that lie too far inside the palm, and finally draws
/// the surviving fingertips and their count.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
void FrameGrabber(object sender, EventArgs e)
{
    newImage = grabber.QueryFrame();
    if (newImage == null)
    {
        // End of the video file: shut the form down.
        this.Close();
        return;
    }

    imageBoxFrameGrabber.Image = newImage;
    newImage._SmoothGaussian(3);

    skinDetector = new YCrCbSkinDetector();
    Image<Gray, Byte> BinaryHandImage = skinDetector.DetectSkin(newImage, YCrCb_min, YCrCb_max);

    finger = new List<Point>();
    BinaryHandImage._Erode(3);
    fingerTipsTrajectoryBox.Image = BinaryHandImage;

    HandDetection(BinaryHandImage);

    // Distance transform: pixel intensity grows with distance from the
    // mask boundary, so thresholding isolates the palm interior.
    Image<Gray, float> distTransform = new Image<Gray, float>(BinaryHandImage.Width, BinaryHandImage.Height);
    CvInvoke.cvDistTransform(BinaryHandImage, distTransform, DIST_TYPE.CV_DIST_L2, kernel_size, null, IntPtr.Zero);
    distTransform = distTransform.ThresholdBinary(new Gray(5), new Gray(255));

    Contour<Point> palm = ExtractBiggestContour(distTransform.Convert<Gray, byte>());
    if (palm != null)
    {
        distTransform.Draw(palm, new Gray(200), 2);

        // BUGFIX: the original called finger.RemoveAt(j) inside a forward
        // for-loop without adjusting j, silently skipping the element that
        // slid into the removed slot. Iterating backwards makes removal safe.
        for (int j = finger.Count - 1; j >= 0; j--)
        {
            // Signed distance from candidate to the palm contour; positive
            // means inside, so reject points not clearly outside the palm.
            double dst = CvInvoke.cvPointPolygonTest(palm, new PointF(finger[j].X, finger[j].Y), true);
            if (dst > -80)
            {
                finger.RemoveAt(j);
            }
        }
    }

    newImage.Draw("FingerTips : " + finger.Count, ref f, new Point(10, 40), new Bgr(0, 255, 0));

    foreach (Point tip in finger)
    {
        newImage.Draw(new Emgu.CV.Structure.CircleF(tip, 5), new Bgr(Color.Blue), 6);
    }
}