/// <summary>
/// Accumulates single-finger fingertip positions into a gesture trail and runs
/// recognition once the finger is lifted; two or more fingers replay the last
/// recognized gesture instead. Does nothing while a resize is in progress.
/// </summary>
/// <param name="rec">Recognizer holding the loaded gesture templates.</param>
public void HandleTranslation(GeometricRecognizer rec)
{
    if (isResizing)
    {
        return;
    }

    var handWithSingleFinger = handData.FingerCount == 1;

    // BUG FIX: the original `missedSuccessiveFrame = … : missedSuccessiveFrame++`
    // assigned the pre-increment value back to the same variable (x = x++ leaves x
    // unchanged in C#), so the counter never advanced and the stale-trail reset
    // below could never fire. Use explicit +1 instead.
    missedSuccessiveFrame = handWithSingleFinger ? 0 : missedSuccessiveFrame + 1;

    // Too many consecutive frames without a single finger: discard the stale trail.
    if (missedSuccessiveFrame > 10)
    {
        TargetsListofPoints.Clear();
        missedSuccessiveFrame = 0;
    }

    if (handWithSingleFinger)
    {
        TargetsListofPoints.Add(new PointR(handData.FingerPoints[0].X, handData.FingerPoints[0].Y, Environment.TickCount));
    }

    // Finger lifted with enough points collected: recognize the drawn gesture.
    if (handWithSingleFinger == false && TargetsListofPoints.Count > 30)
    {
        recognizeGestureUsingNRecognizer(rec, TargetsListofPoints);
        TargetsListofPoints.Clear();
    }

    if (handData.FingerCount >= 2)
    {
        performLastGestureNumberOfTimes();
    }
}
/// <summary>
/// Sets up video capture, detectors, skin-color thresholds, and the $N
/// geometric recognizer with its gesture templates.
/// </summary>
public ConvexHull()
{
    InitializeComponent();

    camera = new Capture(@"F:\Working\Final phase\DataSet\sequence.avi");
    fingerTipDetection = new FingerTip();
    skinDetector = new YCrCbSkinDetector();
    candidateTips = new List<Point>();
    fingerTracker = new List<Tracks>();

    // Skin-color thresholds in HSV and YCrCb color spaces.
    hsv_min = new Hsv(0, 45, 0);
    hsv_max = new Hsv(20, 255, 255);
    YCrCb_min = new Ycc(0, 131, 80);
    YCrCb_max = new Ycc(255, 185, 135);

    // $N recognizer: load one XML template per supported gesture.
    rec = new GeometricRecognizer();
    string[] templateNames =
    {
        "TranslateLeft", "TranslateRight",
        "RotateLeft", "RotateRight",
        "ZoomIn", "ZoomOut",
        "KZoomIn", "KZoomOut",
    };
    foreach (string templateName in templateNames)
    {
        rec.LoadGesture(@"C:\Users\ahmed nady\Documents\" + templateName + ".xml");
    }
}
/// <summary>
/// Initializes hand-tracking state, video capture, skin-color thresholds,
/// and the $N geometric recognizer with its gesture templates.
/// </summary>
public K_Curvature()
{
    InitializeComponent();

    handCandiate = new List<Contour<Point>>();
    detected_hand = new List<Contour<Point>>();
    hand_centers = new Dictionary<int, PointF>(2);
    grabber = new Emgu.CV.Capture();

    // Skin-color thresholds in HSV and YCrCb color spaces.
    hsv_min = new Hsv(0, 45, 0);
    hsv_max = new Hsv(20, 255, 255);
    YCrCb_min = new Ycc(0, 131, 80);
    YCrCb_max = new Ycc(255, 185, 135);

    // $N recognizer: load one XML template per supported gesture.
    rec = new GeometricRecognizer();
    string[] templateNames = { "ScaleUp", "ZoomIn", "ZoomOut", "rotateLeft", "rotateRight" };
    foreach (string templateName in templateNames)
    {
        rec.LoadGesture(@"E:\Documents\" + templateName + ".xml");
    }

    fingerTracker = new List<Tracks>();
}
/// <summary>
/// Wires up paint/close handlers and loads ten numbered template files
/// (&lt;gesture&gt;1.xml .. &lt;gesture&gt;10.xml) for each of the configured gestures.
/// </summary>
public ImageForm()
{
    InitializeComponent();

    this.Paint += new PaintEventHandler(ImageForm_Paint);
    this.FormClosing += new FormClosingEventHandler(ImageForm_FormClosing);

    rec = new GeometricRecognizer();
    // Load templates 1..10 for the current gesture, then advance to the next
    // gesture while any remain. NOTE(review): assumes `gesture` supports ++ and
    // string concatenation (char or int field) — confirm against declarations.
    for (int i = 1; i <= 10; i++)
    {
        rec.LoadGesture(@"C:\Users\ahmed nady\Documents\" + gesture + "" + i + ".xml");
        if (i == 10 && numOfGestures > 0)
        {
            // BUG FIX: the original reset `i = 1` here, but the loop's i++ then
            // made the next iteration start at 2, silently skipping template 1
            // of every gesture after the first. Reset to 0 so i++ yields 1.
            i = 0;
            numOfGestures--;
            gesture++;
        }
    }
}
/// <summary>Creates the geometric recognizer and preloads the zoom templates.</summary>
public Recognizer()
{
    _rec = new GeometricRecognizer();
    foreach (string templateName in new[] { "ZoomIn", "ZoomOut" })
    {
        _rec.LoadGesture(@"G:\Documents\" + templateName + ".xml");
    }
}
// Use this for initialization
void Start()
{
    geometricrec = new GeometricRecognizer();
    geometricrec.loadTemplates();

    // In edit mode the component starts out enabled.
    if (!m_IsEditMode)
    {
        return;
    }
    m_IsEnabled = true;
}
/// <summary>
/// One-shot lazy initialization: caches the tail mesh and creates the gesture
/// recognizer with its templates. Subsequent calls are no-ops.
/// </summary>
public void LoadResourceInsteadOfAwake()
{
    if (m_bLoadResourceInsteadOfAwake)
    {
        return;
    }
    m_bLoadResourceInsteadOfAwake = true;

    // GetComponent<MeshFilter>() already returns MeshFilter; no cast needed.
    m_meshFilter = GoTail.transform.GetComponent<MeshFilter>();
    m_mesh = m_meshFilter.mesh;

    m_recognizer = new GeometricRecognizer();
    m_recognizer.LoadTemplates();
}
/// <summary>
/// Console-mode batch test: collects every gesture XML under the configured
/// samples directory, assembles them per user, and runs the recognizer's batch
/// test. Mirrors MainForm.TestBatch_Click in the GUI version.
/// </summary>
static void RunConsoleVersion()
{
    string samplesDir = NDollarParameters.Instance.SamplesDirectory;
    bool include1D = NDollarParameters.Instance.Include1D;
    bool include2D = NDollarParameters.Instance.Include2D;

    GeometricRecognizer _rec = new GeometricRecognizer();

    // Gather every XML file below the samples directory (recursively).
    Directory.SetCurrentDirectory(samplesDir);
    DirectoryInfo dir = new DirectoryInfo(Directory.GetCurrentDirectory());
    FileInfo[] allXMLFiles = dir.GetFiles("*.xml", SearchOption.AllDirectories);

    Console.Write("Counting Gesture files");
    string[] allXMLFilenames = new string[allXMLFiles.Length];
    for (int i = 0; i < allXMLFiles.Length; i++)
    {
        allXMLFilenames[i] = allXMLFiles[i].FullName;
        Console.Write(".");  // progress dot per file
    }
    Console.WriteLine();
    Console.WriteLine("Number of files: " + allXMLFiles.Length);

    // Read the files; a null result means the batch could not be assembled.
    SamplesCollection categoriesByUser = _rec.AssembleBatch(allXMLFilenames, include1D, include2D);
    if (categoriesByUser == null)
    {
        Console.WriteLine("Unreadable files, or unbalanced number of gestures in categories.");
        return;
    }

    // Run the recognition and report the outcome.
    if (_rec.TestBatch(categoriesByUser, dir.ToString()))
    {
        Console.WriteLine("Testing complete.");
    }
    else
    {
        Console.WriteLine("There was an error writing the output file during testing.");
    }
}
/// <summary>
/// Guarded one-time setup: grabs the tail mesh from GoTail and builds the
/// gesture recognizer with its templates. Later calls return immediately.
/// </summary>
public void LoadResourceInsteadOfAwake()
{
    if (m_bLoadResourceInsteadOfAwake)
    {
        return;
    }
    m_bLoadResourceInsteadOfAwake = true;

    m_meshFilter = (MeshFilter)GoTail.transform.GetComponent<MeshFilter>();
    m_mesh = m_meshFilter.mesh;

    m_recognizer = new GeometricRecognizer();
    m_recognizer.LoadTemplates();
}
/// <summary>
/// Runs $N recognition on the collected point trail and, when the best match
/// scores above 0.7, sends the corresponding key sequence to the model and
/// records the gesture for later replay. Clears the trail on a match.
/// </summary>
/// <param name="recognizser">Recognizer holding the loaded templates.</param>
/// <param name="TargetsListofPoints">Collected fingertip trail; cleared on a match.</param>
private void recognizeGestureUsingNRecognizer(GeometricRecognizer recognizser, List<PointR> TargetsListofPoints)
{
    NBestList result = recognizser.Recognize(TargetsListofPoints, 1);
    if (result.Score > .7)
    {
        // BUG FIX: "ZoomIn2Fingers"/"ZoomOut2Fingers" share the "ZoomIn"/"ZoomOut"
        // prefix, so with the original branch order StartsWith("ZoomIn") matched
        // first and the two-finger branches were unreachable dead code. The more
        // specific prefixes must be tested BEFORE the general ones.
        if (result.Name.StartsWith("TranslateRight"))
        {
            controlModel("translate", "+{RIGHT 10}");
            lastRecognizedGesture = "TranslateRight";
        }
        else if (result.Name.StartsWith("TranslateLeft"))
        {
            controlModel("translate", "+{LEFT 10}");
            lastRecognizedGesture = "TranslateLeft";
        }
        else if (result.Name.StartsWith("ZoomIn2Fingers"))
        {
            // Two-finger zooms do not update lastRecognizedGesture.
            controlModel("Zoom", "%{UP 10}");
        }
        else if (result.Name.StartsWith("ZoomOut2Fingers"))
        {
            controlModel("Zoom", "%{DOWN 10}");
        }
        else if (result.Name.StartsWith("ZoomIn"))
        {
            controlModel("Zoom", "%{UP 10}");
            lastRecognizedGesture = "ZoomIn";
        }
        else if (result.Name.StartsWith("ZoomOut"))
        {
            controlModel("Zoom", "%{DOWN 10}");
            lastRecognizedGesture = "ZoomOut";
        }
        else if (result.Name.StartsWith("RotateLeft"))
        {
            controlModel("Rotate", "{LEFT 10}");
            lastRecognizedGesture = "RotateLeft";
        }
        else if (result.Name.StartsWith("RotateRight"))
        {
            controlModel("Rotate", "{RIGHT 10}");
            lastRecognizedGesture = "RotateRight";
        }
        TargetsListofPoints.Clear();
    }
}
/// <summary>
/// Tracks two-finger input for resizing: while exactly two fingers are down,
/// both fingertip positions are appended to the trail; once a finger lifts
/// and enough points were collected, the trail is run through recognition.
/// </summary>
/// <param name="rec">Recognizer holding the loaded gesture templates.</param>
public void ResizeSingleHand(GeometricRecognizer rec)
{
    bool twoFingersDown = handData.FingerCount == 2;
    if (twoFingersDown)
    {
        twoFingerPoints.Add(new PointR(handData.FingerPoints[0].X, handData.FingerPoints[0].Y, Environment.TickCount));
        twoFingerPoints.Add(new PointR(handData.FingerPoints[1].X, handData.FingerPoints[1].Y, Environment.TickCount));
    }
    this.isResizing = twoFingersDown;

    // Fingers lifted with enough points collected: recognize the two-finger trail.
    if (!isResizing && twoFingerPoints.Count > 20)
    {
        recognizeGestureUsingNRecognizer(rec, twoFingerPoints);
        twoFingerPoints.Clear();
    }
}
/// <summary>
/// Opens the test video, configures skin-color thresholds, loads the zoom
/// gesture templates into the $N recognizer, and loads the sample images.
/// </summary>
public FingerTipsTracking()
{
    InitializeComponent();

    camera = new Capture(@"D:\Working\STREAM\00003.MTS");

    // Skin-color thresholds in HSV and YCrCb color spaces.
    hsv_min = new Hsv(0, 45, 0);
    hsv_max = new Hsv(20, 255, 255);
    YCrCb_min = new Ycc(0, 131, 80);
    YCrCb_max = new Ycc(255, 185, 135);

    // $N recognizer with the two zoom templates.
    rec = new GeometricRecognizer();
    foreach (string templateName in new[] { "ZoomIn", "ZoomOut" })
    {
        rec.LoadGesture(@"E:\Documents\" + templateName + ".xml");
    }

    fingerTracker = new List<Tracks>();
    testImage = new Image<Bgr, byte>(@"C:\Users\Public\Pictures\Sample Pictures\Tulips.jpg");
    eImg = new Image<Gray, byte>(@"C:\Users\ahmed nady\Desktop\eImg1.jpg");
}
// Use this for initialization
void Start()
{
    geometricrec = new GeometricRecognizer();
    geometricrec.loadTemplates();

    // Edit mode starts with the component enabled; otherwise leave flags untouched.
    m_IsEnabled = m_IsEditMode || m_IsEnabled;
}