Code Example #1
        /// <summary>
        /// Initializes a new instance of the GestureDetector class along with the gesture frame source and reader
        /// </summary>
        /// <param name="kinectSensor">Active sensor to initialize the VisualGestureBuilderFrameSource object with</param>
        /// <param name="gestureResultView">GestureResultView object to store gesture results of a single body to</param>
        public GestureDetector(KinectSensor kinectSensor, GestureResultView gestureResultView)
        {
            if (kinectSensor == null)
            {
                throw new ArgumentNullException("kinectSensor");
            }

            if (gestureResultView == null)
            {
                throw new ArgumentNullException("gestureResultView");
            }

            GestureResultView = gestureResultView;

            // create the vgb source. The associated body tracking ID will be set when a valid body frame arrives from the sensor.
            _vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0);
            _vgbFrameSource.TrackingIdLost += Source_TrackingIdLost;

            // open the reader for the vgb frames
            _vgbFrameReader = _vgbFrameSource.OpenReader();
            if (_vgbFrameReader != null)
            {
                _vgbFrameReader.IsPaused      = true;
                _vgbFrameReader.FrameArrived += Reader_GestureFrameArrived;
            }

            // load the gestures from the gesture database
            using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(_gestureDatabase))
            {
                _vgbFrameSource.AddGestures(database.AvailableGestures);
            }
        }
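
The constructor above refers to members declared elsewhere in the GestureDetector class. As a rough sketch (the field and property names are those used in the code, but the database file name is only a placeholder), the supporting declarations could look like this:

        /// <summary> Path to the .gbd file trained in Visual Gesture Builder (placeholder file name) </summary>
        private readonly string _gestureDatabase = @"Database\KitchenGestures.gbd";

        /// <summary> Gesture frame source, tied to a body tracking ID once a valid body arrives </summary>
        private readonly VisualGestureBuilderFrameSource _vgbFrameSource;

        /// <summary> Gesture frame reader that raises FrameArrived events for the source above </summary>
        private readonly VisualGestureBuilderFrameReader _vgbFrameReader;

        /// <summary> GestureResultView object that stores the detection results for this body </summary>
        public GestureResultView GestureResultView { get; private set; }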
Code Example #2
        /// <summary>
        /// Handles gesture detection results arriving from the sensor for the associated body tracking Id
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void Reader_GestureFrameArrived(object sender, VisualGestureBuilderFrameArrivedEventArgs e)
        {
            _gestureName = null;

            using (VisualGestureBuilderFrame frame = _vgbFrameReader.CalculateAndAcquireLatestFrame())
            {
                if (frame != null)
                {
                    // get the discrete gesture results which arrived with the latest frame
                    IReadOnlyDictionary <Gesture, DiscreteGestureResult> discreteResults = frame.DiscreteGestureResults;

                    if (discreteResults != null)
                    {
                        foreach (Gesture gesture in _vgbFrameSource.Gestures)
                        {
                            DiscreteGestureResult result;
                            discreteResults.TryGetValue(gesture, out result);

                            if (result != null)
                            {
                                if (gesture.Name.Equals(_pickUp) && gesture.GestureType == GestureType.Discrete && result.Confidence > 0.05)
                                {
                                    // update the GestureResultView object with new gesture result values
                                    GestureResultView.UpdateGestureResult(true, result.Detected, result.Confidence, gesture.Name);
                                    _gestureName = "PickUp";
                                }
                                else if (gesture.Name.Equals(_putDown) && gesture.GestureType == GestureType.Discrete && result.Confidence > 0.05)
                                {
                                    // update the GestureResultView object with new gesture result values
                                    GestureResultView.UpdateGestureResult(true, result.Detected, result.Confidence, gesture.Name);
                                    _gestureName = "PutDown";
                                }
                                else if (gesture.Name.Equals(_openDoor) && gesture.GestureType == GestureType.Discrete && result.Confidence > 0.05)
                                {
                                    // update the GestureResultView object with new gesture result values
                                    GestureResultView.UpdateGestureResult(true, result.Detected, result.Confidence, gesture.Name);
                                    _gestureName = "OpenDoor";
                                }
                                else if (gesture.Name.Equals(_handToMouth) && gesture.GestureType == GestureType.Discrete && result.Confidence > 0.05)
                                {
                                    // update the GestureResultView object with new gesture result values
                                    GestureResultView.UpdateGestureResult(true, result.Detected, result.Confidence, gesture.Name);
                                    _gestureName = "HandToMouth";
                                }
                                else if (gesture.Name.Equals(_pour) && gesture.GestureType == GestureType.Discrete && result.Confidence > 0.05)
                                {
                                    // update the GestureResultView object with new gesture result values
                                    GestureResultView.UpdateGestureResult(true, result.Detected, result.Confidence, gesture.Name);
                                    _gestureName = "Pour";
                                }
                                else
                                {
                                    _gestureName = "No Gesture";
                                }
                            }
                        }
                    }
                }
            }
        }
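
Every matching branch of the else-if chain above performs the same UpdateGestureResult call and only the label assigned to _gestureName differs. A possible simplification is sketched below; it assumes the name fields (_pickUp, _putDown, _openDoor, _handToMouth, _pour) hold exactly the labels "PickUp", "PutDown", "OpenDoor", "HandToMouth" and "Pour", so gesture.Name can be reused directly, and it stops at the first match so a later non-matching gesture cannot overwrite a label set in the same frame:

        // Sketch only: collapses the per-gesture else-if chain, under the naming assumption above.
        private void ProcessDiscreteResults(IReadOnlyDictionary<Gesture, DiscreteGestureResult> discreteResults)
        {
            _gestureName = "No Gesture";

            foreach (Gesture gesture in _vgbFrameSource.Gestures)
            {
                DiscreteGestureResult result;
                if (!discreteResults.TryGetValue(gesture, out result) || result == null)
                {
                    continue;
                }

                if (gesture.GestureType == GestureType.Discrete && result.Confidence > 0.05)
                {
                    // every branch of the original chain performs this same update
                    GestureResultView.UpdateGestureResult(true, result.Detected, result.Confidence, gesture.Name);
                    _gestureName = gesture.Name;

                    // stop at the first gesture above the confidence threshold
                    break;
                }
            }
        }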
Code Example #3
        public MainPage()
        {
            // one sensor is currently supported
            _kinectSensor = KinectSensor.GetDefault();

            _coordinateMapper = _kinectSensor.CoordinateMapper;

            var multiSourceFrameReader = _kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Infrared | FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.BodyIndex | FrameSourceTypes.Body);

            multiSourceFrameReader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

            // set IsAvailableChanged event notifier
            _kinectSensor.IsAvailableChanged += Sensor_IsAvailableChanged;

            // use the page object as the view model in this simple example
            DataContext = this;

            // open the sensor
            _kinectSensor.Open();

            InitializeComponent();

            // new
            Loaded += MainPage_Loaded;

            // Initialize the gesture detection objects for our gestures
            _gestureDetectorList = new List <GestureDetector>();

            // Create a gesture detector for each body (6 bodies => 6 detectors)
            int maxBodies = _kinectSensor.BodyFrameSource.BodyCount;

            for (int i = 0; i < maxBodies; ++i)
            {
                GestureResultView result   = new GestureResultView(i, false, false, 0.0f, null);
                GestureDetector   detector = new GestureDetector(_kinectSensor, result);
                result.PropertyChanged += GestureResult_PropertyChanged;
                _gestureDetectorList.Add(detector);
            }
        }
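
The constructor wires up Reader_MultiSourceFrameArrived, but that handler is not part of this excerpt. A minimal sketch of the body-to-detector wiring, assuming GestureDetector exposes pass-through TrackingId and IsPaused properties for its frame source and reader (as the standard Visual Gesture Builder samples do), might look like this:

        // Body array and floor plane referenced later by GestureResult_PropertyChanged
        private Body[]  _bodies;
        private Vector4 _floorPlane;

        private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

            if (multiSourceFrame == null)
            {
                return;
            }

            using (BodyFrame bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame())
            {
                if (bodyFrame == null)
                {
                    return;
                }

                if (_bodies == null)
                {
                    _bodies = new Body[bodyFrame.BodyCount];
                }

                // refresh the body data and keep the floor plane for the tilt correction used later
                bodyFrame.GetAndRefreshBodyData(_bodies);
                _floorPlane = bodyFrame.FloorClipPlane;

                // hand each tracked body's tracking ID to its detector; pause detectors without a body
                for (int i = 0; i < _bodies.Length; ++i)
                {
                    ulong trackingId = _bodies[i].TrackingId;

                    // TrackingId and IsPaused are assumed pass-through properties on GestureDetector
                    if (trackingId != _gestureDetectorList[i].TrackingId)
                    {
                        _gestureDetectorList[i].TrackingId = trackingId;
                        _gestureDetectorList[i].IsPaused   = trackingId == 0;
                    }
                }
            }
        }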
Code Example #4
 /// <summary>
 /// Handles the TrackingIdLost event for the VisualGestureBuilderSource object
 /// </summary>
 /// <param name="sender">object sending the event</param>
 /// <param name="e">event arguments</param>
 private void Source_TrackingIdLost(object sender, TrackingIdLostEventArgs e)
 {
     // update the GestureResultView object to show the 'Not Tracked' image in the UI
     GestureResultView.UpdateGestureResult(false, false, 0.0f, null);
 }
Code Example #5
        /// <summary>
        /// Handles PropertyChanged events from a GestureResultView: updates the GUI, works out where
        /// in the room the gesture was performed, and records qualifying gestures with a timestamp
        /// </summary>
        private void GestureResult_PropertyChanged(object sender, PropertyChangedEventArgs e)
        {
            GestureResultView result = sender as GestureResultView;

            if (result == null)
            {
                return;
            }

            var detectedGestureName = result.GestureName;

            GestureVisual.Opacity  = result.Confidence;
            BodyTracked.Text       = result.BodyIndex.ToString();
            GestureName.FontSize   = 30;
            GestureName.Foreground = new SolidColorBrush(Colors.White);

            // Using the _bodies[] array, the specific body performing the gesture is found.
            // The locations of the left and right fingertips and the midpoint of the spine are stored as [x,y,z] coordinates.
            // These coordinates are initially expressed in the camera's (sensor's) coordinate system.
            Body body = _bodies[result.BodyIndex];

            if (body != null)
            {
                // FloorClipPlane results, used to normalise the coordinate system of the detector to the floor of the room
                float floorX = _floorPlane.X;
                float floorY = _floorPlane.Y;
                float floorZ = _floorPlane.Z;
                float floorW = _floorPlane.W;

                float cameraAngleRadians = (float)Math.Atan(floorZ / floorY);
                float cosCameraAngle     = (float)Math.Cos(cameraAngleRadians);
                float sinCameraAngle     = (float)Math.Sin(cameraAngleRadians);

                // Output the FloorClipPlane coordinates to the GUI for debug purposes
                FloorPosition.Text = "X: " + floorX + ", Y: " + floorY + ", Z: " + floorZ + ", W: " + floorW;

                // Collect the coordinates of the left and right finger tips and the spine midpoint
                Joint fingerTipLeft  = body.Joints[JointType.HandTipLeft];
                Joint fingerTipRight = body.Joints[JointType.HandTipRight];
                Joint spineMid       = body.Joints[JointType.SpineMid];

                // Location of the left and right finger tips and the spine midpoint, after trigonometric correction for the FloorClipPlane.
                // Gives the coordinate locations with the origin on the floor at the base of the camera, with y-axis normal to the floor.
                float[] fingerTipLeftXyz  = { fingerTipLeft.Position.X, (fingerTipLeft.Position.Y * cosCameraAngle) + (fingerTipLeft.Position.Z * sinCameraAngle) + floorW, (fingerTipLeft.Position.Z * cosCameraAngle) + (fingerTipLeft.Position.Y * sinCameraAngle) };
                float[] fingerTipRightXyz = { fingerTipRight.Position.X, (fingerTipRight.Position.Y * cosCameraAngle) + (fingerTipRight.Position.Z * sinCameraAngle) + floorW, (fingerTipRight.Position.Z * cosCameraAngle) + (fingerTipRight.Position.Y * sinCameraAngle) };
                float[] spineMidXyz       = { spineMid.Position.X, (spineMid.Position.Y * cosCameraAngle) + (spineMid.Position.Z * sinCameraAngle) + floorW, (spineMid.Position.Z * cosCameraAngle) + (spineMid.Position.Y * sinCameraAngle) };

                // Output the coordinates of the spine and the left fingertip to the GUI for debug purposes
                BodyPosition.Text  = "X: " + spineMidXyz[0].ToString("n4") + ", Y: " + spineMidXyz[1].ToString("n4") + ", Z: " + spineMidXyz[2].ToString("n4");
                LHandPosition.Text = "X: " + fingerTipLeftXyz[0].ToString("n4") + ", Y: " + fingerTipLeftXyz[1].ToString("n4") + ", Z: " + fingerTipLeftXyz[2].ToString("n4");

                // Check whether the location where the gesture is performed matches the key locations
                string gestureLocation = GestureLocationChecker(fingerTipLeftXyz, fingerTipRightXyz, spineMidXyz, detectedGestureName);

                GestureLocation.Text = gestureLocation;

                if (detectedGestureName == null || result.Confidence <= 0.1f || GestureLocation.Text == null)
                {
                    return;
                }

                GestureConfidence.Text = result.Confidence < 0.3f ? "Low" : (result.Confidence * 100f).ToString();

                // Output gesture name to GUI
                GestureName.Text = detectedGestureName;

                if (result.Confidence < 0.7f)
                {
                    return;
                }

                // confidence is at least 0.7, so change the gesture name in the GUI to green
                GestureName.Foreground = new SolidColorBrush(Colors.Green);

                switch (detectedGestureName)
                {
                case "PickUp":
                    switch (GestureLocation.Text)
                    {
                    case "Medication":
                        _pickUpMedication.Push(DateTime.Now);
                        break;

                    case "Pantry":
                        _pickUpPantry.Push(DateTime.Now);
                        break;

                    case "Fridge":
                        _pickUpFridge.Push(DateTime.Now);
                        break;

                    case "BowlCupboard":
                        _pickUpBowlCupboard.Push(DateTime.Now);
                        break;

                    case "FoodPrep":
                        _pickUpFoodPrep.Push(DateTime.Now);
                        break;

                    default:
                        break;
                    }
                    break;

                case "PutDown":
                    if (GestureLocation.Text == "FoodPrep")
                    {
                        _putDownFoodPrep.Push(DateTime.Now);
                    }
                    break;

                case "OpenDoor":
                    switch (GestureLocation.Text)
                    {
                    case "Pantry":
                        _openDoorPantry.Push(DateTime.Now);
                        break;

                    case "Fridge":
                        _openDoorFridge.Push(DateTime.Now);
                        break;

                    case "BowlCupboard":
                        _openDoorBowlCupboard.Push(DateTime.Now);
                        break;

                    default:
                        break;
                    }
                    break;

                case "HandToMouth":
                    if (GestureLocation.Text == "Dining")
                    {
                        _handToMouthDining.Push(DateTime.Now);
                    }
                    break;

                case "Pour":
                    if (GestureLocation.Text == "FoodPrep")
                    {
                        _pourFoodPrep.Push(DateTime.Now);
                    }
                    break;

                case "No Gesture":
                    GestureName.Foreground = new SolidColorBrush(Colors.Red);
                    GestureConfidence.Text = "";
                    break;

                default:
                    break;
                }

                // Check for an event, such as taking medication or eating a meal
                EventChecker();
            }
            else
            {
                BodyPosition.Text = "body = null";
            }
        }
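
GestureLocationChecker is not shown in this excerpt. As an illustration only, with every coordinate bound a made-up placeholder and only the location names taken from the switch above, it could compare the floor-corrected joint coordinates against boxes around the key locations:

        /// <summary>
        /// Sketch of a location check: returns the key location the gesture was performed at,
        /// or null when no location matches. All coordinate bounds are hypothetical placeholders.
        /// </summary>
        private string GestureLocationChecker(float[] fingerTipLeftXyz, float[] fingerTipRightXyz, float[] spineMidXyz, string gestureName)
        {
            // use the spine midpoint as a rough measure of where the person is standing;
            // the fingertip coordinates (and the gesture name) could be used to tighten the check,
            // e.g. requiring a hand to be near the cupboard for an "OpenDoor" gesture
            float x = spineMidXyz[0];
            float z = spineMidXyz[2];

            if (x > -0.5f && x < 0.5f && z > 1.0f && z < 2.0f)
            {
                return "FoodPrep";
            }

            if (x >= 0.5f && x < 1.5f && z > 1.0f && z < 2.0f)
            {
                return "Fridge";
            }

            // further boxes would cover "Pantry", "BowlCupboard", "Medication" and "Dining"

            return null;
        }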