Example #1
        private void M_FaceReader_FrameArrived(object sender, Microsoft.Kinect.Face.FaceFrameArrivedEventArgs e)
        {
            var frameRef = e.FrameReference;

            using (var faceFrame = frameRef.AcquireFrame())
            {
                if (faceFrame != null)
                {
                    //get the Color Region
                    if (faceFrame.FaceFrameResult != null)
                    {
                        m_drawingRegion = faceFrame.FaceFrameResult.FaceBoundingBoxInColorSpace;
                        var faceRegion = new RectI();
                        faceRegion.Top    = Math.Abs(m_drawingRegion.Top - 36);
                        faceRegion.Bottom = Math.Abs(m_drawingRegion.Bottom - 12);
                        faceRegion.Left   = Math.Abs(m_drawingRegion.Left + 26);
                        faceRegion.Right  = Math.Abs(m_drawingRegion.Right - 20);
                        DrawBox(faceRegion);

                        //Take the new region and record ColorFrame Data
                        if (m_timerStarted)
                        {
                            RecordData(faceRegion, faceFrame);
                            lblColorFeeds.Text = "Please be still taking measurements...";
                        }
                        else
                        {
                            lblColorFeeds.Text         = "Face Found, Click the Calculate button to start taking measurements...";
                            btnCalculateRate.IsEnabled = true;
                        }
                    }
                }
            }
        }
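All of the handlers in this section assume that a FaceFrameSource/FaceFrameReader pair has already been created and that the source is fed a TrackingId from body tracking. Below is a minimal setup sketch for Example #1; the field names (m_sensor, m_faceSource, m_faceReader) and the chosen FaceFrameFeatures flags are assumptions, not part of the original code.

        // Minimal sketch: create the face source/reader assumed by the handler above.
        private void OpenFaceReader()
        {
            FaceFrameFeatures features =
                FaceFrameFeatures.BoundingBoxInColorSpace |
                FaceFrameFeatures.PointsInColorSpace |
                FaceFrameFeatures.LeftEyeClosed |
                FaceFrameFeatures.RightEyeClosed |
                FaceFrameFeatures.MouthOpen;

            // An initial TrackingId of 0 means "no body yet"; the body-frame handler
            // typically assigns a real id later, e.g. m_faceSource.TrackingId = body.TrackingId;
            m_faceSource = new FaceFrameSource(m_sensor, 0, features);
            m_faceReader = m_faceSource.OpenReader();
            m_faceReader.FrameArrived += M_FaceReader_FrameArrived;
        }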
Example #2
        void faceReader_FrameArrived(object sender, Microsoft.Kinect.Face.FaceFrameArrivedEventArgs e)
        {
            using (FaceFrame frame = e.FrameReference.AcquireFrame())
            {
                if (frame != null)
                {
                    var res = frame.FaceFrameResult;

                    if (res != null)
                    {
                        for (int i = 0; i < this.lastResults.Length; i++)
                        {
                            if (frame.TrackingId == this.faceFrameReaders[i].FaceFrameSource.TrackingId)
                            {
                                this.lastResults[i] = res;
                            }
                        }

                        this.FOutFrameNumber[0] = (int)res.RelativeTime.Ticks;

                        //this.WriteFaceData(res, 0);
                    }
                }
            }
        }
Example #3
 void faceReader_FrameArrived(object sender, Microsoft.Kinect.Face.FaceFrameArrivedEventArgs e)
 {
     using (FaceFrame frame = e.FrameReference.AcquireFrame())
     {
         if (frame != null)
         {
             var res = frame.FaceFrameResult;
             if (res == null) return;

             this.FOutFrameNumber[0] = (int)res.RelativeTime.Ticks;
             this.FOutOrientation[0] = new Quaternion(res.FaceRotationQuaternion.X, res.FaceRotationQuaternion.Y, res.FaceRotationQuaternion.Z, res.FaceRotationQuaternion.W);
         }
     }
 }
Example #4
 private void FrameArrived(object sender, FaceFrameArrivedEventArgs e)
 {
     using (FaceFrame frame = e.FrameReference.AcquireFrame())
     {
         if (frame != null)
         {
             if (!frame.IsTrackingIdValid) { return; }
             if (this.FaceResultAcquired != null)
             {
                 this.FaceResultAcquired(this, new FaceFrameResultEventArgs(this.frameSource.TrackingId, frame.FaceFrameResult));
             }
         }
     }
 }
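The FaceResultAcquired event above uses a FaceFrameResultEventArgs type that is not part of the Kinect SDK and is not shown here. A hypothetical minimal definition, consistent only with how it is constructed in Example #4:

 // Hypothetical event-args type matching the constructor call in Example #4.
 public class FaceFrameResultEventArgs : EventArgs
 {
     public FaceFrameResultEventArgs(ulong trackingId, FaceFrameResult faceFrameResult)
     {
         this.TrackingId = trackingId;
         this.FaceFrameResult = faceFrameResult;
     }

     public ulong TrackingId { get; private set; }

     public FaceFrameResult FaceFrameResult { get; private set; }
 }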
Example #5
        void FaceReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
        {
            using (var frame = e.FrameReference.AcquireFrame())
            {
                if (frame != null)
                {
                    // 4) Get the face frame result
                    FaceFrameResult result = frame.FaceFrameResult;

                    if (result != null)
                    {
                        // 5) Do magic!
                        // Get the face points, mapped in the color space.
                        var eyeLeft = result.FacePointsInColorSpace[FacePointType.EyeLeft];
                        var eyeRight = result.FacePointsInColorSpace[FacePointType.EyeRight];

                        // var nose = result.FacePointsInColorSpace[FacePointType.Nose];
                        //var mouthLeft = result.FacePointsInColorSpace[FacePointType.MouthCornerLeft];
                        //var mouthRight = result.FacePointsInColorSpace[FacePointType.MouthCornerRight];

                        var eyeLeftClosed = result.FaceProperties[FaceProperty.LeftEyeClosed];
                        var eyeRightClosed = result.FaceProperties[FaceProperty.RightEyeClosed];
                        //var mouthOpen = result.FaceProperties[FaceProperty.MouthOpen];

                        // Position the canvas UI elements
                        Canvas.SetLeft(ellipseEyeLeft, eyeLeft.X - ellipseEyeLeft.Width / 2.0);
                        Canvas.SetTop(ellipseEyeLeft, eyeLeft.Y - ellipseEyeLeft.Height / 2.0);

                        Canvas.SetLeft(ellipseEyeRight, eyeRight.X - ellipseEyeRight.Width / 2.0);
                        Canvas.SetTop(ellipseEyeRight, eyeRight.Y - ellipseEyeRight.Height / 2.0);

                        //Canvas.SetLeft(ellipseNose, nose.X - ellipseNose.Width / 2.0);
                        //Canvas.SetTop(ellipseNose, nose.Y - ellipseNose.Height / 2.0);

                        //Canvas.SetLeft(ellipseMouth, ((mouthRight.X + mouthLeft.X) / 2.0) - ellipseMouth.Width / 2.0);
                        //Canvas.SetTop(ellipseMouth, ((mouthRight.Y + mouthLeft.Y) / 2.0) - ellipseMouth.Height / 2.0);
                        //ellipseMouth.Width = Math.Abs(mouthRight.X - mouthLeft.X);

                        // Display or hide the ellipses
                        if ((eyeLeftClosed == DetectionResult.Yes || eyeLeftClosed == DetectionResult.Maybe) && (eyeRightClosed == DetectionResult.Yes || eyeRightClosed == DetectionResult.Maybe))
                        {
                            /*while ((eyeLeftClosed == DetectionResult.Yes || eyeLeftClosed == DetectionResult.Maybe) && (eyeRightClosed == DetectionResult.Yes || eyeRightClosed == DetectionResult.Maybe))
                            {
                                if (counter >= 3)
                                {
                                    Sendpulse(4);
                                    break;
                                }
                                ellipseEyeLeft.Visibility = Visibility.Collapsed;
                                ellipseEyeRight.Visibility = Visibility.Collapsed;
                                counter++;
                                Thread.Sleep(1000);
                            }*/
                            eyeclosecounter++;
                            eyeopencounter = 0;
                            //Sendpulse(0);
                        }
                        else
                        {
                            ellipseEyeLeft.Visibility = Visibility.Visible;
                            ellipseEyeRight.Visibility = Visibility.Visible;
                            //Sendpulse(1);
                            eyeopencounter++;
                            eyeclosecounter = 0;
                            //Thread.Sleep(1000);
                        }

                        if (eyeclosecounter > 30)
                            Sendpulse(000);
                        if (eyeopencounter > 60)
                            Sendpulse(111);

                        /*  if (counter == 2)
                              Sendpulse(0);
                          else if (counter == 5)
                              Sendpulse(1);
                          else if (counter == 10)
                              Sendpulse(2);
                          else
                              Sendpulse(3); */

                        /*if (mouthOpen == DetectionResult.Yes || mouthOpen == DetectionResult.Maybe)
                        {
                            ellipseMouth.Height = 50.0;
                        }
                        else
                        {
                            ellipseMouth.Height = 20.0;
                        }*/
                    }
                }
            }
        }
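Sendpulse is not defined in this snippet; from the call sites it appears to signal an external device once the eyes have been closed (or open) for long enough. A purely hypothetical sketch, assuming a System.IO.Ports.SerialPort field named serialPort:

        // Hypothetical: forward the pulse code to an external device over a serial link.
        private void Sendpulse(int code)
        {
            if (serialPort != null && serialPort.IsOpen)
            {
                serialPort.WriteLine(code.ToString());
            }
        }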
Example #6
        /// <summary>
        /// Process Face Frames
        /// </summary>
        private void OnFaceFrameArrived(object sender, FaceFrameArrivedEventArgs e)
        {
            // Retrieve the face reference
            FaceFrameReference faceRef = e.FrameReference;

            if (faceRef == null) return;

            // Acquire the face frame
            using (FaceFrame faceFrame = faceRef.AcquireFrame())
            {
                if (faceFrame == null) return;

                // Retrieve the face frame result
                FaceFrameResult frameResult = faceFrame.FaceFrameResult;

                if (frameResult != null)
                {
                    // Update trackers
                    UpdateTrackers(frameResult);
                }
            }
        }
Example #7
        private void FaceReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
        {
            using (var frame = e.FrameReference.AcquireFrame())
            {
                if (frame != null)
                {
                    // Get the face frame result
                    FaceFrameResult result = frame.FaceFrameResult;

                    if (result != null)
                    {
                        // Get the face points, mapped in the color space
                        var eyeLeft = result.FacePointsInColorSpace[FacePointType.EyeLeft];
                        var eyeRight = result.FacePointsInColorSpace[FacePointType.EyeRight];
                        var nose = result.FacePointsInColorSpace[FacePointType.Nose];
                        var mouthLeft = result.FacePointsInColorSpace[FacePointType.MouthCornerLeft];
                        var mouthRight = result.FacePointsInColorSpace[FacePointType.MouthCornerRight];

                        // Get the face characteristics
                        var eyeLeftClosed = result.FaceProperties[FaceProperty.LeftEyeClosed];
                        var eyeRightClosed = result.FaceProperties[FaceProperty.RightEyeClosed];
                        var mouthOpen = result.FaceProperties[FaceProperty.MouthOpen];

                        // Position the canvas UI elements
                        Canvas.SetLeft(ellipseEyeLeft, eyeLeft.X - ellipseEyeLeft.Width / 2.0);
                        Canvas.SetTop(ellipseEyeLeft, eyeLeft.Y - ellipseEyeLeft.Height / 2.0);

                        Canvas.SetLeft(ellipseEyeRight, eyeRight.X - ellipseEyeRight.Width / 2.0);
                        Canvas.SetTop(ellipseEyeRight, eyeRight.Y - ellipseEyeRight.Height / 2.0);

                        Canvas.SetLeft(ellipseNose, nose.X - ellipseNose.Width / 2.0);
                        Canvas.SetTop(ellipseNose, nose.Y - ellipseNose.Height / 2.0);

                        Canvas.SetLeft(ellipseMouth, ((mouthRight.X + mouthLeft.X) / 2.0) - ellipseMouth.Width / 2.0);
                        Canvas.SetTop(ellipseMouth, ((mouthRight.Y + mouthLeft.Y) / 2.0) - ellipseMouth.Height / 2.0);
                        ellipseMouth.Width = Math.Abs(mouthRight.X - mouthLeft.X);

                        // Display or hide the ellipses
                        if (eyeLeftClosed == DetectionResult.Yes || eyeLeftClosed == DetectionResult.Maybe)
                        {
                            ellipseEyeLeft.Visibility = Visibility.Collapsed;
                        }
                        else
                        {
                            ellipseEyeLeft.Visibility = Visibility.Visible;
                        }

                        if (eyeRightClosed == DetectionResult.Yes || eyeRightClosed == DetectionResult.Maybe)
                        {
                            ellipseEyeRight.Visibility = Visibility.Collapsed;
                        }
                        else
                        {
                            ellipseEyeRight.Visibility = Visibility.Visible;
                        }

                        if (mouthOpen == DetectionResult.Yes || mouthOpen == DetectionResult.Maybe)
                        {
                            ellipseMouth.Height = 50.0;
                        }
                        else
                        {
                            ellipseMouth.Height = 20.0;
                        }
                    }
                }
            }
        }
Example #8
 void faceFrameReader_FrameArrived( object sender, FaceFrameArrivedEventArgs e )
 {
     UpdateFaceFrame( e );
 }
Example #9
        /// <summary>
        /// Handles the face frame data arriving from the sensor
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void Reader_FaceFrameArrived(object sender, FaceFrameArrivedEventArgs e)
        {
            using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
            {
                if (faceFrame != null)
                {
                    // get the index of the face source from the face source array
                    int index = this.GetFaceSourceIndex(faceFrame.FaceFrameSource);

                    // check if this face frame has valid face frame results
                    if (this.ValidateFaceBoxAndPoints(faceFrame.FaceFrameResult))
                    {
                        // store this face frame result to draw later
                        this.faceFrameResults[index] = faceFrame.FaceFrameResult;
                    }
                    else
                    {
                        // indicates that the latest face frame result from this reader is invalid
                        this.faceFrameResults[index] = null;
                    }
                }
            }
        }
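GetFaceSourceIndex is called above but not defined in the snippet. A sketch of the usual lookup, the same linear search that a later handler in this section writes inline; the faceFrameSources array and bodyCount field are assumptions:

        // Sketch: map a FaceFrameSource back to its index in the per-body arrays.
        private int GetFaceSourceIndex(FaceFrameSource faceFrameSource)
        {
            int index = -1;

            for (int i = 0; i < this.bodyCount; i++)
            {
                if (this.faceFrameSources[i] == faceFrameSource)
                {
                    index = i;
                    break;
                }
            }

            return index;
        }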
Example #10
        /// <summary>
        /// Handles the face frame data arriving from the sensor
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void Reader_FaceFrameArrived(object sender, FaceFrameArrivedEventArgs e)
        {
            using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
            {
                if (faceFrame != null)
                {
                    // get the index of the face source from the face source array
                    int index = this.GetFaceSourceIndex(faceFrame.FaceFrameSource);

                    // check if this face frame has valid face frame results
                    if (this.ValidateFaceBoxAndPoints(faceFrame.FaceFrameResult))
                    {
                        // store this face frame result to draw later
                        this.faceFrameResults[index] = faceFrame.FaceFrameResult;
                    }
                    else
                    {
                        // indicates that the latest face frame result from this reader is invalid
                        this.faceFrameResults[index] = null;
                    }
                }
            }

            //MyFace fac = new MyFace();
            //fac.boundingboxincolorspace = faceFrameSources.;
            //var json = new JavaScriptSerializer().Serialize(fac);
            //label1.Content = String.Format("Face: \n {0}  \n", json);
        }
Example #11
        /// <summary>
        /// Handles the event raised when a FaceFrame becomes available
        /// </summary>
        private void OnFaceFrameArrived(object sender, FaceFrameArrivedEventArgs e)
        {
            using (var faceFrame = e.FrameReference.AcquireFrame())
            {
                if (faceFrame == null) return;

                // A frame with face information was acquired; skip it if the tracking id is not valid
                if (!faceFrame.IsTrackingIdValid)
                    return;
                
                var result = faceFrame.FaceFrameResult;
                if (result == null) return;

                // Get the expression results and update the properties
                this.Happy = result.FaceProperties[FaceProperty.Happy].ToString();
                this.FaceEngagement = result.FaceProperties[FaceProperty.Engaged].ToString();
                this.Glasses = result.FaceProperties[FaceProperty.WearingGlasses].ToString();
                this.LeftEyeClosed = result.FaceProperties[FaceProperty.LeftEyeClosed].ToString();
                this.RightEyeClosed = result.FaceProperties[FaceProperty.RightEyeClosed].ToString();
                this.MouthOpen = result.FaceProperties[FaceProperty.MouthOpen].ToString();
                this.MouthMoved = result.FaceProperties[FaceProperty.MouthMoved].ToString();
                this.LookingAway = result.FaceProperties[FaceProperty.LookingAway].ToString();

                // Get the face rotation result
                this.FaceRotation = result.FaceRotationQuaternion;

                // Draw the color data
                var drawContext = drawVisual.RenderOpen();

                // Get the face feature points and draw them
                foreach (var point in result.FacePointsInColorSpace)
                {
                    if (point.Key == FacePointType.None) continue;
                  
                    drawContext.DrawEllipse(facePointColor[(int)point.Key], null, new Point(point.Value.X, point.Value.Y), 5, 5);
                }
                drawContext.Close();

                // Render to the bitmap
                _FacePointBitmap.Clear();
                _FacePointBitmap.Render(drawVisual);

                OnPropertyChanged("FacePointBitmap");
            }
        }
Example #12
 /// <summary>
 /// When the body data is read, the face reader is executed.
 /// Connects each face source with its face frame result.
 /// </summary>
 /// <param name="sender"></param>
 /// <param name="e"></param>
 void FaceReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
 {
     using (var frame = e.FrameReference.AcquireFrame())
     {
         if(frame != null)
         {
             int index = -1;
             for(int i = 0; i < bodyCount; i++)
             {
                 if(_faceSources[i] == frame.FaceFrameSource)
                 {
                     index = i;
                     break;
                 }
             }

             // Guard against a source that is no longer in the array.
             if (index >= 0)
             {
                 _faceResults[index] = frame.FaceFrameResult;
             }
         }
     }
 }
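The _faceSources and _faceResults arrays above are assumed to have been allocated once, with one face source/reader per possible body. A setup sketch under the assumption that _sensor holds the opened KinectSensor; the _faceReaders field name and the chosen feature flags are also assumptions:

 // Sketch: allocate one face source/reader per possible body (field names match the handler above).
 private void OpenFaceReaders()
 {
     bodyCount = _sensor.BodyFrameSource.BodyCount;
     _faceSources = new FaceFrameSource[bodyCount];
     _faceReaders = new FaceFrameReader[bodyCount];
     _faceResults = new FaceFrameResult[bodyCount];

     for (int i = 0; i < bodyCount; i++)
     {
         _faceSources[i] = new FaceFrameSource(_sensor, 0, FaceFrameFeatures.BoundingBoxInColorSpace);
         _faceReaders[i] = _faceSources[i].OpenReader();
         _faceReaders[i].FrameArrived += FaceReader_FrameArrived;
     }
 }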
Example #13
        // Since the face source is connected with the body, we can specify what happens when a face frame is available.
        // Face frames work exactly like the color, depth, infrared, and body frames: first you get a reference to the
        // frame, then you acquire the frame, and if the frame is not empty, you can grab the FaceFrameResult object.
        // The FaceFrameResult object encapsulates all available face info.

        void FaceReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
        {
            maskImage.Visibility = Visibility.Hidden;
            using (var frame = e.FrameReference.AcquireFrame())
            {
                if(frame != null)
                {
                    // Get the face frame result
                    FaceFrameResult result = frame.FaceFrameResult;

                    if(result != null)
                    {
                        if(network.checkChange())
                        {
                            network.setChange(false);
                            string path = "pack://application:,,/Images/" + network.getPath();
                            try
                            {
                                maskImage.Source = new BitmapImage(new Uri(@path));
                            }
                            catch(System.IO.IOException exc)
                            {
                                Debug.WriteLine("Ruhroh ");
                            }
                        }


                        /*
                        // Get the face points, mapped in the color space
                        var eyeLeft = result.FacePointsInColorSpace[FacePointType.EyeLeft];
                        var eyeRight = result.FacePointsInColorSpace[FacePointType.EyeRight];
                        var nose = result.FacePointsInColorSpace[FacePointType.Nose];
                        var mouthLeft = result.FacePointsInColorSpace[FacePointType.MouthCornerLeft];
                        var mouthRight = result.FacePointsInColorSpace[FacePointType.MouthCornerRight];

                        // Get the face characteristics
                        var eyeLeftClosed = result.FaceProperties[FaceProperty.LeftEyeClosed];
                        var eyeRightClosed = result.FaceProperties[FaceProperty.RightEyeClosed];
                        var mouthOpen = result.FaceProperties[FaceProperty.MouthOpen];
                        */

                        var mouthOpen = result.FaceProperties[FaceProperty.MouthOpen];

                        /*
                        if(mouthOpen == DetectionResult.Yes && maskImage.Source.ToString() != "pack://*****:*****@"pack://application:,,,/Images/nickhorror.png"));
                        }
                        if(mouthOpen == DetectionResult.No && maskImage.Source.ToString() != "pack://*****:*****@"pack://application:,,,/Images/nicksmile.png"));
                        }
                        */
                        // Get the Bounding Box
                        var bb = result.FaceBoundingBoxInColorSpace;

                        //Debug.WriteLine("Width: " + width);
                        //Debug.WriteLine("Height: " + height);

                        // Set the values; replace this with the image source
                        /*
                        boundingBox.Width = Math.Abs(bb.Right - bb.Left);
                        boundingBox.Height = Math.Abs(bb.Bottom - bb.Top);
                        Canvas.SetLeft(boundingBox, bb.Left);
                        Canvas.SetRight(boundingBox, bb.Right);
                        Canvas.SetTop(boundingBox, bb.Top);
                        Canvas.SetBottom(boundingBox, bb.Bottom);
                        */
                        double width = Math.Abs(bb.Right - bb.Left) * 1.8;
                        double height = Math.Abs(bb.Bottom - bb.Top) * 1.8;
                        double wDiff = Math.Abs(width - maskImage.Width);
                        double hDiff = Math.Abs(height - maskImage.Height);

                        // This will tell whether or not the image should be resized.
                        if(wDiff/maskImage.Width > 0.15 || hDiff/maskImage.Height > 0.15 || Double.IsNaN(maskImage.Width))
                        {
                            maskImage.Width = width;
                            maskImage.Height = height;
                        }
                        else
                        {
                            width = maskImage.Width;
                            height = maskImage.Height;
                        }
                        double left = bb.Left - width * 0.2;
                        double top = bb.Top - height * 0.2;
                        double lDiff = Math.Abs(Canvas.GetLeft(maskImage) - left);
                        double tDiff = Math.Abs(Canvas.GetTop(maskImage) - top);

                        // this will tell whether or not the image should be translated.
                        if(lDiff/Canvas.GetLeft(maskImage) > 0.03 || tDiff/Canvas.GetTop(maskImage) > 0.03 || Double.IsNaN(Canvas.GetTop(maskImage)))
                        {
                            Canvas.SetLeft(maskImage, bb.Left - width * 0.2);
                            Canvas.SetTop(maskImage, bb.Top - height * 0.2);

                            // Below may/may not be necessary.
                            // Canvas.SetRight(maskImage, bb.Right + width * 0.2);
                            //Canvas.SetBottom(maskImage, bb.Bottom + height * 0.2);
                        }

                        maskImage.Visibility = Visibility.Visible;
                        
                        /*
                        // Position the canvas UI elements
                        Canvas.SetLeft(ellipseEyeLeft, eyeLeft.X - ellipseEyeLeft.Width / 2.0);
                        Canvas.SetTop(ellipseEyeLeft, eyeLeft.Y - ellipseEyeLeft.Width / 2.0);

                        Canvas.SetLeft(ellipseEyeRight, eyeRight.X - ellipseEyeRight.Width / 2.0);
                        Canvas.SetTop(ellipseEyeRight, eyeRight.Y - ellipseEyeRight.Height / 2.0);

                        Canvas.SetLeft(ellipseNose, nose.X - ellipseNose.Width / 2.0);
                        Canvas.SetTop(ellipseNose, nose.Y - ellipseNose.Height / 2.0);

                        Canvas.SetLeft(ellipseMouth, ((mouthRight.X + mouthLeft.X) / 2.0) - ellipseMouth.Width / 2.0);
                        Canvas.SetTop(ellipseMouth, ((mouthRight.Y + mouthLeft.Y) / 2.0) - ellipseMouth.Height / 2.0);
                        ellipseMouth.Width = Math.Abs(mouthRight.X - mouthLeft.X);
                        
                        // Display or hide the ellipses
                        if (eyeLeftClosed == DetectionResult.Yes || eyeLeftClosed == DetectionResult.Maybe)
                        {
                            ellipseEyeLeft.Visibility = Visibility.Collapsed;
                        }
                        else
                        {
                            ellipseEyeLeft.Visibility = Visibility.Visible;
                        }

                        if (eyeRightClosed == DetectionResult.Yes || eyeRightClosed == DetectionResult.Maybe)
                        {
                            ellipseEyeRight.Visibility = Visibility.Collapsed;
                        }
                        else
                        {
                            ellipseEyeRight.Visibility = Visibility.Visible;
                        }

                        if (mouthOpen == DetectionResult.Yes || mouthOpen == DetectionResult.Maybe)
                        {
                            ellipseMouth.Height = 50.0;
                        }
                        else
                        {
                            ellipseMouth.Height = 20.0;
                        }
                        */
                    }
                }
            }
        }
Example #14
        // Manages facial gestures.
        void FaceReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
        {
            // Face
            using (var frame = e.FrameReference.AcquireFrame())
            {
                if (frame != null)
                {
                    // 4) Get the face frame result
                    FaceFrameResult result = frame.FaceFrameResult;

                    if (result != null)
                    {
                        // 5) Do magic!

                        // Get the face points, mapped in the color space.
                        var eyeLeft = result.FacePointsInColorSpace[FacePointType.EyeLeft];
                        var eyeRight = result.FacePointsInColorSpace[FacePointType.EyeRight];
                        var nose = result.FacePointsInColorSpace[FacePointType.Nose];
                        var mouthLeft = result.FacePointsInColorSpace[FacePointType.MouthCornerLeft];
                        var mouthRight = result.FacePointsInColorSpace[FacePointType.MouthCornerRight];

                        var glasses = result.FaceProperties[FaceProperty.WearingGlasses];
                        var eyeLeftClosed = result.FaceProperties[FaceProperty.LeftEyeClosed];
                        var eyeRightClosed = result.FaceProperties[FaceProperty.RightEyeClosed];
                        var mouthOpen = result.FaceProperties[FaceProperty.MouthOpen];

                        // Position the canvas UI elements
                        /*
                        Canvas.SetLeft(ellipseEyeLeft, eyeLeft.X - ellipseEyeLeft.Width / 2.0);
                        Canvas.SetTop(ellipseEyeLeft, eyeLeft.Y - ellipseEyeLeft.Height / 2.0);

                        Canvas.SetLeft(ellipseEyeRight, eyeRight.X - ellipseEyeRight.Width / 2.0);
                        Canvas.SetTop(ellipseEyeRight, eyeRight.Y - ellipseEyeRight.Height / 2.0);

                        Canvas.SetLeft(ellipseNose, nose.X - ellipseNose.Width / 2.0);
                        Canvas.SetTop(ellipseNose, nose.Y - ellipseNose.Height / 2.0);

                        Canvas.SetLeft(ellipseMouth, ((mouthRight.X + mouthLeft.X) / 2.0) - ellipseMouth.Width / 2.0);
                        Canvas.SetTop(ellipseMouth, ((mouthRight.Y + mouthLeft.Y) / 2.0) - ellipseMouth.Height / 2.0);
                        ellipseMouth.Width = Math.Abs(mouthRight.X - mouthLeft.X);
                        */
                        
                        // Set some base text for non-conclusive checks to return '-' instead of the prior loop's result.
                        string rightEyeState = "-";
                        string leftEyeState = "-";
                        string mouthState = "-";

                        //if (glasses != DetectionResult.Yes)
                        //{
                        // Checks current state of the user's right eye and displays the according text and/or image.
                        switch (eyeRightClosed)
                        {
                            case DetectionResult.No:
                                rightEyeState = "Open";
                                //ellipseEyeRight.Visibility = Visibility.Visible;
                                break;
                            case DetectionResult.Yes:
                                rightEyeState = "Closed";
                                //ellipseEyeRight.Visibility = Visibility.Collapsed;
                                break;
                            case DetectionResult.Maybe:
                                rightEyeState = "Maybe";
                                //ellipseEyeRight.Visibility = Visibility.Visible;
                                break;
                            default:
                                break;
                        }

                        // Checks current state of the user's left eye and displays the according text and/or image.
                        switch (eyeLeftClosed)
                        {
                            case DetectionResult.No:
                                leftEyeState = "Open";
                                //ellipseEyeLeft.Visibility = Visibility.Visible;
                                break;
                            case DetectionResult.Yes:
                                leftEyeState = "Closed";
                                //ellipseEyeLeft.Visibility = Visibility.Collapsed;
                                break;
                            case DetectionResult.Maybe:
                                leftEyeState = "Maybe";
                                //ellipseEyeLeft.Visibility = Visibility.Visible;
                                break;
                            default:
                                break;
                        }
                        //}
                        /*
                        else
                        {
                            rightEyeState = "Please Remove Glasses";
                            leftEyeState = "Please Remove Glasses";
                        }
                        */

                        // Checks current state of the user's mouth and displays the according text and/or image.
                        switch (mouthOpen)
                        {
                            case DetectionResult.Yes:
                                mouthState = "Open";
                                //ellipseMouth.Height = 50.0;
                                break;
                            case DetectionResult.No:
                                mouthState = "Closed";
                                //ellipseMouth.Height = 20.0;
                                break;
                            case DetectionResult.Maybe:
                                mouthState = "Maybe";
                                //ellipseMouth.Height = 50.0;
                                break;
                            default:
                                break;
                        }

                        // Updates the text values.
                        tblRightEyeState.Text = rightEyeState;
                        tblLeftEyeState.Text = leftEyeState;
                        tblMouthState.Text = mouthState;
                    }
                }
            }
        }
Example #15
        /// <summary>
        /// Process the face frame
        /// </summary>
        private void OnFaceFrameArrived(object sender, FaceFrameArrivedEventArgs e)
        {
            // Retrieve the face reference
            FaceFrameReference faceRef = e.FrameReference;

            if (faceRef == null) return;

            // Acquire the face frame
            using (FaceFrame faceFrame = faceRef.AcquireFrame())
            {
                if (faceFrame == null) return;

                // Retrieve the face frame result
                FaceFrameResult frameResult = faceFrame.FaceFrameResult;

                if (frameResult == null) return;

                // Display the values
                HappyResult.Text = frameResult.FaceProperties[FaceProperty.Happy].ToString();
                EngagedResult.Text = frameResult.FaceProperties[FaceProperty.Engaged].ToString();
                GlassesResult.Text = frameResult.FaceProperties[FaceProperty.WearingGlasses].ToString();
                LeftEyeResult.Text = frameResult.FaceProperties[FaceProperty.LeftEyeClosed].ToString();
                RightEyeResult.Text = frameResult.FaceProperties[FaceProperty.RightEyeClosed].ToString();
                MouthOpenResult.Text = frameResult.FaceProperties[FaceProperty.MouthOpen].ToString();
                MouthMovedResult.Text = frameResult.FaceProperties[FaceProperty.MouthMoved].ToString();
                LookingAwayResult.Text = frameResult.FaceProperties[FaceProperty.LookingAway].ToString();
            }
        }
Example #16
        /// <summary>
        /// The Face Reader connects to the body reader and returns a bounding box
        /// to position the image against.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        void FaceReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
        {
            maskImage.Visibility = Visibility.Hidden;
            using (var frame = e.FrameReference.AcquireFrame())
            {
                if(frame != null)
                {
                    // Get the face frame result
                    FaceFrameResult result = frame.FaceFrameResult;

                    if(result != null)
                    {
                        if(network.checkChange())
                        {
                            network.setChange(false);
                            string path = "pack://application:,,/Images/" + network.getPath();
                            //string path = network.getPath();
                            try
                            {
                                //maskImage.Source = new BitmapImage(new Uri(path, UriKind.Absolute));
                                maskImage.Source = new BitmapImage(new Uri(@path));
                            }
                            catch(System.IO.IOException exc)
                            {
                                Debug.WriteLine("System IO Exception ");
                            }
                            catch(System.UriFormatException exc)
                            {
                                Debug.WriteLine("UriFormatException");
                            }
                        }


                        var bb = result.FaceBoundingBoxInColorSpace;

                        double width = Math.Abs(bb.Right - bb.Left) * 1.8;
                        double height = Math.Abs(bb.Bottom - bb.Top) * 1.8;
                        double wDiff = Math.Abs(width - maskImage.Width);
                        double hDiff = Math.Abs(height - maskImage.Height);

                        // This will tell whether or not the image should be resized.
                        if(wDiff/maskImage.Width > 0.35 || hDiff/maskImage.Height > 0.35 || Double.IsNaN(maskImage.Width))
                        {
                            if (width > 0 && height > 0)
                            {
                                maskImage.Width = width;
                                maskImage.Height = height;
                            }
                        }
                        else
                        {
                            width = maskImage.Width;
                            height = maskImage.Height;
                        }
                        double left = bb.Left - width * 0.25;
                        double top = bb.Top - height * 0.30;
                        double lDiff = Math.Abs(Canvas.GetLeft(maskImage) - left);
                        double tDiff = Math.Abs(Canvas.GetTop(maskImage) - top);

                        // this will tell whether or not the image should be translated.
                        if(lDiff/Canvas.GetLeft(maskImage) > 0.08 || tDiff/Canvas.GetTop(maskImage) > 0.08 || Double.IsNaN(Canvas.GetTop(maskImage)))
                        {
                            if (left > 0 && top > 0)
                            {
                                Canvas.SetLeft(maskImage, left);
                                Canvas.SetTop(maskImage, top);
                            }
                        }

                        maskImage.Visibility = Visibility.Visible;
                    }
                }
            }
        }
Example #17
        private void face_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
        {
            using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
            {
                if (faceFrame == null)
                {
                    return;
                }

                int index = ffsDic[faceFrame.FaceFrameSource];
               // Debug.Print("Face {0} comes with {1} ID {2}", index, faceFrame.FaceFrameSource.IsTrackingIdValid, faceFrame.FaceFrameSource.TrackingId);

                if (faceProcessor.validateFaceFrame(faceFrame))
                {
                    //Debug.Print("Validate Face {0} Succeed with ID {1}", index, faceFrame.FaceFrameSource.TrackingId);
                    faceFrameResults[index] = faceFrame.FaceFrameResult;
                }
                else
                {
                    faceFrameResults[index] = null;
                    //Debug.Print("Face {0} not valid", index);
                }
            }
        }
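Example #17 resolves the array index through a dictionary (ffsDic) instead of a linear search, which keeps the lookup O(1) per frame. The dictionary, faceProcessor, and the reader setup are not shown; a sketch of how ffsDic might be populated during initialization (sensor, faceFrameSources, faceFrameReaders, and faceFrameFeatures are assumptions):

        // Sketch: build the FaceFrameSource-to-index dictionary used in face_FrameArrived.
        private void InitializeFaceReaders()
        {
            ffsDic = new Dictionary<FaceFrameSource, int>();

            for (int i = 0; i < faceFrameSources.Length; i++)
            {
                faceFrameSources[i] = new FaceFrameSource(sensor, 0, faceFrameFeatures);
                faceFrameReaders[i] = faceFrameSources[i].OpenReader();
                faceFrameReaders[i].FrameArrived += face_FrameArrived;
                ffsDic[faceFrameSources[i]] = i;
            }
        }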
Example #18
        void FaceReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
        {
            using (var frame = e.FrameReference.AcquireFrame())
            {

                if (frame != null)
                {
                    // 4) Get the face frame result
                    FaceFrameResult result = frame.FaceFrameResult;
                        //Joint rightHand = body.Joints[JointType.HandRight];
                    if (result != null)
                    {

                        // 5) Do magic!

                        // Position the canvas UI elements
                        var eyeLeft = result.FacePointsInColorSpace[FacePointType.EyeLeft];
                        var eyeRight = result.FacePointsInColorSpace[FacePointType.EyeRight];
                        var nose = result.FacePointsInColorSpace[FacePointType.Nose];
                        var mouthLeft = result.FacePointsInColorSpace[FacePointType.MouthCornerLeft];
                        var mouthRight = result.FacePointsInColorSpace[FacePointType.MouthCornerRight];

                        var eyeLeftClosed = result.FaceProperties[FaceProperty.LeftEyeClosed];
                        var eyeRightClosed = result.FaceProperties[FaceProperty.RightEyeClosed];
                        var mouthOpen = result.FaceProperties[FaceProperty.MouthOpen];

                        // Position the canvas UI elements
                        Canvas.SetLeft(ellipseEyeLeft, eyeLeft.X - ellipseEyeLeft.Width / 2.0);
                        Canvas.SetTop(ellipseEyeLeft, eyeLeft.Y - ellipseEyeLeft.Height / 2.0);

                        //Canvas.SetTop(ironimage, nose.Y - ironimage.Height / 2.0);
                       // Canvas.SetLeft(ironimage, nose.X - ironimage.Width / 2.0);

                        Canvas.SetLeft(ellipseEyeRight, eyeRight.X - ellipseEyeRight.Width / 2.0);
                        Canvas.SetTop(ellipseEyeRight, eyeRight.Y - ellipseEyeRight.Height / 2.0);

                        Canvas.SetLeft(ellipseNose, nose.X - ellipseNose.Width / 2.0);
                        Canvas.SetTop(ellipseNose, nose.Y - ellipseNose.Height / 2.0);

                        Canvas.SetLeft(ellipseMouth, ((mouthRight.X + mouthLeft.X) / 2.0) - ellipseMouth.Width / 2.0);
                        Canvas.SetTop(ellipseMouth, ((mouthRight.Y + mouthLeft.Y) / 2.0) - ellipseMouth.Height / 2.0);
                        ellipseMouth.Width = Math.Abs(mouthRight.X - mouthLeft.X);

                        if (eyeLeftClosed == DetectionResult.Yes || eyeLeftClosed == DetectionResult.Maybe )
                        {
                            ellipseEyeLeft.Visibility = Visibility.Collapsed;

                        }
                        else
                        {
                            ellipseEyeLeft.Visibility = Visibility.Visible;

                        }

                        if (eyeRightClosed == DetectionResult.Yes || eyeRightClosed == DetectionResult.Maybe )
                        {
                            ellipseEyeRight.Visibility = Visibility.Collapsed;

                        }
                        else
                        {
                            ellipseEyeRight.Visibility = Visibility.Visible;

                        }

                        if (mouthOpen == DetectionResult.Yes || mouthOpen == DetectionResult.Maybe)
                        {
                            ellipseMouth.Height = 50.0;
                        }
                        else
                        {
                            ellipseMouth.Height = 20.0;
                        }
                    }
                }
            }
        }
Example #19
        /// <summary>
        /// Called when a face frame arrives; raises face-visibility and mouth events
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void FaceReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
        {
            using (var frame = e.FrameReference.AcquireFrame())
            {
                if (frame != null)
                {
                    FaceFrameResult result = frame.FaceFrameResult;

                    if (result != null)
                    {
                        //face is visible
                        OnFaceVisible(null, null);

                        // Get the face characteristics
                        var mouthOpen = result.FaceProperties[FaceProperty.MouthOpen];
                        //result.FaceFrameFeatures
                        //fire events

                        CallAction(mouthOpen, OnMouthOpened, OnMouthClosed);
                    }
                    else
                    {
                        //face is invisible (lost)
                        OnFaceLost(null, null);
                    }
                }
            }
        }
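CallAction, OnMouthOpened, OnMouthClosed, OnFaceVisible, and OnFaceLost are not shown. A hypothetical CallAction consistent with the call above, treating Maybe as a positive detection (that choice is an assumption):

        // Hypothetical helper: dispatch to one of two handlers based on a DetectionResult.
        private void CallAction(DetectionResult detection, EventHandler onDetected, EventHandler onNotDetected)
        {
            if (detection == DetectionResult.Yes || detection == DetectionResult.Maybe)
            {
                if (onDetected != null) onDetected(this, EventArgs.Empty);
            }
            else
            {
                if (onNotDetected != null) onNotDetected(this, EventArgs.Empty);
            }
        }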
Example #20
        void UpdateFaceFrame( FaceFrameArrivedEventArgs e )
        {
            using ( FaceFrame faceFrame = e.FrameReference.AcquireFrame() ) {
                if ( faceFrame == null ) {
                    return;
                }
                if ( !faceFrame.IsTrackingIdValid ) {
                    return;
                }

                FaceFrameResult faceResult = faceFrame.FaceFrameResult;
                int index = GetFaceSourceIndex( faceFrame.FaceFrameSource );
                faceFrameResults[index] = faceResult;
            }
        }
Example #21
        /// <summary>
        /// The Face Reader connects to the body reader and returns a bounding box
        /// to position the image against.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        void FaceReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
        {
            //maskImage.Visibility = Visibility.Hidden;
            using (var frame = e.FrameReference.AcquireFrame())
            {
                if(frame != null)
                {
                    // Get the face frame result
                    FaceFrameResult result = frame.FaceFrameResult;

                    if(result != null)
                    {
                        // Get the Bounding Box
                        var bb = result.FaceBoundingBoxInColorSpace;
                        var eyeLeft = result.FacePointsInColorSpace[FacePointType.EyeRight];
                        var mouthLeft = result.FacePointsInColorSpace[FacePointType.MouthCornerLeft];
                        var mouthRight = result.FacePointsInColorSpace[FacePointType.MouthCornerRight];

                        double width = Math.Abs(bb.Right - bb.Left) * 1.8;
                        double height = Math.Abs(bb.Bottom - bb.Top) * 1.8;
                        double wDiff = Math.Abs(width - maskImage.Width);
                        double hDiff = Math.Abs(height - maskImage.Height);

                        // This will tell whether or not the image should be resized.
                        if(wDiff/maskImage.Width > 0.15 || hDiff/maskImage.Height > 0.15 || Double.IsNaN(maskImage.Width))
                        {
                            if (width > 0 && height > 0)
                            {
                                if(width > maskImage.Width)
                                {
                                    maskImage.Width = width * 0.85;
                                    maskImage.Height = height * 0.85;
                                }
                                else
                                {
                                    maskImage.Width = width * 1.15;
                                    maskImage.Height = height * 1.15;
                                }
                            }
                        }
                        width = maskImage.Width;
                        height = maskImage.Height;

                        double tleft = bb.Left - width * 0.2;
                        double ttop = bb.Top - height * 0.2 - height * 0.70;
                        if (tleft > 0)
                            left = tleft;
                        if (ttop > 0)
                            top = ttop;
                        double lDiff = Math.Abs(Canvas.GetLeft(maskImage) - left);
                        double tDiff = Math.Abs(Canvas.GetTop(maskImage) - top);

                        // This will tell whether or not the image should be translated.
                        if(lDiff/Canvas.GetLeft(maskImage) > 0.07 || tDiff/Canvas.GetTop(maskImage) > 0.07 || Double.IsNaN(Canvas.GetTop(maskImage)))
                        {
                            Canvas.SetLeft(maskImage, left);
                            Canvas.SetTop(maskImage, top);
                        }

                        maskImage.Visibility = Visibility.Visible;


                        // Moustache modifications
                        /*
                        double stacheWid = 3 * Math.Abs(mouthRight.X - mouthLeft.X);
                        if(stacheWid > 0)
                        {
                            stache.Width = stacheWid;
                            stache.Height = stacheWid / 3.0;
                            Canvas.SetLeft(stache, ((mouthRight.X + mouthLeft.X) / 2.0) - stache.Width / 2.0);
                            Canvas.SetTop(stache, ((mouthRight.Y + mouthLeft.Y) / 2.0) - stache.Height / 2.0 - 5);
                            stache.Visibility = Visibility.Visible;
                        }*/

                        if (eyeLeft.X > 0 && eyeLeft.Y > 0)
                        {
                            Canvas.SetLeft(monocle, eyeLeft.X - monocle.Width / 2.0);
                            Canvas.SetTop(monocle, eyeLeft.Y - monocle.Height / 2.0);
                            Debug.WriteLine(eyeLeft.X + ", " + eyeLeft.Y);
                            monocle.Visibility = Visibility.Visible;
                        }

                    }
                }
            }

        }
Example #22
        /// <summary>
        /// Handles the face frame data arriving from the sensor
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void Reader_FaceFrameArrived(object sender, FaceFrameArrivedEventArgs e)
        {
            using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
            {
                if (faceFrame != null)
                {
                    // get the index of the face source from the face source array
                    int index = this.GetFaceSourceIndex(faceFrame.FaceFrameSource);

                    // check if this face frame has valid face frame results
                    if (this.ValidateFaceBoxAndPoints(faceFrame.FaceFrameResult))
                    {
                        // store this face frame result to draw later
                        this.faceFrameResults[index] = faceFrame.FaceFrameResult;
                    }
                    else
                    {
                        // indicates that the latest face frame result from this reader is invalid
                        this.faceFrameResults[index] = null;
                    }
                }
            }
        }
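ValidateFaceBoxAndPoints is used by several of the handlers in this section but never defined. A sketch of the kind of sanity check it performs, assuming displayWidth and displayHeight hold the color frame dimensions:

        // Sketch: accept a face result only if its box and points lie inside the color frame.
        private bool ValidateFaceBoxAndPoints(FaceFrameResult faceResult)
        {
            if (faceResult == null)
            {
                return false;
            }

            var box = faceResult.FaceBoundingBoxInColorSpace;
            bool boxValid = (box.Right - box.Left) > 0 &&
                            (box.Bottom - box.Top) > 0 &&
                            box.Right <= this.displayWidth &&
                            box.Bottom <= this.displayHeight;

            if (!boxValid)
            {
                return false;
            }

            foreach (PointF point in faceResult.FacePointsInColorSpace.Values)
            {
                if (point.X < 0 || point.Y < 0 ||
                    point.X > this.displayWidth || point.Y > this.displayHeight)
                {
                    return false;
                }
            }

            return true;
        }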
Example #23
        public void OnFaceFrameArrived2(object sender, FaceFrameArrivedEventArgs e)
        {
            if (kinectenable.Checked == false) return;
            using (var faceFrame = e.FrameReference.AcquireFrame())
            {
                if (faceFrame == null) return;
                var result = faceFrame.FaceFrameResult;

                if (result == null)
                    return;

                var rotation = result.FaceRotationQuaternion;
                int x, y, z;
                ExtractFaceRotationInDegrees(rotation, out x, out y, out z);
                
                rotX_2 = x;
                rotY_2 = y;
                rotZ_2 = z;
                happy2 = result.FaceProperties[FaceProperty.Happy].ToString();
                mouth2 = result.FaceProperties[FaceProperty.MouthMoved].ToString();
                kin1_2 = "kin1_2," + x.ToString() + "," + y.ToString() + "," + z.ToString() + "," + result.FaceProperties[FaceProperty.Happy].ToString() + "," + result.FaceProperties[FaceProperty.MouthMoved].ToString();

            }
        }
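ExtractFaceRotationInDegrees is called above but not shown. A sketch of the standard quaternion-to-Euler conversion (essentially what the SDK's FaceBasics sample does), producing pitch, yaw, and roll in degrees:

        // Sketch: convert the face rotation quaternion into pitch/yaw/roll in degrees.
        private static void ExtractFaceRotationInDegrees(Vector4 rotQuaternion, out int pitch, out int yaw, out int roll)
        {
            double x = rotQuaternion.X;
            double y = rotQuaternion.Y;
            double z = rotQuaternion.Z;
            double w = rotQuaternion.W;

            // x-axis = pitch, y-axis = yaw, z-axis = roll
            double pitchD = Math.Atan2(2 * ((y * z) + (w * x)), (w * w) - (x * x) - (y * y) + (z * z)) / Math.PI * 180.0;
            double yawD = Math.Asin(2 * ((w * y) - (x * z))) / Math.PI * 180.0;
            double rollD = Math.Atan2(2 * ((x * y) + (w * z)), (w * w) + (x * x) - (y * y) - (z * z)) / Math.PI * 180.0;

            pitch = (int)pitchD;
            yaw = (int)yawD;
            roll = (int)rollD;
        }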