Example No. 1
        public void EnqueueFaceTracking(int sensorId, int user, Microsoft.Kinect.Face.FaceFrameResult faceResult)
        {
            if (!capturing)
            {
                return;
            }
            if (faceResult == null)
            {
                return;
            }


            // extract face rotation in degrees as Euler angles
            if (faceResult.FaceRotationQuaternion != null)
            {
                int pitch, yaw, roll;
                MainWindow.ExtractFaceRotationInDegrees(faceResult.FaceRotationQuaternion, out pitch, out yaw, out roll);
                trackingInformationQueue.Add(new FaceRotationTrackingInformation(sensorId, user, pitch, yaw, roll, 0.2));
            }

            // extract each face property information and store it in faceText
            if (faceResult.FaceProperties != null)
            {
                trackingInformationQueue.Add(new FacePropertyTrackingInformation(sensorId, user,
                                                                                 detectionResultToConfidence(faceResult.FaceProperties[FaceProperty.Happy]),
                                                                                 detectionResultToConfidence(faceResult.FaceProperties[FaceProperty.Engaged]),
                                                                                 detectionResultToConfidence(faceResult.FaceProperties[FaceProperty.WearingGlasses]),
                                                                                 detectionResultToConfidence(faceResult.FaceProperties[FaceProperty.LeftEyeClosed]),
                                                                                 detectionResultToConfidence(faceResult.FaceProperties[FaceProperty.RightEyeClosed]),
                                                                                 detectionResultToConfidence(faceResult.FaceProperties[FaceProperty.MouthOpen]),
                                                                                 detectionResultToConfidence(faceResult.FaceProperties[FaceProperty.MouthMoved]),
                                                                                 detectionResultToConfidence(faceResult.FaceProperties[FaceProperty.LookingAway]), 0.2));
            }
        }
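The snippet above calls a detectionResultToConfidence helper that is not shown; a minimal sketch, assuming it simply maps a Kinect DetectionResult to a numeric confidence value, might look like this:

        // Hypothetical helper (not part of the snippet above): maps a Kinect
        // DetectionResult to a rough confidence value between 0 and 1.
        private double detectionResultToConfidence(Microsoft.Kinect.DetectionResult result)
        {
            switch (result)
            {
                case Microsoft.Kinect.DetectionResult.Yes:
                    return 1.0;
                case Microsoft.Kinect.DetectionResult.Maybe:
                    return 0.5;
                default: // No or Unknown
                    return 0.0;
            }
        }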
Example No. 2
        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="trackingId">Tracking ID</param>
        /// <param name="frameResult">Frame result</param>
        public FaceFrameResultEventArgs(ulong trackingId, FaceFrameResult frameResult)
        {
            if (frameResult == null)
                throw new ArgumentNullException("frameResult");

            this.trackingId = trackingId;
            this.frameResult = frameResult;
        }
        public void matchFaceWithBody(FaceFrameSource[] faceFrameSources, FaceFrameResult[] faceFrameResults, FaceProcessor faceProcessor)
        {
            foreach (var body in bodies)
            {
                if (!body.IsTracked)
                {
                    continue;
                }

                int i = getBodyIndex(body);
                //Debug.Print("Body {0} comes with ID {1}", i, body.TrackingId);

                if (faceFrameSources[i].IsTrackingIdValid)
                {
                    // check if we have valid face frame results
                    //Debug.Print("Source Valid {0}", i);

                    if (faceFrameResults[i] != null)
                    {
                        //Debug.Print("Result Valid {0}", i);
                        int pitch, yaw, roll;
                        faceProcessor.ExtractFaceRotationInDegrees(faceFrameResults[i].FaceRotationQuaternion, out pitch,
                            out yaw, out roll);

                        eManager.users[faceFrameSources[i].TrackingId].headPitch = pitch;
                        eManager.users[faceFrameSources[i].TrackingId].headYaw = yaw;
                        eManager.users[faceFrameSources[i].TrackingId].headRoll = roll;
                    }
                    else
                    {
                        // no valid face result yet: store 1000 as a sentinel for "orientation unknown"
                        eManager.users[faceFrameSources[i].TrackingId].headPitch = 1000;
                        eManager.users[faceFrameSources[i].TrackingId].headYaw = 1000;
                        eManager.users[faceFrameSources[i].TrackingId].headRoll = 1000;
                    }

                    if (eManager.HasEngaged && eManager.Engager.body.TrackingId == faceFrameSources[i].TrackingId)
                    {
                        //Debug.Print("Engager: {0}", eManager.Engager.body.TrackingId);
                        eManager.setTrackerFaceOrientation(faceFrameSources[i].TrackingId);

                    }
                }
                else
                {
                    faceFrameSources[i].TrackingId = bodies[i].TrackingId;
                }
            }
        }
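matchFaceWithBody relies on a getBodyIndex helper that is not included above; a plausible sketch, assuming bodies is the array filled from the latest BodyFrame, is:

        // Hypothetical helper (assumed, not shown in the original snippet):
        // returns the index of the given body within the bodies array.
        private int getBodyIndex(Body body)
        {
            for (int i = 0; i < bodies.Length; i++)
            {
                if (bodies[i] != null && bodies[i].TrackingId == body.TrackingId)
                {
                    return i;
                }
            }
            return -1;
        }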
        public static bool ValidateFaceBoxAndPoints(this FaceFrameResult faceResult, int displayWidth, int displayHeight)
        {
            bool isFaceValid = faceResult != null;

            if (isFaceValid)
            {
                var faceBox = faceResult.FaceBoundingBoxInColorSpace;
                if (faceBox != null)
                {
                    // check if we have a valid rectangle within the bounds of the screen space
                    isFaceValid = (faceBox.Right - faceBox.Left) > 0 &&
                                  (faceBox.Bottom - faceBox.Top) > 0 &&
                                  faceBox.Right <= displayWidth &&
                                  faceBox.Bottom <= displayHeight;

                    if (isFaceValid)
                    {
                        var facePoints = faceResult.FacePointsInColorSpace;
                        if (facePoints != null)
                        {
                            foreach (PointF pointF in facePoints.Values)
                            {
                                // check if we have a valid face point within the bounds of the screen space
                                bool isFacePointValid = pointF.X > 0.0f &&
                                                        pointF.Y > 0.0f &&
                                                        pointF.X < displayWidth &&
                                                        pointF.Y < displayHeight;

                                if (!isFacePointValid)
                                {
                                    isFaceValid = false;
                                    break;
                                }
                            }
                        }
                    }
                }
            }

            return isFaceValid;
        }
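Because ValidateFaceBoxAndPoints above is an extension method, a typical call site reads naturally off the face result. A hypothetical caller, assuming displayWidth/displayHeight come from the color frame description:

        // Hypothetical caller: only draw the face when the result passes validation.
        private void OnFaceFrameResult(FaceFrameResult faceResult, int displayWidth, int displayHeight)
        {
            if (faceResult.ValidateFaceBoxAndPoints(displayWidth, displayHeight))
            {
                // safe to use faceResult.FaceBoundingBoxInColorSpace and FacePointsInColorSpace here
            }
        }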
        private bool ValidateFaceBoxAndPoints(FaceFrameResult faceResult)
        {
            bool isFaceValid = faceResult != null;
            int displayHeight = CoordinateConverter.SCREEN_HEIGHT;
            int displayWidth = CoordinateConverter.SCREEN_WIDTH;

            if (isFaceValid)
            {
                var faceBox = faceResult.FaceBoundingBoxInColorSpace;
                //Debug.Print("faceBox {0}, {1}, {2}, {3}", faceBox.Left, faceBox.Top, faceBox.Right, faceBox.Bottom);
                // check if we have a valid rectangle within the bounds of the screen space
                isFaceValid = (faceBox.Right - faceBox.Left) > 0 &&
                              (faceBox.Bottom - faceBox.Top) > 0 &&
                              faceBox.Right <= displayWidth &&
                              faceBox.Bottom <= displayHeight;

                if (isFaceValid)
                {
                    var facePoints = faceResult.FacePointsInColorSpace;
                    if (facePoints != null)
                    {
                        foreach (PointF pointF in facePoints.Values)
                        {
                            // check if we have a valid face point within the bounds of the screen space
                            bool isFacePointValid = pointF.X > 0.0f &&
                                                    pointF.Y > 0.0f &&
                                                    pointF.X < displayWidth &&
                                                    pointF.Y < displayHeight;

                            if (!isFacePointValid)
                            {
                                isFaceValid = false;
                                break;
                            }
                        }
                    }
                }
            }

            return isFaceValid;
        }
Example No. 6
        /// <summary>
        /// Shift window right with right eye wink at the camera
        /// </summary>
        private void MoveWindowRightHotkeyPress(FaceFrameResult frameResult)
        {
            if (this.IsEngaged(frameResult) && !this.IsLeftEyeClosed(frameResult) && this.IsRightEyeClosed(frameResult) && this.AllFlagsAreFalse())
            {
                this.MoveWindowRightHotkeyPressFlag = true;
                this.CountTime = 0;
            }

            if (this.MoveWindowRightHotkeyPressFlag && this.CountTime >= BlinkTime)
            {
                if (this.IsEngaged(frameResult) && !this.IsLeftEyeClosed(frameResult) && this.IsRightEyeClosed(frameResult))
                {
                    this.MoveWindowRightHotkeyPressFlag = true;
                    // Simulate each key stroke
                    InputSimulator.SimulateKeyDown(VirtualKeyCode.LWIN);
                    InputSimulator.SimulateKeyPress(VirtualKeyCode.RIGHT);
                    InputSimulator.SimulateKeyUp(VirtualKeyCode.LWIN);
                }
                else if (!this.IsLeftEyeClosed(frameResult) && !this.IsRightEyeClosed(frameResult))
                {
                    // both eyes open again: reset this gesture's flag
                    this.MoveWindowRightHotkeyPressFlag = false;
                }
            }
            else if (this.MoveWindowRightHotkeyPressFlag)
            {
                this.CountTime += 1;
            }
        }
Example No. 7
        /// <summary>
        /// Acquire the latest FaceFrameResults from all the FaceFrameReaders. 
        /// They must be checked for validity.
        /// </summary>
        /// <returns>Array of FaceFrames of bodyCount size (6 with KinectV2)</returns>
        public FaceFrameResult[] GetLatestFaceFrameResults()
        {
            FaceFrameResult[] results = new FaceFrameResult[bodyCount];
            for (int i = 0; i < this.bodyCount; i++)
            {
                if (this.faceFrameReaders[i] != null)
                {
                    // acquire the latest face frame for this reader and copy out its result
                    using (FaceFrame frame = this.faceFrameReaders[i].AcquireLatestFrame())
                    {
                        if (frame != null && frame.FaceFrameResult != null)
                        {
                            results[i] = frame.FaceFrameResult;
                        }
                    }
            }

            return results;
        }
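A sketch of how the returned array might be consumed; the consumer method and its logic are an assumption, but every entry must be null-checked because results[i] stays null when no new face frame arrived for body i:

        // Hypothetical consumer of GetLatestFaceFrameResults.
        private void ProcessLatestFaces()
        {
            FaceFrameResult[] results = this.GetLatestFaceFrameResults();
            for (int i = 0; i < results.Length; i++)
            {
                if (results[i] != null && results[i].FaceProperties != null)
                {
                    DetectionResult happy = results[i].FaceProperties[FaceProperty.Happy];
                    // react to the property here (e.g. update UI for body i)
                }
            }
        }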
Example No. 8
        /// <summary>
        /// Create shapes and text to apply to a parent panel which represent the
        /// current state of a faceFrameResult.
        /// </summary>
        /// <param name="parentPanel">The Panel in which the shapes will be added</param>
        /// <param name="faceFrameResult">The reult of the face tracking</param>
        /// <param name="displayFeatures">The FaceFrameFeatures to be drawn</param>
        /// <param name="color">The color of the shapes and text</param>
        /// <param name="bodyIndex">The index of the body/face</param>
        private void DrawFaceFeatures(Panel parentPanel, FaceFrameResult faceFrameResult, FaceFrameFeatures displayFeatures, Color color, int bodyIndex)
        {
            if (parentPanel.Width == 0 ||
                Double.IsNaN(parentPanel.Width) ||
                parentPanel.Height == 0 ||
                Double.IsNaN(parentPanel.Height))
            {
                // The parent Panel must have a size to be rendered on
                return;
            }

            string messages = "";
            bool renderMessages = false;
            int fontSize = (int)(parentPanel.Height * 0.023);
            Point messagesPosition = new Point(fontSize * bodyIndex * 10, 0);

            // Face points and bounding boxes
            if (displayFeatures.HasFlag(FaceFrameFeatures.BoundingBoxInColorSpace))
            {
                double lineSize = 7;
                int posX = faceFrameResult.FaceBoundingBoxInColorSpace.Left;
                int posY = faceFrameResult.FaceBoundingBoxInColorSpace.Top;
                int width = faceFrameResult.FaceBoundingBoxInColorSpace.Right - posX + (int)lineSize;
                int height = faceFrameResult.FaceBoundingBoxInColorSpace.Bottom - posY + (int)lineSize;
                Rectangle rect = CreateFaceBoxRectangle(color, lineSize, width, height);
                Canvas.SetLeft(rect, posX);
                Canvas.SetTop(rect, posY);
                parentPanel.Children.Add(rect);
                messagesPosition = new Point(posX, posY + height);
            }
            if (displayFeatures.HasFlag(FaceFrameFeatures.BoundingBoxInInfraredSpace))
            {
                double lineSize = 7;
                int posX = faceFrameResult.FaceBoundingBoxInInfraredSpace.Left;
                int posY = faceFrameResult.FaceBoundingBoxInInfraredSpace.Top;
                int width = faceFrameResult.FaceBoundingBoxInInfraredSpace.Right - posX + (int)lineSize;
                int height = faceFrameResult.FaceBoundingBoxInInfraredSpace.Bottom - posY + (int)lineSize;
                Rectangle rect = CreateFaceBoxRectangle(color, lineSize, width, height);
                Canvas.SetLeft(rect, posX);
                Canvas.SetTop(rect, posY);
                parentPanel.Children.Add(rect);
                messagesPosition = new Point(posX, posY + height);
            }
            if (displayFeatures.HasFlag(FaceFrameFeatures.PointsInColorSpace))
            {
                foreach (KeyValuePair<FacePointType, Point> facePointKVP in
                        faceFrameResult.FacePointsInColorSpace)
                {
                    Size ellipseSize = new Size(10, 10);
                    Ellipse ellipse = CreateFacePointEllipse(color, ellipseSize);
                    Canvas.SetLeft(ellipse, facePointKVP.Value.X - (ellipseSize.Width / 2));
                    Canvas.SetTop(ellipse, facePointKVP.Value.Y - (ellipseSize.Height / 2));
                    parentPanel.Children.Add(ellipse);
                }
            }
            if (displayFeatures.HasFlag(FaceFrameFeatures.PointsInInfraredSpace))
            {
                foreach (KeyValuePair<FacePointType, Point> facePointKVP in
                        faceFrameResult.FacePointsInInfraredSpace)
                {
                    Size ellipseSize = new Size(3, 3);
                    Ellipse ellipse = CreateFacePointEllipse(color, ellipseSize);
                    Canvas.SetLeft(ellipse, facePointKVP.Value.X - (ellipseSize.Width / 2));
                    Canvas.SetTop(ellipse, facePointKVP.Value.Y - (ellipseSize.Height / 2));
                    parentPanel.Children.Add(ellipse);
                }
            }
            // Rotation stuff
            if (displayFeatures.HasFlag(FaceFrameFeatures.RotationOrientation))
            {
                int pitch, yaw, roll;
                ExtractFaceRotationInDegrees(faceFrameResult.FaceRotationQuaternion,
                    out pitch, out yaw, out roll);
                messages += "Rotation Pitch: " + pitch + "\n";
                messages += "Rotation Yaw: " + yaw + "\n";
                messages += "Rotation Roll: " + roll + "\n";
                renderMessages = true;
            }

            // Other Face Properties and states
            if (displayFeatures.HasFlag(FaceFrameFeatures.FaceEngagement))
            {
                messages += FacePropertyToString(FaceProperty.Engaged,
                    faceFrameResult.FaceProperties[FaceProperty.Engaged]);
                renderMessages = true;
            }
            if (displayFeatures.HasFlag(FaceFrameFeatures.Glasses))
            {
                messages += FacePropertyToString(FaceProperty.WearingGlasses,
                    faceFrameResult.FaceProperties[FaceProperty.WearingGlasses]);
                renderMessages = true;
            }
            if (displayFeatures.HasFlag(FaceFrameFeatures.Happy))
            {
                messages += FacePropertyToString(FaceProperty.Happy,
                    faceFrameResult.FaceProperties[FaceProperty.Happy]);
                renderMessages = true;
            }
            if (displayFeatures.HasFlag(FaceFrameFeatures.LeftEyeClosed))
            {
                messages += FacePropertyToString(FaceProperty.LeftEyeClosed,
                    faceFrameResult.FaceProperties[FaceProperty.LeftEyeClosed]);
                renderMessages = true;
            }
            if (displayFeatures.HasFlag(FaceFrameFeatures.RightEyeClosed))
            {
                messages += FacePropertyToString(FaceProperty.RightEyeClosed,
                    faceFrameResult.FaceProperties[FaceProperty.RightEyeClosed]);
                renderMessages = true;
            }
            if (displayFeatures.HasFlag(FaceFrameFeatures.LookingAway))
            {
                messages += FacePropertyToString(FaceProperty.LookingAway,
                    faceFrameResult.FaceProperties[FaceProperty.LookingAway]);
                renderMessages = true;
            }
            if (displayFeatures.HasFlag(FaceFrameFeatures.MouthMoved))
            {
                messages += FacePropertyToString(FaceProperty.MouthMoved,
                    faceFrameResult.FaceProperties[FaceProperty.MouthMoved]);
                renderMessages = true;
            }
            if (displayFeatures.HasFlag(FaceFrameFeatures.MouthOpen))
            {
                messages += FacePropertyToString(FaceProperty.MouthOpen,
                    faceFrameResult.FaceProperties[FaceProperty.MouthOpen]);
                renderMessages = true;
            }

            if (renderMessages)
            {
                TextBlock textBlock = new TextBlock();
                textBlock.Text = messages;
                textBlock.Foreground = new SolidColorBrush(color);
                textBlock.FontSize = fontSize;
                Canvas.SetLeft(textBlock, messagesPosition.X);
                Canvas.SetTop(textBlock, messagesPosition.Y);
                parentPanel.Children.Add(textBlock);
            }
        }
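DrawFaceFeatures calls a FacePropertyToString helper that is not reproduced here; a minimal sketch, assuming it just formats a property/result pair as one line of the message block, could be:

        // Hypothetical helper (assumed formatting): one "Property: Result" line per call.
        private string FacePropertyToString(FaceProperty property, DetectionResult result)
        {
            return property + ": " + result + "\n";
        }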
Example No. 9
 //private void DrawFace(FrameworkElement face, )
 //{
 //    CoordinateMapper coordinateMapper = this.kinectSensor.CoordinateMapper;
 //    Brush drawingBrush = new SolidColorBrush(this.BodyColors[0]);
 //    //var facePoints = faceResult.;
 //    //;
 //    //CameraSpacePoint point = facePoints[FacePointType.Nose].
 //    //faceResult.FacePointsInColorSpace
 //    //CameraSpacePoint cameraPoint = new CameraSpacePoint();
 //    //cameraPoint.X = faceBox.Right - faceBox.Left;
 //    //cameraPoint.Y = faceBox.Top - faceBox.Bottom;
 //    //faceBox.
 //    //DepthSpacePoint depthSpacePoint = coordinateMapper.MapCameraPointToDepthSpace(.;
 //    //var rect = new Rectangle();
 //    //rect.Fill = drawingBrush;
 //    //rect.Width = faceBox.Right - faceBox.Left;
 //    //rect.Height = faceBox.Bottom - faceBox.Top;
 //    //rect.Margin = new Thickness(faceBox.Left, faceBox.Top, 0, 0);
 //    //this.drawingCanvas.Children.Add(rect);
 //}
 private void DrawFace(FrameworkElement face, FaceFrameResult faceFrameResult, Point point)
 {
     face.Visibility = Visibility.Visible;
 }
Example No. 10
        private void UpdateHead(FrameworkElement face, Body body, FaceFrameResult faceResult, Point point)
        {
            face.Visibility = Visibility.Visible;

            bool isLeftEyeClosed = false, isRightEyeClosed = false, isHappy = false;

            // If face tracking is working
            if (faceResult != null)
            {
                // Mouth
                isHappy = faceResult.FaceProperties[FaceProperty.Happy] == DetectionResult.Yes;

                // Eyes
                isLeftEyeClosed = faceResult.FaceProperties[FaceProperty.LeftEyeClosed] == DetectionResult.Yes;
                isRightEyeClosed = faceResult.FaceProperties[FaceProperty.RightEyeClosed] == DetectionResult.Yes;
            }

            body.UpdateMouth(isHappy);
            body.UpdateEye(!isLeftEyeClosed, true);
            body.UpdateEye(!isRightEyeClosed, false);

            // Position the head
            double left = point.X - face.Width / 2;
            double top = point.Y - face.Height / 2;

            if (!Double.IsInfinity(point.X) && !Double.IsInfinity(point.Y))
            {
                Canvas.SetLeft(face, left);
                Canvas.SetTop(face, top);
            }
        }
    private bool ValidateFaceBox(FaceFrameResult faceResult)
    {
        bool isFaceValid = faceResult != null;

        if (isFaceValid)
        {
            var faceBox = faceResult.FaceBoundingBoxInColorSpace;
            //if (faceBox != null)
            {
                // check if we have a valid rectangle within the bounds of the screen space
                isFaceValid = (faceBox.Right - faceBox.Left) > 0 &&
                    (faceBox.Bottom - faceBox.Top) > 0; // &&
                        //faceBox.Right <= this.faceDisplayWidth &&
                        //faceBox.Bottom <= this.faceDisplayHeight;
            }
        }

        return isFaceValid;
    }
Example No. 12
        /// <summary>
        /// Draws face frame results
        /// </summary>
        /// <param name="faceIndex">the index of the face frame corresponding to a specific body in the FOV</param>
        /// <param name="faceResult">container of all face frame results</param>
        /// <param name="drawingContext">drawing context to render to</param>
        private void DrawFaceFrameResults(int faceIndex, FaceFrameResult faceResult, DrawingContext drawingContext)
        {
            // choose the brush based on the face index
            Brush drawingBrush = this.faceBrush[0];
            if (faceIndex < this.bodyCount)
            {
                drawingBrush = this.faceBrush[faceIndex];
            }

            Pen drawingPen = new Pen(drawingBrush, DrawFaceShapeThickness);

            // draw the face bounding box
            var faceBoxSource = faceResult.FaceBoundingBoxInColorSpace;
            Rect faceBox = new Rect(faceBoxSource.Left, faceBoxSource.Top, faceBoxSource.Right - faceBoxSource.Left, faceBoxSource.Bottom - faceBoxSource.Top);
            drawingContext.DrawRectangle(null, drawingPen, faceBox);

            if (faceResult.FacePointsInColorSpace != null)
            {
                // draw each face point
                foreach (PointF pointF in faceResult.FacePointsInColorSpace.Values)
                {
                    drawingContext.DrawEllipse(null, drawingPen, new Point(pointF.X, pointF.Y), FacePointRadius, FacePointRadius);
                }
            }

            string faceText = string.Empty;

            // extract each face property information and store it in faceText
            if (faceResult.FaceProperties != null)
            {
                foreach (var item in faceResult.FaceProperties)
                {
                    faceText += item.Key + " : ";

                    if (item.Key == FaceProperty.LeftEyeClosed 
                        || item.Key == FaceProperty.RightEyeClosed
                        || item.Key == FaceProperty.LookingAway)
                    {
                        this.faceSignals.OnNext(item);
                    }

                    // consider a "maybe" as a "no" to restrict 
                    // the detection result refresh rate
                    if (item.Value == DetectionResult.Maybe)
                    {
                        faceText += DetectionResult.No + "\n";
                    }
                    else
                    {
                        faceText += item.Value.ToString() + "\n";
                    }                    
                }
            }

            // extract face rotation in degrees as Euler angles
            if (faceResult.FaceRotationQuaternion != null)
            {
                int pitch, yaw, roll;
                ExtractFaceRotationInDegrees(faceResult.FaceRotationQuaternion, out pitch, out yaw, out roll);
                faceText += "FaceYaw : " + yaw + "\n" +
                            "FacePitch : " + pitch + "\n" +
                            "FacenRoll : " + roll + "\n";
            }

            // render the face property and face rotation information
            Point faceTextLayout;
            if (this.GetFaceTextPositionInColorSpace(faceIndex, out faceTextLayout))
            {
                drawingContext.DrawText(
                        new FormattedText(
                            faceText,
                            CultureInfo.GetCultureInfo("en-us"),
                            FlowDirection.LeftToRight,
                            new Typeface("Georgia"),
                            DrawTextFontSize,
                            drawingBrush),
                        faceTextLayout);
            }
        }
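Several of these snippets call ExtractFaceRotationInDegrees without showing it. The Kinect v2 FaceBasics sample implements it roughly as below; the exact rotation increment constant is an assumption here:

        // Approximate reconstruction of the FaceBasics sample helper: converts the
        // face rotation quaternion to pitch/yaw/roll in degrees, snapped to an increment.
        private const double FaceRotationIncrementInDegrees = 5.0; // assumed value

        private static void ExtractFaceRotationInDegrees(Vector4 rotQuaternion, out int pitch, out int yaw, out int roll)
        {
            double x = rotQuaternion.X;
            double y = rotQuaternion.Y;
            double z = rotQuaternion.Z;
            double w = rotQuaternion.W;

            // convert the quaternion to Euler angles in degrees
            double pitchD = Math.Atan2(2 * ((y * z) + (w * x)), (w * w) - (x * x) - (y * y) + (z * z)) / Math.PI * 180.0;
            double yawD = Math.Asin(2 * ((w * y) - (x * z))) / Math.PI * 180.0;
            double rollD = Math.Atan2(2 * ((x * y) + (w * z)), (w * w) + (x * x) - (y * y) - (z * z)) / Math.PI * 180.0;

            // clamp to multiples of the increment to limit how often displayed values change
            double increment = FaceRotationIncrementInDegrees;
            pitch = (int)(Math.Floor((pitchD + ((increment / 2.0) * (pitchD > 0 ? 1.0 : -1.0))) / increment) * increment);
            yaw = (int)(Math.Floor((yawD + ((increment / 2.0) * (yawD > 0 ? 1.0 : -1.0))) / increment) * increment);
            roll = (int)(Math.Floor((rollD + ((increment / 2.0) * (rollD > 0 ? 1.0 : -1.0))) / increment) * increment);
        }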
        /// <summary>
        /// Draws face frame results
        /// </summary>
        /// <param name="faceIndex">the index of the face frame corresponding to a specific body in the FOV</param>
        /// <param name="faceResult">container of all face frame results</param>
        /// <param name="drawingContext">drawing context to render to</param>
        private void DrawFaceFrameResults(int faceIndex, FaceFrameResult faceResult, DrawingContext drawingContext)
        {
            // choose the brush based on the face index
            Brush drawingBrush = this.faceBrush[0];
            if (faceIndex < this.bodyCount)
            {
                drawingBrush = this.faceBrush[faceIndex];
            }

            Pen drawingPen = new Pen(drawingBrush, DrawFaceShapeThickness);

            // draw the face bounding box
            var faceBoxSource = faceResult.FaceBoundingBoxInColorSpace;
            Rect faceBox = new Rect(faceBoxSource.Left, faceBoxSource.Top, faceBoxSource.Right - faceBoxSource.Left, faceBoxSource.Bottom - faceBoxSource.Top);
            drawingContext.DrawRectangle(null, drawingPen, faceBox);

            if (faceResult.FacePointsInColorSpace != null)
            {
                // draw each face point
                foreach (PointF pointF in faceResult.FacePointsInColorSpace.Values)
                {
                    drawingContext.DrawEllipse(null, drawingPen, new Point(pointF.X, pointF.Y), FacePointRadius, FacePointRadius);
                }
            }

            string faceText = string.Empty;

            // extract each face property information and store it in faceText
            if (faceResult.FaceProperties != null)
            {
                foreach (var item in faceResult.FaceProperties)
                {
                    faceText += item.Key.ToString() + " : ";

                    // consider a "maybe" as a "no" to restrict
                    // the detection result refresh rate
                    if (item.Value == DetectionResult.Maybe)
                    {
                        faceText += DetectionResult.No + "\n";
                    }
                    else
                    {
                        faceText += item.Value.ToString() + "\n";
                    }
                }
            }

            // extract face rotation in degrees as Euler angles
            if (faceResult.FaceRotationQuaternion != null)
            {
                int pitch, yaw, roll;
                ExtractFaceRotationInDegrees(faceResult.FaceRotationQuaternion, out pitch, out yaw, out roll);
                faceText += "FaceYaw : " + yaw + "\n" +
                            "FacePitch : " + pitch + "\n" +
                            "FaceRoll : " + roll + "\n";
            }

            // render the face property and face rotation information
            Point faceTextLayout;
            if (this.GetFaceTextPositionInColorSpace(faceIndex, out faceTextLayout))
            {
                drawingContext.DrawText(
                        new FormattedText(
                            faceText,
                            CultureInfo.GetCultureInfo("en-us"),
                            FlowDirection.LeftToRight,
                            new Typeface("Georgia"),
                            DrawTextFontSize,
                            drawingBrush),
                        faceTextLayout);
            }
        }
Example No. 14
 private bool ValidateFaceBoxAndPoints(FaceFrameResult faceResult)
 {
     // simplified validation: only checks that a face result exists
     bool isFaceValid = faceResult != null;
     return isFaceValid;
 }
        /// <summary>
        /// Draws face frame results
        /// </summary>
        /// <param name="faceIndex">the index of the face frame corresponding to a specific body in the FOV</param>
        /// <param name="faceResult">container of all face frame results</param>
        /// <param name="drawingContext">drawing context to render to</param>
        private void DrawFaceFrameResults(int faceIndex, FaceFrameResult faceResult, DrawingContext drawingContext)
        {
            // choose the brush based on the face index
            Brush drawingBrush = this.faceBrush[0];
            no_cuerpos.Content = faceIndex.ToString();

            if (faceIndex < this.bodyCount)
            {
                drawingBrush = this.faceBrush[faceIndex];

                // show the face color in hexadecimal
                //var pintar = drawingBrush;
                ColorCara.Content = faceBrush[0];
                //no_cuerpos.Content=bodies[0].TrackingId; //tracking id
                //no_cuerpos.Content = bodies[0].IsTracked;  //true
                //no_cuerpos.Content = bodies[faceIndex].ToString();
            }

            Pen drawingPen = new Pen(drawingBrush, DrawFaceShapeThickness);

            // draw the face bounding box
            var faceBoxSource = faceResult.FaceBoundingBoxInColorSpace;
            Rect faceBox = new Rect(faceBoxSource.Left, faceBoxSource.Top, faceBoxSource.Right - faceBoxSource.Left, faceBoxSource.Bottom - faceBoxSource.Top);
            drawingContext.DrawRectangle(null, drawingPen, faceBox);

            if (faceResult.FacePointsInColorSpace != null)
            {
                // draw each face point
                foreach (PointF pointF in faceResult.FacePointsInColorSpace.Values)
                {
                    drawingContext.DrawEllipse(null, drawingPen, new Point(pointF.X, pointF.Y), FacePointRadius, FacePointRadius);
                }
            }

            string faceText = string.Empty;

            // extract each face property information and store it in faceText
            if (faceResult.FaceProperties != null)
            {
                foreach (var item in faceResult.FaceProperties)
                {
                    faceText += item.Key.ToString() + " : ";

                    // consider a "maybe" as a "no" to restrict 
                    // the detection result refresh rate
                    if (item.Value == DetectionResult.Maybe)
                    {
                        faceText += DetectionResult.No + "\n";
                    }
                    else
                    {
                        faceText += item.Value.ToString() + "\n";
                    }
                }
            }

            // extract face rotation in degrees as Euler angles
            if (faceResult.FaceRotationQuaternion != null)
            {
                int pitch, yaw, roll;
                ExtractFaceRotationInDegrees(faceResult.FaceRotationQuaternion, out pitch, out yaw, out roll);
                faceText += "FaceYaw : " + yaw + "\n" +
                            "FacePitch : " + pitch + "\n" +
                            "FacenRoll : " + roll + "\n";
            }

            // render the face property and face rotation information
            Point faceTextLayout;
            if (this.GetFaceTextPositionInColorSpace(faceIndex, out faceTextLayout))
            {
                drawingContext.DrawText(
                        new FormattedText(
                            faceText,
                            CultureInfo.GetCultureInfo("en-us"),
                            FlowDirection.LeftToRight,
                            new Typeface("Georgia"),
                            DrawTextFontSize,
                            drawingBrush),
                        faceTextLayout);
            }
            FaceFrameResult cara = faceResult;

            MySqlCommand command = new MySqlCommand();
            command.CommandType = System.Data.CommandType.Text;

            command.CommandText = "INSERT INTO data_capturev5(clave_unica,Happy,Engage,WearingGlasses,LeftEyeClosed,RightEyeClosed,MouthOpen,MouthMoved,LookingAway,nombrePC,No_Usuarios) VALUES (@clave_unica,@Happy,@Engaged,@WearingGlasses,@LeftEyeClosed,@RightEyeClosed,@MouthOpen,@MouthMoved,@LookingAway,@nombrePC,@No_Usuarios);";

            command.Parameters.Add("@clave_unica", MySqlDbType.VarChar, 200);
            command.Parameters["@clave_unica"].Value = strGuid;

            command.Parameters.Add("@Happy", MySqlDbType.VarChar, 10);
            command.Parameters["@Happy"].Value = cara.FaceProperties[FaceProperty.Happy].ToString();

            command.Parameters.Add("@Engaged", MySqlDbType.VarChar, 10);
            command.Parameters["@Engaged"].Value = cara.FaceProperties[FaceProperty.Engaged].ToString();

            command.Parameters.Add("@WearingGlasses", MySqlDbType.VarChar, 10);
            command.Parameters["@WearingGlasses"].Value = cara.FaceProperties[FaceProperty.WearingGlasses].ToString();

            command.Parameters.Add("@LeftEyeClosed", MySqlDbType.VarChar, 10);
            command.Parameters["@LeftEyeClosed"].Value = cara.FaceProperties[FaceProperty.LeftEyeClosed].ToString();

            command.Parameters.Add("@RightEyeClosed", MySqlDbType.VarChar, 10);
            command.Parameters["@RightEyeClosed"].Value = cara.FaceProperties[FaceProperty.RightEyeClosed].ToString();

            command.Parameters.Add("@MouthOpen", MySqlDbType.VarChar, 10);
            command.Parameters["@MouthOpen"].Value = cara.FaceProperties[FaceProperty.MouthOpen].ToString();

            command.Parameters.Add("@MouthMoved", MySqlDbType.VarChar, 10);
            command.Parameters["@MouthMoved"].Value = cara.FaceProperties[FaceProperty.MouthMoved].ToString();

            command.Parameters.Add("@LookingAway", MySqlDbType.VarChar, 10);
            command.Parameters["@LookingAway"].Value = cara.FaceProperties[FaceProperty.LookingAway].ToString();

            command.Parameters.Add("@No_Usuarios", MySqlDbType.VarChar, 10);
            command.Parameters["@No_Usuarios"].Value = faceIndex;

            command.Parameters.Add("@nombrePC", MySqlDbType.VarChar, 20);
            command.Parameters["@nombrePC"].Value = nombrePC;

            //command.Parameters.Add("@Resource_Id", MySqlDbType.VarChar, 10);
            //command.Parameters["@Resource_Id"].Value = txt_resource_Id.Text;

            command.Connection = conn.conection;
            conn.connect();

            // only insert every 30th frame to limit database writes
            if (counter % 30 == 0)
            {
                command.ExecuteNonQuery();
            }
            counter++;
            conn.desconectar();
        }
Example No. 16
 /// <summary>
 /// Right Eye is Closed? 
 /// </summary>
 private bool IsRightEyeClosed(FaceFrameResult frameResult)
 {
     return frameResult.FaceProperties[FaceProperty.RightEyeClosed].Equals(DetectionResult.Yes);
 }
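The wink-driven hotkey examples also call IsLeftEyeClosed, which is not shown; presumably it mirrors IsRightEyeClosed above:

 /// <summary>
 /// Left Eye is Closed? (assumed mirror of IsRightEyeClosed)
 /// </summary>
 private bool IsLeftEyeClosed(FaceFrameResult frameResult)
 {
     return frameResult.FaceProperties[FaceProperty.LeftEyeClosed].Equals(DetectionResult.Yes);
 }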
Example No. 17
 /// <summary>
 /// Is the user Happy? 
 /// </summary>
 private bool IsHappy(FaceFrameResult frameResult)
 {
     return frameResult.FaceProperties[FaceProperty.Happy].Equals(DetectionResult.Yes);
 }
Example No. 18
 /// <summary>
 /// Is the user looking at the camera? 
 /// </summary>
 private bool IsEngaged(FaceFrameResult frameResult)
 {
     return frameResult.FaceProperties[FaceProperty.Engaged].Equals(DetectionResult.Yes);
 }
Example No. 19
 /// <summary>
 /// Alt-tab left with left eye wink at camera while being happy
 /// </summary>
 private void AltTabHotkeyPress(FaceFrameResult frameResult)
 {
     if (this.IsEngaged(frameResult) && this.IsHappy(frameResult) && this.IsLeftEyeClosed(frameResult) && !this.IsRightEyeClosed(frameResult) && this.AltTabHotkeyPressFlag == false)
     {
         System.Threading.Thread.Sleep(BlinkTime);
         if (this.IsEngaged(frameResult) && this.IsLeftEyeClosed(frameResult) && !this.IsRightEyeClosed(frameResult))
         {
             this.AltTabHotkeyPressFlag = true;
             // Simulate each key stroke
             InputSimulator.SimulateKeyPress(VirtualKeyCode.TAB);
         }
     }
     else if (!this.IsLeftEyeClosed(frameResult) && !this.IsRightEyeClosed(frameResult))
     {
         this.AltTabHotkeyPressFlag = false;
     }
 }
Example No. 20
        private void handleFaceFrameResults(int faceIndex, FaceFrameResult faceResult)
        {
            if (faceResult.FaceProperties != null)
            {
                if (closeness == Closeness.Close)
                {
                    faceIndicator.Fill = new SolidColorBrush(Colors.Black);
                }
                
                if (faceResult.FaceProperties[FaceProperty.LookingAway] == DetectionResult.Yes
                    || faceResult.FaceProperties[FaceProperty.LookingAway] == DetectionResult.Maybe
                    || faceResult.FaceProperties[FaceProperty.LookingAway] == DetectionResult.Unknown)
                {
                    if (closeness == Closeness.Far)
                    {
                        DepthImage.Opacity = 0.65;
                    } else
                    {
                        DepthImage.Opacity = 0;
                    }
                } else
                {
                    DepthImage.Opacity = 0;
                }

                if (faceResult.FaceProperties[FaceProperty.LookingAway] == DetectionResult.Yes
                    || faceResult.FaceProperties[FaceProperty.LookingAway] == DetectionResult.Maybe
                    || faceResult.FaceProperties[FaceProperty.LookingAway] == DetectionResult.Unknown)
                {
                    lookAtTimer = 0.0;
                    lookAwayTimer += (currentBodyFrameTime - previousBodyFrameTime).TotalSeconds;
                    //Debug.WriteLine("lookAway:" + lookAwayTimer.ToString());
                }
                else
                {
                    lookAtTimer += (currentBodyFrameTime - previousBodyFrameTime).TotalSeconds;
                    if (lookAtTimer >= 0.66)
                    {
                        //Debug.WriteLine("RESET LOOKAWAY");
                        lookAwayTimer = 0.0;
                    }

                    if (lookAtTimer > 7 && closeness == Closeness.Close)
                    {
                        // screamer
                        screamerImage.Opacity = 1;

                        screamerImage.MouseUp += ScreamerImage_MouseUp;

                        screamPlayer.Play();

                        staticListenPlayer.Stop();
                        staticListenPlayer.Volume = 0;
                    }
                }

                if (!didScaryVolume && lookAwayTimer > 3.6 && closeness == Closeness.Far && magicEye.Opacity > 0.5)
                {
                    didScaryVolume = true;
                    Uri uri = new Uri(@bump_file, UriKind.Relative);
                    staticListenPlayer.MediaEnded -= Player_MediaEnded;
                    staticListenPlayer.Stop();
                    staticListenPlayer.Open(uri);
                    staticListenPlayer.Volume = 1;
                    staticListenPlayer.Play();
                    staticListenPlayer.MediaEnded += Player_MediaEnded;

                    lookAwayTimer = 0;
                }
            } else
            {
                DepthImage.Opacity = 0;
            }
        }
        /// <summary>
        /// Draws face frame results
        /// </summary>
        /// <param name="faceIndex">the index of the face frame corresponding to a specific body in the FOV</param>
        /// <param name="faceResult">container of all face frame results</param>
        /// <param name="drawingContext">drawing context to render to</param>
        private void DrawFaceFrameResults(int faceIndex, FaceFrameResult faceResult, DrawingContext drawingContext)
        {
            // choose the brush based on the face index
            Brush drawingBrush = this.faceBrush[0];
            no_cuerpos.Content = faceIndex.ToString();

            if (faceIndex < this.bodyCount)
            {
                drawingBrush = this.faceBrush[faceIndex];

                // show the face color in hexadecimal
                ColorCara.Content = faceBrush[faceIndex].ToString();
            }

            Pen drawingPen = new Pen(drawingBrush, DrawFaceShapeThickness);

            // draw the face bounding box
            var faceBoxSource = faceResult.FaceBoundingBoxInColorSpace;

            Rect faceBox = new Rect(faceBoxSource.Left, faceBoxSource.Top, faceBoxSource.Right - faceBoxSource.Left, faceBoxSource.Bottom - faceBoxSource.Top);
            drawingContext.DrawRectangle(null, drawingPen, faceBox);

            // subtraction to get the diagonal, i.e. the center of the box
            //float resta1 = faceBoxSource.Right - faceBoxSource.Left;
            //float resta2 = faceBoxSource.Bottom - faceBoxSource.Top;
            //float a=faceBoxSource.Left;
            //float b=faceBoxSource.Right;
            //float c=faceBoxSource.Bottom;
            //float d=faceBoxSource.Top;

            // distance between two points
            //float distancia = Math.Sqrt(Math.Pow(x2-x1,2)+Math.Pow(y2-y1,2));

            // diagonal length of the face box, used as a rough size/distance measure
            double distanciaZ = Math.Sqrt(Math.Pow(faceBox.Bottom - faceBox.Top, 2) + Math.Pow(faceBoxSource.Right - faceBoxSource.Left, 2));
            

            if (faceResult.FacePointsInColorSpace != null)
            {
                // draw each face point
                foreach (PointF pointF in faceResult.FacePointsInColorSpace.Values)
                {
                    drawingContext.DrawEllipse(null, drawingPen, new Point(pointF.X, pointF.Y), FacePointRadius, FacePointRadius);
                }
            }

            string faceText = string.Empty;

            // extract each face property information and store it in faceText
            if (faceResult.FaceProperties != null)
            {
                foreach (var item in faceResult.FaceProperties)
                {
                    faceText += item.Key.ToString() + " : ";

                    // consider a "maybe" as a "no" to restrict 
                    // the detection result refresh rate
                    if (item.Value == DetectionResult.Maybe)
                    {
                        faceText += DetectionResult.No + "\n";
                    }
                    else
                    {
                        faceText += item.Value.ToString() + "\n";
                    }                    
                }
            }

            // extract face rotation in degrees as Euler angles
            if (faceResult.FaceRotationQuaternion != null)
            {
                int pitch, yaw, roll;
                ExtractFaceRotationInDegrees(faceResult.FaceRotationQuaternion, out pitch, out yaw, out roll);
                faceText += "FaceYaw : " + yaw + "\n" +
                            "FacePitch : " + pitch + "\n" +
                            "FacenRoll : " + roll + "\n";
            }

            // render the face property and face rotation information
            Point faceTextLayout;
            if (this.GetFaceTextPositionInColorSpace(faceIndex, out faceTextLayout))
            {
                drawingContext.DrawText(
                        new FormattedText(
                            faceText,
                            CultureInfo.GetCultureInfo("en-us"),
                            FlowDirection.LeftToRight,
                            new Typeface("Georgia"),
                            DrawTextFontSize,
                            drawingBrush),
                        faceTextLayout);
            }
            FaceFrameResult cara = faceResult;

            MySqlCommand command = new MySqlCommand();
            command.CommandType = System.Data.CommandType.Text;

            command.CommandText = "INSERT INTO extradata_capturev9_videos(clave_unica,Happy,Engage,WearingGlasses,LeftEyeClosed,RightEyeClosed,MouthOpen,MouthMoved,LookingAway,nombrePC,No_Usuarios,FaceBoxSourceLeft,FaceBoxSourceTop,FaceBoxSourceRight,FaceBoxSourceBottom,PointFX,PointFY,distanciaZ) VALUES (@clave_unica,@Happy,@Engaged,@WearingGlasses,@LeftEyeClosed,@RightEyeClosed,@MouthOpen,@MouthMoved,@LookingAway,@nombrePC,@No_Usuarios,@FaceBoxSourceLeft,@FaceBoxSourceTop,@FaceBoxSourceRight,@FaceBoxSourceBottom,@PointFX,@PointFY,@distanciaZ);";
           
            command.Parameters.Add("@clave_unica", MySqlDbType.VarChar, 200);
            command.Parameters["@clave_unica"].Value = strGuid;

            command.Parameters.Add("@Happy", MySqlDbType.VarChar, 10);
            command.Parameters["@Happy"].Value = cara.FaceProperties[FaceProperty.Happy].ToString();

            command.Parameters.Add("@Engaged", MySqlDbType.VarChar, 10);
            command.Parameters["@Engaged"].Value = cara.FaceProperties[FaceProperty.Engaged].ToString();

            command.Parameters.Add("@WearingGlasses", MySqlDbType.VarChar, 10);
            command.Parameters["@WearingGlasses"].Value = cara.FaceProperties[FaceProperty.WearingGlasses].ToString();

            command.Parameters.Add("@LeftEyeClosed", MySqlDbType.VarChar, 10);
            command.Parameters["@LeftEyeClosed"].Value = cara.FaceProperties[FaceProperty.LeftEyeClosed].ToString();

            command.Parameters.Add("@RightEyeClosed", MySqlDbType.VarChar, 10);
            command.Parameters["@RightEyeClosed"].Value = cara.FaceProperties[FaceProperty.RightEyeClosed].ToString();

            command.Parameters.Add("@MouthOpen", MySqlDbType.VarChar, 10);
            command.Parameters["@MouthOpen"].Value = cara.FaceProperties[FaceProperty.MouthOpen].ToString();

            command.Parameters.Add("@MouthMoved", MySqlDbType.VarChar, 10);
            command.Parameters["@MouthMoved"].Value = cara.FaceProperties[FaceProperty.MouthMoved].ToString();

            command.Parameters.Add("@LookingAway", MySqlDbType.VarChar, 10);
            command.Parameters["@LookingAway"].Value = cara.FaceProperties[FaceProperty.LookingAway].ToString();

            command.Parameters.Add("@No_Usuarios", MySqlDbType.VarChar,10);
            command.Parameters["@No_Usuarios"].Value = faceIndex;

            command.Parameters.Add("@nombrePC", MySqlDbType.VarChar, 20);
            command.Parameters["@nombrePC"].Value = nombrePC;

            // extra fields recording how near or far viewers are from the piece
            command.Parameters.Add("@FaceBoxSourceLeft", MySqlDbType.Float);        // database column
            command.Parameters["@FaceBoxSourceLeft"].Value = faceBoxSource.Left;    // value sent to the DB

            command.Parameters.Add("@FaceBoxSourceTop", MySqlDbType.Float);
            command.Parameters["@FaceBoxSourceTop"].Value = faceBoxSource.Top;

            command.Parameters.Add("@FaceBoxSourceRight", MySqlDbType.Float);
            command.Parameters["@FaceBoxSourceRight"].Value = faceBox.Right;

            command.Parameters.Add("@FaceBoxSourceBottom", MySqlDbType.Float);
            command.Parameters["@FaceBoxSourceBottom"].Value = faceBox.Bottom;

            // X and Y points
            command.Parameters.Add("@PointFX", MySqlDbType.Float);
            command.Parameters["@PointFX"].Value = faceBox.X;

            command.Parameters.Add("@PointFY", MySqlDbType.Float);
            command.Parameters["@PointFY"].Value = faceBox.Y;

            // difference between the points (distance)
            command.Parameters.Add("@distanciaZ",MySqlDbType.Float);
            command.Parameters["@distanciaZ"].Value = distanciaZ;

            //FaceBox.Z
            //command.Parameters.Add("@Z", MySqlDbType.Float);
            //command.Parameters["@Z"].Value = this.faceFrameSources[0].FaceFrameFeatures; 


            //command.Parameters.Add("@Resource_Id", MySqlDbType.VarChar, 10);
            //command.Parameters["@Resource_Id"].Value = txt_resource_Id.Text;

            command.Connection = conn.conection;
            conn.connect();
            command.ExecuteNonQuery();
            conn.desconectar();
        }
Example No. 22
        private void WriteFaceData(FaceFrameResult res, int slice)
        {
            Vector2 pos;
            Vector2 size;

            size.X = Math.Abs(res.FaceBoundingBoxInColorSpace.Right - res.FaceBoundingBoxInColorSpace.Left);
            size.Y = Math.Abs(res.FaceBoundingBoxInColorSpace.Bottom - res.FaceBoundingBoxInColorSpace.Top);

            pos.X = (float)res.FaceBoundingBoxInColorSpace.Left + (size.X * 0.5f);
            pos.Y = (float)res.FaceBoundingBoxInColorSpace.Top + (size.Y * 0.5f);

            pos = this.ProcessPoint(pos);

            size.X /= 1920.0f;
            size.Y /= 1080.0f;

            this.FOutPositionColor[slice] = pos;
            this.FOutSizeColor[slice] = size;

            this.FOutPointsColor[slice].SliceCount = res.FacePointsInColorSpace.Count;
            this.FOutPointsWorld[slice].SliceCount = res.FacePointsInColorSpace.Count;
            var pointRef = this.FOutPointsColor[slice];
            var wRef = this.FOutPointsWorld[slice];
            for (int i = 0; i < res.FacePointsInColorSpace.Count; i++)
            {
                var pt = res.FacePointsInColorSpace[(FacePointType)i];
                pointRef[i] = this.ProcessPoint(new Vector2(pt.X, pt.Y));
            }

            this.FOutOrientation[slice] = new Quaternion(res.FaceRotationQuaternion.X, res.FaceRotationQuaternion.Y,
                res.FaceRotationQuaternion.Z, res.FaceRotationQuaternion.W);

            this.FOutEngaged[slice] = res.FaceProperties[FaceProperty.Engaged].ToString();
            this.FOutWearGlasses[slice] = res.FaceProperties[FaceProperty.WearingGlasses].ToString();
            this.FOutHappy[slice] = res.FaceProperties[FaceProperty.Happy].ToString();
            this.FOutLeftEyeClosed[slice] = res.FaceProperties[FaceProperty.LeftEyeClosed].ToString();
            this.FOutRightEyeClosed[slice] = res.FaceProperties[FaceProperty.RightEyeClosed].ToString();
            this.FOutlookAway[slice] = res.FaceProperties[FaceProperty.LookingAway].ToString();
            this.FOutMouthMoved[slice] = res.FaceProperties[FaceProperty.MouthMoved].ToString();
            this.FOutMouthOpen[slice] = res.FaceProperties[FaceProperty.MouthOpen].ToString();
            this.FOutUserIndex[slice] = (int)res.TrackingId;
        }
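WriteFaceData passes color-space coordinates through a ProcessPoint helper that is not shown; a plausible sketch, assuming it normalizes 1920x1080 color coordinates into a [-1, 1] range for the output pins, is:

        // Hypothetical helper (assumed normalization): maps color-space pixels to [-1, 1],
        // with Y flipped so that up is positive.
        private Vector2 ProcessPoint(Vector2 colorPoint)
        {
            float x = (colorPoint.X / 1920.0f) * 2.0f - 1.0f;
            float y = 1.0f - (colorPoint.Y / 1080.0f) * 2.0f;
            return new Vector2(x, y);
        }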
Example No. 23
        private void DrawFaceFrameResults(int faceIndex, FaceFrameResult faceResult, DrawingContext drawingContext)
        {
            // choose the brush based on the face index
            Brush drawingBrush = this.faceBrush[0];
            if (faceIndex < this.bodyCount)
            {
                drawingBrush = this.faceBrush[faceIndex];
            }

            Pen drawingPen = new Pen(drawingBrush, DrawFaceShapeThickness);

            // draw the face bounding box
            var faceBoxSource = faceResult.FaceBoundingBoxInColorSpace;
            Rect faceBox = new Rect(faceBoxSource.Left, faceBoxSource.Top, faceBoxSource.Right - faceBoxSource.Left, faceBoxSource.Bottom - faceBoxSource.Top);
            drawingContext.DrawRectangle(null, drawingPen, faceBox);
            WelcomeText.Text += "hi" + "\n";
            if (faceResult.FacePointsInColorSpace != null)
            {
                WelcomeText.Text += "not null" + "\n";
                var catNoseX = faceResult.FacePointsInColorSpace[FacePointType.Nose].X;
                var catNoseY = faceResult.FacePointsInColorSpace[FacePointType.Nose].Y;
                BitmapImage catNose = new BitmapImage(new Uri("E:/DramaLearning/kinect-2-background-removal-master/KinectBackgroundRemoval/Assets/CatNose.png"));

                drawingContext.DrawImage(catNose, new Rect(catNoseX - (catNose.PixelWidth / 4), catNoseY - (catNose.PixelHeight / 4),
                                                                  catNose.PixelWidth / 2, catNose.PixelHeight / 2));

                drawingContext.DrawEllipse(null, drawingPen, new Point(catNoseX, catNoseY), FacePointRadius, FacePointRadius);
            }

            string faceText = string.Empty;

            // extract each face property information and store it in faceText
            if (faceResult.FaceProperties != null)
            {
                foreach (var item in faceResult.FaceProperties)
                {
                    faceText += item.Key.ToString() + " : ";

                    // consider a "maybe" as a "no" to restrict
                    // the detection result refresh rate
                    if (item.Value == DetectionResult.Maybe)
                    {
                        faceText += DetectionResult.No + "\n";
                    }
                    else
                    {
                        faceText += item.Value.ToString() + "\n";
                    }
                }
            }

            // extract face rotation in degrees as Euler angles
            /*if (faceResult.FaceRotationQuaternion != null)
            {
                int pitch, yaw, roll;
                ExtractFaceRotationInDegrees(faceResult.FaceRotationQuaternion, out pitch, out yaw, out roll);
                faceText += "FaceYaw : " + yaw + "\n" +
                            "FacePitch : " + pitch + "\n" +
                            "FacenRoll : " + roll + "\n";
            }*/

            // render the face property and face rotation information
            Point faceTextLayout;
            if (this.GetFaceTextPositionInColorSpace(faceIndex, out faceTextLayout))
            {
                drawingContext.DrawText(
                        new FormattedText(
                            faceText,
                            CultureInfo.GetCultureInfo("en-us"),
                            FlowDirection.LeftToRight,
                            new Typeface("Georgia"),
                            DrawTextFontSize,
                            drawingBrush),
                        faceTextLayout);
            }
        }
        /// <summary>
        /// Each result is drawn for the person attached to it.
        /// </summary>
        /// <param name="p">The person whose overlay image is positioned</param>
        /// <param name="f">The face frame result for that person</param>
        void DrawFaceFrameResults(Person p, FaceFrameResult f)
        {
            var bb = f.FaceBoundingBoxInColorSpace;

            // Gather the width of the bounding box then the size difference 
            double width = Math.Abs(bb.Right - bb.Left) * 1.8;
            double height = Math.Abs(bb.Bottom - bb.Top) * 1.8;
            double wDiff = Math.Abs(width - p.imageRef.Width);
            double hDiff = Math.Abs(height - p.imageRef.Height);

            // This will tell whether or not the image should be resized.
            if (wDiff / p.imageRef.Width > 0.25 || hDiff / p.imageRef.Height > 0.25 || Double.IsNaN(p.imageRef.Width))
            {
                if (width > 0 && height > 0)
                {
                    p.imageRef.Width = width;
                    p.imageRef.Height = height;
                }
            }
            else
            {
                width = p.imageRef.Width;
                height = p.imageRef.Height;
            }

            // Gather the coordinates of the image and the location difference
            double left = bb.Left - width * 0.2;
            double top = bb.Top - height * 0.2 - height * 0.65;
            double lDiff = Math.Abs(Canvas.GetLeft(p.imageRef) - left);
            double tDiff = Math.Abs(Canvas.GetTop(p.imageRef) - top);

            // Move the image only if the position changed by more than 7% or has not been set yet.
            if (lDiff / Canvas.GetLeft(p.imageRef) > 0.07 || tDiff / Canvas.GetTop(p.imageRef) > 0.07 || Double.IsNaN(Canvas.GetTop(p.imageRef)))
            {
                if (left > 0 && top > 0)
                {
                    p.left = left;
                    p.top = top;
                }
                Canvas.SetLeft(p.imageRef, p.left);
                Canvas.SetTop(p.imageRef, p.top);
            }

            p.imageRef.Visibility = Visibility.Visible;

        }
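
The Person type used above is not part of the Kinect SDK. A minimal sketch of the members this method depends on (imageRef, left, top), assuming Person is just a container for a WPF overlay image:

        public class Person
        {
            // WPF Image hosted on the overlay Canvas; the face result drives its size and position
            public Image imageRef { get; set; }

            // last committed Canvas coordinates, kept so small jitters do not move the image
            public double left { get; set; }
            public double top { get; set; }
        }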
        /// <summary>
        /// Draws face frame results
        /// </summary>
        /// <param name="faceIndex">the index of the face frame corresponding to a specific body in the FOV</param>
        /// <param name="faceResult">container of all face frame results</param>
        /// <param name="drawingContext">drawing context to render to</param>
        private void DrawFaceFrameResults(int faceIndex, FaceFrameResult faceResult, DrawingContext drawingContext)
        {
            // choose the brush based on the face index
            Brush drawingBrush = this.faceBrush[0];
            if (faceIndex < this.bodyCount)
            {
                drawingBrush = this.faceBrush[faceIndex];
            }

            Pen drawingPen = new Pen(drawingBrush, DrawFaceShapeThickness);

            // draw the face bounding box
            var faceBoxSource = faceResult.FaceBoundingBoxInColorSpace;
            Rect faceBox = new Rect(faceBoxSource.Left, faceBoxSource.Top, faceBoxSource.Right - faceBoxSource.Left, faceBoxSource.Bottom - faceBoxSource.Top);
            drawingContext.DrawRectangle(null, drawingPen, faceBox);

            if (faceResult.FacePointsInColorSpace != null)
            {
                // draw each face point
                foreach (PointF pointF in faceResult.FacePointsInColorSpace.Values)
                {
                    drawingContext.DrawEllipse(null, drawingPen, new Point(pointF.X, pointF.Y), FacePointRadius, FacePointRadius);
                }
            }

            string faceText = string.Empty;

            // extract each face property information and store it in faceText
            if (faceResult.FaceProperties != null)
            {
                foreach (var item in faceResult.FaceProperties)
                {
                    faceText += item.Key.ToString() + "  :  ";

                    // consider a "maybe" as a "no" to restrict
                    // the detection result refresh rate
                    if (item.Value == DetectionResult.Maybe)
                    {
                        faceText += DetectionResult.No + " \n  ";
                    }
                    else
                    {
                        faceText += item.Value.ToString() + " \n  ";
                    }
                }
            }

            // extract face rotation in degrees as Euler angles
            if (faceResult.FaceRotationQuaternion != null)
            {
                int pitch, yaw, roll;
                ExtractFaceRotationInDegrees(faceResult.FaceRotationQuaternion, out pitch, out yaw, out roll);
                faceText += "FaceYaw : " + yaw + "\n" +
                            "FacePitch : " + pitch + "\n" +
                            "FacenRoll : " + roll + "\n";
            }

            // added: serialize the face text to JSON, display it, and persist it to MySQL
            label1.Content = faceText;
            var valor = faceText;
            var json = new JavaScriptSerializer().Serialize(valor);
            label2.Content = String.Format("Face:  {0}  ", json);

            // use a parameterized command so the serialized text cannot break the SQL statement
            MySqlCommand command = new MySqlCommand();
            command.CommandType = System.Data.CommandType.Text;
            command.CommandText = "INSERT INTO caract (JSON) VALUES (@json);";
            command.Parameters.AddWithValue("@json", json);
            command.Connection = conn.conection;
            conn.connect();
            command.ExecuteNonQuery();
            conn.desconectar();
            MessageBox.Show("Inserted successfully");

            // render the face property and face rotation information
            Point faceTextLayout;
            if (this.GetFaceTextPositionInColorSpace(faceIndex, out faceTextLayout))
            {
                drawingContext.DrawText(
                        new FormattedText(
                            faceText,
                            CultureInfo.GetCultureInfo("en-us"),
                            FlowDirection.LeftToRight,
                            new Typeface("Georgia"),
                            DrawTextFontSize,
                            drawingBrush),
                        faceTextLayout);
            }
        }
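
ExtractFaceRotationInDegrees (and quaternion2degree in the next example) converts the face rotation quaternion to Euler angles. A sketch following the stock FaceBasics sample, with the snapping increment treated as an assumed constant:

        private const double FaceRotationIncrementInDegrees = 5.0; // assumed snapping step

        private static void ExtractFaceRotationInDegrees(Vector4 rotQuaternion, out int pitch, out int yaw, out int roll)
        {
            double x = rotQuaternion.X;
            double y = rotQuaternion.Y;
            double z = rotQuaternion.Z;
            double w = rotQuaternion.W;

            // convert the quaternion to Euler angles in degrees
            double pitchD = Math.Atan2(2 * ((y * z) + (w * x)), (w * w) - (x * x) - (y * y) + (z * z)) / Math.PI * 180.0;
            double yawD = Math.Asin(2 * ((w * y) - (x * z))) / Math.PI * 180.0;
            double rollD = Math.Atan2(2 * ((x * y) + (w * z)), (w * w) + (x * x) - (y * y) - (z * z)) / Math.PI * 180.0;

            // snap the angles to the nearest increment so the on-screen text does not flicker
            double increment = FaceRotationIncrementInDegrees;
            pitch = (int)(Math.Floor((pitchD + ((increment / 2.0) * (pitchD > 0 ? 1.0 : -1.0))) / increment) * increment);
            yaw = (int)(Math.Floor((yawD + ((increment / 2.0) * (yawD > 0 ? 1.0 : -1.0))) / increment) * increment);
            roll = (int)(Math.Floor((rollD + ((increment / 2.0) * (rollD > 0 ? 1.0 : -1.0))) / increment) * increment);
        }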
Exemplo n.º 26
0
        void DrawFaceFrameResult( int faceIndex, FaceFrameResult faceResult, DrawingContext drawingContext )
        {
            //Brush/Pen
            Brush drawingBrush = faceBrush[0];
            if ( faceIndex<bodyCount ) {
                drawingBrush = faceBrush[faceIndex];
            }
            Pen drawingPen = new Pen( drawingBrush, 5 );

            //Face Points
            var facePoints = faceResult.FacePointsInColorSpace;
            foreach ( PointF pointF in facePoints.Values ) {
                drawingContext.DrawEllipse( null, drawingPen, new Point( pointF.X, pointF.Y ), 10, 10 );
            }

            //Bounding Box
            RectI box = faceResult.FaceBoundingBoxInColorSpace;
            int width = box.Right - box.Left;
            int height = box.Bottom - box.Top;
            Rect rect = new Rect( box.Left, box.Top, width, height );
            drawingContext.DrawRectangle( null, drawingPen, rect );
            String drawingText = String.Empty;

            //Rotation
            if ( faceResult.FaceRotationQuaternion==null ) {
                return;
            }
            Vector4 quaternion = faceResult.FaceRotationQuaternion;
            int offset = 30;
            int pitch, yaw, roll;
            quaternion2degree( quaternion, out pitch, out yaw, out roll );
            drawingText = "Pitch, Yaw, Roll : " + pitch.ToString() + ", " + yaw.ToString() + ", " + roll.ToString();
            FormattedText formattedText = new FormattedText( drawingText, CultureInfo.GetCultureInfo( "ja-JP" ), FlowDirection.LeftToRight, new Typeface( "Georgia" ), 25, drawingBrush );
            drawingContext.DrawText( formattedText, new Point( box.Left, box.Bottom + offset ) );

            //Properties
            if ( faceResult.FaceProperties!=null ) {
                foreach ( var item in faceResult.FaceProperties ) {
                    drawingText = item.Key.ToString();
                    switch ( item.Value ) {
                    case DetectionResult.Yes:
                        drawingText += " : Yes";
                        break;
                    case DetectionResult.Maybe:
                        drawingText += " : Maybe";
                        break;
                    case DetectionResult.No:
                        drawingText += " : No";
                        break;
                    case DetectionResult.Unknown:
                        drawingText += " : Unknown";
                        break;
                    default:
                        break;
                    }
                    offset += 30;
                    formattedText = new FormattedText( drawingText, CultureInfo.GetCultureInfo( "ja-JP" ), FlowDirection.LeftToRight, new Typeface( "Georgia" ), 25, drawingBrush );
                    drawingContext.DrawText( formattedText, new Point( box.Left, box.Bottom+offset ) );
                }
            }
        }
Exemplo n.º 27
0
        private void DrawFaceFrameResults(int faceIndex, FaceFrameResult faceResult, DrawingContext drawingContext)
        {
            // Choose the brush based on the face index
            Brush drawingBrush = this.faceBrush[0];

            if (faceIndex < this.bodyCount)
            {
                drawingBrush = this.faceBrush[faceIndex];
            }

            Pen drawingPen = new Pen(drawingBrush, drawFaceShapeThickness);

            // Draw the face bounding box
            var faceBoxSource = faceResult.FaceBoundingBoxInColorSpace;
            Rect faceBox = new Rect(faceBoxSource.Left, faceBoxSource.Top, faceBoxSource.Right - faceBoxSource.Left, faceBoxSource.Bottom - faceBoxSource.Top);
            drawingContext.DrawRectangle(null, drawingPen, faceBox);

            if (faceResult.FacePointsInColorSpace != null)
            {
                // Draw each face point
                foreach (PointF pointF in faceResult.FacePointsInColorSpace.Values)
                {
                    drawingContext.DrawEllipse(drawingBrush, drawingPen, new Point(pointF.X, pointF.Y), facePointRadius, facePointRadius);
                }
            }

            string faceText = string.Empty;

            // Extract each face property information and store it in faceText
            if (faceResult.FaceProperties != null)
            {
                faceText += "Engaged: " + faceResult.FaceProperties[FaceProperty.Engaged].ToString() + "\n";
                faceText += "Looking Away: " + faceResult.FaceProperties[FaceProperty.LookingAway].ToString() + "\n";
            }

            // Extract the face rotation in degrees
            if (faceResult.FaceRotationQuaternion != null)
            {
                int pitch, yaw, roll;
                ExtractFaceRotationInDegrees(faceResult.FaceRotationQuaternion, out pitch, out yaw, out roll);
                faceText += "Face Yaw : " + yaw + "\n" +
                            "Face Pitch : " + pitch + "\n" +
                            "Face Roll : " + roll + "\n";
            }

            drawingContext.DrawText(
                    new FormattedText(
                        faceText,
                        CultureInfo.GetCultureInfo("en-us"),
                        System.Windows.FlowDirection.LeftToRight,
                        new Typeface("Segoe UI"),
                        drawTextFontSize,
                        drawingBrush),
                    new Point(
                        faceBox.BottomLeft.X,
                        faceBox.BottomLeft.Y + textLayoutOffsetY));
        }
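
Several of these examples index a faceBrush collection by body index. One possible initialization (e.g. in the window constructor), following the pattern of the stock sample; the exact colors and the List<Brush> type are assumptions:

        this.bodyCount = this.kinectSensor.BodyFrameSource.BodyCount;

        // one brush per body index so each tracked face keeps a stable color
        this.faceBrush = new List<Brush>()
        {
            Brushes.White,
            Brushes.Orange,
            Brushes.Green,
            Brushes.Red,
            Brushes.LightBlue,
            Brushes.Yellow
        };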
Exemplo n.º 28
0
        /// <summary>
        /// Validates face bounding box and face points to be within screen space
        /// </summary>
        /// <param name="faceResult">the face frame result containing face box and points</param>
        /// <returns>success or failure</returns>
        private bool ValidateFaceBoxAndPoints(FaceFrameResult faceResult)
        {
            bool isFaceValid = faceResult != null;

            if (isFaceValid)
            {
                var faceBox = faceResult.FaceBoundingBoxInColorSpace;
                if (faceBox != null)
                {
                    // check if we have a valid rectangle within the bounds of the screen space
                    isFaceValid = (faceBox.Right - faceBox.Left) > 0 &&
                                  (faceBox.Bottom - faceBox.Top) > 0 &&
                                  faceBox.Right <= this.displayWidth &&
                                  faceBox.Bottom <= this.displayHeight;

                    if (isFaceValid)
                    {
                        var facePoints = faceResult.FacePointsInColorSpace;
                        if (facePoints != null)
                        {
                            foreach (PointF pointF in facePoints.Values)
                            {
                                // check if we have a valid face point within the bounds of the screen space
                                bool isFacePointValid = pointF.X > 0.0f &&
                                                        pointF.Y > 0.0f &&
                                                        pointF.X < this.displayWidth &&
                                                        pointF.Y < this.displayHeight;

                                if (!isFacePointValid)
                                {
                                    isFaceValid = false;
                                    break;
                                }
                            }
                        }
                    }
                }
            }

            return isFaceValid;
        }
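
A sketch of how this validation is typically consumed inside the frame handler, assuming faceFrameSources, faceFrameResults, a drawingGroup, and a colorBitmap field as in the other examples:

        using (DrawingContext dc = this.drawingGroup.Open())
        {
            // draw the color frame first, then overlay each valid face result
            dc.DrawImage(this.colorBitmap, new Rect(0, 0, this.displayWidth, this.displayHeight));

            for (int i = 0; i < this.bodyCount; i++)
            {
                if (this.faceFrameSources[i].IsTrackingIdValid &&
                    this.ValidateFaceBoxAndPoints(this.faceFrameResults[i]))
                {
                    this.DrawFaceFrameResults(i, this.faceFrameResults[i], dc);
                }
            }
        }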
Exemplo n.º 29
0
        /// <summary>
        /// Update the FaceFeatureTrackers
        /// </summary>
        /// <param name="frameResult">Face tracking frame</param>
        private void UpdateTrackers(FaceFrameResult frameResult)
        {
            // Loop all trackers
            foreach (FaceProperty feature in _featureAnalytics.Keys)
            {
                // Track the detection results
                _featureAnalytics[feature].Track(frameResult.FaceProperties[feature]);
            }
        }
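
The _featureAnalytics dictionary maps each FaceProperty to a tracker object that is not shown here. A hypothetical minimal tracker that just accumulates detection counts per feature could look like this:

        public class FaceFeatureTracker
        {
            private int detectedFrames;
            private int totalFrames;

            // record one frame's detection result for this feature
            public void Track(DetectionResult result)
            {
                this.totalFrames++;
                if (result == DetectionResult.Yes)
                {
                    this.detectedFrames++;
                }
            }

            // fraction of frames in which the feature was positively detected
            public double DetectionRatio
            {
                get { return this.totalFrames == 0 ? 0.0 : (double)this.detectedFrames / this.totalFrames; }
            }
        }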