예제 #1
0
        // HD face frame handler: accumulates the left/right eyebrow-lowerer animation
        // units into 30-sample ring buffers, keeps the previous and current 30-sample
        // averages, and raises OnEyebrowsDrawnUpArrived after the brow movement has
        // exceeded a tolerance for more than 350 iterations.
        // NOTE(review): the delta/diff/tolerance logic below runs once per animation
        // unit (it is inside the foreach), not once per frame — confirm that is
        // intended, since it advances browToleranceCount many times per frame.
        void _hdReader_FrameArrived(HighDefinitionFaceFrameReader sender, HighDefinitionFaceFrameArrivedEventArgs args)
        {
            using (var hdFaceFrame = args.FrameReference.AcquireFrame())
            {
                // Frame is null when the acquire window was missed; TrackingId == 0
                // means no body is currently associated with the HD face source.
                if (hdFaceFrame != null && _hdSource.TrackingId != 0)
                {
                    hdFaceFrame.GetAndRefreshFaceAlignmentResult(this._faceAlignment);
                    var animationUnits = this._faceAlignment.AnimationUnits;

                    // Only trust the animation units when the alignment quality is high.
                    if (_faceAlignment.Quality == FaceAlignmentQuality.High)
                    {
                        foreach (var animUnit in animationUnits)
                        {
                            if (animUnit.Key == FaceShapeAnimations.LefteyebrowLowerer)
                            {
                                _leftBrow[ndx] = animUnit.Value;
                            }

                            if (animUnit.Key == FaceShapeAnimations.RighteyebrowLowerer)
                            {
                                _rightBrow[ndx] = animUnit.Value;
                            }
                            // ndx cycles 0..29 over the two ring buffers.
                            ndx++;
                            if (ndx == 30)
                            {
                                ndx = 0;
                                //get average brow movements
                                var leftBrowMovementSum  = 0.0f;
                                var rightBrowMovementSum = 0.0f;
                                for (int i = 0; i < 30; i++)
                                {
                                    leftBrowMovementSum  += _leftBrow[i];
                                    rightBrowMovementSum += _rightBrow[i];
                                }
                                // Shift the previous average into slot 0, store the new
                                // 30-sample average in slot 1.
                                _rightBrowDelta[0] = _rightBrowDelta[1];
                                _leftBrowDelta[0]  = _leftBrowDelta[1];
                                _rightBrowDelta[1] = rightBrowMovementSum / 30;
                                _leftBrowDelta[1]  = leftBrowMovementSum / 30;
                            }

                            // Absolute difference of the squared averages — emphasises
                            // larger brow positions over small ones.
                            var rightBrowDiff = Math.Abs(_rightBrowDelta[1] * _rightBrowDelta[1] - _rightBrowDelta[0] * _rightBrowDelta[0]);
                            var leftBrowDiff  = Math.Abs(_leftBrowDelta[1] * _leftBrowDelta[1] - _leftBrowDelta[0] * _leftBrowDelta[0]);

                            if (leftBrowDiff > 0.015 && rightBrowDiff > 0.015)
                            {
                                browToleranceCount++;
                                // Debounce: only fire once the condition has held for
                                // more than 350 iterations, then reset the counter.
                                if (browToleranceCount > 350)
                                {
                                    OnEyebrowsDrawnUpArrived(new EyebrowsDrawnUpArrivedEventArgs()
                                    {
                                        Confidence = 1.0f
                                    });
                                    browToleranceCount = 0;
                                }
                            }
                        }
                    }
                }
            }
        }
예제 #2
0
        // HD face frame handler: refreshes the shared face alignment and hands the
        // animation units to the worker, stamped with the frame's arrival time.
        // On loss of tracking it still notifies the worker (tracked: false).
        static void FaceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            // Capture the arrival time up front so the background write below is
            // stamped with when the frame arrived, not when the task actually ran.
            // NOTE(review): DateTime.Now.TimeOfDay wraps at midnight — confirm the
            // consumer only needs intra-day timing.
            TimeSpan milliseconds = DateTime.Now.TimeOfDay;

            using (var _faceFrame = e.FrameReference.AcquireFrame())
            {
                if (_faceFrame != null && _faceFrame.IsFaceTracked)
                {
                    if (!trackingSuccess)
                    {
                        Console.WriteLine("started face tracking..");
                        trackingSuccess = true;
                    }
                    _faceFrame.GetAndRefreshFaceAlignmentResult(_faceAlignment);

                    // Task.Run instead of Task.Factory.StartNew: StartNew without an
                    // explicit scheduler runs on TaskScheduler.Current, which may not
                    // be the thread pool (CA2008); Task.Run always uses the pool.
                    // NOTE(review): _faceAlignment is a shared field the next frame
                    // will refresh while this task may still read it — confirm
                    // WriteAnimationUnits copies what it needs promptly.
                    Task.Run(() =>
                    {
                        _worker.WriteAnimationUnits(_faceAlignment, milliseconds);
                    });
                }
                else
                {
                    if (trackingSuccess)
                    {
                        Console.WriteLine("tracking is lost..");
                        trackingSuccess = false;
                    }
                    // Synchronous write on tracking loss, flagged as untracked.
                    _worker.WriteAnimationUnits(_faceAlignment, milliseconds, false);
                }
            }
        }
예제 #3
0
 // HD face frame handler: kicks off face-model data collection exactly once.
 // Fix: the original never set m_faceBuilderStarted, so BeginFaceDataCollection
 // was re-issued on every arriving frame; the flag is now set after starting.
 void m_hdFaceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
 {
     if (!m_faceBuilderStarted)
     {
         m_faceBuilder.BeginFaceDataCollection();
         m_faceBuilderStarted = true;
     }
 }
예제 #4
0
        // HD face frame handler: refreshes the face alignment, projects the face model
        // vertices into depth space, publishes them on the face state, and raises
        // OnFaceChanged.
        private void OnFaceReaderHighDefFrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            using (HighDefinitionFaceFrame frame = e.FrameReference.AcquireFrame())
            {
                // Frame is null when the acquire window was missed.
                if (frame != null && frame.IsFaceTracked)
                {
                    frame.GetAndRefreshFaceAlignmentResult(_faceAlignment);

                    if (_faceModel != null && _sensor != null)
                    {
                        CameraSpacePoint[] cameraSpacePoints = _faceModel.CalculateVerticesForAlignment(_faceAlignment).ToArray();
                        DepthSpacePoint[]  depthSpacePoints  = new DepthSpacePoint[cameraSpacePoints.Length];

                        // Mapping an empty array is pointless; guard keeps the mapper call
                        // to the non-trivial case only.
                        if (cameraSpacePoints.Length > 0)
                        {
                            _sensor.CoordinateMapper.MapCameraPointsToDepthSpace(cameraSpacePoints, depthSpacePoints);
                        }

                        _faceState.Points = depthSpacePoints.ConvertToPointF();

                        // Null-conditional invoke: the original "!= null then call"
                        // pattern could NRE if the last handler unsubscribed between
                        // the check and the call.
                        this.OnFaceChanged?.Invoke(sender, _faceState);
                    }
                }
            }
        }
예제 #5
0
    // HD face frame handler: for high-quality, tracked frames, builds a KinectFace
    // CommandMessage carrying the (DESP-smoothed) face orientation and per-animation-
    // unit expression weights, optionally records it, and sends it via BeginWriteCommand.
    private void HighDefinitionFaceFrameReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
    {
        using (var frame = e.FrameReference.AcquireFrame())
        {
            // Only process frames with a valid tracking id, a tracked face, and
            // high alignment quality; everything else is silently dropped.
            if (frame != null && frame.IsTrackingIdValid && frame.IsFaceTracked &&
                frame.FaceAlignmentQuality == FaceAlignmentQuality.High)
            {
                frame.GetAndRefreshFaceAlignmentResult(FaceAlignment);

                // Fresh command with one weight slot per known facial expression.
                var command = new CommandMessage
                {
                    CommandType = CommandType.KinectFace,
                    KinectFace  =
                        new MoCapFaceFrame
                    {
                        ExpressionWeights = new float[(int)MoCapKinectFacialExpression.Count]
                    }
                };

                // Seconds since recording started.
                // NOTE(review): DateTime.Now is wall-clock and can jump (DST/NTP);
                // a monotonic source would be steadier — confirm before changing.
                var time = (float)(DateTime.Now - MoCapRecordStartTime).TotalSeconds;

                var rotationX = FaceAlignment.FaceOrientation.X;
                var rotationY = FaceAlignment.FaceOrientation.Y;
                var rotationZ = FaceAlignment.FaceOrientation.Z;
                var rotationW = FaceAlignment.FaceOrientation.W;

                // Face transform: rotation only; position/scale are identity.
                var transform = new TransformTime();
                transform.Time     = time;
                transform.Position = Vector3.Zero;
                transform.Rotation = new Quaternion(rotationX, rotationY, rotationZ, rotationW);
                transform.Scale    = Vector3.One;

                // Smooth the raw rotation through the double-exponential predictor.
                transform.Rotation = FaceDESP.Predict(transform.Rotation);

                command.KinectFace.FaceTransform = transform;

                // Feed each available animation unit into its per-expression smoother
                // and store the one-step prediction as the outgoing weight.
                foreach (FaceShapeAnimations faceShapeAnimation in Enum.GetValues(typeof(FaceShapeAnimations)))
                {
                    float weight;

                    if (FaceAlignment.AnimationUnits.TryGetValue(faceShapeAnimation, out weight))
                    {
                        FaceExpressionDESP[(int)faceShapeAnimation].Update(weight);

                        command.KinectFace.ExpressionWeights[(int)faceShapeAnimation] =
                            FaceExpressionDESP[(int)faceShapeAnimation].Predict(1);
                    }
                }

                // Recording keeps a copy of the frame before it is written out.
                if (IsRecording)
                {
                    FaceFrames.Add(command.KinectFace);
                }

                BeginWriteCommand(command);
            }
        }
    }
예제 #6
0
        // HD face frame handler: refreshes the shared face alignment.
        // Fix: the original never disposed the acquired frame; leaked frames exhaust
        // the sensor's internal frame pool and stall further FrameArrived events.
        // NOTE(review): faceFrame is a field — confirm no other member reads it after
        // this handler returns, since the frame is now disposed on exit.
        private void HdFaceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            using (faceFrame = e.FrameReference.AcquireFrame())
            {
                if (faceFrame == null || !faceFrame.IsFaceTracked)
                {
                    return;
                }

                faceFrame.GetAndRefreshFaceAlignmentResult(this.faceAlignment);
            }
        }
예제 #7
0
 // HD face frame handler: refreshes the shared alignment and redraws the face points.
 private void FaceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
 {
     using (var faceFrame = e.FrameReference.AcquireFrame())
     {
         // A missed (null) frame or an untracked face carries no usable data.
         if (faceFrame == null || !faceFrame.IsFaceTracked)
         {
             return;
         }

         faceFrame.GetAndRefreshFaceAlignmentResult(_faceAlignment);
         UpdateFacePoints();
     }
 }
예제 #8
0
        // HD face frame handler: refreshes the alignment and redraws the face points.
        // Fix: the original never disposed the acquired frame; undisposed frames
        // exhaust the sensor's frame pool and stop further frames from arriving.
        private void _FaceFrameHandler(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            using (HighDefinitionFaceFrame faceFrame = e.FrameReference.AcquireFrame())
            {
                // Checks face is tracked from body frame handler
                if (faceFrame != null && faceFrame.IsFaceTracked)
                {
                    faceFrame.GetAndRefreshFaceAlignmentResult(_faceAlignment);

                    _DrawFacePoints();
                }
            }
        }
예제 #9
0
        // HD face frame handler: serializes the current face alignment as one line of
        // JSON (timestamp + tracking id + alignment) and broadcasts it to all
        // connected face clients. Does nothing when no clients are connected.
        private void onFaceFrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            // Return if there are no face clients.
            if (!this.faceConnector.HasClients)
            {
                return;
            }

            // Retrieve face data for current frame.
            var frame = e.FrameReference.AcquireFrame();

            if (frame == null)
            {
                return;
            }

            // using-on-expression guarantees the frame is released on every path below.
            using (frame)
            {
                // Ignore untracked faces.
                if (!frame.IsTrackingIdValid)
                {
                    return;
                }
                if (!frame.IsFaceTracked)
                {
                    return;
                }

                // Record the current Unix epoch timestamp and convert it to a byte array for serialization.
                long timestamp = DateTimeOffset.Now.ToUnixTimeMilliseconds();

                // Retrieve face alignment data.
                // A fresh FaceAlignment per frame means the serialized snapshot cannot
                // be mutated by a later frame while it is being sent.
                var faceAlignment = new FaceAlignment();
                frame.GetAndRefreshFaceAlignmentResult(faceAlignment);

                // Combine the body array with a timestamp.
                Dictionary <string, object> faceJson = new Dictionary <string, object> {
                    { "Time", timestamp },
                    { "TrackingId", frame.HighDefinitionFaceFrameSource.TrackingId },
                    { "Alignment", faceAlignment },
                };

                // Send face data to clients.
                // Newline-delimited JSON: each broadcast is exactly one line on the wire.
                // NOTE(review): ASCII encoding mangles any non-ASCII characters the
                // serializer might emit — confirm the resolver output is ASCII-only,
                // or switch both ends to UTF-8.
                string json = JsonConvert.SerializeObject(faceJson,
                                                          new JsonSerializerSettings {
                    ContractResolver = new FaceContractResolver()
                }) + "\n";
                byte[] bytes = System.Text.Encoding.ASCII.GetBytes(json);
                this.faceConnector.Broadcast(bytes);
            }
        }
예제 #10
0
        // HD face frame handler: refreshes the alignment, recomputes the face-model
        // vertices, copies them to the vertex output pin and publishes the frame time.
        // Cleanup: removed a large slab of commented-out bounding-box/orientation code
        // (dead since it was disabled) that referenced APIs unused by this handler.
        void faceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            using (HighDefinitionFaceFrame frame = e.FrameReference.AcquireFrame())
            {
                if (frame != null)
                {
                    // Skip frames whose tracking id is no longer valid.
                    if (frame.IsTrackingIdValid == false)
                    {
                        return;
                    }
                    frame.GetAndRefreshFaceAlignmentResult(this.faceAlignment);
                    var vertices = this.faceModel.CalculateVerticesForAlignment(this.faceAlignment);

                    // Copy each camera-space vertex to the output as a Vector3.
                    for (int i = 0; i < vertices.Count; i++)
                    {
                        var v = vertices[i];
                        this.FOutVertices[i] = new Vector3(v.X, v.Y, v.Z);
                    }

                    // Publish the frame's relative time (ticks) as the frame number.
                    this.FOutFrameNumber[0] = frame.RelativeTime.Ticks;
                }
            }
        }
        /// <summary>
        /// This event is fired when a new HDFace frame is ready for consumption
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void HdFaceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            using (var frame = e.FrameReference.AcquireFrame())
            {
                // A null frame means the acquire window was missed; an untracked face
                // carries no usable alignment data.
                bool usable = frame != null && frame.IsFaceTracked;

                if (usable)
                {
                    // Refresh the shared alignment, then rebuild the displayed mesh.
                    frame.GetAndRefreshFaceAlignmentResult(this.currentFaceAlignment);
                    this.UpdateMesh();
                }
            }
        }
예제 #12
0
 // HD face frame handler; ignored entirely while playback is paused.
 private void FaceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
 {
     if (_isPaused)
     {
         return;
     }

     using (var faceFrame = e.FrameReference.AcquireFrame())
     {
         // Nothing to do for a missed (null) or untracked frame.
         if (faceFrame == null || !faceFrame.IsFaceTracked)
         {
             return;
         }

         // Remember when we last saw a tracked face, then refresh the shared
         // alignment and recompute the model vertices from it.
         _lastFaceFrame = DateTime.Now;
         faceFrame.GetAndRefreshFaceAlignmentResult(_faceAlignment);
         _faceVertices = _faceModel.CalculateVerticesForAlignment(_faceAlignment);
     }
 }
예제 #13
0
        /// <summary>
        /// Handles the event raised when a new FaceFrame becomes available.
        /// </summary>
        private void OnFaceFrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            using (var frame = e.FrameReference.AcquireFrame())
            {
                if (frame != null && frame.IsFaceTracked)
                {
                    // Refresh the face alignment and rebuild the mesh from it.
                    frame.GetAndRefreshFaceAlignmentResult(this.faceAlignment);
                    UpdateMesh();

                    // Notify listeners that the animation units have changed.
                    OnPropertyChanged("AnimationUnits");
                }
            }
        }
예제 #14
0
        /// <summary>
        /// Tarea a realizar por alumno: procesar frame facial.
        /// (Student task: process the incoming facial frame.)
        /// Fix: this XML doc comment originally sat inside the method body, where the
        /// compiler ignores it; it belongs on the member declaration.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void FaceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            using (var frame = e.FrameReference.AcquireFrame())
            {
                // Skip missed (null) or untracked frames.
                if (frame != null && frame.IsFaceTracked)
                {
                    frame.GetAndRefreshFaceAlignmentResult(_faceAlignment);
                    RenderFacePoints();
                }
            }
        }
예제 #15
0
 // HD face frame handler: refreshes the alignment and raises HdFrameReceived.
 // Fixes: removed an explicit frame.Dispose() inside the using statement (the
 // using already disposes the frame, so it was disposed twice), and switched the
 // event raise to ?.Invoke to avoid the check/call race of "!= null then call".
 private void FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
 {
     using (HighDefinitionFaceFrame frame = e.FrameReference.AcquireFrame())
     {
         if (frame != null)
         {
             // Skip frames whose tracking id is no longer valid.
             if (frame.IsTrackingIdValid == false)
             {
                 return;
             }

             frame.GetAndRefreshFaceAlignmentResult(this.faceAlignment);

             this.HdFrameReceived?.Invoke(this, new HdFaceFrameResultEventArgs(this.TrackingId, this.faceModel, this.faceAlignment));
         }
     }
 }
예제 #16
0
        // HD face frame handler: refreshes the shared face alignment.
        // Cleanup: the original also iterated the animation units, but every statement
        // inside that loop was commented out (remnants of an abandoned eyebrow-tracking
        // experiment), so the no-op loop, its quality gate and the dead code were removed.
        void _hdReader_FrameArrived(HighDefinitionFaceFrameReader sender, HighDefinitionFaceFrameArrivedEventArgs args)
        {
            using (var hdFaceFrame = args.FrameReference.AcquireFrame())
            {
                // Frame is null when the acquire window was missed; TrackingId == 0
                // means no body is currently associated with the HD face source.
                if (hdFaceFrame != null && _hdSource.TrackingId != 0)
                {
                    hdFaceFrame.GetAndRefreshFaceAlignmentResult(this._faceAlignment);
                }
            }
        }
예제 #17
0
        // HD face frame handler: publishes the alignment quality and face orientation,
        // then recomputes the face vertices, maps them to color space and updates the
        // bounds — unless the optional rotation sanity check rejects a degenerate
        // quaternion.
        // Cleanup: the vertex/mapping/bounds sequence was duplicated verbatim in both
        // branches of the FInRCheck conditional; it is now written once behind a
        // combined condition with identical short-circuit behavior.
        void faceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            using (HighDefinitionFaceFrame frame = e.FrameReference.AcquireFrame())
            {
                if (frame != null)
                {
                    this.alignmentQuality = frame.FaceAlignmentQuality.ToString();

                    // Skip invalid tracking ids and low-quality alignments.
                    if (frame.IsTrackingIdValid == false)
                    {
                        return;
                    }
                    if (frame.FaceAlignmentQuality == FaceAlignmentQuality.Low)
                    {
                        return;
                    }

                    frame.GetAndRefreshFaceAlignmentResult(this.faceAlignment);
                    var o = this.faceAlignment.FaceOrientation;
                    this.FOutOrientation[0] = new Quaternion(o.X, o.Y, o.Z, o.W);

                    // With the rotation check enabled, only accept orientations whose
                    // quaternion is non-degenerate; otherwise always update.
                    if (!this.FInRCheck[0] || this.FOutOrientation[0].LengthSquared() > 0.1f)
                    {
                        this.cameraPoints = this.faceModel.CalculateVerticesForAlignment(this.faceAlignment).ToArray();
                        this.runtime.Runtime.CoordinateMapper.MapCameraPointsToColorSpace(this.cameraPoints, this.colorPoints);
                        SetBounds();
                    }
                }
            }
        }
예제 #18
0
        // HD face frame handler: refreshes the shared alignment and forwards the frame
        // plus alignment to the body sender. Any failure is logged and swallowed so a
        // bad frame never takes down the reader.
        private void Reader_FaceFrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            try
            {
                // using handles a null frame gracefully and disposes on every path.
                using (var frame = e.FrameReference.AcquireFrame())
                {
                    if (frame == null)
                    {
                        return;
                    }

                    frame.GetAndRefreshFaceAlignmentResult(currentAlignment);
                    this.bodySender.Send(frame, currentAlignment);
                }
            }
            catch (Exception exception)
            {
                Console.WriteLine("Frame exception encountered... {0}", exception.Message);
            }
        }
        /// <summary>
        /// This event is fired when a new HDFace frame is ready for consumption.
        /// Updates the mesh, then estimates dropped frames from the gap between
        /// consecutive frame timestamps and appends the result to the TOPRINT log.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void HdFaceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            // Expected spacing between frames at the sensor's nominal ~30 fps, seconds.
            const double expectedFrameSeconds = 0.0333333333;

            using (var frame = e.FrameReference.AcquireFrame())
            {
                // We might miss the chance to acquire the frame; it will be null if it's missed.
                // Also ignore this frame if face tracking failed.
                if (frame == null || !frame.IsFaceTracked)
                {
                    return;
                }

                frame.GetAndRefreshFaceAlignmentResult(this.currentFaceAlignment);
                this.UpdateMesh();

                // (The original re-checked frame for null here; it cannot be null past
                // the guard above, so the redundant check was removed.)
                this.Clock.Add(frame.RelativeTime);

                int sizeofClock = this.Clock.Count;
                if (sizeofClock == 1)
                {
                    // First frame: no previous timestamp to diff against yet.
                    TOPRINT += "Frame " + totalFrames.ToString() + " NO DATA" + Environment.NewLine;
                }
                if (sizeofClock > 1)
                {
                    // Gap between the two oldest timestamps in expected-frame units,
                    // minus one for the frame actually received (truncated toward zero).
                    double dFramesDropped = ((Clock[1].TotalSeconds - Clock[0].TotalSeconds) / (expectedFrameSeconds)) - 1;
                    int    FramesDropped  = (int)dFramesDropped;
                    totalFrames += FramesDropped;
                    TOPRINT     += "Frame " + totalFrames.ToString() + Environment.NewLine;

                    if (FramesDropped >= 1)
                    {
                        TOPRINT += "Frames Dropped: " + FramesDropped.ToString() + Environment.NewLine;
                    }
                    Clock.RemoveAt(0);
                }
                totalFrames += 1;
            }
        }
예제 #20
0
    // HD face frame handler: refreshes the alignment, optionally dumps the animation
    // units to the debug log, and forwards them to the actor mesh blendshapes.
    private void HdFaceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
    {
        using (var frame = e.FrameReference.AcquireFrame())
        {
            // We might miss the chance to acquire the frame; it will be null if it's missed.
            // Also ignore this frame if face tracking failed.
            if (frame == null || !frame.IsFaceTracked)
            {
                return;
            }

            frame.GetAndRefreshFaceAlignmentResult(currentFaceAlignment);

            if (debugEnabled)
            {
                // Build the whole line first, then log once. The original called
                // Debug.Log inside the loop with the partially built string, producing
                // one log entry per animation unit with ever-growing repeated prefixes.
                string aus = "";
                foreach (FaceShapeAnimations key in currentFaceAlignment.AnimationUnits.Keys)
                {
                    aus += key.ToString();
                    aus += currentFaceAlignment.AnimationUnits[key].ToString("0.00") + "; ";
                }
                Debug.Log(aus);
            }

            if (actorMesh != null)
            {
                // Re-key the animation units by name for the blendshape updater.
                tempAus.Clear();
                foreach (FaceShapeAnimations key in currentFaceAlignment.AnimationUnits.Keys)
                {
                    tempAus.Add(key.ToString(), currentFaceAlignment.AnimationUnits[key]);
                }
                UpdateActormeshBlendshapes(tempAus);
            }
        }
    }
예제 #21
0
        /// <summary>
        /// Handles face frame updates: refreshes the alignment for tracked faces,
        /// restarts the face-model builder when the tracked face changes, and signals
        /// the worker that face data is ready.
        /// </summary>
        private void FaceFrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            // Tracking id of the face seen in this frame, if any.
            ulong?newTrackingId = null;

            using (var frame = e.FrameReference.AcquireFrame())
            {
                if (frame != null)
                {
                    if (frame.IsTrackingIdValid && frame.IsFaceTracked)
                    {
                        frame.GetAndRefreshFaceAlignmentResult(this.faceAlignment);
                        this.faceModel = frame.FaceModel;
                        newTrackingId  = frame.TrackingId;
                    }
                }
            }

            // When any processor needs a face-model builder and the tracked face
            // changed, tear down the old builder and start collecting for the new face.
            if (this.Processors.Any(x => x.RequiresFaceModelBuilder) && newTrackingId.HasValue && this.currentTrackingId != newTrackingId)
            {
                lock (this.processFaceModelMutex)
                {
                    this.currentTrackingId    = newTrackingId;
                    this.faceModel            = null;
                    this.constructedFaceModel = null;
                    this.DisposeFaceModelBuilder();
                    this.fmb = this.faceSource.OpenModelBuilder(FaceModelBuilderAttributes.HairColor | FaceModelBuilderAttributes.SkinColor);
                    this.fmb.BeginFaceDataCollection();
                    this.fmb.CollectionCompleted += this.FaceModelBuilderCollectionCompleted;
                }
            }

            // NOTE(review): lock (this) is an anti-pattern — any external code can take
            // the same monitor and deadlock this handler. A private lock object would
            // be safer, but confirm nothing else synchronizes on this instance first.
            lock (this)
            {
                this.faceReady = true;
                this.StartWorkerIfReady();
            }
        }
예제 #22
0
        /// <summary>
        /// This event is fired when a new HDFace frame is ready for consumption
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void HdFaceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            using (var hdFrame = e.FrameReference.AcquireFrame())
            {
                // Null means the acquire window was missed; an untracked face has no data.
                if (hdFrame == null || !hdFrame.IsFaceTracked)
                {
                    return;
                }

                hdFrame.GetAndRefreshFaceAlignmentResult(this.currentFaceAlignment);
                var units = this.currentFaceAlignment.AnimationUnits;

                // Only persist readings while capture is active.
                if (!collectData)
                {
                    return;
                }

                var reading = new FaceDataReading(true);

                //Format data from IReadOnlyDic -> Dictionary
                reading.AddData(units);
                fd.AddFaceData(reading);
            }
        }
예제 #23
0
        // HD face frame handler with a simple back-pressure scheme: a new frame is
        // only processed once the previous one has been fully handed off via
        // FaceFrameReady (processedLastFaceFrame). All state transitions happen under
        // faceFrameLock so this handler and the invoked callback cannot interleave.
        void faceFrameReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            lock (faceFrameLock)
            {
                if (processedLastFaceFrame)
                {
                    // Claim the slot before doing any work so further frame events
                    // are dropped until the hand-off below completes.
                    processedLastFaceFrame = false;
                    bool dataReceived = false;

                    using (HighDefinitionFaceFrame faceFrame = e.FrameReference.AcquireFrame())
                    {
                        if (faceFrame != null && faceFrame.IsFaceTracked)
                        {
                            faceFrame.GetAndRefreshFaceAlignmentResult(currentFaceAlignment);
                            dataReceived = true;
                        }
                    }

                    if (dataReceived && FaceFrameReady != null)
                    {
                        // Marshal the callback via ThreadManager; the inner lock
                        // re-acquires faceFrameLock before touching shared state and
                        // releases the slot only after the callback has run.
                        ThreadManager.invoke(() =>
                        {
                            lock (faceFrameLock)
                            {
                                FaceFrameReady.Invoke(currentFaceAlignment);
                                processedLastFaceFrame = true;
                            }
                        });
                    }
                    else
                    {
                        // Nothing was dispatched — release the slot immediately.
                        processedLastFaceFrame = true;
                    }
                }
            }
        }
예제 #24
0
        // Acquires the HD face frame, refreshes the alignment, then repaints the face
        // model and result overlay onto the drawing group.
        void UpdateHDFaceFrame(HighDefinitionFaceFrameArrivedEventArgs e)
        {
            using (var hdFaceFrame = e.FrameReference.AcquireFrame()) {
                // Bail out on a missed (null) frame or when no face is tracked.
                if (hdFaceFrame == null || !hdFaceFrame.IsFaceTracked)
                {
                    return;
                }

                hdFaceFrame.GetAndRefreshFaceAlignmentResult(faceAlignment);

                // Repaint: black background, face model, result overlay, then clip
                // everything to the display rectangle.
                using (var dc = drawingGroup.Open()) {
                    dc.DrawRectangle(Brushes.Black, null, displayRect);
                    BuildFaceModel(dc);
                    Result(dc);
                    drawingGroup.ClipGeometry = new RectangleGeometry(displayRect);
                }
            }
        }
        // HD face frame handler: refreshes the alignment, counts frames, and while
        // recording raises a property-change for the emotion units on every 30th frame.
        private void OnFaceFrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            using (var frame = e.FrameReference.AcquireFrame())
            {
                // Skip missed (null) or untracked frames.
                if (frame == null || !frame.IsFaceTracked)
                {
                    return;
                }

                // Update Face Alignment
                frame.GetAndRefreshFaceAlignmentResult(this.faceAlignment);

                MainWindow.FaceFrameCount++;

                // While recording, notify on every 30th frame, then reset the counter.
                bool shouldNotify = MainWindow.RecordStarted == true
                                    && MainWindow.FaceFrameCount % 30 == 0;
                if (shouldNotify)
                {
                    OnPropertyChanged("EmotionUnits");
                    MainWindow.FaceFrameCount = 1;
                }
            }
        }
        /// <summary>
        /// HD face frame handler: when a tracked face is present, draws either
        /// every face-model vertex or just the basic named landmarks onto the
        /// canvas, projected into infrared space.
        /// </summary>
        /// <param name="sender">The <c>HighDefinitionFaceFrameReader</c> raising the event.</param>
        /// <param name="args">Event args carrying the frame reference.</param>
        private void FaceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs args)
        {
            using (var frame = args.FrameReference.AcquireFrame())
            {
                // Guard: nothing to draw without a frame and a tracked face.
                if (frame == null || !frame.IsFaceTracked)
                {
                    return;
                }

                Face face = frame.Face();

                if (!_showAllPoints)
                {
                    // Basic landmarks only: project each named point into
                    // infrared space and center its marker element on it.
                    PointF ptEyeLeft    = face.EyeLeft.ToPoint(Visualization.Infrared);
                    PointF ptEyeRight   = face.EyeRight.ToPoint(Visualization.Infrared);
                    PointF ptCheekLeft  = face.CheekLeft.ToPoint(Visualization.Infrared);
                    PointF ptCheekRight = face.CheekRight.ToPoint(Visualization.Infrared);
                    PointF ptNose       = face.Nose.ToPoint(Visualization.Infrared);
                    PointF ptMouth      = face.Mouth.ToPoint(Visualization.Infrared);
                    PointF ptChin       = face.Chin.ToPoint(Visualization.Infrared);
                    PointF ptForehead   = face.Forehead.ToPoint(Visualization.Infrared);

                    Canvas.SetLeft(eyeLeft, ptEyeLeft.X - eyeLeft.Width / 2.0);
                    Canvas.SetTop(eyeLeft, ptEyeLeft.Y - eyeLeft.Height / 2.0);

                    Canvas.SetLeft(eyeRight, ptEyeRight.X - eyeRight.Width / 2.0);
                    Canvas.SetTop(eyeRight, ptEyeRight.Y - eyeRight.Height / 2.0);

                    Canvas.SetLeft(cheekLeft, ptCheekLeft.X - cheekLeft.Width / 2.0);
                    Canvas.SetTop(cheekLeft, ptCheekLeft.Y - cheekLeft.Height / 2.0);

                    Canvas.SetLeft(cheekRight, ptCheekRight.X - cheekRight.Width / 2.0);
                    Canvas.SetTop(cheekRight, ptCheekRight.Y - cheekRight.Height / 2.0);

                    Canvas.SetLeft(nose, ptNose.X - nose.Width / 2.0);
                    Canvas.SetTop(nose, ptNose.Y - nose.Height / 2.0);

                    Canvas.SetLeft(mouth, ptMouth.X - mouth.Width / 2.0);
                    Canvas.SetTop(mouth, ptMouth.Y - mouth.Height / 2.0);

                    Canvas.SetLeft(chin, ptChin.X - chin.Width / 2.0);
                    Canvas.SetTop(chin, ptChin.Y - chin.Height / 2.0);

                    Canvas.SetLeft(forehead, ptForehead.X - forehead.Width / 2.0);
                    Canvas.SetTop(forehead, ptForehead.Y - forehead.Height / 2.0);
                    return;
                }

                // All-points mode: lazily create one ellipse per model vertex
                // the first time through, then reposition them every frame.
                if (_ellipses.Count == 0)
                {
                    for (int i = 0; i < face.Vertices.Count; i++)
                    {
                        Ellipse dot = new Ellipse
                        {
                            Width  = 2.0,
                            Height = 2.0,
                            Fill   = new SolidColorBrush(Colors.Orange)
                        };

                        _ellipses.Add(dot);
                        canvas.Children.Add(dot);
                    }
                }

                for (int i = 0; i < face.Vertices.Count; i++)
                {
                    CameraSpacePoint vertex = face.Vertices[i];
                    PointF           pt     = vertex.ToPoint(Visualization.Infrared);

                    Ellipse dot = _ellipses[i];
                    Canvas.SetLeft(dot, pt.X - dot.Width / 2.0);
                    Canvas.SetTop(dot, pt.Y - dot.Height / 2.0);
                }
            }
        }
        /// <summary>
        /// HD face frame handler: refreshes the face alignment, computes the
        /// inter-pupillary distance (IPD) from the fitted face model, optionally
        /// accumulates an averaged measurement while a timed capture is running,
        /// and repositions the landmark markers on the canvas (depth space).
        /// </summary>
        /// <param name="sender">The <c>HighDefinitionFaceFrameReader</c> raising the event.</param>
        /// <param name="args">Event args carrying the frame reference.</param>
        private void FaceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs args)
        {
            using (var frame = args.FrameReference.AcquireFrame())
            {
                if (frame != null && frame.IsFaceTracked)
                {
                    // Bail out until both the face model and alignment exist.
                    if (_faceModel == null || _faceAlignment == null)
                    {
                        return;
                    }

                    frame.GetAndRefreshFaceAlignmentResult(_faceAlignment);
                    var faceVertices = _faceModel.CalculateVerticesForAlignment(_faceAlignment);

                    var cspLeftEye  = GetLeftEye(faceVertices);
                    var cspRightEye = GetRightEye(faceVertices);

                    // Camera-space distances are in meters; scale to millimeters.
                    var ipd = cspLeftEye.Distance(cspRightEye) * 1000;
                    if (_isMeasuring)
                    {
                        _collectedMeasurements.Add(ipd);
                        var avg = _collectedMeasurements.Average();
                        this.myText.Text = avg.ToString("0.000");
                        pbMeasure.Value++;
                        // FIX: ProgressBar.Value and Maximum are doubles, so an exact
                        // == comparison can be missed by floating-point drift and leave
                        // the measurement running forever; >= terminates reliably.
                        if (pbMeasure.Value >= pbMeasure.Maximum)
                        {
                            _isMeasuring  = false;
                            finalIPD.Text = avg.ToString("0.000");
                            spTalkToDevicePortal.Visibility = Visibility.Visible;
                        }
                    }
                    else
                    {
                        // Not capturing: just show the live per-frame IPD.
                        this.myText.Text = ipd.ToString("0.000");
                    }

                    // Map the eyes plus fixed model vertex indices (cheeks, nose,
                    // mouth, chin, forehead) into depth space for display.
                    // NOTE(review): the vertex indices appear to be HighDetailFacePoints
                    // constants hard-coded as ints — confirm against the Kinect SDK.
                    var pointEyeLeft    = _coordinateMapper.MapCameraPointToDepthSpace(cspLeftEye);
                    var pointEyeRight   = _coordinateMapper.MapCameraPointToDepthSpace(cspRightEye);
                    var pointCheekLeft  = _coordinateMapper.MapCameraPointToDepthSpace(faceVertices[412]);
                    var pointCheekRight = _coordinateMapper.MapCameraPointToDepthSpace(faceVertices[933]);
                    var pointNose       = _coordinateMapper.MapCameraPointToDepthSpace(faceVertices[18]);
                    var pointMouth      = _coordinateMapper.MapCameraPointToDepthSpace(faceVertices[10]);
                    var pointChin       = _coordinateMapper.MapCameraPointToDepthSpace(faceVertices[4]);
                    var pointForehead   = _coordinateMapper.MapCameraPointToDepthSpace(faceVertices[28]);

                    // Center each marker element on its projected point.
                    Canvas.SetLeft(eyeLeft, pointEyeLeft.X - eyeLeft.Width / 2.0);
                    Canvas.SetTop(eyeLeft, pointEyeLeft.Y - eyeLeft.Height / 2.0);

                    Canvas.SetLeft(eyeRight, pointEyeRight.X - eyeRight.Width / 2.0);
                    Canvas.SetTop(eyeRight, pointEyeRight.Y - eyeRight.Height / 2.0);

                    Canvas.SetLeft(cheekLeft, pointCheekLeft.X - cheekLeft.Width / 2.0);
                    Canvas.SetTop(cheekLeft, pointCheekLeft.Y - cheekLeft.Height / 2.0);

                    Canvas.SetLeft(cheekRight, pointCheekRight.X - cheekRight.Width / 2.0);
                    Canvas.SetTop(cheekRight, pointCheekRight.Y - cheekRight.Height / 2.0);

                    Canvas.SetLeft(nose, pointNose.X - nose.Width / 2.0);
                    Canvas.SetTop(nose, pointNose.Y - nose.Height / 2.0);

                    Canvas.SetLeft(mouth, pointMouth.X - mouth.Width / 2.0);
                    Canvas.SetTop(mouth, pointMouth.Y - mouth.Height / 2.0);

                    Canvas.SetLeft(chin, pointChin.X - chin.Width / 2.0);
                    Canvas.SetTop(chin, pointChin.Y - chin.Height / 2.0);

                    Canvas.SetLeft(forehead, pointForehead.X - forehead.Width / 2.0);
                    Canvas.SetTop(forehead, pointForehead.Y - forehead.Height / 2.0);
                }
            }
        }
예제 #28
0
        /// <summary>
        /// HD face frame handler driving the presentation-coaching state machine:
        /// positions the landmark markers, records head-tilt samples during the
        /// setup steps, and during <c>FaceState.Presentation</c> raises posture
        /// alerts (head tilt, static gaze, shoulder rotation) and shows a timer.
        /// The composed status message is written to <c>tblFaceStatus</c>.
        /// </summary>
        /// <param name="sender">The <c>HighDefinitionFaceFrameReader</c> raising the event.</param>
        /// <param name="args">Event args carrying the frame reference.</param>
        private void FaceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs args)
        {
            using (var frame = args.FrameReference.AcquireFrame())
            {
                string output = "";
                if (frame != null && frame.IsFaceTracked && currBodyState == BodyState.Ready)
                {
                    // Prepare to enter the setup chain in the state diagram
                    if (currFaceState == FaceState.FaceWait)
                    {
                        currFaceState = FaceState.NeutralWait;
                    }

                    // Display basic points only.
                    Face face = frame.Face();

                    Point pointEyeLeft    = face.EyeLeft.ToPoint(Visualization.Infrared);
                    Point pointEyeRight   = face.EyeRight.ToPoint(Visualization.Infrared);
                    Point pointCheekLeft  = face.CheekLeft.ToPoint(Visualization.Infrared);
                    Point pointCheekRight = face.CheekRight.ToPoint(Visualization.Infrared);
                    Point pointNose       = face.Nose.ToPoint(Visualization.Infrared);
                    Point pointMouth      = face.Mouth.ToPoint(Visualization.Infrared);
                    Point pointChin       = face.Chin.ToPoint(Visualization.Infrared);
                    Point pointForehead   = face.Forehead.ToPoint(Visualization.Infrared);

                    Canvas.SetLeft(eyeLeft, pointEyeLeft.X - eyeLeft.Width / 2.0);
                    Canvas.SetTop(eyeLeft, pointEyeLeft.Y - eyeLeft.Height / 2.0);

                    Canvas.SetLeft(eyeRight, pointEyeRight.X - eyeRight.Width / 2.0);
                    Canvas.SetTop(eyeRight, pointEyeRight.Y - eyeRight.Height / 2.0);

                    Canvas.SetLeft(cheekLeft, pointCheekLeft.X - cheekLeft.Width / 2.0);
                    Canvas.SetTop(cheekLeft, pointCheekLeft.Y - cheekLeft.Height / 2.0);

                    Canvas.SetLeft(cheekRight, pointCheekRight.X - cheekRight.Width / 2.0);
                    Canvas.SetTop(cheekRight, pointCheekRight.Y - cheekRight.Height / 2.0);

                    Canvas.SetLeft(nose, pointNose.X - nose.Width / 2.0);
                    Canvas.SetTop(nose, pointNose.Y - nose.Height / 2.0);

                    Canvas.SetLeft(mouth, pointMouth.X - mouth.Width / 2.0);
                    Canvas.SetTop(mouth, pointMouth.Y - mouth.Height / 2.0);

                    Canvas.SetLeft(chin, pointChin.X - chin.Width / 2.0);
                    Canvas.SetTop(chin, pointChin.Y - chin.Height / 2.0);

                    Canvas.SetLeft(forehead, pointForehead.X - forehead.Width / 2.0);
                    Canvas.SetTop(forehead, pointForehead.Y - forehead.Height / 2.0);

                    // State handling to determine what to print to message area.
                    // Vertical tilt is sampled as chin depth minus forehead depth;
                    // horizontal turn as right-cheek depth minus left-cheek depth.
                    switch (currFaceState)
                    {
                    case FaceState.NeutralWait:
                        // FIX: "degress" -> "degrees" typos corrected in the user-facing
                        // prompts below; no other message text changed.
                        output += "Step 2/6: Position your head in neutral forward-facing position.\n"
                                  + "Hold for at least " + RECOMMEND_TIME + " seconds, then press OK.\n";
                        faceSamples[sampleInd] = Math.Round(face.Chin.Z - face.Forehead.Z, 4);
                        break;

                    case FaceState.UpTiltWait:
                        output += "Step 3/6: Now tilt your head about 45 degrees upwards.\n"
                                  + "Hold for at least " + RECOMMEND_TIME + " seconds, then press OK.\n";
                        faceSamples[sampleInd] = Math.Round(face.Chin.Z - face.Forehead.Z, 4);
                        break;

                    case FaceState.DownTiltWait:
                        output += "Step 4/6: Now tilt your head about 45 degrees downwards.\n"
                                  + "Hold for at least " + RECOMMEND_TIME + " seconds, then press OK.\n";
                        faceSamples[sampleInd] = Math.Round(face.Chin.Z - face.Forehead.Z, 4);
                        break;

                    case FaceState.LeftTiltWait:
                        output += "Step 5/6: Turn your head slightly to the left.\n"
                                  + "Hold for at least " + RECOMMEND_TIME + " seconds, then press OK.\n";
                        faceSamples[sampleInd] = Math.Round(face.CheekRight.Z - face.CheekLeft.Z, 4);
                        break;

                    case FaceState.RightTiltWait:
                        // FIX: duplicated word "to to" removed from the prompt.
                        output += "Step 6/6: Now turn your head slightly to the right.\n"
                                  + "Hold for at least " + RECOMMEND_TIME + " seconds, then press OK.\n";
                        faceSamples[sampleInd] = Math.Round(face.CheekRight.Z - face.CheekLeft.Z, 4);
                        break;

                    case FaceState.Alerted:
                        // Hold the alert on screen for a fixed number of frames,
                        // then switch back to the presentation state.
                        delayFrames++;
                        if (delayFrames >= ALERT_DELAY_FRAMES)
                        {
                            delayFrames   = 0;
                            currFaceState = FaceState.Presentation;
                        }
                        break;

                    case FaceState.Presentation:
                        output += "You're all set - start speaking right now! When you're done, press OK.\n";

                        // Display elapsed time since the session started (mm:ss).
                        long currentTime = getCurrentTimeMillis();
                        int  numMinutes  = (int)(currentTime - startTime) / 60000;
                        int  numSeconds  = ((int)(currentTime - startTime) / 1000) % 60;
                        string numSecondsStr = numSeconds.ToString();

                        output += "Time elapsed: " + numMinutes + ":" + numSecondsStr.PadLeft(2, '0');

                        int  enumInd;
                        bool alerted = false;

                        // Head pitch: alert when the current tilt is closer to the
                        // recorded up/down reference than to the neutral reference,
                        // sustained for RUN_THRESHOLD consecutive frames.
                        currYTilt = Math.Round(face.Chin.Z - face.Forehead.Z, 4);
                        if (Math.Abs(currYTilt - yTilts[0]) > Math.Abs(currYTilt - yTilts[1]))
                        {
                            enumInd = Convert.ToInt32(FlagType.HeadUp);
                            flagRuns[enumInd]++;
                            if (flagRuns[enumInd] >= RUN_THRESHOLD)
                            {
                                flagRuns[enumInd] = 0;
                                AlertFlag("Reminder: Tilt head lower!", FlagType.HeadUp);
                                alerted = true;
                            }
                        }
                        else if (Math.Abs(currYTilt - yTilts[0]) > Math.Abs(currYTilt - yTilts[2]))
                        {
                            enumInd = Convert.ToInt32(FlagType.HeadDown);
                            flagRuns[enumInd]++;
                            if (flagRuns[enumInd] >= RUN_THRESHOLD)
                            {
                                flagRuns[enumInd] = 0;
                                AlertFlag("Reminder: Tilt head higher!", FlagType.HeadDown);
                                alerted = true;
                            }
                        }

                        // Track which way the speaker is facing now; reset the dwell
                        // counter whenever the orientation zone changes.
                        currXTilt = Math.Round(face.CheekRight.Z - face.CheekLeft.Z, 4);
                        FaceOrientation newOrientation = getFaceOrientation(currXTilt);
                        if (newOrientation == currFaceOrientation)
                        {
                            currZoneFrames++;
                        }
                        else
                        {
                            currZoneFrames      = 0;
                            currFaceOrientation = newOrientation;
                        }

                        if (currZoneFrames >= CURR_ZONE_THRESHOLD)
                        {
                            currZoneFrames = 0;
                            // FIX: removed dead store to enumInd here — the value was
                            // never read before the next assignment.
                            AlertFlag("Reminder: Face different parts of the audience more often!", FlagType.HeadStatic);
                            alerted = true;
                        }

                        // Shoulder rotation check (currRotation maintained elsewhere).
                        if (currRotation > FRONT_FACING_THRESHOLD)
                        {
                            enumInd = Convert.ToInt32(FlagType.Shoulders);
                            flagRuns[enumInd]++;
                            if (flagRuns[enumInd] >= RUN_THRESHOLD)
                            {
                                flagRuns[enumInd] = 0;
                                AlertFlag("Reminder: Straighten your shoulders!", FlagType.Shoulders);
                                alerted = true;
                            }
                        }
                        if (!alerted)
                        {
                            // No active alert: show the neutral READY indicator.
                            tblFeedback.Foreground = Brushes.Black;
                            tblFeedback.Text       = "READY";
                            tblFeedback.FontSize   = 15.0;
                            tblFeedback.FontStyle  = FontStyles.Oblique;
                            tblFeedback.FontWeight = FontWeights.ExtraBold;
                        }

                        break;

                    case FaceState.Evaluation:
                        output += "Practice session over! Return to main screen and check your feedback";
                        break;
                    }
                    // Record samples for the setup parameters (ring buffer index).
                    sampleInd = (sampleInd + 1) % NUM_SAMPLES;
                }
                else
                {
                    output = "Step 1/6: Stand up, face as squarely with the Kinect as you can, then wait for camera to find your face.";
                }

                tblFaceStatus.Text = output; // Show message in window
            }
        }
예제 #29
0
 /// <summary>
 /// HD face frame handler stub — intentionally a no-op as written.
 /// NOTE(review): frames delivered by the reader are never acquired or
 /// processed here; confirm whether handling was meant to be added.
 /// </summary>
 /// <param name="sender">The reader raising the event (unused).</param>
 /// <param name="e">Event args carrying the frame reference (unused).</param>
 void hdRead_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
 {
 }
예제 #30
0
 /// <summary>
 /// Forwards each HD face frame event to <see cref="UpdateHDFaceFrame"/>,
 /// which performs the actual frame processing.
 /// </summary>
 /// <param name="sender">The reader raising the event (unused).</param>
 /// <param name="e">Event args carrying the frame reference.</param>
 void hdFaceFrameReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e) => UpdateHDFaceFrame(e);