/// <summary>
/// Handles arrival of a new HD face frame: refreshes the shared face
/// alignment and redraws the tracked face points.
/// </summary>
/// <param name="sender">Object raising the event.</param>
/// <param name="e">Event arguments carrying the frame reference.</param>
private void FaceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
 {
     using (var frame = e.FrameReference.AcquireFrame())
     {
         // A missed acquisition yields a null frame; also skip frames where
         // face tracking did not succeed.
         if (frame == null || !frame.IsFaceTracked)
         {
             return;
         }

         frame.GetAndRefreshFaceAlignmentResult(_faceAlignment);
         UpdateFacePoints();
     }
 }
Esempio n. 2
0
 /// <summary>
 /// Handles arrival of a new HD face frame: refreshes the face alignment and
 /// raises <c>HdFrameReceived</c> with the current tracking id, model and
 /// alignment.
 /// </summary>
 /// <param name="sender">Object raising the event.</param>
 /// <param name="e">Event arguments carrying the frame reference.</param>
 private void FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
 {
     using (HighDefinitionFaceFrame frame = e.FrameReference.AcquireFrame())
     {
         // Null when the acquisition window was missed; skip frames whose
         // tracking id is no longer valid.
         if (frame == null || !frame.IsTrackingIdValid)
         {
             return;
         }

         frame.GetAndRefreshFaceAlignmentResult(this.faceAlignment);

         // BUG FIX: the explicit frame.Dispose() was removed — the using
         // statement already disposes the frame, so the manual call was a
         // redundant double dispose in the middle of the block.

         // Copy the delegate locally so a concurrent unsubscribe between the
         // null check and the invocation cannot cause a NullReferenceException.
         var handler = this.HdFrameReceived;
         if (handler != null)
         {
             handler(this, new HdFaceFrameResultEventArgs(this.TrackingId, this.faceModel, this.faceAlignment));
         }
     }
 }
Esempio n. 3
0
        /// <summary>
        /// This event is fired when a new HDFace frame is ready for consumption
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void HdFaceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            using (var frame = e.FrameReference.AcquireFrame())
            {
                // A missed acquisition yields a null frame; frames where face
                // tracking failed are skipped as well.
                if (frame == null || !frame.IsFaceTracked)
                {
                    return;
                }

                frame.GetAndRefreshFaceAlignmentResult(this.currentFaceAlignment);
                var capturedUnits = this.currentFaceAlignment.AnimationUnits;

                // While collection is active, snapshot the animation units
                // (IReadOnlyDictionary -> Dictionary) into a new reading.
                if (collectData)
                {
                    var reading = new FaceDataReading(true);
                    reading.AddData(capturedUnits);
                    fd.AddFaceData(reading);
                }
            }
        }
Esempio n. 4
0
 /// <summary>
 /// Forwards HD face frame arrival to the frame-processing routine.
 /// </summary>
 /// <param name="sender">Object raising the event (unused).</param>
 /// <param name="e">Event arguments carrying the frame reference.</param>
 void hdFaceFrameReader_FrameArrived( object sender, HighDefinitionFaceFrameArrivedEventArgs e )
 {
     // All processing lives in UpdateHDFaceFrame; this handler only delegates.
     UpdateHDFaceFrame( e );
 }
Esempio n. 5
0
        /// <summary>
        /// Processes one HD face frame: refreshes the face alignment and
        /// redraws the face model into the drawing group.
        /// </summary>
        /// <param name="e">Event arguments carrying the frame reference.</param>
        void UpdateHDFaceFrame( HighDefinitionFaceFrameArrivedEventArgs e )
        {
            using ( var hdFaceFrame = e.FrameReference.AcquireFrame() ) {
                // Skip missed acquisitions and frames without a tracked face.
                if ( hdFaceFrame == null || !hdFaceFrame.IsFaceTracked ) {
                    return;
                }

                hdFaceFrame.GetAndRefreshFaceAlignmentResult( faceAlignment );

                // Redraw: clear to black, rebuild the face model, render the
                // result, then clip the drawing to the display area.
                using ( var dc = drawingGroup.Open() ) {
                    dc.DrawRectangle( Brushes.Black, null, displayRect );
                    BuildFaceModel( dc );
                    Result( dc );
                    drawingGroup.ClipGeometry = new RectangleGeometry( displayRect );
                }
            }
        }
        /// <summary>
        /// Handles the event raised when a new face frame becomes available.
        /// </summary>
        private void OnFaceFrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            using (var faceFrame = e.FrameReference.AcquireFrame())
            {
                // Ignore missed acquisitions and untracked faces.
                if (faceFrame == null || !faceFrame.IsFaceTracked)
                {
                    return;
                }

                // Refresh the face alignment, then rebuild the mesh from it.
                faceFrame.GetAndRefreshFaceAlignmentResult(this.faceAlignment);
                UpdateMesh();

                // Notify bindings that the animation units changed.
                OnPropertyChanged("AnimationUnits");
            }
        }
Esempio n. 7
0
        /// <summary>
        /// HD face frame handler; currently a stub that acquires and
        /// immediately releases the frame without processing it.
        /// </summary>
        /// <param name="sender">Object raising the event (unused).</param>
        /// <param name="e">Event arguments carrying the frame reference.</param>
        void hdRead_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            using (HighDefinitionFaceFrame frame = e.FrameReference.AcquireFrame())
            {
                if (frame == null)
                {
                    return;
                }

                // TODO: frame processing not yet implemented.
            }
        }
        /// <summary>
        /// Routes HD face frames while the visualization mode is Face: binds
        /// the face source to the first tracked body, then refreshes the face
        /// alignment and redraws the face points.
        /// </summary>
        /// <param name="sender">Object raising the event (unused).</param>
        /// <param name="e">Event arguments carrying the frame reference.</param>
        private void _faceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            if (_mode != EVisualization.Face)
            {
                return;
            }

            using (var frame = e.FrameReference.AcquireFrame())
            {
                if (frame == null)
                {
                    return;
                }

                // Bind the face source to the first tracked body.
                foreach (CustomBody body in _bodies)
                {
                    // BUG FIX: the null check must come BEFORE IsTracked is
                    // read (the original dereferenced b.IsTracked first, so a
                    // null entry would have thrown).
                    if (body != null && body.IsTracked)
                    {
                        _faceSource.TrackingId = body.TrackingId;
                        // BUG FIX: stop at the first match — the original
                        // comment said "Will match once" but kept scanning.
                        break;
                    }
                }

                if (frame.IsFaceTracked)
                {
                    frame.GetAndRefreshFaceAlignmentResult(_faceAlignment);
                    UpdateFacePoints();
                }
            }
        }
Esempio n. 9
0
 /// <summary>
 /// Records each acquired HD face frame while capture is active.
 /// </summary>
 /// <param name="sender">Object raising the event (unused).</param>
 /// <param name="args">Event arguments carrying the frame reference.</param>
 void _faceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs args)
 {
     using (var frame = args.FrameReference.AcquireFrame())
     {
         // BUG FIX: AcquireFrame returns null when the acquisition window is
         // missed; guard so RecordFrame is never handed a null reference.
         // (NOTE(review): if RecordFrame deliberately logs missed frames as
         // nulls, this guard should be reverted — confirm with its author.)
         if (_isStarted && frame != null)
             RecordFrame(frame);
     }
 }
Esempio n. 10
0
    /// <summary>
    /// Streams HD face tracking into motion-capture commands: refreshes the
    /// face alignment, smooths the head orientation and expression weights
    /// through the per-channel predictors, optionally appends the frame to
    /// the recording list, and writes the command out asynchronously.
    /// </summary>
    /// <param name="sender">Object raising the event (unused).</param>
    /// <param name="e">Event arguments carrying the frame reference.</param>
    private void HighDefinitionFaceFrameReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
    {
        using (var frame = e.FrameReference.AcquireFrame())
        {
            // Only accept frames that were actually acquired, carry a valid
            // tracking id, have a tracked face, AND high alignment quality.
            if (frame != null && frame.IsTrackingIdValid && frame.IsFaceTracked &&
                frame.FaceAlignmentQuality == FaceAlignmentQuality.High)
            {
                frame.GetAndRefreshFaceAlignmentResult(FaceAlignment);

                // Pre-size the expression-weight array to one slot per
                // facial expression enum member.
                var command = new CommandMessage
                {
                    CommandType = CommandType.KinectFace,
                    KinectFace =
                        new MoCapFaceFrame
                        {
                            ExpressionWeights = new float[(int) MoCapKinectFacialExpression.Count]
                        }
                };

                // Seconds elapsed since recording started.
                // NOTE(review): DateTime.Now is local wall-clock time; if
                // MoCapRecordStartTime is UTC the delta is wrong — confirm.
                var time = (float) (DateTime.Now - MoCapRecordStartTime).TotalSeconds;

                var rotationX = FaceAlignment.FaceOrientation.X;
                var rotationY = FaceAlignment.FaceOrientation.Y;
                var rotationZ = FaceAlignment.FaceOrientation.Z;
                var rotationW = FaceAlignment.FaceOrientation.W;

                // Identity transform except for the measured head rotation.
                var transform = new TransformTime();
                transform.Time = time;
                transform.Position = Vector3.Zero;
                transform.Rotation = new Quaternion(rotationX, rotationY, rotationZ, rotationW);
                transform.Scale = Vector3.One;

                // Smooth/predict the rotation through the face filter.
                // NOTE(review): FaceDESP appears to be a double-exponential
                // smoothing predictor — confirm; its internal state is
                // updated by this call, so call order matters.
                transform.Rotation = FaceDESP.Predict(transform.Rotation);

                command.KinectFace.FaceTransform = transform;

                // For each animation unit the SDK reports, update its
                // per-channel predictor and store the one-step prediction.
                foreach (FaceShapeAnimations faceShapeAnimation in Enum.GetValues(typeof (FaceShapeAnimations)))
                {
                    float weight;

                    if (FaceAlignment.AnimationUnits.TryGetValue(faceShapeAnimation, out weight))
                    {
                        // Update must precede Predict: the filter is stateful.
                        FaceExpressionDESP[(int) faceShapeAnimation].Update(weight);

                        command.KinectFace.ExpressionWeights[(int) faceShapeAnimation] =
                            FaceExpressionDESP[(int) faceShapeAnimation].Predict(1);
                    }
                }

                // Keep a copy when recording; the command is sent either way.
                if (IsRecording)
                    FaceFrames.Add(command.KinectFace);

                BeginWriteCommand(command);
            }
        }
    }
        /// <summary>
        /// This event is fired when a new HDFace frame is ready for consumption.
        /// Drives the mirror-gaze experiment: computes the gaze point from the
        /// head orientation, checks whether the user looked at the commanded
        /// mirror target, and logs command timings to the console.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void HdFaceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            using (var frame = e.FrameReference.AcquireFrame())
            {
                // We might miss the chance to acquire the frame; it will be null if it's missed.
                // Also ignore this frame if face tracking failed.
                if (frame == null || !frame.IsFaceTracked)
                {
                    return;
                }

                // BUG FIX: refresh the alignment from THIS frame before
                // reading it. Previously the refresh happened at the very end
                // of the method, so the orientation (and hence the gaze point
                // and all command checks) used the PREVIOUS frame's alignment,
                // introducing a one-frame lag.
                frame.GetAndRefreshFaceAlignmentResult(this.currentFaceAlignment);

                Microsoft.Kinect.Vector4 orientation = currentFaceAlignment.FaceOrientation;
                changePoint(CalculateGazePoint(orientation));

                // Issue the next command once the previous one completed.
                if (cmdDone)
                {
                    issueCommand();
                }

                if (cmdGiven && !mirrorChecked)
                {
                    // Waiting for the user to fixate the commanded mirror target.
                    string s = checkMirrorGaze(currentLeft, currentRight);
                    if (cmds[Count] == s)
                    {
                        mirrorChecked = true;
                        TS_chk = (DateTime.Now - DateTime.Today).TotalMilliseconds;
                        TS_chk = TS_chk - TS_issued;
                        txt_cmd.Text = "YOU GOT IT!!!";
                        player.Play();
                    }
                    else if (s != "none")
                    {
                        // Looked at a wrong (but valid) target.
                        errors++;
                    }
                }
                else
                {
                    // Returning the gaze to the neutral screen region
                    // completes the current command and logs its timings.
                    if (currentLeft >= 322 && currentLeft <= 422 && currentRight >= 280 && currentRight <= 380)
                    {
                        cmdDone = true;
                        cmdGiven = false;
                        TS_done = (DateTime.Now - DateTime.Today).TotalMilliseconds;
                        TS_done = TS_done - TS_issued;
                        Console.Write(cmds[Count] + ", " + (int)(TS_issued - TS_start) + ", " + (int)TS_chk + ", " + (int)TS_done + ", " + errors + "\n");
                    }
                }

                this.UpdateMesh();
            }
        }
Esempio n. 12
0
        /// <summary>
        /// Handles HD face frames: refreshes the face alignment, publishes the
        /// head orientation quaternion, and (when allowed by the rotation
        /// check) recomputes the face vertices, maps them into color space,
        /// updates the bounds and flags the output as dirty.
        /// </summary>
        /// <param name="sender">Object raising the event (unused).</param>
        /// <param name="e">Event arguments carrying the frame reference.</param>
        void faceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            using (HighDefinitionFaceFrame frame = e.FrameReference.AcquireFrame())
            {
                // Skip missed acquisitions, invalid tracking ids, and
                // low-quality alignments.
                if (frame == null) { return; }
                if (frame.IsTrackingIdValid == false) { return; }
                if (frame.FaceAlignmentQuality == FaceAlignmentQuality.Low) { return; }

                frame.GetAndRefreshFaceAlignmentResult(this.faceAlignment);
                var o = this.faceAlignment.FaceOrientation;
                this.FOutOrientation[0] = new Quaternion(o.X, o.Y, o.Z, o.W);

                // When the rotation check is enabled, skip degenerate
                // (near-zero) orientations; otherwise always update.
                // CONSOLIDATED: the original if/else ran this exact same
                // four-statement sequence in both branches.
                if (!this.FInRCheck[0] || this.FOutOrientation[0].LengthSquared() > 0.1f)
                {
                    this.cameraPoints = this.faceModel.CalculateVerticesForAlignment(this.faceAlignment).ToArray();
                    this.runtime.Runtime.CoordinateMapper.MapCameraPointsToColorSpace(this.cameraPoints, this.colorPoints);
                    SetBounds();
                    this.FInvalidate = true;
                }
            }
        }
        /// <summary>
        /// Handles face frame updates: captures the alignment, face model and
        /// tracking id from the frame, then (re)starts the face model builder
        /// when a processor needs one and the tracked person changed, and
        /// finally signals the worker that face data is ready.
        /// </summary>
        private void FaceFrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            // Captured only when the frame carries a valid, tracked face.
            ulong? newTrackingId = null;

            using (var frame = e.FrameReference.AcquireFrame())
            {
                if (frame != null)
                {
                    if (frame.IsTrackingIdValid && frame.IsFaceTracked)
                    {
                        frame.GetAndRefreshFaceAlignmentResult(this.faceAlignment);
                        // NOTE(review): the FaceModel reference is retained
                        // past the frame's disposal — presumably safe in the
                        // Kinect SDK, but confirm it stays valid.
                        this.faceModel = frame.FaceModel;
                        newTrackingId = frame.TrackingId;
                    }
                }
            }

            // A new person entered tracking: rebuild the face model builder
            // so hair/skin color collection restarts for them.
            if (this.Processors.Any(x => x.RequiresFaceModelBuilder) && newTrackingId.HasValue && this.currentTrackingId != newTrackingId)
            {
                lock (this.processFaceModelMutex)
                {
                    this.currentTrackingId = newTrackingId;
                    // Discard state tied to the previous tracking id (this
                    // intentionally clears the faceModel just assigned above).
                    this.faceModel = null;
                    this.constructedFaceModel = null;
                    this.DisposeFaceModelBuilder();
                    this.fmb = this.faceSource.OpenModelBuilder(FaceModelBuilderAttributes.HairColor | FaceModelBuilderAttributes.SkinColor);
                    this.fmb.BeginFaceDataCollection();
                    this.fmb.CollectionCompleted += this.FaceModelBuilderCollectionCompleted;
                }
            }

            // NOTE(review): lock (this) is an anti-pattern (any external code
            // holding this instance can deadlock us); prefer a private
            // readonly gate object — needs a field change outside this method.
            lock (this)
            {
                this.faceReady = true;
                this.StartWorkerIfReady();
            }
        }
Esempio n. 14
0
        /// <summary>
        /// Handles HD face frames for the right-hand user: refreshes the face
        /// alignment, copies all animation-unit weights (formatted to three
        /// decimals) into mKinectData2, rebuilds the kin3_2 CSV line, and
        /// updates the eyebrow doubles. No-op while the Kinect checkbox is off.
        /// </summary>
        /// <param name="sender">Object raising the event (unused).</param>
        /// <param name="e">Event arguments carrying the frame reference.</param>
        public void HdFaceReader_FrameArrived2(object sender, HighDefinitionFaceFrameArrivedEventArgs e) //right user
        {
            if (kinectenable.Checked == false) return;
            using (var frame = e.FrameReference.AcquireFrame())
            {
                // Null when the acquisition window was missed; skip untracked faces.
                if (frame == null || !frame.IsFaceTracked)
                {
                    return;
                }

                frame.GetAndRefreshFaceAlignmentResult(this.currentFaceAlignment);

                // Helper: format one animation unit to three decimal places.
                // De-duplicates the 17 identical lookup+format expressions.
                Func<FaceShapeAnimations, string> au =
                    a => currentFaceAlignment.AnimationUnits[a].ToString("0.000");

                mKinectData2.mAU.JawOpen = au(FaceShapeAnimations.JawOpen);
                mKinectData2.mAU.JawSlideRight = au(FaceShapeAnimations.JawSlideRight);
                mKinectData2.mAU.LeftcheekPuff = au(FaceShapeAnimations.LeftcheekPuff);
                mKinectData2.mAU.LefteyebrowLowerer = au(FaceShapeAnimations.LefteyebrowLowerer);
                mKinectData2.mAU.LefteyeClosed = au(FaceShapeAnimations.LefteyeClosed);
                mKinectData2.mAU.LipCornerDepressorLeft = au(FaceShapeAnimations.LipCornerDepressorLeft);
                mKinectData2.mAU.LipCornerDepressorRight = au(FaceShapeAnimations.LipCornerDepressorRight);
                mKinectData2.mAU.LipCornerPullerLeft = au(FaceShapeAnimations.LipCornerPullerLeft);
                mKinectData2.mAU.LipCornerPullerRight = au(FaceShapeAnimations.LipCornerPullerRight);
                mKinectData2.mAU.LipPucker = au(FaceShapeAnimations.LipPucker);
                mKinectData2.mAU.LipStretcherLeft = au(FaceShapeAnimations.LipStretcherLeft);
                mKinectData2.mAU.LipStretcherRight = au(FaceShapeAnimations.LipStretcherRight);
                mKinectData2.mAU.LowerlipDepressorLeft = au(FaceShapeAnimations.LowerlipDepressorLeft);
                mKinectData2.mAU.LowerlipDepressorRight = au(FaceShapeAnimations.LowerlipDepressorRight);
                mKinectData2.mAU.RightcheekPuff = au(FaceShapeAnimations.RightcheekPuff);
                mKinectData2.mAU.RighteyebrowLowerer = au(FaceShapeAnimations.RighteyebrowLowerer);
                mKinectData2.mAU.RighteyeClosed = au(FaceShapeAnimations.RighteyeClosed);

                // Same CSV layout as before (prefix + 17 comma-joined values),
                // built with string.Join instead of a 17-way concatenation;
                // the redundant .ToString() calls on string values are gone.
                kin3_2 = "kin3_2," + string.Join(",", new[]
                {
                    mKinectData2.mAU.JawOpen, mKinectData2.mAU.JawSlideRight,
                    mKinectData2.mAU.LeftcheekPuff, mKinectData2.mAU.LefteyebrowLowerer,
                    mKinectData2.mAU.LefteyeClosed, mKinectData2.mAU.LipCornerDepressorLeft,
                    mKinectData2.mAU.LipCornerDepressorRight, mKinectData2.mAU.LipCornerPullerLeft,
                    mKinectData2.mAU.LipCornerPullerRight, mKinectData2.mAU.LipPucker,
                    mKinectData2.mAU.LipStretcherLeft, mKinectData2.mAU.LipStretcherRight,
                    mKinectData2.mAU.LowerlipDepressorLeft, mKinectData2.mAU.LowerlipDepressorRight,
                    mKinectData2.mAU.RightcheekPuff, mKinectData2.mAU.RighteyebrowLowerer,
                    mKinectData2.mAU.RighteyeClosed
                });

                // Parsed with the current culture, matching the culture used
                // by ToString("0.000") above, so round-tripping is consistent.
                eyebrowleft_2 = Convert.ToDouble(mKinectData2.mAU.LefteyebrowLowerer);
                eyebrowright_2 = Convert.ToDouble(mKinectData2.mAU.RighteyebrowLowerer);
            }
        }