Track() public method

Starts face tracking from Kinect input data. Track() detects a face based on the passed parameters, then identifies characteristic points and begins tracking. The first call to this API is more expensive, but if tracking succeeds then subsequent calls reuse the tracking information generated by the first call and are faster, until a tracking failure occurs.
public Track ( ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage ) : FaceTrackFrame
colorImageFormat ColorImageFormat /// format of the colorImage array ///
colorImage byte /// Input color image frame retrieved from Kinect sensor ///
depthImageFormat DepthImageFormat /// format of the depthImage array ///
depthImage short /// Input depth image frame retrieved from Kinect sensor ///
return FaceTrackFrame
Exemplo n.º 1
0
        //! @copydoc FaceTracker::Track(Skeleton[],ColorImageFrame,DepthImageFrame,int)
        //!
        //! Runs one face-tracking pass over the supplied Kinect frames: lazily
        //! creates the underlying face tracker and pixel buffers, tracks the face
        //! of every tracked skeleton, decides per user whether they appear to be
        //! looking at the sensor (eye-depth delta within epsilon), and publishes
        //! the aggregate tracking state via UpdateFaceTrackingStatusInternally.
        public override void Track(Skeleton[] skeletons, ColorImageFrame colorFrame,
                                   DepthImageFrame depthFrame, int nearestUserId)
        {
            // Lazily create the toolkit face tracker; construction can throw
            // InvalidOperationException (e.g. face-tracking runtime unavailable),
            // in which case we report "unable to detect" and bail out.
            if (faceTracker == null)
            {
                try
                {
                    faceTracker = new Microsoft.Kinect.Toolkit.FaceTracking.FaceTracker(sensor);
                }
                catch (InvalidOperationException)
                {
                    this.faceTracker = null;
                    UpdateFaceTrackingStatusInternally(FaceTracker.FaceTrackingState.UnableToDetectFaces);
                    return;
                }
            }

            // Validate inputs BEFORE allocating the (large) pixel buffers.
            // The skeletons guard also prevents a NullReferenceException in the
            // loop below when no skeleton frame was available.
            if (colorFrame == null || depthFrame == null || skeletons == null)
            {
                UpdateFaceTrackingStatusInternally(FaceTracker.FaceTrackingState.UnableToDetectFaces);
                return;
            }

            // Reusable pixel buffers, sized once from the sensor's stream formats.
            if (colors == null)
            {
                colors = new byte[sensor.ColorStream.FramePixelDataLength];
            }
            colorFrame.CopyPixelDataTo(colors);

            if (depths == null)
            {
                depths = new short[sensor.DepthStream.FramePixelDataLength];
            }
            depthFrame.CopyPixelDataTo(depths);

            bool? nearUserLooking = null;   // verdict for the nearest user (null = unknown)
            bool? farUserLooking  = null;   // verdict for the last other tracked user
            int   nTrackedUsers   = 0;

            foreach (Skeleton skeleton in skeletons)
            {
                if (skeleton.TrackingState != SkeletonTrackingState.Tracked)
                {
                    continue;
                }

                nTrackedUsers++;

                // NOTE(review): FaceTrackFrame is IDisposable in the toolkit; the
                // frame is not disposed here — confirm ownership semantics.
                var   frame             = faceTracker.Track(sensor.ColorStream.Format, colors, sensor.DepthStream.Format, depths, skeleton);
                bool? isLookingToSensor = null;

                if (frame != null)
                {
                    var shape = frame.Get3DShape();

                    var leftEyeZ  = shape[FeaturePoint.AboveMidUpperLeftEyelid].Z;
                    var rightEyeZ = shape[FeaturePoint.AboveMidUpperRightEyelid].Z;

                    var eyeDistZ = Math.Abs(leftEyeZ - rightEyeZ);

                    // A delta of exactly 0.0 almost always indicates a tracking
                    // error, so report "unknown" rather than a confident answer.
                    if (eyeDistZ == 0.0)
                    {
                        isLookingToSensor = null;
                    }
                    else
                    {
                        isLookingToSensor = eyeDistZ <= epsilon;
                    }
                }

                // Single assignment point for both the frame==null and tracked
                // cases (previously duplicated in both branches).
                if (skeleton.TrackingId == nearestUserId)
                {
                    nearUserLooking = isLookingToSensor;
                }
                else
                {
                    farUserLooking = isLookingToSensor;
                }
            }

            FaceTracker.FaceTrackingState trackFaceInternalState =
                getFaceTrackState(nTrackedUsers, nearUserLooking, farUserLooking);
            UpdateFaceTrackingStatusInternally(trackFaceInternalState);
        }
Exemplo n.º 2
0
    /// <summary>
    /// Periodic face-tracking loop: after an initial <paramref name="dueTime"/>
    /// delay, each tick runs the Kinect face tracker against the latest
    /// color/depth/skeleton data, publishing mood and projected feature points,
    /// until <paramref name="token"/> is cancelled. A zero
    /// <paramref name="interval"/> disables the loop entirely.
    /// </summary>
    /// <param name="dueTime">Initial delay before the first iteration.</param>
    /// <param name="interval">Delay between iterations; zero means "do nothing".</param>
    /// <param name="token">Cancellation token that ends the loop.</param>
    private async Task FaceTrackingAsync(TimeSpan dueTime, TimeSpan interval, CancellationToken token) {
      if (interval.TotalMilliseconds == 0) return;

      // Initial wait time before we begin the periodic loop.
      if (dueTime > TimeSpan.Zero)
        await Task.Delay(dueTime, token);

      DateTime LocalTimestamp = Timestamp;
      FaceTracker tracker = new FaceTracker(Sensor);

      try {
        // Repeat this loop until cancelled.
        while (!token.IsCancellationRequested) {

          // Skip when there is no new data since the previous iteration.
          if (Timestamp == LocalTimestamp) {
            await Task.Delay(interval, token);
            continue;
          }

          // Timestamp data
          LocalTimestamp = Timestamp;
          FaceTrackWatch.Again();

          // Do Job
          try {
            CopyColorData = true;
            CopySkeletons = true;
            FPoints = null;
            Mood = 0;
            if (null != GestureManager && null != GestureManager.Skeleton) {
              // NOTE(review): FaceTrackFrame is IDisposable; the frame is not
              // disposed here — confirm whether callers are expected to.
              FaceTrackFrame frame = tracker.Track(ColorFormat, ColorData, DepthFormat, DepthData, GestureManager.Skeleton);
              if (frame.TrackSuccessful) {

                // Only once.  It doesn't change.
                if (FTriangles == null) { FTriangles = frame.GetTriangles(); }
                FPoints = frame.GetProjected3DShape();
                Mood = frame.GetAnimationUnitCoefficients()[AnimationUnit.LipCornerDepressor];
                WSRProfileManager.GetInstance().UpdateMood(Mood);
              }
            }
          }
          catch (Exception ex) {
            WSRConfig.GetInstance().logError("FACE", ex);
          }
          FaceTrackWatch.Stop();

          // Wait to repeat again.
          if (interval > TimeSpan.Zero)
            await Task.Delay(interval, token);
        }
      }
      finally {
        // Dispose the tracker even when a cancelled Task.Delay throws
        // TaskCanceledException out of the loop — previously the tracker
        // leaked on the normal cancellation path.
        tracker.Dispose();
      }
    }