Main class that instantiates the face-tracking engine and tracks the face of a single person, retrieving various metrics such as animation units, 3D points, and triangles on the face.
Inheritance: IDisposable
Beispiel #1
0
        /// <summary>
        /// Connects to the first available Kinect sensor, optionally configures
        /// seated/near-range tracking, enables the skeleton, color and depth
        /// streams, starts the sensor, and creates a face tracker for it.
        /// </summary>
        /// <param name="tsp">Smoothing parameters for the skeleton stream.</param>
        /// <param name="near">When true, enables seated mode and near-range depth/skeleton tracking.</param>
        /// <param name="colorFormat">Color stream format to enable.</param>
        /// <param name="depthFormat">Depth stream format to enable.</param>
        /// <exception cref="InvalidOperationException">No connected Kinect sensor was found.</exception>
        private KinectHelper(TransformSmoothParameters tsp, bool near = false, 
                             ColorImageFormat colorFormat = ColorImageFormat.RgbResolution1280x960Fps12, 
                             DepthImageFormat depthFormat = DepthImageFormat.Resolution640x480Fps30)
        {
            _kinectSensor = KinectSensor.KinectSensors.FirstOrDefault(s => s.Status == KinectStatus.Connected);

            if (_kinectSensor == null)
            {
                // Throw a specific exception type rather than the bare System.Exception.
                throw new InvalidOperationException("No Kinect-Sensor found.");
            }
            if (near)
            {
                _kinectSensor.SkeletonStream.TrackingMode = SkeletonTrackingMode.Seated;
                _kinectSensor.DepthStream.Range = DepthRange.Near;
                _kinectSensor.SkeletonStream.EnableTrackingInNearRange = true;
            }

            DepthImageFormat = depthFormat;
            ColorImageFormat = colorFormat;

            _kinectSensor.SkeletonStream.Enable(tsp);
            _kinectSensor.ColorStream.Enable(colorFormat);
            _kinectSensor.DepthStream.Enable(depthFormat);
            _kinectSensor.AllFramesReady += AllFramesReady;

            _kinectSensor.Start();
            _faceTracker = new FaceTracker(_kinectSensor);
        }
Beispiel #2
0
 /// <summary>
 /// Constructor. Initializes pointing state and the walking history, enables
 /// the Kinect color/skeleton/depth streams, starts the sensor, creates the
 /// face tracker, and allocates the per-frame data buffers.
 /// </summary>
 /// <param name="game">Owning XNA game instance.</param>
 internal KinectInput(Game game)
     : base(game)
 {
     head = new Vector3();
     pointing = false;
     countPointing = 0;
     // Initializing the walk history; the capacity now matches the number
     // of entries seeded below (was 12 while 20 items were enqueued).
     walkHistory = new Queue<FeetState>(20);
     for (int i = 0; i < 20; i++)
     {
         walkHistory.Enqueue(new FeetState());
     }
     positions = new List<Vector3>();
     // Initializing kinect sensors.
     sensor = KinectSensor.KinectSensors[0];
     sensor.ColorStream.Enable();
     sensor.SkeletonStream.Enable();
     sensor.DepthStream.Enable();
     // Listen to the AllFramesReady event to receive KinectSensor's data.
     sensor.AllFramesReady += new EventHandler<AllFramesReadyEventArgs>(kinectSensor_AllFramesReady);
     sensor.Start();
     faceTracker = new FaceTracker(sensor);
     // Initialize data arrays, sized from the enabled streams instead of
     // hard-coded lengths so they always match the sensor's actual formats.
     colorPixelData = new byte[sensor.ColorStream.FramePixelDataLength];
     depthPixelData = new short[sensor.DepthStream.FramePixelDataLength];
     skeletonData = new Skeleton[sensor.SkeletonStream.FrameSkeletonArrayLength];
 }
        /// <summary>
        /// Tracks the face of the nearest (smallest Z) non-untracked skeleton and
        /// raises the JawLower/skeleton-tracked callbacks. Recreates the face
        /// tracker whenever the tracked skeleton's TrackingId changes.
        /// </summary>
        void TrackFace()
        {
            var skeleton = skeletonData
                .Where(s => s.TrackingState != SkeletonTrackingState.NotTracked)
                .OrderBy(s => s.Position.Z)
                .FirstOrDefault();

            IsSkeletonTrackedUpdated(skeleton != null);

            if (skeleton == null)
            {
                skeletonId = -1;
                if (faceTracker != null)
                {
                    faceTracker.Dispose();
                    faceTracker = null;
                }

                JawLowerUpdated(null);
                return;
            }

            if (skeletonId != skeleton.TrackingId)
            {
                if (faceTracker != null)
                {
                    faceTracker.Dispose();
                    // Clear the reference immediately: if the re-creation below
                    // throws, the field must not keep pointing at a disposed
                    // tracker that a later frame would call Track() on.
                    faceTracker = null;
                }
                try
                {
                    faceTracker = new FaceTracker(sensor);
                }
                catch (InvalidOperationException)
                {
                    // FaceTracker cannot be instantiated during some shutdown
                    // scenarios; skip tracking for this frame.
                    return;
                }
            }
            skeletonId = skeleton.TrackingId;

            if (skeleton.TrackingState != SkeletonTrackingState.Tracked)
            {
                JawLowerUpdated(null);
                return;
            }

            // MEMO: Calling Dispose on the FaceTrackFrame object breaks subsequent
            // processing, so the frame is intentionally not disposed here.
            var faceFrame = faceTracker.Track(sensor.ColorStream.Format, colorImage, sensor.DepthStream.Format, depthImage, skeleton);
            if (!faceFrame.TrackSuccessful)
            {
                JawLowerUpdated(null);
                return;
            }

            var animationUnits = faceFrame.GetAnimationUnitCoefficients();
            JawLowerUpdated(animationUnits[AnimationUnit.JawLower]);
        }
        /// <summary>
        /// Wraps a native face model and associates it with its owning face tracker.
        /// Both references are required; a missing one is a programming error.
        /// </summary>
        internal FaceModel(FaceTracker faceTracker, IFTModel faceModelPtr)
        {
            bool missingDependency = faceTracker == null || faceModelPtr == null;
            if (missingDependency)
            {
                throw new InvalidOperationException("Cannot associate face model with null face tracker or native face model reference");
            }

            this.faceTracker = faceTracker;
            this.faceTrackingModelPtr = faceModelPtr;
        }
Beispiel #5
0
        /// <summary>
        /// Wraps a native face-tracking result. The parent tracker is held via a
        /// short weak reference so this frame does not keep it alive.
        /// </summary>
        internal FaceTrackFrame(IFTResult faceTrackResultPtr, FaceTracker parentTracker)
        {
            if (faceTrackResultPtr == null)
            {
                throw new InvalidOperationException("Cannot associate with a null native frame pointer");
            }

            this.parentFaceTracker = new WeakReference(parentTracker, false);
            this.faceTrackingResultPtr = faceTrackResultPtr;
        }
        /// <summary>
        /// Wires up the Kinect sensor: creates the ellipse lookup dictionaries,
        /// enables the color/depth/skeleton streams, subscribes to frame events,
        /// starts the sensor and creates the face tracker.
        /// </summary>
        private void Initialize()
        {
            if (_kinectSensor == null)
                return;

            // Create the lookup dictionaries BEFORE subscribing/starting so the
            // AllFramesReady handler can never observe them as null (the original
            // initialized them after Start(), which raced with the first frame).
            _faceEllipses = new Dictionary<FeaturePoint, Ellipse>();
            _bodyEllipses = new Dictionary<JointType, Ellipse>();

            _kinectSensor.ColorStream.Enable();
            _kinectSensor.DepthStream.Enable(DepthImageFormat.Resolution80x60Fps30);
            _kinectSensor.SkeletonStream.Enable();
            _kinectSensor.SkeletonStream.EnableTrackingInNearRange = true;
            //_kinectSensor.SkeletonStream.TrackingMode = SkeletonTrackingMode.Seated;
            _kinectSensor.AllFramesReady += KinectSensorAllFramesReady;
            _kinectSensor.Start();
            _faceTracker = new FaceTracker(_kinectSensor);

            Message = "Kinect connected";
        }
        /// <summary>
        /// Window constructor: finds a connected Kinect, enables the color, depth
        /// and skeleton streams in seated/near mode, starts the sensor and creates
        /// the face tracker. Any failure is reported to the user and the window closes.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            try {
                // Find an available (connected) Kinect.
                foreach ( var k in KinectSensor.KinectSensors ) {
                    if ( k.Status == KinectStatus.Connected ) {
                        kinect = k;
                        break;
                    }
                }
                if ( kinect == null ) {
                    // Specific exception type instead of bare Exception; it is
                    // still caught by the generic handler below.
                    throw new InvalidOperationException( "利用可能なKinectがありません" );
                }

                // Get notified whenever all frames are ready.
                kinect.AllFramesReady += new EventHandler<AllFramesReadyEventArgs>( kinect_AllFramesReady );

                // Enable the Color, Depth and Skeleton streams.
                kinect.ColorStream.Enable( ColorImageFormat.RgbResolution640x480Fps30 );
                kinect.DepthStream.Enable( DepthImageFormat.Resolution640x480Fps30 );
                kinect.SkeletonStream.Enable();

                kinect.DepthStream.Range = DepthRange.Near;
                kinect.SkeletonStream.TrackingMode = SkeletonTrackingMode.Seated;
                kinect.SkeletonStream.EnableTrackingInNearRange = true;

                // Start the Kinect.
                kinect.Start();

                // Create the face-tracking instance.
                faceTracker = new FaceTracker( kinect );
            }
            catch ( Exception ex ) {
                MessageBox.Show( ex.Message );
                Close();
            }
        }
Beispiel #8
0
		/// <summary>
		/// Per-frame handler: lazily creates the face tracker, copies the color,
		/// depth and skeleton data into reusable buffers, then runs face tracking
		/// on the first tracked skeleton and shows the thought bubble on success.
		/// </summary>
		void newSensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
		{
			if (_faceTracker == null)
			{
				try
				{
					_faceTracker = new FaceTracker(_sensorChooser.Kinect);
				}
				catch (InvalidOperationException)
				{
					// During some shutdown scenarios the FaceTracker
					// is unable to be instantiated.  Catch that exception
					// and don't track a face.
					Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
					_faceTracker = null;
				}
			}

			#region copying color data
			using (var colorFrame = e.OpenColorImageFrame())
			{
				if (colorFrame == null)
				{
					return;
				}

				// Make a copy of the color frame for displaying.
				var haveNewFormat = _currentColorImageFormat != colorFrame.Format;
				if (haveNewFormat)
				{
					_currentColorImageFormat = colorFrame.Format;
					_colorImageData = new byte[colorFrame.PixelDataLength];
					_colorImageWritableBitmap = new WriteableBitmap(colorFrame.Width, colorFrame.Height, 96, 96, PixelFormats.Bgr32, null);

					_colorImage.Source = _colorImageWritableBitmap;
				}

				colorFrame.CopyPixelDataTo(_colorImageData);
				_colorImageWritableBitmap.WritePixels(
					new Int32Rect(0, 0, colorFrame.Width, colorFrame.Height),
					_colorImageData,
					colorFrame.Width * colorFrame.BytesPerPixel,
					0);
			}
			#endregion

			#region copying depth data
			using (var depthFrame = e.OpenDepthImageFrame())
			{
				if (depthFrame == null)
				{
					return;
				}

				// Reuse the depth buffer across frames; only reallocate when the
				// frame size changes (the original allocated a new array per frame).
				if (depthData == null || depthData.Length != depthFrame.PixelDataLength)
				{
					depthData = new short[depthFrame.PixelDataLength];
				}
				depthFrame.CopyPixelDataTo(depthData);
			}
			#endregion

			#region copying skeleton data
			using (var skeletonFrame = e.OpenSkeletonFrame())
			{
				if (skeletonFrame == null)
				{
					return;
				}

				Skeleton[] allSkeletons = new Skeleton[skeletonFrame.SkeletonArrayLength];
				skeletonFrame.CopySkeletonDataTo(allSkeletons);

				firstSkeleton = (from c in allSkeletons
								 where c.TrackingState == SkeletonTrackingState.Tracked
								 select c).FirstOrDefault();

				if (firstSkeleton == null)
				{
					return;
				}
			}
			#endregion

			if (_faceTracker != null)
			{
				FaceTrackFrame frame = _faceTracker.Track(_sensorChooser.Kinect.ColorStream.Format,
					_colorImageData, _sensorChooser.Kinect.DepthStream.Format, depthData, firstSkeleton);

				if (frame.TrackSuccessful)
				{
					AnalyzeFace(frame);

					if (_thoughtBubble.Visibility != System.Windows.Visibility.Visible)
					{
						_thoughtBubble.Visibility = System.Windows.Visibility.Visible;
					}
				}
				// (Removed the empty else block: nothing to do on a failed track.)
			}
		}
            /// <summary>
            /// Updates the face tracking information for this skeleton: lazily
            /// creates the face tracker, runs tracking on the current frame data,
            /// and caches the triangle topology plus the 2D/3D shape points.
            /// </summary>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    // An untracked skeleton carries no face data; bail out early.
                    return;
                }

                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker cannot be
                        // instantiated; swallow the exception and skip face tracking.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                isFaceTracked = false;
                if (this.faceTracker == null)
                {
                    return;
                }

                FaceTrackFrame faceFrame = this.faceTracker.Track(colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                this.lastFaceTrackSucceeded = faceFrame.TrackSuccessful;
                if (!this.lastFaceTrackSucceeded)
                {
                    return;
                }

                if (faceTriangles == null)
                {
                    // The triangle topology never changes, so fetch it only once.
                    faceTriangles = faceFrame.GetTriangles();
                }

                isFaceTracked = true;
                this.facePoints = faceFrame.GetProjected3DShape();
                this.facePoints3D = faceFrame.Get3DShape();
            }
Beispiel #10
0
 /// <summary>
 /// Creates an eye tracker bound to the given Kinect sensor.
 /// </summary>
 /// <param name="sensor">Active Kinect sensor; must not be null.</param>
 /// <param name="epsilon">Tolerance used by the eye-state comparisons.</param>
 /// <exception cref="ArgumentNullException"><paramref name="sensor"/> is null.</exception>
 public EyeTracker(KinectSensor sensor, float epsilon = 0.02f)
 {
     // Validate the argument explicitly instead of letting the FaceTracker
     // constructor fail with a less descriptive exception.
     if (sensor == null)
     {
         throw new ArgumentNullException("sensor");
     }

     this.sensor = sensor;
     this.epsilon = epsilon;
     faceTracker = new FaceTracker(sensor);
 }
Beispiel #11
0
        /// <summary>
        /// Allows the game to run logic such as updating the world,
        /// checking for collisions, gathering input, and playing audio.
        /// Runs face tracking on the nearest skeleton and derives the head
        /// orientation (yaw/pitch/roll) and a screen-space head vector.
        /// </summary>
        /// <param name="gameTime">Provides a snapshot of timing values.</param>
        protected override void Update(GameTime gameTime)
        {
            // Allows the game to exit
            if (GamePad.GetState(PlayerIndex.One).Buttons.Back == ButtonState.Pressed)
                this.Exit();

            colorData = colorStream.ColorData;
            depthData = depthStream.DepthData;
            nearestSkeleton = skeletonStream.Skel;

            if (nearestSkeleton != null && nearestSkeleton.TrackingState == SkeletonTrackingState.Tracked)
            {
                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(this.chooser.Sensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // FaceTracker can fail to instantiate during shutdown; skip tracking.
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker != null)
                {
                    FaceTrackFrame faceTrackFrame = this.faceTracker.Track(
                        colorImageFormat,
                        colorData,
                        depthImageFormat,
                        depthData,
                        nearestSkeleton);

                    if (faceTrackFrame.TrackSuccessful)
                    {
                        // Feature point index 4 is used as the head reference point below.
                        // (Removed the unused GetProjected3DShape() call.)
                        EnumIndexableCollection<FeaturePoint, Vector3DF> shapePoints = faceTrackFrame.Get3DShape();

                        yaw = -MathHelper.ToRadians(faceTrackFrame.Rotation.Y);
                        pitch = -MathHelper.ToRadians(faceTrackFrame.Rotation.X);
                        roll = MathHelper.ToRadians(faceTrackFrame.Rotation.Z);

                        // Perspective-divide the head point into the fixed-scale screen space.
                        vector.X = 9.3f * (shapePoints[4].X / shapePoints[4].Z);
                        vector.Y = 9.3f * (shapePoints[4].Y / shapePoints[4].Z) * 0.95f;
                        vector.Z = 0;
                        scale = 0.4f;

                        Window.Title = shapePoints[4].X.ToString() + " " + shapePoints[4].Y.ToString() + " " + shapePoints[4].Z.ToString();
                    }
                    else
                        scale = 0;
                }
            }

            // (Removed the empty "gameTime.TotalGameTime.Seconds > 3" block: dead code.)
            base.Update(gameTime);
        }
Beispiel #12
0
            /// <summary>
            /// Updates the face tracking information for this skeleton and
            /// publishes projected feature point 5 to the application object.
            /// </summary>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    // nothing to do with an untracked skeleton.
                    return;
                }

                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker
                        // is unable to be instantiated.  Catch that exception
                        // and don't track a face.
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker != null)
                {
                    FaceTrackFrame frame = this.faceTracker.Track(
                        colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                    this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                    if (this.lastFaceTrackSucceeded)
                    {
                        if (faceTriangles == null)
                        {
                            // only need to get this once.  It doesn't change.
                            faceTriangles = frame.GetTriangles();
                        }

                        this.facePoints = frame.GetProjected3DShape();

                        // Feature point 5 is forwarded to the application object.
                        double dbX = facePoints[5].X;
                        double dbY = facePoints[5].Y;

                        // App.Current may not be our App type (e.g. when hosted or in
                        // a designer), so guard the 'as' cast instead of dereferencing null.
                        App thisApp = App.Current as App;
                        if (thisApp != null)
                        {
                            thisApp.m_dbX = dbX;
                            thisApp.m_dbY = dbY;
                        }
                    }
                }
            }
            /// <summary>
            /// Updates the face tracking information for this skeleton
            /// </summary>
            /// <param name="kinectSensor">Sensor used to lazily create the face tracker.</param>
            /// <param name="colorImageFormat">Format of <paramref name="colorImage"/>.</param>
            /// <param name="colorImage">Raw color pixel data for this frame.</param>
            /// <param name="depthImageFormat">Format of <paramref name="depthImage"/>.</param>
            /// <param name="depthImage">Raw depth pixel data for this frame.</param>
            /// <param name="skeletonOfInterest">Skeleton whose face is tracked and compared.</param>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;
                // Projected face points of the current frame, compared against the saved face below.
                List<Point> faceModelPtsCompare = new List<Point>();


                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    // nothing to do with an untracked skeleton.
                    return;
                }

                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);

                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker
                        // is unable to be instantiated.  Catch that exception
                        // and don't track a face.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker != null )
                {
                    // Coarse body-proportion features used alongside the face points.
                    // NOTE(review): X/Y joint positions are signed camera-space values;
                    // Math.Abs folds left/right and up/down — confirm this is intended.
                    double shoulderLeft = (double)skeletonOfInterest.Joints[JointType.ShoulderLeft].Position.X;
                    double shoulderRight = (double)skeletonOfInterest.Joints[JointType.ShoulderRight].Position.X;
                    double head = (double)skeletonOfInterest.Joints[JointType.Head].Position.Y;
                    double hip = (double)skeletonOfInterest.Joints[JointType.HipCenter].Position.Y;
                    double headDistZ = (double)skeletonOfInterest.Joints[JointType.Head].Position.Z;
                    shoulderLeft = Math.Abs(shoulderLeft);
                    shoulderRight = Math.Abs(shoulderRight);
                    head = Math.Abs(head);
                    hip = Math.Abs(hip);
                    headDistZ = Math.Abs(headDistZ);
                    // NOTE(review): GetProjected3DShape() is read before TrackSuccessful
                    // is checked — confirm the frame's points are valid on failure.
                    FaceTrackFrame frame = this.faceTracker.Track(
                        colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);
                    this.facePoints2 = frame.GetProjected3DShape();

                    // Shift each projected point by half a pixel into Point structs.
                    for (int i = 0; i < this.facePoints2.Count; i++)
                    {
                        faceModelPtsCompare.Add(new Point(this.facePoints2[i].X + 0.5f, this.facePoints2[i].Y + 0.5f));
                    }

                    this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                    // One-shot capture: when saveFace is set, snapshot the current face
                    // points, body proportions and skeleton, then clear the flag.
                    if (saveFace == true) 
                    {
                       // trackedSkeletonsSave.Add(skeletonOfInterest.TrackingId, this);
                        this.facePoints = frame.GetProjected3DShape();

                        for (int i = 0; i < this.facePoints.Count; i++)
                        {
                            faceModelPts.Add(new Point(this.facePoints[i].X + 0.5f, this.facePoints[i].Y + 0.5f));
                        }
                        shoulderDistX = (shoulderLeft + shoulderRight) * 100;
                        headhipDistY = (hip +head) * 100;
                        saveFaceList.Add(faceModelPts);
                        saveSkeleList.Add(skeletonOfInterest);
                        saveTrackList.Add(skeletonOfInterest.TrackingId);
                        saveFace = false;
                    }
                    if (this.lastFaceTrackSucceeded)
                    {

                        // Advance colorCount through the known-sensor list; it selects
                        // which rectangle (0-3) is updated by the switch below.
                        // NOTE(review): the break/reset interplay is subtle — confirm
                        // the intended rotation behavior before restructuring.
                        foreach (Faces kinects in faceList) 
                        {
                            if (colorCount < 3)
                            {
                                if (kinectSensor == kinects.kinect)
                                {

                                    break;

                                }

                                
                            }
                            else
                                colorCount = 0;
                            
                            colorCount++;
                        }
                        int countPt = 0;
                        double pointxDiff = 0;
                        double pointyDiff = 0;
                        double pointsNew = 0;
                        double pointsSaved = 0;
                        double shoulderDistDiff = 0;
                        double headhipDistDiff = 0;
                        double pointsDiff = 0;

                        if (faceModelPts.Count > 0)
                        {
                            // NOTE(review): these loops overwrite pointsNew/pointsSaved on
                            // every iteration, so only the LAST point contributes — this
                            // looks like it was meant to accumulate; confirm.
                            foreach (Point pointNew in faceModelPtsCompare)
                            {
                                pointsNew = (pointNew.X + pointNew.Y) / headDistZ ;

                            }
                            foreach (Point pointSave in faceModelPts)
                            {
                                //pointxDiff = (pointNew.X - pointSave.X);
                                //pointyDiff = (pointNew.Y - pointSave.Y);

                                pointsSaved = (pointSave.X + pointSave.Y) / headDistZ ;
                            }
                            shoulderDistXNew = (shoulderLeft + shoulderRight) ;
                            headhipDistYNew = (hip + head);
                            
                         //   Debug.WriteLine("x diff: " + pointxDiff + " y diff: " + pointyDiff);
                            //Debug.WriteLine("new: " + pointsNew + " old: " + pointsSaved);
                            // Depth-normalized differences between the saved and current person.
                            shoulderDistDiff = Math.Abs(shoulderDistXNew - shoulderDistX) *10 /headDistZ;
                            headhipDistDiff = Math.Abs(headhipDistYNew - headhipDistY)*10 /headDistZ;
                            // NOTE(review): pointsSaved is reused here to hold the difference.
                            pointsSaved = Math.Abs(pointsNew - pointsSaved) *10;
                            fMap.newFace = pointsNew;
                            fMap.oldFace = pointsSaved;

                            Debug.WriteLine("Shoulder Dist diff: " + shoulderDistDiff / headDistZ);
                            Debug.WriteLine("Head dist diff: " + headhipDistDiff / headDistZ);
                            Debug.WriteLine("face points diff: " + pointsSaved / headDistZ);

                            // Close enough to the saved face: force the "recognized" slot (3).
                            if (pointsSaved < (50 / headDistZ) )
                            {
                                colorCount = 3;
                            }
                        }


                        // Update the rectangle corresponding to the selected slot with
                        // this frame's face bounding box.
                        switch (colorCount)
                        {
                            case 0:
                                rectangle.Width = frame.FaceRect.Width;
                                rectangle.Height = frame.FaceRect.Height;
                                Point rectPt = new Point();
                                rectPt.X = frame.FaceRect.Left;
                                rectPt.Y = frame.FaceRect.Top;
                                rectangle.Location = (Point)rectPt;
                                drawNum = 0;
                                break;
                            case 1:
                                rectangle2.Width = frame.FaceRect.Width;
                                rectangle2.Height = frame.FaceRect.Height;
                                Point rectPt2 = new Point();
                                rectPt2.X = frame.FaceRect.Left;
                                rectPt2.Y = frame.FaceRect.Top;
                                rectangle2.Location = (Point)rectPt2;
                                drawNum = 1;
                                break;
                            case 2:
                                rectangle3.Width = frame.FaceRect.Width;
                                rectangle3.Height = frame.FaceRect.Height;
                                Point rectPt3 = new Point();
                                rectPt3.X = frame.FaceRect.Left;
                                rectPt3.Y = frame.FaceRect.Top;
                                rectangle3.Location = (Point)rectPt3;
                                drawNum = 2;
                                break;
                            case 3:
                                rectangle4.Width = frame.FaceRect.Width;
                                rectangle4.Height = frame.FaceRect.Height;
                                Point rectPt4 = new Point();
                                rectPt4.X = frame.FaceRect.Left;
                                rectPt4.Y = frame.FaceRect.Top;
                                rectangle4.Location = (Point)rectPt4;
                                drawNum = 3;
                                break;
                        }


                    }
                }
            }
Beispiel #14
0
            /// <summary>
            /// Updates the face tracking information for this skeleton
            /// </summary>
            // Lazily creates the face tracker and stores a clone of the latest
            // face-track result in the 'frame' field for consumers to read.
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    // nothing to do with an untracked skeleton.
                    return;
                }

                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // FaceTracker can fail to instantiate during some shutdown
                        // scenarios; leave it null and skip tracking this frame.
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker != null)
                {
                    // NOTE(review): the 'as' cast silently yields null if Clone()
                    // does not return a FaceTrackFrame — confirm consumers of
                    // 'frame' tolerate a null value.
                    frame = this.faceTracker.Track(
                        colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest).Clone() as FaceTrackFrame;
                }
            }
        /// <summary>
        /// Handles a sensor swap: detaches the frame handler and destroys the
        /// face tracker on the old sensor, then creates a new face tracker and
        /// subscribes to frame events on the new sensor.
        /// </summary>
        private void OnKinectChanged(KinectSensor oldSensor, KinectSensor newSensor)
        {
            if (oldSensor != null)
            {
                try
                {
                    oldSensor.AllFramesReady -= this.AllFramesReady;

                    this.DestroyFaceTracker();
                }
                catch (InvalidOperationException)
                {
                    // KinectSensor might enter an invalid state while enabling/disabling streams or stream features.
                    // E.g.: sensor might be abruptly unplugged.
                }
            }

            if (newSensor != null)
            {
                try
                {
                    // NOTE(review): uses this.Kinect rather than the newSensor
                    // parameter — assumes the property already points at the new
                    // sensor when this callback runs; confirm against the caller.
                    this.faceTracker = new FaceTracker(this.Kinect);

                    newSensor.AllFramesReady += this.AllFramesReady;
                }
                catch (InvalidOperationException)
                {
                    // KinectSensor might enter an invalid state while enabling/disabling streams or stream features.
                    // E.g.: sensor might be abruptly unplugged.
                }
            }
        }
        private void AllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame skeletonFrame = null;


            try
            {
                colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame();
                depthImageFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
                skeletonFrame = allFramesReadyEventArgs.OpenSkeletonFrame();

                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                // Check for changes in any of the data this function is receiving
                // and reset things appropriately.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.DestroyFaceTracker();
                    this.depthImage = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.DestroyFaceTracker();
                    this.colorImage = null;
                    this.colorImageFormat = colorImageFrame.Format;
                    this.colorImageWritableBitmap = null;
                    this.ColorImage.Source = null;
                    this.theMaterial.Brush = null;
                }

                if (this.skeletonData != null && this.skeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.skeletonData = null;
                }

                // Create any buffers to store copies of the data we work with
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }

                if (this.colorImageWritableBitmap == null)
                {
                    this.colorImageWritableBitmap = new WriteableBitmap(
                        colorImageFrame.Width, colorImageFrame.Height, 96, 96, PixelFormats.Bgr32, null);
                    this.ColorImage.Source = this.colorImageWritableBitmap;
                    this.theMaterial.Brush = new ImageBrush(this.colorImageWritableBitmap)
                        {
                            ViewportUnits = BrushMappingMode.Absolute
                        };
                }

                if (this.skeletonData == null)
                {
                    this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                // Copy data received in this event to our buffers.
                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.skeletonData);
                this.colorImageWritableBitmap.WritePixels(
                    new Int32Rect(0, 0, colorImageFrame.Width, colorImageFrame.Height),
                    this.colorImage,
                    colorImageFrame.Width * Bgr32BytesPerPixel,
                    0);

                // Find a skeleton to track.
                // First see if our old one is good.
                // When a skeleton is in PositionOnly tracking state, don't pick a new one
                // as it may become fully tracked again.
                Skeleton skeletonOfInterest =
                    this.skeletonData.FirstOrDefault(
                        skeleton =>
                        skeleton.TrackingId == this.trackingId
                        && skeleton.TrackingState != SkeletonTrackingState.NotTracked);

                if (skeletonOfInterest == null)
                {
                    // Old one wasn't around.  Find any skeleton that is being tracked and use it.
                    skeletonOfInterest =
                        this.skeletonData.FirstOrDefault(
                            skeleton => skeleton.TrackingState == SkeletonTrackingState.Tracked);

                    if (skeletonOfInterest != null)
                    {
                        // This may be a different person so reset the tracker which
                        // could have tuned itself to the previous person.
                        if (this.faceTracker != null)
                        {
                            this.faceTracker.ResetTracking();
                        }

                        this.trackingId = skeletonOfInterest.TrackingId;
                    }
                }

                bool displayFaceMesh = false;

                if (skeletonOfInterest != null && skeletonOfInterest.TrackingState == SkeletonTrackingState.Tracked)
                {
                    if (this.faceTracker == null)
                    {
                        try
                        {
                            this.faceTracker = new FaceTracker(this.Kinect);
                        }
                        catch (InvalidOperationException)
                        {
                            // During some shutdown scenarios the FaceTracker
                            // is unable to be instantiated.  Catch that exception
                            // and don't track a face.
                            Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                            this.faceTracker = null;
                        }
                    }

                    if (this.faceTracker != null)
                    {
                        FaceTrackFrame faceTrackFrame = this.faceTracker.Track(
                            this.colorImageFormat,
                            this.colorImage,
                            this.depthImageFormat,
                            this.depthImage,
                            skeletonOfInterest);

                        if (faceTrackFrame.TrackSuccessful && status.Text.ToString() == "STATUS:MONITORING")
                        {
                            this.UpdateMesh(faceTrackFrame);

                            // Only display the face mesh if there was a successful track.
                            displayFaceMesh = true;
                        }
                    }
                }
                else
                {
                    this.trackingId = -1;
                }

                this.viewport3d.Visibility = displayFaceMesh ? Visibility.Visible : Visibility.Hidden;
            }
            finally
            {
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }

            try
            {
                    kinectRegion.KinectSensor = Kinect;
            }
            catch (Exception)
            {
                throw;
            }
        }
Beispiel #17
0
        /// <summary>
        /// Handler for the sensor's AllFramesReady event. Copies the color, depth and
        /// skeleton data into reusable buffers, then for every (position-)tracked
        /// skeleton maps the head joint into image space, runs the face tracker to
        /// locate the nose, and updates the head/nose markers and the line of sight.
        /// </summary>
        void nui_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame skeletonFrame = null;

            try
            {
                colorImageFrame = e.OpenColorImageFrame();
                depthImageFrame = e.OpenDepthImageFrame();
                skeletonFrame = e.OpenSkeletonFrame();

                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                // Reset the cached buffers whenever a stream format changes.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.depthImage = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.colorImage = null;
                    this.colorImageFormat = colorImageFrame.Format;
                }

                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }

                if (this.skeletonData == null || this.skeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.skeletonData);

                // BUGFIX: this processing used to run *after* the finally block had
                // already disposed the frames, so depthImageFrame.MapFromSkeletonPoint
                // below was invoked on a disposed object. It now runs while the
                // frames are still open.
                foreach (Skeleton skeleton in skeletonData)
                {
                    if (skeleton.TrackingState == SkeletonTrackingState.Tracked || skeleton.TrackingState == SkeletonTrackingState.PositionOnly)
                    {
                        // Read the head joint and project it into the display image.
                        Joint joint = skeleton.Joints[JointType.Head];

                        DepthImagePoint depthPoint;
                        depthPoint = depthImageFrame.MapFromSkeletonPoint(joint.Position);

                        System.Windows.Point point = new System.Windows.Point((int)(image1.ActualWidth  * depthPoint.X
                                                           / depthImageFrame.Width),
                                                (int)(image1.ActualHeight * depthPoint.Y
                                                           / depthImageFrame.Height));

                        textBlock1.Text = string.Format("X:{0:0.00} Y:{1:0.00} Z:{2:0.00}", point.X, point.Y, joint.Position.Z);

                        // Remember the previous head position and apply a 10 px
                        // dead zone per axis to suppress jitter.
                        m_prevHeadX = m_headX;
                        m_prevHeadY = m_headY;
                        m_headX = point.X;
                        m_headY = point.Y;

                        if (Math.Abs(m_prevHeadX - point.X) < 10)
                        {
                            m_headX = m_prevHeadX;
                        }

                        if (Math.Abs(m_prevHeadY - point.Y) < 10)
                        {
                            m_headY = m_prevHeadY;
                        }

                        Canvas.SetLeft(ellipse1, point.X - ellipse1.Width / 2);
                        Canvas.SetTop(ellipse1, point.Y - ellipse1.Height / 2);

                        // Lazily create the face tracker.
                        if (this.faceTracker == null)
                        {
                            try
                            {
                                this.faceTracker = new FaceTracker(nui1);
                            }
                            catch (InvalidOperationException)
                            {
                                // During some shutdown scenarios the FaceTracker
                                // is unable to be instantiated.  Catch that exception
                                // and don't track a face.
                                this.faceTracker = null;
                            }
                        }

                        if (this.faceTracker != null)
                        {
                            FaceTrackFrame frame = this.faceTracker.Track(
                                colorImageFormat, colorImage, depthImageFormat, depthImage, skeleton);

                            if (frame.TrackSuccessful)
                            {
                                facePoints = frame.GetProjected3DShape();

                                // Projected feature point 107 is used as the nose tip.
                                textBlock2.Text = string.Format("noseX:{0:0.00} noseY:{1:0.00} ", facePoints[107].X, facePoints[107].Y);

                                m_noseX = facePoints[107].X;
                                m_noseY = facePoints[107].Y;

                                Canvas.SetLeft(ellipse2, facePoints[107].X - ellipse2.Width / 2);
                                Canvas.SetTop(ellipse2, facePoints[107].Y - ellipse2.Width / 2);
                            }
                        }

                        // Update the head->nose line of sight and determine which
                        // region the user is looking at.
                        lineOfSight.X1 = m_headX;
                        lineOfSight.Y1 = m_headY;
                        lineOfSight.X2 = m_noseX;
                        lineOfSight.Y2 = m_noseY;

                        Canvas.SetLeft(m_sightRect, m_headX - m_sightRect.Width / 2);
                        Canvas.SetTop(m_sightRect, m_headY);

                        CheckWhichSight(depthImageFrame, m_noseX, m_noseY);
                    }
                }
            }
            finally
            {
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
        /// <summary>
        /// Opens the kinect.
        /// </summary>
        /// <param name="newSensor">The new sensor.</param>
        private void OpenKinect(KinectSensor newSensor)
        {
            kinectSensor = newSensor;

            // Color stream with the default format.
            kinectSensor.ColorStream.Enable();

            // Depth stream: near mode at the lowest resolution.
            kinectSensor.DepthStream.Range = DepthRange.Near;
            kinectSensor.DepthStream.Enable(DepthImageFormat.Resolution80x60Fps30);

            // Skeleton stream: seated mode, near-range tracking, smoothed joints.
            kinectSensor.SkeletonStream.EnableTrackingInNearRange = true;
            kinectSensor.SkeletonStream.TrackingMode = SkeletonTrackingMode.Seated;
            kinectSensor.SkeletonStream.Enable(new TransformSmoothParameters()
            {
                Smoothing = 0.5f,
                Correction = 0.5f,
                Prediction = 0.5f,
                JitterRadius = 0.05f,
                MaxDeviationRadius = 0.05f
            });

            // All sensor data arrives through a single AllFramesReady event.
            kinectSensor.AllFramesReady += kinectSensor_AllFramesReady;

            // Allocate the buffers the frame handler copies into.
            colorPixelData = new byte[kinectSensor.ColorStream.FramePixelDataLength];
            depthPixelData = new short[kinectSensor.DepthStream.FramePixelDataLength];
            skeletonData = new Skeleton[6];

            kinectSensor.Start();

            // The FaceTracker needs an initialized, started sensor.
            faceTracker = new FaceTracker(kinectSensor);
        }
Beispiel #19
0
        //! @copydoc FaceTracker::Track((Skeleton[],ColorImageFrame,DepthImageFrame,int)
        /// <summary>
        /// Runs one face-tracking pass over all tracked skeletons and updates the
        /// internal face-tracking state with whether the nearest user and the other
        /// user appear to be looking at the sensor, judged by the depth difference
        /// between the two upper eyelids.
        /// </summary>
        public override void Track(Skeleton[] skeletons, ColorImageFrame colorFrame,
                                   DepthImageFrame depthFrame, int nearestUserId)
        {
            if (faceTracker == null)
            {
                try
                {
                    faceTracker = new Microsoft.Kinect.Toolkit.FaceTracking.FaceTracker(sensor);
                }
                catch (InvalidOperationException)
                {
                    // FaceTracker construction can fail (e.g. during shutdown);
                    // report a detection failure and give up for this frame.
                    this.faceTracker = null;
                    UpdateFaceTrackingStatusInternally(FaceTracker.FaceTrackingState.UnableToDetectFaces);
                    return;
                }
            }

            if (colors == null)
            {
                colors = new byte[sensor.ColorStream.FramePixelDataLength];
            }

            if (colorFrame == null)
            {
                UpdateFaceTrackingStatusInternally(FaceTracker.FaceTrackingState.UnableToDetectFaces);
                return;
            }

            colorFrame.CopyPixelDataTo(colors);

            if (depths == null)
            {
                depths = new short[sensor.DepthStream.FramePixelDataLength];
            }

            if (depthFrame == null)
            {
                UpdateFaceTrackingStatusInternally(FaceTracker.FaceTrackingState.UnableToDetectFaces);
                return;
            }

            depthFrame.CopyPixelDataTo(depths);

            bool? nearUserLooking = null;
            bool? farUserLooking = null;
            int trackedUserCount = 0;

            foreach (Skeleton skeleton in skeletons)
            {
                if (skeleton.TrackingState != SkeletonTrackingState.Tracked)
                {
                    continue;
                }

                trackedUserCount++;

                var faceFrame = faceTracker.Track(sensor.ColorStream.Format, colors, sensor.DepthStream.Format, depths, skeleton);

                // null = could not decide; true/false = looking / not looking.
                bool? isLookingToSensor = null;

                if (faceFrame != null)
                {
                    var shape = faceFrame.Get3DShape();

                    var leftEyelidZ = shape[FeaturePoint.AboveMidUpperLeftEyelid].Z;
                    var rightEyelidZ = shape[FeaturePoint.AboveMidUpperRightEyelid].Z;
                    var eyelidDepthGap = Math.Abs(leftEyelidZ - rightEyelidZ);

                    // A gap of exactly zero almost always indicates a tracking
                    // error, so it is reported as "unknown" rather than "looking".
                    if (eyelidDepthGap != 0.0)
                    {
                        isLookingToSensor = eyelidDepthGap <= epsilon;
                    }
                }

                if (skeleton.TrackingId == nearestUserId)
                {
                    nearUserLooking = isLookingToSensor;
                }
                else
                {
                    farUserLooking = isLookingToSensor;
                }
            }

            UpdateFaceTrackingStatusInternally(getFaceTrackState(trackedUserCount, nearUserLooking, farUserLooking));
        }
            /// <summary>
            /// Runs one face-tracking pass for this skeleton and, on the first
            /// successful track, attempts to recognize the face and tag the person.
            /// </summary>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                // NOTE: unlike the sibling trackers, this one deliberately does NOT
                // bail out on an untracked skeleton (the ChooseSkeletons re-track
                // workaround was left disabled by the original author).

                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker
                        // is unable to be instantiated.  Catch that exception
                        // and don't track a face.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker == null)
                {
                    return;
                }

                FaceTrackFrame faceFrame = this.faceTracker.Track(
                    colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                this.lastFaceTrackSucceeded = faceFrame.TrackSuccessful;
                if (!this.lastFaceTrackSucceeded)
                {
                    return;
                }

                if (faceTriangles == null)
                {
                    // The triangle topology never changes; fetch it only once.
                    faceTriangles = faceFrame.GetTriangles();
                }

                if (faceTag == null)
                {
                    // Run face recognition against the latest color bitmap.
                    faceTag = new FaceRecognizer().getFaceTag(this.colorImageBmp);

                    if (faceTag != null)
                    {
                        Global.StatusBarText.Text = "Found " + faceTag + "!";
                        // Indexer assignment adds or updates in one step.
                        Global.trackedPeople[skeletonOfInterest] = faceTag;
                    }
                }

                this.facePoints = faceFrame.GetProjected3DShape();
                this.faceRect = faceFrame.FaceRect;
            }
 /// <summary>
 /// Lazily creates the FaceTracker for the given sensor if one does not exist yet.
 /// </summary>
 protected void VerifyFaceTracker(KinectSensor kinectSensor)
 {
     if (this.faceTracker != null)
     {
         return;
     }

     try
     {
         this.faceTracker = new FaceTracker(kinectSensor);
     }
     catch (InvalidOperationException)
     {
         // During some shutdown scenarios the FaceTracker
         // is unable to be instantiated.  Catch that exception
         // and don't track a face.
         Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
         this.faceTracker = null;
     }
 }
 /// <summary>
 /// Disposes the current FaceTracker, if any, and clears the reference.
 /// </summary>
 private void DestroyFaceTracker()
 {
     if (this.faceTracker == null)
     {
         return;
     }

     this.faceTracker.Dispose();
     this.faceTracker = null;
 }
            /// <summary>
            /// Updates the face tracking information for this skeleton and builds
            /// the three status strings (head rotation and animation-unit
            /// coefficients) that are sent out later.
            /// </summary>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                // An untracked skeleton carries no usable face data.
                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    return;
                }

                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker
                        // is unable to be instantiated.  Catch that exception
                        // and don't track a face.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker == null)
                {
                    return;
                }

                FaceTrackFrame faceFrame = this.faceTracker.Track(
                    colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                this.lastFaceTrackSucceeded = faceFrame.TrackSuccessful;
                if (!this.lastFaceTrackSucceeded)
                {
                    return;
                }

                if (faceTriangles == null)
                {
                    // The triangle topology never changes; fetch it only once.
                    faceTriangles = faceFrame.GetTriangles();
                }

                this.facePoints = faceFrame.GetProjected3DShape();

                Vector3DF headRotation = faceFrame.Rotation;
                var animationUnits = faceFrame.GetAnimationUnitCoefficients();

                var jawLower = animationUnits[AnimationUnit.JawLower];
                var browLower = animationUnits[AnimationUnit.BrowLower];
                var browRaiser = animationUnits[AnimationUnit.BrowRaiser];
                var lipCornerDepressor = animationUnits[AnimationUnit.LipCornerDepressor];
                var lipRaiser = animationUnits[AnimationUnit.LipRaiser];
                var lipStretcher = animationUnits[AnimationUnit.LipStretcher];

                // Pitch/yaw/roll are X/Y/Z of the head rotation, in that order.
                dataToBeSent1 = "P: " + ((float)headRotation.X).ToString() + " Y: " + ((float)headRotation.Y).ToString() + " R: " + ((float)headRotation.Z).ToString();
                dataToBeSent2 = "JL: " + ((float)jawLower).ToString() + " BL: " + ((float)browLower).ToString() + " BU: " + ((float)browRaiser).ToString();
                dataToBeSent3 = "lcd: " + ((float)lipCornerDepressor).ToString() + " LR: " + ((float)lipRaiser).ToString() + " LS: " + ((float)lipStretcher).ToString();
            }
Beispiel #24
0
        /// <summary>
        /// Handler for the runtime's AllFramesReady event: copies the frame data into
        /// reusable buffers, feeds every (position-)tracked skeleton to its own
        /// SkeletonFaceTracker, and prunes trackers whose skeletons disappeared.
        /// </summary>
        void KinectFaceNode_AllFrameReady(object sender, AllFramesReadyEventArgs e)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame skeletonFrame = null;

            try
            {
                colorImageFrame = e.OpenColorImageFrame();
                depthImageFrame = e.OpenDepthImageFrame();
                skeletonFrame = e.OpenSkeletonFrame();

                // BUGFIX: the original returned here without disposing any frames
                // that had already been opened (and leaked all three on any
                // exception below); disposal is now guaranteed by the finally block.
                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                if (first)
                {
                    first = false;
                    this.olddepth = depthImageFrame.Format;
                }
                else if (this.olddepth != depthImageFrame.Format)
                {
                    // The depth format changed: reset all per-format state.
                    if (this.depthImage != null) { this.depthImage = null; }
                    if (this.face != null) { this.face.Dispose(); this.face = null; }
                    this.trackedSkeletons.Clear();
                    this.olddepth = depthImageFrame.Format;
                }

                // (Re-)create the buffers sized for the current frames.
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }

                if (this.skeletonData == null || this.skeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                if (face == null)
                {
                    face = new FaceTracker(this.runtime.Runtime);
                }

                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.skeletonData);

                foreach (Skeleton skeleton in this.skeletonData)
                {
                    if (skeleton.TrackingState == SkeletonTrackingState.Tracked
                        || skeleton.TrackingState == SkeletonTrackingState.PositionOnly)
                    {
                        // We want keep a record of any skeleton, tracked or untracked.
                        if (!this.trackedSkeletons.ContainsKey(skeleton.TrackingId))
                        {
                            this.trackedSkeletons.Add(skeleton.TrackingId, new SkeletonFaceTracker());
                        }

                        // Give each tracker the updated frame.
                        SkeletonFaceTracker skeletonFaceTracker;
                        if (this.trackedSkeletons.TryGetValue(skeleton.TrackingId, out skeletonFaceTracker))
                        {
                            skeletonFaceTracker.OnFrameReady(this.runtime.Runtime, colorImageFrame.Format, colorImage, depthImageFrame.Format, depthImage, skeleton);
                            skeletonFaceTracker.LastTrackedFrame = skeletonFrame.FrameNumber;
                        }
                    }
                }

                this.RemoveOldTrackers(skeletonFrame.FrameNumber);

                this.FInvalidate = true;
            }
            finally
            {
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
 /// <summary>
 /// Releases the FaceTracker owned by this instance and clears the reference.
 /// </summary>
 public void Dispose()
 {
     if (this.faceTracker == null)
     {
         return;
     }

     this.faceTracker.Dispose();
     this.faceTracker = null;
 }
            /// <summary>
            /// Updates the face tracking information for this skeleton, capturing the
            /// projected shape points, the face rectangle, and the head pose; clears
            /// the pose data again when the track fails.
            /// </summary>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                // Nothing to do with an untracked skeleton.
                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    return;
                }

                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker
                        // is unable to be instantiated.  Catch that exception
                        // and don't track a face.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker == null)
                {
                    return;
                }

                FaceTrackFrame faceFrame = this.faceTracker.Track(
                    colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                this.lastFaceTrackSucceeded = faceFrame.TrackSuccessful;
                if (!this.lastFaceTrackSucceeded)
                {
                    // Drop pose data left over from an earlier successful track.
                    this.rotationXYZ = null;
                    this.faceCoordinates = null;
                    this.translationXYZ = null;
                    return;
                }

                if (faceTriangles == null)
                {
                    // The triangle topology never changes; fetch it only once.
                    faceTriangles = faceFrame.GetTriangles();
                }

                this.facePoints = faceFrame.GetProjected3DShape();

                // Face rectangle packed as { left, top, width, height }.
                var rect = faceFrame.FaceRect;
                this.faceCoordinates = new int[] { rect.Left, rect.Top, rect.Right - rect.Left, rect.Bottom - rect.Top };
                this.rotationXYZ = new float[] { faceFrame.Rotation.X, faceFrame.Rotation.Y, faceFrame.Rotation.Z };
                this.translationXYZ = new float[] { faceFrame.Translation.X, faceFrame.Translation.Y, faceFrame.Translation.Z };
            }
            /// <summary>
            /// Updates the face tracking information for this skeleton and, on a
            /// successful track, serializes the face data into a byte buffer and
            /// sends it as a UDP datagram to the configured endpoint.
            /// </summary>
            /// <param name="kinectSensor">Sensor used to lazily create the face tracker.</param>
            /// <param name="colorImageFormat">Format describing <paramref name="colorImage"/>.</param>
            /// <param name="colorImage">Raw color frame pixels.</param>
            /// <param name="depthImageFormat">Format describing <paramref name="depthImage"/>.</param>
            /// <param name="depthImage">Raw depth frame pixels.</param>
            /// <param name="skeletonOfInterest">Skeleton whose face should be tracked.</param>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                // An untracked skeleton carries no usable face data.
                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    return;
                }

                // Lazily create the tracker.  During some shutdown scenarios the
                // FaceTracker cannot be instantiated; in that case we log and
                // simply skip face tracking.
                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker == null)
                {
                    return;
                }

                FaceTrackFrame trackFrame = this.faceTracker.Track(
                    colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                this.lastFaceTrackSucceeded = trackFrame.TrackSuccessful;
                if (!this.lastFaceTrackSucceeded)
                {
                    return;
                }

                // The triangle topology never changes, so fetch it (and size the
                // outgoing byte buffer) only once, on the first successful track.
                if (faceTriangles == null)
                {
                    faceTriangles = trackFrame.GetTriangles();
                    initByteBuffer();
                }

                this.projectedShapePoints = trackFrame.GetProjected3DShape();
                this.shapePoints = trackFrame.Get3DShape();

                byte[] payload = WriteFaceDataToBuffer();

                try
                {
                    // Best-effort, fire-and-forget UDP send of the serialized face data.
                    sending_socket.SendTo(payload, sending_end_point);
                }
                catch (Exception ex)
                {
                    System.Diagnostics.Debug.WriteLine("Exception {0}", ex.Message);
                }
            }
Example #28
0
        /// <summary>
        /// Handles sensor hot-swap notifications from the KinectSensorChooser:
        /// tears down the old sensor's streams and configures the new one for
        /// seated, near-range face tracking.
        /// </summary>
        /// <param name="sender">The sensor chooser raising the event.</param>
        /// <param name="kinectChangedEventArgs">Carries the old and new sensors (either may be null).</param>
        private void SensorChooserOnKinectChanged(object sender, KinectChangedEventArgs kinectChangedEventArgs)
        {
            KinectSensor oldSensor = kinectChangedEventArgs.OldSensor;
            KinectSensor newSensor = kinectChangedEventArgs.NewSensor;

            if (oldSensor != null)
            {
                // Unhook and restore the old sensor to its default configuration.
                oldSensor.AllFramesReady -= KinectSensorOnAllFramesReady;
                oldSensor.ColorStream.Disable();
                oldSensor.DepthStream.Disable();
                oldSensor.DepthStream.Range = DepthRange.Default;
                oldSensor.SkeletonStream.Disable();
                oldSensor.SkeletonStream.EnableTrackingInNearRange = false;
                oldSensor.SkeletonStream.TrackingMode = SkeletonTrackingMode.Default;
            }

            if (newSensor != null)
            {
                try
                {
                    newSensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
                    newSensor.DepthStream.Enable(DepthImageFormat.Resolution320x240Fps30);
                    try
                    {
                        // This will throw on non Kinect For Windows devices.
                        newSensor.DepthStream.Range = DepthRange.Near;
                        newSensor.SkeletonStream.EnableTrackingInNearRange = true;
                    }
                    catch (InvalidOperationException)
                    {
                        newSensor.DepthStream.Range = DepthRange.Default;
                        newSensor.SkeletonStream.EnableTrackingInNearRange = false;
                    }

                    newSensor.SkeletonStream.TrackingMode = SkeletonTrackingMode.Seated;
                    newSensor.SkeletonStream.Enable();

                    // Use newSensor consistently (the original mixed in
                    // sensorChooser.Kinect) so the buffers and the face tracker
                    // are bound to the sensor we just configured, even if the
                    // chooser's Kinect property lags behind this notification.
                    colorPixelData = new byte[newSensor.ColorStream.FramePixelDataLength];
                    depthPixelData = new short[newSensor.DepthStream.FramePixelDataLength];
                    skeletonData = new Skeleton[6]; // Kinect reports at most 6 skeletons per frame

                    faceTracker = new FaceTracker(newSensor);

                    newSensor.AllFramesReady += KinectSensorOnAllFramesReady;
                }
                catch (InvalidOperationException)
                {
                    // This exception can be thrown when we are trying to
                    // enable streams on a device that has gone away.  This
                    // can occur, say, in app shutdown scenarios when the sensor
                    // goes away between the time it changed status and the
                    // time we get the sensor changed notification.
                    //
                    // Behavior here is to just eat the exception and assume
                    // another notification will come along if a sensor
                    // comes back.
                }
            }
        }
            /// <summary>
            /// Updates the face tracking information for this skeleton, maps the
            /// head's Y (yaw) rotation to one of six command letters ('a'..'f')
            /// and writes that letter to the serial port.
            /// </summary>
            /// <param name="kinectSensor">Sensor used to lazily create the face tracker.</param>
            /// <param name="colorImageFormat">Format describing <paramref name="colorImage"/>.</param>
            /// <param name="colorImage">Raw color frame pixels.</param>
            /// <param name="depthImageFormat">Format describing <paramref name="depthImage"/>.</param>
            /// <param name="depthImage">Raw depth frame pixels.</param>
            /// <param name="skeletonOfInterest">Skeleton whose face should be tracked.</param>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    // nothing to do with an untracked skeleton.
                    return;
                }

                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker
                        // is unable to be instantiated.  Catch that exception
                        // and don't track a face.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker != null)
                {
                    FaceTrackFrame frame = this.faceTracker.Track(
                        colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                    this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                    if (this.lastFaceTrackSucceeded)
                    {
                        if (faceTriangles == null)
                        {
                            // only need to get this once.  It doesn't change.
                            faceTriangles = frame.GetTriangles();
                        }

                        this.facePoints = frame.GetProjected3DShape();
                        rotation = frame.Rotation;
                        float yRotation = frame.Rotation.Y;
                        Debug.WriteLine(yRotation);

                        // One shared send path replaces six copy-pasted branches
                        // that differed only in the rotation range and the letter.
                        SendSerialCommand(CommandForYRotation(yRotation));
                    }
                }
            }

            /// <summary>
            /// Maps a head yaw angle (degrees) to its serial command letter.
            /// Bands: (-inf,-25]="a", (-25,-10)="b", [-10,10)="c",
            /// [10,20)="d", [20,30)="e", [30,inf)="f".
            /// </summary>
            private static string CommandForYRotation(float yRotation)
            {
                if (yRotation <= -25)
                {
                    return "a";
                }

                if (yRotation < -10)
                {
                    return "b";
                }

                if (yRotation < 10)
                {
                    return "c";
                }

                if (yRotation < 20)
                {
                    return "d";
                }

                if (yRotation < 30)
                {
                    return "e";
                }

                return "f";
            }

            /// <summary>
            /// Opens the serial port, writes the given command and closes the
            /// port again.  Skipped when the port is already open (matching the
            /// original per-branch behavior); any failure shows a message box.
            /// </summary>
            private void SendSerialCommand(string command)
            {
                Debug.WriteLine(command);
                if (!serialPort1.IsOpen)
                {
                    try
                    {
                        serialPort1.Open();
                        serialPort1.Write(command);
                        serialPort1.Close();
                    }
                    catch
                    {
                        MessageBox.Show("There was an error. Please make sure that the correct port was selected, and the device, plugged in.");
                    }
                }
            }
            /// <summary>
            /// Updates the face tracking information for this skeleton, classifies
            /// the facial expression from animation unit coefficients and appends
            /// the classification to C:\Users\Public\data.txt.
            /// </summary>
            /// <param name="kinectSensor">Sensor used to lazily create the face tracker.</param>
            /// <param name="colorImageFormat">Format describing <paramref name="colorImage"/>.</param>
            /// <param name="colorImage">Raw color frame pixels.</param>
            /// <param name="depthImageFormat">Format describing <paramref name="depthImage"/>.</param>
            /// <param name="depthImage">Raw depth frame pixels.</param>
            /// <param name="skeletonOfInterest">Skeleton whose face should be tracked.</param>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    // nothing to do with an untracked skeleton.
                    return;
                }

                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker
                        // is unable to be instantiated.  Catch that exception
                        // and don't track a face.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker != null)
                {
                    FaceTrackFrame frame = this.faceTracker.Track(
                        colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                    this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                    if (this.lastFaceTrackSucceeded)
                    {
                        if (faceTriangles == null)
                        {
                            // only need to get this once.  It doesn't change.
                            faceTriangles = frame.GetTriangles();

                            // Write the log header exactly once, on the first
                            // successful track.  The original recreated (and
                            // therefore truncated) the file on EVERY frame,
                            // wiping all previously appended expression data.
                            using (System.IO.StreamWriter file = new System.IO.StreamWriter
                                (@"C:\Users\Public\data.txt"))
                            {
                                file.WriteLine("FaceTrack Data, started recording at " + DateTime.Now.ToString("HH:mm:ss tt"));
                            }
                        }

                        //getting the Animation Unit Coefficients
                        this.AUs = frame.GetAnimationUnitCoefficients();
                        var jawLowerer = AUs[AnimationUnit.JawLower];
                        var browLower = AUs[AnimationUnit.BrowLower];
                        var browRaiser = AUs[AnimationUnit.BrowRaiser];
                        var lipDepressor = AUs[AnimationUnit.LipCornerDepressor];
                        var lipRaiser = AUs[AnimationUnit.LipRaiser];
                        var lipStretcher = AUs[AnimationUnit.LipStretcher];

                        //here is the algorithm to test different facial features

                        //BrowLower is messed up if you wear glasses, works if you don't wear 'em

                        // surprised -- the original test (jawLowerer < 0.25 ||
                        // jawLowerer > 0.25) was a tautology (true for every
                        // value except exactly 0.25); mirror the jaw test used
                        // by the "angry" branch instead.
                        if ((jawLowerer > 0.25 || jawLowerer < -0.25) && browLower < 0)
                        {
                            System.Diagnostics.Debug.WriteLine("surprised");
                            AppendExpressionLog(
                                DateTime.Now.ToString("HH:mm:ss tt") + ": surprised",
                                "JawLowerer: " + jawLowerer,
                                "BrowLowerer: " + browLower);
                        }

                        //smiling
                        if (lipStretcher > 0.4 || lipDepressor < 0)
                        {
                            System.Diagnostics.Debug.WriteLine("Smiling");
                            AppendExpressionLog(
                                DateTime.Now.ToString("HH:mm:ss tt") + ": smiling",
                                "LipStretcher: " + lipStretcher);
                        }

                        //kissing face
                        if (lipStretcher < -0.75)
                        {
                            System.Diagnostics.Debug.WriteLine("kissing face");
                            AppendExpressionLog(
                                DateTime.Now.ToString("HH:mm:ss tt") + ": kissing face",
                                "LipStretcher: " + lipStretcher);
                        }

                        //sad
                        if (browRaiser < 0 && lipDepressor > 0)
                        {
                            System.Diagnostics.Debug.WriteLine("sad");
                            AppendExpressionLog(
                                DateTime.Now.ToString("HH:mm:ss tt") + ": sad",
                                "LipCornerDepressor: " + lipDepressor,
                                "OuterBrowRaiser: " + browRaiser);
                        }

                        //angry
                        if ((browLower > 0 && (jawLowerer > 0.25 || jawLowerer < -0.25)) ||
                            (browLower > 0 && lipDepressor > 0))
                        {
                            System.Diagnostics.Debug.WriteLine("angry");
                            AppendExpressionLog(
                                DateTime.Now.ToString("HH:mm:ss tt") + ": angry",
                                "LipCornerDepressor: " + lipDepressor,
                                "BrowLowerer: " + browLower,
                                "JawLowerer: " + jawLowerer);
                        }

                        this.facePoints = frame.GetProjected3DShape();
                    }
                }
            }

            /// <summary>
            /// Appends the given lines to the expression log file.  Replaces the
            /// five copy-pasted StreamWriter stanzas of the original.
            /// </summary>
            /// <param name="lines">Lines to append, in order.</param>
            private static void AppendExpressionLog(params string[] lines)
            {
                using (System.IO.StreamWriter file = new System.IO.StreamWriter
                    (@"C:\Users\Public\data.txt", true))
                {
                    foreach (string line in lines)
                    {
                        file.WriteLine(line);
                    }
                }
            }
Example #31
0
 /// <summary>
 /// Creates a wrapper around a FaceTracker bound to the given sensor,
 /// initially idle and not yet associated with any skeleton.
 /// </summary>
 /// <param name="kinectSensor">Sensor the face tracker reads frames from.</param>
 public WagFaceTracker(KinectSensor kinectSensor)
 {
     // No skeleton assigned yet; -1 marks "unassigned".
     SkeletonId = -1;
     // Not in use until a skeleton is bound to this tracker.
     IsUsing = false;
     FaceTracker = new FaceTracker(kinectSensor);
 }