Code Example #1
    void Start()
    {
        _Sensor = KinectSensor.GetDefault();

        if (_Sensor != null)
        {
            _Reader = _Sensor.BodyFrameSource.OpenReader();

            if (!_Sensor.IsOpen)
            {
                _Sensor.Open();
            }

            this.faceFrameSources = new FaceFrameSource[_Sensor.BodyFrameSource.BodyCount];
            this.faceFrameReaders = new FaceFrameReader[_Sensor.BodyFrameSource.BodyCount];

            // specify the required face frame results
            FaceFrameFeatures faceFrameFeatures =
                FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.FaceEngagement
                | FaceFrameFeatures.LookingAway;

            for (int i = 0; i < _Sensor.BodyFrameSource.BodyCount; i++)
            {
                // create the face frame source with the required face frame features and an initial tracking Id of 0
                faceFrameSources[i] = FaceFrameSource.Create(_Sensor, 0, faceFrameFeatures);

                // open the corresponding reader
                faceFrameReaders[i] = faceFrameSources[i].OpenReader();
            }
        }
    }
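A FaceFrameSource created with an initial tracking Id of 0 produces no results until it is bound to a tracked body. A minimal sketch of that companion step, assuming the fields above and the polling style typical in Unity (this Update() is not part of the original snippet):

    void Update()
    {
        if (_Reader == null) return;

        using (var frame = _Reader.AcquireLatestFrame())
        {
            if (frame == null) return;

            // refresh body data (a per-frame buffer; a cached field would avoid the allocation)
            var bodies = new Body[_Sensor.BodyFrameSource.BodyCount];
            frame.GetAndRefreshBodyData(bodies);

            for (int i = 0; i < bodies.Length; i++)
            {
                // bind each face source to the body tracked at the same index
                if (bodies[i].IsTracked)
                {
                    faceFrameSources[i].TrackingId = bodies[i].TrackingId;
                }
            }
        }
    }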
Code Example #2
        /// <summary>
        /// Extended CTOR
        /// </summary>
        /// <param name="bodyId">Id of the tracked body</param>
        /// <param name="faceFeatures">Set of requested face features to track</param>
        /// <param name="kinect">Kinect sensor that is tracking</param>
        public FaceTracker(ulong bodyId, FaceFrameFeatures faceFeatures, KinectSensor kinect)
        {
            // Pin-point start of tracking
            _startTracking = DateTime.Now;

            // Save variables
            _bodyId       = bodyId;
            _faceFeatures = faceFeatures;
            // _kinectId = kinect.UniqueKinectId --> NotImplementedYet

            // Create a new source with body TrackingId
            _faceSource = new FaceFrameSource(kinect, bodyId, faceFeatures);

            // Create new reader
            _faceReader = _faceSource.OpenReader();

            Console.WriteLine(String.Format("Tracker for body #{0} started.", _bodyId));

            // Initialize FaceFeatureTrackers
            InitialiseFeatureTrackers();

            // Wire events
            _faceReader.FrameArrived   += OnFaceFrameArrived;
            _faceSource.TrackingIdLost += OnTrackingLost;
        }
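A hypothetical call site for this constructor: start a tracker the first time a body becomes tracked. The _trackers, _faceFeatures, and _kinect names are assumptions, not taken from the original class.

        private readonly Dictionary<ulong, FaceTracker> _trackers = new Dictionary<ulong, FaceTracker>();

        private void OnBodyFrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            using (BodyFrame frame = e.FrameReference.AcquireFrame())
            {
                if (frame == null) return;

                Body[] bodies = new Body[frame.BodyCount];
                frame.GetAndRefreshBodyData(bodies);

                foreach (Body body in bodies)
                {
                    // start a FaceTracker once per newly tracked body (assumed policy)
                    if (body.IsTracked && !_trackers.ContainsKey(body.TrackingId))
                    {
                        _trackers[body.TrackingId] = new FaceTracker(body.TrackingId, _faceFeatures, _kinect);
                    }
                }
            }
        }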
Code Example #3
        void InitializeFace()
        {
            FaceFrameFeatures faceFrameFeatures =
                FaceFrameFeatures.BoundingBoxInColorSpace
                | FaceFrameFeatures.PointsInColorSpace
                | FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.FaceEngagement
                | FaceFrameFeatures.Glasses
                | FaceFrameFeatures.Happy
                | FaceFrameFeatures.LeftEyeClosed
                | FaceFrameFeatures.RightEyeClosed
                | FaceFrameFeatures.LookingAway
                | FaceFrameFeatures.MouthMoved
                | FaceFrameFeatures.MouthOpen;

            faceFrameSources = new FaceFrameSource[bodyCount];
            faceFrameReaders = new FaceFrameReader[bodyCount];
            for (int i = 0; i < bodyCount; i++)
            {
                faceFrameSources[i] = new FaceFrameSource(kinectSensor, 0, faceFrameFeatures);
                faceFrameReaders[i] = faceFrameSources[i].OpenReader();
                faceFrameReaders[i].FrameArrived += FaceFrameReader_FrameArrived;
            }
            faceFrameResults = new FaceFrameResult[bodyCount];
            faceBrush        = new List<Brush>()
            {
                Brushes.White,
                Brushes.Orange,
                Brushes.Green,
                Brushes.Red,
                Brushes.LightBlue,
                Brushes.Yellow
            };
        }
Code Example #4
        /// <summary>
        /// Initialize Kinect object
        /// </summary>
        private void InitializeHDFace()
        {
            this.CurrentBuilderStatus = "Ready To Start Capture";

            this.sensor     = KinectSensor.GetDefault();
            this.bodySource = this.sensor.BodyFrameSource;

            this.bodyReader = this.bodySource.OpenReader();
            this.bodyReader.FrameArrived += this.BodyReader_FrameArrived;
            this.bodyCount = this.sensor.BodyFrameSource.BodyCount;

            this.highDefinitionFaceFrameSource = new HighDefinitionFaceFrameSource(this.sensor);
            this.highDefinitionFaceFrameSource.TrackingIdLost += this.HdFaceSource_TrackingIdLost;

            this.highDefinitionFaceFrameReader = this.highDefinitionFaceFrameSource.OpenReader();
            this.highDefinitionFaceFrameReader.FrameArrived += this.HdFaceReader_FrameArrived;

            this.currentFaceModel     = new FaceModel();
            this.currentFaceAlignment = new FaceAlignment();

            this.coordinateMapper = this.sensor.CoordinateMapper;

            FaceFrameFeatures faceFrameFeatures =
                FaceFrameFeatures.BoundingBoxInColorSpace
                | FaceFrameFeatures.PointsInColorSpace
                | FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.FaceEngagement
                | FaceFrameFeatures.Glasses
                | FaceFrameFeatures.Happy
                | FaceFrameFeatures.LeftEyeClosed
                | FaceFrameFeatures.RightEyeClosed
                | FaceFrameFeatures.LookingAway
                | FaceFrameFeatures.MouthMoved
                | FaceFrameFeatures.MouthOpen;

            // create the face frame source with the required face frame features and an initial tracking Id of 0
            this.faceFrameSource = new FaceFrameSource(this.sensor, 0, faceFrameFeatures);

            // open the corresponding reader
            this.faceFrameReader = this.faceFrameSource.OpenReader();

            this.faceFrameResult = null;

            // wire handler for face frame arrival
            if (this.faceFrameReader != null)
            {
                this.faceFrameReader.FrameArrived += this.Reader_FaceFrameArrived;
            }

            this.InitializeMesh();
            this.UpdateMesh();

            this.sensor.Open();
        }
Code Example #5
    void Start()
    {
        sensor     = KinectSensor.GetDefault();
        bodySource = sensor.BodyFrameSource;
        bodyReader = bodySource.OpenReader();
        bodyReader.FrameArrived += BodyReader_FrameArrived;
        FaceFrameFeatures faceFrameFeatures =
            FaceFrameFeatures.BoundingBoxInColorSpace
            | FaceFrameFeatures.PointsInColorSpace
            | FaceFrameFeatures.BoundingBoxInInfraredSpace
            | FaceFrameFeatures.PointsInInfraredSpace
            | FaceFrameFeatures.RotationOrientation
            | FaceFrameFeatures.FaceEngagement
            | FaceFrameFeatures.Glasses
            | FaceFrameFeatures.Happy
            | FaceFrameFeatures.LeftEyeClosed
            | FaceFrameFeatures.RightEyeClosed
            | FaceFrameFeatures.LookingAway
            | FaceFrameFeatures.MouthMoved
            | FaceFrameFeatures.MouthOpen;

        FaceFrameSource = FaceFrameSource.Create(sensor, currentTrackingId, faceFrameFeatures);

        FaceFrameSource.TrackingIdLost += HdFaceSource_TrackingIdLost;

        FaceFrameReader = FaceFrameSource.OpenReader();
        FaceFrameReader.FrameArrived += HdFaceReader_FrameArrived;

        //CurrentFaceModel = FaceModel.Create();
        currentFaceAlignment = FaceAlignment.Create();

        sensor.Open();
    }
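The TrackingIdLost handler wired above is not shown in this example; a plausible implementation (an assumption, reusing the names from Start()) clears the stale id so the body-frame handler can rebind to a new body:

    private void HdFaceSource_TrackingIdLost(object sender, TrackingIdLostEventArgs e)
    {
        // assumed: drop the lost id so BodyReader_FrameArrived can pick a new body
        if (e.TrackingId == currentTrackingId)
        {
            currentTrackingId = 0;
            FaceFrameSource.TrackingId = currentTrackingId;
        }
    }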
Code Example #6
        private void SafeOpenSensor()
        {
            if (sensorStatus == SensorStatus.Closed)
            {
                kinectSensor.Open();

                bodies = new Body[kinectSensor.BodyFrameSource.BodyCount];

                colorFrameReader = kinectSensor.ColorFrameSource.OpenReader();
                colorFrameReader.FrameArrived += colorFrameReader_FrameArrived;

                bodyFrameReader = kinectSensor.BodyFrameSource.OpenReader();
                bodyFrameReader.FrameArrived += bodyFrameReader_FrameArrived;

                FaceFrameFeatures fff = FaceFrameFeatures.BoundingBoxInColorSpace |
                                        FaceFrameFeatures.FaceEngagement |
                                        FaceFrameFeatures.Glasses |
                                        FaceFrameFeatures.Happy |
                                        FaceFrameFeatures.LeftEyeClosed |
                                        FaceFrameFeatures.MouthOpen |
                                        FaceFrameFeatures.PointsInColorSpace |
                                        FaceFrameFeatures.RightEyeClosed;

                faceFrameSource = new FaceFrameSource(kinectSensor, 0, fff);

                faceFrameReader = faceFrameSource.OpenReader();
                faceFrameReader.FrameArrived += faceFrameReader_FrameArrived;

                sensorStatus = SensorStatus.Opened;
            }
        }
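A sketch of the symmetric teardown implied by the SensorStatus guard (the method name is assumed): dispose the readers and the face source before closing the sensor.

        private void SafeCloseSensor()
        {
            if (sensorStatus == SensorStatus.Opened)
            {
                colorFrameReader.Dispose();
                bodyFrameReader.Dispose();
                faceFrameReader.Dispose();
                faceFrameSource.Dispose();

                kinectSensor.Close();
                sensorStatus = SensorStatus.Closed;
            }
        }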
Code Example #7
        void InitializeFace()
        {
            FaceFrameFeatures faceFrameFeatures =
                FaceFrameFeatures.BoundingBoxInColorSpace
                | FaceFrameFeatures.PointsInColorSpace
                | FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.FaceEngagement
                | FaceFrameFeatures.Glasses
                | FaceFrameFeatures.Happy
                | FaceFrameFeatures.LeftEyeClosed
                | FaceFrameFeatures.RightEyeClosed
                | FaceFrameFeatures.LookingAway
                | FaceFrameFeatures.MouthMoved
                | FaceFrameFeatures.MouthOpen;

            faceFrameSources = new FaceFrameSource[bodyCount];
            faceFrameReaders = new FaceFrameReader[bodyCount];
            for (int i = 0; i < bodyCount; i++)
            {
                faceFrameSources[i] = new FaceFrameSource(kinect, 0, faceFrameFeatures);
                faceFrameReaders[i] = faceFrameSources[i].OpenReader();
                faceFrameReaders[i].FrameArrived += faceFrameReader_FrameArrived;
            }
            faceFrameResults = new FaceFrameResult[bodyCount];
        }
Code Example #8
        /// <summary>
        /// Initializes a new instance of the MainWindow class.
        /// </summary>
        public MainWindow()
        {
            // one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            this.bodies = new Body[this.kinectSensor.BodyFrameSource.BodyCount];

            // get the coordinate mapper
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // get the color frame details
            FrameDescription frameDescription = this.kinectSensor.ColorFrameSource.FrameDescription;

            // specify the required face frame results
            FaceFrameFeatures faceFrameFeatures =
                FaceFrameFeatures.BoundingBoxInInfraredSpace
                | FaceFrameFeatures.PointsInInfraredSpace
                | FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.MouthOpen;

            // create a face frame source and open the corresponding reader
            this.faceFrameSource = new FaceFrameSource(this.kinectSensor, 0, faceFrameFeatures);
            this.faceFrameReader = this.faceFrameSource.OpenReader();
            //faceFrameResult = new FaceFrameResult();

            #region Depth
            // open the reader for the depth frames
            this.depthFrameReader = this.kinectSensor.DepthFrameSource.OpenReader();

            // wire handler for frame arrival
            this.depthFrameReader.FrameArrived += this.Reader_FrameArrived;

            // get FrameDescription from DepthFrameSource
            this.depthFrameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

            // allocate space to put the pixels being received and converted
            this.depthPixels = new byte[this.depthFrameDescription.Width * this.depthFrameDescription.Height];

            // create the bitmap to display
            this.depthBitmap = new WriteableBitmap(this.depthFrameDescription.Width, this.depthFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray8, null);
            #endregion

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // wire handler for body frame arrival
            this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // initialize the components (controls) of the window
            this.InitializeComponent();
        }
Code Example #9
        public SkeletonFaceTracking(KinectSensor kinect)
        {
            this.kinectSensor      = kinect;
            this.skeletonPublisher = new NetworkPublisher();
            this.skeletonPublisher.SetConflate();
            this.skeletonPublisher.Bind("33406");

            this.coordinateMapper         = this.kinectSensor.CoordinateMapper;
            this.bodyFrameReader          = this.kinectSensor.BodyFrameSource.OpenReader();
            this.bodyFrameReader.IsPaused = true;

            this.filter = new KinectJointFilter(smoothingParam, smoothingParam, smoothingParam);
            this.filter.Init(smoothingParam, smoothingParam, smoothingParam);

            this.dicoPos      = new Dictionary<JointType, object>(25);
            this.jointPoints  = new Dictionary<JointType, Point>(25);
            this.dicoBodies   = new Dictionary<ulong, Dictionary<JointType, object>>(25);
            this.dicoFaces    = new Dictionary<ulong, Dictionary<String, String>>(11);
            this.dicoFeatures = new Dictionary<string, string>(11);
            this.dicoOr       = new Dictionary<JointType, Vector4>(25);
            this.qChild       = new Quaternion();
            this.qParent      = new Quaternion();

            this.maxBodyCount = this.kinectSensor.BodyFrameSource.BodyCount;
            // specify the required face frame results
            FaceFrameFeatures faceFrameFeatures =
                FaceFrameFeatures.BoundingBoxInColorSpace
                | FaceFrameFeatures.PointsInColorSpace
                | FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.FaceEngagement
                | FaceFrameFeatures.Glasses
                | FaceFrameFeatures.Happy
                | FaceFrameFeatures.LeftEyeClosed
                | FaceFrameFeatures.RightEyeClosed
                | FaceFrameFeatures.LookingAway
                | FaceFrameFeatures.MouthMoved
                | FaceFrameFeatures.MouthOpen;

            // create a face frame source + reader to track each face in the FOV
            this.faceFrameSources = new FaceFrameSource[this.maxBodyCount];
            this.faceFrameReaders = new FaceFrameReader[this.maxBodyCount];
            for (int i = 0; i < this.maxBodyCount; i++)
            {
                // create the face frame source with the required face frame features and an initial tracking Id of 0
                this.faceFrameSources[i] = new FaceFrameSource(this.kinectSensor, 0, faceFrameFeatures);

                // open the corresponding reader
                this.faceFrameReaders[i] = this.faceFrameSources[i].OpenReader();

                // pausing the reader to prevent getting frames before we need them
                this.faceFrameReaders[i].IsPaused = true;
            }

            // allocate storage to store face frame results for each face in the FOV
            this.faceFrameResults = new FaceFrameResult[this.maxBodyCount];
        }
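The readers above stay paused until a body is available; a sketch of the companion logic (the method name is assumed, with bodies refreshed by the body frame reader) that binds each source and unpauses its reader:

        private void UpdateFaceFrameSources(Body[] bodies)
        {
            for (int i = 0; i < this.maxBodyCount; i++)
            {
                bool tracked = bodies[i] != null && bodies[i].IsTracked;
                if (tracked)
                {
                    // bind the face source to the tracked body in the same slot
                    this.faceFrameSources[i].TrackingId = bodies[i].TrackingId;
                }

                // deliver face frames only for slots with a tracked body
                this.faceFrameReaders[i].IsPaused = !tracked;
            }
        }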
Code Example #10
        public void Initialize(DIOManager dioManager)
        {
            this.dioManager     = dioManager;
            this.BodySrcManager = dioManager.bodySrcManager;

            updateFrame = 0;

            // one sensor is currently supported
            kinectSensor = KinectSensor.GetDefault();

            // set the maximum number of bodies that would be tracked by Kinect
            bodyCount = kinectSensor.BodyFrameSource.BodyCount;

            // allocate storage to store body objects
            bodies = new Body[bodyCount];

            if (BodySrcManager == null)
            {
                Debug.Log("BodySrcManager Game Object is not assigned");
            }
            else
            {
                bodyManager = BodySrcManager.GetComponent <BodySourceManager>();
            }

            // specify the required face frame results
            FaceFrameFeatures faceFrameFeatures =
                FaceFrameFeatures.BoundingBoxInColorSpace
                | FaceFrameFeatures.PointsInColorSpace
                | FaceFrameFeatures.BoundingBoxInInfraredSpace
                | FaceFrameFeatures.PointsInInfraredSpace
                | FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.FaceEngagement
                | FaceFrameFeatures.Glasses
                | FaceFrameFeatures.Happy
                | FaceFrameFeatures.LeftEyeClosed
                | FaceFrameFeatures.RightEyeClosed
                | FaceFrameFeatures.LookingAway
                | FaceFrameFeatures.MouthMoved
                | FaceFrameFeatures.MouthOpen;

            // create a face frame source + reader to track each face in the FOV
            faceFrameSources = new FaceFrameSource[bodyCount];
            faceFrameReaders = new FaceFrameReader[bodyCount];
            for (int i = 0; i < bodyCount; i++)
            {
                // create the face frame source with the required face frame features and an initial tracking Id of 0
                faceFrameSources[i] = FaceFrameSource.Create(kinectSensor, 0, faceFrameFeatures);

                // open the corresponding reader
                faceFrameReaders[i] = faceFrameSources[i].OpenReader();
            }
            initialize = true;
        }
Code Example #11
        public void InitializeFaceStuff()
        {
            // get the color frame details
            FrameDescription frameDescription = this.kinectSensor.ColorFrameSource.FrameDescription;

            // set the display specifics
            this.displayWidth  = frameDescription.Width;
            this.displayHeight = frameDescription.Height;
            //   this.displayRect = new Rect(0.0, 0.0, this.displayWidth, this.displayHeight);

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            //// wire handler for body frame arrival
            //this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

            // set the maximum number of bodies that would be tracked by Kinect
            this.bodyCount = this.kinectSensor.BodyFrameSource.BodyCount;

            // allocate storage to store body objects
            this.bodies = new Body[this.bodyCount];

            // specify the required face frame results
            FaceFrameFeatures faceFrameFeatures =
                FaceFrameFeatures.BoundingBoxInColorSpace
                | FaceFrameFeatures.PointsInColorSpace
                | FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.FaceEngagement
                | FaceFrameFeatures.Glasses
                | FaceFrameFeatures.Happy
                | FaceFrameFeatures.LeftEyeClosed
                | FaceFrameFeatures.RightEyeClosed
                | FaceFrameFeatures.LookingAway
                | FaceFrameFeatures.MouthMoved
                | FaceFrameFeatures.MouthOpen;

            // create a face frame source + reader to track each face in the FOV
            this.faceFrameSources = new FaceFrameSource[this.bodyCount];
            this.faceFrameReaders = new FaceFrameReader[this.bodyCount];
            for (int i = 0; i < this.bodyCount; i++)
            {
                // create the face frame source with the required face frame features and an initial tracking Id of 0
                this.faceFrameSources[i] = new FaceFrameSource(this.kinectSensor, 0, faceFrameFeatures);

                // open the corresponding reader
                this.faceFrameReaders[i] = this.faceFrameSources[i].OpenReader();
            }

            // Create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // allocate storage to store face frame results for each face in the FOV
            this.faceFrameResults = new FaceFrameResult[this.bodyCount];
        }
Code Example #12
        public static FaceFrameFeatures RequiredFaceFrameFeatures()
        {
            FaceFrameFeatures faceFrameFeatures =
                FaceFrameFeatures.BoundingBoxInColorSpace
                | FaceFrameFeatures.PointsInColorSpace
                | FaceFrameFeatures.BoundingBoxInInfraredSpace
                | FaceFrameFeatures.PointsInInfraredSpace
                | FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.Glasses
                | FaceFrameFeatures.LookingAway;

            return faceFrameFeatures;
        }
Code Example #13
    /*KalmanFilterSimple1D kalman_X;
     * KalmanFilterSimple1D kalman_Y;
     * KalmanFilterSimple1D kalman_mod;*/

    void Start()
    {
        updateFrame = 0;

        /*kalman_X = new KalmanFilterSimple1D(f: 1, h: 1, q: qq, r: rr);
         * kalman_Y = new KalmanFilterSimple1D(f: 1, h: 1, q: qq, r: rr);
         * kalman_mod = new KalmanFilterSimple1D(f: 1, h: 1, q: qq, r: rr);*/

        sx = new StreamWriter("coords_X.txt");
        kx = new StreamWriter("coords_KX.txt");

        // one sensor is currently supported
        kinectSensor = KinectSensor.GetDefault();

        // set the maximum number of bodies that would be tracked by Kinect
        bodyCount = kinectSensor.BodyFrameSource.BodyCount;

        // allocate storage to store body objects
        bodies = new Body[bodyCount];

        // specify the required face frame results
        FaceFrameFeatures faceFrameFeatures =
            FaceFrameFeatures.BoundingBoxInColorSpace
            | FaceFrameFeatures.PointsInColorSpace
            | FaceFrameFeatures.BoundingBoxInInfraredSpace
            | FaceFrameFeatures.PointsInInfraredSpace
            | FaceFrameFeatures.RotationOrientation
            | FaceFrameFeatures.FaceEngagement
            | FaceFrameFeatures.Glasses
            | FaceFrameFeatures.Happy
            | FaceFrameFeatures.LeftEyeClosed
            | FaceFrameFeatures.RightEyeClosed
            | FaceFrameFeatures.LookingAway
            | FaceFrameFeatures.MouthMoved
            | FaceFrameFeatures.MouthOpen;

        // create a face frame source + reader to track each face in the FOV
        faceFrameSources = new FaceFrameSource[bodyCount];
        faceFrameReaders = new FaceFrameReader[bodyCount];
        for (int i = 0; i < bodyCount; i++)
        {
            // create the face frame source with the required face frame features and an initial tracking Id of 0
            faceFrameSources[i] = FaceFrameSource.Create(kinectSensor, 0, faceFrameFeatures);

            // open the corresponding reader
            faceFrameReaders[i] = faceFrameSources[i].OpenReader();
        }
    }
Code Example #14
        private void InitializeFaceReaders()
        {
            this.FaceFrameResults = new FaceFrameResult[this.Sensor.BodyFrameSource.BodyCount];
            this.faceFrameSources = new FaceFrameSource[this.Sensor.BodyFrameSource.BodyCount];
            this.faceFrameReaders = new FaceFrameReader[this.Sensor.BodyFrameSource.BodyCount];

            FaceFrameFeatures faceFrameFeatures = RequiredFaceFrameFeatures();

            for (int i = this.faceFrameSources.Length - 1; i >= 0; --i)
            {
                this.faceFrameSources[i] = FaceFrameSource.Create(this.Sensor, 0, faceFrameFeatures);
                this.faceFrameReaders[i] = this.faceFrameSources[i].OpenReader();
            }
        }
Code Example #15
        public MainPage()
        {
            // Get the Kinect V2 sensor object.
            this.kinectSensor = KinectSensor.GetDefault();

            // Get the frame description for the color frames.
            FrameDescription frameDescription = this.kinectSensor.ColorFrameSource.FrameDescription;

            // Holds the color frame width and height; here, width 1920 and height 1080.
            this.displayWidth  = frameDescription.Width;
            this.displayHeight = frameDescription.Height;

            // Create the reader object for acquiring Body frames.
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // Register an event handler so that "Reader_BodyFrameArrived" runs
            // whenever the Kinect raises the "FrameArrived" event for a new frame.
            this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

            // Holds the maximum number of bodies the Kinect sensor can track.
            this.bodyCount = this.kinectSensor.BodyFrameSource.BodyCount;

            // Keep the acquired data for each Body frame in an array.
            this.bodies = new Body[this.bodyCount];

            // Specify the required face frame features.
            FaceFrameFeatures faceFrameFeatures =
                FaceFrameFeatures.BoundingBoxInColorSpace
                | FaceFrameFeatures.PointsInColorSpace
                | FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.FaceEngagement
                | FaceFrameFeatures.Glasses
                | FaceFrameFeatures.Happy
                | FaceFrameFeatures.LeftEyeClosed
                | FaceFrameFeatures.RightEyeClosed
                | FaceFrameFeatures.LookingAway
                | FaceFrameFeatures.MouthMoved
                | FaceFrameFeatures.MouthOpen;

            // Set up the source and reader for acquiring Face frame data.
            this.faceFrameSource = new FaceFrameSource(this.kinectSensor, 0, faceFrameFeatures);
            this.faceFrameReader = this.faceFrameSource.OpenReader();

            // Start the Kinect sensor.
            this.kinectSensor.Open();

            // Run the initialization the app needs at startup.
            this.InitializeComponent();
        }
Code Example #16
 /// <summary>
 /// Acquire the latest valid face frames and draw their data to a panel with
 /// WPF shapes and text.
 /// </summary>
 /// <param name="parentPanel">The Panel in which the shapes will be children.
 /// Make sure the parentPanel has its width and height set to what is expected
 /// for the requested features.</param>
 /// <param name="displayFeatures">The FaceFrameFeatures to be rendered, use flags to
 /// display many features. Be aware of ColorSpace vs InfraredSpace when choosing.</param>
 public void DrawLatestFaceResults(Panel parentPanel, FaceFrameFeatures displayFeatures)
 {
     parentPanel.Children.Clear();
     for (int i = 0; i < this.bodyCount; i++)
     {
         if (this.faceFrameReaders[i] != null)
         {
             using (FaceFrame frame = this.faceFrameReaders[i].AcquireLatestFrame())
             {
                 if (frame != null && frame.FaceFrameResult != null)
                 {
                     DrawFaceFeatures(parentPanel, frame.FaceFrameResult, displayFeatures, this.faceColors[i], i);
                 }
             }
         }
     }
 }
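DrawFaceFeatures is not shown here; independently of rendering, the per-face detections can be read off a FaceFrameResult through its FaceProperties dictionary. An illustrative helper (not from the original):

 private static string DescribeFace(FaceFrameResult result)
 {
     var text = new System.Text.StringBuilder();
     foreach (var item in result.FaceProperties)
     {
         // item.Value is a DetectionResult: Yes, No, Maybe, or Unknown
         text.AppendLine(item.Key + ": " + item.Value);
     }

     return text.ToString();
 }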
Code Example #17
        public FaceManager(KinectManager kinectManager, BodyManager bodyManager, FaceFrameFeatures faceFrameFeatures)
        {
            _KinectManager = kinectManager;

            _BodyManager = bodyManager;
            _BodyManager.EngagedBodyUpdated += BodyManager_EngagedBodyUpdated;
            _BodyManager.BodyRemoved        += BodyManager_BodyRemoved;

            _FaceFrameFeatures = faceFrameFeatures;

            _ColorFrameDesc = kinectManager.KinectSensor.ColorFrameSource.CreateFrameDescription(ImageFormat);
            _ColorPixels    = new byte[_ColorFrameDesc.Width * _ColorFrameDesc.Height * _ColorFrameDesc.BytesPerPixel];

            CreateFaceTrackers(_KinectManager.KinectSensor.BodyFrameSource.BodyCount);
        }
Code Example #18
        /// <summary>
        /// Initialize Kinect object
        /// </summary>
        private void InitializeHDFace()
        {
            this.CurrentBuilderStatus = "Ready To Start Capture";

            this.sensor     = KinectSensor.GetDefault();
            this.bodySource = this.sensor.BodyFrameSource;
            this.bodyReader = this.bodySource.OpenReader();
            this.bodyReader.FrameArrived += this.BodyReader_FrameArrived;

            // set the maximum number of bodies that would be tracked by Kinect
            this.bodyCount = this.sensor.BodyFrameSource.BodyCount;

            // allocate storage to store body objects
            this.bodies = new Body[this.bodyCount];

            // specify the required face frame results
            FaceFrameFeatures faceFrameFeatures =
                FaceFrameFeatures.BoundingBoxInColorSpace
                | FaceFrameFeatures.PointsInColorSpace
                | FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.FaceEngagement
                | FaceFrameFeatures.Glasses
                | FaceFrameFeatures.Happy
                | FaceFrameFeatures.LeftEyeClosed
                | FaceFrameFeatures.RightEyeClosed
                | FaceFrameFeatures.LookingAway
                | FaceFrameFeatures.MouthMoved
                | FaceFrameFeatures.MouthOpen;

            // create a face frame source and open the corresponding reader
            this.faceFrameSource = new FaceFrameSource(this.sensor, 0, faceFrameFeatures);
            this.faceFrameReader = this.faceFrameSource.OpenReader();
            this.faceFrameReader.FrameArrived += this.Reader_FaceFrameArrived;

            this.highDefinitionFaceFrameSource = new HighDefinitionFaceFrameSource(this.sensor);
            this.highDefinitionFaceFrameSource.TrackingIdLost += this.HdFaceSource_TrackingIdLost;

            this.highDefinitionFaceFrameReader = this.highDefinitionFaceFrameSource.OpenReader();
            this.highDefinitionFaceFrameReader.FrameArrived += this.HdFaceReader_FrameArrived;

            this.currentFaceModel     = new FaceModel();
            this.currentFaceAlignment = new FaceAlignment();

            this.InitializeMesh();
            this.UpdateMesh();

            this.sensor.Open();
        }
Code Example #19
        private void InitializeFaceReaders()
        {
            this.m_FaceFrameResults = new FaceFrameResult[this.m_Sensor.BodyFrameSource.BodyCount];
            this.m_FaceFrameSources = new FaceFrameSource[this.m_Sensor.BodyFrameSource.BodyCount];
            this.m_FaceFrameReaders = new FaceFrameReader[this.m_Sensor.BodyFrameSource.BodyCount];

            FaceFrameFeatures faceFrameFeatures = faceFrameFeatureType == FaceFrameFeatureType.Required
                                ? RequiredFaceFrameFeatures()
                                : FullFaceFrameFeatures();

            for (int i = 0; i < this.m_FaceFrameSources.Length; ++i)
            {
                this.m_FaceFrameSources[i] = FaceFrameSource.Create(this.m_Sensor, 0, faceFrameFeatures);
                this.m_FaceFrameReaders[i] = this.m_FaceFrameSources[i].OpenReader();
            }
        }
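FaceFrameFeatureType here is app-defined rather than a Kinect SDK type; presumably a simple selector along these lines (an assumption, shown for completeness):

        // assumed app-level enum used by InitializeFaceReaders above
        public enum FaceFrameFeatureType
        {
            Required, // the minimal set from RequiredFaceFrameFeatures()
            Full      // the complete set from FullFaceFrameFeatures()
        }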
Code Example #20
        public void InitializeFace()
        {
            if (_kinect == null)
            {
                return;
            }

            //if (_bodyReader != null) return;

            if (faceFrameReaders != null)
            {
                return;
            }

            // set the maximum number of bodies that would be tracked by Kinect
            this.bodyCount = _kinect.BodyFrameSource.BodyCount;

            // specify the required face frame results
            FaceFrameFeatures faceFrameFeatures =
                FaceFrameFeatures.BoundingBoxInColorSpace
                | FaceFrameFeatures.PointsInColorSpace
                | FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.FaceEngagement
                | FaceFrameFeatures.Glasses
                | FaceFrameFeatures.Happy
                | FaceFrameFeatures.LeftEyeClosed
                | FaceFrameFeatures.RightEyeClosed
                | FaceFrameFeatures.LookingAway
                | FaceFrameFeatures.MouthMoved
                | FaceFrameFeatures.MouthOpen;

            // create a face frame source + reader to track each face in the FOV
            this.faceFrameSources = new FaceFrameSource[this.bodyCount];
            this.faceFrameReaders = new FaceFrameReader[this.bodyCount];
            for (int i = 0; i < this.bodyCount; i++)
            {
                // create the face frame source with the required face frame features and an initial tracking Id of 0
                this.faceFrameSources[i] = new FaceFrameSource(this._kinect, 0, faceFrameFeatures);

                // open the corresponding reader
                this.faceFrameReaders[i] = this.faceFrameSources[i].OpenReader();
            }

            // allocate storage to store face frame results for each face in the FOV
            this.faceFrameResults = new FaceFrameResult[this.bodyCount];
        }
Code Example #21
        private short _frameStopTalking = 0;   // number of frames without talking

        public KinectVideoSource(ILogger <KinectVideoSource> logger, IAudioSource audioSource, KBWrapper.IKbWrapper kb)
        {
            _logger = logger;
            _logger.LogInformation("Kinect video source loaded.");
            _audioSource = audioSource;
            _KB          = kb;

            _kinect = KinectSensor.GetDefault();
            // Kinect availability callback
            _kinect.IsAvailableChanged += Sensor_IsAvailableChanged;
            // reader for color camera frames
            _multiSourceFrameReader = _kinect.OpenMultiSourceFrameReader(FrameSourceTypes.Color);

            // face frame features we need
            FaceFrameFeatures faceFrameFeatures = FaceFrameFeatures.BoundingBoxInColorSpace | FaceFrameFeatures.MouthMoved | FaceFrameFeatures.MouthOpen;

            // BodyCount == 6; we need arrays to detect up to 6 faces at a time
            _faceFrameSources = new FaceFrameSource[_kinect.BodyFrameSource.BodyCount];
            _faceFrameReaders = new FaceFrameReader[_kinect.BodyFrameSource.BodyCount];
            _faceFrameResults = new FaceFrameResult[_kinect.BodyFrameSource.BodyCount];

            _bodies          = new Body[_kinect.BodyFrameSource.BodyCount];
            _bodyFrameReader = _kinect.BodyFrameSource.OpenReader();

            for (int i = 0; i < _kinect.BodyFrameSource.BodyCount; i++)
            {
                _faceFrameSources[i] = new FaceFrameSource(_kinect, 0, faceFrameFeatures);
                _faceFrameReaders[i] = _faceFrameSources[i].OpenReader();
            }
            if (!_kinect.IsOpen)
            {
                _kinect.Open();
            }

            if (!float.TryParse(ConfigurationManager.AppSettings["Framerate"], out _framerate))
            {
                _framerate = 20f;
            }

            _timer = new System.Timers.Timer(1000.0 / _framerate)
            {
                AutoReset = true, Enabled = false
            };
            _timer.Elapsed += OnTimerTick;
        }
Code Example #22
        public MainPage()
        {
            // one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            this.multiSourceFrameReader = this.kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Infrared | FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.BodyIndex | FrameSourceTypes.Body);

            this.multiSourceFrameReader.MultiSourceFrameArrived += this.Reader_MultiSourceFrameArrived;

            // specify the required face frame results
            // init with all the features so they are accessible later.
            this.faceFrameFeatures =
                FaceFrameFeatures.BoundingBoxInColorSpace
                | FaceFrameFeatures.PointsInColorSpace
                | FaceFrameFeatures.BoundingBoxInInfraredSpace
                | FaceFrameFeatures.PointsInInfraredSpace
                | FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.FaceEngagement
                | FaceFrameFeatures.Glasses
                | FaceFrameFeatures.Happy
                | FaceFrameFeatures.LeftEyeClosed
                | FaceFrameFeatures.RightEyeClosed
                | FaceFrameFeatures.LookingAway
                | FaceFrameFeatures.MouthMoved
                | FaceFrameFeatures.MouthOpen;

            this.faceManager = new FaceManager(this.kinectSensor, this.faceFrameFeatures);

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // open the sensor
            this.kinectSensor.Open();

            this.InitializeComponent();

            // new
            this.Loaded += MainPage_Loaded;
        }
Code Example #23
        public static FaceFrameFeatures FullFaceFrameFeatures()
        {
            FaceFrameFeatures faceFrameFeatures =
                FaceFrameFeatures.BoundingBoxInColorSpace
                | FaceFrameFeatures.PointsInColorSpace
                | FaceFrameFeatures.BoundingBoxInInfraredSpace
                | FaceFrameFeatures.PointsInInfraredSpace
                | FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.FaceEngagement
                | FaceFrameFeatures.Glasses
                | FaceFrameFeatures.Happy
                | FaceFrameFeatures.LeftEyeClosed
                | FaceFrameFeatures.RightEyeClosed
                | FaceFrameFeatures.LookingAway
                | FaceFrameFeatures.MouthMoved
                | FaceFrameFeatures.MouthOpen;

            return faceFrameFeatures;
        }
Code Example #24
        /// <summary>
        /// Create a new FaceManager using an existing sensor which has already opened.
        /// The faceFrameResult will return the requested faceFeatures.
        /// </summary>
        /// <param name="sensor"></param>
        /// <param name="faceFeatures"></param>
        public FaceManager(KinectSensor sensor, FaceFrameFeatures faceFeatures)
        {
            this.kinectSensor = sensor;

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // wire handler for body frame arrival
            this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

            this.bodyCount = this.kinectSensor.BodyFrameSource.BodyCount;

            // allocate storage to store body objects
            this.bodies = new Body[this.bodyCount];

            // create a face frame source + reader to track each face in the FOV
            this.faceFrameSources = new FaceFrameSource[this.bodyCount];
            this.faceFrameReaders = new FaceFrameReader[this.bodyCount];

            for (int i = 0; i < this.bodyCount; i++)
            {
                // create the face frame source with the required face frame features and an initial tracking Id of 0
                this.faceFrameSources[i] = new FaceFrameSource(this.kinectSensor, 0, faceFeatures);

                // open the corresponding reader
                this.faceFrameReaders[i] = this.faceFrameSources[i].OpenReader();
            }

            // populate face result colors - one for each face index
            this.faceColors = new List<Color>()
            {
                Colors.Red,
                Colors.Orange,
                Colors.Green,
                Colors.LightBlue,
                Colors.Indigo,
                Colors.Violet
            };
        }
Code Example #26
        private static void initializeKinectV2()
        {
            sensor = KinectSensor.GetDefault();

            _bodyCount = sensor.BodyFrameSource.BodyCount;

            _coordinateMapper = sensor.CoordinateMapper;

            // enable body frame
            _bodyFrameReader = sensor.BodyFrameSource.OpenReader();
            _bodyFrameReader.FrameArrived += _bodyFrameReader_FrameArrived;

            // allocate buffer to store bodies
            _bodies = new Body[_bodyCount];

            // specify the required face frame results
            FaceFrameFeatures faceFrameFeatures =
                FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.MouthOpen
                | FaceFrameFeatures.MouthMoved
                | FaceFrameFeatures.LeftEyeClosed
                | FaceFrameFeatures.RightEyeClosed;

            _faceFrameSources = new FaceFrameSource[_bodyCount];
            _faceFrameReaders = new FaceFrameReader[_bodyCount];
            for (int i = 0; i < _bodyCount; i++)
            {
                _faceFrameSources[i] = new FaceFrameSource(sensor, 0, faceFrameFeatures);
                _faceFrameReaders[i] = _faceFrameSources[i].OpenReader();
                _faceFrameReaders[i].FrameArrived += _faceFrameReader_FrameArrived;
            }

            // allocate buffer to store face frame results
            _faceFrameResults = new FaceFrameResult[_bodyCount];

            sensor.Open();
        }
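The _faceFrameReader_FrameArrived handler wired above follows the usual FaceBasics pattern; a sketch assuming the static fields above: match the frame to the per-body source that produced it and cache the result.

        private static void _faceFrameReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
        {
            using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
            {
                if (faceFrame == null) return;

                // find which of the per-body sources produced this frame
                for (int i = 0; i < _bodyCount; i++)
                {
                    if (_faceFrameSources[i] == faceFrame.FaceFrameSource)
                    {
                        _faceFrameResults[i] = faceFrame.FaceFrameResult;
                        break;
                    }
                }
            }
        }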
Code Example #27
    void Awake()
    {
        _Sensor = KinectSensor.GetDefault();

        if (_Sensor != null)
        {
            _Reader = _Sensor.BodyFrameSource.OpenReader();
            if (!_Sensor.IsOpen)
            {
                _Sensor.Open();
            }

            bodyCount = _Sensor.BodyFrameSource.BodyCount;
            FaceFrameFeatures faceFrameFeatures = FaceFrameFeatures.RotationOrientation;

            faceFrameSources = new FaceFrameSource[bodyCount];
            faceFrameReaders = new FaceFrameReader[bodyCount];

            avatarBodies = new Avatar.Body[bodyCount];
            for (int i = 0; i < bodyCount; i++)
            {
                faceFrameSources[i] = FaceFrameSource.Create(_Sensor, 0, faceFrameFeatures);
                faceFrameReaders[i] = faceFrameSources[i].OpenReader();
            }

            for (int i = 0; i < bodyCount; i++)
            {
                avatarBodies[i] = new Avatar.Body();
                for (JointType jt = JointType.SpineBase; jt <= JointType.ThumbRight; jt++)
                {
                    avatarBodies[i].Joints[jt]           = new Avatar.Joint();
                    avatarBodies[i].Joints[jt].JointType = jt;
                }
            }
        }
    }
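RotationOrientation delivers the head pose as a quaternion (FaceFrameResult.FaceRotationQuaternion). A helper adapted from the SDK's FaceBasics sample converts it to pitch/yaw/roll degrees for driving the avatar; Vector4 here is the Kinect SDK struct with float X, Y, Z, W fields:

    static void ExtractFaceRotationInDegrees(Vector4 q, out float pitch, out float yaw, out float roll)
    {
        float x = q.X, y = q.Y, z = q.Z, w = q.W;

        // convert the face rotation quaternion to Euler angles in degrees
        pitch = (float)(Math.Atan2(2 * ((y * z) + (w * x)),
                                   (w * w) - (x * x) - (y * y) + (z * z)) / Math.PI * 180.0);
        yaw   = (float)(Math.Asin(2 * ((w * y) - (x * z))) / Math.PI * 180.0);
        roll  = (float)(Math.Atan2(2 * ((x * y) + (w * z)),
                                   (w * w) + (x * x) - (y * y) - (z * z)) / Math.PI * 180.0);
    }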
Code Example #28
        void InitializeFace()
        {
            FaceFrameFeatures faceFrameFeatures =
                FaceFrameFeatures.BoundingBoxInColorSpace
                | FaceFrameFeatures.PointsInColorSpace
                | FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.FaceEngagement
                | FaceFrameFeatures.Glasses
                | FaceFrameFeatures.Happy
                | FaceFrameFeatures.LeftEyeClosed
                | FaceFrameFeatures.RightEyeClosed
                | FaceFrameFeatures.LookingAway
                | FaceFrameFeatures.MouthMoved
                | FaceFrameFeatures.MouthOpen;

            faceFrameSources = new FaceFrameSource[bodyCount];
            faceFrameReaders = new FaceFrameReader[bodyCount];
            for (int i = 0; i < bodyCount; i++)
            {
                faceFrameSources[i] = new FaceFrameSource(_sensor, 0, faceFrameFeatures);
                faceFrameReaders[i] = faceFrameSources[i].OpenReader();
                faceFrameReaders[i].FrameArrived += faceFrameReader_FrameArrived;
            }
            faceFrameResults = new FaceFrameResult[bodyCount];

            // List of brush colors used to draw the face detection boxes
            faceBrush = new List<System.Windows.Media.Brush>()
            {
                System.Windows.Media.Brushes.Brown,
                System.Windows.Media.Brushes.Orange,
                System.Windows.Media.Brushes.Green,
                System.Windows.Media.Brushes.Red,
                System.Windows.Media.Brushes.LightBlue,
                System.Windows.Media.Brushes.Yellow
            };
        }
Code Example #30
File: MainPage.xaml.cs Project: ruscles/tutorial
        private void SetupCurrentDisplay(DisplayFrameType newDisplayFrameType, bool isFullScreen = true)
        {
            if (isFullScreen)
            {
                RootGrid.RowDefinitions.Clear();
                RootGrid.RowDefinitions.Add(new RowDefinition() { Height = new GridLength(0) });
                RootGrid.RowDefinitions.Add(new RowDefinition() { Height = new GridLength(1, GridUnitType.Star) });
                RootGrid.RowDefinitions.Add(new RowDefinition() { Height = new GridLength(0) });
                FullScreenBackButton.Visibility = Windows.UI.Xaml.Visibility.Visible;
            }
            else
            {
                RootGrid.RowDefinitions.Clear();
                RootGrid.RowDefinitions.Add(new RowDefinition() { Height = new GridLength(70) });
                RootGrid.RowDefinitions.Add(new RowDefinition() { Height = new GridLength(1, GridUnitType.Star) });
                RootGrid.RowDefinitions.Add(new RowDefinition() { Height = new GridLength(100) });
                FullScreenBackButton.Visibility = Windows.UI.Xaml.Visibility.Collapsed;
            }

            CurrentDisplayFrameType = newDisplayFrameType;
            // Frames used by more than one type are declared outside the switch
            FrameDescription colorFrameDescription = null;
            FrameDescription depthFrameDescription = null;
            FrameDescription infraredFrameDescription = null;
            // reset the display methods
            FacePointsCanvas.Children.Clear();
            if (this.BodyJointsGrid != null)
            {
                this.BodyJointsGrid.Visibility = Visibility.Collapsed;
            }
            if (this.FrameDisplayImage != null)
            {
                this.FrameDisplayImage.Source = null;
            }
            switch (CurrentDisplayFrameType)
            {
                case DisplayFrameType.Infrared:
                    infraredFrameDescription = this.kinectSensor.InfraredFrameSource.FrameDescription;
                    this.CurrentFrameDescription = infraredFrameDescription;
                    // allocate space to put the pixels being received and converted
                    this.infraredFrameData = new ushort[infraredFrameDescription.Width * infraredFrameDescription.Height];
                    this.infraredPixels = new byte[infraredFrameDescription.Width * infraredFrameDescription.Height * BytesPerPixel];
                    this.bitmap = new WriteableBitmap(infraredFrameDescription.Width, infraredFrameDescription.Height);
                    break;

                case DisplayFrameType.Color:
                    colorFrameDescription = this.kinectSensor.ColorFrameSource.FrameDescription;
                    this.CurrentFrameDescription = colorFrameDescription;
                    // create the bitmap to display
                    this.bitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height);
                    break;

                case DisplayFrameType.Depth:
                    depthFrameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;
                    this.CurrentFrameDescription = depthFrameDescription;
                    // allocate space to put the pixels being received and converted
                    this.depthFrameData = new ushort[depthFrameDescription.Width * depthFrameDescription.Height];
                    this.depthPixels = new byte[depthFrameDescription.Width * depthFrameDescription.Height * BytesPerPixel];
                    this.bitmap = new WriteableBitmap(depthFrameDescription.Width, depthFrameDescription.Height);
                    break;

                case DisplayFrameType.BodyMask:
                    colorFrameDescription = this.kinectSensor.ColorFrameSource.FrameDescription;
                    this.CurrentFrameDescription = colorFrameDescription;
                    // allocate space to put the pixels being received and converted
                    this.colorMappedToDepthPoints = new DepthSpacePoint[colorFrameDescription.Width * colorFrameDescription.Height];
                    this.bitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height);
                    break;

                case DisplayFrameType.BodyJoints:
                    depthFrameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;
                    // instantiate a new Canvas
                    this.drawingCanvas = new Canvas();
                    // set the clip rectangle to prevent rendering outside the canvas
                    this.drawingCanvas.Clip = new RectangleGeometry();
                    this.drawingCanvas.Clip.Rect = new Rect(0.0, 0.0, this.BodyJointsGrid.Width, this.BodyJointsGrid.Height);
                    this.drawingCanvas.Width = this.BodyJointsGrid.Width;
                    this.drawingCanvas.Height = this.BodyJointsGrid.Height;
                    // reset the body joints grid
                    this.BodyJointsGrid.Visibility = Visibility.Visible;
                    this.BodyJointsGrid.Children.Clear();
                    // add canvas to DisplayGrid
                    this.BodyJointsGrid.Children.Add(this.drawingCanvas);
                    bodiesManager = new BodiesManager(this.coordinateMapper, this.drawingCanvas, this.kinectSensor.BodyFrameSource.BodyCount);
                    break;

                case DisplayFrameType.BackgroundRemoved:
                    colorFrameDescription = this.kinectSensor.ColorFrameSource.FrameDescription;
                    depthFrameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;
                    // Actual current frame is going to be a map of depth and color; choose the larger (color) to display
                    this.CurrentFrameDescription = colorFrameDescription;
                    // allocate space to put the pixels being received and converted
                    this.depthFrameData = new ushort[depthFrameDescription.Width * depthFrameDescription.Height];
                    this.colorMappedToDepthPoints = new DepthSpacePoint[colorFrameDescription.Width * colorFrameDescription.Height];
                    this.bitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height);
                    break;
                case DisplayFrameType.FaceOnColor:
                    colorFrameDescription = this.kinectSensor.ColorFrameSource.FrameDescription;
                    this.CurrentFrameDescription = colorFrameDescription;
                    // create the bitmap to display
                    this.bitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height);
                    this.FacePointsCanvas.Width = colorFrameDescription.Width;
                    this.FacePointsCanvas.Height = colorFrameDescription.Height;
                    this.faceFrameFeatures =
                            FaceFrameFeatures.BoundingBoxInColorSpace
                            | FaceFrameFeatures.PointsInColorSpace
                            | FaceFrameFeatures.RotationOrientation
                            | FaceFrameFeatures.FaceEngagement
                            | FaceFrameFeatures.Glasses
                            | FaceFrameFeatures.Happy
                            | FaceFrameFeatures.LeftEyeClosed
                            | FaceFrameFeatures.RightEyeClosed
                            | FaceFrameFeatures.LookingAway
                            | FaceFrameFeatures.MouthMoved
                            | FaceFrameFeatures.MouthOpen;
                    break;

                case DisplayFrameType.FaceOnInfrared:
                    infraredFrameDescription = this.kinectSensor.InfraredFrameSource.FrameDescription;
                    this.CurrentFrameDescription = infraredFrameDescription;
                    // allocate space to put the pixels being received and converted
                    this.infraredFrameData = new ushort[infraredFrameDescription.Width * infraredFrameDescription.Height];
                    this.infraredPixels = new byte[infraredFrameDescription.Width * infraredFrameDescription.Height * BytesPerPixel];
                    this.bitmap = new WriteableBitmap(infraredFrameDescription.Width, infraredFrameDescription.Height);
                    this.FacePointsCanvas.Width = infraredFrameDescription.Width;
                    this.FacePointsCanvas.Height = infraredFrameDescription.Height;
                    break;

                case DisplayFrameType.FaceGame:
                    colorFrameDescription = this.kinectSensor.ColorFrameSource.FrameDescription;
                    this.CurrentFrameDescription = colorFrameDescription;
                    this.FacePointsCanvas.Width = colorFrameDescription.Width;
                    this.FacePointsCanvas.Height = colorFrameDescription.Height;
                    break;

                default:
                    break;
            }
        }
Code Example #31
        public MainWindow()
        {
            kinect = KinectSensor.GetDefault();

            InfraredFrameSource infraredFrameSource = kinect.InfraredFrameSource;

            multiSourceFrameReader = kinect.OpenMultiSourceFrameReader(FrameSourceTypes.Infrared | FrameSourceTypes.Body);
            multiSourceFrameReader.MultiSourceFrameArrived += MultiSource_FrameArrived;

            frameDescription = infraredFrameSource.FrameDescription;

            bodies = new Body[kinect.BodyFrameSource.BodyCount];

            FaceFrameFeatures faceFrameFeatures =
                FaceFrameFeatures.BoundingBoxInInfraredSpace
                | FaceFrameFeatures.PointsInInfraredSpace
                | FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.FaceEngagement
                | FaceFrameFeatures.Glasses
                | FaceFrameFeatures.Happy
                | FaceFrameFeatures.LeftEyeClosed
                | FaceFrameFeatures.RightEyeClosed
                | FaceFrameFeatures.LookingAway
                | FaceFrameFeatures.MouthMoved
                | FaceFrameFeatures.MouthOpen;

            faceFrameSources = new FaceFrameSource[6];
            faceFrameReaders = new FaceFrameReader[6];
            faceFrameResults = new FaceFrameResult[6];

            for (int i = 0; i < 6; i++)
            {
                faceFrameSources[i] = new FaceFrameSource(kinect, 0, faceFrameFeatures);
                faceFrameReaders[i] = faceFrameSources[i].OpenReader();
                faceFrameReaders[i].FrameArrived += Face_FrameArrived;
            }

            faceBrush = new List<Brush>()
            {
                Brushes.Pink,
                Brushes.Orange,
                Brushes.Yellow,
                Brushes.Purple,
                Brushes.Red,
                Brushes.Blue
            };

            // Gray16 matches the raw 16-bit infrared samples stored in infraredPixels;
            // Gray32Float would require a float[] buffer instead
            infraredBitmap = new WriteableBitmap(frameDescription.Width,
                                                 frameDescription.Height,
                                                 96.0, 96.0,
                                                 PixelFormats.Gray16,
                                                 null);
            infraredPixels     = new ushort[frameDescription.LengthInPixels];
            drawingGroup       = new DrawingGroup();
            drawingImageSource = new DrawingImage(drawingGroup);

            kinect.Open();

            DataContext = this;

            InitializeComponent();
        }
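The constructor above wires six face readers but leaves every source bound to TrackingId 0, so no face frames arrive until real body ids are assigned. Below is a minimal sketch of the MultiSource_FrameArrived handler it references, showing only the rebinding step; the handler body is an assumption (infrared rendering is omitted), only the field names come from the example.

        void MultiSource_FrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();
            if (multiSourceFrame == null) return;

            using (BodyFrame bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame())
            {
                if (bodyFrame == null) return;
                bodyFrame.GetAndRefreshBodyData(bodies);

                // a FaceFrameSource only produces frames while its TrackingId
                // matches a tracked body, so rebind every frame
                for (int i = 0; i < bodies.Length; i++)
                {
                    if (bodies[i].IsTracked)
                    {
                        faceFrameSources[i].TrackingId = bodies[i].TrackingId;
                    }
                }
            }
        }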
Code example #32
        public void Evaluate(int SpreadMax)
        {
            if (this.FInvalidateConnect)
            {
                if (this.FInRuntime.IsConnected)
                {
                    //Cache runtime node
                    this.runtime = this.FInRuntime[0];

                    // check for null before touching the runtime
                    if (this.runtime != null)
                    {
                        this.runtime.SkeletonFrameReady += SkeletonReady;

                        FaceFrameFeatures faceFrameFeatures =
                            FaceFrameFeatures.BoundingBoxInColorSpace
                            | FaceFrameFeatures.PointsInColorSpace
                            | FaceFrameFeatures.RotationOrientation
                            | FaceFrameFeatures.FaceEngagement
                            | FaceFrameFeatures.Glasses
                            | FaceFrameFeatures.Happy
                            | FaceFrameFeatures.LeftEyeClosed
                            | FaceFrameFeatures.RightEyeClosed
                            | FaceFrameFeatures.LookingAway
                            | FaceFrameFeatures.MouthMoved
                            | FaceFrameFeatures.MouthOpen;

                        for (int i = 0; i < this.faceFrameSources.Length; i++)
                        {
                            this.faceFrameSources[i] = new FaceFrameSource(this.runtime.Runtime, 0, faceFrameFeatures);
                            this.faceFrameReaders[i] = this.faceFrameSources[i].OpenReader();
                            this.faceFrameReaders[i].FrameArrived += this.faceReader_FrameArrived;
                        }
                    }
                }
                else
                {
                    this.runtime.SkeletonFrameReady -= SkeletonReady;
                    for (int i = 0; i < this.faceFrameSources.Length; i++)
                    {
                        this.faceFrameReaders[i].FrameArrived -= this.faceReader_FrameArrived;
                        this.faceFrameReaders[i].Dispose();
                        this.faceFrameSources[i].Dispose();
                    }
                }

                this.FInvalidateConnect = false;
            }

            List<FaceFrameResult> results = new List<FaceFrameResult>();

            for (int i = 0; i < lastResults.Length; i++)
            {
                if (this.lastResults[i] != null && this.faceFrameReaders[i].FaceFrameSource.IsTrackingIdValid)
                {
                    results.Add(lastResults[i]);
                }
            }

            this.FOutWearGlasses.SliceCount      = results.Count;
            this.FOutUserIndex.SliceCount        = results.Count;
            this.FOutSizeInfrared.SliceCount     = results.Count;
            this.FOutSizeColor.SliceCount        = results.Count;
            this.FOutRightEyeClosed.SliceCount   = results.Count;
            this.FOutPositionInfrared.SliceCount = results.Count;
            this.FOutPositionColor.SliceCount    = results.Count;
            this.FOutPointsColor.SliceCount      = results.Count;
            this.FOutOrientation.SliceCount      = results.Count;
            this.FOutMouthOpen.SliceCount        = results.Count;
            this.FOutMouthMoved.SliceCount       = results.Count;
            this.FOutlookAway.SliceCount         = results.Count;
            this.FOutLeftEyeClosed.SliceCount    = results.Count;
            this.FOutHappy.SliceCount            = results.Count;
            this.FOutEngaged.SliceCount          = results.Count;
            this.FOutPointsWorld.SliceCount      = results.Count;

            for (int i = 0; i < results.Count; i++)
            {
                this.WriteFaceData(results[i], i);
            }
        }
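Evaluate only consumes lastResults; the faceReader_FrameArrived handler registered above is expected to keep that cache current. A plausible sketch follows, assuming lastResults is indexed the same way as faceFrameSources; this body is an assumption, not part of the plugin source.

        private void faceReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
        {
            using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
            {
                if (faceFrame == null) return;

                // cache the newest result in the slot belonging to this reader's source
                int index = Array.IndexOf(this.faceFrameSources, faceFrame.FaceFrameSource);
                if (index >= 0)
                {
                    this.lastResults[index] = faceFrame.FaceFrameResult;
                }
            }
        }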
Code example #33
File: MainPage.xaml.cs Project: ruscles/tutorial
        public MainPage()
        {
            // one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            this.multiSourceFrameReader = this.kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Infrared | FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.BodyIndex | FrameSourceTypes.Body);

            this.multiSourceFrameReader.MultiSourceFrameArrived += this.Reader_MultiSourceFrameArrived;

            // specify the required face frame results
            // init with all the features so they are accessible later.
            this.faceFrameFeatures =
                FaceFrameFeatures.BoundingBoxInColorSpace
                | FaceFrameFeatures.PointsInColorSpace
                | FaceFrameFeatures.BoundingBoxInInfraredSpace
                | FaceFrameFeatures.PointsInInfraredSpace
                | FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.FaceEngagement
                | FaceFrameFeatures.Glasses
                | FaceFrameFeatures.Happy
                | FaceFrameFeatures.LeftEyeClosed
                | FaceFrameFeatures.RightEyeClosed
                | FaceFrameFeatures.LookingAway
                | FaceFrameFeatures.MouthMoved
                | FaceFrameFeatures.MouthOpen;

            this.faceManager = new FaceManager(this.kinectSensor, this.faceFrameFeatures);

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // open the sensor
            this.kinectSensor.Open();

            this.InitializeComponent();

            this.Loaded += MainPage_Loaded;

            //lab 13
            // Initialize the gesture detection objects for our gestures
            this.gestureDetectorList = new List<GestureDetector>();

            //lab 13
            // Create a gesture detector for each body (6 bodies => 6 detectors)
            int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;
            for (int i = 0; i < maxBodies; ++i)
            {
                GestureResultView result = new GestureResultView(i, false, false, 0.0f);
                GestureDetector detector = new GestureDetector(this.kinectSensor, result);
                result.PropertyChanged += GestureResult_PropertyChanged;
                this.gestureDetectorList.Add(detector);
            }
        }
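Like the face sources, each GestureDetector reports nothing until it is pointed at a live body. Here is a hedged sketch of the body-frame portion of Reader_MultiSourceFrameArrived, assuming a bodies array field and the TrackingId/IsPaused properties that GestureDetector exposes in the Microsoft DiscreteGestureBasics sample:

            // inside the body-frame branch of Reader_MultiSourceFrameArrived (sketch)
            for (int i = 0; i < this.bodies.Length; ++i)
            {
                ulong trackingId = this.bodies[i].TrackingId;
                if (trackingId != this.gestureDetectorList[i].TrackingId)
                {
                    this.gestureDetectorList[i].TrackingId = trackingId;
                    // pause the VGB reader while its body is untracked
                    this.gestureDetectorList[i].IsPaused = trackingId == 0;
                }
            }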
Code example #34
File: FaceManager.cs Project: ruscles/tutorial
        /// <summary>
        /// Create shapes and text to apply to a parent panel which represent the
        /// current state of a faceFrameResult.
        /// </summary>
        /// <param name="parentPanel">The Panel in which the shapes will be added</param>
        /// <param name="faceFrameResult">The reult of the face tracking</param>
        /// <param name="displayFeatures">The FaceFrameFeatures to be drawn</param>
        /// <param name="color">The color of the shapes and text</param>
        /// <param name="bodyIndex">The index of the body/face</param>
        private void DrawFaceFeatures(Panel parentPanel, FaceFrameResult faceFrameResult, FaceFrameFeatures displayFeatures, Color color, int bodyIndex)
        {
            if (parentPanel.Width == 0 ||
                Double.IsNaN(parentPanel.Width) ||
                parentPanel.Height == 0 ||
                Double.IsNaN(parentPanel.Height))
            {
                // The parent Panel must have a size to be rendered on
                return;
            }

            string messages = "";
            bool renderMessages = false;
            int fontSize = (int)(parentPanel.Height * 0.023);
            Point messagesPosition = new Point(fontSize * bodyIndex * 10, 0);

            // Face points and bounding boxes
            if (displayFeatures.HasFlag(FaceFrameFeatures.BoundingBoxInColorSpace))
            {
                double lineSize = 7;
                int posX = faceFrameResult.FaceBoundingBoxInColorSpace.Left;
                int posY = faceFrameResult.FaceBoundingBoxInColorSpace.Top;
                int width = faceFrameResult.FaceBoundingBoxInColorSpace.Right - posX + (int)lineSize;
                int height = faceFrameResult.FaceBoundingBoxInColorSpace.Bottom - posY + (int)lineSize;
                Rectangle rect = CreateFaceBoxRectangle(color, lineSize, width, height);
                Canvas.SetLeft(rect, posX);
                Canvas.SetTop(rect, posY);
                parentPanel.Children.Add(rect);
                messagesPosition = new Point(posX, posY + height);
            }
            if (displayFeatures.HasFlag(FaceFrameFeatures.BoundingBoxInInfraredSpace))
            {
                double lineSize = 7;
                int posX = faceFrameResult.FaceBoundingBoxInInfraredSpace.Left;
                int posY = faceFrameResult.FaceBoundingBoxInInfraredSpace.Top;
                int width = faceFrameResult.FaceBoundingBoxInInfraredSpace.Right - posX + (int)lineSize;
                int height = faceFrameResult.FaceBoundingBoxInInfraredSpace.Bottom - posY + (int)lineSize;
                Rectangle rect = CreateFaceBoxRectangle(color, lineSize, width, height);
                Canvas.SetLeft(rect, posX);
                Canvas.SetTop(rect, posY);
                parentPanel.Children.Add(rect);
                messagesPosition = new Point(posX, posY + height);
            }
            if (displayFeatures.HasFlag(FaceFrameFeatures.PointsInColorSpace))
            {
                foreach (KeyValuePair<FacePointType, Point> facePointKVP in
                        faceFrameResult.FacePointsInColorSpace)
                {
                    Size ellipseSize = new Size(10, 10);
                    Ellipse ellipse = CreateFacePointEllipse(color, ellipseSize);
                    Canvas.SetLeft(ellipse, facePointKVP.Value.X - (ellipseSize.Width / 2));
                    Canvas.SetTop(ellipse, facePointKVP.Value.Y - (ellipseSize.Height / 2));
                    parentPanel.Children.Add(ellipse);
                }
            }
            if (displayFeatures.HasFlag(FaceFrameFeatures.PointsInInfraredSpace))
            {
                foreach (KeyValuePair<FacePointType, Point> facePointKVP in
                        faceFrameResult.FacePointsInInfraredSpace)
                {
                    Size ellipseSize = new Size(3, 3);
                    Ellipse ellipse = CreateFacePointEllipse(color, ellipseSize);
                    Canvas.SetLeft(ellipse, facePointKVP.Value.X - (ellipseSize.Width / 2));
                    Canvas.SetTop(ellipse, facePointKVP.Value.Y - (ellipseSize.Height / 2));
                    parentPanel.Children.Add(ellipse);
                }
            }
            // Rotation stuff
            if (displayFeatures.HasFlag(FaceFrameFeatures.RotationOrientation))
            {
                int pitch, yaw, roll;
                ExtractFaceRotationInDegrees(faceFrameResult.FaceRotationQuaternion,
                    out pitch, out yaw, out roll);
                messages += "Rotation Pitch: " + pitch + "\n";
                messages += "Rotation Yaw: " + yaw + "\n";
                messages += "Rotation Roll: " + roll + "\n";
                renderMessages = true;
            }

            // Other Face Properties and states
            if (displayFeatures.HasFlag(FaceFrameFeatures.FaceEngagement))
            {
                messages += FacePropertyToString(FaceProperty.Engaged,
                    faceFrameResult.FaceProperties[FaceProperty.Engaged]);
                renderMessages = true;
            }
            if (displayFeatures.HasFlag(FaceFrameFeatures.Glasses))
            {
                messages += FacePropertyToString(FaceProperty.WearingGlasses,
                    faceFrameResult.FaceProperties[FaceProperty.WearingGlasses]);
                renderMessages = true;
            }
            if (displayFeatures.HasFlag(FaceFrameFeatures.Happy))
            {
                messages += FacePropertyToString(FaceProperty.Happy,
                    faceFrameResult.FaceProperties[FaceProperty.Happy]);
                renderMessages = true;
            }
            if (displayFeatures.HasFlag(FaceFrameFeatures.LeftEyeClosed))
            {
                messages += FacePropertyToString(FaceProperty.LeftEyeClosed,
                    faceFrameResult.FaceProperties[FaceProperty.LeftEyeClosed]);
                renderMessages = true;
            }
            if (displayFeatures.HasFlag(FaceFrameFeatures.RightEyeClosed))
            {
                messages += FacePropertyToString(FaceProperty.RightEyeClosed,
                    faceFrameResult.FaceProperties[FaceProperty.RightEyeClosed]);
                renderMessages = true;
            }
            if (displayFeatures.HasFlag(FaceFrameFeatures.LookingAway))
            {
                messages += FacePropertyToString(FaceProperty.LookingAway,
                    faceFrameResult.FaceProperties[FaceProperty.LookingAway]);
                renderMessages = true;
            }
            if (displayFeatures.HasFlag(FaceFrameFeatures.MouthMoved))
            {
                messages += FacePropertyToString(FaceProperty.MouthMoved,
                    faceFrameResult.FaceProperties[FaceProperty.MouthMoved]);
                renderMessages = true;
            }
            if (displayFeatures.HasFlag(FaceFrameFeatures.MouthOpen))
            {
                messages += FacePropertyToString(FaceProperty.MouthOpen,
                    faceFrameResult.FaceProperties[FaceProperty.MouthOpen]);
                renderMessages = true;
            }

            if (renderMessages)
            {
                TextBlock textBlock = new TextBlock();
                textBlock.Text = messages;
                textBlock.Foreground = new SolidColorBrush(color);
                textBlock.FontSize = fontSize;
                Canvas.SetLeft(textBlock, messagesPosition.X);
                Canvas.SetTop(textBlock, messagesPosition.Y);
                parentPanel.Children.Add(textBlock);
            }
        }
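DrawFaceFeatures depends on an ExtractFaceRotationInDegrees helper to turn the face quaternion into Euler angles. The conversion used by the Microsoft FaceBasics sample looks like the sketch below (reproduced from the sample pattern; treat the integer truncation as an assumption):

        private static void ExtractFaceRotationInDegrees(Vector4 rotQuaternion, out int pitch, out int yaw, out int roll)
        {
            double x = rotQuaternion.X;
            double y = rotQuaternion.Y;
            double z = rotQuaternion.Z;
            double w = rotQuaternion.W;

            // convert the rotation quaternion to Euler angles in degrees
            pitch = (int)(Math.Atan2(2 * ((y * z) + (w * x)), (w * w) - (x * x) - (y * y) + (z * z)) / Math.PI * 180.0);
            yaw = (int)(Math.Asin(2 * ((w * y) - (x * z))) / Math.PI * 180.0);
            roll = (int)(Math.Atan2(2 * ((x * y) + (w * z)), (w * w) + (x * x) - (y * y) - (z * z)) / Math.PI * 180.0);
        }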
Code example #35
        private void SetupCurrentDisplay(DisplayFrameType newDisplayFrameType)
        {
            CurrentDisplayFrameType = newDisplayFrameType;
            // Frames used by more than one type are declared outside the switch
            FrameDescription colorFrameDescription = null;
            FrameDescription infraredFrameDescription = null;
            // reset the display methods
            FacePointsCanvas.Children.Clear();
            if (this.BodyJointsGrid != null)
            {
                this.BodyJointsGrid.Visibility = Visibility.Collapsed;
            }
            if (this.FrameDisplayImage != null)
            {
                this.FrameDisplayImage.Source = null;
            }
            switch (CurrentDisplayFrameType)
            {
                case DisplayFrameType.FaceOnColor:
                    colorFrameDescription = this.kinectSensor.ColorFrameSource.FrameDescription;
                    this.CurrentFrameDescription = colorFrameDescription;
                    // create the bitmap to display
                    this.bitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height);
                    this.FacePointsCanvas.Width = colorFrameDescription.Width;
                    this.FacePointsCanvas.Height = colorFrameDescription.Height;
                    this.faceFrameFeatures =
                            FaceFrameFeatures.BoundingBoxInColorSpace
                            | FaceFrameFeatures.PointsInColorSpace
                            | FaceFrameFeatures.RotationOrientation
                            | FaceFrameFeatures.FaceEngagement
                            | FaceFrameFeatures.Glasses
                            | FaceFrameFeatures.Happy
                            | FaceFrameFeatures.LeftEyeClosed
                            | FaceFrameFeatures.RightEyeClosed
                            | FaceFrameFeatures.LookingAway
                            | FaceFrameFeatures.MouthMoved
                            | FaceFrameFeatures.MouthOpen;
                    break;

                case DisplayFrameType.FaceOnInfrared:
                    infraredFrameDescription = this.kinectSensor.InfraredFrameSource.FrameDescription;
                    this.CurrentFrameDescription = infraredFrameDescription;
                    // allocate space to put the pixels being received and converted
                    this.infraredFrameData = new ushort[infraredFrameDescription.Width * infraredFrameDescription.Height];
                    this.infraredPixels = new byte[infraredFrameDescription.Width * infraredFrameDescription.Height * BytesPerPixel];
                    this.bitmap = new WriteableBitmap(infraredFrameDescription.Width, infraredFrameDescription.Height);
                    this.FacePointsCanvas.Width = infraredFrameDescription.Width;
                    this.FacePointsCanvas.Height = infraredFrameDescription.Height;
                    break;

                default:
                    break;
            }
        }
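The FaceOnInfrared branch allocates infraredFrameData (raw ushort samples) and infraredPixels (displayable BGRA bytes) but converts between them elsewhere. A simplified sketch of that step, assuming BytesPerPixel is 4 (the official sample uses a float normalization; the bit shift below is a cheaper approximation):

        private void ConvertInfraredDataToPixels()
        {
            int pixelIndex = 0;
            for (int i = 0; i < this.infraredFrameData.Length; ++i)
            {
                // take the high byte of each 16-bit sample as the gray intensity
                byte intensity = (byte)(this.infraredFrameData[i] >> 8);

                this.infraredPixels[pixelIndex++] = intensity; // blue
                this.infraredPixels[pixelIndex++] = intensity; // green
                this.infraredPixels[pixelIndex++] = intensity; // red
                this.infraredPixels[pixelIndex++] = 255;       // alpha
            }
        }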
Code example #36
File: MainPage.xaml.cs Project: ruscles/tutorial
        public MainPage()
        {
            // one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            this.multiSourceFrameReader = this.kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Infrared | FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.BodyIndex | FrameSourceTypes.Body);

            this.multiSourceFrameReader.MultiSourceFrameArrived += this.Reader_MultiSourceFrameArrived;

            // specify the required face frame results
            // init with all the features so they are accessible later.
            this.faceFrameFeatures =
                FaceFrameFeatures.BoundingBoxInColorSpace
                | FaceFrameFeatures.PointsInColorSpace
                | FaceFrameFeatures.BoundingBoxInInfraredSpace
                | FaceFrameFeatures.PointsInInfraredSpace
                | FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.FaceEngagement
                | FaceFrameFeatures.Glasses
                | FaceFrameFeatures.Happy
                | FaceFrameFeatures.LeftEyeClosed
                | FaceFrameFeatures.RightEyeClosed
                | FaceFrameFeatures.LookingAway
                | FaceFrameFeatures.MouthMoved
                | FaceFrameFeatures.MouthOpen;

            this.faceManager = new FaceManager(this.kinectSensor, this.faceFrameFeatures);

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // open the sensor
            this.kinectSensor.Open();

            this.InitializeComponent();

            // new
            this.Loaded += MainPage_Loaded;
        }
Code example #37
File: FaceManager.cs Project: ruscles/tutorial
 /// <summary>
 /// Acquire the latest valid face frames and draw their data to a panel with 
 /// WPF shapes and text.
 /// </summary>
 /// <param name="parentPanel">The Panel in which the shapes will be children. 
 /// Make sure the parentPanel has its width and height set to what is expected 
 /// for the requested features.</param>
 /// <param name="displayFeatures">The FaceFrameFeatures to be rendered, use flags to 
 /// display many features. Be aware of ColorSpace vs InfraredSpace when choosing.</param>
 public void DrawLatestFaceResults(Panel parentPanel, FaceFrameFeatures displayFeatures)
 {
     parentPanel.Children.Clear();
     for (int i = 0; i < this.bodyCount; i++)
     {
         if (this.faceFrameReaders[i] != null)
         {
             FaceFrame frame = this.faceFrameReaders[i].AcquireLatestFrame();
             if (frame != null && frame.FaceFrameResult != null)
             {
                 DrawFaceFeatures(parentPanel, frame.FaceFrameResult, displayFeatures, this.faceColors[i], i);
             }
         }
     }
 }
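A typical call site runs once per rendered frame; the canvas name below is borrowed from the other examples on this page, so the pairing is an assumption:

     // e.g. at the end of a multi-source frame handler
     this.faceManager.DrawLatestFaceResults(this.FacePointsCanvas,
         FaceFrameFeatures.BoundingBoxInColorSpace | FaceFrameFeatures.PointsInColorSpace);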