Example #1
        public void Evaluate(int SpreadMax)
        {
            if (this.FInvalidateConnect)
            {
                if (this.FInRuntime.PluginIO.IsConnected)
                {
                    //Cache runtime node
                    this.runtime = this.FInRuntime[0];

                    if (runtime != null)
                    {
                        faceSrc = new Microsoft.Kinect.Face.FaceFrameSource(this.runtime.Runtime);
                        faceSrc.FaceFrameFeatures = FaceFrameFeatures.FaceEngagement
                            | FaceFrameFeatures.LeftEyeClosed
                            | FaceFrameFeatures.LookingAway
                            | FaceFrameFeatures.Happy
                            | FaceFrameFeatures.MouthMoved
                            | FaceFrameFeatures.MouthOpen
                            | FaceFrameFeatures.RightEyeClosed
                            | FaceFrameFeatures.RotationOrientation;
                        faceReader = faceSrc.OpenReader();
                        faceReader.FrameArrived += this.faceReader_FrameArrived;

                        hdSrc = new HighDefinitionFaceFrameSource(this.runtime.Runtime);
                        hdRead = hdSrc.OpenReader();
                        hdRead.FrameArrived += hdRead_FrameArrived;
                    }
                }
                else
                {
                    if (faceReader != null)
                    {
                        // Guard on the reader itself before unsubscribing and disposing
                        faceReader.FrameArrived -= faceReader_FrameArrived;
                        faceReader.Dispose();
                        faceReader = null;
                    }
                }

                this.FInvalidateConnect = false;
            }
        }
        // Primary initialization. Runs when the window loads.
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _sensor.Open();

                _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

                _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body);
                _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

                _bodyReader = _sensor.BodyFrameSource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // 2) Initialize the face source with the desired features
                _faceSource = new FaceFrameSource(_sensor, 0, FaceFrameFeatures.BoundingBoxInColorSpace |
                                                              FaceFrameFeatures.FaceEngagement |
                                                              FaceFrameFeatures.Glasses |
                                                              FaceFrameFeatures.Happy |
                                                              FaceFrameFeatures.LeftEyeClosed |
                                                              FaceFrameFeatures.MouthOpen |
                                                              FaceFrameFeatures.PointsInColorSpace |
                                                              FaceFrameFeatures.RightEyeClosed);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;
            }
        }
        public MainWindow()
        {
            InitializeComponent();

            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _sensor.Open();

                _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

                _colorReader = _sensor.ColorFrameSource.OpenReader();
                _colorReader.FrameArrived += ColorReader_FrameArrived;
                _bodyReader = _sensor.BodyFrameSource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // 2) Initialize the face source with the desired features
                _faceSource = new FaceFrameSource(_sensor, 0, FaceFrameFeatures.BoundingBoxInColorSpace |
                                                              FaceFrameFeatures.FaceEngagement |
                                                              FaceFrameFeatures.Glasses |
                                                              FaceFrameFeatures.Happy |
                                                              FaceFrameFeatures.LeftEyeClosed |
                                                              FaceFrameFeatures.MouthOpen |
                                                              FaceFrameFeatures.PointsInColorSpace |
                                                              FaceFrameFeatures.RightEyeClosed |
                                                              FaceFrameFeatures.LookingAway);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;
            }
        }
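Several of these examples subscribe a FaceReader_FrameArrived handler without showing it. A minimal sketch of such a handler, assuming the field names used above; the FaceProperty lookup is only an illustration:

        private void FaceReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
        {
            using (FaceFrame frame = e.FrameReference.AcquireFrame())
            {
                if (frame == null) return;

                // The result is null until the source is bound to a tracked body
                FaceFrameResult result = frame.FaceFrameResult;
                if (result == null) return;

                // Example: query one of the requested features
                if (result.FaceProperties[FaceProperty.Happy] == DetectionResult.Yes)
                {
                    // React to the detected expression here
                }
            }
        }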
Example #4
        public void Evaluate(int SpreadMax)
        {
            if (this.FInvalidateConnect)
            {
                if (this.FInRuntime.PluginIO.IsConnected)
                {
                    //Cache runtime node
                    this.runtime = this.FInRuntime[0];

                    if (runtime != null)
                    {
                        faceSrc = new Microsoft.Kinect.Face.FaceFrameSource(this.runtime.Runtime);
                        faceSrc.FaceFrameFeatures = FaceFrameFeatures.BoundingBoxInInfraredSpace | FaceFrameFeatures.BoundingBoxInColorSpace;
                        faceReader = faceSrc.OpenReader();
                        faceReader.FrameArrived += this.faceReader_FrameArrived;

                        /* hdSrc = new HighDefinitionFaceFrameSource(this.runtime.Runtime);
                         * hdRead = hdSrc.OpenReader();
                         * hdRead.FrameArrived += hdRead_FrameArrived;*/
                    }
                }
                else
                {
                    if (faceReader != null)
                    {
                        // Guard on the reader itself before unsubscribing and disposing
                        faceReader.FrameArrived -= faceReader_FrameArrived;
                        faceReader.Dispose();
                        faceReader = null;
                    }
                }

                this.FInvalidateConnect = false;
            }
        }
        public MainWindow()
        {
            InitializeComponent();
            network.init();
            _sensor = KinectSensor.GetDefault();
            if (_sensor != null)
            {
                _sensor.Open();

                // Identify the bodies
                _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

                _colorReader = _sensor.ColorFrameSource.OpenReader();
                _colorReader.FrameArrived += ColorReader_FrameArrived;
                _bodyReader = _sensor.BodyFrameSource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // Initialize the face source with the desired features; the remaining
                // features are commented out and can be enabled later.
                _faceSource = new FaceFrameSource(_sensor, 0, FaceFrameFeatures.BoundingBoxInColorSpace);
                /*
                                                                FaceFrameFeatures.FaceEngagement |
                                                                FaceFrameFeatures.Glasses |
                                                                FaceFrameFeatures.Happy |
                                                                FaceFrameFeatures.LeftEyeClosed |
                                                                FaceFrameFeatures.MouthOpen |
                                                                FaceFrameFeatures.PointsInColorSpace |
                                                                FaceFrameFeatures.RightEyeClosed
                                                                */

                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;
            }
        }
        public MainWindow()
        {
            InitializeComponent();
            var hubConnection = new HubConnection("http://divewakeweb.azurewebsites.net/");
            stockTickerHubProxy = hubConnection.CreateHubProxy("WakeHub");
            hubConnection.Start().Wait();
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _sensor.Open();

                _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

                _colorReader = _sensor.ColorFrameSource.OpenReader();
                _colorReader.FrameArrived += ColorReader_FrameArrived;
                _bodyReader = _sensor.BodyFrameSource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // 2) Initialize the face source with the desired features
                _faceSource = new FaceFrameSource(_sensor, 0, FaceFrameFeatures.BoundingBoxInColorSpace |
                                                              FaceFrameFeatures.FaceEngagement |
                                                              FaceFrameFeatures.Glasses |
                                                              FaceFrameFeatures.LeftEyeClosed |
                                                              FaceFrameFeatures.PointsInColorSpace |
                                                              FaceFrameFeatures.RightEyeClosed);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;
            }
        }
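None of these constructors shows the BodyReader_FrameArrived handler they rely on, yet it performs the essential pairing step: a face source created with an initial tracking id of 0 produces no results until its TrackingId is bound to a tracked body. A minimal sketch, assuming the same _bodies and _faceSource fields and System.Linq for FirstOrDefault:

        private void BodyReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            using (BodyFrame frame = e.FrameReference.AcquireFrame())
            {
                if (frame == null) return;

                frame.GetAndRefreshBodyData(_bodies);

                // Bind the face source to the first tracked body
                Body body = _bodies.FirstOrDefault(b => b.IsTracked);
                if (body != null)
                {
                    _faceSource.TrackingId = body.TrackingId;
                }
            }
        }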
        public MainWindow()
        {
            InitializeComponent();
            network.init();
            _sensor = KinectSensor.GetDefault();
            if(_sensor != null)
            {
                _sensor.Open();

                bodyCount = _sensor.BodyFrameSource.BodyCount;
                // Identify the bodies 
                _bodies = new Body[bodyCount];

                _colorReader = _sensor.ColorFrameSource.OpenReader();
                _colorReader.FrameArrived += ColorReader_FrameArrived;
                _bodyReader = _sensor.BodyFrameSource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // Initialize the face source with the desired features.
                _faceSources = new FaceFrameSource[bodyCount];
                _faceReaders = new FaceFrameReader[bodyCount];

                for(int i = 0; i < bodyCount; i++)
                {
                    // Create the face frame source with the required features and initial tracking id of 0
                    _faceSources[i] = new FaceFrameSource(_sensor, 0, FaceFrameFeatures.BoundingBoxInColorSpace);

                    // open the corresponding reader
                    _faceReaders[i] = _faceSources[i].OpenReader();
                    _faceReaders[i].FrameArrived += FaceReader_FrameArrived;
                }

                _faceResults = new FaceFrameResult[bodyCount];

                // Set the arrays and values for person switches and timeouts
                personSize = 3;
                ims = new Image[3] {maskImage, maskImage2, maskImage3};
                trackedInd = new bool[3] { false, false, false };
                _persons = new Person[personSize];
                for(int i = 0; i < personSize; i++)
                {
                    _persons[i] = new Person(0, ims[i], -1);
                }
                paths = new String[3] { "pack://application:,,,/Images/tinfoil.png",
                                        "pack://application:,,,/Images/cowboy.png",
                                        "pack://application:,,,/Images/napolean.png"};
            }
        }
        public void matchFaceWithBody(FaceFrameSource[] faceFrameSources, FaceFrameResult[] faceFrameResults, FaceProcessor faceProcessor)
        {
            foreach (var body in bodies)
            {
                if (!body.IsTracked)
                {
                    continue;
                }

                int i = getBodyIndex(body);
                //Debug.Print("Body {0} comes with ID {1}", i, body.TrackingId);

                if (faceFrameSources[i].IsTrackingIdValid)
                {
                    // check if we have valid face frame results
                    //Debug.Print("Source Valid {0}", i);

                    if (faceFrameResults[i] != null)
                    {
                        //Debug.Print("Result Valid {0}", i);
                        int pitch, yaw, roll;
                        faceProcessor.ExtractFaceRotationInDegrees(faceFrameResults[i].FaceRotationQuaternion, out pitch,
                            out yaw, out roll);

                        eManager.users[faceFrameSources[i].TrackingId].headPitch = pitch;
                        eManager.users[faceFrameSources[i].TrackingId].headYaw = yaw;
                        eManager.users[faceFrameSources[i].TrackingId].headRoll = roll;
                    }
                    else
                    {
                        // No valid face result: use an out-of-range sentinel (valid angles stay within ±180°)
                        eManager.users[faceFrameSources[i].TrackingId].headPitch = 1000;
                        eManager.users[faceFrameSources[i].TrackingId].headYaw = 1000;
                        eManager.users[faceFrameSources[i].TrackingId].headRoll = 1000;
                    }

                    if (eManager.HasEngaged && eManager.Engager.body.TrackingId == faceFrameSources[i].TrackingId)
                    {
                        //Debug.Print("Engager: {0}", eManager.Engager.body.TrackingId);
                        eManager.setTrackerFaceOrientation(faceFrameSources[i].TrackingId);

                    }
                }
                else
                {
                    faceFrameSources[i].TrackingId = bodies[i].TrackingId;
                }
            }
        }
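The ExtractFaceRotationInDegrees call above converts the face rotation quaternion into Euler angles. A sketch of that conversion, following the pattern in Microsoft's FaceBasics sample; treat it as an assumption about what FaceProcessor implements, not its actual source:

        public void ExtractFaceRotationInDegrees(Vector4 rotQuaternion, out int pitch, out int yaw, out int roll)
        {
            double x = rotQuaternion.X;
            double y = rotQuaternion.Y;
            double z = rotQuaternion.Z;
            double w = rotQuaternion.W;

            // Convert the face rotation quaternion to Euler angles in degrees
            pitch = (int)(Math.Atan2(2 * (y * z + w * x), w * w - x * x - y * y + z * z) / Math.PI * 180.0);
            yaw = (int)(Math.Asin(2 * (w * y - x * z)) / Math.PI * 180.0);
            roll = (int)(Math.Atan2(2 * (x * y + w * z), w * w + x * x - y * y - z * z) / Math.PI * 180.0);
        }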
        private void Page_Unloaded(object sender, RoutedEventArgs e)
        {
            if (_colorReader != null)
            {
                _colorReader.Dispose();
                _colorReader = null;
            }

            if (_bodyReader != null)
            {
                _bodyReader.Dispose();
                _bodyReader = null;
            }

            if (_faceReader != null)
            {
                _faceReader.Dispose();
                _faceReader = null;
            }

            if (_faceSource != null)
            {
                _faceSource.Dispose();
                _faceSource = null;
            }

            if (_sensor != null)
            {
                _sensor.Close();
            }
        }
Example #10
 private int GetIndex(FaceFrameSource src)
 {
     for (int i = 0; i < faceFrameSources.Length; i++)
     {
         if (src == faceFrameSources[i]) { return i; }
     }
     // Falls back to 0 when the source is not found; the GetFaceSourceIndex
     // variants below return -1 instead, avoiding conflation with a match at index 0
     return 0;
 }
Example #11
        // Dispose of the readers and face source, and close the sensor, when the window closes.
        private void Window_Closing(object sender, System.ComponentModel.CancelEventArgs e)
        {
            if (_colorReader != null)
            {
                _colorReader.Dispose();
                _colorReader = null;
            }

            if (_bodyReader != null)
            {
                _bodyReader.Dispose();
                _bodyReader = null;
            }

            if (_faceReader != null)
            {
                _faceReader.Dispose();
                _faceReader = null;
            }

            if (_faceSource != null)
            {
                _faceSource.Dispose();
                _faceSource = null;
            }

            if (_sensor != null)
            {
                _sensor.Close();
            }
        }
 /// <summary>
 /// Constructor
 /// </summary>
 /// <param name="sensor">Kinect sensor</param>
 public SingleFaceProcessor(KinectSensor sensor)
 {
     this.frameSource = new FaceFrameSource(sensor, 0, FaceUtils.AllFeatures());
     this.framereader = this.frameSource.OpenReader();
     this.framereader.FrameArrived += this.FrameArrived;
 }
 private void InitializeFace()
 {
     m_FaceSource = new Microsoft.Kinect.Face.FaceFrameSource(m_Sensor, m_CurrentTrackingId,
         Microsoft.Kinect.Face.FaceFrameFeatures.BoundingBoxInColorSpace
         | Microsoft.Kinect.Face.FaceFrameFeatures.BoundingBoxInInfraredSpace
         | Microsoft.Kinect.Face.FaceFrameFeatures.PointsInColorSpace
         | Microsoft.Kinect.Face.FaceFrameFeatures.PointsInInfraredSpace);
     m_FaceReader = m_FaceSource.OpenReader();
     m_FaceReader.FrameArrived += M_FaceReader_FrameArrived;
 }
        /// <summary>
        /// Process body frames
        /// </summary>
        private void OnBodyFrameReceived(object sender, BodyFrameArrivedEventArgs e)
        {
            // Get Frame ref
            BodyFrameReference bodyRef = e.FrameReference;

            if (bodyRef == null) return;

            // Get body frame
            using (BodyFrame frame = bodyRef.AcquireFrame())
            {
                if (frame == null) return;

                // Allocate array when required
                if (_bodies == null)
                    _bodies = new Body[frame.BodyCount];

                // Refresh bodies
                frame.GetAndRefreshBodyData(_bodies);

                foreach (Body body in _bodies)
                {
                    if (body.IsTracked && _faceSource == null)
                    {
                        // Create new sources with body TrackingId
                        _faceSource = new FaceFrameSource(_kinect, body.TrackingId, _faceFrameFeatures);

                        // Create new reader
                        _faceReader = _faceSource.OpenReader();

                        // Wire events
                        _faceReader.FrameArrived += OnFaceFrameArrived;
                        _faceSource.TrackingIdLost += OnTrackingIdLost;
                    }
                }
            }
        }
        /// <summary>
        /// Handle when the tracked body is gone
        /// </summary>
        private void OnTrackingIdLost(object sender, TrackingIdLostEventArgs e)
        {
            // Update UI
            HappyResult.Text = "No face tracked";
            EngagedResult.Text = "No face tracked";
            GlassesResult.Text = "No face tracked";
            LeftEyeResult.Text = "No face tracked";
            RightEyeResult.Text = "No face tracked";
            MouthOpenResult.Text = "No face tracked";
            MouthMovedResult.Text = "No face tracked";
            LookingAwayResult.Text = "No face tracked";

            // Reset values for next body
            _faceReader = null;
            _faceSource = null;
        }
        private void DisposeOfObjects()
        {
            jadeCalculation.Dispose();
            jadeCalculation = null;

            m_colorBitmap = null;

            m_FaceSource.Dispose();
            m_FaceSource = null;

            m_FaceReader.Dispose();
            m_FaceReader = null;

            m_currentTrackedBody = null;

            m_MSFReader.Dispose();
            m_MSFReader = null;
            m_CoordMapper = null;

            m_Sensor.Close();
            m_Sensor = null;
        }
        private void DisposeOfObjects()
        {
            engine.Dispose();
            engine = null;

            closeFile();
            m_colorBitmap = null;

            m_FaceSource.Dispose();
            m_FaceSource = null;

            m_FaceReader.Dispose();
            m_FaceReader = null;

            m_currentTrackedBody = null;

            m_MSFReader.Dispose();
            m_MSFReader = null;
            m_CoordMapper = null;

            m_Sensor.Close();
            m_Sensor = null;
        }
Example #18
 void InitializeFace()
 {
     FaceFrameFeatures faceFrameFeatures =
             FaceFrameFeatures.BoundingBoxInColorSpace
             | FaceFrameFeatures.PointsInColorSpace
             | FaceFrameFeatures.RotationOrientation
             | FaceFrameFeatures.FaceEngagement
             | FaceFrameFeatures.Glasses
             | FaceFrameFeatures.Happy
             | FaceFrameFeatures.LeftEyeClosed
             | FaceFrameFeatures.RightEyeClosed
             | FaceFrameFeatures.LookingAway
             | FaceFrameFeatures.MouthMoved
             | FaceFrameFeatures.MouthOpen;
     faceFrameSources = new FaceFrameSource[bodyCount];
     faceFrameReaders = new FaceFrameReader[bodyCount];
     for ( int i = 0; i < bodyCount; i++ ) {
         faceFrameSources[i] = new FaceFrameSource( kinect, 0, faceFrameFeatures );
         faceFrameReaders[i] = faceFrameSources[i].OpenReader();
         faceFrameReaders[i].FrameArrived += faceFrameReader_FrameArrived;
     }
     faceFrameResults = new FaceFrameResult[bodyCount];
     faceBrush = new List<Brush>()
         {
             Brushes.White,
             Brushes.Orange,
             Brushes.Green,
             Brushes.Red,
             Brushes.LightBlue,
             Brushes.Yellow
         };
 }
        // Manages closing all active processes when exiting the primary window.
        private void Window_Closed(object sender, EventArgs e)
        {
            if (_reader != null)
            {
                _reader.Dispose();
            }

            if (_sensor != null)
            {
                _sensor.Close();
            }

            if (_faceReader != null)
            {
                _faceReader.Dispose();
                _faceReader = null;
            }

            if (_faceSource != null)
            {
                _faceSource.Dispose();
                _faceSource = null;
            }
        }
        private void readFaceFrame()
        {
            // specify the required face frame results
            FaceFrameFeatures faceFrameFeatures =
                FaceFrameFeatures.BoundingBoxInColorSpace
                | FaceFrameFeatures.PointsInColorSpace
                | FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.FaceEngagement
                | FaceFrameFeatures.LookingAway;

            // create a face frame source + reader to track each face in the FOV
            faceFrameSources = new FaceFrameSource[bodyCount];
            faceFrameReaders = new FaceFrameReader[bodyCount];
            for (int i = 0; i < bodyCount; i++)
            {
                // create the face frame source with the required face frame features and an initial tracking Id of 0
                faceFrameSources[i] = new FaceFrameSource(sensor, 0, faceFrameFeatures);
                ffsDic.Add(faceFrameSources[i], i);

                // open the corresponding reader
                faceFrameReaders[i] = faceFrameSources[i].OpenReader();
            }

            // allocate storage to store face frame results for each face in the FOV
            faceFrameResults = new FaceFrameResult[bodyCount];

            for (int i = 0; i < bodyCount; i++)
            {
                if (faceFrameReaders[i] != null)
                {
                    // wire handler for face frame arrival
                    faceFrameReaders[i].FrameArrived += face_FrameArrived;
                }
            }
        }
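A minimal sketch of the face_FrameArrived handler wired above, using the ffsDic dictionary built in readFaceFrame to map a frame back to its body slot; the handler body is an assumption based on the fields initialized here:

        private void face_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
        {
            using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
            {
                if (faceFrame == null) return;

                // Look up which body slot this source belongs to and cache the result
                int index = ffsDic[faceFrame.FaceFrameSource];
                faceFrameResults[index] = faceFrame.FaceFrameResult;
            }
        }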
        /// <summary>
        /// Initializes the Kinect sensor and the variables used for data acquisition
        /// </summary>
        private void Initialize()
        {
            // Get the Kinect sensor
            this.kinect = KinectSensor.GetDefault();

            if (this.kinect == null) return;

            // Get the sensor's color frame description
            var desc = kinect.ColorFrameSource.FrameDescription;
            // Initialize the drawing variables from the sensor information
            this.colorPixels = new byte[desc.Width * desc.Height * bytePerPixel];
            this._ColorBitmap = new WriteableBitmap(desc.Width, desc.Height, 96.0, 96.0, PixelFormats.Bgr32, null);
            this._FacePointBitmap = new RenderTargetBitmap(desc.Width, desc.Height, 96.0, 96.0, PixelFormats.Default);

            // Create a FrameReader that retrieves Body (skeleton) and Color data from the sensor
            this.reader = kinect.OpenMultiSourceFrameReader(FrameSourceTypes.Body | FrameSourceTypes.Color);
            this.reader.MultiSourceFrameArrived += OnMultiSourceFrameArrived;

            // Create the FaceFrameSource
            faceSource = new FaceFrameSource(kinect, 0, DefaultFaceFrameFeatures);

            // Create the reader
            faceReader = faceSource.OpenReader();

            // Event raised when a frame can be received from the FaceReader
            faceReader.FrameArrived += OnFaceFrameArrived;
            // Event raised when the FaceFrameSource loses tracking of the specified TrackingId
            faceSource.TrackingIdLost += OnTrackingIdLost;

            // Start the sensor
            kinect.Open();
        }
Example #22
 int GetFaceSourceIndex( FaceFrameSource source )
 {
     int index = -1;
     for ( int i=0; i<bodyCount; i++ ) {
         if ( faceFrameSources[i]==source ) {
             index = i;
             break;
         }
     }
     return index;
 }
        /// <summary>
        /// setup kinect
        /// start search for bodies
        /// start bodyframereader to add body to facereader
        /// start facereader
        /// adds faceframereader to track mouth
        /// </summary>
        private void SetupKinect()
        {
            if (_kinectSensor == null) return;

            _bodies = new Body[_kinectSensor.BodyFrameSource.BodyCount];
            _bodyReader = _kinectSensor.BodyFrameSource.OpenReader();
            _bodyReader.FrameArrived += BodyReader_FrameArrived;

            // Initialize the face source with the desired features
            _faceSource = new FaceFrameSource(_kinectSensor, 0, FaceFrameFeatures.MouthOpen);
            _faceReader = _faceSource.OpenReader();
            _faceReader.FrameArrived += FaceReader_FrameArrived;

            Connected?.Invoke();
        }
        /// <summary>
        /// Returns the index of the face frame source
        /// </summary>
        /// <param name="faceFrameSource">the face frame source</param>
        /// <returns>the index of the face source in the face source array</returns>
        private int GetFaceSourceIndex(FaceFrameSource faceFrameSource)
        {
            int index = -1;

            for (int i = 0; i < this.bodyCount; i++)
            {
                if (this.faceFrameSources[i] == faceFrameSource)
                {
                    index = i;
                    break;
                }
            }

            return index;
        }
        /// <summary>
        /// Extended CTOR
        /// </summary>
        /// <param name="bodyId">Id of the tracked body</param>
        /// <param name="faceFeatures">Set of requested face features to track</param>
        /// <param name="kinect">Kinect sensor that is tracking</param>
        public FaceTracker(ulong bodyId, FaceFrameFeatures faceFeatures, KinectSensor kinect)
        {
            // Pin-point start of tracking
            _startTracking = DateTime.Now;

            // Save variables
            _bodyId = bodyId;
            _faceFeatures = faceFeatures;
            // _kinectId = kinect.UniqueKinectId --> NotImplementedYet

            // Create a new source with body TrackingId
            _faceSource = new FaceFrameSource(kinect, bodyId, faceFeatures);

            // Create new reader
            _faceReader = _faceSource.OpenReader();

            Console.WriteLine(String.Format("Tracker for body #{0} started.", _bodyId));

            // Initialize FaceFeatureTrackers
            InitialiseFeatureTrackers();

            // Wire events
            _faceReader.FrameArrived += OnFaceFrameArrived;
            _faceSource.TrackingIdLost += OnTrackingLost;
        }
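A hypothetical usage sketch for this constructor, called from a body-frame handler once a body becomes tracked; the body and kinect variables are assumptions about the caller's context:

        // Hypothetical usage: one tracker per newly tracked body
        var tracker = new FaceTracker(body.TrackingId,
                                      FaceFrameFeatures.Happy | FaceFrameFeatures.MouthOpen,
                                      kinect);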
Example #26
        public void InitializeHDFace()
        {
          
            this.sensor = KinectSensor.GetDefault();
            this.bodySource = this.sensor.BodyFrameSource;
            FrameDescription colorFrameDescription = this.sensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
            
            this.highDefinitionFaceFrameSource = new HighDefinitionFaceFrameSource(sensor);
            this.highDefinitionFaceFrameSource.TrackingIdLost += this.HdFaceSource_TrackingIdLost;

            this.highDefinitionFaceFrameReader = this.highDefinitionFaceFrameSource.OpenReader();
            this.highDefinitionFaceFrameReader.FrameArrived += this.HdFaceReader_FrameArrived; // event for high-definition face frames

            if (scenarioselected == 2)
            {
                this.highDefinitionFaceFrameSource2 = new HighDefinitionFaceFrameSource(sensor);
                this.highDefinitionFaceFrameSource2.TrackingIdLost += this.HdFaceSource_TrackingIdLost2;

                this.highDefinitionFaceFrameReader2 = this.highDefinitionFaceFrameSource2.OpenReader();
                this.highDefinitionFaceFrameReader2.FrameArrived += this.HdFaceReader_FrameArrived2; // event for high-definition face frames
                faceSource2 = new FaceFrameSource(sensor, 0, DefaultFaceFrameFeatures);
                faceReader2 = faceSource2.OpenReader();
                faceReader2.FrameArrived += OnFaceFrameArrived2; // event for face data
                faceSource2.TrackingIdLost += OnTrackingIdLost2;

            }

            this.reader = sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Body | FrameSourceTypes.Color);
            
            this.reader.MultiSourceFrameArrived += OnMultiSourceFrameArrived; // event for the multi-source frames (position data)
            this.currentFaceAlignment = new FaceAlignment();

            faceSource = new FaceFrameSource(sensor, 0, DefaultFaceFrameFeatures);
            faceReader = faceSource.OpenReader();
            faceReader.FrameArrived += OnFaceFrameArrived; //event for face data
            
            faceSource.TrackingIdLost += OnTrackingIdLost;
            this.pixels = new byte[colorFrameDescription.Width * colorFrameDescription.Height * colorFrameDescription.BytesPerPixel];
            this.sensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;
            this.sensor.Open();
            
        }
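A minimal sketch of the HdFaceReader_FrameArrived handler subscribed above, refreshing the currentFaceAlignment field this method initializes; the handler body is an assumption:

        private void HdFaceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            using (HighDefinitionFaceFrame frame = e.FrameReference.AcquireFrame())
            {
                // Only refresh the alignment when a face is actually tracked
                if (frame == null || !frame.IsFaceTracked) return;

                frame.GetAndRefreshFaceAlignmentResult(this.currentFaceAlignment);
            }
        }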