Ejemplo n.º 1
0
        /// <summary>
        /// Initializes the window, connects to the SignalR hub, and wires up the
        /// Kinect color, body, and face frame readers.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            var hubConnection = new HubConnection("http://divewakeweb.azurewebsites.net/");
            stockTickerHubProxy = hubConnection.CreateHubProxy("WakeHub");

            // FIX: .Wait() wrapped failures in AggregateException; GetAwaiter().GetResult()
            // surfaces the original exception. NOTE(review): this still blocks the UI
            // thread inside a constructor — consider moving the hub start into an async
            // Loaded handler.
            hubConnection.Start().GetAwaiter().GetResult();

            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _sensor.Open();

                // One slot per body the sensor can track.
                _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

                _colorReader = _sensor.ColorFrameSource.OpenReader();
                _colorReader.FrameArrived += ColorReader_FrameArrived;

                _bodyReader = _sensor.BodyFrameSource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // Initialize the face source with the desired features.
                // Tracking id 0: presumably rebound once a body is tracked — TODO confirm.
                _faceSource = new FaceFrameSource(_sensor, 0, FaceFrameFeatures.BoundingBoxInColorSpace |
                                                              FaceFrameFeatures.FaceEngagement |
                                                              FaceFrameFeatures.Glasses |
                                                              FaceFrameFeatures.LeftEyeClosed |
                                                              FaceFrameFeatures.PointsInColorSpace |
                                                              FaceFrameFeatures.RightEyeClosed);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;
            }
        }
Ejemplo n.º 2
0
        /// <summary>
        /// Sets up the default Kinect sensor and opens the color, body, and face
        /// frame readers used by this window.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            _sensor = KinectSensor.GetDefault();

            if (_sensor == null)
            {
                return;
            }

            _sensor.Open();

            // One slot per body the sensor can track.
            _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

            _colorReader = _sensor.ColorFrameSource.OpenReader();
            _colorReader.FrameArrived += ColorReader_FrameArrived;

            _bodyReader = _sensor.BodyFrameSource.OpenReader();
            _bodyReader.FrameArrived += BodyReader_FrameArrived;

            // Face features this window wants reported on every face frame.
            var features = FaceFrameFeatures.BoundingBoxInColorSpace
                         | FaceFrameFeatures.FaceEngagement
                         | FaceFrameFeatures.Glasses
                         | FaceFrameFeatures.Happy
                         | FaceFrameFeatures.LeftEyeClosed
                         | FaceFrameFeatures.MouthOpen
                         | FaceFrameFeatures.PointsInColorSpace
                         | FaceFrameFeatures.RightEyeClosed
                         | FaceFrameFeatures.LookingAway;

            _faceSource = new FaceFrameSource(_sensor, 0, features);
            _faceReader = _faceSource.OpenReader();
            _faceReader.FrameArrived += FaceReader_FrameArrived;
        }
Ejemplo n.º 3
0
        /// <summary>
        /// Per-frame plugin evaluation. Reacts only when the runtime connection
        /// state changed: builds the face pipeline on connect, tears it down on
        /// disconnect.
        /// </summary>
        public void Evaluate(int SpreadMax)
        {
            if (this.FInvalidateConnect)
            {
                if (this.FInRuntime.PluginIO.IsConnected)
                {
                    //Cache runtime node
                    this.runtime = this.FInRuntime[0];

                    if (runtime != null)
                    {
                        faceSrc = new Microsoft.Kinect.Face.FaceFrameSource(this.runtime.Runtime);
                        faceSrc.FaceFrameFeatures = FaceFrameFeatures.BoundingBoxInInfraredSpace | FaceFrameFeatures.BoundingBoxInColorSpace;
                        faceReader = faceSrc.OpenReader();
                        faceReader.FrameArrived += this.faceReader_FrameArrived;

                        /* hdSrc = new HighDefinitionFaceFrameSource(this.runtime.Runtime);
                         * hdRead = hdSrc.OpenReader();
                         * hdRead.FrameArrived += hdRead_FrameArrived;*/
                    }
                }
                else
                {
                    // FIX: the original guarded on faceSrc but dereferenced faceReader
                    // (possible NRE), and never reset either field, leaking the source
                    // and leaving stale references for the next connect.
                    if (faceReader != null)
                    {
                        faceReader.FrameArrived -= faceReader_FrameArrived;
                        faceReader.Dispose();
                        faceReader = null;
                    }

                    if (faceSrc != null)
                    {
                        faceSrc.Dispose();
                        faceSrc = null;
                    }
                }

                this.FInvalidateConnect = false;
            }
        }
Ejemplo n.º 4
0
        /// <summary>
        /// Primary entry point, invoked when the window loads: opens the sensor,
        /// the multi-source reader, the body reader, and the face pipeline.
        /// </summary>
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            _sensor = KinectSensor.GetDefault();

            if (_sensor == null)
            {
                return;
            }

            _sensor.Open();

            // One slot per body the sensor can track.
            _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

            // Single reader covering all the streams this window consumes.
            var sourceTypes = FrameSourceTypes.Color
                            | FrameSourceTypes.Depth
                            | FrameSourceTypes.Infrared
                            | FrameSourceTypes.Body;
            _reader = _sensor.OpenMultiSourceFrameReader(sourceTypes);
            _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

            _bodyReader = _sensor.BodyFrameSource.OpenReader();
            _bodyReader.FrameArrived += BodyReader_FrameArrived;

            // Face features requested from the face frame source.
            var features = FaceFrameFeatures.BoundingBoxInColorSpace
                         | FaceFrameFeatures.FaceEngagement
                         | FaceFrameFeatures.Glasses
                         | FaceFrameFeatures.Happy
                         | FaceFrameFeatures.LeftEyeClosed
                         | FaceFrameFeatures.MouthOpen
                         | FaceFrameFeatures.PointsInColorSpace
                         | FaceFrameFeatures.RightEyeClosed;

            _faceSource = new FaceFrameSource(_sensor, 0, features);
            _faceReader = _faceSource.OpenReader();
            _faceReader.FrameArrived += FaceReader_FrameArrived;
        }
        /// <summary>
        /// Initializes the window, the network layer, and the Kinect color, body,
        /// and face readers.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();
            network.init();

            _sensor = KinectSensor.GetDefault();

            if (_sensor == null)
            {
                return;
            }

            _sensor.Open();

            // One slot per trackable body.
            _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

            _colorReader = _sensor.ColorFrameSource.OpenReader();
            _colorReader.FrameArrived += ColorReader_FrameArrived;

            _bodyReader = _sensor.BodyFrameSource.OpenReader();
            _bodyReader.FrameArrived += BodyReader_FrameArrived;

            // Only the color-space bounding box is tracked for now; additional
            // features (engagement, glasses, eyes, mouth, ...) may be added later.
            _faceSource = new FaceFrameSource(_sensor, 0, FaceFrameFeatures.BoundingBoxInColorSpace);

            _faceReader = _faceSource.OpenReader();
            _faceReader.FrameArrived += FaceReader_FrameArrived;
        }
Ejemplo n.º 6
0
        /// <summary>
        /// Releases every resource held by this instance.
        /// FIX: null guards make the method safe to call more than once or after a
        /// partially failed initialization (the original threw NullReferenceException
        /// in those cases); the reader is now disposed before its source.
        /// </summary>
        private void DisposeOfObjects()
        {
            if (engine != null)
            {
                engine.Dispose();
                engine = null;
            }

            closeFile();
            m_colorBitmap = null;

            if (m_FaceReader != null)
            {
                m_FaceReader.Dispose();
                m_FaceReader = null;
            }

            if (m_FaceSource != null)
            {
                m_FaceSource.Dispose();
                m_FaceSource = null;
            }

            m_currentTrackedBody = null;

            if (m_MSFReader != null)
            {
                m_MSFReader.Dispose();
                m_MSFReader = null;
            }
            m_CoordMapper = null;

            // Close the sensor last, after everything opened from it is gone.
            if (m_Sensor != null)
            {
                m_Sensor.Close();
                m_Sensor = null;
            }
        }
Ejemplo n.º 7
0
 /// <summary>
 /// Creates the face frame source for the body identified by m_CurrentTrackingId,
 /// requesting bounding boxes and face points in both color and infrared space,
 /// and starts listening for face frames.
 /// </summary>
 private void InitializeFace()
 {
     const Microsoft.Kinect.Face.FaceFrameFeatures features =
         Microsoft.Kinect.Face.FaceFrameFeatures.BoundingBoxInColorSpace
         | Microsoft.Kinect.Face.FaceFrameFeatures.BoundingBoxInInfraredSpace
         | Microsoft.Kinect.Face.FaceFrameFeatures.PointsInColorSpace
         | Microsoft.Kinect.Face.FaceFrameFeatures.PointsInInfraredSpace;

     m_FaceSource = new Microsoft.Kinect.Face.FaceFrameSource(m_Sensor, m_CurrentTrackingId, features);
     m_FaceReader = m_FaceSource.OpenReader();
     m_FaceReader.FrameArrived += M_FaceReader_FrameArrived;
 }
Ejemplo n.º 8
0
 /// <summary>
 /// Constructor: opens a face frame reader requesting every available face
 /// feature and subscribes to its frame-arrived event.
 /// </summary>
 /// <param name="sensor">Kinect sensor</param>
 public SingleFaceProcessor(KinectSensor sensor)
 {
     var source = new FaceFrameSource(sensor, 0, FaceUtils.AllFeatures());

     this.frameSource = source;
     this.framereader = source.OpenReader();
     this.framereader.FrameArrived += this.FrameArrived;
 }
Ejemplo n.º 9
0
        /// <summary>
        /// Tears down the Kinect pipeline when the window closes: readers first,
        /// then the face source, and finally the sensor itself.
        /// </summary>
        private void Window_Closing(object sender, System.ComponentModel.CancelEventArgs e)
        {
            _colorReader?.Dispose();
            _colorReader = null;

            _bodyReader?.Dispose();
            _bodyReader = null;

            _faceReader?.Dispose();
            _faceReader = null;

            _faceSource?.Dispose();
            _faceSource = null;

            _sensor?.Close();
        }
 /// <summary>
 /// Wires up face tracking for the body identified by m_CurrentTrackingId:
 /// bounding boxes and face points in both color and infrared space.
 /// </summary>
 private void InitializeFace()
 {
     var features = Microsoft.Kinect.Face.FaceFrameFeatures.BoundingBoxInColorSpace;
     features |= Microsoft.Kinect.Face.FaceFrameFeatures.BoundingBoxInInfraredSpace;
     features |= Microsoft.Kinect.Face.FaceFrameFeatures.PointsInColorSpace;
     features |= Microsoft.Kinect.Face.FaceFrameFeatures.PointsInInfraredSpace;

     m_FaceSource = new Microsoft.Kinect.Face.FaceFrameSource(m_Sensor, m_CurrentTrackingId, features);
     m_FaceReader = m_FaceSource.OpenReader();
     m_FaceReader.FrameArrived += M_FaceReader_FrameArrived;
 }
Ejemplo n.º 11
0
        /// <summary>
        /// Process body frames: refreshes the body array and, when no face source
        /// exists yet, binds one to the first tracked body found.
        /// </summary>
        private void OnBodyFrameReceived(object sender, BodyFrameArrivedEventArgs e)
        {
            // Get Frame ref
            BodyFrameReference bodyRef = e.FrameReference;

            if (bodyRef == null) return;

            // Get body frame
            using (BodyFrame frame = bodyRef.AcquireFrame())
            {
                if (frame == null) return;

                // Allocate array when required
                if (_bodies == null)
                    _bodies = new Body[frame.BodyCount];

                // Refresh bodies
                frame.GetAndRefreshBodyData(_bodies);

                foreach (Body body in _bodies)
                {
                    if (body.IsTracked && _faceSource == null)
                    {
                        // Create new source with body TrackingId
                        _faceSource = new FaceFrameSource(_kinect, body.TrackingId, _faceFrameFeatures);

                        // Create new reader
                        _faceReader = _faceSource.OpenReader();

                        // Wire events
                        _faceReader.FrameArrived += OnFaceFrameArrived;
                        _faceSource.TrackingIdLost += OnTrackingIdLost;

                        // FIX: stop scanning once a source is bound; the original
                        // kept iterating the remaining bodies for no effect.
                        break;
                    }
                }
            }
        }
Ejemplo n.º 12
0
        /// <summary>
        /// Releases every resource held by this instance.
        /// FIX: null guards make the method safe to call more than once or after a
        /// partially failed initialization (the original threw NullReferenceException
        /// in those cases); the reader is now disposed before its source.
        /// </summary>
        private void DisposeOfObjects()
        {
            if (engine != null)
            {
                engine.Dispose();
                engine = null;
            }

            closeFile();
            m_colorBitmap = null;

            if (m_FaceReader != null)
            {
                m_FaceReader.Dispose();
                m_FaceReader = null;
            }

            if (m_FaceSource != null)
            {
                m_FaceSource.Dispose();
                m_FaceSource = null;
            }

            m_currentTrackedBody = null;

            if (m_MSFReader != null)
            {
                m_MSFReader.Dispose();
                m_MSFReader = null;
            }
            m_CoordMapper = null;

            // Close the sensor last, after everything opened from it is gone.
            if (m_Sensor != null)
            {
                m_Sensor.Close();
                m_Sensor = null;
            }
        }
Ejemplo n.º 13
0
        /// <summary>
        /// Sets up the Kinect pipeline: allocates the body array, starts the body
        /// frame reader that feeds bodies to the face reader, and opens a face
        /// reader tracking only mouth-open state. Raises Connected when done.
        /// </summary>
        private void SetupKinect()
        {
            if (_kinectSensor == null)
            {
                return;
            }

            var bodySource = _kinectSensor.BodyFrameSource;

            _bodies = new Body[bodySource.BodyCount];
            _bodyReader = bodySource.OpenReader();
            _bodyReader.FrameArrived += BodyReader_FrameArrived;

            // Only the mouth-open feature is needed from the face pipeline.
            _faceSource = new FaceFrameSource(_kinectSensor, 0, FaceFrameFeatures.MouthOpen);
            _faceReader = _faceSource.OpenReader();
            _faceReader.FrameArrived += FaceReader_FrameArrived;

            Connected?.Invoke();
        }
        /// <summary>
        /// Initializes the Kinect sensor and the buffers/readers used for data
        /// capture, then starts the sensor.
        /// </summary>
        private void Initialize()
        {
            // Acquire the default Kinect sensor.
            this.kinect = KinectSensor.GetDefault();

            if (this.kinect == null)
            {
                return;
            }

            // Size the drawing buffers from the color stream description.
            var desc = this.kinect.ColorFrameSource.FrameDescription;
            this.colorPixels = new byte[desc.Width * desc.Height * bytePerPixel];
            this._ColorBitmap = new WriteableBitmap(desc.Width, desc.Height, 96.0, 96.0, PixelFormats.Bgr32, null);
            this._FacePointBitmap = new RenderTargetBitmap(desc.Width, desc.Height, 96.0, 96.0, PixelFormats.Default);

            // Reader delivering both body (skeleton) and color frames.
            this.reader = this.kinect.OpenMultiSourceFrameReader(FrameSourceTypes.Body | FrameSourceTypes.Color);
            this.reader.MultiSourceFrameArrived += OnMultiSourceFrameArrived;

            // Face pipeline: source, reader, and its events.
            faceSource = new FaceFrameSource(this.kinect, 0, DefaultFaceFrameFeatures);
            faceReader = faceSource.OpenReader();

            // Raised when a face frame is ready to be read.
            faceReader.FrameArrived += OnFaceFrameArrived;
            // Raised when tracking of the assigned TrackingId is lost.
            faceSource.TrackingIdLost += OnTrackingIdLost;

            // Start the sensor.
            this.kinect.Open();
        }
Ejemplo n.º 15
0
        /// <summary>
        /// Manages closing all active processes when exiting the primary window.
        /// FIX: the original closed the sensor BEFORE disposing the face reader and
        /// source; readers are now disposed first and the sensor closed last,
        /// matching the teardown order used elsewhere. _reader is also cleared.
        /// </summary>
        private void Window_Closed(object sender, EventArgs e)
        {
            if (_reader != null)
            {
                _reader.Dispose();
                _reader = null;
            }

            if (_faceReader != null)
            {
                _faceReader.Dispose();
                _faceReader = null;
            }

            if (_faceSource != null)
            {
                _faceSource.Dispose();
                _faceSource = null;
            }

            if (_sensor != null)
            {
                _sensor.Close();
            }
        }
Ejemplo n.º 16
0
        /// <summary>
        /// Handle when the tracked body is gone: resets the UI and releases the
        /// face pipeline so it can be rebound to the next tracked body.
        /// </summary>
        private void OnTrackingIdLost(object sender, TrackingIdLostEventArgs e)
        {
            // Update UI
            HappyResult.Text = "No face tracked";
            EngagedResult.Text = "No face tracked";
            GlassesResult.Text = "No face tracked";
            LeftEyeResult.Text = "No face tracked";
            RightEyeResult.Text = "No face tracked";
            MouthOpenResult.Text = "No face tracked";
            MouthMovedResult.Text = "No face tracked";
            LookingAwayResult.Text = "No face tracked";

            // FIX: the original only nulled the fields, leaking the reader and
            // source every time a body was lost; dispose before dropping them.
            if (_faceReader != null)
            {
                _faceReader.Dispose();
                _faceReader = null;
            }

            if (_faceSource != null)
            {
                _faceSource.Dispose();
                _faceSource = null;
            }
        }
        /// <summary>
        /// Releases every resource held by this instance.
        /// FIX: null guards make the method safe to call more than once or after a
        /// partially failed initialization (the original threw NullReferenceException
        /// in those cases); the reader is now disposed before its source.
        /// </summary>
        private void DisposeOfObjects()
        {
            if (jadeCalculation != null)
            {
                jadeCalculation.Dispose();
                jadeCalculation = null;
            }

            m_colorBitmap = null;

            if (m_FaceReader != null)
            {
                m_FaceReader.Dispose();
                m_FaceReader = null;
            }

            if (m_FaceSource != null)
            {
                m_FaceSource.Dispose();
                m_FaceSource = null;
            }

            m_currentTrackedBody = null;

            if (m_MSFReader != null)
            {
                m_MSFReader.Dispose();
                m_MSFReader = null;
            }
            m_CoordMapper = null;

            // Close the sensor last, after everything opened from it is gone.
            if (m_Sensor != null)
            {
                m_Sensor.Close();
                m_Sensor = null;
            }
        }
        /// <summary>
        /// Releases the Kinect readers and sensor when the page is unloaded.
        /// FIX: the face source was previously nulled WITHOUT being disposed,
        /// leaking it; it is now disposed like the sibling teardown handlers do.
        /// </summary>
        private void Page_Unloaded(object sender, RoutedEventArgs e)
        {
            if (_colorReader != null)
            {
                _colorReader.Dispose();
                _colorReader = null;
            }

            if (_bodyReader != null)
            {
                _bodyReader.Dispose();
                _bodyReader = null;
            }

            if (_faceReader != null)
            {
                _faceReader.Dispose();
                _faceReader = null;
            }

            if (_faceSource != null)
            {
                _faceSource.Dispose();
                _faceSource = null;
            }

            if (_sensor != null)
            {
                _sensor.Close();
            }
        }
Ejemplo n.º 19
0
        /// <summary>
        /// Per-frame plugin evaluation. On connect, builds both the classic face
        /// pipeline and the high-definition face pipeline; on disconnect, tears
        /// both down.
        /// </summary>
        public void Evaluate(int SpreadMax)
        {
            if (this.FInvalidateConnect)
            {
                if (this.FInRuntime.PluginIO.IsConnected)
                {
                    //Cache runtime node
                    this.runtime = this.FInRuntime[0];

                    if (runtime != null)
                    {
                        faceSrc = new Microsoft.Kinect.Face.FaceFrameSource(this.runtime.Runtime);
                        // FIX: FaceFrameFeatures.FaceEngagement was listed twice in the original flag set.
                        faceSrc.FaceFrameFeatures = FaceFrameFeatures.FaceEngagement | FaceFrameFeatures.LeftEyeClosed | FaceFrameFeatures.LookingAway | FaceFrameFeatures.Happy | FaceFrameFeatures.MouthMoved | FaceFrameFeatures.MouthOpen | FaceFrameFeatures.RightEyeClosed | FaceFrameFeatures.RotationOrientation;
                        faceReader = faceSrc.OpenReader();
                        faceReader.FrameArrived += this.faceReader_FrameArrived;

                        hdSrc = new HighDefinitionFaceFrameSource(this.runtime.Runtime);
                        hdRead = hdSrc.OpenReader();
                        hdRead.FrameArrived += hdRead_FrameArrived;
                    }
                }
                else
                {
                    // FIX: the original guarded on faceSrc but dereferenced faceReader
                    // (possible NRE), leaked the HD reader/source entirely, and never
                    // reset any field for the next connect.
                    if (faceReader != null)
                    {
                        faceReader.FrameArrived -= faceReader_FrameArrived;
                        faceReader.Dispose();
                        faceReader = null;
                    }
                    faceSrc = null;

                    if (hdRead != null)
                    {
                        hdRead.FrameArrived -= hdRead_FrameArrived;
                        hdRead.Dispose();
                        hdRead = null;
                    }
                    hdSrc = null;
                }

                this.FInvalidateConnect = false;
            }
        }
Ejemplo n.º 20
-1
        /// <summary>
        /// Extended CTOR: records the start of tracking, binds a face frame source
        /// to the given body, initializes the per-feature trackers, and wires the
        /// frame-arrived / tracking-lost events.
        /// </summary>
        /// <param name="bodyId">Id of the tracked body</param>
        /// <param name="faceFeatures">Set of requested face features to track</param>
        /// <param name="kinect">Kinect sensor that is tracking</param>
        public FaceTracker(ulong bodyId, FaceFrameFeatures faceFeatures, KinectSensor kinect)
        {
            // Pin-point start of tracking.
            _startTracking = DateTime.Now;

            _bodyId = bodyId;
            _faceFeatures = faceFeatures;
            // _kinectId = kinect.UniqueKinectId --> NotImplementedYet

            // Bind a face source to the body's TrackingId and open its reader.
            var source = new FaceFrameSource(kinect, bodyId, faceFeatures);
            _faceSource = source;
            _faceReader = source.OpenReader();

            Console.WriteLine(String.Format("Tracker for body #{0} started.", _bodyId));

            // Initialize FaceFeatureTrackers before any frame can arrive.
            InitialiseFeatureTrackers();

            // Wire events.
            _faceReader.FrameArrived += OnFaceFrameArrived;
            source.TrackingIdLost += OnTrackingLost;
        }
Ejemplo n.º 21
-1
        /// <summary>
        /// Sets up the full face-tracking stack: an HD face source/reader pair plus
        /// a classic face source/reader pair, a multi-source reader for body+color
        /// frames, and the color pixel buffer — then opens the sensor.
        /// When scenarioselected == 2, a second, parallel HD + classic pipeline is
        /// created (presumably for tracking a second person — TODO confirm).
        /// </summary>
        public void InitializeHDFace()
        {
          
            this.sensor = KinectSensor.GetDefault();
            this.bodySource = this.sensor.BodyFrameSource;
            // Color description in BGRA, used below to size the pixel buffer.
            FrameDescription colorFrameDescription = this.sensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
            
            // Primary high-definition face pipeline.
            this.highDefinitionFaceFrameSource = new HighDefinitionFaceFrameSource(sensor);
            this.highDefinitionFaceFrameSource.TrackingIdLost += this.HdFaceSource_TrackingIdLost;

            this.highDefinitionFaceFrameReader = this.highDefinitionFaceFrameSource.OpenReader();
            this.highDefinitionFaceFrameReader.FrameArrived += this.HdFaceReader_FrameArrived; // event for high-definition face frames

            // Scenario 2: a second, independent HD + classic face pipeline.
            if (scenarioselected == 2)
            {
                this.highDefinitionFaceFrameSource2 = new HighDefinitionFaceFrameSource(sensor);
                this.highDefinitionFaceFrameSource2.TrackingIdLost += this.HdFaceSource_TrackingIdLost2;

                this.highDefinitionFaceFrameReader2 = this.highDefinitionFaceFrameSource2.OpenReader();
                this.highDefinitionFaceFrameReader2.FrameArrived += this.HdFaceReader_FrameArrived2; // event for high-definition face frames
                faceSource2 = new FaceFrameSource(sensor, 0, DefaultFaceFrameFeatures);
                faceReader2 = faceSource2.OpenReader();
                faceReader2.FrameArrived += OnFaceFrameArrived2; // event for face data
                faceSource2.TrackingIdLost += OnTrackingIdLost2;

            }

            // Combined body + color reader (used for position data).
            this.reader = sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Body | FrameSourceTypes.Color);
            
            this.reader.MultiSourceFrameArrived += OnMultiSourceFrameArrived; // event for multiple sources (position)
            this.currentFaceAlignment = new FaceAlignment();

            // Primary classic face pipeline.
            faceSource = new FaceFrameSource(sensor, 0, DefaultFaceFrameFeatures);
            faceReader = faceSource.OpenReader();
            faceReader.FrameArrived += OnFaceFrameArrived; // event for face data
            
            faceSource.TrackingIdLost += OnTrackingIdLost;
            // Pixel buffer sized from the BGRA color frame description above.
            this.pixels = new byte[colorFrameDescription.Width * colorFrameDescription.Height * colorFrameDescription.BytesPerPixel];
            this.sensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;
            this.sensor.Open();
            
        }