// Initializes the Kinect sensor, the body reader (needed to obtain a tracking
// id), and the face-tracking source/reader pair, then opens the sensor.
void Start()
{
    sensor = KinectSensor.GetDefault();

    // Body stream: supplies the tracking id consumed by the face source.
    bodySource = sensor.BodyFrameSource;
    bodyReader = bodySource.OpenReader();
    bodyReader.FrameArrived += BodyReader_FrameArrived;

    // Request every face property this component reads.
    FaceFrameFeatures requestedFeatures =
        FaceFrameFeatures.BoundingBoxInColorSpace
        | FaceFrameFeatures.PointsInColorSpace
        | FaceFrameFeatures.BoundingBoxInInfraredSpace
        | FaceFrameFeatures.PointsInInfraredSpace
        | FaceFrameFeatures.RotationOrientation
        | FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.Glasses
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.RightEyeClosed
        | FaceFrameFeatures.LookingAway
        | FaceFrameFeatures.MouthMoved
        | FaceFrameFeatures.MouthOpen;

    FaceFrameSource = FaceFrameSource.Create(sensor, currentTrackingId, requestedFeatures);
    FaceFrameSource.TrackingIdLost += HdFaceSource_TrackingIdLost;

    FaceFrameReader = FaceFrameSource.OpenReader();
    FaceFrameReader.FrameArrived += HdFaceReader_FrameArrived;

    currentFaceAlignment = FaceAlignment.Create();

    sensor.Open();
}
// Window constructor: initializes serial I/O and wires the Kinect color,
// body, and face streams when a sensor is present.
public MainWindow()
{
    InitializeComponent();
    serialInit();

    _sensor = KinectSensor.GetDefault();

    if (_sensor != null)
    {
        _sensor.Open();

        _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

        _colorReader = _sensor.ColorFrameSource.OpenReader();
        _colorReader.FrameArrived += ColorReader_FrameArrived;

        _bodyReader = _sensor.BodyFrameSource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        // Face source is created with an initial tracking id of 0.
        _faceSource = new FaceFrameSource(_sensor, 0,
            FaceFrameFeatures.BoundingBoxInColorSpace |
            FaceFrameFeatures.FaceEngagement |
            FaceFrameFeatures.Glasses |
            FaceFrameFeatures.Happy |
            FaceFrameFeatures.LeftEyeClosed |
            FaceFrameFeatures.MouthOpen |
            FaceFrameFeatures.PointsInColorSpace |
            FaceFrameFeatures.RightEyeClosed);

        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;
    }
}
// Window closing: disposes every reader and the face source, then closes the
// sensor. Each field is nulled after disposal so this is safe to re-enter.
private void Window_Closing(object sender, System.ComponentModel.CancelEventArgs e)
{
    if (_colorReader != null)
    {
        _colorReader.Dispose();
        _colorReader = null;
    }

    if (_bodyReader != null)
    {
        _bodyReader.Dispose();
        _bodyReader = null;
    }

    if (_faceReader != null)
    {
        _faceReader.Dispose();
        _faceReader = null;
    }

    if (_faceSource != null)
    {
        _faceSource.Dispose();
        _faceSource = null;
    }

    if (_sensor != null)
    {
        _sensor.Close();
    }
}
// Page unloaded: releases every Kinect reader/source and closes the sensor.
private void Page_Unloaded(object sender, RoutedEventArgs e)
{
    if (_colorReader != null)
    {
        _colorReader.Dispose();
        _colorReader = null;
    }

    if (_bodyReader != null)
    {
        _bodyReader.Dispose();
        _bodyReader = null;
    }

    if (_faceReader != null)
    {
        _faceReader.Dispose();
        _faceReader = null;
    }

    if (_faceSource != null)
    {
        // FIX: the source was previously nulled without being disposed,
        // leaking its native resources. The safe cast keeps this a no-op on
        // SDK variants where FaceFrameSource is not IDisposable.
        (_faceSource as System.IDisposable)?.Dispose();
        _faceSource = null;
    }

    if (_sensor != null)
    {
        _sensor.Close();
    }
}
// Window unloaded: drops the socket connection, then releases the multi-source
// reader, the sensor, and the body/face tracking objects in the original order.
private void Window_Unloaded(object sender, RoutedEventArgs e)
{
    socket.Disconnect();

    if (_reader != null)
    {
        _reader.Dispose();
        _reader = null;
    }

    if (_sensor != null)
    {
        _sensor.Close();
    }

    if (bodyFrameReader != null)
    {
        bodyFrameReader.Dispose();
        bodyFrameReader = null;
    }

    if (_faceReader != null)
    {
        _faceReader.Dispose();
        _faceReader = null;
    }

    if (_faceSource != null)
    {
        _faceSource.Dispose();
        _faceSource = null;
    }
}
// Associates this tracker with one Kinect body: captures frame/display
// metadata and prepares a paused face reader bound to the body's tracking id.
public TrackedBody(KinectManager kinectManager, Body body, ulong currentFrame, int displayWidth, int displayHeight)
{
    _KinectManager = kinectManager;
    _KinectManager.FrameArrived += KinectManager_FrameArrived;

    _Created = DateTime.UtcNow;
    _LastKnownJoints = body.Joints;
    this.TrackingId = body.TrackingId;
    this.LastTrackedFrame = currentFrame;
    _FirstTrackedFrame = currentFrame;
    _DisplayWidth = displayWidth;
    _DisplayHeight = displayHeight;

    _ColorFrameDesc = _KinectManager.KinectSensor.ColorFrameSource.CreateFrameDescription(ImageFormat);
    _DepthFrameDesc = _KinectManager.KinectSensor.DepthFrameSource.FrameDescription;

    // Face pipeline starts paused; the real tracking id is assigned below.
    _FaceFrameSource = new FaceFrameSource(_KinectManager.KinectSensor, 0, DefaultFaceFeatures);
    _FaceFrameReader = _FaceFrameSource.OpenReader();
    _FaceFrameReader.FrameArrived += OnFaceFrameArrived;
    _FaceFrameReader.FaceFrameSource.TrackingIdLost += OnTrackingIdLost;
    _FaceFrameReader.FaceFrameSource.TrackingId = this.TrackingId;
    _FaceFrameReader.IsPaused = true;

    // Precompute tan(half horizontal FoV), converting the FoV from degrees to radians.
    _DepthHFoV_Half_Rad_Tangent = Math.Tan(_DepthFrameDesc.HorizontalFieldOfView / 2 / 180 * Math.PI);
}
// Opens the sensor and all readers exactly once; the sensorStatus guard makes
// repeated calls harmless.
private void SafeOpenSensor()
{
    if (sensorStatus != SensorStatus.Closed)
    {
        return;
    }

    kinectSensor.Open();

    bodies = new Body[kinectSensor.BodyFrameSource.BodyCount];

    colorFrameReader = kinectSensor.ColorFrameSource.OpenReader();
    colorFrameReader.FrameArrived += colorFrameReader_FrameArrived;

    bodyFrameReader = kinectSensor.BodyFrameSource.OpenReader();
    bodyFrameReader.FrameArrived += bodyFrameReader_FrameArrived;

    // Face features consumed by faceFrameReader_FrameArrived.
    FaceFrameFeatures faceFeatures =
        FaceFrameFeatures.BoundingBoxInColorSpace
        | FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.Glasses
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.MouthOpen
        | FaceFrameFeatures.PointsInColorSpace
        | FaceFrameFeatures.RightEyeClosed;

    faceFrameSource = new FaceFrameSource(kinectSensor, 0, faceFeatures);
    faceFrameReader = faceFrameSource.OpenReader();
    faceFrameReader.FrameArrived += faceFrameReader_FrameArrived;

    sensorStatus = SensorStatus.Opened;
}
// Window constructor: logs startup, then wires color, body, and face streams.
public MainWindow()
{
    System.Diagnostics.Debug.WriteLine("Starting");
    InitializeComponent();

    _sensor = KinectSensor.GetDefault();

    if (_sensor != null)
    {
        _sensor.Open();

        _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

        _colorReader = _sensor.ColorFrameSource.OpenReader();
        _colorReader.FrameArrived += ColorReader_FrameArrived;

        _bodyReader = _sensor.BodyFrameSource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        // Face source with the desired features; tracking id starts at 0.
        _faceSource = new FaceFrameSource(_sensor, 0,
            FaceFrameFeatures.BoundingBoxInColorSpace |
            FaceFrameFeatures.FaceEngagement |
            FaceFrameFeatures.Glasses |
            FaceFrameFeatures.Happy |
            FaceFrameFeatures.LeftEyeClosed |
            FaceFrameFeatures.MouthOpen |
            FaceFrameFeatures.PointsInColorSpace |
            FaceFrameFeatures.RightEyeClosed);

        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;
    }
}
/// <summary>
/// Initialize Kinect object: body reader, HD face pipeline, standard face
/// pipeline, and the mesh used for rendering.
/// </summary>
private void InitializeHDFace()
{
    this.CurrentBuilderStatus = "Ready To Start Capture";

    this.sensor = KinectSensor.GetDefault();

    this.bodySource = this.sensor.BodyFrameSource;
    this.bodyReader = this.bodySource.OpenReader();
    this.bodyReader.FrameArrived += this.BodyReader_FrameArrived;

    this.bodyCount = this.sensor.BodyFrameSource.BodyCount;

    // High-definition face pipeline.
    this.highDefinitionFaceFrameSource = new HighDefinitionFaceFrameSource(this.sensor);
    this.highDefinitionFaceFrameSource.TrackingIdLost += this.HdFaceSource_TrackingIdLost;
    this.highDefinitionFaceFrameReader = this.highDefinitionFaceFrameSource.OpenReader();
    this.highDefinitionFaceFrameReader.FrameArrived += this.HdFaceReader_FrameArrived;

    this.currentFaceModel = new FaceModel();
    this.currentFaceAlignment = new FaceAlignment();

    this.coordinateMapper = this.sensor.CoordinateMapper;

    // Standard face pipeline with the full feature set.
    FaceFrameFeatures requestedFaceFeatures =
        FaceFrameFeatures.BoundingBoxInColorSpace
        | FaceFrameFeatures.PointsInColorSpace
        | FaceFrameFeatures.RotationOrientation
        | FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.Glasses
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.RightEyeClosed
        | FaceFrameFeatures.LookingAway
        | FaceFrameFeatures.MouthMoved
        | FaceFrameFeatures.MouthOpen;

    // Initial tracking id of 0 until a body is tracked.
    this.faceFrameSource = new FaceFrameSource(this.sensor, 0, requestedFaceFeatures);
    this.faceFrameReader = this.faceFrameSource.OpenReader();
    this.faceFrameResult = null;

    if (this.faceFrameReader != null)
    {
        this.faceFrameReader.FrameArrived += this.Reader_FaceFrameArrived;
    }

    this.InitializeMesh();
    this.UpdateMesh();

    this.sensor.Open();
}
/// <summary>
/// Extended CTOR.
/// </summary>
/// <param name="bodyId">Id of the tracked body</param>
/// <param name="faceFeatures">Set of requested face features to track</param>
/// <param name="kinect">Kinect sensor that is tracking</param>
public FaceTracker(ulong bodyId, FaceFrameFeatures faceFeatures, KinectSensor kinect)
{
    // Record when tracking began for this body.
    _startTracking = DateTime.Now;

    _bodyId = bodyId;
    _faceFeatures = faceFeatures;
    // _kinectId = kinect.UniqueKinectId --> NotImplementedYet

    // Source bound directly to the body's tracking id, plus its reader.
    _faceSource = new FaceFrameSource(kinect, bodyId, faceFeatures);
    _faceReader = _faceSource.OpenReader();

    Console.WriteLine(String.Format("Tracker for body #{0} started.", _bodyId));

    // Per-feature trackers must exist before frames start arriving.
    InitialiseFeatureTrackers();

    _faceReader.FrameArrived += OnFaceFrameArrived;
    _faceSource.TrackingIdLost += OnTrackingLost;
}
// Builds the body view for a named module: allocates joint sample buffers,
// opens the body reader, and wires eye-state face tracking.
public KinectBodyView(ApplicationViewModel app, string name)
{
    _app = app;
    ModuleName = name;
    NumFramesTest = _app.NumFramesTest_DTW;

    mean1 = mean2 = mean3 = mean4 = 0;

    // Reference (r*) and measurement (m*) joint sample lists.
    r1Joints = new List<CameraSpacePoint>();
    r2Joints = new List<CameraSpacePoint>();
    r3Joints = new List<CameraSpacePoint>();
    r4Joints = new List<CameraSpacePoint>();
    r5Joints = new List<CameraSpacePoint>();
    m1Joints = new List<CameraSpacePoint>();
    m2Joints = new List<CameraSpacePoint>();
    m3Joints = new List<CameraSpacePoint>();
    m4Joints = new List<CameraSpacePoint>();

    _CoM = new CenterOfMass(_app);
    TestStopWatch = new Stopwatch();

    _sensor = KinectSensor.GetDefault();

    if (_sensor != null)
    {
        _sensor.Open();

        _bodyFrameReader = _sensor.BodyFrameSource.OpenReader();
        _bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

        this.coordinateMapper = _sensor.CoordinateMapper;

        // Depth frame extents define the joint display space.
        FrameDescription frameDescription = _sensor.DepthFrameSource.FrameDescription;
        this.displayWidth = frameDescription.Width;
        this.displayHeight = frameDescription.Height;

        // Only eye open/closed state is needed from the face stream.
        _faceSource = new FaceFrameSource(_sensor, 0,
            FaceFrameFeatures.LeftEyeClosed | FaceFrameFeatures.RightEyeClosed);
        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;
    }

    // Recording of DoubleStance position references starts disabled.
    IsCalibrating = false;
    IsTesting = false;

    CreateBones();
}
// Returns the next idle face-frame reader (paused and not yet bound to a
// body, i.e. TrackingId == 0), or null when every reader is in use.
// NOTE(review): the trackingId parameter is unused; kept for interface
// compatibility with existing callers — confirm whether it was meant to
// filter the lookup.
public FaceFrameReader GetNextReader(ulong trackingId)
{
    // FIX: folded Where().FirstOrDefault() into a single predicate call and
    // removed the leftover debug Console.WriteLine of the counter.
    FaceFrameReader reader = _FaceFrameReaders.FirstOrDefault(
        o => o.IsPaused && o.FaceFrameSource.TrackingId == 0);
    counter++;
    return reader;
}
/// <summary>
/// Initializes a new instance of the MainWindow class.
/// </summary>
public MainWindow()
{
    // One sensor is currently supported.
    this.kinectSensor = KinectSensor.GetDefault();

    this.bodies = new Body[this.kinectSensor.BodyFrameSource.BodyCount];

    // Get the coordinate mapper.
    this.coordinateMapper = this.kinectSensor.CoordinateMapper;

    // Specify the required face frame results.
    FaceFrameFeatures faceFrameFeatures =
        FaceFrameFeatures.BoundingBoxInInfraredSpace
        | FaceFrameFeatures.PointsInInfraredSpace
        | FaceFrameFeatures.RotationOrientation
        | FaceFrameFeatures.MouthOpen;

    // Create a face frame source + reader to track each face in the FOV.
    // NOTE(review): no FrameArrived handler is attached to faceFrameReader
    // here — confirm frames are consumed elsewhere (e.g. via polling).
    this.faceFrameSource = new FaceFrameSource(this.kinectSensor, 0, faceFrameFeatures);
    this.faceFrameReader = this.faceFrameSource.OpenReader();

    #region Depth
    // Open the reader for the depth frames and wire frame arrival.
    this.depthFrameReader = this.kinectSensor.DepthFrameSource.OpenReader();
    this.depthFrameReader.FrameArrived += this.Reader_FrameArrived;

    this.depthFrameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

    // Allocate space for the pixels being received and converted.
    this.depthPixels = new byte[this.depthFrameDescription.Width * this.depthFrameDescription.Height];

    // Create the bitmap to display.
    this.depthBitmap = new WriteableBitmap(this.depthFrameDescription.Width, this.depthFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray8, null);
    #endregion

    // Open the reader for the body frames and wire frame arrival.
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();
    this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

    // Set IsAvailableChanged event notifier.
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // FIX: removed the unused local 'frameDescription' (fetched from
    // ColorFrameSource but never read) and the commented-out dead code.

    // Open the sensor.
    this.kinectSensor.Open();

    // Initialize the components (controls) of the window.
    this.InitializeComponent();
}
// Binds an idle face reader (one whose source still has tracking id 0) to the
// given body's tracking id and unpauses it. No-op when all readers are busy.
private void StartFaceTracking(ulong trackingId)
{
    // Idiom fix: single-predicate FirstOrDefault instead of Where().FirstOrDefault().
    FaceFrameReader faceFrameReader =
        _FaceFrameReaders.FirstOrDefault(o => o.FaceFrameSource.TrackingId == 0);

    if (faceFrameReader != null)
    {
        faceFrameReader.FaceFrameSource.TrackingId = trackingId;
        faceFrameReader.IsPaused = false;
    }
}
// Main window constructor: prepares session storage, facial recognition,
// the display bitmap, and all Kinect readers.
public MainWindow()
{
    InitializeComponent();

    // Per-session output folder under CommonApplicationData\Kinect\<guid>.
    _SessionId = Guid.NewGuid();
    string programData = Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData);
    _Folder = Path.Combine(programData, "Kinect", _SessionId.ToString());

    _KinectSensor = KinectSensor.GetDefault();
    _FacialRecognizer = new FacialRecognizer();
    _UserEventController = new UserEventController();
    UserRecognized += _UserEventController.OnUserRecognized;

    // Create the colorFrameDescription from the ColorFrameSource using Bgra format.
    FrameDescription colorFrameDescription =
        _KinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);

    // Create the bitmap to display.
    _ColorBitmap = new WriteableBitmap(
        colorFrameDescription.Width, colorFrameDescription.Height,
        96.0, 96.0, PixelFormats.Bgr32, null);

    // Set IsAvailableChanged event notifier, then open the sensor.
    _KinectSensor.IsAvailableChanged += Sensor_IsAvailableChanged;
    _KinectSensor.Open();

    // Set the status text.
    StatusText = _KinectSensor.IsAvailable
        ? Properties.Resources.RunningStatusText
        : Properties.Resources.NoSensorStatusText;

    // Use the window object as the view model in this simple example.
    DataContext = this;

    _Bodies = new Body[_KinectSensor.BodyFrameSource.BodyCount];
    _BodyReader = _KinectSensor.BodyFrameSource.OpenReader();
    _BodyReader.FrameArrived += BodyReader_FrameArrived;

    _FaceSource = new FaceFrameSource(_KinectSensor, 0,
        FaceFrameFeatures.BoundingBoxInColorSpace |
        FaceFrameFeatures.FaceEngagement |
        FaceFrameFeatures.Glasses |
        FaceFrameFeatures.Happy |
        FaceFrameFeatures.LeftEyeClosed |
        FaceFrameFeatures.MouthOpen |
        FaceFrameFeatures.PointsInColorSpace |
        FaceFrameFeatures.RightEyeClosed);
    _FaceReader = _FaceSource.OpenReader();
    _FaceReader.FrameArrived += FaceReader_FrameArrived;

    _ColorFrameReader = _KinectSensor.ColorFrameSource.OpenReader();
    _ColorFrameReader.FrameArrived += Reader_ColorFrameArrived;
}
/// <summary>
/// Initializes the face frame source and reader and subscribes to face
/// frame arrival. (Original comment was in Chinese: "initialize face".)
/// </summary>
private void InitializeFace()
{
    _faceSource = new FaceFrameSource(_sensor, 0,
        FaceFrameFeatures.BoundingBoxInColorSpace |
        FaceFrameFeatures.FaceEngagement |
        FaceFrameFeatures.Glasses |
        FaceFrameFeatures.Happy |
        FaceFrameFeatures.LeftEyeClosed |
        FaceFrameFeatures.MouthOpen |
        FaceFrameFeatures.PointsInColorSpace |
        FaceFrameFeatures.RightEyeClosed);

    // Open face reader and hook the face frame event.
    _faceReader = _faceSource.OpenReader();
    _faceReader.FrameArrived += _faceReader_FrameArrived;
}
// Page constructor: configures body and face tracking for the Kinect v2
// sensor, then starts it. (Comments translated from Japanese.)
public MainPage()
{
    // Get the default Kinect v2 sensor object.
    this.kinectSensor = KinectSensor.GetDefault();

    // Color frame description; width is 1920 and height is 1080 here.
    FrameDescription frameDescription = this.kinectSensor.ColorFrameSource.FrameDescription;
    this.displayWidth = frameDescription.Width;
    this.displayHeight = frameDescription.Height;

    // Open the body frame reader and subscribe to its FrameArrived event.
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();
    this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

    // Maximum number of bodies the sensor can track; frames are kept in an array.
    this.bodyCount = this.kinectSensor.BodyFrameSource.BodyCount;
    this.bodies = new Body[this.bodyCount];

    // Face frame features required by this page.
    FaceFrameFeatures faceFrameFeatures =
        FaceFrameFeatures.BoundingBoxInColorSpace
        | FaceFrameFeatures.PointsInColorSpace
        | FaceFrameFeatures.RotationOrientation
        | FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.Glasses
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.RightEyeClosed
        | FaceFrameFeatures.LookingAway
        | FaceFrameFeatures.MouthMoved
        | FaceFrameFeatures.MouthOpen;

    // Face source (initial tracking id 0) and its reader.
    this.faceFrameSource = new FaceFrameSource(this.kinectSensor, 0, faceFrameFeatures);
    this.faceFrameReader = this.faceFrameSource.OpenReader();

    // Start the Kinect sensor.
    this.kinectSensor.Open();

    // Run the initialization required to start the app.
    this.InitializeComponent();
}
// Bundles a face frame source/reader pair for one tracked body and hooks the
// supplied handler to the source's TrackingIdLost event.
public FacialState(KinectSensor sensor, ulong trackingId, EventHandler<TrackingIdLostEventArgs> handler)
{
    this.Source = new FaceFrameSource(sensor, trackingId,
        FaceFrameFeatures.BoundingBoxInColorSpace |
        FaceFrameFeatures.FaceEngagement |
        FaceFrameFeatures.Glasses |
        FaceFrameFeatures.Happy |
        FaceFrameFeatures.LeftEyeClosed |
        FaceFrameFeatures.MouthOpen |
        FaceFrameFeatures.PointsInColorSpace |
        FaceFrameFeatures.RightEyeClosed);

    this.Source.TrackingIdLost += handler;
    this.Reader = this.Source.OpenReader();
}
/// <summary>
/// Initialize Kinect object: body reader, standard face pipeline, HD face
/// pipeline, and the render mesh.
/// </summary>
private void InitializeHDFace()
{
    this.CurrentBuilderStatus = "Ready To Start Capture";

    this.sensor = KinectSensor.GetDefault();

    this.bodySource = this.sensor.BodyFrameSource;
    this.bodyReader = this.bodySource.OpenReader();
    this.bodyReader.FrameArrived += this.BodyReader_FrameArrived;

    // Maximum number of bodies tracked by Kinect, with matching storage.
    this.bodyCount = this.sensor.BodyFrameSource.BodyCount;
    this.bodies = new Body[this.bodyCount];

    // Required face frame results.
    FaceFrameFeatures requestedFaceFeatures =
        FaceFrameFeatures.BoundingBoxInColorSpace
        | FaceFrameFeatures.PointsInColorSpace
        | FaceFrameFeatures.RotationOrientation
        | FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.Glasses
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.RightEyeClosed
        | FaceFrameFeatures.LookingAway
        | FaceFrameFeatures.MouthMoved
        | FaceFrameFeatures.MouthOpen;

    // Face frame source + reader tracking faces in the FOV.
    this.faceFrameSource = new FaceFrameSource(this.sensor, 0, requestedFaceFeatures);
    this.faceFrameReader = faceFrameSource.OpenReader();
    this.faceFrameReader.FrameArrived += this.Reader_FaceFrameArrived;

    // High-definition face pipeline.
    this.highDefinitionFaceFrameSource = new HighDefinitionFaceFrameSource(this.sensor);
    this.highDefinitionFaceFrameSource.TrackingIdLost += this.HdFaceSource_TrackingIdLost;
    this.highDefinitionFaceFrameReader = this.highDefinitionFaceFrameSource.OpenReader();
    this.highDefinitionFaceFrameReader.FrameArrived += this.HdFaceReader_FrameArrived;

    this.currentFaceModel = new FaceModel();
    this.currentFaceAlignment = new FaceAlignment();

    this.InitializeMesh();
    this.UpdateMesh();

    this.sensor.Open();
}
/// <summary>
/// Handle when the tracked body is gone: clears all face result labels and
/// releases the face reader/source so the next body gets fresh ones.
/// </summary>
private void OnTrackingIdLost(object sender, TrackingIdLostEventArgs e)
{
    const string message = "No face tracked";

    // Update UI
    HappyResult.Text = message;
    EngagedResult.Text = message;
    GlassesResult.Text = message;
    LeftEyeResult.Text = message;
    RightEyeResult.Text = message;
    MouthOpenResult.Text = message;
    MouthMovedResult.Text = message;
    LookingAwayResult.Text = message;

    // FIX: dispose the reader/source before dropping the references — nulling
    // them alone leaked the native face-tracking resources. The safe cast is
    // a no-op on SDK variants where these types are not IDisposable.
    (_faceReader as System.IDisposable)?.Dispose();
    _faceReader = null;
    (_faceSource as System.IDisposable)?.Dispose();
    _faceSource = null;
}
// Creates and caches a face source/reader pair for a newly tracked body;
// a body that already has a tracker is left untouched.
private void BeginFaceTracking(ulong trackingId)
{
    if (_FaceFrameReaders.ContainsKey(trackingId))
    {
        return;
    }

    FaceFrameSource frameSource = new FaceFrameSource(_KinectSensor, trackingId, TrackedFaceFeatures);
    frameSource.TrackingIdLost += FaceFrameSource_TrackingIdLost;

    FaceFrameReader frameReader = frameSource.OpenReader();
    frameReader.FrameArrived += FaceFrameReader_FrameArrived;

    _FaceFrameSources.Add(trackingId, frameSource);
    _FaceFrameReaders.Add(trackingId, frameReader);
}
// Window loaded: initializes body, HD face, face-gesture, and multi-source
// readers, then opens the sensor. (Comments translated from Spanish.)
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    _sensor = KinectSensor.GetDefault();

    if (_sensor != null)
    {
        // Body source, reader, and frame handler.
        _bodySource = _sensor.BodyFrameSource;
        _bodyReader = _bodySource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        // HD face source, reader, and frame handler.
        _faceSource = new HighDefinitionFaceFrameSource(_sensor);
        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;

        // Face-gesture reader and its handler.
        _faceFrameSource = new FaceFrameSource(this._sensor, 0,
            FaceFrameFeatures.BoundingBoxInColorSpace |
            FaceFrameFeatures.FaceEngagement |
            FaceFrameFeatures.Glasses |
            FaceFrameFeatures.Happy |
            FaceFrameFeatures.LeftEyeClosed |
            FaceFrameFeatures.MouthOpen |
            FaceFrameFeatures.PointsInColorSpace |
            FaceFrameFeatures.RightEyeClosed);
        _faceFrameReader = this._faceFrameSource.OpenReader();
        _faceFrameReader.FrameArrived += FaceFrameReader_FrameArrived;

        // Face model and alignment used by the HD face pipeline.
        _faceModel = new FaceModel();
        _faceAlignment = new FaceAlignment();

        // Open the sensor.
        _sensor.Open();

        // Multi-source reader for color, depth, infrared, and body streams.
        multiSourceReader = _sensor.OpenMultiSourceFrameReader(
            FrameSourceTypes.Color | FrameSourceTypes.Depth |
            FrameSourceTypes.Infrared | FrameSourceTypes.Body);
        multiSourceReader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
    }
}
// Builds the body view: opens the body reader, reads depth display extents,
// and wires eye-state face tracking.
public KinectBodyView(ApplicationViewModel app)
{
    _app = app;
    _CoM = new CenterOfMass(_app);

    _sensor = KinectSensor.GetDefault();

    if (_sensor != null)
    {
        _sensor.Open();

        _bodyFrameReader = _sensor.BodyFrameSource.OpenReader();
        _bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

        this.coordinateMapper = _sensor.CoordinateMapper;

        // Depth frame extents define the joint display space.
        FrameDescription frameDescription = _sensor.DepthFrameSource.FrameDescription;
        this.displayWidth = frameDescription.Width;
        this.displayHeight = frameDescription.Height;

        // Only eye open/closed state is needed from the face stream.
        _faceSource = new FaceFrameSource(_sensor, 0,
            FaceFrameFeatures.LeftEyeClosed | FaceFrameFeatures.RightEyeClosed);
        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;
    }

    // Recording of DoubleStance position references starts disabled.
    RecordDoubleStance = false;
    ExecuteDoubleStanceTest = false;
    poseDoubleStance = new PoseDoubleStance(_app);

    CreateBones();
}
/// <summary>
/// Process body frames: refreshes the body array and lazily creates the face
/// source/reader pair for the first tracked body seen.
/// </summary>
private void OnBodyFrameReceived(object sender, BodyFrameArrivedEventArgs e)
{
    BodyFrameReference bodyRef = e.FrameReference;
    if (bodyRef == null)
    {
        return;
    }

    using (BodyFrame frame = bodyRef.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        // Allocate the body array once, sized by the frame.
        if (_bodies == null)
        {
            _bodies = new Body[frame.BodyCount];
        }

        frame.GetAndRefreshBodyData(_bodies);

        foreach (Body body in _bodies)
        {
            // Only the first tracked body gets a face source (_faceSource
            // stays non-null afterwards, so later bodies are skipped).
            if (body.IsTracked && _faceSource == null)
            {
                _faceSource = new FaceFrameSource(_kinect, body.TrackingId, _faceFrameFeatures);
                _faceReader = _faceSource.OpenReader();

                _faceReader.FrameArrived += OnFaceFrameArrived;
                _faceSource.TrackingIdLost += OnTrackingIdLost;
            }
        }
    }
}
// Stops recording, flushes the series/CSV outputs, and releases all
// Kinect-related resources.
public void Stop()
{
    MainWindow.RecordStarted = false;

    SeriesCollection[0].OnSeriesUpdatedFinish();

    if (this.faceFrameReaders != null)
    {
        // FIX: the field was nulled without being disposed even though the
        // original comment noted it is IDisposable. The safe cast keeps this
        // a no-op if the field is not actually disposable.
        (this.faceFrameReaders as System.IDisposable)?.Dispose();
        this.faceFrameReaders = null;
    }

    if (this.hdFaceFrameReaders != null)
    {
        // FaceFrameReader is IDisposable.
        this.hdFaceFrameReaders.Dispose();
        this.hdFaceFrameReaders = null;
    }

    if (this.faceFrameSource != null)
    {
        // FIX: same leak as above for the face frame source.
        (this.faceFrameSource as System.IDisposable)?.Dispose();
        this.faceFrameSource = null;
    }

    if (CSVDataFile != null)
    {
        CSVDataFile.CSV_Close();
    }

    if (this.bodyFrameReader != null)
    {
        // BodyFrameReader is IDisposable.
        this.bodyFrameReader.Dispose();
        this.bodyFrameReader = null;
    }

    if (this.kinect != null)
    {
        this.kinect.Close();
        this.kinect = null;
    }
}
// Unity Start: opens the Kinect sensor and, depending on the flags, sets up
// standard face tracking and/or the HD face mesh.
void Start()
{
    _Sensor = KinectSensor.GetDefault();
    if (_Sensor != null)
    {
        _Reader = _Sensor.BodyFrameSource.OpenReader();

        if (enableFacetracking)
        {
            faceFrameSource = FaceFrameSource.Create(_Sensor, 0, FaceFrameFeatures.RotationOrientation);
            faceframeReader = faceFrameSource.OpenReader();
        }

        if (enableHDFace)
        {
            highDefinitionFaceFrameSource = HighDefinitionFaceFrameSource.Create(_Sensor);
            highDefinitionFaceFrameReader = highDefinitionFaceFrameSource.OpenReader();
            CurrentFaceModel = FaceModel.Create();
            currentFaceAlignment = FaceAlignment.Create();

            // Convert the face model's uint triangle indices into the int[]
            // form used for the mesh. TriangleIndices holds three entries per
            // triangle, so the full length is TriangleCount * 3.
            var triangles = new int[FaceModel.TriangleCount * 3];
            uint[] indices = FaceModel.TriangleIndices.ToArray();

            // FIX: the loop previously ran only to TriangleCount (stepping by
            // 3 while writing i, i+1, i+2), so just the first third of the
            // index buffer was copied and the rest of the mesh stayed zeroed.
            for (int i = 0; i < triangles.Length; i++)
            {
                triangles[i] = (int)indices[i];
            }

            _CurrentFaceMeshTriangles = triangles;
        }

        if (!_Sensor.IsOpen)
        {
            _Sensor.Open();
        }

        controller = GameObject.Find("Controller").GetComponent<Controller>();
        Debug.Log("KinectBodyManager::Started");
    }
}
// Sets up standard and HD face tracking used to detect facial
// micro-expressions (eye state, brow movement, mouth activity).
public FacialMicroExpressions(KinectSensor sensor)
{
    _sensor = sensor;
    _eyesState = EyesState.Opened;

    this._faceAlignment = new FaceAlignment();

    // 30-sample rolling buffers for eyebrow positions and their deltas.
    this._leftBrow = new float[30];
    this._rightBrow = new float[30];
    this._leftBrowDelta = new float[30];
    this._rightBrowDelta = new float[30];

    // FIX: FaceFrameFeatures.FaceEngagement was OR'ed into the flag set
    // twice; it is listed once now (bitwise OR made the duplicate harmless,
    // but it was clearly unintended).
    _faceSource = new FaceFrameSource(_sensor, 0,
        FaceFrameFeatures.FaceEngagement |
        FaceFrameFeatures.LeftEyeClosed |
        FaceFrameFeatures.LookingAway |
        FaceFrameFeatures.Happy |
        FaceFrameFeatures.MouthMoved |
        FaceFrameFeatures.MouthOpen |
        FaceFrameFeatures.RightEyeClosed |
        FaceFrameFeatures.RotationOrientation);

    _faceReader = _faceSource.OpenReader();
    _faceReader.FrameArrived += _faceReader_FrameArrived;

    //TODO: Use HDFace to determine gulping, Eyebrows
    _hdSource = new HighDefinitionFaceFrameSource(_sensor);
    _hdReader = _hdSource.OpenReader();
    _hdReader.FrameArrived += _hdReader_FrameArrived;
}
// Page constructor: opens the sensor plus color, body, and face readers.
public MainPage()
{
    InitializeComponent();

    _sensor = KinectSensor.GetDefault();

    if (_sensor != null)
    {
        _sensor.Open();

        _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

        _colorReader = _sensor.ColorFrameSource.OpenReader();
        _colorReader.FrameArrived += ColorReader_FrameArrived;

        _bodyReader = _sensor.BodyFrameSource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        // Only color-space face points are requested here.
        _faceSource = new FaceFrameSource(_sensor, 0, FaceFrameFeatures.PointsInColorSpace);
        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;
    }
}
// Form closing: disposes the bitmap, readers, and face source, then closes
// the sensor — but only if the sensor was actually opened.
private void MainForm_FormClosing(object sender, FormClosingEventArgs e)
{
    if (sensorStatus != SensorStatus.Opened)
    {
        return;
    }

    if (bmp != null)
    {
        bmp.Dispose();
    }

    if (colorFrameReader != null)
    {
        colorFrameReader.Dispose();
        colorFrameReader = null;
    }

    if (bodyFrameReader != null)
    {
        bodyFrameReader.Dispose();
        bodyFrameReader = null;
    }

    if (faceFrameReader != null)
    {
        faceFrameReader.Dispose();
        faceFrameReader = null;
    }

    if (faceFrameSource != null)
    {
        faceFrameSource.Dispose();
        faceFrameSource = null;
    }

    if (kinectSensor != null)
    {
        kinectSensor.Close();
    }
}
// Window constructor: initializes the Kinect and wires color, body, and face
// streams; records a status message when no sensor is available.
public MainWindow()
{
    InitializeComponent();

    // Initialize the Kinect sensor.
    _sensor = KinectSensor.GetDefault();

    if (_sensor == null)
    {
        // Status message is user-facing (Indonesian: "No Kinect present").
        statusText = "Kinect Tidak Ada";
    }

    if (_sensor != null)
    {
        _sensor.Open();

        _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

        _colorReader = _sensor.ColorFrameSource.OpenReader();
        _colorReader.FrameArrived += ColorReader_FrameArrived;

        _bodyReader = _sensor.BodyFrameSource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        // Initialize the face source with the requested features.
        _faceSource = new FaceFrameSource(_sensor, 0,
            FaceFrameFeatures.BoundingBoxInColorSpace |
            FaceFrameFeatures.FaceEngagement |
            FaceFrameFeatures.Glasses |
            FaceFrameFeatures.Happy |
            FaceFrameFeatures.LeftEyeClosed |
            FaceFrameFeatures.MouthOpen |
            FaceFrameFeatures.PointsInColorSpace |
            FaceFrameFeatures.RightEyeClosed);

        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;

        // FIX: removed the unused local 'frameDescription' that was fetched
        // from ColorFrameSource but never read.
    }
}