void Start()
{
    sensor = KinectSensor.GetDefault();
    bodySource = sensor.BodyFrameSource;
    bodyReader = bodySource.OpenReader();
    bodyReader.FrameArrived += BodyReader_FrameArrived;

    FaceFrameFeatures faceFrameFeatures =
        FaceFrameFeatures.BoundingBoxInColorSpace
        | FaceFrameFeatures.PointsInColorSpace
        | FaceFrameFeatures.BoundingBoxInInfraredSpace
        | FaceFrameFeatures.PointsInInfraredSpace
        | FaceFrameFeatures.RotationOrientation
        | FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.Glasses
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.RightEyeClosed
        | FaceFrameFeatures.LookingAway
        | FaceFrameFeatures.MouthMoved
        | FaceFrameFeatures.MouthOpen;

    FaceFrameSource = FaceFrameSource.Create(sensor, currentTrackingId, faceFrameFeatures);
    FaceFrameSource.TrackingIdLost += HdFaceSource_TrackingIdLost;
    FaceFrameReader = FaceFrameSource.OpenReader();
    FaceFrameReader.FrameArrived += HdFaceReader_FrameArrived;
    //CurrentFaceModel = FaceModel.Create();
    currentFaceAlignment = FaceAlignment.Create();

    sensor.Open();
}
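The Start() above wires BodyReader_FrameArrived but does not show it. A minimal sketch of the usual companion handler, assuming the fields above; its only job here is to hand the first tracked body's id to the face source (an id of 0 means no body is assigned yet):

private void BodyReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        var bodies = new Body[frame.BodyCount];
        frame.GetAndRefreshBodyData(bodies);

        // Point the face source at the first tracked body so face
        // tracking can start.
        foreach (var body in bodies)
        {
            if (body.IsTracked)
            {
                FaceFrameSource.TrackingId = body.TrackingId;
                break;
            }
        }
    }
}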
/// <summary>
/// Initialize Kinect object
/// </summary>
private void InitializeHDFace()
{
    this.CurrentBuilderStatus = "Ready To Start Capture";

    this.sensor = KinectSensor.GetDefault();
    this.bodySource = this.sensor.BodyFrameSource;
    this.bodyReader = this.bodySource.OpenReader();
    this.bodyReader.FrameArrived += this.BodyReader_FrameArrived;
    this.bodyCount = this.sensor.BodyFrameSource.BodyCount;

    this.highDefinitionFaceFrameSource = new HighDefinitionFaceFrameSource(this.sensor);
    this.highDefinitionFaceFrameSource.TrackingIdLost += this.HdFaceSource_TrackingIdLost;
    this.highDefinitionFaceFrameReader = this.highDefinitionFaceFrameSource.OpenReader();
    this.highDefinitionFaceFrameReader.FrameArrived += this.HdFaceReader_FrameArrived;

    this.currentFaceModel = new FaceModel();
    this.currentFaceAlignment = new FaceAlignment();
    this.coordinateMapper = this.sensor.CoordinateMapper;

    FaceFrameFeatures faceFrameFeatures =
        FaceFrameFeatures.BoundingBoxInColorSpace
        | FaceFrameFeatures.PointsInColorSpace
        | FaceFrameFeatures.RotationOrientation
        | FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.Glasses
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.RightEyeClosed
        | FaceFrameFeatures.LookingAway
        | FaceFrameFeatures.MouthMoved
        | FaceFrameFeatures.MouthOpen;

    // create the face frame source with the required face frame features and an initial tracking Id of 0
    this.faceFrameSource = new FaceFrameSource(this.sensor, 0, faceFrameFeatures);

    // open the corresponding reader
    this.faceFrameReader = this.faceFrameSource.OpenReader();
    this.faceFrameResult = null;

    // wire handler for face frame arrival
    if (this.faceFrameReader != null)
    {
        this.faceFrameReader.FrameArrived += this.Reader_FaceFrameArrived;
    }

    this.InitializeMesh();
    this.UpdateMesh();

    this.sensor.Open();
}
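The HD face reader above delivers frames to HdFaceReader_FrameArrived, which the snippet omits. A minimal sketch of that handler, assuming the currentFaceAlignment and UpdateMesh members shown here; this mirrors the pattern in the SDK's HD face sample:

private void HdFaceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        // Frames can arrive without a tracked face; only refresh the
        // alignment (and the mesh built from it) when tracking succeeded.
        if (frame == null || !frame.IsFaceTracked)
        {
            return;
        }

        frame.GetAndRefreshFaceAlignmentResult(this.currentFaceAlignment);
        this.UpdateMesh();
    }
}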
void InitializeFace()
{
    FaceFrameFeatures faceFrameFeatures =
        FaceFrameFeatures.BoundingBoxInColorSpace
        | FaceFrameFeatures.PointsInColorSpace
        | FaceFrameFeatures.RotationOrientation
        | FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.Glasses
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.RightEyeClosed
        | FaceFrameFeatures.LookingAway
        | FaceFrameFeatures.MouthMoved
        | FaceFrameFeatures.MouthOpen;

    faceFrameSources = new FaceFrameSource[bodyCount];
    faceFrameReaders = new FaceFrameReader[bodyCount];

    for (int i = 0; i < bodyCount; i++)
    {
        faceFrameSources[i] = new FaceFrameSource(kinectSensor, 0, faceFrameFeatures);
        faceFrameReaders[i] = faceFrameSources[i].OpenReader();
        faceFrameReaders[i].FrameArrived += FaceFrameReader_FrameArrived;
    }

    faceFrameResults = new FaceFrameResult[bodyCount];

    faceBrush = new List<Brush>()
    {
        Brushes.White,
        Brushes.Orange,
        Brushes.Green,
        Brushes.Red,
        Brushes.LightBlue,
        Brushes.Yellow
    };
}
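The per-body readers above share one FaceFrameReader_FrameArrived handler, not shown. A minimal sketch, assuming a lookup helper like the GetFaceSourceIndex methods later in this listing:

private void FaceFrameReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
    {
        if (faceFrame == null)
        {
            return;
        }

        // Find which per-body source produced this frame.
        int index = GetFaceSourceIndex(faceFrame.FaceFrameSource);

        if (index >= 0)
        {
            // Cache the latest result; null means the face was not tracked.
            faceFrameResults[index] = faceFrame.FaceFrameResult;
        }
    }
}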
public MainWindow()
{
    System.Diagnostics.Debug.WriteLine("Starting");
    InitializeComponent();

    _sensor = KinectSensor.GetDefault();

    if (_sensor != null)
    {
        _sensor.Open();

        _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

        _colorReader = _sensor.ColorFrameSource.OpenReader();
        _colorReader.FrameArrived += ColorReader_FrameArrived;
        _bodyReader = _sensor.BodyFrameSource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        // 2) Initialize the face source with the desired features
        _faceSource = new FaceFrameSource(_sensor, 0,
            FaceFrameFeatures.BoundingBoxInColorSpace |
            FaceFrameFeatures.FaceEngagement |
            FaceFrameFeatures.Glasses |
            FaceFrameFeatures.Happy |
            FaceFrameFeatures.LeftEyeClosed |
            FaceFrameFeatures.MouthOpen |
            FaceFrameFeatures.PointsInColorSpace |
            FaceFrameFeatures.RightEyeClosed);
        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;
    }
}
void Start()
{
    _Sensor = KinectSensor.GetDefault();

    if (_Sensor != null)
    {
        _Reader = _Sensor.BodyFrameSource.OpenReader();

        if (!_Sensor.IsOpen)
        {
            _Sensor.Open();
        }

        // Keep the face setup inside the null check so a missing sensor
        // cannot be dereferenced below.
        this.faceFrameSources = new FaceFrameSource[_Sensor.BodyFrameSource.BodyCount];
        this.faceFrameReaders = new FaceFrameReader[_Sensor.BodyFrameSource.BodyCount];

        // specify the required face frame results
        FaceFrameFeatures faceFrameFeatures =
            FaceFrameFeatures.RotationOrientation
            | FaceFrameFeatures.FaceEngagement
            | FaceFrameFeatures.LookingAway;

        for (int i = 0; i < _Sensor.BodyFrameSource.BodyCount; i++)
        {
            // create the face frame source with the required face frame features and an initial tracking Id of 0
            faceFrameSources[i] = FaceFrameSource.Create(_Sensor, 0, faceFrameFeatures);

            // open the corresponding reader
            faceFrameReaders[i] = faceFrameSources[i].OpenReader();
        }
    }
}
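Nothing above assigns body tracking ids to the face sources, so they stay idle until that happens. In the Unity samples this is usually done by polling the body reader each frame; a sketch under that assumption, reusing the fields above:

void Update()
{
    using (var frame = _Reader.AcquireLatestFrame())
    {
        if (frame == null)
        {
            return;
        }

        var bodies = new Body[_Sensor.BodyFrameSource.BodyCount];
        frame.GetAndRefreshBodyData(bodies);

        for (int i = 0; i < bodies.Length; i++)
        {
            // Re-point each idle face source at the matching body once it is tracked.
            if (!faceFrameSources[i].IsTrackingIdValid && bodies[i].IsTracked)
            {
                faceFrameSources[i].TrackingId = bodies[i].TrackingId;
            }
        }
    }
}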
private void Window_Unloaded(object sender, RoutedEventArgs e)
{
    socket.Disconnect();

    if (_reader != null)
    {
        _reader.Dispose();
        _reader = null;
    }

    if (_sensor != null)
    {
        _sensor.Close();
    }

    if (bodyFrameReader != null)
    {
        bodyFrameReader.Dispose();
        bodyFrameReader = null;
    }

    if (_faceReader != null)
    {
        _faceReader.Dispose();
        _faceReader = null;
    }

    if (_faceSource != null)
    {
        _faceSource.Dispose();
        _faceSource = null;
    }
}
public TrackedBody(KinectManager kinectManager, Body body, ulong currentFrame, int displayWidth, int displayHeight)
{
    //Console.WriteLine("Tracked body invoked.");
    _KinectManager = kinectManager;
    _KinectManager.FrameArrived += KinectManager_FrameArrived;
    _Created = DateTime.UtcNow;
    _LastKnownJoints = body.Joints;
    this.TrackingId = body.TrackingId;
    this.LastTrackedFrame = currentFrame;
    _FirstTrackedFrame = currentFrame;
    _DisplayWidth = displayWidth;
    _DisplayHeight = displayHeight;
    _ColorFrameDesc = _KinectManager.KinectSensor.ColorFrameSource.CreateFrameDescription(ImageFormat);
    _DepthFrameDesc = _KinectManager.KinectSensor.DepthFrameSource.FrameDescription;

    _FaceFrameSource = new FaceFrameSource(_KinectManager.KinectSensor, 0, DefaultFaceFeatures);
    _FaceFrameReader = _FaceFrameSource.OpenReader();
    _FaceFrameReader.FrameArrived += OnFaceFrameArrived;
    _FaceFrameReader.FaceFrameSource.TrackingIdLost += OnTrackingIdLost;
    _FaceFrameReader.FaceFrameSource.TrackingId = this.TrackingId;
    _FaceFrameReader.IsPaused = true;

    _DepthHFoV_Half_Rad_Tangent = Math.Tan(_DepthFrameDesc.HorizontalFieldOfView / 2 / 180 * Math.PI);
}
private void SafeOpenSensor()
{
    if (sensorStatus == SensorStatus.Closed)
    {
        kinectSensor.Open();

        bodies = new Body[kinectSensor.BodyFrameSource.BodyCount];

        colorFrameReader = kinectSensor.ColorFrameSource.OpenReader();
        colorFrameReader.FrameArrived += colorFrameReader_FrameArrived;
        bodyFrameReader = kinectSensor.BodyFrameSource.OpenReader();
        bodyFrameReader.FrameArrived += bodyFrameReader_FrameArrived;

        FaceFrameFeatures fff =
            FaceFrameFeatures.BoundingBoxInColorSpace
            | FaceFrameFeatures.FaceEngagement
            | FaceFrameFeatures.Glasses
            | FaceFrameFeatures.Happy
            | FaceFrameFeatures.LeftEyeClosed
            | FaceFrameFeatures.MouthOpen
            | FaceFrameFeatures.PointsInColorSpace
            | FaceFrameFeatures.RightEyeClosed;
        faceFrameSource = new FaceFrameSource(kinectSensor, 0, fff);
        faceFrameReader = faceFrameSource.OpenReader();
        faceFrameReader.FrameArrived += faceFrameReader_FrameArrived;

        sensorStatus = SensorStatus.Opened;
    }
}
private void Window_Closing(object sender, System.ComponentModel.CancelEventArgs e)
{
    if (_colorReader != null)
    {
        _colorReader.Dispose();
        _colorReader = null;
    }

    if (_bodyReader != null)
    {
        _bodyReader.Dispose();
        _bodyReader = null;
    }

    if (_faceReader != null)
    {
        _faceReader.Dispose();
        _faceReader = null;
    }

    if (_faceSource != null)
    {
        _faceSource.Dispose();
        _faceSource = null;
    }

    if (_sensor != null)
    {
        _sensor.Close();
    }
}
public MainWindow()
{
    InitializeComponent();
    serialInit();

    _sensor = KinectSensor.GetDefault();

    if (_sensor != null)
    {
        _sensor.Open();

        _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

        _colorReader = _sensor.ColorFrameSource.OpenReader();
        _colorReader.FrameArrived += ColorReader_FrameArrived;
        _bodyReader = _sensor.BodyFrameSource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        // Initializing the source.
        _faceSource = new FaceFrameSource(_sensor, 0,
            FaceFrameFeatures.BoundingBoxInColorSpace |
            FaceFrameFeatures.FaceEngagement |
            FaceFrameFeatures.Glasses |
            FaceFrameFeatures.Happy |
            FaceFrameFeatures.LeftEyeClosed |
            FaceFrameFeatures.MouthOpen |
            FaceFrameFeatures.PointsInColorSpace |
            FaceFrameFeatures.RightEyeClosed);
        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;
    }
}
private void Page_Unloaded(object sender, RoutedEventArgs e)
{
    if (_colorReader != null)
    {
        _colorReader.Dispose();
        _colorReader = null;
    }

    if (_bodyReader != null)
    {
        _bodyReader.Dispose();
        _bodyReader = null;
    }

    if (_faceReader != null)
    {
        _faceReader.Dispose();
        _faceReader = null;
    }

    if (_faceSource != null)
    {
        // Dispose the source too, mirroring the reader cleanup above.
        _faceSource.Dispose();
        _faceSource = null;
    }

    if (_sensor != null)
    {
        _sensor.Close();
    }
}
void InitializeFace()
{
    FaceFrameFeatures faceFrameFeatures =
        FaceFrameFeatures.BoundingBoxInColorSpace
        | FaceFrameFeatures.PointsInColorSpace
        | FaceFrameFeatures.RotationOrientation
        | FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.Glasses
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.RightEyeClosed
        | FaceFrameFeatures.LookingAway
        | FaceFrameFeatures.MouthMoved
        | FaceFrameFeatures.MouthOpen;

    faceFrameSources = new FaceFrameSource[bodyCount];
    faceFrameReaders = new FaceFrameReader[bodyCount];

    for (int i = 0; i < bodyCount; i++)
    {
        faceFrameSources[i] = new FaceFrameSource(kinect, 0, faceFrameFeatures);
        faceFrameReaders[i] = faceFrameSources[i].OpenReader();
        faceFrameReaders[i].FrameArrived += faceFrameReader_FrameArrived;
    }

    faceFrameResults = new FaceFrameResult[bodyCount];
}
/// <summary>
/// Extended CTOR
/// </summary>
/// <param name="bodyId">Id of the tracked body</param>
/// <param name="faceFeatures">Set of requested face features to track</param>
/// <param name="kinect">Kinect sensor that is tracking</param>
public FaceTracker(ulong bodyId, FaceFrameFeatures faceFeatures, KinectSensor kinect)
{
    // Pin-point start of tracking
    _startTracking = DateTime.Now;

    // Save variables
    _bodyId = bodyId;
    _faceFeatures = faceFeatures;
    // _kinectId = kinect.UniqueKinectId --> NotImplementedYet

    // Create a new source with body TrackingId
    _faceSource = new FaceFrameSource(kinect, bodyId, faceFeatures);

    // Create new reader
    _faceReader = _faceSource.OpenReader();

    Console.WriteLine(String.Format("Tracker for body #{0} started.", _bodyId));

    // Initialize FaceFeatureTrackers
    InitialiseFeatureTrackers();

    // Wire events
    _faceReader.FrameArrived += OnFaceFrameArrived;
    _faceSource.TrackingIdLost += OnTrackingLost;
}
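A hypothetical call site for this constructor; the _trackers dictionary, feature set, and _kinectSensor field are illustrative only:

// Illustrative only: start one FaceTracker per newly tracked body.
foreach (Body body in _bodies)
{
    if (body.IsTracked && !_trackers.ContainsKey(body.TrackingId))
    {
        _trackers[body.TrackingId] = new FaceTracker(
            body.TrackingId,
            FaceFrameFeatures.FaceEngagement | FaceFrameFeatures.LookingAway,
            _kinectSensor);
    }
}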
public KinectBodyView(ApplicationViewModel app, string name)
{
    // Gets application pointer.
    _app = app;
    ModuleName = name;
    NumFramesTest = _app.NumFramesTest_DTW;
    mean1 = mean2 = mean3 = mean4 = 0;

    r1Joints = new List<CameraSpacePoint>();
    r2Joints = new List<CameraSpacePoint>();
    r3Joints = new List<CameraSpacePoint>();
    r4Joints = new List<CameraSpacePoint>();
    r5Joints = new List<CameraSpacePoint>();
    m1Joints = new List<CameraSpacePoint>();
    m2Joints = new List<CameraSpacePoint>();
    m3Joints = new List<CameraSpacePoint>();
    m4Joints = new List<CameraSpacePoint>();

    _CoM = new CenterOfMass(_app);
    TestStopWatch = new Stopwatch();

    // Gets Kinect sensor reference.
    _sensor = KinectSensor.GetDefault();

    // If there is an active Kinect (or an accessible Kinect Studio playback).
    if (_sensor != null)
    {
        // Opens the sensor.
        _sensor.Open();

        // open the reader for the body frames
        _bodyFrameReader = _sensor.BodyFrameSource.OpenReader();
        _bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

        // get the coordinate mapper
        this.coordinateMapper = _sensor.CoordinateMapper;

        // get the depth (display) extents
        FrameDescription frameDescription = _sensor.DepthFrameSource.FrameDescription;

        // get size of joint space
        this.displayWidth = frameDescription.Width;
        this.displayHeight = frameDescription.Height;

        _faceSource = new FaceFrameSource(_sensor, 0,
            FaceFrameFeatures.LeftEyeClosed | FaceFrameFeatures.RightEyeClosed);
        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;
    }

    // Sets flag for recording DoubleStance position references to false
    IsCalibrating = false;
    IsTesting = false;

    CreateBones();
}
/// <summary>
/// Initializes a new instance of the MainWindow class.
/// </summary>
public MainWindow()
{
    // one sensor is currently supported
    this.kinectSensor = KinectSensor.GetDefault();

    this.bodies = new Body[this.kinectSensor.BodyFrameSource.BodyCount];

    // get the coordinate mapper
    this.coordinateMapper = this.kinectSensor.CoordinateMapper;

    // get the color frame details
    FrameDescription frameDescription = this.kinectSensor.ColorFrameSource.FrameDescription;

    // specify the required face frame results
    FaceFrameFeatures faceFrameFeatures =
        FaceFrameFeatures.BoundingBoxInInfraredSpace
        | FaceFrameFeatures.PointsInInfraredSpace
        | FaceFrameFeatures.RotationOrientation
        | FaceFrameFeatures.MouthOpen;

    // create a face frame source + reader to track each face in the FOV
    this.faceFrameSource = new FaceFrameSource(this.kinectSensor, 0, faceFrameFeatures);
    this.faceFrameReader = this.faceFrameSource.OpenReader();
    //faceFrameResult = new FaceFrameResult();

    #region Depth
    // open the reader for the depth frames
    this.depthFrameReader = this.kinectSensor.DepthFrameSource.OpenReader();

    // wire handler for frame arrival
    this.depthFrameReader.FrameArrived += this.Reader_FrameArrived;

    // get FrameDescription from DepthFrameSource
    this.depthFrameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

    // allocate space to put the pixels being received and converted
    this.depthPixels = new byte[this.depthFrameDescription.Width * this.depthFrameDescription.Height];

    // create the bitmap to display
    this.depthBitmap = new WriteableBitmap(this.depthFrameDescription.Width, this.depthFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray8, null);
    #endregion

    // open the reader for the body frames
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

    // wire handler for body frame arrival
    this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

    // set IsAvailableChanged event notifier
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // open the sensor
    this.kinectSensor.Open();

    // initialize the components (controls) of the window
    this.InitializeComponent();
}
public MainWindow()
{
    InitializeComponent();

    _SessionId = Guid.NewGuid();
    string programData = Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData);
    _Folder = Path.Combine(programData, "Kinect", _SessionId.ToString());

    _KinectSensor = KinectSensor.GetDefault();
    _FacialRecognizer = new FacialRecognizer();
    _UserEventController = new UserEventController();
    UserRecognized += _UserEventController.OnUserRecognized;

    // create the colorFrameDescription from the ColorFrameSource using Bgra format
    FrameDescription colorFrameDescription = _KinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);

    // create the bitmap to display
    _ColorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

    // set IsAvailableChanged event notifier
    _KinectSensor.IsAvailableChanged += Sensor_IsAvailableChanged;

    // open the sensor
    _KinectSensor.Open();

    // set the status text
    StatusText = _KinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                           : Properties.Resources.NoSensorStatusText;

    // use the window object as the view model in this simple example
    DataContext = this;

    _Bodies = new Body[_KinectSensor.BodyFrameSource.BodyCount];
    _BodyReader = _KinectSensor.BodyFrameSource.OpenReader();
    _BodyReader.FrameArrived += BodyReader_FrameArrived;

    _FaceSource = new FaceFrameSource(_KinectSensor, 0,
        FaceFrameFeatures.BoundingBoxInColorSpace |
        FaceFrameFeatures.FaceEngagement |
        FaceFrameFeatures.Glasses |
        FaceFrameFeatures.Happy |
        FaceFrameFeatures.LeftEyeClosed |
        FaceFrameFeatures.MouthOpen |
        FaceFrameFeatures.PointsInColorSpace |
        FaceFrameFeatures.RightEyeClosed);
    _FaceReader = _FaceSource.OpenReader();
    _FaceReader.FrameArrived += FaceReader_FrameArrived;

    _ColorFrameReader = _KinectSensor.ColorFrameSource.OpenReader();
    _ColorFrameReader.FrameArrived += Reader_ColorFrameArrived;
}
private int GetIndex(FaceFrameSource src)
{
    for (int i = 0; i < faceFrameSources.Length; i++)
    {
        if (src == faceFrameSources[i])
        {
            return i;
        }
    }

    // Not found: return -1 (as the other GetFaceSourceIndex helpers in this
    // listing do) so callers can tell a miss apart from index 0.
    return -1;
}
public void Initialize(DIOManager dioManager)
{
    this.dioManager = dioManager;
    this.BodySrcManager = dioManager.bodySrcManager;
    updateFrame = 0;

    // one sensor is currently supported
    kinectSensor = KinectSensor.GetDefault();

    // set the maximum number of bodies that would be tracked by Kinect
    bodyCount = kinectSensor.BodyFrameSource.BodyCount;

    // allocate storage to store body objects
    bodies = new Body[bodyCount];

    if (BodySrcManager == null)
    {
        Debug.Log("A Game Object must be assigned as BodySrcManager");
    }
    else
    {
        bodyManager = BodySrcManager.GetComponent<BodySourceManager>();
    }

    // specify the required face frame results
    FaceFrameFeatures faceFrameFeatures =
        FaceFrameFeatures.BoundingBoxInColorSpace
        | FaceFrameFeatures.PointsInColorSpace
        | FaceFrameFeatures.BoundingBoxInInfraredSpace
        | FaceFrameFeatures.PointsInInfraredSpace
        | FaceFrameFeatures.RotationOrientation
        | FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.Glasses
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.RightEyeClosed
        | FaceFrameFeatures.LookingAway
        | FaceFrameFeatures.MouthMoved
        | FaceFrameFeatures.MouthOpen;

    // create a face frame source + reader to track each face in the FOV
    faceFrameSources = new FaceFrameSource[bodyCount];
    faceFrameReaders = new FaceFrameReader[bodyCount];

    for (int i = 0; i < bodyCount; i++)
    {
        // create the face frame source with the required face frame features and an initial tracking Id of 0
        faceFrameSources[i] = FaceFrameSource.Create(kinectSensor, 0, faceFrameFeatures);

        // open the corresponding reader
        faceFrameReaders[i] = faceFrameSources[i].OpenReader();
    }

    initialize = true;
}
public FaceFrameTrackerProvider(KinectSensor kinectSensor)
{
    _KinectSensor = kinectSensor;

    // Pre-allocate a pool of paused face readers: one per possible body,
    // plus 40 spares.
    for (int i = 0; i < _KinectSensor.BodyFrameSource.BodyCount + 40; i++)
    {
        var faceFrameSource = new FaceFrameSource(_KinectSensor, 0, DefaultFaceFeatures);
        var faceFrameReader = faceFrameSource.OpenReader();
        faceFrameReader.IsPaused = true;
        _FaceFrameReaders.Add(faceFrameReader);
    }
}
/// <summary>
/// Initialize face tracking
/// </summary>
private void InitializeFace()
{
    _faceSource = new FaceFrameSource(_sensor, 0,
        FaceFrameFeatures.BoundingBoxInColorSpace |
        FaceFrameFeatures.FaceEngagement |
        FaceFrameFeatures.Glasses |
        FaceFrameFeatures.Happy |
        FaceFrameFeatures.LeftEyeClosed |
        FaceFrameFeatures.MouthOpen |
        FaceFrameFeatures.PointsInColorSpace |
        FaceFrameFeatures.RightEyeClosed);

    // Open the face reader and wire the face frame event
    _faceReader = _faceSource.OpenReader();
    _faceReader.FrameArrived += _faceReader_FrameArrived;
}
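A minimal sketch of the _faceReader_FrameArrived handler wired above, reading back some of the requested features; the local names are illustrative:

private void _faceReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null || frame.FaceFrameResult == null)
        {
            return;
        }

        FaceFrameResult result = frame.FaceFrameResult;

        // Each requested feature comes back as a DetectionResult
        // (Yes / No / Maybe / Unknown).
        DetectionResult happy = result.FaceProperties[FaceProperty.Happy];
        DetectionResult leftEyeClosed = result.FaceProperties[FaceProperty.LeftEyeClosed];

        // The bounding box is in color-space pixel coordinates.
        RectI box = result.FaceBoundingBoxInColorSpace;
    }
}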
/*KalmanFilterSimple1D kalman_X;
 * KalmanFilterSimple1D kalman_Y;
 * KalmanFilterSimple1D kalman_mod;*/

void Start()
{
    updateFrame = 0;

    /*kalman_X = new KalmanFilterSimple1D(f: 1, h: 1, q: qq, r: rr);
     * kalman_Y = new KalmanFilterSimple1D(f: 1, h: 1, q: qq, r: rr);
     * kalman_mod = new KalmanFilterSimple1D(f: 1, h: 1, q: qq, r: rr);*/

    sx = new StreamWriter("coords_X.txt");
    kx = new StreamWriter("coords_KX.txt");

    // one sensor is currently supported
    kinectSensor = KinectSensor.GetDefault();

    // set the maximum number of bodies that would be tracked by Kinect
    bodyCount = kinectSensor.BodyFrameSource.BodyCount;

    // allocate storage to store body objects
    bodies = new Body[bodyCount];

    // specify the required face frame results
    FaceFrameFeatures faceFrameFeatures =
        FaceFrameFeatures.BoundingBoxInColorSpace
        | FaceFrameFeatures.PointsInColorSpace
        | FaceFrameFeatures.BoundingBoxInInfraredSpace
        | FaceFrameFeatures.PointsInInfraredSpace
        | FaceFrameFeatures.RotationOrientation
        | FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.Glasses
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.RightEyeClosed
        | FaceFrameFeatures.LookingAway
        | FaceFrameFeatures.MouthMoved
        | FaceFrameFeatures.MouthOpen;

    // create a face frame source + reader to track each face in the FOV
    faceFrameSources = new FaceFrameSource[bodyCount];
    faceFrameReaders = new FaceFrameReader[bodyCount];

    for (int i = 0; i < bodyCount; i++)
    {
        // create the face frame source with the required face frame features and an initial tracking Id of 0
        faceFrameSources[i] = FaceFrameSource.Create(kinectSensor, 0, faceFrameFeatures);

        // open the corresponding reader
        faceFrameReaders[i] = faceFrameSources[i].OpenReader();
    }
}
// Get the index of the source that produced an arrived face frame
private int GetFaceSourceIndex(FaceFrameSource faceFrameSource)
{
    int index = -1;

    // Kinect v2 tracks at most six bodies, hence six face sources.
    for (int i = 0; i < 6; i++)
    {
        if (_faceFrameSources[i] == faceFrameSource)
        {
            index = i;
            break;
        }
    }

    return index;
}
private void InitializeFaceReaders()
{
    this.FaceFrameResults = new FaceFrameResult[this.Sensor.BodyFrameSource.BodyCount];
    this.faceFrameSources = new FaceFrameSource[this.Sensor.BodyFrameSource.BodyCount];
    this.faceFrameReaders = new FaceFrameReader[this.Sensor.BodyFrameSource.BodyCount];

    FaceFrameFeatures faceFrameFeatures = RequiredFaceFrameFeatures();

    for (int i = this.faceFrameSources.Length - 1; i >= 0; --i)
    {
        this.faceFrameSources[i] = FaceFrameSource.Create(this.Sensor, 0, faceFrameFeatures);
        this.faceFrameReaders[i] = this.faceFrameSources[i].OpenReader();
    }
}
/// <summary>
/// Gets the index of the face frame source
/// </summary>
/// <param name="faceFrameSource">The face frame source</param>
/// <returns>The index of the face source in the face source array</returns>
public int GetFaceSourceIndex(FaceFrameSource faceFrameSource)
{
    int index = -1;

    for (int i = 0; i < this.maxBodyCount; i++)
    {
        if (this.faceFrameSources[i] == faceFrameSource)
        {
            index = i;
            break;
        }
    }

    return index;
}
int GetFaceSourceIndex(FaceFrameSource source)
{
    int index = -1;

    for (int i = 0; i < bodyCount; i++)
    {
        if (faceFrameSources[i] == source)
        {
            index = i;
            break;
        }
    }

    return index;
}
private void CreateFaceTrackers(int total)
{
    for (int i = 0; i < total; i++)
    {
        var faceFrameSource = new FaceFrameSource(_KinectManager.KinectSensor, 0, _FaceFrameFeatures);
        var faceFrameReader = faceFrameSource.OpenReader();
        faceFrameReader.FrameArrived += OnFaceFrameArrived;
        faceFrameSource.TrackingIdLost += OnTrackingIdLost;
        _FaceFrameReaders.Add(faceFrameReader);
        _FaceFrameSources.Add(faceFrameSource);
    }
}
public MainPage()
{
    // Get the Kinect v2 sensor object.
    this.kinectSensor = KinectSensor.GetDefault();

    // Get the description of the color frames.
    FrameDescription frameDescription = this.kinectSensor.ColorFrameSource.FrameDescription;

    // Color frame width and height; here that is 1920 x 1080.
    this.displayWidth = frameDescription.Width;
    this.displayHeight = frameDescription.Height;

    // Create the reader for body frames.
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

    // Register a handler so that Reader_BodyFrameArrived runs whenever
    // the sensor raises the "FrameArrived" event for new body data.
    this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

    // Maximum number of bodies the Kinect sensor can track.
    this.bodyCount = this.kinectSensor.BodyFrameSource.BodyCount;

    // Keep the data of each body frame in an array.
    this.bodies = new Body[this.bodyCount];

    // Specify the required face frame features.
    FaceFrameFeatures faceFrameFeatures =
        FaceFrameFeatures.BoundingBoxInColorSpace
        | FaceFrameFeatures.PointsInColorSpace
        | FaceFrameFeatures.RotationOrientation
        | FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.Glasses
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.RightEyeClosed
        | FaceFrameFeatures.LookingAway
        | FaceFrameFeatures.MouthMoved
        | FaceFrameFeatures.MouthOpen;

    // Set up the source and reader for face frame data.
    this.faceFrameSource = new FaceFrameSource(this.kinectSensor, 0, faceFrameFeatures);
    this.faceFrameReader = this.faceFrameSource.OpenReader();

    // Start the Kinect sensor.
    this.kinectSensor.Open();

    // Run the initialization required to start the app.
    this.InitializeComponent();
}
public FacialState(KinectSensor sensor, ulong trackingId, EventHandler<TrackingIdLostEventArgs> handler)
{
    this.Source = new FaceFrameSource(sensor, trackingId,
        FaceFrameFeatures.BoundingBoxInColorSpace |
        FaceFrameFeatures.FaceEngagement |
        FaceFrameFeatures.Glasses |
        FaceFrameFeatures.Happy |
        FaceFrameFeatures.LeftEyeClosed |
        FaceFrameFeatures.MouthOpen |
        FaceFrameFeatures.PointsInColorSpace |
        FaceFrameFeatures.RightEyeClosed);
    this.Source.TrackingIdLost += handler;
    this.Reader = this.Source.OpenReader();
}
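A hypothetical usage of FacialState, keeping one instance per tracked body; the _facialStates dictionary and OnTrackingIdLost handler are illustrative only:

// Illustrative only: create a FacialState for each newly tracked body.
private readonly Dictionary<ulong, FacialState> _facialStates =
    new Dictionary<ulong, FacialState>();

private void TrackFaces(KinectSensor sensor, Body[] bodies)
{
    foreach (Body body in bodies)
    {
        if (body.IsTracked && !_facialStates.ContainsKey(body.TrackingId))
        {
            _facialStates[body.TrackingId] =
                new FacialState(sensor, body.TrackingId, OnTrackingIdLost);
        }
    }
}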
/// <summary>
/// Returns the index of the face frame source
/// </summary>
/// <param name="faceFrameSource">the face frame source</param>
/// <returns>the index of the face source in the face source array</returns>
private static int GetFaceSourceIndex(FaceFrameSource faceFrameSource)
{
    int index = -1;

    for (int i = 0; i < _bodyCount; i++)
    {
        if (_faceFrameSources[i] == faceFrameSource)
        {
            index = i;
            break;
        }
    }

    return index;
}
/// <summary>
/// Initialize Kinect object
/// </summary>
private void InitializeHDFace()
{
    this.CurrentBuilderStatus = "Ready To Start Capture";

    this.sensor = KinectSensor.GetDefault();
    this.bodySource = this.sensor.BodyFrameSource;
    this.bodyReader = this.bodySource.OpenReader();
    this.bodyReader.FrameArrived += this.BodyReader_FrameArrived;

    // set the maximum number of bodies that would be tracked by Kinect
    this.bodyCount = this.sensor.BodyFrameSource.BodyCount;

    // allocate storage to store body objects
    this.bodies = new Body[this.bodyCount];

    // specify the required face frame results
    FaceFrameFeatures faceFrameFeatures =
        FaceFrameFeatures.BoundingBoxInColorSpace
        | FaceFrameFeatures.PointsInColorSpace
        | FaceFrameFeatures.RotationOrientation
        | FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.Glasses
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.RightEyeClosed
        | FaceFrameFeatures.LookingAway
        | FaceFrameFeatures.MouthMoved
        | FaceFrameFeatures.MouthOpen;

    // create a face frame source + reader to track each face in the FOV
    this.faceFrameSource = new FaceFrameSource(this.sensor, 0, faceFrameFeatures);
    this.faceFrameReader = faceFrameSource.OpenReader();
    this.faceFrameReader.FrameArrived += this.Reader_FaceFrameArrived;

    this.highDefinitionFaceFrameSource = new HighDefinitionFaceFrameSource(this.sensor);
    this.highDefinitionFaceFrameSource.TrackingIdLost += this.HdFaceSource_TrackingIdLost;
    this.highDefinitionFaceFrameReader = this.highDefinitionFaceFrameSource.OpenReader();
    this.highDefinitionFaceFrameReader.FrameArrived += this.HdFaceReader_FrameArrived;

    this.currentFaceModel = new FaceModel();
    this.currentFaceAlignment = new FaceAlignment();

    this.InitializeMesh();
    this.UpdateMesh();

    this.sensor.Open();
}