/// <summary>
/// Extended CTOR: starts face tracking for a specific body on the given sensor.
/// </summary>
/// <param name="bodyId">Id of the tracked body</param>
/// <param name="faceFeatures">Set of requested face features to track</param>
/// <param name="kinect">Kinect sensor that is tracking</param>
public FaceTracker(ulong bodyId, FaceFrameFeatures faceFeatures, KinectSensor kinect)
{
    // Record the moment tracking began.
    _startTracking = DateTime.Now;

    // Keep the identifying data for later use.
    _bodyId = bodyId;
    _faceFeatures = faceFeatures;
    // _kinectId = kinect.UniqueKinectId --> NotImplementedYet

    // Face source bound to this body's TrackingId, plus its reader.
    _faceSource = new FaceFrameSource(kinect, bodyId, faceFeatures);
    _faceReader = _faceSource.OpenReader();

    Console.WriteLine(String.Format("Tracker for body #{0} started.", _bodyId));

    // Per-feature trackers must exist before frames start arriving.
    InitialiseFeatureTrackers();

    // Subscribe to frame delivery and tracking-loss notifications.
    _faceReader.FrameArrived += OnFaceFrameArrived;
    _faceSource.TrackingIdLost += OnTrackingLost;
}
/// <summary>
/// Opens the Kinect sensor and wires up the color, body and face readers.
/// Does nothing when the sensor is not in the Closed state.
/// </summary>
private void SafeOpenSensor()
{
    if (sensorStatus != SensorStatus.Closed)
    {
        return;
    }

    kinectSensor.Open();

    // Body storage sized to what the sensor can track.
    bodies = new Body[kinectSensor.BodyFrameSource.BodyCount];

    colorFrameReader = kinectSensor.ColorFrameSource.OpenReader();
    colorFrameReader.FrameArrived += colorFrameReader_FrameArrived;

    bodyFrameReader = kinectSensor.BodyFrameSource.OpenReader();
    bodyFrameReader.FrameArrived += bodyFrameReader_FrameArrived;

    // Face features requested from the face pipeline.
    FaceFrameFeatures requestedFeatures =
        FaceFrameFeatures.BoundingBoxInColorSpace
        | FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.Glasses
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.MouthOpen
        | FaceFrameFeatures.PointsInColorSpace
        | FaceFrameFeatures.RightEyeClosed;

    // TrackingId 0 is the initial value — presumably reassigned once a
    // body is tracked; confirm against the body-frame handler.
    faceFrameSource = new FaceFrameSource(kinectSensor, 0, requestedFeatures);
    faceFrameReader = faceFrameSource.OpenReader();
    faceFrameReader.FrameArrived += faceFrameReader_FrameArrived;

    sensorStatus = SensorStatus.Opened;
}
/// <summary>
/// Initializes the window, the serial link, and — when a Kinect is present —
/// opens the sensor and wires the color, body and face pipelines.
/// </summary>
public MainWindow()
{
    InitializeComponent();
    serialInit();

    _sensor = KinectSensor.GetDefault();
    if (_sensor == null)
    {
        return;
    }

    _sensor.Open();

    _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

    _colorReader = _sensor.ColorFrameSource.OpenReader();
    _colorReader.FrameArrived += ColorReader_FrameArrived;

    _bodyReader = _sensor.BodyFrameSource.OpenReader();
    _bodyReader.FrameArrived += BodyReader_FrameArrived;

    // Face source bound to TrackingId 0 initially, requesting the
    // standard feature set used by the UI.
    _faceSource = new FaceFrameSource(_sensor, 0,
        FaceFrameFeatures.BoundingBoxInColorSpace
        | FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.Glasses
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.MouthOpen
        | FaceFrameFeatures.PointsInColorSpace
        | FaceFrameFeatures.RightEyeClosed);
    _faceReader = _faceSource.OpenReader();
    _faceReader.FrameArrived += FaceReader_FrameArrived;
}
/// <summary>
/// Tracks a single Kinect body: caches its joints, subscribes to manager
/// frame events, and prepares a (paused) face frame reader bound to the
/// body's TrackingId.
/// </summary>
/// <param name="kinectManager">Owning manager whose sensor and frame events are used.</param>
/// <param name="body">Body to track; its joints and TrackingId are captured now.</param>
/// <param name="currentFrame">Frame number at which tracking starts.</param>
/// <param name="displayWidth">Display surface width in pixels.</param>
/// <param name="displayHeight">Display surface height in pixels.</param>
public TrackedBody(KinectManager kinectManager, Body body, ulong currentFrame, int displayWidth, int displayHeight)
{
    //Console.WriteLine("Tracked body invoked.");
    _KinectManager = kinectManager;
    _KinectManager.FrameArrived += KinectManager_FrameArrived;
    _Created = DateTime.UtcNow;
    _LastKnownJoints = body.Joints;
    this.TrackingId = body.TrackingId;
    this.LastTrackedFrame = currentFrame;
    _FirstTrackedFrame = currentFrame;
    _DisplayWidth = displayWidth;
    _DisplayHeight = displayHeight;
    _ColorFrameDesc = _KinectManager.KinectSensor.ColorFrameSource.CreateFrameDescription(ImageFormat);
    _DepthFrameDesc = _KinectManager.KinectSensor.DepthFrameSource.FrameDescription;
    // Source is created with TrackingId 0, then pointed at this body below.
    _FaceFrameSource = new FaceFrameSource(_KinectManager.KinectSensor, 0, DefaultFaceFeatures);
    _FaceFrameReader = _FaceFrameSource.OpenReader();
    _FaceFrameReader.FrameArrived += OnFaceFrameArrived;
    _FaceFrameReader.FaceFrameSource.TrackingIdLost += OnTrackingIdLost;
    _FaceFrameReader.FaceFrameSource.TrackingId = this.TrackingId;
    // Reader starts paused — presumably unpaused elsewhere when face data
    // is actually wanted; TODO confirm.
    _FaceFrameReader.IsPaused = true;
    // Tangent of half the depth camera's horizontal FOV (degrees -> radians).
    _DepthHFoV_Half_Rad_Tangent = Math.Tan(_DepthFrameDesc.HorizontalFieldOfView / 2 / 180 * Math.PI);
}
/// <summary>
/// Initializes the window and, when a Kinect is present, opens the sensor
/// and wires the color, body and face streams.
/// </summary>
public MainWindow()
{
    System.Diagnostics.Debug.WriteLine("Starting");
    InitializeComponent();

    _sensor = KinectSensor.GetDefault();
    if (_sensor == null)
    {
        return;
    }

    _sensor.Open();

    _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

    _colorReader = _sensor.ColorFrameSource.OpenReader();
    _colorReader.FrameArrived += ColorReader_FrameArrived;

    _bodyReader = _sensor.BodyFrameSource.OpenReader();
    _bodyReader.FrameArrived += BodyReader_FrameArrived;

    // 2) Initialize the face source with the desired features.
    _faceSource = new FaceFrameSource(_sensor, 0,
        FaceFrameFeatures.BoundingBoxInColorSpace
        | FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.Glasses
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.MouthOpen
        | FaceFrameFeatures.PointsInColorSpace
        | FaceFrameFeatures.RightEyeClosed);
    _faceReader = _faceSource.OpenReader();
    _faceReader.FrameArrived += FaceReader_FrameArrived;
}
/// <summary>
/// Unity start hook: wires body tracking and the face pipeline, then opens
/// the sensor.
/// </summary>
void Start()
{
    sensor = KinectSensor.GetDefault();

    bodySource = sensor.BodyFrameSource;
    bodyReader = bodySource.OpenReader();
    bodyReader.FrameArrived += BodyReader_FrameArrived;

    // Request everything the basic face pipeline can report.
    FaceFrameFeatures requested =
        FaceFrameFeatures.BoundingBoxInColorSpace
        | FaceFrameFeatures.PointsInColorSpace
        | FaceFrameFeatures.BoundingBoxInInfraredSpace
        | FaceFrameFeatures.PointsInInfraredSpace
        | FaceFrameFeatures.RotationOrientation
        | FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.Glasses
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.RightEyeClosed
        | FaceFrameFeatures.LookingAway
        | FaceFrameFeatures.MouthMoved
        | FaceFrameFeatures.MouthOpen;

    FaceFrameSource = FaceFrameSource.Create(sensor, currentTrackingId, requested);
    FaceFrameSource.TrackingIdLost += HdFaceSource_TrackingIdLost;

    FaceFrameReader = FaceFrameSource.OpenReader();
    FaceFrameReader.FrameArrived += HdFaceReader_FrameArrived;

    //CurrentFaceModel = FaceModel.Create();
    currentFaceAlignment = FaceAlignment.Create();

    sensor.Open();
}
/// <summary>
/// Builds the body view for the DTW test module: joint buffers,
/// center-of-mass helper, Kinect readers (body frames plus eye-state face
/// features) and bone geometry.
/// </summary>
/// <param name="app">Application view model this view belongs to.</param>
/// <param name="name">Module name for this view.</param>
public KinectBodyView(ApplicationViewModel app, string name)
{
    // Keep the application pointer and module identity.
    _app = app;
    ModuleName = name;
    NumFramesTest = _app.NumFramesTest_DTW;

    mean1 = mean2 = mean3 = mean4 = 0;

    // Reference (r*) and measurement (m*) joint buffers.
    r1Joints = new List<CameraSpacePoint>();
    r2Joints = new List<CameraSpacePoint>();
    r3Joints = new List<CameraSpacePoint>();
    r4Joints = new List<CameraSpacePoint>();
    r5Joints = new List<CameraSpacePoint>();
    m1Joints = new List<CameraSpacePoint>();
    m2Joints = new List<CameraSpacePoint>();
    m3Joints = new List<CameraSpacePoint>();
    m4Joints = new List<CameraSpacePoint>();

    _CoM = new CenterOfMass(_app);
    TestStopWatch = new Stopwatch();

    // Gets Kinect sensor reference.
    _sensor = KinectSensor.GetDefault();

    // If there is an active Kinect / an accessible studio library.
    if (_sensor != null)
    {
        _sensor.Open();

        // Reader + handler for body frames.
        _bodyFrameReader = _sensor.BodyFrameSource.OpenReader();
        _bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

        this.coordinateMapper = _sensor.CoordinateMapper;

        // Depth (display) extents define the joint-space size.
        FrameDescription depthDesc = _sensor.DepthFrameSource.FrameDescription;
        this.displayWidth = depthDesc.Width;
        this.displayHeight = depthDesc.Height;

        // Face tracking is only used here for eye open/closed state.
        _faceSource = new FaceFrameSource(_sensor, 0,
            FaceFrameFeatures.LeftEyeClosed | FaceFrameFeatures.RightEyeClosed);
        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;
    }

    // Calibration/testing flags start disabled until explicitly requested.
    IsCalibrating = false;
    IsTesting = false;

    CreateBones();
}
/// <summary>
/// Sets up the per-run session folder, facial recognition, the Kinect sensor
/// and the color / body / face readers driving the main window.
/// </summary>
public MainWindow()
{
    InitializeComponent();

    // Per-run session folder under CommonApplicationData\Kinect\&lt;guid&gt;.
    _SessionId = Guid.NewGuid();
    string commonAppData = Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData);
    _Folder = Path.Combine(commonAppData, "Kinect", _SessionId.ToString());

    _KinectSensor = KinectSensor.GetDefault();
    _FacialRecognizer = new FacialRecognizer();
    _UserEventController = new UserEventController();
    UserRecognized += _UserEventController.OnUserRecognized;

    // Frame description for the color source in Bgra format; it sizes the
    // display bitmap below.
    FrameDescription colorDesc = _KinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
    _ColorBitmap = new WriteableBitmap(colorDesc.Width, colorDesc.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

    // Availability notifications, then start the sensor.
    _KinectSensor.IsAvailableChanged += Sensor_IsAvailableChanged;
    _KinectSensor.Open();

    // Status text reflects sensor availability.
    StatusText = _KinectSensor.IsAvailable
        ? Properties.Resources.RunningStatusText
        : Properties.Resources.NoSensorStatusText;

    // The window object doubles as the view model in this simple example.
    DataContext = this;

    // Body pipeline.
    _Bodies = new Body[_KinectSensor.BodyFrameSource.BodyCount];
    _BodyReader = _KinectSensor.BodyFrameSource.OpenReader();
    _BodyReader.FrameArrived += BodyReader_FrameArrived;

    // Face pipeline, bound to TrackingId 0 initially.
    _FaceSource = new FaceFrameSource(_KinectSensor, 0,
        FaceFrameFeatures.BoundingBoxInColorSpace
        | FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.Glasses
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.MouthOpen
        | FaceFrameFeatures.PointsInColorSpace
        | FaceFrameFeatures.RightEyeClosed);
    _FaceReader = _FaceSource.OpenReader();
    _FaceReader.FrameArrived += FaceReader_FrameArrived;

    // Color pipeline.
    _ColorFrameReader = _KinectSensor.ColorFrameSource.OpenReader();
    _ColorFrameReader.FrameArrived += Reader_ColorFrameArrived;
}
/// <summary>
/// Pre-creates a pool of paused face frame readers so trackers can be handed
/// out later without allocating sources on the fly.
/// </summary>
/// <param name="kinectSensor">Sensor the face sources are bound to.</param>
public FaceFrameTrackerProvider(KinectSensor kinectSensor)
{
    _KinectSensor = kinectSensor;

    // Extra readers beyond the sensor's body capacity. The original code
    // used a bare "+ 40"; the constant makes the sizing decision visible.
    // TODO(review): confirm why 40 spare trackers are needed.
    const int SpareTrackers = 40;
    int poolSize = _KinectSensor.BodyFrameSource.BodyCount + SpareTrackers;

    for (int i = 0; i < poolSize; i++)
    {
        // TrackingId 0 acts as "unassigned"; readers stay paused until used.
        var faceFrameSource = new FaceFrameSource(_KinectSensor, 0, DefaultFaceFeatures);
        var faceFrameReader = faceFrameSource.OpenReader();
        faceFrameReader.IsPaused = true;
        _FaceFrameReaders.Add(faceFrameReader);
    }
}
/// <summary>
/// Initializes face tracking: creates the face frame source with the standard
/// feature set and hooks up the frame-arrived handler.
/// </summary>
private void InitializeFace()
{
    FaceFrameFeatures features =
        FaceFrameFeatures.BoundingBoxInColorSpace
        | FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.Glasses
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.MouthOpen
        | FaceFrameFeatures.PointsInColorSpace
        | FaceFrameFeatures.RightEyeClosed;

    _faceSource = new FaceFrameSource(_sensor, 0, features);

    // Open face reader and subscribe to its frame event.
    _faceReader = _faceSource.OpenReader();
    _faceReader.FrameArrived += _faceReader_FrameArrived;
}
/// <summary>
/// Pre-allocates the requested number of face source/reader pairs, wiring
/// frame-arrived and tracking-loss handlers for each.
/// </summary>
/// <param name="total">Number of face trackers to create.</param>
private void CreateFaceTrackers(int total)
{
    for (int i = 0; i < total; i++)
    {
        // TrackingId 0 acts as "unassigned" until a body is bound.
        var source = new FaceFrameSource(_KinectManager.KinectSensor, 0, _FaceFrameFeatures);
        var reader = source.OpenReader();

        reader.FrameArrived += OnFaceFrameArrived;
        source.TrackingIdLost += OnTrackingIdLost;

        _FaceFrameReaders.Add(reader);
        _FaceFrameSources.Add(source);
    }
}
/// <summary>
/// Initialize Kinect object: body reader, basic face tracking, the HD face
/// pipeline, and the initial mesh.
/// </summary>
private void InitializeHDFace()
{
    this.CurrentBuilderStatus = "Ready To Start Capture";

    this.sensor = KinectSensor.GetDefault();
    this.bodySource = this.sensor.BodyFrameSource;
    this.bodyReader = this.bodySource.OpenReader();
    this.bodyReader.FrameArrived += this.BodyReader_FrameArrived;

    // Maximum number of bodies the sensor tracks, and storage for them.
    this.bodyCount = this.sensor.BodyFrameSource.BodyCount;
    this.bodies = new Body[this.bodyCount];

    // Face frame results required from the basic face pipeline.
    FaceFrameFeatures requestedFeatures =
        FaceFrameFeatures.BoundingBoxInColorSpace
        | FaceFrameFeatures.PointsInColorSpace
        | FaceFrameFeatures.RotationOrientation
        | FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.Glasses
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.RightEyeClosed
        | FaceFrameFeatures.LookingAway
        | FaceFrameFeatures.MouthMoved
        | FaceFrameFeatures.MouthOpen;

    // Face frame source + reader to track each face in the FOV.
    this.faceFrameSource = new FaceFrameSource(this.sensor, 0, requestedFeatures);
    this.faceFrameReader = faceFrameSource.OpenReader();
    this.faceFrameReader.FrameArrived += this.Reader_FaceFrameArrived;

    // HD face pipeline used for the detailed mesh.
    this.highDefinitionFaceFrameSource = new HighDefinitionFaceFrameSource(this.sensor);
    this.highDefinitionFaceFrameSource.TrackingIdLost += this.HdFaceSource_TrackingIdLost;
    this.highDefinitionFaceFrameReader = this.highDefinitionFaceFrameSource.OpenReader();
    this.highDefinitionFaceFrameReader.FrameArrived += this.HdFaceReader_FrameArrived;

    this.currentFaceModel = new FaceModel();
    this.currentFaceAlignment = new FaceAlignment();

    this.InitializeMesh();
    this.UpdateMesh();

    this.sensor.Open();
}
/// <summary>
/// Creates and registers a face frame source/reader pair for the given body,
/// unless that body is already being tracked.
/// </summary>
/// <param name="trackingId">Body TrackingId to attach the face tracker to.</param>
private void BeginFaceTracking(ulong trackingId)
{
    if (_FaceFrameReaders.ContainsKey(trackingId))
    {
        return;
    }

    var frameSource = new FaceFrameSource(_KinectSensor, trackingId, TrackedFaceFeatures);
    frameSource.TrackingIdLost += FaceFrameSource_TrackingIdLost;

    var frameReader = frameSource.OpenReader();
    frameReader.FrameArrived += FaceFrameReader_FrameArrived;

    _FaceFrameSources.Add(trackingId, frameSource);
    _FaceFrameReaders.Add(trackingId, frameReader);
    //Console.WriteLine("Created face frame tracker for bodyId {0}.", trackedBody.Value.Body.TrackingId);
}
/// <summary>
/// Builds the body view: center-of-mass helper, Kinect readers (body frames
/// plus eye-state face features), double-stance pose support and bones.
/// </summary>
/// <param name="app">Application view model this view belongs to.</param>
public KinectBodyView(ApplicationViewModel app)
{
    // Keep the application pointer.
    _app = app;
    _CoM = new CenterOfMass(_app);

    // Gets Kinect sensor reference.
    _sensor = KinectSensor.GetDefault();

    // If there is an active Kinect / an accessible studio library.
    if (_sensor != null)
    {
        _sensor.Open();

        // Reader + handler for body frames.
        _bodyFrameReader = _sensor.BodyFrameSource.OpenReader();
        _bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

        this.coordinateMapper = _sensor.CoordinateMapper;

        // Depth (display) extents define the joint-space size.
        FrameDescription depthDesc = _sensor.DepthFrameSource.FrameDescription;
        this.displayWidth = depthDesc.Width;
        this.displayHeight = depthDesc.Height;

        // Face tracking is only used here for eye open/closed state.
        _faceSource = new FaceFrameSource(_sensor, 0,
            FaceFrameFeatures.LeftEyeClosed | FaceFrameFeatures.RightEyeClosed);
        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;
    }

    // Recording/testing of DoubleStance references starts disabled.
    RecordDoubleStance = false;
    ExecuteDoubleStanceTest = false;
    poseDoubleStance = new PoseDoubleStance(_app);

    CreateBones();
}
/// <summary>
/// Process body frames: refreshes the body array and, when a tracked body is
/// first seen, lazily creates the face source/reader bound to that body.
/// </summary>
private void OnBodyFrameReceived(object sender, BodyFrameArrivedEventArgs e)
{
    BodyFrameReference bodyRef = e.FrameReference;
    if (bodyRef == null)
    {
        return;
    }

    using (BodyFrame frame = bodyRef.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        // Lazily allocate the body array on the first frame.
        if (_bodies == null)
        {
            _bodies = new Body[frame.BodyCount];
        }

        frame.GetAndRefreshBodyData(_bodies);

        foreach (Body body in _bodies)
        {
            // Only the first tracked body gets a face source; afterwards
            // the non-null check makes this a no-op.
            if (!body.IsTracked || _faceSource != null)
            {
                continue;
            }

            // Create new source with the body's TrackingId, plus its reader.
            _faceSource = new FaceFrameSource(_kinect, body.TrackingId, _faceFrameFeatures);
            _faceReader = _faceSource.OpenReader();

            // Wire events.
            _faceReader.FrameArrived += OnFaceFrameArrived;
            _faceSource.TrackingIdLost += OnTrackingIdLost;
        }
    }
}
/// <summary>
/// Unity start hook: opens the body reader and, depending on flags, the basic
/// face tracker and/or the HD face pipeline (including the face mesh topology).
/// </summary>
void Start()
{
    _Sensor = KinectSensor.GetDefault();
    if (_Sensor != null)
    {
        _Reader = _Sensor.BodyFrameSource.OpenReader();

        if (enableFacetracking)
        {
            // Basic face tracking: only head rotation is requested.
            faceFrameSource = FaceFrameSource.Create(_Sensor, 0, FaceFrameFeatures.RotationOrientation);
            faceframeReader = faceFrameSource.OpenReader();
        }

        if (enableHDFace)
        {
            highDefinitionFaceFrameSource = HighDefinitionFaceFrameSource.Create(_Sensor);
            highDefinitionFaceFrameReader = highDefinitionFaceFrameSource.OpenReader();
            CurrentFaceModel = FaceModel.Create();
            currentFaceAlignment = FaceAlignment.Create();

            // Copy the face mesh triangle indices (3 per triangle) into an
            // int[] for Unity.
            // BUG FIX: the loop previously ran only to TriangleCount instead
            // of TriangleCount * 3, leaving two thirds of the index buffer
            // zeroed; iterate over the full index array instead.
            uint[] TriInd = FaceModel.TriangleIndices.ToArray();
            var triangles = new int[FaceModel.TriangleCount * 3];
            for (int i = 0; i < TriInd.Length; i += 3)
            {
                triangles[i] = (int)TriInd[i];
                triangles[i + 1] = (int)TriInd[i + 1];
                triangles[i + 2] = (int)TriInd[i + 2];
            }
            _CurrentFaceMeshTriangles = triangles;
        }

        if (!_Sensor.IsOpen)
        {
            _Sensor.Open();
        }

        controller = GameObject.Find("Controller").GetComponent<Controller>();
        Debug.Log("KinectBodyManager::Started");
    }
}
/// <summary>
/// Sets up micro-expression tracking: basic face features plus the HD face
/// stream used for brow movement analysis.
/// </summary>
/// <param name="sensor">Kinect sensor to read face data from.</param>
public FacialMicroExpressions(KinectSensor sensor)
{
    _sensor = sensor;
    // _msReader = source;
    _eyesState = EyesState.Opened;

    this._faceAlignment = new FaceAlignment();

    // 30-sample rolling windows for brow positions and their deltas.
    this._leftBrow = new float[30];
    this._rightBrow = new float[30];
    this._leftBrowDelta = new float[30];
    this._rightBrowDelta = new float[30];

    // FIX: FaceFrameFeatures.FaceEngagement was OR'd in twice; it is listed
    // once now (the resulting flag value is unchanged).
    _faceSource = new FaceFrameSource(_sensor, 0,
        FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.LookingAway
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.MouthMoved
        | FaceFrameFeatures.MouthOpen
        | FaceFrameFeatures.RightEyeClosed
        | FaceFrameFeatures.RotationOrientation);
    _faceReader = _faceSource.OpenReader();
    _faceReader.FrameArrived += _faceReader_FrameArrived;
    // _msReader.MultiSourceFrameArrived += _msReader_MultiSourceFrameArrived;

    //TODO: Use HDFace to determine gulping, Eyebrows
    _hdSource = new HighDefinitionFaceFrameSource(_sensor);
    _hdReader = _hdSource.OpenReader();
    _hdReader.FrameArrived += _hdReader_FrameArrived;
}
/// <summary>
/// Initializes the window and, when a Kinect is available, opens it and
/// wires the color, body and face pipelines; otherwise records a status text.
/// </summary>
public MainWindow()
{
    InitializeComponent();

    // Initialize the Kinect sensor.
    _sensor = KinectSensor.GetDefault();

    if (_sensor == null)
    {
        statusText = "Kinect Tidak Ada";
    }

    if (_sensor != null)
    {
        _sensor.Open();

        _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

        _colorReader = _sensor.ColorFrameSource.OpenReader();
        _colorReader.FrameArrived += ColorReader_FrameArrived;

        _bodyReader = _sensor.BodyFrameSource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        // Initialize the face source with the requested feature set.
        _faceSource = new FaceFrameSource(_sensor, 0,
            FaceFrameFeatures.BoundingBoxInColorSpace
            | FaceFrameFeatures.FaceEngagement
            | FaceFrameFeatures.Glasses
            | FaceFrameFeatures.Happy
            | FaceFrameFeatures.LeftEyeClosed
            | FaceFrameFeatures.MouthOpen
            | FaceFrameFeatures.PointsInColorSpace
            | FaceFrameFeatures.RightEyeClosed);
        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;

        // NOTE(review): this local is unused — kept to preserve behavior;
        // consider removing or using it.
        FrameDescription frameDescription = _sensor.ColorFrameSource.FrameDescription;
    }
}
/// <summary>
/// Initializes the page and, if a Kinect is present, opens color, body and
/// face (color-space points only) readers.
/// </summary>
public MainPage()
{
    InitializeComponent();

    _sensor = KinectSensor.GetDefault();
    if (_sensor == null)
    {
        return;
    }

    _sensor.Open();

    _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

    _colorReader = _sensor.ColorFrameSource.OpenReader();
    _colorReader.FrameArrived += ColorReader_FrameArrived;

    _bodyReader = _sensor.BodyFrameSource.OpenReader();
    _bodyReader.FrameArrived += BodyReader_FrameArrived;

    // Only face points in color space are needed here.
    _faceSource = new FaceFrameSource(_sensor, 0, FaceFrameFeatures.PointsInColorSpace);
    _faceReader = _faceSource.OpenReader();
    _faceReader.FrameArrived += FaceReader_FrameArrived;
}
/// <summary>
/// Prepares body analysis: depth-frame geometry, coordinate mapper, the full
/// basic face feature stream and the HD face stream.
/// </summary>
/// <param name="sensor">Kinect sensor supplying depth and face data.</param>
public BodyAnalysis(KinectSensor sensor)
{
    _sensor = sensor;

    // Depth extents; the pixel buffer is sized width * height.
    FrameDescription depthDesc = _sensor.DepthFrameSource.FrameDescription;
    _depthWidth = depthDesc.Width;
    int depthHeight = depthDesc.Height;

    // allocate space to put the pixels being received and converted
    this.depthFrameData = new ushort[_depthWidth * depthHeight];

    //_msReader = reader;
    //_msReader.MultiSourceFrameArrived += _msReader_MultiSourceFrameArrived;
    //reader.FrameArrived += reader_FrameArrived;

    _coordinateMapper = _sensor.CoordinateMapper;

    // Request every basic face feature.
    _faceSource = new FaceFrameSource(_sensor, 0,
        FaceFrameFeatures.BoundingBoxInColorSpace
        | FaceFrameFeatures.BoundingBoxInInfraredSpace
        | FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.Glasses
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.LookingAway
        | FaceFrameFeatures.MouthMoved
        | FaceFrameFeatures.MouthOpen
        | FaceFrameFeatures.PointsInColorSpace
        | FaceFrameFeatures.PointsInInfraredSpace
        | FaceFrameFeatures.RightEyeClosed
        | FaceFrameFeatures.RotationOrientation);
    _faceReader = _faceSource.OpenReader();
    _faceReader.FrameArrived += _faceReader_FrameArrived;

    _faceAlignment = new FaceAlignment();

    // HD face stream.
    _hdSource = new HighDefinitionFaceFrameSource(_sensor);
    _hdReader = _hdSource.OpenReader();
    _hdReader.FrameArrived += _hdReader_FrameArrived;
}
/// <summary>
/// Starts the face camera: body reader, HD face reader, and the basic face
/// reader (eye/mouth/glasses/happy features), then opens the sensor.
/// </summary>
void IFaceCamera<System.Drawing.PointF>.Start()
{
    _sensor = KinectSensor.GetDefault();
    if (_sensor == null)
    {
        return;
    }

    _sensor.IsAvailableChanged += OnKinectSensorChanged;

    _bodySource = _sensor.BodyFrameSource;
    _bodyReader = _bodySource.OpenReader();
    _bodyReader.FrameArrived += OnBodyReaderFrameArrived;

    // HD face pipeline.
    _faceSourceHighDef = new HighDefinitionFaceFrameSource(_sensor);
    _faceReaderHighDef = _faceSourceHighDef.OpenReader();
    _faceReaderHighDef.FrameArrived += OnFaceReaderHighDefFrameArrived;

    // Basic face pipeline, bound to TrackingId 0 initially.
    _faceSource = new FaceFrameSource(_sensor, 0,
        FaceFrameFeatures.Glasses
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.MouthOpen
        | FaceFrameFeatures.MouthMoved
        | FaceFrameFeatures.RightEyeClosed);

    // Both sources report tracking loss through the same handler.
    _faceSource.TrackingIdLost += _faceSource_TrackingIdLost;
    _faceSourceHighDef.TrackingIdLost += _faceSource_TrackingIdLost;

    _faceReader = _faceSource.OpenReader();
    _faceReader.FrameArrived += OnFaceReaderFrameArrived;

    _faceModel = new FaceModel();
    _faceAlignment = new FaceAlignment();

    _sensor.Open();
}
/// <summary>
/// Window loaded: acquires the sensor and wires posture detection, background
/// removal, the multi-source reader, body frames, and face tracking.
/// </summary>
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    // Acquire the sensor.
    _sensor = KinectSensor.GetDefault();
    if (_sensor == null)
    {
        return;
    }

    //_sensor.Open();
    // NOTE(review): the sensor is never opened here — presumably opened
    // elsewhere; confirm frames actually arrive.

    // Posture events.
    poseture.Poseture_List += Poseture_result;

    // 2) Initialize the background removal tool.
    _backgroundRemovalTool = new BackgroundRemovalTool(_sensor.CoordinateMapper);

    // MultiSourceFrameReader and its event.
    _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.BodyIndex);
    _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

    // Skeleton (body) frames and their event.
    bodyFrameReader = _sensor.BodyFrameSource.OpenReader();
    bodyFrameReader.FrameArrived += BodyFrameReader_FrameArrived;

    _faceSource = new FaceFrameSource(_sensor, 0,
        FaceFrameFeatures.BoundingBoxInColorSpace
        | FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.Glasses
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.MouthOpen
        | FaceFrameFeatures.PointsInColorSpace
        | FaceFrameFeatures.RightEyeClosed);

    // Open face reader and subscribe to its frame event.
    _faceReader = _faceSource.OpenReader();
    _faceReader.FrameArrived += _faceReader_FrameArrived;
}
/// <summary>
/// Initializes the Kinect sensor and the variables used for data capture:
/// color buffers/bitmaps, the multi-source (body + color) reader, and the
/// face frame source/reader pair.
/// </summary>
private void Initialize()
{
    // Acquire the default Kinect sensor; bail out if none is present.
    this.kinect = KinectSensor.GetDefault();
    if (this.kinect == null)
    {
        return;
    }

    // Color stream geometry drives the buffer and bitmap sizes.
    var desc = kinect.ColorFrameSource.FrameDescription;
    this.colorPixels = new byte[desc.Width * desc.Height * bytePerPixel];
    this._ColorBitmap = new WriteableBitmap(desc.Width, desc.Height, 96.0, 96.0, PixelFormats.Bgr32, null);
    this._FacePointBitmap = new RenderTargetBitmap(desc.Width, desc.Height, 96.0, 96.0, PixelFormats.Default);

    // FrameReader delivering both Body (skeleton) and Color data.
    this.reader = kinect.OpenMultiSourceFrameReader(FrameSourceTypes.Body | FrameSourceTypes.Color);
    this.reader.MultiSourceFrameArrived += OnMultiSourceFrameArrived;

    // Create the FaceFrameSource and its reader.
    faceSource = new FaceFrameSource(kinect, 0, DefaultFaceFrameFeatures);
    faceReader = faceSource.OpenReader();

    // Raised when the reader has a face frame available.
    faceReader.FrameArrived += OnFaceFrameArrived;

    // Raised when the source loses the TrackingId it was following.
    faceSource.TrackingIdLost += OnTrackingIdLost;

    // Start the sensor.
    kinect.Open();
}
/// <summary>
/// Window loaded: shows the face-features window, then opens the Kinect and
/// wires color, body and face (extended feature set) readers.
/// </summary>
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    // Show the Face_Features window.
    show_face_features.Show();

    _sensor = KinectSensor.GetDefault();
    if (_sensor == null)
    {
        return;
    }

    _sensor.Open();

    _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

    _colorReader = _sensor.ColorFrameSource.OpenReader();
    _colorReader.FrameArrived += ColorReader_FrameArrived;

    _bodyReader = _sensor.BodyFrameSource.OpenReader();
    _bodyReader.FrameArrived += BodyReader_FrameArrived;

    // 2) Initialize the face source with the desired features.
    _faceSource = new FaceFrameSource(_sensor, 0,
        FaceFrameFeatures.BoundingBoxInColorSpace
        | FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.Glasses
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.MouthOpen
        | FaceFrameFeatures.PointsInColorSpace
        | FaceFrameFeatures.RightEyeClosed
        | FaceFrameFeatures.LookingAway
        | FaceFrameFeatures.MouthMoved
        | FaceFrameFeatures.RotationOrientation);
    _faceReader = _faceSource.OpenReader();
    _faceReader.FrameArrived += FaceReader_FrameArrived;
}
/// <summary>
/// Initializes the page, the Kinect body + face pipelines (basic and HD
/// face), logs some FaceHD diagnostics, and starts a 1-second timer.
/// </summary>
public MainPage()
{
    this.InitializeComponent();

    _sensor = KinectSensor.GetDefault();
    _faceData = new FaceData();

    if (_sensor != null)
    {
        _sensor.Open();

        _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

        //_colorReader = _sensor.ColorFrameSource.OpenReader();
        //_colorReader.FrameArrived += ColorReader_FrameArrived;

        _bodyReader = _sensor.BodyFrameSource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        // 2) Initialize the face source with the desired features
        // specify the required face frame results
        FaceFrameFeatures faceFrameFeatures =
            FaceFrameFeatures.BoundingBoxInColorSpace
            | FaceFrameFeatures.PointsInColorSpace
            | FaceFrameFeatures.RotationOrientation
            | FaceFrameFeatures.FaceEngagement
            | FaceFrameFeatures.Glasses
            | FaceFrameFeatures.Happy
            | FaceFrameFeatures.LeftEyeClosed
            | FaceFrameFeatures.RightEyeClosed
            | FaceFrameFeatures.LookingAway
            | FaceFrameFeatures.MouthMoved
            | FaceFrameFeatures.MouthOpen;

        _normalFaceSource = new FaceFrameSource(_sensor, 0, faceFrameFeatures);
        _normalFaceReader = _normalFaceSource.OpenReader();
        _normalFaceReader.FrameArrived += NormalFaceReader_FrameArrived;

        // from HD
        _hdFaceSource = new HighDefinitionFaceFrameSource(_sensor);
        _hdFaceReader = _hdFaceSource.OpenReader();
        _hdFaceReader.FrameArrived += HDFaceReader_FrameArrived;
        _faceModel = new FaceModel();
        _faceAlignment = new FaceAlignment();
    }

    //tmp canvas
    /*
     * // Add a Line Element
     * var myLine = new Line();
     * // myLine.Stroke = System.Windows.Media.Brushes.LightSteelBlue;
     * myLine.Stroke = new SolidColorBrush(Colors.Blue);
     * //myLine.Stroke =
     *
     * myLine.X1 = 100;
     * myLine.X2 = 150;
     * myLine.Y1 = 100;
     * myLine.Y2 = 150;
     * myLine.HorizontalAlignment = HorizontalAlignment.Left;
     * myLine.VerticalAlignment = VerticalAlignment.Center;
     * myLine.StrokeThickness = 2;
     * canvasHD.Children.Add(myLine);
     */

    // tmp
    write_log("llllllllllooooooooooooooooooonnnnnnnnnnnnnnnnnnnnnggggggggggggggg");
    write_log(FaceHD.MouthUpperlipMidbottom.ToString());
    var face = new FaceHD();
    write_log(face.dump_str());
    emoji.Source = Emoji.none;
    var autoEvent = new AutoResetEvent(false);
    //tmp.Text = "tmp";
    // NOTE(review): the timer is held only by a local; once the constructor
    // returns it can be garbage-collected and stop firing — consider storing
    // it in a field. TODO confirm intended lifetime.
    var stateTimer = new Timer(tmp_callback, autoEvent, 1000, 1000);
    //var stateTimer = new Timer(tmp_callback);
    //var timer = new System.Timers.Timer(1000);
}
/// <summary>
/// Creates the face frame source for the current tracking id — requesting
/// bounding boxes and points in both color and infrared space — and starts
/// its reader.
/// </summary>
private void InitializeFace()
{
    m_FaceSource = new Microsoft.Kinect.Face.FaceFrameSource(
        m_Sensor,
        m_CurrentTrackingId,
        Microsoft.Kinect.Face.FaceFrameFeatures.BoundingBoxInColorSpace
        | Microsoft.Kinect.Face.FaceFrameFeatures.BoundingBoxInInfraredSpace
        | Microsoft.Kinect.Face.FaceFrameFeatures.PointsInColorSpace
        | Microsoft.Kinect.Face.FaceFrameFeatures.PointsInInfraredSpace);

    m_FaceReader = m_FaceSource.OpenReader();
    m_FaceReader.FrameArrived += M_FaceReader_FrameArrived;
}
/// <summary>
/// Initializes a new instance of the MainWindow class: sensor, body reader,
/// per-body gesture detectors with their UI content controls, face tracking,
/// and the serial port.
/// </summary>
public MainWindow()
{
    // only one sensor is currently supported
    this.kinectSensor = KinectSensor.GetDefault();

    // set IsAvailableChanged event notifier
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // open the sensor
    this.kinectSensor.Open();

    // set the status text
    this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText : Properties.Resources.NoSensorStatusText;

    // open the reader for the body frames
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

    // set the BodyFramedArrived event notifier
    this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

    // initialize the BodyViewer object for displaying tracked bodies in the UI
    this.kinectBodyView = new KinectBodyView(this.kinectSensor);

    // initialize the gesture detection objects for our gestures
    this.gestureDetectorList = new List<GestureDetector>();

    // initialize the MainWindow
    this.InitializeComponent();

    // set our data context objects for display in UI
    this.DataContext = this;
    this.kinectBodyViewbox.DataContext = this.kinectBodyView;

    // create a gesture detector for each body (6 bodies => 6 detectors) and
    // create content controls to display results in the UI
    int col0Row = 0;
    int col1Row = 0;
    int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;
    for (int i = 0; i < maxBodies; ++i)
    {
        GestureResultView result = new GestureResultView(i, false, false, 0.0f);
        // foreach (var database in GestureHelper.gestures)
        // {
        GestureDetector detector = new GestureDetector(this.kinectSensor, result);
        this.gestureDetectorList.Add(detector);
        // }

        // split gesture results across the first two columns of the content grid
        ContentControl contentControl = new ContentControl();
        contentControl.Content = this.gestureDetectorList[i].GestureResultView;

        if (i % 2 == 0)
        {
            // Gesture results for bodies: 0, 2, 4
            Grid.SetColumn(contentControl, 0);
            Grid.SetRow(contentControl, col0Row);
            ++col0Row;
        }
        else
        {
            // Gesture results for bodies: 1, 3, 5
            Grid.SetColumn(contentControl, 1);
            Grid.SetRow(contentControl, col1Row);
            ++col1Row;
        }

        this.contentGrid.Children.Add(contentControl);
    }

    // Face detection: source bound to TrackingId 0 initially, requesting the
    // engagement / expression / point features used by the UI.
    _faceSource = new FaceFrameSource(kinectSensor, 0,
        FaceFrameFeatures.BoundingBoxInColorSpace
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.MouthOpen
        | FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.LookingAway
        | FaceFrameFeatures.PointsInColorSpace
        | FaceFrameFeatures.RightEyeClosed);
    _faceReader = _faceSource.OpenReader();
    _faceReader.FrameArrived += FaceReader_FrameArrived;

    // Serial port configured from user settings (COM port / baud rate).
    this.sp = new SerialPort(Properties.Settings.Default.ComPort, Properties.Settings.Default.Baudrate, Parity.None, 8, StopBits.One);
}
/// <summary>
/// Starts people tracking: opens body and face readers, builds the reactive
/// frame pipelines, and subscribes to scene enter/leave changes.
/// </summary>
public void Start()
{
    SendInfo("People Tracker Started");

    this.bodySubscriptions = new Dictionary<ulong, IDisposable>();
    this.faceSubscriptions = new Dictionary<ulong, IDisposable>();
    this.trackedPeople = new Dictionary<ulong, TrackedPerson>();

    bodies = new Body[kinect.BodyFrameSource.BodyCount];
    var bodyReader = this.kinect.BodyFrameSource.OpenReader();

    // Body frames, sampled down to one per second.
    var bodyFrameObservable = this.kinect
        .BodyFrameArrivedObservable(bodyReader)
        .SelectBodies(bodies).Sample(TimeSpan.FromSeconds(1));

    faceFrameSource = new FaceFrameSource(kinect, 0,
        FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.Glasses);
    var faceReader = faceFrameSource.OpenReader();

    // BUG FIX: the original Observable.FromEvent attached one lambda on
    // subscribe and tried to detach a *different* lambda instance on
    // dispose, so the FrameArrived handler was never removed (a leak).
    // FromEventPattern keeps a single handler reference for both operations.
    var faceFramesObservable = Observable
        .FromEventPattern<FaceFrameArrivedEventArgs>(
            h => faceReader.FrameArrived += h,
            h => faceReader.FrameArrived -= h)
        .Select(ep => ep.EventArgs)
        .SelectFaceFrame();

    kinect.SceneChanges()
        .Subscribe(_ =>
        {
            var trackingId = _.SceneChangedType.TrackingId;
            if (_.SceneChangedType is PersonEnteredScene)
            {
                SendInfo(string.Format("Person {0} entered scene", trackingId));
                TrackedPerson person = new TrackedPerson() { TrackingId = trackingId, EnteredScene = DateTime.UtcNow };
                trackedPeople.Add(trackingId, person);
                bodySubscriptions.Add(trackingId, SubscribeToBody(person, bodyFrameObservable, faceFramesObservable));
                // The single face source follows the most recent person.
                faceFrameSource.TrackingId = trackingId;
            }
            else if (_.SceneChangedType is PersonLeftScene)
            {
                var person = trackedPeople[trackingId];
                person.LeftScene = DateTime.UtcNow;
                person.TotalInScene = person.LeftScene - person.EnteredScene;
                SendInfo(string.Format("Person {0} left the scene {1} Engaged:{2} Happy:{3} Height:{4} FirstLocation:{5} LastLocation:{6}", trackingId, person.TotalInScene, person.Engaged, person.Happy, person.Height, person.FirstLocation, person.LastLocation));
                trackedPeople.Remove(trackingId);
                var subscription = bodySubscriptions[trackingId];
                bodySubscriptions.Remove(trackingId);
                subscription.Dispose();
                SendPerson(person);
            }
        });
}