public MainWindow()
{
    InitializeComponent();

    // Configure the UDP endpoint.
    ep = new IPEndPoint(IP, 9999);

    // Initialize the KinectSensor.
    _sensor = KinectSensor.GetDefault();

    if (_sensor != null)
    {
        // Listen for body data.
        _bodySource = _sensor.BodyFrameSource;
        _bodyReader = _bodySource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        // Listen for multi-source data.
        _multiReader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color);
        _multiReader.MultiSourceFrameArrived += MultiReader_MultiSourceFrameArrived;

        // Listen for HD face data.
        _faceSource = new HighDefinitionFaceFrameSource(_sensor);
        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;

        _faceModel = new FaceModel();
        _faceAlignment = new FaceAlignment();

        // Start tracking!
        _sensor.Open();
    }
}
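Most of these snippets subscribe a BodyReader_FrameArrived handler without showing it, yet HD face tracking only starts once the face source is given a tracked body's TrackingId. A minimal sketch of that glue, assuming the field names above plus a _bodies array and `using System.Linq;` (both assumptions, not part of the snippet):

private Body[] _bodies;  // hypothetical backing array for body data

private void BodyReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null) return;

        if (_bodies == null)
        {
            _bodies = new Body[frame.BodyCount];
        }

        frame.GetAndRefreshBodyData(_bodies);

        // Hand the first tracked body's id to the HD face source so it starts tracking.
        if (!_faceSource.IsTrackingIdValid)
        {
            var body = _bodies.FirstOrDefault(b => b != null && b.IsTracked);
            if (body != null)
            {
                _faceSource.TrackingId = body.TrackingId;
            }
        }
    }
}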
private void InitializeKinect()
{
    _sensor = KinectSensor.GetDefault();

    if (_sensor != null)
    {
        _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth); // | FrameSourceTypes.LongExposureInfrared
        _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

        FrameDescription colorFrameDescription = _sensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
        ColorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

        _bodySource = _sensor.BodyFrameSource;
        _bodyReader = _bodySource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        _faceSource = new HighDefinitionFaceFrameSource(_sensor);
        _faceReader = _faceSource.OpenReader();
        _faceSource.TrackingQuality = FaceAlignmentQuality.Low;
        _faceReader.FrameArrived += FaceReader_FrameArrived;

        _faceModel = new FaceModel();
        _faceAlignment = new FaceAlignment();

        _sensor.Open();
    }
}
void _hdReader_FrameArrived(HighDefinitionFaceFrameReader sender, HighDefinitionFaceFrameArrivedEventArgs args)
{
    using (var hdFaceFrame = args.FrameReference.AcquireFrame())
    {
        if (hdFaceFrame != null && _hdSource.TrackingId != 0)
        {
            hdFaceFrame.GetAndRefreshFaceAlignmentResult(this._faceAlignment);
            var animationUnits = this._faceAlignment.AnimationUnits;

            if (_faceAlignment.Quality == FaceAlignmentQuality.High)
            {
                foreach (var animUnit in animationUnits)
                {
                    if (animUnit.Key == FaceShapeAnimations.LefteyebrowLowerer)
                    {
                        _leftBrow[ndx] = animUnit.Value;
                    }

                    if (animUnit.Key == FaceShapeAnimations.RighteyebrowLowerer)
                    {
                        _rightBrow[ndx] = animUnit.Value;
                    }

                    ndx++;

                    if (ndx == 30)
                    {
                        ndx = 0;

                        // Average the brow movements over the last 30 samples.
                        var leftBrowMovementSum = 0.0f;
                        var rightBrowMovementSum = 0.0f;

                        for (int i = 0; i < 30; i++)
                        {
                            leftBrowMovementSum += _leftBrow[i];
                            rightBrowMovementSum += _rightBrow[i];
                        }

                        _rightBrowDelta[0] = _rightBrowDelta[1];
                        _leftBrowDelta[0] = _leftBrowDelta[1];
                        _rightBrowDelta[1] = rightBrowMovementSum / 30;
                        _leftBrowDelta[1] = leftBrowMovementSum / 30;
                    }

                    var rightBrowDiff = Math.Abs(_rightBrowDelta[1] * _rightBrowDelta[1] - _rightBrowDelta[0] * _rightBrowDelta[0]);
                    var leftBrowDiff = Math.Abs(_leftBrowDelta[1] * _leftBrowDelta[1] - _leftBrowDelta[0] * _leftBrowDelta[0]);

                    if (leftBrowDiff > 0.015 && rightBrowDiff > 0.015)
                    {
                        browToleranceCount++;

                        if (browToleranceCount > 350)
                        {
                            OnEyebrowsDrawnUpArrived(new EyebrowsDrawnUpArrivedEventArgs() { Confidence = 1.0f });
                            browToleranceCount = 0;
                        }
                    }
                }
            }
        }
    }
}
private void InitKinect()
{
    Size displaySize = new Size(0, 0);

    this.kinectSensor = KinectSensor.GetDefault();

    if (this.kinectSensor != null)
    {
        this.kinectSensor.Open();

        var frameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;
        displaySize.Width = frameDescription.Width;
        displaySize.Height = frameDescription.Height;

        this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

        this.faceFrameSource = new HighDefinitionFaceFrameSource(kinectSensor);
        this.faceFrameReader = this.faceFrameSource.OpenReader();

        this.UptimeText = Properties.Resources.InitializingStatusTextFormat;
        this.currentAlignment = new FaceAlignment();
    }
    else
    {
        this.UptimeText = Properties.Resources.NoSensorFoundText;
    }

    this.kinectCanvas = new KinectCanvas(this.kinectSensor, displaySize);
}
/// <summary>
/// Initializes a new instance of the KinectFacialRecognitionEngine class.
/// </summary>
public KinectFacialRecognitionEngine(KinectSensor kinect, params IRecognitionProcessor[] processors)
{
    this.Kinect = kinect;
    this.ProcessingEnabled = true;
    this.Processors = processors;

    if (this.Processors == null || !this.Processors.Any())
    {
        throw new ArgumentException("Please pass in at least one recognition processor!");
    }

    this.bodies = new Body[kinect.BodyFrameSource.BodyCount];
    this.colorImageBuffer = new byte[4 * kinect.ColorFrameSource.FrameDescription.LengthInPixels];
    this.imageWidth = kinect.ColorFrameSource.FrameDescription.Width;
    this.imageHeight = kinect.ColorFrameSource.FrameDescription.Height;

    this.msReader = this.Kinect.OpenMultiSourceFrameReader(FrameSourceTypes.Body | FrameSourceTypes.Color);
    this.msReader.MultiSourceFrameArrived += this.MultiSourceFrameArrived;

    this.faceSource = new HighDefinitionFaceFrameSource(kinect);
    this.faceSource.TrackingQuality = FaceAlignmentQuality.High;
    this.faceReader = this.faceSource.OpenReader();
    this.faceReader.FrameArrived += this.FaceFrameArrived;

    this.recognizerWorker = new BackgroundWorker();
    this.recognizerWorker.DoWork += this.RecognizerWorker_DoWork;
    this.recognizerWorker.RunWorkerCompleted += this.RecognizerWorker_RunWorkerCompleted;
}
public MainWindow()
{
    InitializeComponent();

    _sensor = KinectSensor.GetDefault();

    if (_sensor != null)
    {
        // Listen for body data.
        _bodySource = _sensor.BodyFrameSource;
        _bodyReader = _bodySource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        _colorReader = _sensor.ColorFrameSource.OpenReader();
        _colorReader.FrameArrived += ColorReader_FrameArrived;

        // Listen for HD face data.
        _faceSource = new HighDefinitionFaceFrameSource(_sensor);
        _faceSourceSub = new HighDefinitionFaceFrameSource(_sensor);
        // _faceSource.TrackingIdLost += OnTrackingIdLost;
        _faceReader = _faceSource.OpenReader();
        _faceReaderSub = _faceSourceSub.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;
        _faceReaderSub.FrameArrived += FaceReaderSub_FrameArrived;

        _faceModel = new FaceModel();
        _faceAlignment = new FaceAlignment();
        _faceAlignmentSub = new FaceAlignment();

        // Start tracking!
        _sensor.Open();
    }
}
void initialize()
{
    IsFaceModelCollectCompleted = false;
    FaceCaptureStatus = "";
    FaceVertices = new List<CameraSpacePoint>();

    sensor = KinectSensor.GetDefault();
    if (sensor == null)
    {
        return;
    }

    sensor.Open();

    bodySource = sensor.BodyFrameSource;
    bodyReader = bodySource.OpenReader();

    hdFaceFrameSource = HighDefinitionFaceFrameSource.Create(sensor);
    hdFaceFrameReader = hdFaceFrameSource.OpenReader();

    faceModel = FaceModel.Create();
    faceAlignment = FaceAlignment.Create();

    FaceModelBuilderAttributes attributes = FaceModelBuilderAttributes.None;
    faceModelBuilder = hdFaceFrameSource.OpenModelBuilder(attributes);
    faceModelBuilder.CollectFaceDataAsync(collectFaceModelCompleted, collectFaceModelFailed);
}
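The snippet above hands CollectFaceDataAsync two callbacks it never defines. A plausible sketch of them, assuming the Unity-flavored API where the completion callback receives a FaceModelData; the exact callback signatures are assumptions, only ProduceFaceModel() and the fields from the snippet above are taken as given:

// Hypothetical completion callback: produce the fitted FaceModel from the collected data.
private void collectFaceModelCompleted(FaceModelData modelData)
{
    faceModel = modelData.ProduceFaceModel();
    IsFaceModelCollectCompleted = true;
    FaceCaptureStatus = "Capture complete";

    // The builder is no longer needed once the model has been produced.
    faceModelBuilder.Dispose();
    faceModelBuilder = null;
}

// Hypothetical failure callback; the parameter type is an assumption.
private void collectFaceModelFailed(int errorCode)
{
    FaceCaptureStatus = "Capture failed: " + errorCode;
}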
static void Main(string[] args)
{
    _sensor = KinectSensor.GetDefault();
    _worker.getSubjectID();

    if (_sensor != null)
    {
        _sensor.Open();
        Console.WriteLine("sensorOpened");

        if (_sensor.IsOpen)
        {
            _coordinateMapper = _sensor.CoordinateMapper;

            _bodyFrameReader = _sensor.BodyFrameSource.OpenReader();
            _bodyFrameReader.FrameArrived += BodyFrameReader_FrameArrived;

            _faceSource = new HighDefinitionFaceFrameSource(_sensor);
            _faceReader = _faceSource.OpenReader();
            _faceReader.FrameArrived += FaceReader_FrameArrived;

            _faceModel = new FaceModel();
            _faceAlignment = new FaceAlignment();
        }
    }

    // Keep the console app alive until the user presses Enter, then shut down.
    string input = Console.ReadLine();

    if (_sensor != null)
    {
        _sensor.Close();
    }
}
public void InitTracker()
{
    lastSensorAvail = false;

    sensor = KinectSensor.GetDefault();

    bodySource = sensor.BodyFrameSource;
    bodyReader = bodySource.OpenReader();
    bodyReader.FrameArrived += NewBodyReaderFrame;

    hdFaceFrameSource = new HighDefinitionFaceFrameSource(sensor);
    hdFaceFrameSource.TrackingIdLost += HdFaceSource_TrackingIdLost;
    hdFaceFrameReader = hdFaceFrameSource.OpenReader();
    hdFaceFrameReader.FrameArrived += HdFaceReader_FrameArrived;

    sensor.IsAvailableChanged += SensorAvailableChanged;
    Console.WriteLine("Face tracker ready.");

    dest = IPAddress.Parse(ip);
    endPoint = new IPEndPoint(dest, port);
    sendBuffer = new byte[48];
    Console.WriteLine("UDP Socket created for port {0}", port);
}
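A sketch of how the 48-byte sendBuffer above might be filled and sent once a face frame arrives: the face orientation quaternion plus the head pivot point, serialized as little-endian floats. The udpClient field, the SendFacePose helper, and the payload layout are assumptions, not part of the snippet (requires `using System.Net.Sockets;`):

private UdpClient udpClient = new UdpClient();  // hypothetical socket field

private void SendFacePose(FaceAlignment alignment)
{
    // Pack orientation (x, y, z, w) and head pivot (x, y, z); the rest of the buffer stays zero.
    var q = alignment.FaceOrientation;
    var p = alignment.HeadPivotPoint;
    float[] values = { q.X, q.Y, q.Z, q.W, p.X, p.Y, p.Z };

    Buffer.BlockCopy(values, 0, sendBuffer, 0, values.Length * sizeof(float));
    udpClient.Send(sendBuffer, sendBuffer.Length, endPoint);
}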
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    _sensor = KinectSensor.GetDefault();

    /*
    ColorFrameReader cfr = _sensor.ColorFrameSource.OpenReader();
    fd = _sensor.ColorFrameSource.FrameDescription;
    colordata = new byte[fd.LengthInPixels * 4];
    bitmap = new WriteableBitmap(fd.Width, fd.Height, 96, 96, PixelFormats.Bgr32, null);
    this.image.Source = bitmap;
    */

    if (_sensor != null)
    {
        _bodySource = _sensor.BodyFrameSource;
        _bodyReader = _bodySource.OpenReader();
        //_bodyReader.FrameArrived += BodyReader_FrameArrived;

        _faceSource = new HighDefinitionFaceFrameSource(_sensor);
        _faceReader = _faceSource.OpenReader();
        //_faceReader.FrameArrived += FaceReader_FrameArrived;

        _faceModel = new FaceModel();
        _faceAlignment = new FaceAlignment();

        //cfr.FrameArrived += cfr_FrameArrived;
        //_sensor.Open();
    }
}
/// <summary>
/// Initializes the Kinect sensor and the variables used for data acquisition.
/// </summary>
private void Initialize()
{
    // Get the Kinect sensor.
    this.kinect = KinectSensor.GetDefault();

    if (kinect == null)
    {
        return;
    }

    // Create a FrameReader that retrieves Body (skeleton) data from the sensor.
    reader = kinect.OpenMultiSourceFrameReader(FrameSourceTypes.Body);
    reader.MultiSourceFrameArrived += OnMultiSourceFrameArrived;

    // Declare the source and FrameReader for high-definition face tracking.
    this.hdFaceFrameSource = new HighDefinitionFaceFrameSource(this.kinect);
    this.hdFaceFrameSource.TrackingIdLost += this.OnTrackingIdLost;
    this.hdFaceFrameReader = this.hdFaceFrameSource.OpenReader();
    this.hdFaceFrameReader.FrameArrived += this.OnFaceFrameArrived;

    this.faceModel = new FaceModel();
    this.faceAlignment = new FaceAlignment();

    // Update the views.
    InitializeMesh();
    UpdateMesh();

    // Start the sensor.
    kinect.Open();
}
public FacePage()
{
    System.Media.SoundPlayer player = new System.Media.SoundPlayer();
    player.Play();

    InitializeComponent();

    currFaceState = FaceState.KinectWait;
    currBodyState = BodyState.KinectWait;

    faceSamples = new double[NUM_SAMPLES];
    flagRuns = new int[Enum.GetNames(typeof(FlagType)).Length];

    _sensor = KinectSensor.GetDefault();

    if (_sensor != null)
    {
        currFaceState = FaceState.FaceWait;
        currBodyState = BodyState.BodyWait;

        _infraredSource = _sensor.InfraredFrameSource;
        _infraredReader = _infraredSource.OpenReader();
        _infraredReader.FrameArrived += InfraredReader_FrameArrived;

        _bodySource = _sensor.BodyFrameSource;
        _bodyReader = _bodySource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        _faceSource = new HighDefinitionFaceFrameSource(_sensor);
        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;

        _sensor.Open();
    }
}
void OnApplicationQuit()
{
    if (CurrentFaceModel != null)
    {
        CurrentFaceModel.Dispose();
        CurrentFaceModel = null;
    }

    highDefinitionFaceFrameSource = null;

    if (highDefinitionFaceFrameReader != null)
    {
        highDefinitionFaceFrameReader.Dispose();
        highDefinitionFaceFrameReader = null;
    }

    if (_Reader != null)
    {
        _Reader.Dispose();
        _Reader = null;
    }

    if (_Sensor != null)
    {
        if (_Sensor.IsOpen)
        {
            _Sensor.Close();
        }

        _Sensor = null;
    }
}
private void Window_Closing(object sender, System.ComponentModel.CancelEventArgs e)
{
    if (faceModelBuilder != null)
    {
        faceModelBuilder.Dispose();
        faceModelBuilder = null;
    }

    if (hdFaceFrameReader != null)
    {
        hdFaceFrameReader.Dispose();
        hdFaceFrameReader = null;
    }

    if (bodyFrameReader != null)
    {
        bodyFrameReader.Dispose();
        bodyFrameReader = null;
    }

    if (faceModel != null)
    {
        faceModel.Dispose();
        faceModel = null;
    }

    if (kinect != null)
    {
        kinect.Close();
        kinect = null;
    }
}
/// <summary>
/// Initialize the Kinect object.
/// </summary>
private void InitializeHDFace()
{
    this.CurrentBuilderStatus = "Ready To Start Capture";

    this.sensor = KinectSensor.GetDefault();

    this.bodySource = this.sensor.BodyFrameSource;
    this.bodyReader = this.bodySource.OpenReader();
    this.bodyReader.FrameArrived += this.BodyReader_FrameArrived;
    this.bodyCount = this.sensor.BodyFrameSource.BodyCount;

    this.highDefinitionFaceFrameSource = new HighDefinitionFaceFrameSource(this.sensor);
    this.highDefinitionFaceFrameSource.TrackingIdLost += this.HdFaceSource_TrackingIdLost;
    this.highDefinitionFaceFrameReader = this.highDefinitionFaceFrameSource.OpenReader();
    this.highDefinitionFaceFrameReader.FrameArrived += this.HdFaceReader_FrameArrived;

    this.currentFaceModel = new FaceModel();
    this.currentFaceAlignment = new FaceAlignment();
    this.coordinateMapper = this.sensor.CoordinateMapper;

    FaceFrameFeatures faceFrameFeatures =
        FaceFrameFeatures.BoundingBoxInColorSpace
        | FaceFrameFeatures.PointsInColorSpace
        | FaceFrameFeatures.RotationOrientation
        | FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.Glasses
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.RightEyeClosed
        | FaceFrameFeatures.LookingAway
        | FaceFrameFeatures.MouthMoved
        | FaceFrameFeatures.MouthOpen;

    // Create the face frame source with the required face frame features and an initial tracking id of 0.
    this.faceFrameSource = new FaceFrameSource(this.sensor, 0, faceFrameFeatures);

    // Open the corresponding reader.
    this.faceFrameReader = this.faceFrameSource.OpenReader();
    this.faceFrameResult = null;

    // Wire the handler for face frame arrival.
    if (this.faceFrameReader != null)
    {
        this.faceFrameReader.FrameArrived += this.Reader_FaceFrameArrived;
    }

    this.InitializeMesh();
    this.UpdateMesh();

    this.sensor.Open();
}
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    _sensor = KinectSensor.GetDefault();

    if (_sensor != null)
    {
        _bodySource = _sensor.BodyFrameSource;
        _bodyReader = _bodySource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        _faceSource = new HighDefinitionFaceFrameSource(_sensor);
        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;

        _faceModel = new FaceModel();
        _faceAlignment = new FaceAlignment();

        _sensor.Open();

        // Added by Aditya
        _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color);
        _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
    }
}
void Start()
{
    // Unity equivalent of InitializeHDFace().
    theGeometry = new Mesh();

    //SetViewCollectionStatus();

    sensor = KinectSensor.GetDefault();

    bodySource = sensor.BodyFrameSource;
    bodyReader = bodySource.OpenReader();
    bodyReader.FrameArrived += BodyReader_FrameArrived;

    highDefinitionFaceFrameSource = HighDefinitionFaceFrameSource.Create(sensor);
    highDefinitionFaceFrameSource.TrackingIdLost += HdFaceSource_TrackingIdLost;
    highDefinitionFaceFrameReader = highDefinitionFaceFrameSource.OpenReader();
    highDefinitionFaceFrameReader.FrameArrived += HdFaceReader_FrameArrived;

    CurrentFaceModel = FaceModel.Create();
    currentFaceAlignment = FaceAlignment.Create();

    sensor.Open();

    tempAus = new Dictionary<string, float>();
    actorBlendshapeNames = getBlendShapeNames(actorMesh);
}
private void Page_Loaded(object sender, RoutedEventArgs e)
{
    _sensor = KinectSensor.GetDefault();
    _coordinateMapper = _sensor.CoordinateMapper;
    _collectedMeasurements = new List<double>();

    if (_sensor != null)
    {
        _infraredFrameDescription = _sensor.InfraredFrameSource.FrameDescription;
        _infraredBitmap = new WriteableBitmap(_infraredFrameDescription.Width, _infraredFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray32Float, null);
        camera.Source = _infraredBitmap;

        _bodyReader = _sensor.BodyFrameSource.OpenReader();
        _bodyCount = _sensor.BodyFrameSource.BodyCount;
        _bodies = new Body[_bodyCount];
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        _faceModel = new FaceModel();
        _faceAlignment = new FaceAlignment();

        _faceFrameSource = new HighDefinitionFaceFrameSource(_sensor);
        _faceFrameReader = _faceFrameSource.OpenReader();
        _faceFrameReader.FrameArrived += FaceReader_FrameArrived;

        _irReader = _sensor.InfraredFrameSource.OpenReader();
        _irReader.FrameArrived += InfraredReader_FrameArrived;

        _sensor.Open();
    }

    _settingsVM = DevPortalVM.LoadContext(SETTINGS_FILENAME);
    DevPortalGrid.DataContext = _settingsVM;
    _devicePortalClient = new DevPortalHelper(_settingsVM);
}
private void Page_Unloaded(object sender, RoutedEventArgs e)
{
    _settingsVM.SaveContext(SETTINGS_FILENAME);

    if (_faceFrameReader != null)
    {
        this._faceFrameReader.Dispose();
        this._faceFrameReader = null;
    }

    if (_faceFrameSource != null)
    {
        _faceFrameSource = null;
    }

    if (_bodyReader != null)
    {
        _bodyReader.Dispose();
        _bodyReader = null;
    }

    if (_sensor != null)
    {
        _sensor.Close();
    }
}
private static void StartFace()
{
    FaceFrameSource = HighDefinitionFaceFrameSource.Create(sensor);

    if (FaceFrameSource != null)
    {
        faceReader = FaceFrameSource.OpenReader();

        faceModel = FaceModel.Create();
        faceAlignment = FaceAlignment.Create();
        faceGeometry = new Vector[FaceModel.VertexCount];
    }
}
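Once a frame has refreshed faceAlignment, the faceGeometry buffer allocated above can be filled from the model: FaceModel.CalculateVerticesForAlignment returns one CameraSpacePoint per HD face vertex. A minimal sketch; the UpdateFaceGeometry name and the Vector(x, y, z) conversion are assumptions about the snippet's own types:

private static void UpdateFaceGeometry()
{
    // One CameraSpacePoint per vertex, indexed consistently with FaceModel.VertexCount.
    var vertices = faceModel.CalculateVerticesForAlignment(faceAlignment);

    for (int i = 0; i < vertices.Count; i++)
    {
        var v = vertices[i];
        faceGeometry[i] = new Vector(v.X, v.Y, v.Z);  // hypothetical Vector(x, y, z) constructor
    }
}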
public void Evaluate(int SpreadMax)
{
    if (this.FOutGeom[0] == null)
    {
        this.FOutGeom[0] = new DX11Resource<DX11IndexOnlyGeometry>();
        this.FOutFaceVertices[0] = new DX11Resource<IDX11ReadableStructureBuffer>();
        this.FOutFaceUV[0] = new DX11Resource<IDX11ReadableStructureBuffer>();
    }

    if (this.FInvalidateConnect)
    {
        if (this.FInRuntime.PluginIO.IsConnected)
        {
            // Cache the runtime node.
            this.runtime = this.FInRuntime[0];

            if (runtime != null)
            {
                //this.runtime.SkeletonFrameReady += SkeletonReady;
                this.faceFrameSource = new HighDefinitionFaceFrameSource(this.runtime.Runtime);
                this.faceFrameReader = this.faceFrameSource.OpenReader();
                this.faceFrameReader.FrameArrived += this.faceReader_FrameArrived;
                this.faceFrameReader.IsPaused = true;
            }
        }
        else
        {
            //this.runtime.SkeletonFrameReady -= SkeletonReady;
            this.faceFrameReader.FrameArrived -= this.faceReader_FrameArrived;
            this.faceFrameReader.Dispose();
        }

        this.FInvalidateConnect = false;
    }

    if (this.faceFrameSource != null)
    {
        // Fall back to 0 if the pin does not hold a parseable id.
        ulong id;
        ulong.TryParse(this.FInId[0], out id);

        this.faceFrameSource.TrackingId = id;
        this.faceFrameReader.IsPaused = this.FInPaused[0];
    }

    this.FOutPaused[0] = this.faceFrameReader != null ? this.faceFrameReader.IsPaused : true;
}
List<int[]> list_arr_index = new List<int[]>();  // list of index arrays

public MainWindow()
{
    InitializeComponent();

    _sensor = KinectSensor.GetDefault();

    if (_sensor != null)
    {
        // Listen for body data.
        _bodySource = _sensor.BodyFrameSource;
        _bodyReader = _bodySource.OpenReader();
        _bodyReader.FrameArrived += BodyReaderFrameArrived;

        // Listen for HD face data.
        _faceSource = new HighDefinitionFaceFrameSource(_sensor);
        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;

        _faceModel = new FaceModel();
        _faceAlignment = new FaceAlignment();

        // Multi-frame reader for depth and infrared.
        this.multiFrameReader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Depth | FrameSourceTypes.Infrared);

        // Frame descriptions for the IR and depth streams.
        infraredFrameDescription = _sensor.InfraredFrameSource.FrameDescription;
        depthFrameDescription = _sensor.DepthFrameSource.FrameDescription;

        infraredRect = new Int32Rect(0, 0, infraredFrameDescription.Width, infraredFrameDescription.Height);
        depthRect = new Int32Rect(0, 0, depthFrameDescription.Width, depthFrameDescription.Height);

        // Attach the multi-stream event handler.
        multiFrameReader.MultiSourceFrameArrived += ReaderMultiFrameArrived;

        // -----------------------------------------
        // Display settings for the IR frame
        // -----------------------------------------
        // Create WriteableBitmaps for rendering.
        infraredBitmap = new WriteableBitmap(this.infraredFrameDescription.Width, this.infraredFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray16, null);
        depthBitmap = new WriteableBitmap(this.depthFrameDescription.Width, this.depthFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray16, null);

        // Bind the WriteableBitmap to the WPF Image control's source.
        //ColorImage.Source = this.infraredBitmap; // does not work here

        // Start tracking.
        _sensor.Open();
    }
}
/// <summary>
/// Constructor. Runs exactly once at startup.
/// </summary>
public MainWindow()
{
    InitializeComponent();

    // Acquire a reference to the Kinect itself.
    this.kinect = KinectSensor.GetDefault();

    // Set the image format to read and create the reader.
    this.colorImageFormat = ColorImageFormat.Bgra;
    this.colorFrameDescription = this.kinect.ColorFrameSource.CreateFrameDescription(this.colorImageFormat);
    this.colorFrameReader = this.kinect.ColorFrameSource.OpenReader();
    this.colorFrameReader.FrameArrived += ColorFrameReader_FrameArrived;

    // For face rotation detection.
    //this.faceFrameSource = new FaceFrameSource(this.kinect, 0, );

    if (this.kinect != null)
    {
        _bodySource = kinect.BodyFrameSource;
        _bodyReader = _bodySource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        _faceSource = new HighDefinitionFaceFrameSource(kinect);
        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;

        _faceModel = new FaceModel();
        _faceAlignment = new FaceAlignment();
    }

    // Start the Kinect.
    //aviWriter.FrameRate = 30;
    //aviWriter.Open(@"c:\users\abelab\desktop\log\test.avi", 1920, 1080);
    //writer.Open(@"c:\users\abelab\desktop\log\test.avi", 1920, 1080, 30, VideoCodec.MPEG4);

    /*
    for (int i = 0; i < 1347; i++)
    {
        sw.Write(i + ",,,,,,");
    }
    sw.WriteLine();

    for (int i = 0; i < 1347; i++)
    {
        sw.Write("X(m),Y(m),Z(m),X(pixel),Y(pixel),,");
    }
    sw.WriteLine();
    */

    this.kinect.Open();
}
protected virtual void OnApplicationQuit()
{
    if (bodyFrameReader != null)
    {
        bodyFrameReader.Dispose();
        bodyFrameReader = null;
    }

    if (colorFrameReader != null)
    {
        colorFrameReader.Dispose();
        colorFrameReader = null;
    }

    if (depthFrameReader != null)
    {
        depthFrameReader.Dispose();
        depthFrameReader = null;
    }

    if (infraredFrameReader != null)
    {
        infraredFrameReader.Dispose();
        infraredFrameReader = null;
    }

    if (bodyIndexFrameReader != null)
    {
        bodyIndexFrameReader.Dispose();
        bodyIndexFrameReader = null;
    }

    if (faceFrameReader != null)
    {
        faceFrameReader.Dispose();
        faceFrameReader = null;
    }

    if (frameView != null)
    {
        frameView.FrameTexture = null;
    }

    if (KinectSensor != null && KinectSensor.IsOpen)
    {
        KinectSensor.Close();
        KinectSensor = null;
    }
}
/// <summary>
/// Initialize the Kinect object.
/// </summary>
private void InitializeHDFace()
{
    this.CurrentBuilderStatus = "Ready To Start Capture";

    this.sensor = KinectSensor.GetDefault();

    this.bodySource = this.sensor.BodyFrameSource;
    this.bodyReader = this.bodySource.OpenReader();
    this.bodyReader.FrameArrived += this.BodyReader_FrameArrived;

    // Set the maximum number of bodies that can be tracked by Kinect.
    this.bodyCount = this.sensor.BodyFrameSource.BodyCount;

    // Allocate storage for the body objects.
    this.bodies = new Body[this.bodyCount];

    // Specify the required face frame results.
    FaceFrameFeatures faceFrameFeatures =
        FaceFrameFeatures.BoundingBoxInColorSpace
        | FaceFrameFeatures.PointsInColorSpace
        | FaceFrameFeatures.RotationOrientation
        | FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.Glasses
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.RightEyeClosed
        | FaceFrameFeatures.LookingAway
        | FaceFrameFeatures.MouthMoved
        | FaceFrameFeatures.MouthOpen;

    // Create a face frame source + reader to track each face in the FOV.
    this.faceFrameSource = new FaceFrameSource(this.sensor, 0, faceFrameFeatures);
    this.faceFrameReader = faceFrameSource.OpenReader();
    this.faceFrameReader.FrameArrived += this.Reader_FaceFrameArrived;

    this.highDefinitionFaceFrameSource = new HighDefinitionFaceFrameSource(this.sensor);
    this.highDefinitionFaceFrameSource.TrackingIdLost += this.HdFaceSource_TrackingIdLost;
    this.highDefinitionFaceFrameReader = this.highDefinitionFaceFrameSource.OpenReader();
    this.highDefinitionFaceFrameReader.FrameArrived += this.HdFaceReader_FrameArrived;

    this.currentFaceModel = new FaceModel();
    this.currentFaceAlignment = new FaceAlignment();

    this.InitializeMesh();
    this.UpdateMesh();

    this.sensor.Open();
}
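Several snippets above pair the HD face reader with a feature-based FaceFrameSource but never show its handler. A minimal sketch of Reader_FaceFrameArrived under the field names above; FaceFrameResult exposes each requested feature through its FaceProperties dictionary (requires `using System.Diagnostics;` for the debug output):

private void Reader_FaceFrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null || frame.FaceFrameResult == null) return;

        this.faceFrameResult = frame.FaceFrameResult;

        // Each requested feature is reported as a DetectionResult (Yes/No/Maybe/Unknown).
        DetectionResult happy = this.faceFrameResult.FaceProperties[FaceProperty.Happy];
        DetectionResult mouthOpen = this.faceFrameResult.FaceProperties[FaceProperty.MouthOpen];

        Debug.WriteLine("Happy: {0}, MouthOpen: {1}", happy, mouthOpen);
    }
}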
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    _sensor = KinectSensor.GetDefault();

    if (_sensor != null)
    {
        // Student task: initialization phase.

        // Get the body source, body reader, and handler for body frame events.
        _bodySource = _sensor.BodyFrameSource;
        _bodyReader = _bodySource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        // Get the HD face source, face reader, and handler for face frame events.
        _faceSource = new HighDefinitionFaceFrameSource(_sensor);
        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;

        // Add the reader and handler for facial expression frames.
        _faceFrameSource = new FaceFrameSource(this._sensor, 0,
            FaceFrameFeatures.BoundingBoxInColorSpace
            | FaceFrameFeatures.FaceEngagement
            | FaceFrameFeatures.Glasses
            | FaceFrameFeatures.Happy
            | FaceFrameFeatures.LeftEyeClosed
            | FaceFrameFeatures.MouthOpen
            | FaceFrameFeatures.PointsInColorSpace
            | FaceFrameFeatures.RightEyeClosed);
        _faceFrameReader = this._faceFrameSource.OpenReader();
        _faceFrameReader.FrameArrived += FaceFrameReader_FrameArrived;

        // Create the FaceModel and FaceAlignment.
        _faceModel = new FaceModel();
        _faceAlignment = new FaceAlignment();

        // Open the sensor.
        _sensor.Open();

        // Attach the multi-source reader.
        multiSourceReader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body);
        multiSourceReader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
    }
}
public override bool StartSensor()
{
    _bodySource = _kinect.BodyFrameSource;
    _bodyReader = _bodySource.OpenReader();
    _bodyReader.FrameArrived += BodyReader_FrameArrived;

    _hdFaceFrameSource = new HighDefinitionFaceFrameSource(_kinect);
    _hdFaceFrameSource.TrackingIdLost += HdFaceSource_TrackingIdLost;
    _hdFaceFrameReader = _hdFaceFrameSource.OpenReader();
    _hdFaceFrameReader.FrameArrived += HdFaceReader_FrameArrived;

    _currentFaceModel = new FaceModel();
    _currentFaceAlignment = new FaceAlignment();

    InitializeMesh();
    UpdateMesh();

    // Text style for our jig.
    _style = new TextStyle();
    _style.Font = new FontDescriptor("standard.shx", false, false, 0, 0);
    _style.TextSize = 10;

    var res = base.StartSensor();

    if (res)
    {
        if (_faceModelBuilder != null)
        {
            _faceModelBuilder.Dispose();
        }

        _faceModelBuilder = _hdFaceFrameSource.OpenModelBuilder(FaceModelBuilderAttributes.None);
        _faceModelBuilder.BeginFaceDataCollection();
        _faceModelBuilder.CollectionCompleted += HdFaceBuilder_CollectionCompleted;
    }

    return res;
}
private void Dispose(bool disposing)
{
    if (Disposed)
    {
        return;
    }

    if (disposing)
    {
        // Free any other managed objects here...
        ClosePipes();

        Client.CloseMainWindow();
        Client.Close();
        Client.Dispose();
        Client = null;

        SpeechSynthesizer.SpeakAsyncCancelAll();
        SpeechSynthesizer.Dispose();
        SpeechSynthesizer = null;

        SpeechRecognitionEngine.RecognizeAsyncStop();
        SpeechRecognitionEngine.Dispose();
        SpeechRecognitionEngine = null;

        HighDefinitionFaceFrameReader.Dispose();
        HighDefinitionFaceFrameReader = null;
        HighDefinitionFaceFrameSource = null;

        BodyFrameReader.Dispose();
        BodyFrameReader = null;
        BodyFrameSource = null;

        KinectSensor.Close();
        KinectSensor = null;

        Form.Close();
        Form.Dispose();
        Form = null;
    }

    // Free any unmanaged objects here...
    Disposed = true;
}
protected virtual void Awake()
{
    KinectSensor = KinectSensor.GetDefault();

    if (KinectSensor != null)
    {
        bodyFrameReader = KinectSensor.BodyFrameSource.OpenReader();
        colorFrameReader = KinectSensor.ColorFrameSource.OpenReader();
        depthFrameReader = KinectSensor.DepthFrameSource.OpenReader();
        infraredFrameReader = KinectSensor.InfraredFrameSource.OpenReader();
        bodyIndexFrameReader = KinectSensor.BodyIndexFrameSource.OpenReader();

        faceFrameSource = HighDefinitionFaceFrameSource.Create(KinectSensor);
        faceFrameReader = faceFrameSource.OpenReader();

        KinectSensor.Open();
    }
}
// Initializes the Kinect objects for HD face tracking.
private void InitializeHDFace()
{
    this.sensor = KinectSensor.GetDefault();

    this.bodySource = this.sensor.BodyFrameSource;
    this.bodyReader = this.bodySource.OpenReader();
    this.bodyReader.FrameArrived += this.BodyReader_FrameArrived;

    this.highDefinitionFaceFrameSource = new HighDefinitionFaceFrameSource(this.sensor);
    this.highDefinitionFaceFrameSource.TrackingIdLost += this.HdFaceSource_TrackingIdLost;
    this.highDefinitionFaceFrameReader = this.highDefinitionFaceFrameSource.OpenReader();
    this.highDefinitionFaceFrameReader.FrameArrived += this.HdFaceReader_FrameArrived;

    this.currentFaceModel = new FaceModel();
    this.currentFaceAlignment = new FaceAlignment();

    this.sensor.Open();
}
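Nearly every snippet above subscribes an HdFaceReader_FrameArrived (or FaceReader_FrameArrived) handler without showing it. A minimal sketch of its usual shape, assuming the currentFaceAlignment field from the snippet above and an UpdateMesh() that redraws from the refreshed alignment:

private void HdFaceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        // Frames can be null or not yet tracking a face; skip those.
        if (frame == null || !frame.IsFaceTracked) return;

        // Refresh the alignment in place; the face model itself stays fixed.
        frame.GetAndRefreshFaceAlignmentResult(this.currentFaceAlignment);

        // Recompute the HD face vertices for the new alignment and redraw.
        this.UpdateMesh();
    }
}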