        /// <summary>
        /// Initializes a new instance of the KinectFacialRecognitionEngine class
        /// </summary>
        public KinectFacialRecognitionEngine(KinectSensor kinect, params IRecognitionProcessor[] processors)
        {
            this.Kinect = kinect;

            this.ProcessingEnabled = true;
            this.Processors = processors;

            if (this.Processors == null || !this.Processors.Any())
                throw new ArgumentException("Please pass in at least one recognition processor!");

            this.bodies = new Body[kinect.BodyFrameSource.BodyCount];
            this.colorImageBuffer = new byte[4 * kinect.ColorFrameSource.FrameDescription.LengthInPixels];
            this.imageWidth = kinect.ColorFrameSource.FrameDescription.Width;
            this.imageHeight = kinect.ColorFrameSource.FrameDescription.Height;

            this.msReader = this.Kinect.OpenMultiSourceFrameReader(FrameSourceTypes.Body | FrameSourceTypes.Color);
            this.msReader.MultiSourceFrameArrived += this.MultiSourceFrameArrived;

            this.faceSource = new HighDefinitionFaceFrameSource(kinect);
            this.faceSource.TrackingQuality = FaceAlignmentQuality.High;
            this.faceReader = this.faceSource.OpenReader();
            this.faceReader.FrameArrived += this.FaceFrameArrived;

            this.recognizerWorker = new BackgroundWorker();
            this.recognizerWorker.DoWork += this.RecognizerWorker_DoWork;
            this.recognizerWorker.RunWorkerCompleted += this.RecognizerWorker_RunWorkerCompleted;
        }
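
For context, the constructor above only wires up events. A handler attached to faceReader.FrameArrived typically refreshes a FaceAlignment on every HD face frame. The following is a minimal sketch, not the engine's actual implementation; the faceAlignment field and the null/IsFaceTracked guards are assumptions.

        private void FaceFrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            using (var frame = e.FrameReference.AcquireFrame())
            {
                // Frames may be null, and IsFaceTracked stays false until a body is assigned to the source.
                if (frame == null || !frame.IsFaceTracked)
                    return;

                // Refresh the cached alignment in place; downstream processors can read it afterwards.
                frame.GetAndRefreshFaceAlignmentResult(this.faceAlignment);
            }
        }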
Example #2
        public MainWindow()
        {
            InitializeComponent();
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                // Listen for body data.
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                _colorReader = _sensor.ColorFrameSource.OpenReader();
                _colorReader.FrameArrived += ColorReader_FrameArrived;

                // Listen for HD face data.
                _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceSourceSub = new HighDefinitionFaceFrameSource(_sensor);
                // _faceSource.TrackingIdLost += OnTrackingIdLost;
                _faceReader = _faceSource.OpenReader();
                _faceReaderSub = _faceSourceSub.OpenReader();

                _faceReader.FrameArrived += FaceReader_FrameArrived;
                _faceReaderSub.FrameArrived += FaceReaderSub_FrameArrived;

                _faceModel = new FaceModel();
                _faceAlignment = new FaceAlignment();
                _faceAlignmentSub = new FaceAlignment();
                // Start tracking!        
                _sensor.Open();
            }
        }
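
The HD face source created above produces frames only after it is given a body's tracking id. Below is a minimal sketch of what the BodyReader_FrameArrived handler subscribed in the constructor typically does; the body-selection logic is an assumption, not taken from the original project.

        private void BodyReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            using (var frame = e.FrameReference.AcquireFrame())
            {
                if (frame == null)
                    return;

                Body[] bodies = new Body[frame.BodyCount];
                frame.GetAndRefreshBodyData(bodies);

                // Hand the first tracked body to the HD face source if it is not already tracking one.
                foreach (Body body in bodies)
                {
                    if (body != null && body.IsTracked && !_faceSource.IsTrackingIdValid)
                    {
                        _faceSource.TrackingId = body.TrackingId;
                        break;
                    }
                }
            }
        }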
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                _faceSource = new HighDefinitionFaceFrameSource(_sensor);

                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;

                _faceModel = new FaceModel();
                _faceAlignment = new FaceAlignment();

                _sensor.Open();
            }
        }
        public MainPage()
        {
            InitializeComponent();

            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                _faceSource = new HighDefinitionFaceFrameSource(_sensor);

                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;

                _faceModel = new FaceModel();
                _faceAlignment = new FaceAlignment();

                _sensor.Open();
            }
        }
Example #5
        public void Evaluate(int SpreadMax)
        {
            if (this.FInvalidateConnect)
            {
                if (this.FInRuntime.PluginIO.IsConnected)
                {
                    //Cache runtime node
                    this.runtime = this.FInRuntime[0];

                    if (runtime != null)
                    {
                        faceSrc = new Microsoft.Kinect.Face.FaceFrameSource(this.runtime.Runtime);
                        faceSrc.FaceFrameFeatures = FaceFrameFeatures.FaceEngagement
                            | FaceFrameFeatures.LeftEyeClosed
                            | FaceFrameFeatures.LookingAway
                            | FaceFrameFeatures.Happy
                            | FaceFrameFeatures.MouthMoved
                            | FaceFrameFeatures.MouthOpen
                            | FaceFrameFeatures.RightEyeClosed
                            | FaceFrameFeatures.RotationOrientation;
                        faceReader = faceSrc.OpenReader();
                        faceReader.FrameArrived += this.faceReader_FrameArrived;

                        hdSrc = new HighDefinitionFaceFrameSource(this.runtime.Runtime);
                        hdRead = hdSrc.OpenReader();
                        hdRead.FrameArrived += hdRead_FrameArrived;
                    }
                }
                else
                {
                    if (faceSrc != null)
                    {
                        faceReader.FrameArrived -= faceReader_FrameArrived;
                        faceReader.Dispose();
                    }
                }

                this.FInvalidateConnect = false;
            }
        }
        /// <summary>
        /// Initializes the Kinect sensor and the variables used for data acquisition
        /// </summary>
        private void Initialize()
        {
            // Get the default Kinect sensor
            this.kinect = KinectSensor.GetDefault();

            if (kinect == null) return;

            // Create a FrameReader that delivers Body (skeleton) data from the Kinect sensor
            reader = kinect.OpenMultiSourceFrameReader(FrameSourceTypes.Body);
            reader.MultiSourceFrameArrived += OnMultiSourceFrameArrived;

            // Declare the source and FrameReader for detailed (HD) face tracking
            // 1st person
            this.hdFaceFrameSource = new HighDefinitionFaceFrameSource(this.kinect);
            this.hdFaceFrameSource.TrackingIdLost += this.OnTrackingIdLost;
            
            this.hdFaceFrameReader = this.hdFaceFrameSource.OpenReader();
            this.hdFaceFrameReader.FrameArrived += this.OnFaceFrameArrived;

            this.faceModel = new FaceModel();
            this.faceAlignment = new FaceAlignment();

            
            this._colorReader = this.kinect.ColorFrameSource.OpenReader();
            this._colorReader.FrameArrived += ColorReader_FrameArrived;
            // Update the views
            InitializeMesh();
            UpdateMesh();

            // Start the sensor
            kinect.Open();
        }
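
UpdateMesh() is called above but not shown in this snippet. One possible body is sketched below, assuming a WPF MeshGeometry3D field named theGeometry (System.Windows.Media.Media3D) whose Positions collection was sized in InitializeMesh(); both names are assumptions for illustration.

        private void UpdateMesh()
        {
            // CalculateVerticesForAlignment maps the current alignment onto the HD face mesh vertices in camera space.
            var vertices = this.faceModel.CalculateVerticesForAlignment(this.faceAlignment);

            for (int i = 0; i < vertices.Count; i++)
            {
                var vertex = vertices[i];
                this.theGeometry.Positions[i] = new Point3D(vertex.X, vertex.Y, vertex.Z);
            }
        }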
        public async Task StopAsync()
        {
            if (_isStopped)
                return;

            System.Diagnostics.Debug.WriteLine(">>> StopAsync (queue size {0})", _recordQueue.Count);

            _isStarted = false;
            _isStopped = true;

            if (_bodyReader != null)
            {
                _bodyReader.FrameArrived -= _bodyReader_FrameArrived;
                _bodyReader.Dispose();
                _bodyReader = null;
            }

            if (_colorReader != null)
            {
                _colorReader.FrameArrived -= _colorReader_FrameArrived;
                _colorReader.Dispose();
                _colorReader = null;
            }

            if (_depthReader != null)
            {
                _depthReader.FrameArrived -= _depthReader_FrameArrived;
                _depthReader.Dispose();
                _depthReader = null;
            }
            if (_faceReader != null)
            {
                _faceReader.FrameArrived -= _faceReader_FrameArrived;
                _faceReader.Dispose();
                _faceReader = null;
            }

            if (_infraredReader != null)
            {
                _infraredReader.FrameArrived -= _infraredReader_FrameArrived;
                _infraredReader.Dispose();
                _infraredReader = null;
            }

            try
            {
                await _processFramesTask;
            }
            catch
            {
                System.Diagnostics.Debug.WriteLine("!!! Process Canceled (in StopAsync)");
            }
            _processFramesTask = null;

            await CloseWriterAsync();

            System.Diagnostics.Debug.WriteLine("<<< StopAsync (DONE!)");
        }
        public void Start()
        {
            if (_isStarted)
                return;

            if (_isStopped)
                throw new InvalidOperationException("Cannot restart a recording after it has been stopped");

            if (_sensor != null)
            {
                if (EnableBodyRecorder)
                {
                    _bodyReader = _sensor.BodyFrameSource.OpenReader();
                    _bodyReader.FrameArrived += _bodyReader_FrameArrived;
                }

                if (EnableColorRecorder)
                {
                    _colorReader = _sensor.ColorFrameSource.OpenReader();
                    _colorReader.FrameArrived += _colorReader_FrameArrived;
                }

                if (EnableDepthRecorder)
                {
                    _depthReader = _sensor.DepthFrameSource.OpenReader();
                    _depthReader.FrameArrived += _depthReader_FrameArrived;
                }

                if (EnableFaceRecorder)
                {
                    _faceReader = _faceSource.OpenReader();
                    _faceReader.FrameArrived += _faceReader_FrameArrived;
                }

                if (EnableInfraredRecorder)
                {
                    _infraredReader = _sensor.InfraredFrameSource.OpenReader();
                    _infraredReader.FrameArrived += _infraredReader_FrameArrived;
                }

                if (!_sensor.IsOpen)
                    _sensor.Open();

            }

            _isStarted = true;

            try
            {
                _writerSemaphore.Wait();

                // initialize and write file metadata
                var metadata = new RMetadata()
                {
                    Version = this.GetType().GetTypeInfo().Assembly.GetName().Version.ToString(),
                    //ColorCodecId = this.ColorRecorderCodec.CodecId
                };
                if (_sensor != null)
                {
                    //metadata.DepthCameraIntrinsics = _sensor.CoordinateMapper.GetDepthCameraIntrinsics();
                    //metadata.DepthFrameToCameraSpaceTable = _sensor.CoordinateMapper.GetDepthFrameToCameraSpaceTable();
                }
                else
                {
                    var sensor = KinectSensor.GetDefault();
                    if (sensor != null)
                    {
                        //metadata.DepthCameraIntrinsics = sensor.CoordinateMapper.GetDepthCameraIntrinsics();
                        //metadata.DepthFrameToCameraSpaceTable = sensor.CoordinateMapper.GetDepthFrameToCameraSpaceTable();
                    }
                }
                _writer.Write(JsonConvert.SerializeObject(metadata));
            }
            catch (Exception ex)
            {
                // TODO: Change to log the error
                System.Diagnostics.Debug.WriteLine(ex);
            }
            finally
            {
                _writerSemaphore.Release();
            }

            _processFramesTask = ProcessFramesAsync();
        }
Example #9
        /// <summary>
        /// Initialize Kinect object
        /// </summary>
        public void InitializeHDFace()
        {
            this.sensor = KinectSensor.GetDefault();
            this.bodySource = this.sensor.BodyFrameSource;
            this.bodyReader = this.bodySource.OpenReader();
            this.bodyReader.FrameArrived += this.BodyReader_FrameArrived;

            this.highDefinitionFaceFrameSource = new HighDefinitionFaceFrameSource(this.sensor);
            this.highDefinitionFaceFrameSource.TrackingIdLost += this.HdFaceSource_TrackingIdLost;

            this.highDefinitionFaceFrameReader = this.highDefinitionFaceFrameSource.OpenReader();
            this.highDefinitionFaceFrameReader.FrameArrived += this.HdFaceReader_FrameArrived;

            this.highDefinitionFaceFrameSource.TrackingIdLost += (x, y) => Log.LogMessage("Lost tracking id " + y.TrackingId);

            this.currentFaceModel = new FaceModel();

            this.currentFaceAlignment = new FaceAlignment();

            this.sensor.Open();
        }
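
A minimal sketch of the HdFaceSource_TrackingIdLost handler wired above; resetting the alignment is an assumption about the surrounding application, the only required behavior is that the body reader later assigns a new TrackingId.

        private void HdFaceSource_TrackingIdLost(object sender, TrackingIdLostEventArgs e)
        {
            // The source stops producing frames until a new TrackingId is assigned from the body reader.
            this.currentFaceAlignment = new FaceAlignment();
        }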
Example #10
File: Server.cs  Project: Zerseu/Reflecta
    public Server()
    {
        Form = new CustomPerPixelAlphaForm();
        FormSetProperties();
        FormDock();
        Form.Show();

        var clientBuildDirectory = Environment.CurrentDirectory + "\\..\\..\\..\\..\\..\\Reflecta.Client\\bin";
        var clientStartInfo = new ProcessStartInfo
        {
            FileName = clientBuildDirectory + "\\Client.exe",
            WorkingDirectory = clientBuildDirectory,
            WindowStyle = ProcessWindowStyle.Minimized
        };
        Client = Process.Start(clientStartInfo);

        OpenPipes();

        SpeechSynthesizer = new SpeechSynthesizer();
        SpeechSynthesizer.SelectVoiceByHints(VoiceGender.Female);
        SpeechSynthesizer.SpeakStarted += SpeechSynthesizer_SpeakStarted;
        SpeechSynthesizer.VisemeReached += SpeechSynthesizer_VisemeReached;
        SpeechSynthesizer.SpeakCompleted += SpeechSynthesizer_SpeakCompleted;

        SpeechRecognitionEngine = new SpeechRecognitionEngine();
        SpeechRecognitionEngine.UnloadAllGrammars();
        SpeechRecognitionEngine.LoadGrammar(new Grammar(new GrammarBuilder(KnownCommands)));
        SpeechRecognitionEngine.SpeechRecognized += SpeechRecognitionEngine_SpeechRecognized;
        SpeechRecognitionEngine.SetInputToDefaultAudioDevice();
        SpeechRecognitionEngine.RecognizeAsync(RecognizeMode.Multiple);

        KinectSensor = KinectSensor.GetDefault();
        KinectSensor.Open();

        BodyFrameSource = KinectSensor.BodyFrameSource;
        BodyFrameReader = BodyFrameSource.OpenReader();
        BodyFrameReader.FrameArrived += BodyFrameReader_FrameArrived;
        Bodies = null;
        BodyDESP = new DESPQuaternion[(int) MoCapKinectBone.Count];
        for (var i = 0; i < (int) MoCapKinectBone.Count; i++)
            BodyDESP[i] = new DESPQuaternion();

        HighDefinitionFaceFrameSource = new HighDefinitionFaceFrameSource(KinectSensor);
        HighDefinitionFaceFrameSource.TrackingQuality = FaceAlignmentQuality.High;
        HighDefinitionFaceFrameReader = HighDefinitionFaceFrameSource.OpenReader();
        HighDefinitionFaceFrameReader.FrameArrived += HighDefinitionFaceFrameReader_FrameArrived;
        FaceAlignment = new FaceAlignment();

        FaceDESP = new DESPQuaternion();
        FaceExpressionDESP = new DESPFloat[(int) MoCapKinectFacialExpression.Count];
        for (var i = 0; i < (int) MoCapKinectFacialExpression.Count; i++)
            FaceExpressionDESP[i] = new DESPFloat();
    }
Example #11
File: Server.cs  Project: Zerseu/Reflecta
    private void Dispose(bool disposing)
    {
        if (Disposed)
            return;

        if (disposing)
        {
            //Free any other managed objects here...

            ClosePipes();

            Client.CloseMainWindow();
            Client.Close();
            Client.Dispose();
            Client = null;

            SpeechSynthesizer.SpeakAsyncCancelAll();
            SpeechSynthesizer.Dispose();
            SpeechSynthesizer = null;

            SpeechRecognitionEngine.RecognizeAsyncStop();
            SpeechRecognitionEngine.Dispose();
            SpeechRecognitionEngine = null;

            HighDefinitionFaceFrameReader.Dispose();
            HighDefinitionFaceFrameReader = null;
            HighDefinitionFaceFrameSource = null;

            BodyFrameReader.Dispose();
            BodyFrameReader = null;
            BodyFrameSource = null;

            KinectSensor.Close();
            KinectSensor = null;

            Form.Close();
            Form.Dispose();
            Form = null;
        }

        //Free any unmanaged objects here...

        Disposed = true;
    }
        /// <summary>
        /// Initialize Kinect object
        /// </summary>
        private void InitializeHDFace()
        {
            this.CurrentBuilderStatus = "Ready To Start Capture";

            this.sensor = KinectSensor.GetDefault();
            this.bodySource = this.sensor.BodyFrameSource;
            this.bodyReader = this.bodySource.OpenReader();
            this.bodyReader.FrameArrived += this.BodyReader_FrameArrived;

            this.highDefinitionFaceFrameSource = new HighDefinitionFaceFrameSource(this.sensor);
            this.highDefinitionFaceFrameSource.TrackingIdLost += this.HdFaceSource_TrackingIdLost;

            this.highDefinitionFaceFrameReader = this.highDefinitionFaceFrameSource.OpenReader();
            this.highDefinitionFaceFrameReader.FrameArrived += this.HdFaceReader_FrameArrived;

            this.currentFaceModel = new FaceModel();
            this.currentFaceAlignment = new FaceAlignment();

            this.InitializeMesh();
            this.UpdateMesh();

            this.sensor.Open();
            Console.Write("\n\n******************************************************\n" + "Command, Issued_TS, Checked_TS, Done_TS, Errors\n");
        }
Example #13
        public void Evaluate(int SpreadMax)
        {
            if (this.FOutGeom[0] == null)
            {
                this.FOutGeom[0] = new DX11Resource<DX11IndexOnlyGeometry>();
                this.FOutFaceVertices[0] = new DX11Resource<IDX11ReadableStructureBuffer>();
                this.FOutFaceUV[0] = new DX11Resource<IDX11ReadableStructureBuffer>();
            }

            if (this.FInvalidateConnect)
            {
                if (this.FInRuntime.PluginIO.IsConnected)
                {
                    //Cache runtime node
                    this.runtime = this.FInRuntime[0];

                    if (runtime != null)
                    {
                        //this.runtime.SkeletonFrameReady += SkeletonReady;
                        this.faceFrameSource = new HighDefinitionFaceFrameSource(this.runtime.Runtime);
                        this.faceFrameReader = this.faceFrameSource.OpenReader();
                        this.faceFrameReader.FrameArrived += this.faceReader_FrameArrived;
                        this.faceFrameReader.IsPaused = true;
                    }
                }
                else
                {
                    //this.runtime.SkeletonFrameReady -= SkeletonReady;
                    this.faceFrameReader.FrameArrived -= this.faceReader_FrameArrived;
                    this.faceFrameReader.Dispose();

                }

                this.FInvalidateConnect = false;
            }

            if (this.faceFrameSource != null)
            {
                ulong id = 0;
                try
                {
                    id = ulong.Parse(this.FInId[0]);
                }
                catch
                {

                }
                this.faceFrameSource.TrackingId = id;
                this.faceFrameReader.IsPaused = this.FInPaused[0];
            }

            this.FOutPaused[0] = this.faceFrameReader != null ? this.faceFrameReader.IsPaused : true;
        }
        /// <summary>
        /// Initializes the Kinect object
        /// </summary>
        private void InitializeHDFace()
        {
            this.sensor = KinectSensor.GetDefault();
            this.bodySource = this.sensor.BodyFrameSource;
            this.bodyReader = this.bodySource.OpenReader();
            this.bodyReader.FrameArrived += this.BodyReader_FrameArrived;

            this.highDefinitionFaceFrameSource = new HighDefinitionFaceFrameSource(this.sensor);
            this.highDefinitionFaceFrameSource.TrackingIdLost += this.HdFaceSource_TrackingIdLost;

            this.highDefinitionFaceFrameReader = this.highDefinitionFaceFrameSource.OpenReader();
            this.highDefinitionFaceFrameReader.FrameArrived += this.HdFaceReader_FrameArrived;

            this.currentFaceModel = new FaceModel();
            this.currentFaceAlignment = new FaceAlignment();

            this.sensor.Open();
        }
Example #15
        private void InitializeHDFace()
        {
            hdFaceFrameSource = new HighDefinitionFaceFrameSource( kinect );
            if ( hdFaceFrameSource==null ) {
                throw new Exception( "Cannot create HD Face Frame Source" );
            }
            hdFaceFrameReader = hdFaceFrameSource.OpenReader();
            hdFaceFrameReader.FrameArrived += hdFaceFrameReader_FrameArrived;
            faceModel = new FaceModel();
            faceAlignment = new FaceAlignment();

            FaceModelBuilderAttributes attributes = FaceModelBuilderAttributes.None;
            faceModelBuilder = hdFaceFrameSource.OpenModelBuilder( attributes );
            if ( faceModelBuilder==null ) {
                throw new Exception( "Cannot open Face Model Builder" );
            }
            faceModelBuilder.BeginFaceDataCollection();
            faceModelBuilder.CollectionCompleted += faceModelBuilder_CollectionCompleted;
        }
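
A minimal sketch of the faceModelBuilder_CollectionCompleted handler registered above. Producing the fitted model via e.ModelData.ProduceFaceModel() is the standard Kinect Face API pattern; the unsubscribe and disposal shown here are assumptions about the rest of the application.

        private void faceModelBuilder_CollectionCompleted(object sender, FaceModelBuilderCollectionCompletedEventArgs e)
        {
            // ProduceFaceModel() returns the personalized FaceModel once enough frames have been collected.
            faceModel = e.ModelData.ProduceFaceModel();

            faceModelBuilder.CollectionCompleted -= faceModelBuilder_CollectionCompleted;
            faceModelBuilder.Dispose();
            faceModelBuilder = null;
        }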
Example #16
 private void Window_Closing( object sender, System.ComponentModel.CancelEventArgs e )
 {
     if ( faceModelBuilder != null ) {
         faceModelBuilder.Dispose();
         faceModelBuilder = null;
     }
     if ( hdFaceFrameReader != null ) {
         hdFaceFrameReader.Dispose();
         hdFaceFrameReader = null;
     }
     if ( bodyFrameReader != null ) {
         bodyFrameReader.Dispose();
         bodyFrameReader = null;
     }
     if ( faceModel!=null ) {
         faceModel.Dispose();
         faceModel = null;
     }
     if ( kinect != null ) {
         kinect.Close();
         kinect = null;
     }
 }
Example #17
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            #region Interface

            ColorRButton.IsChecked = true;
            RecordButton.IsEnabled = false;
            BodyCheckBox.IsEnabled = false;
            ColorCheckBox.IsEnabled = false;
            DepthCheckBox.IsEnabled = false;
            InfraredCheckBox.IsEnabled = false;
            FaceCheckBox.IsEnabled = false;
            AudioCheckBox.IsEnabled = false;

            #endregion

            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _sensor.Open();
                _bodies = new List<CustomBody>();

                _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceModel = new FaceModel();
                _faceAlignment = new FaceAlignment();

                _bodyReader = _sensor.BodyFrameSource.OpenReader();
                _bodyReader.FrameArrived += _bodyReader_FrameArrived;

                _colorReader = _sensor.ColorFrameSource.OpenReader();
                _colorReader.FrameArrived += _colorReader_FrameArrived;

                _depthReader = _sensor.DepthFrameSource.OpenReader();
                _depthReader.FrameArrived += _depthReader_FrameArrived;

                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += _faceReader_FrameArrived;

                _infraredReader = _sensor.InfraredFrameSource.OpenReader();
                _infraredReader.FrameArrived += _infraredReader_FrameArrived;


            }
        }
Example #18
 /// <summary>
 /// Constructor
 /// </summary>
 /// <param name="sensor">Kinect sensor</param>
 public SingleHdFaceProcessor(KinectSensor sensor)
 {
     this.frameSource = new HighDefinitionFaceFrameSource(sensor);
     this.framereader = this.frameSource.OpenReader();
     this.framereader.FrameArrived += this.FrameArrived;
 }
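
Hypothetical companion members (not part of the original snippet) showing how a caller might attach this processor to a body and pause it, using the TrackingId and IsPaused members exposed by the Kinect Face API.

 public void SetTrackingId(ulong trackingId)
 {
     // The HD face source only produces frames for the body with this tracking id.
     this.frameSource.TrackingId = trackingId;
     this.framereader.IsPaused = false;
 }

 public void Pause()
 {
     this.framereader.IsPaused = true;
 }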
Example #19
 private void Window_Closed(object sender, EventArgs e)
 {
     if (_bodyReader != null)
     {
         _bodyReader.Dispose();
         _bodyReader = null;
     }
     if (_colorReader != null)
     {
         _colorReader.Dispose();
         _colorReader = null;
     }
     if (_depthReader != null)
     {
         _depthReader.Dispose();
         _depthReader = null;
     }
     if (_faceReader != null)
     {
         _faceReader.Dispose();
         _faceReader = null;
     }
     if (_infraredReader != null)
     {
         _infraredReader.Dispose();
         _infraredReader = null;
     }
     if (_sensor != null)
     {
         _sensor.Close();
         _sensor = null;
     }
 }
Example #20
        protected virtual void Dispose(bool disposing)
        {
            if (disposing)
            {
                if (_bodyReader != null)
                {
                    _bodyReader.FrameArrived -= _bodyReader_FrameArrived;
                    _bodyReader.Dispose();
                    _bodyReader = null;
                }

                if (_colorReader != null)
                {
                    _colorReader.FrameArrived -= _colorReader_FrameArrived;
                    _colorReader.Dispose();
                    _colorReader = null;
                }

                if (_depthReader != null)
                {
                    _depthReader.FrameArrived -= _depthReader_FrameArrived;
                    _depthReader.Dispose();
                    _depthReader = null;
                }

                if (_faceReader != null)
                {
                    _faceReader.FrameArrived -= _faceReader_FrameArrived;
                    _faceReader.Dispose();
                    _faceReader = null;
                }

                if (_infraredReader != null)
                {
                    _infraredReader.FrameArrived -= _infraredReader_FrameArrived;
                    _infraredReader.Dispose();
                    _infraredReader = null;
                }

                try
                {
                    _writerSemaphore.Wait();
                    if (_writer != null)
                    {
                        _writer.Flush();

                        if (_writer.BaseStream != null)
                        {
                            _writer.BaseStream.Flush();
                        }

                        _writer.Dispose();
                        _writer = null;
                    }
                }
                catch (Exception ex)
                {
                    // TODO: Change to log the error
                    System.Diagnostics.Debug.WriteLine(ex);
                }
                finally
                {
                    _writerSemaphore.Dispose();
                }

                if (_processFramesCancellationTokenSource != null)
                {
                    _processFramesCancellationTokenSource.Dispose();
                    _processFramesCancellationTokenSource = null;
                }
            }
        }
Example #21
        public void InitializeHDFace()
        {
          
            this.sensor = KinectSensor.GetDefault();
            this.bodySource = this.sensor.BodyFrameSource;
            FrameDescription colorFrameDescription = this.sensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
            
            this.highDefinitionFaceFrameSource = new HighDefinitionFaceFrameSource(sensor);
            this.highDefinitionFaceFrameSource.TrackingIdLost += this.HdFaceSource_TrackingIdLost;

            this.highDefinitionFaceFrameReader = this.highDefinitionFaceFrameSource.OpenReader();
            this.highDefinitionFaceFrameReader.FrameArrived += this.HdFaceReader_FrameArrived; // event for HD face frames

            if (scenarioselected == 2)
            {
                this.highDefinitionFaceFrameSource2 = new HighDefinitionFaceFrameSource(sensor);
                this.highDefinitionFaceFrameSource2.TrackingIdLost += this.HdFaceSource_TrackingIdLost2;

                this.highDefinitionFaceFrameReader2 = this.highDefinitionFaceFrameSource2.OpenReader();
                this.highDefinitionFaceFrameReader2.FrameArrived += this.HdFaceReader_FrameArrived2; // event for HD face frames (second face)
                faceSource2 = new FaceFrameSource(sensor, 0, DefaultFaceFrameFeatures);
                faceReader2 = faceSource2.OpenReader();
                faceReader2.FrameArrived += OnFaceFrameArrived2; //event for face data
                faceSource2.TrackingIdLost += OnTrackingIdLost2;

            }

            this.reader = sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Body | FrameSourceTypes.Color);
            
            this.reader.MultiSourceFrameArrived += OnMultiSourceFrameArrived; //event for multiple source (Position)
            this.currentFaceAlignment = new FaceAlignment();

            faceSource = new FaceFrameSource(sensor, 0, DefaultFaceFrameFeatures);
            faceReader = faceSource.OpenReader();
            faceReader.FrameArrived += OnFaceFrameArrived; //event for face data
            
            faceSource.TrackingIdLost += OnTrackingIdLost;
            this.pixels = new byte[colorFrameDescription.Width * colorFrameDescription.Height * colorFrameDescription.BytesPerPixel];
            this.sensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;
            this.sensor.Open();
            
        }
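
A minimal sketch of the OnFaceFrameArrived handler registered above for the non-HD FaceFrameSource. Reading the Happy property is only an illustration and assumes it was included in DefaultFaceFrameFeatures; the original handler's logic is not shown in this example.

        private void OnFaceFrameArrived(object sender, FaceFrameArrivedEventArgs e)
        {
            using (var frame = e.FrameReference.AcquireFrame())
            {
                if (frame == null || frame.FaceFrameResult == null)
                    return;

                FaceFrameResult result = frame.FaceFrameResult;

                // DetectionResult is Unknown/No/Maybe/Yes; FaceRotationQuaternion and
                // FaceBoundingBoxInColorSpace are also available on the result.
                DetectionResult happy = result.FaceProperties[FaceProperty.Happy];
            }
        }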