Example #1
    void Start()
    {
        //Use the Kinect Sensor
        k2 = KinectSensor.GetDefault();

        Assert.IsTrue(k2 != null, "A Kinect could not be found");
        //Start running the sensor
        if (k2 != null)
        {
            //Read depth data
            depthSource = k2.DepthFrameSource;
            depthReader = depthSource.OpenReader();
            // Frame description for the depth stream (width, height, pixel count)
            var depthFrameDescription = depthSource.FrameDescription;

            k2.Open();
            Assert.IsTrue(k2.IsOpen, "Kinect Sensor is offline");
            if (k2.IsOpen)
            {
                Debug.Log("Kinect Sensor Running");
                depthArray = new ushort[depthFrameDescription.LengthInPixels];
            }
        }
    }
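The Start() above only allocates depthArray; a minimal companion sketch of an Update() method that fills it each frame, assuming the same depthReader and depthArray fields (this polling pattern follows the Kinect v2 Unity samples and is not part of the original example):

    void Update()
    {
        if (depthReader == null)
            return;

        // AcquireLatestFrame() returns null when no new depth frame is available yet
        var frame = depthReader.AcquireLatestFrame();
        if (frame != null)
        {
            // Copy the 16-bit depth values (in millimeters) into the preallocated buffer
            frame.CopyFrameDataToArray(depthArray);
            frame.Dispose();
            frame = null;
        }
    }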
Example #2
 internal static byte[] GetNewPixelArray(this DepthFrameSource depthFrame)
 {
     // Allocate one Bgr32 pixel (4 bytes) per depth pixel
     return new byte[depthFrame.FrameDescription.LengthInPixels * ((PixelFormats.Bgr32.BitsPerPixel + 7) / 8)];
 }
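A hypothetical call site for this extension (the sensor variable is an assumption for illustration); the returned buffer is sized for one Bgr32 pixel per depth pixel, ready to be used as a display buffer:

 // Hypothetical usage: for a Kinect v2 depth frame this is 512 * 424 * 4 bytes
 byte[] displayPixels = sensor.DepthFrameSource.GetNewPixelArray();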
Example #3
        public void Start()
        {
            if (_isRunning)
                return;

            _isRunning = true;

            _depthFrameSource = _sensorService.Sensor.DepthFrameSource;
            this.DepthFrameDescription = _depthFrameSource.FrameDescription;
            _depthFrameReader = _depthFrameSource.OpenReader();
            _depthFrameReader.FrameArrived += _depthFrameReader_FrameArrived;

            
            if (IsCalibrating)
            {
                this.ServiceState = ServiceStates.NotReady.ToString();
                OnServiceStateChanged(ServiceStates.NotReady);
            }
            else
            {
                this.ServiceState = ServiceStates.Open.ToString();
                OnServiceStateChanged(ServiceStates.Open);
            }
        }
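The _depthFrameReader_FrameArrived handler wired up in Start() is not shown; a minimal sketch of what it might do, assuming a reusable ushort[] _depthData field sized from DepthFrameDescription.LengthInPixels (the field name and the handler body are assumptions, not part of the original service):

        private void _depthFrameReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
        {
            // AcquireFrame() can return null if the frame has already been recycled
            using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
            {
                if (depthFrame == null)
                    return;

                // Copy the raw 16-bit depth values (millimeters) into the reusable buffer
                depthFrame.CopyFrameDataToArray(_depthData);
            }
        }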
        private void MainWindow_Loaded(object sender, RoutedEventArgs e)
        {
            binaryFilePath = System.IO.Path.Combine(Environment.CurrentDirectory, "data.bin");


            this.kinectSensor = KinectSensor.GetDefault();
            if (this.kinectSensor != null)
            {
                this.kinectSensor.Open();

                #region ColorFrame
                this.colorFrameSource               = this.kinectSensor.ColorFrameSource;
                this.colorFrameReader               = this.colorFrameSource.OpenReader();
                this.colorFrameDescription          = this.colorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
                this.colorFrameReader.FrameArrived += colorFrameReader_FrameArrived;
                this.colorPixelData = new Byte[this.colorFrameDescription.LengthInPixels * 4];
                this.colorBitmap    = new WriteableBitmap(this.colorFrameDescription.Width,
                                                          this.colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);
                this.colorBitmapRect   = new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight);
                this.colorBitmapStride = this.colorFrameDescription.Width * 4;

                #endregion

                #region DepthFrame
                this.depthFrameSource               = this.kinectSensor.DepthFrameSource;
                this.depthFrameReader               = this.depthFrameSource.OpenReader();
                this.depthFrameDesription           = this.kinectSensor.DepthFrameSource.FrameDescription;
                this.depthFrameReader.FrameArrived += depthReader_FrameArrived;
                this.depthBitmap     = new WriteableBitmap(this.depthFrameDesription.Width, this.depthFrameDesription.Height, 96.0, 96.0, PixelFormats.Gray16, null);
                this.depthBitmapRect = new Int32Rect(0, 0, this.depthBitmap.PixelWidth, this.depthBitmap.PixelHeight);
                this.depthPixelData  = new ushort[this.depthFrameDesription.LengthInPixels];          // LengthInPixels replaces width * height; more convenient
                this.depthStride     = this.depthFrameDesription.Width * 2;
                #endregion

                #region FacePoints
                // Listen for body data.
                _bodySource = this.kinectSensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;
                // Listen for HD face data.
                _faceSource = new HighDefinitionFaceFrameSource(this.kinectSensor);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;
                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();
                #endregion

                abstractColorFrame.buffer      = new byte[1920 * 1080 * 4];          // 1920x1080 BGRA color frame
                abstractDepthFrame.buffer      = new ushort[512 * 424];              // 512x424 depth frame
                abstractFacePointsFrame.buffer = new CameraSpacePoint[1347];         // 1,347 HD face model vertices

                colorImage.Source = this.colorBitmap;
                depthImage.Source = this.depthBitmap;

                queueSaver        = new QueueSaver(binaryFilePath);
                mainWritingThread = new Thread(new ThreadStart(saveAll2File));
                mainWritingThread.Start();
                string txtPath = System.IO.Path.Combine(Environment.CurrentDirectory, "word.txt");
                if (File.Exists(txtPath))
                {
                    wordReader = new StreamReader(txtPath, Encoding.Default);
                }
                else
                {
                    // MessageBox.Show with only an OK button always returns MessageBoxResult.OK, so no result check is needed
                    MessageBox.Show("Teleprompter text file could not be found!");
                    Application.Current.Shutdown();
                    // Return here so the code below does not dereference the null wordReader before the shutdown takes effect
                    return;
                }
                if (!wordReader.EndOfStream)
                {
                    this.txtBlock_narrator.Text = wordReader.ReadLine();
                }
                else
                {
                    this.txtBlock_narrator.Text       = "Capture finished.";
                    this.btn_startRecording.IsEnabled = false;
                }
            }
        }
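The depthReader_FrameArrived handler referenced above is likewise not shown; a sketch of how it could render into the Gray16 depthBitmap using the buffers allocated in MainWindow_Loaded (the handler body is an assumption; the field names come from the code above):

        private void depthReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
        {
            using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
            {
                if (depthFrame == null)
                    return;

                // Copy the raw depth values into the preallocated ushort buffer
                depthFrame.CopyFrameDataToArray(this.depthPixelData);

                // Push the Gray16 pixels into the WriteableBitmap shown by depthImage
                this.depthBitmap.WritePixels(this.depthBitmapRect, this.depthPixelData, this.depthStride, 0);
            }
        }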