public MainWindow()
        {
            InitializeComponent();
            try
            {
                // Grab the Kinect sensor itself; as I recall, this ends up not-open (false) if no sensor is connected
                this.kinect = KinectSensor.GetDefault();
                // Specify the image format to read (RGB and the like) and set up the reader that does the reading
                this.colorImageFormat = ColorImageFormat.Bgra;
                this.colorFrameDescription = this.kinect.ColorFrameSource.CreateFrameDescription(this.colorImageFormat);
                this.colorFrameReader = this.kinect.ColorFrameSource.OpenReader();
                this.colorFrameReader.FrameArrived += colorFrameReader_FrameArrived;
                this.kinect.Open(); // Start the Kinect!!
                if (!kinect.IsOpen)
                {
                    this.errorLog.Visibility = Visibility.Visible;
                    this.errorLog.Content = "Kinect not found! Too bad!";
                    throw new Exception("The Kinect could not be found!!!");
                }
                // Create the array for storing Body data
                bodies = new Body[kinect.BodyFrameSource.BodyCount];

                // Open the body reader
                bodyFrameReader = kinect.BodyFrameSource.OpenReader();
                bodyFrameReader.FrameArrived += bodyFrameReader_FrameArrived;
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                Close();
            }
        }
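The constructor above wires up colorFrameReader_FrameArrived and bodyFrameReader_FrameArrived, but neither handler is shown. A minimal sketch of what the color handler might look like, assuming a WriteableBitmap field named colorBitmap and a byte[] field named colorPixels sized Width * Height * BytesPerPixel (both hypothetical; only colorFrameDescription and colorImageFormat come from the code above):

        void colorFrameReader_FrameArrived(object sender, ColorFrameArrivedEventArgs e)
        {
            using (ColorFrame colorFrame = e.FrameReference.AcquireFrame())
            {
                if (colorFrame == null)
                {
                    return;
                }

                // Convert the raw frame (typically YUY2) into the BGRA byte buffer
                colorFrame.CopyConvertedFrameDataToArray(this.colorPixels, this.colorImageFormat);

                // Push the buffer into the bitmap bound to the UI
                this.colorBitmap.WritePixels(
                    new Int32Rect(0, 0, this.colorFrameDescription.Width, this.colorFrameDescription.Height),
                    this.colorPixels,
                    this.colorFrameDescription.Width * (int)this.colorFrameDescription.BytesPerPixel,
                    0);
            }
        }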
Example #2
        protected override void OnNavigatedTo( NavigationEventArgs e )
        {
            base.OnNavigatedTo( e );

            try {
                // Open the Kinect
                kinect = KinectSensor.GetDefault();
                kinect.Open();

                // Create the data for display
                depthFrameDesc = kinect.DepthFrameSource.FrameDescription;

                // Open the depth reader
                depthFrameReader = kinect.DepthFrameSource.OpenReader();
                depthFrameReader.FrameArrived += depthFrameReader_FrameArrived;

                // Data for display
                depthBitmap = new WriteableBitmap( depthFrameDesc.Width,
                                                   depthFrameDesc.Height );
                ImageDepth.Source = depthBitmap;

                depthBuffer = new ushort[depthFrameDesc.LengthInPixels];
                depthBitmapBuffer = new byte[depthFrameDesc.LengthInPixels * 4];

                depthPoint = new Point( depthFrameDesc.Width / 2,
                                        depthFrameDesc.Height / 2 );
            }
            catch ( Exception ex ) {
                MessageDialog dlg = new MessageDialog(ex.Message);
                dlg.ShowAsync();
            }
        }
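The depthFrameReader_FrameArrived handler registered above is not shown. A minimal sketch using the buffers created in OnNavigatedTo; the 8000 mm divisor is only an assumed scaling range, and the byte[]-to-IBuffer CopyTo call needs using System.Runtime.InteropServices.WindowsRuntime:

        void depthFrameReader_FrameArrived( DepthFrameReader sender, DepthFrameArrivedEventArgs args )
        {
            using ( var depthFrame = args.FrameReference.AcquireFrame() ) {
                if ( depthFrame == null ) {
                    return;
                }

                // Raw 16-bit distances in millimeters
                depthFrame.CopyFrameDataToArray( depthBuffer );

                // Scale each distance to a gray value and expand it to BGRA
                for ( int i = 0; i < depthBuffer.Length; i++ ) {
                    byte gray = (byte)Math.Min( 255, depthBuffer[i] * 255 / 8000 );
                    depthBitmapBuffer[i * 4 + 0] = gray;    // B
                    depthBitmapBuffer[i * 4 + 1] = gray;    // G
                    depthBitmapBuffer[i * 4 + 2] = gray;    // R
                    depthBitmapBuffer[i * 4 + 3] = 255;     // A
                }

                // Copy into the WinRT bitmap and redraw
                depthBitmapBuffer.CopyTo( depthBitmap.PixelBuffer );
                depthBitmap.Invalidate();
            }
        }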
        private void Window_Loaded( object sender, RoutedEventArgs e )
        {
            try {
                kinect = KinectSensor.GetDefault();
                if ( kinect == null ) {
                    throw new Exception("Cannot open the Kinect");
                }

                kinect.Open();

                // Create the data for display
                depthFrameDesc = kinect.DepthFrameSource.FrameDescription;

                // Create what the display bitmap needs
                depthImage = new WriteableBitmap( depthFrameDesc.Width, depthFrameDesc.Height,
                    96, 96, PixelFormats.Gray16, null );
                depthBuffer = new ushort[depthFrameDesc.LengthInPixels];
                depthRect = new Int32Rect( 0, 0, depthFrameDesc.Width, depthFrameDesc.Height );
                depthStride = (int)(depthFrameDesc.Width * depthFrameDesc.BytesPerPixel);

                ImageDepth.Source = depthImage;

                // Initial coordinates for the position display
                depthPoint = new Point( depthFrameDesc.Width / 2, depthFrameDesc.Height / 2 );

                // Open the depth reader
                depthFrameReader = kinect.DepthFrameSource.OpenReader();
                depthFrameReader.FrameArrived += depthFrameReader_FrameArrived;
            }
            catch ( Exception ex ) {
                MessageBox.Show( ex.Message );
                Close();
            }
        }
        public MainWindow()
        {
            _kinectSensor = KinectSensor.GetDefault();

            _depthFrameDescription = _kinectSensor.DepthFrameSource.FrameDescription;

            _depthFrameReader = _kinectSensor.DepthFrameSource.OpenReader();

            _depthFrameReader.FrameArrived += Reader_FrameArrived;

            _cameraSpacePoints = new CameraSpacePoint[_depthFrameDescription.Width * _depthFrameDescription.Height];

            _trackingDiagnostics = new TrackingDiagnostics();

            _heatMap = new HeatMap();

            _energyHistory = new EnergyHistory();

            _temporalMedianImage = new TemporalMedianImage(GlobVar.TemporalFrameCounter);

            _stopwatch = new Stopwatch();

            BodiesHistory.Initialize();

            GlobVar.CoordinateMapper = _kinectSensor.CoordinateMapper;

            GlobVar.TimeStamps = new List<TimeSpan>();

            // initialize the components (controls) of the GUI window
            InitializeComponent();

            _kinectSensor.Open();
        }
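Reader_FrameArrived is registered above but not shown. A minimal sketch of projecting the depth frame into _cameraSpacePoints via the CoordinateMapper; the ushort[] buffer _depthData is a hypothetical field sized Width * Height, everything else comes from the constructor:

        private void Reader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
        {
            using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
            {
                if (depthFrame == null)
                {
                    return;
                }

                // Copy the raw depth values, then map every depth pixel into 3D camera space
                depthFrame.CopyFrameDataToArray(_depthData);
                GlobVar.CoordinateMapper.MapDepthFrameToCameraSpace(_depthData, _cameraSpacePoints);
            }
        }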
        protected override void OnNavigatedTo( NavigationEventArgs e )
        {
            base.OnNavigatedTo( e );

            try {
                // Open the Kinect
                kinect = KinectSensor.GetDefault();
                if ( kinect == null ) {
                    throw new Exception( "Cannot open the Kinect" );
                }

                kinect.Open();

                // Create the color image description (BGRA format)
                colorFrameDesc = kinect.ColorFrameSource.CreateFrameDescription( ColorImageFormat.Bgra );

                colorBitmap = new WriteableBitmap( colorFrameDesc.Width, colorFrameDesc.Height );
                ImageColor.Source = colorBitmap;

                colorBuffer = new byte[colorFrameDesc.Width * colorFrameDesc.Height * colorFrameDesc.BytesPerPixel];

                // Open the color reader
                colorFrameReader = kinect.ColorFrameSource.OpenReader();
                colorFrameReader.FrameArrived += colorFrameReader_FrameArrived;
            }
            catch ( Exception ex ) {
                MessageDialog dlg = new MessageDialog( ex.Message );
                dlg.ShowAsync();
            }
        }
Example #6
        //Setup
        public KinectImage()
        #region
        {
            //Kinect
            this.kinect = KinectSensor.GetDefault();
            
            //Handle the bodyIndexFrame
            this.bodyIndexFrameDes = this.kinect.BodyIndexFrameSource.FrameDescription;
            this.bodyIndexFrameReader = this.kinect.BodyIndexFrameSource.OpenReader();
            this.bodyIndexFrameReader.FrameArrived += this.BodyIndexFrame_Arrived;
            //Image information
            this.kinectImgPackage = new ShadowPackage();
            this.imageWidth =  this.bodyIndexFrameDes.Width;  // imgW;
            this.imageHeight = this.bodyIndexFrameDes.Height; // imgH;

            this.imageBytePerPixel = (int)this.bodyIndexFrameDes.BytesPerPixel;
            this.bitmapRec = new Int32Rect(0, 0, this.imageWidth, this.imageHeight);
            this.bitmapStride = (int)(this.imageWidth * this.imageBytePerPixel);
           
            this.bodyIndexBuffer = new byte[this.imageWidth *
                                                this.imageHeight * this.imageBytePerPixel];
            this.kinectImage = new Mat(this.imageHeight, this.imageWidth, MatType.CV_8UC1);
            //Start the Kinect
            this.kinect.Open();
            
        }
Example #7
        /// <summary>
        /// Constructor
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            // Init Kinect Sensors
            this.kinect = KinectSensor.GetDefault();

            if (kinect == null)
            {
                this.showCloseDialog("The Kinect is not connected or is unavailable. The application will now exit.");
            }

            this.colorImageFormat = ColorImageFormat.Bgra;
            this.colorFrameDescription = this.kinect.ColorFrameSource.CreateFrameDescription(this.colorImageFormat);
            this.colorFrameReader = this.kinect.ColorFrameSource.OpenReader();
            this.colorFrameReader.FrameArrived += ColorFrameReader_FrameArrived;
            bodyFrameReader = kinect.BodyFrameSource.OpenReader();
            bodyFrameReader.FrameArrived += bodyFrameReader_FrameArrived;

            this.kinect.Open();
            this.bodies = new Body[kinect.BodyFrameSource.BodyCount];

            KinectRegion.SetKinectRegion(this, kinectRegion);
            this.kinectRegion.KinectSensor = KinectSensor.GetDefault();

            this.isTraining = false;
        }
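bodyFrameReader_FrameArrived is registered above but not shown. A minimal sketch of the usual pattern for refreshing this.bodies:

        private void bodyFrameReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
            {
                if (bodyFrame == null)
                {
                    return;
                }

                // Reuse the pre-allocated Body array; the SDK updates the objects in place
                bodyFrame.GetAndRefreshBodyData(this.bodies);

                foreach (Body body in this.bodies)
                {
                    if (body != null && body.IsTracked)
                    {
                        // joint positions are available via body.Joints[JointType.Head], etc.
                    }
                }
            }
        }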
        //public httpc

        public MainWindow()
        {
            frameCount = 0;

            // get the kinectSensor object
            this.kinect = KinectSensor.GetDefault();

            // open the reader for the depth frames
            this.depthFrameReader = this.kinect.DepthFrameSource.OpenReader();

            // wire handler for frame arrival
            this.depthFrameReader.FrameArrived += this.Reader_FrameArrived;

            // get FrameDescription from DepthFrameSource
            this.depthFrameDescription = this.kinect.DepthFrameSource.FrameDescription;

            // allocate space to put the pixels being received and converted
            this.depthPixels = new byte[this.depthFrameDescription.Width * this.depthFrameDescription.Height];

            // open the sensor
            this.kinect.Open();

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // initialize the components (controls) of the window
            this.InitializeComponent();
        }
Example #9
        protected override void OnNavigatedTo( NavigationEventArgs e )
        {
            base.OnNavigatedTo( e );

            try {
                // Open the Kinect
                kinect = KinectSensor.GetDefault();
                if ( kinect == null ) {
                    throw new Exception( "Cannot open the Kinect" );
                }

                kinect.Open();

                // Get the infrared image description
                infraredFrameDesc = kinect.InfraredFrameSource.FrameDescription;

                // Create the buffers used to build the image
                infraredBitmapBuffer = new byte[infraredFrameDesc.LengthInPixels * 4];
                infraredBitmap = new WriteableBitmap(
                    infraredFrameDesc.Width, infraredFrameDesc.Height );
                ImageInfrared.Source = infraredBitmap;

                infraredBuffer = new ushort[infraredFrameDesc.LengthInPixels];

                // Open the infrared reader
                infraredFrameReader = kinect.InfraredFrameSource.OpenReader();
                infraredFrameReader.FrameArrived += infraredFrameReader_FrameArrived;
            }
            catch ( Exception ex ) {
                MessageDialog dlg = new MessageDialog( ex.Message );
                dlg.ShowAsync();
            }
        }
Example #10
 public BodyDrawer(CoordinateMapper coordinateMapper, FrameDescription frameDescription, DrawingGroup drawingGroup)
 {
     this.bonesToDraw = (new BodyInitializer()).GetBones();
     this.coordinateMapper = coordinateMapper;
     this.displayHeight = frameDescription.Height;
     this.displayWidth = frameDescription.Width;
     this.drawingGroup = drawingGroup;
 }
Example #11
 public BasicPhotoService(ISensorService<KinectSensor> sensorService)
 {
     _greenScreen = new KinectNoBackgroundView();
     //_greenScreen.Start();
     _sensorService = sensorService;
     _colorFrameDescription = _sensorService.Sensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
     this.Photo = new WriteableBitmap(_colorFrameDescription.Width, _colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);
     Init();
 }
        public void Init()
        {
            this.ColorReader = (ColorFrameReader) this.Sensor.OpenRGB();
            this.ColorReader.FrameArrived += ColorReader_FrameArrived;
            this.ColorFrameDescription = this.Kinect.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Rgba);

            this.ColorImage = new WriteableBitmap(this.ColorFrameDescription.Width, this.ColorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

            Window.SetColorDisplaySource(this.ColorImage);
            this.Sensor.Open();
        }
Example #13
        public KinectDepthService(ISensorService<KinectSensor> sensorService)
        {
            _sensorService = sensorService;

            this.DepthBytes = new Subject<byte[]>();

            _depthFrameDescription = _sensorService.Sensor.DepthFrameSource.FrameDescription;
            this.PixelHeight = _depthFrameDescription.Height;
            this.PixelWidth = _depthFrameDescription.Width;
            _depthPixels = new byte[this.PixelWidth * this.PixelHeight];
        }
Example #14
 public bool InitializeFrame()
 {
     this.infraredFrameReader = this.kinectSensor.InfraredFrameSource.OpenReader();
     if (this.infraredFrameReader != null)
     {
         this.infraredFrameReader.FrameArrived += this.Reader_InfraredFrameArrived;
         this.infraredFrameDescription = this.kinectSensor.InfraredFrameSource.FrameDescription;
         this.infraredBitmap = new WriteableBitmap(this.infraredFrameDescription.Width, this.infraredFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray32Float, null);
         this.stopwatch.Start();
         return true;
     }
     return false;
 }
Example #15
 public bool InitializeFrame()
 {
     this.depthFrameReader = this.kinectSensor.DepthFrameSource.OpenReader();
     if (this.depthFrameReader != null)
     {
         this.depthFrameReader.FrameArrived += this.Reader_FrameArrived;
         this.depthFrameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;
         this.depthPixels = new byte[this.depthFrameDescription.Width * this.depthFrameDescription.Height];
         this.depthBitmap = new WriteableBitmap(this.depthFrameDescription.Width, this.depthFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray8, null);
         this.stopwatch.Start();
         return true;
     }
     return false;
 }
Example #16
        /// Initializes a new instance of the MainWindow class.
        public MainWindow()
        {
            // get the _kinectSensor object
            this._kinectSensor = KinectSensor.GetDefault();

            // open the reader for bodyIndex frames
            this._bodyIndexFrameReader = this._kinectSensor.BodyIndexFrameSource.OpenReader();

            // wire handler for frame arrival
            this._bodyIndexFrameReader.FrameArrived += this.Reader_FrameArrived;

            this._bodyIndexFrameDescription = this._kinectSensor.BodyIndexFrameSource.FrameDescription;

            // allocate space to put the pixels being converted
            this._bodyIndexPixels = new uint[this._bodyIndexFrameDescription.Width * this._bodyIndexFrameDescription.Height];

            // create the bitmap to display
            this._bodyIndexBitmap = new WriteableBitmap(this._bodyIndexFrameDescription.Width, this._bodyIndexFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

            // set IsAvailableChanged event notifier
            this._kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this._kinectSensor.Open();

            // set the status text
            this.StatusText = this._kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // use the window object as a view model
            this.DataContext = this;

            // initialize the components (controls) of the window
            this.InitializeComponent();

            _kinectSensor = KinectSensor.GetDefault();

            if (_kinectSensor != null)
            {
                _kinectSensor.Open();

                _bodies = new Body[_kinectSensor.BodyFrameSource.BodyCount];

                _reader = _kinectSensor.BodyFrameSource.OpenReader();
                _reader.FrameArrived += BodyReader_FrameArrived;

                _recorder = new KinectFileManager();
            }
        }
Example #17
        public bool InitializeFrame()
        {
            this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader();
            if (this.colorFrameReader != null)
            {
                // wire color frame arrive event
                this.colorFrameReader.FrameArrived += this.Reader_ColorFrameArrived;

                this.colorFrameDescription = this.kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
                this.colorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);
                return true;

            }
            return false;
        }
Example #18
        public void ListenForFrames()
        {
            sensor = KinectSensor.GetDefault();

            depthFrameReader = sensor.DepthFrameSource.OpenReader();

            depthFrameReader.FrameArrived += Reader_FrameArrived;

            depthFrameDescription = sensor.DepthFrameSource.FrameDescription;

            // allocate space to put the pixels being received and converted
            depthPixels = new ushort[this.depthFrameDescription.Width * this.depthFrameDescription.Height];

            sensor.Open();

            Console.WriteLine("Extractor ready to grab frames.");
        }
        public DepthRecorder()
        {
            InitializeComponent();

            this.Sensor = new KinectV2();
            this.Sensor.Initialize();

            this.depthFrameReader = this.Kinect.DepthFrameSource.OpenReader();
            this.depthFrameReader.FrameArrived += depthFrameReader_FrameArrived;
            this.depthFrameDescription = this.Kinect.DepthFrameSource.FrameDescription;
            this.depthPixels = new byte[this.depthFrameDescription.Width * this.depthFrameDescription.Height];
            this.depthBitmap = new WriteableBitmap(this.depthFrameDescription.Width, this.depthFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray8, null);

            #region Coordinate Mapper Configuration
            this.coordinateMapper = this.Kinect.CoordinateMapper;
            FrameDescription depthFrameDescription = this.Kinect.DepthFrameSource.FrameDescription;

            int depthWidth = depthFrameDescription.Width;
            int depthHeight = depthFrameDescription.Height;

            // allocate space to put the pixels being received and converted
            this.depthFrameData = new ushort[depthWidth * depthHeight];
            this.colorPoints = new ColorSpacePoint[depthWidth * depthHeight];
            this.cameraPoints = new CameraSpacePoint[depthWidth * depthHeight];
            // get FrameDescription from ColorFrameSource
            FrameDescription colorFrameDescription = this.Kinect.ColorFrameSource.FrameDescription;

            int colorWidth = colorFrameDescription.Width;
            int colorHeight = colorFrameDescription.Height;

            // allocate space to put the pixels being received
            this.colorFrameData = new byte[colorWidth * colorHeight * this.bytesPerPixel];
            #endregion

            #region Recorder
            this.Recorder = new PointCloudRecorder(ConfigurationManager.AppSettings["RecordFolder"], new PCL());
            this.Recorder.RecordRGB = false;
            this.Recorder.StateChanged += Recorder_StateChanged;
            this.Recorder.Stopped += Recorder_Stopped;
            this.Recorder.CloudProcessed += Recorder_CloudProcessed;
            #endregion

            this.Sensor.Open();
            this.DataContext = this;
        }
Example #20
        public DepthInterpreter(KinectSensor sensor)
        {

            kinectSensor = sensor;
            this.depthFrameReader = this.kinectSensor.DepthFrameSource.OpenReader();
            this.depthFrameReader.FrameArrived += this.Reader_DepthFrameArrived;
            this.depthFrameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

            this.depthBitmap = new WriteableBitmap(displayWidth, displayHeight, 96.0, 96.0, format, null);
            this.depthPixelData = new byte[displayWidth * displayHeight];

            this.drawingGroup = new DrawingGroup();
            this.depthOverlay = new DrawingImage(this.drawingGroup);

            depthData = new ushort[this.depthFrameDescription.Width * this.depthFrameDescription.Height];
            depthMap = new DepthMap(depthData, displayWidth);
            
        }
Example #21
 public KinectDevice()
 {
     //Kinect setup
     this.kinect = KinectSensor.GetDefault();
     //Settings and handlers
     //colorImage
     #region
     this.colorImageFormat = ColorImageFormat.Bgra;
     this.colorFrameDescription = this.kinect.ColorFrameSource.CreateFrameDescription(this.colorImageFormat);
     this.colorFrameReader = this.kinect.ColorFrameSource.OpenReader();
     this.colorFrameReader.FrameArrived += ColorFrame_Arrived;
     this.colors = new byte[this.colorFrameDescription.Width
                                    * this.colorFrameDescription.Height
                                    * this.colorFrameDescription.BytesPerPixel];
     #endregion
     //Skeleton (body) information
     #region
     this.bodyFrameReader = this.kinect.BodyFrameSource.OpenReader();
     this.bodyFrameReader.FrameArrived += BodyFrame_Arrived;
     #endregion
     //Depth information
     #region
     this.depthFrameReader = this.kinect.DepthFrameSource.OpenReader();
     this.depthFrameReader.FrameArrived += DepthFrame_Arrived;
     this.depthFrameDescription = this.kinect.DepthFrameSource.FrameDescription;
     this.depthBuffer = new ushort[this.depthFrameDescription.LengthInPixels];
     #endregion
     //BodyIndex
     #region
     this.bodyIndexFrameDes = this.kinect.BodyIndexFrameSource.FrameDescription;
     this.bodyIndexFrameReader = this.kinect.BodyIndexFrameSource.OpenReader();
     this.bodyIndexFrameReader.FrameArrived += this.BodyIndexFrame_Arrived;
     this.bodyIndexBuffer = new byte[this.bodyIndexFrameDes.Width *
                                         this.bodyIndexFrameDes.Height * this.bodyIndexFrameDes.BytesPerPixel];
     #endregion
     //Start the Kinect
     this.package = new ShadowPackage();
     this.imageWidth = this.bodyIndexFrameDes.Width; 
     this.imageHeight = this.bodyIndexFrameDes.Height; 
     this.imageBytePerPixel = (int)this.bodyIndexFrameDes.BytesPerPixel;
     this.kinectImage = new Mat(this.imageHeight, this.imageWidth, MatType.CV_8UC1);
     this.kinect.Open();
 }
        /// <summary>
        /// Initializes a new instance of the KinectIRView class
        /// </summary>
        /// <param name="kinectSensor">Active instance of the Kinect sensor</param>
        public KinectIRView(KinectSensor kinectSensor)
        {
            if (kinectSensor == null)
            {
                throw new ArgumentNullException("kinectSensor");
            }

            // open the reader for the Infrared frames
            this.infraredFrameReader = kinectSensor.InfraredFrameSource.OpenReader();

            // wire handler for frame arrival
            this.infraredFrameReader.FrameArrived += this.Reader_InfraredFrameArrived;

            // get FrameDescription from InfraredFrameSource
            this.infraredFrameDescription = kinectSensor.InfraredFrameSource.FrameDescription;

            // create the bitmap to display
            this.infraredBitmap = new WriteableBitmap(this.infraredFrameDescription.Width, this.infraredFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray32Float, null);
        }
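Reader_InfraredFrameArrived is wired above but not shown. A minimal sketch of filling the Gray32Float bitmap, assuming two hypothetical fields: a ushort[] infraredPixels and a float[] infraredFloatPixels, both sized Width * Height:

        private void Reader_InfraredFrameArrived(object sender, InfraredFrameArrivedEventArgs e)
        {
            using (InfraredFrame infraredFrame = e.FrameReference.AcquireFrame())
            {
                if (infraredFrame == null)
                {
                    return;
                }

                // Raw 16-bit infrared intensities
                infraredFrame.CopyFrameDataToArray(this.infraredPixels);

                // Normalize to [0, 1] for the Gray32Float pixel format
                for (int i = 0; i < this.infraredPixels.Length; i++)
                {
                    this.infraredFloatPixels[i] = this.infraredPixels[i] / (float)ushort.MaxValue;
                }

                this.infraredBitmap.WritePixels(
                    new Int32Rect(0, 0, this.infraredFrameDescription.Width, this.infraredFrameDescription.Height),
                    this.infraredFloatPixels,
                    this.infraredFrameDescription.Width * 4,
                    0);
            }
        }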
Example #23
        //Constructor
        public getdepth()
        {
            try {
                kinect = KinectSensor.GetDefault();

                this.depthFrameReader = this.kinect.DepthFrameSource.OpenReader();
                this.depthFrameReader.FrameArrived += DepthFrame_Arrived;
                this.depthFrameDescription = this.kinect.DepthFrameSource.FrameDescription;
                this.depthBuffer = new ushort[this.depthFrameDescription.LengthInPixels];
                this.depthImageWidth = this.depthFrameDescription.Width;
                this.depthImageHeight = this.depthFrameDescription.Height;
                this.depthStride = (int)(depthFrameDescription.Width * depthFrameDescription.BytesPerPixel);


                this.kinect.Open();
            }catch(Exception ex)
            {
                Console.WriteLine(ex.Message);
            }
        }
        private void Window_Loaded( object sender, RoutedEventArgs e )
        {
            try {
                kinect = KinectSensor.GetDefault();
                if ( kinect == null ) {
                    throw new Exception("Cannot open the Kinect");
                }

                kinect.Open();

                // Create the data for display
                bodyIndexFrameDesc = kinect.DepthFrameSource.FrameDescription;

                // Buffer for the body index data
                bodyIndexBuffer = new byte[bodyIndexFrameDesc.LengthInPixels];

                // Create what the display bitmap needs
                bodyIndexColorImage = new WriteableBitmap( bodyIndexFrameDesc.Width, bodyIndexFrameDesc.Height,
                    96, 96, PixelFormats.Bgra32, null );
                bodyIndexColorRect = new Int32Rect( 0, 0, bodyIndexFrameDesc.Width, bodyIndexFrameDesc.Height );
                bodyIndexColorStride = (int)(bodyIndexFrameDesc.Width * bodyIndexColorBytesPerPixel);

                // Buffer for turning the body index data into BGRA (color) data
                bodyIndexColorBuffer = new byte[bodyIndexFrameDesc.LengthInPixels * bodyIndexColorBytesPerPixel];

                ImageBodyIndex.Source = bodyIndexColorImage;

                // Create the array of colors used for coloring
                bodyIndexColors = new Color[]{
                    Colors.Red, Colors.Blue, Colors.Green, Colors.Yellow, Colors.Pink, Colors.Purple,
                };

                // Open the body index reader
                bodyIndexFrameReader = kinect.BodyIndexFrameSource.OpenReader();
                bodyIndexFrameReader.FrameArrived += bodyIndexFrameReader_FrameArrived;
            }
            catch ( Exception ex ) {
                MessageBox.Show( ex.Message );
                Close();
            }
        }
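bodyIndexFrameReader_FrameArrived is registered above but not shown. A minimal sketch of coloring the index data, using only the fields created in Window_Loaded (body index values 0-5 mark a tracked body, 255 marks background):

        void bodyIndexFrameReader_FrameArrived( object sender, BodyIndexFrameArrivedEventArgs e )
        {
            using ( var bodyIndexFrame = e.FrameReference.AcquireFrame() ) {
                if ( bodyIndexFrame == null ) {
                    return;
                }

                bodyIndexFrame.CopyFrameDataToArray( bodyIndexBuffer );

                for ( int i = 0; i < bodyIndexBuffer.Length; i++ ) {
                    int offset = i * (int)bodyIndexColorBytesPerPixel;
                    byte index = bodyIndexBuffer[i];

                    // Pick a color per body, black for background
                    Color color = (index != 255) ? bodyIndexColors[index] : Colors.Black;
                    bodyIndexColorBuffer[offset + 0] = color.B;
                    bodyIndexColorBuffer[offset + 1] = color.G;
                    bodyIndexColorBuffer[offset + 2] = color.R;
                    bodyIndexColorBuffer[offset + 3] = 255;
                }

                bodyIndexColorImage.WritePixels( bodyIndexColorRect, bodyIndexColorBuffer,
                                                 bodyIndexColorStride, 0 );
            }
        }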
        /// <summary>
        /// Initializes a new instance of the KinectBodyIndexView class
        /// </summary>
        /// <param name="kinectSensor">Active instance of the Kinect sensor</param>
        public KinectBodyIndexView(KinectSensor kinectSensor)
        {
            if (kinectSensor == null)
            {
                throw new ArgumentNullException("kinectSensor");
            }

            // open the reader for the BodyIndex frames
            this.bodyIndexFrameReader = kinectSensor.BodyIndexFrameSource.OpenReader();

            // wire handler for frame arrival
            this.bodyIndexFrameReader.FrameArrived += this.Reader_BodyIndexFrameArrived;

            this.bodyIndexFrameDescription = kinectSensor.BodyIndexFrameSource.FrameDescription;

            // allocate space to put the pixels being converted
            this.bodyIndexPixels = new uint[this.bodyIndexFrameDescription.Width * this.bodyIndexFrameDescription.Height];

            // create the bitmap to display
            this.bodyIndexBitmap = new WriteableBitmap(this.bodyIndexFrameDescription.Width, this.bodyIndexFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);
        }
Example #26
        public MainWindow()
        {
            this.kinectSensor = KinectSensor.GetDefault();

            this.frameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;
            this.colorFrameDescription = this.kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);

            this.depthFrameReader = this.kinectSensor.DepthFrameSource.OpenReader();
            //this.depthFrameReader.FrameArrived += depthFrameReader_FrameArrived;
            this.depthBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);
            this.depthPixels = new byte[this.frameDescription.Width * this.frameDescription.Height];

            this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader();
            this.colorFrameReader.FrameArrived += this.colorFrameReader_FrameArrived;     
            this.colorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();
            this.bodyFrameReader.FrameArrived += bodyFrameReader_FrameArrived;
            
            this.bodyDrawingGroup = new DrawingGroup();
            this.imageSource = new DrawingImage(bodyDrawingGroup);

            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            this.kinectSensor.Open();
    
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.KinectReady : Properties.Resources.NoSensor;
            this.GestureText = "";

            this.DataContext = this;

            descriptionNumber = Convert.ToInt32(Properties.Resources.Precarriage); // lead-in (pre-run) frames

            maxTrainData = Convert.ToInt32(Properties.Resources.MaxTrainData);
            maxTestData = Convert.ToInt32(Properties.Resources.MaxTestData);

            this.InitializeComponent();
        }
Example #27
        public MainWindow()
        {
            this.kinectSensor = KinectSensor.GetDefault();

            this.multiSourceFrameReader = kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth);

            this.multiSourceFrameReader.MultiSourceFrameArrived += this.MultiSourceFrameArrived;

            // create image data
            colorFrameDescription = kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
            this.rawColorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96, 96, PixelFormats.Bgr32, null);
            depthFrameDescription = kinectSensor.DepthFrameSource.FrameDescription;
            depthPixels = new byte[depthFrameDescription.Width * depthFrameDescription.Height];
            this.depthBitmap = new WriteableBitmap(depthFrameDescription.Width, depthFrameDescription.Height, 96, 96, PixelFormats.Gray8, null);
            this.colorBitmap = new WriteableBitmap(depthFrameDescription.Width, depthFrameDescription.Height, 96, 96, PixelFormats.Bgr32, null);

            kinectSensor.Open();

            this.DataContext = this;

            InitializeComponent();
        }
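MultiSourceFrameArrived is wired above but not shown. A minimal sketch of pulling the color and depth frames out of one multi-source frame; the ushort[] depthValues buffer and the 8000 mm scaling range are assumptions, the rest comes from the constructor:

        private void MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();
            if (multiSourceFrame == null)
            {
                return;
            }

            // Each sub-frame is acquired through its own reference and disposed separately
            using (ColorFrame colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame())
            {
                if (colorFrame != null)
                {
                    rawColorBitmap.Lock();
                    colorFrame.CopyConvertedFrameDataToIntPtr(
                        rawColorBitmap.BackBuffer,
                        (uint)(colorFrameDescription.Width * colorFrameDescription.Height * 4),
                        ColorImageFormat.Bgra);
                    rawColorBitmap.AddDirtyRect(new Int32Rect(0, 0, colorFrameDescription.Width, colorFrameDescription.Height));
                    rawColorBitmap.Unlock();
                }
            }

            using (DepthFrame depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
            {
                if (depthFrame != null)
                {
                    // Hypothetical temporary buffer for the raw 16-bit distances
                    ushort[] depthValues = new ushort[depthFrameDescription.Width * depthFrameDescription.Height];
                    depthFrame.CopyFrameDataToArray(depthValues);

                    // Scale millimeters into the Gray8 byte buffer
                    for (int i = 0; i < depthValues.Length; i++)
                    {
                        depthPixels[i] = (byte)Math.Min(255, depthValues[i] * 255 / 8000);
                    }

                    depthBitmap.WritePixels(
                        new Int32Rect(0, 0, depthFrameDescription.Width, depthFrameDescription.Height),
                        depthPixels, depthFrameDescription.Width, 0);
                }
            }
        }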
Example #28
        /// <summary>
        /// Initializes a new instance of the MainWindow class.
        /// </summary>
        public MainWindow()
        {
            // get the kinectSensor object
            this.kinectSensor = KinectSensor.GetDefault();

            // open the reader for the depth frames
            this.depthFrameReader = this.kinectSensor.DepthFrameSource.OpenReader();

            // wire handler for frame arrival
            this.depthFrameReader.FrameArrived += this.Reader_FrameArrived;

            // get FrameDescription from DepthFrameSource
            this.depthFrameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

            //
            //this.depthImagePixel

            // allocate space to put the pixels being received and converted
            this.depthPixels = new byte[this.depthFrameDescription.Width * this.depthFrameDescription.Height];

            // create the bitmap to display
            this.depthBitmap = new WriteableBitmap(this.depthFrameDescription.Width, this.depthFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray8, null);

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // initialize the components (controls) of the window
            this.InitializeComponent();
        }
Example #29
        private void InitializeArrays(FrameDescription colorFrameDescription, FrameDescription depthFrameDescription, FrameDescription bodyIndexDescription, int bodyCount)
        {
            if (_FrameSourceTypes.HasFlag(FrameSourceTypes.Color))
            {
                ColorPixels = new byte[colorFrameDescription.Width * colorFrameDescription.Height * colorFrameDescription.BytesPerPixel];
            }

            if (_FrameSourceTypes.HasFlag(FrameSourceTypes.Depth))
            {
                DepthPixels = new ushort[depthFrameDescription.Width * depthFrameDescription.Height];
            }

            if (_FrameSourceTypes.HasFlag(FrameSourceTypes.Body))
            {
                Bodies = new Body[bodyCount];
            }

            if (_FrameSourceTypes.HasFlag(FrameSourceTypes.BodyIndex))
            {
                BodyIndexPixels = new byte[bodyIndexDescription.Width * bodyIndexDescription.Height * bodyIndexDescription.BytesPerPixel];
            }
        }
        /// <summary>
        /// Initializes a new instance of the KinectDepthView class
        /// </summary>
        /// <param name="kinectSensor">Active instance of the Kinect sensor</param>
        public KinectDepthView(KinectSensor kinectSensor)
        {
            if (kinectSensor == null)
            {
                throw new ArgumentNullException("kinectSensor");
            }

            // open the reader for the depth frames
            this.depthFrameReader = kinectSensor.DepthFrameSource.OpenReader();

            // wire handler for frame arrival
            this.depthFrameReader.FrameArrived += this.Reader_DepthFrameArrived;

            // get FrameDescription from DepthFrameSource
            this.depthFrameDescription = kinectSensor.DepthFrameSource.FrameDescription;

            // allocate space to put the pixels being received and converted
            this.depthPixels = new byte[this.depthFrameDescription.Width * this.depthFrameDescription.Height];

            // create the bitmap to display
            this.depthBitmap = new WriteableBitmap(this.depthFrameDescription.Width, this.depthFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray8, null);
        }
        protected override void OnNavigatedTo(NavigationEventArgs e)
        {
            base.OnNavigatedTo(e);

            try {
                kinect = KinectSensor.GetDefault();
                if (kinect == null)
                {
                    throw new Exception("Cannot open the Kinect");
                }

                kinect.Open();

                // Create the data for display
                depthFrameDesc = kinect.DepthFrameSource.FrameDescription;

                // Bitmap
                depthBitmap       = new WriteableBitmap(depthFrameDesc.Width, depthFrameDesc.Height);
                ImageDepth.Source = depthBitmap;

                // Buffer for the depth data
                depthBuffer = new ushort[depthFrameDesc.LengthInPixels];

                // Buffer for turning the depth data into BGRA (color) data
                depthBitmapBuffer = new byte[depthFrameDesc.LengthInPixels * 4];

                // Position at which to display the distance
                depthPoint = new Point(depthFrameDesc.Width / 2, depthFrameDesc.Height / 2);


                // Open the depth reader
                depthFrameReader = kinect.DepthFrameSource.OpenReader();
                depthFrameReader.FrameArrived += depthFrameReader_FrameArrived;
            }
            catch (Exception ex) {
                MessageDialog dlg = new MessageDialog(ex.Message);
                dlg.ShowAsync();
            }
        }
        public DataExtractor(FrameSourceTypes type)
        {
            Sensor = KinectSensor.GetDefault();

            if (Sensor != null)
            {
                Sensor.Open();

                FrameWidthColor  = Sensor.ColorFrameSource.FrameDescription.Width;
                FrameHeightColor = Sensor.ColorFrameSource.FrameDescription.Height;
                FrameWidthDepth  = Sensor.InfraredFrameSource.FrameDescription.Width;
                FrameHeightDepth = Sensor.InfraredFrameSource.FrameDescription.Height;

                this.depthFrameDescription    = Sensor.DepthFrameSource.FrameDescription;
                this.infraredFrameDescription = Sensor.InfraredFrameSource.FrameDescription;

                _pixelsColor = new byte[FrameWidthColor * FrameHeightColor * 4];
                _pixelsInfra = new byte[FrameWidthDepth * FrameHeightDepth];
                _pixelsDepth = new byte[FrameWidthDepth * FrameHeightDepth];

                this.frameSourceType = type;

                bodyCountTotal = Sensor.BodyFrameSource.BodyCount;

                this.FaceTrackers = new FaceTracker[bodyCountTotal];
                this._faces       = new Face[bodyCountTotal];
                this._bodies      = new Body[bodyCountTotal];

                for (int i = 0; i < this.bodyCountTotal; i++)
                {
                    this.FaceTrackers[i] = new FaceTracker(Sensor, 0);
                    this.FaceTrackers[i].Source.TrackingIdLost += HdFaceSource_TrackingIdLost;
                    this._faces[i] = new Face();
                }

                _reader = Sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Body | FrameSourceTypes.Color | FrameSourceTypes.Infrared | FrameSourceTypes.Depth);
                _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
            }
        }
    /// <summary>
    /// Called when KinectSource GameObject is initialised by the Unity engine.
    ///
    /// Sets up the sensor to read multiple sources, obtains coordinate maps that are necessary to use different types of Kinect data together,
    /// and initialises the arrays that store the retrieved data.
    /// </summary>
    void Start()
    {
        kinectSensor = KinectSensor.GetDefault();

        if (kinectSensor != null)
        {
            //Enable reading of multiple sources from kinect data.
            multiSourceReader = kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.BodyIndex | FrameSourceTypes.Body);

            //Obtain coordinate mappings and depth data dimensions.
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;
            FrameDescription depthFrameDescription = kinectSensor.DepthFrameSource.FrameDescription;
            int depthWidth  = depthFrameDescription.Width;
            int depthHeight = depthFrameDescription.Height;

            //Create arrays of the required size to hold scanned data.
            this.depthFrameData     = new ushort[depthWidth * depthHeight];
            this.bodyIndexFrameData = new byte[depthWidth * depthHeight];
            this.colorPoints        = new ColorSpacePoint[depthWidth * depthHeight];
            this.cameraPoints       = new CameraSpacePoint[depthWidth * depthHeight];

            //Create Unity Texture2D in which to store the ColorFrame data.
            FrameDescription colorFrameDescription = kinectSensor.ColorFrameSource.FrameDescription;
            int colorWidth  = colorFrameDescription.Width;
            int colorHeight = colorFrameDescription.Height;
            colorData = new byte[colorWidth * colorHeight * bytesPerPixel];

            this.depthPoints = new DepthSpacePoint[colorWidth * colorHeight];

            //Create array for body pixel indicators.
            this.bodyIndexFrameArray = new int[depthWidth * depthHeight];

            //Accesses Kinect Sensor
            if (!kinectSensor.IsOpen)
            {
                kinectSensor.Open();
            }
        }
    }
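The multi-source reader created in Start() is typically polled each frame rather than handled through an event. A minimal sketch of an Update() method doing that, using only the fields created above; exactly which mappings and buffers a project needs will vary:

    void Update()
    {
        if (multiSourceReader == null)
        {
            return;
        }

        MultiSourceFrame frame = multiSourceReader.AcquireLatestFrame();
        if (frame == null)
        {
            return;
        }

        using (DepthFrame depthFrame = frame.DepthFrameReference.AcquireFrame())
        using (BodyIndexFrame bodyIndexFrame = frame.BodyIndexFrameReference.AcquireFrame())
        {
            if (depthFrame != null && bodyIndexFrame != null)
            {
                depthFrame.CopyFrameDataToArray(depthFrameData);
                bodyIndexFrame.CopyFrameDataToArray(bodyIndexFrameData);

                // Project every depth pixel into color space and into 3D camera space
                coordinateMapper.MapDepthFrameToColorSpace(depthFrameData, colorPoints);
                coordinateMapper.MapDepthFrameToCameraSpace(depthFrameData, cameraPoints);
            }
        }
    }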
Example #34
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            try {
                kinect = KinectSensor.GetDefault();
                if (kinect == null)
                {
                    throw new Exception("Cannot open the Kinect");
                }

                kinect.Open();
                coordinateMapper = kinect.CoordinateMapper;
                multiFrameReader = kinect.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth);
                multiFrameReader.MultiSourceFrameArrived += multiFrameReader_MultiSourceFrameArrived;
                FrameDescription depthFrameDescription = kinect.DepthFrameSource.FrameDescription;
                depthWidth       = depthFrameDescription.Width;
                depthHeight      = depthFrameDescription.Height;
                depthImagePixels = new ushort[depthWidth * depthHeight];

                FrameDescription colorFrameDescription = kinect.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
                colorWidth         = colorFrameDescription.Width;
                colorHeight        = colorFrameDescription.Height;
                colorBytesPerPixel = colorFrameDescription.BytesPerPixel;
                colorImagePixels   = new byte[colorWidth * colorHeight * colorBytesPerPixel];

                drawBitmap = new WriteableBitmap(
                    depthWidth, depthHeight,
                    96, 96, PixelFormats.Bgra32, null);
                drawStride        = depthWidth * (int)colorBytesPerPixel;
                drawRect          = new Int32Rect(0, 0, depthWidth, depthHeight);
                drawBuffer        = new byte[drawStride * depthHeight];
                ImageColor.Source = drawBitmap;

                InitializeFusion();
            }
            catch (Exception ex) {
                MessageBox.Show(ex.Message);
                Close();
            }
        }
        public MainWindow()
        {
            InitializeComponent();

            // Initialize the Kinect

            _sensor = KinectSensor.GetDefault();
            if (_sensor == null)
            {
                statusText = "No Kinect Found";
            }

            if (_sensor != null)
            {
                _sensor.Open();

                _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

                _colorReader = _sensor.ColorFrameSource.OpenReader();
                _colorReader.FrameArrived += ColorReader_FrameArrived;
                _bodyReader = _sensor.BodyFrameSource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // Initialize the face source with its features

                _faceSource = new FaceFrameSource(_sensor, 0, FaceFrameFeatures.BoundingBoxInColorSpace
                                                  | FaceFrameFeatures.FaceEngagement
                                                  | FaceFrameFeatures.Glasses
                                                  | FaceFrameFeatures.Happy
                                                  | FaceFrameFeatures.LeftEyeClosed
                                                  | FaceFrameFeatures.MouthOpen
                                                  | FaceFrameFeatures.PointsInColorSpace
                                                  | FaceFrameFeatures.RightEyeClosed);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;

                FrameDescription frameDescription = _sensor.ColorFrameSource.FrameDescription;
            }
        }
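Face frames only arrive once _faceSource is pointed at a tracked body's TrackingId; BodyReader_FrameArrived is not shown above. A minimal sketch of that binding step (FirstOrDefault needs using System.Linq):

        private void BodyReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            using (BodyFrame frame = e.FrameReference.AcquireFrame())
            {
                if (frame == null)
                {
                    return;
                }

                frame.GetAndRefreshBodyData(_bodies);

                Body body = _bodies.FirstOrDefault(b => b != null && b.IsTracked);
                if (body != null && !_faceSource.IsTrackingIdValid)
                {
                    // Point the face source at the tracked body so face frames start arriving
                    _faceSource.TrackingId = body.TrackingId;
                }
            }
        }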
        /// <summary>
        /// Initializes a new instance of the MainWindow class.
        /// </summary>
        public MainWindow()
        {
            this.kinectSensor = KinectSensor.GetDefault();

            this.multiFrameSourceReader = this.kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Depth | FrameSourceTypes.Color | FrameSourceTypes.BodyIndex);

            this.multiFrameSourceReader.MultiSourceFrameArrived += this.Reader_MultiSourceFrameArrived;

            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            FrameDescription depthFrameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

            int depthWidth  = depthFrameDescription.Width;
            int depthHeight = depthFrameDescription.Height;

            FrameDescription colorFrameDescription = this.kinectSensor.ColorFrameSource.FrameDescription;

            int colorWidth  = colorFrameDescription.Width;
            int colorHeight = colorFrameDescription.Height;

            this.colorMappedToDepthPoints = new DepthSpacePoint[colorWidth * colorHeight];

            this.bitmap = new WriteableBitmap(colorWidth, colorHeight, 96.0, 96.0, PixelFormats.Bgra32, null);

            // Calculate the WriteableBitmap back buffer size
            this.bitmapBackBufferSize = (uint)((this.bitmap.BackBufferStride * (this.bitmap.PixelHeight - 1)) + (this.bitmap.PixelWidth * this.bytesPerPixel));

            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            //this.kinectSensor.Open();

            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            this.DataContext = this;

            this.InitializeComponent();
            this.fileStructure();
        }
        public MainWindow()
        {
            kinect = KinectSensor.GetDefault();
            ColorFrameSource colorFrameSource = kinect.ColorFrameSource;

            colorFrameDesc = colorFrameSource.FrameDescription;
            ColorFrameReader colorFrameReader = colorFrameSource.OpenReader();

            colorFrameReader.FrameArrived += Color_FrameArrived;
            colorBitmap = new WriteableBitmap(colorFrameDesc.Width,
                                              colorFrameDesc.Height,
                                              96.0,
                                              96.0,
                                              PixelFormats.Gray8,
                                              null);

            DataContext = this;

            kinect.Open();

            InitializeComponent();
        }
Example #39
        protected override void OnNavigatedTo( NavigationEventArgs e )
        {
            base.OnNavigatedTo( e );

            try {
                // Open the Kinect
                kinect = KinectSensor.GetDefault();
                kinect.Open();

                // Open the body index reader
                bodyIndexFrameReader = kinect.BodyIndexFrameSource.OpenReader();
                bodyIndexFrameReader.FrameArrived += bodyIndexFrameReader_FrameArrived;

                // Create the data for display
                bodyIndexFrameDesc = kinect.BodyIndexFrameSource.FrameDescription;

                // Bitmap
                bodyIndexColorBitmap = new WriteableBitmap(
                    bodyIndexFrameDesc.Width, bodyIndexFrameDesc.Height );
                ImageBodyIndex.Source = bodyIndexColorBitmap;

                // Buffer for the body index data
                bodyIndexBuffer = new byte[bodyIndexFrameDesc.LengthInPixels];

                // Buffer for turning the body index data into BGRA (color) data
                bodyIndexColorBuffer = new byte[bodyIndexFrameDesc.LengthInPixels *
                                                bodyIndexColorBytesPerPixels];

                // Create the array of colors used for coloring
                bodyIndexColors = new Color[]{
                    Colors.Red, Colors.Blue, Colors.Green,
                    Colors.Yellow, Colors.Pink, Colors.Purple,
                };
            }
            catch ( Exception ex ) {
                MessageDialog dlg = new MessageDialog(ex.Message);
                dlg.ShowAsync();
            }
        }
        /// <summary>
        /// Initializes a new instance of the MainPage class.
        /// </summary>
        public MainPage()
        {
            // initialize the components (controls) of the window
            this.InitializeComponent();

            this.kinectSensor = KinectSensor.GetDefault();

            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            FrameDescription depthFrameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

            int depthWidth  = depthFrameDescription.Width;
            int depthHeight = depthFrameDescription.Height;

            FrameDescription colorFrameDescription = this.kinectSensor.ColorFrameSource.FrameDescription;

            int colorWidth  = colorFrameDescription.Width;
            int colorHeight = colorFrameDescription.Height;

            this.colorMappedToDepthPoints = new DepthSpacePoint[colorWidth * colorHeight];

            this.bitmap = new WriteableBitmap(colorWidth, colorHeight);

            theImage.Source = this.bitmap;

            this.multiSourceFrameReader = this.kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Depth | FrameSourceTypes.Color | FrameSourceTypes.BodyIndex);

            this.multiSourceFrameReader.MultiSourceFrameArrived += this.Reader_MultiSourceFrameArrived;

            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            this.kinectSensor.Open();

            this.StatusText = this.kinectSensor.IsAvailable ? resourceLoader.GetString("RunningStatusText")
                                                            : resourceLoader.GetString("NoSensorStatusText");

            this.DataContext = this;
        }
Example #41
        //Event handler that runs every time a frame is acquired. The screen drawing is done inside this handler.
        void colorReader_FrameArrived(ColorFrameReader sender, ColorFrameArrivedEventArgs args)
        {
            //Declare a variable that records whether a ColorFrame was acquired successfully, initialized to false.
            bool colorFrameProcessed = false;

            //The args parameter holds a reference to the frame.
            //Call AcquireFrame to pull the actual data out of that reference.
            using (ColorFrame colorframe = args.FrameReference.AcquireFrame())
            {
                //Continue only if data was actually acquired.
                if (colorframe != null)
                {
                    //Extract the description of the acquired frame.
                    FrameDescription frameDescription = colorframe.FrameDescription;

                    //Continue if the data matches the format (width/height) specified in the MainPage() constructor.
                    if (frameDescription.Width == this.bitmap.PixelWidth && frameDescription.Height == this.bitmap.PixelHeight)
                    {
                        //If the color frame's image format is BGRA, pass the raw data through as-is
                        if (colorframe.RawColorImageFormat == ColorImageFormat.Bgra)
                        {
                            colorframe.CopyRawFrameDataToBuffer(this.bitmap.PixelBuffer);
                        }
                        //If it is not BGRA, convert it to BGRA before passing the data
                        else
                        {
                            colorframe.CopyConvertedFrameDataToBuffer(this.bitmap.PixelBuffer, ColorImageFormat.Bgra);
                        }
                        colorFrameProcessed = true;
                    }
                }
            }
            if (colorFrameProcessed)
            {
                //Draw (redraw) the whole bitmap
                this.bitmap.Invalidate();
            }
        }
Example #42
        void Initialize()
        {
            try
            {
                Uninitialize();

                // Open the Kinect
                kinect = KinectSensor.GetDefault();
                if (!kinect.IsOpen)
                {
                    kinect.Open();
                }

                // Create the data for display
                depthFrameDesc = kinect.DepthFrameSource.FrameDescription;

                // Create what the display bitmap needs
                depthImage = new WriteableBitmap(
                    depthFrameDesc.Width, depthFrameDesc.Height,
                    96, 96, PixelFormats.Gray8, null);
                depthBuffer       = new ushort[depthFrameDesc.LengthInPixels];
                depthBitmapBuffer = new byte[depthFrameDesc.LengthInPixels];
                depthRect         = new Int32Rect(0, 0,
                                                  depthFrameDesc.Width, depthFrameDesc.Height);
                depthStride = (int)(depthFrameDesc.Width);

                ImageDepth.Source = depthImage;

                // Open the depth reader
                depthFrameReader = kinect.DepthFrameSource.OpenReader();
                depthFrameReader.FrameArrived += depthFrameReader_FrameArrived;
            }
            catch (Exception ex)
            {
                Uninitialize();
                MessageBox.Show(ex.Message);
            }
        }
Example #43
    void Start()
    {
        sensor = KinectSensor.GetDefault();

        if (sensor != null)
        {
            reader = sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth);

            coordinateMapper = sensor.CoordinateMapper;

            FrameDescription depthFrameDesc = sensor.DepthFrameSource.FrameDescription;
            depthWidth  = depthFrameDesc.Width;
            depthHeight = depthFrameDesc.Height;

            depthData         = new ushort[depthFrameDesc.LengthInPixels];
            depthColorData    = new byte[depthFrameDesc.LengthInPixels * 4]; // 4 BPP for RGBA
            depthZoneData     = new byte[depthFrameDesc.LengthInPixels * 4];
            cameraSpacePoints = new CameraSpacePoint[depthFrameDesc.LengthInPixels];

            textureDepthOriginal = new Texture2D(depthWidth, depthHeight, TextureFormat.RGBA32, false);
            textureDepthModified = new Texture2D(depthWidth, depthHeight, TextureFormat.RGBA32, false);


            riDepth.texture         = textureDepthOriginal;
            riDepthModified.texture = textureDepthModified;

            if (!sensor.IsOpen)
            {
                sensor.Open();
            }
        }
        else
        {
            Debug.LogError("Can't find Kinect Sensor.");
        }

        SetupGreyLookup();
    }
        void Initialize()
        {
            try
            {
                Uninitialize();

                // Open the Kinect
                kinect = KinectSensor.GetDefault();
                if (!kinect.IsOpen)
                {
                    kinect.Open();
                }

                // Get the infrared image description
                infraredFrameDesc = kinect.InfraredFrameSource.FrameDescription;

                // Open the infrared reader
                infraredFrameReader = kinect.InfraredFrameSource.OpenReader();
                infraredFrameReader.FrameArrived += infraredFrameReader_FrameArrived;

                // Create what the display bitmap needs
                infraredBuffer = new ushort[infraredFrameDesc.LengthInPixels];
                infraredBitmap = new WriteableBitmap(
                    infraredFrameDesc.Width, infraredFrameDesc.Height,
                    96, 96, PixelFormats.Gray16, null);
                infraredRect = new Int32Rect(0, 0,
                                             infraredFrameDesc.Width, infraredFrameDesc.Height);
                infraredStride = infraredFrameDesc.Width *
                                 (int)infraredFrameDesc.BytesPerPixel;

                ImageInfrared.Source = infraredBitmap;
            }
            catch (Exception ex)
            {
                Uninitialize();
                MessageBox.Show(ex.Message);
            }
        }
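
The infraredFrameReader_FrameArrived handler is likewise not part of this example. A minimal sketch, assuming the fields created above; because the bitmap format is Gray16 and the infrared data is already 16-bit, the copied buffer can be written to the bitmap without any conversion:

        void infraredFrameReader_FrameArrived(object sender, InfraredFrameArrivedEventArgs e)
        {
            using (InfraredFrame infraredFrame = e.FrameReference.AcquireFrame())
            {
                if (infraredFrame == null)
                {
                    return;
                }

                // 16-bit infrared intensities go straight into the Gray16 bitmap
                infraredFrame.CopyFrameDataToArray(infraredBuffer);
                infraredBitmap.WritePixels(infraredRect, infraredBuffer, infraredStride, 0);
            }
        }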
Ejemplo n.º 45
0
        /// <summary>
        /// Color frame arrived event handler
        /// </summary>
        private void Reader_ColorFrameArrived(object sender, ColorFrameArrivedEventArgs e)
        {
            using (ColorFrame colorFrame = e.FrameReference.AcquireFrame())
            {
                if (colorFrame == null)
                {
                    return;
                }

                FrameDescription colorFrameDescription = colorFrame.FrameDescription;
                using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
                {
                    //colorBitmap.Lock(); // reserve the back buffer for updating

                    //if ((colorFrameDescription.Width == colorBitmap.PixelWidth) && (colorFrameDescription.Height == colorBitmap.PixelHeight))
                    //{
                    //    colorFrame.CopyConvertedFrameDataToIntPtr(colorBitmap.BackBuffer, (uint)(colorFrameDescription.Width * colorFrameDescription.Height * 4), ColorImageFormat.Bgra);
                    //    colorBitmap.AddDirtyRect(new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight));

                    //}

                    //colorBitmap.Unlock(); // release the back buffer so it can be used for display again

                    //BitmapData bmpShowData = bmpShow.LockBits(new Rectangle(0, 0, colorFrameDescription.Width, colorFrameDescription.Height),
                    //    ImageLockMode.WriteOnly,System.Drawing.Imaging.PixelFormat.Format32bppRgb);
                    //colorFrame.CopyConvertedFrameDataToIntPtr(bmpShowData.Scan0, (uint)(colorFrameDescription.Width * colorFrameDescription.Height * 4),
                    //    ColorImageFormat.Bgra);
                    //bmpShow.UnlockBits(bmpShowData);
                    //bmpShow.RotateFlip(RotateFlipType.Rotate180FlipY); // mirror the image left/right about the Y axis for the PictureBox

                    ////Graphics gDraw = Graphics.FromImage(bmpShow);    // drawing test
                    ////System.Drawing.Pen penSamp = new System.Drawing.Pen(System.Drawing.Color.Blue,(float)10.0);
                    ////gDraw.DrawLine(penSamp, 0, 0, 500, 500);

                    //picUser.Image = bmpShow;
                }
            }
        }
Ejemplo n.º 46
0
        /*
         * The infrared frame is great for computer vision algorithms where texture is important, such as facial recognition.
         * Data is stored as 16-bit unsigned integers.
         * The infrared frame is also great for green screening, tracking reflective markers, and filtering out low-return (and therefore jittery) depth pixels.
         * Note that the infrared frame is derived from the same sensor as depth, so the images are perfectly aligned.
         * For example, the infrared pixel at row 5 col 9 goes with the depth pixel at row 5 col 9.
         */
        private void updateIREvent(object sender, InfraredFrameArrivedEventArgs e)
        {
            // InfraredFrame is IDisposable
            using (InfraredFrame frame = e.FrameReference.AcquireFrame())
            {
                if (frame != null)
                {
                    FrameDescription frameDescription = frame.FrameDescription;
                    ushort[]         frameData        = new ushort[frameDescription.Width * frameDescription.Height];

                    WriteableBitmap Bitmap = BitmapFactory.New(frameDescription.Width, frameDescription.Height);
                    frame.CopyFrameDataToArray(frameData);
                    for (int y = 0; y < frameDescription.Height; y++)
                    {
                        for (int x = 0; x < frameDescription.Width; x++)
                        {
                            int   index          = y * frameDescription.Width + x;
                            // 1. dividing the incoming value by the source maximum value
                            float intensityRatio = (float)frameData[index] / InfraredSourceValueMaximum;

                            // 2. dividing by the (average scene value * standard deviations)
                            intensityRatio /= InfraredSceneValueAverage * InfraredSceneStandardDeviations;

                            // 3. limiting the value to InfraredOutputValueMaximum
                            intensityRatio = Math.Min(InfraredOutputValueMaximum, intensityRatio);

                            // 4. limiting the lower value InfraredOutputValueMinimum
                            intensityRatio = Math.Max(InfraredOutputValueMinimum, intensityRatio);

                            // 5. converting the normalized value to a byte and using the result
                            // as the RGB components required by the image
                            byte intensity = (byte)(intensityRatio * 255.0f);
                            Bitmap.SetPixel(x, y, 255, intensity, intensity, intensity);
                        }
                    }
                    SendImage(Bitmap, "kinectir");
                }
            }
        }
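
The normalization constants referenced in updateIREvent are not shown in this example. The values below follow the scaling scheme of Microsoft's InfraredBasics sample and are an assumption; treat them as tunable parameters rather than fixed SDK constants:

        // Assumed normalization parameters for the infrared display (not part of the original snippet)
        private const float InfraredSourceValueMaximum      = (float)ushort.MaxValue; // raw IR data is 16-bit
        private const float InfraredSceneValueAverage       = 0.08f;                  // assumed average scene infrared reflectivity
        private const float InfraredSceneStandardDeviations = 3.0f;                   // how far above the average to stretch the range
        private const float InfraredOutputValueMinimum      = 0.01f;                  // lower clamp of the normalized output
        private const float InfraredOutputValueMaximum      = 1.0f;                   // upper clamp of the normalized output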
Ejemplo n.º 47
0
        public MainWindow()
        {
            this.kinectSensor = KinectSensor.GetDefault();

            this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader();
            this.depthFrameReader = this.kinectSensor.DepthFrameSource.OpenReader();

            this.colorFrameReader.FrameArrived += this.Reader_ColorFrameArrived;
            this.depthFrameReader.FrameArrived += this.Reader_DepthFrameArrived;

            this.depthFrameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;
            this.colorFrameDescription = this.kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);

            this.depthPixels = new byte[this.depthFrameDescription.Width * this.depthFrameDescription.Height];

            this.colorSpacePoints = new ColorSpacePoint[this.depthFrameDescription.Width * this.depthFrameDescription.Height];
            this.allColorPixels   = new byte[this.colorFrameDescription.Width * this.colorFrameDescription.Height * 4];
            this.pointData        = new byte[this.depthFrameDescription.Width * this.depthFrameDescription.Height * 5];

            this.depthBitmap = new WriteableBitmap(this.depthFrameDescription.Width, this.depthFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray8, null);
            this.colorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            this.kinectSensor.Open();

            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;
            this.depthIndexToColorIndex = new int[this.depthFrameDescription.Width * this.depthFrameDescription.Height];
            this.DataContext            = this;

            this.communicationServer = new ServerCommunication(GetPointDataForNetwork, GetDepthTableForNetwork);
            this.communicationServer.Start();

            MyIP = Dns.GetHostEntry(Dns.GetHostName()).AddressList[3].ToString(); // NOTE: assumes the fourth entry in AddressList is the machine's desired IPv4 address

            this.InitializeComponent();
        }
Ejemplo n.º 48
0
        private void colorFrameReader_FrameArrived(object sender, ColorFrameArrivedEventArgs e)
        {
            // ColorFrame is IDisposable
            using (ColorFrame colorFrame = e.FrameReference.AcquireFrame())
            {
                if (colorFrame != null)
                {
                    FrameDescription colorFrameDescription = colorFrame.FrameDescription;

                    using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
                    {
                        this.colorBitmap.Lock();

                        // verify data and write the new color frame data to the display bitmap
                        if ((colorFrameDescription.Width == this.colorBitmap.PixelWidth) && (colorFrameDescription.Height == this.colorBitmap.PixelHeight))
                        {
                            if ((bool)chk_skin.IsChecked)
                            {
                                var handLeft  = coordinateMapper.MapCameraPointToColorSpace(leftHandPostition);
                                var handRight = coordinateMapper.MapCameraPointToColorSpace(rightHandPostition);

                                colorFrame.CopyConvertedFrameDataToArray(colorFrameData, ColorImageFormat.Bgra);
                                getSkinColor(handLeft, handRight);
                            }

                            colorFrame.CopyConvertedFrameDataToIntPtr(
                                this.colorBitmap.BackBuffer,
                                (uint)(colorFrameDescription.Width * colorFrameDescription.Height * BytesPerPixel),
                                ColorImageFormat.Bgra); //32Bit(4Byte)

                            this.colorBitmap.AddDirtyRect(new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight));
                        }

                        this.colorBitmap.Unlock();
                    }
                }
            }
        }
Ejemplo n.º 49
0
    private byte[] textureData; // buffer for the color data written to the texture

    // Runs only once at startup: initialization
    void Start()
    {
        // Get the Kinect sensor
        sensor = KinectSensor.GetDefault();

        // Open a reader that handles multiple data sources (BodyIndex, Color, Depth)
        reader = sensor.OpenMultiSourceFrameReader(FrameSourceTypes.BodyIndex
                                                   | FrameSourceTypes.Color
                                                   | FrameSourceTypes.Depth);

        // Get the coordinate mapper for converting between the different data spaces
        mapper = sensor.CoordinateMapper;

        // Get the BodyIndex frame information and create a buffer to receive its data
        bodyIndexDescription = sensor.BodyIndexFrameSource.FrameDescription;
        bodyIndexData        = new byte[bodyIndexDescription.Width * bodyIndexDescription.Height];

        // Get the Color frame information (converted to RGBA) and create a buffer to receive its data
        colorDescription = sensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Rgba);
        colorData        = new byte[colorDescription.Width * colorDescription.Height * colorDescription.BytesPerPixel];

        // Get the Depth frame information and create a buffer to receive its data
        depthDescription = sensor.DepthFrameSource.FrameDescription;
        depthData        = new ushort[depthDescription.Width * depthDescription.Height];

        // Create the texture and its data buffer to match the BodyIndex resolution
        texture     = new Texture2D(bodyIndexDescription.Width, bodyIndexDescription.Height, TextureFormat.RGBA32, false);
        textureData = new byte[texture.width * texture.height * 4];

        // Assign the created texture to the RawImage
        rawImage.texture = texture;

        // Open the sensor
        if (!sensor.IsOpen)
        {
            sensor.Open();
        }
    }
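
The per-frame processing that pairs with this Start() is not shown in the example. Below is a minimal sketch of an Update(), assuming the field names above are reused, that a hypothetical colorPoints field caches the depth-to-color mapping buffer, and that the goal is a simple green-screen texture at the BodyIndex resolution (pixels belonging to a tracked body keep their color, everything else becomes transparent):

    // Hypothetical mapping buffer, allocated once and reused every frame
    private ColorSpacePoint[] colorPoints;

    void Update()
    {
        var multiFrame = reader.AcquireLatestFrame();
        if (multiFrame == null)
        {
            return;
        }

        // Copy the three data streams for this frame
        using (var bodyIndexFrame = multiFrame.BodyIndexFrameReference.AcquireFrame())
        using (var colorFrame = multiFrame.ColorFrameReference.AcquireFrame())
        using (var depthFrame = multiFrame.DepthFrameReference.AcquireFrame())
        {
            if (bodyIndexFrame == null || colorFrame == null || depthFrame == null)
            {
                return;
            }

            bodyIndexFrame.CopyFrameDataToArray(bodyIndexData);
            colorFrame.CopyConvertedFrameDataToArray(colorData, ColorImageFormat.Rgba);
            depthFrame.CopyFrameDataToArray(depthData);
        }

        // Map every depth pixel into color space (BodyIndex and Depth share the same resolution)
        if (colorPoints == null)
        {
            colorPoints = new ColorSpacePoint[depthData.Length];
        }
        mapper.MapDepthFrameToColorSpace(depthData, colorPoints);

        int colorWidth  = colorDescription.Width;
        int colorHeight = colorDescription.Height;

        for (int i = 0; i < bodyIndexData.Length; i++)
        {
            int cx = (int)colorPoints[i].X;
            int cy = (int)colorPoints[i].Y;

            // 255 means "no tracked body" in the BodyIndex frame
            bool onBody = bodyIndexData[i] != 255 &&
                          cx >= 0 && cx < colorWidth && cy >= 0 && cy < colorHeight;

            if (onBody)
            {
                int colorIndex = (cy * colorWidth + cx) * 4;
                textureData[i * 4 + 0] = colorData[colorIndex + 0]; // R
                textureData[i * 4 + 1] = colorData[colorIndex + 1]; // G
                textureData[i * 4 + 2] = colorData[colorIndex + 2]; // B
                textureData[i * 4 + 3] = 255;                       // opaque on the body
            }
            else
            {
                textureData[i * 4 + 3] = 0; // transparent everywhere else
            }
        }

        texture.LoadRawTextureData(textureData);
        texture.Apply();
    }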
Ejemplo n.º 50
0
        // Called from the main window
        public UserFrameManager(KinectSensor sensor)
        {
            UserManager.Users = new Dictionary<ulong, int>();
            kinectSensor      = sensor;

            userouttimer.Interval = new TimeSpan(0, ConfigParams.refreshtime, 0);
            userouttimer.Tick    += userouttimer_Tick;

            // Initialize the body frame reader
            BodyFrameReader bodyFrameReader = kinectSensor.BodyFrameSource.OpenReader();

            UserManager.bodies            = new Body[6]; // Kinect v2 tracks at most six bodies
            bodyFrameReader.FrameArrived += Reader_FrameArrived;
            FrameDescription frameDescription = kinectSensor.DepthFrameSource.FrameDescription;

            // Initialize the user view
            userView = new UserView(sensor, frameDescription.Width, frameDescription.Height);
            main     = (MainWindow)App.Current.MainWindow;
            main.UserView.DataContext = userView;

            // Hook up the user presence events
            UserIn   += OnUserIn;
            NewUser  += OnNewUser;
            LostUser += OnLostUser;
            NoUser   += OnNoUser;
            int maxBodies = sensor.BodyFrameSource.BodyCount;

            handRaiseList = new List <HandRaiseDetector>(maxBodies);
            for (int i = 0; i < maxBodies; i++)
            {
                HandRaiseDetector hrd = new HandRaiseDetector();
                handRaiseList.Add(hrd);
                hrd.HandRaise += hrd_HandRaise;
            }
            gestureDetector            = new GestureDetector(kinectSensor);
            handPointer                = new HandPointer();
            handPointer.DisengageUser += handPointer_DisengageUser;
        }
        protected override void OnNavigatedTo( NavigationEventArgs e )
        {
            base.OnNavigatedTo( e );

            try {
                kinect = KinectSensor.GetDefault();
                if ( kinect == null ) {
                    throw new Exception( "Cannot open the Kinect" );
                }

                kinect.Open();

                // Create the data used for display
                depthFrameDesc = kinect.DepthFrameSource.FrameDescription;

                // Bitmap for display
                depthBitmap = new WriteableBitmap( depthFrameDesc.Width, depthFrameDesc.Height );
                ImageDepth.Source = depthBitmap;

                // Buffer for the depth data
                depthBuffer = new ushort[depthFrameDesc.LengthInPixels];

                // Buffer for converting the depth data into BGRA (color) data
                depthBitmapBuffer = new byte[depthFrameDesc.LengthInPixels * 4];

                // Position at which the distance is displayed
                depthPoint = new Point( depthFrameDesc.Width / 2, depthFrameDesc.Height / 2 );


                // Open the depth reader
                depthFrameReader = kinect.DepthFrameSource.OpenReader();
                depthFrameReader.FrameArrived += depthFrameReader_FrameArrived;
            }
            catch ( Exception ex ) {
                MessageDialog dlg = new MessageDialog(ex.Message);
                dlg.ShowAsync();
            }
        }
Ejemplo n.º 52
0
        void colorFrameReader_FrameArrived(ColorFrameReader sender, ColorFrameArrivedEventArgs args)
        {
            bool colorFrameProcessed = false;

            // ColorFrame is IDisposable, so we wrap it in a using block and acquire the frame that is arriving at this instant
            using (ColorFrame colorFrame = args.FrameReference.AcquireFrame())
            {
                if (colorFrame != null)
                {
                    FrameDescription colorFrameDescription = colorFrame.FrameDescription;

                    // verify that the colorFrame data has the size we expect
                    if ((colorFrameDescription.Width == this.bitmap.PixelWidth) && (colorFrameDescription.Height == this.bitmap.PixelHeight))
                    {
                        if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra) // verify the data is already in the format we expect
                        {
                            colorFrame.CopyRawFrameDataToArray(this.colorPixels);
                        }
                        else
                        {
                            colorFrame.CopyConvertedFrameDataToArray(this.colorPixels, ColorImageFormat.Bgra); // otherwise convert it to BGRA
                        }

                        // New code starts here
                        ChangeColorPixels();
                        // Ends here

                        colorPixels.CopyTo(this.bitmap.PixelBuffer); // copy the bytes that represent the image into the bitmap's buffer
                        colorFrameProcessed = true;
                    }
                }
            }
            // everything went well, so tell the bitmap to redraw itself
            if (colorFrameProcessed)
            {
                this.bitmap.Invalidate();
            }
        }
Ejemplo n.º 53
0
        // Build a 1920 x 1080 byte array
        // (the ushort depth data must be converted to bytes: a 16-bit to 8-bit conversion)
        /// <summary>
        /// Initializes a new instance of the MainWindow class.
        /// </summary>
        public MainWindow()
        {
            this._kinectSensor = KinectSensor.GetDefault();

            this._multiFrameSourceReader = this._kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Depth | FrameSourceTypes.Color);

            this._multiFrameSourceReader.MultiSourceFrameArrived += this.Reader_MultiSourceFrameArrived;

            this._coordinateMapper = this._kinectSensor.CoordinateMapper;

            FrameDescription depthFrameDescription = this._kinectSensor.DepthFrameSource.FrameDescription;

            int depthWidth  = depthFrameDescription.Width;
            int depthHeight = depthFrameDescription.Height;

            FrameDescription colorFrameDescription = this._kinectSensor.ColorFrameSource.FrameDescription;

            int colorWidth  = colorFrameDescription.Width;
            int colorHeight = colorFrameDescription.Height;

            this._colorMappedToDepthPoints = new DepthSpacePoint[colorWidth * colorHeight];

            this._colorBitmap = new WriteableBitmap(colorWidth, colorHeight, 96.0, 96.0, PixelFormats.Bgra32, null);

            this._depthBitmap = new WriteableBitmap(colorWidth, colorHeight, 96.0, 96.0, PixelFormats.Gray8, null);

            // Calculate the WriteableBitmap back buffer size
            this._bitmapBackBufferSize = (uint)((this._colorBitmap.BackBufferStride * (this._colorBitmap.PixelHeight - 1)) + (this._colorBitmap.PixelWidth * this._bytesPerPixel));

            this._kinectSensor.Open();

            var bitmapUri = new Uri("pack://application:,,,/Images/binary.bmp"); // absolute pack URI to the image resource

            _headerBitmap = new BitmapImage(bitmapUri);

            this.DataContext = this;
            this.InitializeComponent();
        }
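
The comment at the top of this constructor refers to converting the 16-bit depth values into a byte array before they can be rendered into the Gray8 bitmap. A hedged sketch of that conversion, using a hypothetical DepthToByte helper and the commonly quoted 500-4500 mm reliable range of the Kinect v2 depth sensor:

        // Hypothetical helper: compress one 16-bit depth reading (millimeters) into a gray byte
        private static byte DepthToByte(ushort depthMillimeters, ushort minReliable = 500, ushort maxReliable = 4500)
        {
            if (depthMillimeters < minReliable || depthMillimeters > maxReliable)
            {
                return 0; // out-of-range readings rendered as black
            }

            // Linear mapping of the reliable range onto 1..255
            return (byte)(1 + (depthMillimeters - minReliable) * 254 / (maxReliable - minReliable));
        }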
Ejemplo n.º 54
0
        /// <summary>
        /// Processes the color image.
        /// </summary>
        /// <param name="frame">The frame.</param>
        private void ProcessColorImage(ColorFrame frame)
        {
            if (updateColorTexture)
            {
                return;
            }

            // ColorFrameSource in yuy2 format
            if (frame != null)
            {
                FrameDescription frameDescription = frame.FrameDescription;

                using (KinectBuffer buffer = frame.LockRawImageBuffer())
                {
                    // Check resolution
                    if (frameDescription.Width == this.colorTexture.Width && frameDescription.Height == this.colorTexture.Height)
                    {
                        frame.CopyConvertedFrameDataToIntPtr(this.colorTexturePointer, (uint)this.colorPointerSize, ColorImageFormat.Rgba);
                        this.updateColorTexture = true;
                    }
                }
            }
        }
Ejemplo n.º 55
0
        /// <summary>
        /// Initialize the Kinect sensor and related resources
        /// </summary>
        private void InitKinect()
        {
            kinectSensor = KinectSensor.GetDefault(); // get the default Kinect

            // Color frame initialization
            colorFrameReader = kinectSensor.ColorFrameSource.OpenReader();
            colorFrameReader.FrameArrived += Reader_ColorFrameArrived; // color frame event
            FrameDescription colorFrameDescription = kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);

            colorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);
            bmpShow     = new Bitmap(colorFrameDescription.Width, colorFrameDescription.Height, System.Drawing.Imaging.PixelFormat.Format32bppRgb);

            // Body frame initialization
            coordinateMapper = kinectSensor.CoordinateMapper;
            FrameDescription depthFrameDescription = kinectSensor.DepthFrameSource.FrameDescription;

            depthDisplayWidth             = depthFrameDescription.Width;
            depthDisplayHeight            = depthFrameDescription.Height;
            bodyFrameReader               = kinectSensor.BodyFrameSource.OpenReader(); // open the body frame reader
            bodyFrameReader.FrameArrived += bodyFrameReader_FrameArrived;              // body frame event

            kinectSensor.Open();                                                       // open the Kinect sensor
        }
Ejemplo n.º 56
0
        public void initKinect()
        {
            this.kinectSensor = KinectSensor.GetDefault();

            this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader();

            // wire handler for frame arrival
            this.colorFrameReader.FrameArrived += this.Reader_ColorFrameArrived;

            // create the colorFrameDescription from the ColorFrameSource using Bgra format
            FrameDescription colorFrameDescription = this.kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);

            // create the bitmap to display
            this.colorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);


            this.kinectSensor.Open();
            this.frameReader = this.kinectSensor.InfraredFrameSource.OpenReader();

            bodyFrameHandler = new BodyFrameHandler(this.kinectSensor, this);
            // faceFrameHandler = new FaceFrameHandler(this.kinectSensor);
            volumeHandler = new VolumeHandler(this.kinectSensor);
        }
Ejemplo n.º 57
0
        private static bool MustConvertContent(string srcFrameId, Version src, Version dst)
        {
            string dstFrameId = ConvertFrameId(srcFrameId, src, dst);

            TagDescription srcMap = TagDescriptionMap.Instance[src];
            TagDescription dstMap = TagDescriptionMap.Instance[dst];

            FrameDescription srcDesc = srcMap[srcFrameId];
            FrameDescription dstDesc = dstMap[dstFrameId];

            if (srcDesc.Type != dstDesc.Type)
            {
                return(true);
            }

            if (srcDesc.Type != FrameDescription.FrameType.Binary &&
                !srcMap.ContentClass(srcDesc.Type).Equals(dstMap.ContentClass(dstDesc.Type)))
            {
                return(true);
            }

            return(false);
        }
Ejemplo n.º 58
0
        public override void MainPage_Loaded(object sender, RoutedEventArgs e)
        {
            this.sensor = KinectSensor.GetDefault();

            FrameDescription coloredFrameDescription =
                this.sensor.ColorFrameSource.FrameDescription;

            this._multiSourceFrameReader = this.sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Body | FrameSourceTypes.BodyIndex);

            this._multiSourceFrameReader.MultiSourceFrameArrived +=
                this.Reader_ColorFrameArrived;

            if (sensor != null)
            {
                sensor.Open();

                if (sensor.IsOpen)
                {
                }
            }
        }
Ejemplo n.º 59
0
        public MainPage()
        {
            // get the kinectSensor object
            this.kinectSensor = KinectSensor.GetDefault();
            // get the depthFrameDescription from the DepthFrameSource
            FrameDescription depthFrameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

            // open the reader for the depth frames
            this.depthFrameReader = this.kinectSensor.DepthFrameSource.OpenReader();
            // wire handler for frame arrival
            this.depthFrameReader.FrameArrived += this.Reader_DepthFrameArrived;
            // allocate space to put the pixels being received and converted
            this.depthFrameData = new ushort[depthFrameDescription.Width * depthFrameDescription.Height];                    // here we store the raw distances
            this.depthPixels    = new byte[depthFrameDescription.Width * depthFrameDescription.Height * this.bytesPerPixel]; // here we store the values converted to BGRA color

            // create the bitmap to display
            this.bitmap = new WriteableBitmap(depthFrameDescription.Width, depthFrameDescription.Height);
            // open the sensor
            this.kinectSensor.Open();
            // initialize the components (controls) of the window
            this.InitializeComponent();
            theImage.Source = this.bitmap;
        }
Ejemplo n.º 60
0
        public BodyAnalysis(KinectSensor sensor)
        {
            _sensor = sensor;
            FrameDescription depthFrameDescription = _sensor.DepthFrameSource.FrameDescription;

            _depthWidth = depthFrameDescription.Width;
            int depthHeight = depthFrameDescription.Height;

            // allocate space to put the pixels being received and converted
            this.depthFrameData = new ushort[_depthWidth * depthHeight];
            //_msReader = reader;
            //_msReader.MultiSourceFrameArrived += _msReader_MultiSourceFrameArrived;
            //reader.FrameArrived += reader_FrameArrived;
            _coordinateMapper         = _sensor.CoordinateMapper;
            _faceSource               = new FaceFrameSource(_sensor, 0,
                                            FaceFrameFeatures.BoundingBoxInColorSpace |
                                            FaceFrameFeatures.BoundingBoxInInfraredSpace |
                                            FaceFrameFeatures.FaceEngagement |
                                            FaceFrameFeatures.Glasses |
                                            FaceFrameFeatures.Happy |
                                            FaceFrameFeatures.LeftEyeClosed |
                                            FaceFrameFeatures.LookingAway |
                                            FaceFrameFeatures.MouthMoved |
                                            FaceFrameFeatures.MouthOpen |
                                            FaceFrameFeatures.PointsInColorSpace |
                                            FaceFrameFeatures.PointsInInfraredSpace |
                                            FaceFrameFeatures.RightEyeClosed |
                                            FaceFrameFeatures.RotationOrientation);
            _faceReader               = _faceSource.OpenReader();
            _faceReader.FrameArrived += _faceReader_FrameArrived;
            _faceAlignment            = new FaceAlignment();

            _hdSource = new HighDefinitionFaceFrameSource(_sensor);
            _hdReader = _hdSource.OpenReader();
            _hdReader.FrameArrived += _hdReader_FrameArrived;
        }