Exemplo n.º 1
0
        /// <summary>
        /// Private constructor (singleton-style): acquires the default Kinect sensor,
        /// opens a multi-source reader for depth, color and body streams, and allocates
        /// the bitmaps/buffers the frame-arrived handler writes into. The sensor is
        /// opened last, after all targets are ready.
        /// </summary>
        private KinectStreamer()
        {
            KinectStreamerConfig = new KinectStreamerConfig();

            kinectSensor = KinectSensor.GetDefault();

            CoordinateMapper = kinectSensor.CoordinateMapper;

            multiSourceFrameReader = kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Depth | FrameSourceTypes.Color | FrameSourceTypes.Body);

            multiSourceFrameReader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

            ColorFrameDescription = kinectSensor.ColorFrameSource.FrameDescription;

            DepthFrameDescription = kinectSensor.DepthFrameSource.FrameDescription;

            // Gray8 = 1 byte per depth pixel; Bgr32 = 4 bytes per color pixel.
            depthBitmap = new WriteableBitmap(DepthFrameDescription.Width, DepthFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray8, null);

            colorBitmap = new WriteableBitmap(ColorFrameDescription.Width, ColorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

            // Size of the full color back buffer: stride covers all rows but the last,
            // plus the last row's visible pixels.
            bitmapBackBufferSize = (uint)((colorBitmap.BackBufferStride * (colorBitmap.PixelHeight - 1)) + (colorBitmap.PixelWidth * this.bytesPerPixel));

            // NOTE(review): sized at Width*Height (1 byte per pixel) while colorBitmap
            // is Bgr32 (4 bytes per pixel) — confirm the frame handler does not copy
            // 4-byte pixels into this buffer.
            colorPixels = new byte[ColorFrameDescription.Width * ColorFrameDescription.Height];

            depthPixels = new byte[DepthFrameDescription.Width * DepthFrameDescription.Height];

            // Raw 16-bit depth values, one per depth pixel.
            depthArray = new ushort[DepthFrameDescription.Width * DepthFrameDescription.Height];

            SetupBody();

            kinectSensor.Open();
        }
Exemplo n.º 2
0
        /// <summary>
        /// Creates a recorder that writes Kinect frame data to <paramref name="filename"/>.
        /// Opens the sensor if necessary, wires up body and multi-source readers, and
        /// writes the serialized metadata as the first message in the file.
        /// </summary>
        /// <param name="filename">Path of the recording file (created, truncating any existing file).</param>
        /// <param name="dispatcher">Dispatcher stored for later use by this recorder.</param>
        public Recorder(string filename, Dispatcher dispatcher)
        {
            if (!sensor.IsOpen)
            {
                sensor.Open();
            }

            // NOTE(review): sensor startup may not complete synchronously, so IsOpen
            // can still be false right after Open() — confirm there is a retry path
            // when the error branch below is hit.
            if (sensor.IsOpen)
            {
                bodyReader = sensor.BodyFrameSource.OpenReader();
                colorReader = sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Body | FrameSourceTypes.Depth | FrameSourceTypes.Color);

                logger.Trace("Kinect sensor is open");
            }
            else
            {
                logger.Error("Kinect sensor is not open");
            }

            metadata = MetadataFactory.Create();

            // FileMode.Create truncates any existing file at this path.
            fileStream = new FileStream(filename, FileMode.Create);
            AppendMessageToFileStream(metadata.Serialize());

            this.dispatcher = dispatcher;
        }
        /// <summary>
        /// Initializes the page, acquires the default Kinect sensor and, when one is
        /// present, wires up the multi-source reader, the gesture controller and one
        /// gesture detector per trackable body.
        /// </summary>
        public MainPage()
        {
            InitializeComponent();

            _navigationHelper = new NavigationHelper(this);

            _sensor = KinectSensor.GetDefault();

            // Initialize the gesture detection objects for our gestures.
            // Kept unconditional so the list is never null, even without a sensor.
            this.gestureDetectorList = new List<GestureDetector>();

            if (_sensor != null)
            {
                _sensor.Open();

                _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body);
                _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

                _gestureController = new GestureController();
                _gestureController.GestureRecognized += GestureController_GestureRecognized;

                // BUG FIX: this section previously ran unconditionally and dereferenced
                // _sensor.BodyFrameSource even when no sensor was available, throwing a
                // NullReferenceException. It now runs only when a sensor exists.
                // Create a gesture detector for each body (6 bodies => 6 detectors).
                int maxBodies = this._sensor.BodyFrameSource.BodyCount;
                for (int i = 0; i < maxBodies; ++i)
                {
                    GestureResultView result =
                         new GestureResultView(i, false, false, 0.0f);
                    GestureDetector detector =
                        new GestureDetector(this._sensor, result);
                    result.PropertyChanged += GestureResult_PropertyChanged;
                    this.gestureDetectorList.Add(detector);
                }
            }
        }
Exemplo n.º 4
0
        /// <summary>
        /// Primary function. Runs when the window loads in: opens the default Kinect
        /// sensor (if present), the multi-source and body readers, and a face reader
        /// configured with the features this window consumes.
        /// </summary>
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _sensor.Open();

                // One slot per trackable body.
                _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

                _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body);
                _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

                _bodyReader = _sensor.BodyFrameSource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // 2) Initialize the face source with the desired features
                // (tracking id 0 here; presumably updated elsewhere once a body is
                // tracked — verify against the body-frame handler).
                _faceSource = new FaceFrameSource(_sensor, 0, FaceFrameFeatures.BoundingBoxInColorSpace |
                                                              FaceFrameFeatures.FaceEngagement |
                                                              FaceFrameFeatures.Glasses |
                                                              FaceFrameFeatures.Happy |
                                                              FaceFrameFeatures.LeftEyeClosed |
                                                              FaceFrameFeatures.MouthOpen |
                                                              FaceFrameFeatures.PointsInColorSpace |
                                                              FaceFrameFeatures.RightEyeClosed);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;
            }
        }
Exemplo n.º 5
0
        /// <summary>
        /// Initializes the page: opens a multi-source reader for infrared, color and
        /// depth frames, then opens the sensor. DataContext is assigned before
        /// InitializeComponent so bindings resolve against this instance.
        /// </summary>
        public MainPage()
        {
            // one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            SetupCurrentDisplay(DEFAULT_DISPLAYFRAMETYPE);

            this.multiSourceFrameReader =
                this.kinectSensor.OpenMultiSourceFrameReader(
                 FrameSourceTypes.Infrared
                 | FrameSourceTypes.Color
                 | FrameSourceTypes.Depth);

            this.multiSourceFrameReader.MultiSourceFrameArrived +=
                this.Reader_MultiSourceFrameArrived;

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // open the sensor
            this.kinectSensor.Open();

            this.InitializeComponent();
        }
        /// <summary>
        /// Initializes the window, then connects the active Kinect sensor and
        /// prepares its frame reader via the helper methods.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            _sensor = connectActiveSensor();
            _reader = getReaderReady();
        }
        /// <summary>
        /// Initializes a new instance of the KinectFacialRecognitionEngine class.
        /// Wires a multi-source (body + color) reader and an HD face reader to the
        /// sensor and prepares a background worker for recognition.
        /// </summary>
        /// <param name="kinect">Sensor to read frames from; must not be null.</param>
        /// <param name="processors">At least one recognition processor.</param>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="kinect"/> is null.</exception>
        /// <exception cref="ArgumentException">Thrown when no processors are supplied.</exception>
        public KinectFacialRecognitionEngine(KinectSensor kinect, params IRecognitionProcessor[] processors)
        {
            // Validate arguments before mutating any state: the original assigned
            // several fields first, so a failed construction left the instance
            // half-initialized, and a null sensor crashed with NullReferenceException.
            if (kinect == null)
                throw new ArgumentNullException(nameof(kinect));

            if (processors == null || !processors.Any())
                throw new ArgumentException("Please pass in at least one recognition processor!");

            this.Kinect = kinect;

            this.ProcessingEnabled = true;
            this.Processors = processors;

            this.bodies = new Body[kinect.BodyFrameSource.BodyCount];
            // 4 bytes per pixel (BGRA) for the color buffer.
            this.colorImageBuffer = new byte[4 * kinect.ColorFrameSource.FrameDescription.LengthInPixels];
            this.imageWidth = kinect.ColorFrameSource.FrameDescription.Width;
            this.imageHeight = kinect.ColorFrameSource.FrameDescription.Height;

            this.msReader = this.Kinect.OpenMultiSourceFrameReader(FrameSourceTypes.Body | FrameSourceTypes.Color);
            this.msReader.MultiSourceFrameArrived += this.MultiSourceFrameArrived;

            this.faceSource = new HighDefinitionFaceFrameSource(kinect);
            this.faceSource.TrackingQuality = FaceAlignmentQuality.High;
            this.faceReader = this.faceSource.OpenReader();
            this.faceReader.FrameArrived += this.FaceFrameArrived;

            // Recognition runs on a background worker so frame callbacks stay responsive.
            this.recognizerWorker = new BackgroundWorker();
            this.recognizerWorker.DoWork += this.RecognizerWorker_DoWork;
            this.recognizerWorker.RunWorkerCompleted += this.RecognizerWorker_RunWorkerCompleted;
        }
Exemplo n.º 8
0
 /// <summary>
 /// Wraps the default Kinect sensor and streams depth and color frames toward
 /// the two caller-supplied bitmaps.
 /// </summary>
 /// <param name="depthBitmap">Target bitmap for depth frames.</param>
 /// <param name="colorBitmap">Target bitmap for color frames.</param>
 public KinectReader(WriteableBitmap depthBitmap, WriteableBitmap colorBitmap)
 {
     this.depthBitmap = depthBitmap;
     this.colorBitmap = colorBitmap;
     this.sensor = KinectSensor.GetDefault();
     sensor.Open();
     this.reader = sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Depth | FrameSourceTypes.Color);
     reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
 }
 /// <summary>
 /// Releases the multi-source frame reader. Now safe to call more than once:
 /// the original dereferenced _reader unconditionally and threw a
 /// NullReferenceException on a second disposal (after _reader was nulled).
 /// </summary>
 /// <param name="dispossing">True when disposing managed resources. (Misspelled name kept: it is part of the overridable signature.)</param>
 protected virtual void Dispose(Boolean dispossing)
 {
     if (dispossing && _reader != null)
     {
         // Unhook the handler first so no frame event fires on a disposed reader.
         _reader.MultiSourceFrameArrived -= ProcessMove;
         _reader.Dispose();
         _reader = null;
     }
 }
Exemplo n.º 10
0
        /// <summary>
        /// Sets up the exercise list, opens the default Kinect sensor and a reader
        /// for color, depth, infrared and body streams, and creates the mutex that
        /// guards frame access.
        /// </summary>
        public Core()
        {
            InitExercises();

            sensor = KinectSensor.GetDefault();
            sensor.Open();

            reader = sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body);
            // NOTE(review): no MultiSourceFrameArrived handler is attached here —
            // presumably frames are acquired elsewhere via this reader; verify.
            frameLock = new Mutex();
        }
Exemplo n.º 11
0
        /// <summary>
        /// Initializes the window, opens the default Kinect sensor with a
        /// color + body reader, and binds a writable bitmap (sized to the color
        /// stream) to the image control.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();
            mySensor = KinectSensor.GetDefault();
            mySensor.Open();
            myReader = mySensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Body);
            myReader.MultiSourceFrameArrived += myReader_MultiSourceFrameArrived;

            // Size the bitmap from the sensor's actual color stream instead of
            // hard-coding 1920x1080 (identical on Kinect v2, but no magic numbers).
            FrameDescription colorDesc = mySensor.ColorFrameSource.FrameDescription;
            myBitmap = new WriteableBitmap(colorDesc.Width, colorDesc.Height, 96.0, 96.0, PixelFormats.Pbgra32, null);
            image1.Source = myBitmap;
        }
Exemplo n.º 12
0
        /// <summary>
        /// Acquires the default Kinect sensor, opens a reader for depth, color and
        /// body-index frames, and caches the coordinate mapper before the UI loads.
        /// </summary>
        public MainWindow()
        {
            this.kinectSensor = KinectSensor.GetDefault();

            this.multiFrameSourceReader = this.kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Depth | FrameSourceTypes.Color | FrameSourceTypes.BodyIndex);

            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // Removed: an unused (and misspelled) local FrameDescription fetched
            // from DepthFrameSource but never read.

            InitializeComponent();
        }
        /// <summary>
        /// Window load handler: grabs the default Kinect sensor and, when one is
        /// present, opens it and starts the color/depth/infrared/body stream.
        /// </summary>
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            _sensor = KinectSensor.GetDefault();

            // Guard clause: nothing to wire up without a sensor.
            if (_sensor == null)
            {
                return;
            }

            _sensor.Open();

            _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body);
            _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
        }
Exemplo n.º 14
0
        /// <summary>
        /// Constructor: starts listening for clients on the given port and opens a
        /// multi-source reader for the depth and body-index streams, with one
        /// Snappy compressor prepared per stream type.
        /// </summary>
        /// <param name="port">Server port</param>
        /// <param name="sensor">Kinect sensor whose frames are served.</param>
        public KinectFrameServer(int port, KinectSensor sensor)
        {
            this.sensor = sensor;
            this.listener = new KinectClientListener(port);
            this.listener.ClientConnected += listener_ClientConnected;

            // One compressor per served stream type.
            this.depthCompressor = new SnappyFrameCompressor(KinectFrameInformation.DepthFrame);
            this.bodyIndexCompressor = new SnappyFrameCompressor(KinectFrameInformation.BodyIndexFrame);

            this.multiSourceReader = sensor.OpenMultiSourceFrameReader(FrameSourceTypes.BodyIndex | FrameSourceTypes.Depth);
            this.multiSourceReader.MultiSourceFrameArrived += multiSourceReader_MultiSourceFrameArrived;
        }
Exemplo n.º 15
0
        /// <summary>
        /// Window close handler: tears down the frame reader first, then the
        /// sensor, clearing each field so teardown is not repeated.
        /// </summary>
        private void Window_Closed(object sender, EventArgs e)
        {
            // Reader before sensor: stop consuming frames, then release the device.
            var reader = _reader;
            _reader = null;
            if (reader != null)
            {
                reader.Dispose();
            }

            var sensor = _sensor;
            _sensor = null;
            if (sensor != null)
            {
                sensor.Close();
            }
        }
Exemplo n.º 16
0
        /// <summary>
        /// Initializes the page and, when a Kinect sensor is present, opens it and
        /// starts a reader for all five frame streams (color, depth, infrared,
        /// body, body index).
        /// </summary>
        public BackgroundRemovalPage()
        {
            InitializeComponent();

            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _sensor.Open();

                _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body | FrameSourceTypes.BodyIndex);
                _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
            }
        }
        /// <summary>
        /// Window load handler: opens the Kinect sensor, initializes infrared
        /// support and the coordinate-mapper utils, and starts the
        /// color/depth/body-index stream.
        /// </summary>
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            _kinectSensor = KinectSensor.GetDefault();

            // BUG FIX: the original called InitializeInfrared()/Open() before its
            // null check and subscribed to _reader outside it, so a missing sensor
            // still crashed with a NullReferenceException. Everything that touches
            // the sensor or reader now runs only when a sensor exists.
            if (_kinectSensor != null)
            {
                InitializeInfrared();
                _kinectSensor.Open();

                _utils = new Utils(_kinectSensor.CoordinateMapper);
                _reader =
                    _kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth |
                                                             FrameSourceTypes.BodyIndex);
                _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
            }
        }
        /// <summary>
        /// Window load handler: when a Kinect sensor is present, opens it, builds
        /// the background-removal tool from the coordinate mapper, and starts the
        /// color/depth/body-index stream.
        /// </summary>
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _sensor.Open();

                // 2) Initialize the background removal tool.
                _backgroundRemovalTool = new BackgroundRemovalTool(_sensor.CoordinateMapper);

                _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.BodyIndex);
                _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
            }
        }
Exemplo n.º 19
0
        /// <summary>
        /// Page load handler: when a Kinect sensor is present, opens it, starts the
        /// color/depth/infrared/body stream, and hooks up a gesture controller
        /// configured for all gesture types.
        /// </summary>
        private void Page_Loaded(object sender, RoutedEventArgs e)
        {
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _sensor.Open();

                _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body);
                _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

                _gestureController = new GestureController(GestureType.All);
                _gestureController.GestureRecognized += GestureController_GestureRecognized;
            }
        }
        /// <summary>
        /// Initializes the recording window: opens a multi-source reader for depth,
        /// color, body and body-index frames, caches frame dimensions, creates the
        /// motion-data recorder and all drawing resources, then opens the sensor.
        /// DataContext is set before InitializeComponent so bindings resolve here.
        /// </summary>
        public RecordWindow()
        {
            // Basic settings initialization

            // Kinect-related initialization
            this.kinectSensor = KinectSensor.GetDefault();
            this.multiFrameSourceReader = this.kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Depth | FrameSourceTypes.Color | FrameSourceTypes.Body | FrameSourceTypes.BodyIndex);
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            FrameDescription deapthFrameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;
            FrameDescription colorFrameDescription = this.kinectSensor.ColorFrameSource.FrameDescription;

            this.depthWidth = deapthFrameDescription.Width;
            this.depthHeight = deapthFrameDescription.Height;
            this.colorWidth = colorFrameDescription.Width;
            this.colorHeight = colorFrameDescription.Height;

            // Recorder writing captured frames under recordPath.
            this.motionDataHandler = new MotionDataHandler(this.recordPath , this.colorWidth, this.colorHeight, this.depthWidth, this.depthHeight);            

            // Drawing-related setup
            this.drawingGroup = new DrawingGroup();
            this.imageSource = new DrawingImage(this.drawingGroup);
            this.colorBitmap = new WriteableBitmap(this.colorWidth, this.colorHeight, 96.0, 96.0, PixelFormats.Bgra32, null);

            // allocate space to put the pixels being received
            this.colorPixels = new byte[this.colorWidth * this.colorHeight * this.bytesPerPixel];
            this.depthBuffer = new ushort[this.depthWidth * this.depthHeight];
            this.bodyIndexBuffer = new byte[this.depthWidth * this.depthHeight];

            // a bone defined as a line between two joints
            this.bones = Utility.GetBones();

            // populate body colors, one for each BodyIndex
            this.bodyColors = new List<Pen>();
            this.bodyColors.Add(new Pen(Brushes.Red, 6));
            this.bodyColors.Add(new Pen(Brushes.Orange, 6));
            this.bodyColors.Add(new Pen(Brushes.Green, 6));
            this.bodyColors.Add(new Pen(Brushes.Blue, 6));
            this.bodyColors.Add(new Pen(Brushes.Indigo, 6));
            this.bodyColors.Add(new Pen(Brushes.Violet, 6));

            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;
            this.kinectSensor.Open();

            this.DataContext = this;

            this.InitializeComponent();
        }
Exemplo n.º 21
0
        /// <summary>
        /// Console entry point: opens the default Kinect sensor, subscribes to body
        /// frames (which drive the virtual mouse handler), performs an initial
        /// click, and blocks until a key is pressed.
        /// </summary>
        static void Main(string[] args)
        {
            sensor = KinectSensor.GetDefault();

            sensor.IsAvailableChanged += Sensor_IsAvailableChanged;

            sensor.Open();

            _multiReader = sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Body);

            _multiReader.MultiSourceFrameArrived += OnMultipleFramesArrivedHandler;

            // NOTE(review): (900, 39) is a hard-coded screen coordinate — presumably
            // a specific UI element on the author's display; confirm before reuse.
            VirtualMouse.MoveTo(900, 39);
            VirtualMouse.LeftClick();
            // Keep the process (and frame callbacks) alive until a key is pressed.
            Console.ReadKey();
        }
Exemplo n.º 22
0
        /// <summary>
        /// Opens the default Kinect sensor (if not already open) and a reader for
        /// all five frame streams.
        /// </summary>
        /// <exception cref="ScannerNotFoundException">No plugged-in Kinect sensor was found.</exception>
        public FrameReader()
        {
            _sensor = KinectSensor.GetDefault();

            if (_sensor == null)
            {
                throw new ScannerNotFoundException("No valid plugged-in Kinect sensor found.");
            }

            if (!_sensor.IsOpen)
            {
                _sensor.Open();
            }

            _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body | FrameSourceTypes.BodyIndex);
            _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
        }
Exemplo n.º 23
0
        /// <summary>
        /// Initializes the page and, when a Kinect sensor is present, opens it,
        /// starts the color/depth/infrared/body stream and hooks up the gesture
        /// controller.
        /// </summary>
        public GesturesPage()
        {
            InitializeComponent();

            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _sensor.Open();

                _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body);
                _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

                _gestureController = new GestureController();
                _gestureController.GestureRecognized += GestureController_GestureRecognized;
            }
        }
Exemplo n.º 24
0
        /// <summary>
        /// Runs when this WPF application window has closed: stops frame
        /// acquisition by disposing the reader, then shuts down the Kinect
        /// sensor and releases both references.
        /// </summary>
        /// <param name="e">Data supplied with the event.</param>
        protected override void OnClosed(EventArgs e)
        {
            base.OnClosed(e);

            // Dispose the reader first so no further frames arrive while the
            // sensor is being closed.
            var reader = this.multiSourceFrameReader;
            if (reader != null)
            {
                reader.Dispose();
                this.multiSourceFrameReader = null;
            }

            var sensor = this.kinect;
            if (sensor != null)
            {
                sensor.Close();
                this.kinect = null;
            }
        }
Exemplo n.º 25
0
        /// <summary>
        /// Navigation-away handler: unhooks and disposes the multi-source reader,
        /// then closes the Kinect sensor and clears both fields.
        /// </summary>
        protected override void OnNavigatingFrom(NavigatingCancelEventArgs e)
        {
            base.OnNavigatingFrom(e);

            var reader = multiReader;
            multiReader = null;
            if (reader != null)
            {
                // Unsubscribe before disposing so no handler fires on a dead reader.
                reader.MultiSourceFrameArrived -= multiReader_MultiSourceFrameArrived;
                reader.Dispose();
            }

            var sensor = kinect;
            kinect = null;
            if (sensor != null)
            {
                sensor.Close();
            }
        }
Exemplo n.º 26
0
    /// <summary>
    /// Unity shutdown hook: disposes the frame reader, then closes the sensor
    /// if it is open, and drops both references.
    /// </summary>
    void OnApplicationQuit()
    {
        if (reader != null)
        {
            reader.Dispose();
        }
        reader = null;

        // Only close a sensor we actually hold open.
        if (sensor != null && sensor.IsOpen)
        {
            sensor.Close();
        }
        sensor = null;
    }
        /// <summary>
        /// Initializes the page and, when a Kinect sensor is present, opens it,
        /// starts a reader for all five frame streams and builds the
        /// background-removal tool from the coordinate mapper.
        /// </summary>
        public BackgroundRemovalPage()
        {
            InitializeComponent();

            _navigationHelper = new NavigationHelper(this);

            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _sensor.Open();

                _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body | FrameSourceTypes.BodyIndex);
                _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

                _backgroundRemoval = new BackgroundRemovalTool(_sensor.CoordinateMapper);
            }
        }
Exemplo n.º 28
0
 /// <summary>
 /// Page unload handler: releases the infrared reader, the multi-source frame
 /// reader and finally the Kinect sensor, clearing each field as it goes.
 /// </summary>
 void MainPage_Unloaded(object sender, RoutedEventArgs e)
 {
     // Readers first, sensor last.
     if (irReader != null)
     {
         irReader.Dispose();
     }
     irReader = null;

     if (msfr != null)
     {
         msfr.Dispose();
     }
     msfr = null;

     if (ksensor != null)
     {
         ksensor.Close();
     }
     ksensor = null;
 }
    /// <summary>
    /// Called when Unity application or build program is closed.
    ///
    /// Closes the data streams from the Kinects and stops the application accessing the sensor.
    /// </summary>
    void OnApplicationQuit()
    {
        // Dispose the reader before closing the sensor so no frame events fire
        // during shutdown.
        if (multiSourceReader != null)
        {
            multiSourceReader.Dispose();
            multiSourceReader = null;
        }

        if (kinectSensor != null)
        {
            if (kinectSensor.IsOpen)
            {
                kinectSensor.Close();
            }

            kinectSensor = null;
        }
    }
            /// <summary>
            /// Unity Start hook: grabs the default Kinect sensor, opens a reader for
            /// body, body-index and depth frames, and opens the sensor if it is not
            /// already open. Resets the per-frame tracking state.
            /// </summary>
            private void Start()
            {
                kinectSensor = KinectSensor.GetDefault();

                if (kinectSensor != null)
                {
                    frameReader = kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Body | FrameSourceTypes.BodyIndex | FrameSourceTypes.Depth);

                    if (!kinectSensor.IsOpen)
                    {
                        kinectSensor.Open();
                    }
                }

                isNewFrame = false;

                // -1: presumably "no center body selected yet" — verify against
                // the code that assigns centerBodyIndex.
                centerBodyIndex = -1;
            }
            /// <summary>
            /// Unity shutdown hook: disposes the frame reader first, then closes
            /// the sensor if it is open and drops both references.
            /// </summary>
            private void OnApplicationQuit()
            {
                if (frameReader != null)
                {
                    frameReader.Dispose();
                    frameReader = null;
                }

                if (kinectSensor != null)
                {
                    if (kinectSensor.IsOpen)
                    {
                        kinectSensor.Close();
                    }

                    kinectSensor = null;
                }
            }
Exemplo n.º 32
0
    /// <summary>
    /// Unity shutdown hook: disposes the frame reader first, then closes the
    /// sensor if it is open and drops both references.
    /// </summary>
    private void OnApplicationQuit()
    {
        if (_Reader != null)
        {
            _Reader.Dispose();
            _Reader = null;
        }

        if (_Sensor != null)
        {
            if (_Sensor.IsOpen)
            {
                _Sensor.Close();
            }

            _Sensor = null;
        }
    }
        /// <summary>
        /// Initializes the page and, when a Kinect sensor is present, opens it,
        /// starts a reader for all five frame streams and builds the
        /// background-removal tool from the coordinate mapper.
        /// </summary>
        public BackgroundRemovalPage()
        {
            InitializeComponent();

            _navigationHelper = new NavigationHelper(this);

            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _sensor.Open();

                _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body | FrameSourceTypes.BodyIndex);
                _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

                _backgroundRemoval = new BackgroundRemovalTool(_sensor.CoordinateMapper);
            }
        }
Exemplo n.º 34
0
    /// <summary>
    /// Releases the Kinect objects held by this component: disposes the frame
    /// reader, closes the sensor if it is open, and clears both references.
    /// </summary>
    private void releaseKinectResources()
    {
        if (SourceReaderRef != null)
        {
            SourceReaderRef.Dispose();
        }
        SourceReaderRef = null;

        // Only close a sensor that is actually open.
        if (KinectSensorRef != null && KinectSensorRef.IsOpen)
        {
            KinectSensorRef.Close();
        }
        KinectSensorRef = null;
    }
        /// <summary>
        /// Initializes the window and, when a Kinect sensor is present, opens it
        /// and starts the color/depth/infrared multi-source stream.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            _sensor = KinectSensor.GetDefault();

            // BUG FIX: the reader used to be opened outside this null check, so a
            // missing sensor produced a NullReferenceException instead of a no-op.
            if (_sensor != null)
            {
                _sensor.Open();

                _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color |
                                                             FrameSourceTypes.Depth |
                                                             FrameSourceTypes.Infrared);

                _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
            }
        }
        /// <summary>
        /// Sets up the Kinect pipeline for the scene: opens the sensor, creates the
        /// display bitmap sized to the color stream, builds the Jedi gesture
        /// recognizer and starts the body/depth/color stream.
        /// (Method name misspelling kept — it is part of the callable interface.)
        /// </summary>
        private void InitializeSceen()
        {
            _sensor = KinectSensor.GetDefault();
            _sensor.Open();

            FrameDescription colorFrameDescription = _sensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);

            // create the bitmap to display
            // NOTE(review): the description is created for Bgra while the bitmap
            // uses PixelFormats.Bgr32 — confirm the frame copy accounts for this.
            this._colorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

            _jedi = new JediGestureRecognizer(_sensor);
            _jedi.ForceApplying += ForceApplying;
            _jedi.ForceDispel += ForceDispel;

            _frameReader =  _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Body | FrameSourceTypes.Depth | FrameSourceTypes.Color);

            _frameReader.MultiSourceFrameArrived +=MultiSourceFrameArrived;
        }
Exemplo n.º 37
0
        /// <summary>
        /// Window load handler: when a Kinect sensor is present, connects the
        /// location and footstep websockets (with reconnect-on-close handlers),
        /// opens the sensor and starts the color/depth/infrared/body stream.
        /// </summary>
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                locationSocket.ConnectAsync();
                locationSocket.OnOpen  += (senderObject, eArgs) => footstepSocket.ConnectAsync();
                locationSocket.OnClose += (senderObject, eArgs) => locationSocket.ConnectAsync();
                footstepSocket.ConnectAsync();
                footstepSocket.OnOpen  += (senderObject, eArgs) => Console.WriteLine("\n\n!! connection established !!\n");
                // NOTE(review): footstepSocket.OnClose reconnects locationSocket, not
                // footstepSocket — looks like a copy-paste slip; confirm intent.
                footstepSocket.OnClose += (senderObject, eArgs) => locationSocket.ConnectAsync();

                _sensor.Open();
                _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body);
                _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
            }
        }
Exemplo n.º 38
0
 /// <summary>
 /// Window closing handler: disposes the multi-source and gesture frame
 /// readers, then closes the Kinect sensor, clearing each field.
 /// </summary>
 void MainWindow_Closing(object sender, System.ComponentModel.CancelEventArgs e)
 {
     // Readers first so no frame events arrive while the sensor shuts down.
     if (multiFrameReader != null)
     {
         multiFrameReader.Dispose();
     }
     multiFrameReader = null;

     if (gestureFrameReader != null)
     {
         gestureFrameReader.Dispose();
     }
     gestureFrameReader = null;

     if (kinect != null)
     {
         kinect.Close();
     }
     kinect = null;
 }
Exemplo n.º 39
0
        /// <summary>
        /// Releases all Kinect resources: frame readers and face sources/readers
        /// are disposed first, and the sensor is closed last.
        /// </summary>
        public void Dispose()
        {
            // BUG FIX: the sensor used to be closed (and its field nulled) BEFORE
            // the readers were disposed, tearing readers down against an
            // already-closed device. Consumers are now disposed first and the
            // device last — matching the teardown order used elsewhere in this file.
            if (_multiSourceFrameReader != null)
            {
                _multiSourceFrameReader.Dispose();
                _multiSourceFrameReader = null;
            }

            if (_bodyFrameReader != null)
            {
                _bodyFrameReader.Dispose();
                _bodyFrameReader = null;
            }

            if (_faceFrameSources != null)
            {
                for (int i = 0; i < _faceFrameSources.Length; i++)
                {
                    if (_faceFrameSources[i] != null)
                    {
                        _faceFrameSources[i].Dispose();
                    }
                }
            }
            if (_faceFrameReaders != null)
            {
                for (int i = 0; i < _faceFrameReaders.Length; i++)
                {
                    if (_faceFrameReaders[i] != null)
                    {
                        _faceFrameReaders[i].Dispose();
                    }
                }
            }

            if (_kinect != null)
            {
                if (_kinect.IsOpen)
                {
                    _kinect.Close();
                }
                _kinect = null;
            }
        }
        /// <summary>
        /// Window initialization: prepares the photo-booth timers, opens the Kinect
        /// multi-source stream (color/depth/body/body-index), allocates the mapping
        /// and display buffers, and resolves the capture/flash storyboards.
        /// </summary>
        private void MainWindow_Initialized(object sender, EventArgs e)
        {
            this.DataContext = this;

            // Ensure the local photo output folder exists before any capture.
            Directory.CreateDirectory(localPhotosSavePath);

            // Timers: 1s countdown tick, 3s capture cadence, 8s result display,
            // 6s slideshow advance.
            countdownTimer.Tick    += new EventHandler(countdownTimer_Tick);
            countdownTimer.Interval = new TimeSpan(0, 0, 1);

            captureTimer.Tick    += new EventHandler(captureTimer_Tick);
            captureTimer.Interval = new TimeSpan(0, 0, 3);

            displayTimer.Tick    += new EventHandler(displayTimer_Tick);
            displayTimer.Interval = new TimeSpan(0, 0, 8);

            slideshowTimer.Tick    += new EventHandler(slideshowTimer_Tick);
            slideshowTimer.Interval = new TimeSpan(0, 0, 6);

            this.kinectSensor = KinectSensor.GetDefault();

            this.multiFrameSourceReader = this.kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Body | FrameSourceTypes.BodyIndex);
            this.multiFrameSourceReader.MultiSourceFrameArrived += this.Reader_MultiSourceFrameArrived;
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            FrameDescription depthFrameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;
            int depthWidth  = depthFrameDescription.Width;
            int depthHeight = depthFrameDescription.Height;

            FrameDescription colorFrameDescription = this.kinectSensor.ColorFrameSource.FrameDescription;
            int colorWidth  = colorFrameDescription.Width;
            int colorHeight = colorFrameDescription.Height;

            // One depth-space point per color pixel for color-to-depth mapping.
            this.colorMappedToDepthPoints = new DepthSpacePoint[colorWidth * colorHeight];

            this.liveBitmap = new WriteableBitmap(colorWidth, colorHeight, 96.0, 96.0, PixelFormats.Bgra32, null);

            // Full back-buffer size: stride for all rows but the last, plus the
            // last row's visible pixels.
            this.bitmapBackBufferSize = (uint)((this.liveBitmap.BackBufferStride * (this.liveBitmap.PixelHeight - 1)) + (this.liveBitmap.PixelWidth * this.bytesPerPixel));

            flashStoryboard    = this.FindResource("flashStoryboard") as Storyboard;
            capture0Storyboard = this.FindResource("capture0Storyboard") as Storyboard;
            capture1Storyboard = this.FindResource("capture1Storyboard") as Storyboard;
            capture2Storyboard = this.FindResource("capture2Storyboard") as Storyboard;
            capture3Storyboard = this.FindResource("capture3Storyboard") as Storyboard;
        }
Exemplo n.º 41
0
        /// <summary>
        /// Initializes the page: opens a multi-source reader for all five frame
        /// streams, configures the full set of face frame features, builds the
        /// FaceManager and opens the sensor. DataContext is assigned before
        /// InitializeComponent so bindings resolve against this instance.
        /// </summary>
        public MainPage()
        {
            // one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            this.multiSourceFrameReader = this.kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Infrared | FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.BodyIndex | FrameSourceTypes.Body);

            this.multiSourceFrameReader.MultiSourceFrameArrived += this.Reader_MultiSourceFrameArrived;

            // specify the required face frame results
            // init with all the features so they are accessible later.
            this.faceFrameFeatures =
                FaceFrameFeatures.BoundingBoxInColorSpace
                | FaceFrameFeatures.PointsInColorSpace
                | FaceFrameFeatures.BoundingBoxInInfraredSpace
                | FaceFrameFeatures.PointsInInfraredSpace
                | FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.FaceEngagement
                | FaceFrameFeatures.Glasses
                | FaceFrameFeatures.Happy
                | FaceFrameFeatures.LeftEyeClosed
                | FaceFrameFeatures.RightEyeClosed
                | FaceFrameFeatures.LookingAway
                | FaceFrameFeatures.MouthMoved
                | FaceFrameFeatures.MouthOpen;

            this.faceManager = new FaceManager(this.kinectSensor, this.faceFrameFeatures);

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // open the sensor
            this.kinectSensor.Open();

            this.InitializeComponent();

            // new
            this.Loaded += MainPage_Loaded;
        }
Exemplo n.º 42
0
        /// <summary>
        /// Window constructor: opens the sensor, allocates the color/depth
        /// buffers from the sensor's frame descriptions, clears any frames
        /// left by a previous recording session, and wires up the reader and
        /// the start/stop buttons.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            ks = KinectSensor.GetDefault();
            ks.Open();

            coordinateMapper = ks.CoordinateMapper;

            // Color is requested in Bgra so the raw buffer maps directly to bitmaps.
            colorFrameDesc = ks.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
            int colorWidth  = colorFrameDesc.Width;
            int colorHeight = colorFrameDesc.Height;

            depthFrameDesc = ks.DepthFrameSource.FrameDescription;

            uint colorFrameSize = colorFrameDesc.BytesPerPixel * colorFrameDesc.LengthInPixels;
            uint depthFrameSize = depthFrameDesc.BytesPerPixel * depthFrameDesc.LengthInPixels;

            colorPixels = new byte[colorFrameSize];
            depthPixels = new byte[depthFrameSize];

            colorMappedToDepthPoints = new DepthSpacePoint[colorWidth * colorHeight];

            recordStarted = false;

            // Remove stale frames from previous recording sessions.
            // The original code called GetFiles() unconditionally, which threw
            // DirectoryNotFoundException on a fresh checkout where ./rgb or
            // ./d did not exist yet.
            ClearOrCreateDirectory("./rgb/");
            ClearOrCreateDirectory("./d/");

            multiSourceFrameReader = ks.OpenMultiSourceFrameReader(FrameSourceTypes.Depth | FrameSourceTypes.Color);
            multiSourceFrameReader.MultiSourceFrameArrived += msfr_FrameArrived;

            start.Click += start_Click;
            stop.Click  += stop_Click;
        }

        /// <summary>
        /// Deletes every file in <paramref name="path"/>; when the directory
        /// does not exist yet, it is created instead (nothing to delete).
        /// </summary>
        private static void ClearOrCreateDirectory(string path)
        {
            DirectoryInfo directory = new DirectoryInfo(path);
            if (!directory.Exists)
            {
                directory.Create();
                return;
            }

            foreach (FileInfo file in directory.GetFiles())
            {
                file.Delete();
            }
        }
Exemplo n.º 43
0
        /// <summary>
        /// Raises the destroy event.
        /// </summary>
        void OnDestroy()
        {
            if (reader != null)
            {
                reader.Dispose();
                reader = null;
            }

            if (sensor != null)
            {
                if (sensor.IsOpen)
                {
                    sensor.Close();
                }

                sensor = null;
            }

            if (texture != null)
            {
                Texture2D.Destroy(texture);
                texture = null;
            }
            if (rgbaMat != null)
            {
                rgbaMat.Dispose();
                rgbaMat = null;
            }
            if (outputMat != null)
            {
                outputMat.Dispose();
                outputMat = null;
            }
            if (maskMat != null)
            {
                maskMat.Dispose();
                maskMat = null;
            }
            if (comicFilter != null)
            {
                comicFilter.Dispose();
                comicFilter = null;
            }
        }
Exemplo n.º 44
0
        /// <summary>
        /// Initializes a new instance of the MainWindow class: opens one
        /// dedicated reader per stream plus a combined infrared+depth reader,
        /// creates the two display bitmaps, and starts the sensor.
        /// </summary>
        public MainWindow()
        {
            // get the kinectSensor object
            this.kinectSensor = KinectSensor.GetDefault();

            // open one reader per stream, plus a multi-source reader that
            // pairs infrared and depth frames
            this.colorFrameReader       = this.kinectSensor.ColorFrameSource.OpenReader();
            this.infraredFrameReader    = this.kinectSensor.InfraredFrameSource.OpenReader();
            this.depthFrameReader       = this.kinectSensor.DepthFrameSource.OpenReader();
            this.multiSourceFrameReader = this.kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Infrared | FrameSourceTypes.Depth);

            // wire handler for frame arrival
            this.colorFrameReader.FrameArrived    += this.Reader_ColorFrameArrived;
            this.infraredFrameReader.FrameArrived += this.Reader_InfraredFrameArrived;
            this.depthFrameReader.FrameArrived    += this.Reader_DepthFrameArrived;
            this.multiSourceFrameReader.MultiSourceFrameArrived += this.Reader_MultiSourceFrameArrived;

            // event handler for the coordinate-mapping changed callback
            kinectSensor.CoordinateMapper.CoordinateMappingChanged += CoordinateMappingChangedCallback;

            // NOTE(review): the original code also created a Bgra
            // FrameDescription here that was never read; removed as dead code.

            // create the bitmaps to display (sized by imageSizeX/imageSizeY,
            // not by the color frame description)
            this.bitmap1 = new WriteableBitmap(imageSizeX, imageSizeY, 96.0, 96.0, PixelFormats.Bgr24, null);
            this.bitmap2 = new WriteableBitmap(imageSizeX, imageSizeY, 96.0, 96.0, PixelFormats.Bgr24, null);

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // initialize the components (controls) of the window
            this.InitializeComponent();

            CannyThreshold        = 100;
            CannyThresholdLinking = 100;
        }
Exemplo n.º 45
0
        /// <summary>
        /// Window load handler: starts the Kinect multi-source reader and
        /// opens a TCP listener on 127.0.0.1:7001 used to stream frame data
        /// to a local client via <c>AcceptCallback</c>.
        /// </summary>
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _sensor.Open();

                _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body | FrameSourceTypes.BodyIndex);
                _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
            }

            StateObject so    = new StateObject();
            IPEndPoint  ipEnd = new IPEndPoint(IPAddress.Parse("127.0.0.1"), 7001);

            try
            {
                so.socket = new Socket(ipEnd.AddressFamily, SocketType.Stream, ProtocolType.Tcp);

                so.socket.Bind(ipEnd);
                so.socket.Listen(1);
                so.socket.SendBufferSize = 524288;

                allDone.Reset();
                so.socket.BeginAccept(new AsyncCallback(AcceptCallback), so);

                sock = so.socket;
                sock.SendBufferSize = 524288;
            }
            catch (SocketException se)
            {
                Console.WriteLine(se.Message);

                // BUGFIX: the original handler closed `sock` and then called
                // Bind/Listen on that same (now disposed) socket, which throws
                // ObjectDisposedException; `sock` could also still be null if
                // the failure happened before it was assigned. Recreate the
                // listener from scratch instead.
                sock?.Close();
                sock = new Socket(ipEnd.AddressFamily, SocketType.Stream, ProtocolType.Tcp);
                sock.Bind(ipEnd);
                sock.Listen(1);
            }
        }
Exemplo n.º 46
0
        /// <summary>
        /// Initializes the window that hosts the image canvas: goes
        /// full-screen, creates the two hand-cursor images, and wires the
        /// Kinect readers used to detect the cursor position relative to
        /// the window.
        /// </summary>
        private void IniciatizeControls()
        {
            InitializeComponent();

            // Full-screen, borderless window sized to the primary display.
            WindowState = WindowState.Maximized;
            WindowStyle = WindowStyle.None;

            ventana.Background = Brushes.Black;
            ventana.Height     = System.Windows.SystemParameters.PrimaryScreenHeight;
            ventana.Width      = System.Windows.SystemParameters.PrimaryScreenWidth;
            HeightScreen       = (int)ventana.Height;
            WidthScreen        = (int)ventana.Width;

            // 32x32 cursor image for the left hand.
            imgHandLeft        = new Image();
            imgHandLeft.Source = new BitmapImage(Utilities.LoadUriFromResource("Cursores/manoLeft.png"));
            imgHandLeft.Width  = 32;
            imgHandLeft.Height = 32;

            // 32x32 cursor image for the right hand.
            imgHandRight        = new Image();
            imgHandRight.Source = new BitmapImage(Utilities.LoadUriFromResource("Cursores/manoRight.png"));
            imgHandRight.Width  = 32;
            imgHandRight.Height = 32;

            mainScreen.Children.Add(imgHandLeft);
            mainScreen.Children.Add(imgHandRight);

            sensor = KinectSensor.GetDefault();
            sensor.Open();
            sensor.IsAvailableChanged += AvailableChanged;

            this.coordinateMapper = sensor.CoordinateMapper;

            // Used to detect when the tracked body id is lost.
            reader = sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Body | FrameSourceTypes.BodyIndex | FrameSourceTypes.Color | FrameSourceTypes.Depth);
            reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

            // Fired when the hands of a tracked body move.
            KinectCoreWindow kinectCoreWindow = KinectCoreWindow.GetForCurrentThread();

            kinectCoreWindow.PointerMoved += kinectCoreWindow_PointerMoved;

            //vista360.Children.Clear();  // fully clears the canvas holding the images
        }
Exemplo n.º 47
0
        /// <summary>
        /// Builds the angle page: opens the default Kinect sensor, subscribes
        /// to its combined frame stream, and starts tracking players.
        /// </summary>
        public AnglePage()
        {
            InitializeComponent();

            _sensor = KinectSensor.GetDefault();
            if (_sensor == null)
            {
                return;
            }

            _sensor.Open();

            _reader = _sensor.OpenMultiSourceFrameReader(
                FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body);
            _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

            _userReporter              = new PlayersController();
            _userReporter.BodyEntered += UserReporter_BodyEntered;
            _userReporter.BodyLeft    += UserReporter_BodyLeft;
            _userReporter.Start();
        }
Exemplo n.º 48
0
        /// <summary>
        /// One-time setup: opens the Kinect multi-source reader, sizes all
        /// intermediate buffers from the sensor's frame descriptions, and
        /// pre-allocates a queue of reusable frame-data entries (textures,
        /// sphere centers, body maps) for the swap chain.
        /// </summary>
        public void Initialize(GraphicsDevice gdevice)
        {
            this.gdevice = gdevice;
            kinect       = KinectSensor.GetDefault();
            frameReader  = kinect.OpenMultiSourceFrameReader(FrameSourceTypes.Color
                                                             | FrameSourceTypes.Depth | FrameSourceTypes.Body | FrameSourceTypes.BodyIndex);
            frameReader.MultiSourceFrameArrived += FramesArrived;

            var colorDesc   = kinect.ColorFrameSource.FrameDescription;
            var depthDesc   = kinect.DepthFrameSource.FrameDescription;
            var bodyIdxDesc = kinect.BodyIndexFrameSource.FrameDescription;

            colorFrameSize = new Size(colorDesc.Width, colorDesc.Height);
            depthFrameSize = new Size(depthDesc.Width, depthDesc.Height);

            // One camera-space point per depth pixel.
            tempCameraPoints = new CameraSpacePoint[depthDesc.LengthInPixels];

            // Sphere grid resolution: one sphere per DEPTH_STEP pixels.
            horizNumSpheres = depthDesc.Width / DEPTH_STEP; // implicit int floor
            vertNumSpheres  = depthDesc.Height / DEPTH_STEP;

            // Pre-build SWAP_SIZE reusable entries so per-frame processing
            // never has to allocate GPU textures.
            for (int i = 0; i < SWAP_SIZE; i++)
            {
                swapChain.Enqueue(new KinectFrameData
                {
                    ColorTexture = new Texture2D(gdevice, colorDesc.Width, colorDesc.Height,
                                                 false, SurfaceFormat.Bgra32),
                    DepthTexture = new Texture2D(gdevice, depthDesc.Width, depthDesc.Height,
                                                 false, SurfaceFormat.Bgra4444), // must be reassembled in shader
                    BodyTexture = new Texture2D(gdevice, bodyIdxDesc.Width, bodyIdxDesc.Height,
                                                false, SurfaceFormat.Alpha8),
                    DepthSpheresCenters = new Vector3[horizNumSpheres * vertNumSpheres],
                    Bodies = new Dictionary <ulong, KBody>()
                });
            }

            // I cannot copy directly kinect frames to textures
            // because the language does not allow casts from unsafe pointer to managed array,
            // so I use temporary buffers.
            tempColorBuffer   = new byte[colorDesc.LengthInPixels * 4]; // 4 bytes per pixel (Bgra)
            tempDepthBuffer   = new ushort[depthDesc.LengthInPixels];
            tempBodyIdxBuffer = new byte[bodyIdxDesc.LengthInPixels];
            tempBodyBuffer    = new KBody[NUM_BODIES];
        }
Exemplo n.º 49
0
        /// <summary>
        /// Switches the UI into non-trackable mode and starts the sensor with
        /// background removal and skeleton drawing attached to the combined
        /// frame stream.
        /// </summary>
        private void StartNonTraceableMode()
        {
            SetUIInNoTrackableMode();

            _sensor = KinectSensor.GetDefault();

            // TODO(review): original note said "check, not working" —
            // behavior with no sensor attached still needs verification.
            if (_sensor == null)
            {
                return;
            }

            _sensor.Open();

            // Background removal plus a skeleton overlay sized to the
            // KinectSkeleton UI element.
            _backgroundRemovalTool = new BackgroundRemovalTool(_sensor.CoordinateMapper);
            _drawSkeleton          = new DrawSkeleton(_sensor, (int)KinectSkeleton.Width, (int)KinectSkeleton.Height);

            _reader = _sensor.OpenMultiSourceFrameReader(
                FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.BodyIndex | FrameSourceTypes.Body);
            _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived_NonTraceable;
        }
        /// <summary>
        /// Window constructor: opens the body-index reader plus a combined
        /// depth/body-index/body reader, allocates the pixel buffers and
        /// display bitmaps, and starts the sensor.
        /// </summary>
        public MainWindow()
        {
            // get the kinectSensor object
            kinectSensor = KinectSensor.GetDefault();

            // open the reader for the body-index frames
            bodyIndexFrameReader = kinectSensor.BodyIndexFrameSource.OpenReader();

            // The per-frame handler for this reader is intentionally left
            // unwired (it was commented out); only window close is hooked.
            Closing += OnWindowClosing;

            bodyIndexFrameDescription = kinectSensor.BodyIndexFrameSource.FrameDescription;
            depthFrameDescription     = kinectSensor.DepthFrameSource.FrameDescription;

            // one converted pixel per cell of each source frame
            bodyIndexPixels = new uint[bodyIndexFrameDescription.Width * bodyIndexFrameDescription.Height];
            spinePixels     = new uint[depthFrameDescription.Width * depthFrameDescription.Height];

            // bitmaps the converted pixels are rendered into
            bodyIndexBitmap = new WriteableBitmap(bodyIndexFrameDescription.Width, bodyIndexFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);
            spineBitmap     = new WriteableBitmap(depthFrameDescription.Width, depthFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

            // set IsAvailableChanged event notifier
            kinectSensor.IsAvailableChanged += Sensor_IsAvailableChanged;

            // combined reader used to calculate the back position
            depthAndBodyIndexReader = kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Depth | FrameSourceTypes.BodyIndex | FrameSourceTypes.Body);
            depthAndBodyIndexReader.MultiSourceFrameArrived += MultisorceReader_FrameArrived;

            // open the sensor
            kinectSensor.Open();

            // set the status text
            StatusText = kinectSensor.IsAvailable ? "Kinect is running"
                                                  : "Kinect is not available";

            // the window doubles as its own view model
            DataContext = this;

            // initialize the components (controls) of the window
            InitializeComponent();
        }
        /// <summary>
        /// Start-recording button handler: starts the balloon generator and,
        /// when a sensor is present, opens it and begins receiving color and
        /// body frames (also toggles body display).
        /// </summary>
        private void StartRecording(object sender, RoutedEventArgs e)
        {
            _sensor = KinectSensor.GetDefault();

            // Start generating balloons.
            Generator.Start();

            if (_sensor != null)
            {
                _sensor.Open();

                // Size the array from the sensor instead of the hard-coded 6:
                // BodyCount is the SDK's supported simultaneous-body limit.
                bodies  = new Body[_sensor.BodyFrameSource.BodyCount];
                _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Body);

                _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

                _displayBody = !_displayBody;
            }
        }
Exemplo n.º 52
0
        /// <summary>
        /// Window-loaded hook: opens the default sensor, reflects its state
        /// in the status label, and starts listening for combined frames.
        /// </summary>
        protected void MainWindow_Loaded(object sender, RoutedEventArgs e)
        {
            kinect = KinectSensor.GetDefault();
            kinect.Open();

            kinectStatusLabel.Content = kinect.IsOpen
                ? "Kinect Ready!"
                : "Kinect not Ready!";

            frameReader = kinect.OpenMultiSourceFrameReader(
                FrameSourceTypes.Body | FrameSourceTypes.Infrared | FrameSourceTypes.Depth | FrameSourceTypes.Color);
            frameReader.MultiSourceFrameArrived += MainWindow_KinectFrameArrived;
        }
Exemplo n.º 53
0
        /// <summary>
        /// Constructor; runs exactly once at startup. Prepares a Bgra color
        /// frame description, opens a combined color+body reader, and starts
        /// the sensor.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            this.kinect = KinectSensor.GetDefault();

            // Request color frames converted to Bgra format.
            this.colorFrameDescription
                = this.kinect.ColorFrameSource
                  .CreateFrameDescription(ColorImageFormat.Bgra);

            this.multiSourceFrameReader
                = this.kinect.OpenMultiSourceFrameReader
                      (FrameSourceTypes.Color | FrameSourceTypes.Body);

            this.multiSourceFrameReader.MultiSourceFrameArrived
                += MultiSourceFrameReader_MultiSourceFrameArrived;

            this.kinect.Open();
        }
Exemplo n.º 54
0
        /// <summary>
        /// Angle page constructor: opens the default sensor when available,
        /// wires the combined frame stream, and starts player tracking.
        /// </summary>
        public AnglePage()
        {
            InitializeComponent();

            _sensor = KinectSensor.GetDefault();
            if (_sensor == null)
            {
                return;
            }

            _sensor.Open();

            _reader = _sensor.OpenMultiSourceFrameReader(
                FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body);
            _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

            _userReporter              = new PlayersController();
            _userReporter.BodyEntered += UserReporter_BodyEntered;
            _userReporter.BodyLeft    += UserReporter_BodyLeft;
            _userReporter.Start();
        }
//////// GUI - END


    /// <summary>
    /// Acquires the default Kinect sensor, opens it, and — when the open
    /// succeeds — creates a color/depth/body-index frame reader and marks
    /// the component as initialized.
    /// </summary>
    void InitializeDefaultSensor()
    {
        kinectSensor = KinectSensor.GetDefault();
        if (kinectSensor == null)
        {
            Debug.LogError("ERROR: No Kinect found!");
            return;
        }

        coordinateMapper = kinectSensor.CoordinateMapper;

        kinectSensor.Open();
        if (!kinectSensor.IsOpen)
        {
            return;
        }

        multiSourceFrameReader = kinectSensor.OpenMultiSourceFrameReader(
            FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.BodyIndex);
        init = true;
    }
Exemplo n.º 56
0
        /// <summary>
        /// Gestures page constructor: wires navigation, opens the sensor,
        /// and hooks the gesture controller to the combined frame stream.
        /// </summary>
        public GesturesPage()
        {
            InitializeComponent();

            _navigationHelper = new NavigationHelper(this);

            _sensor = KinectSensor.GetDefault();
            if (_sensor == null)
            {
                return;
            }

            _sensor.Open();

            _reader = _sensor.OpenMultiSourceFrameReader(
                FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body);
            _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

            _gestureController = new GestureController();
            _gestureController.GestureRecognized += GestureController_GestureRecognized;
        }
Exemplo n.º 57
0
        /// <summary>
        /// Opens the default Kinect v2 sensor and subscribes to its body
        /// frame stream. (Dead commented-out Kinect v1 API calls —
        /// ColorStream/DepthStream/SkeletonStream — were removed; they do
        /// not exist in the v2 SDK used here.)
        /// </summary>
        private static void InitilizeKinect()
        {
            var sensor = KinectSensor.GetDefault();
            if (sensor == null)
            {
                return;
            }

            // Body frames only; color and depth are not consumed here.
            _reader = sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Body);
            _reader.MultiSourceFrameArrived += Sensor_AllFramesReady;

            _coordinateMapper = sensor.CoordinateMapper;

            sensor.Open();
        }
Exemplo n.º 58
0
        ///// <summary>
        ///// Intermediate storage for receiving depth frame data from the sensor
        ///// </summary>
        //private ushort[] depthFrameData = null;

        ///// <summary>
        ///// Intermediate storage for receiving color frame data from the sensor
        ///// </summary>
        //private byte[] colorFrameData = null;

        ///// <summary>
        ///// Intermediate storage for receiving body index frame data from the sensor
        ///// </summary>
        //private byte[] bodyIndexFrameData = null;

        ///// <summary>
        ///// Intermediate storage for frame data converted to color
        ///// </summary>
        //private byte[] displayPixels = null;

        ///// <summary>
        ///// Intermediate storage for the color to depth mapping
        ///// </summary>
        //private DepthSpacePoint[] depthPoints = null;



        #endregion

        /// <summary>
        /// Prepares the video pipeline: a depth+color+body-index reader, the
        /// coordinate mapper, the color-to-depth mapping table, and the
        /// bitmap the composited frame is rendered into.
        /// </summary>
        public VideoHandler(KinectSensor kinectSensor)
        {
            kinectImage       = new Image();
            this.kinectSensor = kinectSensor;

            // One reader covering all three streams this handler composites.
            this.multiFrameSourceReader = this.kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Depth | FrameSourceTypes.Color | FrameSourceTypes.BodyIndex);

            // NOTE(review): the MultiSourceFrameArrived subscription was
            // commented out in the original ("check this"), so frames are not
            // handled by this class yet — confirm where they are consumed.

            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // Depth frame dimensions, taken from the sensor.
            FrameDescription depthFrameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;
            int depthWidth  = depthFrameDescription.Width;
            int depthHeight = depthFrameDescription.Height;

            // Color frame dimensions, taken from the sensor.
            FrameDescription colorFrameDescription = this.kinectSensor.ColorFrameSource.FrameDescription;
            int colorWidth  = colorFrameDescription.Width;
            int colorHeight = colorFrameDescription.Height;

            // One depth-space point per color pixel for the mapping pass.
            this.colorMappedToDepthPoints = new DepthSpacePoint[colorWidth * colorHeight];

            // Bitmap the composited output is written into.
            this.bitmapBody = new WriteableBitmap(colorWidth, colorHeight, 96.0, 96.0, PixelFormats.Bgra32, null);

            // Writable back-buffer size in bytes.
            this.bitmapBackBufferSize = (uint)((this.bitmapBody.BackBufferStride * (this.bitmapBody.PixelHeight - 1)) + (this.bitmapBody.PixelWidth * this.bytesPerPixel));
        }
Exemplo n.º 59
0
        /// <summary>
        /// Sets up the snap-hand page: sensor, combined frame reader, and
        /// player tracking, then switches the viewer to color visualization.
        /// </summary>
        public SnapHand()
        {
            this.InitializeComponent();

            _sensor = KinectSensor.GetDefault();
            if (_sensor != null)
            {
                _sensor.Open();

                _reader = _sensor.OpenMultiSourceFrameReader(
                    FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body);
                _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

                _playersController = new PlayersController();
                _playersController.BodyEntered += UserReporter_BodyEntered;
                _playersController.BodyLeft    += UserReporter_BodyLeft;
                _playersController.Start();
            }

            // Show the color stream regardless of whether a sensor was found.
            viewer.Visualization = Visualization.Color;
        }
Exemplo n.º 60
0
        /// <summary>
        /// Camera page constructor: grabs the active sensor, opens it,
        /// declares which streams the page will read, and starts player
        /// tracking.
        /// </summary>
        public CameraPage()
        {
            InitializeComponent();

            _sensor = KinectSensor.GetDefault();
            if (_sensor == null)
            {
                return;
            }

            // Open the sensor and specify which streams may be accessed.
            _sensor.Open();
            _reader = _sensor.OpenMultiSourceFrameReader(
                FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body);
            _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

            _playersController              = new PlayersController();
            _playersController.BodyEntered += UserReporter_BodyEntered;
            _playersController.BodyLeft    += UserReporter_BodyLeft;
            _playersController.Start();
        }