Example #1
        /// <summary>
        /// Called when the main window is created; everything is initialized here
        /// </summary>
        public MainWindow()
        {
            // grab the kinect object
            kinect = KinectSensor.GetDefault();

            // open the depth and color frame readers
            depthReader = kinect.DepthFrameSource.OpenReader();
            colorReader = kinect.ColorFrameSource.OpenReader();

            // event handler - calls frame arrival handler when a frame arrives from the reader
            depthReader.FrameArrived += depthFrameArrived;
            colorReader.FrameArrived += colorFrameArrived;

            // get the frame description from the source
            depthFrameDescription = kinect.DepthFrameSource.FrameDescription;
            colorFrameDescription = kinect.ColorFrameSource.FrameDescription;

            // allocate the array used to convert the depth data into displayable pixels
            depthPixels = new byte[depthFrameDescription.Width * depthFrameDescription.Height];

            // create the bitmap to display
            depthBitmap = new WriteableBitmap(depthFrameDescription.Width, depthFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray8, null);
            colorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

            // initialize the gps device
            gpsDevice = new GPSHandler();
            FolderPath = "D:\\KinectTest\\";

            // start the kinect sensor
            kinect.Open();

            InitializeComponent();
        }
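The constructor above only wires up depthFrameArrived and colorFrameArrived; the handlers themselves are not part of this example. A minimal sketch of the depth handler, assuming the depthFrameDescription, depthPixels and depthBitmap fields from the constructor are used (the local rawDepth buffer is introduced here purely for illustration):

        private void depthFrameArrived(object sender, DepthFrameArrivedEventArgs e)
        {
            using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
            {
                if (depthFrame == null)
                {
                    return;
                }

                // copy the raw 16-bit depth values out of the frame
                ushort[] rawDepth = new ushort[depthFrameDescription.Width * depthFrameDescription.Height];
                depthFrame.CopyFrameDataToArray(rawDepth);

                // scale each depth value to a 0-255 intensity for the Gray8 bitmap
                ushort maxDepth = depthFrame.DepthMaxReliableDistance;
                for (int i = 0; i < rawDepth.Length; i++)
                {
                    depthPixels[i] = (byte)(rawDepth[i] >= maxDepth ? 0 : rawDepth[i] * 255 / maxDepth);
                }

                depthBitmap.WritePixels(
                    new Int32Rect(0, 0, depthFrameDescription.Width, depthFrameDescription.Height),
                    depthPixels,
                    depthFrameDescription.Width, // stride: one byte per pixel for Gray8
                    0);
            }
        }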
Example #2
        /// <summary>
        /// Initializes a new instance of the MainWindow class.
        /// </summary>
        public MainWindow()
        {
            // get the kinectSensor object
            this.kinectSensor = KinectSensor.GetDefault();

            // open the reader for the color frames
            this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader();

            // wire handler for frame arrival
            this.colorFrameReader.FrameArrived += this.Reader_ColorFrameArrived;

            // create the colorFrameDescription from the ColorFrameSource using Bgra format
            FrameDescription colorFrameDescription = this.kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);

            // create the bitmap to display
            this.colorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // initialize the components (controls) of the window
            this.InitializeComponent();
        }
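The Reader_ColorFrameArrived handler wired above is not shown in this example. A typical implementation in the style of the SDK ColorBasics sample copies the converted BGRA data straight into the back buffer of the colorBitmap field; the following is a sketch under that assumption:

        private void Reader_ColorFrameArrived(object sender, ColorFrameArrivedEventArgs e)
        {
            using (ColorFrame colorFrame = e.FrameReference.AcquireFrame())
            {
                if (colorFrame == null)
                {
                    return;
                }

                FrameDescription colorFrameDescription = colorFrame.FrameDescription;

                using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
                {
                    this.colorBitmap.Lock();

                    // only write into the bitmap when the incoming frame matches its size
                    if ((colorFrameDescription.Width == this.colorBitmap.PixelWidth) &&
                        (colorFrameDescription.Height == this.colorBitmap.PixelHeight))
                    {
                        colorFrame.CopyConvertedFrameDataToIntPtr(
                            this.colorBitmap.BackBuffer,
                            (uint)(colorFrameDescription.Width * colorFrameDescription.Height * 4),
                            ColorImageFormat.Bgra);

                        this.colorBitmap.AddDirtyRect(new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight));
                    }

                    this.colorBitmap.Unlock();
                }
            }
        }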
Example #3
        public MainWindow()
        {
            InitializeComponent();
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                // Listen for body data.
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                _colorReader = _sensor.ColorFrameSource.OpenReader();
                _colorReader.FrameArrived += ColorReader_FrameArrived;

                // Listen for HD face data.
                _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceSourceSub = new HighDefinitionFaceFrameSource(_sensor);
               // _faceSource.TrackingIdLost += OnTrackingIdLost;
                _faceReader = _faceSource.OpenReader();
                _faceReaderSub = _faceSourceSub.OpenReader();

                _faceReader.FrameArrived += FaceReader_FrameArrived;
                _faceReaderSub.FrameArrived += FaceReaderSub_FrameArrived;

                _faceModel = new FaceModel();
                _faceAlignment = new FaceAlignment();
                _faceAlignmentSub = new FaceAlignment();
                // Start tracking!        
                _sensor.Open();
            }
        }
        protected override void OnNavigatedTo( NavigationEventArgs e )
        {
            base.OnNavigatedTo( e );

            try {
                // Open the Kinect
                kinect = KinectSensor.GetDefault();
                if ( kinect == null ) {
                    throw new Exception( "Kinectを開けません" );
                }

                kinect.Open();

                // Create the color frame description (BGRA format)
                colorFrameDesc = kinect.ColorFrameSource.CreateFrameDescription( ColorImageFormat.Bgra );

                colorBitmap = new WriteableBitmap( colorFrameDesc.Width, colorFrameDesc.Height );
                ImageColor.Source = colorBitmap;

                colorBuffer = new byte[colorFrameDesc.Width * colorFrameDesc.Height * colorFrameDesc.BytesPerPixel];

                // Open the color frame reader
                colorFrameReader = kinect.ColorFrameSource.OpenReader();
                colorFrameReader.FrameArrived += colorFrameReader_FrameArrived;
            }
            catch ( Exception ex ) {
                MessageDialog dlg = new MessageDialog( ex.Message );
                dlg.ShowAsync();
            }
        }
        public MainWindow()
        {
            InitializeComponent();
            try
            {
                ///Get a reference to the Kinect sensor itself; as I recall, it reports as unavailable when nothing is connected
                this.kinect = KinectSensor.GetDefault();
                ///Specify the format of the images to read (RGB, etc.) and set up the reader that reads them
                this.colorImageFormat = ColorImageFormat.Bgra;
                this.colorFrameDescription = this.kinect.ColorFrameSource.CreateFrameDescription(this.colorImageFormat);
                this.colorFrameReader = this.kinect.ColorFrameSource.OpenReader();
                this.colorFrameReader.FrameArrived += colorFrameReader_FrameArrived;
                this.kinect.Open();//start the Kinect!
                if (!kinect.IsOpen)
                {
                    this.errorLog.Visibility = Visibility.Visible;
                    this.errorLog.Content = "キネクトが見つからないよ!残念!";
                    throw new Exception("キネクトが見つかりませんでした!!!");
                }
                ///create the array that stores the Body data
                bodies = new Body[kinect.BodyFrameSource.BodyCount];

                ///open the body frame reader
                bodyFrameReader = kinect.BodyFrameSource.OpenReader();
                bodyFrameReader.FrameArrived += bodyFrameReader_FrameArrived;
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                Close();
            }
        }
Example #6
        /// <summary>
        /// Constructor
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            // Init Kinect Sensors
            this.kinect = KinectSensor.GetDefault();

            if (kinect == null)
            {
                this.showCloseDialog("Kinectが接続されていないか、利用できません。アプリケーションを終了します。");
            }

            this.colorImageFormat = ColorImageFormat.Bgra;
            this.colorFrameDescription = this.kinect.ColorFrameSource.CreateFrameDescription(this.colorImageFormat);
            this.colorFrameReader = this.kinect.ColorFrameSource.OpenReader();
            this.colorFrameReader.FrameArrived += ColorFrameReader_FrameArrived;
            bodyFrameReader = kinect.BodyFrameSource.OpenReader();
            bodyFrameReader.FrameArrived += bodyFrameReader_FrameArrived;

            this.kinect.Open();
            this.bodies = new Body[kinect.BodyFrameSource.BodyCount];

            KinectRegion.SetKinectRegion(this, kinectRegion);
            this.kinectRegion.KinectSensor = KinectSensor.GetDefault();

            this.isTraining = false;
        }
        public MainWindow()
        {
            InitializeComponent();
            network.init();
             _sensor = KinectSensor.GetDefault();
            if(_sensor != null)
            {
                _sensor.Open();

                // Identify the bodies
                _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

                _colorReader = _sensor.ColorFrameSource.OpenReader();
                _colorReader.FrameArrived += ColorReader_FrameArrived;
                _bodyReader = _sensor.BodyFrameSource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // Initialize the face source with the desired features; some are commented out to be enabled later.
                _faceSource = new FaceFrameSource(_sensor, 0, FaceFrameFeatures.BoundingBoxInColorSpace);// |
                /*
                                                                FaceFrameFeatures.FaceEngagement |
                                                                FaceFrameFeatures.Glasses |
                                                                FaceFrameFeatures.Happy |
                                                                FaceFrameFeatures.LeftEyeClosed |
                                                                FaceFrameFeatures.MouthOpen |
                                                                FaceFrameFeatures.PointsInColorSpace |
                                                                FaceFrameFeatures.RightEyeClosed);
                                                                */

                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;
            }
        }
Example #8
        public MainWindow()
        {
            InitializeComponent();

            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _sensor.Open();

                _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

                _colorReader = _sensor.ColorFrameSource.OpenReader();
                _colorReader.FrameArrived += ColorReader_FrameArrived;
                _bodyReader = _sensor.BodyFrameSource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // 2) Initialize the face source with the desired features
                _faceSource = new FaceFrameSource(_sensor, 0, FaceFrameFeatures.BoundingBoxInColorSpace |
                                                              FaceFrameFeatures.FaceEngagement |
                                                              FaceFrameFeatures.Glasses |
                                                              FaceFrameFeatures.Happy |
                                                              FaceFrameFeatures.LeftEyeClosed |
                                                              FaceFrameFeatures.MouthOpen |
                                                              FaceFrameFeatures.PointsInColorSpace |
                                                              FaceFrameFeatures.RightEyeClosed |
                                                              FaceFrameFeatures.LookingAway);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;
            }
        }
        public MainWindow()
        {
            // Get the sensor
            sensor = KinectSensor.GetDefault();
            sensor.Open();

            // Setup readers for each source of data we want to use
            colorFrameReader = sensor.ColorFrameSource.OpenReader();
            bodyFrameReader = sensor.BodyFrameSource.OpenReader();

            // Setup event handlers that use what we get from the readers
            colorFrameReader.FrameArrived += this.Reader_ColorFrameArrived;
            bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

            // Get ready to draw graphics
            drawingGroup = new DrawingGroup();

            // Initialize the components (controls) of the window
            InitializeComponent();

            // Initialize color components

            // create the bitmap to display
            colorBitmap = new WriteableBitmap(1920, 1080, 96.0, 96.0, PixelFormats.Bgr32, null);
            ColorImage.Source = colorBitmap;

            // Initialize the game components
            birdHeight = this.Height / 2; // put the bird in the middle of the screen
            prevRightHandHeight = 0;
            prevLeftHandHeight = 0;
            pipeX = -1;
            pipeGapY = 250;
            pipeGapLength = 170;
            randomGenerator = new Random();
        }
        public KinectVizView()
        {
            InitializeComponent();

            // clean up
            Dispatcher.ShutdownStarted += Dispatcher_ShutdownStarted;

            // get the input of the color frames
            this.colorFrameSource = KinectModel.Instance.ColorFrameSource;

            // open the reader for the color frames
            this.colorFrameReader = this.colorFrameSource.OpenReader();

            // wire handler for frame arrival
            this.colorFrameReader.FrameArrived += this.Reader_ColorFrameArrived;

            // create the colorFrameDescription from the ColorFrameSource using Bgra format
            FrameDescription colorFrameDescription = this.colorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);

            // create the bitmap to display
            this.colorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

            // use the window object as the view model in this simple example
            this.DataContext = this;
        }
Example #11
        public MainWindow()
        {
            InitializeComponent();
            var hubConnection = new HubConnection("http://divewakeweb.azurewebsites.net/");
            stockTickerHubProxy = hubConnection.CreateHubProxy("WakeHub");
            hubConnection.Start().Wait();
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _sensor.Open();

                _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

                _colorReader = _sensor.ColorFrameSource.OpenReader();
                _colorReader.FrameArrived += ColorReader_FrameArrived;
                _bodyReader = _sensor.BodyFrameSource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // 2) Initialize the face source with the desired features
                _faceSource = new FaceFrameSource(_sensor, 0, FaceFrameFeatures.BoundingBoxInColorSpace |
                                                              FaceFrameFeatures.FaceEngagement |
                                                              FaceFrameFeatures.Glasses |
                                                              FaceFrameFeatures.LeftEyeClosed |
                                                              FaceFrameFeatures.PointsInColorSpace |
                                                              FaceFrameFeatures.RightEyeClosed);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;
            }
        }
Example #12
 public void TerminateFrame()
 {
     if (this.colorFrameReader != null)
     {
         this.colorFrameReader.Dispose();
         this.colorFrameReader = null;
         this.colorBitmap = null;
     }
 }
Example #13
        void MainWindow_Loaded( object sender, RoutedEventArgs e )
        {
            kinect = Kinect2.KinectSensor.Default;
            kinect.Open();
            colorReader = kinect.ColorFrameSource.OpenReader();
            bodyReader = kinect.BodyFrameSource.OpenReader();

            CompositionTarget.Rendering += CompositionTarget_Rendering;
        }
 /// <summary>
 /// Starts this handler
 /// </summary>
 /// <param name="c">controller class</param>
 /// <param name="sensor">kinect sensor</param>
 public override void Start(Controller c, KinectSensor sensor)
 {
     controller = c;
     if(sensor!= null)
     {
         //load reader and register frame arrived delegate (listener)
         reader = sensor.ColorFrameSource.OpenReader();
         reader.FrameArrived += Reader_FrameArrived;
     }
 }
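Only the Start side is shown here. A symmetric teardown would unhook the delegate and release the reader; a minimal sketch follows (whether the base class declares a matching virtual Stop is an assumption, so drop the override modifier if it does not):

 public override void Stop()
 {
     if (reader != null)
     {
         // unregister the frame arrived delegate and release the reader
         reader.FrameArrived -= Reader_FrameArrived;
         reader.Dispose();
         reader = null;
     }
 }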
Example #15
        public void Start() {
            if (_isRunning)
                return;

            _colorFrameReader = _sensorService.Sensor.ColorFrameSource.OpenReader();
            _colorFrameReader.FrameArrived +=_colorFrameReader_FrameArrived;

            _sensorService.Open();
            _isRunning = true;
        }
        public void Init()
        {
            this.ColorReader = (ColorFrameReader) this.Sensor.OpenRGB();
            this.ColorReader.FrameArrived += ColorReader_FrameArrived;
            this.ColorFrameDescription = this.Kinect.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Rgba);

            this.ColorImage = new WriteableBitmap(this.ColorFrameDescription.Width, this.ColorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

            Window.SetColorDisplaySource(this.ColorImage);
            this.Sensor.Open();
        }
        void colorFrameReader_FrameArrived( ColorFrameReader sender, ColorFrameArrivedEventArgs args )
        {
            using ( var colorFrame = args.FrameReference.AcquireFrame() ) {
                if ( colorFrame == null ) {
                    return;
                }

                // Get the BGRA data
                colorFrame.CopyConvertedFrameDataToArray( colorBuffer, ColorImageFormat.Bgra );

                // Write it into the bitmap
                var stream = colorBitmap.PixelBuffer.AsStream();
                stream.Write( colorBuffer, 0, colorBuffer.Length );
                colorBitmap.Invalidate();
            }
        }
        private void Window_Closing( object sender, System.ComponentModel.CancelEventArgs e )
        {
            if ( colorFrameReader != null ) {
                colorFrameReader.Dispose();
                colorFrameReader = null;
            }

            if ( kinect != null ) {
                kinect.IsAvailableChanged -= kinect_IsAvailableChanged;
                kinect.Close();
                kinect = null;
            }
        }
Example #19
        public void Stop() {
            if (!_isRunning)
                return;

            if (_colorFrameReader != null)
            {
                _colorFrameReader.FrameArrived -= _colorFrameReader_FrameArrived;
                _colorFrameReader.Dispose();
                _colorFrameReader = null;
                Debug.WriteLine("^^^^BASIC PHOTO STOPPED");
            }
            _isRunning = false;
        }
        private void Window_Closing( object sender, System.ComponentModel.CancelEventArgs e )
        {
            // Shutdown processing
            if ( colorFrameReader != null ) {
                colorFrameReader.Dispose();
                colorFrameReader = null;
            }

            if ( kinect != null ) {
                kinect.Close();
                kinect = null;
            }
        }
Example #21
        /// <summary>
        /// Initializes a new instance of the <see cref="MainPage"/> class.
        /// </summary>
        public MainPage()
        {
            this.InitializeComponent();

            RecordButton.Click += RecordButton_Click;

            ColorCompressionCombo.Items.Add("None (1920x1080)");
            ColorCompressionCombo.Items.Add("None (1280x720)");
            ColorCompressionCombo.Items.Add("None (640x360)");
            ColorCompressionCombo.Items.Add("JPEG (1920x1080)");
            ColorCompressionCombo.Items.Add("JPEG (1280x720)");
            ColorCompressionCombo.Items.Add("JPEG (640x360)");
            ColorCompressionCombo.SelectedIndex = 0;

            SmoothingCombo.Items.Add("None");
            SmoothingCombo.Items.Add("Kalman Filter");
            SmoothingCombo.Items.Add("Double Exponential");
            SmoothingCombo.SelectionChanged += SmoothingCombo_SelectionChanged;
            SmoothingCombo.SelectedIndex = 0;

            DisplayCombo.Items.Add("Body");
            DisplayCombo.Items.Add("Color");
            DisplayCombo.Items.Add("Depth");
            DisplayCombo.Items.Add("Infrared");
            DisplayCombo.SelectionChanged += DisplayCombo_SelectionChanged;
            DisplayCombo.SelectedIndex = 0;

            _sensor = KinectSensor.GetDefault();

            _bodyReader = _sensor.BodyFrameSource.OpenReader();
            _bodyReader.FrameArrived += _bodyReader_FrameArrived;

            _colorReader = _sensor.ColorFrameSource.OpenReader();
            _colorReader.FrameArrived += _colorReader_FrameArrived;
            var colorFrameDesc = _sensor.ColorFrameSource.FrameDescription;
            _colorData = new byte[colorFrameDesc.LengthInPixels * 4];

            _depthReader = _sensor.DepthFrameSource.OpenReader();
            _depthReader.FrameArrived += _depthReader_FrameArrived;
            var depthFrameDesc = _sensor.DepthFrameSource.FrameDescription;
            _depthData = new ushort[depthFrameDesc.LengthInPixels];

            _infraredReader = _sensor.InfraredFrameSource.OpenReader();
            _infraredReader.FrameArrived += _infraredReader_FrameArrived;
            var infraredFrameDesc = _sensor.InfraredFrameSource.FrameDescription;
            _infraredData = new ushort[infraredFrameDesc.LengthInPixels];

            _sensor.Open();
        }
Example #22
        public bool InitializeFrame()
        {
            this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader();
            if (this.colorFrameReader != null)
            {
                // wire color frame arrive event
                this.colorFrameReader.FrameArrived += this.Reader_ColorFrameArrived;

                this.colorFrameDescription = this.kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
                this.colorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);
                return true;

            }
            return false;
        }
        public MainWindow()
        {
            InitializeComponent();
            network.init();
            _sensor = KinectSensor.GetDefault();
            if(_sensor != null)
            {
                _sensor.Open();

                bodyCount = _sensor.BodyFrameSource.BodyCount;
                // Identify the bodies 
                _bodies = new Body[bodyCount];

                _colorReader = _sensor.ColorFrameSource.OpenReader();
                _colorReader.FrameArrived += ColorReader_FrameArrived;
                _bodyReader = _sensor.BodyFrameSource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // Initialize the face source with the desired features.
                _faceSources = new FaceFrameSource[bodyCount];
                _faceReaders = new FaceFrameReader[bodyCount];

                for(int i = 0; i < bodyCount; i++)
                {
                    // Create the face frame source with the required features and initial tracking id of 0
                    _faceSources[i] = new FaceFrameSource(_sensor, 0, FaceFrameFeatures.BoundingBoxInColorSpace);

                    // open the corresponding reader
                    _faceReaders[i] = _faceSources[i].OpenReader();
                    _faceReaders[i].FrameArrived += FaceReader_FrameArrived;
                }

                _faceResults = new FaceFrameResult[bodyCount];

                // Set the arrays and values for person switches and timeouts
                personSize = 3;
                ims = new Image[3] {maskImage, maskImage2, maskImage3};
                trackedInd = new bool[3] { false, false, false };
                _persons = new Person[personSize];
                for(int i = 0; i < personSize; i++)
                {
                    _persons[i] = new Person(0, ims[i], -1);
                }
                paths = new String[3] { "pack://application:,,,/Images/tinfoil.png",
                                        "pack://application:,,,/Images/cowboy.png",
                                        "pack://application:,,,/Images/napolean.png"};
            }
        }
        //Shutdown: when the main window closes, stop the colorFrame reader first, then the Kinect
        private void MainWindow_Closing(object sender, CancelEventArgs e)
        {
            if (this.colorFrameReader != null)
            {
                // ColorFrameReader is IDisposable
                this.colorFrameReader.Dispose();
                this.colorFrameReader = null;
            }

            if (this.kinectSensor != null)
            {
                this.kinectSensor.Close();
                this.kinectSensor = null;
            }
            Application.Current.Shutdown();
        }
        /// <summary>
        /// Execute shutdown tasks
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void MainWindow_Closing(object sender, CancelEventArgs e)
        {
            if (this.colorFrameReader != null)
            {
                // ColorFrameReader is IDisposable
                this.colorFrameReader.Dispose();
                this.colorFrameReader = null;
            }

            if (this.kinectSensor == null)
            {
                return;
            }
            this.kinectSensor.Close();
            this.kinectSensor = null;
        }
        public MainPage()
        {
            kinect = KinectSensor.GetDefault();

            colorFrameReader = kinect.ColorFrameSource.OpenReader();
            colorFrameReader.FrameArrived += Reader_ColorFrameArrived;

            FrameDescription colorFrameDescription = kinect.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Rgba);

            colorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height);

            kinect.Open();

            DataContext = this;
            InitializeComponent();
        }
Example #27
        /// <summary>
        /// Method executed when this WPF application closes.
        /// </summary>
        /// <param name="e">
        /// Data passed when the event is raised.
        /// </param>
        protected override void OnClosed(EventArgs e)
        {
            base.OnClosed(e);

            if (this.colorFrameReader != null)
            {
                this.colorFrameReader.Dispose();
                this.colorFrameReader = null;
            }

            if (this.kinect != null)
            {
                this.kinect.Close();
                this.kinect = null;
            }
        }
Example #28
        public void Stop()
        {
            if (!_isRunning)
            {
                return;
            }

            if (_colorFrameReader != null)
            {
                _colorFrameReader.FrameArrived -= _colorFrameReader_FrameArrived;
                _colorFrameReader.Dispose();
                _colorFrameReader = null;
                Debug.WriteLine("^^^^BASIC PHOTO STOPPED");
            }
            _isRunning = false;
        }
Example #29
 private void CoordinateMapper_CoordinateMappingChanged(object sender, CoordinateMappingChangedEventArgs e)
 {
     _logger.Info(string.Format("Coordinate mapper changed for sensor {0}. Configuring events...", this.kinectSensor.UniqueKinectId));
     this.depthFrameReader = this.kinectSensor.DepthFrameSource.OpenReader();
     this.depthFrameReader.FrameArrived += DepthFrameReader_FrameArrived;
     this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader();
     this.colorFrameReader.FrameArrived += ColorFrameReader_FrameArrived;
     this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();
     this.bodyFrameReader.FrameArrived      += BodyFrameReader_FrameArrived;
     this.audioBeamFrameReader               = this.kinectSensor.AudioSource.OpenReader();
     this.audioBeamFrameReader.FrameArrived += AudioBeamFrameReader_FrameArrived;
     this.audioBeamFrameReader.AudioSource.AudioBeams[0].AudioBeamMode = AudioBeamMode.Automatic;
     this.audioBeamFrameReader.AudioSource.AudioBeams[0].BeamAngle     = 0.0f;
     this.coordinateMapper = this.kinectSensor.CoordinateMapper;
     coordinateMapperReady.Set();
 }
Example #30
        public void CloseSensor()
        {
            if (this.colorFrameReader != null)
            {
                // ColorFrameReader is IDisposable

                this.colorFrameReader.Dispose();
                this.colorFrameReader = null;
            }

            if (this.kinectSensor != null)
            {
                this.kinectSensor.Close();
                this.kinectSensor = null;
            }
        }
Example #31
 void Start()
 {
     depthFrameDesc = KinectSensor.GetDefault().DepthFrameSource.FrameDescription;
     depthWidth     = depthFrameDesc.Width;
     depthHeight    = depthFrameDesc.Height;
     // buffer for points mapped to camera space coordinates.
     mapper                   = KinectSensor.GetDefault().CoordinateMapper;
     cameraSpacePoints        = new CameraSpacePoint[depthWidth * depthHeight];
     depthSourceManagerScript = depthSourceManager.GetComponent <DepthSourceManager>();
     colorSourceManagerScript = colorSourceManager.GetComponent <ColorSourceManager>();
     particles                = new ParticleSystem.Particle[depthWidth * depthHeight];
     color_reader             = KinectSensor.GetDefault().ColorFrameSource.OpenReader();
     colorFrameDesc           = KinectSensor.GetDefault().ColorFrameSource.CreateFrameDescription(ColorImageFormat.Rgba);
     colorSpacePoints         = new ColorSpacePoint[depthWidth * depthHeight];
     color_array              = new byte[colorFrameDesc.BytesPerPixel * colorFrameDesc.LengthInPixels];
 }
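Start() above allocates the camera-space and color-space buffers but does not show the per-frame mapping. A rough sketch of how they are typically filled each frame, assuming the DepthSourceManager script exposes a GetData() accessor as in the Kinect Unity samples (that accessor name is an assumption):

 void Update()
 {
     // latest raw depth frame from the depth manager (assumed accessor)
     ushort[] depthData = depthSourceManagerScript.GetData();
     if (depthData == null)
     {
         return;
     }

     // project every depth pixel into 3D camera space and into the color image
     mapper.MapDepthFrameToCameraSpace(depthData, cameraSpacePoints);
     mapper.MapDepthFrameToColorSpace(depthData, colorSpacePoints);

     // grab the latest RGBA color frame so each point can be coloured from color_array
     using (var colorFrame = color_reader.AcquireLatestFrame())
     {
         if (colorFrame != null)
         {
             colorFrame.CopyConvertedFrameDataToArray(color_array, ColorImageFormat.Rgba);
         }
     }
 }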
Example #32
        public MainWindow()
        {
            delete("5000");
            delete("5001");
            delete("6666");
            //The following code captures the Kinect bitmap from the baseline camera
            this.kinectSensor     = KinectSensor.GetDefault();
            this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader();
            this.colorFrameReader.FrameArrived += this.Reader_ColorFrameArrived;
            FrameDescription colorFrameDescription = this.kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);

            this.BasicColorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);
            this.kinectSensor.Open();

            //Key code, do not remove
            this.DataContext = this;

            //Initialize the window
            this.InitializeComponent();


            //Make sure the folders for saving Kinect data exist; create them in advance if they don't
            if (!Directory.Exists(upPath))
            {
                Directory.CreateDirectory(upPath);
            }
            if (!Directory.Exists(rightPath))
            {
                Directory.CreateDirectory(rightPath);
            }
            if (!Directory.Exists(leftPath))
            {
                Directory.CreateDirectory(leftPath);
            }
            if (!Directory.Exists(upHeadPath))
            {
                Directory.CreateDirectory(upHeadPath);
            }
            if (!Directory.Exists(rightHeadPath))
            {
                Directory.CreateDirectory(rightHeadPath);
            }
            if (!Directory.Exists(leftHeadPath))
            {
                Directory.CreateDirectory(leftHeadPath);
            }
        }
Example #33
        private async Task InitializeKinect()
        {
            _kinectSensor = await Sensor.GetDefaultAsync();

            if (_kinectSensor != null)
            {
                await _kinectSensor.OpenAsync();

                //_audioReader = await _kinectSensor.OpenAudioFrameReaderAsync();

                _colorReader = await _kinectSensor.OpenColorFrameReaderAsync(ReaderConfig.HalfRate | ReaderConfig.HalfResolution);

                _depthReader = await _kinectSensor.OpenDepthFrameReaderAsync();

                _bodyIndexReader = await _kinectSensor.OpenBodyIndexFrameReaderAsync();

                _bodyReader = await _kinectSensor.OpenBodyFrameReaderAsync();

                if (_depthReader != null)
                {
                    DepthOutput.Source         = new SoftwareBitmapSource();
                    _depthReader.FrameArrived += DepthReader_FrameArrived;
                }

                if (_colorReader != null)
                {
                    ColorOutput.Source         = new SoftwareBitmapSource();
                    _colorReader.FrameArrived += ColorReader_FrameArrived;
                }

                if (_bodyReader != null)
                {
                    _bodyReader.FrameArrived += BodyReader_FrameArrived;
                }

                if (_bodyIndexReader != null)
                {
                    BodyIndexOutput.Source         = new SoftwareBitmapSource();
                    _bodyIndexReader.FrameArrived += BodyIndexReader_FrameArrived;
                }

                if (_audioReader != null)
                {
                    _audioReader.FrameArrived += AudioReader_FrameArrived;
                }
            }
        }
Example #34
    protected virtual void Awake()
    {
        KinectSensor = KinectSensor.GetDefault();

        if (KinectSensor != null)
        {
            bodyFrameReader      = KinectSensor.BodyFrameSource.OpenReader();
            colorFrameReader     = KinectSensor.ColorFrameSource.OpenReader();
            depthFrameReader     = KinectSensor.DepthFrameSource.OpenReader();
            infraredFrameReader  = KinectSensor.InfraredFrameSource.OpenReader();
            bodyIndexFrameReader = KinectSensor.BodyIndexFrameSource.OpenReader();
            faceFrameSource      = HighDefinitionFaceFrameSource.Create(KinectSensor);
            faceFrameReader      = faceFrameSource.OpenReader();

            KinectSensor.Open();
        }
    }
Example #35
 public void SetColor(bool enable)
 {
     if (this.colorreader == null && enable && this.Runtime.IsAvailable)
     {
         colorreader = this.Runtime.ColorFrameSource.OpenReader();
         colorreader.FrameArrived += this.Runtime_ColorFrameReady;
     }
     else
     {
         if (this.colorreader != null)
         {
             this.colorreader.FrameArrived -= this.Runtime_ColorFrameReady;
             this.colorreader.Dispose();
             this.colorreader = null;
         }
     }
 }
Example #36
        public static void GetColorBgraData(this ColorFrameReader reader, byte[] data)
        {
            if (reader == null)
            {
                return;
            }

            using (var frame = reader.AcquireLatestFrame())
            {
                if (frame == null)
                {
                    return;
                }

                frame.CopyConvertedFrameDataToArray(data, ColorImageFormat.Bgra);
            }
        }
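A hypothetical caller for the extension above: allocate a BGRA buffer once from the sensor's frame description, then poll the reader on every tick. The sensor, colorReader and colorPixels names are assumptions made for this illustration.

        private void InitializeColorBuffer(KinectSensor sensor)
        {
            FrameDescription description = sensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
            colorPixels = new byte[description.Width * description.Height * (int)description.BytesPerPixel];
            colorReader = sensor.ColorFrameSource.OpenReader();
        }

        private void PollColorFrame()
        {
            // copies the latest frame into colorPixels, or returns without touching it when no new frame is ready
            colorReader.GetColorBgraData(colorPixels);
        }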
        public KinectSensorAdapter(KinectSensor sensor, bool useNearMode)
        {
            this.sensor = sensor;
            //if (useNearMode)                // older version: near mode isn't applicable in kinect v2 as it already starts detecting depth from 500 millimeters, which is the near mode in kinect v1
            //{
            //    this.sensor.DepthStream.Range = DepthRange.Near;
            //}

            //this.sensor.ColorFrameReady += new EventHandler<ColorImageFrameReadyEventArgs>(sensor_ColorFrameReady);       // older version
            //this.sensor.DepthFrameReady += new EventHandler<DepthImageFrameReadyEventArgs>(sensor_DepthFrameReady);       // older version

            this._colorReader = this.sensor.ColorFrameSource.OpenReader();                                           // added: initializing the Colored images reader
            this._colorReader.FrameArrived += new EventHandler <ColorFrameArrivedEventArgs>(sensor_ColorFrameReady); // update: initializing the event handler of colored frames arriving from kinect v2

            this._depthReader = this.sensor.DepthFrameSource.OpenReader();                                           // added: initializing the Depth images reader
            this._depthReader.FrameArrived += new EventHandler <DepthFrameArrivedEventArgs>(sensor_DepthFrameReady); // update: initializing the event handler of depth frames arriving from kinect v2
        }
Example #38
        public void Awake()
        {
            //Data = new byte[sourceManager.kinectSensor.ColorFrameSource.FrameDescription.LengthInPixels];
            sensor = KinectSensor.GetDefault();
            reader = sensor.ColorFrameSource.OpenReader();

            //FrameDescription desc = sensor.ColorFrameSource.FrameDescription;
            FrameDescription desc = sensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Rgba);

            Texture = new Texture2D(desc.Width, desc.Height, TextureFormat.RGBA32, false);
            Data    = new byte[desc.BytesPerPixel * desc.LengthInPixels];

            if (!sensor.IsOpen)
            {
                sensor.Open();
            }
        }
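Awake() above opens the reader and allocates the texture and buffer but does not include the per-frame poll. A minimal sketch, assuming the reader, Data and Texture fields declared in Awake():

        void Update()
        {
            if (reader == null)
            {
                return;
            }

            using (ColorFrame frame = reader.AcquireLatestFrame())
            {
                if (frame != null)
                {
                    // convert the frame to RGBA and push it into the texture
                    frame.CopyConvertedFrameDataToArray(Data, ColorImageFormat.Rgba);
                    Texture.LoadRawTextureData(Data);
                    Texture.Apply();
                }
            }
        }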
Example #39
        /// <summary>
        /// Constructor. Runs only once, at startup.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            //Acquire a reference to the Kinect sensor.
            this.kinect = KinectSensor.GetDefault();

            //Configure the format of the images to read and the reader.
            this.colorImageFormat = ColorImageFormat.Bgra;
            this.colorFrameDescription
                = this.kinect.ColorFrameSource.CreateFrameDescription(this.colorImageFormat);
            this.colorFrameReader = this.kinect.ColorFrameSource.OpenReader();
            this.colorFrameReader.FrameArrived += ColorFrameReader_FrameArrived;

            //Start the Kinect.
            this.kinect.Open();
        }
Example #40
    void OnApplicationQuit()
    {
        if (_Reader != null)
        {
            _Reader.Dispose();
            _Reader = null;
        }

        if (_Sensor != null)
        {
            if (_Sensor.IsOpen)
            {
                _Sensor.Close();
            }
            _Sensor = null;
        }
    }
Example #41
    // Use this for initialization
    void Start()
    {
        //turnScript = Player.GetComponent<Turning>();
        clappingScript = Player.GetComponent <VGB_Clapping>();
        // get the sensor object

        this.kinectSensor = KinectSensor.GetDefault();

        if (this.kinectSensor != null)
        {
            this.bodyCount = this.kinectSensor.BodyFrameSource.BodyCount;

            // color reader
            this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader();

            // create buffer from RGBA frame description
            var desc = this.kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Rgba);


            // body data
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // body frame to use
            this.bodies = new Body[this.bodyCount];

            // initialize the gesture detection objects for our gestures
            this.gestureDetectorList = new List <GestureDetector>();
            for (int bodyIndex = 0; bodyIndex < this.bodyCount; bodyIndex++)
            {
                //PUT UPDATED UI STUFF HERE FOR NO GESTURE
                DetectTextGameObject.text = "none";
                //this.bodyText[bodyIndex] = "none";
                //Create gesture detectors and add them to gestureDetectorList
                this.gestureDetectorList.Add(new GestureDetector(this.kinectSensor));
            }

            // start getting data from runtime
            this.kinectSensor.Open();
        }
        else
        {
            //kinect sensor not connected
            DetectTextGameObject.text = "not Connected";
        }
    }
Example #42
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            try {
                kinect = KinectSensor.GetDefault();
                if (kinect == null)
                {
                    throw new Exception("Kinectを開けません");
                }

                kinect.Open();
                // Create the color frame description (BGRA format)
                colorFrameDesc = kinect.ColorFrameSource.CreateFrameDescription(
                    colorFormat);

                // Open the color frame reader
                colorFrameReader = kinect.ColorFrameSource.OpenReader();
                colorFrameReader.FrameArrived += colorFrameReader_FrameArrived;

                // Create the bitmap for the color image
                colorBitmap = new WriteableBitmap(
                    colorFrameDesc.Width, colorFrameDesc.Height,
                    96, 96, PixelFormats.Bgra32, null);
                colorStride = colorFrameDesc.Width * (int)colorFrameDesc.BytesPerPixel;
                colorRect   = new Int32Rect(0, 0,
                                            colorFrameDesc.Width, colorFrameDesc.Height);
                colorBuffer       = new byte[colorStride * colorFrameDesc.Height];
                ImageColor.Source = colorBitmap;

                // Get the maximum number of bodies
                BODY_COUNT = kinect.BodyFrameSource.BodyCount;

                // Create the array that holds the bodies
                bodies = new Body[BODY_COUNT];

                // Open the body frame reader
                bodyFrameReader = kinect.BodyFrameSource.OpenReader();
                bodyFrameReader.FrameArrived += bodyFrameReader_FrameArrived;

                InitializeGesture();
            }
            catch (Exception ex) {
                MessageBox.Show(ex.Message);
                Close();
            }
        }
Example #43
        /// <summary>
        /// Initializes a new instance of the MainWindow class
        /// </summary>
        public MainWindow()
        {
            // only one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();
            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader();

            this.colorFrameReader.FrameArrived += this.Reader_ColorFrameArrived;

            FrameDescription colorFrameDescription = this.kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);

            this.colorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);


            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // set the BodyFramedArrived event notifier
            this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

            // initialize the BodyViewer object for displaying tracked bodies in the UI
            this.kinectBodyView = new KinectBodyView(this.kinectSensor);

            this.gestureResultView = new GestureResultView(false, false, 0.0f, "");

            // initialize the gesture detection objects for our gestures
            this.gestureDetector = new GestureDetector(this.kinectSensor, this.gestureResultView);

            // initialize the MainWindow
            this.InitializeComponent();

            // set our data context objects for display in UI
            this.DataContext = this;
            this.kinectBodyViewbox.DataContext = this.kinectBodyView;
            this.gestureResultGrid.DataContext = this.gestureResultView;
        }
Example #44
        public MainWindow()
        {
            InitializeComponent();
            DataContext = this;

            //get sensor
            m_KinectSensor = KinectSensor.GetDefault();
            m_KinectSensor.IsAvailableChanged += Sensor_IsAvailableChanged;
            m_KinectSensor.Open();

            //var tempFile = Path.Combine(Path.GetTempPath(), Guid.NewGuid() + ".wav");
            //set up audio
            var devices = new MMDeviceEnumerator();
            MMDeviceCollection endPoints = devices.EnumerateAudioEndPoints(DataFlow.Capture, DeviceState.Active);
            MMDevice           device    = endPoints.FirstOrDefault(x => x.FriendlyName.Contains("Xbox NUI Sensor"));
            var waveFormat = new WaveFormat();

            if (device != null)
            {
                m_AudioCapture = new WasapiCapture(device);
                m_AudioCapture.DataAvailable += OnAudioCaptureOnDataAvailable;

                waveFormat = m_AudioCapture.WaveFormat;
            }
            m_WaveFileWriter = new WaveFileWriter("test.wav", waveFormat);

            //set up video
            m_ColorFrameReader = m_KinectSensor.ColorFrameSource.OpenReader();
            m_ColorFrameReader.FrameArrived += Reader_ColorFrameArrived;
            FrameDescription colorFrameDescription =
                m_KinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);

            m_ColorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height,
                                                96.0, 96.0, PixelFormats.Bgr32, null);

            //update status bar
            UpdateStatusText();

            //set up writer
            m_Writer = new VideoFileWriter();
            m_Writer.Open("test.avi", colorFrameDescription.Width, colorFrameDescription.Height, 30, VideoCodec.H264);

            m_Recording = false;
            m_AudioCapture?.StartRecording();
        }
Example #45
        /// <summary>
        /// Dispose the Kinect and its readers
        /// </summary>
        public void Dispose()
        {
            if (kinectBody != null)
            {
                kinectBody.Dispose();
                kinectBody = null;
            }

            if (kinectColor != null)
            {
                kinectColor.Dispose();
                kinectColor = null;
            }

            if (kinectDepth != null)
            {
                kinectDepth.Dispose();
                kinectDepth = null;
            }

            if (kinectInfrared != null)
            {
                kinectInfrared.Dispose();
                kinectInfrared = null;
            }

            if (convertStream != null)
            {
                convertStream.Close();
                convertStream.Dispose();
                convertStream = null;
            }

            if (kinectSensor != null)
            {
                kinectSensor.Close();
                kinectSensor = null;
            }

            if (speechEngine != null)
            {
                speechEngine.Dispose();
                speechEngine = null;
            }
        }
Example #46
        /// <summary>
        /// Initializes a new instance of the KinectPage class.
        /// </summary>
        public KinectPage()
        {
            client = (Client)Application.Current.Resources["ApplicationScopeResource"];
            Console.WriteLine("kinectwindow! hello send");

            //Start saving images
            client.sendMsg("save");

            //Record the start time
            start_time = System.DateTime.Now;


            //Kinect
            // get the kinectSensor object
            this.kinectSensor = KinectSensor.GetDefault();

            // open the reader for the color frames
            this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader();

            // wire handler for frame arrival
            this.colorFrameReader.FrameArrived += this.Reader_ColorFrameArrived;


            // create the colorFrameDescription from the ColorFrameSource using Bgra format
            FrameDescription colorFrameDescription = this.kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);

            // create the bitmap to display
            this.colorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // initialize the components (controls) of the window
            this.InitializeComponent();
        }
Example #47
        public void CloseSensorAndReader()
        {
            if (reader != null)
            {
                reader.Dispose();
                reader = null;
            }

            if (sensor != null)
            {
                if (sensor.IsOpen)
                {
                    sensor.Close();
                }

                sensor = null;
            }
        }
    void OnApplicationQuit()
    {
        if (reader != null)
        {
            reader.Dispose();
            reader = null;
        }

        if (sensor != null)
        {
            if (sensor.IsOpen)
            {
                sensor.Close();
            }

            sensor = null;
        }
    }
Example #49
        /// <summary>
        /// Method executed when this WPF application closes.
        /// </summary>
        /// <param name="e">
        /// Data passed when the event is raised.
        /// </param>
        protected override void OnClosed(EventArgs e)
        {
            base.OnClosed(e);

            //Stop acquiring color images and release the associated resources.
            if (this.colorFrameReader != null)
            {
                this.colorFrameReader.Dispose();
                this.colorFrameReader = null;
            }

            //Stop the Kinect and release the associated resources.
            if (this.kinect != null)
            {
                this.kinect.Close();
                this.kinect = null;
            }
        }
Example #50
 public MainWindow( )
 {
     this._sensor = KinectSensor.GetDefault();
     this._sensor.IsAvailableChanged += this.Sensor_Status;
     if (this._sensor != null)
     {
         this._sensor.Open();
     }
     this._readerColor = this._sensor.ColorFrameSource.OpenReader();
     this._readerDepth = this._sensor.DepthFrameSource.OpenReader();
     this._readerColor.FrameArrived += this.Reader_FrameArrived;
     this._readerDepth.FrameArrived += this.Reader_FrameArrived;
     InitializeComponent();
     this.Title               = "Record Kinect 2.0";
     this._statusText         = this._sensor.IsAvailable ? "Kinect v2 sensor OK" : "Kinect v2 sensor not found";
     this._subjectID          = DateTime.Now.ToString("yyyy-MM-dd_HH-mm-ss");
     this.txtb_SubjectID.Text = this._subjectID;
 }
Example #51
        public async Task <object> OpenColorReader(dynamic input)
        {
            this.logCallback("OpenColorReader");
            if (this.colorFrameReader != null)
            {
                return(false);
            }
            this.colorFrameCallback = (Func <object, Task <object> >)input.colorFrameCallback;

            this.colorFrameDescription = this.kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Rgba);
            this.logCallback("color: " + this.colorFrameDescription.Width + "x" + this.colorFrameDescription.Height);

            this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader();
            this.colorFrameReader.FrameArrived += this.ColorReader_ColorFrameArrived;
            this.colorPixels = new byte[4 * this.colorFrameDescription.Width * this.colorFrameDescription.Height];

            return(true);
        }
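The ColorReader_ColorFrameArrived handler wired in OpenColorReader is not part of this example. A sketch of what it might look like, assuming each RGBA frame is copied into colorPixels and handed to the stored colorFrameCallback (the returned Task is deliberately ignored here):

        private void ColorReader_ColorFrameArrived(object sender, ColorFrameArrivedEventArgs e)
        {
            using (ColorFrame frame = e.FrameReference.AcquireFrame())
            {
                if (frame == null)
                {
                    return;
                }

                // copy the frame as RGBA and pass the raw buffer to the registered callback
                frame.CopyConvertedFrameDataToArray(this.colorPixels, ColorImageFormat.Rgba);
                this.colorFrameCallback(this.colorPixels);
            }
        }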
Example #52
        protected void colorReader_FrameArrived(ColorFrameReader sender, ColorFrameArrivedEventArgs args)
        {
            EventSource.Log.Debug("colorReader_FrameArrived()");

            using (ColorFrame colorFrame = args.FrameReference.AcquireFrame())
            {
                if (colorFrame != null)
                {
                    // get our color frame into our writeable bitmap
                    colorFrame.CopyConvertedFrameDataToArray(this.colorPixels, ColorImageFormat.Bgra);

                    Stream stream = this.liveWB.PixelBuffer.AsStream();
                    stream.Seek(0, SeekOrigin.Begin);
                    stream.Write(this.colorPixels, 0, colorPixels.Length);
                    this.liveWB.Invalidate();
                }
            }
        }
Example #53
 public MainWindow()
 {
     InitializeComponent();
     sensor           = KinectSensor.GetDefault();
     colorFrameReader = sensor.ColorFrameSource.OpenReader();
     colorFrameReader.FrameArrived += ColorFrameReader_FrameArrived;
     frameDescription =
         sensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
     // wbData references a WriteableBitmap object (the memory block that stores the image)
     wbData = new WriteableBitmap(
         frameDescription.Width, frameDescription.Height, 96, 96, PixelFormats.Bgr32, null);
     // byteData references a byte array holding the RGB values of every pixel in the image
     byteData = new byte[frameDescription.Width * frameDescription.Height * 4];
     //Start the Kinect sensor
     sensor.Open();
     //Display a message
     Result.Text = "Kinect 傳送影像";
 }
Example #54
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            #region Interface

            ColorRButton.IsChecked = true;
            RecordButton.IsEnabled = false;
            BodyCheckBox.IsEnabled = false;
            ColorCheckBox.IsEnabled = false;
            DepthCheckBox.IsEnabled = false;
            InfraredCheckBox.IsEnabled = false;
            FaceCheckBox.IsEnabled = false;
            AudioCheckBox.IsEnabled = false;

            #endregion

            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _sensor.Open();
                _bodies = new List<CustomBody>();

                _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceModel = new FaceModel();
                _faceAlignment = new FaceAlignment();

                _bodyReader = _sensor.BodyFrameSource.OpenReader();
                _bodyReader.FrameArrived += _bodyReader_FrameArrived;

                _colorReader = _sensor.ColorFrameSource.OpenReader();
                _colorReader.FrameArrived += _colorReader_FrameArrived;

                _depthReader = _sensor.DepthFrameSource.OpenReader();
                _depthReader.FrameArrived += _depthReader_FrameArrived;

                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += _faceReader_FrameArrived;

                _infraredReader = _sensor.InfraredFrameSource.OpenReader();
                _infraredReader.FrameArrived += _infraredReader_FrameArrived;


            }
        }
Example #55
        public MainWindow()
        {
            InitializeComponent();

            RecordButton.Click += RecordButton_Click;

            ColorCompressionCombo.Items.Add("None (1920x1080)");
            ColorCompressionCombo.Items.Add("None (1280x720)");
            ColorCompressionCombo.Items.Add("None (640x360)");
            ColorCompressionCombo.Items.Add("JPEG (1920x1080)");
            ColorCompressionCombo.Items.Add("JPEG (1280x720)");
            ColorCompressionCombo.Items.Add("JPEG (640x360)");
            ColorCompressionCombo.SelectedIndex = 0;

            SmoothingCombo.Items.Add("None");
            SmoothingCombo.Items.Add("Kalman Filter");
            SmoothingCombo.Items.Add("Double Exponential");
            SmoothingCombo.SelectionChanged += SmoothingCombo_SelectionChanged;
            SmoothingCombo.SelectedIndex = 0;

            DisplayCombo.Items.Add("Body");
            DisplayCombo.Items.Add("Color");
            DisplayCombo.Items.Add("Depth");
            DisplayCombo.Items.Add("Infrared");
            DisplayCombo.SelectionChanged += DisplayCombo_SelectionChanged;
            DisplayCombo.SelectedIndex = 0;

            _sensor = KinectSensor.GetDefault();

            _bodyReader = _sensor.BodyFrameSource.OpenReader();
            _bodyReader.FrameArrived += _bodyReader_FrameArrived;

            _colorReader = _sensor.ColorFrameSource.OpenReader();
            _colorReader.FrameArrived += _colorReader_FrameArrived;

            _depthReader = _sensor.DepthFrameSource.OpenReader();
            _depthReader.FrameArrived += _depthReader_FrameArrived;

            _infraredReader = _sensor.InfraredFrameSource.OpenReader();
            _infraredReader.FrameArrived += _infraredReader_FrameArrived;

            _sensor.Open();
            OutputImage.Source = _colorBitmap.Bitmap;
        }
Example #56
 public KinectDevice()
 {
     //Kinect setup
     this.kinect = KinectSensor.GetDefault();
     //settings and handlers
     //colorImage
     #region
     this.colorImageFormat = ColorImageFormat.Bgra;
     this.colorFrameDescription = this.kinect.ColorFrameSource.CreateFrameDescription(this.colorImageFormat);
     this.colorFrameReader = this.kinect.ColorFrameSource.OpenReader();
     this.colorFrameReader.FrameArrived += ColorFrame_Arrived;
     this.colors = new byte[this.colorFrameDescription.Width
                                    * this.colorFrameDescription.Height
                                    * this.colorFrameDescription.BytesPerPixel];
     #endregion
     //skeleton (body) data
     #region
     this.bodyFrameReader = this.kinect.BodyFrameSource.OpenReader();
     this.bodyFrameReader.FrameArrived += BodyFrame_Arrived;
     #endregion
     //depth data
     #region
     this.depthFrameReader = this.kinect.DepthFrameSource.OpenReader();
     this.depthFrameReader.FrameArrived += DepthFrame_Arrived;
     this.depthFrameDescription = this.kinect.DepthFrameSource.FrameDescription;
     this.depthBuffer = new ushort[this.depthFrameDescription.LengthInPixels];
     #endregion
     //BodyIndex
     #region
     this.bodyIndexFrameDes = this.kinect.BodyIndexFrameSource.FrameDescription;
     this.bodyIndexFrameReader = this.kinect.BodyIndexFrameSource.OpenReader();
     this.bodyIndexFrameReader.FrameArrived += this.BodyIndexFrame_Arrived;
     this.bodyIndexBuffer = new byte[this.bodyIndexFrameDes.Width *
                                         this.bodyIndexFrameDes.Height * this.bodyIndexFrameDes.BytesPerPixel];
     #endregion
     //start the Kinect
     this.package = new ShadowPackage();
     this.imageWidth = this.bodyIndexFrameDes.Width; 
     this.imageHeight = this.bodyIndexFrameDes.Height; 
     this.imageBytePerPixel = (int)this.bodyIndexFrameDes.BytesPerPixel;
     this.kinectImage = new Mat(this.imageHeight, this.imageWidth, MatType.CV_8UC1);
     this.kinect.Open();
 }
        // Kinect connect/disconnect event
        void kinect_IsAvailableChanged( object sender, IsAvailableChangedEventArgs e )
        {
            // Kinect was connected
            if ( e.IsAvailable ) {
                // Set up color
                if ( colorFrameReader == null ) {
                    colorFrameReader = kinect.ColorFrameSource.OpenReader();
                    colorFrameReader.FrameArrived += colorFrameReader_FrameArrived;
                }

                TextStatus.Text = "Kinectが接続されました";
            }
            // Kinect was disconnected
            else {
                // Reset the image
                ImageColor.Source = null;

                TextStatus.Text = "Kinectが外されました";
            }
        }
Example #58
        /// <summary>
        /// Initializes a new instance of the KinectInterrogator class.
        /// </summary>
        /// <param name="Sensor">The Kinect sensor to use</param>
        /// <param name="WaitingTime">The time, in milliseconds, to wait before taking another screenshot</param>
        public KinectInterrogator(KinectSensor Sensor, int WaitingTime)
        {
            attBodyCount = 0;
            attEnableTakingScreenshot = false;
            attBarcodeChecker = new BarCodeRecognized();
            attPlayerChecker = new PlayerChecker();
            attWaitingTime = WaitingTime;
            this.attKinectSensor = Sensor;

            attBodyFrameReader = attKinectSensor.BodyFrameSource.OpenReader();
            attBodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

            attColorFrameReader = Sensor.ColorFrameSource.OpenReader();
            attColorFrameReader.FrameArrived += this.Reader_ColorFrameArrived;

            attKinectSensor.Open();
            attLastcheck = DateTime.Now;

            SetBackgroundWorker();
        }
Example #59
        public LiveBackground()
            : base()
        {
            if (!DesignerProperties.GetIsInDesignMode(this))
            {
                _kinectManager = KinectManager.Default;

                // open the reader for the color frames
                this._colorFrameReader = _kinectManager.KinectSensor.ColorFrameSource.OpenReader();
                this._infraredFrameReader = _kinectManager.KinectSensor.InfraredFrameSource.OpenReader();
                this._bodyIndexFrameReader = _kinectManager.KinectSensor.BodyIndexFrameSource.OpenReader();
                FrameDescription bodyDescription = this.GetFrameDescriptionForMode(BackgroundMode.BodyIndex);
                this.bodyIndexPixels = new uint[bodyDescription.Width * bodyDescription.Height];

                // wire handler for frame arrival
                this._colorFrameReader.FrameArrived += this.Reader_ColorFrameArrived;
                this._infraredFrameReader.FrameArrived += this.Reader_InfraredFrameArrived;
                this._bodyIndexFrameReader.FrameArrived += this.Reader_BodyIndexFrameArrived;

                this._needColor = true;
            }
        }
    // Use this for initialization
    void Start()
    {
        GameObject go = GameObject.Find("TextMessage");
        tm = (TextMesh)go.GetComponent("TextMesh");
        tm.text = "Kinect Initialize";

        go = GameObject.Find("TextMessage2");
        tm2 = (TextMesh)go.GetComponent("TextMesh");

        go = GameObject.Find("TextMessageFPS");
        tm3 = (TextMesh)go.GetComponent("TextMesh");

        kinect = KinectSensor.Default;
        kinect.Open();
        colorReader = kinect.ColorFrameSource.OpenReader();

        if (texture == null) {
            texture = new Texture2D(1920,1080, TextureFormat.BGRA32,false);
            renderer.material.mainTexture = texture;
            tm.text = "texture created.";
        }
    }