Code Example #1
    // Use this for initialization
    void Start()
    {
        // Find a Kinect sensor
        KinectSensorCollection kinectSensors = KinectSensor.KinectSensors;
        if(kinectSensors.Count == 0)
        {
            this.sensor = null;
            throw new UnityException("Could not find a Kinect sensor.");
        }

        // Enable the skeleton stream
        this.sensor = kinectSensors[0];
        this.sensor.SkeletonStream.Enable();
        if(!this.sensor.SkeletonStream.IsEnabled)
        {
            throw new UnityException("Sensor could not be enabled.");
        }
        // Create the input processor
        this.inputProcessor = new SeatedInfoProcessor();
        //inputProcessor = new InputProcessor(this.sensor.CoordinateMapper, DepthImageFormat.Resolution320x240Fps30);
        this.InputInfo = null;

        Debug.Log("Hello");
        return;
    }
Code Example #2
        public MainPage()
        {
            InitializeComponent();

            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _sensor.Open();

                _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

                _colorReader = _sensor.ColorFrameSource.OpenReader();
                _colorReader.FrameArrived += ColorReader_FrameArrived;
                _bodyReader = _sensor.BodyFrameSource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // 2) Initialize the face source with the desired features
                _faceSource = new FaceFrameSource(_sensor, 0, FaceFrameFeatures.BoundingBoxInColorSpace |
                                                              FaceFrameFeatures.FaceEngagement |
                                                              FaceFrameFeatures.Glasses |
                                                              FaceFrameFeatures.Happy |
                                                              FaceFrameFeatures.LeftEyeClosed |
                                                              FaceFrameFeatures.MouthOpen |
                                                              FaceFrameFeatures.PointsInColorSpace |
                                                              FaceFrameFeatures.RightEyeClosed);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;
            }
        }
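Note: the face source above is created with an initial tracking ID of 0, so face frames only start arriving once the tracking ID of a tracked body is assigned to it, and the example does not show the wired-up handlers. A minimal sketch of what they might look like (the handler names follow the example; the body-selection logic and the System.Linq usage are assumptions, not part of the original project):

        void BodyReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            using (BodyFrame frame = e.FrameReference.AcquireFrame())
            {
                if (frame == null) return;

                // refresh the body array allocated in the constructor
                frame.GetAndRefreshBodyData(_bodies);

                // hand the first tracked body to the face source (requires using System.Linq)
                Body body = _bodies.FirstOrDefault(b => b.IsTracked);
                if (body != null)
                {
                    _faceSource.TrackingId = body.TrackingId;
                }
            }
        }

        void FaceReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
        {
            using (FaceFrame frame = e.FrameReference.AcquireFrame())
            {
                if (frame != null && frame.FaceFrameResult != null)
                {
                    // read the requested features, e.g.
                    // frame.FaceFrameResult.FaceProperties[FaceProperty.Happy]
                }
            }
        }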
Code Example #3
    void Start () 
    {
        _Sensor = KinectSensor.GetDefault();
        
        if (_Sensor != null) 
        {
            _Reader = _Sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth);
            
            var colorFrameDesc = _Sensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Rgba);
            ColorWidth = colorFrameDesc.Width;
            ColorHeight = colorFrameDesc.Height;
            
            _ColorTexture = new Texture2D(colorFrameDesc.Width, colorFrameDesc.Height, TextureFormat.RGBA32, false);
            _ColorData = new byte[colorFrameDesc.BytesPerPixel * colorFrameDesc.LengthInPixels];

            var depthFrameDesc = _Sensor.DepthFrameSource.FrameDescription;
            _DepthData = new ushort[depthFrameDesc.LengthInPixels];

            _DepthTexture = new Texture2D(depthFrameDesc.Width, depthFrameDesc.Height, TextureFormat.ARGB32, false);
            
            if (!_Sensor.IsOpen)
            {
                _Sensor.Open();
            }
        }
    }
Code Example #4
        void kinect_IsAvailableChanged( KinectSensor sender, IsAvailableChangedEventArgs args )
        {
            // Kinect has been connected
            if ( args.IsAvailable ) {
                // set up the color stream
                if ( colorFrameReader == null ) {
                    // create the color frame description (BGRA format)
                    colorFrameDesc = kinect.ColorFrameSource.CreateFrameDescription( ColorImageFormat.Bgra );

                    colorBitmap = new WriteableBitmap( colorFrameDesc.Width, colorFrameDesc.Height );
                    ImageColor.Source = colorBitmap;

                    colorBuffer = new byte[colorFrameDesc.Width * colorFrameDesc.Height * colorFrameDesc.BytesPerPixel];

                    // open the color frame reader
                    colorFrameReader = kinect.ColorFrameSource.OpenReader();
                    colorFrameReader.FrameArrived += colorFrameReader_FrameArrived;
                }

                ImageColor.Source = colorBitmap;

                TextStatus.Text = "Kinectが接続されました";
            }
            // Kinect has been disconnected
            else {
                // clear the image
                ImageColor.Source = null;

                TextStatus.Text = "Kinectが外されました";
            }
        }
Code Example #5
File: MainPage.xaml.cs  Project: noa99kee/K4W2-Book
        protected override void OnNavigatedTo( NavigationEventArgs e )
        {
            base.OnNavigatedTo( e );

            try {
                // open the Kinect
                kinect = KinectSensor.GetDefault();
                if ( kinect == null ) {
                    throw new Exception( "Kinectを開けません" );
                }

                kinect.Open();

                // get the infrared frame description
                infraredFrameDesc = kinect.InfraredFrameSource.FrameDescription;

                // create buffers for building the image
                infraredBitmapBuffer = new byte[infraredFrameDesc.LengthInPixels * 4];
                infraredBitmap = new WriteableBitmap(
                    infraredFrameDesc.Width, infraredFrameDesc.Height );
                ImageInfrared.Source = infraredBitmap;

                infraredBuffer = new ushort[infraredFrameDesc.LengthInPixels];

                // open the infrared frame reader
                infraredFrameReader = kinect.InfraredFrameSource.OpenReader();
                infraredFrameReader.FrameArrived += infraredFrameReader_FrameArrived;
            }
            catch ( Exception ex ) {
                MessageDialog dlg = new MessageDialog( ex.Message );
                dlg.ShowAsync();
            }
        }
Code Example #6
File: FaceTracker.cs  Project: ushadow/handinput
        /// <summary>
        /// Initializes a new instance of the FaceTracker class from a reference of the Kinect device.
        /// </summary>
        /// <param name="sensor">Reference to kinect sensor instance</param>
        public FaceTracker(KinectSensor sensor)
        {
            if (sensor == null)
            {
                throw new ArgumentNullException("sensor");
            }

            if (!sensor.ColorStream.IsEnabled)
            {
                throw new InvalidOperationException("Color stream is not enabled yet.");
            }

            if (!sensor.DepthStream.IsEnabled)
            {
                throw new InvalidOperationException("Depth stream is not enabled yet.");
            }

            this.operationMode = OperationMode.Kinect;
            this.coordinateMapper = sensor.CoordinateMapper;
            this.initializationColorImageFormat = sensor.ColorStream.Format;
            this.initializationDepthImageFormat = sensor.DepthStream.Format;

            var newColorCameraConfig = new CameraConfig(
                (uint)sensor.ColorStream.FrameWidth,
                (uint)sensor.ColorStream.FrameHeight,
                sensor.ColorStream.NominalFocalLengthInPixels,
                FaceTrackingImageFormat.FTIMAGEFORMAT_UINT8_B8G8R8X8);
            var newDepthCameraConfig = new CameraConfig(
                (uint)sensor.DepthStream.FrameWidth,
                (uint)sensor.DepthStream.FrameHeight,
                sensor.DepthStream.NominalFocalLengthInPixels,
                FaceTrackingImageFormat.FTIMAGEFORMAT_UINT16_D13P3);

            this.Initialize(newColorCameraConfig, newDepthCameraConfig, IntPtr.Zero, IntPtr.Zero, this.DepthToColorCallback);
        }
Code Example #7
    // Use this for initialization
    void Start()
    {
        // set up kinect connection
        sensor = KinectSensor.GetDefault();

        if (sensor != null)
        {
            bodyReader = sensor.BodyFrameSource.OpenReader();

        //			faceFrameSources = new FaceFrameSource[bodyCount];
        //			faceFrameReaders = new FaceFrameReader[bodyCount];
        //
        //			for(int i = 0; i < bodyCount; ++i)
        //			{
        //				faceFrameSources[i] = FaceFrameSource.Create(sensor, 0, FaceFrameFeatures.LookingAway);
        //				faceFrameReaders[i] = faceFrameSources[i].OpenReader();
        //			}
        //
        //			faceFrameResults = new FaceFrameResult[bodyCount];

            if (!sensor.IsOpen)
                sensor.Open ();
        }
        else
            Debug.Log ("No Kinect Sensor found. Check connections / power");
    }
Code Example #8
        public MainPage()
        {
            InitializeComponent();

            _navigationHelper = new NavigationHelper(this);

            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _sensor.Open();

                _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body);
                _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

                _gestureController = new GestureController();
                _gestureController.GestureRecognized += GestureController_GestureRecognized;
            }
            // Initialize the gesture detection objects for our gestures
            this.gestureDetectorList = new List<GestureDetector>();

            // Create a gesture detector for each body (6 bodies => 6 detectors)
            int maxBodies = this._sensor.BodyFrameSource.BodyCount;
            for (int i = 0; i < maxBodies; ++i)
            {
                GestureResultView result =
                     new GestureResultView(i, false, false, 0.0f);
                GestureDetector detector =
                    new GestureDetector(this._sensor, result);
                result.PropertyChanged += GestureResult_PropertyChanged;
                this.gestureDetectorList.Add(detector);
            }
        }
Code Example #9
	// Use this for initialization
	void Start () {

		mySensor = KinectSensor.GetDefault();

		if (mySensor != null)
		{
			// Total array of data representing a single rendered frame
			colorFrameData = new byte[colorWidth * colorHeight * bytes_per_pixel];

			backgroundTex = new Texture2D(colorWidth, colorHeight, TextureFormat.BGRA32, false);

			if (!mySensor.IsOpen)
			{
				mySensor.Open();
			}

			msFrameReader = mySensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color);

			//Rendering user as part of the Unity Scene background via Main Camera
			Rect cameraRect = Camera.main.pixelRect;
			float rectHeight = cameraRect.height;
			float rectWidth = cameraRect.width;

			if (rectWidth > rectHeight)
				rectWidth = rectHeight * colorWidth / colorHeight;
			else
				rectHeight = rectWidth * colorHeight / colorWidth;

			float foregroundOfsX = (cameraRect.width - rectWidth) / 2;
			float foregroundOfsY = (cameraRect.height - rectHeight) / 2;
			foregroundImgRect = new Rect(foregroundOfsX, foregroundOfsY, rectWidth, rectHeight);
			foregroundGuiRect = new Rect(foregroundOfsX, cameraRect.height - foregroundOfsY, rectWidth, -rectHeight);
		}
	} //End of Start()
Code Example #10
File: MainPage.xaml.cs  Project: ruscles/tutorial
        public MainPage()
        {
            // one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // get the infraredFrameDescription from the InfraredFrameSource
            FrameDescription infraredFrameDescription = this.kinectSensor.InfraredFrameSource.FrameDescription;

            // open the reader for the infrared frames
            this.infraredFrameReader = this.kinectSensor.InfraredFrameSource.OpenReader();

            // wire handler for frame arrival
            this.infraredFrameReader.FrameArrived += this.Reader_InfraredFrameArrived;

            // allocate space to put the pixels being received and converted
            this.infraredFrameData = new ushort[infraredFrameDescription.Width * infraredFrameDescription.Height];
            this.infraredPixels = new byte[infraredFrameDescription.Width * infraredFrameDescription.Height * BytesPerPixel];

            // create the bitmap to display
            this.bitmap = new WriteableBitmap(infraredFrameDescription.Width, infraredFrameDescription.Height);

            this.CurrentFrameDescription = infraredFrameDescription;

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // open the sensor
            this.kinectSensor.Open();

            this.InitializeComponent();
        }
Code Example #11
    /// <summary>
    /// Initializes a new instance of the GestureDetector class along with the gesture frame source and reader
    /// </summary>
    /// <param name="kinectSensor">Active sensor to initialize the VisualGestureBuilderFrameSource object with</param>
    /// <param name="gestureResultView">GestureResultView object to store gesture results of a single body to</param>
    public GestureDetector(KinectSensor kinectSensor, KinectManager kinectManager)
    {
        if (kinectSensor == null)
        {
            throw new ArgumentNullException("kinectSensor");
        }

        // create the vgb source. The associated body tracking ID will be set when a valid body frame arrives from the sensor.
        this.vgbFrameSource = VisualGestureBuilderFrameSource.Create(kinectSensor, 0);
        this.vgbFrameSource.TrackingIdLost += this.Source_TrackingIdLost;

        // open the reader for the vgb frames
        this.vgbFrameReader = this.vgbFrameSource.OpenReader();
        if (this.vgbFrameReader != null)
        {
            this.vgbFrameReader.IsPaused = true;
            this.vgbFrameReader.FrameArrived += this.Reader_GestureFrameArrived;
        }

        // load the shooting gesture from the gesture database
        using (VisualGestureBuilderDatabase database =  VisualGestureBuilderDatabase.Create(this.gestureDatabase))
        {
            // we could load all available gestures in the database with a call to vgbFrameSource.AddGestures(database.AvailableGestures), 
            // but for this program, we only want to track one discrete gesture from the database, so we'll load it by name
            foreach (Gesture gesture in database.AvailableGestures)
            {
                if (gesture.Name.Equals(this.shootingGestureName))
                {
                    this.vgbFrameSource.AddGesture(gesture);
                }
            }
        }
    }
Code Example #12
        protected override void OnNavigatedTo( NavigationEventArgs e )
        {
            base.OnNavigatedTo( e );

            try {
                // open the Kinect
                kinect = KinectSensor.GetDefault();
                if ( kinect == null ) {
                    throw new Exception( "Kinectを開けません" );
                }

                kinect.Open();

                // create the color frame description (BGRA format)
                colorFrameDesc = kinect.ColorFrameSource.CreateFrameDescription( ColorImageFormat.Bgra );

                colorBitmap = new WriteableBitmap( colorFrameDesc.Width, colorFrameDesc.Height );
                ImageColor.Source = colorBitmap;

                colorBuffer = new byte[colorFrameDesc.Width * colorFrameDesc.Height * colorFrameDesc.BytesPerPixel];

                // open the color frame reader
                colorFrameReader = kinect.ColorFrameSource.OpenReader();
                colorFrameReader.FrameArrived += colorFrameReader_FrameArrived;
            }
            catch ( Exception ex ) {
                MessageDialog dlg = new MessageDialog( ex.Message );
                dlg.ShowAsync();
            }
        }
Code Example #13
File: MainPage.xaml.cs  Project: noa99kee/K4W2-Book
        protected override void OnNavigatedTo( NavigationEventArgs e )
        {
            base.OnNavigatedTo( e );

            try {
                // open the Kinect
                kinect = KinectSensor.GetDefault();
                kinect.Open();

                // create the data for display
                depthFrameDesc = kinect.DepthFrameSource.FrameDescription;

                // open the depth frame reader
                depthFrameReader = kinect.DepthFrameSource.OpenReader();
                depthFrameReader.FrameArrived += depthFrameReader_FrameArrived;

                // data for display
                depthBitmap = new WriteableBitmap( depthFrameDesc.Width,
                                                   depthFrameDesc.Height );
                ImageDepth.Source = depthBitmap;

                depthBuffer = new ushort[depthFrameDesc.LengthInPixels];
                depthBitmapBuffer = new byte[depthFrameDesc.LengthInPixels * 4];

                depthPoint = new Point( depthFrameDesc.Width / 2,
                                        depthFrameDesc.Height / 2 );
            }
            catch ( Exception ex ) {
                MessageDialog dlg = new MessageDialog(ex.Message);
                dlg.ShowAsync();
            }
        }
Code Example #14
        public MainPage()
        {
            // one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            SetupCurrentDisplay(DEFAULT_DISPLAYFRAMETYPE);

            this.multiSourceFrameReader =
                this.kinectSensor.OpenMultiSourceFrameReader(
                 FrameSourceTypes.Infrared
                 | FrameSourceTypes.Color
                 | FrameSourceTypes.Depth);

            this.multiSourceFrameReader.MultiSourceFrameArrived +=
                this.Reader_MultiSourceFrameArrived;

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // open the sensor
            this.kinectSensor.Open();

            this.InitializeComponent();
        }
Code Example #15
	// Use this for initialization
	void Start () {

        print("START");
        /*Get Kinect Sensor and start reading data*/

        //Get Default Kinect Sensor
        sensor = null;
        sensor = KinectSensor.GetDefault();

        if( sensor != null )
        {
            //We have a sensor connected

            print("SENSOR CONNECTED");
            //Open the connection/Start reading the data
            reader = sensor.BodyFrameSource.OpenReader();
            if( !sensor.IsOpen )
            {
                sensor.Open();
            }
            

        } else
        {
            print("NO KINECT CONNECTED");
        }

        print(sensor);
	}
Code Example #16
	// Use this for initialization
	void Start()
	{
		mySensor = KinectSensor.GetDefault();

		if (mySensor != null)
		{
			if (!mySensor.IsOpen)
			{
				mySensor.Open();
			}

			ninjaTex = new Texture2D(colorWidth, colorHeight, TextureFormat.BGRA32, false);

			msFrameReader = mySensor.OpenMultiSourceFrameReader(FrameSourceTypes.Body | FrameSourceTypes.Depth |
				FrameSourceTypes.BodyIndex);

			// There has to be a more efficient way of tracking these (i.e. using OOP)
			rightHandQueue_X = new Queue<float>();
			rightHandQueue_Y = new Queue<float>();
			rightHandQueue_T = new Queue<float>();

			leftHandQueue_X = new Queue<float>();
			leftHandQueue_Y = new Queue<float>();
			leftHandQueue_T = new Queue<float>();

			rightFootQueue_X = new Queue<float>();
			rightFootQueue_Y = new Queue<float>();
			rightFootQueue_T = new Queue<float>();

			leftFootQueue_X = new Queue<float>();
			leftFootQueue_Y = new Queue<float>();
			leftFootQueue_T = new Queue<float>();

			/** Construct StreamWriter object for collecting user data **/
			sw_v = new StreamWriter("EMILY_V.txt");
			sw_t = new StreamWriter("EMILY_T.txt");
			sw_x = new StreamWriter("EMILY_X.txt");
			sw_y = new StreamWriter("EMILY_Y.txt");


			InitializeSlashRenderer();

			//Rendering user as part of the Unity Scene background via Main Camera
			Rect cameraRect = Camera.main.pixelRect;
			float rectHeight = cameraRect.height;
			float rectWidth = cameraRect.width;

			if (rectWidth > rectHeight)
				rectWidth = rectHeight * colorWidth / colorHeight;
			else
				rectHeight = rectWidth * colorHeight / colorWidth;

			float foregroundOfsX = (cameraRect.width - rectWidth) / 2;
			float foregroundOfsY = (cameraRect.height - rectHeight) / 2;
			foregroundImgRect = new Rect(foregroundOfsX, foregroundOfsY, rectWidth, rectHeight);
			foregroundGuiRect = new Rect(foregroundOfsX, cameraRect.height - foregroundOfsY, rectWidth, -rectHeight);
		}

	}
Code Example #17
        /// <summary>
        /// Stops the kinect.
        /// </summary>
        public void StopKinect()
        {
            if (kinect != null)
            {
                kinect.Stop();
                kinect = null;
            }
        }
Code Example #18
 public ZigInputKinectOne()
 {
     _sensor = KinectSensor.GetDefault();
     _mapper = _sensor.CoordinateMapper;
     _depth = new KinectOneDepth(_sensor);
     _image = new KinectOneImage(_sensor);
     _labelMap = new KinectOneLabelMap(_sensor);
 }
Code Example #19
	void Start () {
		_Sensor = KinectSensor.GetDefault();
		if (_Sensor != null) {
			_Reader = _Sensor.BodyFrameSource.OpenReader();
			if (!_Sensor.IsOpen)
				_Sensor.Open();
		}   
	}
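A sketch of the matching Update() step, under the assumption that the script also declares a Body[] field (called _Data here) for the refreshed body data; this is not part of the example above:

	void Update () {
		if (_Reader != null) {
			var frame = _Reader.AcquireLatestFrame();
			if (frame != null) {
				if (_Data == null) {
					_Data = new Body[_Sensor.BodyFrameSource.BodyCount];
				}
				// copy the latest tracked-body data into the local array
				frame.GetAndRefreshBodyData(_Data);
				frame.Dispose();
			}
		}
	}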
Code Example #20
 public KinectData()
 {
     kinect = null;
     leftHandY = 0;
     rightHandY = 0;
     headY = 0;
     hipCentreY = 0;
     playerFound = false;
 }
Code Example #21
        protected override void OnNavigatingFrom( NavigatingCancelEventArgs e )
        {
            base.OnNavigatingFrom( e );

            if ( kinect != null ) {
                kinect.Close();
                kinect = null;
            }
        }
Code Example #22
File: MainPage.xaml.cs  Project: kutabar/tutorial
        public MainPage()
        {
            // one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // open the sensor
            this.kinectSensor.Open();

            this.InitializeComponent();
        }
Code Example #23
File: DepthSourceManager.cs  Project: Ran4/Blopper
    void Start()
    {
        _Sensor = KinectSensor.GetDefault();

        if (_Sensor != null)
        {
            _Reader = _Sensor.DepthFrameSource.OpenReader();
            _Data = new ushort[_Sensor.DepthFrameSource.FrameDescription.LengthInPixels];
        }
    }
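A sketch of the corresponding Update() step, which would copy the latest depth frame into the _Data buffer allocated above (assumed to follow the usual Unity depth-source-manager pattern; it is not shown in the original file):

    void Update()
    {
        if (_Reader != null)
        {
            var frame = _Reader.AcquireLatestFrame();
            if (frame != null)
            {
                // copy the 16-bit depth values into the preallocated buffer
                frame.CopyFrameDataToArray(_Data);
                frame.Dispose();
            }
        }
    }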
Code Example #24
    public static void initializeKinect()
    {
        // Look through all sensors and start the first connected one.
        // This requires that a Kinect is connected at the time of app startup.
        // To make your app robust against plug/unplug,
        // it is recommended to use KinectSensorChooser provided in Microsoft.Kinect.Toolkit (See components in Toolkit Browser).
        foreach (var potentialSensor in KinectSensor.KinectSensors)
        {
            if (potentialSensor.Status == KinectStatus.Connected)
            {
                sensor = potentialSensor;
                break;
            }
        }

        if (null != sensor)
        {
            // Turn on streaming to receive frames.
            sensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);
            sensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);

            // Allocate space to put the depth pixels we'll receive.
            depthImagePixels = new DepthImagePixel[sensor.DepthStream.FramePixelDataLength];
            depthPixels = new short[sensor.DepthStream.FramePixelDataLength];// 640*480 = 307200.
            depth1Packet = new byte[sensor.DepthStream.FramePixelDataLength];
            colorPacket	 = new byte[640 * 480 * 3];		// sensor.ColorStream.FramePixelDataLength = 640*480*4.
            positionPacket = new byte[640 * 480 * 12];	// = 3686400.
            depth2Packet = new byte[640 * 480 * 2];		// = 614400.
            SwitchTransferData();

            // Add an event handler to be called whenever there is new depth/color frame data.
            sensor.DepthFrameReady += SensorDepthFrameReady;
            sensor.ColorFrameReady += SensorColorFrameReady;

            // Start the sensor!
            try
            {
                sensor.Start();
            }
            catch (IOException)
            {
                sensor = null;
            }
        }

        if (null == sensor)
        {
            Console.WriteLine("Kinect is not ready (please restart program).");
        }
        else
        {
            Console.WriteLine("Kinect initialization has successfully completed.");
        }
    }
Code Example #25
	void OnApplicationQuit() {
		if (_Reader != null){
			_Reader.Dispose();
			_Reader = null;
		}
		
		if (_Sensor != null) {
			if (_Sensor.IsOpen)
				_Sensor.Close();
			_Sensor = null;
		}
	}
Code Example #26
    void OnApplicationQuit()
    {
        // Dispose the Kinect sensor
        if(sensor != null)
        {
            sensor.Dispose();
            sensor = null;
        }

        Debug.Log ("Goodbye");
        return;
    }
Code Example #27
File: BodyManager.cs  Project: sandybisaria/Jarvis
    // Use this for initialization
    void Start()
    {
        Sensor = KinectSensor.GetDefault();

        if (Sensor != null) {
            Reader = Sensor.BodyFrameSource.OpenReader ();

            if (!Sensor.IsOpen) {
                Sensor.Open ();
            }
        }
    }
Code Example #28
        protected override void OnNavigatedTo( NavigationEventArgs e )
        {
            base.OnNavigatedTo( e );

            try {
                kinect = KinectSensor.GetDefault();
                kinect.Open();
            }
            catch ( Exception ex ) {
                MessageDialog dlg = new MessageDialog(ex.Message);
                dlg.ShowAsync();
            }
        }
Code Example #29
File: NinjaUser.cs  Project: iEmily/nutra-ninja
	void OnApplicationQuit()
	{
		if (irFrameReader != null)
		{
			irFrameReader.Dispose();
			irFrameReader = null;
		}
		if (mySensor != null)
		{
			if (mySensor.IsOpen)
				mySensor.Close();
			mySensor = null;
		}
	}
Code Example #30
File: MainPage.xaml.cs  Project: KinectEx/KinectEx
        /// <summary>
        /// Initializes a new instance of the <see cref="MainPage"/> class.
        /// </summary>
        public MainPage()
        {
            this.InitializeComponent();

            RecordButton.Click += RecordButton_Click;

            ColorCompressionCombo.Items.Add("None (1920x1080)");
            ColorCompressionCombo.Items.Add("None (1280x720)");
            ColorCompressionCombo.Items.Add("None (640x360)");
            ColorCompressionCombo.Items.Add("JPEG (1920x1080)");
            ColorCompressionCombo.Items.Add("JPEG (1280x720)");
            ColorCompressionCombo.Items.Add("JPEG (640x360)");
            ColorCompressionCombo.SelectedIndex = 0;

            SmoothingCombo.Items.Add("None");
            SmoothingCombo.Items.Add("Kalman Filter");
            SmoothingCombo.Items.Add("Double Exponential");
            SmoothingCombo.SelectionChanged += SmoothingCombo_SelectionChanged;
            SmoothingCombo.SelectedIndex = 0;

            DisplayCombo.Items.Add("Body");
            DisplayCombo.Items.Add("Color");
            DisplayCombo.Items.Add("Depth");
            DisplayCombo.Items.Add("Infrared");
            DisplayCombo.SelectionChanged += DisplayCombo_SelectionChanged;
            DisplayCombo.SelectedIndex = 0;

            _sensor = KinectSensor.GetDefault();

            _bodyReader = _sensor.BodyFrameSource.OpenReader();
            _bodyReader.FrameArrived += _bodyReader_FrameArrived;

            _colorReader = _sensor.ColorFrameSource.OpenReader();
            _colorReader.FrameArrived += _colorReader_FrameArrived;
            var colorFrameDesc = _sensor.ColorFrameSource.FrameDescription;
            _colorData = new byte[colorFrameDesc.LengthInPixels * 4];

            _depthReader = _sensor.DepthFrameSource.OpenReader();
            _depthReader.FrameArrived += _depthReader_FrameArrived;
            var depthFrameDesc = _sensor.DepthFrameSource.FrameDescription;
            _depthData = new ushort[depthFrameDesc.LengthInPixels];

            _infraredReader = _sensor.InfraredFrameSource.OpenReader();
            _infraredReader.FrameArrived += _infraredReader_FrameArrived;
            var infraredFrameDesc = _sensor.InfraredFrameSource.FrameDescription;
            _infraredData = new ushort[infraredFrameDesc.LengthInPixels];

            _sensor.Open();
        }
Code Example #31
    private bool InitVisualGestures()
    {
        KinectManager kinectManager = KinectManager.Instance;

        KinectInterop.SensorData sensorData = kinectManager != null ? kinectManager.GetSensorData() : null;

        Kinect2Interface kinectInterface = sensorData.sensorInterface as Kinect2Interface;
        KinectSensor     kinectSensor    = kinectInterface != null ? kinectInterface.kinectSensor : null;

        if (kinectSensor == null)
        {
            return(false);
        }

        if (gestureDatabase == string.Empty)
        {
            Debug.LogError("Please specify gesture database file!");
            return(false);
        }

        // copy the gesture database file from Resources, if available
        if (!File.Exists(gestureDatabase))
        {
            TextAsset textRes = Resources.Load(gestureDatabase, typeof(TextAsset)) as TextAsset;

            if (textRes != null && textRes.bytes.Length != 0)
            {
                File.WriteAllBytes(gestureDatabase, textRes.bytes);
            }
        }

        // create the vgb source
        vgbFrameSource = VisualGestureBuilderFrameSource.Create(kinectSensor, 0);

        // open the reader
        vgbFrameReader = vgbFrameSource != null ? vgbFrameSource.OpenReader() : null;

        if (vgbFrameReader != null)
        {
            vgbFrameReader.IsPaused = true;
        }

        using (VisualGestureBuilderDatabase database = VisualGestureBuilderDatabase.Create(gestureDatabase))
        {
            if (database == null)
            {
                Debug.LogError("Gesture database not found: " + gestureDatabase);
                return(false);
            }

            // check if we need to load all gestures
            bool bAllGestures = (gestureNames.Count == 0);

            foreach (Gesture gesture in database.AvailableGestures)
            {
                bool bAddGesture = bAllGestures || gestureNames.Contains(gesture.Name);

                if (bAddGesture)
                {
                    string sGestureName = gesture.Name;
                    vgbFrameSource.AddGesture(gesture);

                    if (!gestureNames.Contains(sGestureName))
                    {
                        gestureNames.Add(sGestureName);
                    }

                    if (!gestureData.ContainsKey(sGestureName))
                    {
                        VisualGestureData data = new VisualGestureData();
                        data.gestureName = sGestureName;
                        data.timestamp   = Time.realtimeSinceStartup;

                        data.isDiscrete   = (gesture.GestureType == GestureType.Discrete);
                        data.isContinuous = (gesture.GestureType == GestureType.Continuous);

                        gestureData.Add(sGestureName, data);
                    }
                }
            }
        }

        return(true);
    }
Code Example #32
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect color sample");

            RenderDevice  device    = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport);
            RenderContext context   = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            //Allow to draw using direct2d on top of swapchain
            var context2d = new SharpDX.Direct2D1.DeviceContext(swapChain.Texture.QueryInterface <SharpDX.DXGI.Surface>());

            //Call release on texture since queryinterface does an addref
            Marshal.Release(swapChain.Texture.NativePointer);

            var whiteBrush = new SharpDX.Direct2D1.SolidColorBrush(context2d, SharpDX.Color.White);

            KinectSensor sensor = KinectSensor.GetDefault();

            sensor.Open();

            KinectBody[] bodyFrame = null;
            KinectSensorBodyFrameProvider bodyProvider = new KinectSensorBodyFrameProvider(sensor);

            bool doQuit   = false;
            bool doUpload = false;
            ColorRGBAFrameData                 currentData  = null;
            DynamicColorRGBATexture            colorTexture = new DynamicColorRGBATexture(device);
            KinectSensorColorRGBAFrameProvider provider     = new KinectSensorColorRGBAFrameProvider(sensor);

            provider.FrameReceived += (sender, args) => { currentData = args.FrameData; doUpload = true; };

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape)
                                                {
                                                    doQuit = true;
                                                }
            };


            FaceFrameResult     frameResult   = null;
            SingleFaceProcessor faceProcessor = new SingleFaceProcessor(sensor);

            faceProcessor.FaceResultAcquired += (sender, args) => { frameResult = args.FrameResult; };

            Func <PointF, Vector2> map = new Func <PointF, Vector2>((p) =>
            {
                float x = p.X / 1920.0f * (float)swapChain.Width;
                float y = p.Y / 1080.0f * (float)swapChain.Height;
                return(new Vector2(x, y));
            });

            Func <float, float, Vector2> mapxy = new Func <float, float, Vector2>((px, py) =>
            {
                float x = px / 1920.0f * (float)swapChain.Width;
                float y = py / 1080.0f * (float)swapChain.Height;
                return(new Vector2(x, y));
            });

            bodyProvider.FrameReceived += (sender, args) =>
            {
                bodyFrame = args.FrameData;
                var body = bodyFrame.TrackedOnly().ClosestBodies().FirstOrDefault();
                if (body != null)
                {
                    faceProcessor.AssignBody(body);
                }
                else
                {
                    faceProcessor.Suspend();
                }
            };

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (doUpload)
                {
                    colorTexture.Copy(context, currentData);
                }

                context.RenderTargetStack.Push(swapChain);

                device.Primitives.ApplyFullTri(context, colorTexture.ShaderView);

                device.Primitives.FullScreenTriangle.Draw(context);
                context.RenderTargetStack.Pop();

                if (frameResult != null)
                {
                    context2d.BeginDraw();
                    var colorBound      = frameResult.FaceBoundingBoxInColorSpace;
                    RectangleF rect     = new RectangleF();
                    Vector2 topLeft     = mapxy(colorBound.Left, colorBound.Top);
                    Vector2 bottomRight = mapxy(colorBound.Right, colorBound.Bottom);
                    rect.Top            = topLeft.Y;
                    rect.Bottom         = bottomRight.Y;
                    rect.Left           = topLeft.X;
                    rect.Right          = bottomRight.X;

                    context2d.DrawRectangle(rect, whiteBrush, 3.0f);

                    foreach (PointF point in frameResult.FacePointsInColorSpace.Values)
                    {
                        var ellipse = new SharpDX.Direct2D1.Ellipse()
                        {
                            Point   = map(point),
                            RadiusX = 5,
                            RadiusY = 5
                        };

                        context2d.FillEllipse(ellipse, whiteBrush);
                    }

                    context2d.EndDraw();
                }

                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();

            colorTexture.Dispose();
            provider.Dispose();

            bodyProvider.Dispose();
            faceProcessor.Dispose();

            whiteBrush.Dispose();
            context2d.Dispose();

            sensor.Close();
        }
Code Example #33
        /// <summary>
        /// initialize kinect sensor and init data members
        /// </summary>
        private bool InitKinect()
        {
            // enumerate and fetch an available sensor
            foreach (var potentialsensor in KinectSensor.KinectSensors)
            {
                if (potentialsensor.Status == KinectStatus.Connected)
                {
                    kinect_sensor = potentialsensor;
                    break;
                }
            }


            if (kinect_sensor != null)
            {
                kinect_data_manager = new KinectDataManager(ref kinect_sensor);
            }

            // enable data stream
            if (kinect_sensor != null)
            {
                // initialize all streams
                kinect_sensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
                kinect_sensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);
                //kinect_sensor.SkeletonStream.Enable();
                // can't use IR simultaneously with color!
                //kinect_sensor.ColorStream.Enable(ColorImageFormat.InfraredResolution640x480Fps30);

                // initialize image sources
                kinect_data_manager.ColorStreamBitmap = new WriteableBitmap(
                    kinect_sensor.ColorStream.FrameWidth, kinect_sensor.ColorStream.FrameHeight, 96, 96,
                    PixelFormats.Bgr32, null);

                kinect_data_manager.DepthStreamBitmap = new WriteableBitmap(
                    kinect_sensor.DepthStream.FrameWidth, kinect_sensor.DepthStream.FrameHeight, 96, 96,
                    PixelFormats.Bgr32, null);

                // set source (must after source has been initialized otherwise it's null forever)
                color_disp_img.Source    = kinect_data_manager.ColorStreamBitmap;
                depth_disp_img.Source    = kinect_data_manager.DepthStreamBitmap;
                skeleton_disp_img.Source = kinect_data_manager.skeletonImageSource;

                // bind event handlers
                kinect_sensor.ColorFrameReady    += kinect_colorframe_ready;
                kinect_sensor.DepthFrameReady    += kinect_depthframe_ready;
                kinect_sensor.SkeletonFrameReady += kinect_skeletonframe_ready;

                // enable data stream based on initial check
                if (!colorCheckBox.IsChecked.Value)
                {
                    kinect_sensor.ColorStream.Disable();
                }
                if (!depthCheckBox.IsChecked.Value)
                {
                    kinect_sensor.DepthStream.Disable();
                }
                if (!skeletonCheckBox.IsChecked.Value)
                {
                    kinect_sensor.SkeletonStream.Disable();
                }
            }
            else
            {
                return(false);
            }


            return(true);
        }
Code Example #34
 /// <summary>
 /// Initializes a new instance of the AllFramesReadyFrameSource class
 /// </summary>
 public AllFramesReadyFrameSource(KinectSensor sensor)
 {
     this.sensor = sensor;
     this.sensor.AllFramesReady += this.Sensor_AllFramesReady;
 }
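The handler registered above is not shown; a minimal sketch of what it could look like with the Kinect v1 SDK (the frame-forwarding step is left as a placeholder, since the class's consumers are not part of the example):

 private void Sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
 {
     // open the color, depth and skeleton frames that were captured together
     using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
     using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
     using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
     {
         if (colorFrame == null || depthFrame == null || skeletonFrame == null)
         {
             // a frame can be null if it was dropped or arrived late
             return;
         }

         // forward the synchronized frames to whatever consumes this frame source
     }
 }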
Code Example #35
        public void Start()
        {
            if (_sensor != null)
            {
                Stop();
            }

            foreach (var potentialSensor in KinectSensor.KinectSensors)
            {
                if (potentialSensor.Status == KinectStatus.Connected && _uniqueKinectId == potentialSensor.UniqueKinectId)
                {
                    _sensor = potentialSensor;
                    break;
                }
            }
            if (_sensor == null)
            {
                MainForm.LogMessageToFile("Sensor not found: " + _uniqueKinectId);
                _isrunning = false;
                return;
            }


            if (_skeleton)
            {
                _sensor.SkeletonStream.Enable();
                _sensor.SkeletonFrameReady += SensorSkeletonFrameReady;
            }

            switch (StreamMode)
            {
            case 0:    //color
                _sensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
                _sensor.ColorFrameReady += SensorColorFrameReady;
                break;

            case 1:    //depth
                _sensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);
                _sensor.DepthFrameReady += SensorDepthFrameReady;
                // Allocate space to put the depth pixels we'll receive
                _depthPixels = new short[_sensor.DepthStream.FramePixelDataLength];
                // Allocate space to put the color pixels we'll create
                _colorPixels = new byte[_sensor.DepthStream.FramePixelDataLength * sizeof(int)];
                break;

            case 2:    //infrared
                _sensor.ColorStream.Enable(ColorImageFormat.InfraredResolution640x480Fps30);
                _sensor.ColorFrameReady += SensorColorFrameReady;
                break;
            }


            // Start the sensor
            try
            {
                _sensor.Start();
                _audioStream = _sensor.AudioSource.Start();

                RecordingFormat = new WaveFormat(16000, 16, 1);

                _waveProvider = new BufferedWaveProvider(RecordingFormat)
                {
                    DiscardOnBufferOverflow = true, BufferDuration = TimeSpan.FromMilliseconds(500)
                };


                _sampleChannel = new SampleChannel(_waveProvider);
                _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;

                if (HasAudioStream != null)
                {
                    HasAudioStream(this, EventArgs.Empty);
                    HasAudioStream = null;
                }

                _isrunning = true;

                _stopEvent = new ManualResetEvent(false);

                // create and start new thread
                var thread = new Thread(AudioThread)
                {
                    Name = "kinect audio"
                };
                thread.Start();
            }
            catch (Exception ex)//IOException)
            {
                MainForm.LogExceptionToFile(ex);
                _sensor    = null;
                _isrunning = false;
            }
        }
Code Example #36
        /// <summary>
        /// Initializes a new instance of the MainWindow class.
        /// </summary>
        public MainWindow()
        {
            // one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // get the coordinate mapper
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // get the color frame details
            FrameDescription frameDescription = this.kinectSensor.ColorFrameSource.FrameDescription;

            // set the display specifics
            this.displayWidth  = frameDescription.Width;
            this.displayHeight = frameDescription.Height;
            this.displayRect   = new Rect(0.0, 0.0, this.displayWidth, this.displayHeight);

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // wire handler for body frame arrival
            this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

            // set the maximum number of bodies that would be tracked by Kinect
            this.bodyCount = this.kinectSensor.BodyFrameSource.BodyCount;

            // allocate storage to store body objects
            this.bodies = new Body[this.bodyCount];

            // specify the required face frame results
            FaceFrameFeatures faceFrameFeatures =
                FaceFrameFeatures.BoundingBoxInColorSpace
                | FaceFrameFeatures.PointsInColorSpace
                | FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.FaceEngagement
                | FaceFrameFeatures.Glasses
                | FaceFrameFeatures.Happy
                | FaceFrameFeatures.LeftEyeClosed
                | FaceFrameFeatures.RightEyeClosed
                | FaceFrameFeatures.LookingAway
                | FaceFrameFeatures.MouthMoved
                | FaceFrameFeatures.MouthOpen;

            // create a face frame source + reader to track each face in the FOV
            this.faceFrameSources = new FaceFrameSource[this.bodyCount];
            this.faceFrameReaders = new FaceFrameReader[this.bodyCount];
            for (int i = 0; i < this.bodyCount; i++)
            {
                // create the face frame source with the required face frame features and an initial tracking Id of 0
                this.faceFrameSources[i] = new FaceFrameSource(this.kinectSensor, 0, faceFrameFeatures);

                // open the corresponding reader
                this.faceFrameReaders[i] = this.faceFrameSources[i].OpenReader();
            }

            // allocate storage to store face frame results for each face in the FOV
            this.faceFrameResults = new FaceFrameResult[this.bodyCount];

            // populate face result colors - one for each face index
            this.faceBrush = new List <Brush>()
            {
                Brushes.White,
                Brushes.Orange,
                Brushes.Green,
                Brushes.Red,
                Brushes.LightBlue,
                Brushes.Yellow
            };

            //List<Brush> faceBrush;

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // Create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            this.imageSource = new DrawingImage(this.drawingGroup);

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // initialize the components (controls) of the window
            this.InitializeComponent();

            // generate a unique key per event
            string clave_unica;

            strGuid            = System.Guid.NewGuid().ToString().ToUpper();
            clave_unica        = String.Format(strGuid);
            claveUnica.Content = clave_unica;


            // date
            fecha        = DateTimeOffset.Now.ToString("MM/dd/yyyy HH:mm:ss", System.Globalization.CultureInfo.InvariantCulture);
            time.Content = fecha;

            // machine name
            nombrePC       = Environment.MachineName;
            nom_pc.Content = nombrePC;

            // face color used as a reference id, to know which body the data is being read from
            //color_f = faceBrush.Count.ToString();
            //ColorCara.Content = color_f;
        }
Code Example #37
        /// <summary>
        /// Converts the depth data into a color image
        /// </summary>
        /// <param name="kinect"></param>
        /// <param name="depthFrame"></param>
        /// <returns></returns>
        private byte[] ConvertDepthColor(KinectSensor kinect,
                                         DepthImageFrame depthFrame)
        {
            ColorImageStream colorStream = kinect.ColorStream;
            DepthImageStream depthStream = kinect.DepthStream;

            // get the per-pixel data from the depth camera
            short[] depthPixel = new short[depthFrame.PixelDataLength];
            depthFrame.CopyPixelDataTo(depthPixel);

            // get the RGB camera coordinates corresponding to the depth camera coordinates (coordinate alignment)
            ColorImagePoint[] colorPoint =
                new ColorImagePoint[depthFrame.PixelDataLength];
            kinect.MapDepthFrameToColorFrame(depthStream.Format, depthPixel,
                                             colorStream.Format, colorPoint);

            byte[] depthColor = new byte[depthFrame.PixelDataLength * Bgr32BytesPerPixel];
            for (int index = 0; index < depthPixel.Length; index++)
            {
                // get the distance from the depth camera data
                int distance = depthPixel[index] >> DepthImageFrame.PlayerIndexBitmaskWidth;

                // the mapped coordinates can exceed the frame size, so use the smaller value
                int x = Math.Min(colorPoint[index].X, colorStream.FrameWidth - 1);
                int y = Math.Min(colorPoint[index].Y, colorStream.FrameHeight - 1);

                // If performance is slow, removing MapDepthFrameToColorFrame may speed things up.
                // In that case, use these x, y instead.
                //int x = index % depthFrame.Width;
                //int y = index / depthFrame.Width;

                int colorIndex = ((y * depthFrame.Width) + x) * Bgr32BytesPerPixel;

                // out of supported range: 0-40cm
                if (distance == depthStream.UnknownDepth)
                {
                    depthColor[colorIndex]     = 0;
                    depthColor[colorIndex + 1] = 0;
                    depthColor[colorIndex + 2] = 255;
                }
                // too near: 40cm-80cm (default mode)
                else if (distance == depthStream.TooNearDepth)
                {
                    depthColor[colorIndex]     = 0;
                    depthColor[colorIndex + 1] = 255;
                    depthColor[colorIndex + 2] = 0;
                }
                // too far: 3m (Near) / 4m (Default) - 8m
                else if (distance == depthStream.TooFarDepth)
                {
                    depthColor[colorIndex]     = 255;
                    depthColor[colorIndex + 1] = 0;
                    depthColor[colorIndex + 2] = 0;
                }
                // valid depth data
                else
                {
                    depthColor[colorIndex]     = 0;
                    depthColor[colorIndex + 1] = 255;
                    depthColor[colorIndex + 2] = 255;
                }
            }

            return(depthColor);
        }
Code Example #38
        /// <summary>
        /// Initializes a new instance of the MainWindow class.
        /// </summary>
        public MainWindow()
        {
            //Visual Alert Loading
            proximity_alert_image.BeginInit();
            proximity_alert_image.UriSource = new Uri("prox_visual_alert.bmp", UriKind.Relative);
            proximity_alert_image.EndInit();

            contamination_alert_image.BeginInit();
            contamination_alert_image.UriSource = new Uri("contamination_visual_alert.bmp", UriKind.Relative);
            contamination_alert_image.EndInit();

            // one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // get the coordinate mapper
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // get the depth (display) extents
            FrameDescription frameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

            // get size of joint space
            this.displayWidth  = frameDescription.Width;
            this.displayHeight = frameDescription.Height;
            this.displayRect   = new Rect(0.0, 0.0, this.displayWidth, this.displayHeight);

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // a bone defined as a line between two joints
            this.bones = new List <Tuple <JointType, JointType> >();

            // Torso
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Head, JointType.Neck));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Neck, JointType.SpineShoulder));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.SpineMid));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineMid, JointType.SpineBase));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipLeft));

            // Right Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderRight, JointType.ElbowRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowRight, JointType.WristRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.HandRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandRight, JointType.HandTipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.ThumbRight));

            // Left Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderLeft, JointType.ElbowLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowLeft, JointType.WristLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.HandLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandLeft, JointType.HandTipLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.ThumbLeft));

            // Right Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipRight, JointType.KneeRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeRight, JointType.AnkleRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleRight, JointType.FootRight));

            // Left Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipLeft, JointType.KneeLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeLeft, JointType.AnkleLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleLeft, JointType.FootLeft));

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // Create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            this.imageSource = new DrawingImage(this.drawingGroup);

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // initialize the components (controls) of the window - This is where it sets the image source
            this.InitializeComponent();
        }
Code Example #39
 public void GenerateTexturedMappedMesh()
 {
     // Setup Kinect
     using (var pipeline = Pipeline.Create())
     {
         this.sensor = new KinectSensor(pipeline, new KinectSensorConfiguration()
         {
             OutputCalibration = true, OutputBodies = true, OutputColor = true, OutputDepth = true
         });
         var calibration = this.sensor.Calibration.Do((kc) => this.calibration = kc.DeepClone());
         this.sensor.ColorImage.Do((image) =>
         {
             if (this.lastColor == null)
             {
                 this.lastColor = image.AddRef();
             }
         });
         var c = this.sensor.DepthImage.Do((image) =>
         {
             if (this.lastImage == null)
             {
                 this.lastImage = image.AddRef();
             }
             if (this.lastImage != null && this.lastColor != null && this.calibration != null)
             {
                 var mesh      = Test.Psi.Kinect.Mesh.MeshFromDepthMap(this.lastImage, this.lastColor, this.calibration);
                 int faceCount = 0;
                 foreach (var face in mesh.Faces)
                 {
                     if (face.Valid)
                     {
                         faceCount++;
                     }
                 }
                  bool writePLY = false; // set to true to dump the generated mesh to a PLY file
                 if (writePLY)
                 {
                     string temppath = System.IO.Path.GetTempPath();
                     string fn       = temppath + @"\Mesh-New-" + DateTime.Now.ToString("MM-dd-yy.HH.mm.ss") + ".ply";
                     using (System.IO.StreamWriter file = new System.IO.StreamWriter(fn))
                     {
                         file.WriteLine("ply");
                         file.WriteLine("format ascii 1.0");
                         file.WriteLine("element vertex " + mesh.NumberVertices.ToString());
                         file.WriteLine("property float x");
                         file.WriteLine("property float y");
                         file.WriteLine("property float z");
                         file.WriteLine("property uchar red");
                         file.WriteLine("property uchar green");
                         file.WriteLine("property uchar blue");
                         file.WriteLine("element face " + faceCount.ToString());
                         file.WriteLine("property list uchar int vertex_indices");
                         file.WriteLine("end_header");
                         for (int i = 0; i < mesh.NumberVertices; i++)
                         {
                             file.WriteLine(
                                 string.Format(
                                     "{0:f2} {1:f2} {2:f2} {3:d} {4:d} {5:d}",
                                     mesh.Vertices[i].Pos.X,
                                     mesh.Vertices[i].Pos.Y,
                                     mesh.Vertices[i].Pos.Z,
                                     (int)mesh.Vertices[i].Color.X,
                                     (int)mesh.Vertices[i].Color.Y,
                                     (int)mesh.Vertices[i].Color.Z));
                         }
                         for (int i = 0; i < mesh.NumberFaces; i++)
                         {
                             if (mesh.Faces[i].Valid)
                             {
                                 file.Write("3 ");
                                 int edgeIndex = mesh.Faces[i].Edge;
                                 file.Write(mesh.Edges[edgeIndex].Head.ToString() + " ");
                                 edgeIndex = mesh.Edges[edgeIndex].Cw;
                                 file.Write(mesh.Edges[edgeIndex].Head.ToString() + " ");
                                 edgeIndex = mesh.Edges[edgeIndex].Cw;
                                 file.WriteLine(mesh.Edges[edgeIndex].Head.ToString());
                             }
                         }
                     }
                 }
             }
         });
         pipeline.Run(TimeSpan.FromSeconds(10));
     }
 }
Code example #40
        /// <summary>
        /// Initializes a new instance of the MainWindow class
        /// </summary>
        public MainWindow()
        {
            // only one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // set the BodyFrameArrived event notifier
            this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

            // initialize the BodyViewer object for displaying tracked bodies in the UI
            this.kinectBodyView = new KinectBodyView(this.kinectSensor);

            // initialize the MainWindow
            this.InitializeComponent();

            // set our data context objects for display in UI
            this.DataContext = this;
            this.kinectBodyViewbox.DataContext = this.kinectBodyView;

            // create a single gesture detector and a content control to display its result in the UI

            gestureResultView = new GestureResultView(false, false, 0.0f, "null");
            gestureDetector   = new GestureDetector(this.kinectSensor, gestureResultView);

            ContentControl contentControl = new ContentControl();

            contentControl.Content = this.gestureDetector.GestureResultView;

            Grid.SetColumn(contentControl, 0);
            Grid.SetRow(contentControl, 1);

            this.contentGrid.Children.Add(contentControl);

            MoveTo(0, 0);
            DispatcherTimer timer = new DispatcherTimer();

            timer.Interval = TimeSpan.FromSeconds(0.1);
            timer.Tick    += timer_Tick;
            timer.Start();

            this.Abcsissa = abcsissa;
            this.Ordinate = ordinate;
            if (serialAttached)
            {
                // configure the serial port before opening it
                this.serialport     = new SerialPort();
                serialport.PortName = "COM3";
                serialport.BaudRate = 57600;
                serialport.Open();
            }
        }//main window
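
Note: Reader_BodyFrameArrived, MoveTo and timer_Tick are wired above but not shown in this example. A minimal sketch of a typical body-frame handler follows; the bodies field, the sample-style KinectBodyView.UpdateBodyFrame method and the GestureDetector.TrackingId/IsPaused properties are assumptions borrowed from the SDK gesture sample, not this project's verified members.

        private Body[] bodies = null; // assumed field holding the latest body data

        private void Reader_BodyFrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            bool dataReceived = false;

            using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
            {
                if (bodyFrame != null)
                {
                    if (this.bodies == null)
                    {
                        this.bodies = new Body[bodyFrame.BodyCount];
                    }

                    // refresh the body data; the sensor reuses the Body objects in the array
                    bodyFrame.GetAndRefreshBodyData(this.bodies);
                    dataReceived = true;
                }
            }

            if (dataReceived)
            {
                // hand the bodies to the viewer (assumed sample-style helper)
                this.kinectBodyView.UpdateBodyFrame(this.bodies);

                // point the single gesture detector at the first tracked body (assumption)
                foreach (Body body in this.bodies)
                {
                    if (body != null && body.IsTracked)
                    {
                        this.gestureDetector.TrackingId = body.TrackingId;
                        this.gestureDetector.IsPaused   = false;
                        break;
                    }
                }
            }
        }
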
Code example #41
File: MainWindow.xaml.cs  Project: shs6444/Mykinect
        public MainWindow()
        {
            this.kinectSensor = KinectSensor.GetDefault();                           // store the default Kinect sensor connected to the PC in the kinectSensor field

            this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader(); // open the reader for the color frames

            this.colorFrameReader.FrameArrived += this.Read_ColorFrameArrived;

            FrameDescription colorframeDescription = this.kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
            // set the image frame properties
            FrameDescription frameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

            this.colorBitmap = new WriteableBitmap(colorframeDescription.Width, colorframeDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);



            this.coordinateMapper = kinectSensor.CoordinateMapper;

            this.displayWidth  = frameDescription.Width;
            this.displayHeight = frameDescription.Height;

            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            this.bodyFrameReader.FrameArrived += this.Read_BodyFrameArrived;

            // a bone is defined as a line between two joints; initialize the list before populating it
            this.bones = new List <Tuple <JointType, JointType> >();

            this.bones.Add(new Tuple <JointType, JointType>(JointType.Head, JointType.Neck));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Neck, JointType.SpineShoulder));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.SpineMid));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineMid, JointType.SpineBase));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderRight));

            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderRight, JointType.ElbowRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowRight, JointType.WristRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.HandRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandRight, JointType.HandTipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.ThumbRight));

            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderLeft, JointType.ElbowLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowLeft, JointType.WristLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.HandLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandLeft, JointType.HandTipLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.ThumbLeft));

            this.bodyColors = new List <Pen>();
            this.bodyColors.Add(new Pen(Brushes.Red, 6));
            this.bodyColors.Add(new Pen(Brushes.Orange, 6));
            this.bodyColors.Add(new Pen(Brushes.Green, 6));
            this.bodyColors.Add(new Pen(Brushes.Blue, 6));
            this.bodyColors.Add(new Pen(Brushes.Indigo, 6));
            this.bodyColors.Add(new Pen(Brushes.Violet, 6));


            this.kinectSensor.Open();

            this.drawingGroup = new DrawingGroup();

            this.imageSource = new DrawingImage(this.drawingGroup);

            this.DataContext = this; // the DataContext must be set so the UI bindings can pull data from the Kinect

            InitializeComponent();
        }
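
Note: the Read_ColorFrameArrived handler registered above is not part of this snippet. A minimal sketch following the usual ColorBasics pattern is shown below; the size check and early return are assumptions.

        private void Read_ColorFrameArrived(object sender, ColorFrameArrivedEventArgs e)
        {
            using (ColorFrame colorFrame = e.FrameReference.AcquireFrame())
            {
                if (colorFrame == null)
                {
                    return;
                }

                FrameDescription desc = colorFrame.FrameDescription;

                this.colorBitmap.Lock();

                // only write when the incoming frame matches the bitmap size
                if ((desc.Width == this.colorBitmap.PixelWidth) && (desc.Height == this.colorBitmap.PixelHeight))
                {
                    colorFrame.CopyConvertedFrameDataToIntPtr(
                        this.colorBitmap.BackBuffer,
                        (uint)(desc.Width * desc.Height * 4),
                        ColorImageFormat.Bgra);

                    this.colorBitmap.AddDirtyRect(new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight));
                }

                this.colorBitmap.Unlock();
            }
        }
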
Code example #42
        public MainWindow()
        {
            bones = new List <Tuple <JointType, JointType> >();

            //Torso
            bones.Add(new Tuple <JointType, JointType>(JointType.Head, JointType.Neck));
            bones.Add(new Tuple <JointType, JointType>(JointType.Neck, JointType.SpineShoulder));
            bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.SpineMid));
            bones.Add(new Tuple <JointType, JointType>(JointType.SpineMid, JointType.SpineBase));
            bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderRight));
            bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderLeft));
            bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipRight));
            bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipLeft));

            //Left Arm
            bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderLeft, JointType.ElbowLeft));
            bones.Add(new Tuple <JointType, JointType>(JointType.ElbowLeft, JointType.WristLeft));
            bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.HandLeft));
            bones.Add(new Tuple <JointType, JointType>(JointType.HandLeft, JointType.HandTipLeft));
            bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.ThumbLeft));

            //Right Arm
            bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderRight, JointType.ElbowRight));
            bones.Add(new Tuple <JointType, JointType>(JointType.ElbowRight, JointType.WristRight));
            bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.HandRight));
            bones.Add(new Tuple <JointType, JointType>(JointType.HandRight, JointType.HandTipRight));
            bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.ThumbRight));

            //Left Leg
            bones.Add(new Tuple <JointType, JointType>(JointType.HipLeft, JointType.KneeLeft));
            bones.Add(new Tuple <JointType, JointType>(JointType.KneeLeft, JointType.AnkleLeft));
            bones.Add(new Tuple <JointType, JointType>(JointType.AnkleLeft, JointType.FootLeft));

            //Right Leg
            bones.Add(new Tuple <JointType, JointType>(JointType.HipRight, JointType.KneeRight));
            bones.Add(new Tuple <JointType, JointType>(JointType.KneeRight, JointType.AnkleRight));
            bones.Add(new Tuple <JointType, JointType>(JointType.AnkleRight, JointType.FootRight));

            bodyColors = new List <SolidColorBrush>();

            bodyColors.Add(new SolidColorBrush(Colors.Red));
            bodyColors.Add(new SolidColorBrush(Colors.Green));
            bodyColors.Add(new SolidColorBrush(Colors.Orange));
            bodyColors.Add(new SolidColorBrush(Colors.Blue));
            bodyColors.Add(new SolidColorBrush(Colors.Yellow));
            bodyColors.Add(new SolidColorBrush(Colors.Pink));

            kinect = KinectSensor.GetDefault();

            multiSourceFrameReader = kinect.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Body);
            multiSourceFrameReader.MultiSourceFrameArrived += MultiSourceFrameReader_MultiSourceFrameArrived;

            coordinateMapper      = kinect.CoordinateMapper;
            colorFrameDescription = kinect.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);

            colorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgra32, null);

            kinect.Open();

            DataContext = this;
            InitializeComponent();
        }
Code example #43
File: Paddle.cs  Project: Sub-Z3r0/KinectUnityTest
    // Use this for initialization
    void Start()
    {
        _Sensor = KinectSensor.GetDefault();
        if (_Sensor != null)
        {
            if (!_Sensor.IsOpen)
            {
                _Sensor.Open();
            }

            // Set up Gesture Source
            _Source = VisualGestureBuilderFrameSource.Create(_Sensor, 0);

            // open the reader for the vgb frames
            _Reader = _Source.OpenReader();
            if (_Reader != null)
            {
                _Reader.IsPaused      = true;
                _Reader.FrameArrived += GestureFrameArrived;
            }

            // load the gesture database from StreamingAssets
            string path = System.IO.Path.Combine(Application.streamingAssetsPath, databasePath);
            _Database = VisualGestureBuilderDatabase.Create(path);

            // Load all gestures
            IList <Gesture> gesturesList = _Database.AvailableGestures;

            for (int x = 0; x < gesturesList.Count; x++)
            {
                Gesture g = gesturesList[x];

                if (g.Name.Equals(landingLeftGesture))
                {
                    _Source.AddGesture(g);
                }
                if (g.Name.Equals(landingRightGesture))
                {
                    _Source.AddGesture(g);
                }
                if (g.Name.Equals(landing))
                {
                    _Source.AddGesture(g);
                }
                if (g.Name.Equals(moveUpwards))
                {
                    _Source.AddGesture(g);
                }
                if (g.Name.Equals(hover))
                {
                    _Source.AddGesture(g);
                }
                if (g.Name.Equals(moveDownWards))
                {
                    _Source.AddGesture(g);
                }
            }

            //for (int g = 0; g < gesturesList.Count; g++)
            // {
            // Gesture gesture = gesturesList[g];
            // _Source.AddGesture(gesture);
            //}
        }
    }
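
Note: the GestureFrameArrived handler is registered above but not included. A minimal sketch using the Visual Gesture Builder discrete results is shown below; the 0.65 confidence threshold and the Debug.Log reporting are placeholders, not this project's actual logic.

    private void GestureFrameArrived(object sender, VisualGestureBuilderFrameArrivedEventArgs e)
    {
        using (VisualGestureBuilderFrame frame = e.FrameReference.AcquireFrame())
        {
            if (frame == null || frame.DiscreteGestureResults == null)
            {
                return;
            }

            foreach (var entry in frame.DiscreteGestureResults)
            {
                Gesture gesture = entry.Key;
                DiscreteGestureResult result = entry.Value;

                // placeholder threshold; real projects tune this per gesture
                if (result.Detected && result.Confidence > 0.65f)
                {
                    Debug.Log(gesture.Name + " detected (confidence " + result.Confidence + ")");
                }
            }
        }
    }
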
Code example #44
 protected abstract void OnKinectChanged(KinectSensor oldKinectSensor, KinectSensor newKinectSensor);
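
Note: this is only the abstract declaration; derived controls implement it to move their stream setup from the old sensor to the new one. A minimal, hypothetical override might look like the following (the color stream is just an example of a stream such a control could need).

 protected override void OnKinectChanged(KinectSensor oldKinectSensor, KinectSensor newKinectSensor)
 {
     if (oldKinectSensor != null)
     {
         // release the sensor that is going away
         oldKinectSensor.ColorStream.Disable();
     }

     if (newKinectSensor != null)
     {
         // enable the streams this control needs on the new sensor
         newKinectSensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
     }
 }
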
Code example #45
        /// <summary>
        /// Initializes a new instance of the KinectBodyView class
        /// </summary>
        /// <param name="kinectSensor">Active instance of the KinectSensor</param>
        public KinectBodyView(KinectSensor kinectSensor)
        {
            if (kinectSensor == null)
            {
                throw new ArgumentNullException("kinectSensor");
            }

            // get the coordinate mapper
            this.coordinateMapper = kinectSensor.CoordinateMapper;

            // get the depth (display) extents
            FrameDescription frameDescription = kinectSensor.DepthFrameSource.FrameDescription;

            // get size of joint space
            this.displayWidth  = frameDescription.Width;
            this.displayHeight = frameDescription.Height;

            // a bone defined as a line between two joints
            this.bones = new List <Tuple <JointType, JointType> >();

            // Torso
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Head, JointType.Neck));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Neck, JointType.SpineShoulder));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.SpineMid));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineMid, JointType.SpineBase));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipLeft));

            // Right Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderRight, JointType.ElbowRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowRight, JointType.WristRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.HandRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandRight, JointType.HandTipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.ThumbRight));

            // Left Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderLeft, JointType.ElbowLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowLeft, JointType.WristLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.HandLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandLeft, JointType.HandTipLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.ThumbLeft));

            // Right Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipRight, JointType.KneeRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeRight, JointType.AnkleRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleRight, JointType.FootRight));

            // Left Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipLeft, JointType.KneeLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeLeft, JointType.AnkleLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleLeft, JointType.FootLeft));

            // populate body colors, one for each BodyIndex
            this.bodyColors = new List <Pen>();

            this.bodyColors.Add(new Pen(Brushes.Red, 6));
            this.bodyColors.Add(new Pen(Brushes.Orange, 6));
            this.bodyColors.Add(new Pen(Brushes.Green, 6));
            this.bodyColors.Add(new Pen(Brushes.Blue, 6));
            this.bodyColors.Add(new Pen(Brushes.Indigo, 6));
            this.bodyColors.Add(new Pen(Brushes.Violet, 6));

            // Create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            this.imageSource = new DrawingImage(this.drawingGroup);
        }
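
Note: the bones list built here is consumed at draw time. A sketch of the bone-drawing helper, following the BodyBasics sample, is shown below; the inferredBonePen field is an assumption.

        // assumed pen used for bones whose joints are only inferred
        private readonly Pen inferredBonePen = new Pen(Brushes.Gray, 1);

        private void DrawBone(IReadOnlyDictionary <JointType, Joint> joints,
                              IDictionary <JointType, Point> jointPoints,
                              JointType jointType0,
                              JointType jointType1,
                              DrawingContext drawingContext,
                              Pen drawingPen)
        {
            Joint joint0 = joints[jointType0];
            Joint joint1 = joints[jointType1];

            // a bone cannot be drawn if either joint is not tracked at all
            if (joint0.TrackingState == TrackingState.NotTracked ||
                joint1.TrackingState == TrackingState.NotTracked)
            {
                return;
            }

            // draw inferred bones with a thinner pen than fully tracked ones
            Pen drawPen = this.inferredBonePen;
            if ((joint0.TrackingState == TrackingState.Tracked) && (joint1.TrackingState == TrackingState.Tracked))
            {
                drawPen = drawingPen;
            }

            drawingContext.DrawLine(drawPen, jointPoints[jointType0], jointPoints[jointType1]);
        }
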
Code example #46
        // Kinect enabled apps should customize which Kinect services it initializes here.
        private void InitializeKinectServices(KinectSensorManager kinectSensorManager, KinectSensor sensor)
        {
            // Application should enable all streams first.
            kinectSensorManager.ColorFormat        = ColorImageFormat.RgbResolution640x480Fps30;
            kinectSensorManager.ColorStreamEnabled = true;

            sensor.SkeletonFrameReady += this.SkeletonsReady;
            kinectSensorManager.TransformSmoothParameters = new TransformSmoothParameters
            {
                Smoothing          = 0.5f,
                Correction         = 0.5f,
                Prediction         = 0.5f,
                JitterRadius       = 0.05f,
                MaxDeviationRadius = 0.04f
            };
            kinectSensorManager.SkeletonStreamEnabled = true;
            kinectSensorManager.KinectSensorEnabled   = true;

            if (!kinectSensorManager.KinectSensorAppConflict)
            {
                // Start speech recognizer after KinectSensor started successfully.
                this.mySpeechRecognizer = SpeechRecognizer.Create();

                if (null != this.mySpeechRecognizer)
                {
                    this.mySpeechRecognizer.SaidSomething += this.RecognizerSaidSomething;
                    this.mySpeechRecognizer.Start(sensor.AudioSource);
                }

                enableAec.Visibility = Visibility.Visible;
                this.UpdateEchoCancellation(this.enableAec);
            }
        }
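
Note: the SkeletonsReady handler hooked above is not shown. A minimal sketch of a typical Kinect v1 skeleton handler follows; the skeletonData buffer field is an assumption.

        private Skeleton[] skeletonData; // assumed buffer reused across frames

        private void SkeletonsReady(object sender, SkeletonFrameReadyEventArgs e)
        {
            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame == null)
                {
                    return;
                }

                if (this.skeletonData == null || this.skeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                // copy the smoothed skeletons out of the frame
                skeletonFrame.CopySkeletonDataTo(this.skeletonData);

                // project-specific processing of the skeletons would go here
            }
        }
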
Code example #47
 private void Sensor_IsAvailableChanged(KinectSensor sender, IsAvailableChangedEventArgs args)
 {
     this.StatusText = this.kinectSensor.IsAvailable ? "Running" : "Not Available";
 }
Code example #48
 private void InicializarKinect()
 {
     kinect = InicializadorKinect.InicializarPrimeiroSensor(10);
     kinect.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
     kinect.ColorFrameReady += kinect_ColorFrameReady;
 }
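
Note: the kinect_ColorFrameReady handler is wired above but not included. A minimal sketch of a typical Kinect v1 color handler follows; what happens with the copied pixels is project-specific.

 private void kinect_ColorFrameReady(object sender, ColorImageFrameReadyEventArgs e)
 {
     using (ColorImageFrame frame = e.OpenColorImageFrame())
     {
         if (frame == null)
         {
             return; // frames can be skipped when the app falls behind
         }

         // copy the raw 32-bit color pixels out of the frame
         byte[] pixels = new byte[frame.PixelDataLength];
         frame.CopyPixelDataTo(pixels);

         // at this point the pixels would typically be pushed into a WriteableBitmap for display
     }
 }
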
Code example #49
File: MainWindow.xaml.cs  Project: hyzcn/HandMap
        /// <summary>
        /// Initializes a new instance of the MainWindow class.
        /// </summary>
        public MainWindow()
        {
            // get the kinectSensor object
            this.kinectSensor = KinectSensor.GetDefault();

            // get the coordinate mapper
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // open the reader for the depth frames
            this.depthFrameReader = this.kinectSensor.DepthFrameSource.OpenReader();

            // wire handler for frame arrival
            this.depthFrameReader.FrameArrived += this.Reader_DepthFrameArrived;

            // get FrameDescription from DepthFrameSource
            this.depthFrameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

            // allocate space to put the pixels being received and converted
            this.depthPixels = new byte[this.depthFrameDescription.Width * this.depthFrameDescription.Height];

            List <Color> colorList = new List <Color>();

            colorList.Add(Color.FromArgb(100, 0, 0, 255));
            colorList.Add(Color.FromArgb(150, 0, 255, 0));
            colorList.Add(Color.FromArgb(100, 70, 200, 0));
            colorList.Add(Color.FromArgb(100, 100, 180, 0));
            colorList.Add(Color.FromArgb(200, 200, 100, 0));
            colorList.Add(Color.FromArgb(200, 230, 70, 0));
            colorList.Add(Color.FromArgb(255, 255, 0, 0));
            colorList.Add(Color.FromArgb(0, 0, 0, 0));



            BitmapPalette bp = new BitmapPalette(colorList);

            // create the bitmap to display
            this.depthBitmap = new WriteableBitmap(this.depthFrameDescription.Width, this.depthFrameDescription.Height, 96.0, 96.0, PixelFormats.Indexed8, bp);

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // wire handler for body frame arrival
            this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;


            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // use the window object as the view model
            this.DataContext = this;

            // initialize the components (controls) of the window
            this.InitializeComponent();
        }
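
Note: the Reader_DepthFrameArrived handler is wired above but not included. A sketch of how the depth values could be bucketed into the 8-entry palette and written into the Indexed8 bitmap follows; the rawDepth buffer and the bucketing formula are assumptions.

        // assumed intermediate buffer for raw depth values (one ushort per pixel)
        private ushort[] rawDepth = null;

        private void Reader_DepthFrameArrived(object sender, DepthFrameArrivedEventArgs e)
        {
            using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
            {
                if (depthFrame == null)
                {
                    return;
                }

                FrameDescription desc = depthFrame.FrameDescription;

                if (this.rawDepth == null)
                {
                    this.rawDepth = new ushort[desc.Width * desc.Height];
                }

                depthFrame.CopyFrameDataToArray(this.rawDepth);

                ushort maxDepth = depthFrame.DepthMaxReliableDistance;

                // placeholder mapping: bucket each depth value into one of the 8 palette entries
                for (int i = 0; i < this.rawDepth.Length; ++i)
                {
                    ushort depth = this.rawDepth[i];
                    this.depthPixels[i] = (depth == 0)
                        ? (byte)7
                        : (byte)Math.Min(7, (depth * 7) / maxDepth);
                }

                this.depthBitmap.WritePixels(
                    new Int32Rect(0, 0, desc.Width, desc.Height),
                    this.depthPixels,
                    desc.Width, // one byte per pixel for an Indexed8 bitmap
                    0);
            }
        }
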
Code example #50
 private void KinectRibbon_Load(object sender, RibbonUIEventArgs e)
 {
     this.kinectSensor = KinectSensor.GetDefault();
 }
Code example #51
        /// <summary>
        /// Called at the start when the window is loaded
        /// </summary>
        private void InitializeKinect()
        {
            using (Stream recordStream = File.Open(@"C:\Users\Abhi\Projects\harley\data\circleKB.save", FileMode.OpenOrCreate))
            {
                this.circleDetector = new TemplatedGestureDetector("Circle", recordStream);
                this.circleDetector.DisplayCanvas      = videoCanvas;
                this.circleDetector.OnGestureDetected += OnHandGesture;
            }

            this.gestureDetector = new SwipeGestureDetector();
            //this.gestureDetector.DisplayCanvas = videoCanvas;
            this.gestureDetector.OnGestureDetected += OnHandGesture;

            ParallelCombinedGestureDetector parallelCombinedGestureDetector = new ParallelCombinedGestureDetector();

            parallelCombinedGestureDetector.OnGestureDetected += OnHandGesture;
            parallelCombinedGestureDetector.DisplayCanvas      = videoCanvas;
            parallelCombinedGestureDetector.Add(circleDetector);
            parallelCombinedGestureDetector.Add(gestureDetector);

            foreach (var potentialSensor in KinectSensor.KinectSensors)
            {
                if (potentialSensor.Status == KinectStatus.Connected)
                {
                    this.kinectSensor = potentialSensor;
                    break;
                }
            }

            if (null != this.kinectSensor)
            {
                // Turning on skeleton stream
                this.kinectSensor.SkeletonStream.Enable();
                this.kinectSensor.SkeletonFrameReady += this.SensorSkeletonFrameReady;

                // Turn on the color stream to receive color frames
                this.kinectSensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);

                // Allocate space to put the pixels we'll receive
                this.colorPixels = new byte[this.kinectSensor.ColorStream.FramePixelDataLength];

                // This is the bitmap we'll display on-screen
                this.colorBitmap = new WriteableBitmap(this.kinectSensor.ColorStream.FrameWidth, this.kinectSensor.ColorStream.FrameHeight, 96.0, 96.0, PixelFormats.Bgr32, null);

                // Set the image we display to point to the bitmap where we'll put the image data
                this.Image.Source = this.colorBitmap;

                // Add an event handler to be called whenever there is new color frame data
                this.kinectSensor.ColorFrameReady += this.SensorColorFrameReady;

                this.kinectSensor.Start();
            }

            if (null == this.kinectSensor)
            {
                // connection failed; nothing more to do
                return;
            }

            this.speech = new Speech(this.kinectSensor, grammar, this);
            //this.speech.Start();
        }
Code example #52
        public MainWindow()
        {
            InitializeComponent();
            KinectRegion.SetKinectRegion(this, kinectRegion);
            kinectRegion.KinectSensor = KinectSensor.GetDefault();
            // only one sensor is currently supported
            //kinectRegion.KinectSensor = KinectSensor.GetDefault();
            // set IsAvailableChanged event notifier
            kinectRegion.KinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            //kinectRegion.KinectSensor.Open();
            // set the status text
            this.StatusText = kinectRegion.KinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // open the reader for the body frames
            this.bodyFrameReader = kinectRegion.KinectSensor.BodyFrameSource.OpenReader();

            // set the BodyFramedArrived event notifier
            this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;
            // initialize the gesture detection objects for our gestures
            this.gestureDetectorList = new List <GestureDetector>();

            // initialize the BodyViewer object for displaying tracked bodies in the UI
            kinectBodyView = new KinectBodyView(kinectRegion.KinectSensor);

            // initialize the MainWindow
            //this.InitializeComponent();

            // set our data context objects for display in UI
            this.DataContext = this;
            //this.kinectBodyViewbox.DataContext = this.kinectBodyView;


            int col0Row   = 0;
            int col1Row   = 0;
            int maxBodies = kinectRegion.KinectSensor.BodyFrameSource.BodyCount;



            a     = 40;
            chord = 0;

            /*GestureResultView result = new GestureResultView(0, false, false, 0.0f, false);
             *
             * GestureDetector detector = new GestureDetector(kinectRegion.KinectSensor, result);
             * this.gestureDetectorList.Add(detector);
             *
             * // split gesture results across the first two columns of the content grid
             * ContentControl contentControl = new ContentControl();
             * contentControl.Content = this.gestureDetectorList[0].GestureResultView;*/

            for (int i = 0; i < maxBodies; ++i)
            {
                GestureResultView result   = new GestureResultView(i, false, false, 0.0f, false);
                GestureDetector   detector = new GestureDetector(kinectRegion.KinectSensor, result);
                this.gestureDetectorList.Add(detector);

                // split gesture results across the first two columns of the content grid
                ContentControl contentControl = new ContentControl();
                contentControl.Content = this.gestureDetectorList[i].GestureResultView;

                if (i % 2 == 0)
                {
                    // Gesture results for bodies: 0, 2, 4
                    Grid.SetColumn(contentControl, 0);
                    Grid.SetRow(contentControl, col0Row);
                    ++col0Row;
                }
                else
                {
                    // Gesture results for bodies: 1, 3, 5
                    Grid.SetColumn(contentControl, 1);
                    Grid.SetRow(contentControl, col1Row);
                    ++col1Row;
                }

                //this.contentGrid.Children.Add(contentControl);
            }

            //this.ImageSource = this.notSeatedImage;
            Loaded += MainWindow_Loaded;

            dev = devInfo.CreateDevice();
            dev.Open();
        }
Code example #53
        public override void Initialize()
        {
            foreach (var potentialSensor in KinectSensor.KinectSensors)
            {
                if (potentialSensor.Status == KinectStatus.Connected)
                {
                    this._sensor = potentialSensor;
                    break;
                }
            }

            if (this._sensor != null)
            {
                // Turn on the skeleton stream to receive skeleton frames
                this._sensor.SkeletonStream.Enable();

                //we only care about the arms and above
                this._sensor.SkeletonStream.TrackingMode = SkeletonTrackingMode.Seated;

                // Add an event handler to be called whenever there is new color frame data
                this._sensor.SkeletonFrameReady += this.SensorSkeletonFrameReady;

                // Start the sensor!
                try
                {
                    this._sensor.Start();
                }
                catch (IOException)
                {
                    this._sensor = null;
                }
            }

            if (this._sensor == null)
            {
                //nothing to do
            }

            //speech recognizer
            RecognizerInfo ri = GetKinectRecognizer();

            if (ri != null)
            {
                this.speechEngine = new SpeechRecognitionEngine(ri.Id);

                var speech = new Choices();
                speech.Add(new SemanticResultValue("kah", "SCREECH"));
                speech.Add(new SemanticResultValue("caw", "SCREECH"));
                speech.Add(new SemanticResultValue("cah", "SCREECH"));
                speech.Add(new SemanticResultValue("cahh", "SCREECH"));
                speech.Add(new SemanticResultValue("kahh", "SCREECH"));
                speech.Add(new SemanticResultValue("kaw", "SCREECH"));
                speech.Add(new SemanticResultValue("caww", "SCREECH"));
                speech.Add(new SemanticResultValue("attack", "SCREECH"));
                speech.Add(new SemanticResultValue("caaaawww", "SCREECH"));
                speech.Add(new SemanticResultValue("start", "START"));
                speech.Add(new SemanticResultValue("begin", "START"));
                speech.Add(new SemanticResultValue("murica", "START"));
                speech.Add(new SemanticResultValue("america", "START"));
                speech.Add(new SemanticResultValue("soar", "START"));
                speech.Add(new SemanticResultValue("fly", "START"));
                speech.Add(new SemanticResultValue("reset", "RESET"));
                speech.Add(new SemanticResultValue("restart", "RESET"));
                speech.Add(new SemanticResultValue("menu", "RESET"));
                speech.Add(new SemanticResultValue("quit", "QUIT"));

                var gb = new GrammarBuilder {
                    Culture = ri.Culture
                };
                gb.Append(speech);

                var g = new Grammar(gb);

                speechEngine.LoadGrammar(g);
                speechEngine.SpeechRecognized          += SpeechRecognized;
                speechEngine.SpeechRecognitionRejected += SpeechRejected;

                speechEngine.SetInputToAudioStream(
                    _sensor.AudioSource.Start(), new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));
                speechEngine.RecognizeAsync(RecognizeMode.Multiple);
            }
            else
            {
            }

            base.Initialize();
        }
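
Note: the SpeechRecognized handler hooked above is not shown. A minimal sketch keyed off the semantic values loaded into the grammar follows; the confidence threshold and the empty case bodies are placeholders.

        private void SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
        {
            // placeholder threshold; tune for the noise level of the room
            const double ConfidenceThreshold = 0.3;

            if (e.Result.Confidence < ConfidenceThreshold)
            {
                return;
            }

            // dispatch on the semantic key attached to the recognized phrase
            switch (e.Result.Semantics.Value.ToString())
            {
                case "SCREECH":
                    // hypothetical game reaction to the "caw" group of phrases
                    break;

                case "START":
                    break;

                case "RESET":
                    break;

                case "QUIT":
                    break;
            }
        }
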
Code example #54
        /// <summary>
        /// Initializes a new instance of the MainWindow class.
        /// </summary>
        public MainWindow()
        {
            // one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // get the coordinate mapper
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // get the depth (display) extents
            FrameDescription frameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

            // get size of joint space
            this.displayWidth  = frameDescription.Width;
            this.displayHeight = frameDescription.Height;



            //new vars

            // open the reader for the color frames
            this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader();

            // wire handler for frame arrival
            this.colorFrameReader.FrameArrived += this.Reader_ColorFrameArrived;


            // create the colorFrameDescription from the ColorFrameSource using Bgra format
            //added func to send image
            FrameDescription colorFrameDescription = this.kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);

            // create the bitmap to display
            this.colorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

            //run python file on image


            ProcessStartInfo myProcessStartInfo = new ProcessStartInfo(Directory.GetCurrentDirectory() + "/tensorflow_cpu/python.exe");

            myProcessStartInfo.Arguments = "evaluate.py";
            Process myProcess = new Process();

            myProcess.StartInfo = myProcessStartInfo;
            myProcess.Start();
            myProcess.WaitForExit();
            myProcess.Close();

            // read in file

            string[] lines = File.ReadAllLines("boxes.txt");

            foreach (string line in lines)
            {
                string[] sections = line.Split(':');

                string[] box    = sections[1].Split(',');
                double[] intBox = Array.ConvertAll(box, Double.Parse);
                double   x0     = intBox[0] * displayWidth;
                double   y0     = intBox[1] * displayHeight;
                double   w      = intBox[2] * displayWidth;
                double   h      = intBox[3] * displayHeight;
                double[] bbox   = { x0, y0, w, h };
                label    newBox = new label((sections[0]), bbox);
                this.Labels.Add(newBox);
            }



            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // a bone defined as a line between two joints
            this.bones = new List <Tuple <JointType, JointType> >();

            // Torso
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Head, JointType.Neck));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Neck, JointType.SpineShoulder));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.SpineMid));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineMid, JointType.SpineBase));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipLeft));

            // Right Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderRight, JointType.ElbowRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowRight, JointType.WristRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.HandRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandRight, JointType.HandTipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.ThumbRight));

            // Left Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderLeft, JointType.ElbowLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowLeft, JointType.WristLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.HandLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandLeft, JointType.HandTipLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.ThumbLeft));

            // Right Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipRight, JointType.KneeRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeRight, JointType.AnkleRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleRight, JointType.FootRight));

            // Left Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipLeft, JointType.KneeLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeLeft, JointType.AnkleLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleLeft, JointType.FootLeft));

            // populate body colors, one for each BodyIndex
            this.bodyColors = new List <Pen>();

            this.bodyColors.Add(new Pen(Brushes.Red, 6));
            this.bodyColors.Add(new Pen(Brushes.Orange, 6));
            this.bodyColors.Add(new Pen(Brushes.Green, 6));
            this.bodyColors.Add(new Pen(Brushes.Blue, 6));
            this.bodyColors.Add(new Pen(Brushes.Indigo, 6));
            this.bodyColors.Add(new Pen(Brushes.Violet, 6));

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // Create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            this.imageSource = new DrawingImage(this.drawingGroup);

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // initialize the components (controls) of the window
            this.InitializeComponent();
        }
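
Note: the example never shows how the color frame reaches evaluate.py. A hypothetical helper that saves the current color bitmap to disk for the script to read might look like this; the method name and file handling are assumptions.

        // hypothetical helper: write the current color frame to disk so evaluate.py can read it
        private void SaveColorFrameForEvaluation(string fileName)
        {
            BitmapEncoder encoder = new PngBitmapEncoder();
            encoder.Frames.Add(BitmapFrame.Create(this.colorBitmap));

            using (FileStream stream = new FileStream(fileName, FileMode.Create))
            {
                encoder.Save(stream);
            }
        }
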
Code example #55
File: MainWindow.xaml.cs  Project: leezhm/ChromaKey
        private void SetKinectSensor(KinectSensor sensor)
        {
            if (null != gSensor)
            {
                gSensor.Stop();
                gSensor = null;

                logger.Debug("Stop current Kinect Sensor and Set it as null");
            }

            if (null != (gSensor = sensor))
            {
                // Enable Kinect Sensor
                gSensor.ColorStream.Enable(CIF);
                gSensor.DepthStream.Enable(DIF);

                // Get the size of Color/Depth Image
                ColorWidth  = gSensor.ColorStream.FrameWidth;
                ColorHeight = gSensor.ColorStream.FrameHeight;
                DepthWidth  = gSensor.DepthStream.FrameWidth;
                DepthHeight = gSensor.DepthStream.FrameHeight;
                Divisor     = ColorWidth / DepthWidth;

                //var parameters = new TransformSmoothParameters
                //{
                //    Smoothing = 0.8f,
                //    Correction = 0.0f,
                //    Prediction = 0.0f,
                //    JitterRadius = 1.0f,
                //    MaxDeviationRadius = 0.5f
                //};

                // If we want to get the player information, we must be enable the Skeleton Stream.
                //gSensor.SkeletonStream.Enable(parameters);
                gSensor.SkeletonStream.Enable();

                // Add the AllFramesReady Event
                gSensor.AllFramesReady += AllFramesReadyEventHandler;

                // Init pixels and bitmap
                if (null == ColorPixels)
                {
                    ColorPixels = new byte[gSensor.ColorStream.FramePixelDataLength];
                }

                if (null == ColorBitmap)
                {
                    ColorBitmap = new WriteableBitmap(ColorWidth, ColorHeight, 96.0, 96.0, PixelFormats.Bgr32, null);
                }

                if (null == PlayerPixels)
                {
                    PlayerPixels = new byte[gSensor.DepthStream.FramePixelDataLength * sizeof(int)];
                }

                if (null == PlayerBitmap)
                {
                    PlayerBitmap = new WriteableBitmap(DepthWidth, DepthHeight, 96.0, 96.0, PixelFormats.Bgr32, null);
                }

                if (null == DepthDatas)
                {
                    DepthDatas = new short[gSensor.DepthStream.FramePixelDataLength];
                }

                if (null == CIP)
                {
                    CIP = new ColorImagePoint[gSensor.DepthStream.FramePixelDataLength];
                }

                // Init Image Control
                imgColor.Source  = ColorBitmap;
                imgPlayer.Source = PlayerBitmap;

                try
                {
                    // Start Kinect Sensor
                    gSensor.Start();
                }
                catch (Exception expt)
                {
                    logger.Fatal(expt.Message);
                    logger.Fatal(expt.StackTrace);
                }
            }
        }
Code example #56
        /// <summary>
        /// Main constructor
        /// </summary>
        public GaitParamWindow()
        {
            // get the sensor
            this.kinectSensor = KinectSensor.GetDefault();

            // get the coordinate mapper
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // get the color frame description
            FrameDescription colorFrameDescription = this.kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // open the reader for the color frames
            this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader();

            // get the skeleton (joint space) display extents
            this.displayWidth  = colorFrameDescription.Width;
            this.displayHeight = colorFrameDescription.Height;

            // a list of tuples represents the bones
            this.bones = new List <Tuple <JointType, JointType> >();

            // Torso
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Head, JointType.Neck));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Neck, JointType.SpineShoulder));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.SpineMid));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineMid, JointType.SpineBase));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipLeft));

            // Right Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderRight, JointType.ElbowRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowRight, JointType.WristRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.HandRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandRight, JointType.HandTipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.ThumbRight));

            // Left Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderLeft, JointType.ElbowLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowLeft, JointType.WristLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.HandLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandLeft, JointType.HandTipLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.ThumbLeft));

            // Right Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipRight, JointType.KneeRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeRight, JointType.AnkleRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleRight, JointType.FootRight));

            // Left Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipLeft, JointType.KneeLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeLeft, JointType.AnkleLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleLeft, JointType.FootLeft));



            // one color per tracked body
            this.bodyColors = new List <System.Windows.Media.Pen>();

            this.bodyColors.Add(new System.Windows.Media.Pen(System.Windows.Media.Brushes.Red, 15));
            this.bodyColors.Add(new System.Windows.Media.Pen(System.Windows.Media.Brushes.Orange, 15));
            this.bodyColors.Add(new System.Windows.Media.Pen(System.Windows.Media.Brushes.Green, 15));
            this.bodyColors.Add(new System.Windows.Media.Pen(System.Windows.Media.Brushes.Blue, 15));
            this.bodyColors.Add(new System.Windows.Media.Pen(System.Windows.Media.Brushes.Indigo, 15));
            this.bodyColors.Add(new System.Windows.Media.Pen(System.Windows.Media.Brushes.Violet, 15));

            // open the sensor
            this.kinectSensor.Open();

            // create the drawing objects and bind the window as the data context
            this.drawingGroup = new DrawingGroup();
            this.imageSource  = new DrawingImage(this.drawingGroup);
            this.DataContext  = this;

            // create the bitmap to display
            this.colorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);
            InitializeComponent();
        }
Code example #57
File: MainWindow.xaml.cs  Project: Centerwing/Hi
        /// <summary>
        /// Initializes a new instance of the MainWindow class
        /// </summary>
        public MainWindow()
        {
            // only one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // set the BodyFrameArrived event notifier
            this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

            // initialize the BodyViewer object for displaying tracked bodies in the UI
            this.kinectBodyView = new KinectBodyView(this.kinectSensor);

            // initialize the gesture detection objects for our gestures
            this.gestureDetectorList = new List <GestureDetector>();

            // initialize the MainWindow
            this.InitializeComponent();

            // set our data context objects for display in UI
            this.DataContext = this;
            this.kinectBodyViewbox.DataContext = this.kinectBodyView;

            // create a gesture detector for each body (6 bodies => 6 detectors) and create content controls to display results in the UI
            int col0Row   = 0;
            int col1Row   = 0;
            int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;

            for (int i = 0; i < maxBodies; ++i)
            {
                GestureResultView result   = new GestureResultView(i, false, false, 0.0f);
                GestureDetector   detector = new GestureDetector(this.kinectSensor, result);
                this.gestureDetectorList.Add(detector);

                // split gesture results across the first two columns of the content grid
                ContentControl contentControl = new ContentControl();
                contentControl.Content = this.gestureDetectorList[i].GestureResultView;

                if (i % 2 == 0)
                {
                    // Gesture results for bodies: 0, 2, 4
                    Grid.SetColumn(contentControl, 0);
                    Grid.SetRow(contentControl, col0Row);
                    ++col0Row;
                }
                else
                {
                    // Gesture results for bodies: 1, 3, 5
                    Grid.SetColumn(contentControl, 1);
                    Grid.SetRow(contentControl, col1Row);
                    ++col1Row;
                }

                this.contentGrid.Children.Add(contentControl);
            }
        }
Code example #58
File: Error.xaml.cs  Project: AlejandroMoya/VME
        void conectaActiva()
        {
            // Make sure at least one Kinect sensor is connected
            if (KinectSensor.KinectSensors.Count > 0)
            {
                // Check that the sensor field is still null
                if (this.sensor == null)
                {
                    // Assign the first Kinect sensor to our field
                    this.sensor = KinectSensor.KinectSensors[0];
                    if (this.sensor != null)
                    {
                        try
                        {
                            // Start the Kinect device
                            this.sensor.Start();
                            // Optional: tilt the Kinect to a given elevation angle, from -27 to 27
                            //   sensor.ElevationAngle = 3;
                            // Report that the Kinect device connected and initialized correctly
                            //  Error err = new VME.Error(RecursosLocalizables.StringResources.KinectDetect, 3);
                            // err.Show();
                        }
                        catch (Exception ex)
                        {
                        }

                        // ri tries to find a valid language pack using the obtenerLP method
                        RecognizerInfo ri = obtenerLP();

                        // If the required language pack was found, use it to create the speech engine
                        if (ri != null)
                        {
                            this.speechengine = new SpeechRecognitionEngine(ri.Id);
                            // opciones holds the words and phrases the device will be able to recognize
                            Choices opciones = new Choices();
                            // Add the options: each entry pairs the phrase to recognize with a key that identifies it
                            // For example, on this line "uno" is the option value and "UNO" is the key

                            opciones.Add(RecursosLocalizables.StringResources.aceptar, "UNO");

                            // On this line "windows ocho" is the option value and "TRES" is the key, and so on
                            opciones.Add(new SemanticResultValue("windows", "TRES"));
                            opciones.Add(new SemanticResultValue("new windows", "TRES"));

                            // This builder creates the full set of phrases and words based on the culture chosen in ri
                            var grammarb = new GrammarBuilder {
                                Culture = ri.Culture
                            };
                            // Add the word and phrase options to grammarb
                            grammarb.Append(opciones);
                            // Create a Grammar from grammarb
                            var grammar = new Grammar(grammarb);
                            // Load the grammar into the speech engine
                            this.speechengine.LoadGrammar(grammar);
                            // Hook the SpeechRecognized event, raised each time a word is detected
                            speechengine.SpeechRecognized += new EventHandler <SpeechRecognizedEventArgs>(speechengine_SpeechRecognized);
                            // Feed the Kinect audio stream into the speech engine
                            speechengine.SetInputToAudioStream(sensor.AudioSource.Start(), new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));
                            speechengine.RecognizeAsync(RecognizeMode.Multiple);
                        }
                    }
                }
            }
        }
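The example above subscribes to speechengine_SpeechRecognized but the handler itself is not included. The following is only a sketch of what such a handler might look like, reusing the "UNO" and "TRES" semantic keys registered above; the confidence threshold and the actions in each branch are assumptions, not part of the original code.

        // Hypothetical handler sketch -- the real speechengine_SpeechRecognized is not shown in this example.
        // It assumes the semantic keys ("UNO", "TRES") registered via SemanticResultValue above.
        private void speechengine_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
        {
            const double ConfidenceThreshold = 0.3; // assumed threshold; tune for your environment

            if (e.Result.Confidence < ConfidenceThreshold)
            {
                return; // ignore low-confidence recognitions
            }

            string tag = e.Result.Semantics.Value as string;
            switch (tag)
            {
                case "UNO":
                    // e.g. accept / confirm the current dialog (placeholder action)
                    break;
                case "TRES":
                    // e.g. trigger the Windows-related command (placeholder action)
                    break;
            }
        }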
コード例 #59
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            PopulatePoseLibrary();
            LoadImages();
            blackImg.Visibility    = System.Windows.Visibility.Hidden;
            upRightArm.Visibility  = System.Windows.Visibility.Hidden;
            upLeftArm.Visibility   = System.Windows.Visibility.Hidden;
            lowRightArm.Visibility = System.Windows.Visibility.Hidden;
            lowLeftArm.Visibility  = System.Windows.Visibility.Hidden;

            beamAngleTxt.Visibility        = System.Windows.Visibility.Hidden;
            soundSourceAngleTxt.Visibility = System.Windows.Visibility.Hidden;
            recognizedColorTxt.Visibility  = System.Windows.Visibility.Hidden;
            TBCountDown.Visibility         = System.Windows.Visibility.Hidden;


            hud.Visibility    = System.Windows.Visibility.Hidden;
            hudRed.Visibility = System.Windows.Visibility.Hidden;

            if (KinectSensor.KinectSensors.Count == 0)
            {
                MessageBox.Show("No Kinects detected", "Depth Sensor Basics");
                Application.Current.Shutdown();
                return; // Shutdown only posts a quit message, so stop running this handler
            }
            else
            {
                sensor = KinectSensor.KinectSensors[0];
                if (sensor == null)
                {
                    MessageBox.Show("Kinect is not ready to use", "Depth Sensor Basics");
                    Application.Current.Shutdown();
                    return; // avoid null-reference errors in the initialization code below
                }
            }

            // -------------------------------------------------------
            // color
            sensor.ColorStream.Enable();
            // allocate storage for color data
            colorData = new byte[sensor.ColorStream.FramePixelDataLength];

            // create an empty bitmap with the same size as color frame
            colorImageBitmap = new WriteableBitmap(
                sensor.ColorStream.FrameWidth, sensor.ColorStream.FrameHeight,
                96, 96, PixelFormats.Bgr32, null);
            colorImg.Source = colorImageBitmap;
            // register an event handler
            sensor.ColorFrameReady += new EventHandler <ColorImageFrameReadyEventArgs>(sensor_ColorFrameReady);

            // skeleton stream
            sensor.SkeletonStream.Enable();
            sensor.SkeletonFrameReady += new EventHandler <SkeletonFrameReadyEventArgs>(sensor_SkeletonFrameReady);
            skeletons = new Skeleton[sensor.SkeletonStream.FrameSkeletonArrayLength];

            // -------------------------------------------------------
            // Create the drawing group we'll use for drawing
            drawingGroup = new DrawingGroup();
            // Create an image source that we can use in our image control
            drawingImg = new DrawingImage(drawingGroup);
            // Display the drawing using our image control
            skeletonImg.Source = drawingImg;
            // prevent drawing outside of our render area
            drawingGroup.ClipGeometry = new RectangleGeometry(new Rect(0.0, 0.0, 640, 480));

            // start the kinect
            sensor.Start();

            //gesture setup-----------------------------------------------------------------------------------
            recognitionEngine = new GestureRecognitionEngine();
            recognitionEngine.AddGesture(new SwipeToLeftGesture());
            recognitionEngine.AddGesture(new SwipeToRightGesture());
            recognitionEngine.AddGesture(new ClapGesture());
            recognitionEngine.GestureRecognized += new EventHandler <GestureEventArgs>(recognitionEngine_GestureRecognized);

            //audio source--------------------------------------------------------------------------------
            sensor.AudioSource.SoundSourceAngleChanged += new EventHandler <SoundSourceAngleChangedEventArgs>(AudioSource_SoundSourceAngleChanged);
            sensor.AudioSource.BeamAngleChanged        += new EventHandler <BeamAngleChangedEventArgs>(AudioSource_BeamAngleChanged);

            kinectRecognizerInfo = findKinectRecognizerInfo();
            if (kinectRecognizerInfo != null)
            {
                recognizer =
                    new SpeechRecognitionEngine(kinectRecognizerInfo);
            }

            buildCommands();

            // Adaptive mode lets the SDK steer the beam angle in software
            // (rather than relying on the hardware default); this usually gives the best results
            sensor.AudioSource.BeamAngleMode = BeamAngleMode.Adaptive;

            System.IO.Stream audioStream = sensor.AudioSource.Start();

            recognizer.SetInputToAudioStream(audioStream,
                                             new SpeechAudioFormatInfo(EncodingFormat.Pcm,
                                                                       16000, 16, 1, 32000, 2, null));

            recognizer.SpeechRecognized += new EventHandler <SpeechRecognizedEventArgs>(recognizer_SpeechRecognized);


            // recognize words repeatedly and asynchronously
            recognizer.RecognizeAsync(RecognizeMode.Multiple);

            // clean up previously stored photos (skip if the folder does not exist yet)
            System.IO.DirectoryInfo di = new DirectoryInfo("photos");
            if (di.Exists)
            {
                foreach (FileInfo file in di.GetFiles())
                {
                    file.Delete();
                }
                foreach (DirectoryInfo dir in di.GetDirectories())
                {
                    dir.Delete(true);
                }
            }

            Timer          = new DispatcherTimer();
            Timer.Interval = new TimeSpan(0, 0, 1);
            Timer.Tick    += Timer_Tick;
            //Timer.Start();
        }
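findKinectRecognizerInfo() and buildCommands() are called above but their bodies are not part of this example. As an illustration only, a recognizer lookup along the lines of the usual Kinect SDK pattern might look like the sketch below; the en-US culture check is an assumption.

        // Sketch of a possible findKinectRecognizerInfo() -- the original method body is not shown here.
        // It looks for an installed recognizer whose AdditionalInfo marks it as the Kinect acoustic model
        // (en-US is assumed for this sketch).
        private RecognizerInfo findKinectRecognizerInfo()
        {
            foreach (RecognizerInfo recognizer in SpeechRecognitionEngine.InstalledRecognizers())
            {
                string value;
                recognizer.AdditionalInfo.TryGetValue("Kinect", out value);

                if ("True".Equals(value, StringComparison.OrdinalIgnoreCase) &&
                    "en-US".Equals(recognizer.Culture.Name, StringComparison.OrdinalIgnoreCase))
                {
                    return recognizer;
                }
            }

            return null;
        }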
コード例 #60
        /// <summary>
        /// Initializes a new instance of the MainWindow class.
        /// </summary>
        public MainWindow()
        {
            // one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // get the coordinate mapper
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // get the depth (display) extents
            FrameDescription frameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

            // get size of joint space
            this.displayWidth  = frameDescription.Width;
            this.displayHeight = frameDescription.Height;

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // a bone defined as a line between two joints
            this.bones = new List <Tuple <JointType, JointType> >();

            // Torso
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Head, JointType.Neck));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Neck, JointType.SpineShoulder));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.SpineMid));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineMid, JointType.SpineBase));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipLeft));

            // Right Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderRight, JointType.ElbowRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowRight, JointType.WristRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.HandRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandRight, JointType.HandTipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.ThumbRight));

            // Left Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderLeft, JointType.ElbowLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowLeft, JointType.WristLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.HandLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandLeft, JointType.HandTipLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.ThumbLeft));

            // Right Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipRight, JointType.KneeRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeRight, JointType.AnkleRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleRight, JointType.FootRight));

            // Left Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipLeft, JointType.KneeLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeLeft, JointType.AnkleLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleLeft, JointType.FootLeft));

            // populate body colors, one for each BodyIndex
            this.bodyColors = new List <Pen>();

            this.bodyColors.Add(new Pen(Brushes.Red, 6));
            this.bodyColors.Add(new Pen(Brushes.Orange, 6));
            this.bodyColors.Add(new Pen(Brushes.Green, 6));
            this.bodyColors.Add(new Pen(Brushes.Blue, 6));
            this.bodyColors.Add(new Pen(Brushes.Indigo, 6));
            this.bodyColors.Add(new Pen(Brushes.Violet, 6));

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // Create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            this.imageSource = new DrawingImage(this.drawingGroup);

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // initialize the components (controls) of the window
            this.InitializeComponent();
        }
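The constructor above only prepares the bones list, the body colors and the coordinateMapper; the actual drawing happens elsewhere in the sample. As a rough sketch that is not part of the original code, the prepared data is typically consumed like this when a body frame arrives, with body, dc and bonePen supplied by the caller.

        // Sketch (assumed, not from the original sample): how the bones list and coordinateMapper
        // set up above are typically used to draw one tracked body into the drawingGroup.
        // 'body' is a tracked Body and 'dc' is a DrawingContext opened on this.drawingGroup.
        private void DrawBody(Body body, DrawingContext dc, Pen bonePen)
        {
            foreach (var bone in this.bones)
            {
                Joint joint0 = body.Joints[bone.Item1];
                Joint joint1 = body.Joints[bone.Item2];

                // skip bones whose joints are not tracked at all
                if (joint0.TrackingState == TrackingState.NotTracked ||
                    joint1.TrackingState == TrackingState.NotTracked)
                {
                    continue;
                }

                // map 3D camera-space joint positions into 2D depth-space points for drawing
                DepthSpacePoint p0 = this.coordinateMapper.MapCameraPointToDepthSpace(joint0.Position);
                DepthSpacePoint p1 = this.coordinateMapper.MapCameraPointToDepthSpace(joint1.Position);

                dc.DrawLine(bonePen, new Point(p0.X, p0.Y), new Point(p1.X, p1.Y));
            }
        }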