// Opens the default Kinect-v2 sensor and allocates per-stream buffers for
// every stream requested in dwFlags. Returns the populated SensorData, or
// null when no sensor is present. When bUseMultiSource is true, individual
// stream readers are skipped and one multi-source reader is opened instead.
// NOTE(review): sensorAngle is accepted but not used in this method — confirm callers.
public KinectInterop.SensorData OpenDefaultSensor(KinectInterop.FrameSource dwFlags, float sensorAngle, bool bUseMultiSource)
    {
        KinectInterop.SensorData sensorData = new KinectInterop.SensorData();
        sensorFlags = dwFlags;

        kinectSensor = KinectSensor.GetDefault();
        if(kinectSensor == null)
            return null;

        coordMapper = kinectSensor.CoordinateMapper;

        this.bodyCount = kinectSensor.BodyFrameSource.BodyCount;
        sensorData.bodyCount = this.bodyCount;
        sensorData.jointCount = 25; // Kinect v2 tracks 25 joints per body

        // Fixed camera characteristics used elsewhere for projection math.
        sensorData.depthCameraFOV = 60f;
        sensorData.colorCameraFOV = 53.8f;
        sensorData.depthCameraOffset = -0.05f;
        sensorData.faceOverlayOffset = -0.04f;

        // Body stream: dedicated reader (unless multi-source) + body array.
        if((dwFlags & KinectInterop.FrameSource.TypeBody) != 0)
        {
            if(!bUseMultiSource)
                bodyFrameReader = kinectSensor.BodyFrameSource.OpenReader();

            bodyData = new Body[sensorData.bodyCount];
        }

        // Color dimensions are recorded even when the color stream itself
        // is not requested.
        var frameDesc = kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Rgba);
        sensorData.colorImageWidth = frameDesc.Width;
        sensorData.colorImageHeight = frameDesc.Height;

        if((dwFlags & KinectInterop.FrameSource.TypeColor) != 0)
        {
            if(!bUseMultiSource)
                colorFrameReader = kinectSensor.ColorFrameSource.OpenReader();

            sensorData.colorImage = new byte[frameDesc.BytesPerPixel * frameDesc.LengthInPixels];
        }

        sensorData.depthImageWidth = kinectSensor.DepthFrameSource.FrameDescription.Width;
        sensorData.depthImageHeight = kinectSensor.DepthFrameSource.FrameDescription.Height;

        if((dwFlags & KinectInterop.FrameSource.TypeDepth) != 0)
        {
            if(!bUseMultiSource)
                depthFrameReader = kinectSensor.DepthFrameSource.OpenReader();

            sensorData.depthImage = new ushort[kinectSensor.DepthFrameSource.FrameDescription.LengthInPixels];
        }

        if((dwFlags & KinectInterop.FrameSource.TypeBodyIndex) != 0)
        {
            if(!bUseMultiSource)
                bodyIndexFrameReader = kinectSensor.BodyIndexFrameSource.OpenReader();

            sensorData.bodyIndexImage = new byte[kinectSensor.BodyIndexFrameSource.FrameDescription.LengthInPixels];
        }

        if((dwFlags & KinectInterop.FrameSource.TypeInfrared) != 0)
        {
            if(!bUseMultiSource)
                infraredFrameReader = kinectSensor.InfraredFrameSource.OpenReader();

            sensorData.infraredImage = new ushort[kinectSensor.InfraredFrameSource.FrameDescription.LengthInPixels];
        }

        // Open() is called unconditionally; the IsOpen guard was removed.
        //if(!kinectSensor.IsOpen)
        {
            //Debug.Log("Opening sensor, available: " + kinectSensor.IsAvailable);
            kinectSensor.Open();
        }

        // NOTE(review): busy-wait blocks the calling (main) thread for up to
        // 3 seconds while the sensor becomes available.
        float fWaitTime = Time.realtimeSinceStartup + 3f;
        while(!kinectSensor.IsAvailable && Time.realtimeSinceStartup < fWaitTime)
        {
            // wait for sensor to open
        }

        Debug.Log("K2-sensor " + (kinectSensor.IsOpen ? "opened" : "closed") +
                  ", available: " + kinectSensor.IsAvailable);

        // Multi-source mode: one reader for all requested streams (the low
        // 6 bits of dwFlags map directly onto FrameSourceTypes).
        if(bUseMultiSource && dwFlags != KinectInterop.FrameSource.TypeNone && kinectSensor.IsOpen)
        {
            multiSourceFrameReader = kinectSensor.OpenMultiSourceFrameReader((FrameSourceTypes)((int)dwFlags & 0x3F));
        }

        return sensorData;
    }
Пример #2
0
	// Unity initialization: acquires the default Kinect sensor, prepares
	// the color frame buffer/texture, opens a color-only multi-source
	// reader, and computes the screen rectangles used to draw the feed.
	void Start () {

		mySensor = KinectSensor.GetDefault();

		if (mySensor == null)
		{
			return;
		}

		// One full RGBA color frame's worth of bytes.
		colorFrameData = new byte[colorWidth * colorHeight * bytes_per_pixel];
		backgroundTex = new Texture2D(colorWidth, colorHeight, TextureFormat.BGRA32, false);

		if (!mySensor.IsOpen)
		{
			mySensor.Open();
		}

		msFrameReader = mySensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color);

		// Fit the color image inside the camera viewport while preserving
		// the sensor's aspect ratio, then centre it.
		Rect viewRect = Camera.main.pixelRect;
		float drawH = viewRect.height;
		float drawW = viewRect.width;

		if (drawW > drawH)
			drawW = drawH * colorWidth / colorHeight;
		else
			drawH = drawW * colorHeight / colorWidth;

		float ofsX = (viewRect.width - drawW) / 2;
		float ofsY = (viewRect.height - drawH) / 2;
		foregroundImgRect = new Rect(ofsX, ofsY, drawW, drawH);
		// The GUI rect is flipped vertically (negative height) for OnGUI drawing.
		foregroundGuiRect = new Rect(ofsX, viewRect.height - ofsY, drawW, -drawH);
	} //End of Start()
Пример #3
0
    /// <summary>
    /// Acquires the default Kinect sensor, opens a multi-source reader for
    /// the depth and body streams, caches the depth frame description and
    /// coordinate mapper, and allocates per-frame buffers. Calls
    /// ExitWithLog() on failure.
    /// </summary>
    public KinectManager()
    {
        _Sensor = Kinect.KinectSensor.GetDefault();
        if (_Sensor == null)
        {
            // Fixed typo in the log message ("avalibalbe").
            ExitWithLog("Kinect Sensor not available");
            // Guard in case ExitWithLog does not terminate the process;
            // the code below would otherwise dereference a null sensor.
            return;
        }

        _Reader = _Sensor.OpenMultiSourceFrameReader(
            Kinect.FrameSourceTypes.Depth |
            Kinect.FrameSourceTypes.Body
            );
        if (_Reader == null)
        {
            ExitWithLog("Fail to load multiframe source reader.");
            return;
        }

        DepthFrameDesc = _Sensor.DepthFrameSource.FrameDescription;
        Mapper         = _Sensor.CoordinateMapper;

        DepthData = new ushort[DepthFrameDesc.LengthInPixels];
        _BodyData = new Kinect.Body[_Sensor.BodyFrameSource.BodyCount];

        if (!_Sensor.IsOpen)
        {
            _Sensor.Open();
        }
    }
Пример #4
0
	// Unity initialization: acquire the default Kinect sensor and start
	// reading body frames. Logs the outcome via print().
	void Start () {

        print("START");
        /*Get Kinect Sensor and start reading data*/

        // Get the default Kinect sensor. (The redundant `sensor = null;`
        // that preceded this assignment was removed.)
        sensor = KinectSensor.GetDefault();

        if( sensor != null )
        {
            //We have a sensor connected

            print("SENSOR CONNECTED");
            //Open the connection/Start reading the data
            reader = sensor.BodyFrameSource.OpenReader();
            if( !sensor.IsOpen )
            {
                sensor.Open();
            }

        } else
        {
            print("NO KINECT CONNECTED");
        }

        // Prints "null" when no sensor was found (kept from original).
        print(sensor);
	}
Пример #5
0
    // Unity initialization: cache the tracked bone GameObjects, make sure
    // the Kinect sensor is running, and subscribe to body-frame events.
    void Start()
    {
        _bones = new GameObject[7] {
            Head, HandLeft, HandRight, KneeLeft, KneeRight, ShoulderLeft, ShoulderRight
        };

        _Sensor = Kinect2.KinectSensor.GetDefault();

        if (_Sensor != null)
        {
            if (!_Sensor.IsOpen)
            {
                if (bDebugLog)
                {
                    Debug.Log("[Kinect2] KinectSensor Open");
                }
                _Sensor.Open();
            }

            // Open the body reader unconditionally. It was previously only
            // opened inside the !IsOpen branch, so when the sensor was
            // already open _Reader stayed null and no frames ever arrived.
            _Reader = _Sensor.BodyFrameSource.OpenReader();
            _Reader.FrameArrived += BodyFrameReader_FrameArrived;
        }

        //gimmick = GameObject.Find("ita");
        flag = false;
    }
    // Unity initialization: open a combined color+depth multi-source
    // reader, cache color frame dimensions, allocate CPU-side pixel
    // buffers and display textures, and ensure the sensor is running.
    void Start () 
    {
        _Sensor = KinectSensor.GetDefault();

        if (_Sensor == null)
        {
            return;
        }

        _Reader = _Sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth);

        // Color stream: the RGBA description drives texture and buffer sizes.
        var colorDesc = _Sensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Rgba);
        ColorWidth = colorDesc.Width;
        ColorHeight = colorDesc.Height;
        _ColorTexture = new Texture2D(colorDesc.Width, colorDesc.Height, TextureFormat.RGBA32, false);
        _ColorData = new byte[colorDesc.BytesPerPixel * colorDesc.LengthInPixels];

        // Depth stream: one ushort per pixel.
        var depthDesc = _Sensor.DepthFrameSource.FrameDescription;
        _DepthData = new ushort[depthDesc.LengthInPixels];
        _DepthTexture = new Texture2D(depthDesc.Width, depthDesc.Height, TextureFormat.ARGB32, false);

        if (!_Sensor.IsOpen)
        {
            _Sensor.Open();
        }
    }
Пример #7
0
	// Unity initialization: opens the Kinect multi-source stream (body,
	// depth, body-index), creates per-joint motion-history queues, opens
	// data-logging writers, and computes the on-screen rectangles used to
	// render the user behind the scene.
	void Start()
	{
		mySensor = KinectSensor.GetDefault();

		if (mySensor != null)
		{
			if (!mySensor.IsOpen)
			{
				mySensor.Open();
			}

			// Display texture sized to the color stream dimensions.
			ninjaTex = new Texture2D(colorWidth, colorHeight, TextureFormat.BGRA32, false);

			msFrameReader = mySensor.OpenMultiSourceFrameReader(FrameSourceTypes.Body | FrameSourceTypes.Depth |
				FrameSourceTypes.BodyIndex);

			// Per-joint X/Y/time histories for hands and feet.
			// There has to be a more efficient way of tracking these (i.e. using OOP)
			rightHandQueue_X = new Queue<float>();
			rightHandQueue_Y = new Queue<float>();
			rightHandQueue_T = new Queue<float>();

			leftHandQueue_X = new Queue<float>();
			leftHandQueue_Y = new Queue<float>();
			leftHandQueue_T = new Queue<float>();

			rightFootQueue_X = new Queue<float>();
			rightFootQueue_Y = new Queue<float>();
			rightFootQueue_T = new Queue<float>();

			leftFootQueue_X = new Queue<float>();
			leftFootQueue_Y = new Queue<float>();
			leftFootQueue_T = new Queue<float>();

			/** Construct StreamWriter object for collecting user data **/
			// NOTE(review): these writers are opened here but not closed in
			// this method — confirm they are disposed elsewhere.
			sw_v = new StreamWriter("EMILY_V.txt");
			sw_t = new StreamWriter("EMILY_T.txt");
			sw_x = new StreamWriter("EMILY_X.txt");
			sw_y = new StreamWriter("EMILY_Y.txt");


			InitializeSlashRenderer();

			//Rendering user as part of the Unity Scene background via Main Camera
			Rect cameraRect = Camera.main.pixelRect;
			float rectHeight = cameraRect.height;
			float rectWidth = cameraRect.width;

			// Fit the color image to the viewport, preserving aspect ratio.
			if (rectWidth > rectHeight)
				rectWidth = rectHeight * colorWidth / colorHeight;
			else
				rectHeight = rectWidth * colorHeight / colorWidth;

			float foregroundOfsX = (cameraRect.width - rectWidth) / 2;
			float foregroundOfsY = (cameraRect.height - rectHeight) / 2;
			foregroundImgRect = new Rect(foregroundOfsX, foregroundOfsY, rectWidth, rectHeight);
			// GUI rect is vertically flipped (negative height) for OnGUI drawing.
			foregroundGuiRect = new Rect(foregroundOfsX, cameraRect.height - foregroundOfsY, rectWidth, -rectHeight);
		}

	}
Пример #8
0
	// Grab the default Kinect sensor, open a body-frame reader, and start
	// the sensor if it is not already running.
	void Start () {
		_Sensor = KinectSensor.GetDefault();
		if (_Sensor == null) {
			return;
		}

		_Reader = _Sensor.BodyFrameSource.OpenReader();
		if (!_Sensor.IsOpen) {
			_Sensor.Open();
		}
	}
Пример #9
0
 // Acquire the default Kinect sensor, open a body-frame reader, and make
 // sure the sensor itself is started.
 void Start()
 {
     _Sensor = Windows.Kinect.KinectSensor.GetDefault();
     if (_Sensor == null)
     {
         return;
     }

     _Reader = _Sensor.BodyFrameSource.OpenReader();
     if (!_Sensor.IsOpen)
     {
         _Sensor.Open();
     }
 }
Пример #10
0
    // Unity initialization: connect to the default Kinect sensor and begin
    // streaming body frames.
    void Start()
    {
        Sensor = KinectSensor.GetDefault();
        if (Sensor == null) {
            return;
        }

        Reader = Sensor.BodyFrameSource.OpenReader ();
        if (!Sensor.IsOpen) {
            Sensor.Open ();
        }
    }
Пример #11
0
        // Acquire the default sensor, open the body reader, and ensure the
        // sensor is running.
        void Start()
        {
            _sensor = Kinect.KinectSensor.GetDefault();
            if (_sensor == null)
            {
                return;
            }

            _reader = _sensor.BodyFrameSource.OpenReader();
            if (!_sensor.IsOpen)
            {
                _sensor.Open();
            }
        }
    /// <summary>
    /// Acquires the default Kinect sensor, opens it, creates a body-frame
    /// reader and allocates body storage. Animate is enabled regardless of
    /// whether a sensor was found (matches original behavior).
    /// </summary>
    private void Start()
    {
        // Setup the Kinect Sensor
        KinectSensor = KinectSensor.GetDefault();
        if (KinectSensor != null)
        {
            Debug.Log("Got Sensor!");
            KinectSensor.Open();
            Debug.Log("Opened sensor");
            Reader = KinectSensor.BodyFrameSource.OpenReader();
            Debug.Log("Opened Frame Reader");

            // The "open again" re-check that followed was dead code: the
            // sensor was just opened above, so it has been removed.

            // Create enough storage for however many the Kinect can track
            Bodies = new Body[KinectSensor.BodyFrameSource.BodyCount];
        }

        Animate = true;
    }
    // Early initialization: acquire the sensor and open a multi-source
    // reader for the body, body-index and depth streams.
    void Awake () 
    {
        _Sensor = KinectSensor.GetDefault();
        isNewFrame = false;   // no frame has arrived yet

        if (_Sensor == null)
        {
            return;
        }

        _Reader = _Sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Body | FrameSourceTypes.BodyIndex | FrameSourceTypes.Depth);

        if (!_Sensor.IsOpen)
        {
            _Sensor.Open();
        }
    }
Пример #14
0
        // Initialize the tracking containers and start the Kinect body stream.
        void Start()
        {
            bodies    = new Dictionary <ulong, GameObject>();
            trackedID = new List <string>();
            _sensor   = Kinect.KinectSensor.GetDefault();

            if (_sensor == null)
            {
                return;
            }

            _reader = _sensor.BodyFrameSource.OpenReader();
            if (!_sensor.IsOpen)
            {
                _sensor.Open();
            }
        }
Пример #15
0
    /// <summary>
    /// Starts the Kinect body stream and caches the coordinate mapper.
    /// </summary>
    void Start()
    {
        _Sensor = KinectSensor.GetDefault();

        if (_Sensor != null)
        {
            _Reader = _Sensor.BodyFrameSource.OpenReader();

            // Cache the mapper unconditionally. It was previously only set
            // inside the !IsOpen branch, so it stayed null whenever the
            // sensor was already open.
            coordinateMapper = _Sensor.CoordinateMapper;

            if (!_Sensor.IsOpen)
            {
                _Sensor.Open();
            }
        }
    }
Пример #16
0
    // Grab the gesture manager component and start the Kinect body stream.
    void Start()
    {
        Debug.Log ("got to bodysourcemanager");
        GestureManager = this.GetComponent<CustomGestureManager> ();

        _Sensor = KinectSensor.GetDefault();
        if (_Sensor == null)
        {
            return;
        }

        _Reader = _Sensor.BodyFrameSource.OpenReader();
        if (!_Sensor.IsOpen)
        {
            _Sensor.Open();
        }
    }
Пример #17
0
    // Open the body-index stream and cache its frame description.
    void Start()
    {
        _Sensor = KinectSensor.GetDefault();
        if (_Sensor == null)
        {
            return;
        }

        _Reader = _Sensor.BodyIndexFrameSource.OpenReader();
        bodyIndexFrameDescription = _Sensor.BodyIndexFrameSource.FrameDescription;

        if (!_Sensor.IsOpen)
        {
            _Sensor.Open();
        }
    }
    /// <summary>
    /// Starts the Kinect body stream and flags the GameController that a
    /// Kinect sensor is active.
    /// </summary>
    void Start()
    {
        _Sensor = KinectSensor.GetDefault();

        if (_Sensor != null)
        {
            _Reader = _Sensor.BodyFrameSource.OpenReader();

            if (!_Sensor.IsOpen)
            {
                _Sensor.Open();
            }

            // Flag the controller whenever a sensor exists. Previously this
            // only happened inside the !IsOpen branch, so an already-open
            // sensor never set kinectActive. Also guard against Find()
            // returning null.
            GameObject GameControllerObject = GameObject.Find("GameController");
            if (GameControllerObject != null)
            {
                GameControllerObject.GetComponent<GameController>().kinectActive = true;
            }
        }
    }
Пример #19
0
    // Build the point-cloud mesh at a reduced resolution and start the sensor.
    void Start()
    {
        _Sensor = KinectSensor.GetDefault();
        if (_Sensor == null)
        {
            return;
        }

        _Mapper = _Sensor.CoordinateMapper;

        // Downsample to lower resolution
        var depthDesc = _Sensor.DepthFrameSource.FrameDescription;
        CreateMesh(depthDesc.Width / _DownsampleSize, depthDesc.Height / _DownsampleSize);

        if (!_Sensor.IsOpen)
        {
            _Sensor.Open();
        }
    }
Пример #20
0
    // Start up the Kinect sensor and open a body-frame reader.
    void Start()
    {
        // Get the default KinectSensor instance.
        _Sensor = KinectSensor.GetDefault();
        if (_Sensor == null)
        {
            return;
        }

        // Open the body stream reader.
        _Reader = _Sensor.BodyFrameSource.OpenReader();

        if (!_Sensor.IsOpen)
        {
            // Start the sensor.
            _Sensor.Open();
        }
    }
Пример #21
0
 // Open the infrared stream, allocate pixel buffers sized from the frame
 // description, and start the sensor.
 void Start()
 {
     _Sensor = KinectSensor.GetDefault();
     if (_Sensor == null) 
     {
         return;
     }

     _Reader = _Sensor.InfraredFrameSource.OpenReader();

     var irDesc = _Sensor.InfraredFrameSource.FrameDescription;
     _Data = new ushort[irDesc.LengthInPixels];        // raw 16-bit IR samples
     _RawData = new byte[irDesc.LengthInPixels * 4];   // BGRA bytes for the texture
     _Texture = new Texture2D(irDesc.Width, irDesc.Height, TextureFormat.BGRA32, false);

     if (!_Sensor.IsOpen)
     {
         _Sensor.Open();
     }
 }
Пример #22
0
	// Unity initialization: start the body stream, record whether the
	// Kinect hardware is actually available, and prepare gesture detectors.
	void Start () 
	{
		_Sensor = KinectSensor.GetDefault();
		if (_Sensor == null)
		{
			return;
		}

		_Reader = _Sensor.BodyFrameSource.OpenReader();
		if (!_Sensor.IsOpen)
		{
			_Sensor.Open();
		}

		// IsOpen alone does not guarantee connected hardware; IsAvailable does.
		if (_Sensor.IsAvailable)
			kinectEnabled = true;
		gestureDetectorList = new List<GestureDetector>();
	}
Пример #23
0
    // Connect to the default Kinect sensor and begin the body stream.
    // (Commented-out debug plane/vector experiments were removed.)
    void Start()
    {
        _Sensor = KinectSensor.GetDefault();
        if (_Sensor == null)
        {
            return;
        }

        _Reader = _Sensor.BodyFrameSource.OpenReader();
        if (!_Sensor.IsOpen)
        {
            _Sensor.Open();
        }
    }
Пример #24
0
	// Unity initialization: allocates depth/body-index/color buffers and
	// the coordinate-mapping scratch array, opens a multi-source reader
	// (body-index + color + depth) and computes the screen rectangles used
	// to render the user.
	void Start()
	{

		mySensor = KinectSensor.GetDefault();

		if (mySensor != null)
		{
			// CPU-side buffers; depth-resolution buffers use one entry per
			// depth pixel, color uses bytes_per_pixel bytes per color pixel.
			depthFrameData = new ushort[depthWidth * depthHeight];
			bodyIndexFrameData = new byte[depthWidth * depthHeight];
			colorFrameData = new byte[colorWidth * colorHeight * bytes_per_pixel];
			displayPixels = new byte[depthWidth * depthHeight * bytes_per_pixel];
			// Color-space coordinate for every depth pixel, filled via the
			// coordinate mapper each frame.
			colorPoints = new ColorSpacePoint[depthWidth * depthHeight];

			ninjaTex = new Texture2D(depthWidth, depthHeight, TextureFormat.BGRA32, false);

			if (!mySensor.IsOpen)
			{
				mySensor.Open();
			}

			myCoordinateMapper = mySensor.CoordinateMapper;

			msFrameReader = mySensor.OpenMultiSourceFrameReader(FrameSourceTypes.BodyIndex |
				FrameSourceTypes.Color | FrameSourceTypes.Depth);

			//Rendering user as part of the Unity Scene background via Main Camera
			Rect cameraRect = Camera.main.pixelRect;
			float rectHeight = cameraRect.height;
			float rectWidth = cameraRect.width;

			// Fit the depth image to the viewport, preserving aspect ratio.
			if (rectWidth > rectHeight)
				rectWidth = rectHeight * depthWidth / depthHeight;
			else
				rectHeight = rectWidth * depthHeight / depthWidth;

			float foregroundOfsX = (cameraRect.width - rectWidth) / 2;
			float foregroundOfsY = (cameraRect.height - rectHeight) / 2;
			foregroundImgRect = new Rect(foregroundOfsX, foregroundOfsY, rectWidth, rectHeight);
			// GUI rect is vertically flipped (negative height) for OnGUI drawing.
			foregroundGuiRect = new Rect(foregroundOfsX, cameraRect.height - foregroundOfsY, rectWidth, -rectHeight);
		}

	}
Пример #25
0
    /// <summary>
    /// Opens separate color and depth readers, starts the sensor, and
    /// attaches a BodySourceManager component for skeleton data.
    /// </summary>
    private void Start()
    {
        _Sensor = Kinect.KinectSensor.GetDefault();

        if (_Sensor != null)
        {
            _ColorReader = _Sensor.ColorFrameSource.OpenReader();
            _DepthReader = _Sensor.DepthFrameSource.OpenReader();

            // The color/depth frame descriptions previously fetched here
            // were never used, so those locals have been removed.

            if (!_Sensor.IsOpen)
            {
                _Sensor.Open();
            }
        }

        bodySourceManager = gameObject.AddComponent <BodySourceManager>();
    }
Пример #26
0
	// Unity initialization: opens the depth stream, allocates the raw and
	// display buffers from the frame description, and computes the screen
	// rectangles used to render the depth image.
	void Start () {

		mySensor = KinectSensor.GetDefault();

		if (mySensor != null)
		{
			depthFrameReader = mySensor.DepthFrameSource.OpenReader();
			var depthFrameDescrip = mySensor.DepthFrameSource.FrameDescription;

			// One ushort depth sample per pixel.
			depthFrameData = new ushort[depthFrameDescrip.LengthInPixels];

			// Raw data contains B, G, R, A values
			depthRawData = new byte[depthFrameDescrip.LengthInPixels * BYTES_PER_PIXEL];

			depthTex = new Texture2D(depthFrameDescrip.Width, depthFrameDescrip.Height, TextureFormat.BGRA32, false);

			if (!mySensor.IsOpen)
			{
				mySensor.Open();
			}

			// Cache the depth image size (the original comment said "IR",
			// but these values come from the depth frame description).
			depthImageHeight = depthFrameDescrip.Height;
			depthImageWidth = depthFrameDescrip.Width;

			//Calculate the foreground rectangles
			Rect cameraRect = Camera.main.pixelRect;
			float rectHeight = cameraRect.height;
			float rectWidth = cameraRect.width;

			// Fit the depth image to the viewport, preserving aspect ratio.
			if (rectWidth > rectHeight)
				rectWidth = rectHeight * depthImageWidth / depthImageHeight;
			else
				rectHeight = rectWidth * depthImageHeight / depthImageWidth;

			float foregroundOfsX = (cameraRect.width - rectWidth) / 2;
			float foregroundOfsY = (cameraRect.height - rectHeight) / 2;
			foregroundImgRect = new Rect(foregroundOfsX, foregroundOfsY, rectWidth, rectHeight);
			// GUI rect is vertically flipped (negative height) for OnGUI drawing.
			foregroundGuiRect = new Rect(foregroundOfsX, cameraRect.height - foregroundOfsY, rectWidth, -rectHeight);
		}
	}
Пример #27
0
    /// <summary>
    /// Connects to the default Kinect sensor and opens a body-frame reader.
    /// Failures are logged with the exception detail instead of being
    /// silently swallowed by a bare catch.
    /// </summary>
    protected virtual void Start()
    {
        try
        {
            Sensor = Kinect.KinectSensor.GetDefault();

            if (Sensor != null)
            {
                Reader = Sensor.BodyFrameSource.OpenReader();

                if (!Sensor.IsOpen)
                {
                    Sensor.Open();
                }
            }
        }
        catch (System.Exception ex)
        {
            // Include the actual failure reason in the log.
            Debug.Log("Error: No Kinect found. " + ex.Message);
        }
    }
Пример #28
0
    /// <summary>
    /// Registers this instance as the singleton, opens the Kinect sensor
    /// and a multi-source reader (body, color, depth, infrared), and
    /// initializes the player-tracking state.
    /// </summary>
    void Start()
    {
        if (instance == null)
        {
            instance = this;
        }

        sensor = Kinect.KinectSensor.GetDefault();

        if (sensor == null)
        {
            // Previously the code fell through and dereferenced the null
            // sensor below, throwing a NullReferenceException. Bail out.
            return;
        }

        sensor.Open();

        reader = sensor.OpenMultiSourceFrameReader(Kinect.FrameSourceTypes.Body | Kinect.FrameSourceTypes.Color | Kinect.FrameSourceTypes.Depth | Kinect.FrameSourceTypes.Infrared);
        reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
        playersIdBreak = new List <ulong>();
        playersIdBreak.Clear();
        players     = new bool[6];   // Kinect v2 tracks at most 6 bodies
        countBodies = 0;
    }
Пример #29
0
    /// <summary>
    /// Connects to the default Kinect sensor and opens a body-frame reader.
    /// Failures are logged with the exception detail instead of being
    /// silently swallowed by a bare catch.
    /// </summary>
    protected virtual void Start()
    {
        try
        {
            Sensor = Kinect.KinectSensor.GetDefault();

            if (Sensor != null)
            {
                Reader = Sensor.BodyFrameSource.OpenReader();

                if (!Sensor.IsOpen)
                {
                    Sensor.Open();
                }
            }
        }
        catch (System.Exception ex)
        {
            // Include the actual failure reason in the log.
            Debug.Log("Error: No Kinect found. " + ex.Message);
        }
    }
Пример #30
0
    // Open the depth stream, cache the frame geometry and raw-depth buffer,
    // and start the sensor; logs an error when no sensor is present.
    private void Start()
    {
        sensor = Windows.Kinect.KinectSensor.GetDefault();

        if (sensor == null)
        {
            Debug.LogErrorFormat("Failed to acquire Kinect Sensor!");
            return;
        }

        reader = sensor.DepthFrameSource.OpenReader();

        var depthDesc = sensor.DepthFrameSource.FrameDescription;
        data   = new ushort[depthDesc.LengthInPixels];
        width  = depthDesc.Width;
        heigth = depthDesc.Height;   // field name typo ("heigth") kept — callers use it

        if (!sensor.IsOpen)
        {
            sensor.Open();
        }
    }
    // Initialize the sensor, open a reader for the color stream, and build
    // the display texture / pixel buffer from a fresh RGBA frame description.
    void Start()
    {
        _Sensor = KinectSensor.GetDefault();
        if (_Sensor == null)
        {
            return;
        }

        _Reader = _Sensor.ColorFrameSource.OpenReader();

        var frameDesc = _Sensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Rgba);
        ColorWidth = frameDesc.Width;
        ColorHeight = frameDesc.Height;

        // Texture object plus the byte array the pixels are copied into.
        _Texture = new Texture2D(frameDesc.Width, frameDesc.Height, TextureFormat.RGBA32, false);
        _Data = new byte[frameDesc.BytesPerPixel * frameDesc.LengthInPixels];

        // Start the sensor if it has not been opened yet.
        if (!_Sensor.IsOpen)
        {
            _Sensor.Open();
        }
    }
Пример #32
0
    // Initialize the default Kinect sensor: cache the coordinate mapper,
    // open the sensor, and create a multi-source reader for the color,
    // depth and body-index streams. Logs an error if no sensor exists.
    void InitializeDefaultSensor()
    {
        m_pKinectSensor = KinectSensor.GetDefault();

        if (m_pKinectSensor == null)
        {
            UnityEngine.Debug.LogError("No ready Kinect found!");
            return;
        }

        // Initialize the Kinect and get coordinate mapper and the frame reader
        m_pCoordinateMapper = m_pKinectSensor.CoordinateMapper;

        m_pKinectSensor.Open();
        if (m_pKinectSensor.IsOpen)
        {
            m_pMultiSourceFrameReader = m_pKinectSensor.OpenMultiSourceFrameReader(
                FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.BodyIndex);
        }
    }
Пример #33
0
	// Unity initialization: opens the infrared stream, allocates the IR
	// sample and raw-pixel buffers, creates the display texture, and
	// starts the sensor.
	void Start()
	{
		mySensor = KinectSensor.GetDefault();

		if (mySensor != null)
		{
			irFrameReader = mySensor.InfraredFrameSource.OpenReader();
			var irFrameDescrip = mySensor.InfraredFrameSource.FrameDescription;

			// LengthInPixels already equals Width * Height, so the buffers
			// are sized from it directly. The previous code multiplied by
			// Width * Height again, over-allocating each buffer by a factor
			// of Width * Height.
			irFrameData = new ushort[irFrameDescrip.LengthInPixels];

			// Raw data contains B, G, R, A values 
			irRawData = new byte[irFrameDescrip.LengthInPixels * 4];

			irTexture = new Texture2D(irFrameDescrip.Width, irFrameDescrip.Height, TextureFormat.BGRA32, false);

			if (!mySensor.IsOpen)
			{
				mySensor.Open();
			}
		}
	}
Пример #34
0
    // Method to initialise the Kinect: loads the trained gesture database,
    // opens the sensor, wires the discrete gestures to fields, and opens
    // both the body-frame and visual-gesture-builder frame readers.
    void InitKinect()
    {
        _getsureBasePath = Path.Combine(Application.streamingAssetsPath, "GestureDB/JumpDB.gbd");
        _dbGestures      = VisualGestureBuilderDatabase.Create(_getsureBasePath);
        _bodies          = new Windows.Kinect.Body[6];
        _kinect          = Windows.Kinect.KinectSensor.GetDefault();
        _kinect.Open();
        // NOTE(review): no null check on GetDefault() — _kinect.Open() will
        // throw if no sensor is installed; confirm this is acceptable here.
        // The gesture source starts with tracking id 0 (no body bound yet).
        _gestureFrameSource = VisualGestureBuilderFrameSource.Create(_kinect, 0);

        //for each gesture in trained database of custom gestures - add them to kinect frame source
        foreach (Gesture gest in _dbGestures.AvailableGestures)
        {
            _gestureFrameSource.AddGesture(gest);
            // Keep direct references to the gestures this game reacts to.
            if (gest.Name == "Jump")
            {
                _jump = gest;
            }
            else if (gest.Name == "Lean_Left")
            {
                _moveLeft = gest;
            }
            else if (gest.Name == "Lean_Right")
            {
                _moveRight = gest;
            }
            else if (gest.Name == "Swing")
            {
                _swing = gest;
            }
        }
        _bodyFrameSource = _kinect.BodyFrameSource;
        _bodyFrameReader = _bodyFrameSource.OpenReader();
        _bodyFrameReader.FrameArrived += _bodyFrameReader_FrameArrived;

        // The gesture reader stays paused until a tracked body id is
        // assigned to the gesture frame source.
        _gestureFrameReader               = _gestureFrameSource.OpenReader();
        _gestureFrameReader.IsPaused      = true;
        _gestureFrameReader.FrameArrived += _gestureFrameReader_FrameArrived;
    }
Пример #35
0
	// Unity initialization: starts the Kinect sensor, opens data-logging
	// writers, creates the display texture, opens a multi-source reader
	// (body, depth, body-index) and computes the on-screen rectangles used
	// to render the user.
	void Start () {
		mySensor = KinectSensor.GetDefault();

		if (mySensor != null)
		{
			if (!mySensor.IsOpen)
			{
				mySensor.Open();
			}

			//Writing data to an output file for graphing analysis
			// NOTE(review): writers are opened here but not closed in this
			// method — confirm they are disposed elsewhere.
			sw_cm_x = new StreamWriter("PLAYER_CM_X");
			sw_cm_y = new StreamWriter("PLAYER_CM_Y");
			sw_t = new StreamWriter("PLAYER_T");

			ninjaTex = new Texture2D(colorWidth, colorHeight, TextureFormat.BGRA32, false);

			msFrameReader = mySensor.OpenMultiSourceFrameReader(FrameSourceTypes.Body | FrameSourceTypes.Depth |
				FrameSourceTypes.BodyIndex);

			//Rendering user as part of the Unity Scene background via Main Camera
			Rect cameraRect = Camera.main.pixelRect;
			float rectHeight = cameraRect.height;
			float rectWidth = cameraRect.width;

			// Fit the color image to the viewport, preserving aspect ratio.
			if (rectWidth > rectHeight)
				rectWidth = rectHeight * colorWidth / colorHeight;
			else
				rectHeight = rectWidth * colorHeight / colorWidth;

			float foregroundOfsX = (cameraRect.width - rectWidth) / 2;
			float foregroundOfsY = (cameraRect.height - rectHeight) / 2;
			foregroundImgRect = new Rect(foregroundOfsX, foregroundOfsY, rectWidth, rectHeight);
			// GUI rect is vertically flipped (negative height) for OnGUI drawing.
			foregroundGuiRect = new Rect(foregroundOfsX, cameraRect.height - foregroundOfsY, rectWidth, -rectHeight);
			//UNNECESSARY?
		}
	}
	private Vector3 _chestRight; //right vectory of the chest	
	
	// Unity initialization: configures the optional FBX exporter, opens the
	// Kinect body stream, maps avatar bone GameObjects to Kinect joint
	// indices, and precomputes the base rotations / bone directions (in
	// bone-local space) used later to retarget Kinect joint data.
	void Start () {
        if (sFBXExporterForUnity != null)
        {
            sFBXExporterForUnity.bOutAnimation = false;
            sFBXExporterForUnity.bOutAnimationCustomFrame = true;
        }
		_Sensor = Kinect2.KinectSensor.GetDefault();
        if (_Sensor != null)
        {
            if (!_Sensor.IsOpen)
            {
                if (bDebugLog) Debug.Log("[Kinect2] KinectSensor Open");
                _Sensor.Open();
                _Reader = _Sensor.OpenMultiSourceFrameReader(/*Kinect2.FrameSourceTypes.Color |
				                                             Kinect2.FrameSourceTypes.Depth |
				                                             Kinect2.FrameSourceTypes.Infrared |
				                                             */
                                                             Kinect2.FrameSourceTypes.Body);
                _Reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
            }
            else
            {
                if (bDebugLog) Debug.Log("[Kinect2] KinectSensor Opened");
                _Reader = _Sensor.OpenMultiSourceFrameReader(/*Kinect2.FrameSourceTypes.Color |
				                                             Kinect2.FrameSourceTypes.Depth |
				                                             Kinect2.FrameSourceTypes.Infrared |
				                                             */
                                                             Kinect2.FrameSourceTypes.Body);
                _Reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
            }
        }
		//store bones in a list for easier access, everything except Hip_Center will be one
		//higher than the corresponding Kinect2.NuiSkeletonPositionIndex (because of the hip_override)
		_bones = new GameObject[(int)Kinect2.JointType.ThumbRight + 1] {
			null, SpineMid, SpineShoulder, Neck,
			CollarLeft, ShoulderLeft, ElbowLeft, WristLeft,
			CollarRight, ShoulderRight, ElbowRight, WristRight,
			HipOverride, HipLeft, KneeLeft, AnkleLeft,
			null, HipRight, KneeRight, AnkleRight,
			Head, HandLeft, HandRight, FootLeft, FootRight};
			//SpineShoulder, HandTipLeft, ThumbLeft, HandTipRight, ThumbRight, FootLeft, FootRight};

		// Position/rotation history buffers, one slot per joint index.
		_vecbones = new Vector3[(int)Kinect2.JointType.ThumbRight + 1];
		_vecbones2 = new Vector3[(int)Kinect2.JointType.ThumbRight + 1];
		_qbones = new Quaternion[(int)Kinect2.JointType.ThumbRight + 1];
		_qbones2 = new Quaternion[(int)Kinect2.JointType.ThumbRight + 1];

		//determine which bones are not available
		// (_nullMask has a bit set for every joint slot without a GameObject)
		for(int ii = 0; ii < _bones.Length; ii++)
		{
			if(_bones[ii] == null)
			{
				_nullMask |= (uint)(1 << ii);
			}
		}
		
		//store the base rotations and bone directions (in bone-local space)
		_baseRotation = new Quaternion[(int)Kinect2.JointType.ThumbRight + 1];
		_boneDir = new Vector3[(int)Kinect2.JointType.ThumbRight + 1];
		
		//first save the special rotations for the hip and spine
		_hipRight = HipRight.transform.position - HipLeft.transform.position;
		_hipRight = HipOverride.transform.InverseTransformDirection(_hipRight);
		
		_chestRight = ShoulderRight.transform.position - ShoulderLeft.transform.position;
		_chestRight = SpineMid.transform.InverseTransformDirection(_chestRight);
		
		//get direction of all other bones
		for( int ii = 0; ii < (int)Kinect2.JointType.ThumbRight- 4; ii++)
		{
			if((_nullMask & (uint)(1 << ii)) <= 0)
			{
				//if the bone is the end of a limb, get direction from this bone to one of the extras (hand or foot).
				if(ii % 4 == 3 && ((_nullMask & (uint)(1 << (ii/4) + (int)Kinect2.JointType.ThumbRight - 4)) <= 0))
				{
					_boneDir[ii] = _bones[(ii/4) + (int)Kinect2.JointType.ThumbRight - 4].transform.position - _bones[ii].transform.position;
				}
				//if the bone is the hip_override (at boneindex Hip_Left, get direction from average of left and right hips
				else if(ii == (int)Kinect2.JointType.HipLeft && HipLeft != null && HipRight != null)
				{
					_boneDir[ii] = ((HipRight.transform.position + HipLeft.transform.position) / 2.0f) - HipOverride.transform.position;
				}
				//otherwise, get the vector from this bone to the next.
				else if((_nullMask & (uint)(1 << ii+1)) <= 0)
				{
					_boneDir[ii] = _bones[ii+1].transform.position - _bones[ii].transform.position;
				}
				else
				{
					continue;
				}
				//Since the spine of the kinect data is ~40 degrees back from the hip,
				//check what angle the spine is at and rotate the saved direction back to match the data
				if(ii == (int)Kinect2.JointType.SpineMid)
				{
					float angle = Vector3.Angle(transform.up,_boneDir[ii]);
					_boneDir[ii] = Quaternion.AngleAxis(angle,transform.right) * _boneDir[ii];
				}
				//transform the direction into local space.
				_boneDir[ii] = _bones[ii].transform.InverseTransformDirection(_boneDir[ii]);
			}
		}
		//make _chestRight orthogonal to the direction of the spine.
		_chestRight -= Vector3.Project(_chestRight, _boneDir[(int)Kinect2.JointType.SpineMid]);
		//make _hipRight orthogonal to the direction of the hip override
		Vector3.OrthoNormalize(ref _boneDir[(int)Kinect2.JointType.HipLeft],ref _hipRight);
		// Root
		Root.transform.localRotation = Quaternion.Euler(fRotRootX, 0.0f, 0.0f);
	}
Пример #37
0
    /// <summary>
    /// Auto-detects the first available motion-capture device at startup,
    /// probing in priority order: Perception Neuron, Kinect v1, then Kinect v2.
    /// On success, sets eAutoMotionCaptureDevicesSelecter / bCheckedDevices and
    /// returns early; Kinect v2 detection completes asynchronously in an event.
    /// Also configures the FBX exporter for custom-frame animation output.
    /// </summary>
    void Start()
    {
        // FBX Exporter for Unity (Sync Animation Custom Frame)
        if (sFBXExporterForUnity != null)
        {
            if (sFBXExporterForUnity.enabled)
            {
                sFBXExporterForUnity.bOutAnimation            = false;
                sFBXExporterForUnity.bOutAnimationCustomFrame = true;
            }
        }

        // Perception Neuron: a successful probe connection means the device is
        // present; tear the probe connection down again immediately.
        NeuronSource source = CreateConnection(Neuron_address, Neuron_port, Neuron_commandServerPort, Neuron_socketType);

        if (source != null)
        {
            eAutoMotionCaptureDevicesSelecter = EAutoMotionCaptureDevicesSelecter.ePerceptionNeuron;
            // Fixed: removed a redundant second null check on `source` here —
            // this branch is already guarded by `source != null`.
            source.OnDestroy();
            bCheckedDevices = true;
            if (bDebugLog)
            {
                Debug.Log("Auto Devices Selecter Enabled [Perception Neuron]");
            }
            return;
        }

        // Kinect1: NuiInitialize returns 0 (S_OK) when a sensor is available.
        // Fixed: the original wrote "if (hr != 0) { } else { ... }" with an
        // empty then-branch; inverted to the direct success check.
        int hr = NativeMethods.NuiInitialize(NuiInitializeFlags.UsesDepthAndPlayerIndex | NuiInitializeFlags.UsesSkeleton | NuiInitializeFlags.UsesColor);

        if (hr == 0)
        {
            eAutoMotionCaptureDevicesSelecter = EAutoMotionCaptureDevicesSelecter.eKinect1;
            NativeMethods.NuiShutdown();
            bCheckedDevices = true;
            if (bDebugLog)
            {
                Debug.Log("Auto Devices Selecter Enabled [Kinect1]");
            }
            return;
        }

        // Kinect2: availability is only reported asynchronously, so subscribe
        // to IsAvailableChanged and finish the selection from the callback.
        Kinect2.KinectSensor _Sensor = Kinect2.KinectSensor.GetDefault();
        if (_Sensor != null)
        {
            if (!_Sensor.IsOpen)
            {
                _Sensor.Open();
                _Sensor.IsAvailableChanged += (sender, evt) => {
                    if (!bCheckedDevices)
                    {
                        if (_Sensor.IsAvailable)
                        {
                            eAutoMotionCaptureDevicesSelecter = EAutoMotionCaptureDevicesSelecter.eKinect2;
                            // Fixed: removed a redundant null check — the lambda
                            // captures _Sensor from a branch where it is non-null.
                            if (bDebugLog)
                            {
                                Debug.Log("[Kinect2] KinectSensor Close");
                            }
                            _Sensor.Close();
                        }
                        bCheckedDevices = true;
                        if (bDebugLog)
                        {
                            Debug.Log("Auto Devices Selecter Enabled [Kinect2]");
                        }
                        return;
                    }
                };
            }
        }
    }
Пример #38
0
    /// <summary>
    /// Auto-detects a motion-capture device at startup. When the selector is
    /// eAuto, probes in priority order (Perception Neuron connections 1-8,
    /// Kinect v1, Kinect v2); otherwise verifies only the pre-selected device.
    /// Falls through to eNone when nothing responds. Kinect v2 availability is
    /// resolved asynchronously via the IsAvailableChanged event.
    /// </summary>
    void Start()
    {
        // FBX Exporter for Unity (Sync Animation Custom Frame)
        if (sFBXExporterForUnity != null)
        {
            if (sFBXExporterForUnity.enabled)
            {
                sFBXExporterForUnity.bOutAnimation            = false;
                sFBXExporterForUnity.bOutAnimationCustomFrame = true;
            }
        }
        else
        {
            // Exporter reference not assigned in the inspector — try to find it
            // in the scene by its configured object name.
            if (GameObject.Find(strFBXExporterForUnity) != null)
            {
                sFBXExporterForUnity = GameObject.Find(strFBXExporterForUnity).GetComponent <FBXExporterForUnity>();
            }
        }

        switch (eAutoMotionCaptureDevicesSelecter)
        {
        case EAutoMotionCaptureDevicesSelecter.eAuto:
        {
            // Perception Neuron: probe each configured connection (up to 8);
            // a successful connection is immediately destroyed again — this is
            // only a presence check.
            int iSelect = 0;
            foreach (SNeuronConnection sNeuronConnection in sNeuronConnections)
            {
                sNeuronConnections[iSelect].sNeuronSource = NeuronConnection.CreateConnection(sNeuronConnection.address, sNeuronConnection.port, sNeuronConnection.commandServerPort, sNeuronConnection.socketType);
                if (sNeuronConnections[iSelect].sNeuronSource != null)
                {
                    eAutoMotionCaptureDevicesSelecter = (EAutoMotionCaptureDevicesSelecter)(EAutoMotionCaptureDevicesSelecter.ePerceptionNeuron_1 + iSelect);
                    NeuronConnection.DestroyConnection(sNeuronConnections[iSelect].sNeuronSource);
                    bCheckedDevices = true;
                    if (bDebugLog)
                    {
                        Debug.Log("Auto Devices Selecter Enabled [Perception Neuron_" + (iSelect + 1) + "]");
                    }
                    return;
                }
                iSelect++;
            }

            // Kinect1: NuiInitialize returns 0 (S_OK) when a sensor is present;
            // shut it down right away — this is only a presence check.
            int hr = NativeMethods.NuiInitialize(NuiInitializeFlags.UsesDepthAndPlayerIndex | NuiInitializeFlags.UsesSkeleton | NuiInitializeFlags.UsesColor);
            if (hr == 0)
            {
                eAutoMotionCaptureDevicesSelecter = EAutoMotionCaptureDevicesSelecter.eKinect1;
                NativeMethods.NuiShutdown();
                bCheckedDevices = true;
                if (bDebugLog)
                {
                    Debug.Log("Auto Devices Selecter Enabled [Kinect1]");
                }
                return;
            }

            // Kinect2: open the sensor and wait for IsAvailableChanged; the
            // selection completes inside the event handler, not in this frame.
            Kinect2.KinectSensor _Sensor = Kinect2.KinectSensor.GetDefault();
            if (_Sensor != null)
            {
                //if (!_Sensor.IsOpen)
                {
                    _Sensor.Open();
                    if (_Sensor.IsOpen)
                    {
                        _Sensor.IsAvailableChanged += (sender, evt) =>
                        {
                            if (!bCheckedDevices)
                            {
                                if (_Sensor.IsAvailable)
                                {
                                    eAutoMotionCaptureDevicesSelecter = EAutoMotionCaptureDevicesSelecter.eKinect2;
                                    bCheckedDevices = true;
                                    if (bDebugLog)
                                    {
                                        Debug.Log("Auto Devices Selecter Enabled [Kinect2]");
                                    }
                                    return;
                                }
                            }
                        };
                    }
                }
            }
            break;
        }

        case EAutoMotionCaptureDevicesSelecter.ePerceptionNeuron_1:
        case EAutoMotionCaptureDevicesSelecter.ePerceptionNeuron_2:
        case EAutoMotionCaptureDevicesSelecter.ePerceptionNeuron_3:
        case EAutoMotionCaptureDevicesSelecter.ePerceptionNeuron_4:
        case EAutoMotionCaptureDevicesSelecter.ePerceptionNeuron_5:
        case EAutoMotionCaptureDevicesSelecter.ePerceptionNeuron_6:
        case EAutoMotionCaptureDevicesSelecter.ePerceptionNeuron_7:
        case EAutoMotionCaptureDevicesSelecter.ePerceptionNeuron_8:
        {
            // Pre-selected Perception Neuron connection: verify just that one.
            int iSelect = (int)eAutoMotionCaptureDevicesSelecter - (int)EAutoMotionCaptureDevicesSelecter.ePerceptionNeuron_1;
            sNeuronConnections[iSelect].sNeuronSource = NeuronConnection.CreateConnection(sNeuronConnections[iSelect].address, sNeuronConnections[iSelect].port, sNeuronConnections[iSelect].commandServerPort, sNeuronConnections[iSelect].socketType);
            if (sNeuronConnections[iSelect].sNeuronSource != null)
            {
                eAutoMotionCaptureDevicesSelecter = (EAutoMotionCaptureDevicesSelecter)(EAutoMotionCaptureDevicesSelecter.ePerceptionNeuron_1 + iSelect);
                NeuronConnection.DestroyConnection(sNeuronConnections[iSelect].sNeuronSource);
                bCheckedDevices = true;
                if (bDebugLog)
                {
                    Debug.Log("Auto Devices Selecter Enabled [Perception Neuron_" + (iSelect + 1) + "]");
                }
                return;
            }
            break;
        }

        case EAutoMotionCaptureDevicesSelecter.eKinect1:
        {
            // Pre-selected Kinect1: presence check only (shutdown right after).
            int hr = NativeMethods.NuiInitialize(NuiInitializeFlags.UsesDepthAndPlayerIndex | NuiInitializeFlags.UsesSkeleton | NuiInitializeFlags.UsesColor);
            if (hr == 0)
            {
                eAutoMotionCaptureDevicesSelecter = EAutoMotionCaptureDevicesSelecter.eKinect1;
                NativeMethods.NuiShutdown();
                bCheckedDevices = true;
                if (bDebugLog)
                {
                    Debug.Log("Auto Devices Selecter Enabled [Kinect1]");
                }
                return;
            }
            break;
        }

        case EAutoMotionCaptureDevicesSelecter.eKinect2:
        {
            // Pre-selected Kinect2: if the sensor is already open, accept it
            // synchronously; otherwise open it and wait for the event.
            Kinect2.KinectSensor _Sensor = Kinect2.KinectSensor.GetDefault();
            if (_Sensor != null)
            {
                if (!_Sensor.IsOpen)
                {
                    _Sensor.Open();
                    if (_Sensor.IsOpen)
                    {
                        _Sensor.IsAvailableChanged += (sender, evt) =>
                        {
                            if (!bCheckedDevices)
                            {
                                if (_Sensor.IsAvailable)
                                {
                                    eAutoMotionCaptureDevicesSelecter = EAutoMotionCaptureDevicesSelecter.eKinect2;
                                    bCheckedDevices = true;
                                    if (bDebugLog)
                                    {
                                        Debug.Log("Auto Devices Selecter Enabled [Kinect2]");
                                    }
                                    return;
                                }
                            }
                        };
                    }
                }
                else
                {
                    eAutoMotionCaptureDevicesSelecter = EAutoMotionCaptureDevicesSelecter.eKinect2;
                    bCheckedDevices = true;
                    if (bDebugLog)
                    {
                        Debug.Log("Auto Devices Selecter Enabled [Kinect2] -Sensor Opened-");
                    }
                    return;
                }
            }
            break;
        }
        }
        // Nothing responded synchronously: fall back to no device. NOTE(review):
        // a pending Kinect2 IsAvailableChanged callback may still overwrite this
        // later — that appears intentional, but confirm against callers.
        eAutoMotionCaptureDevicesSelecter = EAutoMotionCaptureDevicesSelecter.eNone;
    }
Пример #39
0
    // Opens the socket link used to ship coordinates, then initializes the
    // Kinect color stream plus the line renderer and the texture/byte buffer
    // that receive incoming color frames.
    void Start()
    {
        // Socket handshake.
        Connect("127.0.0.1", "Hej");

        _Sensor = KinectSensor.GetDefault();
        if (_Sensor == null)
        {
            return;
        }

        // Reset previous-point tracking and the drawing offsets.
        prevX = -1;
        prevY = -1;
        offsetX = -250;
        offsetY = -250;

        _Reader = _Sensor.ColorFrameSource.OpenReader();
        lineRenderer = gameObject.AddComponent<LineRenderer>();

        var colorDesc = _Sensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Rgba);
        ColorWidth = colorDesc.Width;
        ColorHeight = colorDesc.Height;

        _Texture = new Texture2D(colorDesc.Width, colorDesc.Height, TextureFormat.RGBA32, false);
        _Data = new byte[colorDesc.BytesPerPixel * colorDesc.LengthInPixels];

        if (!_Sensor.IsOpen)
        {
            _Sensor.Open();
        }
    }
Пример #40
0
    private Vector3 _chestRight;        // right vector of the chest

    /// <summary>
    /// Opens the default Kinect v2 sensor, subscribes a multi-source reader for
    /// body frames, and allocates the reduced 5-bone rig plus the per-joint
    /// position/rotation buffers used elsewhere in this component.
    /// </summary>
    void Start()
    {
        _Sensor = Kinect2.KinectSensor.GetDefault();
        if (_Sensor != null)
        {
            // Fixed: the original if/else duplicated the reader setup verbatim
            // in both branches; only the debug text and Open() call differed.
            if (!_Sensor.IsOpen)
            {
                if (bDebugLog)
                {
                    Debug.Log("[Kinect2] KinectSensor Open");
                }
                _Sensor.Open();
            }
            else if (bDebugLog)
            {
                Debug.Log("[Kinect2] KinectSensor Opened");
            }

            // Body frames only — Color/Depth/Infrared sources were deliberately
            // disabled in the original and are not requested here.
            _Reader = _Sensor.OpenMultiSourceFrameReader(Kinect2.FrameSourceTypes.Body);
            _Reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
        }

        // Reduced 5-bone rig (the full 25-joint mapping was removed upstream);
        // index order must match the bone lookups elsewhere in this class.
        _bones = new GameObject[5] {
            Head, HandLeft, HandRight, KneeLeft, KneeRight
        };

        // Buffers stay sized for the full Kinect2 joint range, indexed by
        // Kinect2.JointType, even though only 5 bones are assigned.
        _vecbones  = new Vector3[(int)Kinect2.JointType.ThumbRight + 1];
        _vecbones2 = new Vector3[(int)Kinect2.JointType.ThumbRight + 1];
        _qbones    = new Quaternion[(int)Kinect2.JointType.ThumbRight + 1];
        _qbones2   = new Quaternion[(int)Kinect2.JointType.ThumbRight + 1];

        // Record which of the assigned bones are missing as a bitmask.
        for (int ii = 0; ii < _bones.Length; ii++)
        {
            if (_bones[ii] == null)
            {
                _nullMask |= (uint)(1 << ii);
            }
        }

        // Base rotations / bone directions. The legacy hip/spine calibration
        // that used to populate these arrays was already commented out; the
        // dead code has been removed, leaving the (still-referenced) arrays.
        _baseRotation = new Quaternion[(int)Kinect2.JointType.ThumbRight + 1];
        _boneDir      = new Vector3[(int)Kinect2.JointType.ThumbRight + 1];
    }
Пример #41
0
    // Acquires the default Kinect sensor, opens a body-frame reader on it, and
    // ensures the sensor itself is running.
    void Start()
    {
        _Sensor = KinectSensor.GetDefault();

        Util.Log("Started BodySourceManager. Found Sensor: {0}", (_Sensor != null));

        if (_Sensor == null)
        {
            return;
        }

        _Reader = _Sensor.BodyFrameSource.OpenReader();

        if (!_Sensor.IsOpen)
        {
            _Sensor.Open();
        }
    }
Пример #42
0
    /// <summary>
    /// Opens the Kinect sensor and wires up Visual Gesture Builder: creates a
    /// frame source (initially untracked), opens a paused reader hooked to
    /// GestureFrameArrived, loads the gesture database from StreamingAssets,
    /// and registers every gesture it contains.
    /// </summary>
    void Start()
    {
        fasterhappened = false;
        slowerhappened = false;

        _Sensor = KinectSensor.GetDefault();
        if(_Sensor != null)
        {
            if(!_Sensor.IsOpen)
            {
                _Sensor.Open();
            }

            // Set up Gesture Source (tracking id 0 = no body assigned yet)
            _Source = VisualGestureBuilderFrameSource.Create(_Sensor, 0);

            // open the reader for the vgb frames; paused until a body is tracked
            _Reader = _Source.OpenReader();
            if(_Reader != null)
            {
                _Reader.IsPaused = true;
                _Reader.FrameArrived += GestureFrameArrived;
            }

            // Fixed: the database path was computed here but then ignored in
            // favor of a hard-coded "Assets/Streaming Assets/gestures.gbd"
            // literal (the original TODO flagged exactly this). Use the
            // configured databasePath under StreamingAssets instead.
            string path = System.IO.Path.Combine(Application.streamingAssetsPath, databasePath);
            _Database = VisualGestureBuilderDatabase.Create(path);

            // Load all gestures from the database into the frame source.
            IList<Gesture> gesturesList = _Database.AvailableGestures;
            for(int g = 0; g < gesturesList.Count; g++)
            {
                Gesture gesture = gesturesList[g];
                _Source.AddGesture(gesture);
            }
        }
    }
Пример #43
0
    /// <summary>
    /// Acquires the default Kinect v2 sensor and builds a SensorData block for
    /// it: records camera characteristics, allocates an image buffer for each
    /// requested frame source, opens per-source readers (or one multi-source
    /// reader when bUseMultiSource is set), and opens the sensor.
    /// </summary>
    /// <param name="dwFlags">Bit flags selecting which frame sources to enable.</param>
    /// <param name="sensorAngle">Sensor tilt angle (unused by this overload).</param>
    /// <param name="bUseMultiSource">When true, a single multi-source reader is
    /// opened instead of one reader per source.</param>
    /// <returns>The populated SensorData, or null when no sensor is present.</returns>
    public KinectInterop.SensorData OpenDefaultSensor(KinectInterop.FrameSource dwFlags, float sensorAngle, bool bUseMultiSource)
    {
        kinectSensor = KinectSensor.GetDefault();
        if(kinectSensor == null)
        {
            return null;
        }

        KinectInterop.SensorData sensorData = new KinectInterop.SensorData();
        //sensorFlags = dwFlags;

        coordMapper = kinectSensor.CoordinateMapper;

        this.bodyCount = kinectSensor.BodyFrameSource.BodyCount;
        sensorData.bodyCount = this.bodyCount;
        sensorData.jointCount = 25;

        // Camera characteristics used downstream for projection/overlay math.
        sensorData.depthCameraFOV = 60f;
        sensorData.colorCameraFOV = 53.8f;
        sensorData.depthCameraOffset = -0.03f;

        if((dwFlags & KinectInterop.FrameSource.TypeBody) != 0)
        {
            if(!bUseMultiSource)
            {
                bodyFrameReader = kinectSensor.BodyFrameSource.OpenReader();
            }

            bodyData = new Body[sensorData.bodyCount];
        }

        // Color dimensions are always recorded, even when color isn't requested.
        var colorDesc = kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Rgba);
        sensorData.colorImageWidth = colorDesc.Width;
        sensorData.colorImageHeight = colorDesc.Height;

        if((dwFlags & KinectInterop.FrameSource.TypeColor) != 0)
        {
            if(!bUseMultiSource)
            {
                colorFrameReader = kinectSensor.ColorFrameSource.OpenReader();
            }

            sensorData.colorImage = new byte[colorDesc.BytesPerPixel * colorDesc.LengthInPixels];
        }

        var depthDesc = kinectSensor.DepthFrameSource.FrameDescription;
        sensorData.depthImageWidth = depthDesc.Width;
        sensorData.depthImageHeight = depthDesc.Height;

        if((dwFlags & KinectInterop.FrameSource.TypeDepth) != 0)
        {
            if(!bUseMultiSource)
            {
                depthFrameReader = kinectSensor.DepthFrameSource.OpenReader();
            }

            sensorData.depthImage = new ushort[depthDesc.LengthInPixels];
        }

        if((dwFlags & KinectInterop.FrameSource.TypeBodyIndex) != 0)
        {
            if(!bUseMultiSource)
            {
                bodyIndexFrameReader = kinectSensor.BodyIndexFrameSource.OpenReader();
            }

            sensorData.bodyIndexImage = new byte[kinectSensor.BodyIndexFrameSource.FrameDescription.LengthInPixels];
        }

        if((dwFlags & KinectInterop.FrameSource.TypeInfrared) != 0)
        {
            if(!bUseMultiSource)
            {
                infraredFrameReader = kinectSensor.InfraredFrameSource.OpenReader();
            }

            sensorData.infraredImage = new ushort[kinectSensor.InfraredFrameSource.FrameDescription.LengthInPixels];
        }

        // Ensure the sensor is running.
        if(!kinectSensor.IsOpen)
        {
            kinectSensor.Open();
        }

        // Single combined reader replaces the per-source readers when requested.
        if(bUseMultiSource && dwFlags != KinectInterop.FrameSource.TypeNone && kinectSensor.IsOpen)
        {
            multiSourceFrameReader = kinectSensor.OpenMultiSourceFrameReader((FrameSourceTypes)dwFlags);
        }

        return sensorData;
    }