/// <summary>
/// Opens the default Kinect v2 sensor, allocates per-stream buffers for the frame
/// sources requested in dwFlags, waits briefly for the sensor to report availability,
/// and returns the populated SensorData (or null when no sensor is present).
/// When bUseMultiSource is true, a single combined reader is opened at the end
/// instead of one dedicated reader per stream. sensorAngle is not used here.
/// </summary>
public KinectInterop.SensorData OpenDefaultSensor(KinectInterop.FrameSource dwFlags, float sensorAngle, bool bUseMultiSource)
    {
        KinectInterop.SensorData sensorData = new KinectInterop.SensorData();
        sensorFlags = dwFlags;

        kinectSensor = KinectSensor.GetDefault();
        if(kinectSensor == null)
            return null;

        coordMapper = kinectSensor.CoordinateMapper;

        this.bodyCount = kinectSensor.BodyFrameSource.BodyCount;
        sensorData.bodyCount = this.bodyCount;
        sensorData.jointCount = 25; // Kinect v2 skeleton joint count

        // Fixed optical characteristics used for this sensor model.
        sensorData.depthCameraFOV = 60f;
        sensorData.colorCameraFOV = 53.8f;
        sensorData.depthCameraOffset = -0.05f;
        sensorData.faceOverlayOffset = -0.04f;

        if((dwFlags & KinectInterop.FrameSource.TypeBody) != 0)
        {
            // A dedicated reader is only opened when the multi-source reader is not used.
            if(!bUseMultiSource)
                bodyFrameReader = kinectSensor.BodyFrameSource.OpenReader();

            bodyData = new Body[sensorData.bodyCount];
        }

        // Color frames are described (and later buffered) in RGBA layout.
        var frameDesc = kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Rgba);
        sensorData.colorImageWidth = frameDesc.Width;
        sensorData.colorImageHeight = frameDesc.Height;

        if((dwFlags & KinectInterop.FrameSource.TypeColor) != 0)
        {
            if(!bUseMultiSource)
                colorFrameReader = kinectSensor.ColorFrameSource.OpenReader();

            sensorData.colorImage = new byte[frameDesc.BytesPerPixel * frameDesc.LengthInPixels];
        }

        sensorData.depthImageWidth = kinectSensor.DepthFrameSource.FrameDescription.Width;
        sensorData.depthImageHeight = kinectSensor.DepthFrameSource.FrameDescription.Height;

        if((dwFlags & KinectInterop.FrameSource.TypeDepth) != 0)
        {
            if(!bUseMultiSource)
                depthFrameReader = kinectSensor.DepthFrameSource.OpenReader();

            sensorData.depthImage = new ushort[kinectSensor.DepthFrameSource.FrameDescription.LengthInPixels];
        }

        if((dwFlags & KinectInterop.FrameSource.TypeBodyIndex) != 0)
        {
            if(!bUseMultiSource)
                bodyIndexFrameReader = kinectSensor.BodyIndexFrameSource.OpenReader();

            sensorData.bodyIndexImage = new byte[kinectSensor.BodyIndexFrameSource.FrameDescription.LengthInPixels];
        }

        if((dwFlags & KinectInterop.FrameSource.TypeInfrared) != 0)
        {
            if(!bUseMultiSource)
                infraredFrameReader = kinectSensor.InfraredFrameSource.OpenReader();

            sensorData.infraredImage = new ushort[kinectSensor.InfraredFrameSource.FrameDescription.LengthInPixels];
        }

        // The IsOpen guard was deliberately commented out; Open() is called unconditionally.
        //if(!kinectSensor.IsOpen)
        {
            //Debug.Log("Opening sensor, available: " + kinectSensor.IsAvailable);
            kinectSensor.Open();
        }

        // NOTE(review): this busy-wait blocks the calling (main) thread for up to 3
        // seconds while waiting for the driver — consider a coroutine/poll instead.
        float fWaitTime = Time.realtimeSinceStartup + 3f;
        while(!kinectSensor.IsAvailable && Time.realtimeSinceStartup < fWaitTime)
        {
            // wait for sensor to open
        }

        Debug.Log("K2-sensor " + (kinectSensor.IsOpen ? "opened" : "closed") +
                  ", available: " + kinectSensor.IsAvailable);

        if(bUseMultiSource && dwFlags != KinectInterop.FrameSource.TypeNone && kinectSensor.IsOpen)
        {
            // Mask to the low 6 bits so only flags valid for FrameSourceTypes pass through.
            multiSourceFrameReader = kinectSensor.OpenMultiSourceFrameReader((FrameSourceTypes)((int)dwFlags & 0x3F));
        }

        return sensorData;
    }
Exemplo n.º 2
0
	// Use this for initialization.
	// Opens the Kinect color stream and computes the screen rectangles used to
	// draw the camera image as the scene background, preserving aspect ratio.
	void Start () {

		mySensor = KinectSensor.GetDefault();

		if (mySensor != null)
		{
			// Total array of data representing a single rendered frame
			colorFrameData = new byte[colorWidth * colorHeight * bytes_per_pixel];

			// Destination texture for the raw BGRA color frame.
			backgroundTex = new Texture2D(colorWidth, colorHeight, TextureFormat.BGRA32, false);

			if (!mySensor.IsOpen)
			{
				mySensor.Open();
			}

			// Only the color stream is needed for background rendering.
			msFrameReader = mySensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color);

			//Rendering user as part of the Unity Scene background via Main Camera
			Rect cameraRect = Camera.main.pixelRect;
			float rectHeight = cameraRect.height;
			float rectWidth = cameraRect.width;

			// Shrink the larger dimension so the image keeps the color stream's aspect ratio.
			if (rectWidth > rectHeight)
				rectWidth = rectHeight * colorWidth / colorHeight;
			else
				rectHeight = rectWidth * colorHeight / colorWidth;

			// Center the image; the GUI rect uses a negative height because GUI
			// coordinates grow downward, flipping the image vertically.
			float foregroundOfsX = (cameraRect.width - rectWidth) / 2;
			float foregroundOfsY = (cameraRect.height - rectHeight) / 2;
			foregroundImgRect = new Rect(foregroundOfsX, foregroundOfsY, rectWidth, rectHeight);
			foregroundGuiRect = new Rect(foregroundOfsX, cameraRect.height - foregroundOfsY, rectWidth, -rectHeight);
		}
	} //End of Start()
Exemplo n.º 3
0
    /// <summary>
    /// Acquires the default Kinect sensor, opens a combined Depth+Body reader,
    /// allocates the depth and body buffers, and opens the sensor. Calls
    /// ExitWithLog when the sensor or reader cannot be obtained.
    /// </summary>
    public KinectManager()
    {
        _Sensor = Kinect.KinectSensor.GetDefault();
        if (_Sensor == null)
        {
            // BUGFIX: message typo corrected ("avalibalbe" -> "available").
            ExitWithLog("Kinect Sensor not available");
            // BUGFIX: previously execution fell through and dereferenced the null
            // sensor below. If ExitWithLog does not terminate the process
            // immediately, bail out here instead of crashing with an NRE.
            return;
        }

        _Reader = _Sensor.OpenMultiSourceFrameReader(
            Kinect.FrameSourceTypes.Depth |
            Kinect.FrameSourceTypes.Body
            );
        if (_Reader == null)
        {
            ExitWithLog("Fail to load multiframe source reader.");
            return;
        }

        DepthFrameDesc = _Sensor.DepthFrameSource.FrameDescription;
        Mapper         = _Sensor.CoordinateMapper;

        // Per-frame buffers sized from the sensor's own descriptions.
        DepthData = new ushort[DepthFrameDesc.LengthInPixels];
        _BodyData = new Kinect.Body[_Sensor.BodyFrameSource.BodyCount];

        if (!_Sensor.IsOpen)
        {
            _Sensor.Open();
        }
    }
    // Initializes the sensor, opens a combined Color+Depth reader, and
    // allocates the CPU buffers and textures for both streams.
    void Start () 
    {
        _Sensor = KinectSensor.GetDefault();

        if (_Sensor == null) 
        {
            return; // no sensor attached — nothing to set up
        }

        _Reader = _Sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth);

        // Color stream: described in RGBA, buffered on the CPU, rendered via a texture.
        var colorDesc = _Sensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Rgba);
        ColorWidth = colorDesc.Width;
        ColorHeight = colorDesc.Height;
        _ColorData = new byte[colorDesc.BytesPerPixel * colorDesc.LengthInPixels];
        _ColorTexture = new Texture2D(colorDesc.Width, colorDesc.Height, TextureFormat.RGBA32, false);

        // Depth stream: raw ushort buffer plus an ARGB visualization texture.
        var depthDesc = _Sensor.DepthFrameSource.FrameDescription;
        _DepthData = new ushort[depthDesc.LengthInPixels];
        _DepthTexture = new Texture2D(depthDesc.Width, depthDesc.Height, TextureFormat.ARGB32, false);

        if (!_Sensor.IsOpen)
        {
            _Sensor.Open();
        }
    }
Exemplo n.º 5
0
    /// <summary>
    /// Releases every per-stream reader, the multi-source reader and the
    /// coordinate mapper, then closes and drops the sensor itself.
    /// </summary>
    public void CloseSensor(KinectInterop.SensorData sensorData)
    {
        // Drop the coordinate mapper reference.
        coordMapper = null;

        // Dispose and clear each reader that may have been opened.
        bodyFrameReader?.Dispose();
        bodyFrameReader = null;

        bodyIndexFrameReader?.Dispose();
        bodyIndexFrameReader = null;

        colorFrameReader?.Dispose();
        colorFrameReader = null;

        depthFrameReader?.Dispose();
        depthFrameReader = null;

        infraredFrameReader?.Dispose();
        infraredFrameReader = null;

        multiSourceFrameReader?.Dispose();
        multiSourceFrameReader = null;

        // Finally close and release the sensor.
        if (kinectSensor != null)
        {
            if (kinectSensor.IsOpen)
            {
                kinectSensor.Close();
            }

            kinectSensor = null;
        }
    }
    // Acquires the sensor, opens a Body+BodyIndex+Depth multi-source reader,
    // and makes sure the sensor is open. Resets the new-frame flag first.
    void Awake () 
    {
        isNewFrame = false;
        _Sensor = KinectSensor.GetDefault();

        if (_Sensor == null)
        {
            return; // no sensor — readers cannot be opened
        }

        _Reader = _Sensor.OpenMultiSourceFrameReader(
            FrameSourceTypes.Body | FrameSourceTypes.BodyIndex | FrameSourceTypes.Depth);

        if (!_Sensor.IsOpen)
        {
            _Sensor.Open();
        }
    }
    // Shutdown: dispose the frame reader first, then close and release the sensor.
    void OnApplicationQuit()
    {
        _Reader?.Dispose();
        _Reader = null;

        if (_Sensor != null)
        {
            if (_Sensor.IsOpen)
            {
                _Sensor.Close();
            }
            _Sensor = null;
        }
    }
    // Shutdown: release the reader, then close and drop the sensor reference.
    void OnApplicationQuit()
    {
        reader?.Dispose();
        reader = null;

        if (sensor != null)
        {
            if (sensor.IsOpen)
            {
                sensor.Close();
            }
            sensor = null;
        }
    }
Exemplo n.º 9
0
	// Use this for initialization.
	// Opens the BodyIndex+Color+Depth streams and allocates the buffers used to
	// composite the user's color pixels into a depth-sized background texture.
	void Start()
	{

		mySensor = KinectSensor.GetDefault();

		if (mySensor != null)
		{
			// Per-frame CPU buffers: raw depth, body-index mask, raw color,
			// composited output pixels, and the depth->color mapping table.
			depthFrameData = new ushort[depthWidth * depthHeight];
			bodyIndexFrameData = new byte[depthWidth * depthHeight];
			colorFrameData = new byte[colorWidth * colorHeight * bytes_per_pixel];
			displayPixels = new byte[depthWidth * depthHeight * bytes_per_pixel];
			colorPoints = new ColorSpacePoint[depthWidth * depthHeight];

			// Output texture is depth-sized; mapped color pixels are written into it.
			ninjaTex = new Texture2D(depthWidth, depthHeight, TextureFormat.BGRA32, false);

			if (!mySensor.IsOpen)
			{
				mySensor.Open();
			}

			myCoordinateMapper = mySensor.CoordinateMapper;

			msFrameReader = mySensor.OpenMultiSourceFrameReader(FrameSourceTypes.BodyIndex |
				FrameSourceTypes.Color | FrameSourceTypes.Depth);

			//Rendering user as part of the Unity Scene background via Main Camera
			Rect cameraRect = Camera.main.pixelRect;
			float rectHeight = cameraRect.height;
			float rectWidth = cameraRect.width;

			// Fit into the camera rect while keeping the depth stream's aspect ratio.
			if (rectWidth > rectHeight)
				rectWidth = rectHeight * depthWidth / depthHeight;
			else
				rectHeight = rectWidth * depthHeight / depthWidth;

			// Center the image; the GUI rect uses a negative height to flip vertically
			// (GUI coordinates grow downward).
			float foregroundOfsX = (cameraRect.width - rectWidth) / 2;
			float foregroundOfsY = (cameraRect.height - rectHeight) / 2;
			foregroundImgRect = new Rect(foregroundOfsX, foregroundOfsY, rectWidth, rectHeight);
			foregroundGuiRect = new Rect(foregroundOfsX, cameraRect.height - foregroundOfsY, rectWidth, -rectHeight);
		}

	}
Exemplo n.º 10
0
    /// <summary>
    /// Singleton bootstrap plus sensor setup: opens the sensor and a combined
    /// Body+Color+Depth+Infrared reader, then initializes player-tracking state.
    /// </summary>
    void Start()
    {
        if (instance == null)
        {
            instance = this;
        }

        sensor = Kinect.KinectSensor.GetDefault();

        if (sensor != null)
        {
            sensor.Open();

            // BUGFIX: the reader was previously opened outside this null check,
            // which threw a NullReferenceException whenever no sensor was attached.
            reader = sensor.OpenMultiSourceFrameReader(Kinect.FrameSourceTypes.Body | Kinect.FrameSourceTypes.Color | Kinect.FrameSourceTypes.Depth | Kinect.FrameSourceTypes.Infrared);
            reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
        }

        // Player-tracking state is initialized regardless of sensor presence.
        playersIdBreak = new List <ulong>();
        playersIdBreak.Clear();
        players     = new bool[6];
        countBodies = 0;
    }
Exemplo n.º 11
0
    // Acquires the default sensor, caches its coordinate mapper, opens it, and —
    // once open — creates a combined Color+Depth+BodyIndex reader. Logs an error
    // when no sensor is found.
    void InitializeDefaultSensor()
    {
        m_pKinectSensor = KinectSensor.GetDefault();

        if (m_pKinectSensor == null)
        {
            UnityEngine.Debug.LogError("No ready Kinect found!");
            return;
        }

        // Initialize the Kinect and get coordinate mapper and the frame reader
        m_pCoordinateMapper = m_pKinectSensor.CoordinateMapper;

        m_pKinectSensor.Open();
        if (m_pKinectSensor.IsOpen)
        {
            m_pMultiSourceFrameReader = m_pKinectSensor.OpenMultiSourceFrameReader(
                FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.BodyIndex);
        }
    }
Exemplo n.º 12
0
	// Use this for initialization.
	// Opens Body/Depth/BodyIndex streams, creates data-logging writers, and
	// computes the background rectangles for rendering the user on screen.
	void Start () {
		mySensor = KinectSensor.GetDefault();

		if (mySensor != null)
		{
			if (!mySensor.IsOpen)
			{
				mySensor.Open();
			}

			//Writing data to an output file for graphing analysis
			// NOTE(review): these StreamWriters are never closed in this block —
			// confirm they are flushed/disposed elsewhere (e.g. OnApplicationQuit),
			// otherwise buffered data may be lost.
			sw_cm_x = new StreamWriter("PLAYER_CM_X");
			sw_cm_y = new StreamWriter("PLAYER_CM_Y");
			sw_t = new StreamWriter("PLAYER_T");

			ninjaTex = new Texture2D(colorWidth, colorHeight, TextureFormat.BGRA32, false);

			// Body/depth/body-index streams only; the color stream is not captured here.
			msFrameReader = mySensor.OpenMultiSourceFrameReader(FrameSourceTypes.Body | FrameSourceTypes.Depth |
				FrameSourceTypes.BodyIndex);

			//Rendering user as part of the Unity Scene background via Main Camera
			Rect cameraRect = Camera.main.pixelRect;
			float rectHeight = cameraRect.height;
			float rectWidth = cameraRect.width;

			// Preserve the color stream's aspect ratio inside the camera rect.
			if (rectWidth > rectHeight)
				rectWidth = rectHeight * colorWidth / colorHeight;
			else
				rectHeight = rectWidth * colorHeight / colorWidth;

			// Center the image; the GUI rect is flipped vertically (negative height).
			float foregroundOfsX = (cameraRect.width - rectWidth) / 2;
			float foregroundOfsY = (cameraRect.height - rectHeight) / 2;
			foregroundImgRect = new Rect(foregroundOfsX, foregroundOfsY, rectWidth, rectHeight);
			foregroundGuiRect = new Rect(foregroundOfsX, cameraRect.height - foregroundOfsY, rectWidth, -rectHeight);
			//UNNECESSARY?
		}
	}
	private Vector3 _chestRight; //right vector of the chest
	
	// Use this for initialization.
	// Maps Unity GameObjects onto the Kinect v2 joint layout, records each bone's
	// rest rotation/direction in bone-local space, and marks missing bones in a
	// bitmask so the per-frame update can skip them.
	void Start () {
        if (sFBXExporterForUnity != null)
        {
            sFBXExporterForUnity.bOutAnimation = false;
            sFBXExporterForUnity.bOutAnimationCustomFrame = true;
        }
		_Sensor = Kinect2.KinectSensor.GetDefault();
        if (_Sensor != null)
        {
            if (!_Sensor.IsOpen)
            {
                if (bDebugLog) Debug.Log("[Kinect2] KinectSensor Open");
                _Sensor.Open();
                _Reader = _Sensor.OpenMultiSourceFrameReader(/*Kinect2.FrameSourceTypes.Color |
				                                             Kinect2.FrameSourceTypes.Depth |
				                                             Kinect2.FrameSourceTypes.Infrared |
				                                             */
                                                             Kinect2.FrameSourceTypes.Body);
                _Reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
            }
            else
            {
                if (bDebugLog) Debug.Log("[Kinect2] KinectSensor Opened");
                _Reader = _Sensor.OpenMultiSourceFrameReader(/*Kinect2.FrameSourceTypes.Color |
				                                             Kinect2.FrameSourceTypes.Depth |
				                                             Kinect2.FrameSourceTypes.Infrared |
				                                             */
                                                             Kinect2.FrameSourceTypes.Body);
                _Reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
            }
        }
		//store bones in a list for easier access, everything except Hip_Center will be one
		//higher than the corresponding Kinect2.NuiSkeletonPositionIndex (because of the hip_override)
		_bones = new GameObject[(int)Kinect2.JointType.ThumbRight + 1] {
			null, SpineMid, SpineShoulder, Neck,
			CollarLeft, ShoulderLeft, ElbowLeft, WristLeft,
			CollarRight, ShoulderRight, ElbowRight, WristRight,
			HipOverride, HipLeft, KneeLeft, AnkleLeft,
			null, HipRight, KneeRight, AnkleRight,
			Head, HandLeft, HandRight, FootLeft, FootRight};
			//SpineShoulder, HandTipLeft, ThumbLeft, HandTipRight, ThumbRight, FootLeft, FootRight};

		// Per-joint scratch buffers for positions and rotations (current/previous).
		_vecbones = new Vector3[(int)Kinect2.JointType.ThumbRight + 1];
		_vecbones2 = new Vector3[(int)Kinect2.JointType.ThumbRight + 1];
		_qbones = new Quaternion[(int)Kinect2.JointType.ThumbRight + 1];
		_qbones2 = new Quaternion[(int)Kinect2.JointType.ThumbRight + 1];

		//determine which bones are not available
		for(int ii = 0; ii < _bones.Length; ii++)
		{
			if(_bones[ii] == null)
			{
				_nullMask |= (uint)(1 << ii);
			}
		}
		
		//store the base rotations and bone directions (in bone-local space)
		_baseRotation = new Quaternion[(int)Kinect2.JointType.ThumbRight + 1];
		_boneDir = new Vector3[(int)Kinect2.JointType.ThumbRight + 1];
		
		//first save the special rotations for the hip and spine
		_hipRight = HipRight.transform.position - HipLeft.transform.position;
		_hipRight = HipOverride.transform.InverseTransformDirection(_hipRight);
		
		_chestRight = ShoulderRight.transform.position - ShoulderLeft.transform.position;
		_chestRight = SpineMid.transform.InverseTransformDirection(_chestRight);
		
		//get direction of all other bones
		for( int ii = 0; ii < (int)Kinect2.JointType.ThumbRight- 4; ii++)
		{
			// Skip bones flagged as missing in the null mask.
			if((_nullMask & (uint)(1 << ii)) <= 0)
			{
				//if the bone is the end of a limb, get direction from this bone to one of the extras (hand or foot).
				if(ii % 4 == 3 && ((_nullMask & (uint)(1 << (ii/4) + (int)Kinect2.JointType.ThumbRight - 4)) <= 0))
				{
					_boneDir[ii] = _bones[(ii/4) + (int)Kinect2.JointType.ThumbRight - 4].transform.position - _bones[ii].transform.position;
				}
				//if the bone is the hip_override (at boneindex Hip_Left, get direction from average of left and right hips
				else if(ii == (int)Kinect2.JointType.HipLeft && HipLeft != null && HipRight != null)
				{
					_boneDir[ii] = ((HipRight.transform.position + HipLeft.transform.position) / 2.0f) - HipOverride.transform.position;
				}
				//otherwise, get the vector from this bone to the next.
				else if((_nullMask & (uint)(1 << ii+1)) <= 0)
				{
					_boneDir[ii] = _bones[ii+1].transform.position - _bones[ii].transform.position;
				}
				else
				{
					continue;
				}
				//Since the spine of the kinect data is ~40 degrees back from the hip,
				//check what angle the spine is at and rotate the saved direction back to match the data
				if(ii == (int)Kinect2.JointType.SpineMid)
				{
					float angle = Vector3.Angle(transform.up,_boneDir[ii]);
					_boneDir[ii] = Quaternion.AngleAxis(angle,transform.right) * _boneDir[ii];
				}
				//transform the direction into local space.
				_boneDir[ii] = _bones[ii].transform.InverseTransformDirection(_boneDir[ii]);
			}
		}
		//make _chestRight orthogonal to the direction of the spine.
		_chestRight -= Vector3.Project(_chestRight, _boneDir[(int)Kinect2.JointType.SpineMid]);
		//make _hipRight orthogonal to the direction of the hip override
		Vector3.OrthoNormalize(ref _boneDir[(int)Kinect2.JointType.HipLeft],ref _hipRight);
		// Root
		Root.transform.localRotation = Quaternion.Euler(fRotRootX, 0.0f, 0.0f);
	}
    // Opens Color+Depth+BodyIndex streams and prepares all buffers needed to
    // build a depth-sized texture whose pixels are sampled from the color frame
    // via coordinate mapping, then binds that texture to this object's material.
    void Start()
    {
        sensor = KinectSensor.GetDefault();

        if ( sensor != null )
        {
            reader = sensor.OpenMultiSourceFrameReader(
                FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.BodyIndex);

            coordinateMapper = sensor.CoordinateMapper;

            FrameDescription depthFrameDesc = sensor.DepthFrameSource.FrameDescription;
            depthData = new ushort[depthFrameDesc.LengthInPixels];
            depthWidth = depthFrameDesc.Width;
            depthHeight = depthFrameDesc.Height;

            // Color frame is requested in RGBA layout.
            FrameDescription colorFrameDesc = sensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Rgba);
            colorLengthInBytes = colorFrameDesc.LengthInPixels * colorFrameDesc.BytesPerPixel;
            colorData = new byte[colorLengthInBytes];
            colorWidth = colorFrameDesc.Width;
            colorBytesPerPixel = colorFrameDesc.BytesPerPixel;

            FrameDescription bodyIndexDesc = sensor.BodyIndexFrameSource.FrameDescription;
            bodyIndexData = new byte[bodyIndexDesc.LengthInPixels * bodyIndexDesc.BytesPerPixel];

            // PREPARE THE COLOR TO DEPTH MAPPED BYTE ARRAY FOR CREATING OUR DYNAMIC TEXTURE
            // ---------------------------------------------------------------------------------------

            // STEP 1. ALLOCATE SPACE FOR THE RESULT OF MAPPING COLORSPACEPOINTS FOR EACH DEPTH FRAME POINT
            colorSpacePoints = new ColorSpacePoint[depthFrameDesc.LengthInPixels];
            cameraSpacePoints = new CameraSpacePoint[depthFrameDesc.LengthInPixels];

            // STEP 2. PREPARE THE BYTE ARRAY THAT WILL HOLD A CALCULATED COLOR PIXEL FOR EACH DEPTH PIXEL
            //         THIS BYTE ARRAY WILL BE FED TO THE MATERIAL AND USED AS THE MESH TEXTURE
            mappedColorData = new byte[depthFrameDesc.LengthInPixels * colorFrameDesc.BytesPerPixel];

            // STEP 3. CREATE A TEXTURE THAT HAS THE SIZE OF THE DEPTH FRAME BUT CAN HOLD RGBA VALUES FROM THE COLOR FRAME
            texture = new Texture2D(depthFrameDesc.Width, depthFrameDesc.Height, TextureFormat.RGBA32, false);

            // STEP 4. BIND THE MAIN TEXTURE TO THE LOCAL VARIABLE FOR FUTURE PROCESSING
            gameObject.GetComponent<Renderer>().material.mainTexture = texture;

            if (!sensor.IsOpen) sensor.Open();

            BuildSimpleMesh();
            //BuildVariableMesh();

        } else
        {
            Debug.LogError("Couldn't find Kinect Sensor!");
        }
    }
    /// <summary>
    /// Disposes every per-stream reader and the multi-source reader, then closes
    /// the sensor and waits (up to 3 seconds) for it to report closed before
    /// releasing the reference.
    /// </summary>
    public void CloseSensor(KinectInterop.SensorData sensorData)
    {
        if(coordMapper != null)
        {
            coordMapper = null;
        }

        if(bodyFrameReader != null)
        {
            bodyFrameReader.Dispose();
            bodyFrameReader = null;
        }

        if(bodyIndexFrameReader != null)
        {
            bodyIndexFrameReader.Dispose();
            bodyIndexFrameReader = null;
        }

        if(colorFrameReader != null)
        {
            colorFrameReader.Dispose();
            colorFrameReader = null;
        }

        if(depthFrameReader != null)
        {
            depthFrameReader.Dispose();
            depthFrameReader = null;
        }

        if(infraredFrameReader != null)
        {
            infraredFrameReader.Dispose();
            infraredFrameReader = null;
        }

        if(multiSourceFrameReader != null)
        {
            multiSourceFrameReader.Dispose();
            multiSourceFrameReader = null;
        }

        if(kinectSensor != null)
        {
            // The IsOpen guard was deliberately commented out; Close() is called unconditionally.
            //if (kinectSensor.IsOpen)
            {
                //Debug.Log("Closing sensor, available: " + kinectSensor.IsAvailable);
                kinectSensor.Close();
            }

            // NOTE(review): busy-wait blocks the calling thread for up to 3 seconds
            // while the driver finishes closing the sensor.
            float fWaitTime = Time.realtimeSinceStartup + 3f;
            while(kinectSensor.IsOpen && Time.realtimeSinceStartup < fWaitTime)
            {
                // wait for sensor to close
            }

            Debug.Log("K2-sensor " + (kinectSensor.IsOpen ? "opened" : "closed") +
                      ", available: " + kinectSensor.IsAvailable);

            kinectSensor = null;
        }
    }
Exemplo n.º 16
0
        // Shutdown: dispose the reader first, then close and drop the sensor.
        private void OnApplicationQuit()
        {
            m_Reader?.Dispose();
            m_Reader = null;

            if (m_Sensor != null)
            {
                if (m_Sensor.IsOpen)
                {
                    m_Sensor.Close();
                }
                m_Sensor = null;
            }
        }
Exemplo n.º 17
0
    /// <summary>
    /// Opens the default Kinect v2 sensor, allocates buffers for the frame sources
    /// requested in dwFlags, and returns the populated SensorData (null when no
    /// sensor is present). When bUseMultiSource is true a single combined reader
    /// is opened instead of per-stream readers. sensorAngle is not used here.
    /// </summary>
    public KinectInterop.SensorData OpenDefaultSensor(KinectInterop.FrameSource dwFlags, float sensorAngle, bool bUseMultiSource)
    {
        KinectInterop.SensorData sensorData = new KinectInterop.SensorData();
        //sensorFlags = dwFlags;

        kinectSensor = KinectSensor.GetDefault();
        if(kinectSensor == null)
            return null;

        coordMapper = kinectSensor.CoordinateMapper;

        this.bodyCount = kinectSensor.BodyFrameSource.BodyCount;
        sensorData.bodyCount = this.bodyCount;
        sensorData.jointCount = 25; // Kinect v2 skeleton joint count

        // Fixed optical characteristics used for this sensor model.
        sensorData.depthCameraFOV = 60f;
        sensorData.colorCameraFOV = 53.8f;
        sensorData.depthCameraOffset = -0.03f;

        if((dwFlags & KinectInterop.FrameSource.TypeBody) != 0)
        {
            // A dedicated reader is only opened when the multi-source reader is not used.
            if(!bUseMultiSource)
                bodyFrameReader = kinectSensor.BodyFrameSource.OpenReader();

            bodyData = new Body[sensorData.bodyCount];
        }

        // Color frames are described (and later buffered) in RGBA layout.
        var frameDesc = kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Rgba);
        sensorData.colorImageWidth = frameDesc.Width;
        sensorData.colorImageHeight = frameDesc.Height;

        if((dwFlags & KinectInterop.FrameSource.TypeColor) != 0)
        {
            if(!bUseMultiSource)
                colorFrameReader = kinectSensor.ColorFrameSource.OpenReader();

            sensorData.colorImage = new byte[frameDesc.BytesPerPixel * frameDesc.LengthInPixels];
        }

        sensorData.depthImageWidth = kinectSensor.DepthFrameSource.FrameDescription.Width;
        sensorData.depthImageHeight = kinectSensor.DepthFrameSource.FrameDescription.Height;

        if((dwFlags & KinectInterop.FrameSource.TypeDepth) != 0)
        {
            if(!bUseMultiSource)
                depthFrameReader = kinectSensor.DepthFrameSource.OpenReader();

            sensorData.depthImage = new ushort[kinectSensor.DepthFrameSource.FrameDescription.LengthInPixels];
        }

        if((dwFlags & KinectInterop.FrameSource.TypeBodyIndex) != 0)
        {
            if(!bUseMultiSource)
                bodyIndexFrameReader = kinectSensor.BodyIndexFrameSource.OpenReader();

            sensorData.bodyIndexImage = new byte[kinectSensor.BodyIndexFrameSource.FrameDescription.LengthInPixels];
        }

        if((dwFlags & KinectInterop.FrameSource.TypeInfrared) != 0)
        {
            if(!bUseMultiSource)
                infraredFrameReader = kinectSensor.InfraredFrameSource.OpenReader();

            sensorData.infraredImage = new ushort[kinectSensor.InfraredFrameSource.FrameDescription.LengthInPixels];
        }

        if(!kinectSensor.IsOpen)
        {
            kinectSensor.Open();
        }

        if(bUseMultiSource && dwFlags != KinectInterop.FrameSource.TypeNone && kinectSensor.IsOpen)
        {
            // NOTE(review): unlike the other OpenDefaultSensor variant in this file,
            // dwFlags is cast without masking to the FrameSourceTypes bit range —
            // confirm KinectInterop.FrameSource defines no flags outside that range.
            multiSourceFrameReader = kinectSensor.OpenMultiSourceFrameReader((FrameSourceTypes)dwFlags);
        }

        return sensorData;
    }
Exemplo n.º 18
0
        // Opens Color/Depth/Body/BodyIndex streams and allocates the CPU buffers
        // used for color<->depth coordinate mapping and body tracking.
        private void Start()
        {
            m_Sensor = KinectSensor.GetDefault();

            if (m_Sensor != null)
            {
                // Coordinate mapper converts between color, depth and camera space.
                m_CoordinateMapper = m_Sensor.CoordinateMapper;

                // One combined reader for all four streams.
                m_Reader = m_Sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Body | FrameSourceTypes.BodyIndex);

                var colorFrameDesc = m_Sensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Rgba);
                ColorWidth = colorFrameDesc.Width;
                ColorHeight = colorFrameDesc.Height;

                m_ColorTexture = new Texture2D(colorFrameDesc.Width, colorFrameDesc.Height, TextureFormat.RGBA32, false);
                m_ColorData = new byte[colorFrameDesc.BytesPerPixel * colorFrameDesc.LengthInPixels];

                // One depth-space coordinate per color pixel.
                m_ColorMappedToDepthPoints = new DepthSpacePoint[ColorWidth * ColorHeight];

                var depthFrameDesc = m_Sensor.DepthFrameSource.FrameDescription;
                DepthWidth = depthFrameDesc.Width;
                DepthHeight = depthFrameDesc.Height;

                m_DepthData = new ushort[depthFrameDesc.Width * depthFrameDesc.Height];

                m_DepthCoordinates = new DepthSpacePoint[colorFrameDesc.Width * colorFrameDesc.Height];

                // Body tracking buffer sized from the sensor's own body count.
                m_BodyData = new Body[m_Sensor.BodyFrameSource.BodyCount];

                var bodyIndexFrameDesc = m_Sensor.BodyIndexFrameSource.FrameDescription;
                BodyIndexWidth = bodyIndexFrameDesc.Width;
                BodyIndexHeight = bodyIndexFrameDesc.Height;
                m_BodyIndexData = new byte[ bodyIndexFrameDesc.Width * bodyIndexFrameDesc.Height ];

                if (!m_Sensor.IsOpen)
                {
                    m_Sensor.Open();
                }
            }
        }
Exemplo n.º 19
0
	// Use this for initialization.
	// Opens Body/Depth/BodyIndex streams, sets up per-limb tracking queues and
	// data-logging writers, and computes the background rendering rectangles.
	void Start()
	{
		mySensor = KinectSensor.GetDefault();

		if (mySensor != null)
		{
			if (!mySensor.IsOpen)
			{
				mySensor.Open();
			}

			ninjaTex = new Texture2D(colorWidth, colorHeight, TextureFormat.BGRA32, false);

			msFrameReader = mySensor.OpenMultiSourceFrameReader(FrameSourceTypes.Body | FrameSourceTypes.Depth |
				FrameSourceTypes.BodyIndex);

			// There has to be a more efficient way of tracking these (i.e. using OOP)
			rightHandQueue_X = new Queue<float>();
			rightHandQueue_Y = new Queue<float>();
			rightHandQueue_T = new Queue<float>();

			leftHandQueue_X = new Queue<float>();
			leftHandQueue_Y = new Queue<float>();
			leftHandQueue_T = new Queue<float>();

			rightFootQueue_X = new Queue<float>();
			rightFootQueue_Y = new Queue<float>();
			rightFootQueue_T = new Queue<float>();

			leftFootQueue_X = new Queue<float>();
			leftFootQueue_Y = new Queue<float>();
			leftFootQueue_T = new Queue<float>();

			/** Construct StreamWriter object for collecting user data **/
			// NOTE(review): these writers are never closed in this block — confirm
			// they are disposed elsewhere, otherwise buffered data may be lost.
			sw_v = new StreamWriter("EMILY_V.txt");
			sw_t = new StreamWriter("EMILY_T.txt");
			sw_x = new StreamWriter("EMILY_X.txt");
			sw_y = new StreamWriter("EMILY_Y.txt");


			InitializeSlashRenderer();

			//Rendering user as part of the Unity Scene background via Main Camera
			Rect cameraRect = Camera.main.pixelRect;
			float rectHeight = cameraRect.height;
			float rectWidth = cameraRect.width;

			// Preserve the color stream's aspect ratio inside the camera rect.
			if (rectWidth > rectHeight)
				rectWidth = rectHeight * colorWidth / colorHeight;
			else
				rectHeight = rectWidth * colorHeight / colorWidth;

			// Center the image; the GUI rect is flipped vertically (negative height).
			float foregroundOfsX = (cameraRect.width - rectWidth) / 2;
			float foregroundOfsY = (cameraRect.height - rectHeight) / 2;
			foregroundImgRect = new Rect(foregroundOfsX, foregroundOfsY, rectWidth, rectHeight);
			foregroundGuiRect = new Rect(foregroundOfsX, cameraRect.height - foregroundOfsY, rectWidth, -rectHeight);
		}

	}
Exemplo n.º 20
0
    // Shutdown: drop all frame buffers, dispose the multi-source reader, and
    // close and release the sensor.
    void OnApplicationQuit()
    {
        // Release buffer references so the GC can reclaim them.
        pDepthBuffer = null;
        pColorBuffer = null;
        pBodyIndexBuffer = null;
        m_pDepthCoordinates = null;

        m_pMultiSourceFrameReader?.Dispose();
        m_pMultiSourceFrameReader = null;

        if (m_pKinectSensor != null)
        {
            m_pKinectSensor.Close();
            m_pKinectSensor = null;
        }
    }
Exemplo n.º 21
0
 // Token: 0x06002DAC RID: 11692 RVA: 0x000E0C11 File Offset: 0x000DF011
 // Wraps a native multi-source frame reader handle and adds a reference on the
 // native object so it stays alive for the lifetime of this managed wrapper.
 internal MultiSourceFrameReader(IntPtr pNative)
 {
     this._pNative = pNative;
     MultiSourceFrameReader.Windows_Kinect_MultiSourceFrameReader_AddRefObject(ref this._pNative);
 }
Exemplo n.º 22
0
	// Shutdown: dispose the multi-source reader, then close and drop the sensor.
	void OnApplicationQuit()
	{
		if (msFrameReader != null)
		{
			msFrameReader.Dispose();
			msFrameReader = null;
		}

		if (mySensor == null)
			return;

		if (mySensor.IsOpen)
			mySensor.Close();
		mySensor = null;
	}
Exemplo n.º 23
0
    private Vector3 _chestRight;        //right vector of the chest

    // Use this for initialization
    /// <summary>
    /// Acquires the default Kinect v2 sensor, opens it if needed, subscribes a
    /// multi-source reader for body frames, and allocates the bone table and
    /// per-joint position/rotation buffers used during tracking.
    /// </summary>
    void Start()
    {
        _Sensor = Kinect2.KinectSensor.GetDefault();
        if (_Sensor != null)
        {
            // The original code duplicated the reader setup in both branches of
            // the IsOpen check; only the log message and Open() call differ, so
            // the reader is now opened exactly once below.
            if (!_Sensor.IsOpen)
            {
                if (bDebugLog)
                {
                    Debug.Log("[Kinect2] KinectSensor Open");
                }
                _Sensor.Open();
            }
            else if (bDebugLog)
            {
                Debug.Log("[Kinect2] KinectSensor Opened");
            }

            // Only body frames are consumed; Color/Depth/Infrared sources were
            // deliberately disabled in an earlier revision.
            _Reader = _Sensor.OpenMultiSourceFrameReader(Kinect2.FrameSourceTypes.Body);
            _Reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
        }

        // Bones driven by this script. The full-skeleton table that mapped every
        // Kinect2.JointType slot was removed in an earlier revision (see VCS
        // history); only these five targets remain.
        _bones = new GameObject[5] {
            Head, HandLeft, HandRight, KneeLeft, KneeRight
        };

        // Buffers are still sized for the full joint range so joint indices can
        // be used directly without remapping.
        int jointCount = (int)Kinect2.JointType.ThumbRight + 1;
        _vecbones  = new Vector3[jointCount];
        _vecbones2 = new Vector3[jointCount];
        _qbones    = new Quaternion[jointCount];
        _qbones2   = new Quaternion[jointCount];

        // Record which bone slots are unassigned so later updates can skip them.
        for (int ii = 0; ii < _bones.Length; ii++)
        {
            if (_bones[ii] == null)
            {
                _nullMask |= (uint)(1 << ii);
            }
        }

        // Base rotations and bone directions (bone-local space). The code that
        // used to populate these from the rig's rest pose is currently disabled,
        // so they remain default-initialized here.
        _baseRotation = new Quaternion[jointCount];
        _boneDir      = new Vector3[jointCount];
    }
	/// <summary>
	/// Initializes the Kinect sensor and its frame buffers, sizes the display
	/// quad to the color stream, and prepares the OpenCV mats/kernels used by
	/// the sepia, pixelize and comic image filters.
	/// </summary>
	void Start ()
	{
		sensor = KinectSensor.GetDefault ();

		// Bail out early when no sensor is available.
		if (sensor == null) {
			UnityEngine.Debug.LogError ("No ready Kinect found!");
			return;
		}

		coordinateMapper = sensor.CoordinateMapper;
		reader = sensor.OpenMultiSourceFrameReader (FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.BodyIndex);

		// Color stream: RGBA texture plus a raw byte buffer per frame.
		FrameDescription colorDesc = sensor.ColorFrameSource.CreateFrameDescription (ColorImageFormat.Rgba);
		texture = new Texture2D (colorDesc.Width, colorDesc.Height, TextureFormat.RGBA32, false);
		colorData = new byte[colorDesc.BytesPerPixel * colorDesc.LengthInPixels];

		// Depth stream, plus one DepthSpacePoint per COLOR pixel for the
		// color-to-depth mapping.
		FrameDescription depthDesc = sensor.DepthFrameSource.FrameDescription;
		depthData = new ushort[depthDesc.LengthInPixels];
		depthSpacePoints = new DepthSpacePoint[colorDesc.LengthInPixels];

		// Body-index stream buffer.
		FrameDescription bodyIndexDesc = sensor.BodyIndexFrameSource.FrameDescription;
		bodyIndexData = new byte[bodyIndexDesc.BytesPerPixel * bodyIndexDesc.LengthInPixels];

		if (!sensor.IsOpen) {
			sensor.Open ();
		}

		// Working mats at color-frame resolution.
		rgbaMat = new Mat (colorDesc.Height, colorDesc.Width, CvType.CV_8UC4);
		Debug.Log ("rgbaMat " + rgbaMat.ToString ());

		maskMat = new Mat (rgbaMat.rows (), rgbaMat.cols (), CvType.CV_8UC1);
		outputMat = new Mat (rgbaMat.rows (), rgbaMat.cols (), CvType.CV_8UC4);
		maskData = new byte[rgbaMat.rows () * rgbaMat.cols ()];

		// Scale the quad to the texture and fit the orthographic camera to it.
		gameObject.transform.localScale = new Vector3 (texture.width, texture.height, 1);
		gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
		Camera.main.orthographicSize = texture.height / 2;

		// Sepia filter: 4x4 color-mixing kernel (rows = output R,G,B,A).
		sepiaKernel = new Mat (4, 4, CvType.CV_32F);
		sepiaKernel.put (0, 0, /* R */0.189f, 0.769f, 0.393f, 0f);
		sepiaKernel.put (1, 0, /* G */0.168f, 0.686f, 0.349f, 0f);
		sepiaKernel.put (2, 0, /* B */0.131f, 0.534f, 0.272f, 0f);
		sepiaKernel.put (3, 0, /* A */0.000f, 0.000f, 0.000f, 1f);

		// Pixelize filter scratch objects.
		pixelizeIntermediateMat = new Mat ();
		pixelizeSize0 = new Size ();

		// Comic filter working mats.
		comicGrayMat = new Mat (texture.height, texture.width, CvType.CV_8UC1);
		comicLineMat = new Mat (texture.height, texture.width, CvType.CV_8UC1);
		comicMaskMat = new Mat (texture.height, texture.width, CvType.CV_8UC1);

		// Striped background for the comic effect: diagonal black lines, 4 px
		// apart, drawn over a white canvas.
		comicBgMat = new Mat (texture.height, texture.width, CvType.CV_8UC1, new Scalar (255));
		for (int i = 0; i < comicBgMat.rows () * 2.5f; i = i + 4) {
			Core.line (comicBgMat, new Point (0, 0 + i), new Point (comicBgMat.cols (), -comicBgMat.cols () + i), new Scalar (0), 1);
		}

		comicDstMat = new Mat (texture.height, texture.width, CvType.CV_8UC1);
		comicGrayPixels = new byte[comicGrayMat.cols () * comicGrayMat.rows () * comicGrayMat.channels ()];
		comicMaskPixels = new byte[comicMaskMat.cols () * comicMaskMat.rows () * comicMaskMat.channels ()];
	}