// Initializes the Kinect runtime for this component. Singleton-guarded: only one
// active KinectSensor component may exist at a time.
void Awake()
{
    if (KinectOneSensor.instance != null)
    {
        Debug.Log("There should be only one active instance of the KinectSensor component at a time.");
        throw new Exception("There should be only one active instance of the KinectSensor component at a time.");
    }
    try
    {
        // The MSR Kinect DLL (native code) is going to load into the Unity process and stay resident
        // even between debug runs of the game. So our component must be resilient to starting up on a
        // second run when the Kinect DLL is already loaded and perhaps even left in a running state.
        // Kinect does not appear to like having NuiInitialize called when it is already initialized as
        // it messes up the internal state and stops functioning. It is resilient to having Shutdown
        // called right before initializing even if it hasn't been initialized yet. So calling this
        // first puts us in a good state on a first or second run. However, calling NuiShutdown before
        // starting prevents the image streams from being read, so if you want to use image data
        // (either depth or RGB), leave this line commented out.
        //NuiShutdown();

        int hr = NativeMethods.NuiInitialize(
            NuiInitializeFlags.UsesDepthAndPlayerIndex |
            NuiInitializeFlags.UsesSkeleton |
            NuiInitializeFlags.UsesColor);
        if (hr != 0)
        {
            throw new Exception("NuiInitialize Failed.");
        }

        hr = NativeMethods.NuiSkeletonTrackingEnable(IntPtr.Zero, skeltonTrackingMode);
        if (hr != 0)
        {
            throw new Exception("Cannot initialize Skeleton Data.");
        }

        // Depth + player-index stream: 320x240, 2 frame buffers.
        depthStreamHandle = IntPtr.Zero;
        hr = NativeMethods.NuiImageStreamOpen(
            NuiImageType.DepthAndPlayerIndex, NuiImageResolution.resolution320x240,
            0, 2, IntPtr.Zero, ref depthStreamHandle);
        //Debug.Log(depthStreamHandle);
        if (hr != 0)
        {
            throw new Exception("Cannot open depth stream.");
        }

        // Color stream: 640x480, 2 frame buffers.
        colorStreamHandle = IntPtr.Zero;
        hr = NativeMethods.NuiImageStreamOpen(
            NuiImageType.Color, NuiImageResolution.resolution640x480,
            0, 2, IntPtr.Zero, ref colorStreamHandle);
        //Debug.Log(colorStreamHandle);
        if (hr != 0)
        {
            throw new Exception("Cannot open color stream.");
        }

        colorImage = new Color32[640 * 480];

        // Tilt the physical camera so it points from its mounting height toward the
        // look-at target: theta is the elevation angle, converted radians -> degrees.
        double theta = Mathf.Atan((lookAt.y + kinectCenter.y - sensorHeight) / (lookAt.z + kinectCenter.z));
        long kinectAngle = (long)(theta * (180 / Mathf.PI));
        NativeMethods.NuiCameraSetAngle(kinectAngle);

        DontDestroyOnLoad(gameObject);
        KinectOneSensor.Instance = this;
        // NOTE(review): NuiStatusProc is constructed with no target method here —
        // verify the status callback is actually wired up as intended.
        NativeMethods.NuiSetDeviceStatusCallback(new NuiStatusProc(), IntPtr.Zero);
    }
    catch (Exception e)
    {
        // Log the full exception (message + stack trace), not just e.Message, so
        // initialization failures are diagnosable in the Unity console.
        Debug.LogError(e);
    }
}
// Use this for initialization.
// Allocates per-player skeleton buffers and builds kinectToWorld, the matrix that
// converts raw kinect-space coordinates into world-space, compensating for the
// sensor's mounting position and tilt.
void Start()
{
    kinect = devOrEmu.getKinect();
    players = new NuiSkeletonTrackingState[Constants.NuiSkeletonCount];
    trackedPlayers = new int[Constants.NuiSkeletonMaxTracked];
    trackedPlayers[0] = -1;
    trackedPlayers[1] = -1;

    // Per-player, per-joint state buffers (at most 2 tracked players).
    bonePos = new Vector3[2, (int)NuiSkeletonPositionIndex.Count];
    rawBonePos = new Vector3[2, (int)NuiSkeletonPositionIndex.Count];
    boneVel = new Vector3[2, (int)NuiSkeletonPositionIndex.Count];
    boneState = new NuiSkeletonPositionTrackingState[2, (int)NuiSkeletonPositionIndex.Count];
    boneLocalOrientation = new Quaternion[2, (int)NuiSkeletonPositionIndex.Count];
    boneAbsoluteOrientation = new Quaternion[2, (int)NuiSkeletonPositionIndex.Count];

    // Create the transform matrix that converts from kinect-space to world-space.
    // Translation part: moves the kinect's mounting point to the world origin.
    Matrix4x4 trans = new Matrix4x4();
    trans.SetTRS(
        new Vector3(
            -kinect.getKinectCenter().x,
            kinect.getSensorHeight() - kinect.getKinectCenter().y,
            -kinect.getKinectCenter().z),
        Quaternion.identity,
        Vector3.one);

    // Elevation angle of the sensor (radians -> degrees), derived from its height
    // and look-at target; used to undo the camera tilt below.
    double theta = Mathf.Atan(
        (kinect.getLookAt().y + kinect.getKinectCenter().y - kinect.getSensorHeight()) /
        (kinect.getLookAt().z + kinect.getKinectCenter().z));
    float kinectAngle = (float)(theta * (180 / Mathf.PI));

    // FIX: the rotation assignment was previously commented out (marked "BUG here"),
    // leaving rot as a default Matrix4x4 — which is the ALL-ZERO matrix in Unity —
    // so kinectToWorld collapsed every transformed point to the origin.
    // Quaternion.Euler builds the rotation directly, avoiding the original code's
    // assignment to .eulerAngles on an uninitialized (non-normalized) quaternion.
    Matrix4x4 rot = new Matrix4x4();
    rot.SetTRS(Vector3.zero, Quaternion.Euler(-kinectAngle, 0, 0), Vector3.one);

    // Final transform matrix offsets the rotation of the kinect, then translates
    // to a new center, then flips handedness via flipMatrix.
    kinectToWorld = flipMatrix * trans * rot;
}
//end lxjk

// Use this for initialization.
void Start()
{
    // Resolve the active sensor (real device or emulator) once at startup.
    var activeSensor = devOrEmu.getKinect();
    kinect = activeSensor;
}