// Use this for initialization
void Start()
{
    m_camera = OpenVR.TrackedCamera;
    if (m_camera == null)
    {
        Debug.LogError("No camera found");
        return;
    }

    // First get the size of a frame
    uint width = 0;
    uint height = 0;
    uint bufferSize = 0;
    EVRTrackedCameraError cameraError = m_camera.GetCameraFrameSize(0, EVRTrackedCameraFrameType.Undistorted, ref width, ref height, ref bufferSize);
    if (cameraError != EVRTrackedCameraError.None)
    {
        Debug.LogError("Could not get frame size (error=" + cameraError + ")");
        return;
    }
    if (width * height == 0)
    {
        Debug.LogError("Frame size of 0, are you sure you've enabled the camera in the SteamVR settings panel?");
        return;
    }

    // The buffer holds width * height pixels, so this is bytes per pixel
    uint bytesPerPixel = bufferSize / (width * height);
    m_frameBufferSize = bufferSize;

    // Then get a handle to the stream
    cameraError = m_camera.AcquireVideoStreamingService(0, ref m_streamHandle);
    if (cameraError == EVRTrackedCameraError.None)
    {
        m_frameBuffer = Marshal.AllocHGlobal((int)bufferSize);
        m_frameHeader = new CameraVideoStreamFrameHeader_t();
        m_frameHeaderSize = (uint)Marshal.SizeOf(m_frameHeader);

        if (bytesPerPixel == 3)
        {
            m_texture = new Texture2D((int)width, (int)height, TextureFormat.RGB24, false);
        }
        else if (bytesPerPixel == 4)
        {
            m_texture = new Texture2D((int)width, (int)height, TextureFormat.RGBA32, false);
        }
        // if there's some other texture format here, we simply won't be able to do anything in Update()

        if (m_texture && GetComponent<Renderer>())
        {
            GetComponent<Renderer>().material.mainTexture = m_texture;
        }
    }
    else
    {
        Debug.LogError("Could not acquire handle to stream (error=" + cameraError + ")");
    }
}
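The comment in Start() alludes to a matching Update() that does the per-frame copy. Here's a minimal sketch of what that could look like; the m_lastFrameSequence field is my own addition (it isn't shown above) and is used to detect new frames. The header-only call with IntPtr.Zero is a cheap way to check the sequence number before paying for the full copy:

// Sketch of a matching Update(), not the exact original code.
// Assumes a uint m_lastFrameSequence field for change detection.
void Update()
{
    if (m_texture == null || m_streamHandle == 0)
    {
        return;
    }

    // Header-only query: passing IntPtr.Zero and size 0 skips the pixel copy
    EVRTrackedCameraError cameraError = m_camera.GetVideoStreamFrameBuffer(
        m_streamHandle, EVRTrackedCameraFrameType.Undistorted,
        IntPtr.Zero, 0, ref m_frameHeader, m_frameHeaderSize);
    if (cameraError != EVRTrackedCameraError.None ||
        m_frameHeader.nFrameSequence == m_lastFrameSequence)
    {
        return; // error, or no new frame yet
    }
    m_lastFrameSequence = m_frameHeader.nFrameSequence;

    // New frame: do the full buffer copy and upload it to the texture
    cameraError = m_camera.GetVideoStreamFrameBuffer(
        m_streamHandle, EVRTrackedCameraFrameType.Undistorted,
        m_frameBuffer, m_frameBufferSize, ref m_frameHeader, m_frameHeaderSize);
    if (cameraError != EVRTrackedCameraError.None)
    {
        return;
    }
    m_texture.LoadRawTextureData(m_frameBuffer, (int)m_frameBufferSize);
    m_texture.Apply();
}

Note that the camera delivers the first pixel at the top left while Unity textures start at the bottom left, so a straight LoadRawTextureData leaves the image vertically flipped; the tick() code below deals with exactly this via invertBuffer().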
public unsafe void tick()
{
    if (myHandle == 0)
    {
        return;
    }

    if (TimeSource.now() > myVideoSignalTime + 2.0)
    {
        Warn.print("No video frames arriving");
        //stopStream();
    }

    // Header-only query first: passing IntPtr.Zero skips the pixel copy
    CameraVideoStreamFrameHeader_t frameHeader = new CameraVideoStreamFrameHeader_t();
    EVRTrackedCameraError error = OpenVR.TrackedCamera.GetVideoStreamFrameBuffer(myHandle, myFrameType, IntPtr.Zero, 0, ref frameHeader, (uint)Marshal.SizeOf(frameHeader));
    if (error != EVRTrackedCameraError.None)
    {
        Warn.print("Failed to get frame header");
        return;
    }

    if (frameHeader.nFrameSequence == myLastFrameSequence)
    {
        // frame hasn't changed yet
        return;
    }

    myVideoSignalTime = TimeSource.now();

    // Frame has changed, do the more expensive frame buffer copy
    fixed (byte* ptr = myFrameBuffer)
    {
        error = OpenVR.TrackedCamera.GetVideoStreamFrameBuffer(myHandle, myFrameType, (IntPtr)ptr, myFrameBufferSize, ref frameHeader, (uint)Marshal.SizeOf(frameHeader));
        if (error != EVRTrackedCameraError.None)
        {
            Warn.print("Failed to get frame buffer");
            return;
        }
    }

    // Remember this frame so we don't copy it again next tick
    myLastFrameSequence = frameHeader.nFrameSequence;

    if (frameHeader.standingTrackedDevicePose.bPoseIsValid)
    {
        Matrix4 standingView = VR.convertToMatrix4(frameHeader.standingTrackedDevicePose.mDeviceToAbsoluteTracking);
        Matrix4 seated2Standing = VR.convertToMatrix4(VR.vrSystem.GetSeatedZeroPoseToStandingAbsoluteTrackingPose());
        myView = standingView * seated2Standing.Inverted();
    }

    // Invert buffer from first pixel being top left to bottom left
    invertBuffer();
    myTexture.paste(myFrameFlipBuffer, Vector2.Zero, new Vector2(myFrameWidth, myFrameHeight), PixelFormat.Rgba);
}
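invertBuffer() isn't shown, but its job is just the vertical flip mentioned in the comment. A plausible sketch, assuming myFrameBuffer and myFrameFlipBuffer are equal-sized byte[] arrays of tightly packed 4-byte RGBA rows with no padding, and that myFrameWidth and myFrameHeight are ints:

// Sketch only: flip rows so the first pixel is bottom left rather than
// top left. Assumes tightly packed RGBA (4 bytes per pixel, no row padding)
// and that myFrameFlipBuffer has the same length as myFrameBuffer.
void invertBuffer()
{
    int stride = myFrameWidth * 4; // bytes per row
    for (int row = 0; row < myFrameHeight; row++)
    {
        int srcOffset = row * stride;
        int dstOffset = (myFrameHeight - 1 - row) * stride;
        Array.Copy(myFrameBuffer, srcOffset, myFrameFlipBuffer, dstOffset, stride);
    }
}

Flipping into a second buffer rather than in place keeps the original frame intact and lets the copy loop stay branch-free; the cost is one extra frame-sized allocation held for the lifetime of the stream.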