IEnumerator _CheckCamera(Action done = null)
        {
            while (OpenVR.TrackedCamera == null)
            {
                Debug.Log("[ViveSR Experience] Looking for Tracked Camera");
                yield return new WaitForEndOfFrame();
            }

            uint deviceIndex = OpenVR.k_unTrackedDeviceIndex_Hmd;

            EVRTrackedCameraError error = EVRTrackedCameraError.None;

            ulong _handle = 0;

            error = OpenVR.TrackedCamera.AcquireVideoStreamingService(deviceIndex, ref _handle);

            while (error != EVRTrackedCameraError.None || _handle == 0)
            {
                Debug.Log("[ViveSR Experience] The VIVE Pro camera may not be enabled in the SteamVR settings");
                error = OpenVR.TrackedCamera.AcquireVideoStreamingService(deviceIndex, ref _handle);
                yield return new WaitForEndOfFrame(); // yield so the retry loop doesn't freeze the main thread
            }

            if (done != null)
            {
                done();
            }
        }
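A minimal caller sketch (hypothetical; StartCoroutine is Unity's standard entry point for coroutines). It starts the check and reacts once the stream handle has been acquired:

    void Start()
    {
        StartCoroutine(_CheckCamera(() => Debug.Log("[ViveSR Experience] Tracked camera ready")));
    }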
    void Update()
    {
        // first get header only
        camerror = trcam_instance.GetVideoStreamFrameBuffer(pTrackedCamera, EVRTrackedCameraFrameType.Undistorted, (IntPtr)null, 0, ref pFrameHeader, (uint)Marshal.SizeOf(typeof(CameraVideoStreamFrameHeader_t)));
        if (camerror != EVRTrackedCameraError.None)
        {
            // the header-only probe can fail transiently while the stream starts up, so don't log here
            return;
        }
        // if the frame hasn't changed, don't copy the buffer
        if (pFrameHeader.nFrameSequence == prevFrameSequence)
        {
            return;
        }
        // now get header and buffer
        camerror = trcam_instance.GetVideoStreamFrameBuffer(pTrackedCamera, EVRTrackedCameraFrameType.Undistorted, pBuffer, buffsize, ref pFrameHeader, (uint)Marshal.SizeOf(typeof(CameraVideoStreamFrameHeader_t)));
        if (camerror != EVRTrackedCameraError.None)
        {
            Debug.LogError("GetVideoStreamFrameBuffer: EVRTrackedCameraError=" + camerror);
            return;
        }
        prevFrameSequence = pFrameHeader.nFrameSequence;

        Marshal.Copy(pBuffer, buffer, 0, (int)buffsize);
        texture.LoadRawTextureData(buffer);
        texture.Apply();
        GetComponent<Renderer>().material.mainTexture = texture;
    }
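The fields this Update() relies on are declared elsewhere in the class; a plausible set of declarations (a sketch: the names come from the code above, the types from the OpenVR C# bindings):

    private CVRTrackedCamera trcam_instance;                 // OpenVR.TrackedCamera
    private ulong pTrackedCamera = 0;                        // handle from AcquireVideoStreamingService
    private EVRTrackedCameraError camerror;
    private CameraVideoStreamFrameHeader_t pFrameHeader;
    private uint prevFrameSequence = 0;                      // sequence number of the last uploaded frame
    private uint buffsize = 0;                               // frame buffer size in bytes
    private IntPtr pBuffer;                                  // unmanaged frame buffer
    private byte[] buffer;                                   // managed copy for LoadRawTextureData
    private Texture2D texture;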
Example No. 3
    void Update()
    {
        // re-pose the screen each frame: reset to its initial local pose, then apply roll and pitch offsets
        screen.transform.localPosition = initPos + localOffset;
        screen.transform.localRotation = Quaternion.identity;
        screen.transform.RotateAround(screen.transform.position, screen.transform.forward, 180.0f + zOffsetAngle);
        screen.transform.RotateAround(screen.transform.parent.position, screen.transform.parent.right, xOffsetAngle);

        // first get header only
        camerror = trcam_instance.GetVideoStreamFrameBuffer(pTrackedCamera, EVRTrackedCameraFrameType.Undistorted, (IntPtr)null, 0, ref pFrameHeader, (uint)Marshal.SizeOf(typeof(CameraVideoStreamFrameHeader_t)));
        if (camerror != EVRTrackedCameraError.None)
        {
            // the header-only probe can fail transiently while the stream starts up, so don't log here
            return;
        }
        // if the frame hasn't changed, don't copy the buffer
        if (pFrameHeader.nFrameSequence == prevFrameSequence)
        {
            return;
        }
        // now get header and buffer
        camerror = trcam_instance.GetVideoStreamFrameBuffer(pTrackedCamera, EVRTrackedCameraFrameType.Undistorted, pBuffer, buffsize, ref pFrameHeader, (uint)Marshal.SizeOf(typeof(CameraVideoStreamFrameHeader_t)));
        if (camerror != EVRTrackedCameraError.None)
        {
            Debug.LogError("GetVideoStreamFrameBuffer: EVRTrackedCameraError=" + camerror);
            return;
        }
        prevFrameSequence = pFrameHeader.nFrameSequence;

        Marshal.Copy(pBuffer, buffer, 0, (int)buffsize);
        texture.LoadRawTextureData(buffer);
        texture.Apply();
    }
Example No. 4
    // Use this for initialization
    void Start()
    {
        m_camera = OpenVR.TrackedCamera;
        if (m_camera == null)
        {
            Debug.LogError("No camera found");
            return;
        }

        // First get the size of a frame
        uint width      = 0;
        uint height     = 0;
        uint bufferSize = 0;
        EVRTrackedCameraError cameraError = m_camera.GetCameraFrameSize(0, EVRTrackedCameraFrameType.Undistorted, ref width, ref height, ref bufferSize);

        if (cameraError != EVRTrackedCameraError.None)
        {
            Debug.LogError("Could not get frame size (error=" + cameraError + ")");
            return;
        }

        if (width * height == 0)
        {
            Debug.LogError("Frame size is 0; have you enabled the camera in the SteamVR settings panel?");
            return;
        }

        // bufferSize is width * height * bytes-per-pixel, so this gives bytes per pixel (3 = RGB24, 4 = RGBA32)
        uint bytesPerPixel = bufferSize / (width * height);

        m_frameBufferSize = bufferSize;

        // Then get a handle to the stream
        cameraError = m_camera.AcquireVideoStreamingService(0, ref m_streamHandle);
        if (cameraError == EVRTrackedCameraError.None)
        {
            m_frameBuffer     = Marshal.AllocHGlobal((int)bufferSize);
            m_frameHeader     = new CameraVideoStreamFrameHeader_t();
            m_frameHeaderSize = (uint)Marshal.SizeOf(m_frameHeader);

            if (bytesPerPixel == 3)
            {
                m_texture = new Texture2D((int)width, (int)height, TextureFormat.RGB24, false);
            }
            else if (bytesPerPixel == 4)
            {
                m_texture = new Texture2D((int)width, (int)height, TextureFormat.RGBA32, false);
            }
            // if there's some other texture format here, we simply won't be able to do anything in Update()

            var renderer = GetComponent<Renderer>();
            if (m_texture != null && renderer != null)
            {
                renderer.material.mainTexture = m_texture;
            }
        }
        else
        {
            Debug.LogError("Could not acquire handle to stream (error=" + cameraError + ")");
        }
    }
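The stream handle and the unmanaged buffer acquired here should be released when the component goes away; a minimal cleanup sketch, assuming the field names from the Start() above (ReleaseVideoStreamingService and Marshal.FreeHGlobal are the matching release calls):

    void OnDestroy()
    {
        if (m_camera != null && m_streamHandle != 0)
        {
            m_camera.ReleaseVideoStreamingService(m_streamHandle);
            m_streamHandle = 0;
        }
        if (m_frameBuffer != IntPtr.Zero)
        {
            Marshal.FreeHGlobal(m_frameBuffer);
            m_frameBuffer = IntPtr.Zero;
        }
    }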
Example No. 5
    private Size initTrackedCamera()
    {
        uint width = 0, height = 0, index = 0;
        bool pHasCamera = false;

        trcam_instance = OpenVR.TrackedCamera;

        if (trcam_instance == null)
        {
            Debug.LogError("Error getting TrackedCamera");
            camError = true;
        }
        else
        {
            camerror = trcam_instance.HasCamera(index, ref pHasCamera);
            if (camerror != EVRTrackedCameraError.None)
            {
                Debug.LogError("HasCamera: EVRTrackedCameraError=" + camerror);
                camError = true;
            }
            else if (pHasCamera)
            {
                camerror = trcam_instance.GetCameraFrameSize(index, EVRTrackedCameraFrameType.Undistorted, ref width, ref height, ref buffsize);
                if (camerror != EVRTrackedCameraError.None)
                {
                    Debug.LogError("GetCameraFrameSize: EVRTrackedCameraError=" + camerror);
                    camError = true;
                }
                else
                {
                    buffer  = new byte[buffsize];
                    pBuffer = Marshal.AllocHGlobal((int)buffsize);

                    camerror = trcam_instance.AcquireVideoStreamingService(index, ref pTrackedCamera);
                    if (camerror != EVRTrackedCameraError.None)
                    {
                        Debug.LogError("AcquireVideoStreamingService: EVRTrackedCameraError=" + camerror);
                        camError = true;
                    }
                }
            }
            else
            {
            Debug.Log("No camera found");
                camError = true;
            }
        }
        if (camError)
        {
            // camera unavailable: fall back to the static texture already assigned to the renderer
            Texture2D tex = (gameObject.GetComponent<Renderer>().material.mainTexture as Texture2D);
            if (tex != null)
            {
                staticBuffer = tex.GetRawTextureData();
            }
            Vector3 scale = gameObject.transform.localScale;
            return(new Size(scale.x, scale.y));
        }
        return(new Size(width, height));
    }
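A minimal caller sketch (hypothetical) pairing this initializer with the updateTrackedCamera() polling method shown in Example No. 9 below; Size is assumed to expose Width and Height:

    void Start()
    {
        Size frameSize = initTrackedCamera();
        texture = new Texture2D((int)frameSize.Width, (int)frameSize.Height, TextureFormat.RGBA32, false);
        GetComponent<Renderer>().material.mainTexture = texture;
    }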
Example No. 6
        public unsafe void tick()
        {
            if (myHandle == 0)
            {
                return;
            }

            // watchdog: warn if no frame has arrived for two seconds
            if (TimeSource.now() > myVideoSignalTime + 2.0)
            {
                Warn.print("No video frames arriving");
                //stopStream();
            }

            CameraVideoStreamFrameHeader_t frameHeader = new CameraVideoStreamFrameHeader_t();
            EVRTrackedCameraError          error       = OpenVR.TrackedCamera.GetVideoStreamFrameBuffer(myHandle, myFrameType, IntPtr.Zero, 0, ref frameHeader, (uint)Marshal.SizeOf(frameHeader));

            if (error != EVRTrackedCameraError.None)
            {
                Warn.print("Failed to get frame header");
                return;
            }

            if (frameHeader.nFrameSequence == myLastFrameSequence)
            {
                //frame hasn't changed yet
                return;
            }

            myVideoSignalTime = TimeSource.now();

            // Frame has changed, do the more expensive frame buffer copy
            fixed(byte *ptr = myFrameBuffer)
            {
                error = OpenVR.TrackedCamera.GetVideoStreamFrameBuffer(myHandle, myFrameType, (IntPtr)ptr, myFrameBufferSize, ref frameHeader, (uint)Marshal.SizeOf(frameHeader));
                if (error != EVRTrackedCameraError.None)
                {
                    Warn.print("Failed to get frame buffer");
                    return;
                }
            }

            if (frameHeader.standingTrackedDevicePose.bPoseIsValid)
            {
                Matrix4 standingView    = VR.convertToMatrix4(frameHeader.standingTrackedDevicePose.mDeviceToAbsoluteTracking);
                Matrix4 seated2Standing = VR.convertToMatrix4(VR.vrSystem.GetSeatedZeroPoseToStandingAbsoluteTrackingPose());

                myView = standingView * seated2Standing.Inverted();
            }

            // invert the buffer so the first pixel moves from top-left to bottom-left
            invertBuffer();

            myTexture.paste(myFrameFlipBuffer, Vector2.Zero, new Vector2(myFrameWidth, myFrameHeight), PixelFormat.Rgba);
        }
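invertBuffer() is not shown; a plausible sketch under the assumption of 4 bytes per pixel (the paste() call in tick() uses PixelFormat.Rgba). It copies rows bottom-up from myFrameBuffer into myFrameFlipBuffer so the first pixel moves from top-left to bottom-left:

        private void invertBuffer()
        {
            int stride = (int)myFrameWidth * 4;   // assumed RGBA, 4 bytes per pixel
            int rows = (int)myFrameHeight;
            for (int row = 0; row < rows; row++)
            {
                // copy each source row into the vertically mirrored destination row
                Buffer.BlockCopy(myFrameBuffer, row * stride,
                                 myFrameFlipBuffer, (rows - 1 - row) * stride,
                                 stride);
            }
        }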
Example No. 7
        public static bool hasTrackedCamera()
        {
            bool ret = false;
            EVRTrackedCameraError error = OpenVR.TrackedCamera.HasCamera(OpenVR.k_unTrackedDeviceIndex_Hmd, ref ret);

            if (error != EVRTrackedCameraError.None || !ret)
            {
                Warn.print("No tracked camera available on HMD: {0}", OpenVR.TrackedCamera.GetCameraErrorNameFromEnum(error));
                return(false);
            }

            return(true);
        }
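A typical guard before starting the stream (hypothetical caller inside the same class; startStream() is shown in Example No. 10 below):

            if (hasTrackedCamera())
            {
                startStream();
            }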
Example No. 8
    void Start()
    {
        initPos = screen.transform.localPosition;

        uint width = 0, height = 0;
        bool pHasCamera = false;

        trcam_instance = OpenVR.TrackedCamera;

        if (trcam_instance == null)
        {
            Debug.LogError("Error getting TrackedCamera");
        }
        else
        {
            camerror = trcam_instance.HasCamera((uint)index, ref pHasCamera);
            if (camerror != EVRTrackedCameraError.None)
            {
                Debug.LogError("HasCamera: EVRTrackedCameraError=" + camerror);
                return;
            }
            if (pHasCamera)
            {
                camerror = trcam_instance.GetCameraFrameSize((uint)index, EVRTrackedCameraFrameType.Undistorted, ref width, ref height, ref buffsize);
                if (camerror != EVRTrackedCameraError.None)
                {
                    Debug.LogError("GetCameraFrameSize: EVRTrackedCameraError=" + camerror);
                }
                else
                {
                    Debug.Log("width=" + width + " height=" + height + " buffsize=" + buffsize);
                    texture = new Texture2D((int)width, (int)height, TextureFormat.RGBA32, false);

                    buffer  = new byte[buffsize];
                    pBuffer = Marshal.AllocHGlobal((int)buffsize);

                    camerror = trcam_instance.AcquireVideoStreamingService((uint)index, ref pTrackedCamera);
                    if (camerror != EVRTrackedCameraError.None)
                    {
                        Debug.LogError("AcquireVideoStreamingService: EVRTrackedCameraError=" + camerror);
                    }
                }
            }
            else
            {
                Debug.Log("No camera found; only the Vive Pre and later are supported");
            }
        }
    }
Example No. 9
    private byte[] updateTrackedCamera()
    {
        if (camError)
        {
            if (texture != null)
            {
                return(texture.GetRawTextureData());
            }
            else
            {
                return(null);
            }
        }
        // first get header only
        camerror = trcam_instance.GetVideoStreamFrameBuffer(pTrackedCamera, EVRTrackedCameraFrameType.Undistorted, (IntPtr)null, 0, ref pFrameHeader, (uint)Marshal.SizeOf(typeof(CameraVideoStreamFrameHeader_t)));
        if (camerror != EVRTrackedCameraError.None)
        {
            Debug.LogError("GetVideoStreamFrameBuffer: EVRTrackedCameraError=" + camerror);
            return(null);
        }
        // if the frame hasn't changed, don't copy the buffer
        if (pFrameHeader.nFrameSequence == prevFrameSequence)
        {
            return(null);
        }
        // now get header and buffer
        camerror = trcam_instance.GetVideoStreamFrameBuffer(pTrackedCamera, EVRTrackedCameraFrameType.Undistorted, pBuffer, buffsize, ref pFrameHeader, (uint)Marshal.SizeOf(typeof(CameraVideoStreamFrameHeader_t)));
        if (camerror != EVRTrackedCameraError.None)
        {
            Debug.LogError("GetVideoStreamFrameBuffer: EVRTrackedCameraError=" + camerror);
            return(null);
        }
        prevFrameSequence = pFrameHeader.nFrameSequence;

        // copy the new frame from the unmanaged buffer into the managed one
        Marshal.Copy(pBuffer, buffer, 0, (int)buffsize);
        return(buffer);
    }
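A minimal caller sketch (hypothetical; it mirrors the Update() pattern of the earlier examples): poll for a frame and upload it only when one actually arrived.

    void Update()
    {
        byte[] frame = updateTrackedCamera();
        if (frame == null)
        {
            return; // no new frame (or a transient error) this tick
        }
        texture.LoadRawTextureData(frame);
        texture.Apply();
    }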
Example No. 10
        public bool startStream()
        {
            myVideoSignalTime = TimeSource.now();
            OpenVR.TrackedCamera.AcquireVideoStreamingService(OpenVR.k_unTrackedDeviceIndex_Hmd, ref myHandle);
            if (myHandle == 0)
            {
                Warn.print("AcquireVideoStreamingService() failed");
                return(false);
            }

            HmdMatrix44_t         proj  = new HmdMatrix44_t();
            EVRTrackedCameraError error = OpenVR.TrackedCamera.GetCameraProjection(OpenVR.k_unTrackedDeviceIndex_Hmd, myFrameType, 0.01f, 10.0f, ref proj);

            if (error != EVRTrackedCameraError.None)
            {
                Warn.print("Error getting camera projection");
            }
            myProjection = VR.convertToMatrix4(proj);

            Info.print("Started VR Camera stream");
            myIsStreaming = true;
            return(true);
        }
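tick() in Example No. 6 above has a commented-out stopStream() call; one plausible shape for it (a sketch, assuming the fields used in startStream() and tick(); ReleaseVideoStreamingService is the OpenVR call that pairs with AcquireVideoStreamingService):

        public void stopStream()
        {
            if (myHandle != 0)
            {
                // release the handle acquired in startStream()
                OpenVR.TrackedCamera.ReleaseVideoStreamingService(myHandle);
                myHandle = 0;
            }
            myIsStreaming = false;
        }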
Example No. 11
 public string GetCameraErrorNameFromEnum(EVRTrackedCameraError eCameraError)
 {
     return(Marshal.PtrToStringAnsi(this.FnTable.GetCameraErrorNameFromEnum(eCameraError)));
 }
Example No. 12
	public string GetCameraErrorNameFromEnum(EVRTrackedCameraError eCameraError)
	{
		IntPtr result = FnTable.GetCameraErrorNameFromEnum(eCameraError);
		return Marshal.PtrToStringAnsi(result);
	}
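A typical logging use for these wrappers (a sketch, assuming a Unity context with a valid OpenVR.TrackedCamera instance):

    ulong handle = 0;
    EVRTrackedCameraError err = OpenVR.TrackedCamera.AcquireVideoStreamingService(OpenVR.k_unTrackedDeviceIndex_Hmd, ref handle);
    if (err != EVRTrackedCameraError.None)
    {
        Debug.LogError("AcquireVideoStreamingService: " + OpenVR.TrackedCamera.GetCameraErrorNameFromEnum(err));
    }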
Example No. 13
        // Methods (decompiled stub: only the signature survives; the body was stripped by the decompiler)
        public string GetCameraErrorNameFromEnum(EVRTrackedCameraError eCameraError) => default; // 0x00000001811E0FE0-0x00000001811E1060