Example #1
    private Size initTrackedCamera()
    {
        uint width = 0, height = 0, index = 0;
        bool pHasCamera = false;

        trcam_instance = OpenVR.TrackedCamera;

        if (trcam_instance == null)
        {
            Debug.LogError("Error getting TrackedCamera");
            camError = true;
        }
        else
        {
            camerror = trcam_instance.HasCamera(index, ref pHasCamera);
            if (camerror != EVRTrackedCameraError.None)
            {
                Debug.LogError("HasCamera: EVRTrackedCameraError=" + camerror);
                camError = true;
            }
            else if (pHasCamera)
            {
                camerror = trcam_instance.GetCameraFrameSize(index, EVRTrackedCameraFrameType.Undistorted, ref width, ref height, ref buffsize);
                if (camerror != EVRTrackedCameraError.None)
                {
                    Debug.LogError("GetCameraFrameSize: EVRTrackedCameraError=" + camerror);
                    camError = true;
                }
                else
                {
                    buffer  = new byte[buffsize];
                    pBuffer = Marshal.AllocHGlobal((int)buffsize);

                    camerror = trcam_instance.AcquireVideoStreamingService(index, ref pTrackedCamera);
                    if (camerror != EVRTrackedCameraError.None)
                    {
                        Debug.LogError("AcquireVideoStreamingService: EVRTrackedCameraError=" + camerror);
                        camError = true;
                    }
                }
            }
            else
            {
                Debug.Log("no camera found");
                camError = true;
            }
        }
        if (camError)
        {
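            // Fall back to whatever static texture is already on this object:
            // keep its raw pixels and report the quad's scale as the frame size.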
            Texture2D tex = gameObject.GetComponent<Renderer>().material.mainTexture as Texture2D;
            if (tex != null)
            {
                staticBuffer = tex.GetRawTextureData();
            }
            Vector3 scale = gameObject.transform.localScale;
            return new Size(scale.x, scale.y);
        }
        return new Size(width, height);
    }
Example #2
    // Use this for initialization
    void Start()
    {
        m_camera = OpenVR.TrackedCamera;
        if (m_camera == null)
        {
            Debug.LogError("No camera found");
            return;
        }

        // First get the size of a frame
        uint width      = 0;
        uint height     = 0;
        uint bufferSize = 0;
        EVRTrackedCameraError cameraError = m_camera.GetCameraFrameSize(0, EVRTrackedCameraFrameType.Undistorted, ref width, ref height, ref bufferSize);

        if (cameraError != EVRTrackedCameraError.None)
        {
            Debug.LogError("Could not get frame size (error=" + cameraError + ")");
            return;
        }

        if (width * height == 0)
        {
            Debug.LogError("Frame size of 0, are you sure you've enabled the camera in the SteamVR settings panel?");
            return;
        }

        // bufferSize / pixel count gives bytes per pixel (3 = RGB24, 4 = RGBA32)
        uint bytesPerPixel = bufferSize / (width * height);

        m_frameBufferSize = bufferSize;

        // Then get a handle to the stream
        cameraError = m_camera.AcquireVideoStreamingService(0, ref m_streamHandle);
        if (cameraError == EVRTrackedCameraError.None)
        {
            m_frameBuffer     = Marshal.AllocHGlobal((int)bufferSize);
            m_frameHeader     = new CameraVideoStreamFrameHeader_t();
            m_frameHeaderSize = (uint)Marshal.SizeOf(m_frameHeader);

            if (bytesPerPixel == 3)
            {
                m_texture = new Texture2D((int)width, (int)height, TextureFormat.RGB24, false);
            }
            else if (bytesPerPixel == 4)
            {
                m_texture = new Texture2D((int)width, (int)height, TextureFormat.RGBA32, false);
            }
            // if there's some other texture format here, we simply won't be able to do anything in Update()

            Renderer renderer = GetComponent<Renderer>();
            if (m_texture != null && renderer != null)
            {
                renderer.material.mainTexture = m_texture;
            }
        }
        else
        {
            Debug.LogError("Could not acquire handle to stream (error=" + cameraError + ")");
        }
    }
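Example #2 only sets up the stream; the comment inside it alludes to an Update() that consumes the frames. A minimal sketch of that half, reusing the fields from Example #2; the m_lastFrameSequence field is a hypothetical addition used to skip frames that have already been uploaded:

    void Update()
    {
        if (m_texture == null || m_streamHandle == 0)
        {
            return;
        }

        // Pull the newest frame into the unmanaged buffer; the header carries
        // metadata such as the frame sequence number.
        EVRTrackedCameraError cameraError = m_camera.GetVideoStreamFrameBuffer(
            m_streamHandle, EVRTrackedCameraFrameType.Undistorted,
            m_frameBuffer, m_frameBufferSize, ref m_frameHeader, m_frameHeaderSize);
        if (cameraError != EVRTrackedCameraError.None)
        {
            return;
        }

        // Nothing new since the last upload.
        if (m_frameHeader.nFrameSequence == m_lastFrameSequence)
        {
            return;
        }
        m_lastFrameSequence = m_frameHeader.nFrameSequence; // hypothetical field, not in Example #2

        // Copy unmanaged -> managed -> texture. A real implementation would
        // reuse this array across frames instead of reallocating it.
        byte[] frame = new byte[m_frameBufferSize];
        Marshal.Copy(m_frameBuffer, frame, 0, (int)m_frameBufferSize);
        m_texture.LoadRawTextureData(frame);
        m_texture.Apply();
    }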
Example #3
    void Start()
    {
        initPos = screen.transform.localPosition;

        uint width = 0, height = 0;
        bool pHasCamera = false;

        trcam_instance = OpenVR.TrackedCamera;

        if (trcam_instance == null)
        {
            Debug.LogError("Error getting TrackedCamera");
        }
        else
        {
            camerror = trcam_instance.HasCamera((uint)index, ref pHasCamera);
            if (camerror != EVRTrackedCameraError.None)
            {
                Debug.LogError("HasCamera: EVRTrackedCameraError=" + camerror);
                return;
            }
            if (pHasCamera)
            {
                camerror = trcam_instance.GetCameraFrameSize((uint)index, EVRTrackedCameraFrameType.Undistorted, ref width, ref height, ref buffsize);
                if (camerror != EVRTrackedCameraError.None)
                {
                    Debug.LogError("GetCameraFrameSize: EVRTrackedCameraError=" + camerror);
                }
                else
                {
                    Debug.Log("width=" + width + " height=" + height + " buffsize=" + buffsize);
                    texture = new Texture2D((int)width, (int)height, TextureFormat.RGBA32, false);

                    buffer  = new byte[buffsize];
                    pBuffer = Marshal.AllocHGlobal((int)buffsize);

                    camerror = trcam_instance.AcquireVideoStreamingService((uint)index, ref pTrackedCamera);
                    if (camerror != EVRTrackedCameraError.None)
                    {
                        Debug.LogError("AcquireVideoStreamingService: EVRTrackedCameraError=" + camerror);
                    }
                }
            }
            else
            {
                Debug.Log("no camera found, only Vive Pre and later supported");
            }
        }
    }
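None of the examples release what they acquire. A short companion sketch, assuming Example #3's fields, that hands the stream back to the runtime and frees the unmanaged buffer when the component is destroyed:

    void OnDestroy()
    {
        // Release the streaming service handle acquired in Start().
        if (trcam_instance != null && pTrackedCamera != 0)
        {
            trcam_instance.ReleaseVideoStreamingService(pTrackedCamera);
            pTrackedCamera = 0;
        }

        // Free the unmanaged frame buffer allocated with AllocHGlobal.
        if (pBuffer != IntPtr.Zero)
        {
            Marshal.FreeHGlobal(pBuffer);
            pBuffer = IntPtr.Zero;
        }
    }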