Example #1
    private Size initTrackedCamera()
    {
        uint width = 0, height = 0, index = 0;
        bool pHasCamera = false;

        trcam_instance = OpenVR.TrackedCamera;

        if (trcam_instance == null)
        {
            Debug.LogError("Error getting TrackedCamera");
            camError = true;
        }
        else
        {
            camerror = trcam_instance.HasCamera(index, ref pHasCamera);
            if (camerror != EVRTrackedCameraError.None)
            {
                Debug.LogError("HasCamera: EVRTrackedCameraError=" + camerror);
                camError = true;
            }
            else if (pHasCamera)
            {
                camerror = trcam_instance.GetCameraFrameSize(index, EVRTrackedCameraFrameType.Undistorted, ref width, ref height, ref buffsize);
                if (camerror != EVRTrackedCameraError.None)
                {
                    Debug.LogError("GetCameraFrameSize: EVRTrackedCameraError=" + camerror);
                    camError = true;
                }
                else
                {
                    buffer  = new byte[buffsize];
                    pBuffer = Marshal.AllocHGlobal((int)buffsize);

                    camerror = trcam_instance.AcquireVideoStreamingService(index, ref pTrackedCamera);
                    if (camerror != EVRTrackedCameraError.None)
                    {
                        Debug.LogError("AcquireVideoStreamingService: EVRTrackedCameraError=" + camerror);
                        camError = true;
                    }
                }
            }
            else
            {
                Debug.Log("no camera found");
                camError = true;
            }
        }
        if (camError)
        {
            // No camera available: cache the material's existing texture bytes
            // and fall back to the object's scale for the returned size.
            Texture2D tex = (gameObject.GetComponent<Renderer>().material.mainTexture as Texture2D);
            if (tex != null)
            {
                staticBuffer = tex.GetRawTextureData();
            }
            Vector3 scale = gameObject.transform.localScale;
            return new Size(scale.x, scale.y);
        }
        return new Size(width, height);
    }
Example #2
    // Use this for initialization
    void Start()
    {
        m_camera = OpenVR.TrackedCamera;
        if (m_camera == null)
        {
            Debug.LogError("No camera found");
            return;
        }

        // First get the size of a frame
        uint width      = 0;
        uint height     = 0;
        uint bufferSize = 0;
        EVRTrackedCameraError cameraError = m_camera.GetCameraFrameSize(0, EVRTrackedCameraFrameType.Undistorted, ref width, ref height, ref bufferSize);

        if (cameraError != EVRTrackedCameraError.None)
        {
            Debug.LogError("Could not get frame size (error=" + cameraError + ")");
            return;
        }

        if (width * height == 0)
        {
            Debug.LogError("Frame size of 0, are you sure you've enabled the camera in the SteamVR settings panel?");
            return;
        }

        uint bytesPerPixel = bufferSize / (width * height); // bufferSize is in bytes, so this is bytes (not bits) per pixel

        m_frameBufferSize = bufferSize;

        // Then get a handle to the stream
        cameraError = m_camera.AcquireVideoStreamingService(0, ref m_streamHandle);
        if (cameraError == EVRTrackedCameraError.None)
        {
            m_frameBuffer     = Marshal.AllocHGlobal((int)bufferSize);
            m_frameHeader     = new CameraVideoStreamFrameHeader_t();
            m_frameHeaderSize = (uint)Marshal.SizeOf(m_frameHeader);

            if (bytesPerPixel == 3)
            {
                m_texture = new Texture2D((int)width, (int)height, TextureFormat.RGB24, false);
            }
            else if (bytesPerPixel == 4)
            {
                m_texture = new Texture2D((int)width, (int)height, TextureFormat.RGBA32, false);
            }
            // if there's some other texture format here, we simply won't be able to do anything in Update()

            if (m_texture && GetComponent<Renderer>())
            {
                GetComponent<Renderer>().material.mainTexture = m_texture;
            }
        }
        else
        {
            Debug.LogError("Could not acquire handle to stream (error=" + cameraError + ")");
        }
    }
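
The comment in the bytesPerPixel branch above defers the actual frame grab to Update(), which Example #2 never shows. Below is a minimal sketch of that polling loop and the matching teardown, assuming the same fields as Start() (m_camera, m_streamHandle, m_frameBuffer, m_frameBufferSize, m_frameHeader, m_frameHeaderSize, m_texture) plus a hypothetical uint m_lastFrameSequence field; it relies on GetVideoStreamFrameBuffer from the OpenVR C# bindings and Texture2D.LoadRawTextureData:

    void Update()
    {
        if (m_streamHandle == 0 || m_texture == null)
        {
            return;
        }

        // Copy the latest frame into the unmanaged buffer allocated in Start()
        EVRTrackedCameraError cameraError = m_camera.GetVideoStreamFrameBuffer(
            m_streamHandle, EVRTrackedCameraFrameType.Undistorted,
            m_frameBuffer, m_frameBufferSize, ref m_frameHeader, m_frameHeaderSize);
        if (cameraError != EVRTrackedCameraError.None)
        {
            return;
        }

        // Only upload when the driver has produced a new frame
        // (m_lastFrameSequence is a hypothetical field, not in Example #2)
        if (m_frameHeader.nFrameSequence == m_lastFrameSequence)
        {
            return;
        }
        m_lastFrameSequence = m_frameHeader.nFrameSequence;

        m_texture.LoadRawTextureData(m_frameBuffer, (int)m_frameBufferSize);
        m_texture.Apply();
    }

    void OnDestroy()
    {
        // Release the stream handle and free the unmanaged buffer from Start()
        if (m_streamHandle != 0)
        {
            m_camera.ReleaseVideoStreamingService(m_streamHandle);
            m_streamHandle = 0;
        }
        if (m_frameBuffer != IntPtr.Zero)
        {
            Marshal.FreeHGlobal(m_frameBuffer);
            m_frameBuffer = IntPtr.Zero;
        }
    }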
Example #3
        // Token: 0x060028B7 RID: 10423 RVA: 0x000C4AC4 File Offset: 0x000C2CC4
        public VideoStream(uint deviceIndex)
        {
            this.deviceIndex = deviceIndex;
            CVRTrackedCamera trackedCamera = OpenVR.TrackedCamera;

            if (trackedCamera != null)
            {
                // The returned error code is ignored here; _hasCamera simply
                // stays false if the query fails.
                trackedCamera.HasCamera(deviceIndex, ref this._hasCamera);
            }
        }
Example #4
 // Token: 0x06005FB5 RID: 24501 RVA: 0x0021A6E8 File Offset: 0x00218AE8
 // Increment the stream's reference count, acquiring the underlying
 // OpenVR stream handle on the first acquire (when a camera is present).
 public ulong Acquire()
 {
     if (this._handle == 0UL && this.hasCamera)
     {
         CVRTrackedCamera trackedCamera = OpenVR.TrackedCamera;
         if (trackedCamera != null)
         {
             trackedCamera.AcquireVideoStreamingService(this.deviceIndex, ref this._handle);
         }
     }
     return this.refCount += 1UL;
 }
Example #5
 // Token: 0x06005FB6 RID: 24502 RVA: 0x0021A740 File Offset: 0x00218B40
 // Decrement the reference count; when it drops to zero, release the
 // underlying OpenVR stream handle.
 public ulong Release()
 {
     if (this.refCount > 0UL && (this.refCount -= 1UL) == 0UL && this._handle != 0UL)
     {
         CVRTrackedCamera trackedCamera = OpenVR.TrackedCamera;
         if (trackedCamera != null)
         {
             trackedCamera.ReleaseVideoStreamingService(this._handle);
         }
         this._handle = 0UL;
     }
     return this.refCount;
 }
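
Examples #4 and #5 form a reference-counted pair: the OpenVR handle is opened on the first Acquire() and closed only when the last Release() brings the count back to zero. A hedged usage sketch, assuming a VideoStream-style class exposing exactly these two methods:

 // Hypothetical consumer: hold the camera stream only while this component is enabled
 void OnEnable()
 {
     videostream.Acquire();
 }

 void OnDisable()
 {
     videostream.Release();
 }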
Example #6
    void Start()
    {
        initPos = screen.transform.localPosition;

        uint width = 0, height = 0;
        bool pHasCamera = false;

        trcam_instance = OpenVR.TrackedCamera;

        if (trcam_instance == null)
        {
            Debug.LogError("Error getting TrackedCamera");
        }
        else
        {
            camerror = trcam_instance.HasCamera((uint)index, ref pHasCamera);
            if (camerror != EVRTrackedCameraError.None)
            {
                Debug.LogError("HasCamera: EVRTrackedCameraError=" + camerror);
                return;
            }
            if (pHasCamera)
            {
                camerror = trcam_instance.GetCameraFrameSize((uint)index, EVRTrackedCameraFrameType.Undistorted, ref width, ref height, ref buffsize);
                if (camerror != EVRTrackedCameraError.None)
                {
                    Debug.LogError("GetCameraFrameSize: EVRTrackedCameraError=" + camerror);
                }
                else
                {
                    Debug.Log("width=" + width + " height=" + height + " buffsize=" + buffsize);
                    texture = new Texture2D((int)width, (int)height, TextureFormat.RGBA32, false);

                    buffer  = new byte[buffsize];
                    pBuffer = Marshal.AllocHGlobal((int)buffsize);

                    camerror = trcam_instance.AcquireVideoStreamingService((uint)index, ref pTrackedCamera);
                    if (camerror != EVRTrackedCameraError.None)
                    {
                        Debug.LogError("AcquireVideoStreamingService: EVRTrackedCameraError=" + camerror);
                    }
                }
            }
            else
            {
                Debug.Log("no camera found, only Vive Pre and later supported");
            }
        }
    }
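
Example #6 acquires the streaming service and an unmanaged buffer but never tears them down. A minimal cleanup sketch, assuming the same fields (trcam_instance, pTrackedCamera, pBuffer):

    void OnApplicationQuit()
    {
        // Close the stream handle acquired in Start()
        if (trcam_instance != null && pTrackedCamera != 0)
        {
            trcam_instance.ReleaseVideoStreamingService(pTrackedCamera);
            pTrackedCamera = 0;
        }
        // Free the unmanaged frame buffer
        if (pBuffer != IntPtr.Zero)
        {
            Marshal.FreeHGlobal(pBuffer);
            pBuffer = IntPtr.Zero;
        }
    }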
Example #7
        // Token: 0x060028B6 RID: 10422 RVA: 0x000C4914 File Offset: 0x000C2B14
        private void Update()
        {
            if (Time.frameCount == this.prevFrameCount)
            {
                return;
            }
            this.prevFrameCount = Time.frameCount;
            if (this.videostream.handle == 0UL)
            {
                return;
            }
            SteamVR instance = SteamVR.instance;

            if (instance == null)
            {
                return;
            }
            CVRTrackedCamera trackedCamera = OpenVR.TrackedCamera;

            if (trackedCamera == null)
            {
                return;
            }
            IntPtr    nativeTex        = IntPtr.Zero;
            // The 2x2 placeholder only exists to supply a native texture pointer
            // for the D3D11 path before the real external texture is created.
            Texture2D texture2D        = (this._texture != null) ? this._texture : new Texture2D(2, 2);
            uint      nFrameHeaderSize = (uint)Marshal.SizeOf(this.header.GetType());

            if (instance.textureType == ETextureType.OpenGL)
            {
                if (this.glTextureId != 0U)
                {
                    trackedCamera.ReleaseVideoStreamTextureGL(this.videostream.handle, this.glTextureId);
                }
                if (trackedCamera.GetVideoStreamTextureGL(this.videostream.handle, this.frameType, ref this.glTextureId, ref this.header, nFrameHeaderSize) != EVRTrackedCameraError.None)
                {
                    return;
                }
                nativeTex = (IntPtr)this.glTextureId;   // the GL texture id doubles as the native pointer
            }
            else if (instance.textureType == ETextureType.DirectX && trackedCamera.GetVideoStreamTextureD3D11(this.videostream.handle, this.frameType, texture2D.GetNativeTexturePtr(), ref nativeTex, ref this.header, nFrameHeaderSize) != EVRTrackedCameraError.None)
            {
                return;
            }
            if (this._texture == null)
            {
                this._texture = Texture2D.CreateExternalTexture((int)this.header.nWidth, (int)this.header.nHeight, TextureFormat.RGBA32, false, false, nativeTex);
                uint num  = 0U;
                uint num2 = 0U;
                VRTextureBounds_t vrtextureBounds_t = default(VRTextureBounds_t);
                if (trackedCamera.GetVideoStreamTextureSize(this.deviceIndex, this.frameType, ref vrtextureBounds_t, ref num, ref num2) == EVRTrackedCameraError.None)
                {
                    // Flip the V axis: the camera image is vertically inverted
                    // relative to Unity's UV convention.
                    vrtextureBounds_t.vMin = 1f - vrtextureBounds_t.vMin;
                    vrtextureBounds_t.vMax = 1f - vrtextureBounds_t.vMax;
                    this.frameBounds       = vrtextureBounds_t;
                    return;
                }
            }
            else
            {
                this._texture.UpdateExternalTexture(nativeTex);
            }
        }
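
The frameBounds captured above (with its V axis flipped for Unity) still has to be applied wherever the external texture is sampled. One possible approach, as a hedged sketch: map the valid region onto a standard material's UV transform (ApplyFrameBounds is a hypothetical helper, not part of the original class):

        void ApplyFrameBounds(Material material)
        {
            // Offset/scale the material UVs so only the valid camera region is shown
            material.mainTextureOffset = new Vector2(frameBounds.uMin, frameBounds.vMin);
            material.mainTextureScale  = new Vector2(frameBounds.uMax - frameBounds.uMin,
                                                     frameBounds.vMax - frameBounds.vMin);
        }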