Example #1
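    //captures the left or right OVR camera texture at a fixed rate and pushes the raw BGRA frame to a WebRTC video input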
    void Update()
    {
        //throttle the capture to the target frame rate; deltaSample is the frame interval in seconds
        float deltaSample = 1.0f / _Fps;

        mLastSample += Time.deltaTime;
        if (mLastSample >= deltaSample)
        {
            mLastSample -= deltaSample;
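            //keeping the remainder instead of resetting to zero lets the average
            //capture rate stay close to _Fps even when Update() runs a little late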

            //old RenderTexture based capture path, kept for reference:
            //backup the current configuration to restore it later
            //var oldTargetTexture = _Camera.targetTexture;
            //var oldActiveTexture = RenderTexture.active;
            //
            //set the buffer as target and render the view of the camera into it
            //_Camera.targetTexture = mRtBuffer;
            //_Camera.Render();
            //
            //read the rendered buffer back into the texture
            //RenderTexture.active = mRtBuffer;
            //mTexture.ReadPixels(new Rect(0, 0, mRtBuffer.width, mRtBuffer.height), 0, 0, false);
            //mTexture.Apply();

            //grab the current frame of the left or right OVR camera
            if (IsRightCamera)
            {
                mTexture = ovrObj.GetCameraTextureRight();
            }
            else
            {
                mTexture = ovrObj.GetCameraTextureLeft();
            }

            //get the byte array. still looking for a way to reuse the current buffer
            //instead of allocating a new one every time
            mByteBuffer = mTexture.GetRawTextureData();

            //update the internal WebRTC device
            mVideoInput.UpdateFrame(mUsedDeviceName, mByteBuffer, mTexture.width, mTexture.height, WebRtcCSharp.VideoType.kBGRA, 0, true);

            //reset the camera/active render texture in case it is still used for other purposes
            //(belongs to the commented-out RenderTexture path above)
            //_Camera.targetTexture = oldTargetTexture;
            //RenderTexture.active = oldActiveTexture;

            //update debug output if available
            if (_DebugTarget != null)
            {
                _DebugTarget.texture = mTexture;
            }
        }
    }
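The rate limiting at the top of Example #1 is the part that is easiest to reuse on its own. The sketch below isolates it; FixedRateCapture and CaptureFrame are illustrative names, and CaptureFrame only stands in for the texture grab and UpdateFrame call of the example.

    using UnityEngine;

    public class FixedRateCapture : MonoBehaviour
    {
        //target capture rate in frames per second
        public float _Fps = 30f;

        //time accumulated since the last captured frame
        private float mLastSample;

        void Update()
        {
            //target interval between two captured frames in seconds
            float deltaSample = 1.0f / _Fps;

            mLastSample += Time.deltaTime;
            if (mLastSample >= deltaSample)
            {
                //keep the remainder so the throttle does not drift
                mLastSample -= deltaSample;
                CaptureFrame();
            }
        }

        private void CaptureFrame()
        {
            //hypothetical placeholder for the texture grab + UpdateFrame call
            Debug.Log("frame captured at " + Time.time);
        }
    }
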
Example #2
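    //pushes the current camera frame from Frame.CameraImage to a WebRTC video input once the component is ready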
    // Update is called once per frame
    void Update()
    {
        //if (!rtReady) {
        //    SetUpRT();
        //} else {

        //}

        //grab the latest camera image texture
        Texture frame = Frame.CameraImage.Texture;

        if (frame != null && ready)
        {
            //GetRawTextureData is only available on Texture2D, so bail out if the cast fails
            Texture2D f = frame as Texture2D;
            if (f == null)
            {
                return;
            }

            byteBuffer = f.GetRawTextureData();

            //update the internal WebRTC device
            videoInput.UpdateFrame(usedDeviceName, byteBuffer, f.width, f.height, WebRtcCSharp.VideoType.kBGRA, 0, true);

            //debug output once a valid frame size is reported
            if (frame.width > 0 && frame.height > 0)
            {
                VideoTest.instance.DebugCall(4);
            }
        }
    }
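Example #2 assumes that Frame.CameraImage.Texture can be cast to a readable Texture2D. If the camera image is delivered as a GPU-only texture, GetRawTextureData will not work and the pixels first have to be copied into a readable Texture2D, essentially the RenderTexture/ReadPixels approach that is commented out in Example #1. Below is a minimal sketch of such a copy helper; TextureReader and ToReadable are illustrative names, and the target texture is assumed to be created elsewhere with the same size and a CPU-readable format such as BGRA32.

    using UnityEngine;

    public static class TextureReader
    {
        //hypothetical helper: copies an arbitrary GPU texture into a readable
        //Texture2D so that GetRawTextureData() can be called on it
        public static Texture2D ToReadable(Texture source, Texture2D target)
        {
            //temporary render texture used as the copy destination
            RenderTexture rt = RenderTexture.GetTemporary(source.width, source.height, 0);
            RenderTexture old = RenderTexture.active;

            //copy the source texture on the GPU
            Graphics.Blit(source, rt);

            //read the pixels back into the CPU side Texture2D
            RenderTexture.active = rt;
            target.ReadPixels(new Rect(0, 0, rt.width, rt.height), 0, 0, false);
            target.Apply();

            //restore the previous state and release the temporary buffer
            RenderTexture.active = old;
            RenderTexture.ReleaseTemporary(rt);
            return target;
        }
    }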