private IEnumerator FinalRenderCapture()
    {
        yield return new WaitForEndOfFrame();

        while (_handle >= 0 && !AVProMovieCapturePlugin.IsNewFrameDue(_handle))
        {
            System.Threading.Thread.Sleep(8);
        }

        if (_handle >= 0)
        {
            // Grab final RenderTexture into texture and encode
#if AVPRO_MOVIECAPTURE_GLISSUEEVENT
            if (_useNativeGrabber)
            {
                if (_audioCapture && _audioDeviceIndex < 0 && !_noAudio)
                {
                    AVProMovieCapturePlugin.EncodeAudio(_handle, _audioCapture.BufferPtr, (uint)_audioCapture.BufferLength);
                    _audioCapture.FlushBuffer();
                }
                GL.IssuePluginEvent(AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
            }
#endif
            if (!_useNativeGrabber)
            {
                ConvertAndEncode();
                //_texture.ReadPixels(new Rect(0, 0, _texture.width, _texture.height), 0, 0, false);
                //EncodeTexture(_texture);
            }

            UpdateFPS();
        }

        yield return null;
    }
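
A note on how this coroutine is typically driven: WaitForEndOfFrame only yields once, so the method has to be restarted for every rendered frame. Below is a minimal driver sketch, assuming a LateUpdate hook and the _capturing/_paused/_handle fields used above; it is not part of the original source.

    // Minimal driver sketch (assumed, not from the original source): restart the
    // end-of-frame capture coroutine once per frame while a capture session is active.
    private void LateUpdate()
    {
        if (_capturing && !_paused && _handle >= 0)
        {
            StartCoroutine(FinalRenderCapture());
        }
    }
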
Example #2
    private void OnRenderImage(RenderTexture source, RenderTexture dest)
    {
        if (_capturing && !_paused)
        {
#if true
            while (_handle >= 0 && !AVProMovieCapturePlugin.IsNewFrameDue(_handle))
            {
                System.Threading.Thread.Sleep(1);
            }
            if (_handle >= 0)
            {
                if (_audioCapture && _audioDeviceIndex < 0 && !_noAudio && _isRealTime)
                {
                    int           audioDataLength = 0;
                    System.IntPtr audioDataPtr    = _audioCapture.ReadData(out audioDataLength);
                    if (audioDataLength > 0)
                    {
                        AVProMovieCapturePlugin.EncodeAudio(_handle, audioDataPtr, (uint)audioDataLength);
                    }
                }

                // In Direct3D the RT can be flipped vertically

                /*if (source.texelSize.y < 0)
                 * {
                 *
                 * }*/

                Graphics.Blit(source, dest);

                _lastSource = source;
                _lastDest   = dest;

                if (dest != _originalTarget)
                {
                    Graphics.Blit(dest, _originalTarget);
                }

#if AVPRO_MOVIECAPTURE_GLISSUEEVENT_52
                GL.IssuePluginEvent(AVProMovieCapturePlugin.GetRenderEventFunc(), AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#else
                GL.IssuePluginEvent(AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#endif
                GL.InvalidateState();

                UpdateFPS();

                return;
            }
#endif
        }

        // Pass-through
        Graphics.Blit(source, dest);

        _lastSource = source;
        _lastDest   = dest;
    }
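
The "In Direct3D the RT can be flipped vertically" comment above points at a flip that the commented-out stub never implements. A sketch of one common way to handle it, assuming the flip is applied during the blit; the helper name is hypothetical.

    // Hypothetical helper, a sketch only: on Direct3D a RenderTexture can be flipped
    // vertically (texelSize.y < 0), so blit with a negative Y scale to correct it.
    private static void BlitWithFlip(RenderTexture source, RenderTexture dest)
    {
        if (source.texelSize.y < 0f)
        {
            Graphics.Blit(source, dest, new Vector2(1f, -1f), new Vector2(0f, 1f));
        }
        else
        {
            Graphics.Blit(source, dest);
        }
    }
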
    private void OnRenderImage(RenderTexture source, RenderTexture dest)
    {
#if false
        if (_capturing && !_paused)
        {
            while (_handle >= 0 && !AVProMovieCapturePlugin.IsNewFrameDue(_handle))
            {
                System.Threading.Thread.Sleep(1);
            }
            if (_handle >= 0)
            {
                if (_audioCapture && _audioDeviceIndex < 0 && !_noAudio)
                {
                    uint bufferLength = (uint)_audioCapture.BufferLength;
                    if (bufferLength > 0)
                    {
                        AVProMovieCapturePlugin.EncodeAudio(_handle, _audioCapture.BufferPtr, bufferLength);
                        _audioCapture.FlushBuffer();
                    }
                }

                // In Direct3D the RT can be flipped vertically

                /*if (source.texelSize.y < 0)
                 * {
                 *
                 * }*/

                Graphics.Blit(_cubeTarget, _target, _cubemapToEquirectangularMaterial);

#if AVPRO_MOVIECAPTURE_GLISSUEEVENT_52
                GL.IssuePluginEvent(_renderEventFunction, AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#else
                GL.IssuePluginEvent(AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#endif
                GL.InvalidateState();

                UpdateFPS();
            }
        }
#endif
        // Pass-through

        if (_cubeTarget != null)
        {
            Graphics.Blit(_cubeTarget, dest, _cubemapToEquirectangularMaterial);
        }
        else
        {
            Graphics.Blit(source, dest);
        }
    }
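
The cubemap path above assumes _cubeTarget is a cube-dimension RenderTexture and that _target receives the equirectangular projection. A minimal allocation sketch follows; the method name, sizes and depth format are assumptions, not taken from the source.

    // Allocation sketch (hypothetical; sizes and formats are assumptions):
    // _cubeTarget is what RenderToCubemap()/the equirectangular blit read from,
    // _target holds the flattened result that is handed to the encoder.
    private void CreateCaptureTargets(int cubeSize, int outputWidth, int outputHeight)
    {
        _cubeTarget = new RenderTexture(cubeSize, cubeSize, 24);
        _cubeTarget.dimension = UnityEngine.Rendering.TextureDimension.Cube;
        _cubeTarget.Create();

        _target = new RenderTexture(outputWidth, outputHeight, 0);
        _target.Create();
    }
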
    public override void UpdateFrame()
    {
        if (_capturing && !_paused)
        {
            if (_cubeTarget != null && _camera != null)
            {
                while (_handle >= 0 && !AVProMovieCapturePlugin.IsNewFrameDue(_handle))
                {
                    System.Threading.Thread.Sleep(1);
                }
                if (_handle >= 0)
                {
                    if (_audioCapture && _audioDeviceIndex < 0 && !_noAudio && _isRealTime)
                    {
                        uint bufferLength = (uint)_audioCapture.BufferLength;
                        if (bufferLength > 0)
                        {
                            AVProMovieCapturePlugin.EncodeAudio(_handle, _audioCapture.BufferPtr, bufferLength);
                            _audioCapture.FlushBuffer();
                        }
                    }

                    // In Direct3D the RT can be flipped vertically

                    /*if (source.texelSize.y < 0)
                     * {
                     *
                     * }*/

                    _cubeCamera.transform.position = _camera.transform.position;
                    _cubeCamera.transform.rotation = _camera.transform.rotation;
                    _cubeCamera.RenderToCubemap(_cubeTarget, 63);  // 63 = face mask with all six cubemap faces set

                    Graphics.Blit(_cubeTarget, _target, _cubemapToEquirectangularMaterial);

                    AVProMovieCapturePlugin.SetTexturePointer(_handle, _target.GetNativeTexturePtr());

#if AVPRO_MOVIECAPTURE_GLISSUEEVENT_52
                    GL.IssuePluginEvent(_renderEventFunction, AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#else
                    GL.IssuePluginEvent(AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#endif
                    GL.InvalidateState();

                    UpdateFPS();
                }
            }
        }

        base.UpdateFrame();
    }
    private IEnumerator FinalRenderCapture()
    {
        yield return new WaitForEndOfFrame();

        //System.Threading.Thread.Sleep(1000);
        while (_handle >= 0 && !AVProMovieCapturePlugin.IsNewFrameDue(_handle))
        {
            System.Threading.Thread.Sleep(8);
        }

        /*int frame = Time.frameCount;
         * if (frame - _lastFrame != 1)
         * {
         *      Debug.Log("dropped: " + (frame - _lastFrame));
         * }
         * _lastFrame = frame;*/

        //System.Threading.Thread.Sleep(100);

        //if (IsNewFrameDue(_handle))
        {
            // Grab final RenderTexture into texture and encode
#if UNITY_3_5 || UNITY_4_1 || UNITY_4_0_1 || UNITY_4_0
            if (!_isDirectX11)
            {
                if (_audioCapture && _audioDeviceIndex < 0 && !_noAudio)
                {
                    AVProMovieCapturePlugin.EncodeAudio(_handle, _audioCapture.BufferPtr, (uint)_audioCapture.BufferLength);
                    _audioCapture.FlushBuffer();
                }
                GL.IssuePluginEvent(AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
            }
            else
            {
                _texture.ReadPixels(new Rect(0, 0, _texture.width, _texture.height), 0, 0, false);
                EncodeTexture(_texture);
            }
#else
            _texture.ReadPixels(new Rect(0, 0, _texture.width, _texture.height), 0, 0, false);
            EncodeTexture(_texture);
#endif
            UpdateFPS();
        }

        yield return null;
    }
Example #6
    private IEnumerator FinalRenderCapture()
    {
        yield return new WaitForEndOfFrame();

        bool canGrab = true;

        if (IsUsingMotionBlur())
        {
            // If the motion blur is still accumulating, don't grab this frame
            canGrab = _motionBlur.IsFrameAccumulated;
        }

        if (canGrab)
        {
            // Wait for the encoder to require a new frame to be sent
            while (_handle >= 0 && !AVProMovieCapturePlugin.IsNewFrameDue(_handle))
            {
                System.Threading.Thread.Sleep(NewFrameSleepTimeMs);
            }

            // Send the new frame to encode
            if (_handle >= 0)
            {
                // Grab final RenderTexture into texture and encode
                if (IsRecordingUnityAudio())
                {
                    int           audioDataLength = 0;
                    System.IntPtr audioDataPtr    = _audioCapture.ReadData(out audioDataLength);
                    if (audioDataLength > 0)
                    {
                        AVProMovieCapturePlugin.EncodeAudio(_handle, audioDataPtr, (uint)audioDataLength);
                    }
                }
#if AVPRO_MOVIECAPTURE_GLISSUEEVENT_52
                GL.IssuePluginEvent(_renderEventFunction, AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#else
                GL.IssuePluginEvent(AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#endif
                GL.InvalidateState();

                UpdateFPS();
            }
        }

        yield return null;
    }
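
The conditional GL.IssuePluginEvent dispatch is repeated verbatim in every example. A hypothetical helper that centralises it, using only the calls already shown above, could look like this:

    // Hypothetical helper (not part of the plugin API): build the packed event id and
    // dispatch it through the Unity 5.2+ or legacy GL.IssuePluginEvent path.
    private void IssueCaptureFrameEvent()
    {
        int eventId = AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle;
#if AVPRO_MOVIECAPTURE_GLISSUEEVENT_52
        GL.IssuePluginEvent(_renderEventFunction, eventId);
#else
        GL.IssuePluginEvent(eventId);
#endif
    }
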
Example #7
    public override void UpdateFrame()
    {
        if (_capturing && !_paused)
        {
            if (_cubeTarget != null && _camera != null)
            {
                bool canGrab = true;

                if (_useMotionBlur && !_isRealTime && _motionBlur != null)
                {
                    // TODO: fix motion blur
                    //this._motionBlur.RenderImage()
                    // If the motion blur is still accumulating, don't grab this frame
                    canGrab = _motionBlur.IsFrameAccumulated;
                }

                if (canGrab)
                {
                    while (_handle >= 0 && !AVProMovieCapturePlugin.IsNewFrameDue(_handle))
                    {
                        System.Threading.Thread.Sleep(1);
                    }
                    if (_handle >= 0)
                    {
                        if (_audioCapture && _audioDeviceIndex < 0 && !_noAudio && _isRealTime)
                        {
                            uint bufferLength = (uint)_audioCapture.BufferLength;
                            if (bufferLength > 0)
                            {
                                AVProMovieCapturePlugin.EncodeAudio(_handle, _audioCapture.BufferPtr, bufferLength);
                                _audioCapture.FlushBuffer();
                            }
                        }

                        // In Direct3D the RT can be flipped vertically

                        /*if (source.texelSize.y < 0)
                         * {
                         *
                         * }*/

                        //_cubeCamera.transform.position = _camera.transform.position;
                        //_cubeCamera.transform.rotation = _camera.transform.rotation;

                        Camera camera = _camera;

                        if (!IsManualCubemapRendering())
                        {
                            // Note: Camera.RenderToCubemap() doesn't support camera rotation
                            camera.RenderToCubemap(_cubeTarget, 63);  // 63 = face mask with all six cubemap faces set
                        }
                        else
                        {
                            // Cache old camera values
                            float         prevFieldOfView = _camera.fieldOfView;
                            RenderTexture prevtarget      = _camera.targetTexture;
                            Quaternion    prevRotation    = camera.transform.rotation;
                            Quaternion    xform           = camera.transform.rotation;

                            // Ignore the camera rotation
                            if (!_supportCameraRotation)
                            {
                                xform = Quaternion.identity;
                            }

                            camera.targetTexture = _faceTarget;
                            camera.fieldOfView   = 90f;

                            camera.transform.rotation = xform * Quaternion.LookRotation(Vector3.forward, Vector3.down);
                            camera.Render();
                            Graphics.SetRenderTarget(_cubeTarget, 0, CubemapFace.PositiveZ);
                            Graphics.Blit(_faceTarget, _blitMaterial);

                            camera.transform.rotation = xform * Quaternion.LookRotation(Vector3.back, Vector3.down);
                            camera.Render();
                            Graphics.SetRenderTarget(_cubeTarget, 0, CubemapFace.NegativeZ);
                            Graphics.Blit(_faceTarget, _blitMaterial);

                            camera.transform.rotation = xform * Quaternion.LookRotation(Vector3.right, Vector3.down);
                            camera.Render();
                            Graphics.SetRenderTarget(_cubeTarget, 0, CubemapFace.NegativeX);
                            Graphics.Blit(_faceTarget, _blitMaterial);

                            camera.transform.rotation = xform * Quaternion.LookRotation(Vector3.left, Vector3.down);
                            camera.Render();
                            Graphics.SetRenderTarget(_cubeTarget, 0, CubemapFace.PositiveX);
                            Graphics.Blit(_faceTarget, _blitMaterial);

                            camera.transform.rotation = xform * Quaternion.LookRotation(Vector3.up, Vector3.forward);
                            camera.Render();
                            Graphics.SetRenderTarget(_cubeTarget, 0, CubemapFace.PositiveY);
                            Graphics.Blit(_faceTarget, _blitMaterial);

                            camera.transform.rotation = xform * Quaternion.LookRotation(Vector3.down, Vector3.back);
                            camera.Render();
                            Graphics.SetRenderTarget(_cubeTarget, 0, CubemapFace.NegativeY);
                            Graphics.Blit(_faceTarget, _blitMaterial);

                            Graphics.SetRenderTarget(null);

                            // Restore camera values
                            camera.transform.rotation = prevRotation;
                            camera.targetTexture      = prevtarget;
                            camera.fieldOfView        = prevFieldOfView;
                        }

                        Graphics.Blit(_cubeTarget, _finalTarget, _cubemapToEquirectangularMaterial);

                        // TODO: motion blur accumulate here!  Currently it's grabbing from the camera which is wrong because it's not the full 360 capture...

                        // TODO: cache GetNativeTexturePtr() as it causes a GPU thread sync!
                        AVProMovieCapturePlugin.SetTexturePointer(_handle, _finalTarget.GetNativeTexturePtr());

#if AVPRO_MOVIECAPTURE_GLISSUEEVENT_52
                        GL.IssuePluginEvent(_renderEventFunction, AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#else
                        GL.IssuePluginEvent(AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#endif
                        GL.InvalidateState();

                        UpdateFPS();
                    }
                }
            }
        }

        base.UpdateFrame();
    }
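
The six near-identical render/blit blocks above differ only in the look direction and the target face. A hypothetical refactoring sketch of that per-face pattern, kept to the same calls as the original:

    // Hypothetical helper: render the camera along one axis and copy the face
    // target into the matching cubemap face (mirrors the inline blocks above).
    private void RenderCubemapFace(Camera camera, Quaternion xform, Vector3 forward, Vector3 up, CubemapFace face)
    {
        camera.transform.rotation = xform * Quaternion.LookRotation(forward, up);
        camera.Render();
        Graphics.SetRenderTarget(_cubeTarget, 0, face);
        Graphics.Blit(_faceTarget, _blitMaterial);
    }

With this, the first block collapses to RenderCubemapFace(camera, xform, Vector3.forward, Vector3.down, CubemapFace.PositiveZ), and so on for the remaining five faces.
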
Example #8
    // If we're forcing a resolution or AA change then we have to render the camera again to the new target.
    // If we just set the camera's targetTexture and grab it in OnRenderImage, we can't render it to the screen as before :(
    public override void UpdateFrame()
    {
        if (_capturing && !_paused && _camera != null)
        {
            bool canGrab = true;

            if (IsUsingMotionBlur())
            {
                // If the motion blur is still accumulating, don't grab this frame
                canGrab = _motionBlur.IsFrameAccumulated;
            }

            if (canGrab)
            {
                while (_handle >= 0 && !AVProMovieCapturePlugin.IsNewFrameDue(_handle))
                {
                    System.Threading.Thread.Sleep(1);
                }
                if (_handle >= 0)
                {
                    // Render the camera
                    {
                        RenderTexture prev = _camera.targetTexture;
                        // Reset the viewport rect, since rendering to a texture captures the full viewport
                        Rect             prevRect     = _camera.rect;
                        CameraClearFlags prevClear    = _camera.clearFlags;
                        Color            prevColor    = _camera.backgroundColor;
                        bool             clearChanged = false;
                        if (_camera.clearFlags == CameraClearFlags.Nothing || _camera.clearFlags == CameraClearFlags.Depth)
                        {
                            clearChanged            = true;
                            _camera.clearFlags      = CameraClearFlags.SolidColor;
                            _camera.backgroundColor = Color.black;
                        }

                        // Render
                        _camera.rect          = new Rect(0f, 0f, 1f, 1f);
                        _camera.targetTexture = _target;
                        _camera.Render();

                        // Restore camera
                        {
                            _camera.rect = prevRect;
                            if (clearChanged)
                            {
                                _camera.clearFlags      = prevClear;
                                _camera.backgroundColor = prevColor;
                            }
                            _camera.targetTexture = prev;
                        }
                    }

                    // NOTE: This line is needed if captures must survive alt-tab events or window resizes where the GPU resources are recreated.
                    // It is very expensive though, as it forces a sync with the rendering thread.
                    _targetNativePointer = _target.GetNativeTexturePtr();

                    AVProMovieCapturePlugin.SetTexturePointer(_handle, _targetNativePointer);

#if AVPRO_MOVIECAPTURE_GLISSUEEVENT_52
                    GL.IssuePluginEvent(_renderEventFunction, AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#else
                    GL.IssuePluginEvent(AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#endif

                    if (IsRecordingUnityAudio())
                    {
                        int           audioDataLength = 0;
                        System.IntPtr audioDataPtr    = _audioCapture.ReadData(out audioDataLength);
                        if (audioDataLength > 0)
                        {
                            AVProMovieCapturePlugin.EncodeAudio(_handle, audioDataPtr, (uint)audioDataLength);
                        }
                    }

                    UpdateFPS();
                }
            }
        }
        base.UpdateFrame();
    }
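
The NOTE above flags GetNativeTexturePtr() as expensive because it forces a render-thread sync. A sketch of how the pointer could be cached once when the target is (re)created instead of being fetched every frame; the method name is hypothetical, and the trade-off is that the capture then depends on calling this again after alt-tab or device resets.

    // Hypothetical helper: cache the native pointer once and hand it to the plugin,
    // instead of paying the render-thread sync on every captured frame.
    private void RefreshTargetNativePointer()
    {
        if (_target != null && _handle >= 0)
        {
            _targetNativePointer = _target.GetNativeTexturePtr();
            AVProMovieCapturePlugin.SetTexturePointer(_handle, _targetNativePointer);
        }
    }
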
    //private Material _invisibleMaterial;

#if true
    // If we're forcing a resolution or AA change then we have to render the camera again to the new target.
    // If we just set the camera's targetTexture and grab it in OnRenderImage, we can't render it to the screen as before :(
    public override void UpdateFrame()
    {
        if (_capturing && !_paused && _camera != null)
        {
            bool canGrab = true;

            if (_useMotionBlur && !_isRealTime && _motionBlur != null)
            {
                // If the motion blur is still accumulating, don't grab this frame
                canGrab = _motionBlur.IsFrameAccumulated;
            }

            if (canGrab)
            {
                while (_handle >= 0 && !AVProMovieCapturePlugin.IsNewFrameDue(_handle))
                {
                    System.Threading.Thread.Sleep(1);
                }
                if (_handle >= 0)
                {
                    if (_audioCapture && _audioDeviceIndex < 0 && !_noAudio && _isRealTime)
                    {
                        int           audioDataLength = 0;
                        System.IntPtr audioDataPtr    = _audioCapture.ReadData(out audioDataLength);
                        if (audioDataLength > 0)
                        {
                            AVProMovieCapturePlugin.EncodeAudio(_handle, audioDataPtr, (uint)audioDataLength);
                        }
                    }

                    GL.InvalidateState();
                    RenderTexture prev = _camera.targetTexture;
                    _camera.targetTexture = _target;
                    // Reset the viewport rect, since rendering to a texture captures the full viewport
                    Rect             prevRect     = _camera.rect;
                    CameraClearFlags prevClear    = _camera.clearFlags;
                    Color            prevColor    = _camera.backgroundColor;
                    bool             clearChanged = false;
                    if (_camera.clearFlags == CameraClearFlags.Nothing || _camera.clearFlags == CameraClearFlags.Depth)
                    {
                        clearChanged            = true;
                        _camera.clearFlags      = CameraClearFlags.SolidColor;
                        _camera.backgroundColor = Color.black;
                    }
                    _camera.rect = new Rect(0f, 0f, 1f, 1f);
                    _camera.Render();
                    _camera.rect = prevRect;
                    if (clearChanged)
                    {
                        _camera.clearFlags      = prevClear;
                        _camera.backgroundColor = prevColor;
                    }

                    _camera.targetTexture = prev;

                    // TODO: find out why without this blit our StretchRect fails (at least in Unity 5.x, seems ok in Unity 4.6.8)
                    //Graphics.Blit(Texture2D.whiteTexture, _target, _invisibleMaterial);
                    GL.InvalidateState();

                    AVProMovieCapturePlugin.SetTexturePointer(_handle, _target.GetNativeTexturePtr());

#if AVPRO_MOVIECAPTURE_GLISSUEEVENT_52
                    GL.IssuePluginEvent(_renderEventFunction, AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#else
                    GL.IssuePluginEvent(AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#endif


                    UpdateFPS();
                }
            }
        }
        base.UpdateFrame();
    }
#endif
Example #10
    public override void UpdateFrame()
    {
        if (_capturing && !_paused)
        {
            if (_cubeTarget != null && _camera != null)
            {
                bool canGrab = true;

                if (IsUsingMotionBlur())
                {
                    // TODO: fix motion blur
                    //this._motionBlur.RenderImage()
                    // If the motion blur is still accumulating, don't grab this frame
                    canGrab = _motionBlur.IsFrameAccumulated;
                }

                if (canGrab)
                {
                    while (_handle >= 0 && !AVProMovieCapturePlugin.IsNewFrameDue(_handle))
                    {
                        System.Threading.Thread.Sleep(1);
                    }
                    if (_handle >= 0)
                    {
                        if (_audioCapture && _audioDeviceIndex < 0 && !_noAudio && _isRealTime)
                        {
                            uint bufferLength = (uint)_audioCapture.BufferLength;
                            if (bufferLength > 0)
                            {
                                AVProMovieCapturePlugin.EncodeAudio(_handle, _audioCapture.BufferPtr, bufferLength);
                                _audioCapture.FlushBuffer();
                            }
                        }

                        RenderTexture finalTexture = _finalTarget;
                        if (!IsUsingMotionBlur())
                        {
                            UpdateTexture();
                        }
                        else
                        {
                            finalTexture = _motionBlur.FinalTexture;
                        }

                        // TODO: cache GetNativeTexturePtr() as it causes a GPU thread sync!
                        AVProMovieCapturePlugin.SetTexturePointer(_handle, finalTexture.GetNativeTexturePtr());

#if AVPRO_MOVIECAPTURE_GLISSUEEVENT_52
                        GL.IssuePluginEvent(_renderEventFunction, AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#else
                        GL.IssuePluginEvent(AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#endif
                        GL.InvalidateState();

                        UpdateFPS();
                    }
                }
            }
        }

        base.UpdateFrame();
    }
Example #11
    public override void UpdateFrame()
    {
        if (_capturing && !_paused && _sourceTexture)
        {
            bool canGrab = true;

            // If motion blur is enabled, wait until all frames are accumulated
            if (IsUsingMotionBlur())
            {
                // If the motion blur is still accumulating, don't grab this frame
                canGrab = _motionBlur.IsFrameAccumulated;
            }

            if (canGrab)
            {
                // Wait for the encoder to be ready for another frame
                while (_handle >= 0 && !AVProMovieCapturePlugin.IsNewFrameDue(_handle))
                {
                    System.Threading.Thread.Sleep(1);
                }
                if (_handle >= 0)
                {
                    // If motion blur is enabled, use the motion blur result
                    Texture sourceTexture = _sourceTexture;
                    if (IsUsingMotionBlur())
                    {
                        sourceTexture = _motionBlur.FinalTexture;
                    }

                    // If the texture isn't a RenderTexture then blit it to the RenderTexture so the native plugin can grab it
                    if (sourceTexture is RenderTexture)
                    {
                        AVProMovieCapturePlugin.SetTexturePointer(_handle, sourceTexture.GetNativeTexturePtr());
                    }
                    else
                    {
                        Graphics.Blit(sourceTexture, _renderTexture);
                        AVProMovieCapturePlugin.SetTexturePointer(_handle, _renderTexture.GetNativeTexturePtr());
                    }

#if AVPRO_MOVIECAPTURE_GLISSUEEVENT_52
                    GL.IssuePluginEvent(_renderEventFunction, AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#else
                    GL.IssuePluginEvent(AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#endif

                    // Handle audio from Unity
                    if (IsRecordingUnityAudio())
                    {
                        int           audioDataLength = 0;
                        System.IntPtr audioDataPtr    = _audioCapture.ReadData(out audioDataLength);
                        if (audioDataLength > 0)
                        {
                            AVProMovieCapturePlugin.EncodeAudio(_handle, audioDataPtr, (uint)audioDataLength);
                        }
                    }

                    UpdateFPS();
                }
            }
        }
        base.UpdateFrame();
    }
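
This last variant captures an arbitrary Texture rather than a camera. A hypothetical setup sketch showing how a non-RenderTexture source (for example a WebCamTexture) could be fed in; the setter name and the resize policy are assumptions.

    // Hypothetical setter: any Texture can be assigned; non-RenderTexture sources are
    // routed through the intermediate _renderTexture blit in UpdateFrame() above.
    public void SetSourceTexture(Texture source)
    {
        _sourceTexture = source;
        if (!(source is RenderTexture))
        {
            if (_renderTexture == null || _renderTexture.width != source.width || _renderTexture.height != source.height)
            {
                _renderTexture = new RenderTexture(source.width, source.height, 0);
                _renderTexture.Create();
            }
        }
    }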