Example #1
    public override void UnprepareCapture()
    {
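        // Clear the texture pointer so the native plugin no longer references our render target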
        AVProMovieCapturePlugin.SetTexturePointer(_handle, System.IntPtr.Zero);

        if (_target != null)
        {
            _target.DiscardContents();
        }
    }
Example #2
    public override void UnprepareCapture()
    {
        AVProMovieCapturePlugin.SetTexturePointer(_handle, System.IntPtr.Zero);

        if (_renderTexture != null)
        {
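            // Release the temporary render texture now that the plugin no longer references it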
            RenderTexture.ReleaseTemporary(_renderTexture);
            _renderTexture = null;
        }

        base.UnprepareCapture();
    }
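Example #3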
    public override void UpdateFrame()
    {
        if (_capturing && !_paused)
        {
            if (_cubeTarget != null && _camera != null)
            {
                while (_handle >= 0 && !AVProMovieCapturePlugin.IsNewFrameDue(_handle))
                {
                    System.Threading.Thread.Sleep(1);
                }
                if (_handle >= 0)
                {
                    if (_audioCapture && _audioDeviceIndex < 0 && !_noAudio && _isRealTime)
                    {
                        uint bufferLength = (uint)_audioCapture.BufferLength;
                        if (bufferLength > 0)
                        {
                            AVProMovieCapturePlugin.EncodeAudio(_handle, _audioCapture.BufferPtr, bufferLength);
                            _audioCapture.FlushBuffer();
                        }
                    }

                    // In Direct3D the RT can be flipped vertically

                    /*if (source.texelSize.y < 0)
                     * {
                     *
                     * }*/

                    _cubeCamera.transform.position = _camera.transform.position;
                    _cubeCamera.transform.rotation = _camera.transform.rotation;
                    _cubeCamera.RenderToCubemap(_cubeTarget, 63);

                    Graphics.Blit(_cubeTarget, _target, _cubemapToEquirectangularMaterial);

                    AVProMovieCapturePlugin.SetTexturePointer(_handle, _target.GetNativeTexturePtr());

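                    // Queue the capture on the render thread; the event ID packs the plugin ID, the event type and the recorder handle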
#if AVPRO_MOVIECAPTURE_GLISSUEEVENT_52
                    GL.IssuePluginEvent(_renderEventFunction, AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#else
                    GL.IssuePluginEvent(AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#endif
                    GL.InvalidateState();

                    UpdateFPS();
                }
            }
        }

        base.UpdateFrame();
    }
Example #4
    public override void UpdateFrame()
    {
        if (_capturing && !_paused)
        {
            if (_cubeTarget != null && _camera != null)
            {
                bool canGrab = true;

                if (_useMotionBlur && !_isRealTime && _motionBlur != null)
                {
                    // TODO: fix motion blur
                    //this._motionBlur.RenderImage()
                    // If the motion blur is still accumulating, don't grab this frame
                    canGrab = _motionBlur.IsFrameAccumulated;
                }

                if (canGrab)
                {
                    while (_handle >= 0 && !AVProMovieCapturePlugin.IsNewFrameDue(_handle))
                    {
                        System.Threading.Thread.Sleep(1);
                    }
                    if (_handle >= 0)
                    {
                        if (_audioCapture && _audioDeviceIndex < 0 && !_noAudio && _isRealTime)
                        {
                            uint bufferLength = (uint)_audioCapture.BufferLength;
                            if (bufferLength > 0)
                            {
                                AVProMovieCapturePlugin.EncodeAudio(_handle, _audioCapture.BufferPtr, bufferLength);
                                _audioCapture.FlushBuffer();
                            }
                        }

                        // In Direct3D the RT can be flipped vertically

                        /*if (source.texelSize.y < 0)
                         * {
                         *
                         * }*/

                        //_cubeCamera.transform.position = _camera.transform.position;
                        //_cubeCamera.transform.rotation = _camera.transform.rotation;

                        Camera camera = _camera;

                        if (!IsManualCubemapRendering())
                        {
                            // Note: Camera.RenderToCubemap() doesn't support camera rotation
                            camera.RenderToCubemap(_cubeTarget, 63);
                        }
                        else
                        {
                            // Cache old camera values
                            float         prevFieldOfView = _camera.fieldOfView;
                            RenderTexture prevTarget      = _camera.targetTexture;
                            Quaternion    prevRotation    = camera.transform.rotation;
                            Quaternion    xform           = camera.transform.rotation;

                            // Ignore the camera rotation
                            if (!_supportCameraRotation)
                            {
                                xform = Quaternion.identity;
                            }

                            camera.targetTexture = _faceTarget;
                            camera.fieldOfView   = 90f;

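                            // Render each face manually: aim the camera along the face direction, render into _faceTarget, then blit into the matching cubemap face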
                            camera.transform.rotation = xform * Quaternion.LookRotation(Vector3.forward, Vector3.down);
                            camera.Render();
                            Graphics.SetRenderTarget(_cubeTarget, 0, CubemapFace.PositiveZ);
                            Graphics.Blit(_faceTarget, _blitMaterial);

                            camera.transform.rotation = xform * Quaternion.LookRotation(Vector3.back, Vector3.down);
                            camera.Render();
                            Graphics.SetRenderTarget(_cubeTarget, 0, CubemapFace.NegativeZ);
                            Graphics.Blit(_faceTarget, _blitMaterial);

                            camera.transform.rotation = xform * Quaternion.LookRotation(Vector3.right, Vector3.down);
                            camera.Render();
                            Graphics.SetRenderTarget(_cubeTarget, 0, CubemapFace.NegativeX);
                            Graphics.Blit(_faceTarget, _blitMaterial);

                            camera.transform.rotation = xform * Quaternion.LookRotation(Vector3.left, Vector3.down);
                            camera.Render();
                            Graphics.SetRenderTarget(_cubeTarget, 0, CubemapFace.PositiveX);
                            Graphics.Blit(_faceTarget, _blitMaterial);

                            camera.transform.rotation = xform * Quaternion.LookRotation(Vector3.up, Vector3.forward);
                            camera.Render();
                            Graphics.SetRenderTarget(_cubeTarget, 0, CubemapFace.PositiveY);
                            Graphics.Blit(_faceTarget, _blitMaterial);

                            camera.transform.rotation = xform * Quaternion.LookRotation(Vector3.down, Vector3.back);
                            camera.Render();
                            Graphics.SetRenderTarget(_cubeTarget, 0, CubemapFace.NegativeY);
                            Graphics.Blit(_faceTarget, _blitMaterial);

                            Graphics.SetRenderTarget(null);

                            // Restore camera values
                            camera.transform.rotation = prevRotation;
                            camera.targetTexture      = prevTarget;
                            camera.fieldOfView        = prevFieldOfView;
                        }

                        Graphics.Blit(_cubeTarget, _finalTarget, _cubemapToEquirectangularMaterial);

                        // TODO: motion blur accumulate here!  Currently it's grabbing from the camera which is wrong because it's not the full 360 capture...

                        // TODO: cache GetNativeTexturePtr() as it causes a GPU thread sync!
                        AVProMovieCapturePlugin.SetTexturePointer(_handle, _finalTarget.GetNativeTexturePtr());

#if AVPRO_MOVIECAPTURE_GLISSUEEVENT_52
                        GL.IssuePluginEvent(_renderEventFunction, AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#else
                        GL.IssuePluginEvent(AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#endif
                        GL.InvalidateState();

                        UpdateFPS();
                    }
                }
            }
        }

        base.UpdateFrame();
    }
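IsManualCubemapRendering() and _supportCameraRotation are used above, but the helper itself isn't part of this excerpt. A minimal sketch of what such a check might look like, assuming (hypothetically) that manual face rendering is only chosen when the capture has to honour the camera's rotation, which Camera.RenderToCubemap() cannot do:

    // Hypothetical sketch - not taken from the plugin source.
    // Render the six faces manually whenever the camera's rotation must be respected,
    // since Camera.RenderToCubemap() ignores the camera's rotation.
    private bool IsManualCubemapRendering()
    {
        return _supportCameraRotation;
    }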
Example #5
    // If we're forcing a resolution or AA change then we have to render the camera again to the new target
    // If we try to just set the targetTexture of the camera and grab it in OnRenderImage we can't render it to the screen as before :(
    public override void UpdateFrame()
    {
        if (_capturing && !_paused && _camera != null)
        {
            bool canGrab = true;

            if (IsUsingMotionBlur())
            {
                // If the motion blur is still accumulating, don't grab this frame
                canGrab = _motionBlur.IsFrameAccumulated;
            }

            if (canGrab)
            {
                while (_handle >= 0 && !AVProMovieCapturePlugin.IsNewFrameDue(_handle))
                {
                    System.Threading.Thread.Sleep(1);
                }
                if (_handle >= 0)
                {
                    // Render the camera
                    {
                        RenderTexture prev = _camera.targetTexture;
                        // Reset the viewport rect, as rendering to a texture captures the full viewport
                        Rect             prevRect     = _camera.rect;
                        CameraClearFlags prevClear    = _camera.clearFlags;
                        Color            prevColor    = _camera.backgroundColor;
                        bool             clearChanged = false;
                        if (_camera.clearFlags == CameraClearFlags.Nothing || _camera.clearFlags == CameraClearFlags.Depth)
                        {
                            clearChanged            = true;
                            _camera.clearFlags      = CameraClearFlags.SolidColor;
                            _camera.backgroundColor = Color.black;
                        }

                        // Render
                        _camera.rect          = new Rect(0f, 0f, 1f, 1f);
                        _camera.targetTexture = _target;
                        _camera.Render();

                        // Restore camera
                        {
                            _camera.rect = prevRect;
                            if (clearChanged)
                            {
                                _camera.clearFlags      = prevClear;
                                _camera.backgroundColor = prevColor;
                            }
                            _camera.targetTexture = prev;
                        }
                    }

                    // NOTE: This line is only needed if captures must survive alt-tab events or window resizes where the GPU resources are recreated.
                    // It is very expensive though, as it forces a sync with the rendering thread.
                    _targetNativePointer = _target.GetNativeTexturePtr();

                    AVProMovieCapturePlugin.SetTexturePointer(_handle, _targetNativePointer);

#if AVPRO_MOVIECAPTURE_GLISSUEEVENT_52
                    GL.IssuePluginEvent(_renderEventFunction, AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#else
                    GL.IssuePluginEvent(AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#endif

                    if (IsRecordingUnityAudio())
                    {
                        int           audioDataLength = 0;
                        System.IntPtr audioDataPtr    = _audioCapture.ReadData(out audioDataLength);
                        if (audioDataLength > 0)
                        {
                            AVProMovieCapturePlugin.EncodeAudio(_handle, audioDataPtr, (uint)audioDataLength);
                        }
                    }

                    UpdateFPS();
                }
            }
        }
        base.UpdateFrame();
    }
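Example #6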
    //private Material _invisibleMaterial;

#if true
    // If we're forcing a resolution or AA change then we have to render the camera again to the new target
    // If we try to just set the targetTexture of the camera and grab it in OnRenderImage we can't render it to the screen as before :(
    public override void UpdateFrame()
    {
        if (_capturing && !_paused && _camera != null)
        {
            bool canGrab = true;

            if (_useMotionBlur && !_isRealTime && _motionBlur != null)
            {
                // If the motion blur is still accumulating, don't grab this frame
                canGrab = _motionBlur.IsFrameAccumulated;
            }

            if (canGrab)
            {
                while (_handle >= 0 && !AVProMovieCapturePlugin.IsNewFrameDue(_handle))
                {
                    System.Threading.Thread.Sleep(1);
                }
                if (_handle >= 0)
                {
                    if (_audioCapture && _audioDeviceIndex < 0 && !_noAudio && _isRealTime)
                    {
                        int           audioDataLength = 0;
                        System.IntPtr audioDataPtr    = _audioCapture.ReadData(out audioDataLength);
                        if (audioDataLength > 0)
                        {
                            AVProMovieCapturePlugin.EncodeAudio(_handle, audioDataPtr, (uint)audioDataLength);
                        }
                    }

                    GL.InvalidateState();
                    RenderTexture prev = _camera.targetTexture;
                    _camera.targetTexture = _target;
                    // Reset the viewport rect, as rendering to a texture captures the full viewport
                    Rect             prevRect     = _camera.rect;
                    CameraClearFlags prevClear    = _camera.clearFlags;
                    Color            prevColor    = _camera.backgroundColor;
                    bool             clearChanged = false;
                    if (_camera.clearFlags == CameraClearFlags.Nothing || _camera.clearFlags == CameraClearFlags.Depth)
                    {
                        clearChanged            = true;
                        _camera.clearFlags      = CameraClearFlags.SolidColor;
                        _camera.backgroundColor = Color.black;
                    }
                    _camera.rect = new Rect(0f, 0f, 1f, 1f);
                    _camera.Render();
                    _camera.rect = prevRect;
                    if (clearChanged)
                    {
                        _camera.clearFlags      = prevClear;
                        _camera.backgroundColor = prevColor;
                    }

                    _camera.targetTexture = prev;

                    // TODO: find out why without this blit our StretchRect fails (at least in Unity 5.x, seems ok in Unity 4.6.8)
                    //Graphics.Blit(Texture2D.whiteTexture, _target, _invisibleMaterial);
                    GL.InvalidateState();

                    AVProMovieCapturePlugin.SetTexturePointer(_handle, _target.GetNativeTexturePtr());

#if AVPRO_MOVIECAPTURE_GLISSUEEVENT_52
                    GL.IssuePluginEvent(_renderEventFunction, AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#else
                    GL.IssuePluginEvent(AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#endif


                    UpdateFPS();
                }
            }
        }
        base.UpdateFrame();
    }
#endif
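Example #7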
    public override void UnprepareCapture()
    {
        // Detach the render target from the native plugin when the capture ends
        AVProMovieCapturePlugin.SetTexturePointer(_handle, System.IntPtr.Zero);
    }
Example #8
    public override void UpdateFrame()
    {
        if (_capturing && !_paused)
        {
            if (_cubeTarget != null && _camera != null)
            {
                bool canGrab = true;

                if (IsUsingMotionBlur())
                {
                    // TODO: fix motion blur
                    //this._motionBlur.RenderImage()
                    // If the motion blur is still accumulating, don't grab this frame
                    canGrab = _motionBlur.IsFrameAccumulated;
                }

                if (canGrab)
                {
                    while (_handle >= 0 && !AVProMovieCapturePlugin.IsNewFrameDue(_handle))
                    {
                        System.Threading.Thread.Sleep(1);
                    }
                    if (_handle >= 0)
                    {
                        if (_audioCapture && _audioDeviceIndex < 0 && !_noAudio && _isRealTime)
                        {
                            uint bufferLength = (uint)_audioCapture.BufferLength;
                            if (bufferLength > 0)
                            {
                                AVProMovieCapturePlugin.EncodeAudio(_handle, _audioCapture.BufferPtr, bufferLength);
                                _audioCapture.FlushBuffer();
                            }
                        }

                        RenderTexture finalTexture = _finalTarget;
                        if (!IsUsingMotionBlur())
                        {
                            UpdateTexture();
                        }
                        else
                        {
                            finalTexture = _motionBlur.FinalTexture;
                        }

                        // TODO: cache GetNativeTexturePtr() as it causes a GPU thread sync!
                        AVProMovieCapturePlugin.SetTexturePointer(_handle, finalTexture.GetNativeTexturePtr());

#if AVPRO_MOVIECAPTURE_GLISSUEEVENT_52
                        GL.IssuePluginEvent(_renderEventFunction, AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#else
                        GL.IssuePluginEvent(AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#endif
                        GL.InvalidateState();

                        UpdateFPS();
                    }
                }
            }
        }

        base.UpdateFrame();
    }
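Both cubemap examples carry the same TODO: GetNativeTexturePtr() forces a sync with the render thread, so calling it every frame is expensive. A minimal sketch of the caching it hints at, assuming a System.IntPtr field such as the _targetNativePointer used in Example #5 and a capture that always reads from the same render texture:

    // Sketch only: resolve the native texture pointer once and reuse it on later frames.
    // Reset the cached value to System.IntPtr.Zero in UnprepareCapture() (as Example #5 does)
    // so it is fetched again if the capture is restarted or the target is recreated.
    if (_targetNativePointer == System.IntPtr.Zero)
    {
        _targetNativePointer = _finalTarget.GetNativeTexturePtr();
    }
    AVProMovieCapturePlugin.SetTexturePointer(_handle, _targetNativePointer);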
Example #9
    public override void UpdateFrame()
    {
        if (_capturing && !_paused && _sourceTexture)
        {
            bool canGrab = true;

            // If motion blur is enabled, wait until all frames are accumulated
            if (IsUsingMotionBlur())
            {
                // If the motion blur is still accumulating, don't grab this frame
                canGrab = _motionBlur.IsFrameAccumulated;
            }

            if (canGrab)
            {
                // Wait for the encoder to be ready for another frame
                while (_handle >= 0 && !AVProMovieCapturePlugin.IsNewFrameDue(_handle))
                {
                    System.Threading.Thread.Sleep(1);
                }
                if (_handle >= 0)
                {
                    // If motion blur is enabled, use the motion blur result
                    Texture sourceTexture = _sourceTexture;
                    if (IsUsingMotionBlur())
                    {
                        sourceTexture = _motionBlur.FinalTexture;
                    }

                    // If the texture isn't a RenderTexture then blit it into a RenderTexture so the native plugin can grab it
                    if (sourceTexture is RenderTexture)
                    {
                        AVProMovieCapturePlugin.SetTexturePointer(_handle, sourceTexture.GetNativeTexturePtr());
                    }
                    else
                    {
                        Graphics.Blit(sourceTexture, _renderTexture);
                        AVProMovieCapturePlugin.SetTexturePointer(_handle, _renderTexture.GetNativeTexturePtr());
                    }

#if AVPRO_MOVIECAPTURE_GLISSUEEVENT_52
                    GL.IssuePluginEvent(_renderEventFunction, AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#else
                    GL.IssuePluginEvent(AVProMovieCapturePlugin.PluginID | (int)AVProMovieCapturePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#endif

                    // Handle audio from Unity
                    if (IsRecordingUnityAudio())
                    {
                        int           audioDataLength = 0;
                        System.IntPtr audioDataPtr    = _audioCapture.ReadData(out audioDataLength);
                        if (audioDataLength > 0)
                        {
                            AVProMovieCapturePlugin.EncodeAudio(_handle, audioDataPtr, (uint)audioDataLength);
                        }
                    }

                    UpdateFPS();
                }
            }
        }
        base.UpdateFrame();
    }
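For the non-RenderTexture path above, _renderTexture has to exist and match the source dimensions before the blit. A minimal sketch of how it might be allocated, assuming it is created lazily and later released with RenderTexture.ReleaseTemporary() in UnprepareCapture(), as Example #2 does:

    // Sketch only: lazily allocate a temporary render texture matching the source texture
    // so the native plugin has a GPU surface it can read the frame from.
    if (_renderTexture == null)
    {
        _renderTexture = RenderTexture.GetTemporary(_sourceTexture.width, _sourceTexture.height, 0);
    }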