// NOTE: This is old code based on OnRenderImage...may be revived at some point
// Legacy capture path: blocks until the native encoder is ready, encodes pending audio,
// blits the frame through, then kicks the native render-thread grab via GL.IssuePluginEvent.
private void OnRenderImage(RenderTexture source, RenderTexture dest)
{
	if (_capturing && !_paused)
	{
#if true
		// Busy-wait (1ms sleeps) until the native plugin signals it is due a new frame.
		// This throttles Unity to the requested capture frame rate.
		while (_handle >= 0 && !NativePlugin.IsNewFrameDue(_handle))
		{
			System.Threading.Thread.Sleep(1);
		}
		if (_handle >= 0)
		{
			// Encode audio read from the Unity audio capture component, but only in the
			// real-time path with no explicit audio device selected and audio not disabled.
			if (_audioCapture && _audioDeviceIndex < 0 && !_noAudio && _isRealTime)
			{
				int audioDataLength = 0;
				System.IntPtr audioDataPtr = _audioCapture.ReadData(out audioDataLength);
				if (audioDataLength > 0)
				{
					NativePlugin.EncodeAudio(_handle, audioDataPtr, (uint)audioDataLength);
				}
			}

			// In Direct3D the RT can be flipped vertically
			/*if (source.texelSize.y < 0)
			{

			}*/

			// Pass the frame through and remember the buffers for later use
			Graphics.Blit(source, dest);
			_lastSource = source;
			_lastDest = dest;

			// If rendering was redirected, copy the result back to the original target
			if (dest != _originalTarget)
			{
				Graphics.Blit(dest, _originalTarget);
			}

			// Signal the native plugin (on the render thread) to grab the frame buffer.
			// The event ID packs the plugin ID, event type and capture handle together.
#if AVPRO_MOVIECAPTURE_GLISSUEEVENT_52
			GL.IssuePluginEvent(NativePlugin.GetRenderEventFunc(), NativePlugin.PluginID | (int)NativePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#else
			GL.IssuePluginEvent(NativePlugin.PluginID | (int)NativePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#endif
			// The native plugin may have changed GL state behind Unity's back
			GL.InvalidateState();

			UpdateFPS();
			return;
		}
#endif
	}

	// Pass-through (not capturing, or no valid handle)
	Graphics.Blit(source, dest);
	_lastSource = source;
	_lastDest = dest;
}
// Cubemap/equirectangular variant of OnRenderImage. The capture body is disabled
// (#if false) — only the pass-through blit at the bottom is live.
private void OnRenderImage(RenderTexture source, RenderTexture dest)
{
#if false
	if (_capturing && !_paused)
	{
		// Wait until the native encoder is due a new frame (1ms sleep polling)
		while (_handle >= 0 && !NativePlugin.IsNewFrameDue(_handle))
		{
			System.Threading.Thread.Sleep(1);
		}
		if (_handle >= 0)
		{
			// Encode and flush whatever audio has accumulated in the capture buffer
			if (_audioCapture && _audioDeviceIndex < 0 && !_noAudio)
			{
				uint bufferLength = (uint)_audioCapture.BufferLength;
				if (bufferLength > 0)
				{
					NativePlugin.EncodeAudio(_handle, _audioCapture.BufferPtr, bufferLength);
					_audioCapture.FlushBuffer();
				}
			}

			// In Direct3D the RT can be flipped vertically
			/*if (source.texelSize.y < 0)
			{

			}*/

			// Convert the cubemap to an equirectangular projection, then grab it natively
			Graphics.Blit(_cubeTarget, _target, _cubemapToEquirectangularMaterial);
			RenderThreadEvent(NativePlugin.PluginEvent.CaptureFrameBuffer);
			GL.InvalidateState();
			UpdateFPS();
		}
	}
#endif
	// Pass-through: show the equirectangular projection of the cubemap if one exists,
	// otherwise just forward the camera image unchanged.
	if (_cubeTarget != null)
	{
		Graphics.Blit(_cubeTarget, dest, _cubemapToEquirectangularMaterial);
	}
	else
	{
		Graphics.Blit(source, dest);
	}
}
// Coroutine run once per frame: waits for end-of-frame so the final back buffer is
// complete, then (if a frame is due) encodes pending Unity audio and schedules the
// native render-thread grab of the frame buffer.
private IEnumerator FinalRenderCapture()
{
	yield return _waitForEndOfFrame;

	TickFrameTimer();

	// With motion blur active, only grab once the accumulation has completed
	bool frameReady = !IsUsingMotionBlur() || _motionBlur.IsFrameAccumulated;

	if (frameReady && CanOutputFrame())
	{
		// Grab final RenderTexture into texture and encode
		if (IsRecordingUnityAudio())
		{
			int byteCount;
			System.IntPtr bufferPtr = _audioCapture.ReadData(out byteCount);
			if (byteCount > 0)
			{
				NativePlugin.EncodeAudio(_handle, bufferPtr, (uint)byteCount);
			}
		}

		RenderThreadEvent(NativePlugin.PluginEvent.CaptureFrameBuffer);
		// The native grab may alter GL state behind Unity's back
		GL.InvalidateState();
		UpdateFPS();
	}

	RenormTimer();
}
// Per-frame capture update for the cubemap path: encodes pending Unity audio, selects
// the texture to capture (motion-blur result or the freshly updated final target),
// hands its native pointer to the plugin and schedules the render-thread grab.
public override void UpdateFrame()
{
	TickFrameTimer();

	AccumulateMotionBlur();

	if (_capturing && !_paused)
	{
		if (_cubeTarget != null && _camera != null)
		{
			bool canGrab = true;
			if (IsUsingMotionBlur())
			{
				// TODO: fix motion blur
				//this._motionBlur.RenderImage()
				// If the motion blur is still accumulating, don't grab this frame
				canGrab = _motionBlur.IsFrameAccumulated;
			}

			if (canGrab && CanOutputFrame())
			{
				// Encode whatever audio Unity has captured since the last output frame
				if (IsRecordingUnityAudio())
				{
					int audioDataLength = 0;
					System.IntPtr audioDataPtr = _audioCapture.ReadData(out audioDataLength);
					if (audioDataLength > 0)
					{
						NativePlugin.EncodeAudio(_handle, audioDataPtr, (uint)audioDataLength);
					}
				}

				// Pick the texture the native plugin should read from. Without motion blur
				// we refresh _finalTarget via UpdateTexture(); with it we grab the blur's
				// accumulated result instead (note UpdateTexture() is NOT called then).
				RenderTexture finalTexture = _finalTarget;
				if (!IsUsingMotionBlur())
				{
					UpdateTexture();
				}
				else
				{
					finalTexture = _motionBlur.FinalTexture;
				}

				if (_targetNativePointer == System.IntPtr.Zero || _supportTextureRecreate)
				{
					// NOTE: If support for captures to survive through alt-tab events, or window resizes where the GPU resources are recreated
					// is required, then this line is needed. It is very expensive though as it does a sync with the rendering thread.
					_targetNativePointer = finalTexture.GetNativeTexturePtr();
				}

				NativePlugin.SetTexturePointer(_handle, _targetNativePointer);

				// Queue the frame-buffer grab on the render thread
				RenderThreadEvent(NativePlugin.PluginEvent.CaptureFrameBuffer);
				// The native grab may alter GL state behind Unity's back
				GL.InvalidateState();

				UpdateFPS();
			}
		}
	}

	base.UpdateFrame();
	RenormTimer();
}
// Captures the current source texture when a frame is due: blits non-RenderTexture
// sources into a grabbable RenderTexture, hands the native pointer to the plugin,
// schedules the render-thread grab and encodes pending Unity audio.
private void Capture()
{
	TickFrameTimer();

	AccumulateMotionBlur();

	if (ShouldCaptureFrame())
	{
		bool hasSourceTextureChanged = HasSourceTextureChanged();

		// If motion blur is enabled, wait until all frames are accumulated
		if (IsUsingMotionBlur())
		{
			// If the motion blur is still accumulating, don't grab this frame.
			// NOTE(review): this overwrites (not ANDs with) the changed flag, so with
			// motion blur a frame is grabbed on accumulation regardless of whether the
			// source changed — looks deliberate, but confirm against the blur pipeline.
			hasSourceTextureChanged = _motionBlur.IsFrameAccumulated;
		}

		_isSourceTextureChanged = false;
		if (hasSourceTextureChanged)
		{
			if ((_manualUpdate /*&& NativePlugin.IsNewFrameDue(_handle)*/) || CanOutputFrame())
			{
				// If motion blur is enabled, use the motion blur result
				Texture sourceTexture = _sourceTexture;
				if (IsUsingMotionBlur())
				{
					sourceTexture = _motionBlur.FinalTexture;
				}

				// If the texture isn't a RenderTexture then blit it to the Rendertexture so the native plugin can grab it
				if (!(sourceTexture is RenderTexture))
				{
					_renderTexture.DiscardContents();
					Graphics.Blit(sourceTexture, _renderTexture);
					sourceTexture = _renderTexture;
				}

				if (_targetNativePointer == System.IntPtr.Zero || _supportTextureRecreate)
				{
					// NOTE: If support for captures to survive through alt-tab events, or window resizes where the GPU resources are recreated
					// is required, then this line is needed. It is very expensive though as it does a sync with the rendering thread.
					_targetNativePointer = sourceTexture.GetNativeTexturePtr();
				}

				NativePlugin.SetTexturePointer(_handle, _targetNativePointer);

				// Queue the frame grab on the render thread
				RenderThreadEvent(NativePlugin.PluginEvent.CaptureFrameBuffer);

				// With motion blur the "changed" state persists until the blur consumes it
				if (!IsUsingMotionBlur())
				{
					_isSourceTextureChanged = false;
				}

				// Handle audio from Unity
				if (IsRecordingUnityAudio())
				{
					int audioDataLength = 0;
					System.IntPtr audioDataPtr = _audioCapture.ReadData(out audioDataLength);
					if (audioDataLength > 0)
					{
						NativePlugin.EncodeAudio(_handle, audioDataPtr, (uint)audioDataLength);
					}
				}

				UpdateFPS();
			}
		}
	}

	RenormTimer();
}
// If we're forcing a resolution or AA change then we have to render the camera again to the new target
// If we try to just set the targetTexture of the camera and grab it in OnRenderImage we can't render it to the screen as before :(
// Per-frame capture: re-renders the camera(s) into _target (or blits the motion-blur
// result), passes the native texture pointer to the plugin, schedules the render-thread
// grab, and encodes any pending Unity audio.
public override void UpdateFrame()
{
	TickFrameTimer();

	if (_capturing && !_paused && HasCamera())
	{
		bool canGrab = true;
		if (IsUsingMotionBlur())
		{
			// If the motion blur is still accumulating, don't grab this frame
			canGrab = _motionBlur.IsFrameAccumulated;
		}

		if (canGrab && _handle >= 0 && CanOutputFrame())
		{
			// Render the camera(s)
			if (!IsUsingMotionBlur())
			{
				if (!HasContributingCameras())
				{
					RenderSingleCameraToTarget();
				}
				else
				{
					RenderCameraChainToTarget();
				}
			}
			else
			{
				// Just grab the last result of the motion blur
				Graphics.Blit(_motionBlur.FinalTexture, _target);
			}

			// FIX: previously the pointer was fetched only when _supportTextureRecreate was
			// set, so a never-initialized pointer could be passed as zero. Lazily initialize
			// when zero, matching the other capture components in this file.
			if (_targetNativePointer == System.IntPtr.Zero || _supportTextureRecreate)
			{
				// NOTE: If support for captures to survive through alt-tab events, or window resizes where the GPU resources are recreated
				// is required, then this line is needed. It is very expensive though as it does a sync with the rendering thread.
				_targetNativePointer = _target.GetNativeTexturePtr();
			}

			NativePlugin.SetTexturePointer(_handle, _targetNativePointer);

			// Queue the frame-buffer grab on the render thread
			RenderThreadEvent(NativePlugin.PluginEvent.CaptureFrameBuffer);

			// Encode whatever audio Unity has captured since the last output frame
			if (IsRecordingUnityAudio())
			{
				int audioDataLength = 0;
				System.IntPtr audioDataPtr = _audioCapture.ReadData(out audioDataLength);
				if (audioDataLength > 0)
				{
					NativePlugin.EncodeAudio(_handle, audioDataPtr, (uint)audioDataLength);
				}
			}

			UpdateFPS();
		}
	}

	base.UpdateFrame();
	RenormTimer();
}

// Renders _lastCamera full-viewport into _target, temporarily forcing a solid clear
// (opaque or transparent black depending on _supportAlpha) when the camera doesn't
// clear colour itself, then restores all modified camera state.
private void RenderSingleCameraToTarget()
{
	RenderTexture prevTarget = _lastCamera.targetTexture;
	// Reset the viewport rect as we're rendering to a texture captures the full viewport
	Rect prevRect = _lastCamera.rect;
	CameraClearFlags prevClear = _lastCamera.clearFlags;
	Color prevColor = _lastCamera.backgroundColor;
	bool clearChanged = false;
	if (_lastCamera.clearFlags == CameraClearFlags.Nothing || _lastCamera.clearFlags == CameraClearFlags.Depth)
	{
		clearChanged = true;
		_lastCamera.clearFlags = CameraClearFlags.SolidColor;
		_lastCamera.backgroundColor = _supportAlpha ? new Color(0f, 0f, 0f, 0f) : Color.black;
	}

	// Render
	_lastCamera.rect = new Rect(0f, 0f, 1f, 1f);
	_lastCamera.targetTexture = _target;
	_lastCamera.Render();

	// Restore camera
	_lastCamera.rect = prevRect;
	if (clearChanged)
	{
		_lastCamera.clearFlags = prevClear;
		_lastCamera.backgroundColor = prevColor;
	}
	_lastCamera.targetTexture = prevTarget;
}

// Renders each contributing camera into _target in order, then the last camera on top,
// restoring each camera's original targetTexture afterwards.
private void RenderCameraChainToTarget()
{
	// First render contributing cameras
	for (int cameraIndex = 0; cameraIndex < _contribCameras.Length; cameraIndex++)
	{
		Camera camera = _contribCameras[cameraIndex];
		if (camera != null)
		{
			RenderTexture prevTarget = camera.targetTexture;
			camera.targetTexture = _target;
			camera.Render();
			camera.targetTexture = prevTarget;
		}
	}

	// Finally render the last camera
	if (_lastCamera != null)
	{
		RenderTexture prevTarget = _lastCamera.targetTexture;
		_lastCamera.targetTexture = _target;
		_lastCamera.Render();
		_lastCamera.targetTexture = prevTarget;
	}
}
// If we're forcing a resolution or AA change then we have to render the camera again to the new target
// If we try to just set the targetTexture of the camera and grab it in OnRenderImage we can't render it to the screen as before :(
// Per-frame capture: refreshes _target (or blits the motion-blur result into it),
// passes the native texture pointer to the plugin, schedules the render-thread grab,
// and encodes any pending Unity audio.
public override void UpdateFrame()
{
	TickFrameTimer();

	if (_capturing && !_paused && HasCamera())
	{
		bool canGrab = true;
		if (IsUsingMotionBlur())
		{
			// If the motion blur is still accumulating, don't grab this frame
			canGrab = _motionBlur.IsFrameAccumulated;
		}

		if (canGrab && _handle >= 0 && CanOutputFrame())
		{
			// Render the camera(s)
			if (!IsUsingMotionBlur())
			{
				UpdateTexture();
			}
			else
			{
				// Just grab the last result of the motion blur
				_target.DiscardContents();
				Graphics.Blit(_motionBlur.FinalTexture, _target);
			}

			// FIX: previously the pointer was fetched only when _supportTextureRecreate was
			// set, so a never-initialized pointer could be passed as zero. Lazily initialize
			// when zero, matching the other capture components in this file.
			if (_targetNativePointer == System.IntPtr.Zero || _supportTextureRecreate)
			{
				// NOTE: If support for captures to survive through alt-tab events, or window resizes where the GPU resources are recreated
				// is required, then this line is needed. It is very expensive though as it does a sync with the rendering thread.
				_targetNativePointer = _target.GetNativeTexturePtr();
			}

			NativePlugin.SetTexturePointer(_handle, _targetNativePointer);

			// Queue the frame-buffer grab on the render thread
			RenderThreadEvent(NativePlugin.PluginEvent.CaptureFrameBuffer);

			// Encode whatever audio Unity has captured since the last output frame
			if (IsRecordingUnityAudio())
			{
				int audioDataLength = 0;
				System.IntPtr audioDataPtr = _audioCapture.ReadData(out audioDataLength);
				if (audioDataLength > 0)
				{
					NativePlugin.EncodeAudio(_handle, audioDataPtr, (uint)audioDataLength);
				}
			}

			UpdateFPS();
		}
	}

	base.UpdateFrame();
	RenormTimer();
}