// Whenever any camera will render us, add a command buffer to do the work on it.
// The buffer grabs the current screen, downsamples it to half resolution, and runs
// a two-width separable blur (horizontal + vertical, twice), exposing the result
// to all shaders as the global texture _GrabBlurTexture.
public void OnWillRenderObject()
{
    // Tear down all per-camera buffers as soon as the component is inactive.
    var act = gameObject.activeInHierarchy && enabled;
    if (!act)
    {
        Cleanup();
        return;
    }

    var cam = Camera.current;
    if (!cam)
        return;

    CommandBuffer buf = null;
    // Did we already add the command buffer on this camera? Nothing to do then.
    if (m_Cameras.ContainsKey(cam))
        return;

    // Lazily create the blur material; HideAndDontSave keeps it out of the
    // scene hierarchy and out of saved assets.
    if (!m_Material)
    {
        m_Material = new Material(m_BlurShader);
        m_Material.hideFlags = HideFlags.HideAndDontSave;
    }

    buf = new CommandBuffer();
    buf.name = "Grab screen and blur";
    m_Cameras[cam] = buf;

    // copy screen into temporary RT (-1,-1 means "camera pixel size")
    int screenCopyID = Shader.PropertyToID("_ScreenCopyTexture");
    buf.GetTemporaryRT (screenCopyID, -1, -1, 0, FilterMode.Bilinear);
    buf.Blit (BuiltinRenderTextureType.CurrentActive, screenCopyID);

    // get two smaller RTs (-2,-2 means "half the camera pixel size")
    int blurredID = Shader.PropertyToID("_Temp1");
    int blurredID2 = Shader.PropertyToID("_Temp2");
    buf.GetTemporaryRT (blurredID, -2, -2, 0, FilterMode.Bilinear);
    buf.GetTemporaryRT (blurredID2, -2, -2, 0, FilterMode.Bilinear);

    // downsample screen copy into smaller RT, release screen RT
    buf.Blit (screenCopyID, blurredID);
    buf.ReleaseTemporaryRT (screenCopyID);

    // horizontal blur (2-pixel offset; the shader reads the "offsets" vector)
    buf.SetGlobalVector("offsets", new Vector4(2.0f/Screen.width,0,0,0));
    buf.Blit (blurredID, blurredID2, m_Material);
    // vertical blur
    buf.SetGlobalVector("offsets", new Vector4(0,2.0f/Screen.height,0,0));
    buf.Blit (blurredID2, blurredID, m_Material);
    // horizontal blur — second, wider (4-pixel) pass for a softer result
    buf.SetGlobalVector("offsets", new Vector4(4.0f/Screen.width,0,0,0));
    buf.Blit (blurredID, blurredID2, m_Material);
    // vertical blur
    buf.SetGlobalVector("offsets", new Vector4(0,4.0f/Screen.height,0,0));
    buf.Blit (blurredID2, blurredID, m_Material);

    // Publish the blurred half-res texture to every shader; scheduled after the
    // skybox so the grab contains opaque geometry plus sky.
    buf.SetGlobalTexture("_GrabBlurTexture", blurredID);

    cam.AddCommandBuffer (CameraEvent.AfterSkybox, buf);
}
// Lua binding (old SLua convention) for CommandBuffer.ReleaseTemporaryRT.
// Stack slot 1 holds the CommandBuffer userdata, slot 2 the RT name id.
// On success nothing is pushed; on failure the error is reported via luaL_error.
static public int ReleaseTemporaryRT(IntPtr l)
{
    try
    {
        // Resolve the receiver from stack slot 1.
        var commandBuffer = (UnityEngine.Rendering.CommandBuffer)checkSelf(l);

        // Shader property id of the temporary render target (stack slot 2).
        System.Int32 nameID;
        checkType(l, 2, out nameID);

        commandBuffer.ReleaseTemporaryRT(nameID);
        return(0);
    }
    catch (Exception e)
    {
        // Route the managed exception through Lua's error mechanism.
        LuaDLL.luaL_error(l, e.ToString());
        return(0);
    }
}
// Lua binding (newer SLua convention) for CommandBuffer.ReleaseTemporaryRT.
// Stack slot 1 holds the CommandBuffer userdata, slot 2 the RT name id.
// Pushes a success flag on success; delegates failures to the shared error helper.
static public int ReleaseTemporaryRT(IntPtr l)
{
    try
    {
        // Resolve the receiver from stack slot 1.
        var commandBuffer = (UnityEngine.Rendering.CommandBuffer)checkSelf(l);

        // Shader property id of the temporary render target (stack slot 2).
        System.Int32 nameID;
        checkType(l, 2, out nameID);

        commandBuffer.ReleaseTemporaryRT(nameID);

        // Success: report one return value (true) back to Lua.
        pushValue(l, true);
        return(1);
    }
    catch (Exception e)
    {
        return(error(l, e));
    }
}
// Builds a command buffer that renders a low-res texture through `material`
// and stretches the processed result over the whole screen, executed just
// before the camera's forward opaque pass.
private void Awake()
{
    int lowResRenderTarget = Shader.PropertyToID("_LowResRenderTarget");
    // Second temporary RT so the material pass never reads and writes the same target.
    int lowResProcessedTarget = Shader.PropertyToID("_LowResProcessedTarget");

    CommandBuffer cb = new CommandBuffer();

    cb.GetTemporaryRT(lowResRenderTarget, this.width, this.height, 0, FilterMode.Trilinear, RenderTextureFormat.ARGB32);
    cb.GetTemporaryRT(lowResProcessedTarget, this.width, this.height, 0, FilterMode.Trilinear, RenderTextureFormat.ARGB32);

    // BUGFIX: the original issued cb.Blit(lowResRenderTarget, lowResRenderTarget, material),
    // blitting a texture onto itself. Sampling a render target while it is bound as
    // the destination is undefined behavior on most GPUs. Re-draw through the
    // material into a separate temporary RT instead.
    cb.Blit(lowResRenderTarget, lowResProcessedTarget, this.material);

    // Blit the processed low-res texture into the camera's target render texture,
    // effectively rendering it to the entire screen.
    cb.Blit(lowResProcessedTarget, BuiltinRenderTextureType.CameraTarget);

    cb.ReleaseTemporaryRT(lowResProcessedTarget);
    cb.ReleaseTemporaryRT(lowResRenderTarget);

    // Tell the camera to execute our CommandBuffer before the forward opaque pass -
    // that is, just before normal geometry starts rendering.
    this.GetComponent<Camera>().AddCommandBuffer(CameraEvent.BeforeForwardOpaque, cb);
}
// Sets up the MP4 recorder: resets the frame counter, (re)creates the scratch
// render target, configures and creates the fcAPI MP4 context + file output
// stream, and records the command buffer that copies each frame into the
// scratch buffer for encoding.
void InitializeContext()
{
    m_num_video_frames = 0;

    // initialize scratch buffer (the RT the encoder reads each frame from)
    UpdateScratchBuffer();

    // initialize context and stream
    {
        m_mp4conf = fcAPI.fcMP4Config.default_value;
        m_mp4conf.video = m_captureVideo;
        m_mp4conf.audio = m_captureAudio;
        // Encoded size follows the scratch buffer, not the screen.
        m_mp4conf.video_width = m_scratch_buffer.width;
        m_mp4conf.video_height = m_scratch_buffer.height;
        m_mp4conf.video_max_framerate = 60;
        m_mp4conf.video_bitrate = m_videoBitrate;
        m_mp4conf.audio_bitrate = m_audioBitrate;
        m_mp4conf.audio_sampling_rate = AudioSettings.outputSampleRate;
        m_mp4conf.audio_num_channels = fcAPI.fcGetNumAudioChannels();
        m_ctx = fcAPI.fcMP4CreateContext(ref m_mp4conf);

        // Timestamped output name, e.g. "20240131_235959.mp4".
        m_output_file = DateTime.Now.ToString("yyyyMMdd_HHmmss") + ".mp4";
        m_ostream = fcAPI.fcCreateFileStream(GetOutputPath());
        fcAPI.fcMP4AddOutputStream(m_ctx, m_ostream);
    }

    // initialize command buffer: grab the active frame buffer into a temporary
    // RT, then draw it into the scratch buffer through the copy material.
    {
        int tid = Shader.PropertyToID("_TmpFrameBuffer");
        m_cb = new CommandBuffer();
        m_cb.name = "MP4Recorder: copy frame buffer";
        m_cb.GetTemporaryRT(tid, -1, -1, 0, FilterMode.Bilinear);
        m_cb.Blit(BuiltinRenderTextureType.CurrentActive, tid);
        m_cb.SetRenderTarget(m_scratch_buffer);
        m_cb.DrawMesh(m_quad, Matrix4x4.identity, m_mat_copy, 0, 0);
        m_cb.ReleaseTemporaryRT(tid);
    }
}
// Sets up the GIF recorder: resets the frame counter, (re)creates the scratch
// render target, creates the fcAPI GIF context, and records the command buffer
// that copies each rendered frame into the scratch buffer for encoding.
void InitializeContext()
{
    m_num_video_frames = 0;

    // Scratch render target the encoder reads each frame from.
    UpdateScratchBuffer();

    // GIF encoder context, sized to match the scratch buffer.
    fcAPI.fcGifConfig conf;
    conf.width = m_scratch_buffer.width;
    conf.height = m_scratch_buffer.height;
    // GIF palettes hold at most 256 colors.
    conf.num_colors = Mathf.Clamp(m_numColors, 1, 256);
    conf.max_active_tasks = 0;
    m_ctx = fcAPI.fcGifCreateContext(ref conf);

    // Command buffer: grab the active frame buffer into a temporary RT, then
    // draw it into the scratch buffer through the copy material.
    int frameBufferId = Shader.PropertyToID("_TmpFrameBuffer");
    m_cb = new CommandBuffer { name = "GifRecorder: copy frame buffer" };
    m_cb.GetTemporaryRT(frameBufferId, -1, -1, 0, FilterMode.Bilinear);
    m_cb.Blit(BuiltinRenderTextureType.CurrentActive, frameBufferId);
    m_cb.SetRenderTarget(m_scratch_buffer);
    m_cb.DrawMesh(m_quad, Matrix4x4.identity, m_mat_copy, 0, 0);
    m_cb.ReleaseTemporaryRT(frameBufferId);
}
// Builds the command buffers supporting deferred transmission / subsurface
// scattering: one copies the G-buffer emission alpha (the transmission mask)
// into a temporary RT before lighting, one optionally blurs normals for skin,
// and one releases the temporaries after lighting. The null checks on
// m_copyTransmission / m_releaseDeferredPlus make repeated calls a no-op.
private void InitializeBuffers()
{
    m_isScatteringEnabled = SkinSettings.Enabled;
    // Scattering implies transmission.
    m_isTransmissionEnabled = TransmissionSettings.Enabled || m_isScatteringEnabled;

    // Fall back to the default skin LUT if none was assigned.
    if (SkinSettings.Lut == null)
    {
        SkinSettings.Lut = SkinLut;
#if UNITY_EDITOR
        EditorUtility.SetDirty(this);
#endif
    }

    if ((m_isTransmissionEnabled || m_isScatteringEnabled) && m_camera != null && DeferredTransmissionBlit != null
        && m_copyTransmission == null && m_releaseDeferredPlus == null)
    {
        int opacityBufferId = Shader.PropertyToID("_DeferredTransmissionBuffer");
        int blurredNormalsBufferIdTemp = Shader.PropertyToID("_DeferredBlurredNormalBufferTemp");
        int blurredNormalBuffer = Shader.PropertyToID("_DeferredBlurredNormalBuffer");

        m_deferredTransmissionBlitMaterial = new Material(DeferredTransmissionBlit);
        m_deferredTransmissionBlitMaterial.hideFlags = HideFlags.HideAndDontSave;

        // Copy Gbuffer emission buffer so we can get at the alpha channel for transmission.
        m_copyTransmission = new CommandBuffer();
        m_copyTransmission.name = c_copyTransmissionBufferName;
        m_copyTransmission.GetTemporaryRT(opacityBufferId, -1, -1, 0, FilterMode.Point, RenderTextureFormat.ARGB32);
        m_copyTransmission.Blit(BuiltinRenderTextureType.CameraTarget, opacityBufferId, m_deferredTransmissionBlitMaterial);

        // Blurred normals for skin
        if (m_isScatteringEnabled)
        {
            GenerateNormalBlurMaterialAndCommandBuffer(blurredNormalBuffer, blurredNormalsBufferIdTemp,
                out m_deferredBlurredNormalsMaterial, out m_renderBlurredNormals);
#if UNITY_EDITOR
            // A separate material/buffer pair is kept for the editor's scene view camera.
            GenerateNormalBlurMaterialAndCommandBuffer(blurredNormalBuffer, blurredNormalsBufferIdTemp,
                out m_sceneViewBlurredNormalsMaterial, out m_sceneViewBlurredNormals);
#endif
        }

        // Cleanup resources.
        // Note: the temporaries are allocated in the buffers above and released
        // here in a separate buffer that runs later in the same camera's frame.
        m_releaseDeferredPlus = new CommandBuffer();
        m_releaseDeferredPlus.name = c_releaseDeferredBuffer;
        m_releaseDeferredPlus.ReleaseTemporaryRT(opacityBufferId);
        if (m_isScatteringEnabled)
        {
            m_releaseDeferredPlus.ReleaseTemporaryRT(blurredNormalsBufferIdTemp);
        }
#if UNITY_EDITOR
        SceneView.onSceneGUIDelegate += OnSceneGUIDelegate;
#endif
    }

    AddCommandBuffersToCamera(m_camera, m_renderBlurredNormals);
#if UNITY_EDITOR
    EditorUtility.SetDirty(m_camera);
#endif
}
// Lazily creates the pre/post-light command buffers that expose UBER's
// per-pixel props (translucency, self-shadowing, wetness — stored in the
// emission buffer's alpha) to lighting as _UBERPropsBuffer. Idempotent:
// does nothing once combufPreLight exists.
public void Initialize()
{
    if (combufPreLight == null)
    {
        int propsBufferID = Shader.PropertyToID("_UBERPropsBuffer");

        // prepare material
        // BUGFIX: the original nested "if (CopyPropsMat != null) DestroyImmediate(CopyPropsMat);"
        // inside "if (CopyPropsMat == null)", which made the destroy unreachable
        // dead code; it has been removed.
        if (CopyPropsMat == null)
        {
            CopyPropsMat = new Material(Shader.Find("Hidden/UBER_CopyPropsTexture"));
            CopyPropsMat.hideFlags = HideFlags.DontSave;
        }

        // take a copy of emission buffer.a where UBER stores its props
        // (translucency, self-shadowing, wetness)
        combufPreLight = new CommandBuffer();
        combufPreLight.name="UBERPropsPrelight";
        combufPreLight.GetTemporaryRT(propsBufferID, -1, -1, 0, FilterMode.Point, RenderTextureFormat.RHalf);
        combufPreLight.Blit(BuiltinRenderTextureType.CameraTarget, propsBufferID, CopyPropsMat);

        // release temp buffer after lighting
        combufPostLight = new CommandBuffer();
        combufPostLight.name="UBERPropsPostlight";
        combufPostLight.ReleaseTemporaryRT (propsBufferID);
    }
}
// Initializes the PNG recorder: output directory, copy material, png plugin
// context, the frame-buffer and G-buffer render targets, and the two command
// buffers that capture them each frame.
void OnEnable()
{
    m_outputDir.CreateDirectory();
    // Fullscreen quad used to run the copy material over each render target.
    m_quad = FrameCapturerUtils.CreateFullscreenQuad();
    m_mat_copy = new Material(m_shCopy);

    var cam = GetComponent<Camera>();
    if (cam.targetTexture != null)
    {
        // Rendering to an offscreen target needs different sampling in the copy
        // shader, selected via this keyword.
        m_mat_copy.EnableKeyword("OFFSCREEN");
    }

#if UNITY_EDITOR
    // G-buffer capture only exists in the deferred rendering path.
    if (m_captureGBuffer && !FrameCapturerUtils.IsRenderingPathDeferred(cam))
    {
        Debug.LogWarning("PngRecorder: Rendering Path must be deferred to use Capture GBuffer mode.");
        m_captureGBuffer = false;
    }
#endif // UNITY_EDITOR

    // initialize png context
    fcAPI.fcPngConfig conf = fcAPI.fcPngConfig.default_value;
    m_ctx = fcAPI.fcPngCreateContext(ref conf);

    // initialize render targets
    {
        m_frame_buffer = new RenderTexture(cam.pixelWidth, cam.pixelHeight, 0, RenderTextureFormat.ARGBHalf);
        m_frame_buffer.wrapMode = TextureWrapMode.Repeat;
        m_frame_buffer.Create();

        // One render target per captured G-buffer component, single-channel
        // formats for the scalar components.
        var formats = new RenderTextureFormat[7]
        {
            RenderTextureFormat.ARGBHalf, // albedo (RGB)
            RenderTextureFormat.RHalf,    // occlusion (R)
            RenderTextureFormat.ARGBHalf, // specular (RGB)
            RenderTextureFormat.RHalf,    // smoothness (R)
            RenderTextureFormat.ARGBHalf, // normal (RGB)
            RenderTextureFormat.ARGBHalf, // emission (RGB)
            RenderTextureFormat.RHalf,    // depth (R)
        };
        m_gbuffer = new RenderTexture[7];
        for (int i = 0; i < m_gbuffer.Length; ++i)
        {
            // last one is depth (1 channel)
            m_gbuffer[i] = new RenderTexture(cam.pixelWidth, cam.pixelHeight, 0, formats[i]);
            m_gbuffer[i].filterMode = FilterMode.Point;
            m_gbuffer[i].Create();
        }
    }

    // initialize command buffers
    {
        int tid = Shader.PropertyToID("_TmpFrameBuffer");

        // Frame-buffer capture: grab the active target into a temporary RT, then
        // draw it into m_frame_buffer through the copy material (shader pass 0).
        m_cb_copy_fb = new CommandBuffer();
        m_cb_copy_fb.name = "PngRecorder: Copy FrameBuffer";
        m_cb_copy_fb.GetTemporaryRT(tid, -1, -1, 0, FilterMode.Point);
        m_cb_copy_fb.Blit(BuiltinRenderTextureType.CurrentActive, tid);
        m_cb_copy_fb.SetRenderTarget(m_frame_buffer);
        m_cb_copy_fb.DrawMesh(m_quad, Matrix4x4.identity, m_mat_copy, 0, 0);
        m_cb_copy_fb.ReleaseTemporaryRT(tid);

        // G-buffer capture: the 7 components are split across two MRT draws
        // (shader passes 4 and 5), since at most 4 color targets bind at once.
        m_cb_copy_gb = new CommandBuffer();
        m_cb_copy_gb.name = "PngRecorder: Copy G-Buffer";
        m_cb_copy_gb.SetRenderTarget(
            new RenderTargetIdentifier[] { m_gbuffer[0], m_gbuffer[1], m_gbuffer[2], m_gbuffer[3] }, m_gbuffer[0]);
        m_cb_copy_gb.DrawMesh(m_quad, Matrix4x4.identity, m_mat_copy, 0, 4);
        m_cb_copy_gb.SetRenderTarget(
            new RenderTargetIdentifier[] { m_gbuffer[4], m_gbuffer[5], m_gbuffer[6], m_gbuffer[3] }, m_gbuffer[0]);
        m_cb_copy_gb.DrawMesh(m_quad, Matrix4x4.identity, m_mat_copy, 0, 5);
    }
}
// [ImageEffectOpaque]
// Configures the screen-space reflection material for this frame and, on first
// use, records the command buffer that ray-traces, filters, mip-blurs and
// composites reflections onto the camera target.
public void OnPreRender()
{
    if (material == null)
    {
        return;
    }
    // SSR reads the G-buffer, so only the deferred shading path is supported.
    else if (Camera.current.actualRenderingPath != RenderingPath.DeferredShading)
    {
        return;
    }

    // Trace at half resolution unless quality is High.
    int downsampleAmount = (settings.reflectionSettings.reflectionQuality == SSRResolution.High) ? 1 : 2;

    var rtW = camera_.pixelWidth / downsampleAmount;
    var rtH = camera_.pixelHeight / downsampleAmount;

    float sWidth = camera_.pixelWidth;
    float sHeight = camera_.pixelHeight;
    // Half-screen scale/offset used by the clip-to-pixel warp matrix below.
    float sx = sWidth / 2.0f;
    float sy = sHeight / 2.0f;

    const int maxMip = 5;

    RenderTextureFormat intermediateFormat = camera_.hdr ? RenderTextureFormat.ARGBHalf : RenderTextureFormat.ARGB32;

    // --- per-frame shader parameters ---
    material.SetInt("_RayStepSize", settings.reflectionSettings.stepSize);
    material.SetInt("_AdditiveReflection", settings.reflectionSettings.blendType == SSRReflectionBlendType.Additive ? 1 : 0);
    material.SetInt("_BilateralUpsampling", bilateralUpsample ? 1 : 0);
    material.SetInt("_TreatBackfaceHitAsMiss", treatBackfaceHitAsMiss ? 1 : 0);
    material.SetInt("_AllowBackwardsRays", settings.reflectionSettings.reflectBackfaces ? 1 : 0);
    material.SetInt("_TraceBehindObjects", traceBehindObjects ? 1 : 0);
    material.SetInt("_MaxSteps", settings.reflectionSettings.iterationCount);
    material.SetInt("_FullResolutionFiltering", 0);
    material.SetInt("_HalfResolution", (settings.reflectionSettings.reflectionQuality != SSRResolution.High) ? 1 : 0);
    material.SetInt("_HighlightSuppression", highlightSuppression ? 1 : 0);

    /** The height in pixels of a 1m object if viewed from 1m away. */
    float pixelsPerMeterAtOneMeter = sWidth / (-2.0f * (float)(Math.Tan(camera_.fieldOfView / 180.0 * Math.PI * 0.5)));
    material.SetFloat("_PixelsPerMeterAtOneMeter", pixelsPerMeterAtOneMeter);

    material.SetFloat("_ScreenEdgeFading", settings.screenEdgeMask.intensity);
    material.SetFloat("_ReflectionBlur", settings.reflectionSettings.reflectionBlur);
    material.SetFloat("_MaxRayTraceDistance", settings.reflectionSettings.maxDistance);
    material.SetFloat("_FadeDistance", settings.intensitySettings.fadeDistance);
    material.SetFloat("_LayerThickness", settings.reflectionSettings.widthModifier);
    material.SetFloat("_SSRMultiplier", settings.intensitySettings.reflectionMultiplier);
    material.SetFloat("_FresnelFade", settings.intensitySettings.fresnelFade);
    material.SetFloat("_FresnelFadePower", settings.intensitySettings.fresnelFadePower);

    Matrix4x4 P = camera_.projectionMatrix;
    // Constants for reconstructing camera-space position from screen position +
    // depth in the shader (single-index access into P is column-major).
    Vector4 projInfo = new Vector4
        ((-2.0f / (sWidth * P[0])),
         (-2.0f / (sHeight * P[5])),
         ((1.0f - P[2]) / P[0]),
         ((1.0f + P[6]) / P[5]));

    // Near/far encoding for depth linearization; an infinite far plane needs a
    // special form.
    Vector3 cameraClipInfo = (float.IsPositiveInfinity(camera_.farClipPlane)) ?
        new Vector3(camera_.nearClipPlane, -1.0f, 1.0f) :
        new Vector3(camera_.nearClipPlane * camera_.farClipPlane,
            camera_.nearClipPlane - camera_.farClipPlane,
            camera_.farClipPlane);

    material.SetVector("_ReflectionBufferSize", new Vector2(rtW, rtH));
    material.SetVector("_ScreenSize", new Vector2(sWidth, sHeight));
    material.SetVector("_InvScreenSize", new Vector2((float)(1.0f / sWidth), (float)(1.0f / sHeight)));
    material.SetVector("_ProjInfo", projInfo); // used for unprojection
    material.SetVector("_CameraClipInfo", cameraClipInfo);

    // Maps clip space into pixel coordinates so the ray marcher can step per pixel.
    Matrix4x4 warpToScreenSpaceMatrix = new Matrix4x4();
    warpToScreenSpaceMatrix.SetRow(0, new Vector4(sx, 0.0f, 0.0f, sx));
    warpToScreenSpaceMatrix.SetRow(1, new Vector4(0.0f, sy, 0.0f, sy));
    warpToScreenSpaceMatrix.SetRow(2, new Vector4(0.0f, 0.0f, 1.0f, 0.0f));
    warpToScreenSpaceMatrix.SetRow(3, new Vector4(0.0f, 0.0f, 0.0f, 1.0f));

    Matrix4x4 projectToPixelMatrix = warpToScreenSpaceMatrix * P;

    material.SetMatrix("_ProjectToPixelMatrix", projectToPixelMatrix);
    material.SetMatrix("_WorldToCameraMatrix", camera_.worldToCameraMatrix);
    material.SetMatrix("_CameraToWorldMatrix", camera_.worldToCameraMatrix.inverse);

    // Record the command buffer once; it is replayed every frame until removed.
    if (m_CommandBuffer == null)
    {
        m_CommandBuffer = new CommandBuffer();
        m_CommandBuffer.name = "Screen Space Reflections";

        // RGB: Normals, A: Roughness.
        // Has the nice benefit of allowing us to control the filtering mode as well.
        m_CommandBuffer.GetTemporaryRT(kNormalAndRoughnessTexture, -1, -1, 0, FilterMode.Point, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Linear);

        m_CommandBuffer.GetTemporaryRT(kHitPointTexture, rtW, rtH, 0, FilterMode.Bilinear, RenderTextureFormat.ARGBHalf, RenderTextureReadWrite.Linear);

        // Mip chain of reflection buffers, used to approximate roughness-based blur.
        for (int i = 0; i < maxMip; ++i)
        {
            // We explicitly interpolate during bilateral upsampling.
            m_CommandBuffer.GetTemporaryRT(kReflectionTextures[i], rtW >> i, rtH >> i, 0, FilterMode.Bilinear, intermediateFormat);
        }

        m_CommandBuffer.GetTemporaryRT(kFilteredReflections, rtW, rtH, 0, bilateralUpsample ?
            FilterMode.Point : FilterMode.Bilinear, intermediateFormat);

        m_CommandBuffer.GetTemporaryRT(kFinalReflectionTexture, rtW, rtH, 0, FilterMode.Point, intermediateFormat);

        // Pack normals + roughness, trace rays, resolve hit points to colors,
        // then Poisson-filter into mip 0.
        m_CommandBuffer.Blit(BuiltinRenderTextureType.CameraTarget, kNormalAndRoughnessTexture, material, (int)PassIndex.BilateralKeyPack);
        m_CommandBuffer.Blit(BuiltinRenderTextureType.CameraTarget, kHitPointTexture, material, (int)PassIndex.RayTraceStep);
        m_CommandBuffer.Blit(BuiltinRenderTextureType.CameraTarget, kFilteredReflections, material, (int)PassIndex.HitPointToReflections);
        m_CommandBuffer.Blit(kFilteredReflections, kReflectionTextures[0], material, (int)PassIndex.PoissonBlur);

        // Build the blurred mip chain with a separable blur (horizontal pass into
        // a scratch RT, vertical pass into the next mip).
        for (int i = 1; i < maxMip; ++i)
        {
            int inputTex = kReflectionTextures[i - 1];

            int lowMip = i;

            m_CommandBuffer.GetTemporaryRT(kBlurTexture, rtW >> lowMip, rtH >> lowMip, 0, FilterMode.Bilinear, intermediateFormat);
            m_CommandBuffer.SetGlobalVector("_Axis", new Vector4(1.0f, 0.0f, 0.0f, 0.0f));
            m_CommandBuffer.SetGlobalFloat("_CurrentMipLevel", i - 1.0f);

            m_CommandBuffer.Blit(inputTex, kBlurTexture, material, (int)PassIndex.Blur);

            m_CommandBuffer.SetGlobalVector("_Axis", new Vector4(0.0f, 1.0f, 0.0f, 0.0f));

            inputTex = kReflectionTextures[i];
            m_CommandBuffer.Blit(kBlurTexture, inputTex, material, (int)PassIndex.Blur);
            m_CommandBuffer.ReleaseTemporaryRT(kBlurTexture);
        }

        m_CommandBuffer.Blit(kReflectionTextures[0], kFinalReflectionTexture, material, (int)PassIndex.CompositeSSR);

        // Composite over the camera target via a full-resolution temporary
        // (never blit a target onto itself).
        m_CommandBuffer.GetTemporaryRT(kTempTexture, camera_.pixelWidth, camera_.pixelHeight, 0, FilterMode.Bilinear, intermediateFormat);
        m_CommandBuffer.Blit(BuiltinRenderTextureType.CameraTarget, kTempTexture, material, (int)PassIndex.CompositeFinal);
        m_CommandBuffer.Blit(kTempTexture, BuiltinRenderTextureType.CameraTarget);
        m_CommandBuffer.ReleaseTemporaryRT(kTempTexture);
        camera_.AddCommandBuffer(CameraEvent.AfterFinalPass, m_CommandBuffer);
    }
}