// Invoked once per camera/injection point pair each frame; returns whether this effect should render.
public override bool Setup(ref RenderingData renderingData, CustomPostProcessInjectionPoint injectionPoint)
{
    // Fetch the glitch settings from the currently active volume stack.
    m_VolumeComponent = VolumeManager.instance.stack.GetComponent<GlitchEffect>();
    // Rendering is only needed while the power parameter is positive.
    return m_VolumeComponent.power.value > 0;
}
// Invoked once per camera/injection point pair each frame; returns whether this effect should render.
public override bool Setup(ref RenderingData renderingData, CustomPostProcessInjectionPoint injectionPoint)
{
    // Both sub-effects read their settings from the currently active volume stack.
    var volumeStack = VolumeManager.instance.stack;
    m_GrayScaleComponent = volumeStack.GetComponent<GrayscaleEffect>();
    m_InvertComponent = volumeStack.GetComponent<InvertEffect>();
    // Rendering is required as soon as either sub-effect has a positive blend.
    bool grayscaleActive = m_GrayScaleComponent.blend.value > 0;
    bool invertActive = m_InvertComponent.blend.value > 0;
    return grayscaleActive || invertActive;
}
/// <summary>
/// Construct the custom post-processing render pass.
/// </summary>
/// <param name="injectionPoint">The post processing injection point this pass renders at</param>
/// <param name="renderers">The list of renderers to be executed by this render pass</param>
public CustomPostProcessRenderPass(CustomPostProcessInjectionPoint injectionPoint, List<CustomPostProcessRenderer> renderers)
{
    this.injectionPoint = injectionPoint;
    this.m_PostProcessRenderers = renderers;

    // Build one profiling sampler per renderer, named after its custom attribute (may be null).
    this.m_ProfilingSamplers = new List<ProfilingSampler>(renderers.Count);
    for (int i = 0; i < renderers.Count; i++)
    {
        var attribute = CustomPostProcessAttribute.GetAttribute(renderers[i].GetType());
        m_ProfilingSamplers.Add(new ProfilingSampler(attribute?.Name));
    }

    // Pre-size the list that will hold the indices of the renderers active on a given frame.
    this.m_ActivePostProcessRenderers = new List<int>(renderers.Count);

    // Derive the render pass event and the pass display name from the injection point.
    switch (injectionPoint)
    {
        case CustomPostProcessInjectionPoint.AfterOpaqueAndSky:
            renderPassEvent = RenderPassEvent.AfterRenderingSkybox;
            m_PassName = "Custom PostProcess after Opaque & Sky";
            break;
        case CustomPostProcessInjectionPoint.BeforePostProcess:
            renderPassEvent = RenderPassEvent.BeforeRenderingPostProcessing;
            m_PassName = "Custom PostProcess before PostProcess";
            break;
        case CustomPostProcessInjectionPoint.AfterPostProcess:
            // NOTE: "AfterRenderingPostProcessing" was tried first, but it made the builtin
            // post-processing blit directly to the camera target, so "AfterRendering" is used.
            renderPassEvent = RenderPassEvent.AfterRendering;
            m_PassName = "Custom PostProcess after PostProcess";
            break;
    }

    // Initialize the two intermediate render target handles; their allocation flags start
    // as false (bool arrays are zero-initialized in C#).
    m_Intermediate = new RenderTargetHandle[2];
    m_Intermediate[0].Init("_IntermediateRT0");
    m_Intermediate[1].Init("_IntermediateRT1");
    m_IntermediateAllocated = new bool[2];
}
// Invoked once per camera/injection point pair each frame; returns whether this effect should render.
public override bool Setup(ref RenderingData renderingData, CustomPostProcessInjectionPoint injectionPoint)
{
    // Fetch the after-image settings from the currently active volume stack.
    m_VolumeComponent = VolumeManager.instance.stack.GetComponent<AfterImageEffect>();
    // The effect only needs to run when the blend color contributes something and time advances.
    bool shouldRender = m_VolumeComponent.blend.value != Color.black
                     && m_VolumeComponent.timeScale.value > 0;
    if (!shouldRender)
    {
        // Invalidate any existing history for this camera. Without this, a very old history
        // could resurface after the effect has been disabled for many frames.
        if (_histories.TryGetValue(renderingData.cameraData.camera.GetInstanceID(), out CameraHistory cameraHistory))
        {
            cameraHistory.Invalidated = true;
        }
    }
    return shouldRender;
}
// Executes the actual rendering for this effect.
public override void Render(CommandBuffer cmd, RenderTargetIdentifier source, RenderTargetIdentifier destination, ref RenderingData renderingData, CustomPostProcessInjectionPoint injectionPoint)
{
    // Accumulate our own scaled time. The delta is computed against the previously recorded
    // time stamp because this method can be called more than once within a single frame.
    float now = Time.time;
    timeElapsed += m_VolumeComponent.speed.value * (now - previousFrameTime);
    previousFrameTime = now;

    // Push the effect parameters to the material, if one is available.
    if (m_Material != null)
    {
        m_Material.SetFloat(ShaderIDs.Power, m_VolumeComponent.power.value);
        m_Material.SetFloat(ShaderIDs.Time, timeElapsed);
        m_Material.SetFloat(ShaderIDs.Scale, m_VolumeComponent.scale.value);
    }

    // A shader graph is used here, so CoreUtils.DrawFullScreen can't be used without
    // modifying the vertex shader; CommandBuffer.Blit is the easy route instead (the same
    // applies to legacy image effect shaders). Pass index 0 is passed explicitly so that
    // extra passes are not drawn.
    cmd.Blit(source, destination, m_Material, 0);
}
// Executes the actual rendering for this effect.
public override void Render(CommandBuffer cmd, RenderTargetIdentifier source, RenderTargetIdentifier destination, ref RenderingData renderingData, CustomPostProcessInjectionPoint injectionPoint)
{
    // Push the blend amount to the material, if one is available.
    if (m_Material != null)
    {
        m_Material.SetFloat(ShaderIDs.Blend, m_VolumeComponent.blend.value);
    }
    // Bind the camera color buffer, then draw a fullscreen triangle into the destination.
    cmd.SetGlobalTexture(ShaderIDs.Input, source);
    CoreUtils.DrawFullScreen(cmd, m_Material, destination);
}
// Executes the actual rendering for this effect.
public override void Render(CommandBuffer cmd, RenderTargetIdentifier source, RenderTargetIdentifier destination, ref RenderingData renderingData, CustomPostProcessInjectionPoint injectionPoint)
{
    // Push the outline parameters to the material, if one is available.
    if (m_Material != null)
    {
        m_Material.SetFloat(ShaderIDs.Intensity, m_VolumeComponent.intensity.value);
        m_Material.SetFloat(ShaderIDs.Thickness, m_VolumeComponent.thickness.value);
        Vector2 normalRange = m_VolumeComponent.normalThreshold.value;
        Vector2 depthRange = m_VolumeComponent.depthThreshold.value;
        // x/y carry the cosines of the normal angles (y before x: cosine is decreasing, so the
        // larger angle yields the smaller cosine); z/w carry the depth thresholds as-is.
        var packedThreshold = new Vector4(
            Mathf.Cos(normalRange.y * Mathf.Deg2Rad),
            Mathf.Cos(normalRange.x * Mathf.Deg2Rad),
            depthRange.x,
            depthRange.y);
        m_Material.SetVector(ShaderIDs.Threshold, packedThreshold);
        m_Material.SetColor(ShaderIDs.Color, m_VolumeComponent.color.value);
    }
    // Bind the camera color buffer, then draw a fullscreen triangle into the destination.
    cmd.SetGlobalTexture(ShaderIDs.Input, source);
    CoreUtils.DrawFullScreen(cmd, m_Material, destination);
}
/// <summary>
/// Called every frame for each camera when the post process needs to be rendered.
/// </summary>
/// <param name="cmd">Command Buffer used to issue your commands</param>
/// <param name="source">Source Render Target; it contains the camera color buffer in its current state</param>
/// <param name="destination">Destination Render Target</param>
/// <param name="renderingData">Current Rendering Data</param>
/// <param name="injectionPoint">The injection point from which the renderer is being called</param>
public abstract void Render(CommandBuffer cmd, RenderTargetIdentifier source, RenderTargetIdentifier destination, ref RenderingData renderingData, CustomPostProcessInjectionPoint injectionPoint);
/// <summary>
/// Setup function, called every frame once for each camera before Render is called.
/// The base implementation always requests rendering.
/// </summary>
/// <param name="renderingData">Current Rendering Data</param>
/// <param name="injectionPoint">The injection point from which the renderer is being called</param>
/// <returns>True if Render should be called for this camera; false otherwise.</returns>
public virtual bool Setup(ref RenderingData renderingData, CustomPostProcessInjectionPoint injectionPoint) => true;
// Executes the actual rendering for this effect.
public override void Render(CommandBuffer cmd, RenderTargetIdentifier source, RenderTargetIdentifier destination, ref RenderingData renderingData, CustomPostProcessInjectionPoint injectionPoint)
{
    // Push the fog parameters to the material, if one is available.
    if (m_Material != null)
    {
        m_Material.SetFloat(ShaderIDs.Intensity, m_VolumeComponent.intensity.value);
        // The shader consumes the reciprocal of the fog distance as an exponent.
        m_Material.SetFloat(ShaderIDs.Exponent, 1 / m_VolumeComponent.fogDistance.value);
        m_Material.SetVector(ShaderIDs.ColorRange, new Vector2(m_VolumeComponent.nearColorDistance.value, m_VolumeComponent.farColorDistance.value));
        m_Material.SetColor(ShaderIDs.NearFogColor, m_VolumeComponent.nearFogColor.value);
        m_Material.SetColor(ShaderIDs.FarFogColor, m_VolumeComponent.farFogColor.value);
        // Pick the shader feature: the AFTER_TRANSPARENT_ON keyword is disabled only when this
        // renderer runs right after opaque & sky; every other injection point enables it.
        bool afterTransparent = injectionPoint != CustomPostProcessInjectionPoint.AfterOpaqueAndSky;
        if (afterTransparent)
        {
            m_Material.EnableKeyword("AFTER_TRANSPARENT_ON");
        }
        else
        {
            m_Material.DisableKeyword("AFTER_TRANSPARENT_ON");
        }
    }
    // Bind the camera color buffer, then draw a fullscreen triangle into the destination.
    cmd.SetGlobalTexture(ShaderIDs.Input, source);
    CoreUtils.DrawFullScreen(cmd, m_Material, destination);
}
// The actual rendering execution is done here.
// Blends the current frame with a per-camera history texture, writes the result to the
// destination, and refreshes the history so it holds the newly blended frame.
public override void Render(CommandBuffer cmd, RenderTargetIdentifier source, RenderTargetIdentifier destination, ref RenderingData renderingData, CustomPostProcessInjectionPoint injectionPoint)
{
    // The history dictionary is keyed by camera instance id.
    int id = renderingData.cameraData.camera.GetInstanceID();
    // Get an RT descriptor for temporary or history RTs.
    RenderTextureDescriptor descriptor = GetTempRTDescriptor(renderingData);
    CameraHistory history;
    // See if we already have a history for this camera.
    if (_histories.TryGetValue(id, out history))
    {
        var frame = history.Frame;
        // If the camera target has been resized, the history must be re-created at the new size.
        if (frame.width != descriptor.width || frame.height != descriptor.height)
        {
            RenderTexture newframe = new RenderTexture(descriptor);
            newframe.name = "_CameraHistoryTexture";
            if (history.Invalidated) // if invalidated, start over from the current source
            {
                cmd.Blit(source, newframe);
            }
            else // if still valid, copy & resize the old history into the new texture
            {
                // NOTE(review): this uses Graphics.Blit (immediate) rather than cmd.Blit
                // (deferred) — presumably intentional since both operands are plain
                // RenderTextures already containing data; confirm before changing.
                Graphics.Blit(frame, newframe);
            }
            frame.Release();
            history.Frame = newframe;
        }
        else if (history.Invalidated)
        {
            cmd.Blit(source, frame); // if invalidated, refresh the history from the source
        }
        history.Invalidated = false; // the history is valid again from this frame on
    }
    else
    {
        // If we had no history for this camera, create one and seed it from the source.
        history = new CameraHistory(descriptor);
        history.Frame.name = "_CameraHistoryTexture";
        _histories.Add(id, history);
        cmd.Blit(source, history.Frame); // Copy frame from source to history
    }
    // set material properties
    if (m_Material != null)
    {
        Color blend = m_VolumeComponent.blend.value;
        // The amount of blending depends on the delta time so the fade is frame-rate independent.
        // Mathf.Epsilon guards against division by zero when timeScale is 0.
        float power = Time.deltaTime / Mathf.Max(Mathf.Epsilon, m_VolumeComponent.timeScale.value);
        // Raise each channel to the computed power to get the per-frame blend factor.
        blend.r = Mathf.Pow(blend.r, power);
        blend.g = Mathf.Pow(blend.g, power);
        blend.b = Mathf.Pow(blend.b, power);
        m_Material.SetColor(ShaderIDs.Blend, blend);
    }
    // Bind the current frame as the input texture.
    cmd.SetGlobalTexture(ShaderIDs.Input, source);
    // Bind the history frame as the secondary texture.
    cmd.SetGlobalTexture(ShaderIDs.Other, history.Frame);
    // If the destination is the camera target, an intermediate texture is needed to avoid
    // reading from the destination while also writing to it.
    bool isCameraTarget = destination == RenderTargetHandle.CameraTarget.Identifier();
    if (isCameraTarget)
    {
        // Create a temporary target
        cmd.GetTemporaryRT(_intermediate.id, descriptor, FilterMode.Point);
        // blend current frame with history frame into the temporary target
        CoreUtils.DrawFullScreen(cmd, m_Material, _intermediate.Identifier());
        // Copy the temporary target to the destination and history
        cmd.Blit(_intermediate.Identifier(), destination);
        cmd.Blit(_intermediate.Identifier(), history.Frame);
        // Release the temporary target
        cmd.ReleaseTemporaryRT(_intermediate.id);
    }
    else
    {
        // The destination isn't the camera target: blend onto the destination directly,
        CoreUtils.DrawFullScreen(cmd, m_Material, destination);
        // then copy the destination into the history for the next frame.
        cmd.Blit(destination, history.Frame);
    }
}
// Executes the actual rendering for this effect.
public override void Render(CommandBuffer cmd, RenderTargetIdentifier source, RenderTargetIdentifier destination, ref RenderingData renderingData, CustomPostProcessInjectionPoint injectionPoint)
{
    if (m_Material != null)
    {
        // Toggle the grayscale variant; its blend is only uploaded while active.
        float grayscale = m_GrayScaleComponent.blend.value;
        if (grayscale <= 0)
        {
            m_Material.DisableKeyword("GRAYSCALE_ON");
        }
        else
        {
            m_Material.EnableKeyword("GRAYSCALE_ON");
            m_Material.SetFloat(ShaderIDs.GrayBlend, grayscale);
        }
        // Toggle the invert variant the same way.
        float invert = m_InvertComponent.blend.value;
        if (invert <= 0)
        {
            m_Material.DisableKeyword("INVERT_ON");
        }
        else
        {
            m_Material.EnableKeyword("INVERT_ON");
            m_Material.SetFloat(ShaderIDs.InvertBlend, invert);
        }
    }
    // Bind the camera color buffer, then draw a fullscreen triangle into the destination.
    cmd.SetGlobalTexture(ShaderIDs.Input, source);
    CoreUtils.DrawFullScreen(cmd, m_Material, destination);
}
// This code is mostly copied from Unity's HDRP repository
/// <summary>
/// Initialize a reorderable list for the renderers of one injection point.
/// </summary>
/// <param name="reorderableList">The list to (re)create</param>
/// <param name="elements">The backing list of assembly-qualified renderer type names</param>
/// <param name="headerName">The header text drawn above the list</param>
/// <param name="injectionPoint">The injection point whose available renderers populate the add menu</param>
/// <param name="feature">The render feature being edited (dirtied/recreated on changes)</param>
void InitList(ref ReorderableList reorderableList, List<string> elements, string headerName, CustomPostProcessInjectionPoint injectionPoint, CustomPostProcess feature)
{
    reorderableList = new ReorderableList(elements, typeof(string), true, true, true, true);
    reorderableList.drawHeaderCallback = (rect) =>
        EditorGUI.LabelField(rect, headerName, EditorStyles.boldLabel);
    reorderableList.drawElementCallback = (rect, index, isActive, isFocused) =>
    {
        rect.height = EditorGUIUtility.singleLineHeight;
        // Elements are stored as assembly-qualified names; resolve them back to a display name.
        var elemType = Type.GetType(elements[index]);
        EditorGUI.LabelField(rect, GetName(elemType), EditorStyles.boldLabel);
    };
    reorderableList.onAddCallback = (list) =>
    {
        // Offer every renderer registered for this injection point that isn't already in the list.
        var menu = new GenericMenu();
        foreach (var type in _availableRenderers[injectionPoint])
        {
            if (!elements.Contains(type.AssemblyQualifiedName))
            {
                menu.AddItem(new GUIContent(GetName(type)), false, () =>
                {
                    Undo.RegisterCompleteObjectUndo(feature, $"Added {type} Custom Post Process");
                    elements.Add(type.AssemblyQualifiedName);
                    forceRecreate(feature); // This is done since OnValidate doesn't get called.
                });
            }
        }
        if (menu.GetItemCount() == 0)
        {
            // FIX: corrected the typo "Availble" in this user-facing menu entry.
            menu.AddDisabledItem(new GUIContent("No Custom Post Process Available"));
        }
        menu.ShowAsContext();
        EditorUtility.SetDirty(feature);
    };
    reorderableList.onRemoveCallback = (list) =>
    {
        Undo.RegisterCompleteObjectUndo(feature, $"Removed {list.list[list.index]} Custom Post Process");
        elements.RemoveAt(list.index);
        EditorUtility.SetDirty(feature);
        forceRecreate(feature); // This is done since OnValidate doesn't get called.
    };
    reorderableList.elementHeightCallback = _ =>
        EditorGUIUtility.singleLineHeight + EditorGUIUtility.standardVerticalSpacing;
    reorderableList.onReorderCallback = (list) =>
    {
        EditorUtility.SetDirty(feature);
        forceRecreate(feature); // This is done since OnValidate doesn't get called.
    };
}
// Executes the actual rendering for this effect.
public override void Render(CommandBuffer cmd, RenderTargetIdentifier source, RenderTargetIdentifier destination, ref RenderingData renderingData, CustomPostProcessInjectionPoint injectionPoint)
{
    // Push the split offset to the material, if one is available.
    if (m_Material != null)
    {
        // Convert the polar (angle, split) parameters into an offset vector; each component
        // is rounded to a whole number.
        float splitDistance = m_VolumeComponent.split.value;
        float angleRad = m_VolumeComponent.angle.value * Mathf.Deg2Rad;
        var offset = new Vector2(
            Mathf.Round(Mathf.Cos(angleRad) * splitDistance),
            Mathf.Round(Mathf.Sin(angleRad) * splitDistance));
        m_Material.SetVector(ShaderIDs.Split, offset);
    }
    // Bind the camera color buffer, then draw a fullscreen triangle into the destination.
    cmd.SetGlobalTexture(ShaderIDs.Input, source);
    CoreUtils.DrawFullScreen(cmd, m_Material, destination);
}