// Builds the per-frame RenderingData from culling output and pipeline settings.
// Collects the indices of visible non-directional lights (capped at maxVisibleAdditionalLights)
// and records whether any directional / punctual shadow-casting lights are visible.
static void InitializeRenderingData(PipelineSettings settings, ref CameraData cameraData, ref CullResults cullResults,
    int maxVisibleAdditionalLights, int maxPerObjectAdditionalLights, out RenderingData renderingData)
{
    List<VisibleLight> visibleLights = cullResults.visibleLights;
    List<int> additionalLightIndices = new List<int>();
    bool directionalShadows = false;
    bool punctualShadows = false;

    // NOTE: lights are only scanned when shadows are enabled for this camera
    // (maxShadowDistance > 0); otherwise no additional light indices are collected.
    if (cameraData.maxShadowDistance > 0.0f)
    {
        int lightCount = visibleLights.Count;
        for (int lightIndex = 0; lightIndex < lightCount; ++lightIndex)
        {
            VisibleLight visibleLight = visibleLights[lightIndex];
            Light light = visibleLight.light;

            // LWRP doesn't support point light shadows yet.
            bool castShadows = light != null
                && light.shadows != LightShadows.None
                && visibleLight.lightType != LightType.Point;

            if (visibleLight.lightType == LightType.Directional)
            {
                if (castShadows)
                    directionalShadows = true;
            }
            else if (additionalLightIndices.Count < maxVisibleAdditionalLights)
            {
                // Punctual shadows only matter when additional lights are shaded per-pixel.
                if (castShadows && settings.additionalLightsRenderingMode == LightRenderingMode.PerPixel)
                    punctualShadows = true;
                additionalLightIndices.Add(lightIndex);
            }
        }
    }

    renderingData.cullResults = cullResults;
    renderingData.cameraData = cameraData;
    InitializeLightData(settings, visibleLights, additionalLightIndices, maxPerObjectAdditionalLights, out renderingData.lightData);
    InitializeShadowData(settings, visibleLights, directionalShadows,
        punctualShadows && !renderingData.lightData.shadeAdditionalLightsPerVertex, out renderingData.shadowData);
    renderingData.supportsDynamicBatching = settings.supportsDynamicBatching;
}
// Copies the camera color attachment to the camera target (backbuffer or camera RT).
// In stereo a null material is passed so cmd.Blit uses the default blit path.
void FinalBlitPass(ref ScriptableRenderContext context, ref CameraData cameraData)
{
    Material material = cameraData.isStereoEnabled ? null : m_BlitMaterial;
    RenderTargetIdentifier sourceRT = GetSurface(colorAttachmentHandle);

    CommandBuffer cmd = CommandBufferPool.Get("Final Blit Pass");
    cmd.SetGlobalTexture("_BlitTex", sourceRT);

    if (!cameraData.isDefaultViewport)
    {
        // Non-default viewport: draw a full-screen quad manually so the camera viewport
        // can be applied (cmd.Blit always covers the whole render target).
        SetRenderTarget(cmd, BuiltinRenderTextureType.CameraTarget, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, ClearFlag.All, Color.black);
        cmd.SetViewProjectionMatrices(Matrix4x4.identity, Matrix4x4.identity);
        cmd.SetViewport(cameraData.camera.pixelRect);
        LightweightPipeline.DrawFullScreen(cmd, material);
    }
    else
    {
        // FIX: reuse the already-resolved sourceRT instead of calling
        // GetSurface(colorAttachmentHandle) a second time.
        cmd.Blit(sourceRT, BuiltinRenderTextureType.CameraTarget, material);
    }

    context.ExecuteCommandBuffer(cmd);
    CommandBufferPool.Release(cmd);
}
// Fills CameraData from pipeline settings and per-camera state: MSAA, HDR,
// post-processing, render scale, depth/opaque texture requirements, shadow
// distance, and default opaque sort flags.
static void InitializeCameraData(PipelineSettings settings, Camera camera, out CameraData cameraData)
{
    // Render scale variations smaller than this are snapped back to 1.0.
    const float kRenderScaleThreshold = 0.05f;
    cameraData.camera = camera;

    bool msaaEnabled = camera.allowMSAA && settings.msaaSampleCount > 1;
    if (msaaEnabled)
    {
        // When rendering into a RenderTexture, the texture's own AA setting wins.
        cameraData.msaaSamples = (camera.targetTexture != null) ? camera.targetTexture.antiAliasing : settings.msaaSampleCount;
    }
    else
    {
        cameraData.msaaSamples = 1;
    }

    cameraData.isSceneViewCamera = camera.cameraType == CameraType.SceneView;
    cameraData.isOffscreenRender = camera.targetTexture != null && !cameraData.isSceneViewCamera;
    cameraData.isStereoEnabled = IsStereoEnabled(camera);
    cameraData.isHdrEnabled = camera.allowHDR && settings.supportsHDR;

    cameraData.postProcessLayer = camera.GetComponent<PostProcessLayer>();
    cameraData.postProcessEnabled = cameraData.postProcessLayer != null && cameraData.postProcessLayer.isActiveAndEnabled;

    // Default viewport == full-screen rect (origin at 0, width/height covering 1.0).
    Rect cameraRect = camera.rect;
    cameraData.isDefaultViewport = (!(Math.Abs(cameraRect.x) > 0.0f || Math.Abs(cameraRect.y) > 0.0f ||
        Math.Abs(cameraRect.width) < 1.0f || Math.Abs(cameraRect.height) < 1.0f));

    // If XR is enabled, use XR renderScale.
    // Discard variations lesser than kRenderScaleThreshold.
    // Scale is only enabled for gameview.
    float usedRenderScale = XRGraphicsConfig.enabled ? settings.savedXRGraphicsConfig.renderScale : settings.renderScale;
    cameraData.renderScale = (Mathf.Abs(1.0f - usedRenderScale) < kRenderScaleThreshold) ? 1.0f : usedRenderScale;
    cameraData.renderScale = (camera.cameraType == CameraType.Game) ? cameraData.renderScale : 1.0f;

    cameraData.requiresDepthTexture = settings.supportsCameraDepthTexture || cameraData.isSceneViewCamera;
    cameraData.requiresSoftParticles = settings.supportsSoftParticles;
    cameraData.requiresOpaqueTexture = settings.supportsCameraOpaqueTexture;
    cameraData.opaqueTextureDownsampling = settings.opaqueDownsampling;

    bool anyShadowsEnabled = settings.supportsDirectionalShadows || settings.supportsLocalShadows;
    cameraData.maxShadowDistance = (anyShadowsEnabled) ? settings.shadowDistance : 0.0f;

    // Per-camera overrides via LightweightAdditionalCameraData, when present.
    LightweightAdditionalCameraData additionalCameraData = camera.gameObject.GetComponent<LightweightAdditionalCameraData>();
    if (additionalCameraData != null)
    {
        cameraData.maxShadowDistance = (additionalCameraData.renderShadows) ? cameraData.maxShadowDistance : 0.0f;
        cameraData.requiresDepthTexture &= additionalCameraData.requiresDepthTexture;
        cameraData.requiresOpaqueTexture &= additionalCameraData.requiresColorTexture;
    }
    else if (!cameraData.isSceneViewCamera && camera.cameraType != CameraType.Reflection && camera.cameraType != CameraType.Preview)
    {
        // Game cameras without the component get no extra textures by default.
        cameraData.requiresDepthTexture = false;
        cameraData.requiresOpaqueTexture = false;
    }

    // Force a depth texture whenever post-processing is enabled.
    cameraData.requiresDepthTexture |= cameraData.postProcessEnabled;

    var commonOpaqueFlags = SortFlags.CommonOpaque;
    var noFrontToBackOpaqueFlags = SortFlags.SortingLayer | SortFlags.RenderQueue | SortFlags.OptimizeStateChanges | SortFlags.CanvasOrder;
    // Front-to-back sorting can be skipped when the GPU performs hidden surface removal,
    // or when the camera explicitly opts out of distance sorting.
    bool hasHSRGPU = SystemInfo.hasHiddenSurfaceRemovalOnGPU;
    bool canSkipFrontToBackSorting = (camera.opaqueSortMode == OpaqueSortMode.Default && hasHSRGPU) || camera.opaqueSortMode == OpaqueSortMode.NoDistanceSort;
    cameraData.defaultOpaqueSortFlags = canSkipFrontToBackSorting ? noFrontToBackOpaqueFlags : commonOpaqueFlags;
}
// Configures the PostProcessRenderContext and runs the camera's post-process layer,
// either the opaque-only effects or the full stack.
public static void RenderPostProcess(CommandBuffer cmd, PostProcessRenderContext context, ref CameraData cameraData,
    RenderTextureFormat colorFormat, RenderTargetIdentifier source, RenderTargetIdentifier dest, bool opaqueOnly)
{
    Camera camera = cameraData.camera;
    // Flip only when rendering straight to the backbuffer in non-stereo mode.
    bool flipImage = !IsStereoEnabled(camera) && camera.targetTexture == null;

    context.Reset();
    context.camera = camera;
    context.command = cmd;
    context.source = source;
    context.sourceFormat = colorFormat;
    context.destination = dest;
    context.flip = flipImage;

    PostProcessLayer layer = cameraData.postProcessLayer;
    if (opaqueOnly)
        layer.RenderOpaqueOnly(context);
    else
        layer.Render(context);
}
// Fills CameraData from the pipeline asset and per-camera state: MSAA, HDR,
// post-processing, render scale, depth/opaque texture requirements, and shadow distance.
void InitializeCameraData(Camera camera, out CameraData cameraData)
{
    // Render scale variations smaller than this are snapped back to 1.0.
    const float kRenderScaleThreshold = 0.05f;
    cameraData.camera = camera;

    bool msaaEnabled = camera.allowMSAA && pipelineAsset.msaaSampleCount > 1;
    if (msaaEnabled)
    {
        // When rendering into a RenderTexture, the texture's own AA setting wins.
        cameraData.msaaSamples = (camera.targetTexture != null) ? camera.targetTexture.antiAliasing : pipelineAsset.msaaSampleCount;
    }
    else
    {
        cameraData.msaaSamples = 1;
    }

    cameraData.isSceneViewCamera = camera.cameraType == CameraType.SceneView;
    cameraData.isOffscreenRender = camera.targetTexture != null && !cameraData.isSceneViewCamera;
    cameraData.isStereoEnabled = IsStereoEnabled(camera);

    // TODO: There's currently an issue in engine side that breaks MSAA with texture2DArray.
    // for now we force msaa disabled when using texture2DArray. This fixes VR multiple and single pass instanced modes.
    if (cameraData.isStereoEnabled && XRGraphicsConfig.eyeTextureDesc.dimension == TextureDimension.Tex2DArray)
    {
        cameraData.msaaSamples = 1;
    }

    cameraData.isHdrEnabled = camera.allowHDR && pipelineAsset.supportsHDR;

    cameraData.postProcessLayer = camera.GetComponent<PostProcessLayer>();
    cameraData.postProcessEnabled = cameraData.postProcessLayer != null && cameraData.postProcessLayer.isActiveAndEnabled;

    // Default viewport == full-screen rect (origin at 0, width/height covering 1.0).
    Rect cameraRect = camera.rect;
    cameraData.isDefaultViewport = (!(Math.Abs(cameraRect.x) > 0.0f || Math.Abs(cameraRect.y) > 0.0f ||
        Math.Abs(cameraRect.width) < 1.0f || Math.Abs(cameraRect.height) < 1.0f));

    // If XR is enabled, use XR renderScale.
    // Discard variations lesser than kRenderScaleThreshold.
    // Scale is only enabled for gameview.
    float usedRenderScale = XRGraphicsConfig.enabled ? pipelineAsset.savedXRGraphicsConfig.renderScale : pipelineAsset.renderScale;
    cameraData.renderScale = (Mathf.Abs(1.0f - usedRenderScale) < kRenderScaleThreshold) ? 1.0f : usedRenderScale;
    cameraData.renderScale = (camera.cameraType == CameraType.Game) ? cameraData.renderScale : 1.0f;

    cameraData.requiresDepthTexture = pipelineAsset.supportsCameraDepthTexture || cameraData.isSceneViewCamera;
    cameraData.requiresSoftParticles = pipelineAsset.supportsSoftParticles;
    cameraData.requiresOpaqueTexture = pipelineAsset.supportsCameraOpaqueTexture;
    cameraData.opaqueTextureDownsampling = pipelineAsset.opaqueDownsampling;

    bool anyShadowsEnabled = pipelineAsset.supportsDirectionalShadows || pipelineAsset.supportsLocalShadows;
    cameraData.maxShadowDistance = (anyShadowsEnabled) ? pipelineAsset.shadowDistance : 0.0f;

    // Per-camera overrides via LightweightAdditionalCameraData, when present.
    LightweightAdditionalCameraData additionalCameraData = camera.gameObject.GetComponent<LightweightAdditionalCameraData>();
    if (additionalCameraData != null)
    {
        cameraData.maxShadowDistance = (additionalCameraData.renderShadows) ? cameraData.maxShadowDistance : 0.0f;
        cameraData.requiresDepthTexture &= additionalCameraData.requiresDepthTexture;
        cameraData.requiresOpaqueTexture &= additionalCameraData.requiresColorTexture;
    }
    else if (!cameraData.isSceneViewCamera && camera.cameraType != CameraType.Reflection && camera.cameraType != CameraType.Preview)
    {
        // Game cameras without the component get no extra textures by default.
        cameraData.requiresDepthTexture = false;
        cameraData.requiresOpaqueTexture = false;
    }

    // Force a depth texture whenever post-processing is enabled.
    cameraData.requiresDepthTexture |= cameraData.postProcessEnabled;
}
// Draws all transparent geometry into the current color/depth targets,
// loading (not clearing) the existing contents so opaques underneath are kept.
void RenderTransparents(ref ScriptableRenderContext context, ref CullResults cullResults, ref CameraData cameraData,
    RendererConfiguration rendererConfiguration, bool dynamicBatching)
{
    CommandBuffer cmd = CommandBufferPool.Get(k_RenderTransparentsTag);
    using (new ProfilingSample(cmd, k_RenderTransparentsTag))
    {
        Camera currentCamera = cameraData.camera;

        SetRenderTarget(cmd, RenderBufferLoadAction.Load, RenderBufferStoreAction.Store, ClearFlag.None, Color.black);
        context.ExecuteCommandBuffer(cmd);
        cmd.Clear();

        var transparentDrawSettings = CreateDrawRendererSettings(currentCamera, SortFlags.CommonTransparent, rendererConfiguration, dynamicBatching);
        context.DrawRenderers(cullResults.visibleRenderers, ref transparentDrawSettings, renderer.transparentFilterSettings);

        // Fallback: draw anything that matched no shader pass with the error shader.
        RenderObjectsWithError(ref context, ref cullResults, currentCamera, renderer.transparentFilterSettings, SortFlags.None);
    }
    context.ExecuteCommandBuffer(cmd);
    CommandBufferPool.Release(cmd);
}
// Draws all opaque geometry (plus the skybox when requested) into the current
// color/depth targets, clearing them according to the camera's clear flags.
void RenderOpaques(ref ScriptableRenderContext context, ref CullResults cullResults, ref CameraData cameraData,
    RendererConfiguration rendererConfiguration, bool dynamicBatching)
{
    CommandBuffer cmd = CommandBufferPool.Get(k_RenderOpaquesTag);
    using (new ProfilingSample(cmd, k_RenderOpaquesTag))
    {
        Camera currentCamera = cameraData.camera;
        ClearFlag clearFlag = GetCameraClearFlag(currentCamera);
        Color clearColor = CoreUtils.ConvertSRGBToActiveColorSpace(currentCamera.backgroundColor);
        SetRenderTarget(cmd, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, clearFlag, clearColor);

        // TODO: We need a proper way to handle multiple camera/ camera stack. Issue is: multiple cameras can share a same RT
        // (e.g, split screen games). However devs have to be dilligent with it and know when to clear/preserve color.
        // For now we make it consistent by resolving viewport with a RT until we can have a proper camera management system
        //if (colorAttachmentHandle == -1 && !cameraData.isDefaultViewport)
        //    cmd.SetViewport(camera.pixelRect);

        context.ExecuteCommandBuffer(cmd);
        cmd.Clear();

        var opaqueDrawSettings = CreateDrawRendererSettings(currentCamera, SortFlags.CommonOpaque, rendererConfiguration, dynamicBatching);
        context.DrawRenderers(cullResults.visibleRenderers, ref opaqueDrawSettings, renderer.opaqueFilterSettings);

        // Fallback: draw anything that matched no shader pass with the error shader.
        RenderObjectsWithError(ref context, ref cullResults, currentCamera, renderer.opaqueFilterSettings, SortFlags.None);

        if (currentCamera.clearFlags == CameraClearFlags.Skybox)
            context.DrawSkybox(currentCamera);
    }
    context.ExecuteCommandBuffer(cmd);
    CommandBufferPool.Release(cmd);
}
// Draws all opaque geometry (plus the skybox when requested) into the current
// color/depth targets, clearing them according to the camera's clear flags.
void RenderOpaques(ref ScriptableRenderContext context, ref CullResults cullResults, ref CameraData cameraData,
    RendererConfiguration rendererConfiguration, bool dynamicBatching)
{
    CommandBuffer cmd = CommandBufferPool.Get(k_RenderOpaquesTag);
    using (new ProfilingSample(cmd, k_SetupRenderTargetTag))
    {
        Camera camera = cameraData.camera;
        ClearFlag clearFlag = GetCameraClearFlag(camera);
        SetRenderTarget(cmd, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, clearFlag, CoreUtils.ConvertSRGBToActiveColorSpace(camera.backgroundColor));

        // If rendering to an intermediate RT we resolve viewport on blit due to offset not being supported
        // while rendering to a RT.
        // BUGFIX: the viewport must be applied here only when rendering directly to the camera
        // target (colorAttachmentHandle == -1) with a NON-default viewport. The previous
        // condition (cameraData.isDefaultViewport, no negation) skipped partial viewports and
        // only ran for the full-screen case, where SetViewport(pixelRect) is a no-op.
        if (colorAttachmentHandle == -1 && !cameraData.isDefaultViewport)
        {
            cmd.SetViewport(camera.pixelRect);
        }

        context.ExecuteCommandBuffer(cmd);
        cmd.Clear();

        var drawSettings = CreateDrawRendererSettings(camera, SortFlags.CommonOpaque, rendererConfiguration, dynamicBatching);
        context.DrawRenderers(cullResults.visibleRenderers, ref drawSettings, renderer.opaqueFilterSettings);

        // Render objects that did not match any shader pass with error shader
        RenderObjectsWithError(ref context, ref cullResults, camera, renderer.opaqueFilterSettings, SortFlags.None);

        if (camera.clearFlags == CameraClearFlags.Skybox)
        {
            context.DrawSkybox(camera);
        }
    }
    context.ExecuteCommandBuffer(cmd);
    CommandBufferPool.Release(cmd);
}
// Fills CameraData from the pipeline asset and per-camera state. This variant disables
// post-processing in stereo and reads the XR render scale from XRSettings.
void InitializeCameraData(Camera camera, out CameraData cameraData)
{
    // Render scale variations smaller than this are snapped back to 1.0.
    const float kRenderScaleThreshold = 0.05f;
    cameraData.camera = camera;

    bool msaaEnabled = camera.allowMSAA && pipelineAsset.msaaSampleCount > 1;
    if (msaaEnabled)
    {
        // When rendering into a RenderTexture, the texture's own AA setting wins.
        cameraData.msaaSamples = (camera.targetTexture != null) ? camera.targetTexture.antiAliasing : pipelineAsset.msaaSampleCount;
    }
    else
    {
        cameraData.msaaSamples = 1;
    }

    cameraData.isSceneViewCamera = camera.cameraType == CameraType.SceneView;
    cameraData.isOffscreenRender = camera.targetTexture != null && !cameraData.isSceneViewCamera;
    cameraData.isStereoEnabled = IsStereoEnabled(camera);
    cameraData.isHdrEnabled = camera.allowHDR && pipelineAsset.supportsHDR;

    cameraData.postProcessLayer = camera.GetComponent<PostProcessLayer>();
    cameraData.postProcessEnabled = cameraData.postProcessLayer != null && cameraData.postProcessLayer.isActiveAndEnabled;

    // PostProcess for VR is not working atm. Disable it for now.
    cameraData.postProcessEnabled &= !cameraData.isStereoEnabled;

    // Default viewport == full-screen rect (origin at 0, width/height covering 1.0).
    Rect cameraRect = camera.rect;
    cameraData.isDefaultViewport = (!(Math.Abs(cameraRect.x) > 0.0f || Math.Abs(cameraRect.y) > 0.0f ||
        Math.Abs(cameraRect.width) < 1.0f || Math.Abs(cameraRect.height) < 1.0f));

    // Discard variations lesser than kRenderScaleThreshold.
    // Scale is only enabled for gameview.
    // In XR mode, grab renderScale from XRSettings instead of SRP asset for now.
    // This is just a temporary change pending full integration of XR with SRP
    if (camera.cameraType == CameraType.Game)
    {
#if !UNITY_SWITCH
        if (cameraData.isStereoEnabled)
        {
            cameraData.renderScale = XRSettings.eyeTextureResolutionScale;
        }
        else
#endif
        {
            cameraData.renderScale = pipelineAsset.renderScale;
        }
    }
    else
    {
        cameraData.renderScale = 1.0f;
    }
    // Snap near-1.0 scales to exactly 1.0.
    cameraData.renderScale = (Mathf.Abs(1.0f - cameraData.renderScale) < kRenderScaleThreshold) ? 1.0f : cameraData.renderScale;

    cameraData.requiresDepthTexture = pipelineAsset.supportsCameraDepthTexture || cameraData.isSceneViewCamera;
    cameraData.requiresSoftParticles = pipelineAsset.supportsSoftParticles;
    cameraData.requiresOpaqueTexture = pipelineAsset.supportsCameraOpaqueTexture;
    cameraData.opaqueTextureDownsampling = pipelineAsset.opaqueDownsampling;

    bool anyShadowsEnabled = pipelineAsset.supportsDirectionalShadows || pipelineAsset.supportsLocalShadows;
    cameraData.maxShadowDistance = (anyShadowsEnabled) ? pipelineAsset.shadowDistance : 0.0f;

    // Per-camera overrides via LightweightAdditionalCameraData, when present.
    LightweightAdditionalCameraData additionalCameraData = camera.gameObject.GetComponent<LightweightAdditionalCameraData>();
    if (additionalCameraData != null)
    {
        cameraData.maxShadowDistance = (additionalCameraData.renderShadows) ? cameraData.maxShadowDistance : 0.0f;
        cameraData.requiresDepthTexture &= additionalCameraData.requiresDepthTexture;
        cameraData.requiresOpaqueTexture &= additionalCameraData.requiresColorTexture;
    }
    else if (!cameraData.isSceneViewCamera && camera.cameraType != CameraType.Reflection && camera.cameraType != CameraType.Preview)
    {
        // Game cameras without the component get no extra textures by default.
        cameraData.requiresDepthTexture = false;
        cameraData.requiresOpaqueTexture = false;
    }

    // Force a depth texture whenever post-processing is enabled.
    cameraData.requiresDepthTexture |= cameraData.postProcessEnabled;
}
// Convenience overload: computes the flip flag (flip only when rendering straight to the
// backbuffer in non-stereo mode) and forwards to the full overload.
public void RenderPostProcess(CommandBuffer cmd, ref CameraData cameraData, RenderTextureFormat colorFormat,
    RenderTargetIdentifier source, RenderTargetIdentifier dest, bool opaqueOnly)
{
    bool flipImage = !cameraData.isStereoEnabled && cameraData.camera.targetTexture == null;
    RenderPostProcess(cmd, ref cameraData, colorFormat, source, dest, opaqueOnly, flipImage);
}
// Uploads _ScaledScreenParams for the current camera: scaled pixel dimensions plus
// the 1 + 1/size terms, matching the layout of the built-in _ScreenParams vector.
void SetupPerCameraShaderConstants(CameraData cameraData)
{
    float cameraWidth = (float)cameraData.camera.pixelWidth * cameraData.renderScale;
    // BUGFIX: height must come from pixelHeight — it previously used pixelWidth,
    // so the y/w components of _ScaledScreenParams were wrong for non-square viewports.
    float cameraHeight = (float)cameraData.camera.pixelHeight * cameraData.renderScale;
    Shader.SetGlobalVector(PerCameraBuffer._ScaledScreenParams, new Vector4(cameraWidth, cameraHeight, 1.0f + 1.0f / cameraWidth, 1.0f + 1.0f / cameraHeight));
}