static void InitializeCameraData(LightweightRenderPipelineAsset settings, Camera camera, LWRPAdditionalCameraData additionalCameraData, out CameraData cameraData)
{
    const float kRenderScaleThreshold = 0.05f;

    cameraData.camera = camera;

    int msaaSamples = 1;
    if (camera.allowMSAA && settings.msaaSampleCount > 1)
        msaaSamples = (camera.targetTexture != null) ? camera.targetTexture.antiAliasing : settings.msaaSampleCount;

    if (Camera.main == camera && camera.cameraType == CameraType.Game && camera.targetTexture == null)
    {
        // There's no exposed API to control how a backbuffer is created with MSAA.
        // By setting antiAliasing we match the number of samples in camera data with the backbuffer.
        // We only do this for the main camera and it only takes effect at the beginning of the next frame.
        // This setting should not be changed on a per-frame basis, so that's fine.
        QualitySettings.antiAliasing = msaaSamples;
    }

    cameraData.isSceneViewCamera = camera.cameraType == CameraType.SceneView;
    cameraData.isStereoEnabled = IsStereoEnabled(camera);
    cameraData.isHdrEnabled = camera.allowHDR && settings.supportsHDR;

    cameraData.postProcessLayer = camera.GetComponent<PostProcessLayer>();
    cameraData.postProcessEnabled = cameraData.postProcessLayer != null && cameraData.postProcessLayer.isActiveAndEnabled;

    // Disables postprocessing in mobile VR. It's not stable on mobile yet.
    if (cameraData.isStereoEnabled && Application.isMobilePlatform)
        cameraData.postProcessEnabled = false;

    Rect cameraRect = camera.rect;
    cameraData.isDefaultViewport = (!(Math.Abs(cameraRect.x) > 0.0f || Math.Abs(cameraRect.y) > 0.0f ||
        Math.Abs(cameraRect.width) < 1.0f || Math.Abs(cameraRect.height) < 1.0f));

    // If XR is enabled, use XR renderScale.
    // Discard variations lesser than kRenderScaleThreshold.
    // Scale is only enabled for gameview.
    float usedRenderScale = XRGraphics.enabled ? XRGraphics.eyeTextureResolutionScale : settings.renderScale;
    cameraData.renderScale = (Mathf.Abs(1.0f - usedRenderScale) < kRenderScaleThreshold) ? 1.0f : usedRenderScale;
    cameraData.renderScale = (camera.cameraType == CameraType.Game) ? cameraData.renderScale : 1.0f;

    bool anyShadowsEnabled = settings.supportsMainLightShadows || settings.supportsAdditionalLightShadows;
    cameraData.maxShadowDistance = (anyShadowsEnabled) ? settings.shadowDistance : 0.0f;

    if (additionalCameraData != null)
    {
        cameraData.maxShadowDistance = (additionalCameraData.renderShadows) ? cameraData.maxShadowDistance : 0.0f;
        cameraData.requiresDepthTexture = additionalCameraData.requiresDepthTexture;
        cameraData.requiresOpaqueTexture = additionalCameraData.requiresColorTexture;
    }
    else
    {
        cameraData.requiresDepthTexture = settings.supportsCameraDepthTexture;
        cameraData.requiresOpaqueTexture = settings.supportsCameraOpaqueTexture;
    }

    cameraData.requiresDepthTexture |= cameraData.isSceneViewCamera || cameraData.postProcessEnabled;

    var commonOpaqueFlags = SortingCriteria.CommonOpaque;
    var noFrontToBackOpaqueFlags = SortingCriteria.SortingLayer | SortingCriteria.RenderQueue | SortingCriteria.OptimizeStateChanges | SortingCriteria.CanvasOrder;
    bool hasHSRGPU = SystemInfo.hasHiddenSurfaceRemovalOnGPU;
    bool canSkipFrontToBackSorting = (camera.opaqueSortMode == OpaqueSortMode.Default && hasHSRGPU) || camera.opaqueSortMode == OpaqueSortMode.NoDistanceSort;

    cameraData.defaultOpaqueSortFlags = canSkipFrontToBackSorting ? noFrontToBackOpaqueFlags : commonOpaqueFlags;
    cameraData.captureActions = CameraCaptureBridge.GetCaptureActions(camera);

    cameraData.cameraTargetDescriptor = CreateRenderTextureDescriptor(camera, cameraData.renderScale,
        cameraData.isStereoEnabled, cameraData.isHdrEnabled, msaaSamples);
}
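// Illustrative note on the kRenderScaleThreshold snapping above (example values only):
//   usedRenderScale = 0.97f -> renderScale = 1.0f   (|1.0 - 0.97| = 0.03 <  0.05)
//   usedRenderScale = 0.90f -> renderScale = 0.9f   (|1.0 - 0.90| = 0.10 >= 0.05)
// Small deviations from 1.0 are treated as exactly 1.0 before the scale is fed into the render target descriptor.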
static void InitializeCameraData(LightweightRenderPipelineAsset settings, Camera camera, LWRPAdditionalCameraData additionalCameraData, out CameraData cameraData)
{
    const float kRenderScaleThreshold = 0.05f;

    cameraData.camera = camera;
    cameraData.isStereoEnabled = IsStereoEnabled(camera);

    int msaaSamples = 1;
    if (camera.allowMSAA && settings.msaaSampleCount > 1)
        msaaSamples = (camera.targetTexture != null) ? camera.targetTexture.antiAliasing : settings.msaaSampleCount;

    cameraData.isSceneViewCamera = camera.cameraType == CameraType.SceneView;
    cameraData.isHdrEnabled = camera.allowHDR && settings.supportsHDR;

#if UNITY_2019_2_OR_NEWER
    camera.TryGetComponent(out cameraData.postProcessLayer);
#else
    cameraData.postProcessLayer = camera.GetComponent<PostProcessLayer>();
#endif
    cameraData.postProcessEnabled = cameraData.postProcessLayer != null && cameraData.postProcessLayer.isActiveAndEnabled;

    // On Android, Postprocessing v2 works with single-pass double-wide mode and is disabled for multiview.
    var xrDesc = UnityEngine.XR.XRSettings.eyeTextureDesc;
    if (cameraData.isStereoEnabled && Application.isMobilePlatform &&
        Application.platform == RuntimePlatform.Android &&
        xrDesc.dimension == TextureDimension.Tex2DArray)
    {
        cameraData.postProcessEnabled = false;
    }

    Rect cameraRect = camera.rect;
    cameraData.isDefaultViewport = (!(Math.Abs(cameraRect.x) > 0.0f || Math.Abs(cameraRect.y) > 0.0f ||
        Math.Abs(cameraRect.width) < 1.0f || Math.Abs(cameraRect.height) < 1.0f));

    // If XR is enabled, use XR renderScale.
    // Discard variations lesser than kRenderScaleThreshold.
    // Scale is only enabled for gameview.
    float usedRenderScale = XRGraphics.enabled ? XRGraphics.eyeTextureResolutionScale : settings.renderScale;
    cameraData.renderScale = (Mathf.Abs(1.0f - usedRenderScale) < kRenderScaleThreshold) ? 1.0f : usedRenderScale;
    cameraData.renderScale = (camera.cameraType == CameraType.Game) ? cameraData.renderScale : 1.0f;

    bool anyShadowsEnabled = settings.supportsMainLightShadows || settings.supportsAdditionalLightShadows;
    cameraData.maxShadowDistance = Mathf.Min(settings.shadowDistance, camera.farClipPlane);
    cameraData.maxShadowDistance = (anyShadowsEnabled && cameraData.maxShadowDistance >= camera.nearClipPlane) ? cameraData.maxShadowDistance : 0.0f;

    if (additionalCameraData != null)
    {
        cameraData.maxShadowDistance = (additionalCameraData.renderShadows) ? cameraData.maxShadowDistance : 0.0f;
        cameraData.requiresDepthTexture = additionalCameraData.requiresDepthTexture;
        cameraData.requiresOpaqueTexture = additionalCameraData.requiresColorTexture;
    }
    else
    {
        cameraData.requiresDepthTexture = settings.supportsCameraDepthTexture;
        cameraData.requiresOpaqueTexture = settings.supportsCameraOpaqueTexture;
    }

    cameraData.requiresDepthTexture |= cameraData.isSceneViewCamera || cameraData.postProcessEnabled;

    var commonOpaqueFlags = SortingCriteria.CommonOpaque;
    var noFrontToBackOpaqueFlags = SortingCriteria.SortingLayer | SortingCriteria.RenderQueue | SortingCriteria.OptimizeStateChanges | SortingCriteria.CanvasOrder;
    bool hasHSRGPU = SystemInfo.hasHiddenSurfaceRemovalOnGPU;
    bool canSkipFrontToBackSorting = (camera.opaqueSortMode == OpaqueSortMode.Default && hasHSRGPU) || camera.opaqueSortMode == OpaqueSortMode.NoDistanceSort;

    cameraData.defaultOpaqueSortFlags = canSkipFrontToBackSorting ? noFrontToBackOpaqueFlags : commonOpaqueFlags;
    cameraData.captureActions = CameraCaptureBridge.GetCaptureActions(camera);

    cameraData.cameraTargetDescriptor = CreateRenderTextureDescriptor(camera, cameraData.renderScale,
        cameraData.isStereoEnabled, cameraData.isHdrEnabled, msaaSamples);
}
internal static void RenderPostProcessing(CommandBuffer cmd, ref CameraData cameraData, RenderTextureDescriptor sourceDescriptor, RenderTargetIdentifier source, RenderTargetIdentifier destination, bool opaqueOnly, bool flip)
{
    var layer = cameraData.postProcessLayer;
    int effectsCount;
    if (opaqueOnly)
    {
        effectsCount = layer.sortedBundles[PostProcessEvent.BeforeTransparent].Count;
    }
    else
    {
        effectsCount = layer.sortedBundles[PostProcessEvent.BeforeStack].Count +
                       layer.sortedBundles[PostProcessEvent.AfterStack].Count;
    }

    Camera camera = cameraData.camera;
    var postProcessRenderContext = RenderingUtils.postProcessRenderContext;
    postProcessRenderContext.Reset();
    postProcessRenderContext.camera = camera;
    postProcessRenderContext.source = source;
    postProcessRenderContext.sourceFormat = sourceDescriptor.colorFormat;
    postProcessRenderContext.destination = destination;
    postProcessRenderContext.command = cmd;
    postProcessRenderContext.flip = flip;

    // If there's only one effect in the stack and source is the same as destination, we
    // create an intermediate blit render target to handle it.
    // Otherwise, the PostProcessing system will create the intermediate blit targets itself.
    if (effectsCount == 1 && source == destination)
    {
        RenderTargetIdentifier rtId = new RenderTargetIdentifier(m_PostProcessingTemporaryTargetId);
        RenderTextureDescriptor descriptor = sourceDescriptor;
        descriptor.msaaSamples = 1;
        descriptor.depthBufferBits = 0;

        postProcessRenderContext.destination = rtId;
        cmd.GetTemporaryRT(m_PostProcessingTemporaryTargetId, descriptor, FilterMode.Point);

        if (opaqueOnly)
            cameraData.postProcessLayer.RenderOpaqueOnly(postProcessRenderContext);
        else
            cameraData.postProcessLayer.Render(postProcessRenderContext);

        cmd.Blit(rtId, destination);
        cmd.ReleaseTemporaryRT(m_PostProcessingTemporaryTargetId);
    }
    else
    {
        if (opaqueOnly)
            cameraData.postProcessLayer.RenderOpaqueOnly(postProcessRenderContext);
        else
            cameraData.postProcessLayer.Render(postProcessRenderContext);
    }
}
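// Hypothetical caller sketch (not part of the original source): one way a renderer pass could drive
// RenderPostProcessing above. CommandBufferPool and ScriptableRenderContext come from the SRP core API;
// the class name, profiler tag, and the choice of source/destination/flip values are assumptions for
// illustration only, and the call assumes this sketch lives where RenderPostProcessing is accessible.
class PostProcessPassSketch
{
    const string k_ProfilerTag = "Render PostProcessing (sketch)";

    public void Execute(ScriptableRenderContext context, ref RenderingData renderingData,
        RenderTargetIdentifier source, RenderTargetIdentifier destination)
    {
        CommandBuffer cmd = CommandBufferPool.Get(k_ProfilerTag);

        // Run the full post-processing stack (opaqueOnly: false) and flip, assuming the
        // destination is the backbuffer.
        RenderPostProcessing(cmd, ref renderingData.cameraData,
            renderingData.cameraData.cameraTargetDescriptor, source, destination,
            opaqueOnly: false, flip: true);

        context.ExecuteCommandBuffer(cmd);
        CommandBufferPool.Release(cmd);
    }
}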
static void InitializeCameraData(PipelineSettings settings, Camera camera, out CameraData cameraData)
{
    const float kRenderScaleThreshold = 0.05f;

    cameraData.camera = camera;

    bool msaaEnabled = camera.allowMSAA && settings.msaaSampleCount > 1;
    if (msaaEnabled)
        cameraData.msaaSamples = (camera.targetTexture != null) ? camera.targetTexture.antiAliasing : settings.msaaSampleCount;
    else
        cameraData.msaaSamples = 1;

    cameraData.isSceneViewCamera = camera.cameraType == CameraType.SceneView;
    cameraData.isStereoEnabled = IsStereoEnabled(camera);
    cameraData.isHdrEnabled = camera.allowHDR && settings.supportsHDR;

    cameraData.postProcessLayer = camera.GetComponent<PostProcessLayer>();
    cameraData.postProcessEnabled = cameraData.postProcessLayer != null && cameraData.postProcessLayer.isActiveAndEnabled;

    Rect cameraRect = camera.rect;
    cameraData.isDefaultViewport = (!(Math.Abs(cameraRect.x) > 0.0f || Math.Abs(cameraRect.y) > 0.0f ||
        Math.Abs(cameraRect.width) < 1.0f || Math.Abs(cameraRect.height) < 1.0f));

    // If XR is enabled, use XR renderScale.
    // Discard variations lesser than kRenderScaleThreshold.
    // Scale is only enabled for gameview.
    float usedRenderScale = XRGraphics.enabled ? XRGraphics.eyeTextureResolutionScale : settings.renderScale;
    cameraData.renderScale = (Mathf.Abs(1.0f - usedRenderScale) < kRenderScaleThreshold) ? 1.0f : usedRenderScale;
    cameraData.renderScale = (camera.cameraType == CameraType.Game) ? cameraData.renderScale : 1.0f;

    cameraData.opaqueTextureDownsampling = settings.opaqueDownsampling;

    bool anyShadowsEnabled = settings.supportsMainLightShadows || settings.supportsAdditionalLightShadows;
    cameraData.maxShadowDistance = (anyShadowsEnabled) ? settings.shadowDistance : 0.0f;

    LWRPAdditionalCameraData additionalCameraData = camera.gameObject.GetComponent<LWRPAdditionalCameraData>();
    if (additionalCameraData != null)
    {
        cameraData.maxShadowDistance = (additionalCameraData.renderShadows) ? cameraData.maxShadowDistance : 0.0f;
        cameraData.requiresDepthTexture = additionalCameraData.requiresDepthTexture;
        cameraData.requiresOpaqueTexture = additionalCameraData.requiresColorTexture;
    }
    else
    {
        cameraData.requiresDepthTexture = settings.supportsCameraDepthTexture;
        cameraData.requiresOpaqueTexture = settings.supportsCameraOpaqueTexture;
    }

    cameraData.requiresDepthTexture |= cameraData.isSceneViewCamera || cameraData.postProcessEnabled;

    var commonOpaqueFlags = SortingCriteria.CommonOpaque;
    var noFrontToBackOpaqueFlags = SortingCriteria.SortingLayer | SortingCriteria.RenderQueue | SortingCriteria.OptimizeStateChanges | SortingCriteria.CanvasOrder;
    bool hasHSRGPU = SystemInfo.hasHiddenSurfaceRemovalOnGPU;
    bool canSkipFrontToBackSorting = (camera.opaqueSortMode == OpaqueSortMode.Default && hasHSRGPU) || camera.opaqueSortMode == OpaqueSortMode.NoDistanceSort;

    cameraData.defaultOpaqueSortFlags = canSkipFrontToBackSorting ? noFrontToBackOpaqueFlags : commonOpaqueFlags;
    cameraData.captureActions = CameraCaptureBridge.GetCaptureActions(camera);
}
public static void InitializeRenderingData(LightweightRenderPipelineAsset settings, ref CameraData cameraData, ref CullingResults cullResults, out RenderingData renderingData)
{
    var visibleLights = cullResults.visibleLights;

    int mainLightIndex = GetMainLightIndex(settings, visibleLights);
    bool mainLightCastShadows = false;
    bool additionalLightsCastShadows = false;

    if (cameraData.maxShadowDistance > 0.0f)
    {
        mainLightCastShadows = (mainLightIndex != -1 && visibleLights[mainLightIndex].light != null &&
            visibleLights[mainLightIndex].light.shadows != LightShadows.None);

        // If additional lights are shaded per-vertex they cannot cast shadows
        if (settings.additionalLightsRenderingMode == LightRenderingMode.PerPixel)
        {
            for (int i = 0; i < visibleLights.Length; ++i)
            {
                if (i == mainLightIndex)
                    continue;

                Light light = visibleLights[i].light;

                // LWRP doesn't support additional directional lights or point light shadows yet
                if (visibleLights[i].lightType == LightType.Spot && light != null && light.shadows != LightShadows.None)
                {
                    additionalLightsCastShadows = true;
                    break;
                }
            }
        }
    }

    renderingData.cullResults = cullResults;
    renderingData.cameraData = cameraData;
    renderingData.uiCamera = null;
    InitializeLightData(settings, visibleLights, mainLightIndex, out renderingData.lightData);
    InitializeShadowData(settings, visibleLights, mainLightCastShadows, additionalLightsCastShadows && !renderingData.lightData.shadeAdditionalLightsPerVertex, out renderingData.shadowData);
    renderingData.supportsDynamicBatching = settings.supportsDynamicBatching;
    renderingData.perObjectData = GetPerObjectLightFlags(renderingData.lightData.additionalLightsCount);
    renderingData.pipelineAsset = settings;

    bool platformNeedsToKillAlpha = Application.platform == RuntimePlatform.IPhonePlayer ||
        Application.platform == RuntimePlatform.Android ||
        Application.platform == RuntimePlatform.tvOS;
    renderingData.killAlphaInFinalBlit = !Graphics.preserveFramebufferAlpha && platformNeedsToKillAlpha;
}
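// Hypothetical usage sketch (not part of the original source): how the per-camera initialization above
// could be chained in a render loop. The culling boilerplate and the method name SetupPerCameraData are
// assumptions for illustration; the InitializeCameraData / InitializeRenderingData calls match the
// signatures of the first InitializeCameraData variant and InitializeRenderingData above.
static void SetupPerCameraData(ScriptableRenderContext context, LightweightRenderPipelineAsset asset, Camera camera)
{
    // Cull first, since InitializeRenderingData consumes the visible light list.
    if (!camera.TryGetCullingParameters(out ScriptableCullingParameters cullingParameters))
        return;
    CullingResults cullResults = context.Cull(ref cullingParameters);

    // Per-camera overrides are optional; passing null falls back to the pipeline asset settings.
    var additionalCameraData = camera.GetComponent<LWRPAdditionalCameraData>();

    InitializeCameraData(asset, camera, additionalCameraData, out CameraData cameraData);
    InitializeRenderingData(asset, ref cameraData, ref cullResults, out RenderingData renderingData);

    // renderingData is now ready for the renderer and its passes to consume.
}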