void OnPreRender()
{
    if (!IsActive)
    {
        OnDisable();
        return;
    }

    if (GL.wireframe)
    {
        OnDisable();
        return;
    }

    if (Instance == null)
    {
        OnEnable();
    }

    XRHelpers.Update(_camera);
    XRHelpers.UpdatePassIndex(ref s_xrPassIndex);

    OnPreRenderOceanMask();
    OnPreRenderUnderwaterEffect();

    _firstRender = false;
}
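// OnPreRender above leans on OnEnable/OnDisable to attach and detach the two command buffers from
// the camera. The pair below is a minimal sketch of that lifecycle, not the actual implementation:
// the CameraEvent hook points (mask before transparents, effect after them), buffer names, and the
// Instance bookkeeping are assumptions.
void OnEnable()
{
    if (_camera == null)
    {
        _camera = GetComponent<Camera>();
    }

    if (_oceanMaskCommandBuffer == null)
    {
        _oceanMaskCommandBuffer = new CommandBuffer() { name = "Ocean Mask" };
    }

    if (_underwaterEffectCommandBuffer == null)
    {
        _underwaterEffectCommandBuffer = new CommandBuffer() { name = "Underwater Effect" };
    }

    // Assumed hook points: render the mask before transparents, apply the underwater fog after them.
    _camera.AddCommandBuffer(CameraEvent.BeforeForwardAlpha, _oceanMaskCommandBuffer);
    _camera.AddCommandBuffer(CameraEvent.AfterForwardAlpha, _underwaterEffectCommandBuffer);

    Instance = this;
}

void OnDisable()
{
    if (_oceanMaskCommandBuffer != null)
    {
        _camera.RemoveCommandBuffer(CameraEvent.BeforeForwardAlpha, _oceanMaskCommandBuffer);
    }

    if (_underwaterEffectCommandBuffer != null)
    {
        _camera.RemoveCommandBuffer(CameraEvent.AfterForwardAlpha, _underwaterEffectCommandBuffer);
    }

    Instance = null;
}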
void OnPreRenderUnderwaterEffect()
{
    // Ensure legacy underwater fog is disabled.
    if (_firstRender)
    {
        OceanRenderer.Instance.OceanMaterial.DisableKeyword("_OLD_UNDERWATER");
    }

    RenderTextureDescriptor descriptor = XRHelpers.GetRenderTextureDescriptor(_camera);
    descriptor.useDynamicScale = _camera.allowDynamicResolution;

    // Format must be correct for CopyTexture to work. Hopefully this is good enough.
    if (_camera.allowHDR)
    {
        descriptor.colorFormat = RenderTextureFormat.DefaultHDR;
    }

    var temporaryColorBuffer = RenderTexture.GetTemporary(descriptor);

    UpdatePostProcessMaterial(
        _camera,
        _underwaterEffectMaterial,
        _sphericalHarmonicsData,
        _meniscus,
        _firstRender || _copyOceanMaterialParamsEachFrame,
        _debug._viewOceanMask,
        // horizonSafetyMarginMultiplier is added to the horizon, so no-op is zero.
        _useHorizonSafetyMarginMultiplier ? _horizonSafetyMarginMultiplier : 0f,
        // farPlaneMultiplier is multiplied to the far plane, so no-op is one.
        _useHorizonSafetyMarginMultiplier ? 1f : _farPlaneMultiplier,
        _filterOceanData,
        s_xrPassIndex
    );

    _underwaterEffectCommandBuffer.Clear();

    if (_camera.allowMSAA)
    {
        // Use blit if MSAA is active because transparents were not included with CopyTexture.
        // Not sure if we need an MSAA resolve? Not sure how to do that...
        _underwaterEffectCommandBuffer.Blit(BuiltinRenderTextureType.CameraTarget, temporaryColorBuffer);
    }
    else
    {
        // Copy the frame buffer as we cannot read/write at the same time. If it causes problems, replace with Blit.
        _underwaterEffectCommandBuffer.CopyTexture(BuiltinRenderTextureType.CameraTarget, temporaryColorBuffer);
    }

    _underwaterEffectMaterial.SetTexture(sp_CrestCameraColorTexture, temporaryColorBuffer);

    _underwaterEffectCommandBuffer.SetRenderTarget(BuiltinRenderTextureType.CameraTarget, 0, CubemapFace.Unknown, -1);
    _underwaterEffectCommandBuffer.DrawProcedural(Matrix4x4.identity, _underwaterEffectMaterial.material, -1, MeshTopology.Triangles, 3, 1);

    RenderTexture.ReleaseTemporary(temporaryColorBuffer);
}
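// The DrawProcedural call above renders a single full-screen triangle (3 vertices, 1 instance) so the
// underwater fragment shader runs once per pixel without needing a mesh. The sp_* identifiers used
// throughout are cached shader property IDs. The sketch below shows the usual caching pattern with
// Shader.PropertyToID; the exact property name strings are assumptions and may differ from the shaders.
static readonly int sp_CrestCameraColorTexture = Shader.PropertyToID("_CrestCameraColorTexture");
static readonly int sp_CrestOceanMaskTexture = Shader.PropertyToID("_CrestOceanMaskTexture");
static readonly int sp_CrestOceanMaskDepthTexture = Shader.PropertyToID("_CrestOceanMaskDepthTexture");
static readonly int sp_InvViewProjection = Shader.PropertyToID("_InvViewProjection");
static readonly int sp_InvViewProjectionRight = Shader.PropertyToID("_InvViewProjectionRight");
static readonly int sp_HorizonPosNormal = Shader.PropertyToID("_HorizonPosNormal");
static readonly int sp_HorizonPosNormalRight = Shader.PropertyToID("_HorizonPosNormalRight");
static readonly int sp_OceanHeight = Shader.PropertyToID("_OceanHeight");
static readonly int sp_DataSliceOffset = Shader.PropertyToID("_DataSliceOffset");
static readonly int sp_AmbientLighting = Shader.PropertyToID("_AmbientLighting");
static readonly int sp_InstanceData = Shader.PropertyToID("_InstanceData");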
void OnPreRenderOceanMask()
{
    RenderTextureDescriptor descriptor = XRHelpers.GetRenderTextureDescriptor(_camera);
    descriptor.useDynamicScale = _camera.allowDynamicResolution;
    InitialiseMaskTextures(descriptor, ref _maskTexture, ref _depthTexture);

    _oceanMaskCommandBuffer.Clear();
    // Passing -1 to depth slice binds all slices. Important for XR SPI to work in both eyes.
    _oceanMaskCommandBuffer.SetRenderTarget(_maskTexture.colorBuffer, _depthTexture.depthBuffer, mipLevel: 0, CubemapFace.Unknown, depthSlice: -1);
    _oceanMaskCommandBuffer.ClearRenderTarget(true, true, Color.white * UNDERWATER_MASK_NO_MASK);
    _oceanMaskCommandBuffer.SetGlobalTexture(sp_CrestOceanMaskTexture, _maskTexture.colorBuffer);
    _oceanMaskCommandBuffer.SetGlobalTexture(sp_CrestOceanMaskDepthTexture, _depthTexture.depthBuffer);

    PopulateOceanMask(
        _oceanMaskCommandBuffer, _camera, OceanRenderer.Instance.Tiles, _cameraFrustumPlanes,
        _oceanMaskMaterial.material,
        _debug._disableOceanMask
    );
}
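// OnPreRenderOceanMask expects InitialiseMaskTextures to (re)allocate the mask colour and depth
// targets from the camera's descriptor. The sketch below is one way to do that, assuming a
// single-channel half-float mask, a separate depth-only texture, and reallocation only when the
// descriptor size changes; the real formats and reuse policy may differ.
internal static void InitialiseMaskTextures(RenderTextureDescriptor desc, ref RenderTexture maskTexture, ref RenderTexture depthTexture)
{
    // Re-allocate only if the target shape changed (e.g. resolution or XR mode).
    if (maskTexture == null || maskTexture.width != desc.width || maskTexture.height != desc.height)
    {
        if (maskTexture != null)
        {
            maskTexture.Release();
            depthTexture.Release();
        }

        // Assumed mask format - a single half-float channel is enough for the mask values.
        desc.colorFormat = RenderTextureFormat.RHalf;
        desc.depthBufferBits = 0;
        maskTexture = new RenderTexture(desc) { name = "Ocean Mask" };
        maskTexture.Create();

        // Separate depth-only texture so the mask depth can be sampled as a global texture.
        desc.colorFormat = RenderTextureFormat.Depth;
        desc.depthBufferBits = 24;
        depthTexture = new RenderTexture(desc) { name = "Ocean Mask Depth" };
        depthTexture.Create();
    }
}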
internal static void UpdatePostProcessMaterial(
    Camera camera,
    PropertyWrapperMaterial underwaterPostProcessMaterialWrapper,
    UnderwaterSphericalHarmonicsData sphericalHarmonicsData,
    bool isMeniscusEnabled,
    bool copyParamsFromOceanMaterial,
    bool debugViewPostProcessMask,
    float horizonSafetyMarginMultiplier,
    float farPlaneMultiplier,
    bool filterOceanData,
    int dataSliceOffset
)
{
    Material underwaterPostProcessMaterial = underwaterPostProcessMaterialWrapper.material;
    if (copyParamsFromOceanMaterial)
    {
        // Measured this at approx 0.05ms on a Dell laptop.
        underwaterPostProcessMaterial.CopyPropertiesFromMaterial(OceanRenderer.Instance.OceanMaterial);
    }

    // Enable/Disable meniscus.
    if (isMeniscusEnabled)
    {
        underwaterPostProcessMaterial.EnableKeyword("CREST_MENISCUS");
    }
    else
    {
        underwaterPostProcessMaterial.DisableKeyword("CREST_MENISCUS");
    }

    // Enabling/disabling keywords each frame doesn't seem to have any measurable overhead.
    if (debugViewPostProcessMask)
    {
        underwaterPostProcessMaterial.EnableKeyword(DEBUG_VIEW_OCEAN_MASK);
    }
    else
    {
        underwaterPostProcessMaterial.DisableKeyword(DEBUG_VIEW_OCEAN_MASK);
    }

    underwaterPostProcessMaterial.SetFloat(LodDataMgr.sp_LD_SliceIndex, 0);
    underwaterPostProcessMaterial.SetVector(sp_InstanceData, new Vector4(OceanRenderer.Instance.ViewerAltitudeLevelAlpha, 0f, 0f, OceanRenderer.Instance.CurrentLodCount));

    LodDataMgrAnimWaves.Bind(underwaterPostProcessMaterialWrapper);
    LodDataMgrSeaFloorDepth.Bind(underwaterPostProcessMaterialWrapper);
    LodDataMgrShadow.Bind(underwaterPostProcessMaterialWrapper);

    float seaLevel = OceanRenderer.Instance.SeaLevel;

    {
        // Only apply the horizon safety margin multiplier when the camera's height relative to the
        // water surface and its height relative to the sea level agree on which side of the surface
        // the camera is. In incredibly turbulent water - if in doubt - use the neutral horizon.
        float seaLevelHeightDifference = camera.transform.position.y - seaLevel;
        if (seaLevelHeightDifference >= 0.0f ^ OceanRenderer.Instance.ViewerHeightAboveWater >= 0.0f)
        {
            horizonSafetyMarginMultiplier = 0.0f;
        }
    }

    {
        underwaterPostProcessMaterial.SetFloat(sp_OceanHeight, seaLevel);
        underwaterPostProcessMaterial.SetInt(sp_DataSliceOffset, dataSliceOffset);

        float maxOceanVerticalDisplacement = OceanRenderer.Instance.MaxVertDisplacement * 0.5f;
        float cameraYPosition = camera.transform.position.y;
        float nearPlaneFrustumWorldHeight;
        {
            float current = camera.ViewportToWorldPoint(new Vector3(0f, 0f, camera.nearClipPlane)).y;
            float maxY = current, minY = current;

            current = camera.ViewportToWorldPoint(new Vector3(0f, 1f, camera.nearClipPlane)).y;
            maxY = Mathf.Max(maxY, current);
            minY = Mathf.Min(minY, current);

            current = camera.ViewportToWorldPoint(new Vector3(1f, 0f, camera.nearClipPlane)).y;
            maxY = Mathf.Max(maxY, current);
            minY = Mathf.Min(minY, current);

            current = camera.ViewportToWorldPoint(new Vector3(1f, 1f, camera.nearClipPlane)).y;
            maxY = Mathf.Max(maxY, current);
            minY = Mathf.Min(minY, current);

            nearPlaneFrustumWorldHeight = maxY - minY;
        }

        // We don't bother tightening the horizon if we know we are going to have to apply the
        // post-processing effect full-screen anyway.
        bool forceFullShader = (cameraYPosition + nearPlaneFrustumWorldHeight + maxOceanVerticalDisplacement) <= seaLevel;
        if (forceFullShader)
        {
            underwaterPostProcessMaterial.EnableKeyword(FULL_SCREEN_EFFECT);
        }
        else
        {
            underwaterPostProcessMaterial.DisableKeyword(FULL_SCREEN_EFFECT);
        }
    }

    // Have to set these explicitly as the built-in transforms aren't in world-space for the blit function.
    if (XRHelpers.IsSinglePass)
    {
        XRHelpers.SetViewProjectionMatrices(camera);

        Matrix4x4 cameraProjectionMatrix = camera.projectionMatrix;

        camera.projectionMatrix = XRHelpers.LeftEyeProjectionMatrix;
        var inverseViewProjectionMatrix = (XRHelpers.LeftEyeProjectionMatrix * camera.worldToCameraMatrix).inverse;
        underwaterPostProcessMaterial.SetMatrix(sp_InvViewProjection, inverseViewProjectionMatrix);

        {
            GetHorizonPosNormal(camera, Camera.MonoOrStereoscopicEye.Left, seaLevel, horizonSafetyMarginMultiplier, farPlaneMultiplier, out Vector2 pos, out Vector2 normal);
            underwaterPostProcessMaterial.SetVector(sp_HorizonPosNormal, new Vector4(pos.x, pos.y, normal.x, normal.y));
        }

        camera.projectionMatrix = XRHelpers.RightEyeProjectionMatrix;
        var inverseViewProjectionMatrixRightEye = (XRHelpers.RightEyeProjectionMatrix * camera.worldToCameraMatrix).inverse;
        underwaterPostProcessMaterial.SetMatrix(sp_InvViewProjectionRight, inverseViewProjectionMatrixRightEye);

        {
            GetHorizonPosNormal(camera, Camera.MonoOrStereoscopicEye.Right, seaLevel, horizonSafetyMarginMultiplier, farPlaneMultiplier, out Vector2 pos, out Vector2 normal);
            underwaterPostProcessMaterial.SetVector(sp_HorizonPosNormalRight, new Vector4(pos.x, pos.y, normal.x, normal.y));
        }

        // Revert to original matrix. Not sure if we need to do this.
        camera.projectionMatrix = cameraProjectionMatrix;
    }
    else
    {
        if (XRHelpers.IsNewSDKRunning)
        {
            XRHelpers.SetViewProjectionMatrices(camera);
        }

        var inverseViewProjectionMatrix = (camera.projectionMatrix * camera.worldToCameraMatrix).inverse;
        underwaterPostProcessMaterial.SetMatrix(sp_InvViewProjection, inverseViewProjectionMatrix);

        {
            GetHorizonPosNormal(camera, Camera.MonoOrStereoscopicEye.Mono, seaLevel, horizonSafetyMarginMultiplier, farPlaneMultiplier, out Vector2 pos, out Vector2 normal);
            underwaterPostProcessMaterial.SetVector(sp_HorizonPosNormal, new Vector4(pos.x, pos.y, normal.x, normal.y));
        }
    }

    // The camera colour texture is bound by OnPreRenderUnderwaterEffect via sp_CrestCameraColorTexture,
    // so there is no source texture to set here.

    // Compute ambient lighting SH.
    {
        // We could pass in a renderer which would prime this lookup. However it doesn't make sense to use an existing render
        // at a different position, as this would then thrash it and negate the priming functionality. We could create a dummy
        // invisible GameObject with a dummy Renderer which might be enough, but this is hacky enough that we'll wait for it
        // to become a problem rather than add a pre-emptive hack.
        UnityEngine.Profiling.Profiler.BeginSample("Underwater sample spherical harmonics");
        LightProbes.GetInterpolatedProbe(OceanRenderer.Instance.ViewCamera.transform.position, null, out SphericalHarmonicsL2 sphericalHarmonicsL2);
        sphericalHarmonicsL2.Evaluate(sphericalHarmonicsData._shDirections, sphericalHarmonicsData._ambientLighting);
        underwaterPostProcessMaterial.SetVector(sp_AmbientLighting, sphericalHarmonicsData._ambientLighting[0]);
        UnityEngine.Profiling.Profiler.EndSample();
    }
}
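// The spherical harmonics block above evaluates the interpolated light probe into pre-allocated
// arrays held by UnderwaterSphericalHarmonicsData, avoiding per-frame allocations. The sketch below
// is a minimal container matching how Evaluate is called; the choice of direction is an assumption,
// as only the first evaluated colour is passed to the material as the ambient lighting term.
internal class UnderwaterSphericalHarmonicsData
{
    // Directions in which to evaluate the interpolated probe. A single fixed direction is assumed here.
    internal Vector3[] _shDirections = { Vector3.up };

    // Evaluate writes one colour per direction; only element 0 is used above.
    internal Color[] _ambientLighting = new Color[1];
}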