// Loads an asset of type T from a path given relative to the HDRP package root.
public static T LoadAsset<T>(string relativePath) where T : UnityEngine.Object
{
    var assetPath = HDUtils.GetHDRenderPipelinePath() + relativePath;
    return AssetDatabase.LoadAssetAtPath<T>(assetPath);
}
// Blurs and re-encodes the camera normal buffer inside screen regions covered by
// decal geometry. Three stages, all recorded into `cmd`:
//   1. render the decals with a replacement material to mark the affected regions,
//   2. decode the packed normals of those regions into a float render target,
//   3. blur and re-encode them back into `cameraNormal` (bound as a UAV),
//      optionally also writing the decal normal-mask render targets.
// Two temporary RTs are allocated up front and released at the end.
void ExecuteNormalBufferBlur(ScriptableRenderContext renderContext, CommandBuffer cmd, HDCamera hdCamera, RTHandle cameraColor, RTHandle cameraDepth, RTHandle cameraNormal, CullingResults cullingResults)
{
    // Nothing to do without the replacement shader, with an empty layer mask,
    // or when decals are disabled for this camera.
    if (!EnsureMaterial(ref passMaterial, NAME_SHADER))
    {
        return;
    }
    if (layerMask == 0)
    {
        return;
    }
    if (!hdCamera.frameSettings.IsEnabled(FrameSettingsField.Decals))
    {
        return;
    }

    int bufferW = cameraColor.rt.width;
    int bufferH = cameraColor.rt.height;

    // Allocate temporary buffers:
    // rtRegions — single-channel (R8) mask of the pixels to process,
    // rtDecoded — half-float target holding the decoded normals.
    cmd.GetTemporaryRT(rtRegions, bufferW, bufferH, (int)DepthBits.None, FilterMode.Point, RenderTextureFormat.R8, RenderTextureReadWrite.Linear, 1, false);
    cmd.GetTemporaryRT(rtDecoded, bufferW, bufferH, (int)DepthBits.None, FilterMode.Point, RenderTextureFormat.ARGBHalf, RenderTextureReadWrite.Linear, 1, false);

    // Render decals to mark blur regions. The mask is cleared to Color.white
    // before the decal geometry writes its coverage.
    CoreUtils.SetRenderTarget(cmd,
        rtRegions, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store,
        cameraDepth, RenderBufferLoadAction.Load, RenderBufferStoreAction.Store,
        ClearFlag.Color, Color.white
    );
    CoreUtils.SetViewport(cmd, cameraDepth);

    // Draw only the decal layer, replacing every material with passMaterial's
    // marking pass (PASS_MARK).
    RendererListDesc renderListDesc = new RendererListDesc(NAME_PASS_REPLACE_TAG, cullingResults, hdCamera.camera)
    {
        rendererConfiguration = PerObjectData.None,
        renderQueueRange = GetRenderQueueRange(queue),
        sortingCriteria = SortingCriteria.None,
        layerMask = layerMask,
        overrideMaterial = passMaterial,
        overrideMaterialPassIndex = PASS_MARK,
        stateBlock = null,
        excludeObjectMotionVectors = false,
    };

    // API moved from HDUtils to CoreUtils in 2020.2.
#if UNITY_2020_2_OR_NEWER
    CoreUtils.DrawRendererList(renderContext, cmd, RendererList.Create(renderListDesc));
#else
    HDUtils.DrawRendererList(renderContext, cmd, RendererList.Create(renderListDesc));
#endif

    // Decode the normal buffer in the marked regions with a fullscreen triangle.
    // The camera normal buffer is additionally bound as a UAV at register u2.
    CoreUtils.SetRenderTarget(cmd,
        rtDecoded, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store,
        cameraDepth, RenderBufferLoadAction.Load, RenderBufferStoreAction.DontCare,
        ClearFlag.None
    );
    CoreUtils.SetViewport(cmd, cameraDepth);
    cmd.SetRandomWriteTarget(2, cameraNormal);
    cmd.DrawProcedural(Matrix4x4.identity, passMaterial, PASS_DECODE, MeshTopology.Triangles, 3, 1);
    cmd.ClearRandomWriteTargets();

    // Blur and re-encode normals in the marked regions. The mask and decoded
    // normals are exposed to the shader as global textures.
    cmd.SetGlobalTexture(rtRegions, rtRegions);
    cmd.SetGlobalTexture(rtDecoded, rtDecoded);
    if (dbufferNormalMaskRTIDs != null)
    {
        // Variant that also writes the decal normal-mask MRTs while re-encoding.
        CoreUtils.SetRenderTarget(cmd, dbufferNormalMaskRTIDs, cameraDepth, ClearFlag.None);
        CoreUtils.SetViewport(cmd, cameraDepth);
        cmd.SetRandomWriteTarget(2, cameraNormal);
        cmd.DrawProcedural(Matrix4x4.identity, passMaterial, PASS_BLUR_AND_ENCODE_AND_DECAL, MeshTopology.Triangles, 3, 1);
        cmd.ClearRandomWriteTargets();
    }
    else
    {
        // No decal mask targets: re-encode into the camera normal UAV only.
        CoreUtils.SetRenderTarget(cmd, cameraDepth, RenderBufferLoadAction.Load, RenderBufferStoreAction.Store, ClearFlag.None
        );
        CoreUtils.SetViewport(cmd, cameraDepth);
        cmd.SetRandomWriteTarget(2, cameraNormal);
        cmd.DrawProcedural(Matrix4x4.identity, passMaterial, PASS_BLUR_AND_ENCODE, MeshTopology.Triangles, 3, 1);
        cmd.ClearRandomWriteTargets();
    }

    // Free temporary buffers.
    cmd.ReleaseTemporaryRT(rtRegions);
    cmd.ReleaseTemporaryRT(rtDecoded);
}
// Denoises a screen-space GI signal with a spatial filter followed by a
// temporal accumulation pass, then refreshes the temporal history buffer.
// NOTE(review): the passes ping-pong — the spatial result is written to
// outputBuffer, the temporal pass reads it back and writes into noisyBuffer,
// and noisyBuffer is what gets copied into the history. The final
// temporally-filtered signal therefore ends up in noisyBuffer, not
// outputBuffer — confirm callers expect this.
public void Denoise(CommandBuffer cmd, HDCamera hdCamera, RTHandle noisyBuffer, RTHandle outputBuffer, bool halfResolution = false, float historyValidity = 1.0f)
{
    // Grab the global illumination volume component
    var giSettings = hdCamera.volumeStack.GetComponent<UnityEngine.Rendering.HighDefinition.GlobalIllumination>();
    var historyDepthBuffer = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth);
    var historyDepthBuffer1 = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth1);

    // If the depth textures are not available, we can't denoise — just pass the signal through.
    if (historyDepthBuffer == null || historyDepthBuffer1 == null)
    {
        HDUtils.BlitCameraTexture(cmd, noisyBuffer, outputBuffer);
        return;
    }

    // Compute the dispatch parameters based on if we are half res or not
    int tileSize = 8;
    int numTilesX, numTilesY;
    Vector4 halfScreenSize;
    EvaluateDispatchParameters(hdCamera, halfResolution, tileSize, out numTilesX, out numTilesY, out halfScreenSize);

    // Pick the right kernel to use
    int m_KernelFilter = halfResolution ? m_SpatialFilterHalfKernel : m_SpatialFilterKernel;

    // Bind the input scalars. The first-mip offset locates mip 1 of the packed
    // depth pyramid atlas; it is passed bit-cast to float.
    var info = m_SharedRTManager.GetDepthBufferMipChainInfo();
    firstMipOffset.Set(HDShadowUtils.Asfloat((uint)info.mipLevelOffsets[1].x), HDShadowUtils.Asfloat((uint)info.mipLevelOffsets[1].y));
    cmd.SetComputeVectorParam(m_SSGIDenoiserCS, HDShaderIDs._DepthPyramidFirstMipLevelOffset, firstMipOffset);
    cmd.SetComputeIntParam(m_SSGIDenoiserCS, HDShaderIDs._IndirectDiffuseSpatialFilter, giSettings.filterRadius);

    // Inject half screen size if required
    if (halfResolution)
    {
        cmd.SetComputeVectorParam(m_SSGIDenoiserCS, HDShaderIDs._HalfScreenSize, halfScreenSize);
    }

    // Bind the input buffers
    cmd.SetComputeTextureParam(m_SSGIDenoiserCS, m_KernelFilter, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthTexture());
    cmd.SetComputeTextureParam(m_SSGIDenoiserCS, m_KernelFilter, HDShaderIDs._InputNoisyBuffer, noisyBuffer);

    // Bind the output buffer
    cmd.SetComputeTextureParam(m_SSGIDenoiserCS, m_KernelFilter, HDShaderIDs._OutputFilteredBuffer, outputBuffer);

    // Do the spatial pass
    cmd.DispatchCompute(m_SSGIDenoiserCS, m_KernelFilter, numTilesX, numTilesY, hdCamera.viewCount);

    // Grab the history buffer, allocating it on first use.
    RTHandle indirectDiffuseHistory = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.RaytracedIndirectDiffuseHF);
    if (indirectDiffuseHistory == null)
    {
        indirectDiffuseHistory = hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.RaytracedIndirectDiffuseHF, IndirectDiffuseHistoryBufferAllocatorFunction, 1);
        // clear it to black if this is the first pass to avoid nans
        CoreUtils.SetRenderTarget(cmd, indirectDiffuseHistory, m_SharedRTManager.GetDepthStencilBuffer(), ClearFlag.Color, clearColor: Color.black);
    }

    // Pick the right kernel to use
    m_KernelFilter = halfResolution ? m_TemporalFilterHalfKernel : m_TemporalFilterKernel;

    // Bind the input buffers
    cmd.SetComputeTextureParam(m_SSGIDenoiserCS, m_KernelFilter, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthTexture());
    cmd.SetComputeTextureParam(m_SSGIDenoiserCS, m_KernelFilter, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
    cmd.SetComputeFloatParam(m_SSGIDenoiserCS, HDShaderIDs._HistoryValidity, historyValidity);
    if (halfResolution)
    {
        // Half resolution uses the secondary depth history plus the pyramid offset.
        cmd.SetComputeTextureParam(m_SSGIDenoiserCS, m_KernelFilter, HDShaderIDs._HistoryDepthTexture, historyDepthBuffer1);
        cmd.SetComputeVectorParam(m_SSGIDenoiserCS, HDShaderIDs._DepthPyramidFirstMipLevelOffset, firstMipOffset);
    }
    else
    {
        cmd.SetComputeTextureParam(m_SSGIDenoiserCS, m_KernelFilter, HDShaderIDs._HistoryDepthTexture, historyDepthBuffer);
    }
    cmd.SetComputeTextureParam(m_SSGIDenoiserCS, m_KernelFilter, HDShaderIDs._HistoryBuffer, indirectDiffuseHistory);
    // Temporal input is the spatially-filtered result produced above.
    cmd.SetComputeTextureParam(m_SSGIDenoiserCS, m_KernelFilter, HDShaderIDs._InputNoisyBuffer, outputBuffer);

    // Bind the output buffer (ping-pong back into noisyBuffer).
    cmd.SetComputeTextureParam(m_SSGIDenoiserCS, m_KernelFilter, HDShaderIDs._OutputFilteredBuffer, noisyBuffer);

    // Do the temporal pass
    cmd.DispatchCompute(m_SSGIDenoiserCS, m_KernelFilter, numTilesX, numTilesY, hdCamera.viewCount);

    // Copy the new version into the history buffer
    cmd.SetComputeTextureParam(m_SSGIDenoiserCS, m_CopyHistory, HDShaderIDs._InputNoisyBuffer, noisyBuffer);
    cmd.SetComputeTextureParam(m_SSGIDenoiserCS, m_CopyHistory, HDShaderIDs._OutputFilteredBuffer, indirectDiffuseHistory);
    cmd.DispatchCompute(m_SSGIDenoiserCS, m_CopyHistory, numTilesX, numTilesY, hdCamera.viewCount);
}
// Wizard fix step: assigns the ray-tracing resources asset to the default HDRP
// asset and repopulates any null entries inside it.
void FixDXRAsset(bool fromAsyncUnused)
{
    // The resources hang off the default HDRP asset, so repair that first if needed.
    if (!IsHdrpAssetUsedCorrect())
        FixHdrpAssetUsed(fromAsync: false);

    var hdrpPath = HDUtils.GetHDRenderPipelinePath();
    var pipelineAsset = HDRenderPipeline.defaultAsset;
    pipelineAsset.renderPipelineRayTracingResources =
        AssetDatabase.LoadAssetAtPath<HDRenderPipelineRayTracingResources>(
            hdrpPath + "Runtime/RenderPipelineResources/HDRenderPipelineRayTracingResources.asset");
    ResourceReloader.ReloadAllNullIn(pipelineAsset.renderPipelineRayTracingResources, hdrpPath);
}
// Draws the "Emission" section of the HD light inspector: color / color
// temperature, intensity and unit, lux-at-distance, spot reflector, range,
// bounce intensity, and cookie settings.
static void DrawEmissionContent(SerializedHDLight serialized, Editor owner)
{
    // Color block. Color temperature UI is only offered when the project uses
    // linear intensity and color temperature.
    using (var changes = new EditorGUI.ChangeCheckScope())
    {
        if (GraphicsSettings.lightsUseLinearIntensity && GraphicsSettings.lightsUseColorTemperature)
        {
            EditorGUILayout.PropertyField(serialized.settings.useColorTemperature, s_Styles.useColorTemperature);
            if (serialized.settings.useColorTemperature.boolValue)
            {
                // Filter color + Kelvin temperature slider.
                EditorGUI.indentLevel += 1;
                EditorGUILayout.PropertyField(serialized.settings.color, s_Styles.colorFilter);
                SliderWithTexture(s_Styles.colorTemperature, serialized.settings.colorTemperature, serialized.settings);
                EditorGUI.indentLevel -= 1;
            }
            else
            {
                EditorGUILayout.PropertyField(serialized.settings.color, s_Styles.color);
            }
        }
        else
        {
            EditorGUILayout.PropertyField(serialized.settings.color, s_Styles.color);
        }
        // Optionally re-normalize the color when the preference is enabled.
        if (changes.changed && HDRenderPipelinePreferences.lightColorNormalization)
        {
            serialized.settings.color.colorValue = HDUtils.NormalizeColor(serialized.settings.color.colorValue);
        }
    }

    // Intensity field with its unit popup; clamp to non-negative on change.
    EditorGUI.BeginChangeCheck();
    EditorGUILayout.BeginHorizontal();
    EditorGUILayout.PropertyField(serialized.serializedLightData.intensity, s_Styles.lightIntensity);
    DrawLightIntensityUnitPopup(serialized, owner);
    EditorGUILayout.EndHorizontal();
    if (EditorGUI.EndChangeCheck())
    {
        serialized.serializedLightData.intensity.floatValue = Mathf.Max(serialized.serializedLightData.intensity.floatValue, 0.0f);
    }

    // Lux-at-distance widget for punctual lights expressed in Lux.
    if (serialized.editorLightShape != LightShape.Directional && serialized.serializedLightData.lightUnit.enumValueIndex == (int)PunctualLightUnit.Lux)
    {
        // Box spots are local directional lights and shouldn't display the
        // "Lux At" widget; they use only lux.
        if (!(serialized.editorLightShape == LightShape.Spot && ((SpotLightShape)serialized.serializedLightData.spotLightShape.enumValueIndex == SpotLightShape.Box)))
        {
            EditorGUI.indentLevel++;
            EditorGUI.BeginChangeCheck();
            EditorGUILayout.PropertyField(serialized.serializedLightData.luxAtDistance, s_Styles.luxAtDistance);
            if (EditorGUI.EndChangeCheck())
            {
                // Keep the distance strictly positive.
                serialized.serializedLightData.luxAtDistance.floatValue = Mathf.Max(serialized.serializedLightData.luxAtDistance.floatValue, 0.01f);
            }
            EditorGUI.indentLevel--;
        }
    }

    // Spot reflector toggle for cone/pyramid spots expressed in Lumen.
    if (serialized.editorLightShape == LightShape.Spot)
    {
        var spotLightShape = (SpotLightShape)serialized.serializedLightData.spotLightShape.enumValueIndex;
        if (spotLightShape == SpotLightShape.Cone || spotLightShape == SpotLightShape.Pyramid)
        {
            // Display reflector only in advanced mode.
            if (serialized.serializedLightData.lightUnit.enumValueIndex == (int)PunctualLightUnit.Lumen && GetAdvanced(Advanceable.Emission, serialized, owner))
            {
                EditorGUI.indentLevel++;
                EditorGUILayout.PropertyField(serialized.serializedLightData.enableSpotReflector, s_Styles.enableSpotReflector);
                EditorGUI.indentLevel--;
            }
        }
    }

    // Range (non-directional lights only).
    if (serialized.editorLightShape != LightShape.Directional)
    {
        EditorGUI.BeginChangeCheck();
        serialized.settings.DrawRange(false);
        if (EditorGUI.EndChangeCheck())
        {
            // For GI we need to detect any change on additional data and call SetLightDirty + For intensity we need to detect light shape change
            serialized.needUpdateAreaLightEmissiveMeshComponents = true;
            ((Light)owner.target).SetLightDirty(); // Should be apply only to parameter that's affect GI, but make the code cleaner
        }
    }

    serialized.settings.DrawBounceIntensity();

    EditorGUI.BeginChangeCheck(); // For GI we need to detect any change on additional data and call SetLightDirty
    // No cookie with area light (maybe in future textured area light ?)
    if (!HDAdditionalLightData.IsAreaLight(serialized.serializedLightData.lightTypeExtent))
    {
        serialized.settings.DrawCookie();
        // When a directional light uses a cookie, it can control the size.
        if (serialized.settings.cookie != null && serialized.editorLightShape == LightShape.Directional)
        {
            EditorGUI.indentLevel++;
            EditorGUILayout.PropertyField(serialized.serializedLightData.shapeWidth, s_Styles.cookieSizeX);
            EditorGUILayout.PropertyField(serialized.serializedLightData.shapeHeight, s_Styles.cookieSizeY);
            EditorGUI.indentLevel--;
        }
    }
    else if ((LightTypeExtent)serialized.serializedLightData.lightTypeExtent.enumValueIndex == LightTypeExtent.Rectangle)
    {
        // Rectangle area lights get their own cookie slot.
        EditorGUILayout.ObjectField(serialized.serializedLightData.areaLightCookie, s_Styles.areaLightCookie);
    }
    if (EditorGUI.EndChangeCheck())
    {
        serialized.needUpdateAreaLightEmissiveMeshComponents = true;
        ((Light)owner.target).SetLightDirty(); // Should be apply only to parameter that's affect GI, but make the code cleaner
    }
}
// Menu entry: creates a new custom post-process shader from the template
// shipped inside the HDRP package.
static void MenuCreatePostProcessShader()
{
    var templatePath = HDUtils.GetHDRenderPipelinePath() + "/Editor/PostProcessing/Templates/CustomPostProcessingShader.template";
    ProjectWindowUtil.CreateScriptAssetFromTemplateFile(templatePath, "New Post Process Shader.shader");
}
// Custom post-process pass: runs a depth-based material pass into rth1, a
// raymarching compute kernel over it, then composites rth1 into destination.
// Cleanup in this update: removed the large commented-out scaffolding and the
// `buffersToDispose` list, which was freshly allocated and never populated, so
// its dispose loop was a no-op. Also binds the compute textures to the kernel
// returned by FindKernel instead of the hard-coded index 0 — these only agreed
// when "CSMain" happened to be kernel 0.
public override void Render(CommandBuffer cmd, HDCamera camera, RTHandle source, RTHandle destination)
{
    // Material is resolved elsewhere; without it the pass cannot run.
    if (m_Material == null)
    {
        return;
    }

    // Lazily allocate the intermediate render targets.
    if (rth1 == null)
    {
        rth1 = GetNewRTHandle(camera);
    }
    if (rth2 == null)
    {
        rth2 = GetNewRTHandle(camera);
    }

    // Depth distance replicated into all four components for the shader.
    Vector4 parameters = new Vector4(depthDistance.value, depthDistance.value, depthDistance.value, depthDistance.value);
    m_Material.SetVector("_Params", parameters);
    m_Material.SetTexture("_InputTexture", source);
    // Pass 3 of the material writes the depth-processed image into rth1.
    HDUtils.DrawFullScreen(cmd, m_Material, rth1, matProperties, 3);

    // NOTE(review): these ComputeShader calls execute immediately at record
    // time, not when `cmd` is played back, so the dispatch actually runs
    // before the DrawFullScreen recorded above — confirm this ordering is
    // intended, or switch to cmd.SetComputeTextureParam / cmd.DispatchCompute.
    int kernelHandle = raymarching.FindKernel("CSMain");
    raymarching.SetTexture(kernelHandle, "Source", source);
    raymarching.SetTexture(kernelHandle, "Destination", rth1);
    raymarching.SetFloat("nearPlane", nearPlaneDepth);
    raymarching.SetFloat("farPlane", farPlaneDepth);
    // One 8x8 thread group per 8x8 pixel tile, rounded up.
    int threadGroupsX = Mathf.CeilToInt(cam.pixelWidth / 8.0f);
    int threadGroupsY = Mathf.CeilToInt(cam.pixelHeight / 8.0f);
    raymarching.Dispatch(kernelHandle, threadGroupsX, threadGroupsY, 1);

    // Composite the processed buffer into the destination with the material's default pass.
    m_Material.SetTexture("_InputTexture", rth1);
    HDUtils.DrawFullScreen(cmd, m_Material, destination);
}
// Wizard fix step: assigns the ray-tracing resources to the default HDRP asset,
// then validates that the machine and project can actually run DXR.
void FixDXRAsset(bool fromAsyncUnused)
{
    // The resources hang off the default HDRP asset, so repair that first if needed.
    if (!IsHdrpAssetUsedCorrect())
    {
        FixHdrpAssetUsed(fromAsync: false);
    }
    HDRenderPipeline.defaultAsset.renderPipelineRayTracingResources = AssetDatabase.LoadAssetAtPath<HDRenderPipelineRayTracingResources>(HDUtils.GetHDRenderPipelinePath() + "Runtime/RenderPipelineResources/HDRenderPipelineRayTracingResources.asset");
    // Repopulate any null entries in the freshly assigned resources asset.
    ResourceReloader.ReloadAllNullIn(HDRenderPipeline.defaultAsset.renderPipelineRayTracingResources, HDUtils.GetHDRenderPipelinePath());
    // IMPORTANT: We display the error only if we are D3D12 as the supportsRayTracing always return false in any other device even if OS/HW supports DXR.
    // The D3D12 is a separate check in the wizard, so it is fine not to display an error in case we are not D3D12.
    if (!SystemInfo.supportsRayTracing && IsDXRDirect3D12Correct())
    {
        Debug.LogError("Your hardware and/or OS don't support DXR!");
    }
    // Warn when the first graphics API of the selected build target is not DX12
    // and no editor restart is already pending to switch to it.
    if (!HDProjectSettings.wizardNeedRestartAfterChangingToDX12 && PlayerSettings.GetGraphicsAPIs(CalculateSelectedBuildTarget()).FirstOrDefault() != GraphicsDeviceType.Direct3D12)
    {
        Debug.LogWarning("DXR is supported only with DX12");
    }
}
// Analog-glitch post effect: animates drift / jitter / jump / shake / block
// noise parameters and blits srcRT to destRT with the matching shader pass.
public override void Render(CommandBuffer cmd, HDCamera camera, RTHandle srcRT, RTHandle destRT)
{
    if (_material == null)
    {
        return;
    }

    // Advance the clocks driving the animated artifacts.
    var now = Time.time;
    var dt = now - _prevTime;
    _jumpTime += dt * jump.value * 11.3f;
    _prevTime = now;

    // Cubed so the block slider feels perceptually linear.
    var blockStrength = block.value * block.value * block.value;

    // Re-roll the block-noise seeds roughly every 1/30 of a second.
    _blockTime += dt * 60;
    if (_blockTime > 1)
    {
        if (Random.value < 0.09f) _blockSeed1 += 251;
        if (Random.value < 0.29f) _blockSeed2 += 373;
        if (Random.value < 0.25f) _blockStride = Random.Range(1, 32);
        _blockTime = 0;
    }

    // Drift parameters: (phase wrapped to 2*pi, displacement).
    var driftVector = new Vector2(
        now * 606.11f % (Mathf.PI * 2),
        drift.value * 0.04f
    );

    // Jitter parameters: (threshold, displacement).
    var jitterAmount = jitter.value;
    var jitterVector = new Vector3(
        Mathf.Max(0, 1.001f - jitterAmount * 1.2f),
        0.002f + jitterAmount * jitterAmount * jitterAmount * 0.05f
    );

    // Jump parameters: (scroll, displacement).
    var jumpVector = new Vector2(_jumpTime, jump.value);

    // Upload everything to the shader.
    _material.SetInt(ShaderIDs.Seed, (int)(now * 10000));
    _material.SetFloat(ShaderIDs.BlockStrength, blockStrength);
    _material.SetInt(ShaderIDs.BlockStride, _blockStride);
    _material.SetInt(ShaderIDs.BlockSeed1, _blockSeed1);
    _material.SetInt(ShaderIDs.BlockSeed2, _blockSeed2);
    _material.SetVector(ShaderIDs.Drift, driftVector);
    _material.SetVector(ShaderIDs.Jitter, jitterVector);
    _material.SetVector(ShaderIDs.Jump, jumpVector);
    _material.SetFloat(ShaderIDs.Shake, shake.value * 0.2f);
    _material.SetTexture(ShaderIDs.InputTexture, srcRT);

    // Pass layout: base 0, +1 when any analog artifact is active, +2 when
    // block noise is active.
    var analogActive = drift.value > 0 || jitter.value > 0 || jump.value > 0 || shake.value > 0;
    var passIndex = (analogActive ? 1 : 0) + (block.value > 0 ? 2 : 0);

    // Blit with the selected pass.
    HDUtils.DrawFullScreen(cmd, _material, destRT, null, passIndex);
}
// Separable Gaussian blur over the camera (or custom) color buffer:
// downsample to half resolution, horizontal blur, then a vertical blur that
// writes back to the source — or, when a mask is used, into the downsample
// buffer followed by a masked composite with the saved original color.
void GenerateGaussianMips(CommandBuffer cmd, HDCamera hdCam)
{
    RTHandle source;
    Vector2Int size = new Vector2Int(hdCam.actualWidth, hdCam.actualHeight);

    // Pick the buffer set configured for this pass.
    if (targetColorBuffer == TargetBuffer.Camera)
    {
        GetCameraBuffers(out source, out _);
    }
    else
    {
        GetCustomBuffers(out source, out _);
    }

    // Half-resolution target size (never below 1 pixel).
    int dstMipWidth = Mathf.Max(1, size.x >> 1);
    int dstMipHeight = Mathf.Max(1, size.y >> 1);

    // Scale for downsample: the RTHandle may be larger than the camera area in use.
    float scaleX = ((float)size.x / source.rt.width);
    float scaleY = ((float)size.y / source.rt.height);

    if (useMask)
    {
        // Save the non blurred color into a copy:
        cmd.CopyTexture(source, colorCopy);
    }

    // Downsample.
    using (new ProfilingSample(cmd, "Downsample", CustomSampler.Create("Downsample")))
    {
        var downsampleProperties = new MaterialPropertyBlock();
        downsampleProperties.SetTexture(ShaderID._BlitTexture, source);
        downsampleProperties.SetVector(ShaderID._BlitScaleBias, new Vector4(scaleX, scaleY, 0f, 0f));
        downsampleProperties.SetFloat(ShaderID._BlitMipLevel, 0);
        CoreUtils.SetRenderTarget(cmd, downSampleBuffer, ClearFlag.None);
        cmd.SetViewport(new Rect(0, 0, dstMipWidth, dstMipHeight));
        // Fullscreen triangle with the blit material's bilinear pass (index 1).
        cmd.DrawProcedural(Matrix4x4.identity, HDUtils.GetBlitMaterial(source.rt.dimension), 1, MeshTopology.Triangles, 3, 1, downsampleProperties);
    }

    // Horizontal Blur
    using (new ProfilingSample(cmd, "H Blur", CustomSampler.Create("H Blur")))
    {
        var hBlurProperties = new MaterialPropertyBlock();
        CoreUtils.SetRenderTarget(cmd, blurBuffer, ClearFlag.None);
        hBlurProperties.SetFloat(ShaderID._Radius, radius / 4.0f); // The blur is 4 pixel wide in the shader
        hBlurProperties.SetTexture(ShaderID._Source, downSampleBuffer);
        // UV scale of 2 because we sample the half-resolution buffer.
        hBlurProperties.SetFloat(ShaderID._UVScale, 2);
        cmd.SetViewport(new Rect(0, 0, dstMipWidth, dstMipHeight));
        // shaderPassId must be passed explicitly by name or the wrong pass runs.
        CoreUtils.DrawFullScreen(cmd, blurMaterial, shaderPassId: 0, properties: hBlurProperties);
    }

    // Copy back the result in the color buffer while doing a vertical blur
    using (new ProfilingSample(cmd, "V Blur + Copy back", CustomSampler.Create("V Blur + Copy back")))
    {
        var vBlurProperties = new MaterialPropertyBlock();
        // When we use a mask, we do the vertical blur into the downsampling buffer instead of the camera buffer
        // We need that because we're going to write to the color buffer and read from this blured buffer which we can't do
        // if they are in the same buffer
        CoreUtils.SetRenderTarget(cmd, (useMask) ? downSampleBuffer : source, ClearFlag.None);
        vBlurProperties.SetFloat(ShaderID._Radius, radius / 4.0f); // The blur is 4 pixel wide in the shader
        vBlurProperties.SetTexture(ShaderID._Source, blurBuffer);
        vBlurProperties.SetFloat(ShaderID._UVScale, (useMask) ? 2 : 1);
        CoreUtils.DrawFullScreen(cmd, blurMaterial, shaderPassId: 1, properties: vBlurProperties);
    }

    if (useMask)
    {
        // Composite the blurred buffer over the saved original using the mask.
        using (new ProfilingSample(cmd, "Compose Mask Blur", CustomSampler.Create("Compose Mask Blur")))
        {
            var compositingProperties = new MaterialPropertyBlock();
            CoreUtils.SetRenderTarget(cmd, source, ClearFlag.None);
            compositingProperties.SetFloat(ShaderID._Radius, radius / 4.0f); // The blur is 4 pixel wide in the shader
            compositingProperties.SetTexture(ShaderID._Source, downSampleBuffer);
            compositingProperties.SetTexture(ShaderID._ColorBufferCopy, colorCopy);
            compositingProperties.SetTexture(ShaderID._Mask, maskBuffer);
            compositingProperties.SetTexture(ShaderID._MaskDepth, maskDepthBuffer);
            compositingProperties.SetFloat(ShaderID._InvertMask, invertMask ? 1 : 0);
            CoreUtils.DrawFullScreen(cmd, blurMaterial, shaderPassId: 2, properties: compositingProperties);
        }
    }
}
// Pre-build validation: warns when no render-pipeline asset is assigned,
// aborts the build on unsupported platform / graphics API combinations, and
// clamps every quality level's maximum LOD so meshes can be stripped.
// Fix in this update: the quality-level loop switches the active quality level
// to edit each one and previously never restored it, leaving the project on
// the last level after every build pre-pass; it is now saved and restored.
public void OnPreprocessBuild(BuildReport report)
{
    // Detect if the users forget to assign an HDRP Asset
    if (GraphicsSettings.renderPipelineAsset == null)
    {
        if (!Application.isBatchMode)
        {
            // Interactive session: let the user cancel the build.
            if (!EditorUtility.DisplayDialog("Build Player",
                "There is no HDRP Asset provided in GraphicsSettings.\nAre you sure you want to continue?\n Build time can be extremely long without it.", "Ok", "Cancel"))
            {
                throw new BuildFailedException("Stop build on request.");
            }
        }
        else
        {
            Debug.LogWarning("There is no HDRP Asset provided in GraphicsSettings. Build time can be extremely long without it.");
        }
        return;
    }

    // Don't execute the preprocess if we are not HDRenderPipeline
    HDRenderPipelineAsset hdPipelineAsset = GraphicsSettings.renderPipelineAsset as HDRenderPipelineAsset;
    if (hdPipelineAsset == null)
    {
        return;
    }

    // If platform is supported all good
    GraphicsDeviceType unsupportedGraphicDevice = GraphicsDeviceType.Null;
    bool supported = HDUtils.AreGraphicsAPIsSupported(report.summary.platform, out unsupportedGraphicDevice)
        && HDUtils.IsSupportedBuildTarget(report.summary.platform)
        && HDUtils.IsOperatingSystemSupported(SystemInfo.operatingSystem);

    if (!supported)
    {
        unsupportedGraphicDevice = (unsupportedGraphicDevice == GraphicsDeviceType.Null)
            ? SystemInfo.graphicsDeviceType
            : unsupportedGraphicDevice;
        string msg = "The platform " + report.summary.platform.ToString() + " with the graphic API " + unsupportedGraphicDevice + " is not supported with High Definition Render Pipeline";
        // Throw an exception to stop the build
        throw new BuildFailedException(msg);
    }

    // Update all quality levels with the right max lod so that meshes can be stripped.
    // We don't take lod bias into account because it can be overridden per camera.
    int qualityLevelCount = QualitySettings.names.Length;
    // Remember the user's active quality level: the loop below switches levels
    // in order to edit them, and must not leave the project on a different one.
    int activeQualityLevel = QualitySettings.GetQualityLevel();
    try
    {
        for (int i = 0; i < qualityLevelCount; ++i)
        {
            QualitySettings.SetQualityLevel(i, false);
            // Use the per-level pipeline asset when one is set, otherwise the global one.
            var renderPipeline = QualitySettings.renderPipeline as HDRenderPipelineAsset;
            if (renderPipeline != null)
            {
                QualitySettings.maximumLODLevel = GetMinimumMaxLoDValue(renderPipeline);
            }
            else
            {
                QualitySettings.maximumLODLevel = GetMinimumMaxLoDValue(hdPipelineAsset);
            }
        }
    }
    finally
    {
        // Restore the quality level the user had selected.
        QualitySettings.SetQualityLevel(activeQualityLevel, false);
    }
}
// Resolves a diffusion profile for the material: prefers an entry from the
// global settings list (either the default-named profile or the one the
// material already references), falling back to the foliage profile shipped
// with HD, and writes the resolved GUID and hash onto the material.
private static void SetDefaultDiffusionProfile(Material mat)
{
    // GUID currently referenced by the material's diffusion-profile vector.
    string materialProfileGuid = HDUtils.ConvertVector4ToGUID(mat.GetVector("Diffusion_Profile_Asset"));
    string resolvedGuid = "";
    long fileId;
    uint resolvedHash = 0;

    // Scan the project's configured diffusion profiles first.
    foreach (var profileAsset in HDRenderPipelineGlobalSettings.instance.diffusionProfileSettingsList)
    {
        if (profileAsset == null)
            continue;
        if (!AssetDatabase.TryGetGUIDAndLocalFileIdentifier<DiffusionProfileSettings>(profileAsset, out resolvedGuid, out fileId))
            continue;
        // Accept either the conventionally named default profile or the one
        // the material already points at.
        if (profileAsset.name.Equals(kDefaultDiffusionProfileName) || resolvedGuid.Equals(materialProfileGuid))
        {
            resolvedHash = profileAsset.profile.hash;
            break;
        }
    }

    if (resolvedHash == 0)
    {
        // If the user doesn't have a foliage diffusion profile defined, grab the foliage diffusion profile that comes with HD.
        // This won't work until the user adds it to their default diffusion profiles list,
        // but there is a nice "fix" button on the material to help with that.
        var foliageProfile = AssetDatabase.LoadAssetAtPath<DiffusionProfileSettings>(HDUtils.GetHDRenderPipelinePath() + kFoliageDiffusionProfilePath);
        if (AssetDatabase.TryGetGUIDAndLocalFileIdentifier<DiffusionProfileSettings>(foliageProfile, out resolvedGuid, out fileId))
        {
            resolvedHash = foliageProfile.profile.hash;
        }
    }

    // Only touch the material when a profile was actually resolved.
    if (resolvedHash != 0)
    {
        mat.SetVector("Diffusion_Profile_Asset", HDUtils.ConvertGUIDToVector4(resolvedGuid));
        mat.SetFloat("Diffusion_Profile", HDShadowUtils.Asfloat(resolvedHash));
    }
}
// Edge/recolor post effect: regenerates the dither texture when its type
// changes, caches the fill-gradient keys, derives edge-detection thresholds,
// and blits srcRT to destRT with a shader pass selected from the active features.
public override void Render(CommandBuffer cmd, HDCamera camera, RTHandle srcRT, RTHandle destRT)
{
    // Rebuild the dither texture when the selected pattern changed or it was lost.
    if (_ditherType != ditherType.value || _ditherTexture == null)
    {
        CoreUtils.Destroy(_ditherTexture);
        _ditherType = ditherType.value;
        _ditherTexture = GenerateDitherTexture(_ditherType);
    }

#if UNITY_EDITOR
    // In Editor, the gradient will be modified without any hint,
    // so we have to copy the color keys every frame.
    if (true)
#else
    // In Player, we assume no one can modify gradients in profiles,
    // so we update the cache only when the reference was updated.
    if (_cachedGradient != fillGradient.value)
#endif
    {
        _cachedGradient = fillGradient.value;
        _cachedColorKeys = _cachedGradient.colorKeys;
        _cachedAlphaKeys = _cachedGradient.alphaKeys;
    }

    // Edge thresholds packed as (lower, upper).
    Vector2 edgeThresh;

    if (edgeSource.value == EdgeSource.Depth)
    {
        var thresh = 1 / Mathf.Lerp(1000, 1, edgeThreshold.value);
        var scaler = 1 + 2 / (1.01f - edgeContrast.value);
        edgeThresh = new Vector2(thresh, thresh * scaler);
    }
    else // Depth & Color
    {
        var t1 = edgeThreshold.value;
        var t2 = t1 + 1.01f - edgeContrast.value;
        edgeThresh = new Vector2(t1, t2);
    }

    // Push all parameters to the material.
    _material.SetColor(ShaderIDs.EdgeColor, edgeColor.value);
    _material.SetVector(ShaderIDs.EdgeThresholds, edgeThresh);
    _material.SetFloat(ShaderIDs.FillOpacity, fillOpacity.value);
    GradientUtility.SetColorKeys(_material, _cachedColorKeys);
    GradientUtility.SetAlphaKeys(_material, _cachedAlphaKeys);
    _material.SetTexture(ShaderIDs.DitherTexture, _ditherTexture);
    _material.SetFloat(ShaderIDs.DitherStrength, ditherStrength.value);

    // Pass layout: base index = edge source; +3 when filling with more than
    // four color keys; +6 when the gradient uses blend mode.
    var pass = (int)edgeSource.value;
    if (fillOpacity.value > 0 && _cachedColorKeys.Length > 4)
    {
        pass += 3;
    }
    if (fillGradient.value.mode == GradientMode.Blend)
    {
        pass += 6;
    }

    // Blit to destRT with the overlay shader.
    _material.SetTexture(ShaderIDs.InputTexture, srcRT);
    HDUtils.DrawFullScreen(cmd, _material, destRT, null, pass);
}
// Denoiser variant when history is stored in an array and the validation buffer is separate.
// Three compute dispatches: (1) validate the temporal history against the
// current depth/normal buffers, (2) temporally accumulate the noisy signal
// into outputSignal, (3) copy the accumulated result and validity back into
// the history buffers for the next frame.
public void DenoiseBuffer(CommandBuffer cmd, HDCamera hdCamera,
    RTHandle noisySignal, RTHandle historySignal,
    RTHandle validationHistory,
    RTHandle velocityBuffer,
    RTHandle outputSignal,
    int sliceIndex, Vector4 channelMask,
    bool singleChannel = true, float historyValidity = 1.0f)
{
    // If we do not have a depth and normal history buffers, we can skip right away
    // (pass-through: history and output both get the raw signal).
    var historyDepthBuffer = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth);
    var historyNormalBuffer = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Normal);
    if (historyDepthBuffer == null || historyNormalBuffer == null)
    {
        HDUtils.BlitCameraTexture(cmd, noisySignal, historySignal);
        HDUtils.BlitCameraTexture(cmd, noisySignal, outputSignal);
        return;
    }

    // Fetch texture dimensions
    int texWidth = hdCamera.actualWidth;
    int texHeight = hdCamera.actualHeight;

    // Evaluate the dispatch parameters (8x8 tiles, rounded up).
    int areaTileSize = 8;
    int numTilesX = (texWidth + (areaTileSize - 1)) / areaTileSize;
    int numTilesY = (texHeight + (areaTileSize - 1)) / areaTileSize;

    // Request the intermediate buffer we need
    RTHandle validationBuffer = m_RenderPipeline.GetRayTracingBuffer(InternalRayTracingBuffers.R0);

    // First of all we need to validate the history to know where we can or cannot use the history signal
    int m_KernelFilter = m_TemporalFilterCS.FindKernel("ValidateHistory");
    // Scale from the camera area actually in use to the (possibly larger) history RT.
    var historyScale = new Vector2(hdCamera.actualWidth / (float)historySignal.rt.width, hdCamera.actualHeight / (float)historySignal.rt.height);
    cmd.SetComputeVectorParam(m_TemporalFilterCS, HDShaderIDs._RTHandleScaleHistory, historyScale);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._HistoryDepthTexture, historyDepthBuffer);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._HistoryNormalBufferTexture, historyNormalBuffer);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._ValidationBufferRW, validationBuffer);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._VelocityBuffer, velocityBuffer);
    cmd.SetComputeFloatParam(m_TemporalFilterCS, HDShaderIDs._HistoryValidity, historyValidity);
    cmd.SetComputeFloatParam(m_TemporalFilterCS, HDShaderIDs._PixelSpreadAngleTangent, HDRenderPipeline.GetPixelSpreadTangent(hdCamera.camera.fieldOfView, hdCamera.actualWidth, hdCamera.actualHeight));
    cmd.DispatchCompute(m_TemporalFilterCS, m_KernelFilter, numTilesX, numTilesY, hdCamera.viewCount);

    // Now that we have validated our history, let's accumulate
    m_KernelFilter = m_TemporalFilterCS.FindKernel(singleChannel ? "TemporalAccumulationSingleArray" : "TemporalAccumulationColorArray");
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DenoiseInputTexture, noisySignal);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._HistoryBuffer, historySignal);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._HistoryValidityBuffer, validationHistory);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DenoiseOutputTextureRW, outputSignal);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._ValidationBuffer, validationBuffer);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._VelocityBuffer, velocityBuffer);
    // Slice index and channel mask select where/what to accumulate in the history array.
    cmd.SetComputeIntParam(m_TemporalFilterCS, HDShaderIDs._DenoisingHistorySlice, sliceIndex);
    cmd.SetComputeVectorParam(m_TemporalFilterCS, HDShaderIDs._DenoisingHistoryMask, channelMask);
    cmd.DispatchCompute(m_TemporalFilterCS, m_KernelFilter, numTilesX, numTilesY, hdCamera.viewCount);

    // Make sure to copy the new-accumulated signal in our history buffer
    m_KernelFilter = m_TemporalFilterCS.FindKernel(singleChannel ? "CopyHistorySingleArray" : "CopyHistoryColorArray");
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DenoiseInputTexture, outputSignal);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DenoiseOutputTextureRW, historySignal);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._ValidityOutputTextureRW, validationHistory);
    cmd.SetComputeIntParam(m_TemporalFilterCS, HDShaderIDs._DenoisingHistorySlice, sliceIndex);
    cmd.SetComputeVectorParam(m_TemporalFilterCS, HDShaderIDs._DenoisingHistoryMask, channelMask);
    cmd.DispatchCompute(m_TemporalFilterCS, m_KernelFilter, numTilesX, numTilesY, hdCamera.viewCount);
}
// Folder inside the HDRP package that holds the light-unit icon textures.
static string GetLightUnitIconPath()
{
    return HDUtils.GetHDRenderPipelinePath() + "/Editor/RenderPipelineResources/Texture/LightUnitIcons/";
}
// Wizard fix step: assigns the editor resources asset to the default HDRP
// asset and repopulates any null entries inside it.
// Fix in this update: the final call previously re-read the static
// HDRenderPipeline.defaultAsset instead of using the local that was just
// null-checked — inconsistent with the guard and able to throw if the static
// property changed (or returned null) between the check and the use. It now
// uses the checked local throughout.
void FixHdrpAssetEditorResources(bool fromAsyncUnused)
{
    // The resources hang off the default HDRP asset, so repair that first if needed.
    if (!IsHdrpAssetUsedCorrect())
    {
        FixHdrpAssetUsed(fromAsync: false);
    }

    var hdrpAsset = HDRenderPipeline.defaultAsset;
    // Nothing to fix when no default HDRP asset is assigned.
    if (hdrpAsset == null)
    {
        return;
    }

    hdrpAsset.renderPipelineEditorResources = AssetDatabase.LoadAssetAtPath<HDRenderPipelineEditorResources>(HDUtils.GetHDRenderPipelinePath() + "Editor/RenderPipelineResources/HDRenderPipelineEditorResources.asset");
    ResourceReloader.ReloadAllNullIn(hdrpAsset.renderPipelineEditorResources, HDUtils.GetHDRenderPipelinePath());
}
// Renders a sun flare post effect: computes sun occlusion from the depth buffer via
// compute kernels, then composites the flare full-screen into `destination`.
// Falls back to a plain blit when the sun contributes no intensity.
protected override void Render(CommandBuffer cmd, HDCamera camera, RTHandle source, RTHandle destination, SunFlare data)
{
    var cam = camera.camera;
    var sunForward = sunTransform.forward;
    // Virtual sun position: far along the reversed light direction from the camera.
    var sunWorldPos = cam.transform.position - sunForward * 1000f;
    var sunViewPos = cam.WorldToViewportPoint(sunWorldPos);
    // Fade the effect out as the camera turns away from the sun direction.
    var intensity = Mathf.Clamp01(Vector3.Dot(cam.transform.forward, -sunForward));
    // Consider the sun visible within a 10% margin around the viewport.
    var sunVisible = sunViewPos.z > 0 && sunViewPos.x >= -0.1f && sunViewPos.x < 1.1f && sunViewPos.y >= -0.1f && sunViewPos.y < 1.1f;
    if (!sunVisible)
    {
        // Off-screen sun: kill the effect if geometry on LayerMask blocks the sun direction.
        if (Physics.Raycast(cam.transform.position, -sunForward, 1000f, LayerMask))
        {
            intensity = 0f;
        }
    }
    if (intensity > 0f)
    {
        GetCameraBuffers(out _, out var depthBuffer);
        // RTHandle reference size can exceed the actual camera viewport; compute the
        // ratio so UVs address only the rendered region of the depth buffer.
        var depthTexRes = depthBuffer.referenceSize;
        var actualCameraSize = new Vector2Int(camera.actualWidth, camera.actualHeight);
        var occlTexRes = new Vector2Int(OcclusionRes, OcclusionRes);
        var scaleRatio = new Vector2((float)actualCameraSize.x / depthTexRes.x, (float)actualCameraSize.y / depthTexRes.y);
        var aspectRatio = (float)actualCameraSize.y / actualCameraSize.x;
        // xy = sun position in scaled viewport space, zw = sampling radius (0.1 in viewport units, aspect-corrected).
        var scaledSun = new Vector4(sunViewPos.x * scaleRatio.x, sunViewPos.y * scaleRatio.y, 0.1f * aspectRatio * scaleRatio.x, 0.1f * scaleRatio.y);
        cmd.SetComputeVectorParam(computeShader, DepthTextureRes, new Vector4(depthTexRes.x, depthTexRes.y, 1f / depthTexRes.x, 1f / depthTexRes.y));
        cmd.SetComputeVectorParam(computeShader, OcclusionTextureRes, new Vector4(occlTexRes.x, occlTexRes.y, 1f / occlTexRes.x, 1f / occlTexRes.y));
        cmd.SetComputeVectorParam(computeShader, SunViewPos, scaledSun);
        // Pass 1: build the occlusion texture from depth (writes A). Kernels run on 8x8 groups.
        var kernel = textureOcclusionKernel;
        cmd.SetComputeTextureParam(computeShader, kernel, DepthTexture, depthBuffer);
        cmd.SetComputeTextureParam(computeShader, kernel, OcclusionTextureOut, occlusionTextureA);
        cmd.DispatchCompute(computeShader, kernel, OcclusionRes / 8, OcclusionRes / 8, 1);
        // Pass 2: blur occlusion (A -> B).
        kernel = blurTextureOcclusionKernel;
        cmd.SetComputeTextureParam(computeShader, kernel, OcclusionTextureIn, occlusionTextureA);
        cmd.SetComputeTextureParam(computeShader, kernel, OcclusionTextureOut, occlusionTextureB);
        cmd.DispatchCompute(computeShader, kernel, OcclusionRes / 8, OcclusionRes / 8, 1);
        // Pass 3: reduce blurred occlusion to a single value (B -> A), one group.
        kernel = reduceTextureOcclusionKernel;
        cmd.SetComputeTextureParam(computeShader, kernel, OcclusionTextureIn, occlusionTextureB);
        cmd.SetComputeTextureParam(computeShader, kernel, OcclusionTextureOut, occlusionTextureA);
        cmd.DispatchCompute(computeShader, kernel, 1, 1, 1);
        // Pass 4: per-angle occlusion samples into the AngleOcclusion buffer.
        // NOTE(review): reads B (blurred, pre-reduce) — presumably intentional since the
        // reduce pass wrote its result to A; confirm against the compute shader.
        kernel = angleOcclusionKernel;
        cmd.SetComputeTextureParam(computeShader, kernel, OcclusionTextureIn, occlusionTextureB);
        cmd.SetComputeBufferParam(computeShader, kernel, AngleOcclusion, angleOcclusion);
        cmd.DispatchCompute(computeShader, kernel, AngleSamples / 64, 1, 1);
        // Composite: bind globals for the full-screen flare material and draw into destination.
        cmd.SetGlobalVector(SunViewPos, sunViewPos);
        cmd.SetGlobalVector(SunSettings, new Vector4(data.sunIntensity, data.haloIntensity, data.ghostingIntensity, intensity));
        cmd.SetGlobalTexture(InputTexture, source);
        cmd.SetGlobalTexture(OcclusionTextureIn, occlusionTextureA);
        cmd.SetGlobalTexture(OcclusionTextureOut, occlusionTextureB);
        cmd.SetGlobalBuffer(AngleOcclusion, angleOcclusion);
        HDUtils.DrawFullScreen(cmd, material, destination);
    }
    else
    {
        // No flare contribution: pass the color through unchanged.
        HDUtils.BlitCameraTexture(cmd, source, destination);
    }
}
// Generates the gaussian pyramid of source into destination
// We can't do it in place as the color pyramid has to be read while writing to the color
// buffer in some cases (e.g. refraction, distortion)
// Returns the number of mips
public int RenderColorGaussianPyramid(CommandBuffer cmd, Vector2Int size, Texture source, RenderTexture destination)
{
    // Select between Tex2D and Tex2DArray versions of the kernels
    bool sourceIsArray = (source.dimension == TextureDimension.Tex2DArray);
    int rtIndex = sourceIsArray ? 1 : 0;
    // Sanity check
    if (sourceIsArray)
    {
        Debug.Assert(source.dimension == destination.dimension, "MipGenerator source texture does not match dimension of destination!");
    }
    // Check if format has changed since last time we generated mips
    if (m_TempColorTargets[rtIndex] != null && m_TempColorTargets[rtIndex].rt.graphicsFormat != destination.graphicsFormat)
    {
        RTHandles.Release(m_TempColorTargets[rtIndex]);
        m_TempColorTargets[rtIndex] = null;
    }
    // Only create the temporary target on-demand in case the game doesn't actually need it
    // Half resolution is enough: this target only ever holds downsampled mips.
    if (m_TempColorTargets[rtIndex] == null)
    {
        m_TempColorTargets[rtIndex] = RTHandles.Alloc(
            Vector2.one * 0.5f,
            sourceIsArray ? TextureXR.slices : 1,
            dimension: source.dimension,
            filterMode: FilterMode.Bilinear,
            colorFormat: destination.graphicsFormat,
            enableRandomWrite: true,
            useMipMap: false,
            useDynamicScale: true,
            name: "Temp Gaussian Pyramid Target"
        );
    }
    int srcMipLevel = 0;
    int srcMipWidth = size.x;
    int srcMipHeight = size.y;
    int slices = destination.volumeDepth;        // NOTE(review): assigned but never read in this method
    int tempTargetWidth = srcMipWidth >> 1;      // NOTE(review): assigned but never read in this method
    int tempTargetHeight = srcMipHeight >> 1;    // NOTE(review): assigned but never read in this method
    // Check if format has changed since last time we generated mips
    if (m_TempDownsamplePyramid[rtIndex] != null && m_TempDownsamplePyramid[rtIndex].rt.graphicsFormat != destination.graphicsFormat)
    {
        RTHandles.Release(m_TempDownsamplePyramid[rtIndex]);
        m_TempDownsamplePyramid[rtIndex] = null;
    }
    if (m_TempDownsamplePyramid[rtIndex] == null)
    {
        m_TempDownsamplePyramid[rtIndex] = RTHandles.Alloc(
            Vector2.one * 0.5f,
            sourceIsArray ? TextureXR.slices : 1,
            dimension: source.dimension,
            filterMode: FilterMode.Bilinear,
            colorFormat: destination.graphicsFormat,
            enableRandomWrite: false,
            useMipMap: false,
            useDynamicScale: true,
            name: "Temporary Downsampled Pyramid"
        );
        // Clear once on allocation so uninitialized texels outside the viewport don't bleed into blurs.
        cmd.SetRenderTarget(m_TempDownsamplePyramid[rtIndex]);
        cmd.ClearRenderTarget(false, true, Color.black);
    }
    // Source viewport may be smaller than the source texture (RTHandle system), so scale UVs.
    float sourceScaleX = (float)size.x / source.width;
    float sourceScaleY = (float)size.y / source.height;
    // Copies src mip0 to dst mip0
    m_PropertyBlock.SetTexture(HDShaderIDs._BlitTexture, source);
    m_PropertyBlock.SetVector(HDShaderIDs._BlitScaleBias, new Vector4(sourceScaleX, sourceScaleY, 0f, 0f));
    m_PropertyBlock.SetFloat(HDShaderIDs._BlitMipLevel, 0f);
    cmd.SetRenderTarget(destination, 0, CubemapFace.Unknown, -1);
    cmd.SetViewport(new Rect(0, 0, srcMipWidth, srcMipHeight));
    cmd.DrawProcedural(Matrix4x4.identity, HDUtils.GetBlitMaterial(source.dimension), 0, MeshTopology.Triangles, 3, 1, m_PropertyBlock);
    int finalTargetMipWidth = destination.width;
    int finalTargetMipHeight = destination.height;
    // Note: smaller mips are excluded as we don't need them and the gaussian compute works
    // on 8x8 blocks
    while (srcMipWidth >= 8 || srcMipHeight >= 8)
    {
        int dstMipWidth = Mathf.Max(1, srcMipWidth >> 1);
        int dstMipHeight = Mathf.Max(1, srcMipHeight >> 1);
        // Scale for downsample
        float scaleX = ((float)srcMipWidth / finalTargetMipWidth);
        float scaleY = ((float)srcMipHeight / finalTargetMipHeight);
        // Downsample: read the previous destination mip, write into the temp downsample target.
        m_PropertyBlock.SetTexture(HDShaderIDs._BlitTexture, destination);
        m_PropertyBlock.SetVector(HDShaderIDs._BlitScaleBias, new Vector4(scaleX, scaleY, 0f, 0f));
        m_PropertyBlock.SetFloat(HDShaderIDs._BlitMipLevel, srcMipLevel);
        cmd.SetRenderTarget(m_TempDownsamplePyramid[rtIndex], 0, CubemapFace.Unknown, -1);
        cmd.SetViewport(new Rect(0, 0, dstMipWidth, dstMipHeight));
        cmd.DrawProcedural(Matrix4x4.identity, HDUtils.GetBlitMaterial(source.dimension), 1, MeshTopology.Triangles, 3, 1, m_PropertyBlock);
        // In this mip generation process, source viewport can be smaller than the source render target itself because of the RTHandle system
        // We are not using the scale provided by the RTHandle system for two reasons:
        // - Source might be a planar probe which will not be scaled by the system (since it's actually the final target of probe rendering at the exact size)
        // - When computing mip size, depending on even/odd sizes, the scale computed for mip 0 might miss a texel at the border.
        // This can result in a shift in the mip map downscale that depends on the render target size rather than the actual viewport
        // (Two rendering at the same viewport size but with different RTHandle reference size would yield different results which can break automated testing)
        // So in the end we compute a specific scale for downscale and blur passes at each mip level.
        // Scales for Blur
        float blurSourceTextureWidth = (float)m_TempDownsamplePyramid[rtIndex].rt.width; // Same size as m_TempColorTargets which is the source for vertical blur
        float blurSourceTextureHeight = (float)m_TempDownsamplePyramid[rtIndex].rt.height;
        scaleX = ((float)dstMipWidth / blurSourceTextureWidth);
        scaleY = ((float)dstMipHeight / blurSourceTextureHeight);
        // Blur horizontal: temp downsample target -> temp color target.
        // _SrcUvLimits.xy clamps sampling to the valid viewport; .zw is the one-texel step along the blur axis.
        m_PropertyBlock.SetTexture(HDShaderIDs._Source, m_TempDownsamplePyramid[rtIndex]);
        m_PropertyBlock.SetVector(HDShaderIDs._SrcScaleBias, new Vector4(scaleX, scaleY, 0f, 0f));
        m_PropertyBlock.SetVector(HDShaderIDs._SrcUvLimits, new Vector4((dstMipWidth - 0.5f) / blurSourceTextureWidth, (dstMipHeight - 0.5f) / blurSourceTextureHeight, 1.0f / blurSourceTextureWidth, 0f));
        m_PropertyBlock.SetFloat(HDShaderIDs._SourceMip, 0);
        cmd.SetRenderTarget(m_TempColorTargets[rtIndex], 0, CubemapFace.Unknown, -1);
        cmd.SetViewport(new Rect(0, 0, dstMipWidth, dstMipHeight));
        cmd.DrawProcedural(Matrix4x4.identity, m_ColorPyramidPSMat, rtIndex, MeshTopology.Triangles, 3, 1, m_PropertyBlock);
        // Blur vertical: temp color target -> destination mip (srcMipLevel + 1).
        m_PropertyBlock.SetTexture(HDShaderIDs._Source, m_TempColorTargets[rtIndex]);
        m_PropertyBlock.SetVector(HDShaderIDs._SrcScaleBias, new Vector4(scaleX, scaleY, 0f, 0f));
        m_PropertyBlock.SetVector(HDShaderIDs._SrcUvLimits, new Vector4((dstMipWidth - 0.5f) / blurSourceTextureWidth, (dstMipHeight - 0.5f) / blurSourceTextureHeight, 0f, 1.0f / blurSourceTextureHeight));
        m_PropertyBlock.SetFloat(HDShaderIDs._SourceMip, 0);
        cmd.SetRenderTarget(destination, srcMipLevel + 1, CubemapFace.Unknown, -1);
        cmd.SetViewport(new Rect(0, 0, dstMipWidth, dstMipHeight));
        cmd.DrawProcedural(Matrix4x4.identity, m_ColorPyramidPSMat, rtIndex, MeshTopology.Triangles, 3, 1, m_PropertyBlock);
        srcMipLevel++;
        srcMipWidth = srcMipWidth >> 1;
        srcMipHeight = srcMipHeight >> 1;
        finalTargetMipWidth = finalTargetMipWidth >> 1;
        finalTargetMipHeight = finalTargetMipHeight >> 1;
    }
    return (srcMipLevel + 1);
}
// Custom pass entry point: draws the pass material full-screen into the camera normal buffer.
protected override void Execute(CustomPassContext ctx)
{
    var cmd = ctx.cmd;
    var target = ctx.cameraNormalBuffer;
    HDUtils.DrawFullScreen(cmd, material, target);
}
// Menu handler: creates a new Custom Pass C# script from the HDRP-provided template.
static void MenuCreateCustomPassCSharpScript()
{
    string templatePath = HDUtils.GetHDRenderPipelinePath() + "/Editor/RenderPipeline/CustomPass/CustomPassCSharpScript.template";
    ProjectWindowUtil.CreateScriptAssetFromTemplateFile(templatePath, "New Custom Pass.cs");
}
// Records a gaussian blur of the chosen color buffer (camera or custom) into the command
// buffer: optional unblurred copy for masking, half-res downsample, separable H/V blur,
// and an optional mask-based compositing pass back into the source buffer.
void GenerateGaussianMips(CommandBuffer cmd, HDCamera hdCam)
{
    RTHandle source;
    // Retrieve the target buffer of the blur from the UI:
    if (targetColorBuffer == TargetBuffer.Camera)
    {
        GetCameraBuffers(out source, out _);
    }
    else
    {
        GetCustomBuffers(out source, out _);
    }
    // Save the non blurred color into a copy if the mask is enabled:
    if (useMask)
    {
        cmd.CopyTexture(source, colorCopy);
    }
    // Downsample
    using (new ProfilingSample(cmd, "Downsample", CustomSampler.Create("Downsample")))
    {
        // This Blit will automatically downsample the color because our target buffer have been allocated in half resolution
        HDUtils.BlitCameraTexture(cmd, source, downSampleBuffer, 0);
    }
    // Horizontal Blur
    using (new ProfilingSample(cmd, "H Blur", CustomSampler.Create("H Blur")))
    {
        var hBlurProperties = new MaterialPropertyBlock();
        hBlurProperties.SetFloat(ShaderID._Radius, radius / 4.0f); // The blur is 4 pixel wide in the shader
        hBlurProperties.SetTexture(ShaderID._Source, downSampleBuffer);
        SetViewPortSize(cmd, hBlurProperties, blurBuffer);
        HDUtils.DrawFullScreen(cmd, blurMaterial, blurBuffer, hBlurProperties, shaderPassId: 0); // Do not forget the shaderPassId: ! or it won't work
    }
    // Copy back the result in the color buffer while doing a vertical blur
    using (new ProfilingSample(cmd, "V Blur + Copy back", CustomSampler.Create("V Blur + Copy back")))
    {
        var vBlurProperties = new MaterialPropertyBlock();
        // When we use a mask, we do the vertical blur into the downsampling buffer instead of the camera buffer
        // We need that because we're going to write to the color buffer and read from this blured buffer which we can't do
        // if they are in the same buffer
        vBlurProperties.SetFloat(ShaderID._Radius, radius / 4.0f); // The blur is 4 pixel wide in the shader
        vBlurProperties.SetTexture(ShaderID._Source, blurBuffer);
        var targetBuffer = (useMask) ? downSampleBuffer : source;
        SetViewPortSize(cmd, vBlurProperties, targetBuffer);
        HDUtils.DrawFullScreen(cmd, blurMaterial, targetBuffer, vBlurProperties, shaderPassId: 1); // Do not forget the shaderPassId: ! or it won't work
    }
    if (useMask)
    {
        // Merge the non blur copy and the blurred version using the mask buffers
        using (new ProfilingSample(cmd, "Compose Mask Blur", CustomSampler.Create("Compose Mask Blur")))
        {
            var compositingProperties = new MaterialPropertyBlock();
            compositingProperties.SetFloat(ShaderID._Radius, radius / 4.0f); // The blur is 4 pixel wide in the shader
            compositingProperties.SetTexture(ShaderID._Source, downSampleBuffer);
            compositingProperties.SetTexture(ShaderID._ColorBufferCopy, colorCopy);
            compositingProperties.SetTexture(ShaderID._Mask, maskBuffer);
            compositingProperties.SetTexture(ShaderID._MaskDepth, maskDepthBuffer);
            compositingProperties.SetFloat(ShaderID._InvertMask, invertMask ? 1 : 0);
            SetViewPortSize(cmd, compositingProperties, source);
            HDUtils.DrawFullScreen(cmd, blurMaterial, source, compositingProperties, shaderPassId: 2); // Do not forget the shaderPassId: ! or it won't work
        }
    }
}
// Menu handler: creates a new full-screen Custom Pass shader from the HDRP-provided template.
static void MenuCreateCustomFullScreenPassShader()
{
    string templatePath = HDUtils.GetHDRenderPipelinePath() + "/Editor/RenderPipeline/CustomPass/CustomPassFullScreenShader.template";
    ProjectWindowUtil.CreateScriptAssetFromTemplateFile(templatePath, "New FullScreen CustomPass.shader");
}
// Scene-view gizmo for the decal projector: draws the resizable bounds handle, applies
// size/offset/UV edits through the undo system, recenters the transform when editing
// ends, and refreshes the cached decal data when anything changed.
void DrawHandles()
{
    //Note: each target need to be handled individually to allow multi edition
    DecalProjector decalProjector = target as DecalProjector;
    if (editMode == k_EditShapePreservingUV || editMode == k_EditShapeWithoutPreservingUV)
    {
        // Draw in the projector's local frame (rotation + position, no scale).
        using (new Handles.DrawingScope(Color.white, Matrix4x4.TRS(decalProjector.transform.position, decalProjector.transform.rotation, Vector3.one)))
        {
            bool needToRefreshDecalProjector = false;
            handle.center = decalProjector.offset;
            handle.size = decalProjector.size;
            // Capture pre-edit bounds so UV crop math below can compare before/after.
            Vector3 boundsSizePreviousOS = handle.size;
            Vector3 boundsMinPreviousOS = handle.size * -0.5f + handle.center;
            EditorGUI.BeginChangeCheck();
            handle.DrawHandle();
            if (EditorGUI.EndChangeCheck())
            {
                needToRefreshDecalProjector = true;
                // Adjust decal transform if handle changed.
                // RecordObject must happen before mutating the projector for undo to work.
                Undo.RecordObject(decalProjector, "Decal Projector Change");
                decalProjector.size = handle.size;
                decalProjector.offset = handle.center;
                Vector3 boundsSizeCurrentOS = handle.size;
                Vector3 boundsMinCurrentOS = handle.size * -0.5f + handle.center;
                if (editMode == k_EditShapePreservingUV)
                {
                    // Treat decal projector bounds as a crop tool, rather than a scale tool.
                    // Compute a new uv scale and bias terms to pin decal projection pixels in world space, irrespective of projector bounds.
                    // The 1e-5f clamps guard against division by zero for degenerate bounds.
                    Vector2 uvScale = decalProjector.uvScale;
                    uvScale.x *= Mathf.Max(1e-5f, boundsSizeCurrentOS.x) / Mathf.Max(1e-5f, boundsSizePreviousOS.x);
                    uvScale.y *= Mathf.Max(1e-5f, boundsSizeCurrentOS.y) / Mathf.Max(1e-5f, boundsSizePreviousOS.y);
                    decalProjector.uvScale = uvScale;
                    Vector2 uvBias = decalProjector.uvBias;
                    uvBias.x += (boundsMinCurrentOS.x - boundsMinPreviousOS.x) / Mathf.Max(1e-5f, boundsSizeCurrentOS.x) * decalProjector.uvScale.x;
                    uvBias.y += (boundsMinCurrentOS.y - boundsMinPreviousOS.y) / Mathf.Max(1e-5f, boundsSizeCurrentOS.y) * decalProjector.uvScale.y;
                    decalProjector.uvBias = uvBias;
                }
                // Persist the change on prefab instances so it survives prefab reimport.
                if (PrefabUtility.IsPartOfNonAssetPrefabInstance(decalProjector))
                {
                    PrefabUtility.RecordPrefabInstancePropertyModifications(decalProjector);
                }
            }
            // Automatically recenter our transform component if necessary.
            // In order to correctly handle world-space snapping, we only perform this recentering when the user is no longer interacting with the gizmo.
            if ((GUIUtility.hotControl == 0) && (decalProjector.offset != Vector3.zero))
            {
                needToRefreshDecalProjector = true;
                // Both the DecalProjectorComponent, and the transform will be modified.
                // The undo system will automatically group all RecordObject() calls here into a single action.
                Undo.RecordObject(decalProjector.transform, "Decal Projector Change");
                // Re-center the transform to the center of the decal projector bounds,
                // while maintaining the world-space coordinates of the decal projector boundings vertices.
                // Center of the decal projector is not the same of the HierarchicalBox as we want it to be on the z face as lights
                decalProjector.transform.Translate(decalProjector.offset + new Vector3(0f, 0f, handle.size.z * -0.5f), Space.Self);
                decalProjector.offset = new Vector3(0f, 0f, handle.size.z * 0.5f);
                if (PrefabUtility.IsPartOfNonAssetPrefabInstance(decalProjector))
                {
                    PrefabUtility.RecordPrefabInstancePropertyModifications(decalProjector);
                }
            }
            if (needToRefreshDecalProjector)
            {
                // Smoothly update the decal image projected
                Matrix4x4 sizeOffset = Matrix4x4.Translate(decalProjector.decalOffset) * Matrix4x4.Scale(decalProjector.decalSize);
                DecalSystem.instance.UpdateCachedData(decalProjector.position, decalProjector.rotation, sizeOffset, decalProjector.drawDistance, decalProjector.fadeScale, decalProjector.uvScaleBias, decalProjector.affectsTransparency, decalProjector.Handle, decalProjector.gameObject.layer, HDUtils.GetSceneCullingMaskFromGameObject(decalProjector.gameObject), decalProjector.fadeFactor);
            }
        }
    }
    //[TODO: add editable pivot. Uncomment this when ready]
    //else if (editMode == k_EditUV)
    //{
    //    //here should be handles code to manipulate the pivot without changing the UV
    //}
}