// Whenever any camera will render us, add a command buffer to do the work on it.
// Builds the "grab and blur" buffer once per camera and caches it in m_Cameras.
	public void OnWillRenderObject()
	{
		var act = gameObject.activeInHierarchy && enabled;
		if (!act)
		{
			Cleanup();
			return;
		}

		var cam = Camera.current;
		if (!cam)
			return;

		// Did we already add the command buffer on this camera? Nothing to do then.
		if (m_Cameras.ContainsKey(cam))
			return;

		// Lazily create the blur material from the serialized shader reference.
		if (!m_Material)
		{
			m_Material = new Material(m_BlurShader);
			m_Material.hideFlags = HideFlags.HideAndDontSave;
		}

		// Fix: declare the buffer where it is first needed instead of a dead
		// `CommandBuffer buf = null;` assignment before the early returns.
		var buf = new CommandBuffer();
		buf.name = "Grab screen and blur";
		m_Cameras[cam] = buf;

		// copy screen into temporary RT (-1/-1 = camera pixel size)
		int screenCopyID = Shader.PropertyToID("_ScreenCopyTexture");
		buf.GetTemporaryRT (screenCopyID, -1, -1, 0, FilterMode.Bilinear);
		buf.Blit (BuiltinRenderTextureType.CurrentActive, screenCopyID);

		// get two smaller RTs (-2/-2 = half camera pixel size)
		int blurredID = Shader.PropertyToID("_Temp1");
		int blurredID2 = Shader.PropertyToID("_Temp2");
		buf.GetTemporaryRT (blurredID, -2, -2, 0, FilterMode.Bilinear);
		buf.GetTemporaryRT (blurredID2, -2, -2, 0, FilterMode.Bilinear);

		// downsample screen copy into smaller RT, release screen RT
		buf.Blit (screenCopyID, blurredID);
		buf.ReleaseTemporaryRT (screenCopyID);

		// Two separable blur rounds (horizontal then vertical), widening the
		// kernel on the second round.
		// NOTE(review): the offsets are baked from Screen.width/height when the
		// buffer is built; they go stale after a resolution change unless the
		// buffer is rebuilt — TODO confirm Cleanup() runs on resize.
		buf.SetGlobalVector("offsets", new Vector4(2.0f/Screen.width,0,0,0));
		buf.Blit (blurredID, blurredID2, m_Material);
		buf.SetGlobalVector("offsets", new Vector4(0,2.0f/Screen.height,0,0));
		buf.Blit (blurredID2, blurredID, m_Material);
		buf.SetGlobalVector("offsets", new Vector4(4.0f/Screen.width,0,0,0));
		buf.Blit (blurredID, blurredID2, m_Material);
		buf.SetGlobalVector("offsets", new Vector4(0,4.0f/Screen.height,0,0));
		buf.Blit (blurredID2, blurredID, m_Material);

		// Expose the blurred result to any shader sampling _GrabBlurTexture.
		buf.SetGlobalTexture("_GrabBlurTexture", blurredID);

		cam.AddCommandBuffer (CameraEvent.AfterSkybox, buf);
	}
Пример #2
0
    // Tears down capture state: removes and releases the command buffer,
    // releases render targets, and destroys the native EXR exporter context.
    void OnDisable()
    {
        if (m_cb != null)
        {
            m_cam.RemoveCommandBuffer(CameraEvent.AfterEverything, m_cb);
            m_cb.Release();
            m_cb = null;
        }
        if (m_frame_buffer != null)
        {
            m_frame_buffer.Release();
            m_frame_buffer = null;
        }
        if (m_depth != null)
        {
            // Fix: guard m_gbuffer separately — it can be null while m_depth is
            // set, and the original loop would throw a NullReferenceException.
            if (m_gbuffer != null)
            {
                for (int i = 0; i < m_gbuffer.Length; ++i)
                {
                    m_gbuffer[i].Release();
                }
            }
            m_depth.Release();
            m_gbuffer = null;
            m_depth = null;
            m_rt_gbuffer = null;
        }

        // Destroy the native EXR context unconditionally, as before.
        // NOTE(review): presumably fcExrDestroyContext tolerates a default or
        // already-destroyed handle — TODO confirm against the native API.
        FrameCapturer.fcExrDestroyContext(m_exr);
    }
 /// <summary>
 /// Returns the shadow command buffer, creating it and attaching it to this
 /// Light's AfterShadowMap event on first use.
 /// </summary>
 public CommandBuffer GetCommandBuffer()
 {
     if (m_cb != null)
         return m_cb;

     m_cb = new CommandBuffer { name = "Hair Shadow" };
     GetComponent<Light>().AddCommandBuffer(LightEvent.AfterShadowMap, m_cb);
     return m_cb;
 }
 // Lazily builds the clear material and command buffer, then pushes the
 // camera's current background colour into the material every frame.
 void OnPreRender()
 {
     var cam = GetComponent<Camera>();

     if (m_material == null)
         m_material = new Material(m_shader);

     if (m_commands == null)
     {
         m_commands = new CommandBuffer { name = "ClearForwardGBuffer" };
         m_commands.DrawMesh(m_quad, Matrix4x4.identity, m_material);
         cam.AddCommandBuffer(CameraEvent.AfterFinalPass, m_commands);
     }

     m_material.SetColor("_ClearColor", cam.backgroundColor);
 }
Пример #5
0
    // Builds a one-time command buffer that draws a low-resolution texture over
    // the whole screen before forward opaque geometry renders.
    private void Awake() {
        // Shader property ID used as the handle for the temporary low-res target.
        int lowResRenderTarget = Shader.PropertyToID("_LowResRenderTarget");

        CommandBuffer cb = new CommandBuffer();

        // Allocate the temporary RT at the configured low resolution.
        cb.GetTemporaryRT(lowResRenderTarget, this.width, this.height, 0, FilterMode.Trilinear, RenderTextureFormat.ARGB32);

        // Blit the low-res texture into itself, to re-draw it with the current material
        // NOTE(review): blitting a texture onto itself is undefined behaviour on
        // some graphics APIs — consider a second temporary RT; verify on targets.
        cb.Blit(lowResRenderTarget, lowResRenderTarget, this.material);

        // Blit the low-res texture into the camera's target render texture, effectively rendering it to the entire screen
        cb.Blit(lowResRenderTarget, BuiltinRenderTextureType.CameraTarget);

        cb.ReleaseTemporaryRT(lowResRenderTarget);

        // Tell the camera to execute our CommandBuffer before the forward opaque pass - that is, just before normal geometry starts rendering
        this.GetComponent<Camera>().AddCommandBuffer(CameraEvent.BeforeForwardOpaque, cb);
    }
Пример #6
0
 // Propagates a fixed-timestep transform update to every affected object that
 // opted into fixed stepping, initializing and refreshing state as needed.
 public void FixedUpdateTransform(CommandBuffer updateCB)
 {
     if (!this.m_initialized)
         this.Initialize();

     if (this.m_affectedObjectsChanged)
         this.UpdateAffectedObjects();

     foreach (var affected in this.m_affectedObjects)
     {
         if (affected.FixedStep)
             affected.OnUpdateTransform(this.m_camera, updateCB, this.m_starting);
     }
 }
Пример #7
0
    // Attaches (once per camera) a command buffer that renders the grass meshes
    // into a dedicated depth texture and publishes it as _ReferenceDepth.
    public void OnPreRender()
    {
        if (!(gameObject.activeInHierarchy && enabled))
        {
            Cleanup();
            return;
        }

        var cam = Camera.current;
        if (!cam)
            return;

        // Already registered on this camera? Nothing left to do.
        if (m_Cameras.ContainsKey(cam))
            return;

        // Lazily create the depth-only material.
        if (!DepthMaterial)
        {
            DepthMaterial = new Material(DepthShader);
            DepthMaterial.hideFlags = HideFlags.HideAndDontSave;
        }

        var buf = new CommandBuffer { name = "Render Grass Depth Reference" };
        m_Cameras[cam] = buf;

        // Full-resolution depth-only target, cleared to the far plane.
        int referenceDepthID = Shader.PropertyToID("_rdepth");
        buf.GetTemporaryRT(referenceDepthID, -1, -1, 24, FilterMode.Point, RenderTextureFormat.Depth);
        buf.SetRenderTarget(new RenderTargetIdentifier(referenceDepthID));
        buf.ClearRenderTarget(true, false, Color.white, 1);

        // Draw every grass renderer with the depth material.
        foreach (MeshRenderer grass in GrassMeshes)
            buf.DrawRenderer(grass, DepthMaterial);

        // Restore the camera target, then expose the depth texture to shaders.
        buf.SetRenderTarget(new RenderTargetIdentifier(BuiltinRenderTextureType.CameraTarget));
        buf.SetGlobalTexture("_ReferenceDepth", referenceDepthID);

        cam.AddCommandBuffer(CameraEvent.BeforeImageEffects, buf);
        Debug.Log("Add Command Buffer");
    }
Пример #8
0
 // Detaches every registered command buffer from the cached camera and drops
 // the references so the buffers can be rebuilt later.
 void ClearCommandBuffer()
 {
     if (m_camera == null)
         return;

     if (m_cb_prepass != null)
         m_camera.RemoveCommandBuffer(CameraEvent.BeforeGBuffer, m_cb_prepass);
     if (m_cb_raymarch != null)
         m_camera.RemoveCommandBuffer(CameraEvent.BeforeGBuffer, m_cb_raymarch);
     if (m_cb_show_steps != null)
         m_camera.RemoveCommandBuffer(CameraEvent.AfterEverything, m_cb_show_steps);

     m_cb_prepass = null;
     m_cb_raymarch = null;
     m_cb_show_steps = null;
 }
Пример #9
0
        // Builds the CPU-side LightData array for every light in lightArray and
        // uploads it to the GPU buffer. Lights that don't contribute (dimmed,
        // faded out, or deleted this frame) are skipped or written as defaults.
        void BuildLightData(CommandBuffer cmd, HDCamera hdCamera, List <HDAdditionalLightData> lightArray)
        {
            // Also we need to build the light list data
            if (m_LightDataGPUArray == null || m_LightDataGPUArray.count != lightArray.Count)
            {
                ResizeLightDataBuffer(lightArray.Count);
            }

            // Build the data for every light
            for (int lightIdx = 0; lightIdx < lightArray.Count; ++lightIdx)
            {
                var lightData = new LightData();

                HDAdditionalLightData additionalLightData = lightArray[lightIdx];
                // When the user deletes a light source in the editor, there is a single frame where the light is null before the collection of light in the scene is triggered
                // the workaround for this is simply to add an invalid light for that frame
                if (additionalLightData == null)
                {
                    m_LightDataCPUArray[lightIdx] = lightData;
                    continue;
                }
                Light light = additionalLightData.gameObject.GetComponent <Light>();

                // Both of these positions are non-camera-relative.
                float distanceToCamera  = (light.gameObject.transform.position - hdCamera.camera.transform.position).magnitude;
                float lightDistanceFade = HDUtils.ComputeLinearDistanceFade(distanceToCamera, additionalLightData.fadeDistance);

                bool contributesToLighting = ((additionalLightData.lightDimmer > 0) && (additionalLightData.affectDiffuse || additionalLightData.affectSpecular)) || (additionalLightData.volumetricDimmer > 0);
                contributesToLighting = contributesToLighting && (lightDistanceFade > 0);

                // NOTE(review): skipping here leaves m_LightDataCPUArray[lightIdx]
                // holding whatever it previously contained — TODO confirm the
                // array is default-initialized on resize so no stale data leaks.
                if (!contributesToLighting)
                {
                    continue;
                }

                lightData.lightLayers = additionalLightData.GetLightLayers();
                LightCategory lightCategory = LightCategory.Count;
                GPULightType  gpuLightType  = GPULightType.Point;
                GetLightGPUType(additionalLightData, light, ref gpuLightType, ref lightCategory);

                lightData.lightType = gpuLightType;

                // Camera-relative light position.
                lightData.positionRWS = light.gameObject.transform.position - hdCamera.camera.transform.position;

                bool applyRangeAttenuation = additionalLightData.applyRangeAttenuation && (gpuLightType != GPULightType.ProjectorBox);

                lightData.range = light.range;

                if (applyRangeAttenuation)
                {
                    lightData.rangeAttenuationScale = 1.0f / (light.range * light.range);
                    lightData.rangeAttenuationBias  = 1.0f;

                    if (lightData.lightType == GPULightType.Rectangle)
                    {
                        // Rect lights are currently a special case because they use the normalized
                        // [0, 1] attenuation range rather than the regular [0, r] one.
                        lightData.rangeAttenuationScale = 1.0f;
                    }
                }
                else // Don't apply any attenuation but do a 'step' at range
                {
                    // Solve f(x) = b - (a * x)^2 where x = (d/r)^2.
                    // f(0) = huge -> b = huge.
                    // f(1) = 0    -> huge - a^2 = 0 -> a = sqrt(huge).
                    const float hugeValue = 16777216.0f;
                    const float sqrtHuge  = 4096.0f;
                    lightData.rangeAttenuationScale = sqrtHuge / (light.range * light.range);
                    lightData.rangeAttenuationBias  = hugeValue;

                    if (lightData.lightType == GPULightType.Rectangle)
                    {
                        // Rect lights are currently a special case because they use the normalized
                        // [0, 1] attenuation range rather than the regular [0, r] one.
                        lightData.rangeAttenuationScale = sqrtHuge;
                    }
                }

                Color value = light.color.linear * light.intensity;
                if (additionalLightData.useColorTemperature)
                {
                    value *= Mathf.CorrelatedColorTemperatureToRGB(light.colorTemperature);
                }
                lightData.color = new Vector3(value.r, value.g, value.b);

                lightData.forward = light.transform.forward;
                lightData.up      = light.transform.up;
                lightData.right   = light.transform.right;

                if (lightData.lightType == GPULightType.ProjectorBox)
                {
                    // Rescale for cookies and windowing.
                    lightData.right *= 2.0f / Mathf.Max(additionalLightData.shapeWidth, 0.001f);
                    lightData.up    *= 2.0f / Mathf.Max(additionalLightData.shapeHeight, 0.001f);
                }
                else if (lightData.lightType == GPULightType.ProjectorPyramid)
                {
                    // Get width and height for the current frustum
                    var spotAngle = light.spotAngle;

                    float frustumWidth, frustumHeight;

                    if (additionalLightData.aspectRatio >= 1.0f)
                    {
                        frustumHeight = 2.0f * Mathf.Tan(spotAngle * 0.5f * Mathf.Deg2Rad);
                        frustumWidth  = frustumHeight * additionalLightData.aspectRatio;
                    }
                    else
                    {
                        frustumWidth  = 2.0f * Mathf.Tan(spotAngle * 0.5f * Mathf.Deg2Rad);
                        frustumHeight = frustumWidth / additionalLightData.aspectRatio;
                    }

                    // Rescale for cookies and windowing.
                    lightData.right *= 2.0f / frustumWidth;
                    lightData.up    *= 2.0f / frustumHeight;
                }

                if (lightData.lightType == GPULightType.Spot)
                {
                    var spotAngle = light.spotAngle;

                    var innerConePercent      = additionalLightData.GetInnerSpotPercent01();
                    var cosSpotOuterHalfAngle = Mathf.Clamp(Mathf.Cos(spotAngle * 0.5f * Mathf.Deg2Rad), 0.0f, 1.0f);
                    var sinSpotOuterHalfAngle = Mathf.Sqrt(1.0f - cosSpotOuterHalfAngle * cosSpotOuterHalfAngle);
                    var cosSpotInnerHalfAngle = Mathf.Clamp(Mathf.Cos(spotAngle * 0.5f * innerConePercent * Mathf.Deg2Rad), 0.0f, 1.0f); // inner cone

                    var val = Mathf.Max(0.0001f, (cosSpotInnerHalfAngle - cosSpotOuterHalfAngle));
                    lightData.angleScale  = 1.0f / val;
                    lightData.angleOffset = -cosSpotOuterHalfAngle * lightData.angleScale;

                    // Rescale for cookies and windowing.
                    float cotOuterHalfAngle = cosSpotOuterHalfAngle / sinSpotOuterHalfAngle;
                    lightData.up    *= cotOuterHalfAngle;
                    lightData.right *= cotOuterHalfAngle;
                }
                else
                {
                    // These are the neutral values allowing GetAngleAnttenuation in shader code to return 1.0
                    lightData.angleScale  = 0.0f;
                    lightData.angleOffset = 1.0f;
                }

                if (lightData.lightType != GPULightType.Directional && lightData.lightType != GPULightType.ProjectorBox)
                {
                    // Store the squared radius of the light to simulate a fill light.
                    lightData.size = new Vector2(additionalLightData.shapeRadius * additionalLightData.shapeRadius, 0);
                }

                if (lightData.lightType == GPULightType.Rectangle || lightData.lightType == GPULightType.Tube)
                {
                    lightData.size = new Vector2(additionalLightData.shapeWidth, additionalLightData.shapeHeight);
                }

                lightData.lightDimmer           = lightDistanceFade * (additionalLightData.lightDimmer);
                lightData.diffuseDimmer         = lightDistanceFade * (additionalLightData.affectDiffuse ? additionalLightData.lightDimmer : 0);
                lightData.specularDimmer        = lightDistanceFade * (additionalLightData.affectSpecular ? additionalLightData.lightDimmer * hdCamera.frameSettings.specularGlobalDimmer : 0);
                lightData.volumetricLightDimmer = lightDistanceFade * (additionalLightData.volumetricDimmer);

                lightData.contactShadowMask      = 0;
                lightData.cookieIndex            = -1;
                lightData.shadowIndex            = -1;
                lightData.screenSpaceShadowIndex = -1;

                if (light != null && light.cookie != null)
                {
                    // TODO: add texture atlas support for cookie textures.
                    switch (light.type)
                    {
                    case LightType.Spot:
                        lightData.cookieIndex = m_RenderPipeline.m_TextureCaches.cookieTexArray.FetchSlice(cmd, light.cookie);
                        break;

                    case LightType.Point:
                        lightData.cookieIndex = m_RenderPipeline.m_TextureCaches.cubeCookieTexArray.FetchSlice(cmd, light.cookie);
                        break;
                    }
                }
                // Fix: the first branch guards `light != null`, so this branch
                // must too — otherwise `light.type` dereferences a null light.
                else if (light != null && light.type == LightType.Spot && additionalLightData.spotLightShape != SpotLightShape.Cone)
                {
                    // Projectors lights must always have a cookie texture.
                    // As long as the cache is a texture array and not an atlas, the 4x4 white texture will be rescaled to 128
                    lightData.cookieIndex = m_RenderPipeline.m_TextureCaches.cookieTexArray.FetchSlice(cmd, Texture2D.whiteTexture);
                }
                else if (lightData.lightType == GPULightType.Rectangle && additionalLightData.areaLightCookie != null)
                {
                    lightData.cookieIndex = m_RenderPipeline.m_TextureCaches.areaLightCookieManager.FetchSlice(cmd, additionalLightData.areaLightCookie);
                }

                {
                    lightData.shadowDimmer           = 1.0f;
                    lightData.volumetricShadowDimmer = 1.0f;
                }

                {
                    // fix up shadow information
                    lightData.shadowIndex = additionalLightData.shadowIndex;
                }

                // Value of max smoothness is from artists point of view, need to convert from perceptual smoothness to roughness
                lightData.minRoughness = (1.0f - additionalLightData.maxSmoothness) * (1.0f - additionalLightData.maxSmoothness);

                // No usage for the shadow masks
                lightData.shadowMaskSelector = Vector4.zero;
                {
                    // use -1 to say that we don't use shadow mask
                    lightData.shadowMaskSelector.x = -1.0f;
                    lightData.nonLightMappedOnly   = 0;
                }

                // Set the data for this light
                m_LightDataCPUArray[lightIdx] = lightData;
            }

            //Push the data to the GPU
            m_LightDataGPUArray.SetData(m_LightDataCPUArray);
        }
	// Lazily creates the pre/post-light command buffers that copy UBER's
	// per-pixel props (translucency, self-shadowing, wetness) out of the
	// emission buffer before lighting and release the copy afterwards.
	public void Initialize() {
		if (combufPreLight == null) {
			int propsBufferID = Shader.PropertyToID("_UBERPropsBuffer");

			// prepare material
			// Fix: the original nested a `CopyPropsMat != null` DestroyImmediate
			// inside a `CopyPropsMat == null` check — dead code, removed.
			if (CopyPropsMat == null)
			{
				CopyPropsMat = new Material(Shader.Find("Hidden/UBER_CopyPropsTexture"));
				CopyPropsMat.hideFlags = HideFlags.DontSave;
			}

			// take a copy of emission buffer.a where UBER stores its props (translucency, self-shadowing, wetness)
			combufPreLight = new CommandBuffer();
			combufPreLight.name = "UBERPropsPrelight";
			combufPreLight.GetTemporaryRT(propsBufferID, -1, -1, 0, FilterMode.Point, RenderTextureFormat.RHalf);
			combufPreLight.Blit(BuiltinRenderTextureType.CameraTarget, propsBufferID, CopyPropsMat);

			// release temp buffer after lighting
			combufPostLight = new CommandBuffer();
			combufPostLight.name = "UBERPropsPostlight";
			combufPostLight.ReleaseTemporaryRT(propsBufferID);
		}
	}
Пример #11
0
 // Removes the raymarch-related command buffers from this object's camera and
 // clears the cached references so they can be recreated.
 void ClearCommandBuffer()
 {
     var cam = GetComponent<Camera>();
     if (cam == null)
         return;

     if (m_cb_prepass != null)
         cam.RemoveCommandBuffer(CameraEvent.BeforeGBuffer, m_cb_prepass);
     if (m_cb_raymarch != null)
         cam.RemoveCommandBuffer(CameraEvent.BeforeGBuffer, m_cb_raymarch);
     if (m_cb_show_steps != null)
         cam.RemoveCommandBuffer(CameraEvent.AfterEverything, m_cb_show_steps);

     m_cb_prepass = null;
     m_cb_raymarch = null;
     m_cb_show_steps = null;
 }
Пример #12
0
	public void FixedUpdateTransform( CommandBuffer updateCB )
#endif
	{
		if ( !m_initialized )
			Initialize();

		if ( m_affectedObjectsChanged )
			UpdateAffectedObjects();

		for ( int i = 0; i < m_affectedObjects.Length; i++ )
		{
			if ( m_affectedObjects[ i ].FixedStep )
			{
			#if UNITY_4
				m_affectedObjects[ i ].OnUpdateTransform( m_camera, m_starting );
			#else
				m_affectedObjects[ i ].OnUpdateTransform( m_camera, updateCB, m_starting );
			#endif
			}
		}
	}
Пример #13
0
    // Detach and dispose the command buffer when this component is disabled.
    void OnDisable()
    {
        if (commandBuffer == null)
            return;

        GetComponent<Camera>().RemoveCommandBuffer(lastCameraEvent, commandBuffer);
        commandBuffer.Dispose();
        commandBuffer = null;
    }
Пример #14
0
 /// <summary>
 /// Validates this object's state, then executes the given command buffer via
 /// the internal native entry point.
 /// </summary>
 public void ExecuteCommandBuffer(CommandBuffer commandBuffer)
 {
     CheckValid();
     ExecuteCommandBuffer_Internal(commandBuffer);
 }
Пример #15
0
 /// <summary>
 /// Blits a camera-scaled source RT into a specific viewport rectangle of a
 /// camera-scaled destination RT, using the camera's own scale bias.
 /// </summary>
 /// <param name="cmd">Command buffer to record into.</param>
 /// <param name="camera">Camera whose scaleBias drives source sampling.</param>
 /// <param name="source">Source render target handle.</param>
 /// <param name="destination">Destination render target handle.</param>
 /// <param name="destViewport">Viewport rectangle within the destination.</param>
 /// <param name="mipLevel">Source mip level to sample.</param>
 /// <param name="bilinear">Use bilinear filtering when true.</param>
 public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle source, RTHandleSystem.RTHandle destination, Rect destViewport, float mipLevel = 0.0f, bool bilinear = false)
 {
     // Bind the destination first, then narrow the viewport before blitting.
     SetRenderTarget(cmd, camera, destination);
     cmd.SetViewport(destViewport);
     BlitTexture(cmd, source, destination, camera.scaleBias, mipLevel, bilinear);
 }
Пример #16
0
        // Generates a color mip pyramid: each iteration downsamples the previous
        // level into mips[i] with a compute shader, then copies the result back
        // into mip i+1 of targetTexture. srcRect must start at (0,0).
        void RenderColorPyramidMips(
            RectInt srcRect,
            CommandBuffer cmd,
            RenderTexture targetTexture,
            List <RenderTexture> mips,
            int lodCount,
            Vector2 scale
            )
        {
            Assert.AreEqual(0, srcRect.x, "Offset are not supported");
            Assert.AreEqual(0, srcRect.y, "Offset are not supported");
            Assert.IsTrue(srcRect.width > 0);
            Assert.IsTrue(srcRect.height > 0);

            var src = targetTexture;

            for (var i = 0; i < lodCount; i++)
            {
                var dest = mips[i];

                // Source/destination rects for this mip level; the "work" rects
                // are rounded up to 16-pixel multiples for the compute dispatch.
                var srcMip     = new RectInt(0, 0, srcRect.width >> i, srcRect.height >> i);
                var dstMip     = new RectInt(0, 0, srcMip.width >> 1, srcMip.height >> 1);
                var srcWorkMip = new RectInt(
                    0,
                    0,
                    Mathf.CeilToInt(srcMip.width / 16.0f) * 16,
                    Mathf.CeilToInt(srcMip.height / 16.0f) * 16
                    );
                var dstWorkMip = new RectInt(0, 0, srcWorkMip.width >> 1, srcWorkMip.height >> 1);

                // Pad the source up to the work size so out-of-rect taps are valid.
                m_TexturePadding.Pad(cmd, src, srcMip, srcWorkMip);

                // TODO: Add proper stereo support to the compute job

                cmd.SetComputeTextureParam(m_ColorPyramidCS, m_ColorPyramidKernel, _Source, src);
                cmd.SetComputeTextureParam(m_ColorPyramidCS, m_ColorPyramidKernel, _Result, dest);
                // _Size is used as a scale inside the whole render target so here we need to keep the full size (and not the scaled size depending on the current camera)
                cmd.SetComputeVectorParam(
                    m_ColorPyramidCS,
                    _Size,
                    new Vector4(dest.width, dest.height, 1f / dest.width, 1f / dest.height)
                    );
                // NOTE(review): integer division by 8 here (vs CeilToInt in the
                // depth pyramid) relies on dstWorkMip being a multiple of 8 —
                // guaranteed by the 16-multiple rounding above.
                cmd.DispatchCompute(
                    m_ColorPyramidCS,
                    m_ColorPyramidKernel,
                    dstWorkMip.width / 8,
                    dstWorkMip.height / 8,
                    1
                    );

                var dstMipWidthToCopy  = Mathf.Min(dest.width, dstWorkMip.width);
                var dstMipHeightToCopy = Mathf.Min(dest.height, dstWorkMip.height);

                // If we could bind texture mips as UAV we could avoid this copy...(which moreover copies more than the needed viewport if not fullscreen)
                cmd.CopyTexture(
                    mips[i],
                    0, 0, 0, 0,
                    dstMipWidthToCopy, dstMipHeightToCopy, targetTexture, 0, i + 1, 0, 0
                    );

                // Next iteration reads from the level we just produced.
                src = dest;
            }
        }
Пример #17
0
        // Builds a depth mip pyramid: copies the x channel of sourceTexture into
        // targetTexture, then repeatedly downsamples into mips[i], copying each
        // result back into mip i+1 of targetTexture.
        public void RenderDepthPyramid(
            int width, int height,
            CommandBuffer cmd,
            RTHandle sourceTexture,
            RTHandle targetTexture,
            List <RTHandle> mips,
            int lodCount,
            Vector2 scale
            )
        {
            // Extract the depth (x) channel into the pyramid's top level.
            m_GPUCopy.SampleCopyChannel_xyzw2x(cmd, sourceTexture, targetTexture, new RectInt(0, 0, width, height));

            RTHandle src = targetTexture;

            for (var i = 0; i < lodCount; i++)
            {
                RTHandle dest = mips[i];

                var srcMip = new RectInt(0, 0, width >> i, height >> i);
                var dstMip = new RectInt(0, 0, srcMip.width >> 1, srcMip.height >> 1);

                // Default to the 1x1 kernel; upgrade to the 8x8 kernel when the
                // destination is large enough, padding the source to 16-multiples.
                var kernel     = depthKernel1;
                var kernelSize = 1;
                var srcWorkMip = srcMip;
                var dstWorkMip = dstMip;

                if (dstWorkMip.width >= 8 && dstWorkMip.height >= 8)
                {
                    srcWorkMip.width  = Mathf.CeilToInt(srcWorkMip.width / 16.0f) * 16;
                    srcWorkMip.height = Mathf.CeilToInt(srcWorkMip.height / 16.0f) * 16;
                    dstWorkMip.width  = srcWorkMip.width >> 1;
                    dstWorkMip.height = srcWorkMip.height >> 1;

                    m_TexturePadding.Pad(cmd, src, srcMip, srcWorkMip);
                    kernel     = depthKernel8;
                    kernelSize = 8;
                }
                else
                {
                    // Small mips: pad the whole texture instead of a work rect.
                    m_TexturePadding.Pad(cmd, src, srcMip, new RectInt(0, 0, src.rt.width, src.rt.height));
                }

                cmd.SetComputeTextureParam(m_DepthPyramidCS, kernel, _Source, src);
                cmd.SetComputeTextureParam(m_DepthPyramidCS, kernel, _Result, dest);
                // _SrcSize carries the work-rect dimensions plus the camera scale
                // folded into the reciprocal texel size.
                cmd.SetComputeVectorParam(m_DepthPyramidCS, _SrcSize, new Vector4(
                                              srcWorkMip.width, srcWorkMip.height,
                                              (1.0f / srcWorkMip.width) * scale.x, (1.0f / srcWorkMip.height) * scale.y)
                                          );

                cmd.DispatchCompute(
                    m_DepthPyramidCS,
                    kernel,
                    Mathf.CeilToInt(dstWorkMip.width / (float)kernelSize),
                    Mathf.CeilToInt(dstWorkMip.height / (float)kernelSize),
                    1
                    );

                var dstMipWidthToCopy  = Mathf.Min(dest.rt.width, dstWorkMip.width);
                var dstMipHeightToCopy = Mathf.Min(dest.rt.height, dstWorkMip.height);

                // If we could bind texture mips as UAV we could avoid this copy...(which moreover copies more than the needed viewport if not fullscreen)
                cmd.CopyTexture(mips[i], 0, 0, 0, 0, dstMipWidthToCopy, dstMipHeightToCopy, targetTexture, 0, i + 1, 0, 0);
                // Next iteration reads from the level we just produced.
                src = dest;
            }
        }
 // Copies `texture` into cache slice `sliceIndex`. The base implementation is
 // a no-op; concrete caches override it with the actual GPU transfer.
 public virtual void TransferToSlice(CommandBuffer cmd, int sliceIndex, Texture texture)
 {
 }
 // Updates a cache slice from `content`, using the content texture's own hash
 // as the slice's version stamp.
 public void UpdateSlice(CommandBuffer cmd, int sliceIndex, Texture content)
 {
     UpdateSlice(cmd, sliceIndex, content, GetTextureHash(content));
 }
 // In case the texture content with which we update the cache is not the input texture, we need to provide the right update count.
 public void UpdateSlice(CommandBuffer cmd, int sliceIndex, Texture content, uint textureHash)
 {
     // Record the caller-supplied hash first, then perform the transfer.
     // transfer new slice to sliceIndex from source texture
     SetSliceHash(sliceIndex, textureHash);
     TransferToSlice(cmd, sliceIndex, content);
 }
    // Strips the deferred-plus command buffers (plus the supplied normal
    // buffer) from the given camera, skipping any that were never created.
    private void RemoveCommandBuffersFromCamera(Camera camera, CommandBuffer normalBuffer) {
        if (m_copyTransmission != null)
            camera.RemoveCommandBuffer(CameraEvent.AfterGBuffer, m_copyTransmission);

        if (normalBuffer != null)
            camera.RemoveCommandBuffer(CameraEvent.BeforeLighting, normalBuffer);

        if (m_releaseDeferredPlus != null)
            camera.RemoveCommandBuffer(CameraEvent.AfterLighting, m_releaseDeferredPlus);
    }
Пример #22
0
 private extern void ExecuteCommandBuffer_Internal(CommandBuffer commandBuffer);
Пример #23
0
    // Rebuilds (every frame, per camera) the command buffer that draws all
    // registered deferred decals into the G-buffer just before lighting:
    // diffuse-only into GBuffer0, normals-only into GBuffer2, combined via MRT.
    public void OnWillRenderObject()
    {
        var act = gameObject.activeInHierarchy && enabled;
        if (!act)
        {
            OnDisable();
            return;
        }

        var cam = Camera.current;
        if (!cam)
            return;

        CommandBuffer buf = null;
        if (m_Cameras.ContainsKey(cam))
        {
            // Buffer already attached to this camera: clear and re-record it.
            buf = m_Cameras[cam];
            buf.Clear ();
        }
        else
        {
            buf = new CommandBuffer();
            buf.name = "Deferred decals";
            m_Cameras[cam] = buf;

            // set this command buffer to be executed just before deferred lighting pass
            // in the camera
            cam.AddCommandBuffer (CameraEvent.BeforeLighting, buf);
        }

        //@TODO: in a real system should cull decals, and possibly only
        // recreate the command buffer when something has changed.

        var system = DeferredDecalSystem.instance;

        // copy g-buffer normals into a temporary RT
        // (needed so normal-modifying decals can still read the originals)
        var normalsID = Shader.PropertyToID("_NormalsCopy");
        buf.GetTemporaryRT (normalsID, -1, -1);
        buf.Blit (BuiltinRenderTextureType.GBuffer2, normalsID);
        // render diffuse-only decals into diffuse channel
        // (depth stays bound to CameraTarget for correct depth testing)
        buf.SetRenderTarget (BuiltinRenderTextureType.GBuffer0, BuiltinRenderTextureType.CameraTarget);
        foreach (var decal in system.m_DecalsDiffuse)
        {
            buf.DrawMesh (m_CubeMesh, decal.transform.localToWorldMatrix, decal.m_Material);
        }

        // render normals-only decals into normals channel
        buf.SetRenderTarget (BuiltinRenderTextureType.GBuffer2, BuiltinRenderTextureType.CameraTarget);
        foreach (var decal in system.m_DecalsNormals)
        {
            buf.DrawMesh (m_CubeMesh, decal.transform.localToWorldMatrix, decal.m_Material);
        }
        // render diffuse+normals decals into two MRTs
        RenderTargetIdentifier[] mrt = {BuiltinRenderTextureType.GBuffer0, BuiltinRenderTextureType.GBuffer2};
        buf.SetRenderTarget (mrt, BuiltinRenderTextureType.CameraTarget);
        foreach (var decal in system.m_DecalsBoth)
        {
            buf.DrawMesh (m_CubeMesh, decal.transform.localToWorldMatrix, decal.m_Material);
        }
        // release temporary normals RT
        buf.ReleaseTemporaryRT (normalsID);
    }
Пример #24
0
        // Tracks per-particle transforms across frames (keyed by random seed) so
        // previous/current local-to-world matrices are available for motion
        // vector rendering. Reinitializes if capacity no longer matches.
        internal override void UpdateTransform(CommandBuffer updateCB, bool starting)
        {
            if (!m_initialized || m_capacity != m_particleSystem.maxParticles)
            {
                Initialize();
                return;
            }

            Profiler.BeginSample("Particle.Update");

            // Normal frame while visible: roll current matrices into "previous"
            // before recomputing the current ones below.
            if (!starting && m_wasVisible)
            {
                var enumerator = m_particleDict.GetEnumerator();
                while (enumerator.MoveNext())
                {
                    Particle particle = enumerator.Current.Value;
                    particle.prevLocalToWorld = particle.currLocalToWorld;
                }
            }

            m_moved = true;

            int numAlive = m_particleSystem.GetParticles(m_particles);

            Matrix4x4 transformLocalToWorld = Matrix4x4.TRS(m_transform.position, m_transform.rotation, Vector3.one);

            // Separate-axes rotation requires the full 3D euler path below.
            bool separateAxes = (rotationOverLifetime.enabled && rotationOverLifetime.separateAxes) ||
                                (rotationBySpeed.enabled && rotationBySpeed.separateAxes);

            for (int i = 0; i < numAlive; i++)
            {
                uint     seed = m_particles[i].randomSeed;
                Particle particle;

                // New seed: take a tracking slot from the pool if one is free.
                bool justSpawned = false;
                if (!m_particleDict.TryGetValue(seed, out particle) && m_particleStack.Count > 0)
                {
                    m_particleDict[seed] = particle = m_particleStack.Pop();
                    justSpawned          = true;
                }

                // Pool exhausted: this particle goes untracked this frame.
                if (particle == null)
                {
                    continue;
                }

                float   currentSize = m_particles[i].GetCurrentSize(m_particleSystem);
                Vector3 size        = new Vector3(currentSize, currentSize, currentSize);

                // Compute the particle's current local-to-world matrix depending
                // on the renderer mode (mesh, billboard, or unsupported).
                Matrix4x4 particleCurrLocalToWorld;
                if (m_renderer.renderMode == ParticleSystemRenderMode.Mesh)
                {
                    Quaternion rotation;
                    if (separateAxes)
                    {
                        rotation = Quaternion.Euler(m_particles[i].rotation3D);
                    }
                    else
                    {
                        rotation = Quaternion.AngleAxis(m_particles[i].rotation, m_particles[i].axisOfRotation);
                    }

                    Matrix4x4 particleMatrix = Matrix4x4.TRS(m_particles[i].position, rotation, size);

                    if (m_particleSystem.simulationSpace == ParticleSystemSimulationSpace.World)
                    {
                        particleCurrLocalToWorld = particleMatrix;
                    }
                    else
                    {
                        particleCurrLocalToWorld = transformLocalToWorld * particleMatrix;
                    }
                }
                else if (m_renderer.renderMode == ParticleSystemRenderMode.Billboard)
                {
                    if (m_particleSystem.simulationSpace == ParticleSystemSimulationSpace.Local)
                    {
                        m_particles[i].position = transformLocalToWorld.MultiplyPoint(m_particles[i].position);
                    }

                    Quaternion rotation;
                    if (separateAxes)
                    {
                        // NOTE(review): x/y are negated but z is not — presumably
                        // matching Unity's billboard orientation convention; confirm.
                        rotation = Quaternion.Euler(-m_particles[i].rotation3D.x, -m_particles[i].rotation3D.y, m_particles[i].rotation3D.z);
                    }
                    else
                    {
                        rotation = Quaternion.AngleAxis(m_particles[i].rotation, Vector3.back);
                    }

                    particleCurrLocalToWorld = Matrix4x4.TRS(m_particles[i].position, m_owner.Transform.rotation * rotation, size);
                }
                else
                {
                    // unsupported
                    particleCurrLocalToWorld = Matrix4x4.identity;
                }

                // refCount == 1 marks the particle as alive this frame (dead ones
                // are collected in RemoveDeadParticles below).
                particle.refCount         = 1;
                particle.currLocalToWorld = particleCurrLocalToWorld;
                if (justSpawned)
                {
                    particle.prevLocalToWorld = particle.currLocalToWorld;
                }
            }

            // First visible frame: no valid history, so previous = current to
            // avoid bogus motion vectors.
            if (starting || !m_wasVisible)
            {
                var enumerator = m_particleDict.GetEnumerator();
                while (enumerator.MoveNext())
                {
                    Particle particle = enumerator.Current.Value;
                    particle.prevLocalToWorld = particle.currLocalToWorld;
                }
            }

            RemoveDeadParticles();

            m_wasVisible = m_renderer.isVisible;

            Profiler.EndSample();
        }
Пример #25
0
    // Detaches the lighting command buffer from every camera it was added to,
    // releases GPU resources, then releases the buffer itself.
    public override void OnDisable()
    {
        base.OnDisable();
        ReleaseGPUResources();

        // Fix: guard both the collection and the buffer — m_cameras may be null
        // if OnEnable never ran, and removing a null buffer would throw.
        if (m_cameras != null && m_cb != null)
        {
            foreach (var c in m_cameras)
            {
                if (c != null) c.RemoveCommandBuffer(CameraEvent.AfterLighting, m_cb);
            }
        }
        m_cameras = null;

        if(m_cb!=null)
        {
            m_cb.Release();
            m_cb = null;
        }
    }
 extern public static void SetAsyncCompilation([NotNull] CommandBuffer cmd, bool allow);
Пример #27
0
	// Renders per-object motion vectors for every affected object, writing the
	// depth-linearization parameters to a global shader vector first.
	// 'scale' applies to variable-step objects, 'fixedScale' to fixed-step
	// ones; 'quality' selects the motion-vector quality level.
	public void RenderVectors( CommandBuffer renderCB, float scale, float fixedScale, AmplifyMotion.Quality quality )
#endif
	{
		// Lazily initialize on first use.
		if ( !m_initialized )
			Initialize();

		// For some reason Unity's own values weren't working correctly on Windows/OpenGL
		float near = m_camera.nearClipPlane;
		float far = m_camera.farClipPlane;
		Vector4 zparam;

		// The x/y terms differ between D3D and OpenGL clip-space conventions.
		if ( AmplifyMotionEffectBase.IsD3D )
		{
			zparam.x = 1.0f - far / near;
			zparam.y = far / near;
		}
		else
		{
			// OpenGL
			zparam.x = ( 1.0f - far / near ) / 2.0f;
			zparam.y = ( 1.0f + far / near ) / 2.0f;
		}

		zparam.z = zparam.x / far;
		zparam.w = zparam.y / far;

		Shader.SetGlobalVector( "_AM_ZBUFFER_PARAMS", zparam );

		// Rebuild the affected-object list if it was invalidated.
		if ( m_affectedObjectsChanged )
			UpdateAffectedObjects();

		for ( int i = 0; i < m_affectedObjects.Length; i++ )
		{
			// don't render objects excluded via camera culling mask
			if ( ( m_camera.cullingMask & ( 1 << m_affectedObjects[ i ].gameObject.layer ) ) != 0 )
			{
			#if UNITY_4
				m_affectedObjects[ i ].OnRenderVectors( m_camera, m_affectedObjects[ i ].FixedStep ? fixedScale : scale, quality );
			#else
				m_affectedObjects[ i ].OnRenderVectors( m_camera, renderCB, m_affectedObjects[ i ].FixedStep ? fixedScale : scale, quality );
			#endif
			}
		}
	}
 extern public static void RestoreAsyncCompilation([NotNull] CommandBuffer cmd);
Пример #29
0
 // Blit between two camera-scaled RTHandles while overriding the scale/bias
 // parameter (instead of using the one implied by the camera).
 public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle source, RTHandleSystem.RTHandle destination, Vector4 scaleBias, float mipLevel = 0.0f, bool bilinear = false)
 {
     // Binding the target also sets the correct camera viewport.
     SetRenderTarget(cmd, camera, destination);
     // Then draw the source into it with the caller-supplied scale/bias.
     BlitTexture(cmd, source, destination, scaleBias, mipLevel, bilinear);
 }
Пример #30
0
 // End-of-frame cleanup: give back the temporary water FX render target.
 public override void FrameCleanup(CommandBuffer cmd) => cmd.ReleaseTemporaryRT(m_WaterFX.id);
Пример #31
0
        // Runs the post-processing stack from 'source' into 'destination'.
        // 'opaqueOnly' selects the before-transparent subset of effects;
        // otherwise the full (before + after) stack runs. When exactly one
        // effect is active and source == destination, an intermediate target
        // is allocated because the stack cannot blit a texture onto itself.
        internal static void RenderPostProcessing(CommandBuffer cmd, ref CameraData cameraData, RenderTextureDescriptor sourceDescriptor,
                                                  RenderTargetIdentifier source, RenderTargetIdentifier destination, bool opaqueOnly, bool flip)
        {
            var layer = cameraData.postProcessLayer;
            int effectsCount = opaqueOnly
                ? layer.sortedBundles[PostProcessEvent.BeforeTransparent].Count
                : layer.sortedBundles[PostProcessEvent.BeforeStack].Count +
                  layer.sortedBundles[PostProcessEvent.AfterStack].Count;

            var ppContext = RenderingUtils.postProcessRenderContext;
            ppContext.Reset();
            ppContext.camera       = cameraData.camera;
            ppContext.source       = source;
            ppContext.sourceFormat = sourceDescriptor.colorFormat;
            ppContext.destination  = destination;
            ppContext.command      = cmd;
            ppContext.flip         = flip;

            if (effectsCount == 1 && source == destination)
            {
                // Redirect the stack into a temporary color-only target, then
                // blit the result back to the real destination.
                var tempDescriptor = sourceDescriptor;
                tempDescriptor.msaaSamples     = 1;
                tempDescriptor.depthBufferBits = 0;

                var tempTarget = new RenderTargetIdentifier(m_PostProcessingTemporaryTargetId);
                ppContext.destination = tempTarget;
                cmd.GetTemporaryRT(m_PostProcessingTemporaryTargetId, tempDescriptor, FilterMode.Point);

                if (opaqueOnly)
                {
                    layer.RenderOpaqueOnly(ppContext);
                }
                else
                {
                    layer.Render(ppContext);
                }

                cmd.Blit(tempTarget, destination);
                cmd.ReleaseTemporaryRT(m_PostProcessingTemporaryTargetId);
            }
            else
            {
                // The PostProcessing system creates its own intermediates here.
                if (opaqueOnly)
                {
                    layer.RenderOpaqueOnly(ppContext);
                }
                else
                {
                    layer.Render(ppContext);
                }
            }
        }
    // Creates the transmission/scattering command buffers and materials if the
    // relevant features are enabled, then attaches the buffers to m_camera.
    // Idempotent: the inner setup only runs when the buffers have not been
    // created yet (m_copyTransmission/m_releaseDeferredPlus are null).
    private void InitializeBuffers() {
        m_isScatteringEnabled = SkinSettings.Enabled;
        // Scattering implies transmission.
        m_isTransmissionEnabled = TransmissionSettings.Enabled || m_isScatteringEnabled;

        // Fall back to the default skin LUT if none was assigned.
        if (SkinSettings.Lut == null) {
            SkinSettings.Lut = SkinLut;

#if UNITY_EDITOR
            EditorUtility.SetDirty(this);
#endif
        }

        if ((m_isTransmissionEnabled || m_isScatteringEnabled)
            && m_camera != null
            && DeferredTransmissionBlit != null
            && m_copyTransmission == null
            && m_releaseDeferredPlus == null) {
            int opacityBufferId = Shader.PropertyToID("_DeferredTransmissionBuffer");
            int blurredNormalsBufferIdTemp = Shader.PropertyToID("_DeferredBlurredNormalBufferTemp");
            int blurredNormalBuffer = Shader.PropertyToID("_DeferredBlurredNormalBuffer");

            // HideAndDontSave keeps the runtime-created material out of the
            // scene and out of serialization.
            m_deferredTransmissionBlitMaterial = new Material(DeferredTransmissionBlit);
            m_deferredTransmissionBlitMaterial.hideFlags = HideFlags.HideAndDontSave;

            // Copy Gbuffer emission buffer so we can get at the alpha channel for transmission.
            m_copyTransmission = new CommandBuffer();
            m_copyTransmission.name = c_copyTransmissionBufferName;
            m_copyTransmission.GetTemporaryRT(opacityBufferId, -1, -1, 0, FilterMode.Point, RenderTextureFormat.ARGB32);
            m_copyTransmission.Blit(BuiltinRenderTextureType.CameraTarget, opacityBufferId, m_deferredTransmissionBlitMaterial);

            // Blurred normals for skin
            if (m_isScatteringEnabled) {
                GenerateNormalBlurMaterialAndCommandBuffer(blurredNormalBuffer, blurredNormalsBufferIdTemp,
                    out m_deferredBlurredNormalsMaterial, out m_renderBlurredNormals);

#if UNITY_EDITOR
                // Separate material/buffer pair for the scene view camera.
                GenerateNormalBlurMaterialAndCommandBuffer(blurredNormalBuffer, blurredNormalsBufferIdTemp,
                    out m_sceneViewBlurredNormalsMaterial, out m_sceneViewBlurredNormals);
#endif
            }

            // Cleanup resources.
            m_releaseDeferredPlus = new CommandBuffer();
            m_releaseDeferredPlus.name = c_releaseDeferredBuffer;
            m_releaseDeferredPlus.ReleaseTemporaryRT(opacityBufferId);

            if (m_isScatteringEnabled) {
                m_releaseDeferredPlus.ReleaseTemporaryRT(blurredNormalsBufferIdTemp);
            }

#if UNITY_EDITOR
            SceneView.onSceneGUIDelegate += OnSceneGUIDelegate;
#endif
        }

        AddCommandBuffersToCamera(m_camera, m_renderBlurredNormals);

#if UNITY_EDITOR
        EditorUtility.SetDirty(m_camera);
#endif
    }
Пример #33
0
 // Blit a camera-scaled RTHandle into a non-scaling target. The destination
 // viewport is implicit in cmd.Blit, so only the source UVs need the
 // per-camera scale applied.
 public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle source, RenderTargetIdentifier destination)
 {
     var uvScale = new Vector2(camera.scaleBias.x, camera.scaleBias.y);
     cmd.Blit(source, destination, uvScale, Vector2.zero);
 }
    // Detaches command buffers from all cameras and destroys the runtime
    // materials created in InitializeBuffers. Safe to call repeatedly: each
    // material is null-checked before destruction.
    private void DestroyCommandBuffers() {
        RemoveCommandBuffersFromAllCameras();

        // Drop the references; the buffers are re-created on next init.
        m_copyTransmission = null;
        m_renderBlurredNormals = null;
        m_releaseDeferredPlus = null;

#if UNITY_EDITOR
        m_sceneViewBlurredNormals = null;
        SceneView.onSceneGUIDelegate -= OnSceneGUIDelegate;
#endif

        // Runtime-created materials must be destroyed explicitly.
        if (m_deferredTransmissionBlitMaterial != null) {
            DestroyImmediate(m_deferredTransmissionBlitMaterial);
            m_deferredTransmissionBlitMaterial = null;
        }

        if (m_deferredBlurredNormalsMaterial != null) {
            DestroyImmediate(m_deferredBlurredNormalsMaterial);
            m_deferredBlurredNormalsMaterial = null;
        }

#if UNITY_EDITOR
        if (m_sceneViewBlurredNormalsMaterial != null) {
            DestroyImmediate(m_sceneViewBlurredNormalsMaterial);
            m_sceneViewBlurredNormalsMaterial = null;
        }
#endif
    }
Пример #35
0
        // Updates sky rendering contexts (dynamic and static lighting skies)
        // and pushes the resulting ambient/skybox state into RenderSettings so
        // both baked lighting and realtime GI pick up the current sky.
        public void UpdateEnvironment(HDCamera hdCamera, Light sunLight, int frameIndex, CommandBuffer cmd)
        {
            bool isRegularPreview = HDUtils.IsRegularPreviewCamera(hdCamera.camera);

            SkyAmbientMode ambientMode = VolumeManager.instance.stack.GetComponent <VisualEnvironment>().skyAmbientMode.value;

            // Preview should never use dynamic ambient or they will conflict with main view (async readback of sky texture will update ambient probe for main view one frame later)
            if (isRegularPreview)
            {
                ambientMode = SkyAmbientMode.Static;
            }

            m_CurrentSkyRenderingContext.UpdateEnvironment(m_CurrentSky, sunLight, hdCamera.mainViewConstants.worldSpaceCameraPos, m_UpdateRequired, ambientMode == SkyAmbientMode.Dynamic, frameIndex, cmd);
            StaticLightingSky staticLightingSky = GetStaticLightingSky();

            // We don't want to update the static sky during preview because it contains custom lights that may change the result.
            // The consequence is that previews will use main scene static lighting but we consider this to be acceptable.
            if (staticLightingSky != null && !isRegularPreview)
            {
                m_StaticLightingSky.skySettings = staticLightingSky.skySettings;
                m_StaticLightingSkyRenderingContext.UpdateEnvironment(m_StaticLightingSky, sunLight, hdCamera.mainViewConstants.worldSpaceCameraPos, false, true, frameIndex, cmd);
            }

            // Assume realtime GI at runtime (can only be queried in-editor).
            bool useRealtimeGI = true;

#if UNITY_EDITOR
            useRealtimeGI = UnityEditor.Lightmapping.realtimeGI;
#endif
            // Working around GI current system
            // When using baked lighting, setting up the ambient probe should be sufficient => We only need to update RenderSettings.ambientProbe with either the static or visual sky ambient probe (computed from GPU)
            // When using real time GI. Enlighten will pull sky information from Skybox material. So in order for dynamic GI to work, we update the skybox material texture and then set the ambient mode to SkyBox
            // Problem: We can't check at runtime if realtime GI is enabled so we need to take extra care (see useRealtimeGI usage below)
            RenderSettings.ambientMode = AmbientMode.Custom; // Needed to specify ourselves the ambient probe (this will update internal ambient probe data passed to shaders)
            if (ambientMode == SkyAmbientMode.Static)
            {
                RenderSettings.ambientProbe = GetStaticLightingAmbientProbe();
                m_StandardSkyboxMaterial.SetTexture("_Tex", GetStaticLightingTexture());
            }
            else
            {
                RenderSettings.ambientProbe = m_CurrentSkyRenderingContext.ambientProbe;
                // Workaround in the editor:
                // When in the editor, if we use baked lighting, we need to setup the skybox material with the static lighting texture otherwise when baking, the dynamic texture will be used
                if (useRealtimeGI)
                {
                    m_StandardSkyboxMaterial.SetTexture("_Tex", m_CurrentSky.IsValid() ? (Texture)m_CurrentSkyRenderingContext.cubemapRT : CoreUtils.blackCubeTexture);
                }
                else
                {
                    m_StandardSkyboxMaterial.SetTexture("_Tex", GetStaticLightingTexture());
                }
            }

            // This is only needed if we use realtime GI otherwise enlighten won't get the right sky information
            RenderSettings.skybox              = m_StandardSkyboxMaterial; // Setup this material as the default to be use in RenderSettings
            RenderSettings.ambientIntensity    = 1.0f;
            RenderSettings.ambientMode         = AmbientMode.Skybox;       // Force skybox for our HDRI
            RenderSettings.reflectionIntensity = 1.0f;
            RenderSettings.customReflection    = null;

            m_UpdateRequired = false;

            // Publish the sky texture and tell shaders whether a valid
            // lighting sky is available this frame.
            SetGlobalSkyTexture(cmd);
            if (IsLightingSkyValid())
            {
                cmd.SetGlobalInt(HDShaderIDs._EnvLightSkyEnabled, 1);
            }
            else
            {
                cmd.SetGlobalInt(HDShaderIDs._EnvLightSkyEnabled, 0);
            }
        }
        // Generates the gaussian pyramid of source into destination
        // We can't do it in place as the color pyramid has to be read while writing to the color
        // buffer in some cases (e.g. refraction, distortion)
        // Returns the number of mips
        public int RenderColorGaussianPyramid(CommandBuffer cmd, Vector2Int size, Texture source, RenderTexture destination)
        {
            // Select between Tex2D and Tex2DArray versions of the kernels
            bool sourceIsArray = (source.dimension == TextureDimension.Tex2DArray);
            int  rtIndex       = sourceIsArray ? 1 : 0;

            // Sanity check
            if (sourceIsArray)
            {
                Debug.Assert(source.dimension == destination.dimension, "MipGenerator source texture does not match dimension of destination!");
            }

            // Only create the temporary target on-demand in case the game doesn't actually need it
            if (m_TempColorTargets[rtIndex] == null)
            {
                // Half-resolution scratch target used as the vertical-blur source.
                m_TempColorTargets[rtIndex] = RTHandles.Alloc(
                    Vector2.one * 0.5f,
                    sourceIsArray ? TextureXR.slices : 1,
                    dimension: source.dimension,
                    filterMode: FilterMode.Bilinear,
                    colorFormat: destination.graphicsFormat,
                    enableRandomWrite: true,
                    useMipMap: false,
                    enableMSAA: false,
                    useDynamicScale: true,
                    name: "Temp Gaussian Pyramid Target"
                    );
            }

            int srcMipLevel  = 0;
            int srcMipWidth  = size.x;
            int srcMipHeight = size.y;
            int slices       = destination.volumeDepth;

            int tempTargetWidth  = srcMipWidth >> 1;
            int tempTargetHeight = srcMipHeight >> 1;

            // Lazily allocated half-resolution target for the downsample step.
            if (m_TempDownsamplePyramid[rtIndex] == null)
            {
                m_TempDownsamplePyramid[rtIndex] = RTHandles.Alloc(
                    Vector2.one * 0.5f,
                    sourceIsArray ? TextureXR.slices : 1,
                    dimension: source.dimension,
                    filterMode: FilterMode.Bilinear,
                    colorFormat: destination.graphicsFormat,
                    enableRandomWrite: false,
                    useMipMap: false,
                    enableMSAA: false,
                    useDynamicScale: true,
                    name: "Temporary Downsampled Pyramid"
                    );
            }

            // Viewport may be smaller than the source render target (RTHandle
            // system), hence the explicit UV scale for the mip0 copy.
            float sourceScaleX = (float)size.x / source.width;
            float sourceScaleY = (float)size.y / source.height;

            // Copies src mip0 to dst mip0
            m_PropertyBlock.SetTexture(HDShaderIDs._BlitTexture, source);
            m_PropertyBlock.SetVector(HDShaderIDs._BlitScaleBias, new Vector4(sourceScaleX, sourceScaleY, 0f, 0f));
            m_PropertyBlock.SetFloat(HDShaderIDs._BlitMipLevel, 0f);
            cmd.SetRenderTarget(destination, 0, CubemapFace.Unknown, -1);
            cmd.SetViewport(new Rect(0, 0, srcMipWidth, srcMipHeight));
            cmd.DrawProcedural(Matrix4x4.identity, HDUtils.GetBlitMaterial(source.dimension), 0, MeshTopology.Triangles, 3, 1, m_PropertyBlock);

            int finalTargetMipWidth  = destination.width;
            int finalTargetMipHeight = destination.height;

            // Note: smaller mips are excluded as we don't need them and the gaussian compute works
            // on 8x8 blocks
            while (srcMipWidth >= 8 || srcMipHeight >= 8)
            {
                int dstMipWidth  = Mathf.Max(1, srcMipWidth >> 1);
                int dstMipHeight = Mathf.Max(1, srcMipHeight >> 1);

                // Scale for downsample
                float scaleX = ((float)srcMipWidth / finalTargetMipWidth);
                float scaleY = ((float)srcMipHeight / finalTargetMipHeight);

                using (new ProfilingSample(cmd, "Downsample", CustomSamplerId.ColorPyramid.GetSampler()))
                {
                    // Downsample.
                    m_PropertyBlock.SetTexture(HDShaderIDs._BlitTexture, destination);
                    m_PropertyBlock.SetVector(HDShaderIDs._BlitScaleBias, new Vector4(scaleX, scaleY, 0f, 0f));
                    m_PropertyBlock.SetFloat(HDShaderIDs._BlitMipLevel, srcMipLevel);
                    cmd.SetRenderTarget(m_TempDownsamplePyramid[rtIndex], 0, CubemapFace.Unknown, -1);
                    cmd.SetViewport(new Rect(0, 0, dstMipWidth, dstMipHeight));
                    cmd.DrawProcedural(Matrix4x4.identity, HDUtils.GetBlitMaterial(source.dimension), 1, MeshTopology.Triangles, 3, 1, m_PropertyBlock);
                }

                // In this mip generation process, source viewport can be smaller than the source render target itself because of the RTHandle system
                // We are not using the scale provided by the RTHandle system for two reasons:
                // - Source might be a planar probe which will not be scaled by the system (since it's actually the final target of probe rendering at the exact size)
                // - When computing mip size, depending on even/odd sizes, the scale computed for mip 0 might miss a texel at the border.
                //   This can result in a shift in the mip map downscale that depends on the render target size rather than the actual viewport
                //   (Two rendering at the same viewport size but with different RTHandle reference size would yield different results which can break automated testing)
                // So in the end we compute a specific scale for downscale and blur passes at each mip level.

                // Scales for Blur
                float blurSourceTextureWidth  = (float)m_TempDownsamplePyramid[rtIndex].rt.width; // Same size as m_TempColorTargets which is the source for vertical blur
                float blurSourceTextureHeight = (float)m_TempDownsamplePyramid[rtIndex].rt.height;
                scaleX = ((float)dstMipWidth / blurSourceTextureWidth);
                scaleY = ((float)dstMipHeight / blurSourceTextureHeight);

                // Blur horizontal.
                using (new ProfilingSample(cmd, "Blur horizontal", CustomSamplerId.ColorPyramid.GetSampler()))
                {
                    m_PropertyBlock.SetTexture(HDShaderIDs._Source, m_TempDownsamplePyramid[rtIndex]);
                    m_PropertyBlock.SetVector(HDShaderIDs._SrcScaleBias, new Vector4(scaleX, scaleY, 0f, 0f));
                    m_PropertyBlock.SetVector(HDShaderIDs._SrcUvLimits, new Vector4((dstMipWidth - 0.5f) / blurSourceTextureWidth, (dstMipHeight - 0.5f) / blurSourceTextureHeight, 1.0f / blurSourceTextureWidth, 0f));
                    m_PropertyBlock.SetFloat(HDShaderIDs._SourceMip, 0);
                    cmd.SetRenderTarget(m_TempColorTargets[rtIndex], 0, CubemapFace.Unknown, -1);
                    cmd.SetViewport(new Rect(0, 0, dstMipWidth, dstMipHeight));
                    cmd.DrawProcedural(Matrix4x4.identity, m_ColorPyramidPSMat, rtIndex, MeshTopology.Triangles, 3, 1, m_PropertyBlock);
                }

                // Blur vertical.
                using (new ProfilingSample(cmd, "Blur vertical", CustomSamplerId.ColorPyramid.GetSampler()))
                {
                    m_PropertyBlock.SetTexture(HDShaderIDs._Source, m_TempColorTargets[rtIndex]);
                    m_PropertyBlock.SetVector(HDShaderIDs._SrcScaleBias, new Vector4(scaleX, scaleY, 0f, 0f));
                    m_PropertyBlock.SetVector(HDShaderIDs._SrcUvLimits, new Vector4((dstMipWidth - 0.5f) / blurSourceTextureWidth, (dstMipHeight - 0.5f) / blurSourceTextureHeight, 0f, 1.0f / blurSourceTextureHeight));
                    m_PropertyBlock.SetFloat(HDShaderIDs._SourceMip, 0);
                    cmd.SetRenderTarget(destination, srcMipLevel + 1, CubemapFace.Unknown, -1);
                    cmd.SetViewport(new Rect(0, 0, dstMipWidth, dstMipHeight));
                    cmd.DrawProcedural(Matrix4x4.identity, m_ColorPyramidPSMat, rtIndex, MeshTopology.Triangles, 3, 1, m_PropertyBlock);
                }

                srcMipLevel++;
                srcMipWidth  = srcMipWidth >> 1;
                srcMipHeight = srcMipHeight >> 1;

                finalTargetMipWidth  = finalTargetMipWidth >> 1;
                finalTargetMipHeight = finalTargetMipHeight >> 1;
            }

            // Total mip count: the last written mip index plus one.
            return(srcMipLevel + 1);
        }
    // Builds the material and command buffer that blur the GBuffer normals.
    // Two separable blur iterations are recorded; each iteration runs pass 0
    // into the temp target then pass 1 back into the blurred-normal target.
    private void GenerateNormalBlurMaterialAndCommandBuffer(int blurredNormalBuffer, int blurredNormalsBufferIdTemp,
        out Material blurMaterial, out CommandBuffer blurCommandBuffer) {
        blurMaterial = new Material(DeferredBlurredNormals);
        blurMaterial.hideFlags = HideFlags.HideAndDontSave;

        blurCommandBuffer = new CommandBuffer();
        blurCommandBuffer.name = c_normalBufferName;
        blurCommandBuffer.GetTemporaryRT(blurredNormalsBufferIdTemp, -1, -1, 0, FilterMode.Point,
            RenderTextureFormat.ARGB2101010);
        blurCommandBuffer.GetTemporaryRT(blurredNormalBuffer, -1, -1, 0, FilterMode.Point, RenderTextureFormat.ARGB2101010);

        // The first iteration samples the raw GBuffer normals; the second
        // re-blurs the previous iteration's output.
        RenderTargetIdentifier blurSource = BuiltinRenderTextureType.GBuffer2;
        for (int iteration = 0; iteration < 2; iteration++) {
            blurCommandBuffer.Blit(blurSource, blurredNormalsBufferIdTemp, blurMaterial, 0);
            blurCommandBuffer.Blit(blurredNormalsBufferIdTemp, blurredNormalBuffer, blurMaterial, 1);
            blurSource = blurredNormalBuffer;
        }
    }
Пример #38
0
 // Forwards the visual sky render to the currently active sky rendering context.
 public void RenderSky(HDCamera camera, Light sunLight, RTHandle colorBuffer, RTHandle depthBuffer, DebugDisplaySettings debugSettings, int frameIndex, CommandBuffer cmd)
     => m_CurrentSkyRenderingContext.RenderSky(m_VisualSky, camera, sunLight, colorBuffer, depthBuffer, debugSettings, frameIndex, cmd);
    // Attaches the transmission-copy, normal-blur and cleanup command buffers
    // to the given camera, skipping any buffer that is null or already added.
    private void AddCommandBuffersToCamera(Camera setCamera, CommandBuffer normalBuffer) {
        // The depth-aware upsample requires the camera depth texture.
        setCamera.depthTextureMode |= DepthTextureMode.Depth;

        bool addCopy = m_copyTransmission != null
            && !HasCommandBuffer(setCamera, CameraEvent.AfterGBuffer, c_copyTransmissionBufferName);
        if (addCopy) {
            setCamera.AddCommandBuffer(CameraEvent.AfterGBuffer, m_copyTransmission);
        }

        bool addNormals = normalBuffer != null
            && !HasCommandBuffer(setCamera, CameraEvent.BeforeLighting, c_normalBufferName);
        if (addNormals) {
            setCamera.AddCommandBuffer(CameraEvent.BeforeLighting, normalBuffer);
        }

        bool addRelease = m_releaseDeferredPlus != null
            && !HasCommandBuffer(setCamera, CameraEvent.AfterLighting, c_releaseDeferredBuffer);
        if (addRelease) {
            setCamera.AddCommandBuffer(CameraEvent.AfterLighting, m_releaseDeferredPlus);
        }

        RefreshProperties();
    }
Пример #40
0
 // Bind black volume textures for every atmospheric scattering input so that
 // shaders sampling sky data see a neutral (no scattering) result.
 public static void SetGlobalNeutralSkyData(CommandBuffer cmd)
 {
     foreach (var texId in new[]
     {
         HDShaderIDs._AirSingleScatteringTexture,
         HDShaderIDs._AerosolSingleScatteringTexture,
         HDShaderIDs._MultipleScatteringTexture,
     })
     {
         cmd.SetGlobalTexture(texId, CoreUtils.blackVolumeTexture);
     }
 }
    // Whenever any camera will render us, build (once per camera) a command
    // buffer that paints the brushstroke scene, and attach it to that camera.
    // NOTE: Unity only calls OnWillRenderObject on objects with a visible
    // Renderer, so this object must keep its MeshRenderer component.
    public void OnWillRenderObject() {
        var act = gameObject.activeInHierarchy && enabled;
        if (!act) {
            Cleanup();
            return;
        }

        var cam = Camera.current;
        if (!cam)
            return;

        // Did we already add the command buffer on this camera? Nothing to do then.
        if (m_Cameras.ContainsKey(cam))
            return;

        CommandBuffer buf = new CommandBuffer();
        buf.name = "TestDrawProcedural";
        m_Cameras[cam] = buf;  // remember the buffer for this camera

        // Build the fullscreen quad once and reuse it for every pass below.
        // The original code called CreateFullscreenQuad per pass, leaking a
        // new Mesh each time a buffer was built.
        Mesh fullscreenQuad = CreateFullscreenQuad(mainCam);

        // --- Canvas pass ---------------------------------------------------
        canvasMaterial.SetColor("_Color", Color.gray);  // initialize canvas
        canvasMaterial.SetTexture("_DepthTex", canvasDepthTex);
        canvasMaterial.SetFloat("_MaxDepth", 1.0f);

        // Create ping-pong color/depth render targets.
        int colorReadID = Shader.PropertyToID("_ColorTextureRead");
        int colorWriteID = Shader.PropertyToID("_ColorTextureWrite");
        int depthReadID = Shader.PropertyToID("_DepthTextureRead");
        int depthWriteID = Shader.PropertyToID("_DepthTextureWrite");
        buf.GetTemporaryRT(colorReadID, -1, -1, 0, FilterMode.Bilinear);
        buf.GetTemporaryRT(colorWriteID, -1, -1, 0, FilterMode.Bilinear);
        buf.GetTemporaryRT(depthReadID, -1, -1, 0, FilterMode.Bilinear);
        buf.GetTemporaryRT(depthWriteID, -1, -1, 0, FilterMode.Bilinear);

        // MRT so the quad writes color AND depth in a single draw.
        RenderTargetIdentifier[] mrt = { colorWriteID, depthWriteID };
        buf.SetRenderTarget(mrt, BuiltinRenderTextureType.CameraTarget);
        buf.ClearRenderTarget(true, true, Color.white, 1.0f);
        buf.DrawMesh(fullscreenQuad, Matrix4x4.identity, canvasMaterial);   // write canvas color & depth

        // Copy results into the read buffers for the next pass.
        buf.Blit(colorWriteID, colorReadID);
        buf.Blit(depthWriteID, depthReadID);

        // --- Gesso/primer pass ---------------------------------------------
        // NOTE(review): Material.SetPass outside an immediate-mode draw has no
        // effect on a command buffer; kept to preserve existing behavior.
        gessoMaterial.SetPass(0);
        gessoMaterial.SetColor("_Color", new Color(0.19f, 0.192f, 0.194f, 1.0f));
        buf.SetGlobalTexture("_ColorReadTex", colorReadID);
        buf.SetGlobalTexture("_DepthReadTex", depthReadID);
        buf.SetRenderTarget(mrt, BuiltinRenderTextureType.CameraTarget);
        buf.DrawMesh(fullscreenQuad, Matrix4x4.identity, gessoMaterial);
        // Copy results into the read buffers for the next pass.
        buf.Blit(colorWriteID, colorReadID);
        buf.Blit(depthWriteID, depthReadID);

        // --- Main brushstroke pass -----------------------------------------
        strokeMaterial.SetPass(0);
        strokeMaterial.SetColor("_Color", brushStrokeColor);
        strokeMaterial.SetVector("_Size", size);
        strokeMaterial.SetBuffer("strokeDataBuffer", strokeBuffer);
        strokeMaterial.SetBuffer("quadPointsBuffer", quadPointsBuffer);
        buf.SetGlobalTexture("_ColorReadTex", colorReadID);
        buf.SetGlobalTexture("_DepthReadTex", depthReadID);
        buf.SetRenderTarget(colorWriteID);
        // Expose the current frame buffer so strokes can pick up screen color.
        buf.SetGlobalTexture("_FrameBufferTexture", BuiltinRenderTextureType.CameraTarget);
        buf.DrawProcedural(Matrix4x4.identity, strokeMaterial, 0, MeshTopology.Triangles, 6, strokeBuffer.count);   // apply brushstrokes

        // Present: copy the painted canvas into the camera target.
        buf.Blit(colorWriteID, BuiltinRenderTextureType.CameraTarget);

        cam.AddCommandBuffer(CameraEvent.AfterFinalPass, buf);
    }
Пример #42
0
 // Base implementation binds neutral (black) scattering data; sky types that
 // produce real atmospheric data override this.
 public virtual void SetGlobalSkyData(CommandBuffer cmd) => SetGlobalNeutralSkyData(cmd);
Пример #43
0
    // Rebuilds all GPU-side state from scratch: instance/vertex buffers, the
    // material's feature flags and buffer bindings, and the command buffer
    // that draws the instances into the GBuffer.
    public void ResetGPUData()
    {
        ReleaseGPUData();

        m_instance_data.Resize(m_max_instances);
        if (m_instance_buffer != null)
        {
            m_instance_buffer.Allocate(m_max_instances);
        }
        BatchRendererUtil.CreateVertexBuffer(m_mesh, ref m_vertex_buffer, ref m_vertex_count);

        // Push the per-feature flags the shader branches on.
        Material mat = m_material;
        mat.SetInt("g_flag_rotation", m_enable_rotation ? 1 : 0);
        mat.SetInt("g_flag_scale", m_enable_scale ? 1 : 0);
        mat.SetInt("g_flag_color", m_enable_color ? 1 : 0);
        mat.SetInt("g_flag_emission", m_enable_emission ? 1 : 0);
        mat.SetInt("g_flag_uvoffset", m_enable_uv_offset ? 1 : 0);
        if (m_instance_buffer != null)
        {
            mat.SetBuffer("g_vertices", m_vertex_buffer);
            mat.SetBuffer("g_instance_buffer_t", m_instance_buffer.translation);
            mat.SetBuffer("g_instance_buffer_r", m_instance_buffer.rotation);
            mat.SetBuffer("g_instance_buffer_s", m_instance_buffer.scale);
            mat.SetBuffer("g_instance_buffer_color", m_instance_buffer.color);
            mat.SetBuffer("g_instance_buffer_emission", m_instance_buffer.emission);
            mat.SetBuffer("g_instance_buffer_uv", m_instance_buffer.uv_offset);
        }

        // Record the procedural GBuffer draw and hook it onto the camera.
        m_cb = new CommandBuffer();
        m_cb.name = "ProceduralGBuffer";
        m_cb.DrawProcedural(Matrix4x4.identity, m_material, 0, MeshTopology.Triangles, m_vertex_count, m_max_instances);
        m_camera.AddCommandBuffer(CameraEvent.AfterGBuffer, m_cb);

        // Upload the initial instance values.
        UpdateGPUResources();
    }
Пример #44
0
 // Called each frame before Execute: allocates a temporary render texture
 // matching the camera target for this pass to render into.
 public override void Configure(CommandBuffer cmd, RenderTextureDescriptor cameraTextureDescriptor)
     => cmd.GetTemporaryRT(tempTexture.id, cameraTextureDescriptor);
Пример #45
0
    /// <summary>
    /// Sets up EXR capture: output directory, fullscreen quad, copy material,
    /// optional frame-buffer grab command buffer, optional G-buffer/depth
    /// render targets, and the native EXR context.
    /// </summary>
    void OnEnable()
    {
        System.IO.Directory.CreateDirectory(m_output_directory);
        m_cam = GetComponent<Camera>();
        m_quad = FrameCapturerUtils.CreateFullscreenQuad();
        m_mat_copy = new Material(m_sh_copy);
        if (m_cam.targetTexture != null)
        {
            // Rendering offscreen: the copy shader needs the OFFSCREEN variant.
            m_mat_copy.EnableKeyword("OFFSCREEN");
        }

        if (m_capture_framebuffer)
        {
            int tid = Shader.PropertyToID("_TmpFrameBuffer");
            m_cb = new CommandBuffer();
            m_cb.name = "ExrCapturer: copy frame buffer";
            m_cb.GetTemporaryRT(tid, -1, -1, 0, FilterMode.Point);
            m_cb.Blit(BuiltinRenderTextureType.CurrentActive, tid);
            // tid is intentionally not released here (the texture is read back later).
            m_cam.AddCommandBuffer(CameraEvent.AfterEverything, m_cb);

            m_frame_buffer = new RenderTexture(m_cam.pixelWidth, m_cam.pixelHeight, 0, RenderTextureFormat.ARGBHalf);
            m_frame_buffer.wrapMode = TextureWrapMode.Repeat;
            m_frame_buffer.Create();
        }

        // G-buffer capture requires the effective rendering path to be deferred:
        // either the camera is explicitly deferred, or it follows player settings
        // and those are deferred. (Fixed: the original used `== UsePlayerSettings &&`,
        // which never warned when the camera was explicitly set to a non-deferred path.)
        if (m_capture_gbuffer &&
            m_cam.renderingPath != RenderingPath.DeferredShading &&
            (m_cam.renderingPath != RenderingPath.UsePlayerSettings || PlayerSettings.renderingPath != RenderingPath.DeferredShading))
        {
            Debug.Log("ExrCapturer: Rendering path must be deferred to use capture_gbuffer mode.");
            m_capture_gbuffer = false;
        }
        if(m_capture_gbuffer)
        {
            m_gbuffer = new RenderTexture[4];
            m_rt_gbuffer = new RenderBuffer[4];
            for (int i = 0; i < m_gbuffer.Length; ++i)
            {
                m_gbuffer[i] = new RenderTexture(m_cam.pixelWidth, m_cam.pixelHeight, 0, RenderTextureFormat.ARGBHalf);
                m_gbuffer[i].filterMode = FilterMode.Point;
                m_gbuffer[i].Create();
                m_rt_gbuffer[i] = m_gbuffer[i].colorBuffer;
            }
            {
                RenderTextureFormat format = m_depth_format == DepthFormat.Half ? RenderTextureFormat.RHalf : RenderTextureFormat.RFloat;
                m_depth = new RenderTexture(m_cam.pixelWidth, m_cam.pixelHeight, 0, format);
                m_depth.filterMode = FilterMode.Point;
                m_depth.Create();
            }
        }

        FrameCapturer.fcExrConfig conf;
        conf.max_active_tasks = m_max_active_tasks;
        m_exr = FrameCapturer.fcExrCreateContext(ref conf);
    }
Пример #46
0
 // Called after Execute: releases the temporary render texture
 // that was allocated in Configure.
 public override void FrameCleanup(CommandBuffer cmd) => cmd.ReleaseTemporaryRT(tempTexture.id);
Пример #47
0
    /// <summary>
    /// Lazily creates the light command buffer (attaching it to every
    /// registered camera after lighting), then refills it each call with
    /// this frame's render target, material properties and draw calls.
    /// </summary>
    protected override void IssueDrawCall()
    {
        if (m_cb == null)
        {
            m_cb = new CommandBuffer();
            m_cb.name = "MPGPLightRenderer";
            foreach (var cam in m_cameras)
            {
                if (cam != null)
                {
                    cam.AddCommandBuffer(CameraEvent.AfterLighting, m_cb);
                }
            }

            m_mpb = new MaterialPropertyBlock();
            m_mpb.AddColor("_Color", GetLinearColor());
        }
        m_cb.Clear();

        // HDR renders into the camera target; LDR goes to the emission G-buffer.
        m_cb.SetRenderTarget(m_hdr ? BuiltinRenderTextureType.CameraTarget : BuiltinRenderTextureType.GBuffer3);

        m_mpb.SetColor("_Color", GetLinearColor());
        m_mpb.SetFloat("g_size", m_size);
        m_mpb.SetFloat("_OcculusionStrength", m_occulusion_strength);
        m_mpb.SetColor("_HeatColor", GetLinearHeatColor());
        m_mpb.SetFloat("_HeatThreshold", m_heat_threshold);

        Matrix4x4 xform = Matrix4x4.identity;
        m_actual_materials.ForEach(mats =>
        {
            for (int b = 0; b < m_batch_count; ++b)
            {
                m_cb.DrawMesh(m_expanded_mesh, xform, mats[b], 0, 0, m_mpb);
            }
        });
    }
 /// <summary>Convenience overload: forwards to the three-argument variant with mode -1.</summary>
 public bool AddGraphicsThreadMirrorViewBlit(CommandBuffer cmd, bool allowGraphicsStateInvalidate) =>
     AddGraphicsThreadMirrorViewBlit(cmd, allowGraphicsStateInvalidate, -1);
Пример #49
0
	/// <summary>
	/// Per-frame motion step: updates matrices and pushes transform state to
	/// every affected (non-fixed-step) object, at most once per rendered frame.
	/// </summary>
	// NOTE(review): the matching #if for the #endif below opens before this chunk;
	// under UNITY_4 an alternate signature without CommandBuffer is compiled instead.
	public void UpdateTransform( CommandBuffer updateCB )
#endif
	{
		if ( !m_initialized )
			Initialize();

		// Guard: run only once per frame, and only when auto-stepping or a
		// manual step was requested.
		if ( Time.frameCount > m_prevFrameCount && ( m_autoStep || m_step ) )
		{
			UpdateMatrices();

			if ( m_affectedObjectsChanged )
				UpdateAffectedObjects();

			for ( int i = 0; i < m_affectedObjects.Length; i++ )
			{
				// Fixed-step objects are updated elsewhere (physics-driven path).
				if ( !m_affectedObjects[ i ].FixedStep )
				{
				#if UNITY_4
					m_affectedObjects[ i ].OnUpdateTransform( m_camera, m_starting );
				#else
					m_affectedObjects[ i ].OnUpdateTransform( m_camera, updateCB, m_starting );
				#endif

				}
			}

			// Consume one-shot flags and remember the frame we processed.
			m_starting = false;
			m_step = false;

			m_prevFrameCount = Time.frameCount;
		}
	}
 /// <summary>
 /// Queues a mirror-view blit on the graphics thread. Implementation is
 /// external (extern); 'mode' semantics are defined there — the two-argument
 /// overload passes -1. TODO(review): confirm mode values against the native API.
 /// </summary>
 public extern bool AddGraphicsThreadMirrorViewBlit(CommandBuffer cmd, bool allowGraphicsStateInvalidate, int mode);
    /// <summary>
    /// One-time setup: creates the painting materials, fills the gesso and
    /// background stroke compute buffers from concentric spherical point
    /// shells, initializes the terrain, builds the billboard quad buffer,
    /// and creates the master command buffer.
    /// </summary>
    void InitializeOnStartup() {
        fullscreenQuadMesh = new Mesh();
        
        // Create Materials        
        canvasMaterial = new Material(canvasShader);
        paintMaterial = new Material(paintShader);
        brushstrokeGessoMaterial = new Material(brushGessoShader);
        brushstrokeBackgroundMaterial = new Material(brushWorldSpaceShader);
        brushstrokeTerrainMaterial = new Material(brushTerrainShader);
        brushstrokeCritterMaterial = new Material(brushCritterShader);
        brushstrokeDecorationsMaterial = new Material(brushWorldSpaceShader);

        // GESSO BUFFER: three concentric shells of white, inward-facing strokes.
        // NOTE(review): the single loop assumes gesso1/2/3 have equal lengths
        // (same resolution argument) — confirm PointCloudSphericalShell guarantees this.
        int gessoRes = 16;
        Vector3[] gesso1 = PointCloudSphericalShell.GetPointsSphericalShell(50f, gessoRes, 1f);
        Vector3[] gesso2 = PointCloudSphericalShell.GetPointsSphericalShell(55f, gessoRes, 1f);
        Vector3[] gesso3 = PointCloudSphericalShell.GetPointsSphericalShell(60f, gessoRes, 1f);        
        strokeGessoArray = new strokeStruct[gesso1.Length + gesso2.Length + gesso3.Length];
        for(int i = 0; i < gesso1.Length; i++) {
            strokeGessoArray[i].pos = gesso1[i];
            strokeGessoArray[i].col = new Vector3(1f, 1f, 1f);
            strokeGessoArray[i].normal = -gesso1[i].normalized;
            strokeGessoArray[i + gesso1.Length].pos = gesso2[i];
            strokeGessoArray[i + gesso1.Length].col = new Vector3(1f, 1f, 1f);
            strokeGessoArray[i + gesso1.Length].normal = -gesso2[i].normalized;
            strokeGessoArray[i + gesso1.Length + gesso2.Length].pos = gesso3[i];
            strokeGessoArray[i + gesso1.Length + gesso2.Length].col = new Vector3(1f, 1f, 1f);
            strokeGessoArray[i + gesso1.Length + gesso2.Length].normal = -gesso3[i].normalized;
        }
        // Stride: col=3f, pos=3f, nml=3f, tan=3f, prevP=3f, dim=2f, type=1i
        gessoStrokesBuffer = new ComputeBuffer(strokeGessoArray.Length, sizeof(float) * (3 + 3 + 3 + 3 + 3 + 2) + sizeof(int) * 1);
        gessoStrokesBuffer.SetData(strokeGessoArray);

        // BACKGROUND BUFFER: three larger shells, folded into the upper
        // hemisphere and shaded by simplex noise.
        int backgroundRes = 24;
        Vector3[] background1 = PointCloudSphericalShell.GetPointsSphericalShell(100f, backgroundRes, 1f);
        Vector3[] background2 = PointCloudSphericalShell.GetPointsSphericalShell(110f, backgroundRes, 1f);
        Vector3[] background3 = PointCloudSphericalShell.GetPointsSphericalShell(120f, backgroundRes, 1f);
        strokeBackgroundArray = new strokeStruct[background1.Length + background2.Length + background3.Length];
        
        //float groundPos = -10f;
        for (int i = 0; i < background1.Length; i++) {
            float colorNoiseFrequency = 1000f;
            
            // --- shell 1 ---
            background1[i].y = Mathf.Abs(background1[i].y) + terrainAltitude;
            NoiseSample colorNoiseSample = NoisePrime.Simplex3D(background1[i], skyNoiseFrequency * colorNoiseFrequency);
            float colorNoise = colorNoiseSample.value * 0.5f + 0.5f;
            NoiseSample noiseSample = NoisePrime.Simplex3D(background1[i], skyNoiseFrequency);            
            strokeBackgroundArray[i].pos = background1[i];            
            strokeBackgroundArray[i].col = new Vector3(colorNoise, colorNoise, colorNoise);
            strokeBackgroundArray[i].normal = -background1[i].normalized;       
            strokeBackgroundArray[i].tangent = Vector3.Cross(strokeBackgroundArray[i].normal, noiseSample.derivative);
            strokeBackgroundArray[i].prevPos = background1[i];
            strokeBackgroundArray[i].dimensions = new Vector2(1f, noiseSample.derivative.magnitude);

            // --- shell 2 ---
            background2[i].y = Mathf.Abs(background2[i].y) + terrainAltitude;
            // (fixed: color noise was previously sampled at background1[i])
            colorNoiseSample = NoisePrime.Simplex3D(background2[i], skyNoiseFrequency * colorNoiseFrequency);
            colorNoise = colorNoiseSample.value * 0.5f + 0.5f;
            noiseSample = NoisePrime.Simplex3D(background2[i], skyNoiseFrequency);
            strokeBackgroundArray[i + background1.Length].pos = background2[i];
            strokeBackgroundArray[i + background1.Length].col = new Vector3(colorNoise, colorNoise, colorNoise);           
            strokeBackgroundArray[i + background1.Length].normal = -background2[i].normalized;
            strokeBackgroundArray[i + background1.Length].tangent = Vector3.Cross(strokeBackgroundArray[i + background1.Length].normal, noiseSample.derivative);
            strokeBackgroundArray[i + background1.Length].prevPos = background2[i];
            strokeBackgroundArray[i + background1.Length].dimensions = new Vector2(1f, noiseSample.derivative.magnitude);

            // --- shell 3 ---
            background3[i].y = Mathf.Abs(background3[i].y) + terrainAltitude;
            // (fixed: color noise was previously sampled at background1[i])
            colorNoiseSample = NoisePrime.Simplex3D(background3[i], skyNoiseFrequency * colorNoiseFrequency);
            colorNoise = colorNoiseSample.value * 0.5f + 0.5f;
            noiseSample = NoisePrime.Simplex3D(background3[i], skyNoiseFrequency);            
            strokeBackgroundArray[i + background1.Length + background2.Length].pos = background3[i];
            strokeBackgroundArray[i + background1.Length + background2.Length].col = new Vector3(colorNoise, colorNoise, colorNoise);            
            // (fixed: normal was previously derived from background1[i])
            strokeBackgroundArray[i + background1.Length + background2.Length].normal = -background3[i].normalized;            
            strokeBackgroundArray[i + background1.Length + background2.Length].tangent = Vector3.Cross(strokeBackgroundArray[i + background1.Length + background2.Length].normal, noiseSample.derivative);
            strokeBackgroundArray[i + background1.Length + background2.Length].prevPos = background3[i];
            strokeBackgroundArray[i + background1.Length + background2.Length].dimensions = new Vector2(1f, noiseSample.derivative.magnitude);
        }
        backgroundStrokesBuffer = new ComputeBuffer(strokeBackgroundArray.Length, sizeof(float) * (3 + 3 + 3 + 3 + 3 + 2) + sizeof(int) * 1);
        backgroundStrokesBuffer.SetData(strokeBackgroundArray);

        /*// TERRAIN BUFFER:
        int terrainCount = 20000;
        Vector3[] terrain1 = new Vector3[terrainCount]; //PointCloudSphericalShell.GetPointsSphericalShell(2f, backgroundRes, 1f);
        strokeTerrainArray = new strokeStruct[terrain1.Length];

        float groundPos = -5f;
        for (int i = 0; i < strokeTerrainArray.Length; i++) {
            terrain1[i] = UnityEngine.Random.insideUnitSphere * groundSpreadExponent;
            //terrain1[i] *= terrain1[i].sqrMagnitude;
            terrain1[i].y = groundPos;  // start at groundHeight
            NoiseSample noiseSample = NoisePrime.Simplex3D(new Vector3(terrain1[i].x, 0f, terrain1[i].z), groundNoiseFrequency);
            float heightOffset = noiseSample.value * groundNoiseAmplitude;
            terrain1[i].y += heightOffset;
            strokeTerrainArray[i].pos = terrain1[i];
            Color color = terrainColorGradient.Evaluate(noiseSample.value * 0.5f + 0.5f);
            strokeTerrainArray[i].col = new Vector3(color.r, color.g, color.b);
            Vector3 preTangent = noiseSample.derivative;
            preTangent.y *= groundNoiseAmplitude;
            strokeTerrainArray[i].tangent = preTangent.normalized;
            strokeTerrainArray[i].normal = new Vector3(-noiseSample.derivative.x, 1f, -noiseSample.derivative.z).normalized;
            //Vector3 cross = Vector3.Cross(strokeBackgroundArray[i].normal, noiseSample.derivative);
            strokeTerrainArray[i].prevPos = strokeTerrainArray[i].prevPos;
            strokeTerrainArray[i].dimensions = new Vector2(1f, 1f);
            
        }
        terrainStrokesBuffer = new ComputeBuffer(strokeTerrainArray.Length, sizeof(float) * (3 + 3 + 3 + 3 + 3 + 2) + sizeof(int) * 1);
        terrainStrokesBuffer.SetData(strokeTerrainArray);
        */

        InitTerrain();

        // Create quad buffer for brushstroke billboard (two triangles, z = 0).
        quadPointsBuffer = new ComputeBuffer(6, sizeof(float) * 3);
        quadPointsBuffer.SetData(new[] {
            new Vector3(-0.5f, 0.5f),
            new Vector3(0.5f, 0.5f),
            new Vector3(0.5f, -0.5f),
            new Vector3(0.5f, -0.5f),
            new Vector3(-0.5f, -0.5f),
            new Vector3(-0.5f, 0.5f)
        });

        // Create master commandBuffer that makes the magic happen
        cmdBuffer = new CommandBuffer();
        cmdBuffer.name = "cmdBuffer";
    }
Пример #52
0
        public void PipelineUpdate(ref PipelineCommandData data)
        {
            if (renderingCommand.Length <= 0)
            {
                return;
            }
            CommandBuffer buffer = data.buffer;

            for (int i = 0; i < renderingCommand.Length; ++i)
            {
                ref CameraState             orthoCam = ref renderingCommand[i];
                ScriptableCullingParameters cullParam;
                bool rendering = orthoCam.cullingMask != 0;
                if (rendering)
                {
                    transform.position   = orthoCam.position;
                    transform.rotation   = orthoCam.rotation;
                    cam.orthographicSize = orthoCam.size;
                    cam.nearClipPlane    = orthoCam.nearClipPlane;
                    cam.farClipPlane     = orthoCam.farClipPlane;
                    cam.cullingMask      = orthoCam.cullingMask;
                    rendering            = cam.TryGetCullingParameters(out cullParam);
                    if (rendering)
                    {
                        data.context.SetupCameraProperties(cam);
                        cullParam.cullingMask    = (uint)orthoCam.cullingMask;
                        cullParam.cullingOptions = CullingOptions.ForceEvenIfCameraIsNotActive;
                        CullingResults    result = data.context.Cull(ref cullParam);
                        FilteringSettings filter = new FilteringSettings
                        {
                            layerMask          = orthoCam.cullingMask,
                            renderingLayerMask = 1,
                            renderQueueRange   = new RenderQueueRange(1000, 5000)
                        };
                        SortingSettings sort = new SortingSettings(cam)
                        {
                            criteria = SortingCriteria.RenderQueue
                        };
                        DrawingSettings drawS = new DrawingSettings(new ShaderTagId("TerrainDecal"), sort)
                        {
                            perObjectData = UnityEngine.Rendering.PerObjectData.None
                        };
                        DrawingSettings drawH = new DrawingSettings(new ShaderTagId("TerrainDisplacement"), sort)
                        {
                            perObjectData = UnityEngine.Rendering.PerObjectData.None
                        };

                        ComputeShader copyShader = data.resources.shaders.texCopyShader;
                        buffer.SetGlobalVector(ShaderIDs._MaskScaleOffset, float4(orthoCam.maskScaleOffset, (float)(1.0 / MTerrain.current.terrainData.displacementScale)));
                        buffer.SetGlobalInt(ShaderIDs._OffsetIndex, orthoCam.heightIndex);
                        var terrainData = MTerrain.current.terrainData;
                        buffer.SetGlobalVector(ShaderIDs._HeightScaleOffset, (float4)double4(terrainData.heightScale, terrainData.heightOffset, 1, 1));
                        buffer.GetTemporaryRT(RenderTargets.gbufferIndex[0], MTerrain.COLOR_RESOLUTION, MTerrain.COLOR_RESOLUTION, 16, FilterMode.Point, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Linear, 1, true);
                        buffer.GetTemporaryRT(RenderTargets.gbufferIndex[2], MTerrain.COLOR_RESOLUTION, MTerrain.COLOR_RESOLUTION, 0, FilterMode.Point, RenderTextureFormat.RGHalf, RenderTextureReadWrite.Linear, 1, true);
                        buffer.GetTemporaryRT(RenderTargets.gbufferIndex[1], MTerrain.COLOR_RESOLUTION, MTerrain.COLOR_RESOLUTION, 0, FilterMode.Point, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Linear, 1, true);
                        idfs[0] = RenderTargets.gbufferIndex[0];
                        idfs[1] = RenderTargets.gbufferIndex[2];
                        idfs[2] = RenderTargets.gbufferIndex[1];
                        buffer.SetComputeIntParam(copyShader, ShaderIDs._Count, orthoCam.depthSlice);
                        buffer.SetComputeTextureParam(copyShader, 7, ShaderIDs._VirtualMainTex, orthoCam.albedoRT);
                        buffer.SetComputeTextureParam(copyShader, 7, ShaderIDs._VirtualBumpMap, orthoCam.normalRT);
                        buffer.SetComputeTextureParam(copyShader, 7, ShaderIDs._VirtualSMO, orthoCam.smoRT);
                        buffer.SetComputeTextureParam(copyShader, 7, RenderTargets.gbufferIndex[0], RenderTargets.gbufferIndex[0]);
                        buffer.SetComputeTextureParam(copyShader, 7, RenderTargets.gbufferIndex[1], RenderTargets.gbufferIndex[1]);
                        buffer.SetComputeTextureParam(copyShader, 7, RenderTargets.gbufferIndex[2], RenderTargets.gbufferIndex[2]);
                        const int disp = MTerrain.COLOR_RESOLUTION / 8;
                        buffer.DispatchCompute(copyShader, 7, disp, disp, 1);
                        buffer.SetRenderTarget(colors: idfs, depth: idfs[0]);
                        buffer.ClearRenderTarget(true, false, new Color(0, 0, 0, 0));
                        data.ExecuteCommandBuffer();
                        data.context.DrawRenderers(result, ref drawS, ref filter);

                        buffer.SetComputeTextureParam(copyShader, 6, ShaderIDs._VirtualMainTex, orthoCam.albedoRT);
                        buffer.SetComputeTextureParam(copyShader, 6, ShaderIDs._VirtualBumpMap, orthoCam.normalRT);
                        buffer.SetComputeTextureParam(copyShader, 6, ShaderIDs._VirtualSMO, orthoCam.smoRT);
                        buffer.SetComputeTextureParam(copyShader, 6, RenderTargets.gbufferIndex[0], RenderTargets.gbufferIndex[0]);
                        buffer.SetComputeTextureParam(copyShader, 6, RenderTargets.gbufferIndex[1], RenderTargets.gbufferIndex[1]);
                        buffer.SetComputeTextureParam(copyShader, 6, RenderTargets.gbufferIndex[2], RenderTargets.gbufferIndex[2]);
                        buffer.DispatchCompute(copyShader, 6, disp, disp, 1);
                        buffer.ReleaseTemporaryRT(RenderTargets.gbufferIndex[1]);
                        buffer.ReleaseTemporaryRT(RenderTargets.gbufferIndex[2]);
                        buffer.ReleaseTemporaryRT(RenderTargets.gbufferIndex[0]);

                        buffer.SetRenderTarget(color: heightTempTex.colorBuffer, depth: heightTempTex.depthBuffer, 0);
                        buffer.ClearRenderTarget(true, true, Color.black);
                        data.ExecuteCommandBuffer();
                        data.context.DrawRenderers(result, ref drawH, ref filter);
                        buffer.GenerateMips(heightTempTex);
                        buffer.CopyTexture(heightTempTex, 0, 2, orthoCam.heightRT, orthoCam.depthSlice, 0);
                    }
                }
                if (!rendering)
                {
                    buffer.SetRenderTarget(orthoCam.heightRT, mipLevel: 0, cubemapFace: CubemapFace.Unknown, depthSlice: orthoCam.depthSlice);
                    buffer.ClearRenderTarget(false, true, Color.black);
                }
                MTerrain.current.GenerateMips(orthoCam.depthSlice, buffer);
                data.ExecuteCommandBuffer();
                data.context.Submit();
            }
Пример #53
0
 /// <summary>
 /// (Re)builds the sky-mesh command buffer and attaches it to this
 /// GameObject's camera at the configured camera event.
 /// </summary>
 void OnEnable()
 {
     // Tear down any previously attached buffer before rebuilding.
     OnDisable();
     var cam = GetComponent<Camera>();
     commandBuffer = new CommandBuffer { name = "SkyMesh" };
     commandBuffer.SetRenderTarget(BuiltinRenderTextureType.CameraTarget);
     var trs = Matrix4x4.TRS(m_position, Quaternion.Euler(m_rotation), m_scale);
     commandBuffer.DrawMesh(m_mesh, trs, m_material);
     cam.AddCommandBuffer(m_cameraEvent, commandBuffer);
     lastCameraEvent = m_cameraEvent;
 }
	/// <summary>
	/// Forwards the per-camera transform update to this object's motion state,
	/// if one exists for the camera and it is not in an error state.
	/// </summary>
	// NOTE(review): the matching #if for the #endif below opens before this chunk;
	// under UNITY_4 an alternate signature without CommandBuffer is compiled instead.
	internal void OnUpdateTransform( Camera camera, CommandBuffer updateCB, bool starting )
#endif
	{
		AmplifyMotion.MotionState state;
		if ( m_states.TryGetValue( camera, out state ) )
		{
			if ( !state.Error )
			{
			#if UNITY_4
				state.UpdateTransform( starting );
			#else
				state.UpdateTransform( updateCB, starting );
			#endif
			}
		}
	}
Пример #55
0
    /// <summary>
    /// Rebuilds the raymarcher command buffers when needed. Detects changes in
    /// resolution, adaptive mode, or the debug-steps flag; on change, clears
    /// the existing buffers and recreates them (including the multi-resolution
    /// adaptive depth pre-pass when enabled).
    /// </summary>
    void UpdateCommandBuffer()
    {
        var cam = GetComponent<Camera>();

        RenderSettings.fogColor = m_fog_color;

        if (m_quad == null)
        {
            m_quad = RaymarcherUtils.GenerateQuad();
        }

        // Any of the tracked settings changing forces a command-buffer rebuild.
        // (Fixed: removed a dead re-assignment of this flag after the check;
        // also renamed from the misspelled "reflesh_command_buffer".)
        bool refresh_command_buffer = false;

        Vector2 reso = new Vector2(cam.pixelWidth, cam.pixelHeight);
        if(m_resolution_prev!=reso)
        {
            m_resolution_prev = reso;
            refresh_command_buffer = true;
        }

        if (m_enable_adaptive_prev != m_enable_adaptive)
        {
            m_enable_adaptive_prev = m_enable_adaptive;
            refresh_command_buffer = true;
        }
        if (m_dbg_show_steps_prev != m_dbg_show_steps)
        {
            m_dbg_show_steps_prev = m_dbg_show_steps;
            refresh_command_buffer = true;
        }

        if (refresh_command_buffer)
        {
            ClearCommandBuffer();
        }

        if (m_cb_raymarch==null)
        {
            if (m_enable_adaptive)
            {
                // Adaptive pre-pass: estimate depth at 1/8 resolution, then
                // progressively refine through 1/4, 1/2 and full resolution,
                // keeping the previous frame's depth for temporal reuse.
                RenderTargetIdentifier[] rt;

                m_cb_prepass = new CommandBuffer();
                m_cb_prepass.name = "Raymarcher Adaptive PrePass";

                int odepth      = Shader.PropertyToID("ODepth");
                int odepth_prev = Shader.PropertyToID("ODepthPrev");
                int ovelocity   = Shader.PropertyToID("OVelocity");
                int qdepth      = Shader.PropertyToID("QDepth");
                int qdepth_prev = Shader.PropertyToID("QDepthPrev");
                int hdepth      = Shader.PropertyToID("HDepth");
                int hdepth_prev = Shader.PropertyToID("HDepthPrev");
                int adepth      = Shader.PropertyToID("ADepth");
                int adepth_prev = Shader.PropertyToID("ADepthPrev");

                m_cb_prepass.GetTemporaryRT(odepth,     cam.pixelWidth / 8, cam.pixelHeight / 8, 0, FilterMode.Point, RenderTextureFormat.RFloat);
                m_cb_prepass.GetTemporaryRT(odepth_prev,cam.pixelWidth / 8, cam.pixelHeight / 8, 0, FilterMode.Point, RenderTextureFormat.RFloat);
                m_cb_prepass.GetTemporaryRT(ovelocity,  cam.pixelWidth / 8, cam.pixelHeight / 8, 0, FilterMode.Point, RenderTextureFormat.RHalf);
                m_cb_prepass.GetTemporaryRT(qdepth,     cam.pixelWidth / 4, cam.pixelHeight / 4, 0, FilterMode.Point, RenderTextureFormat.RFloat);
                m_cb_prepass.GetTemporaryRT(qdepth_prev,cam.pixelWidth / 4, cam.pixelHeight / 4, 0, FilterMode.Point, RenderTextureFormat.RFloat);
                m_cb_prepass.GetTemporaryRT(hdepth,     cam.pixelWidth / 2, cam.pixelHeight / 2, 0, FilterMode.Point, RenderTextureFormat.RFloat);
                m_cb_prepass.GetTemporaryRT(hdepth_prev,cam.pixelWidth / 2, cam.pixelHeight / 2, 0, FilterMode.Point, RenderTextureFormat.RFloat);
                m_cb_prepass.GetTemporaryRT(adepth,     cam.pixelWidth / 1, cam.pixelHeight / 1, 0, FilterMode.Point, RenderTextureFormat.RFloat);
                m_cb_prepass.GetTemporaryRT(adepth_prev,cam.pixelWidth / 1, cam.pixelHeight / 1, 0, FilterMode.Point, RenderTextureFormat.RFloat);

                // 1/8 res: depth estimate + velocity (shader pass 1).
                rt = new RenderTargetIdentifier[2] { odepth, ovelocity };
                m_cb_prepass.SetGlobalTexture("g_depth_prev", odepth_prev);
                m_cb_prepass.SetRenderTarget(rt, odepth);
                m_cb_prepass.DrawMesh(m_quad, Matrix4x4.identity, m_internal_material, 0, 1);

                m_cb_prepass.Blit(odepth, odepth_prev);
                m_cb_prepass.SetGlobalTexture("g_velocity", ovelocity);

                // 1/4 res refinement (shader pass 2).
                m_cb_prepass.SetRenderTarget(qdepth);
                m_cb_prepass.SetGlobalTexture("g_depth", odepth);
                m_cb_prepass.SetGlobalTexture("g_depth_prev", qdepth_prev);
                m_cb_prepass.DrawMesh(m_quad, Matrix4x4.identity, m_internal_material, 0, 2);

                m_cb_prepass.Blit(qdepth, qdepth_prev);

                // 1/2 res refinement (shader pass 3).
                m_cb_prepass.SetRenderTarget(hdepth);
                m_cb_prepass.SetGlobalTexture("g_depth", qdepth);
                m_cb_prepass.SetGlobalTexture("g_depth_prev", hdepth_prev);
                m_cb_prepass.DrawMesh(m_quad, Matrix4x4.identity, m_internal_material, 0, 3);

                m_cb_prepass.Blit(hdepth, hdepth_prev);

                // Full res refinement (shader pass 4).
                m_cb_prepass.SetRenderTarget(adepth);
                m_cb_prepass.SetGlobalTexture("g_depth", hdepth);
                m_cb_prepass.SetGlobalTexture("g_depth_prev", adepth_prev);
                m_cb_prepass.DrawMesh(m_quad, Matrix4x4.identity, m_internal_material, 0, 4);

                m_cb_prepass.Blit(adepth, adepth_prev);
                m_cb_prepass.SetGlobalTexture("g_depth", adepth);

                cam.AddCommandBuffer(CameraEvent.BeforeGBuffer, m_cb_prepass);
            }

            // Main raymarch pass (shader pass 0), before the G-buffer.
            m_cb_raymarch = new CommandBuffer();
            m_cb_raymarch.name = "Raymarcher";
            m_cb_raymarch.DrawMesh(m_quad, Matrix4x4.identity, m_internal_material, 0, 0);
            cam.AddCommandBuffer(CameraEvent.BeforeGBuffer, m_cb_raymarch);
        }
    }
        /// <summary>
        /// SRP entry point: for each camera, kicks off a Burst culling job over
        /// the world's mesh batches in parallel with Unity's own culling, then
        /// runs the render-family passes, executes the render graph, and submits.
        /// Statement order is load-bearing: the cull job is scheduled before
        /// Unity culling and completed after it, overlapping the two.
        /// </summary>
        protected override void Render(ScriptableRenderContext RenderContext, Camera[] RenderCameras)
        {
            //Gather MeshBatch
            NativeList <FMeshBatch> MeshBatchList = GetWorld().GetMeshBatchColloctor().GetMeshBatchList();

            //Render Pipeline
            BeginFrameRendering(RenderContext, RenderCameras);
            foreach (Camera RenderCamera in RenderCameras)
            {
                RenderCamera.allowHDR = true;

                bool isSceneViewCam = RenderCamera.cameraType == CameraType.SceneView;
                #if UNITY_EDITOR
                // Scene view needs UI/world geometry emitted explicitly.
                if (isSceneViewCam)
                {
                    ScriptableRenderContext.EmitWorldGeometryForSceneView(RenderCamera);
                }
                #endif

                //Prepare VisualEffects
                VFXManager.PrepareCamera(RenderCamera);

                //Prepare ViewUnifrom (pre-render pass; the post-render update happens below)
                ViewUnifrom.UnpateBufferData(false, RenderCamera);

                //View RenderFamily
                CommandBuffer CmdBuffer = CommandBufferPool.Get("");

                //Binding ViewParameter
                BeginCameraRendering(RenderContext, RenderCamera);
                CmdBuffer.DisableScissorRect();
                ViewUnifrom.BindGPUProperty(CmdBuffer);
                RenderContext.SetupCameraProperties(RenderCamera);

                //Binding VisualEffects
                VFXManager.ProcessCameraCommand(RenderCamera, CmdBuffer);

                //Culling MeshBatch
                // NOTE(review): ViewFrustum uses Allocator.Persistent but is disposed
                // every frame — Allocator.TempJob may be intended; verify.
                NativeArray <FPlane>            ViewFrustum          = new NativeArray <FPlane>(6, Allocator.Persistent);
                NativeArray <FVisibleMeshBatch> VisibleMeshBatchList = new NativeArray <FVisibleMeshBatch>(MeshBatchList.Length, Allocator.TempJob);

                Plane[] FrustumPlane = GeometryUtility.CalculateFrustumPlanes(RenderCamera);
                for (int PlaneIndex = 0; PlaneIndex < 6; PlaneIndex++)
                {
                    ViewFrustum[PlaneIndex] = FrustumPlane[PlaneIndex];
                }

                // Schedule the batch-culling job; it runs while Unity's own
                // culling (below) executes on the main thread.
                CullMeshBatch CullTask = new CullMeshBatch();
                {
                    CullTask.ViewFrustum          = ViewFrustum;
                    CullTask.ViewOrigin           = RenderCamera.transform.position;
                    CullTask.MeshBatchList        = MeshBatchList;
                    CullTask.VisibleMeshBatchList = VisibleMeshBatchList;
                }
                JobHandle CullTaskHandle = CullTask.Schedule(MeshBatchList.Length, 256);

                /*SortMeshBatch SortTask = new SortMeshBatch();
                 * {
                 *  SortTask.VisibleMeshBatchList = VisibleMeshBatchList;
                 * }
                 * JobHandle SortTaskHandle = SortTask.Schedule(CullTaskHandle);*/

                //Culling Context
                ScriptableCullingParameters CullingParameter;
                RenderCamera.TryGetCullingParameters(out CullingParameter);
                CullingResults CullingResult = RenderContext.Cull(ref CullingParameter);

                // Join the culling job before its results are consumed below.
                CullTaskHandle.Complete();
                //SortTaskHandle.Complete();

                //Render Family
                RenderOpaqueDepth(RenderCamera, CullingResult);
                RenderOpaqueGBuffer(RenderCamera, CullingResult, VisibleMeshBatchList);
                RenderOpaqueMotion(RenderCamera, CullingResult);
                RenderSkyAtmosphere(RenderCamera);
                RenderPresentView(RenderCamera, GraphBuilder.ScopeTexture(InfinityShaderIDs.RT_ThinGBufferA), RenderCamera.targetTexture);

                //Draw DrawGizmos
                #if UNITY_EDITOR
                if (Handles.ShouldRenderGizmos())
                {
                    RenderGizmo(RenderCamera, GizmoSubset.PostImageEffects);
                }
                #endif

                //Execute RenderGraph
                GraphBuilder.Execute(RenderContext, GetWorld(), CmdBuffer, ViewUnifrom.FrameIndex);
                EndCameraRendering(RenderContext, RenderCamera);

                //Execute ViewRender
                RenderContext.ExecuteCommandBuffer(CmdBuffer);
                CommandBufferPool.Release(CmdBuffer);
                RenderContext.Submit();

                //Prepare ViewUnifrom (post-render pass: advances frame state)
                ViewUnifrom.UnpateBufferData(true, RenderCamera);

                //Release View
                ViewFrustum.Dispose();
                VisibleMeshBatchList.Dispose();
            }
            EndFrameRendering(RenderContext, RenderCameras);
        }
Пример #57
0
        /// <summary>
        /// Builds the CPU-side LightData array for all ray-tracing lights and uploads it to the GPU buffer.
        /// Resizes the GPU buffer when the light count changes, then fills one LightData entry per light
        /// via the shared HDRenderPipeline.GetLightData path and makes positions camera-relative at the end.
        /// NOTE(review): relies on class-level fields `lightComponent`, `visibleLight` and
        /// `localToWorldMatrix` being reused as scratch state — not visible in this chunk, confirm they exist.
        /// </summary>
        /// <param name="cmd">Command buffer passed through to the shared light-data builder.</param>
        /// <param name="hdCamera">Camera whose volume stack and world-space position drive shadow settings and distance fade.</param>
        /// <param name="rayTracingLights">Collection of lights to process; `lightCount` and `hdLightArray` are read.</param>
        /// <param name="debugDisplaySettings">Unused in this method body.</param>
        void BuildLightData(CommandBuffer cmd, HDCamera hdCamera, HDRayTracingLights rayTracingLights, DebugDisplaySettings debugDisplaySettings)
        {
            // If no lights, exit early — but keep a 1-element GPU buffer alive so bindings stay valid
            if (rayTracingLights.lightCount == 0)
            {
                ResizeLightDataBuffer(1);
                return;
            }

            // Resize the GPU buffer only when the light count actually changed
            if (m_LightDataGPUArray == null || m_LightDataGPUArray.count != rayTracingLights.lightCount)
            {
                ResizeLightDataBuffer(rayTracingLights.lightCount);
            }

            m_LightDataCPUArray.Clear();

            // Grab the shadow settings from the camera's volume stack
            var hdShadowSettings = hdCamera.volumeStack.GetComponent <HDShadowSettings>();
            BoolScalableSetting contactShadowScalableSetting = HDAdditionalLightData.ScalableSettings.UseContactShadow(m_RenderPipeline.asset);

            // Build the data for every light
            for (int lightIdx = 0; lightIdx < rayTracingLights.hdLightArray.Count; ++lightIdx)
            {
                // Grab the additional light data to process
                HDAdditionalLightData additionalLightData = rayTracingLights.hdLightArray[lightIdx];

                LightData lightData = new LightData();
                // When the user deletes a light source in the editor, there is a single frame where the light is null before the collection of light in the scene is triggered
                // the workaround for this is simply to add an invalid light for that frame
                if (additionalLightData == null)
                {
                    m_LightDataCPUArray.Add(lightData);
                    continue;
                }

                // Evaluate all the light type data that we need (category / GPU type / volume type)
                LightCategory   lightCategory   = LightCategory.Count;
                GPULightType    gpuLightType    = GPULightType.Point;
                LightVolumeType lightVolumeType = LightVolumeType.Count;
                HDLightType     lightType       = additionalLightData.type;
                HDRenderPipeline.EvaluateGPULightType(lightType, additionalLightData.spotLightShape, additionalLightData.areaLightShape, ref lightCategory, ref gpuLightType, ref lightVolumeType);

                // Fetch the Light component for this light into the class-level scratch field.
                // NOTE(review): the out result is not checked — presumably the component is
                // guaranteed to exist once additionalLightData is non-null; verify.
                additionalLightData.gameObject.TryGetComponent(out lightComponent);

                // Build the processed light data that we need
                ProcessedLightData processedData = new ProcessedLightData();
                processedData.additionalLightData = additionalLightData;
                processedData.lightType           = additionalLightData.type;
                processedData.lightCategory       = lightCategory;
                processedData.gpuLightType        = gpuLightType;
                processedData.lightVolumeType     = lightVolumeType;
                // Both of these positions are non-camera-relative.
                processedData.distanceToCamera  = (additionalLightData.gameObject.transform.position - hdCamera.camera.transform.position).magnitude;
                processedData.lightDistanceFade = HDUtils.ComputeLinearDistanceFade(processedData.distanceToCamera, additionalLightData.fadeDistance);
                processedData.isBakedShadowMask = HDRenderPipeline.IsBakedShadowMaskLight(lightComponent);

                // Build a visible light (class-level scratch struct reused per iteration)
                Color finalColor = lightComponent.color.linear * lightComponent.intensity;
                if (additionalLightData.useColorTemperature)
                {
                    finalColor *= Mathf.CorrelatedColorTemperatureToRGB(lightComponent.colorTemperature);
                }
                visibleLight.finalColor = finalColor;
                visibleLight.range      = lightComponent.range;
                // This should be done explicitely, localtoworld matrix doesn't work here
                localToWorldMatrix.SetColumn(3, lightComponent.gameObject.transform.position);
                localToWorldMatrix.SetColumn(2, lightComponent.transform.forward);
                localToWorldMatrix.SetColumn(1, lightComponent.transform.up);
                localToWorldMatrix.SetColumn(0, lightComponent.transform.right);
                visibleLight.localToWorldMatrix = localToWorldMatrix;
                visibleLight.spotAngle          = lightComponent.spotAngle;

                int     shadowIndex            = additionalLightData.shadowIndex;
                int     screenSpaceShadowIndex = -1;
                int     screenSpaceChannelSlot = -1;
                Vector3 lightDimensions        = new Vector3(0.0f, 0.0f, 0.0f);

                // Use the shared code to build the light data (isRasterization: false = ray-tracing path)
                m_RenderPipeline.GetLightData(cmd, hdCamera, hdShadowSettings, visibleLight, lightComponent, in processedData,
                                              shadowIndex, contactShadowScalableSetting, isRasterization: false, ref lightDimensions, ref screenSpaceShadowIndex, ref screenSpaceChannelSlot, ref lightData);

                // We make the light position camera-relative as late as possible in order
                // to allow the preceding code to work with the absolute world space coordinates.
                Vector3 camPosWS = hdCamera.mainViewConstants.worldSpaceCameraPos;
                HDRenderPipeline.UpdateLightCameraRelativetData(ref lightData, camPosWS);

                // Set the data for this light
                m_LightDataCPUArray.Add(lightData);
            }

            // Push the data to the GPU
            m_LightDataGPUArray.SetData(m_LightDataCPUArray);
        }
Пример #58
0
        /// <summary>
        /// Records the simulation update for this frame into the command buffer: runs the
        /// configured number of fixed substeps, ping-ponging the source/target render textures
        /// per LOD, then performs any per-LOD post-simulation work.
        /// </summary>
        /// <param name="ocean">Owning ocean renderer, forwarded to the base implementation.</param>
        /// <param name="buf">Command buffer the simulation draws are recorded into.</param>
        public override void BuildCommandBuffer(OceanRenderer ocean, CommandBuffer buf)
        {
            base.BuildCommandBuffer(ocean, buf);

            var lodCount = OceanRenderer.Instance.CurrentLodCount;
            var substeps = GetNumSubsteps(Time.deltaTime);
            var dt = Time.deltaTime / substeps;

            for (int step = 0; step < substeps; step++)
            {
                // Ping-pong: last substep's target becomes this substep's source, for every LOD.
                for (int lod = lodCount - 1; lod >= 0; lod--)
                {
                    SwapRTs(ref _sources[lod], ref _targets[lod]);
                }

                for (int lod = lodCount - 1; lod >= 0; lod--)
                {
                    var simMat = _renderSimMaterial[step, lod];

                    simMat.SetFloat("_SimDeltaTime", dt);
                    simMat.SetFloat("_SimDeltaTimePrev", _substepDtPrevious);
                    simMat.SetFloat("_GridSize", OceanRenderer.Instance._lods[lod]._renderData._texelWidth);

                    // On the first substep a scale change may have moved the source data up or
                    // down the LOD chain; after that, source/target data are already in place.
                    var firstSubstep = step == 0;
                    var srcDataIdx = firstSubstep ? lod + ScaleDifferencePow2 : lod;
                    var srcValid = srcDataIdx >= 0 && srcDataIdx < lodCount;

                    // Slot 0 = previous frame data; when the source LOD fell off the chain,
                    // bind params only. Previous-frame transform applies on the first substep only.
                    BindSourceData(srcValid ? srcDataIdx : lod, 0, simMat, !srcValid, firstSubstep);

                    SetAdditionalSimParams(lod, simMat);

                    var target = DataTexture(lod);
                    buf.SetRenderTarget(target, target.depthBuffer);
                    buf.DrawMesh(FullScreenQuad(), Matrix4x4.identity, simMat);

                    SubmitDraws(lod, buf);
                }

                _substepDtPrevious = dt;
            }

            // Post-sim work per LOD. The dynamic-waves sim updates the copy-sim material here,
            // which the animated-waves pass later uses to copy the dynamic-waves results in.
            for (int lod = lodCount - 1; lod >= 0; lod--)
            {
                BuildCommandBufferInternal(lod);
            }
        }
	// Renders this object's motion vectors for the given camera, if a valid per-camera
	// motion state exists and is not in an error condition.
	// NOTE(review): the matching #if for the #endif below (and a likely alternate UNITY_4
	// signature without the CommandBuffer parameter) sits above this chunk — confirm there.
	internal void OnRenderVectors( Camera camera, CommandBuffer renderCB, float scale, AmplifyMotion.Quality quality )
#endif
	{
		AmplifyMotion.MotionState state;
		// Only render if we have tracked state for this specific camera
		if ( m_states.TryGetValue( camera, out state ) )
		{
			if ( !state.Error )
			{
			#if UNITY_4
				// Unity 4 path: no command-buffer API available
				state.RenderVectors( camera, scale, quality );
			#else
				// Modern path: record motion-vector rendering into the command buffer
				state.RenderVectors( camera, renderCB, scale, quality );
			#endif
			}
		}
	}
Пример #60
0
 /// <summary>
 /// Binds the light-cluster, light-data and environment-light-data GPU buffers as
 /// global shader resources for the ray-tracing passes.
 /// </summary>
 /// <param name="cmd">Command buffer the global-buffer bindings are recorded into.</param>
 public void BindLightClusterData(CommandBuffer cmd)
 {
     // Fetch the current buffers first, then record one global binding per buffer.
     var cluster       = GetCluster();
     var lightDatas    = GetLightDatas();
     var envLightDatas = GetEnvLightDatas();

     cmd.SetGlobalBuffer(HDShaderIDs._RaytracingLightCluster, cluster);
     cmd.SetGlobalBuffer(HDShaderIDs._LightDatasRT, lightDatas);
     cmd.SetGlobalBuffer(HDShaderIDs._EnvLightDatasRT, envLightDatas);
 }