Example #1
    public void Render(
        ScriptableRenderContext context, Camera camera,
        CameraBufferSettings bufferSettings,
        bool useDynamicBatching, bool useGPUInstancing, bool useLightsPerObject,
        ShadowSettings shadowSettings, PostFXSettings postFXSettings,
        int colorLUTResolution
        )
    {
        this.context = context;
        this.camera  = camera;

        var crpCamera = camera.GetComponent<CustomRenderPipelineCamera>();
        CameraSettings cameraSettings =
            crpCamera ? crpCamera.Settings : defaultCameraSettings;

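        // copy color and depth so shaders can sample the current buffer;
        // reflection probes use their own copy flags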
        if (camera.cameraType == CameraType.Reflection)
        {
            useColorTexture = bufferSettings.copyColorReflection;
            useDepthTexture = bufferSettings.copyDepthReflection;
        }
        else
        {
            useColorTexture = bufferSettings.copyColor && cameraSettings.copyColor;
            useDepthTexture = bufferSettings.copyDepth && cameraSettings.copyDepth;
        }

        if (cameraSettings.overridePostFX)
        {
            postFXSettings = cameraSettings.postFXSettings;
        }

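        // per-camera render scale; it must deviate a bit from 1 to take effect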
        float renderScale = cameraSettings.GetRenderScale(bufferSettings.renderScale);

        useScaledRendering = renderScale < 0.99f || renderScale > 1.01f;
        PrepareBuffer();
        PrepareForSceneWindow();
        if (!Cull(shadowSettings.maxDistance))
        {
            return;
        }

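        // determine HDR support, then calculate and store the buffer size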
        useHDR = bufferSettings.allowHDR && camera.allowHDR;
        if (useScaledRendering)
        {
            renderScale  = Mathf.Clamp(renderScale, renderScaleMin, renderScaleMax);
            bufferSize.x = (int)(camera.pixelWidth * renderScale);
            bufferSize.y = (int)(camera.pixelHeight * renderScale);
        }
        else
        {
            bufferSize.x = camera.pixelWidth;
            bufferSize.y = camera.pixelHeight;
        }

        buffer.BeginSample(SampleName);
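        // pass the buffer size to the GPU so color and depth texture sampling uses the correct dimensions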
        buffer.SetGlobalVector(bufferSizeId, new Vector4(
                                   1f / bufferSize.x, 1f / bufferSize.y,
                                   bufferSize.x, bufferSize.y
                                   ));
        ExecuteBuffer();
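        // transfer light data to the GPU and set up shadow maps and shadow rendering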
        lighting.Setup(
            context, cullingResults, shadowSettings, useLightsPerObject,
            cameraSettings.maskLights ? cameraSettings.renderingLayerMask : -1
            );

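        // FXAA is enabled per camera, on top of the buffer settings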
        bufferSettings.fxaa.enabled &= cameraSettings.allowFXAA;
        postFXStack.Setup(
            context, camera, bufferSize, postFXSettings, cameraSettings.keepAlpha, useHDR,
            colorLUTResolution, cameraSettings.finalBlendMode,
            bufferSettings.bicubicRescaling, bufferSettings.fxaa
            );
        buffer.EndSample(SampleName);
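        // set up the render target for regular geometry rendering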
        Setup();
        DrawVisibleGeometry(
            useDynamicBatching, useGPUInstancing, useLightsPerObject,
            cameraSettings.renderingLayerMask
            );
        DrawUnsupportedShaders();
        DrawGizmosBeforeFX();
        if (postFXStack.IsActive)
        {
            postFXStack.Render(colorAttachmentId);
        }
        else if (useIntermediateBuffer)
        {
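            // copy the intermediate buffer to the final target,
            // otherwise nothing reaches the camera target since post FX is not active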
            DrawFinal(cameraSettings.finalBlendMode);
            ExecuteBuffer();
        }
        DrawGizmosAfterFX();
        Cleanup();
        Submit();
    }
Example #2
    public void Render(ScriptableRenderContext IN_context,
        Camera IN_camera, CameraBufferSettings cameraBufferSettings, bool useDynamicBatching,
        bool useGPUInstancing, bool useLightPerObject,
        ShadowSettings shadowSetting, PostFXSettings postFXSettings, int colorLUTResolution)
    {
        this.context = IN_context;
        this.camera = IN_camera;

        //set up custom camera settings,
        //e.g. per-camera blend and PostFX settings
        var crpCamera = camera.GetComponent<CustomRenderPipelineCamera>();
        CameraSettings cameraSettings = crpCamera ? crpCamera.Settings : defaultCameraSettings;

        //use color and depth textures so shaders can access the current buffer contents
        if (camera.cameraType == CameraType.Reflection)
        {
            useDepthTexture = cameraBufferSettings.copyDepthReflection;
            useColorTexture = cameraBufferSettings.copyColorReflection;
        }
        else 
        {
            useDepthTexture = cameraBufferSettings.copyDepth && cameraSettings.copyDepth;
            useColorTexture = cameraBufferSettings.copyColor && cameraSettings.copyColor;
        }

        if (cameraSettings.overridePostFX)
        {
            //override PostFX option for each cam
            postFXSettings = cameraSettings.postFXSettings;
        }

        //set the render scale; it must deviate a bit from 1 to take effect
        float renderScale = cameraSettings.GetRenderScale(cameraBufferSettings.renderScale);
        useScaledRendering = renderScale < 0.99f || renderScale > 1.01f;

        //change buffer name to the camera name
        PrepareBuffer();

        //add UI (world geometry) to the scene camera so we can see the UI in the editor view
        PrepareForSceneView();
        if (!Cull(shadowSetting.maxDistance))
        {
            return;
        }

        this.useHDR = cameraBufferSettings.allowHDR && camera.allowHDR;

        //calculate and store the buffer size
        if (useScaledRendering)
        {
            renderScale = Mathf.Clamp(renderScale, renderScaleMin, renderScaleMax);
            bufferSize.x = (int)(camera.pixelWidth * renderScale);
            bufferSize.y = (int)(camera.pixelHeight * renderScale);
        }
        else 
        {
            bufferSize.x = camera.pixelWidth;
            bufferSize.y = camera.pixelHeight;
        }

        buffer.BeginSample(SampleName);//Include lights and shadow rendering in main cam profile

        //pass the buffer size to the GPU so that when sampling the color and depth textures
        //we refer to the correct buffer size
        buffer.SetGlobalVector(bufferSizeId, new Vector4(
            1f / bufferSize.x, 1f / bufferSize.y, bufferSize.x, bufferSize.y
        ));


        ExecuteBuffer();
        //transfer directional light data to the GPU
        //set up the shadow render target and shadow rendering
        lighting.Setup(context, cullingResults, shadowSetting, useLightPerObject,
            cameraSettings.maskLights ? cameraSettings.RenderingLayerMask : -1);

        //FXAA is enabled per camera, on top of the buffer settings
        cameraBufferSettings.fxaa.enabled &= cameraSettings.allowFXAA;
        //setup postFX
        postFXStack.Setup(context, camera, 
            bufferSize, postFXSettings, cameraSettings.keepAlpha, useHDR, 
            colorLUTResolution, cameraSettings.finalBlendMode,
            cameraBufferSettings.bicubicResampling,
            cameraBufferSettings.fxaa);

        buffer.EndSample(SampleName);

        //set up the render target for normal object rendering
        Setup();
        DrawVisibleGeometry(useDynamicBatching, useGPUInstancing, useLightPerObject, cameraSettings.RenderingLayerMask);

        //this makes legacy shaders draw on top of transparent objects,
        //which looks odd, but they are not supported anyway~
        DrawUnsupportedShaders();

        DrawGizmosBeforeFX();

        if (postFXStack.IsActive)
        {
            postFXStack.Render(colorAttachmentId);
        }
        else if (useIntermediateBuffer)
        {
            //we need to copy the image from the intermediate buffer to the final target,
            //otherwise nothing reaches the camera target since post FX is not active
            //Draw(colorAttachmentId, BuiltinRenderTextureType.CameraTarget);
            DrawFinal(cameraSettings.finalBlendMode);
            ExecuteBuffer();
        }

        DrawGizmosAfterFX();

        Cleanup();

        //all actions are buffered; rendering only begins after Submit!
        Submit();
    }
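
Both examples implement the same per-camera Render method of a custom scriptable render pipeline. Below is a minimal sketch of how such a method is typically driven, assuming a pipeline class that owns a single CameraRenderer and forwards settings configured on its pipeline asset; the field names and setup shown here are illustrative, not taken from either example.

    using UnityEngine;
    using UnityEngine.Rendering;

    public class CustomRenderPipeline : RenderPipeline
    {
        CameraRenderer renderer = new CameraRenderer();

        // assumed to be filled in from the pipeline asset's inspector settings
        CameraBufferSettings cameraBufferSettings;
        bool useDynamicBatching, useGPUInstancing, useLightsPerObject;
        ShadowSettings shadowSettings;
        PostFXSettings postFXSettings;
        int colorLUTResolution;

        protected override void Render(ScriptableRenderContext context, Camera[] cameras)
        {
            // each camera is rendered independently with the shared settings
            for (int i = 0; i < cameras.Length; i++)
            {
                renderer.Render(
                    context, cameras[i], cameraBufferSettings,
                    useDynamicBatching, useGPUInstancing, useLightsPerObject,
                    shadowSettings, postFXSettings, colorLUTResolution
                );
            }
        }
    }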