Example #1
 public void Setup(RenderTargetIdentifier source, RenderTargetHandle destination, Downsampling downsampling)
 {
     this.source          = RTHandles.Alloc(source);
     this.destination     = RTHandles.Alloc(destination.Identifier());
     this.destinationID   = destination.id;
     m_DownsamplingMethod = downsampling;
 }
Example #2
        public void SaveCompressedClick()
        {
            Downsampling     d  = new Downsampling(this.model.getMainImage());
            DownsampleFormat df = d.Downsample(1);

            Utilities.SaveCompressed(df);
        }
Example #3
 /// <summary>
 /// Create the CopyColorPass
 /// </summary>
 public CopyColorPass(RenderPassEvent evt, Material samplingMaterial)
 {
     m_SamplingMaterial         = samplingMaterial;
     m_SampleOffsetShaderHandle = Shader.PropertyToID("_SampleOffset");
     renderPassEvent            = evt;
     m_DownsamplingMethod       = Downsampling.None;
 }
        public override void Execute(ScriptableRenderer renderer, ScriptableRenderContext context, ref RenderingData renderingData)
        {
            CommandBuffer cmd          = CommandBufferPool.Get(k_CopyColorTag);
            Downsampling  downsampling = renderingData.cameraData.opaqueTextureDownsampling;
            float         opaqueScaler = m_OpaqueScalerValues[(int)downsampling];

            RenderTextureDescriptor opaqueDesc    = ScriptableRenderer.CreateRenderTextureDescriptor(ref renderingData.cameraData, opaqueScaler);
            RenderTargetIdentifier  colorRT       = source.Identifier();
            RenderTargetIdentifier  opaqueColorRT = destination.Identifier();

            cmd.GetTemporaryRT(destination.id, opaqueDesc, renderingData.cameraData.opaqueTextureDownsampling == Downsampling.None ? FilterMode.Point : FilterMode.Bilinear);
            switch (downsampling)
            {
            case Downsampling.None:
                cmd.Blit(colorRT, opaqueColorRT);
                break;

            case Downsampling._2xBilinear:
                cmd.Blit(colorRT, opaqueColorRT);
                break;

            case Downsampling._4xBox:
                Material samplingMaterial = renderer.GetMaterial(MaterialHandles.Sampling);
                samplingMaterial.SetFloat(m_SampleOffsetShaderHandle, 2);
                cmd.Blit(colorRT, opaqueColorRT, samplingMaterial, 0);
                break;

            case Downsampling._4xBilinear:
                cmd.Blit(colorRT, opaqueColorRT);
                break;
            }

            context.ExecuteCommandBuffer(cmd);
            CommandBufferPool.Release(cmd);
        }
Example #5
        void CopyColorSubpass(ref ScriptableRenderContext context, ref CameraData cameraData)
        {
            CommandBuffer cmd          = CommandBufferPool.Get("Copy Opaque Color");
            Downsampling  downsampling = cameraData.opaqueTextureDownsampling;
            float         opaqueScaler = m_OpaqueScalerValues[(int)downsampling];

            RenderTextureDescriptor opaqueDesc    = renderer.CreateRTDesc(ref cameraData, opaqueScaler);
            RenderTargetIdentifier  colorRT       = GetSurface(colorAttachmentHandle);
            RenderTargetIdentifier  opaqueColorRT = GetSurface(RenderTargetHandles.OpaqueColor);

            cmd.GetTemporaryRT(RenderTargetHandles.OpaqueColor, opaqueDesc, cameraData.opaqueTextureDownsampling == Downsampling.None ? FilterMode.Point : FilterMode.Bilinear);
            switch (downsampling)
            {
            case Downsampling.None:
                cmd.Blit(colorRT, opaqueColorRT);
                break;

            case Downsampling._2xBilinear:
                cmd.Blit(colorRT, opaqueColorRT);
                break;

            case Downsampling._4xBox:
                m_SamplingMaterial.SetFloat(m_SampleOffsetShaderHandle, 2);
                cmd.Blit(colorRT, opaqueColorRT, m_SamplingMaterial, 0);
                break;

            case Downsampling._4xBilinear:
                cmd.Blit(colorRT, opaqueColorRT);
                break;
            }

            context.ExecuteCommandBuffer(cmd);
            CommandBufferPool.Release(cmd);
        }
        public static Bitmap DecompressHuffman(string imgSrc)
        {
            // start by opening the file and reading the bytes back in the same order they were written
            byte[]      bmpWidth;
            byte[]      bmpHeight;
            byte[]      stride;
            byte[]      downsampleChannels;
            byte[]      dictionarySize;
            byte[]      dictionary;
            byte[]      YDataLen;
            byte[]      CbDataLen;
            byte[]      CrDataLen;
            byte[]      imageData;
            HuffmanTree tree = new HuffmanTree();

            using (var reader = new BinaryReader(File.Open(imgSrc, FileMode.Open)))
            {
                bmpWidth           = reader.ReadBytes(4);
                bmpHeight          = reader.ReadBytes(4);
                stride             = reader.ReadBytes(4);
                downsampleChannels = reader.ReadBytes(4);

                YDataLen  = reader.ReadBytes(4);
                CbDataLen = reader.ReadBytes(4);
                CrDataLen = reader.ReadBytes(4);

                dictionarySize = reader.ReadBytes(4);

                int dictSize = BitConverter.ToInt32(dictionarySize, 0);

                dictionary = reader.ReadBytes(dictSize);
                tree.DeserializeDictionary(dictionary);

                List <byte> compressedData = new List <byte>();
                while (reader.BaseStream.Position != reader.BaseStream.Length)
                {
                    compressedData.Add(reader.ReadByte());
                }

                imageData = tree.Decode(new BitArray(compressedData.ToArray()));
            }

            int width                 = BitConverter.ToInt32(bmpWidth, 0);
            int height                = BitConverter.ToInt32(bmpHeight, 0);
            int strideInt             = BitConverter.ToInt32(stride, 0);
            int downsampleChannelsInt = BitConverter.ToInt32(downsampleChannels, 0);
            int yDataLen              = BitConverter.ToInt32(YDataLen, 0);
            int cbDataLen             = BitConverter.ToInt32(CbDataLen, 0);
            int crDataLen             = BitConverter.ToInt32(CrDataLen, 0);


            DownsampleFormat format = new DownsampleFormat(strideInt, width, height, yDataLen, cbDataLen, crDataLen, downsampleChannelsInt);

            format.data = imageData;
            Downsampling sampling = new Downsampling();

            return(sampling.RestoreBitmap(format));
        }
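
The following is a minimal sketch of the matching writer, reconstructed only from the read order above; the DownsampleFormat properties and the HuffmanTree.Build/SerializeDictionary/Encode calls are assumed counterparts of the constructor arguments and DeserializeDictionary/Decode used by the reader, and the actual Utilities.SaveCompressed implementation is not shown in these results.

        public static void CompressHuffman(string imgDst, DownsampleFormat format)
        {
            HuffmanTree tree = new HuffmanTree();
            tree.Build(format.data);                           // assumed: build codes from the raw channel bytes
            byte[] dictionary = tree.SerializeDictionary();    // assumed counterpart of DeserializeDictionary
            byte[] compressed = tree.Encode(format.data);      // assumed counterpart of Decode

            using (var writer = new BinaryWriter(File.Open(imgDst, FileMode.Create)))
            {
                // 4-byte little-endian ints, in the exact order DecompressHuffman reads them
                writer.Write(format.Width);
                writer.Write(format.Height);
                writer.Write(format.Stride);
                writer.Write(format.DownsampleChannels);

                writer.Write(format.YDataLen);
                writer.Write(format.CbDataLen);
                writer.Write(format.CrDataLen);

                writer.Write(dictionary.Length);
                writer.Write(dictionary);

                // the remainder of the file is the Huffman bit stream
                writer.Write(compressed);
            }
        }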
Example #7
    public override void Create()
    {
        m_SamplingMaterial = CoreUtils.CreateEngineMaterial(Shader.Find("Hidden/Lightweight Render Pipeline/Sampling"));

        if (LightweightRenderPipeline.asset != null)
        {
            m_DownsamplingMethod = LightweightRenderPipeline.asset.opaqueDownsampling;
        }
    }
Example #8
        /// <summary>
        /// Create the CopyColorPass
        /// </summary>
        public CopyColorPass(RenderPassEvent evt, Material samplingMaterial, Material copyColorMaterial = null)
        {
            base.profilingSampler = new ProfilingSampler(nameof(CopyColorPass));

            m_SamplingMaterial         = samplingMaterial;
            m_CopyColorMaterial        = copyColorMaterial;
            m_SampleOffsetShaderHandle = Shader.PropertyToID("_SampleOffset");
            renderPassEvent            = evt;
            m_DownsamplingMethod       = Downsampling.None;
        }
Example #9
        public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData)
        {
            m_CopyDepthPass.Setup(m_CameraDepthAttachment, m_DepthTexture);
            renderer.EnqueuePass(m_CopyDepthPass);

            Downsampling downsamplingMethod = UniversalRenderPipeline.asset.opaqueDownsampling;

            m_CopyColorPass.Setup(m_CameraColorAttachment.Identifier(), m_ColorTexture, downsamplingMethod);
            renderer.EnqueuePass(m_CopyColorPass);
        }
        public override void AddRenderPasses(ScriptableRenderer renderer,
                                             ref RenderingData renderingData)
        {
            int     numFrustumsX, numFrustumsY;
            Vector2 screenSizeRatio;

            SetupScreenReferenceInfo(ref renderingData, out numFrustumsX, out numFrustumsY,
                                     out screenSizeRatio);

            if (m_precomputeFrustumsPass != null)
            {
                m_precomputeFrustumsPass.Setup(ref screenSizeRatio, m_frustumTileSize,
                                               m_inverseProjectionMatrixFloats, numFrustumsX, numFrustumsY);
                renderer.EnqueuePass(m_precomputeFrustumsPass);
            }

            if (renderingData.cameraData.cameraType == CameraType.Game)
            {
                renderer.EnqueuePass(m_depthOnlyPass);
            }

            if (m_copyDepthPass != null)
            {
                m_copyDepthPass.Setup(renderer.cameraDepth);
                renderer.EnqueuePass(m_copyDepthPass);
            }

            if (m_tileLightCullingPass != null)
            {
                m_tileLightCullingPass.Setup(
                    m_precomputeFrustumsPass.GetFrustumsBuffer(),
                    m_inverseProjectionMatrixFloats,
                    ref screenSizeRatio, m_frustumTileSize, numFrustumsX,
                    numFrustumsY);
                renderer.EnqueuePass(m_tileLightCullingPass);
            }

            if (m_copyColorPass != null)
            {
                Downsampling downsamplingMethod =
                    UniversalRenderPipeline.asset.opaqueDownsampling;
                m_copyColorPass.Setup(renderer.cameraColorTarget,
                                      m_backgroundRT, downsamplingMethod);
                renderer.EnqueuePass(m_copyColorPass);
            }

            if ((m_showLightGridsPass == null) || m_showTileLightGridRatio <= 0.0f)
            {
                return;
            }
            m_showDebugTileLightGridMaterial.SetColor("_GridColor", m_tileLightGridColor);
            m_showDebugTileLightGridMaterial.SetFloat("_Show", m_showTileLightGridRatio);
            m_showLightGridsPass.Setup(renderer.cameraColorTarget);
            renderer.EnqueuePass(m_showLightGridsPass);
        }
Example #11
        public ForwardRenderer(ForwardRendererData data) : base(data)
        {
            Downsampling downsamplingMethod = LightweightRenderPipeline.asset.opaqueDownsampling;

            Material blitMaterial               = CoreUtils.CreateEngineMaterial(data.blitShader);
            Material copyDepthMaterial          = CoreUtils.CreateEngineMaterial(data.copyDepthShader);
            Material samplingMaterial           = CoreUtils.CreateEngineMaterial(data.samplingShader);
            Material screenspaceShadowsMaterial = CoreUtils.CreateEngineMaterial(data.screenSpaceShadowShader);
            Material ssDownSampleMaterial       = CoreUtils.CreateEngineMaterial("Hidden/Lightweight Render Pipeline/SSDownSample");

            // Note: Since all custom render passes inject first and we have stable sort,
            // we inject the builtin passes in the before events.
            m_MainLightShadowCasterPass        = new MainLightShadowCasterPass(RenderPassEvent.BeforeRenderingShadows);
            m_AdditionalLightsShadowCasterPass = new AdditionalLightsShadowCasterPass(RenderPassEvent.BeforeRenderingShadows);
            m_DepthPrepass = new DepthOnlyPass(RenderPassEvent.BeforeRenderingPrepasses, RenderQueueRange.all);
            m_ScreenSpaceShadowResolvePass = new ScreenSpaceShadowResolvePass(RenderPassEvent.BeforeRenderingPrepasses, screenspaceShadowsMaterial);
            m_SSSDownsamplePass            = new SSShadowDownSamplePass(RenderPassEvent.BeforeRenderingPrepasses, ssDownSampleMaterial);

            m_RenderOpaqueForwardPass = new RenderOpaqueForwardPass(RenderPassEvent.BeforeRenderingOpaques, RenderQueueRange.opaque, data.opaqueLayerMask);

            m_CopyDepthPass         = new CopyDepthPass(RenderPassEvent.BeforeRenderingOpaques, copyDepthMaterial);
            m_OpaquePostProcessPass = new PostProcessPass(RenderPassEvent.BeforeRenderingOpaques, true);
            m_DrawSkyboxPass        = new DrawSkyboxPass(RenderPassEvent.BeforeRenderingSkybox);
            m_CopyColorPass         = new CopyColorPass(RenderPassEvent.BeforeRenderingTransparents, samplingMaterial, downsamplingMethod);
            //m_RenderOpaqueDiscardAndBlendPass = new RenderOpaqueDiscardAndBlendPass(RenderPassEvent.BeforeRenderingTransparents , new RenderQueueRange(2350,2550), data.opaqueLayerMask);

            m_RenderTransparentForwardPass = new RenderTransparentForwardPass(RenderPassEvent.BeforeRenderingTransparents, RenderQueueRange.transparent, data.transparentLayerMask);

            //m_RenderObjectsPass = new RenderObjectsPass(RenderPassEvent.AfterRenderingTransparents, RenderQueueRange.all);

            m_PostProcessPass = new PostProcessPass(RenderPassEvent.BeforeRenderingPostProcessing);
            m_UICameraPass    = new UICameraPass(RenderPassEvent.AfterRendering, RenderQueueRange.transparent, data.transparentLayerMask);
            m_CapturePass     = new CapturePass(RenderPassEvent.AfterRendering);
            m_FinalBlitPass   = new FinalBlitPass(RenderPassEvent.AfterRendering, blitMaterial);

#if UNITY_EDITOR
            m_SceneViewDepthCopyPass = new SceneViewDepthCopyPass(RenderPassEvent.AfterRendering + 9, copyDepthMaterial);
#endif

            // RenderTexture format depends on camera and pipeline (HDR, non HDR, etc)
            // Samples (MSAA) depend on camera and pipeline
            m_CameraColorAttachment.Init("_CameraColorTexture");
            m_CameraDepthAttachment.Init("_CameraDepthAttachment");
            m_DepthTexture.Init("_CameraDepthTexture");
            m_OpaqueColor.Init("_CameraOpaqueTexture");

            //m_CameraNormalAttachment.Init("_CameraNormalAttachment");

            m_ForwardLights = new ForwardLights();
        }
        /// <inheritdoc/>
        public override void Execute(ScriptableRenderer renderer, ScriptableRenderContext context, ref RenderingData renderingData)
        {
            if (m_SamplingMaterial == null)
            {
                Debug.LogErrorFormat("Missing {0}. {1} render pass will not execute. Check for missing reference in the renderer resources.", m_SamplingMaterial, GetType().Name);
                return;
            }

            if (renderer == null)
            {
                throw new ArgumentNullException("renderer");
            }

            CommandBuffer cmd          = CommandBufferPool.Get(k_CopyColorTag);
            Downsampling  downsampling = renderingData.cameraData.opaqueTextureDownsampling;
            float         opaqueScaler = m_OpaqueScalerValues[(int)downsampling];

            RenderTextureDescriptor opaqueDesc = ScriptableRenderer.CreateRenderTextureDescriptor(ref renderingData.cameraData, opaqueScaler);

            opaqueDesc.msaaSamples     = 1;
            opaqueDesc.depthBufferBits = 0;

            RenderTargetIdentifier colorRT       = source.Identifier();
            RenderTargetIdentifier opaqueColorRT = destination.Identifier();

            cmd.GetTemporaryRT(destination.id, opaqueDesc, renderingData.cameraData.opaqueTextureDownsampling == Downsampling.None ? FilterMode.Point : FilterMode.Bilinear);
            switch (downsampling)
            {
            case Downsampling.None:
                cmd.Blit(colorRT, opaqueColorRT);
                break;

            case Downsampling._2xBilinear:
                cmd.Blit(colorRT, opaqueColorRT);
                break;

            case Downsampling._4xBox:
                m_SamplingMaterial.SetFloat(m_SampleOffsetShaderHandle, 2);
                cmd.Blit(colorRT, opaqueColorRT, m_SamplingMaterial, 0);
                break;

            case Downsampling._4xBilinear:
                cmd.Blit(colorRT, opaqueColorRT);
                break;
            }

            context.ExecuteCommandBuffer(cmd);
            CommandBufferPool.Release(cmd);
        }
        public ForwardRenderer(ForwardRendererData data) : base(data)
        {
            Downsampling downsamplingMethod = LightweightRenderPipeline.asset.opaqueDownsampling;

            Material blitMaterial               = CoreUtils.CreateEngineMaterial(data.shaders.blitPS);
            Material copyDepthMaterial          = CoreUtils.CreateEngineMaterial(data.shaders.copyDepthPS);
            Material samplingMaterial           = CoreUtils.CreateEngineMaterial(data.shaders.samplingPS);
            Material screenspaceShadowsMaterial = CoreUtils.CreateEngineMaterial(data.shaders.screenSpaceShadowPS);

            StencilStateData stencilData = data.defaultStencilState;

            m_DefaultStencilState         = StencilState.defaultValue;
            m_DefaultStencilState.enabled = stencilData.overrideStencilState;
            m_DefaultStencilState.SetCompareFunction(stencilData.stencilCompareFunction);
            m_DefaultStencilState.SetPassOperation(stencilData.passOperation);
            m_DefaultStencilState.SetFailOperation(stencilData.failOperation);
            m_DefaultStencilState.SetZFailOperation(stencilData.zFailOperation);

            // Note: Since all custom render passes inject first and we have stable sort,
            // we inject the builtin passes in the before events.
            m_MainLightShadowCasterPass        = new MainLightShadowCasterPass(RenderPassEvent.BeforeRenderingShadows);
            m_AdditionalLightsShadowCasterPass = new AdditionalLightsShadowCasterPass(RenderPassEvent.BeforeRenderingShadows);
            m_DepthPrepass = new DepthOnlyPass(RenderPassEvent.BeforeRenderingPrepasses, RenderQueueRange.opaque, data.opaqueLayerMask);
            m_ScreenSpaceShadowResolvePass = new ScreenSpaceShadowResolvePass(RenderPassEvent.BeforeRenderingPrepasses, screenspaceShadowsMaterial);
            m_ScreenSpaceShadowComputePass = new ScreenSpaceShadowComputePass(RenderPassEvent.BeforeRenderingPrepasses, data.shaders.screenSpaceShadowCS); //seongdae;vxsm
            m_RenderOpaqueForwardPass      = new DrawObjectsPass("Render Opaques", true, RenderPassEvent.BeforeRenderingOpaques, RenderQueueRange.opaque, data.opaqueLayerMask, m_DefaultStencilState, stencilData.stencilReference);
            m_CopyDepthPass                = new CopyDepthPass(RenderPassEvent.BeforeRenderingOpaques, copyDepthMaterial);
            m_OpaquePostProcessPass        = new PostProcessPass(RenderPassEvent.BeforeRenderingOpaques, true);
            m_DrawSkyboxPass               = new DrawSkyboxPass(RenderPassEvent.BeforeRenderingSkybox);
            m_CopyColorPass                = new CopyColorPass(RenderPassEvent.BeforeRenderingTransparents, samplingMaterial, downsamplingMethod);
            m_RenderTransparentForwardPass = new DrawObjectsPass("Render Transparents", false, RenderPassEvent.BeforeRenderingTransparents, RenderQueueRange.transparent, data.transparentLayerMask, m_DefaultStencilState, stencilData.stencilReference);
            m_PostProcessPass              = new PostProcessPass(RenderPassEvent.BeforeRenderingPostProcessing);
            m_CapturePass   = new CapturePass(RenderPassEvent.AfterRendering);
            m_FinalBlitPass = new FinalBlitPass(RenderPassEvent.AfterRendering, blitMaterial);

            #if UNITY_EDITOR
            m_SceneViewDepthCopyPass = new SceneViewDepthCopyPass(RenderPassEvent.AfterRendering + 9, copyDepthMaterial);
            #endif

            // RenderTexture format depends on camera and pipeline (HDR, non HDR, etc)
            // Samples (MSAA) depend on camera and pipeline
            m_CameraColorAttachment.Init("_CameraColorTexture");
            m_CameraDepthAttachment.Init("_CameraDepthAttachment");
            m_DepthTexture.Init("_CameraDepthTexture");
            m_OpaqueColor.Init("_CameraOpaqueTexture");
            m_ForwardLights = new ForwardLights();
        }
        /// <inheritdoc/>
        public override void Execute(ScriptableRenderer renderer, ScriptableRenderContext context, ref RenderingData renderingData)
        {
            if (renderer == null)
            {
                throw new ArgumentNullException("renderer");
            }

            CommandBuffer cmd          = CommandBufferPool.Get(k_Tag);
            Downsampling  downsampling = renderingData.cameraData.opaqueTextureDownsampling;
            float         opaqueScaler = m_OpaqueScalerValues[(int)downsampling];

            RenderTextureDescriptor opaqueDesc    = ScriptableRenderer.CreateRenderTextureDescriptor(ref renderingData.cameraData, opaqueScaler);
            RenderTargetIdentifier  colorRT       = source.Identifier();
            RenderTargetIdentifier  opaqueColorRT = destination.Identifier();

            cmd.GetTemporaryRT(destination.id, opaqueDesc, renderingData.cameraData.opaqueTextureDownsampling == Downsampling.None ? FilterMode.Point : FilterMode.Bilinear);
            switch (downsampling)
            {
            case Downsampling.None:
                cmd.Blit(colorRT, opaqueColorRT);
                break;

            case Downsampling._2xBilinear:
                cmd.Blit(colorRT, opaqueColorRT);
                break;

            case Downsampling._4xBox:
                Material samplingMaterial = renderer.GetMaterial(MaterialHandle.Sampling);
                samplingMaterial.SetFloat(m_SampleOffsetShaderHandle, 2);
                cmd.Blit(colorRT, opaqueColorRT, samplingMaterial, 0);
                break;

            case Downsampling._4xBilinear:
                cmd.Blit(colorRT, opaqueColorRT);
                break;
            }

            // restore the render target
            RenderBufferLoadAction  loadOp  = RenderBufferLoadAction.Load;
            RenderBufferStoreAction storeOp = RenderBufferStoreAction.Store;

            SetRenderTarget(cmd, source.Identifier(), loadOp, storeOp,
                            depth.Identifier(), loadOp, storeOp, ClearFlag.None, Color.black, baseDescriptor.dimension);

            context.ExecuteCommandBuffer(cmd);
            CommandBufferPool.Release(cmd);
        }
Example #15
        /// <summary>
        /// Get a descriptor and filter mode for the required texture for this pass
        /// </summary>
        /// <param name="downsamplingMethod"></param>
        /// <param name="descriptor"></param>
        /// <param name="filterMode"></param>
        /// <seealso cref="Downsampling"/>
        /// <seealso cref="RenderTextureDescriptor"/>
        /// <seealso cref="FilterMode"/>
        public static void ConfigureDescriptor(Downsampling downsamplingMethod, ref RenderTextureDescriptor descriptor, out FilterMode filterMode)
        {
            descriptor.msaaSamples     = 1;
            descriptor.depthBufferBits = 0;
            if (downsamplingMethod == Downsampling._2xBilinear)
            {
                descriptor.width  /= 2;
                descriptor.height /= 2;
            }
            else if (downsamplingMethod == Downsampling._4xBox || downsamplingMethod == Downsampling._4xBilinear)
            {
                descriptor.width  /= 4;
                descriptor.height /= 4;
            }

            filterMode = downsamplingMethod == Downsampling.None ? FilterMode.Point : FilterMode.Bilinear;
        }
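
For context, a minimal usage sketch that pairs this helper with the temporary-RT allocation seen in the Execute methods above; the cmd, destination, and m_DownsamplingMethod names are assumptions about the surrounding pass, not part of this snippet.

            // Hypothetical call site: size the opaque copy from the camera descriptor,
            // then allocate the temporary RT with the filter mode the helper selected.
            RenderTextureDescriptor desc = renderingData.cameraData.cameraTargetDescriptor;
            CopyColorPass.ConfigureDescriptor(m_DownsamplingMethod, ref desc, out FilterMode filterMode);
            cmd.GetTemporaryRT(destination.id, desc, filterMode);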
Example #16
    public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData)
    {
        Downsampling downSamplingMethod = UniversalRenderPipeline.asset.opaqueDownsampling;

        var cameraColorTarget = renderer.cameraColorTarget;
        var clearRenderPass   = new ClearColorPass(RenderPassEvent.BeforeRenderingOpaques, cameraColorTarget);

        var copyBeforeOpaquePass = new CopyColorPass(RenderPassEvent.BeforeRenderingOpaques, m_SamplingMaterial);

        copyBeforeOpaquePass.Setup(cameraColorTarget, beforeAll, downSamplingMethod);

        var copyAfterOpaquePass = new CopyColorPass(RenderPassEvent.AfterRenderingOpaques, m_SamplingMaterial);

        copyAfterOpaquePass.Setup(cameraColorTarget, afterOpaque, downSamplingMethod);

        var copyAfterSkyboxPass = new CopyColorPass(RenderPassEvent.AfterRenderingSkybox, m_SamplingMaterial);

        copyAfterSkyboxPass.Setup(cameraColorTarget, afterSkybox, downSamplingMethod);

        var copyAfterSkyboxPass2 = new CopyColorPass(RenderPassEvent.AfterRenderingSkybox, m_SamplingMaterial);

        copyAfterSkyboxPass2.Setup(cameraColorTarget, afterSkybox2, downSamplingMethod);

        var copyAfterTransparents = new CopyColorPass(RenderPassEvent.AfterRenderingTransparents, m_SamplingMaterial);

        copyAfterTransparents.Setup(cameraColorTarget, afterTransparent, downSamplingMethod);

        var copyAfterEverything = new CopyColorPass(RenderPassEvent.AfterRenderingPostProcessing, m_SamplingMaterial);

        copyAfterEverything.Setup(afterPost.id, afterAll, downSamplingMethod);

        if (m_BlitRenderPassesToScreen == null)
        {
            m_BlitRenderPassesToScreen = new BlitPass(RenderPassEvent.AfterRendering, cameraColorTarget);
        }

        // Insert out of order so we also test that render passes sort correctly
        renderer.EnqueuePass(copyAfterEverything);
        renderer.EnqueuePass(m_BlitRenderPassesToScreen);
        renderer.EnqueuePass(copyAfterOpaquePass);
        renderer.EnqueuePass(copyAfterSkyboxPass);
        renderer.EnqueuePass(copyAfterSkyboxPass2);
        renderer.EnqueuePass(copyAfterTransparents);
        renderer.EnqueuePass(clearRenderPass);
        renderer.EnqueuePass(copyBeforeOpaquePass);
    }
Example #17
        public void DownsampleClick()
        {
            this.channelsOn = true;
            this.view.MakeChannelsVisible();

            Downsampling     down = new Downsampling(this.model.getMainImage());
            DownsampleFormat df1  = down.Downsample(1);
            DownsampleFormat df2  = down.Downsample(2);
            DownsampleFormat df3  = down.Downsample(2);

            //Compression.DownsamplingNew.Downsampling down = new Compression.DownsamplingNew.Downsampling(this.model.getMainImage());
            //Compression.DownsamplingNew.DownsampleFormat df1 = down.DownsampleImage("YCb");
            //Compression.DownsamplingNew.DownsampleFormat df2 = down.DownsampleImage("CbCr");
            //Compression.DownsamplingNew.DownsampleFormat df3 = down.DownsampleImage("YCr");

            this.model.setYChannel(down.RestoreBitmap(df1));
            this.model.setCbChannel(down.RestoreBitmap(df2));
            this.model.setCrChannel(down.RestoreBitmap(df3));

            this.view.ShowImages(model.getMainImage(), model.getYChannel(), model.getCbChannel(), model.getCrChannel(), model.getImageName());
        }
        public override void Setup(ScriptableRenderContext context, ref RenderingData renderingData)
        {
            Camera camera = renderingData.cameraData.camera;
            RenderTextureDescriptor cameraTargetDescriptor = renderingData.cameraData.cameraTargetDescriptor;

            EnqueuePass(m_VolumeBlendingPass);

            // Special path for depth only offscreen cameras. Only write opaques + transparents.
            bool isOffscreenDepthTexture = camera.targetTexture != null && camera.targetTexture.format == RenderTextureFormat.Depth;

            if (isOffscreenDepthTexture)
            {
                ConfigureCameraTarget(BuiltinRenderTextureType.CameraTarget, BuiltinRenderTextureType.CameraTarget);

                for (int i = 0; i < rendererFeatures.Count; ++i)
                {
                    rendererFeatures[i].AddRenderPasses(this, ref renderingData);
                }

                EnqueuePass(m_RenderOpaqueForwardPass);
                EnqueuePass(m_DrawSkyboxPass);
                EnqueuePass(m_RenderTransparentForwardPass);
                return;
            }

            bool mainLightShadows            = m_MainLightShadowCasterPass.Setup(ref renderingData);
            bool additionalLightShadows      = m_AdditionalLightsShadowCasterPass.Setup(ref renderingData);
            bool resolveShadowsInScreenSpace = mainLightShadows && renderingData.shadowData.requiresScreenSpaceShadowResolve;

            // Depth prepass is generated in the following cases:
            // - We resolve shadows in screen space
            // - Scene view camera always requires a depth texture. We do a depth pre-pass to simplify it and it shouldn't matter much for editor.
            // - If game or offscreen camera requires it we check if we can copy the depth from the rendering opaques pass and use that instead.
            bool requiresDepthPrepass = renderingData.cameraData.isSceneViewCamera ||
                                        (renderingData.cameraData.requiresDepthTexture && (!CanCopyDepth(ref renderingData.cameraData)));

            requiresDepthPrepass |= resolveShadowsInScreenSpace;

            // TODO: There's an issue in multiview and depth copy pass. Atm forcing a depth prepass on XR until
            // we have a proper fix.
            if (renderingData.cameraData.isStereoEnabled && renderingData.cameraData.requiresDepthTexture)
            {
                requiresDepthPrepass = true;
            }

            bool createColorTexture = RequiresIntermediateColorTexture(ref renderingData, cameraTargetDescriptor) ||
                                      rendererFeatures.Count != 0;

            // If camera requires depth and there's no depth pre-pass we create a depth texture that can be read
            // later by effects that require it.
            bool createDepthTexture = renderingData.cameraData.requiresDepthTexture && !requiresDepthPrepass;
            bool postProcessEnabled = renderingData.cameraData.postProcessEnabled;

            m_ActiveCameraColorAttachment = (createColorTexture) ? m_CameraColorAttachment : RenderTargetHandle.CameraTarget;
            m_ActiveCameraDepthAttachment = (createDepthTexture) ? m_CameraDepthAttachment : RenderTargetHandle.CameraTarget;
            bool intermediateRenderTexture = createColorTexture || createDepthTexture;

            if (intermediateRenderTexture)
            {
                CreateCameraRenderTarget(context, ref renderingData.cameraData);
            }

            ConfigureCameraTarget(m_ActiveCameraColorAttachment.Identifier(), m_ActiveCameraDepthAttachment.Identifier());

            // if rendering to intermediate render texture we don't have to create msaa backbuffer
            int backbufferMsaaSamples = (intermediateRenderTexture) ? 1 : cameraTargetDescriptor.msaaSamples;

            if (Camera.main == camera && camera.cameraType == CameraType.Game && camera.targetTexture == null)
            {
                SetupBackbufferFormat(backbufferMsaaSamples, renderingData.cameraData.isStereoEnabled);
            }

            for (int i = 0; i < rendererFeatures.Count; ++i)
            {
                rendererFeatures[i].AddRenderPasses(this, ref renderingData);
            }

            int count = activeRenderPassQueue.Count;

            for (int i = count - 1; i >= 0; i--)
            {
                if (activeRenderPassQueue[i] == null)
                {
                    activeRenderPassQueue.RemoveAt(i);
                }
            }
            bool hasAfterRendering = activeRenderPassQueue.Find(x => x.renderPassEvent == RenderPassEvent.AfterRendering) != null;

            if (mainLightShadows)
            {
                EnqueuePass(m_MainLightShadowCasterPass);
            }

            if (additionalLightShadows)
            {
                EnqueuePass(m_AdditionalLightsShadowCasterPass);
            }

            if (requiresDepthPrepass)
            {
                m_DepthPrepass.Setup(cameraTargetDescriptor, m_DepthTexture);
                EnqueuePass(m_DepthPrepass);
            }

            if (resolveShadowsInScreenSpace)
            {
                m_ScreenSpaceShadowResolvePass.Setup(cameraTargetDescriptor);
                EnqueuePass(m_ScreenSpaceShadowResolvePass);
            }

            if (postProcessEnabled)
            {
                m_ColorGradingLutPass.Setup(m_ColorGradingLut);
                EnqueuePass(m_ColorGradingLutPass);
            }

            EnqueuePass(m_RenderOpaqueForwardPass);

            if (camera.clearFlags == CameraClearFlags.Skybox && RenderSettings.skybox != null)
            {
                EnqueuePass(m_DrawSkyboxPass);
            }

            // If a depth texture was created we necessarily need to copy it; otherwise we could have rendered it to a renderbuffer
            if (createDepthTexture)
            {
                m_CopyDepthPass.Setup(m_ActiveCameraDepthAttachment, m_DepthTexture);
                EnqueuePass(m_CopyDepthPass);
            }

            if (renderingData.cameraData.requiresOpaqueTexture)
            {
                // TODO: The downsampling method should be stored in the renderer instead of in the asset.
                // We need to migrate this data to the renderer. For now, we query the method in the active asset.
                Downsampling downsamplingMethod = UniversalRenderPipeline.asset.opaqueDownsampling;
                m_CopyColorPass.Setup(m_ActiveCameraColorAttachment.Identifier(), m_OpaqueColor, downsamplingMethod);
                EnqueuePass(m_CopyColorPass);
            }

            EnqueuePass(m_RenderTransparentForwardPass);

            bool afterRenderExists = renderingData.cameraData.captureActions != null ||
                                     hasAfterRendering;

            bool requiresFinalPostProcessPass = postProcessEnabled &&
                                                renderingData.cameraData.antialiasing == AntialiasingMode.FastApproximateAntialiasing;

            // if we have additional filters
            // we need to stay in a RT
            if (afterRenderExists)
            {
                // perform post with src / dest the same
                if (postProcessEnabled)
                {
                    m_PostProcessPass.Setup(cameraTargetDescriptor, m_ActiveCameraColorAttachment, m_AfterPostProcessColor, m_ActiveCameraDepthAttachment, m_ColorGradingLut);
                    EnqueuePass(m_PostProcessPass);
                }

                //now blit into the final target
                if (m_ActiveCameraColorAttachment != RenderTargetHandle.CameraTarget)
                {
                    if (renderingData.cameraData.captureActions != null)
                    {
                        m_CapturePass.Setup(m_ActiveCameraColorAttachment);
                        EnqueuePass(m_CapturePass);
                    }

                    if (requiresFinalPostProcessPass)
                    {
                        m_FinalPostProcessPass.SetupFinalPass(m_ActiveCameraColorAttachment);
                        EnqueuePass(m_FinalPostProcessPass);
                    }
                    else
                    {
                        m_FinalBlitPass.Setup(cameraTargetDescriptor, m_ActiveCameraColorAttachment);
                        EnqueuePass(m_FinalBlitPass);
                    }
                }
            }
            else
            {
                if (postProcessEnabled)
                {
                    if (requiresFinalPostProcessPass)
                    {
                        m_PostProcessPass.Setup(cameraTargetDescriptor, m_ActiveCameraColorAttachment, m_AfterPostProcessColor, m_ActiveCameraDepthAttachment, m_ColorGradingLut);
                        EnqueuePass(m_PostProcessPass);
                        m_FinalPostProcessPass.SetupFinalPass(m_AfterPostProcessColor);
                        EnqueuePass(m_FinalPostProcessPass);
                    }
                    else
                    {
                        m_PostProcessPass.Setup(cameraTargetDescriptor, m_ActiveCameraColorAttachment, RenderTargetHandle.CameraTarget, m_ActiveCameraDepthAttachment, m_ColorGradingLut);
                        EnqueuePass(m_PostProcessPass);
                    }
                }
                else if (m_ActiveCameraColorAttachment != RenderTargetHandle.CameraTarget)
                {
                    m_FinalBlitPass.Setup(cameraTargetDescriptor, m_ActiveCameraColorAttachment);
                    EnqueuePass(m_FinalBlitPass);
                }
            }

#if UNITY_EDITOR
            if (renderingData.cameraData.isSceneViewCamera)
            {
                m_SceneViewDepthCopyPass.Setup(m_DepthTexture);
                EnqueuePass(m_SceneViewDepthCopyPass);
            }
#endif
        }
Example #19
 /// <summary>
 /// Configure the pass with the source and destination to execute on.
 /// </summary>
 /// <param name="source">Source Render Target</param>
 /// <param name="destination">Destination Render Target</param>
 public void Setup(RenderTargetIdentifier source, RenderTargetHandle destination, Downsampling downsampling)
 {
     this.source          = source;
     this.destination     = destination;
     m_DownsamplingMethod = downsampling;
 }
Example #20
 /// <summary>
 /// Configure the pass with the source and destination to execute on.
 /// </summary>
 /// <param name="source">Source Render Target</param>
 /// <param name="destination">Destination Render Target</param>
 public void Setup(RTHandle source, RTHandle destination, Downsampling downsampling)
 {
     this.source          = source;
     this.destination     = destination;
     m_DownsamplingMethod = downsampling;
 }
Example #21
 /* ----------------------------------------------------------------- */
 ///
 /// GetArgument
 ///
 /// <summary>
 /// Gets an Argument object that represents the specified Downsampling value.
 /// </summary>
 ///
 /// <param name="src">Downsampling</param>
 /// <param name="name">Name</param>
 ///
 /// <returns>Argument object</returns>
 ///
 /* ----------------------------------------------------------------- */
 public static Argument GetArgument(this Downsampling src, string name) =>
 src != Downsampling.None ?
 new Argument(name, src.ToString()) :
 null;
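
A possible call site, sketched under the assumption that the enum exposes a member such as Bicubic and that the caller collects Argument objects in a list; both names are illustrative rather than taken from these results.

 // Hypothetical usage: Downsampling.None yields null, so no argument is added
 // when downsampling is disabled; the option name is illustrative.
 var args = new List<Argument>();
 var arg  = Downsampling.Bicubic.GetArgument("ColorImageDownsampleType");
 if (arg != null) args.Add(arg);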
Example #22
        public static void CreateCameraSortingLayerRenderTexture(this IRenderPass2D pass, RenderingData renderingData, CommandBuffer cmd, Downsampling downsamplingMethod)
        {
            var renderTextureScale = 1.0f;

            if (downsamplingMethod == Downsampling._2xBilinear)
            {
                renderTextureScale = 0.5f;
            }
            else if (downsamplingMethod == Downsampling._4xBox || downsamplingMethod == Downsampling._4xBilinear)
            {
                renderTextureScale = 0.25f;
            }

            var width  = (int)(renderingData.cameraData.cameraTargetDescriptor.width * renderTextureScale);
            var height = (int)(renderingData.cameraData.cameraTargetDescriptor.height * renderTextureScale);

            var descriptor = new RenderTextureDescriptor(width, height);

            descriptor.graphicsFormat   = renderingData.cameraData.cameraTargetDescriptor.graphicsFormat;
            descriptor.useMipMap        = false;
            descriptor.autoGenerateMips = false;
            descriptor.depthBufferBits  = 0;
            descriptor.msaaSamples      = 1;
            descriptor.dimension        = TextureDimension.Tex2D;

            cmd.GetTemporaryRT(pass.rendererData.cameraSortingLayerRenderTarget.id, descriptor, FilterMode.Bilinear);
        }