/// <summary>
/// Renders every realtime planar reflection probe registered for per-camera updates,
/// using <paramref name="viewerCamera"/> as the viewer the reflection is computed for.
/// </summary>
/// <param name="probeType">Bitmask of probe types to process; only PlanarReflection is handled here.</param>
/// <param name="viewerCamera">The camera the planar reflections are rendered for.</param>
public void RenderAllRealtimeProbesFor(ReflectionProbeType probeType, Camera viewerCamera)
{
    if ((probeType & ReflectionProbeType.PlanarReflection) == 0)
        return;

    // Copy at most WorkArray.Length probes out of the set; any extras are discarded.
    var length = Mathf.Min(m_PlanarReflectionProbe_PerCamera_RealtimeUpdate.Count, m_PlanarReflectionProbe_RealtimeUpdate_WorkArray.Length);
    var index = 0;
    foreach (var p in m_PlanarReflectionProbe_PerCamera_RealtimeUpdate)
    {
        m_PlanarReflectionProbe_RealtimeUpdate_WorkArray[index] = p;
        if (++index >= length)
            break;
    }
#if DEBUG
    var discarded = m_PlanarReflectionProbe_PerCamera_RealtimeUpdate.Count - length;
    if (discarded > 0)
        Debug.LogWarningFormat("There are more planar probes than supported in a single rendering, {0} probes were discarded", discarded);
#endif

    // 1. Allocate the target textures if necessary.
    var renderCamera = GetRenderCamera();
    // The HDCamera lookup/creation is loop-invariant (renderCamera never changes),
    // so acquire it once instead of on every probe iteration.
    var hdCamera = HDCamera.Get(renderCamera);
    if (hdCamera == null)
    {
        // Warning: this is a bad design pattern.
        // An individual system should not create an HDCamera (which is a shared resource).
        hdCamera = HDCamera.Create(renderCamera, null);
    }
    for (var i = 0; i < length; i++)
    {
        var probe = m_PlanarReflectionProbe_RealtimeUpdate_WorkArray[i];
        hdCamera.Update(probe.frameSettings, null, null);
        if (!IsRealtimeTextureValid(probe.realtimeTexture, hdCamera))
        {
            // Release the stale target before allocating a replacement to avoid leaking it.
            if (probe.realtimeTexture != null)
                probe.realtimeTexture.Release();
            probe.realtimeTexture = NewRenderTarget(probe);
        }
    }

    // 2. Render each probe into its realtime texture.
    for (var i = 0; i < length; i++)
    {
        var probe = m_PlanarReflectionProbe_RealtimeUpdate_WorkArray[i];
        Render(probe, probe.realtimeTexture, viewerCamera);
    }
}
/// <summary>
/// Renders all realtime planar reflection probes that either requested a render
/// or are in a realtime update mode.
/// </summary>
/// <param name="probeTypes">Bitmask of probe types to process; only PlanarReflection is handled here.</param>
public void RenderAllRealtimeProbes(ReflectionProbeType probeTypes)
{
    if ((probeTypes & ReflectionProbeType.PlanarReflection) == 0)
        return;

    // Discard disabled probes from the requested render set.
    m_PlanarReflectionProbe_RequestRealtimeRender.IntersectWith(m_PlanarReflectionProbes);
    // Include all realtime probe modes.
    m_PlanarReflectionProbe_RequestRealtimeRender.UnionWith(m_PlanarReflectionProbe_RealtimeUpdate);

    var length = Mathf.Min(m_PlanarReflectionProbe_RequestRealtimeRender.Count, m_PlanarReflectionProbe_RealtimeUpdate_WorkArray.Length);
    // BUGFIX: HashSet<T>.CopyTo(T[]) copies all Count elements and throws
    // ArgumentException when the set holds more probes than the work array.
    // Use the (array, arrayIndex, count) overload to copy at most 'length' probes.
    m_PlanarReflectionProbe_RequestRealtimeRender.CopyTo(m_PlanarReflectionProbe_RealtimeUpdate_WorkArray, 0, length);
    m_PlanarReflectionProbe_RequestRealtimeRender.Clear();

    // 1. Allocate the target textures if necessary.
    var camera = GetRenderCamera();
    // The HDCamera lookup/creation is loop-invariant (camera never changes),
    // so acquire it once instead of on every probe iteration.
    var hdCamera = HDCamera.Get(camera);
    if (hdCamera == null)
    {
        // Warning: this is a bad design pattern.
        // An individual system should not create an HDCamera (which is a shared resource).
        hdCamera = HDCamera.Create(camera, null);
    }
    for (var i = 0; i < length; i++)
    {
        var probe = m_PlanarReflectionProbe_RealtimeUpdate_WorkArray[i];
        hdCamera.Update(probe.frameSettings, null, null);
        if (!IsRealtimeTextureValid(probe.realtimeTexture, hdCamera))
        {
            // Release the stale target before allocating a replacement to avoid leaking it.
            if (probe.realtimeTexture != null)
                probe.realtimeTexture.Release();
            probe.realtimeTexture = NewRenderTarget(probe);
        }
    }

    // 2. Render each probe into its realtime texture.
    for (var i = 0; i < length; i++)
    {
        var probe = m_PlanarReflectionProbe_RealtimeUpdate_WorkArray[i];
        Render(probe, probe.realtimeTexture);
    }
}
/// <summary>
/// Returns the HDCamera used for probe rendering, updated with the
/// frame settings of <paramref name="probe"/>.
/// </summary>
/// <param name="probe">The planar reflection probe whose frame settings drive the camera.</param>
/// <returns>The shared render HDCamera, updated for this probe.</returns>
public static HDCamera GetRenderHDCamera(PlanarReflectionProbe probe)
{
    var renderCamera = GetRenderCamera();
    probe.frameSettings.CopyTo(s_RenderCameraData.GetFrameSettings());

    // Warning: this is a bad design pattern.
    // An individual system should not create an HDCamera (which is a shared resource);
    // we only do so here when no HDCamera exists yet for the render camera.
    var hdCamera = HDCamera.Get(renderCamera) ?? HDCamera.Create(renderCamera, null);
    hdCamera.Update(probe.frameSettings, null, null);
    return hdCamera;
}