/// <summary>
/// Prepares and returns source and target RT handles for a single sensor postprocessing pass.
/// </summary>
/// <param name="cmd">Buffer used to queue commands.</param>
/// <param name="colorBuffer">Original color buffer of the sensor.</param>
/// <param name="sensor">Sensor that will have postprocessing effects rendered.</param>
/// <param name="autoSize">Does the <paramref name="colorBuffer"/> use auto-scaling?</param>
/// <param name="lateQueue">True if executed after distortion, false otherwise.</param>
/// <param name="source"><see cref="RTHandle"/> that should be used as a source (can be sampled).</param>
/// <param name="target"><see cref="RTHandle"/> that should be used as a target (can't be sampled).</param>
/// <param name="cubemapFace">Specifies target face if cubemap is used.</param>
/// <exception cref="InvalidOperationException">Sensor has no postprocessing effects.</exception>
public void GetRTHandles(CommandBuffer cmd, RTHandle colorBuffer, CameraSensorBase sensor, bool autoSize,
    bool lateQueue, out RTHandle source, out RTHandle target, CubemapFace cubemapFace = CubemapFace.Unknown)
{
    /* NOTE:
     * Each pass requires source texture (to sample from) and target (to render to). Since texture bound as a
     * target can't be sampled, temporary one is needed.
     * It's possible to only use two textures for rendering - original color buffer and temporary texture will
     * be swapped back and forth as source and target. If amount of passes is odd, at least one copy is needed
     * to make sure that final pass is always rendered to color buffer. This copy happens before the last pass.
     * Multiple sensors will use one temporary texture pool, assuming that their color buffers have the same
     * properties (size, format, dimension etc.).
     */

    var sensorSwaps = lateQueue ? postDistortionSensorSwaps : preDistortionSensorSwaps;
    var lastTarget = lateQueue ? postDistortionLastTarget : preDistortionLastTarget;
    var passCount = lateQueue ? sensor.LatePostprocessing?.Count ?? 0 : sensor.Postprocessing?.Count ?? 0;

    if (passCount == 0)
        throw new InvalidOperationException("Sensor has no postprocessing passes defined.");

    var isLastPass = sensorSwaps[sensor] == passCount - 1;

    // Rendering to cubemap is a special case. We never want to force explicit face binding in Render() method,
    // so postprocessing only exposes intermediate, non-cubemap textures. In the final step, result will be
    // blit to desired cubemap face, reducing complexity of the Render() method.
    if (cubemapFace != CubemapFace.Unknown)
    {
        // First pass - copy specific face to target with TextureXR format, prepare target with same format
        if (lastTarget[sensor] == null)
        {
            source = GetPooledHandle(colorBuffer, autoSize);
            cmd.CopyTexture(colorBuffer, (int) cubemapFace, 0, 0, 0, colorBuffer.rt.width,
                colorBuffer.rt.height, source, 0, 0, 0, 0);
        }
        else
        {
            source = lastTarget[sensor];
        }

        target = GetPooledHandle(colorBuffer, autoSize);
    }
    // Rendering to non-cubemap texture can use color buffer as intermediate target - just make sure that final
    // pass will be rendered to color buffer, not pooled RT (happens for odd number of passes).
    else
    {
        // First pass (lastTarget == null) and color-buffer-as-last-target behave identically, so the two
        // previously duplicated branches are merged into one condition.
        if (lastTarget[sensor] == colorBuffer || lastTarget[sensor] == null)
        {
            // Last pass has to be rendered to color buffer - copy is needed
            if (isLastPass)
            {
                source = GetPooledHandle(colorBuffer, autoSize);
                target = colorBuffer;
                cmd.CopyTexture(colorBuffer, 0, source, 0);
            }
            else
            {
                source = colorBuffer;
                target = GetPooledHandle(colorBuffer, autoSize);
            }
        }
        else
        {
            source = lastTarget[sensor];
            target = colorBuffer;
        }
    }

    // This was last pass in this frame - reset counters
    if (isLastPass)
    {
        lastTarget[sensor] = null;
        sensorSwaps[sensor] = 0;
    }
    // Keep track of last target and amount of passes completed (for further passes)
    else
    {
        lastTarget[sensor] = target;
        sensorSwaps[sensor]++;
    }
}
/// <summary>
/// Executes a single postprocessing pass for the given sensor, provided the pass implements
/// <see cref="IPostProcessRenderer"/>; passes that don't are silently skipped.
/// </summary>
/// <param name="ctx">Context object carrying state shared by postprocessing passes.</param>
/// <param name="sensor">Sensor whose output is being postprocessed.</param>
/// <param name="pass">Custom pass expected to implement <see cref="IPostProcessRenderer"/>.</param>
/// <param name="data">Per-pass postprocessing data.</param>
/// <param name="cubemapFace">Target cubemap face, or <see cref="CubemapFace.Unknown"/> for 2D targets.</param>
private static void RenderPostProcess<T>(PostProcessPassContext ctx, CameraSensorBase sensor, CustomPass pass,
    T data, CubemapFace cubemapFace = CubemapFace.Unknown) where T : PostProcessData
{
    // Pattern match instead of 'as' + null-conditional; non-renderer passes are a no-op either way.
    if (pass is IPostProcessRenderer renderer)
        renderer.Render(ctx, sensor, data, cubemapFace);
}
/// <summary>
/// Executes a single postprocessing pass for the given sensor, provided the pass implements
/// <see cref="IPostProcessRenderer"/>; passes that don't are silently skipped.
/// </summary>
/// <param name="cmd">Buffer used to queue rendering commands.</param>
/// <param name="hdCamera">HD camera associated with the sensor.</param>
/// <param name="sensor">Sensor whose output is being postprocessed.</param>
/// <param name="sensorColorBuffer">Color buffer of the sensor.</param>
/// <param name="pass">Custom pass expected to implement <see cref="IPostProcessRenderer"/>.</param>
/// <param name="data">Per-pass postprocessing data.</param>
/// <param name="cubemapFace">Target cubemap face, or <see cref="CubemapFace.Unknown"/> for 2D targets.</param>
private static void RenderPostProcess<T>(CommandBuffer cmd, HDCamera hdCamera, CameraSensorBase sensor,
    RTHandle sensorColorBuffer, CustomPass pass, T data, CubemapFace cubemapFace = CubemapFace.Unknown)
    where T : PostProcessData
{
    // Pattern match instead of 'as' + null-conditional; non-renderer passes are a no-op either way.
    if (pass is IPostProcessRenderer renderer)
        renderer.Render(cmd, hdCamera, sensor, sensorColorBuffer, data, cubemapFace);
}
/// <summary>
/// Draws the custom inspector for <see cref="CameraSensorBase"/>: serialized camera properties,
/// a cubemap-size popup mapped to the sensor's <c>CubemapSize</c> field, and conditional
/// distortion/segmentation sections.
/// </summary>
/// <exception cref="Exception">Sensor's current <c>CubemapSize</c> is not one of the supported values.</exception>
public override void OnInspectorGUI()
{
    camera = (CameraSensorBase)target;
    serializedObject.Update();

    EditorGUILayout.PropertyField(CameraName);
    EditorGUILayout.PropertyField(CameraTopic);
    EditorGUILayout.PropertyField(CameraFrame);
    EditorGUILayout.PropertyField(Width);
    EditorGUILayout.PropertyField(Height);
    EditorGUILayout.PropertyField(Frequency);
    EditorGUILayout.PropertyField(JpegQuality);
    EditorGUILayout.PropertyField(FieldOfView);
    EditorGUILayout.PropertyField(MinDistance);
    EditorGUILayout.PropertyField(MaxDistance);

    // Map the current size to its popup index; unsupported values are a configuration error.
    switch (camera.CubemapSize)
    {
        case 512:
            CubemapSizeIndex = 0;
            break;
        case 1024:
            CubemapSizeIndex = 1;
            break;
        case 2048:
            CubemapSizeIndex = 2;
            break;
        default:
            throw new Exception("Unsupported Cubemap Size: " + camera.CubemapSize);
    }

    CubemapSizeIndex = EditorGUILayout.Popup("Cubemap Size:", CubemapSizeIndex, CubemapSizeOptions);

    // Parse into a local and assign only on success - the original ignored TryParse's result,
    // which would silently clobber CubemapSize to 0 if an option string was ever malformed.
    if (Int32.TryParse(CubemapSizeOptions[CubemapSizeIndex], out var selectedSize))
        camera.CubemapSize = selectedSize;

    EditorGUILayout.PropertyField(Distorted);
    if (camera.Distorted)
    {
        EditorGUILayout.PropertyField(DistortionParameters, true);
        EditorGUILayout.PropertyField(Fisheye);
        if (camera.Fisheye)
        {
            EditorGUILayout.PropertyField(Xi);
        }
    }

    // Segmentation tags property may be absent depending on the sensor variant.
    if (InstanceSegmentationTags != null)
    {
        EditorGUILayout.PropertyField(InstanceSegmentationTags, true);
    }

    serializedObject.ApplyModifiedProperties();
}