Example #1
 public PendingMetric(MetricDefinition metricDefinition, int metricId, SensorHandle sensorHandle, Guid captureId, Annotation annotation, Guid sequenceId, int step, JToken values = null)
 {
     MetricDefinition = metricDefinition;
     MetricId         = metricId;
     SensorHandle     = sensorHandle;
     Annotation       = annotation;
     SequenceId       = sequenceId;
     Step             = step;
     CaptureId        = captureId;
     Values           = values;
 }
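The values parameter is a Newtonsoft.Json JToken. A minimal sketch of assembling such a payload; the field names are illustrative, not part of any Perception schema:

 // Sketch: building a JToken payload for the values parameter.
 // JArray/JObject come from Newtonsoft.Json; field names are illustrative.
 using Newtonsoft.Json.Linq;

 JToken values = new JArray
 {
     new JObject { ["label_id"] = 1, ["count"] = 12 }
 };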
Example #2
 /// <summary>
 /// Requests a capture from this camera on the next rendered frame. Can only be used when using <see cref="PerceptionCamera.CaptureTriggerMode.Manual"/> capture mode.
 /// </summary>
 public void RequestCapture()
 {
     if (captureTriggerMode.Equals(CaptureTriggerMode.Manual))
     {
         SensorHandle.RequestCapture();
     }
     else
     {
         Debug.LogError($"{nameof(RequestCapture)} can only be used if the camera is in {nameof(CaptureTriggerMode.Manual)} capture mode.");
     }
 }
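A minimal usage sketch for the manual mode, assuming the same GameObject carries a PerceptionCamera whose Capture Trigger Mode is set to Manual in the Inspector; the key binding is illustrative:

 using UnityEngine;
 using UnityEngine.Perception.GroundTruth;

 public class ManualCaptureTrigger : MonoBehaviour
 {
     PerceptionCamera m_PerceptionCamera;

     void Awake()
     {
         m_PerceptionCamera = GetComponent<PerceptionCamera>();
     }

     void Update()
     {
         // Request a capture for the next rendered frame on demand.
         if (Input.GetKeyDown(KeyCode.Space))
             m_PerceptionCamera.RequestCapture();
     }
 }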
Example #3
        /// <summary>
        /// Register a new sensor under the given ego.
        /// </summary>
        /// <param name="egoHandle">The ego container for the sensor. Sensor orientation will be reported in the context of the given ego.</param>
        /// <param name="modality">The kind of the sensor (ex. "camera", "lidar")</param>
        /// <param name="description">A human-readable description of the sensor (ex. "front-left rgb camera")</param>
        /// <param name="firstCaptureFrame">The offset from the current frame on which this sensor should first be scheduled.</param>
        /// <param name="captureTriggerMode">The method of triggering captures for this sensor.</param>
        /// <param name="simulationDeltaTime">The simulation frame time (seconds) requested by this sensor.</param>
        /// <param name="framesBetweenCaptures">The number of frames to simulate and render between the camera's scheduled captures. Setting this to 0 makes the camera capture every frame.</param>
        /// <param name="manualSensorAffectSimulationTiming">Have this unscheduled (manual capture) camera affect simulation timings (similar to a scheduled camera) by requesting a specific frame delta time</param>
        /// <returns>A <see cref="SensorHandle"/>, which should be used to check <see cref="SensorHandle.ShouldCaptureThisFrame"/> each frame to determine whether to capture (or render) that frame.
        /// It is also used to report captures, annotations, and metrics on the sensor.</returns>
        /// <exception cref="ArgumentException">Thrown if ego is invalid.</exception>
        public static SensorHandle RegisterSensor(EgoHandle egoHandle, string modality, string description, float firstCaptureFrame, CaptureTriggerMode captureTriggerMode, float simulationDeltaTime, int framesBetweenCaptures, bool manualSensorAffectSimulationTiming = false)
        {
            if (!SimulationState.Contains(egoHandle.Id))
            {
                throw new ArgumentException("Supplied ego is not part of the simulation.", nameof(egoHandle));
            }

            var sensor = new SensorHandle(Guid.NewGuid());

            SimulationState.AddSensor(egoHandle, modality, description, firstCaptureFrame, captureTriggerMode, simulationDeltaTime, framesBetweenCaptures, manualSensorAffectSimulationTiming, sensor);
            return sensor;
        }
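A hedged sketch of calling this overload for a scheduled camera, assuming it is hosted on DatasetCapture as in the snippets below; the ego label and timing values are illustrative:

 // Sketch: register a scheduled "camera" sensor. Values are illustrative.
 var ego = DatasetCapture.RegisterEgo("main ego");
 var sensorHandle = DatasetCapture.RegisterSensor(
     ego,
     "camera",                      // modality
     "front rgb camera",            // description
     0,                             // firstCaptureFrame
     CaptureTriggerMode.Scheduled,  // captureTriggerMode
     0.0166f,                       // simulationDeltaTime
     1);                            // framesBetweenCaptures: capture every other frame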
Example #4
        /// <summary>
        /// Register a new sensor under the given ego.
        /// </summary>
        /// <param name="egoHandle">The ego container for the sensor. Sensor orientation will be reported in the context of the given ego.</param>
        /// <param name="modality">The kind of the sensor (ex. "camera", "lidar")</param>
        /// <param name="description">A human-readable description of the sensor (ex. "front-left rgb camera")</param>
        /// <param name="period">The period, in seconds, on which the sensor should capture. Frames will be scheduled in the simulation such that each sensor is triggered every _period_ seconds.</param>
        /// <param name="firstCaptureTime">The time, in seconds, from the start of the sequence on which this sensor should first be scheduled.</param>
        /// <returns>A <see cref="SensorHandle"/>, which should be used to check <see cref="SensorHandle.ShouldCaptureThisFrame"/> each frame to determine whether to capture (or render) that frame.
        /// It is also used to report captures, annotations, and metrics on the sensor.</returns>
        /// <exception cref="ArgumentException">Thrown if ego is invalid.</exception>
        public static SensorHandle RegisterSensor(EgoHandle egoHandle, string modality, string description, float period, float firstCaptureTime)
        {
            if (!SimulationState.Contains(egoHandle.Id))
            {
                throw new ArgumentException("Supplied ego is not part of the simulation.", nameof(egoHandle));
            }

            var sensor = new SensorHandle(Guid.NewGuid());

            SimulationState.AddSensor(egoHandle, modality, description, period, firstCaptureTime, sensor);
            return sensor;
        }
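The returned handle is then polled each frame, as the returns note describes. A minimal MonoBehaviour sketch with illustrative timing values:

 using UnityEngine;
 using UnityEngine.Perception.GroundTruth;

 public class PollingSensor : MonoBehaviour
 {
     SensorHandle m_SensorHandle;

     void Awake()
     {
         var ego = DatasetCapture.RegisterEgo("");
         m_SensorHandle = DatasetCapture.RegisterSensor(
             ego, "camera", "front rgb camera", 0.0166f /* period */, 0f /* firstCaptureTime */);
     }

     void Update()
     {
         // Only capture (or render) on the frames scheduled for this sensor.
         if (m_SensorHandle.ShouldCaptureThisFrame)
         {
             // Render and report the capture here.
         }
     }
 }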
Example #5
    public void ApplyViewPortAnnotation(List<TargetPlantCounting.PlantAnnotation> _capture)
    {
        SensorHandle sensorHandle = GetComponent<PerceptionCamera>().SensorHandle;

        if (sensorHandle.ShouldCaptureThisFrame)
        {
            // Report the plant annotations asynchronously against this frame's capture.
            AsyncAnnotation asyncAnnotation = sensorHandle.ReportAnnotationAsync(plantAnnotationDefinition);

            asyncAnnotation.ReportValues(_capture);
        }
    }
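The plantAnnotationDefinition used above would typically be registered once up front. A hedged sketch, assuming the spec-less overload of RegisterAnnotationDefinition; the name, description, and format strings are illustrative:

 // Sketch: one-time registration of the annotation definition used above.
 // Name, description, and format strings are illustrative.
 AnnotationDefinition plantAnnotationDefinition;

 void Start()
 {
     plantAnnotationDefinition = DatasetCapture.RegisterAnnotationDefinition(
         "plant annotation", "Plant annotations in viewport space", "json");
 }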
Example #6
        // Awake is called when the script instance is loaded
        void Awake()
        {
            m_EgoMarker = GetComponentInParent<Ego>();
            var ego = m_EgoMarker == null ? DatasetCapture.RegisterEgo("") : m_EgoMarker.EgoHandle;

            SensorHandle = DatasetCapture.RegisterSensor(ego, "camera", description, period, startTime);

            SetupInstanceSegmentation();

            RenderPipelineManager.beginCameraRendering += OnBeginCameraRendering;
            RenderPipelineManager.endCameraRendering   += CheckForRendererFeature;
            DatasetCapture.SimulationEnding            += OnSimulationEnding;
        }
Example #7
        void OnDisable()
        {
            DatasetCapture.SimulationEnding            -= OnSimulationEnding;
            RenderPipelineManager.beginCameraRendering -= OnBeginCameraRendering;

            OnSimulationEnding();

            if (SensorHandle.IsValid)
            {
                SensorHandle.Dispose();
            }

            SensorHandle = default;
        }
Example #8
        void OnDestroy()
        {
            DatasetCapture.SimulationEnding -= OnSimulationEnding;

            OnSimulationEnding();
            CleanupVisualization();

            if (SensorHandle.IsValid)
            {
                SensorHandle.Dispose();
            }

            SensorHandle = default;
        }
Example #9
 internal void Init(PerceptionCamera newPerceptionCamera)
 {
     try
     {
         this.perceptionCamera = newPerceptionCamera;
         sensorHandle          = newPerceptionCamera.SensorHandle;
         Setup();
         isInitialized = true;
     }
     catch (Exception)
     {
         this.enabled = false;
         throw;
     }
 }
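Note the failure-handling pattern here: if Setup() throws, the component disables itself before rethrowing, so a misconfigured labeler surfaces one error instead of failing again on every subsequent frame.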
Example #10
        // Awake is called when the script instance is loaded
        void Awake()
        {
            m_EgoMarker = GetComponentInParent<Ego>();
            var ego = m_EgoMarker == null ? DatasetCapture.RegisterEgo("") : m_EgoMarker.EgoHandle;

            SensorHandle = DatasetCapture.RegisterSensor(ego, "camera", description, period, startTime);

            AsyncRequest.maxJobSystemParallelism = 0; // Jobs are not chained to one another in any way, maximizing parallelism
            AsyncRequest.maxAsyncRequestFrameAge = 4; // Ensure that readbacks happen before Allocator.TempJob allocations get stale

            SetupInstanceSegmentation();
            var cam = GetComponent<Camera>();

#if UNITY_EDITOR || DEVELOPMENT_BUILD
            SetupVisualizationCamera(cam);
#endif

            DatasetCapture.SimulationEnding += OnSimulationEnding;
        }
Example #11
 internal SimulationState.SensorData GetSensorData(SensorHandle sensorHandle)
 {
     return m_SensorsReference[sensorHandle];
 }
Example #12
        // Awake is called when the script instance is loaded
        void Awake()
        {
            //CaptureOptions.useAsyncReadbackIfSupported = false;

            m_EgoMarker = GetComponentInParent<Ego>();
            var ego = m_EgoMarker == null ? SimulationManager.RegisterEgo("") : m_EgoMarker.EgoHandle;

            SensorHandle = SimulationManager.RegisterSensor(ego, "camera", description, period, startTime);

            var myCamera = GetComponent<Camera>();
            var width    = myCamera.pixelWidth;
            var height   = myCamera.pixelHeight;

            if ((produceSegmentationImages || produceObjectCountAnnotations || produceBoundingBoxAnnotations) && LabelingConfiguration == null)
            {
                Debug.LogError("LabelingConfiguration must be set if producing ground truth data");
                produceSegmentationImages     = false;
                produceObjectCountAnnotations = false;
                produceBoundingBoxAnnotations = false;
            }

            segmentationTexture      = new RenderTexture(new RenderTextureDescriptor(width, height, GraphicsFormat.R8G8B8A8_UNorm, 8));
            segmentationTexture.name = "Segmentation";
            labelingTexture          = new RenderTexture(new RenderTextureDescriptor(width, height, GraphicsFormat.R8G8B8A8_UNorm, 8));
            labelingTexture.name     = "Labeling";

#if HDRP_PRESENT
            var customPassVolume = GetComponent<CustomPassVolume>() ?? gameObject.AddComponent<CustomPassVolume>();
            customPassVolume.injectionPoint = CustomPassInjectionPoint.BeforeRendering;
            customPassVolume.isGlobal       = true;
            m_SegmentationPass = new InstanceSegmentationPass()
            {
                name          = "Segmentation Pass",
                targetCamera  = myCamera,
                targetTexture = segmentationTexture
            };
            m_SegmentationPass.EnsureInit();
            m_SemanticSegmentationPass = new SemanticSegmentationPass(myCamera, labelingTexture, LabelingConfiguration)
            {
                name = "Labeling Pass"
            };

            SetupPasses(customPassVolume);
#endif
#if URP_PRESENT
            instanceSegmentationUrpPass = new InstanceSegmentationUrpPass(myCamera, segmentationTexture);
            semanticSegmentationUrpPass = new SemanticSegmentationUrpPass(myCamera, labelingTexture, LabelingConfiguration);
#endif

            if (produceSegmentationImages)
            {
                var specs = LabelingConfiguration.LabelEntries.Select((l) => new SemanticSegmentationSpec()
                {
                    label_id    = l.id,
                    label_name  = l.label,
                    pixel_value = l.value
                }).ToArray();

                m_SegmentationAnnotationDefinition = SimulationManager.RegisterAnnotationDefinition("semantic segmentation", specs, "pixel-wise semantic segmentation label", "PNG");

                m_ClassLabelingTextureReader = new RenderTextureReader<short>(labelingTexture, myCamera,
                                                                              (frameCount, data, tex) => OnSemanticSegmentationImageRead(frameCount, data));
            }

            if (produceObjectCountAnnotations || produceBoundingBoxAnnotations || produceRenderedObjectInfoMetric)
            {
                var labelingMetricSpec = LabelingConfiguration.LabelEntries.Select((l) => new ObjectCountSpec()
                {
                    label_id   = l.id,
                    label_name = l.label,
                }).ToArray();

                if (produceObjectCountAnnotations)
                {
                    m_ObjectCountMetricDefinition = SimulationManager.RegisterMetricDefinition("object count", labelingMetricSpec, "Counts of objects for each label in the sensor's view", id: new Guid(objectCountId));
                }

                if (produceBoundingBoxAnnotations)
                {
                    m_BoundingBoxAnnotationDefinition = SimulationManager.RegisterAnnotationDefinition("bounding box", labelingMetricSpec, "Bounding box for each labeled object visible to the sensor", id: new Guid(boundingBoxId));
                }

                if (produceRenderedObjectInfoMetric)
                {
                    m_RenderedObjectInfoMetricDefinition = SimulationManager.RegisterMetricDefinition("rendered object info", labelingMetricSpec, "Information about each labeled object visible to the sensor", id: new Guid(renderedObjectInfoId));
                }

                m_RenderedObjectInfoGenerator = new RenderedObjectInfoGenerator(LabelingConfiguration);
                World.DefaultGameObjectInjectionWorld.GetExistingSystem<GroundTruthLabelSetupSystem>().Activate(m_RenderedObjectInfoGenerator);

                m_SegmentationReader = new RenderTextureReader<uint>(segmentationTexture, myCamera, (frameCount, data, tex) =>
                {
                    if (segmentationImageReceived != null)
                    {
                        segmentationImageReceived(frameCount, data);
                    }

                    m_RenderedObjectInfoGenerator.Compute(data, tex.width, boundingBoxOrigin, out var renderedObjectInfos, out var classCounts, Allocator.Temp);

                    using (s_RenderedObjectInfosCalculatedEvent.Auto())
                        renderedObjectInfosCalculated?.Invoke(frameCount, renderedObjectInfos);

                    if (produceObjectCountAnnotations)
                    {
                        OnObjectCountsReceived(classCounts, LabelingConfiguration.LabelEntries, frameCount);
                    }

                    if (produceBoundingBoxAnnotations)
                    {
                        ProduceBoundingBoxesAnnotation(renderedObjectInfos, LabelingConfiguration.LabelEntries, frameCount);
                    }

                    if (produceRenderedObjectInfoMetric)
                    {
                        ProduceRenderedObjectInfoMetric(renderedObjectInfos, frameCount);
                    }
                });
            }
        }
Example #13
 internal Sensor(SensorHandle sensor)
 {
     handle  = sensor;
     options = new OptionCollection(handle);
 }