/// <summary>
/// Creates and configures the post-processing filters applied to each depth frame,
/// returning them through <c>out</c> parameters and initializing the colorizer field.
/// </summary>
/// <param name="decimate">Decimation (sub-sampling) filter; magnitude 1 keeps full resolution.</param>
/// <param name="spatial">Edge-preserving spatial smoothing filter.</param>
/// <param name="temp">Temporal smoothing filter (library defaults).</param>
/// <param name="holeFill">Dedicated hole-filling pass (library defaults).</param>
/// <param name="threshold">Depth clamp restricting valid range to [0, 1].</param>
private void SetupFilters(out DecimationFilter decimate, out SpatialFilter spatial, out TemporalFilter temp, out HoleFillingFilter holeFill, out ThresholdFilter threshold)
{
    // Colorizer turns raw depth values into a viewable color image.
    colorizer = new Colorizer();

    // Decimation: sub-samples the depth frame to reduce data while keeping the
    // best samples. Magnitude 1.0 effectively preserves the original scale.
    decimate = new DecimationFilter();
    decimate.Options[Option.FilterMagnitude].Value = 1.0F;

    // Spatial filter: edge-preserving smoothing with in-filter hole filling.
    // Hole filling is an aggressive heuristic and often gets the depth wrong,
    // but this demo cannot tolerate holes (a shortest-path search would always
    // prefer to "cut" through them since they have zero 3D distance).
    spatial = new SpatialFilter();
    spatial.Options[Option.HolesFill].Value = 1.0F;
    spatial.Options[Option.FilterMagnitude].Value = 5.0F;
    spatial.Options[Option.FilterSmoothAlpha].Value = 1.0F;
    spatial.Options[Option.FilterSmoothDelta].Value = 50.0F;

    // Temporal filter: smooths depth across consecutive frames.
    temp = new TemporalFilter();

    // Stand-alone hole-filling pass applied after the other filters.
    holeFill = new HoleFillingFilter();

    // Threshold: discard depth outside the [0, 1] range.
    threshold = new ThresholdFilter();
    threshold.Options[Option.MinDistance].Value = 0;
    threshold.Options[Option.MaxDistance].Value = 1;
}
/// <summary>
/// Builds a new <see cref="HoleFillingFilter"/> configured with this
/// instance's <c>HolesFill</c> mode.
/// </summary>
/// <returns>The configured processing block.</returns>
public override ProcessingBlock GetFilter()
{
    var holeFilling = new HoleFillingFilter();
    holeFilling.Options[Option.HolesFill].Value = HolesFill;
    return holeFilling;
}
/// <inheritdoc/>
public override ProcessingBlock Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
{
    // Advances past one property name and returns its numeric value.
    // The token sequence (Read, GetString, Read, GetSingle) is identical for
    // every filter option in the serialized form; no validation is performed.
    static float ReadOptionValue(ref Utf8JsonReader r)
    {
        _ = r.Read();
        _ = r.GetString();
        _ = r.Read();
        return r.GetSingle();
    }

    // Skip the object start and the "Name" property key, then read the
    // filter's display name, which selects the concrete block type below.
    _ = reader.Read();
    _ = reader.GetString();
    _ = reader.Read();
    var name = reader.GetString();

    ProcessingBlock block;
    switch (name)
    {
        case "Decimation Filter":
            block = new DecimationFilter();
            block.Options[Option.FilterMagnitude].Value = ReadOptionValue(ref reader);
            break;
        case "Spatial Filter":
            block = new SpatialFilter();
            block.Options[Option.FilterMagnitude].Value = ReadOptionValue(ref reader);
            block.Options[Option.FilterSmoothAlpha].Value = ReadOptionValue(ref reader);
            block.Options[Option.FilterSmoothDelta].Value = ReadOptionValue(ref reader);
            break;
        case "Temporal Filter":
            block = new TemporalFilter();
            block.Options[Option.FilterSmoothAlpha].Value = ReadOptionValue(ref reader);
            block.Options[Option.FilterSmoothDelta].Value = ReadOptionValue(ref reader);
            break;
        case "Hole Filling Filter":
            block = new HoleFillingFilter();
            block.Options[Option.HolesFill].Value = ReadOptionValue(ref reader);
            break;
        case "Threshold Filter":
            block = new ThresholdFilter();
            block.Options[Option.MinDistance].Value = ReadOptionValue(ref reader);
            block.Options[Option.MaxDistance].Value = ReadOptionValue(ref reader);
            break;
        default:
            throw new NotSupportedException($"The filter {name} is not supported in this converter");
    }

    // Consume the object-end token (unreachable for unknown names, which throw).
    _ = reader.Read();
    return block;
}
/// <summary>
/// Wires up the custom processing block that pulls framesets from the pipeline,
/// runs them through the filter chain, and forwards the result downstream.
/// </summary>
private void SetupProcessingBlock(Pipeline pipeline, Colorizer colorizer, DecimationFilter decimate, SpatialFilter spatial, TemporalFilter temp, HoleFillingFilter holeFill, ThresholdFilter threshold)
{
    processingBlock = new CustomProcessingBlock((f, src) =>
    {
        // FramesReleaser tracks every newly allocated .NET frame in this scope
        // and guarantees deterministic finalization when the scope exits.
        using (var releaser = new FramesReleaser())
        using (var frames = pipeline.WaitForFrames().DisposeWith(releaser))
        {
            // Filter stages, applied in order; each intermediate result is
            // registered with the releaser.
            // NOTE(review): threshold runs after colorizer here — confirm that
            // ordering is intentional (thresholding usually targets raw depth).
            var decimated = frames.ApplyFilter(decimate).DisposeWith(releaser);
            var smoothed = decimated.ApplyFilter(spatial).DisposeWith(releaser);
            var stabilized = smoothed.ApplyFilter(temp).DisposeWith(releaser);
            var filled = stabilized.ApplyFilter(holeFill).DisposeWith(releaser);
            var colorized = filled.ApplyFilter(colorizer).DisposeWith(releaser);
            var result = colorized.ApplyFilter(threshold).DisposeWith(releaser);

            // Hand the processed frameset to the next processing stage.
            src.FrameReady(result);
        }
    });
}
/// <summary>
/// Builds the frame-processing block appropriate for the selected display mode.
/// </summary>
/// <returns>
/// The processing block for <c>imgType.color</c> or <c>imgType.mix</c>;
/// <c>null</c> for any other <c>showType</c> value (matching prior behavior).
/// </returns>
private CustomProcessingBlock SetupProcessingBlock(Pipeline pipeline, Colorizer colorizer, DecimationFilter decimate, SpatialFilter spatial, TemporalFilter temp, HoleFillingFilter holeFill, Align align_to)
{
    if (showType == imgType.color)
    {
        // Color-only view: align frames, no depth post-processing.
        return new CustomProcessingBlock((f, src) =>
        {
            using (var releaser = new FramesReleaser())
            using (var frames = pipeline.WaitForFrames().DisposeWith(releaser))
            {
                var aligned = frames.ApplyFilter(align_to).DisposeWith(releaser);
                // Send it to the next processing stage.
                src.FramesReady(aligned);
            }
        });
    }

    if (showType == imgType.mix)
    {
        // Mixed view: align, then run the full depth filter chain and colorize.
        return new CustomProcessingBlock((f, src) =>
        {
            using (var releaser = new FramesReleaser())
            using (var frames = pipeline.WaitForFrames().DisposeWith(releaser))
            {
                var aligned = frames.ApplyFilter(align_to).DisposeWith(releaser);
                var decimated = aligned.ApplyFilter(decimate).DisposeWith(releaser);
                var smoothed = decimated.ApplyFilter(spatial).DisposeWith(releaser);
                var stabilized = smoothed.ApplyFilter(temp).DisposeWith(releaser);
                var filled = stabilized.ApplyFilter(holeFill).DisposeWith(releaser);
                var colorized = filled.ApplyFilter(colorizer).DisposeWith(releaser);
                // Send it to the next processing stage.
                src.FramesReady(colorized);
            }
        });
    }

    // Unsupported display mode — caller receives null.
    return null;
}
// Called when streaming starts: creates the depth filters, pins managed vertex
// storage for native writes, allocates GPU buffers, rebuilds the point mesh,
// and subscribes to per-frameset callbacks.
private void OnStartStreaming(PipelineProfile activeProfile)
{
    pc = new PointCloud();
    spatial = new SpatialFilter();
    temporal = new TemporalFilter();
    holeFilling = new HoleFillingFilter();

    // Warn (but continue) if the configured stream is absent from the profile.
    using (var profile = activeProfile.GetStream(stream))
    {
        if (profile == null)
        {
            Debug.LogWarningFormat("Stream {0} not in active profile", stream);
        }
    }

    using (var profile = activeProfile.GetStream(Stream.Depth) as VideoStreamProfile)
    {
        // NOTE(review): profile is dereferenced without a null check — if the
        // depth stream is missing or is not a VideoStreamProfile this throws.
        // Confirm callers guarantee an active depth stream.
        intrinsics = profile.GetIntrinsics();
        Assert.IsTrue(SystemInfo.SupportsTextureFormat(TextureFormat.RGFloat));

        // Two triangles per cell of the (W-1) x (H-1) depth grid.
        numParticles = (profile.Width - 1) * (profile.Height - 1) * 2;

        // Pin the vertex array so native code can write into it via the raw pointer.
        // The GCHandle must be freed elsewhere when streaming stops — verify.
        vertices = new Vector3[profile.Width * profile.Height];
        handle = GCHandle.Alloc(vertices, GCHandleType.Pinned);
        verticesPtr = handle.AddrOfPinnedObject();

        // Six indices (two triangles) per grid cell.
        var indices = new int[(profile.Width - 1) * (profile.Height - 1) * 6];
        var iIdx = 0;
        for (int j = 0; j < profile.Height; j++)
        {
            for (int i = 0; i < profile.Width; i++)
            {
                // Only interior cells have a cell to their lower-right.
                if (i < profile.Width - 1 && j < profile.Height - 1)
                {
                    var idx = i + j * profile.Width;
                    var y = profile.Width; // row stride: index offset of the pixel one row down
                    indices[iIdx++] = idx + 0;
                    indices[iIdx++] = idx + y;
                    indices[iIdx++] = idx + 1;
                    indices[iIdx++] = idx + 1;
                    indices[iIdx++] = idx + y;
                    indices[iIdx++] = idx + y + 1;
                }
            }
        }

        // GPU-side buffers consumed by the compute/render pipeline.
        particleBuffer = new ComputeBuffer(numParticles, Marshal.SizeOf(typeof(VoxelParticle)));
        vertexBuffer = new ComputeBuffer(vertices.Length, sizeof(float) * 3);
        indicesBuffer = new ComputeBuffer(indices.Length, sizeof(int));
        vertexBuffer.SetData(vertices);
        indicesBuffer.SetData(indices);
        renderer.SetBuffer("_VoxelBuffer", particleBuffer);
        ResetParticle();

        // Rebuild the point mesh (one point per particle), destroying any old one.
        if (mesh != null)
        {
            Destroy(mesh);
        }
        mesh = new Mesh()
        {
            // 32-bit indices: particle counts easily exceed the 16-bit limit.
            indexFormat = IndexFormat.UInt32,
        };
        mesh.MarkDynamic();
        mesh.vertices = new Vector3[numParticles];
        var newIdices = Enumerable.Range(0, numParticles).ToArray();
        mesh.SetIndices(newIdices, MeshTopology.Points, 0, false);
        // Fixed generous bounds so the dynamically updated mesh is never
        // frustum-culled based on stale vertex data.
        mesh.bounds = new Bounds(Vector3.zero, Vector3.one * 10f);
        GetComponent<MeshFilter>().sharedMesh = mesh;
    }

    RealSenseDevice.Instance.onNewSampleSet += OnFrames;
}
/// <summary>
/// Initializes the post-processing filters, starts the RealSense pipeline with
/// the configured depth/color streams, loads the device JSON configuration,
/// publishes field-of-view info to the UI, and wires up the render callbacks.
/// Any failure is reported to the user via a message box.
/// </summary>
void Init()
{
    try
    {
        #region FILTERS
        // Edge-preserving spatial smoothing.
        spatialFilter = new SpatialFilter();
        spatialFilter.Options[Option.FilterMagnitude].Value = 5.0F;
        spatialFilter.Options[Option.FilterSmoothAlpha].Value = 0.25F;
        spatialFilter.Options[Option.FilterSmoothDelta].Value = 50.0F;

        // Sub-sample the depth frame by a factor of 2.
        decimationFilter = new DecimationFilter();
        decimationFilter.Options[Option.FilterMagnitude].Value = 2.0F;

        holeFilter = new HoleFillingFilter();

        thresholdFilter = new ThresholdFilter();
        //thresholdFilter.Options[Option.MinDistance].Value = 0.73F;
        //thresholdFilter.Options[Option.MaxDistance].Value = 0.81F;
        #endregion

        // Align color frames to the depth stream.
        align_to = new Align(Intel.RealSense.Stream.Depth);
        colorizer = new Colorizer();

        pipeline = new Pipeline();

        //CONFIG SETTINGS
        var cfg = new Config();
        cfg.EnableStream(Intel.RealSense.Stream.Depth, resolutionW, resolutionH, Format.Z16, FPS); //depth resolution manual change
        cfg.EnableStream(Intel.RealSense.Stream.Color, 640, 480, Format.Rgb8, 30);
        pipelineProfile = pipeline.Start(cfg); //stream starting with user config

        var advancedDevice = AdvancedDevice.FromDevice(pipelineProfile.Device); //connected device
        //read device's configuration settings from json file
        advancedDevice.JsonConfiguration = File.ReadAllText(@"CustomConfig.json");
        selectedDevice = pipelineProfile.Device;

        #region Field Of View Info
        float[] dfov, cfov, irfov;

        var depth_stream = pipelineProfile.GetStream<VideoStreamProfile>(Intel.RealSense.Stream.Depth);
        Intrinsics depthIntr = depth_stream.GetIntrinsics();
        dfov = depthIntr.FOV; // float[2] - horizontal and vertical field of view in degrees

        var color_stream = pipelineProfile.GetStream<VideoStreamProfile>(Intel.RealSense.Stream.Color);
        Intrinsics colorIntr = color_stream.GetIntrinsics();
        cfov = colorIntr.FOV; // float[2] - horizontal and vertical field of view in degrees

        var ir_stream = pipelineProfile.GetStream<VideoStreamProfile>(Intel.RealSense.Stream.Infrared);
        Intrinsics irIntr = ir_stream.GetIntrinsics();
        irfov = irIntr.FOV; // float[2] - horizontal and vertical field of view in degrees

        lblDepthFov.Text = "Depth FOV : " + "H = " + Convert.ToInt32(dfov[0]).ToString() + "° , " + "V = " + Convert.ToInt32(dfov[1]).ToString() + "°";
        lblColorFov.Text = "RGB FOV : " + "H = " + Convert.ToInt32(cfov[0]).ToString() + "° , " + "V = " + Convert.ToInt32(cfov[1]).ToString() + "°";
        lblInfraredFov.Text = "IR FOV : " + "H = " + Convert.ToInt32(irfov[0]).ToString() + "° , " + "V = " + Convert.ToInt32(irfov[1]).ToString() + "°";
        #endregion

        // Primary screen resolution. PrimaryScreenWidth/Height are doubles;
        // convert them directly instead of round-tripping through ToString(),
        // which throws FormatException for fractional values (common with DPI
        // scaling) and is culture-sensitive. Convert.ToInt32(double) rounds.
        screenWidth = Convert.ToInt32(System.Windows.SystemParameters.PrimaryScreenWidth);
        screenHeight = Convert.ToInt32(System.Windows.SystemParameters.PrimaryScreenHeight);

        //camera started working. transfer image to interface
        SetupWindow(pipelineProfile, out updateDepth, out updateColor, out updateIR1, out updateIR2);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
}
/// <summary>
/// Creates the hole-filling processing block and caches a reference to its
/// HolesFill option for later adjustment.
/// </summary>
public void Init()
{
    var holeFilling = new HoleFillingFilter();
    _pb = holeFilling;
    holeFillOption = holeFilling.Options[Option.HolesFill];
}