private void SetupFilters(out DecimationFilter decimate, out SpatialFilter spatial, out TemporalFilter temp, out HoleFillingFilter holeFill, out ThresholdFilter threshold)
{
    // Colorizer turns raw depth values into a viewable color image.
    colorizer = new Colorizer();

    // Decimation: down-samples the depth frame while preserving the best samples.
    decimate = new DecimationFilter();
    decimate.Options[Option.FilterMagnitude].Value = 1.0F; // down-sampling scale factor

    // Temporal filter smooths depth across consecutive frames.
    temp = new TemporalFilter();

    // Hole-filling filter patches missing depth pixels.
    holeFill = new HoleFillingFilter();

    // Spatial filter: edge-preserving smoothing.
    // Hole filling here is an aggressive heuristic and often gets the depth wrong,
    // but this demo is not built to handle holes: a shortest path would always
    // prefer to "cut" through them, since holes have zero 3D distance.
    spatial = new SpatialFilter();
    spatial.Options[Option.HolesFill].Value = 1.0F;
    spatial.Options[Option.FilterMagnitude].Value = 5.0F; // smoothing strength at image edges
    spatial.Options[Option.FilterSmoothAlpha].Value = 1.0F;
    spatial.Options[Option.FilterSmoothDelta].Value = 50.0F;

    // Aligning color to depth is currently disabled:
    //align_to = new Align(Stream.Depth);

    // Threshold filter: clip depth readings to the [0, 1] meter range.
    threshold = new ThresholdFilter();
    threshold.Options[Option.MinDistance].Value = 0;
    threshold.Options[Option.MaxDistance].Value = 1;
}
private void SetupFilters(out DecimationFilter decimate, out SpatialFilter spatial, out ThresholdFilter threshold)
{
    // Colorizer turns raw depth values into a viewable color image.
    colorizer = new Colorizer();

    // Decimation: down-samples the depth frame while preserving the best samples.
    decimate = new DecimationFilter();
    decimate.Options[Option.FilterMagnitude].Value = 1.0F; // down-sampling scale factor

    // Spatial filter: edge-preserving smoothing with hole filling enabled.
    spatial = new SpatialFilter();
    spatial.Options[Option.HolesFill].Value = 1.0F;
    spatial.Options[Option.FilterMagnitude].Value = 5.0F; // smoothing strength at image edges
    spatial.Options[Option.FilterSmoothAlpha].Value = 1.0F;
    spatial.Options[Option.FilterSmoothDelta].Value = 50.0F;

    // Aligning color to depth is currently disabled:
    //align_to = new Align(Stream.Depth);

    // Threshold filter: clip depth readings to the [0, 1] meter range.
    threshold = new ThresholdFilter();
    threshold.Options[Option.MinDistance].Value = 0;
    threshold.Options[Option.MaxDistance].Value = 1;
}
/// <summary>
/// Builds a RealSense threshold processing block configured with this
/// instance's <c>MinDistance</c>/<c>MaxDistance</c> range.
/// </summary>
/// <returns>A configured <see cref="ThresholdFilter"/>.</returns>
public override ProcessingBlock GetFilter()
{
    var thresholdFilter = new ThresholdFilter();
    thresholdFilter.Options[Option.MinDistance].Value = MinDistance;
    thresholdFilter.Options[Option.MaxDistance].Value = MaxDistance;
    return thresholdFilter;
}
/// <summary>
/// Applies a threshold filter to a generated w×h test image and writes the
/// result to "threshold(level)-WxH.png" under the test-data root folder.
/// </summary>
/// <param name="w">Test image width in pixels.</param>
/// <param name="h">Test image height in pixels.</param>
/// <param name="threshold">Threshold level passed to the filter.</param>
public async Task ApplyThresholdFilter(int w, int h, int threshold)
{
    var filter = new ThresholdFilter(threshold);
    using (var img = new BitmapImage(TestData.GetTestImage(w, h)))
    {
        await filter.Apply(img, img.Bounds, CancellationToken.None);

        var outputPath = Path.Combine(TestData.RootFolder, $"threshold({threshold})-{w}x{h}.png");
        // Fix: the original passed File.Create(...) directly to SaveAsync and never
        // disposed the FileStream, leaking the handle (and risking unflushed bytes).
        using (var output = File.Create(outputPath))
        {
            await img.SaveAsync(output, CancellationToken.None);
        }
    }
}
/// <summary>
/// Wires up the terrain-filter pipeline: allocates one global coordinate map
/// per filter kind and constructs each filter bound to this generator.
/// </summary>
/// <param name="quadrantSize">Size of a terrain quadrant (currently unused here).</param>
/// <param name="localTerrain">Local terrain whose coordinates the filters operate on.</param>
public FilterGenerator(int quadrantSize, LocalTerrain localTerrain)
{
    // All per-filter coordinate maps share the same capacity.
    const int coordinateMapSize = 100;
    globalFilterMountainC = new GlobalCoordinates(coordinateMapSize);
    globalFilterAverageC = new GlobalCoordinates(coordinateMapSize);
    globalFilterMedianC = new GlobalCoordinates(coordinateMapSize);
    globalFilterSpikeC = new GlobalCoordinates(coordinateMapSize);
    globalFilterGaussianC = new GlobalCoordinates(coordinateMapSize);
    globalFilterMinThresholdC = new GlobalCoordinates(coordinateMapSize);
    globalFilterMaxThresholdC = new GlobalCoordinates(coordinateMapSize);

    lt = localTerrain;
    localCoordinates = lt.localTerrainC;

    // One instance of each filter type, each back-referencing this generator.
    mf = new MountainFilter(this);
    af = new AverageFilter(this);
    mdf = new MedianFilter(this);
    sf = new SpikeFilter(this);
    gf = new GaussianFilter(this);
    tf = new ThresholdFilter(this);
}
/// <summary>
/// Prepares an image for analysis: resizes it, normalizes intensity, blurs out
/// noise, then binarizes it at a level derived from its own histogram.
/// The image is modified in place.
/// </summary>
private void ProcessImage(KalikoImage image)
{
    // Shrink first so the remaining filters run on fewer pixels.
    image.Resize(XSIZE, YSIZE);

    // Normalize the intensity range before smoothing.
    var normalize = new NormalizationFilter();
    image.ApplyFilter(normalize);

    // Blur to suppress noise ahead of thresholding.
    var blur = new FastGaussianBlurFilter(5f);
    image.ApplyFilter(blur);

    // Derive the binarization level from the image's histogram, then threshold.
    var level = new Histogram(image).GetThresholdLevel();
    image.ApplyFilter(new ThresholdFilter(level));
}
/// <inheritdoc/>
public override ProcessingBlock Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
{
    // Deserializes a ProcessingBlock from JSON shaped like
    //   { "Name": "<filter name>", "<OptionName>": <float>, ... }
    // The reads below are strictly positional: each filter's options must appear
    // in exactly the order hard-coded here, or the reader desynchronizes.
    // NOTE(review): no Read() return values are checked, so truncated/malformed
    // JSON surfaces as a reader exception rather than a descriptive error.
    _ = reader.Read(); // advance past the object start to the "Name" property
    _ = reader.GetString(); // consume the "Name" property name
    _ = reader.Read(); // advance to the name value
    var name = reader.GetString(); // the filter's display name selects the block type
    ProcessingBlock block;
    switch (name)
    {
        case "Decimation Filter":
            block = new DecimationFilter();
            _ = reader.Read(); // advance to the next property name
            _ = reader.GetString(); // "FilterMagnitude"
            _ = reader.Read(); // advance to its value
            block.Options[Option.FilterMagnitude].Value = reader.GetSingle();
            _ = reader.Read(); // advance to the object end
            break;
        case "Spatial Filter":
            block = new SpatialFilter();
            _ = reader.Read(); // advance to the next property name
            _ = reader.GetString(); // "FilterMagnitude"
            _ = reader.Read(); // advance to its value
            block.Options[Option.FilterMagnitude].Value = reader.GetSingle();
            _ = reader.Read(); // advance to the next property name
            _ = reader.GetString(); // "FilterSmoothAlpha"
            _ = reader.Read(); // advance to its value
            block.Options[Option.FilterSmoothAlpha].Value = reader.GetSingle();
            _ = reader.Read(); // advance to the next property name
            _ = reader.GetString(); // "FilterSmoothDelta"
            _ = reader.Read(); // advance to its value
            block.Options[Option.FilterSmoothDelta].Value = reader.GetSingle();
            _ = reader.Read(); // advance to the object end
            break;
        case "Temporal Filter":
            block = new TemporalFilter();
            _ = reader.Read(); // advance to the next property name
            _ = reader.GetString(); // "FilterSmoothAlpha"
            _ = reader.Read(); // advance to its value
            block.Options[Option.FilterSmoothAlpha].Value = reader.GetSingle();
            _ = reader.Read(); // advance to the next property name
            _ = reader.GetString(); // "FilterSmoothDelta"
            _ = reader.Read(); // advance to its value
            block.Options[Option.FilterSmoothDelta].Value = reader.GetSingle();
            _ = reader.Read(); // advance to the object end
            break;
        case "Hole Filling Filter":
            block = new HoleFillingFilter();
            _ = reader.Read(); // advance to the next property name
            _ = reader.GetString(); // "HolesFill"
            _ = reader.Read(); // advance to its value
            block.Options[Option.HolesFill].Value = reader.GetSingle();
            _ = reader.Read(); // advance to the object end
            break;
        case "Threshold Filter":
            block = new ThresholdFilter();
            _ = reader.Read(); // advance to the next property name
            _ = reader.GetString(); // "MinDistance"
            _ = reader.Read(); // advance to its value
            block.Options[Option.MinDistance].Value = reader.GetSingle();
            _ = reader.Read(); // advance to the next property name
            _ = reader.GetString(); // "MaxDistance"
            _ = reader.Read(); // advance to its value
            block.Options[Option.MaxDistance].Value = reader.GetSingle();
            _ = reader.Read(); // advance to the object end
            break;
        default:
            throw new NotSupportedException($"The filter {name} is not supported in this converter");
    }
    return(block);
}
public void Init()
{
    // Fresh filter per test run; cache the two distance options under test.
    _pb = new ThresholdFilter();
    var options = _pb.Options;
    minOption = options[Option.MinDistance];
    maxOption = options[Option.MaxDistance];
}
/// <summary>
/// Builds the frame-processing callback: waits for a frameset from the
/// pipeline, runs the post-processing chain, and forwards the result to the
/// next stage via <c>src.FrameReady</c>.
/// </summary>
private void SetupProcessingBlock(Pipeline pipeline, Colorizer colorizer, DecimationFilter decimate, SpatialFilter spatial, TemporalFilter temp, HoleFillingFilter holeFill, ThresholdFilter threshold)
{
    // Setup / start frame processing
    processingBlock = new CustomProcessingBlock((f, src) =>
    {
        // FramesReleaser tracks all newly allocated .NET frames and ensures
        // deterministic finalization at the end of the scope.
        using (var releaser = new FramesReleaser())
        {
            using (var frames = pipeline.WaitForFrames().DisposeWith(releaser))
            {
                // Fix: the threshold filter operates on depth data, so it must run
                // BEFORE the colorizer converts depth to RGB. The original applied
                // it after colorization, where distance clipping cannot work.
                var processedFrames = frames
                    .ApplyFilter(decimate).DisposeWith(releaser)
                    .ApplyFilter(spatial).DisposeWith(releaser)
                    .ApplyFilter(temp).DisposeWith(releaser)
                    .ApplyFilter(holeFill).DisposeWith(releaser)
                    .ApplyFilter(threshold).DisposeWith(releaser)
                    .ApplyFilter(colorizer).DisposeWith(releaser);

                // Send it to the next processing stage
                src.FrameReady(processedFrames);
            }
        }
    });
}
// TODO: add MaskFilter
// TODO: add contour filter
// TODO: draw contours
// TODO: it would be nice if the merge filter could choose which level to take
//       (not only the previous one)
//
// Manual smoke-test driver for the graph-filter library: each braced section
// exercises one filter type, shows the result in an OpenCV window (press any
// key to advance), and saves outN.jpg snapshots to the working directory.
// NOTE(review): input paths are hard-coded absolute d:\GM\all\... files, so
// this only runs on the author's machine.
static void Main(string[] args)
{
    // dnn tests
    DnnInfo dnn = new DnnInfo("model_bn.pb");
    //---------------------------------------------------------------------------
    String winName = "Test Window"; // The name of the window
    CvInvoke.NamedWindow(winName); // Create the window using the specific name

    // Bitmap -> Mat round-trip check.
    var bitmap = new Bitmap(@"d:\GM\all\15752977512762.png");
    Mat fromBitmap = GetCvMatFromBitmap(bitmap);
    //CvInvoke.Imshow(winName, fromBitmap);
    //CvInvoke.WaitKey(0);
    var bittt = GetRGBBitmapFromCvMat(fromBitmap);
    bittt.Save("out.jpg");

    // Graph test: source -> BGR-to-gray -> adaptive threshold, then re-run
    // with a changed parameter to verify the graph re-evaluates.
    GraphFilter graph = new GraphFilter();
    var scFilter = graph.Add(FilterType.Source);
    scFilter["Source"] = fromBitmap;
    var threshFilter = graph.Add(FilterType.Threshold).ConnectBefore(graph.Add(FilterType.Bgr2Gray).ConnectBefore(scFilter));
    threshFilter["Type"] = "AdaptiveGaussianBinaryInv";
    threshFilter["Threshold"] = 125;
    threshFilter["ValueSet"] = 255;
    threshFilter["Kern"] = 5;
    threshFilter["SubtractedFromMean"] = 0;
    var outs = graph.GetOuts();
    CvInvoke.Imshow(winName, outs[0].Image);
    CvInvoke.WaitKey(0);
    threshFilter["Kern"] = 3;
    outs = graph.GetOuts();
    CvInvoke.Imshow(winName, outs[0].Image);
    CvInvoke.WaitKey(0);
    //-----------------------------------------------------------

    // Two image loaders shared by the remaining sections.
    var laadFilter = new LoadBgrFilter();
    laadFilter["File"] = new FileInfo(@"d:\GM\all\15808995027720.jpg");
    var laadFilter2 = new LoadBgrFilter();
    laadFilter2["File"] = new FileInfo(@"d:\GM\all\15808863021350.png");

    // Gray conversion ------------------------------------
    var grayFilter = new Bgr2GrayFilter();
    grayFilter.ConnectBefore(laadFilter);
    var imgs00 = grayFilter.GetOut();
    CvInvoke.Imshow(winName, imgs00[0].Image);
    CvInvoke.WaitKey(0);

    // Rotate: two opposite 45-degree rotations merged 50/50 -----------------
    {
        var rotF1 = new RotationFilter();
        rotF1["Deg"] = 45.0;
        rotF1["Scale"] = 0.75;
        rotF1["NewSize"] = 1;
        rotF1["Width"] = 500;
        rotF1["Height"] = 500;
        rotF1.ConnectBefore(laadFilter);
        var rotF2 = new RotationFilter();
        rotF2["Deg"] = 360.0 - 45.0;
        rotF2["Scale"] = 0.75;
        rotF2["NewSize"] = 1;
        rotF2["Width"] = 500;
        rotF2["Height"] = 500;
        rotF2.ConnectBefore(laadFilter2);
        // MergeFilter blends its two inputs: Alpha*in1 + Beta*in2 + Gamma.
        var mergeFilter = new MergeFilter();
        mergeFilter.ConnectBefore(rotF1);
        mergeFilter.ConnectBefore(rotF2);
        mergeFilter["Alpha"] = 0.5;
        mergeFilter["Beta"] = 0.5;
        mergeFilter["Gamma"] = 0.5;
        var imgs0 = mergeFilter.GetOut();
        var bittt01 = GetRGBBitmapFromCvMat(imgs0[0].Image);
        bittt01.Save("out1.jpg");
        CvInvoke.Imshow(winName, imgs0[0].Image);
        CvInvoke.WaitKey(0);
    }

    // Resize both inputs to a common size, then diff them (Alpha=1, Beta=-1).
    {
        var resFinlt1 = new ResizeAbsFilter();
        resFinlt1["Width"] = 240;
        resFinlt1["Height"] = 320;
        resFinlt1.ConnectBefore(laadFilter);
        resFinlt1.ConnectBefore(laadFilter2);
        var imgs = resFinlt1.GetOut();
        CvInvoke.Imshow(winName, imgs[0].Image);
        CvInvoke.WaitKey(0);
        CvInvoke.Imshow(winName, imgs[1].Image);
        CvInvoke.WaitKey(0);
        // MergeFilter
        var mergeFilter = new MergeFilter();
        mergeFilter.ConnectBefore(resFinlt1);
        mergeFilter["Alpha"] = 1.0;
        mergeFilter["Beta"] = -1.0;
        mergeFilter["Gamma"] = 0.0;
        var imgs0 = mergeFilter.GetOut();
        var bittt02 = GetRGBBitmapFromCvMat(imgs0[0].Image);
        bittt02.Save("out2.jpg");
        CvInvoke.Imshow(winName, imgs0[0].Image);
        CvInvoke.WaitKey(0);
    }

    // Blur (median, kernel 6)
    {
        var blurFilter = new BlurFilter();
        blurFilter["Type"] = "Median";//"Gaussian";
        blurFilter["Kern"] = 6;
        blurFilter.ConnectBefore(laadFilter);
        var imgs0 = blurFilter.GetOut();
        var bittt03 = GetRGBBitmapFromCvMat(imgs0[0].Image);
        bittt03.Save("out3.jpg");
        CvInvoke.Imshow(winName, imgs0[0].Image);
        CvInvoke.WaitKey(0);
        Mat fromBitmap00 = GetCvMatFromBitmap(bittt03);
        CvInvoke.Imshow(winName, fromBitmap00);
        CvInvoke.WaitKey(0);
    }

    // Morphology (erode x2 with an elliptical kernel)
    {
        var morphFlt = new MorphologyFilter();
        morphFlt["Type"] = "Erode";
        morphFlt["Kern"] = 5;
        morphFlt["Count"] = 2;
        morphFlt["KernType"] = "Ellipse";
        morphFlt.ConnectBefore(laadFilter);
        var imgs0 = morphFlt.GetOut();
        var bittt04 = GetRGBBitmapFromCvMat(imgs0[0].Image);
        bittt04.Save("out4.jpg");
        CvInvoke.Imshow(winName, imgs0[0].Image);
        CvInvoke.WaitKey(0);
        Mat fromBitmap00 = GetCvMatFromBitmap(bittt04);
        CvInvoke.Imshow(winName, fromBitmap00);
        CvInvoke.WaitKey(0);
    }

    // Threshold, this time as a standalone filter chained off grayFilter.
    {
        var threshFlt = new ThresholdFilter();
        threshFlt["Type"] = "AdaptiveGaussianBinaryInv";
        threshFlt["Threshold"] = 125;
        threshFlt["ValueSet"] = 255;
        threshFlt["Kern"] = 5;
        threshFlt["SubtractedFromMean"] = 0;
        threshFlt.ConnectBefore(grayFilter);
        var imgs0 = threshFlt.GetOut();
        var bittt05 = GetRGBBitmapFromCvMat(imgs0[0].Image);
        bittt05.Save("out5.jpg");
        CvInvoke.Imshow(winName, imgs0[0].Image);
        CvInvoke.WaitKey(0);
        Mat fromBitmap00 = GetCvMatFromBitmap(bittt05);
        CvInvoke.Imshow(winName, fromBitmap00);
        CvInvoke.WaitKey(0);
    }

    // Gradient (Laplacian)
    // NOTE(review): the "Hight" key spelling must match whatever GradientFilter
    // expects internally — confirm against the filter's key names.
    {
        var gradFltr = new GradientFilter();
        gradFltr["Type"] = "Laplacian"; // "Canny" "Sobel"
        gradFltr["Kern"] = 1;
        gradFltr["Low"] = 0;
        gradFltr["Hight"] = 255;
        gradFltr.ConnectBefore(laadFilter);
        var imgs0 = gradFltr.GetOut();
        var bittt06 = GetRGBBitmapFromCvMat(imgs0[0].Image);
        bittt06.Save("out6.jpg");
        CvInvoke.Imshow(winName, imgs0[0].Image);
        CvInvoke.WaitKey(0);
        Mat fromBitmap00 = GetCvMatFromBitmap(bittt06);
        CvInvoke.Imshow(winName, fromBitmap00);
        CvInvoke.WaitKey(0);
    }

    // My test: Sobel gradient, subtract it from the source, blur, then blend
    // the blurred diff back with the gradient ------------------------------------
    {
        var gradFl = new GradientFilter();
        gradFl["Type"] = "Sobel";
        gradFl["Kern"] = 1;
        gradFl.ConnectBefore(laadFilter);
        // NOTE(review): "Detate" is likely a typo for "Dilate" — confirm against
        // MorphologyFilter's accepted type strings. morphFlt is also never
        // connected downstream, so this filter currently has no effect.
        var morphFlt = new MorphologyFilter();
        morphFlt["Type"] = "Detate";
        morphFlt["Kern"] = 5;
        morphFlt["Count"] = 2;
        morphFlt["KernType"] = "Ellipse";
        morphFlt.ConnectBefore(gradFl);
        var mergeFlt = new MergeFilter();
        mergeFlt.ConnectBefore(laadFilter);
        mergeFlt.ConnectBefore(gradFl);
        mergeFlt["Alpha"] = 1.0;
        mergeFlt["Beta"] = -1.0;
        mergeFlt["Gamma"] = 0.0;
        var blurFilter = new BlurFilter();
        blurFilter["Type"] = "Median";//"Gaussian";
        blurFilter["Kern"] = 6;
        blurFilter.ConnectBefore(mergeFlt);
        var mergeFlt2 = new MergeFilter();
        mergeFlt2.ConnectBefore(blurFilter);
        mergeFlt2.ConnectBefore(gradFl);
        mergeFlt2["Alpha"] = 0.5;
        mergeFlt2["Beta"] = 0.5;
        mergeFlt2["Gamma"] = 0.0;
        var imgs0 = mergeFlt2.GetOut();
        var bittt7 = GetRGBBitmapFromCvMat(imgs0[0].Image);
        bittt7.Save("out7.jpg");
        CvInvoke.Imshow(winName, imgs0[0].Image);
        CvInvoke.WaitKey(0);
        Mat fromBitmap00 = GetCvMatFromBitmap(bittt7);
        CvInvoke.Imshow(winName, fromBitmap00);
        CvInvoke.WaitKey(0);
    }

    CvInvoke.WaitKey(0); // Wait for the key pressing event
    CvInvoke.DestroyWindow(winName); // Destroy the window if key is pressed
    return;
}
/// <summary>
/// Menu handler: asks the user for a threshold level via a modal dialog, then
/// queues a ThresholdFilter on the background worker (if it is idle).
/// </summary>
private void thresholdToolStripMenuItem_Click(object sender, EventArgs e)
{
    // Fix: the dialog is a Form (IDisposable) and was never disposed; wrap it
    // in a using block so its handle is released deterministically.
    using (cipFormThreshold formThreshold = new cipFormThreshold(this.picBoxModifyed.Image, this.GetCurrentRaster(), Thread.CurrentThread.CurrentUICulture))
    {
        if (formThreshold.ShowDialog() != DialogResult.OK)
        {
            return;
        }

        byte level = formThreshold.GetLevel();
        if (!backgroundWorkerCip.IsBusy)
        {
            ImageFilter filter = new ThresholdFilter(level);
            backgroundWorkerCip.RunWorkerAsync(filter);
            // NOTE(review): this executes while the worker is still filtering, so
            // the histogram reflects the pre-filter image — consider moving it to
            // the worker's RunWorkerCompleted handler.
            this.CalculateHistogram();
        }
    }
}
// Initializes the RealSense camera session: configures post-processing
// filters, starts the pipeline with the user's stream settings, loads the
// device's JSON configuration, reads the field-of-view intrinsics into the
// UI labels, records the primary-screen resolution, and hooks the frame
// callbacks up to the window. Any failure is shown in a message box.
void Init()
{
    try
    {
        #region FILTERS
        // Edge-preserving spatial smoothing.
        spatialFilter = new SpatialFilter();
        spatialFilter.Options[Option.FilterMagnitude].Value = 5.0F;
        spatialFilter.Options[Option.FilterSmoothAlpha].Value = 0.25F;
        spatialFilter.Options[Option.FilterSmoothDelta].Value = 50.0F;
        // Down-sample the depth frame by the magnitude factor.
        decimationFilter = new DecimationFilter();
        decimationFilter.Options[Option.FilterMagnitude].Value = 2.0F;
        holeFilter = new HoleFillingFilter();
        // Threshold filter is created with library defaults; the explicit
        // min/max distance range is currently disabled.
        thresholdFilter = new ThresholdFilter();
        //thresholdFilter.Options[Option.MinDistance].Value = 0.73F;
        //thresholdFilter.Options[Option.MaxDistance].Value = 0.81F;
        #endregion

        // Align color frames to the depth viewport.
        align_to = new Align(Intel.RealSense.Stream.Depth);
        colorizer = new Colorizer();
        pipeline = new Pipeline();

        // CONFIG SETTINGS
        var cfg = new Config();
        cfg.EnableStream(Intel.RealSense.Stream.Depth, resolutionW, resolutionH, Format.Z16, FPS); // depth resolution chosen by the user
        cfg.EnableStream(Intel.RealSense.Stream.Color, 640, 480, Format.Rgb8, 30);
        pipelineProfile = pipeline.Start(cfg); // start streaming with the user config

        var advancedDevice = AdvancedDevice.FromDevice(pipelineProfile.Device); // connected device
        // Load the device's configuration settings from a JSON file.
        // NOTE(review): CustomConfig.json is resolved against the working
        // directory and File.ReadAllText throws if it is missing — the outer
        // catch turns that into a message box.
        advancedDevice.JsonConfiguration = File.ReadAllText(@"CustomConfig.json");
        selectedDevice = pipelineProfile.Device;

        #region Field Of View Info
        float[] dfov, cfov, irfov;
        var depth_stream = pipelineProfile.GetStream<VideoStreamProfile>(Intel.RealSense.Stream.Depth);
        Intrinsics depthIntr = depth_stream.GetIntrinsics();
        dfov = depthIntr.FOV; // float[2] - horizontal and vertical field of view in degrees
        var color_stream = pipelineProfile.GetStream<VideoStreamProfile>(Intel.RealSense.Stream.Color);
        Intrinsics colorIntr = color_stream.GetIntrinsics();
        cfov = colorIntr.FOV; // float[2] - horizontal and vertical field of view in degrees
        var ir_stream = pipelineProfile.GetStream<VideoStreamProfile>(Intel.RealSense.Stream.Infrared);
        Intrinsics irIntr = ir_stream.GetIntrinsics();
        irfov = irIntr.FOV; // float[2] - horizontal and vertical field of view in degrees
        lblDepthFov.Text = "Depth FOV : " + "H = " + Convert.ToInt32(dfov[0]).ToString() + "° , " + "V = " + Convert.ToInt32(dfov[1]).ToString() + "°";
        lblColorFov.Text = "RGB FOV : " + "H = " + Convert.ToInt32(cfov[0]).ToString() + "° , " + "V = " + Convert.ToInt32(cfov[1]).ToString() + "°";
        lblInfraredFov.Text = "IR FOV : " + "H = " + Convert.ToInt32(irfov[0]).ToString() + "° , " + "V = " + Convert.ToInt32(irfov[1]).ToString() + "°";
        #endregion

        // Get the primary screen resolution.
        screenWidth = Convert.ToInt32(System.Windows.SystemParameters.PrimaryScreenWidth.ToString());
        screenHeight = Convert.ToInt32(System.Windows.SystemParameters.PrimaryScreenHeight.ToString());

        // Camera is now streaming: wire the frames up to the UI.
        SetupWindow(pipelineProfile, out updateDepth, out updateColor, out updateIR1, out updateIR2);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
}