/// <summary>
/// Applies a fixed-level threshold to <paramref name="img"/> via the native imgproc binding.
/// </summary>
/// <param name="img">Source image.</param>
/// <param name="thresh">Threshold value.</param>
/// <param name="maxval">Value used with the binary thresholding types.</param>
/// <param name="type">Thresholding type.</param>
/// <returns>A tuple of the threshold value reported by the native call and the destination image.</returns>
public (double, Mat) threshold(Mat img, double thresh, double maxval, ThresholdTypes type)
{
    var result = new Mat();
    cv2_native_api.imgproc_threshold(img.InputArray, result.OutputArray, thresh, maxval, (int)type, out var computed);
    return (computed, result);
}
/// <summary>
/// Native binding for OpenCV's <c>cvAdaptiveThreshold</c>: applies an adaptive threshold
/// to <paramref name="src"/>, writing the result to <paramref name="dst"/>.
/// </summary>
/// <param name="src">Source array.</param>
/// <param name="dst">Destination array.</param>
/// <param name="maxValue">Value assigned to pixels that satisfy the threshold condition.</param>
/// <param name="adaptiveMethod">Adaptive thresholding algorithm to use.</param>
/// <param name="thresholdType">Thresholding type.</param>
/// <param name="blockSize">Size of the pixel neighborhood used to compute the per-pixel threshold.</param>
/// <param name="param1">Presumably the constant subtracted from the (weighted) neighborhood mean,
/// matching the C parameter of the managed AdaptiveThreshold APIs — confirm against the OpenCV C docs.</param>
internal static extern void cvAdaptiveThreshold( Arr src, Arr dst, double maxValue, AdaptiveThresholdMethod adaptiveMethod, ThresholdTypes thresholdType, int blockSize, double param1);
/// <summary>
/// Creates an adaptive-threshold layer with the given configuration.
/// </summary>
/// <param name="maxValue">Value assigned to pixels that satisfy the threshold condition.</param>
/// <param name="adaptiveMethod">Adaptive thresholding algorithm.</param>
/// <param name="thresholdType">Thresholding type.</param>
/// <param name="blockSize">Neighborhood size used to compute per-pixel thresholds.</param>
/// <param name="c">Constant subtracted from the neighborhood mean.</param>
public AdaptiveThresholdLayer(double maxValue, AdaptiveThresholdTypes adaptiveMethod, ThresholdTypes thresholdType, int blockSize, double c)
{
    // Plain field/property initialization; order is irrelevant.
    C = c;
    BlockSize = blockSize;
    ThresholdType = thresholdType;
    AdaptiveMethod = adaptiveMethod;
    MaxValue = maxValue;
}
/// <summary>
/// Demo: thresholds the global source image and displays source, grayscale and result
/// (each with its histogram) via the Glb helpers.
/// </summary>
/// <param name="thresh">Threshold value.</param>
/// <param name="maxvalue">Value used with the binary thresholding types.</param>
/// <param name="type">Thresholding type.</param>
public static void Threshold(double thresh = 128, double maxvalue = 255, ThresholdTypes type = ThresholdTypes.Binary)
{
    // Show the untouched source first.
    Glb.DrawMatAndHist0(Glb.matSrc);

    // Threshold operates on a single-channel image, so convert to gray.
    var gray = Glb.matSrc.CvtColor(ColorConversionCodes.BGR2GRAY);
    Glb.DrawMatAndHist1(gray);

    // Apply the fixed-level threshold and show the result.
    var binary = gray.Threshold(thresh, maxvalue, type);
    Glb.DrawMatAndHist2(binary);

    gray.Dispose();
    binary.Dispose();
}
/// <summary>
/// Applies an adaptive threshold to <paramref name="img"/> via the native imgproc binding.
/// </summary>
/// <param name="img">Source image.</param>
/// <param name="maxValue">Value assigned to pixels that satisfy the threshold condition.</param>
/// <param name="method">Adaptive thresholding algorithm.</param>
/// <param name="type">Thresholding type.</param>
/// <param name="blockSize">Neighborhood size used to compute per-pixel thresholds.</param>
/// <param name="delta">Constant subtracted from the neighborhood mean.</param>
/// <returns>The thresholded destination image.</returns>
public Mat adaptiveThreshold(Mat img, double maxValue, AdaptiveThresholdTypes method, ThresholdTypes type, int blockSize, double delta)
{
    var result = new Mat();
    cv2_native_api.imgproc_adaptiveThreshold(img.InputArray, result.OutputArray, maxValue, (int)method, (int)type, blockSize, delta);
    return result;
}
// Segments the image by keeping only the pixels inside its largest contour.
// Pipeline: gray -> Gaussian blur -> threshold -> open/close -> largest contour -> mask AND.
private Mat contourSeg(Mat src, ThresholdTypes CONTOUR_TYPE, int SIGMAX)
{
    // Grayscale conversion and Gaussian smoothing before binarization.
    var gray = new Mat();
    var binary = new Mat();
    Cv2.CvtColor(src, gray, ColorConversionCodes.RGB2GRAY);
    Cv2.GaussianBlur(gray, gray, new Size(5, 5), SIGMAX);
    Cv2.Threshold(gray, binary, 0, 255, CONTOUR_TYPE);

    // Morphological open then close to remove speckles and fill small holes.
    var kernel = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(10, 10));
    Cv2.MorphologyEx(binary, binary, MorphTypes.Open, kernel);
    Cv2.MorphologyEx(binary, binary, MorphTypes.Close, kernel);

    var hierarchy = new Mat();
    Cv2.FindContours(binary, out Mat[] contours, hierarchy, RetrievalModes.CComp, ContourApproximationModes.ApproxSimple);

    // Find the contour with the largest absolute area. Scanning from the end
    // with a strict comparison keeps the highest index on ties, as before.
    double bestArea = 0;
    int bestIdx = 0;
    for (int i = contours.Length - 1; i >= 0; i--)
    {
        double area = Math.Abs(Cv2.ContourArea(contours[i]));
        if (area > bestArea)
        {
            bestArea = area;
            bestIdx = i; // remember index of the largest contour so far
        }
    }

    // Fill the winning contour into a single-channel mask.
    Mat mask = Mat.Zeros(src.Size(), MatType.CV_8UC1);
    Cv2.DrawContours(mask, new[] { contours[bestIdx] }, -1, Scalar.All(255), -1);

    // Expand the mask to three channels and keep only the masked source pixels.
    Mat original = src.Clone();
    var output = new Mat();
    Cv2.CvtColor(mask, mask, ColorConversionCodes.GRAY2RGB);
    Cv2.BitwiseAnd(original, mask, output);
    return output;
}
/// <summary>
/// Applies Niblack thresholding to the input image.
/// </summary>
/// <remarks><![CDATA[
/// Transforms a grayscale image into a binary image by comparing each pixel against a
/// locally computed threshold T(x, y): the mean of the blockSize x blockSize neighborhood
/// of (x, y) minus delta times its standard deviation. With THRESH_BINARY a pixel becomes
/// maxValue when src(x, y) > T(x, y) and 0 otherwise; THRESH_BINARY_INV inverts that.
/// The function can't process the image in-place.
/// ]]></remarks>
/// <param name="src">Source 8-bit single-channel image.</param>
/// <param name="dst">Destination image of the same size and the same type as src.</param>
/// <param name="maxValue">Non-zero value assigned to the pixels for which the condition is satisfied,
/// used with the THRESH_BINARY and THRESH_BINARY_INV thresholding types.</param>
/// <param name="type">Thresholding type, see cv::ThresholdTypes.</param>
/// <param name="blockSize">Size of a pixel neighborhood that is used to calculate a threshold value for the pixel: 3, 5, 7, and so on.</param>
/// <param name="delta">Constant multiplied with the standard deviation and subtracted from the mean.
/// Normally, it is taken to be a real number between 0 and 1.</param>
public static void NiblackThreshold(
    InputArray src, OutputArray dst,
    double maxValue, ThresholdTypes type, int blockSize, double delta)
{
    // Guard clauses: reject null arguments up front.
    if (src == null)
        throw new ArgumentNullException(nameof(src));
    if (dst == null)
        throw new ArgumentNullException(nameof(dst));
    src.ThrowIfDisposed();
    dst.ThrowIfNotReady();

    NativeMethods.ximgproc_niBlackThreshold(src.CvPtr, dst.CvPtr, maxValue, (int)type, blockSize, delta);

    // Keep the managed wrappers alive across the native call, then commit dst.
    GC.KeepAlive(src);
    dst.Fix();
}
/// <summary>
/// Executes the configured operation (fixed or adaptive threshold) on <c>src</c>,
/// reading all parameters via <c>getValue</c> and storing the result in <c>dst</c>.
/// </summary>
/// <returns>true when the run completes (including the no-op default branch).</returns>
public override bool run()
{
    // Fix: the previous version wrapped this body in
    // `catch (Exception ex) { throw ex; }`, which resets the stack trace.
    // Letting exceptions propagate unchanged is equivalent to `throw;`
    // and preserves the original trace for callers.
    switch ((Method)getValue("method"))
    {
        case Method.Threshold:
            threshold = (double)getValue("threshold");
            maxvalue = (double)getValue("maxvalue");
            thresholdType = (ThresholdTypes)getValue("thresholdType");
            dst = src.Threshold(threshold, maxvalue, thresholdType);
            break;

        case Method.AdaptiveThreshold:
            maxvalue = (double)getValue("maxvalue");
            adaptiveType = (AdaptiveThresholdTypes)getValue("adaptiveType");
            thresholdType = (ThresholdTypes)getValue("thresholdType");
            blocksize = (int)getValue("blocksize");
            // AdaptiveThreshold requires an odd block size; bump even values.
            if (blocksize % 2 == 0)
            {
                blocksize++;
            }
            C = (double)getValue("C");
            dst = src.AdaptiveThreshold(maxvalue, adaptiveType, thresholdType, blocksize, C);
            break;

        default:
            // Unknown method: leave dst untouched, still report success (original behavior).
            break;
    }
    return true;
}
// Adaptive-threshold segmentation wrapper: converts the input to grayscale,
// then binarizes it into the caller-supplied buffer.
private Mat AdaptiveThreshold(Mat image, Mat grayImage, Mat binImage, AdaptiveThresholdTypes adaptiveThresholdTypes, ThresholdTypes thresholdTypes)
{
    // Color-space conversion to a single-channel image.
    Cv2.CvtColor(image, grayImage, ColorConversionCodes.BGR2GRAY);
    // Fixed parameters: maxValue 255, blockSize 7, constant C = 1.
    Cv2.AdaptiveThreshold(grayImage, binImage, 255, adaptiveThresholdTypes, thresholdTypes, 7, 1);
    return binImage;
}
/// <summary>
/// Factory shorthand that builds a <c>Threshold</c> from a threshold value,
/// a thresholding type and a morphology type.
/// </summary>
public static Threshold THRESHOLD(int threst, ThresholdTypes thType, MorphTypes moType) => new Threshold(threst, thType, moType);
/// <summary>
/// Sets the jewel stat type of the entry at index <paramref name="Id"/> via the indexer.
/// </summary>
public void SetThresholdType(ushort Id, ThresholdTypes Value) { this[Id].JewelStatType = Value; }
/// <summary>
/// Applies a fixed-level threshold filter to an image (max value fixed at 255). <br/>
/// Image processing algorithm.
/// </summary>
/// <param name="img">Input image, of type OpenCvSharp.Mat.</param>
/// <param name="thresholdValue">Threshold value, of type int.</param>
/// <param name="threshType">Threshold type, of type OpenCvSharp.ThresholdTypes.</param>
/// <returns>Output image, of type OpenCvSharp.Mat (the shared <c>imgThreshold</c> buffer).</returns>
public Mat ThresholdFilter(ref Mat img, int thresholdValue, ThresholdTypes threshType)
{
    // The result lands in the instance-level buffer, which is then returned.
    Cv2.Threshold(img, imgThreshold, thresholdValue, 255, threshType);
    return imgThreshold;
}
/// <summary>
/// Creates a metric threshold from its value, threshold type and comparison operator.
/// </summary>
/// <param name="thresh">Threshold value.</param>
/// <param name="threshtype">Threshold type.</param>
/// <param name="threshotype">Comparison operator applied against the threshold.</param>
public MetricThreshold(decimal thresh, ThresholdTypes threshtype, ThresholdOperatorTypes threshotype)
{
    this.ThresholdOperatorType = threshotype;
    this.ThresholdType = threshtype;
    this.Threshold = thresh;
}
/// <summary>
/// Applies an adaptive threshold to an array.
/// </summary>
/// <param name="src">Source 8-bit single-channel image.</param>
/// <param name="dst">Destination image of the same size and the same type as src .</param>
/// <param name="maxValue">Non-zero value assigned to the pixels for which the condition is satisfied. See the details below.</param>
/// <param name="adaptiveMethod">Adaptive thresholding algorithm to use, ADAPTIVE_THRESH_MEAN_C or ADAPTIVE_THRESH_GAUSSIAN_C .</param>
/// <param name="thresholdType">Thresholding type that must be either THRESH_BINARY or THRESH_BINARY_INV .</param>
/// <param name="blockSize">Size of a pixel neighborhood that is used to calculate a threshold value for the pixel: 3, 5, 7, and so on.</param>
/// <param name="c">Constant subtracted from the mean or weighted mean (see the details below).
/// Normally, it is positive but may be zero or negative as well.</param>
public static void AdaptiveThreshold(InputArray src, OutputArray dst, double maxValue, AdaptiveThresholdTypes adaptiveMethod, ThresholdTypes thresholdType, int blockSize, double c)
{
    // Guard clauses: reject null or unusable arguments up front.
    if (src == null)
        throw new ArgumentNullException(nameof(src));
    if (dst == null)
        throw new ArgumentNullException(nameof(dst));
    src.ThrowIfDisposed();
    dst.ThrowIfNotReady();

    NativeMethods.imgproc_adaptiveThreshold(src.CvPtr, dst.CvPtr, maxValue, (int)adaptiveMethod, (int)thresholdType, blockSize, c);

    // Keep the managed wrapper alive across the native call, then commit dst.
    GC.KeepAlive(src);
    dst.Fix();
}
/// <summary>
/// Carbon-paper inspection demo: crops an ROI from the global source image,
/// thresholds it, labels blobs with a minimum-area filter and renders them.
/// </summary>
/// <param name="x1">Left edge of the ROI.</param>
/// <param name="y1">Top edge of the ROI.</param>
/// <param name="x2">Right edge of the ROI (inclusive).</param>
/// <param name="y2">Bottom edge of the ROI (inclusive).</param>
/// <param name="thrType">Thresholding type.</param>
/// <param name="thr">Threshold value.</param>
/// <param name="filterArea">Minimum blob area kept by the filter.</param>
public static void CarbonPaper(int x1 = 100, int y1 = 300, int x2 = 1100, int y2 = 1600, ThresholdTypes thrType = ThresholdTypes.Binary, int thr = 128, int filterArea = 30)
{
    // 1. convert to grayscale
    var gray = Glb.matSrc.CvtColor(ColorConversionCodes.BGR2GRAY);

    // 2. roi crop (corner coordinates are inclusive, hence the +1)
    Rect roi = new Rect(x1, y1, x2 - x1 + 1, y2 - y1 + 1);
    var roiPreview = Glb.matSrc.Clone();
    roiPreview.Rectangle(roi, Scalar.Yellow);
    Glb.DrawMat0(roiPreview);

    var roiGray = new Mat(gray, roi);
    Glb.DrawHist0(roiGray);

    // 3. threshold the cropped region
    var thresholded = roiGray.Threshold(thr, 255, thrType);
    Glb.DrawMatAndHist1(thresholded);

    // 4. blob labelling with an area filter
    CvBlobs blobs = new CvBlobs();
    blobs.Label(thresholded);
    blobs.FilterByArea(filterArea, int.MaxValue);

    // 5. render the surviving blobs in colour on a black canvas
    var canvas = new Mat(roiGray.Rows, roiGray.Cols, MatType.CV_8UC3);
    canvas.SetTo(Scalar.Black);
    blobs.RenderBlobs(canvas, canvas, RenderBlobsModes.Color);
    Glb.DrawMatAndHist2(canvas);

    Console.WriteLine("blobs.cnt = {0}", blobs.Count);

    gray.Dispose();
    roiPreview.Dispose();
    roiGray.Dispose();
    thresholded.Dispose();
    canvas.Dispose();
}
// Re-segments the image after the user changed parameters.
// Dispatches on the algorithm selected in comboBox1 ("grabcut", "watershed",
// "meanshift", "floodfill" or "contour"), parses the relevant textbox inputs,
// runs the matching segmentation helper and reports the elapsed time plus the
// parameters used. Errors are mapped to user-facing MessageBox texts (the
// Chinese runtime strings are part of the UI contract and are left untouched).
private void button4_Click(object sender, EventArgs e)
{
    // --- GrabCut branch: iteration count + GrabCut mode + rectangle. ---
    if (comboBox1.Text == "grabcut" && value1.Text == "迭代次数:" && value2.Text == "分割算子:")
    {
        try
        {
            int iter_count = int.Parse(textBox5.Text);
            GrabCutModes grabcutmode = (GrabCutModes)(Enum.Parse(typeof(GrabCutModes), comboBox4.Text));
            int x = int.Parse(textBox7.Text);
            int y = int.Parse(textBox8.Text);
            int w = int.Parse(textBox9.Text);
            int h = int.Parse(textBox10.Text);
            if (iter_count > 20 || iter_count < 0)
            {
                // Iteration count outside the accepted 0-20 range.
                MessageBox.Show("迭代次数于0-20次之内较为合适");
            }
            else
            {
                System.Drawing.Bitmap bitmap = (System.Drawing.Bitmap)pictureBox1.Image;
                Mat image = BitmapConverter.ToMat(bitmap);
                // Time the segmentation with OpenCV's tick counter.
                double startTime = Cv2.GetTickCount();
                Mat res = grabCut(image, iter_count, grabcutmode, x, y, w, h);
                double duration = (Cv2.GetTickCount() - startTime) / (Cv2.GetTickFrequency());
                showImage(res, duration);
                showParameters("迭代次数:", iter_count.ToString(), "分割算子:", grabcutmode.ToString());
                textBox6.Hide();
                showRect(x, y, w, h);
            }
        }
        catch (Exception err)
        {
            // Known failure modes are matched by substrings of the exception text.
            if (err.ToString().Contains("totalSampleCount"))
            {
                MessageBox.Show("您选择的区域无法分割出前景,无法分割,请重新选择区域大小");
            }
            else if (err.ToString().Contains("mask"))
            {
                MessageBox.Show("掩模mask为空,无法分割");
            }
            else if (err.ToString().Contains("Format"))
            {
                MessageBox.Show("输入的字符串格式不正确,请重新输入");
            }
        }
    }
    // --- Watershed branch: odd median-blur kernel + "(w,h)" morphology kernel. ---
    if (comboBox1.Text == "watershed" && value1.Text == "中值滤波内核:" && value2.Text == "形态学卷积核:")
    {
        try
        {
            if (int.Parse(textBox5.Text) % 2 == 0)
            {
                // The median-blur kernel size must be odd.
                MessageBox.Show("中值滤波内核应为奇数,请重新输入。");
            }
            else
            {
                int meadianblur_ksize = int.Parse(textBox5.Text);
                if (meadianblur_ksize > 1000 || meadianblur_ksize < 0)
                {
                    MessageBox.Show("数值不合理,请重新输入适当的数值");
                }
                else
                {
                    // Parse "(w,h)" from textBox6 into a Size.
                    string text = textBox6.Text;
                    string diff11 = text.Replace("(", string.Empty);
                    string diff12 = diff11.Replace(")", string.Empty);
                    string[] str = diff12.Split(',');
                    Size element_size = new Size(int.Parse(str[0]), int.Parse(str[1]));
                    System.Drawing.Bitmap bitmap = (System.Drawing.Bitmap)pictureBox1.Image;
                    Mat image = BitmapConverter.ToMat(bitmap);
                    double startTime = Cv2.GetTickCount();
                    Mat result = waterShed(image, meadianblur_ksize, element_size);
                    double duration = (Cv2.GetTickCount() - startTime) / (Cv2.GetTickFrequency());
                    showImage(result, duration);
                    showParameters("中值滤波内核:", meadianblur_ksize.ToString(), "形态学卷积核:", "(" + element_size.Width.ToString() + "," + element_size.Height.ToString() + ")");
                }
            }
        }
        catch (Exception err)
        {
            if (err.ToString().Contains("Format"))
            {
                MessageBox.Show("输入的字符串格式不正确,请重新输入");
            }
            else if (err.ToString().Contains("索引超出了数组界限"))
            {
                // IndexOutOfRange here means the "(w,h)" kernel text was malformed.
                MessageBox.Show("形态学卷积内核应用英文括号隔开");
            }
            else
            {
                MessageBox.Show(err.ToString());
            }
        }
    }
    // --- MeanShift branch: colour-space and spatial radii. ---
    if (comboBox1.Text == "meanshift" && value1.Text == "颜色域半径:" && value2.Text == "空间域半径:")
    {
        try
        {
            int meanshift_sp = int.Parse(textBox5.Text);
            int meanshift_sr = int.Parse(textBox6.Text);
            if (meanshift_sp < 0 || meanshift_sr < 0 || meanshift_sp > 1000 || meanshift_sr > 1000)
            {
                MessageBox.Show("参数有误或数值过大,请重新输入数据");
            }
            else
            {
                System.Drawing.Bitmap bitmap = (System.Drawing.Bitmap)pictureBox1.Image;
                Mat image = BitmapConverter.ToMat(bitmap);
                double startTime = Cv2.GetTickCount();
                Mat res = meanShift(image, meanshift_sp, meanshift_sr);
                double duration = (Cv2.GetTickCount() - startTime) / (Cv2.GetTickFrequency());
                showImage(res, duration);
                showParameters("颜色域半径:", meanshift_sp.ToString(), "空间域半径:", meanshift_sr.ToString());
            }
        }
        catch (Exception err)
        {
            if (err.ToString().Contains("Format"))
            {
                MessageBox.Show("输入的字符串格式不正确,请重新输入");
            }
            else
            {
                MessageBox.Show(err.ToString());
            }
        }
    }
    // --- FloodFill branch: "[v,v,v,v]" lower/upper diffs plus seed rectangle. ---
    if (comboBox1.Text == "floodfill" && value1.Text == "像素最大下行差异值:" && value2.Text == "像素最大上行差异值:")
    {
        try
        {
            // Parse the lower-difference scalar from "[v0,v1,v2,v3]".
            string diff1 = textBox5.Text;
            string diff11 = diff1.Replace("[", string.Empty);
            string diff12 = diff11.Replace("]", string.Empty);
            string[] str1 = diff12.Split(',');
            Scalar lodiff = new Scalar(int.Parse(str1[0]), int.Parse(str1[1]), int.Parse(str1[2]), int.Parse(str1[3]));
            // Parse the upper-difference scalar the same way.
            string diff2 = textBox6.Text;
            string diff21 = diff2.Replace("[", string.Empty);
            string diff22 = diff21.Replace("]", string.Empty);
            string[] str2 = diff22.Split(',');
            int x = int.Parse(textBox7.Text);
            int y = int.Parse(textBox8.Text);
            int w = int.Parse(textBox9.Text);
            int h = int.Parse(textBox10.Text);
            Scalar updiff = new Scalar(int.Parse(str2[0]), int.Parse(str2[1]), int.Parse(str2[2]), int.Parse(str2[3]));
            System.Drawing.Bitmap bitmap = (System.Drawing.Bitmap)pictureBox1.Image;
            Mat image = BitmapConverter.ToMat(bitmap);
            double startTime = Cv2.GetTickCount();
            Mat res = floodFill(image, lodiff, updiff, x, y, w, h);
            double duration = (Cv2.GetTickCount() - startTime) / (Cv2.GetTickFrequency());
            showImage(res, duration);
            showRect(x, y, w, h);
            showParameters("像素最大下行差异值:", lodiff.ToString(), "像素最大上行差异值:", updiff.ToString());
        }
        catch (Exception err)
        {
            if (err.ToString().Contains("Format"))
            {
                MessageBox.Show("输入的字符串格式不正确,请重新输入(应输入英文逗号)");
            }
            else if (err.ToString().Contains("索引超出了数组界限"))
            {
                MessageBox.Show("像素最大下(上)行差异值应用英文括号隔开");
            }
            else
            {
                MessageBox.Show(err.ToString());
            }
        }
    }
    // --- Contour branch: Gaussian sigma-x + binarization (threshold) type. ---
    if (comboBox1.Text == "contour" && value1.Text == "高斯核x方向标准差:" && value2.Text == "二值化算子:")
    {
        try
        {
            ThresholdTypes contour_type = (ThresholdTypes)(Enum.Parse(typeof(ThresholdTypes), comboBox4.Text));
            int sigmax = int.Parse(textBox5.Text);
            System.Drawing.Bitmap bitmap = (System.Drawing.Bitmap)pictureBox1.Image;
            Mat image = BitmapConverter.ToMat(bitmap);
            double startTime = Cv2.GetTickCount();
            Mat res = contourSeg(image, contour_type, sigmax);
            double duration = (Cv2.GetTickCount() - startTime) / (Cv2.GetTickFrequency());
            showImage(res, duration);
            showParameters("高斯核x方向标准差:", sigmax.ToString(), "二值化算子:", contour_type.ToString());
            textBox6.Hide();
        }
        catch (Exception err)
        {
            if (err.ToString().Contains("Format"))
            {
                MessageBox.Show("输入的字符串格式不正确,请重新输入");
            }
            else if (err.ToString().Contains("索引超出了数组界限"))
            {
                MessageBox.Show("索引超出了数组界限,无法分割");
            }
            else
            {
                MessageBox.Show(err.ToString());
            }
        }
    }
}
/// <summary>
/// Creates jewel node data for the given closest class start and jewel stat type,
/// with an empty slot description.
/// </summary>
public JewelNodeData(CharacterClass closestClassStartType, ThresholdTypes jewelStatType)
{
    SlotDesc = "";
    JewelStatType = jewelStatType;
    ClosestClassStartType = closestClassStartType;
}
/// <summary>
/// Native binding for OpenCV's <c>cvThreshold</c>: applies a fixed-level threshold
/// to <paramref name="src"/>, writing the result to <paramref name="dst"/>.
/// </summary>
/// <param name="src">Source array.</param>
/// <param name="dst">Destination array.</param>
/// <param name="threshold">Threshold value.</param>
/// <param name="max_value">Value used with the binary thresholding types.</param>
/// <param name="threshold_type">Thresholding type.</param>
/// <returns>Presumably the computed threshold for Otsu-style types, matching the managed
/// Threshold wrappers — confirm against the OpenCV C docs.</returns>
internal static extern double cvThreshold( Arr src, Arr dst, double threshold, double max_value, ThresholdTypes threshold_type);
/// <summary>
/// Applies a fixed-level threshold to each array element (CUDA).
/// </summary>
/// <param name="src">Source array (single-channel).</param>
/// <param name="dst">Destination array with the same size and type as src .</param>
/// <param name="thresh">Threshold value.</param>
/// <param name="maxval">Maximum value to use with THRESH_BINARY and THRESH_BINARY_INV threshold types.</param>
/// <param name="type">Threshold type. For details, see threshold . The THRESH_OTSU and THRESH_TRIANGLE
/// threshold types are not supported.</param>
/// <param name="stream">Stream for the asynchronous version.</param>
public static void threshold(InputArray src, OutputArray dst, double thresh, double maxval, ThresholdTypes type, Stream stream = null)
{
    // Guard clauses: reject null or unusable arguments up front.
    if (src == null)
        throw new ArgumentNullException(nameof(src));
    if (dst == null)
        throw new ArgumentNullException(nameof(dst));
    src.ThrowIfDisposed();
    dst.ThrowIfNotReady();

    // A missing stream falls back to the default (null) CUDA stream.
    var streamPtr = stream == null ? Stream.Null.CvPtr : stream.CvPtr;
    NativeMethods.cuda_arithm_threshold(src.CvPtr, dst.CvPtr, thresh, maxval, (int)type, streamPtr);

    // Keep the managed wrappers alive across the native call, then commit dst.
    GC.KeepAlive(src);
    GC.KeepAlive(dst);
    dst.Fix();
}
/// <summary>
/// Creates a fixed-level threshold layer.
/// </summary>
/// <param name="thresh">Threshold value.</param>
/// <param name="maxval">Value used with the binary thresholding types.</param>
/// <param name="thresholdType">Thresholding type.</param>
public ThresholdLayer(double thresh, double maxval, ThresholdTypes thresholdType)
{
    ThresholdType = thresholdType;
    MaxValue = maxval;
    Thresh = thresh;
}
/// <summary>
/// Applies an adaptive threshold to an array.
/// Source matrix must be 8-bit single-channel image.
/// </summary>
/// <param name="maxValue">Non-zero value assigned to the pixels for which the condition is satisfied. See the details below.</param>
/// <param name="adaptiveMethod">Adaptive thresholding algorithm to use, ADAPTIVE_THRESH_MEAN_C or ADAPTIVE_THRESH_GAUSSIAN_C .</param>
/// <param name="thresholdType">Thresholding type that must be either THRESH_BINARY or THRESH_BINARY_INV .</param>
/// <param name="blockSize">Size of a pixel neighborhood that is used to calculate a threshold value for the pixel: 3, 5, 7, and so on.</param>
/// <param name="c">Constant subtracted from the mean or weighted mean (see the details below).
/// Normally, it is positive but may be zero or negative as well.</param>
/// <returns>Destination image of the same size and the same type as src.</returns>
public Mat AdaptiveThreshold(double maxValue, AdaptiveThresholdTypes adaptiveMethod, ThresholdTypes thresholdType, int blockSize, double c)
{
    // Delegate to the static API, allocating a fresh destination matrix.
    var result = new Mat();
    Cv2.AdaptiveThreshold(this, result, maxValue, adaptiveMethod, thresholdType, blockSize, c);
    return result;
}
/// <summary>
/// Applies a fixed-level threshold to each array element.
/// The input matrix must be single-channel, 8-bit or 32-bit floating point.
/// </summary>
/// <param name="thresh">threshold value.</param>
/// <param name="maxval">maximum value to use with the THRESH_BINARY and THRESH_BINARY_INV thresholding types.</param>
/// <param name="type">thresholding type (see the details below).</param>
/// <returns>output array of the same size and type as src.</returns>
public Mat Threshold(double thresh, double maxval, ThresholdTypes type)
{
    // Delegate to the static API, allocating a fresh destination matrix.
    var result = new Mat();
    Cv2.Threshold(this, result, thresh, maxval, type);
    return result;
}
/// <summary>
/// Applies a fixed-level threshold to each array element.
/// </summary>
/// <param name="src">input array (single-channel, 8-bit or 32-bit floating point).</param>
/// <param name="dst">output array of the same size and type as src.</param>
/// <param name="thresh">threshold value.</param>
/// <param name="maxval">maximum value to use with the THRESH_BINARY and THRESH_BINARY_INV thresholding types.</param>
/// <param name="type">thresholding type (see the details below).</param>
/// <returns>the computed threshold value when type == OTSU</returns>
public static double Threshold(InputArray src, OutputArray dst, double thresh, double maxval, ThresholdTypes type)
{
    // Guard clauses: reject null or unusable arguments up front.
    if (src == null)
        throw new ArgumentNullException(nameof(src));
    if (dst == null)
        throw new ArgumentNullException(nameof(dst));
    src.ThrowIfDisposed();
    dst.ThrowIfNotReady();

    var computed = NativeMethods.imgproc_threshold(src.CvPtr, dst.CvPtr, thresh, maxval, (int)type);

    // Keep the managed wrapper alive across the native call, then commit dst.
    GC.KeepAlive(src);
    dst.Fix();
    return computed;
}
/// <summary>
/// Creates jewel node data with default values: Scion start class,
/// Neutral stat type and an empty slot description.
/// </summary>
public JewelNodeData()
{
    SlotDesc = "";
    JewelStatType = ThresholdTypes.Neutral;
    ClosestClassStartType = CharacterClass.Scion;
}