/// <summary>
/// Binarizes an image with Gaussian adaptive thresholding
/// (more accurate than Otsu's global threshold for unevenly lit images).
/// NOTE(review): Cv2.AdaptiveThreshold requires an 8-bit single-channel input;
/// a BGR2GRAY conversion was commented out here, so <paramref name="src"/> is
/// presumably already grayscale — confirm with callers.
/// </summary>
/// <param name="src">Source image (expected 8-bit, single channel).</param>
/// <param name="dst">Receives the binarized image.</param>
/// <returns>Always true (kept for signature compatibility with callers).</returns>
public static bool AdaptiveThreshold(BitmapSource src, out BitmapSource dst)
{
    using (Mat mat = BitmapSourceConverter.ToMat(src))
    using (Mat matbuf = new Mat())
    {
        Cv2.AdaptiveThreshold
        (
            mat,
            matbuf,
            255,                              // value assigned to pixels that pass the threshold
            AdaptiveThresholdTypes.GaussianC, // Gaussian-weighted local mean
            ThresholdTypes.Binary,
            9,                                // neighborhood block size (must be odd)
            128                               // constant subtracted from the local mean
        );
        dst = matbuf.ToBitmapSource();
    }
    return true;
}
/// <summary>
/// Demosaics a Bayer (BG pattern) image to grayscale, denoises it,
/// and binarizes it with Gaussian adaptive thresholding.
/// </summary>
/// <param name="src">Source Bayer-pattern image.</param>
/// <returns>The binarized image.</returns>
public static BitmapSource AdaptiveThreshold(BitmapSource src)
{
    using (Mat mat = BitmapSourceConverter.ToMat(src))
    using (Mat matbuf = new Mat())
    {
        // Demosaic the Bayer pattern straight to grayscale.
        Cv2.CvtColor
        (
            mat,
            matbuf,
            ColorConversionCodes.BayerBG2GRAY
        );

        // Strong denoising before thresholding (a BilateralFilter variant was
        // tried and abandoned; non-local means was kept instead).
        Cv2.FastNlMeansDenoising
        (
            matbuf,
            mat
        );

        Cv2.AdaptiveThreshold
        (
            mat,
            matbuf,
            255,                              // value assigned to pixels that pass the threshold
            AdaptiveThresholdTypes.GaussianC, // Gaussian-weighted local mean
            ThresholdTypes.Binary,
            9,                                // neighborhood block size (must be odd)
            5                                 // constant subtracted from the local mean
        );

        return matbuf.ToBitmapSource();
    }
}
/// <summary>
/// Demosaics a Bayer-pattern image and applies per-channel red/blue gain.
/// </summary>
/// <param name="bitmap">Source Bayer-pattern image.</param>
/// <param name="bayertype">Bayer layout selector: 0=GB, 1=BG, 2=RG, 3=GR.</param>
/// <param name="RGain">Gain multiplied into the red channel.</param>
/// <param name="BGain">Gain multiplied into the blue channel.</param>
/// <returns>The demosaiced, gain-corrected color image.</returns>
/// <exception cref="ArgumentOutOfRangeException">Unknown <paramref name="bayertype"/>.</exception>
public static BitmapSource ToCvColor(this BitmapSource bitmap, int bayertype, double RGain, double BGain)
{
    using (var in_mat = BitmapSourceConverter.ToMat(bitmap))
    using (var rgb_mat = new Mat())
    using (var rgb2_mat = new Mat())
    {
        switch (bayertype)
        {
            // NOTE(review): case 1 converts to RGB while the other cases convert
            // to BGR — preserved as-is to keep behavior; verify the intent.
            case 0: Cv2.CvtColor(in_mat, rgb_mat, ColorConversionCodes.BayerGB2BGR); break;
            case 1: Cv2.CvtColor(in_mat, rgb_mat, ColorConversionCodes.BayerBG2RGB); break;
            case 2: Cv2.CvtColor(in_mat, rgb_mat, ColorConversionCodes.BayerRG2BGR); break;
            case 3: Cv2.CvtColor(in_mat, rgb_mat, ColorConversionCodes.BayerGR2BGR); break;
            default:
                throw new ArgumentOutOfRangeException(nameof(bayertype), bayertype, "Unknown Bayer pattern type.");
        }

        var split_mat = rgb_mat.Split();
        try
        {
            // Channel order is B, G, R: scale blue and red by their gains.
            Cv2.Merge(new Mat[] { split_mat[0] * BGain, split_mat[1], split_mat[2] * RGain }, rgb2_mat);
            return rgb2_mat.ToBitmapSource();
        }
        finally
        {
            // Split() allocates one Mat per channel; release them.
            foreach (var channel in split_mat)
            {
                channel.Dispose();
            }
        }
    }
}
/// <summary>
/// Converts the picture currently shown in the Image control to grayscale.
/// </summary>
private void Button_Click(object sender, RoutedEventArgs e)
{
    // Grab the current picture from the Image control as a BitmapSource.
    var bitmapSource = (BitmapSource)image.Source;

    // Convert to an OpenCV Mat, grayscale it in place, and dispose the
    // temporary Mat (the original leaked it).
    using (var mat = BitmapSourceConverter.ToMat(bitmapSource))
    {
        Cv2.CvtColor(mat, mat, ColorConversionCodes.RGB2GRAY);

        // Push the converted result back into the Image control.
        image.Source = BitmapSourceConverter.ToBitmapSource(mat);
    }
}
/// <summary>
/// Demosaics a Bayer-pattern image, applies a 3x3 color-correction matrix
/// with red/blue gains, then gamma-corrects the result via a lookup table.
/// </summary>
/// <param name="bitmap">Source Bayer-pattern image.</param>
/// <param name="bayertype">Bayer layout selector: 0=GB, 1=BG, 2=RG, 3=GR.</param>
/// <param name="RGain">Gain multiplied into the red row of the matrix.</param>
/// <param name="BGain">Gain multiplied into the blue row of the matrix.</param>
/// <param name="Matrix">Row-major 3x3 color matrix: R row = [0..2], G row = [3..5], B row = [6..8].</param>
/// <param name="Gamma">Gamma value; output = input^(1/Gamma). Must be positive.</param>
/// <returns>A frozen (cross-thread safe) BitmapSource with the corrected image.</returns>
/// <exception cref="ArgumentException">Matrix is null or shorter than 9 elements.</exception>
/// <exception cref="ArgumentOutOfRangeException">Unknown bayertype, or non-positive Gamma.</exception>
public static BitmapSource ToCvColor(this BitmapSource bitmap, int bayertype, double RGain, double BGain, double[] Matrix, double Gamma)
{
    if (Matrix == null || Matrix.Length < 9)
    {
        throw new ArgumentException("Expected a 9-element color-correction matrix.", nameof(Matrix));
    }
    if (Gamma <= 0)
    {
        throw new ArgumentOutOfRangeException(nameof(Gamma), Gamma, "Gamma must be positive.");
    }

    using (var in_mat = BitmapSourceConverter.ToMat(bitmap))
    using (var rgb_mat = new Mat())
    using (var rgb2_mat = new Mat())
    using (var rgb3_mat = new Mat())
    {
        switch (bayertype)
        {
            // NOTE(review): case 1 converts to RGB while the other cases convert
            // to BGR — preserved as-is to keep behavior; verify the intent.
            case 0: Cv2.CvtColor(in_mat, rgb_mat, ColorConversionCodes.BayerGB2BGR); break;
            case 1: Cv2.CvtColor(in_mat, rgb_mat, ColorConversionCodes.BayerBG2RGB); break;
            case 2: Cv2.CvtColor(in_mat, rgb_mat, ColorConversionCodes.BayerRG2BGR); break;
            case 3: Cv2.CvtColor(in_mat, rgb_mat, ColorConversionCodes.BayerGR2BGR); break;
            default:
                throw new ArgumentOutOfRangeException(nameof(bayertype), bayertype, "Unknown Bayer pattern type.");
        }

        var split_mat = rgb_mat.Split();
        try
        {
            // Apply the 3x3 matrix; split channel order is B(0), G(1), R(2),
            // and the merged output keeps that B, G, R order.
            Cv2.Merge(new Mat[]
            {
                split_mat[2] * Matrix[6] + split_mat[1] * Matrix[7] + split_mat[0] * Matrix[8] * BGain,
                split_mat[2] * Matrix[3] + split_mat[1] * Matrix[4] + split_mat[0] * Matrix[5],
                split_mat[2] * Matrix[0] * RGain + split_mat[1] * Matrix[1] + split_mat[0] * Matrix[2]
            }, rgb2_mat);
        }
        finally
        {
            // Split() allocates one Mat per channel; release them.
            foreach (var channel in split_mat)
            {
                channel.Dispose();
            }
        }

        // Build an 8-bit gamma LUT: out = (in/255)^(1/Gamma) * 255.
        byte[] lut = new byte[256];
        double gm = 1.0 / Gamma;
        for (int i = 0; i < 256; i++)
        {
            lut[i] = (byte)(Math.Pow(1.0 * i / 255, gm) * 255);
        }
        Cv2.LUT(rgb2_mat, lut, rgb3_mat);

        var buf = rgb3_mat.ToBitmapSource();
        buf.Freeze(); // make the bitmap immutable so it can cross threads
        return buf;
    }
}
// Ends an ROI drag: maps the dragged rectangle from control coordinates to
// image-pixel coordinates, clamps it inside the image, crops it into _roiMat,
// draws a red outline on a copy of the image, and resets the ROI fields.
// NOTE(review): restyling left alone — the clamp/crop/draw ordering is
// order-sensitive and depends on several instance fields not visible here.
private void MouseUp(MouseEventArgs e)
{
    IsDragging = false;
    var image = e.Source as Image;
    if (image != null)
    {
        // Image currently displayed; PhosImageListIndex appears to be 1-based.
        var originalPhosImage = _phosImageList[PhosImageListIndex - 1];
        // Scale control-space coordinates to image-pixel space
        // (ActualWidth/Height is the rendered size, PixelWidth/Height the true size).
        var pixelMousePositionX = _imagePixelPos.X * originalPhosImage.PixelWidth / image.ActualWidth;
        var pixelMousePositionY = _imagePixelPos.Y * originalPhosImage.PixelHeight / image.ActualHeight;
        var pixelWidth = RoiWidth * originalPhosImage.PixelWidth / image.ActualWidth;
        var pixelHeight = RoiHeight * originalPhosImage.PixelHeight / image.ActualHeight;
        if (pixelHeight > 0 && pixelWidth > 0)
        {
            Mat src = BitmapSourceConverter.ToMat(originalPhosImage);
            // Clamp the rectangle's X so it stays fully inside the image.
            int rectX = (int)Math.Round(pixelMousePositionX);
            if (rectX < 0)
            {
                rectX = 0;
            }
            if (rectX + pixelWidth > originalPhosImage.PixelWidth)
            {
                rectX = (int)(originalPhosImage.PixelWidth - pixelWidth);
            }
            // Clamp the rectangle's Y the same way.
            int rectY = (int)Math.Round(pixelMousePositionY);
            if (rectY < 0)
            {
                rectY = 0;
            }
            if (rectY + pixelHeight > originalPhosImage.PixelHeight)
            {
                rectY = (int)(originalPhosImage.PixelHeight - pixelHeight);
            }
            var roi = new OpenCvSharp.Rect(rectX, rectY, (int)pixelWidth, (int)pixelHeight);
            // Clone so _roiMat owns its pixels independently of src.
            _roiMat = new Mat(src, roi).Clone();
            // Outline thickness: 1% of the smaller image dimension.
            int rectThick = (int)Math.Round(0.01 * originalPhosImage.PixelWidth, 0);
            if (originalPhosImage.PixelHeight < originalPhosImage.PixelWidth)
            {
                rectThick = (int)Math.Round(0.01 * originalPhosImage.PixelHeight, 0);
            }
            // Draw the selection rectangle in red (BGR order: 0,0,255).
            Cv2.Rectangle(src, new OpenCvSharp.Point(rectX, rectY), new OpenCvSharp.Point(pixelWidth + rectX, pixelHeight + rectY), new Scalar(0, 0, 255, 255), rectThick);
            //Cv2.NamedWindow("src", WindowMode.Normal);
            //Cv2.ImShow("src", src);
            //Cv2.ResizeWindow("src", 400, 300);
            //Cv2.WaitKey();
            //Cv2.DestroyAllWindows();
            PhosImage = BitmapSourceConverter.ToBitmapSource(src);
        }
    }
    // Clear the drag state for the next selection.
    RoiX = RoiY = RoiWidth = RoiHeight = 0;
}
/// <summary>
/// Computes HSL values for a bitmap by converting it to an OpenCV Mat and
/// delegating to the Mat-based <c>GetHSL</c> overload (whose exact semantics
/// — e.g. average vs. dominant color — are defined by that overload).
/// </summary>
/// <param name="bsrc">Source image.</param>
/// <param name="l">Receives the lightness value.</param>
/// <param name="s">Receives the saturation value.</param>
/// <param name="h">Receives the hue value.</param>
public static void GetHSL(BitmapSource bsrc, out uint l, out uint s, out uint h)
{
    // Dispose the temporary Mat (the original leaked it).
    using (Mat src = BitmapSourceConverter.ToMat(bsrc))
    {
        GetHSL(src, out l, out s, out h);
    }
}
/// <summary>
/// Saves a BitmapSource to disk by converting it to an OpenCV Mat;
/// the image format is chosen by <c>Mat.Save</c> from the file extension.
/// </summary>
/// <param name="bmp">Image to save.</param>
/// <param name="path">Destination file path.</param>
public static void Save(this BitmapSource bmp, string path)
{
    // Dispose the temporary Mat (the original expression-bodied form leaked it).
    using (var mat = BitmapSourceConverter.ToMat(bmp))
    {
        mat.Save(path);
    }
}