/// <summary>
/// Thresholds the red pixels of a BGR image in HSV space. Red wraps around
/// the hue circle, so two hue bands (0-10 and 160-180) are thresholded
/// separately and combined into a single binary mask.
/// </summary>
static Mat GetRedThresholdedImage(Mat sourceImage)
{
    const double saturationLow = 100d;
    const double valueLow = 100d;
    const double saturationHigh = 255d;
    const double valueHigh = 255d;

    using var hsvImage = new Mat();
    Cv2.CvtColor(sourceImage, hsvImage, ColorConversionCodes.BGR2HSV);

    // Lower red band: hue 0-10.
    using var lowerRed = new Mat();
    Cv2.InRange(hsvImage,
                new Scalar(0d, saturationLow, valueLow),
                new Scalar(10d, saturationHigh, valueHigh),
                lowerRed);

    // Upper red band: hue 160-180.
    using var upperRed = new Mat();
    Cv2.InRange(hsvImage,
                new Scalar(160d, saturationLow, valueLow),
                new Scalar(180d, saturationHigh, valueHigh),
                upperRed);

    // Union of both bands; the masks are binary, so a weighted add suffices.
    var combinedRed = new Mat();
    Cv2.AddWeighted(lowerRed, 1d, upperRed, 1d, 0d, combinedRed);
    return combinedRed;
}
// Refreshes both picture boxes after a zoom change:
//  - picIn shows the cropped region of the original input image,
//  - picOut shows the corresponding region of the zoomed (resized) image.
private void PicZoom()
{
    // Crop the visible region out of the original input image.
    Rectangle cropRect1 = new Rectangle(xOffset, yOffset, imageWidth, imageHeight);
    Bitmap src1 = image_in as Bitmap;
    picIn.Image = src1.Clone(cropRect1, src1.PixelFormat);
    // Convert to an OpenCV Mat and scale by `zoom` in both directions
    // (a dsize of 0x0 means the fx/fy factors determine the output size).
    Mat image_opencv = BitmapConverter.ToMat(new Bitmap(image_in));
    Size dsize = new Size(0, 0);
    int fx = zoom;
    int fy = zoom;
    Mat image_filtered = image_opencv.Resize(dsize, fx, fy, interpolation);
    // Unsharp-mask computation: blurred copy subtracted from the scaled image.
    // NOTE(review): image_usm is computed here but never used below — the output
    // is built from image_filtered. Either dead code, or the MemoryStream below
    // was meant to read image_usm.ToBytes(); confirm intent before changing.
    Mat blur = new Mat();
    Mat image_usm = new Mat();
    Cv2.GaussianBlur(image_filtered, blur, new Size(0, 0), 25);
    Cv2.AddWeighted(image_filtered, alpha, blur, -1 * beta, gamma, image_usm);
    // Round-trip the zoomed Mat back into a GDI+ Image.
    // NOTE(review): none of the Mats created above are disposed here — leak on every call.
    MemoryStream ms_out = new MemoryStream(image_filtered.ToBytes());
    image_out = Image.FromStream(ms_out);
    // Crop the matching (scaled) region out of the zoomed output image.
    Rectangle cropRect2 = new Rectangle(xOffset * zoom, yOffset * zoom, image_out.Width / zoom, image_out.Height / zoom);
    Bitmap src2 = image_out as Bitmap;
    picOut.Image = src2.Clone(cropRect2, src2.PixelFormat);
}
/// <summary>
/// Lets the user pick a logo file and blends it at 30% opacity into the
/// top-left corner of the currently displayed image, then refreshes the display.
/// </summary>
private void Logo()
{
    WriteableBitmap sourceBitmap = ShowImage.Source as WriteableBitmap;
    Mat displayMat = sourceBitmap.ToMat();

    // Ask the user for the logo file; abort on cancel or on a missing image.
    string logoPath = string.Empty;
    FileOpen(ref logoPath);
    if (logoPath == string.Empty)
    {
        return;
    }
    if (displayMat == null)
    {
        return;
    }

    Mat logoMat = new Mat(logoPath, ImreadModes.Color);
    // ROI view into the top-left corner, the same size as the logo.
    Mat cornerRoi = new Mat(displayMat, new OpenCvSharp.Rect(0, 0, logoMat.Cols, logoMat.Rows));
    // Blend the logo directly into the ROI (writes through to displayMat).
    Cv2.AddWeighted(cornerRoi, 1.0, logoMat, 0.3, 0.1, cornerRoi);

    DisplayImage(displayMat);
}
// Processes one frame: splits the input points into left/right sets, runs PCA
// on each side, smooths the PCA parameters with per-side Kalman filters, draws
// principal-axis arrows and ellipses, overlays a guidance triangle, and
// returns the horizontal deviation of the target point from the image center.
public override int Process(ref Mat img, List <Point> points)
{
    // Split the input points into left and right groups.
    Divide(img, points, out var inputL, out var inputR);
    // PCA per side: center, first/second component magnitudes, and angle.
    CalcPCA(inputL, out var centerL, out var valL1, out var valL2, out var angleL);
    CalcPCA(inputR, out var centerR, out var valR1, out var valR2, out var angleR);
    // Kalman measurement vector: [cx, cy, val1, val2, angle].
    var kalmanInputL = new double[] { centerL.X, centerL.Y, valL1, valL2, angleL };
    var kalmanInputR = new double[] { centerR.X, centerR.Y, valR1, valR2, angleR };
    // Lazily create the filters on the first frame, seeded with the first measurement.
    if (first)
    {
        _filterL = new Yamashita.Control.KalmanFilter(kalmanInputL, _mNoise, _pNoise);
        _filterR = new Yamashita.Control.KalmanFilter(kalmanInputR, _mNoise, _pNoise);
        first = false;
    }
    // Replace the raw PCA values with the filtered predictions.
    var (_, predictL) = _filterL.Update(kalmanInputL);
    var (_, predictR) = _filterR.Update(kalmanInputR);
    centerL = new Point(predictL[0], predictL[1]);
    centerR = new Point(predictR[0], predictR[1]);
    valL1 = predictL[2];
    valL2 = predictL[3];
    angleL = predictL[4];
    valR1 = predictR[2];
    valR2 = predictR[3];
    angleR = predictR[4];
    DrawArrow(img, centerL, valL1, valL2, angleL);
    DrawArrow(img, centerR, valR1, -valR2, angleR);
    // The translucent decorations are drawn on a clone and blended back below.
    using var addImg = img.Clone();
    DrawEllipses(addImg, centerL, valL1, valL2, angleL);
    DrawEllipses(addImg, centerR, valR1, valR2, angleR);
    // Use the sigma point of the second principal component as the endpoint.
    var endL = new Point(centerL.X + Math.Cos((angleL - 90) * Math.PI / 180) * valL2, centerL.Y + Math.Sin((angleL - 90) * Math.PI / 180) * valL2);
    var endR = new Point(centerR.X - Math.Cos((angleR - 90) * Math.PI / 180) * valR2, centerR.Y - Math.Sin((angleR - 90) * Math.PI / 180) * valR2);
    // Rotate the endpoints +/-30 degrees around the bottom corners of the image.
    endL = Rotate(new Point(0, img.Height), endL, -30 * Math.PI / 180);
    endR = Rotate(new Point(img.Width, img.Height), endR, 30 * Math.PI / 180);
    //Cv2.Circle(img, endL, 3, red, 3);
    //Cv2.Circle(img, endR, 3, red, 3);
    // Guidance triangle: apex midway between the endpoints at 3/5 of the height.
    var x = (endL.X + endR.X) / 2;
    var y = img.Height * 3 / 5;
    var poly = new Point[] { new Point(x, y), new Point(img.Width / 3, img.Height * 7 / 8), new Point(img.Width * 2 / 3, img.Height * 7 / 8) };
    Cv2.FillConvexPoly(addImg, poly, red);
    // Blend the overlay onto the frame.
    Cv2.AddWeighted(img, 0.6, addImg, 0.4, 0, img);
    //Cv2.Line(img, new Point(img.Width / 2, 0), new Point(img.Width / 2, img.Height), red);
    //Cv2.Line(img, new Point(x, y), new Point(img.Width / 2, y), red, 2);
    // Return the horizontal deviation from the image center.
    return((int)x - img.Width / 2);
}
/// <summary>
/// Returns the axial (Z) plane at the requested physical position, linearly
/// interpolating between neighboring slices when the position falls between
/// them. Out-of-bounds positions yield a zero-filled plane.
/// </summary>
public Mat GetZPlane(double z)
{
    // Out of bounds: hand back an empty (zero-filled) plane of matching geometry.
    if (z < Origin.Z || z > ZMax)
    {
        return new Mat(DimensionY, DimensionX, _mat.Type(), new float[DimensionX * DimensionY]);
    }

    var sliceBelow = (int)((z - Origin.Z) * ImageOrientation.zDir.Z / ZRes);
    var sliceAbove = (int)((z - Origin.Z + ZRes) * ImageOrientation.zDir.Z / ZRes);
    var zBelow = Origin.Z + sliceBelow * ZRes;
    var zAbove = Origin.Z + sliceAbove * ZRes;

    // Exact slice hit: no interpolation needed.
    var needsInterp = (z - Origin.Z) / ZRes % 1 != 0;
    if (!needsInterp)
    {
        return GetZPlaneBySlice(sliceBelow);
    }

    // Linear blend between the two bracketing slices.
    var fraction = (z - zBelow) / (zAbove - zBelow);
    var lowPlane = GetZPlaneBySlice(sliceBelow);
    var highPlane = GetZPlaneBySlice(sliceAbove);
    Mat interpolated = lowPlane.EmptyClone();
    Cv2.AddWeighted(lowPlane, 1 - fraction, highPlane, fraction, 0, interpolated);
    return interpolated;
}
/// <summary>
/// Blends random Gaussian noise into the image in place;
/// <paramref name="intensity"/> (expected 0..1) is the weight of the noise layer.
/// </summary>
private void Noising(ref Mat SourceMat, double intensity)
{
    using (Mat noise = new Mat(SourceMat.Height, SourceMat.Width, MatType.CV_8UC4, new Scalar(0, 0, 0, 255)))
    {
        // Fill with normally distributed BGRA noise.
        Cv2.Randn(noise, new Scalar(0, 0, 0, 255), new Scalar(255, 255, 255, 255));
        Cv2.AddWeighted(SourceMat, 1 - intensity, noise, intensity, 0, SourceMat);
    }
}
/// <summary>
/// Applies an unsharp-mask style sharpening: a Gaussian-blurred copy is
/// subtracted (weighted) from the original, using parameters read from the
/// shared settings object. Writes into <paramref name="result"/> and returns it.
/// </summary>
private Mat Sharpening(Mat picture, Mat result)
{
    Data_th settings = th.Get_Data();
    // Blurred copy used as the negative term of the weighted sum.
    Mat blurred = Mat.Zeros(new OpenCvSharp.Size(picture.Cols, picture.Rows), MatType.CV_8UC3);
    Cv2.GaussianBlur(picture, blurred, new OpenCvSharp.Size(9, 9), settings.Get_Shsigma());
    Cv2.AddWeighted(picture, settings.Get_Shth1(), blurred, -settings.GetShth2(), 7.5, result);
    return result;
}
// 4. Image enhancement method 1 (input must be a grayscale image):
//    fuses the Sobel X-direction and Y-direction gradients with equal
//    weights to enhance edges. Writes into outMat and returns it.
public static Mat ImageEnhancementMethod1(Mat inMat, Mat outMat)
{
    // FIX: dispose the intermediate gradient mats (the original leaked them).
    using Mat soX = new Mat();
    using Mat soY = new Mat();
    Cv2.Sobel(inMat, soX, MatType.CV_8U, 1, 0, 3, 1, 1); // X-direction gradient
    Cv2.Sobel(inMat, soY, MatType.CV_8U, 0, 1, 3, 1, 1); // Y-direction gradient
    Cv2.AddWeighted(soX, 0.5, soY, 0.5, 0, outMat);      // merge the gradients
    return outMat;
}
/// <summary>
/// Timer callback: grabs the next camera frame, overlays the flash image
/// with the current alpha, and shows the result in the picture box.
/// NOTE(review): System.Timers elapsed handlers run on a pool thread;
/// direct PictureBox access here presumably relies on that being tolerated — confirm.
/// </summary>
private void CameraImageDisp_TimersTimer(object sender, ElapsedEventArgs e)
{
    capture.Read(image);
    // FIX: the blended Mat and the previously displayed bitmap were leaked every
    // tick; the original papered over this with a per-tick GC.Collect().
    using (Mat tmp = new Mat())
    {
        Cv2.AddWeighted(image, 1.0, flashImage, flashAlpha, 0, tmp);
        var oldImage = pictureBox1.Image;
        pictureBox1.Image = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(tmp);
        oldImage?.Dispose();
    }
}
/// <summary>
/// Produces the photographic negative: Dst = 255 - Source.
/// </summary>
/// <param name="Source"> source image (grayscale) </param>
/// <param name="_Dst"> destination image (grayscale) </param>
public void Negative(Mat Source, OutputArray _Dst)
{
    Mat dst = _Dst.GetMat();
    using (Mat srcCopy = new Mat())
    using (Mat white = new Mat(Source.Size(), Source.Type(), new Scalar(255)))
    {
        Source.CopyTo(srcCopy);
        // 255 * 1 + src * (-1) = inverted image.
        Cv2.AddWeighted(white, 1, srcCopy, -1, 0, dst);
    }
}
/// <summary>
/// Draws the 2D detections (bounding boxes, labels, depth text, and opaque
/// color masks) on the left camera image; the overlay copy carrying the solid
/// masks is blended back at 30% opacity at the end.
/// </summary>
public static void render_2D(ref OpenCvSharp.Mat left_display, sl.float2 img_scale, ref sl.Objects objects, bool render_mask, bool isTrackingON)
{
    // Overlay that receives the solid-colored ROIs; blended with the live image below.
    // FIX: removed the unused locals `roi_render` and `mask` from the original.
    OpenCvSharp.Mat overlay = left_display.Clone();
    int line_thickness = 2;
    for (int i = 0; i < objects.numObject; i++)
    {
        sl.ObjectData obj = objects.objectData[i];
        if (Utils.renderObject(obj, isTrackingON))
        {
            OpenCvSharp.Scalar base_color = Utils.generateColorID_u(obj.id);
            // Display image scale bounding box 2d
            if (obj.boundingBox2D.Length < 4)
            {
                continue;
            }
            Point top_left_corner = Utils.cvt(obj.boundingBox2D[0], img_scale);
            Point top_right_corner = Utils.cvt(obj.boundingBox2D[1], img_scale);
            Point bottom_right_corner = Utils.cvt(obj.boundingBox2D[2], img_scale);
            Point bottom_left_corner = Utils.cvt(obj.boundingBox2D[3], img_scale);
            // The two horizontal lines of the box.
            Cv2.Line(left_display, top_left_corner, top_right_corner, base_color, line_thickness);
            Cv2.Line(left_display, bottom_left_corner, bottom_right_corner, base_color, line_thickness);
            // The two vertical lines of the box.
            Utils.drawVerticalLine(ref left_display, bottom_left_corner, top_left_corner, base_color, line_thickness);
            Utils.drawVerticalLine(ref left_display, bottom_right_corner, top_right_corner, base_color, line_thickness);
            // Scaled ROI painted solid on the overlay (becomes translucent after blending).
            OpenCvSharp.Rect roi = new OpenCvSharp.Rect(top_left_corner.X, top_left_corner.Y, (int)top_right_corner.DistanceTo(top_left_corner), (int)bottom_right_corner.DistanceTo(top_right_corner));
            overlay.SubMat(roi).SetTo(base_color);
            sl.float2 position_image = getImagePosition(obj.boundingBox2D, img_scale);
            Cv2.PutText(left_display, obj.label.ToString(), new Point(position_image.x - 20, position_image.y - 12), HersheyFonts.HersheyComplexSmall, 0.5f, new Scalar(255, 255, 255, 255), 1);
            // Depth readout below the label, when the position is finite.
            if (!float.IsInfinity(obj.position.Z))
            {
                string text = Math.Abs(obj.position.Z).ToString("0.##M");
                Cv2.PutText(left_display, text, new Point(position_image.x - 20, position_image.y), HersheyFonts.HersheyComplexSmall, 0.5, new Scalar(255, 255, 255, 255), 1);
            }
        }
    }
    // Here, overlay is as the left image, but with opaque masks on each detected object.
    Cv2.AddWeighted(left_display, 0.7, overlay, 0.3, 0.0, left_display);
}
/// <summary>
/// Applies a green tint in place: blends in a solid green layer, darkens the
/// result proportionally to the intensity, forces the alpha channel fully
/// opaque, and boosts the green channel.
/// </summary>
private void Greening(ref Mat SourceMat, double intensity)
{
    using (Mat greenMat = new Mat(SourceMat.Height, SourceMat.Width, MatType.CV_8UC4, new Scalar(0, 255, 0, 255)))
    {
        Cv2.AddWeighted(SourceMat, 1 - intensity, greenMat, intensity, 0, SourceMat);
        // Darken proportionally to the intensity.
        SourceMat.ConvertTo(SourceMat, SourceMat.Type(), 1, -intensity * 128);
        Mat[] bgra;
        Cv2.Split(SourceMat, out bgra);
        try
        {
            // Force the alpha channel to fully opaque.
            Cv2.Rectangle(bgra[3], new OpenCvSharp.Rect(0, 0, bgra[3].Width, bgra[3].Height), new Scalar(255), -1);
            // Boost the green channel.
            bgra[1].ConvertTo(bgra[1], bgra[1].Type(), 1, 32 * intensity);
            Cv2.Merge(bgra, SourceMat);
        }
        finally
        {
            // FIX: the original leaked the split channel mats.
            foreach (var channel in bgra)
            {
                channel.Dispose();
            }
        }
    }
}
/// <summary>
/// Renders the segmentation masks (colorized, blended at 50%) plus optional
/// bounding boxes and labels on a copy of the input image and returns it.
/// Also stores a clean clone in <c>orig</c> and the detections in <c>Detections</c>.
/// </summary>
public Mat DrawSegmentationMap(Mat mat1, SegmentationDetectionInfo[] detections, float visTresh)
{
    orig = mat1.Clone();
    Mat mat = mat1.Clone();
    Detections = detections;
    for (int i = 0; i < detections.Length; i++)
    {
        if (!detections[i].Visible)
        {
            continue;
        }
        // Pick a color per detection, reusing the last one when we run out.
        Scalar clr = i < clrs.Count ? clrs[i] : clrs.Last();
        // FIX: the original leaked m2, the per-channel clones, and the merged /
        // resized mats on every detection; all temporaries are disposed now.
        using (Mat m2 = detections[i].Mask.Clone())
        {
            // Colorize the single-channel mask: one scaled clone per channel.
            Mat[] rgb = new Mat[3];
            try
            {
                for (int j = 0; j < 3; j++)
                {
                    rgb[j] = m2.Clone();
                    rgb[j] *= clr[j];
                }
                using (Mat merged = new Mat())
                {
                    Cv2.Merge(rgb, merged);
                    using (Mat resized = merged.Resize(mat.Size()))
                    {
                        Cv2.AddWeighted(mat, 1, resized, 0.5, 0, mat);
                    }
                }
            }
            finally
            {
                foreach (var channel in rgb)
                {
                    channel?.Dispose();
                }
            }
        }
        if (EnableBoxDraw)
        {
            mat.Rectangle(detections[i].Rect, new OpenCvSharp.Scalar(255, 0, 0), 2);
        }
        if (EnableTextDraw)
        {
            mat.PutText(detections[i].Label + ": " + Math.Round(detections[i].Conf, 4), new Point(detections[i].Rect.Left, detections[i].Rect.Top), HersheyFonts.HersheyComplex, 1, clr);
        }
    }
    return mat;
}
//
// Blending
// Performs the blend of the foreground over the background through the mask.
//
// Return
// The blended image on success, null on failure.
//
public Mat Blending()
{
    // Cannot blend until the inputs are ready.
    if (this.Blendable == false)
    {
        return(null);
    }
    // Optionally feather the mask edges with a Gaussian blur.
    var mask = this.Smooth ? this._mask.GaussianBlur(new Size(21, 21), 11.0) : this._mask;
    var mask_inv = (~mask).ToMat();
    var mask_norm = new Mat();
    var mask_inv_norm = new Mat();
    // Normalize mask and inverse mask to [0, 1]. MinMax normalization is
    // degenerate on an all-zero (or all-set) mask, so those cases are handled
    // explicitly with constant mats.
    if (mask.CountNonZero() == 0)
    {
        mask_inv_norm = new Mat(mask.Size(), MatType.CV_32FC1, new Scalar(1.0));
        mask_norm = new Mat(mask.Size(), MatType.CV_32FC1, new Scalar(0.0));
    }
    else if (mask_inv.CountNonZero() == 0)
    {
        mask_norm = new Mat(mask.Size(), MatType.CV_32FC1, new Scalar(1.0));
        mask_inv_norm = new Mat(mask.Size(), MatType.CV_32FC1, new Scalar(0.0));
    }
    else
    {
        mask_norm = mask.Normalize(0, 1, NormTypes.MinMax, MatType.CV_32FC1);
        mask_inv_norm = mask_inv.Normalize(0, 1, NormTypes.MinMax, MatType.CV_32FC1);
    }
    // Expand the single-channel masks to 3 channels so they can multiply BGR frames.
    mask_norm = mask_norm.CvtColor(ColorConversionCodes.GRAY2BGR);
    mask_inv_norm = mask_inv_norm.CvtColor(ColorConversionCodes.GRAY2BGR);
    // Work in 32-bit float to avoid clipping during the per-pixel multiplies.
    var background32f = new Mat();
    this._background.ConvertTo(background32f, MatType.CV_32FC3);
    var foreground32f = new Mat();
    this._foreground.ConvertTo(foreground32f, MatType.CV_32FC3);
    // Inside the mask: mix foreground and background according to Transparency.
    var blended_frame = new Mat();
    Cv2.AddWeighted(foreground32f.Mul(mask_norm), 1.0f - this.Transparency, background32f.Mul(mask_norm), this.Transparency, 0, blended_frame);
    // Outside the mask: plain background.
    blended_frame += background32f.Mul(mask_inv_norm);
    blended_frame.ConvertTo(blended_frame, MatType.CV_8UC3);
    return(blended_frame);
}
/// <summary>
/// Processes the given image with custom settings for the prime part image:
/// converts it to grayscale and applies a fixed binary threshold at 150.
/// </summary>
/// <param name="image">The image to apply filter to</param>
/// <returns>The resulted image after applying custom filters to the given one</returns>
public static Bitmap PrimePartImageCustomProcess(Bitmap image)
{
    // Round-trip through a temporary TIFF to obtain a grayscale Mat.
    string tempImagePath = Path.Combine(GeneralUtils.GetAssemblyPath(), "temp.tiff");
    image.Save(tempImagePath, System.Drawing.Imaging.ImageFormat.Tiff);
    Mat src_gray = new Mat(tempImagePath, ImreadModes.Grayscale);
    GeneralUtils.DeleteFile(tempImagePath);
    // FIX: the original called Cv2.AddWeighted(src_gray, 1.5, src_gray, -0.5, 0, src_gray),
    // i.e. 1.5x - 0.5x = x — an identity transform (it probably meant to blend with a
    // blurred copy for sharpening). Removed as dead computation; the output is unchanged.
    Cv2.Threshold(src_gray, src_gray, 150, 255, ThresholdTypes.Binary);
    return BitmapConverter.ToBitmap(src_gray);
}
/// <summary>
/// Builds the plant edge image from the segmentation image: the absolute
/// Sobel gradients in X and Y are averaged into <c>plantEdges</c>.
/// </summary>
private void MakeEdgeMat()
{
    // FIX: dispose the intermediate gradient mats (the original leaked them).
    using (Mat sobelX = new Mat())
    using (Mat sobelY = new Mat())
    {
        Cv2.Sobel(plantSegmentasionImage, sobelX, MatType.CV_16S, 1, 0, 3);
        Cv2.ConvertScaleAbs(sobelX, sobelX);
        Cv2.Sobel(plantSegmentasionImage, sobelY, MatType.CV_16S, 0, 1, 3);
        Cv2.ConvertScaleAbs(sobelY, sobelY);
        plantEdges = new Mat();
        Cv2.AddWeighted(sobelX, 0.5, sobelY, 0.5, 0, plantEdges);
    }
}
/// <summary>
/// Writes a short video where each output frame is the tournament-style
/// average of eight consecutive source images.
/// </summary>
public static void WriteAviFileTest(List <string> sourcefiles, string outfilename, double fps)
{
    // Frame size probed from the first source image.
    Size size;
    using (var mat = new Mat(sourcefiles[0]))
    {
        size = mat.Size();
    }
    VideoWriter outV = null;
    for (int i = 0; i < 30; i++)
    {
        using (var mat1 = new Mat(sourcefiles[i]))
        using (var mat2 = new Mat(sourcefiles[i + 1]))
        using (var mat3 = new Mat(sourcefiles[i + 2]))
        using (var mat4 = new Mat(sourcefiles[i + 3]))
        using (var mat5 = new Mat(sourcefiles[i + 4]))
        using (var mat6 = new Mat(sourcefiles[i + 5]))
        using (var mat7 = new Mat(sourcefiles[i + 6]))
        using (var mat8 = new Mat(sourcefiles[i + 7]))
        using (var mat11 = new Mat())
        using (var mat22 = new Mat())
        using (var mat33 = new Mat())
        using (var mat44 = new Mat())
        using (var mat111 = new Mat())
        using (var mat222 = new Mat())
        using (var mat = new Mat())
        {
            if (i == 0)
            {
                // BUG FIX: the writer was previously opened with mat.Size(),
                // where mat is still an empty Mat (0x0); use the probed size.
                outV = new VideoWriter(outfilename, FourCC.Default, fps, size);
            }
            // Pairwise averages, then averages of averages.
            Cv2.AddWeighted(mat1, 0.5, mat2, 0.5, 1, mat11);
            Cv2.AddWeighted(mat3, 0.5, mat4, 0.5, 1, mat22);
            Cv2.AddWeighted(mat5, 0.5, mat6, 0.5, 1, mat33);
            Cv2.AddWeighted(mat7, 0.5, mat8, 0.5, 1, mat44);
            Cv2.AddWeighted(mat11, 0.5, mat22, 0.5, 1, mat111);
            Cv2.AddWeighted(mat33, 0.5, mat44, 0.5, 1, mat222);
            Cv2.AddWeighted(mat111, 0.5, mat222, 0.5, 1, mat);
            outV.Write(mat);
        }
    }
    outV.Release();
}
/// <summary>
/// Shows every Z slice of the matrix with its slice index drawn (half-blended)
/// near the bottom center of the image.
/// </summary>
public static void ShowAllSlices(this Matrix m)
{
    for (int z = 0; z < m.DimensionZ; z++)
    {
        // FIX: the original called mat.Dispose() explicitly inside its own
        // using block (double dispose); using declarations now own all three mats.
        using (var mat = m.GetZPlaneBySlice(z))
        using (var sliceText = mat.EmptyClone())
        using (var combined = new Mat())
        {
            sliceText.PutText($"{z}", new Point(m.DimensionX / 2, m.DimensionY - 5), HersheyFonts.Italic, 0.5, new Scalar(1));
            Cv2.AddWeighted(mat, 1, sliceText, 0.5, 0, combined);
            FloatMat.Show(combined);
        }
    }
}
/// <summary>
/// Filter that sharpens via unsharp masking: a Gaussian-blurred copy is
/// subtracted (weighted) from a boosted original, in place.
/// </summary>
public UnsharpMaskImage()
{
    this.Name = "Unsharp Mask";
    this.Action = (m) =>
    {
        using (var blurred = m.GaussianBlur(new Size(this.GaussSize, this.GaussSize), this.GaussSigmaX))
        {
            // The original's weight grows with Intensity; the blur is subtracted.
            var originalWeight = 1 + this.Intensity;
            var blurWeight = -1 * (this.Intensity + this.Gamma);
            Cv2.AddWeighted(m, originalWeight, blurred, blurWeight, 0, m);
        }
        HasError = false;
        return m;
    };
}
/// <summary>
/// Detects straight edges in the input image via Gaussian blur, Canny, and the
/// probabilistic Hough transform, then writes the original with red line
/// overlays to the line-detection output folder.
/// </summary>
public static void LineEdgeDetection(string fileName)
{
    // FIX: every Mat was leaked in the original; using declarations dispose them.
    using Mat img = new Mat(Path.Combine(INPUT_PATH, fileName), ImreadModes.AnyColor);
    using Mat gray = new Mat();
    Cv2.CvtColor(img, gray, ColorConversionCodes.BGR2GRAY);

    int kernel_size = 5;
    using Mat blur_gray = new Mat();
    Cv2.GaussianBlur(gray, blur_gray, new Size { Height = kernel_size, Width = kernel_size }, 0);

    int low_treshold = 150;
    int high_threshold = 200;
    using Mat edges = new Mat();
    Cv2.Canny(blur_gray, edges, low_treshold, high_threshold);

    double rho = 1;               // distance resolution in pixels of the Hough grid
    double theta = Math.PI / 180; // angular resolution in radians of the Hough grid
    int threshold = 15;           // minimum number of votes (intersections in Hough grid cell)
    int min_line_length = 50;     // minimum number of pixels making up a line
    int max_line_gap = 13;        // maximum gap in pixels between connectable line segments

    // creating a blank to draw lines on
    using Mat line_image = new Mat(rows: img.Rows, cols: img.Cols, type: img.Type());

    // Run Hough on edge detected image
    // Output "lines" is an array containing endpoints of detected line segments
    var lines = Cv2.HoughLinesP(edges, rho, theta, threshold, min_line_length, max_line_gap);
    foreach (LineSegmentPoint line in lines)
    {
        Console.WriteLine($"{line.P1}, {line.P2}");
        Cv2.Line(line_image, line.P1, line.P2, Scalar.Red, 2, LineTypes.Link8);
    }

    using Mat line_edges = new Mat();
    Cv2.AddWeighted(img, 0.8, line_image, 1, 0, line_edges);
    Cv2.ImWrite(Path.Combine(LINEDETECTION_PATH, fileName), line_edges);
}
/// <summary>
/// Returns the X plane at the requested physical position, linearly
/// interpolating between the two nearest slices when needed.
/// Out-of-bounds positions yield an empty plane.
/// </summary>
public static Mat GetXPlane(IMatrix _mat, double xPositionMM, double xiScale = 1, double yiScale = 1)
{
    // XDIR => Y, YDIR => Z
    // Aspect scale so the output plane has square voxels.
    var xScale = _mat.ZRes > _mat.YRes ? (_mat.YRes / _mat.ZRes) : 1.0;
    var yScale = _mat.YRes > _mat.ZRes ? (_mat.ZRes / _mat.YRes) : 1.0;
    // Check if in bounds
    if (xPositionMM >= _mat.Origin.X && xPositionMM <= _mat.XMax)
    {
        var ordered = Enumerable.Range(1, _mat.DimensionX).Select(s => new { Key = s, Value = _mat.Origin.X + _mat.XRes * s });
        var firstLessThan = ordered.LastOrDefault(o => o.Value <= xPositionMM);
        // If close enough, send this plane
        if (Math.Abs(firstLessThan.Value - xPositionMM) < 0.001)
        {
            return _mat.GetXPlaneBySlice(firstLessThan.Key - 1, xiScale, yiScale)
                   .Resize(Size.Zero, xScale, yScale);
        }
        var firstMoreThan = ordered.FirstOrDefault(o => o.Value >= xPositionMM);
        if (Math.Abs(firstMoreThan.Value - xPositionMM) < 0.001)
        {
            return _mat.GetXPlaneBySlice(firstMoreThan.Key - 1, xiScale, yiScale)
                   .Resize(Size.Zero, xScale, yScale);
        }
        // Otherwise interpolate. zd is the fractional distance toward the upper
        // slice, so the lower plane gets weight (1 - zd) and the upper gets zd.
        // BUG FIX: the original swapped these weights (lowPlane was weighted zd),
        // which is inconsistent with GetZPlane's interpolation in this codebase.
        var zd = (xPositionMM - firstLessThan.Value) / (firstMoreThan.Value - firstLessThan.Value);
        var lowPlane = _mat.GetXPlaneBySlice(firstLessThan.Key - 1, xiScale, yiScale);
        var highPlane = _mat.GetXPlaneBySlice(firstMoreThan.Key - 1, xiScale, yiScale);
        Mat interpolated = lowPlane.EmptyClone();
        Cv2.AddWeighted(lowPlane, 1 - zd, highPlane, zd, 0, interpolated);
        interpolated = interpolated.Resize(Size.Zero, xScale, yScale);
        return interpolated;
    }
    else
    {
        // Return empty
        return new Mat(_mat.DimensionZ, _mat.DimensionY, _mat.MatType).Resize(Size.Zero, xScale, yScale);
    }
}
/// <summary>
/// Gets the contour mask on slice z. If no contour set exists exactly at z,
/// linearly interpolates between the masks of the bracketing slices.
/// </summary>
/// <param name="z">z position of contours</param>
/// <returns>8-bit single-channel mask the size of <paramref name="zSlice"/>; caller owns it.</returns>
public static Mat GetMaskAtZ(this IEnumerable <SliceContourMeta> contours, float z, Mat zSlice, Mat imageToPatientTx, double scale)
{
    var zSlices = contours.GroupBy(sc => sc.Z).OrderBy(grp => grp.Key).ToList();
    if (zSlices.Any(o => Math.Abs(z - o.Key) < 0.01))
    {
        var matchedContours = zSlices.First(o => Math.Abs(z - o.Key) < 0.01);
        // BUG FIX: the original wrapped this mask in a using block and returned it,
        // handing the caller an already-disposed Mat. The returned mask must not
        // be disposed here.
        var mask = new Mat(zSlice.Rows, zSlice.Cols, MatType.CV_8UC1, new Scalar(0));
        foreach (var contour in matchedContours)
        {
            contour.MaskImageFast(mask, imageToPatientTx, 255, scale);
        }
        return mask;
    }
    else
    {
        // Else interpolate between the nearest contour sets below and above z.
        var smaller = zSlices.Last(s => s.Key <= z);
        var larger = zSlices.First(s => s.Key >= z);
        var zd = (z - smaller.Key) / (larger.Key - smaller.Key);
        using (var mask1 = new Mat(zSlice.Rows, zSlice.Cols, MatType.CV_8UC1, new Scalar(0)))
        using (var mask2 = new Mat(zSlice.Rows, zSlice.Cols, MatType.CV_8UC1, new Scalar(0)))
        {
            foreach (var contour in smaller)
            {
                contour.MaskImageFast(mask1, imageToPatientTx, 255, scale);
            }
            foreach (var contour in larger)
            {
                contour.MaskImageFast(mask2, imageToPatientTx, 255, scale);
            }
            // Weighted blend: closer slice contributes more.
            Mat interpolated = new Mat(zSlice.Rows, zSlice.Cols, MatType.CV_8UC1, new Scalar(0));
            Cv2.AddWeighted(mask1, 1 - zd, mask2, zd, 0, interpolated);
            return interpolated;
        }
    }
}
// test image from https://play.google.com/store/apps/details?id=com.vizalevgames.finddifferences200levels
// thank for that
//
// Highlights the differences between two nearly identical images by feeding
// both into a MOG2 background subtractor and overlaying the resulting mask.
static void Main()
{
    var first = new Mat("1.jpg");
    var second = new Mat("2.jpg");

    // Background subtraction: after seeing both images, the mask marks the pixels that changed.
    var subtractor = BackgroundSubtractorMOG2.Create();
    var diffMask = new Mat();
    subtractor.Apply(first, diffMask);
    subtractor.Apply(second, diffMask);

    // Morphological opening to reduce noise.
    Cv2.MorphologyEx(diffMask, diffMask, MorphTypes.Open, null, null, 2);

    // AddWeighted needs matching channel counts, so expand the mask to BGR.
    var maskBgr = new Mat();
    Cv2.CvtColor(diffMask, maskBgr, ColorConversionCodes.GRAY2BGR);

    // Brighten the differing regions on both originals.
    Cv2.AddWeighted(first, 1.0, maskBgr, 0.5, 2.2, first);
    Cv2.AddWeighted(second, 1.0, maskBgr, 0.5, 2.2, second);

    #region draw contours
    var canny = new Mat();
    Cv2.Canny(diffMask, canny, 15, 120);
    Cv2.FindContours(canny, out var contours, out var _, RetrievalModes.External, ContourApproximationModes.ApproxSimple);
    Cv2.DrawContours(first, contours, -1, Scalar.Red, 2);
    Cv2.DrawContours(second, contours, -1, Scalar.Red, 2);
    #endregion

    using (new Window("org1", first))
    using (new Window("org2", second))
    using (new Window("mask", diffMask))
    {
        Cv2.WaitKey();
    }
}
/// <summary>
/// Returns the Y plane at the requested physical position, linearly
/// interpolating between the two nearest slices when needed.
/// Out-of-bounds positions yield an empty plane.
/// </summary>
public Mat GetYPlane(double yPositionMM)
{
    // Aspect scale so the output plane has square voxels.
    var xScale = ZRes > XRes ? (XRes / ZRes) : 1.0;
    var yScale = XRes > ZRes ? (ZRes / XRes) : 1.0;
    // Check if in bounds
    if (yPositionMM >= Origin.Y && yPositionMM <= YMax)
    {
        var ordered = Enumerable.Range(1, DimensionY).Select(s => new { Key = s, Value = Origin.Y + YRes * s });
        var firstLessThan = ordered.LastOrDefault(o => o.Value <= yPositionMM);
        // If close enough, send this plane
        if (Math.Abs(firstLessThan.Value - yPositionMM) < 0.001)
        {
            return GetYPlaneBySlice(firstLessThan.Key - 1).Resize(Size.Zero, xScale, yScale);
        }
        var firstMoreThan = ordered.FirstOrDefault(o => o.Value >= yPositionMM);
        if (Math.Abs(firstMoreThan.Value - yPositionMM) < 0.001)
        {
            return GetYPlaneBySlice(firstMoreThan.Key - 1).Resize(Size.Zero, xScale, yScale);
        }
        // Otherwise interpolate. zd is the fractional distance toward the upper
        // slice, so the lower plane gets weight (1 - zd) and the upper gets zd.
        // BUG FIX: the original swapped these weights (lowPlane was weighted zd),
        // which is inconsistent with GetZPlane's interpolation in this codebase.
        var zd = (yPositionMM - firstLessThan.Value) / (firstMoreThan.Value - firstLessThan.Value);
        var lowPlane = GetYPlaneBySlice(firstLessThan.Key - 1);
        var highPlane = GetYPlaneBySlice(firstMoreThan.Key - 1);
        Mat interpolated = lowPlane.EmptyClone();
        Cv2.AddWeighted(lowPlane, 1 - zd, highPlane, zd, 0, interpolated);
        interpolated = interpolated.Resize(Size.Zero, xScale, yScale);
        return interpolated;
    }
    else
    {
        // Return empty.
        // NOTE(review): the dimensions here are (DimensionZ, DimensionY); for a
        // Y plane, DimensionX may have been intended for the columns — confirm.
        return new Mat(DimensionZ, DimensionY, _mat.Type()).Resize(Size.Zero, xScale, yScale);
    }
}
/// <summary>
/// Builds an edge preview of the frame: grayscale, Gaussian blur, approximate
/// Sobel gradient magnitude, then Canny thresholding.
/// </summary>
public override Mat Preview(Mat frame)
{
    // Sobel requires an odd kernel size.
    if (ksize % 2 != 1)
    {
        ksize++;
    }

    var gray = frame.CvtColor(ColorConversionCodes.BGR2GRAY);
    var smoothed = gray.GaussianBlur(new Size(ksize, ksize), 0);

    var gradX = smoothed.Sobel(MatType.CV_16S, 1, 0, ksize, scale, delta);
    var gradY = smoothed.Sobel(MatType.CV_16S, 0, 1, ksize, scale, delta);
    var absX = gradX.ConvertScaleAbs();
    var absY = gradY.ConvertScaleAbs();

    // Approximate the gradient magnitude as the mean of |dx| and |dy|.
    var magnitude = new Mat();
    Cv2.AddWeighted(absX, 0.5, absY, 0.5, 0, magnitude);
    return magnitude.Canny(LTr, HTr);
}
// Isolates the red regions of an image: red occupies both ends of the HSV hue
// range, so two thresholds are summed into one mask before masking the image.
static void Main(string[] args)
{
    Mat src = Cv2.ImRead("tomato.jpg");
    Mat hsv = new Mat(src.Size(), MatType.CV_8UC3);
    Mat lowerRed = new Mat(src.Size(), MatType.CV_8UC3);
    Mat upperRed = new Mat(src.Size(), MatType.CV_8UC3);
    Mat redMask = new Mat(src.Size(), MatType.CV_8UC3);
    Mat dst = new Mat(src.Size(), MatType.CV_8UC3);

    Cv2.CvtColor(src, hsv, ColorConversionCodes.BGR2HSV);
    // Both red hue bands; the binary masks are combined with a weighted add.
    Cv2.InRange(hsv, new Scalar(0, 100, 100), new Scalar(5, 255, 255), lowerRed);
    Cv2.InRange(hsv, new Scalar(170, 100, 100), new Scalar(179, 255, 255), upperRed);
    Cv2.AddWeighted(lowerRed, 1.0, upperRed, 1.0, 0.0, redMask);

    // Keep only the masked pixels, then convert back to BGR for display.
    Cv2.BitwiseAnd(hsv, hsv, dst, redMask);
    Cv2.CvtColor(dst, dst, ColorConversionCodes.HSV2BGR);

    Cv2.ImShow("dst", dst);
    Cv2.WaitKey(0);
    Cv2.DestroyAllWindows();
}
/// <summary>
/// Returns the axial (Z) plane at the requested physical position, linearly
/// interpolating between neighboring slices when the position falls between
/// them. Out-of-bounds positions yield an empty plane.
/// </summary>
public static Mat GetZPlane(IMatrix _mat, double z, double xiScale = 1, double yiScale = 1)
{
    // XDIR => X, YDIR => Y
    // Aspect scale so the output plane has square voxels.
    var xScale = _mat.XRes > _mat.YRes ? (_mat.YRes / _mat.XRes) : 1.0;
    var yScale = _mat.YRes > _mat.XRes ? (_mat.XRes / _mat.YRes) : 1.0;

    // Out of bounds: hand back an empty plane of matching geometry.
    if (z < _mat.Origin.Z || z > _mat.ZMax)
    {
        return new Mat(_mat.DimensionY, _mat.DimensionX, _mat.MatType);
    }

    var sliceBelow = (int)((z - _mat.Origin.Z) / _mat.ZRes);
    var zBelow = _mat.Origin.Z + sliceBelow * _mat.ZRes;
    var zAbove = _mat.Origin.Z + (sliceBelow + 1) * _mat.ZRes;

    // Exact slice hit: no interpolation needed.
    var needsInterp = (z - _mat.Origin.Z) / _mat.ZRes % 1 != 0;
    if (!needsInterp)
    {
        return _mat.GetZPlaneBySlice(sliceBelow, xiScale, yiScale)
               .Resize(Size.Zero, xScale, yScale);
    }

    // Linear blend between the two bracketing slices.
    var fraction = (z - zBelow) / (zAbove - zBelow);
    var lowPlane = _mat.GetZPlaneBySlice(sliceBelow, xiScale, yiScale);
    var highPlane = _mat.GetZPlaneBySlice(sliceBelow + 1, xiScale, yiScale);
    Mat interpolated = lowPlane.EmptyClone();
    Cv2.AddWeighted(lowPlane, 1 - fraction, highPlane, fraction, 0, interpolated);
    return interpolated.Resize(Size.Zero, xScale, yScale);
}
/// <summary>
/// Mouse-move paint handler: the left button draws with the current color,
/// the right button erases (draws black). Both update the preview by blending
/// the paint layer over the base image.
/// </summary>
private void pictureBox1_MouseMove(object sender, MouseEventArgs e)
{
    if (!start_Paint)
    {
        return;
    }

    // FIX: the two branches were duplicated except for the stroke color; the
    // preview Mat was leaked (papered over with GC.Collect()); and the stale
    // commented-out watershed experiment has been removed.
    void DrawStroke(Scalar strokeColor)
    {
        Cv2.Line(img_aux, ini_Coord.X, ini_Coord.Y,
                 TranslateStretchImageMousePosition(e.Location).X,
                 TranslateStretchImageMousePosition(e.Location).Y,
                 strokeColor, int.Parse(textBox1.Text), LineTypes.AntiAlias);
        ini_Coord = TranslateStretchImageMousePosition(e.Location);
        // Combination of two images: base image plus the paint layer.
        using (Mat dest = new Mat())
        {
            Cv2.AddWeighted(img_, 1.0, img_aux, 0.8, 0.0, dest);
            pictureBox1.Image = C_lmage.MatToBitmap(dest);
        }
    }

    // Two independent checks, as in the original (not else-if).
    if (e.Button == MouseButtons.Left)
    {
        DrawStroke(color);
    }
    if (e.Button == MouseButtons.Right)
    {
        DrawStroke(Scalar.Black);
    }
}
/// <summary>
/// Verifies that the CUDA addWeighted result matches the CPU Cv2.AddWeighted
/// result within a tolerance of 2.0.
/// </summary>
public void cuda_addWeighted()
{
    Mat mat1 = Image("lenna.png", ImreadModes.Grayscale);
    Size size = mat1.Size();
    Mat mat2 = new Mat(size, mat1.Type(), new Scalar(2));
    double alpha = 0.9;
    double beta = 1.1;
    double gamma = 2.3;
    using (GpuMat g_mat1 = new GpuMat(size, mat1.Type()))
    using (GpuMat g_mat2 = new GpuMat(size, mat2.Type())) // FIX: was never disposed
    using (GpuMat dst = new GpuMat())
    {
        g_mat2.Upload(mat2);
        g_mat1.Upload(mat1);
        Cuda.cuda.addWeighted(g_mat1, alpha, g_mat2, beta, gamma, dst);
        // Gold reference computed on the CPU.
        Mat dst_gold = new Mat(size, mat1.Type(), Scalar.Black);
        Cv2.AddWeighted(mat1, alpha, mat2, beta, gamma, dst_gold);
        ImageEquals(dst_gold, dst, 2.0);
        ShowImagesWhenDebugMode(g_mat1, dst);
    }
}
/// <summary>
/// Produces an approximate gradient-magnitude edge image via Scharr
/// derivatives: |dx| and |dy| are averaged, then min-max normalized into a
/// CV_8UC1 result. Caller owns the returned Mat.
/// </summary>
public static Mat Edge(Mat input)
{
    var scale = 1;
    var delta = 0;
    var ddepth = MatType.CV_16S;

    // FIX: the original mixed `using var` declarations with manual Dispose()
    // calls, double-disposing grad_x and abs_grad_y (the latter twice) while
    // never manually disposing abs_grad_x. The using declarations alone now
    // own every intermediate.

    /// Gradient X
    using var grad_x = input.Scharr(ddepth, 1, 0, scale, delta, BorderTypes.Default);
    using var abs_grad_x = grad_x.ConvertScaleAbs();

    /// Gradient Y
    using var grad_y = input.Scharr(ddepth, 0, 1, scale, delta, BorderTypes.Default);
    using var abs_grad_y = grad_y.ConvertScaleAbs();

    /// Total Gradient (approximate)
    using var grad = new Mat();
    Cv2.AddWeighted(abs_grad_x, 0.5, abs_grad_y, 0.5, 0, grad);

    // FIX: Mat's constructor takes (rows, cols); the original passed
    // (Width, Height), i.e. swapped — masked only because Normalize
    // reallocates the destination to match the source.
    var output = new Mat(grad.Height, grad.Width, MatType.CV_8UC1);
    Cv2.Normalize(grad, output, 255, 0, NormTypes.MinMax);
    return output;
}