/// <summary>
/// Thresholds the global source image (Otsu), runs OpenCV's distance transform
/// and displays the result linearly rescaled to the full 0..255 range.
/// </summary>
/// <param name="negative">Invert the thresholded image before the transform.</param>
/// <param name="distanceType">Distance metric passed to <c>DistanceTransform</c>.</param>
/// <param name="distanceMaskSize">Mask size passed to <c>DistanceTransform</c>.</param>
public static void DistanceTransform(bool negative = false, DistanceTypes distanceType = DistanceTypes.L2, DistanceMaskSize distanceMaskSize = DistanceMaskSize.Mask3)
{
    Glb.DrawMatAndHist0(Glb.matSrc);

    // using declarations make the Mats exception-safe; the original only
    // disposed them on the normal exit path.
    using var matThr = Glb.matSrc.CvtColor(ColorConversionCodes.BGR2GRAY).Threshold(128, 255, ThresholdTypes.Otsu);
    if (negative)
    {
        Cv2.BitwiseNot(matThr, matThr);
    }
    Glb.DrawMatAndHist1(matThr);

    using var matDist = matThr.DistanceTransform(distanceType, distanceMaskSize);

    // Linear map [x1, x2] -> [0, 255] for display.
    var x1 = matDist.Min();
    var x2 = matDist.Max();
    float y1 = 0;
    float y2 = 255;
    // FIX: guard the degenerate constant-map case (x1 == x2), which previously
    // produced NaN scale/offset and garbage display output.
    double denom = x2 - x1;
    double scale = denom != 0 ? (y2 - y1) / denom : 1;
    double offset = denom != 0 ? (x2 * y1 - x1 * y2) / denom : 0;

    using var matDistColor = new Mat();
    matDist.ConvertTo(matDistColor, MatType.CV_8UC1, scale, offset);
    Glb.DrawMatAndHist2(matDistColor);
}
/// <summary>
/// Same pipeline as <see cref="DistanceTransform"/> but using the custom
/// IpUnsafe implementation of the distance transform instead of OpenCV's.
/// </summary>
/// <param name="negative">Invert the thresholded image before the transform.</param>
public static void DistanceTransformMy(bool negative = false)
{
    Glb.DrawMatAndHist0(Glb.matSrc);

    // using declarations make the Mats exception-safe; the original only
    // disposed them on the normal exit path.
    using var matThr = Glb.matSrc.CvtColor(ColorConversionCodes.BGR2GRAY).Threshold(128, 255, ThresholdTypes.Otsu);
    if (negative)
    {
        Cv2.BitwiseNot(matThr, matThr);
    }
    Glb.DrawMatAndHist1(matThr);

    using var matDist = new Mat<float>(matThr.Size());
    IpUnsafe.DistanceTransform(matThr.Data, matThr.Width, matThr.Height, matDist.Data);

    // Linear map [x1, x2] -> [0, 255] for display.
    var x1 = matDist.Min();
    var x2 = matDist.Max();
    float y1 = 0;
    float y2 = 255;
    // FIX: guard the degenerate constant-map case (x1 == x2), which previously
    // produced NaN scale/offset and garbage display output.
    double denom = x2 - x1;
    double scale = denom != 0 ? (y2 - y1) / denom : 1;
    double offset = denom != 0 ? (x2 * y1 - x1 * y2) / denom : 0;

    using var matDistColor = new Mat();
    matDist.ConvertTo(matDistColor, MatType.CV_8UC1, scale, offset);
    Glb.DrawMatAndHist2(matDistColor);
}
/// <summary>
/// Matches SURF features between a circle template and a scene image,
/// estimates a homography (RHO), warps the inverted template into the scene
/// and paints the warped template pixels as a translucent red overlay.
/// </summary>
private void FindAndDrawHomo()
{
    using var template = new Mat("Images\\Circle_Template.bmp", ImreadModes.Color);
    using var surf = SURF.Create(1000);

    using var templateDescriptors = new Mat();
    surf.DetectAndCompute(template, null, out KeyPoint[] templateKeyPoints, templateDescriptors);

    using var image = new Mat("Images\\Circle.bmp", ImreadModes.Color);
    using var imageDescriptors = new Mat();
    surf.DetectAndCompute(image, null, out KeyPoint[] imageKeyPoints, imageDescriptors);

    using var matcher = new BFMatcher();
    // Match(query: image, train: template) => TrainIdx indexes template
    // keypoints, QueryIdx indexes image keypoints.
    var matches = matcher.Match(imageDescriptors, templateDescriptors);
    var goodMatches = matches; //.Where(m => m.Distance < 0.2).ToArray();

    using var srcPoints = InputArray.Create(goodMatches.Select(m => templateKeyPoints[m.TrainIdx].Pt));
    using var dstPoints = InputArray.Create(goodMatches.Select(m => imageKeyPoints[m.QueryIdx].Pt));

    // FIX: the original used the identifier `h**o`, which is not a valid
    // C# identifier (corrupted source); renamed to `homography`.
    using var homography = Cv2.FindHomography(srcPoints, dstPoints, HomographyMethods.Rho);

    using var tmp = image.Overlay();
    Cv2.BitwiseNot(template, template);
    Cv2.WarpPerspective(template, tmp, homography, tmp.Size());

    using var overlay = tmp.Overlay();
    for (var r = 0; r < tmp.Rows; r++)
    {
        for (var c = 0; c < tmp.Cols; c++)
        {
            // Black warped pixels stay fully transparent; everything else
            // becomes semi-transparent red.
            overlay.Set(r, c, tmp.At<int>(r, c) == 0 ? new Vec4b(0, 0, 0, 0) : new Vec4b(0, 0, 255, 150));
        }
    }
    this.Result.Source = overlay.ToBitmapSource();
}
/// <summary>
/// Mask for values which are inf, inf, inf or NaN, NaN, NaN.
/// </summary>
/// <returns>The bitwise inverse of the real-value mask.</returns>
public Mat GetIndeterminteMask()
{
    var indeterminate = new Mat();
    Cv2.BitwiseNot(GetRealMask(), indeterminate);
    return indeterminate;
}
/// <summary>
/// Analyzes a meter photo: detects the digit rectangles, crops and binarizes
/// each digit, stitches them into one horizontal strip and OCRs the strip.
/// </summary>
/// <param name="input">BGR source frame of the meter.</param>
/// <returns>The extracted reading; an empty reading when no digits are found.</returns>
public Reading Analyze(Mat input)
{
    // Downscale, grayscale, then erode+dilate (morphological opening) to
    // suppress small noise before detection.
    var resized = input.ResizePreserveAspectRatio(maxSize: _settings.InputAnalysisMaxSize);
    var grayScale = resized.Image.CvtColor(ColorConversionCodes.BGR2GRAY);
    var noiseReduced = grayScale.Erode(new Mat()).Dilate(new Mat());
    var meterNumberRectangles = DetectMeterNumbers(noiseReduced);
    if (!meterNumberRectangles.Any())
    {
        return(new Reading(""));
    }
    Mat[] croppedImages;
    if (_settings.DarkSectors)
    {
        // Dark sectors: per-digit global Otsu threshold, then erode.
        croppedImages = meterNumberRectangles.Select(r => { var rect = new Rect(r.Position.X, r.Position.Y, r.Width, r.Height); return(new Mat(noiseReduced, rect) .Threshold(0, 255, ThresholdTypes.Otsu) .Erode(new Mat()) .ResizePreserveAspectRatio(_settings.DarkSectorsSeparateMeterNumbersResizeMaxSize).Image); }).ToArray();
    }
    else
    {
        // Light sectors: per-digit adaptive Gaussian threshold (inverted)...
        croppedImages = meterNumberRectangles.Select(r => { var rect = new Rect(r.Position.X, r.Position.Y, r.Width, r.Height); return(new Mat(noiseReduced, rect) .AdaptiveThreshold(255, AdaptiveThresholdTypes.GaussianC, ThresholdTypes.BinaryInv, _settings.LightSectorsAdaptiveThresholdBlockSize, _settings.LightSectorsAdaptiveThresholdC) .ResizePreserveAspectRatio(_settings.LightSectorsSeparateMeterNumbersResizeMaxSize).Image); }).ToArray();
        // ...then keep only the largest connected blob of each crop.
        croppedImages = croppedImages.Select(i => { var rect = new BlobDetector().GetLargestBlob(i); return(new Mat(i, rect)); }).ToArray();
    }
    // Normalize heights and concatenate the digits left-to-right.
    // NOTE(review): the Aggregate seed is croppedImages[0] (the un-resized
    // first digit) while the remaining elements come from
    // resizedCroppedNumbers — confirm this mismatch is intentional.
    var resizedCroppedNumbers = croppedImages.ResizeToAverageHeight();
    var combinedNumbers = resizedCroppedNumbers.Skip(1) .Aggregate(croppedImages[0], (a, b) => a.CombineImages(b), r => r);
    var numbers = combinedNumbers.WidenEdges(10);
    if (!_settings.DarkSectors)
    {
        // Light-sector strips are inverted so OCR sees dark digits on light.
        Cv2.BitwiseNot(numbers, numbers);
    }
    var reading = new Reading(InsertDecimal(_stringExtractor.GetStringFromImage(numbers)), GetScaledRectangles(resized.OriginalScale, meterNumberRectangles));
    return(reading);
}
/// <summary>
/// Computes a morphological skeleton of the dark regions of a gray image
/// using the classical iterative erosion/opening loop:
/// skel |= binary &amp; ~open(binary); binary = erode(binary); until empty.
/// </summary>
/// <param name="grayImage">8-bit single-channel input.</param>
/// <returns>Binary skeleton image (white skeleton on black background).</returns>
private Mat GetSceleton(Mat grayImage)
{
    var binary = new Mat();
    // BinaryInv: pixels darker than 240 become foreground (255).
    Cv2.Threshold(grayImage, binary, 240, 255, ThresholdType.BinaryInv);
    var skel = Mat.Zeros(binary.Rows, binary.Cols, MatType.CV_8UC1).ToMat();
    // FIX: the original passed (Cols, Rows), transposing the temp buffer;
    // the Mat constructor takes rows first.
    var temp = new Mat(binary.Rows, binary.Cols, MatType.CV_8UC1);
    var elem = Cv2.GetStructuringElement(StructuringElementShape.Cross, new OpenCvSharp.CPlusPlus.Size(3, 3));
    bool done;
    do
    {
        Cv2.MorphologyEx(binary, temp, MorphologyOperation.Open, elem);
        Cv2.BitwiseNot(temp, temp);
        Cv2.BitwiseAnd(binary, temp, temp);
        Cv2.BitwiseOr(skel, temp, skel);
        Cv2.Erode(binary, binary, elem);
        double max;
        double min;
        // Stop once the eroded image is fully black.
        Cv2.MinMaxLoc(binary, out min, out max);
        done = (max == 0);
    } while (!done);
    return(skel);
}
/// <summary>
/// Loads the chess board image, isolates dark shapes (threshold + closing +
/// inversion), then draws the detected contours and their vertices.
/// </summary>
static void Main(string[] args)
{
    Mat source = Cv2.ImRead("chess.png");
    Mat grayscale = new Mat();
    Mat thresholded = new Mat();
    Mat closed = new Mat();
    Mat inverted = new Mat();
    Mat canvas = source.Clone();
    Mat element = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(3, 3));

    Cv2.CvtColor(source, grayscale, ColorConversionCodes.BGR2GRAY);
    Cv2.Threshold(grayscale, thresholded, 230, 255, ThresholdTypes.Binary);
    // Two closing iterations to fill small gaps before inverting.
    Cv2.MorphologyEx(thresholded, closed, MorphTypes.Close, element, new Point(-1, -1), 2);
    Cv2.BitwiseNot(closed, inverted);

    Cv2.FindContours(inverted, out Point[][] contours, out HierarchyIndex[] hierarchy, RetrievalModes.Tree, ContourApproximationModes.ApproxTC89KCOS);
    Cv2.DrawContours(canvas, contours, -1, new Scalar(255, 0, 0), 2, LineTypes.AntiAlias, hierarchy, 3);

    // Mark every contour vertex with a small red dot.
    foreach (Point[] contour in contours)
    {
        foreach (Point vertex in contour)
        {
            Cv2.Circle(canvas, vertex, 1, new Scalar(0, 0, 255), 3);
        }
    }

    Cv2.ImShow("dst", canvas);
    Cv2.WaitKey(0);
    Cv2.DestroyAllWindows();
}
/// <summary>
/// Renders a scanned PDF to page images, binarizes, inverts and deskews each
/// page, then extracts the per-check data.
/// </summary>
/// <param name="scanPdfPath">Path of the scanned PDF file.</param>
/// <returns>One list of recognized values per detected check.</returns>
public List <List <int> > Parse(string scanPdfPath)
{
    Mat[] pages;
    using (var rendered = Gs.PdfToJpeg(scanPdfPath, $"Scans_{Guid.NewGuid()}", "scan"))
    {
        // Grayscale load -> Gaussian blur -> fixed-threshold binarization.
        pages = rendered.Files
            .Select(path => Cv2.ImRead(path, ImreadModes.Grayscale))
            .Select(mat => mat.GaussianBlur(new Size(KernelSize, KernelSize), GaussianSigma, GaussianSigma))
            .Select(mat => mat.Threshold(BinThreshold, 255, ThresholdTypes.Binary))
            .ToArray();
    }

    // Invert in place (ink becomes white) and straighten each page.
    foreach (var page in pages)
    {
        Cv2.BitwiseNot(page, page);
        Deskew(page);
    }

    return FindChecks(pages);
}
// This is the method that consumes the Matrix images and runs some openCV
// processing on each before saving them. The threads are asynchronous
// so processing can occur out of order intentionally to speed up the flow.
// NOTE(review): `async void` plus the un-awaited Task.Run means failures in
// the worker are unobservable to callers — presumably intentional
// fire-and-forget; confirm. The per-image Mats are never disposed.
private static async void AsynchronousImageConv(ISourceBlock <Image> imageQueue)
{
    // Drain until the source block completes and its buffer is empty.
    while (await imageQueue.OutputAvailableAsync())
    {
        Image producedResult = imageQueue.Receive();
        // One fire-and-forget worker per image; completion order is not preserved.
        Task.Run(
            () => {
                // Artificial jitter: roughly one in three items sleeps 550 ms.
                if (DateTime.Now.Ticks % 3 == 0)
                {
                    Thread.Sleep(550);
                }
                Mat output = new Mat();
                Mat input = producedResult.mat;
                Mat flipped = input.Flip(FlipMode.Y);
                //create an inversion effect i.e white becomes black
                Cv2.BitwiseNot(flipped, output);
                string outputName = @"C:\Temp\MetroImgs\Output\" + producedResult.filename;
                output.ImWrite(outputName);
                Console.WriteLine("Processed Image {0} from the queue:", producedResult.filename);
                WriteToLog("Processed Image " + producedResult.filename + " from the queue: \n");
            });
    }
}
/// <summary>
/// Mask for values which are inf, inf, inf or NaN, NaN, NaN.
/// </summary>
/// <param name="mat">Source matrix.</param>
/// <returns>The bitwise inverse of the matrix's real-value mask.</returns>
public static Mat GetIndeterminteMask(this Mat mat)
{
    var result = new Mat();
    Cv2.BitwiseNot(mat.GetRealMask(), result);
    return result;
}
/// <summary>
/// Window start-up: loads a card photo, isolates the screen and then the card
/// region via repeated binarize + contour passes, and OCRs the card with
/// Tesseract (din+eng+chi_sim), showing the result in a message box.
/// </summary>
public Window1()
{
    InitializeComponent();
    Mat src = new Mat(@"./carp1.jpg");
    Cv2.ImShow("src", src);
    // Work at half resolution.
    src = src.Resize(new Size(src.Width / 2, src.Height / 2));
    //src = src.Resize(new Size(src.Width / 3, src.Height / 3));
    //for (var y = 0; y < src.Height; y++)
    //{
    //    for (var x = 0; x < src.Width; x++)
    //    {
    //        var color = src.Get<Vec3b>(y, x);
    //        //if (color.Item2 < 175)
    //        if (color.Item2 < 225)
    //        {
    //            color.Item0 = 255;
    //            color.Item1 = 0;
    //            color.Item2 = 0;
    //        }
    //        src.Set(y, x, color);
    //    }
    //}
    var binary = BinarizationMat(src);
    Cv2.ImShow("src", src);
    Cv2.ImShow("bin", binary);
    //var line = binary.Canny(100, 200);
    //Cv2.ImShow("line", line);
    // First pass: locate the screen within the photo.
    var fScreenMat = FindContoursMat(binary, src);
    // Upscale, then trim a fixed margin (5% horizontal / 10% vertical).
    fScreenMat = fScreenMat.Resize(new Size(fScreenMat.Width * 2, fScreenMat.Height * 2));
    fScreenMat = new Mat(fScreenMat, new Rect((int)(fScreenMat.Width * 0.05), (int)(fScreenMat.Height * 0.1), fScreenMat.Width - (int)(fScreenMat.Width * 0.1), fScreenMat.Height - (int)(fScreenMat.Height * 0.2)));
    var fScreenBinaryMat = BinarizationMat(fScreenMat);
    Cv2.BitwiseNot(fScreenBinaryMat, fScreenBinaryMat, new Mat());
    // Second pass: locate the card within the screen.
    var fCardMat = FindContoursMat(fScreenBinaryMat, fScreenMat);
    //Cv2.ImShow("fScreenMat", fScreenMat);
    //Cv2.ImShow("fCardMat", fCardMat);
    //dstImg = new Mat(dstImg,
    //    new Rect((int)(dstImg.Width * 0.15), (int)(dstImg.Height * 0.3),
    //        dstImg.Width - (int)(dstImg.Width * 0.3), dstImg.Height - (int)(dstImg.Height * 0.6)));
    //fCardMat = fCardMat.Resize(new Size(fCardMat.Width / 1.5, fCardMat.Height / 1.5));
    Cv2.ImShow("fCardMat", fCardMat);
    // Binarize the card and clean it up with Otsu before OCR.
    var dstImg = BinarizationMat(fCardMat);
    dstImg = dstImg.Threshold(50, 255, ThresholdTypes.Otsu | ThresholdTypes.Binary);
    Cv2.BitwiseNot(dstImg, dstImg, new Mat());
    Cv2.ImShow("dst", dstImg);
    dstImg = dstImg.Resize(new Size(dstImg.Width / 2.5, dstImg.Height / 2.5));
    // OCR the prepared image; the TIFF round-trip feeds Tesseract via Pix.
    var engine = new TesseractEngine("./tessdata", "din+eng+chi_sim", EngineMode.Default);
    var resProcess = engine.Process(Pix.LoadTiffFromMemory(dstImg.ToBytes(".tiff")));
    MessageBox.Show(resProcess.GetText());
}
/// <summary>
/// Submatrix operations: assigning into a region, in-place inversion of a
/// region, and filling a region via SubMat.
/// </summary>
private void SubMat()
{
    Mat src = Cv2.ImRead(FilePath.Image.Lenna);

    // Assign small image to mat
    Mat small = new Mat();
    Cv2.Resize(src, small, new Size(100, 100));
    src[10, 110, 10, 110] = small;
    src[370, 470, 400, 500] = small.T();
    // ↑ This is same as the following:
    //small.T().CopyTo(src[370, 470, 400, 500]);

    // Get partial mat (similar to cvSetImageROI)
    Mat part = src[200, 400, 200, 360];
    // Invert partial pixel values (in place — `part` shares src's data)
    Cv2.BitwiseNot(part, part);

    // Fill the region (50..100, 400..450) with value 128
    // (comment previously said 100..150, which did not match the code)
    part = src.SubMat(50, 100, 400, 450);
    part.SetTo(128);

    using (new Window("SubMat", src))
    {
        Cv2.WaitKey();
    }
}
/// <summary>
/// Converts the input image to JPEG if needed, keeps only dark pixels
/// (all channels &lt;= 128) and inverts the result, producing a clean
/// black-on-white bitmap for downstream recognition.
/// </summary>
/// <param name="image">Source GDI+ image.</param>
/// <param name="showProcessedInput">When true, display the processed image in a window.</param>
/// <returns>The processed bitmap.</returns>
private static Bitmap prepareInput(System.Drawing.Image image, bool showProcessedInput = false)
{
    Mat src = BitmapConverter.ToMat((Bitmap)image);

    // If it is not a jpg, convert it (JPEG encode/decode round trip).
    if (!image.RawFormat.Equals(System.Drawing.Imaging.ImageFormat.Jpeg))
    {
        byte[] buffer;
        Cv2.ImEncode(".jpg", src, out buffer);
        src = Cv2.ImDecode(buffer, ImreadModes.AnyColor);
    }

    // Canny experiment (kept for reference):
    //Mat prepared = new Mat();
    //Cv2.Canny(converted, prepared, 300, 900);

    // Apply a filter to discard noise: select dark pixels, then invert.
    Cv2.InRange(src, new Scalar(0, 0, 0), new Scalar(128, 128, 128), src);
    Cv2.BitwiseNot(src, src);

    // Show the result, depending on the specified parameter.
    if (showProcessedInput)
    {
        using (new Window("Source", WindowMode.FreeRatio, src))
        {
            Cv2.WaitKey();
        }
    }
    return(BitmapConverter.ToBitmap(src));
}
/// <summary>
/// Demonstrates built-in filters: shows the source image alongside its
/// eroded, dilated and inverted versions until a key is pressed.
/// </summary>
private static void testBuiltinFilters()
{
    using var src = new Mat(@"..\..\Images\Car.jpg", ImreadModes.AnyDepth | ImreadModes.AnyColor);
    using var dst = new Mat();
    src.CopyTo(dst);

    using var srcWindow = new Window("src", image: src);
    Cv2.Erode(src, dst, new Mat());
    using var erodeWindow = new Window("Erode", image: dst);
    Cv2.Dilate(src, dst, new Mat());
    using var dilateWindow = new Window("Dilate", image: dst);
    Cv2.BitwiseNot(src, dst);
    using var invertWindow = new Window("Invert", image: dst);
    Cv2.WaitKey();
}
/// <summary>
/// Verifies Cv2.BitwiseAnd/Or/Xor/Not against the corresponding C#
/// operators on two deterministically shuffled byte permutations.
/// </summary>
public void Bitwise()
{
    const int count = 256;
    var rng = new Random(0);
    var left = Enumerable.Range(0, count).Select(i => (byte)i).OrderBy(_ => rng.Next()).ToArray();
    var right = Enumerable.Range(0, count).Select(i => (byte)i).OrderBy(_ => rng.Next()).ToArray();

    using var leftMat = new Mat(count, 1, MatType.CV_8UC1, left);
    using var rightMat = new Mat(count, 1, MatType.CV_8UC1, right);
    using var andResult = new Mat();
    using var orResult = new Mat();
    using var xorResult = new Mat();
    using var notResult = new Mat();

    Cv2.BitwiseAnd(leftMat, rightMat, andResult);
    Cv2.BitwiseOr(leftMat, rightMat, orResult);
    Cv2.BitwiseXor(leftMat, rightMat, xorResult);
    Cv2.BitwiseNot(leftMat, notResult);

    // Element-wise comparison against the operator results.
    for (int i = 0; i < count; i++)
    {
        Assert.Equal((byte)(left[i] & right[i]), andResult.Get <byte>(i));
        Assert.Equal((byte)(left[i] | right[i]), orResult.Get <byte>(i));
        Assert.Equal((byte)(left[i] ^ right[i]), xorResult.Get <byte>(i));
        Assert.Equal((byte)(~left[i]), notResult.Get <byte>(i));
    }
}
/// <summary>
/// Demonstrates the four bitwise operations: shows the source, its binary
/// version, and the Not/And/Or/Xor results in separate windows, then
/// returns the binary image.
/// </summary>
/// <param name="src">Source image.</param>
/// <returns>The binarized source as an IplImage.</returns>
public IplImage BitwiseMat(IplImage src)
{
    Mat Input1 = new Mat(src);
    // Binary(src, 150): project helper producing a single-channel binary image.
    Mat Input2 = new Mat(this.Binary(src, 150));
    Mat bitwise = new Mat();

    Window win_src1 = new Window("src1", WindowMode.StretchImage, Input1);
    Window win_src2 = new Window("src2", WindowMode.StretchImage, Input2);

    Cv2.BitwiseNot(Input1, bitwise);
    Window win_Not = new Window("BitwiseNot", WindowMode.StretchImage, bitwise);

    // The binary image is converted to BGR so channel counts match Input1.
    // NOTE: `bitwise` is reused as the destination for each operation; each
    // window presumably snapshots or shares it — behavior depends on Window.
    Cv2.BitwiseAnd(Input1, Input2.CvtColor(ColorConversion.GrayToBgr), bitwise);
    Window win_And = new Window("BitwiseAnd", WindowMode.StretchImage, bitwise);

    Cv2.BitwiseOr(Input1, Input2.CvtColor(ColorConversion.GrayToBgr), bitwise);
    Window win_Or = new Window("BitwiseOr", WindowMode.StretchImage, bitwise);

    Cv2.BitwiseXor(Input1, Input2.CvtColor(ColorConversion.GrayToBgr), bitwise);
    Window win_Xor = new Window("BitwiseXor", WindowMode.StretchImage, bitwise);

    // IplImage-style equivalents (translated from Korean: "IplImage form"):
    //Cv.Not();
    //Cv.And();
    //Cv.Or();
    //Cv.Xor();

    return(Input2.ToIplImage());
}
/// <summary>
/// Chroma-keys a BGR frame against KeyColor in HSV space: pixels within
/// ±10 hue and ±KeyColorRange saturation/value of the key color get
/// alpha 0; all other pixels get alpha 255.
/// </summary>
/// <param name="normalFrame">BGR input frame.</param>
/// <returns>BGRA bitmap whose alpha channel masks out the key color.</returns>
public Bitmap ToBitmap(Mat normalFrame)
{
    ConvertBGR2HSV(KeyColor.B, KeyColor.G, KeyColor.R, out var h, out var s, out var v);

    using var hsv = new Mat();
    Cv2.CvtColor(normalFrame, hsv, ColorConversionCodes.BGR2HSV);

    using var grayFrame = new Mat();
    var s_min = new Scalar(
        Math.Max(0, h - 10),
        Math.Max(0, s - KeyColorRange),
        Math.Max(0, v - KeyColorRange));
    var s_max = new Scalar(
        Math.Min(255, h + 10),
        Math.Min(255, s + KeyColorRange),
        Math.Min(255, v + KeyColorRange));
    Cv2.InRange(hsv, s_min, s_max, grayFrame);
    // Matched (keyed) pixels become 0, everything else 255 — the alpha plane.
    Cv2.BitwiseNot(grayFrame, grayFrame);

    // FIX: the Mats produced by Cv2.Split leaked in the original.
    var rgb = Cv2.Split(normalFrame);
    try
    {
        var rgbaLayers = new Mat[] { rgb[0], rgb[1], rgb[2], grayFrame };
        using var rgba = new Mat();
        Cv2.Merge(rgbaLayers, rgba);
        // ToBitmap copies the pixels, so the Mats can be disposed afterwards.
        return BitmapConverter.ToBitmap(rgba);
    }
    finally
    {
        foreach (var channel in rgb)
        {
            channel.Dispose();
        }
    }
}
/// <summary>
/// Chroma-keys a BGR frame directly in BGR space: pixels within
/// ±KeyColorRange of KeyColor on every channel get alpha 0; all other
/// pixels get alpha 255.
/// </summary>
/// <param name="normalFrame">BGR input frame.</param>
/// <returns>BGRA bitmap whose alpha channel masks out the key color.</returns>
public Bitmap ToBitmap(Mat normalFrame)
{
    using var grayFrame = new Mat();
    //Cv2.CvtColor(normalFrame, grayFrame, ColorConversionCodes.BGR2GRAY);
    var s_min = new Scalar(
        Math.Max(0, KeyColor.B - KeyColorRange),
        Math.Max(0, KeyColor.G - KeyColorRange),
        Math.Max(0, KeyColor.R - KeyColorRange));
    var s_max = new Scalar(
        Math.Min(255, KeyColor.B + KeyColorRange),
        Math.Min(255, KeyColor.G + KeyColorRange),
        Math.Min(255, KeyColor.R + KeyColorRange));
    Cv2.InRange(normalFrame, s_min, s_max, grayFrame);
    // Matched (keyed) pixels become 0, everything else 255 — the alpha plane.
    Cv2.BitwiseNot(grayFrame, grayFrame);

    // FIX: the Mats produced by Cv2.Split leaked in the original.
    var rgb = Cv2.Split(normalFrame);
    try
    {
        var rgbaLayers = new Mat[] { rgb[0], rgb[1], rgb[2], grayFrame };
        using var rgba = new Mat();
        Cv2.Merge(rgbaLayers, rgba);
        // ToBitmap copies the pixels, so the Mats can be disposed afterwards.
        return BitmapConverter.ToBitmap(rgba);
    }
    finally
    {
        foreach (var channel in rgb)
        {
            channel.Dispose();
        }
    }
}
/// <summary>
/// FindContours on an ArUco marker sheet: every external contour of the
/// inverted image must simplify to exactly four vertices.
/// </summary>
public void FindContours()
{
    using var src = Image("markers_6x6_250.png", ImreadModes.Grayscale);
    // Markers are dark on light; invert so contours enclose white blobs.
    Cv2.BitwiseNot(src, src);
    Cv2.FindContours(src, out var contours, out var hierarchy, RetrievalModes.External, ContourApproximationModes.ApproxSimple);

    Assert.NotEmpty(contours);
    Assert.NotEmpty(hierarchy);
    Assert.All(contours, contour =>
    {
        Assert.Equal(4, contour.Length);
    });

    // Visual aid when debugging only.
    if (Debugger.IsAttached)
    {
        using var view = new Mat(src.Size(), MatType.CV_8UC3, Scalar.All(0));
        Cv2.DrawContours(view, contours, -1, Scalar.Red);
        Window.ShowImages(src, view);
    }
}
/// <summary>
/// Writes the bitwise inverse of <paramref name="gray"/> into
/// <paramref name="inverted"/>.
/// </summary>
public static void InvertImage(IplImage gray, ref IplImage inverted)
{
    Mat source = new Mat(gray);
    Mat negated = new Mat();
    Cv2.BitwiseNot(source, negated);
    negated.ToIplImage().Copy(inverted);
}
// ---- Image negative -----
/// <summary>
/// Loads the image at <paramref name="path"/> and returns its negative.
/// </summary>
/// <param name="path">Path of the image file to invert.</param>
/// <returns>The inverted image as a GDI+ bitmap.</returns>
private Bitmap InvertImage(string path)
{
    // FIX: the source/destination Mats leaked in the original.
    using Mat src = new Mat(path, ImreadModes.Unchanged);
    using Mat dst = new Mat();
    Cv2.BitwiseNot(src, dst);
    // ToBitmap copies the pixel data, so dst can be disposed afterwards.
    return OpenCvSharp.Extensions.BitmapConverter.ToBitmap(dst);
}
/// <summary>
/// Returns a color-inverted copy of the given SoftwareBitmap.
/// </summary>
/// <param name="Input">Source bitmap (treated as 8-bit BGRA).</param>
/// <returns>The inverted bitmap.</returns>
public static SoftwareBitmap InvertEffect(SoftwareBitmap Input)
{
    using Mat source = Input.SoftwareBitmapToMat();
    using Mat inverted = new Mat(source.Rows, source.Cols, MatType.CV_8UC4);
    Cv2.BitwiseNot(source, inverted);
    return inverted.MatToSoftwareBitmap();
}
/// <summary>
/// Stores the new frame and refreshes all derived views: grayscale, HSV,
/// and the inverted frame plus its grayscale/HSV conversions.
/// </summary>
/// <param name="frame">New BGR frame; stored by reference, not copied.</param>
public void UpdateFrame(Mat frame)
{
    CurrentFrame = frame;
    GrayFrame = CurrentFrame.CvtColor(ColorConversionCodes.BGR2GRAY);
    HsvImage = CurrentFrame.CvtColor(ColorConversionCodes.BGR2HSV);
    // NOTE(review): assumes InversedFrame is non-null (initialized elsewhere);
    // BitwiseNot will (re)allocate its contents as needed.
    Cv2.BitwiseNot(CurrentFrame, InversedFrame);
    InversedGrayFrame = InversedFrame.CvtColor(ColorConversionCodes.BGR2GRAY);
    InversedHsvImage = InversedFrame.CvtColor(ColorConversionCodes.BGR2HSV);
}
/// <summary>
/// Loads the life-counter icon and builds its mask; after InRange + Not,
/// the mask is white exactly where the icon pixels fall outside (0.01, 1.0].
/// </summary>
private static void InitIcon()
{
    // FIX: the original assigned `new Mat()` to both fields and immediately
    // overwrote them (ImRead / InRange allocate their own output), leaking
    // the temporaries. Also dropped a pointless `$` on a constant string.
    string filenameIcon = Path.Combine(LoadRemover.ImageFolder, "life.png");
    LifeCounterIcon = Cv2.ImRead(filenameIcon);
    LifeCounterIconMask = new Mat();
    Cv2.InRange(LifeCounterIcon, 0.01f, 1.0f, LifeCounterIconMask);
    Cv2.BitwiseNot(LifeCounterIconMask, LifeCounterIconMask);
}
/// <summary>
/// Cloudiness classifier demo: for each sky photo in the folder, measures the
/// fraction of bright pixels in the inverted saturation channel and labels the
/// image with a (Lithuanian) weather description.
/// </summary>
static void Main(string[] args)
{
    string[] files = Directory.GetFiles(@"C:\Users\Laptop\Desktop\asd2");
    Window show = new Window("", WindowMode.FreeRatio);
    Window show2 = new Window("asds", WindowMode.FreeRatio);
    Mat[] splitMat = new Mat[3];
    Mat image = new Mat();
    Mat imageTrue = new Mat();
    foreach (var file in files)
    {
        image = Cv2.ImRead(file);
        image.CopyTo(imageTrue);
        // Convert to HSV and take the (inverted) saturation plane:
        // clouds are low-saturation, so they come out bright after inversion.
        Cv2.CvtColor(image, image, ColorConversion.RgbToHsv);
        Cv2.Split(image, out splitMat);
        //Cv2.CvtColor(splitMat[1], image, ColorConversion.RgbToGray);
        Cv2.BitwiseNot(splitMat[1], image);
        Cv2.Threshold(image, image, 210, 255, ThresholdType.Binary);
        int total = image.Cols * image.Rows;
        // NOTE(review): CountNonZero counts the WHITE (cloud) pixels, so the
        // variable name `black` is misleading; `debesuotumas` = cloud fraction.
        int black = Cv2.CountNonZero(image);
        float debesuotumas = (float)black / (float)total;
        // Map the cloud fraction to a Lithuanian weather description.
        string oroSalygos = "";
        if (debesuotumas != 0 && debesuotumas < 0.01)
        {
            oroSalygos = "Giedra su mazais debesimis";
        }
        else if (0.01 < debesuotumas && debesuotumas < 0.45)
        {
            oroSalygos = "Lengvas Debesuotumas";
        }
        else if (0.45 < debesuotumas && debesuotumas < 0.8)
        {
            oroSalygos = "Debesuota";
        }
        else if (0.8 < debesuotumas)
        {
            oroSalygos = "Labai Debesuota";
        }
        else
        {
            oroSalygos = "Giedra";
        }
        Console.WriteLine(debesuotumas);
        Point vieta = new Point(100, 100);
        Cv2.PutText(imageTrue, oroSalygos, vieta, FontFace.Italic, 3, Scalar.Red, 5);
        show.Image = image;
        show2.Image = imageTrue;
        Cv2.WaitKey();
    }
}
/// <summary>
/// Samples the triangular region (pt0, pt1, pt2) of <paramref name="img"/>:
/// builds a filled-triangle mask, removes the white-mask pixels inside it,
/// and returns a Line carrying that mask and the mean color under it.
/// Optionally draws the triangle edges on the debug image and shows the mask.
/// </summary>
private Line GetLine(Mat img, Mat img_debug, Mat mask_white, Player player, Point pt0, Point pt1, Point pt2, string line_name)
{
    //Point ptc1 = new Point(70, 390);
    //Point ptc2 = new Point(520, 390);
    // Debug visualization of the triangle's two edges from pt0.
    if (config.Preview > 0)
    {
        Cv2.Line(img_debug, pt0, pt1, Colors.purple, 2);
        Cv2.Line(img_debug, pt0, pt2, Colors.purple, 2);
    }

    // https://stackoverflow.com/questions/51875114/triangle-filling-in-opencv
    // triangle_cnt = np.array( [pt1, pt2, pt3] )
    // cv2.drawContours(image, [triangle_cnt], 0, (0, 255, 0), -1)
    // cv2.imshow("image", image)
    // cv2.waitKey()

    // https://stackoverflow.com/questions/44063407/opencvsharp-2-floodfill-is-broken-and-corrupts-output-mask
    // Single-channel black mask, same size as the image.
    Mat mask = new Mat(img.Rows, img.Cols, MatType.CV_8UC1, new Scalar(0, 0, 0, 0));

    // floodflags = 4
    // #floodflags |= cv2.FLOODFILL_MASK_ONLY
    // floodflags |= (255 << 8)
    //FloodFillFlags flags = FloodFillFlags.Link4 | FloodFillFlags.MaskOnly | FloodFillFlags.FixedRange;
    // num,im,mask,rect = cv2.floodFill(orig2, mask, seed, (255,255,0), (5,)*3, (5,)*3, floodflags)
    //Rect rect = new Rect();
    //int flags = (int)FloodFillFlags.Link4 | (int)FloodFillFlags.MaskOnly | (255 << 8);
    //Cv2.FloodFill(src, mask, seed, new Scalar(255, 255, 0), out rect, new Scalar(2, 2, 2), new Scalar(2, 2, 2), flags);
    //Mat mask2 = new Mat(mask, new Rect(new Point(1, 1), new Size(src.Cols, src.Rows)));
    //mask.Release();

    // https://stackoverflow.com/questions/35969667/how-to-use-the-opencvsharp-3-polylines-fillpoly-library-function
    // Fill the triangle (thickness -1) into the mask via DrawContours.
    List <List <Point> > listOfListOfPoint = new List <List <Point> >();
    List <Point> points = new List <Point>();
    listOfListOfPoint.Add(points);
    points.Add(pt0);
    points.Add(pt1);
    points.Add(pt2);
    //Cv2.C Polylines(mask, listOfListOfPoint, true, new Scalar(255), 1);
    Cv2.DrawContours(mask, listOfListOfPoint, 0, new Scalar(255), -1);

    // mask2 = NOT(mask_white) restricted to the triangle area.
    Mat mask2 = new Mat();
    Cv2.BitwiseNot(mask_white, mask2, mask);
    //Cv2.BitwiseAnd(mask2, mask, mask);

    // Average image color under the final mask.
    Scalar color_line = Cv2.Mean(img, mask2);

    if (config.Preview == 4)
    {
        Cv2.ImShow(line_name, mask2);
    }

    Line line = new Line();
    line.mask = mask2;
    line.color = color_line;
    return(line);
}
/// <summary>
/// Builds a binary mask that is black where the image matches the given
/// color within +10 per channel, and white everywhere else.
/// </summary>
/// <param name="Col">Seed color (lower bound of the match range).</param>
/// <param name="Image">Image to scan.</param>
/// <returns>Single-channel 8-bit mask.</returns>
public static Mat ExtractColorBlob(Vec3b Col, Mat Image)
{
    // Upper bound: each channel of the seed color plus 10.
    Scalar upper = new Scalar(Col.Item0 + 10, Col.Item1 + 10, Col.Item2 + 10);
    Mat matched = new Mat(Image.Height, Image.Width, MatType.CV_8U);
    Cv2.InRange(Image, (Scalar)Col, upper, matched);
    Mat inverted = new Mat(Image.Height, Image.Width, MatType.CV_8U);
    Cv2.BitwiseNot(matched, inverted);
    return inverted;
}
/// <summary>
/// Builds the evaluation image: keeps only the pixels of the inspection
/// result that fall outside the template's contour.
/// (Japanese identifiers: 評価用画像作成 = "create evaluation image",
/// テンプレート = "template", 検査結果 = "inspection result".)
/// </summary>
public void 評価用画像作成(Mat テンプレート, Mat 検査結果, ref Mat dst)
{
    Mat mask = テンプレート.Clone();//at this point the contour is white (the clone just allocates the buffer; it is overwritten below)
    Zero(ref dst);
    Cv2.BitwiseNot(テンプレート, mask);//make the correct-answer image's contour black
    Cv2.BitwiseAnd(検査結果, 検査結果, dst, mask);
    mask.Dispose();
}
/// <summary>
/// Finds dark shapes (threshold + closing + inversion), draws each contour's
/// polygon approximation, its minimum enclosing circle and its vertices, and
/// stamps the average circle center as "x:y" text onto the output.
/// </summary>
/// <param name="img">Single-channel input image (thresholded at 230).</param>
/// <returns>Annotated copy of the input.</returns>
public Mat MinEnclosing(Mat img)
{
    Mat binary = new Mat();
    Mat morp = new Mat();
    Mat image = new Mat();
    Mat dst = img.Clone();
    // Accumulators for the average of all enclosing-circle centers.
    int pointX = 0, pointY = 0;
    string text;
    Mat kernel = Cv2.GetStructuringElement(MorphShapes.Rect, new OpenCvSharp.Size(3, 3));
    OpenCvSharp.Point[][] contours;
    HierarchyIndex[] hierarchy;
    Cv2.Threshold(img, binary, 230, 255, ThresholdTypes.Binary);
    // Two closing iterations, then invert so shapes become white blobs.
    Cv2.MorphologyEx(binary, morp, MorphTypes.Close, kernel, new OpenCvSharp.Point(-1, -1), 2);
    Cv2.BitwiseNot(morp, image);
    Cv2.FindContours(image, out contours, out hierarchy, RetrievalModes.Tree, ContourApproximationModes.ApproxTC89KCOS);
    for (int i = 0; i < contours.Length; i++)
    {
        // Polygon approximation with epsilon = 1% of the contour perimeter.
        double perimeter = Cv2.ArcLength(contours[i], true);
        double epsilon = perimeter * 0.01;
        OpenCvSharp.Point[] approx = Cv2.ApproxPolyDP(contours[i], epsilon, true);
        OpenCvSharp.Point[][] draw_approx = new OpenCvSharp.Point[][] { approx };
        Cv2.DrawContours(dst, draw_approx, -1, new Scalar(255, 0, 0), 2, LineTypes.AntiAlias);
        Cv2.MinEnclosingCircle(contours[i], out Point2f center, out float radius);
        Cv2.Circle(dst, new OpenCvSharp.Point(center.X, center.Y), (int)radius, Scalar.Red, 2, LineTypes.AntiAlias);
        pointX += (int)center.X;
        pointY += (int)center.Y;
        // Mark each approximated vertex with a small dot.
        for (int j = 0; j < approx.Length; j++)
        {
            Cv2.Circle(dst, approx[j], 1, new Scalar(0, 0, 255), 3);
        }
    }
    if (contours.Length > 0)
    {
        // Average center of all enclosing circles, printed at a fixed position
        // (coordinates suggest a large, high-resolution source image).
        pointX = pointX / contours.Length;
        pointY = pointY / contours.Length;
        text = pointX.ToString();
        text = text + ":" + pointY.ToString();
        Cv2.PutText(dst, text, new OpenCvSharp.Point(3300, 2700), HersheyFonts.HersheyPlain, 5, Scalar.White, 5);
    }
    return(dst);
}
/// <summary>
/// Blends two images: keeps the <paramref name="imgh"/> pixels inside the
/// generated mask and the <paramref name="imgl"/> pixels outside it.
/// (Original comment 融合图片 = "blend images".)
/// </summary>
public static Mat MixImg(Mat imgl, Mat imgh)
{
    Mat mask = MakeMask(imgh, 685, 685, 70, 70, 10);
    Mat inverseMask = new Mat();
    Cv2.BitwiseNot(mask, inverseMask);
    Mat foreground = (imgh & mask);
    Mat background = (imgl & inverseMask);
    return foreground | background;
}