public void Compare(Mat mat1, Mat mat2)
{
    CvInvoke.Resize(mat2, mat2, mat1.Size);
    CvInvoke.CvtColor(mat1, mat1, ColorConversion.Bgr2Gray);
    CvInvoke.CvtColor(mat2, mat2, ColorConversion.Bgr2Gray);

    // Histogram size: one bin per gray level, so 256 gray levels give 256 bins.
    // (For an HSV image a two-dimensional H-S histogram would be built instead;
    // H runs 0~180 while S and V run 0~255.)
    int hBins = 256;
    int[] histSize = { hBins };
    // Both images are grayscale here, so a single full-range (low, high) pair is enough.
    float[] grayRanges = { 0, 256 };
    int[] channels = { 0 }; // single (gray) channel

    Mat hist1 = new Mat(), hist2 = new Mat();
    CvInvoke.CalcHist(mat1, channels, new Mat(), hist1, histSize, grayRanges, false);
    CvInvoke.CalcHist(mat2, channels, new Mat(), hist2, histSize, grayRanges, false);

    var result = CvInvoke.CompareHist(hist1, hist2, HistogramCompMethod.Correl);
    Console.WriteLine($"result:{result}");
}
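A possible call site for the Compare method above, assuming it is invoked from inside the same class; the image paths are placeholders, and note that Compare resizes and converts both inputs in place.

// Hypothetical usage of Compare; paths are placeholders and both files are read as BGR color images.
Mat m1 = new Image<Bgr, byte>("left.jpg").Mat;
Mat m2 = new Image<Bgr, byte>("right.jpg").Mat;
Compare(m1, m2); // prints a correlation score in [-1, 1]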
private void calculateToolStripMenuItem_Click(object sender, EventArgs e) { try { if (pictureBox1.Image == null) { return; } var img = new Bitmap(pictureBox1.Image) .ToImage <Gray, byte>(); Mat hist = new Mat(); float[] ranges = new float[] { 0, 256 }; int[] channel = { 0 }; int[] histSize = { 256 }; VectorOfMat ms = new VectorOfMat(); ms.Push(img); CvInvoke.CalcHist(ms, channel, null, hist, histSize, ranges, false); HistogramViewer viewer = new HistogramViewer(); viewer.Text = "Image Histogram"; viewer.ShowIcon = false; viewer.HistogramCtrl.AddHistogram("Image Histogram", Color.Blue, hist, 256, ranges); viewer.HistogramCtrl.Refresh(); viewer.Show(); //pictureBox1.Image = CreateGraph(hist).GetImage(); } catch (Exception ex) { MessageBox.Show(ex.Message); } }
/// <summary> /// Calculate histogram for given vector of matrices, allowing masking. /// </summary> /// <param name="vm">matrices</param> /// <param name="tempMask">mask</param> /// <returns>histogram as umat</returns> public static UMat CalculateHistogram(VectorOfMat vm, Image <Gray, byte> tempMask) { UMat hist = new UMat(); int[] channel = new int[] { 0 }; int[] histSize = new int[] { 32 }; float[] range = new float[] { 0.0f, 256.0f }; CvInvoke.CalcHist(vm, channel, tempMask, hist, histSize, range, false); return(hist); }
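A minimal sketch of how CalculateHistogram might be called, assuming a grayscale input and an all-pass mask; the file name is a placeholder.

// Hypothetical usage of CalculateHistogram; "input.png" is a placeholder.
Image<Gray, byte> gray = new Image<Gray, byte>("input.png");
Image<Gray, byte> allPass = new Image<Gray, byte>(gray.Width, gray.Height, new Gray(255)); // mask that keeps every pixel
using (VectorOfMat vm = new VectorOfMat(gray.Mat))
using (UMat hist = CalculateHistogram(vm, allPass))
{
    Console.WriteLine($"bins: {hist.Rows}"); // 32 bins covering [0, 256)
}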
private void button2_Click(object sender, EventArgs e)
{
    // [1] Copy the hue channel into hueImage.
    int[] ch = { 0, 0 };
    CvInvoke.MixChannels(hsvImage, hueImage, ch);

    // [2] Compute the histogram and normalize it.
    Mat hist = new Mat();
    int[] channels = new int[1] { 0 };
    int[] histSize = new int[1] { Math.Max(g_bins, 2) };
    float[] ranges = new float[2] { 0, 180 }; // hue range
    CvInvoke.CalcHist(hueImage, channels, null, hist, histSize, ranges, false);
    CvInvoke.Normalize(hist, hist, 0, 255, NormType.MinMax, DepthType.Default, null);

    // [3] Compute the back projection.
    Mat backproj = new Mat();
    CvInvoke.CalcBackProject(hueImage, channels, hist, backproj, ranges, 1);

    // [4] Display the back projection.
    imageBox2.Image = backproj;

    // [5] Draw the histogram.
    Image<Gray, float> img = hist.ToImage<Gray, float>();
    float[] data = new float[img.Data.Length];
    for (int i = 0; i < img.Data.Length; i++)
    {
        data[i] = img.Data[i, 0, 0];
    }
    float max = data[0]; // find the maximum bin value
    for (int i = 1; i < data.Length; i++)
    {
        if (data[i] > max)
        {
            max = data[i];
        }
    }
    Image<Bgr, byte> image = new Image<Bgr, byte>(300, 300, new Bgr(0, 0, 0));
    for (int i = 0; i < data.Length; i++)
    {
        data[i] = data[i] * 256 / max;
        image.Draw(new LineSegment2DF(new PointF(i + 20, 255), new PointF(i + 21, 255 - data[i])), new Bgr(255, 255, 255), 2);
    }
    imageBox3.Image = image;
}
private void button_hist_Click(object sender, EventArgs e) { if (Image_Target == null || Image_Texture == null) { return; } double temp_weight = (double)trackBar_hist.Value / 10.0; Image <Gray, Byte> target_gray = Image_Target.Clone().Convert <Gray, Byte>(); Image <Gray, Byte> texture_gray = Image_Texture.Clone().Convert <Gray, Byte>(); Image <Gray, Byte> target_hist_matched = new Image <Gray, Byte>(Image_Target.Size); Image <Gray, Byte> target_hist_matched_weighted = new Image <Gray, Byte>(Image_Texture.Size); Matrix <byte> histLUT = new Matrix <byte>(1, 256); Mat hist_target = new Mat(); Mat hist_texture = new Mat(); VectorOfMat vm_target = new VectorOfMat(); VectorOfMat vm_texture = new VectorOfMat(); vm_target.Push(target_gray); vm_texture.Push(texture_gray); CvInvoke.CalcHist(vm_target, new int[] { 0 }, null, hist_target, new int[] { 256 }, new float[] { 0, 255 }, false); CvInvoke.CalcHist(vm_texture, new int[] { 0 }, null, hist_texture, new int[] { 256 }, new float[] { 0, 255 }, false); float[] CDF_hist_target = new float[256]; float[] CDF_hist_texture = new float[256]; Marshal.Copy(hist_target.DataPointer, CDF_hist_target, 0, 256); Marshal.Copy(hist_texture.DataPointer, CDF_hist_texture, 0, 256); for (int i = 1; i < 256; i++) { CDF_hist_target[i] += CDF_hist_target[i - 1]; CDF_hist_texture[i] += CDF_hist_texture[i - 1]; } for (int i = 0; i < 256; i++) { histLUT.Data[0, i] = 0; for (int j = 0; j < 256; j++) { if (CDF_hist_texture[j] >= CDF_hist_target[i]) { histLUT.Data[0, i] = (byte)j; break; } } } CvInvoke.LUT(target_gray, histLUT, target_hist_matched); target_hist_matched_weighted = target_hist_matched * temp_weight + target_gray * (1.0 - temp_weight); imageBox_hist.Image = target_hist_matched_weighted; }
private void StartNewTrack(Rectangle toTrack, Image <Gray, byte> imgTrackingImage, Image <Gray, byte> imgRoi, CamshiftOutput output) { _matBackProjectionMask = new Mat(); _rectangleSearchWindow = toTrack;// GetIncreasedRectangle(toTrack, IncreaseRegion); _histogram = new DenseHistogram(BinSize, new RangeF(0, BinSize)); using (VectorOfMat vmTrackingImageRoi = new VectorOfMat(imgRoi.Mat)) { CvInvoke.CalcHist(vmTrackingImageRoi, _channels, _matBackProjectionMask, _histogram, _histogramSize, _ranges, Accumulate); CvInvoke.Normalize(_histogram, _histogram, 0, 255, NormType.MinMax); } _trackStarted = true; }
public void applyHistogram(Mat image)
{
    Mat hist = new Mat();
    int hbins = 10, sbins = 12;
    int[] histSize = { hbins, sbins };
    float[] hrange = { 0, 180 };
    float[] srange = { 0, 256 };
    // One (low, high) pair is required per channel, so concatenate the H and S ranges.
    float[] ranges = { hrange[0], hrange[1], srange[0], srange[1] };
    int[] channel = { 0, 1 };
    CvInvoke.CalcHist(image, channel, new Mat(), hist, histSize, ranges, false);
    subtractImage.Image = hist.Bitmap; // hist is an hbins x sbins CV_32F matrix
}
public void Apply(Image <Bgr, byte> org, out Image <Bgr, byte> dst) { int width = org.Width; int height = org.Height; var gray = org.Convert <Gray, byte>(); Image <Gray, byte> dstImage = new Image <Gray, byte>(gray.Size); dstImage.SetZero(); float[] ranges = { 0.0f, 255.0f }; using (Mat histogram = new Mat()) using (VectorOfMat vm = new VectorOfMat()) { vm.Push(gray.Mat); CvInvoke.CalcHist(vm, new int[] { 0 }, null, histogram, new int[] { 255 }, ranges, false); double[] binVal = new double[histogram.Size.Height]; GCHandle handle = GCHandle.Alloc(binVal, GCHandleType.Pinned); using (Matrix <double> m = new Matrix <double>(binVal.Length, 1, handle.AddrOfPinnedObject(), sizeof(double))) { histogram.ConvertTo(m, DepthType.Cv64F); } int histMax = 0; for (int i = 0; i < 255; i++) { if (binVal[i] > histMax) { histMax = (int)binVal[i]; } } for (int x = 0; x < width; x++) { int x2 = (int)((double)x * 255 / (double)width); int row = (int)((double)binVal[x2] * height / (double)histMax); for (int y = height - 1; y >= height - row; y--) { dstImage[y, x] = new Gray(255); } } dst = dstImage.Convert <Bgr, byte>(); } }
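A short sketch of calling Apply from the hosting class, assuming an instance context is at hand; the path is a placeholder.

// Hypothetical usage of Apply; "photo.jpg" is a placeholder.
var src = new Image<Bgr, byte>("photo.jpg");
Apply(src, out Image<Bgr, byte> histogramImage);
CvInvoke.Imshow("Gray-level histogram", histogramImage);
CvInvoke.WaitKey();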
static void RunMeanshiftDemo()
{
    VideoCapture video = new VideoCapture("mleko.mp4"); // mleko.mp4
    //VideoCapture video = new VideoCapture("mouthwash.avi"); // mouthwash.avi
    var firstFrame = new Mat();
    video.Read(firstFrame);

    int x = 290, y = 230, width = 100, height = 15; // mleko.mp4
    //int x = 300, y = 305, width = 100, height = 115; // mouthwash.avi
    var roi = new Mat(firstFrame, new Rectangle(x, y, width, height));
    ShowHueEmphasizedImage(roi);
    CvInvoke.Imshow("Roi", roi);

    var roiHsv = new Mat();
    CvInvoke.CvtColor(roi, roiHsv, ColorConversion.Bgr2Hsv);

    var histogram = new Mat();
    CvInvoke.CalcHist(new VectorOfMat(new Mat[] { roiHsv }), new int[] { 0 }, null, histogram, new int[] { 180 }, new float[] { 0, 180 }, false);
    CvInvoke.Normalize(histogram, histogram, 0, 255, NormType.MinMax);
    show2DHueHistogram(histogram);

    var nextFrame = new Mat();
    var nextFrameHsv = new Mat();
    var mask = new Mat();
    var trackingWindow = new Rectangle(x, y, width, height);

    while (true)
    {
        video.Read(nextFrame);
        if (nextFrame.IsEmpty)
        {
            break;
        }
        CvInvoke.CvtColor(nextFrame, nextFrameHsv, ColorConversion.Bgr2Hsv);
        CvInvoke.CalcBackProject(new VectorOfMat(new Mat[] { nextFrameHsv }), new int[] { 0 }, histogram, mask, new float[] { 0, 180 }, 1);
        CvInvoke.Imshow("mask", mask);
        CvInvoke.MeanShift(mask, ref trackingWindow, new MCvTermCriteria(10, 1));
        CvInvoke.Rectangle(nextFrame, trackingWindow, new MCvScalar(0, 255, 0), 2);
        CvInvoke.Imshow("nextFrame", nextFrame);
        CvInvoke.WaitKey(60);
    }
    Console.WriteLine("End of video.");
    CvInvoke.WaitKey();
}
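If a rotated bounding box is preferred over the axis-aligned MeanShift window, CvInvoke.CamShift accepts the same back projection and search window. A minimal sketch of the alternative loop body, reusing the mask, trackingWindow and nextFrame variables from the demo above:

// Sketch of a CamShift-based loop body (same back projection and search window as above).
RotatedRect rotatedBox = CvInvoke.CamShift(mask, ref trackingWindow, new MCvTermCriteria(10, 1));
Point[] corners = Array.ConvertAll(rotatedBox.GetVertices(), Point.Round);
CvInvoke.Polylines(nextFrame, corners, true, new MCvScalar(0, 0, 255), 2);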
Array GetHistogramValues() { Mat hist = new Mat(); using (Emgu.CV.Util.VectorOfMat vm = new Emgu.CV.Util.VectorOfMat()) { int[] channels = { 0, 1, 2 }; int[] histSize = { 32, 32, 32 }; float[] ranges = { 0.0f, 256.0f, 0.0f, 256.0f, 0.0f, 256.0f }; vm.Push(srcImage_Gray); CvInvoke.CalcHist(vm, new int[] { 0 }, null, hist, new int[] { 256 }, new float[] { 0.0f, 256.0f }, false); CvInvoke.Normalize(hist, hist, 0, srcImage_Gray.Rows, NormType.MinMax); } return(hist.GetData()); }
private void button5_Click(object sender, EventArgs e)
{
    // Histogram comparison works best in HSV space, so convert both BGR images to HSV first.
    Mat srcHsvImage = new Mat();
    Mat compareHsvImage = new Mat();
    CvInvoke.CvtColor(src1, srcHsvImage, ColorConversion.Bgr2Hsv);
    CvInvoke.CvtColor(src2, compareHsvImage, ColorConversion.Bgr2Hsv);

    // Use an H-S histogram; first configure its parameters.
    Mat srcHist = new Mat();
    Mat compHist = new Mat();
    // H and S channels
    int[] channels = new int[2] { 0, 1 };
    int[] histSize = new int[2] { 30, 32 };
    // One (low, high) pair per channel: H covers 0~180, S covers 0~256.
    float[] Ranges = new float[4] { 0, 180, 0, 256 };

    // Compute the histogram of the source image.
    CvInvoke.CalcHist(srcHsvImage, channels, null, srcHist, histSize, Ranges, false);
    // Compute the histogram of the image to compare against.
    CvInvoke.CalcHist(compareHsvImage, channels, null, compHist, histSize, Ranges, false);

    // Note: both histograms must be normalized before comparison.
    CvInvoke.Normalize(srcHist, srcHist, 1, 0, NormType.MinMax);
    CvInvoke.Normalize(compHist, compHist, 1, 0, NormType.MinMax);

    // Compare the resulting histograms. Available methods:
    // Correlation:            CV_COMP_CORREL
    // Chi-Square:             CV_COMP_CHISQR
    // Intersection:           CV_COMP_INTERSECT
    // Bhattacharyya distance: CV_COMP_BHATTACHARYYA
    double g_dCompareResult = CvInvoke.CompareHist(srcHist, compHist, his[comboBox1.SelectedIndex]);
    richTextBox1.Text = "Method " + comboBox1.SelectedIndex + ": comparison result of the two images: " + g_dCompareResult + "\r\n";
}
public Mat initialtracker(Image <Bgr, Byte> SELECTION, out UMat model, out Mat Model, Mat selection, int value1, int value2, int value3, out Mat Mask) { Mat hsv = new Mat(); model = SELECTION.Copy().ToUMat(); Model = selection; S = SELECTION; s = selection; Image <Hsv, Byte> HSV = SELECTION.Convert <Hsv, Byte>(); // CvInvoke.CvtColor(selection, hsv, ColorConversion.Bgr2Hsv); // CvInvoke.CvtColor(skin_sample, skin_sample, ColorConversion.Bgr2Hsv); Mat mask = new Mat(); CvInvoke.Threshold(mask, mask, 60, 255, ThresholdType.Binary); CvInvoke.InRange(HSV, new ScalarArray(new MCvScalar(0, value2 - 30, value3 - 45)), new ScalarArray(new MCvScalar(value1 + 30, value2 + 30, value3 + 30)), mask); //CvInvoke.InRange(HSV, new ScalarArray(new MCvScalar(0, value1, Math.Min(value2, value3))), new ScalarArray(new MCvScalar(180, 255, Math.Max(value2, value3))), mask); Mat hue = new Mat(); hue = HSV.Mat.Split()[0]; Mask = mask; int[] Chn = { 0 }; int[] size = { 24 }; float[] range = { 0, 180 }; var vhue = new VectorOfMat(hue); Mat element = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(2, 2), new Point(1, 1));// Mat element = getStructuringElement(MORPH_RECT, Size(3, 3)); // CvInvoke.Erode(mask, mask, element, new Point(1, 1), 2, BorderType.Default, new MCvScalar(0, 0, 0)); Mat element2 = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(2, 2), new Point(1, 1)); CvInvoke.Dilate(mask, mask, element, new Point(-1, -1), 2, BorderType.Default, new MCvScalar(0, 0, 0)); Mat hist = new Mat(); //mask = MASK.Mat; CvInvoke.CalcHist(vhue, Chn, mask, hist, size, range, true); // CvInvoke.EqualizeHist(hist, hist); CvInvoke.Normalize(hist, hist, 0, 200, NormType.MinMax); return(hist); }
public static Array GetHistogramOfImage(Mat image, char channel = 'b', int size = 256, float range = 256)
{
    if (image == null)
    {
        return(null);
    }
    Mat hist = new Mat();
    using (Emgu.CV.Util.VectorOfMat vm = new Emgu.CV.Util.VectorOfMat())
    {
        int[] histoChannel = { 0 };
        if (channel == 'b') { histoChannel = new int[] { 0 }; }
        if (channel == 'g') { histoChannel = new int[] { 1 }; }
        if (channel == 'r') { histoChannel = new int[] { 2 }; }
        int[] histoSize = { size };
        float[] histoRange = { 0.0f, range };
        vm.Push(image);
        CvInvoke.CalcHist(vm, histoChannel, null, hist, histoSize, histoRange, false);
        //CvInvoke.Normalize(hist, hist, 0, image.Rows, NormType.MinMax);
    }
    return(hist.GetData().Cast<float>().ToArray());
}
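A possible call of GetHistogramOfImage; the path is a placeholder, and the returned Array is the float[] of bin counts.

// Hypothetical usage; "photo.jpg" is a placeholder.
Mat img = new Image<Bgr, byte>("photo.jpg").Mat;
float[] greenHist = (float[])GetHistogramOfImage(img, 'g');
Console.WriteLine($"green bins: {greenHist.Length}"); // 256 by default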
internal static double DetermineThreshold(Image <Gray, byte> img) { double threshhold = 0; float smallest = float.MaxValue; Mat hist = new Mat(); using (VectorOfMat vm = new VectorOfMat()) { vm.Push(img.Mat); float[] ranges = new float[] { 0.0f, 256.0f }; CvInvoke.CalcHist(vm, new int[] { 0 }, null, hist, new int[] { 256 }, ranges, false); } for (int i = 5; i < 50; ++i) { if (hist.GetValue(0, i) < smallest) { smallest = hist.GetValue(0, i); threshhold = i; } } return(threshhold); }
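A sketch of how DetermineThreshold might be used to binarize an image; note that it only inspects histogram bins 5 through 49. The path is a placeholder.

// Hypothetical usage; "scan.png" is a placeholder.
Image<Gray, byte> img = new Image<Gray, byte>("scan.png");
double threshold = DetermineThreshold(img);
CvInvoke.Threshold(img, img, threshold, 255, ThresholdType.Binary);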
public PageMetric(Mat image) { Mat hsv = new Mat(); CvInvoke.CvtColor(image, hsv, ColorConversion.Rgb2Hsv); var dim = new int[] { 0 }; var histSize = new int[] { 256 }; var range = new float[] { 0, 255 }; bool accumulate = false; Mat hist = new Mat(); Mat rgb = new Mat(); using (VectorOfMat array = new VectorOfMat()) { array.Push(hsv); CvInvoke.CalcHist(array, dim, new Mat(), hist, histSize, range, accumulate); } this.histogram = hist; }
private double Similar2(Bitmap bitmap1, Bitmap bitmap2)
{
    //var fileName = @"C:\Users\Administrator\Pictures\A.png";
    //bitmap1 = new Bitmap(fileName);
    //bitmap2 = new Bitmap(fileName);
    Mat mat1 = new Image<Gray, byte>(bitmap1).Mat;
    Mat mat2 = new Image<Gray, byte>(bitmap2).Mat;
    Mat hist1 = new Mat(), hist2 = new Mat();
    float[] range = { 10, 50 };      // gray range covered by the histogram
    int[] channels = new int[] { 0 };
    int[] histSize = new int[] { 10 }; // 10 bins over [10, 50)
    bool accumulate = false;
    CvInvoke.CalcHist(mat1, channels, null, hist1, histSize, range, accumulate);
    CvInvoke.CalcHist(mat2, channels, null, hist2, histSize, range, accumulate);
    // Correlation of the two histograms: 1.0 means identical.
    return(CvInvoke.CompareHist(hist1, hist2, HistogramCompMethod.Correl));
}
public static double CompareHistograms(Mat img, Mat img2) { using (Mat hist = new Mat()) using (Mat hist2 = new Mat()) using (VectorOfMat vm = new VectorOfMat()) using (VectorOfMat vm2 = new VectorOfMat()) { vm.Push(img); vm2.Push(img2); var channels = new int[] { 0 }; var histSize = new int[] { 256 }; var ranges = new float[] { 0, 256, }; CvInvoke.CalcHist(vm, channels, null, hist, histSize, ranges, false); CvInvoke.CalcHist(vm2, channels, null, hist2, histSize, ranges, false); //CvInvoke.Normalize(hist, hist, 0, 255, NormType.MinMax); //CvInvoke.Normalize(hist2, hist2, 0, 255, NormType.MinMax); //double res = CvInvoke.CompareHist(hist, hist2, HistogramCompMethod.Bhattacharyya); //Debug.Log("Cards in Stock: " + (res > 0.5)); return(CvInvoke.CompareHist(hist, hist2, HistogramCompMethod.Correl)); } }
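A possible call of CompareHistograms; the paths are placeholders, and grayscale inputs keep channel 0 meaningful.

// Hypothetical usage; paths are placeholders.
Mat a = new Image<Gray, byte>("cardA.png").Mat;
Mat b = new Image<Gray, byte>("cardB.png").Mat;
double correlation = CompareHistograms(a, b); // 1.0 means identical histograms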
private (float SatAverage, float ValAverage) GetHistogramAverages(Image <Hsv, byte> image) { image._EqualizeHist(); var hsvPlanes = new VectorOfMat(); CvInvoke.Split(image, hsvPlanes); var histSize = new[] { 256 }; var range = new[] { 0f, 255f }; var accumulate = false; var sHist = new Mat(); var vHist = new Mat(); CvInvoke.CalcHist(hsvPlanes, new[] { 1 }, new Mat(), sHist, histSize, range, accumulate); CvInvoke.CalcHist(hsvPlanes, new[] { 2 }, new Mat(), vHist, histSize, range, accumulate); var sAvg = GetHistogramAverage(sHist); var vAvg = GetHistogramAverage(vHist); return(sAvg, vAvg); }
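GetHistogramAverage is not shown in this listing; the following is only an assumed sketch of what it could compute (an intensity-weighted mean of the 256-bin histogram), copying the CV_32F bins out with Marshal.Copy as the other snippets here do (requires System.Runtime.InteropServices).

// Assumed sketch of the missing GetHistogramAverage helper: weighted mean bin index of a 1-D CV_32F histogram.
private static float GetHistogramAverage(Mat hist)
{
    float[] bins = new float[hist.Rows];
    Marshal.Copy(hist.DataPointer, bins, 0, bins.Length);
    float weightedSum = 0f, count = 0f;
    for (int i = 0; i < bins.Length; i++)
    {
        weightedSum += i * bins[i];
        count += bins[i];
    }
    return count > 0f ? weightedSum / count : 0f;
}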
private double Similar2(Bitmap bitmap1, Bitmap bitmap2)
{
    //FileStorage
    //var fileName = @"C:\Users\Administrator\Pictures\A.png";
    //bitmap1 = new Bitmap(fileName);
    //bitmap2 = new Bitmap(fileName);
    var mat1 = new Image<Gray, byte>(bitmap1);
    var mat2 = new Image<Gray, byte>(bitmap2);
    var hist1 = new Mat();
    var hist2 = new Mat();
    float[] range = { 0, 256 };
    int[] channels = new int[] { 0 };
    int[] histSize = new int[] { 256 };
    bool accumulate = false;
    CvInvoke.CalcHist(mat1, channels, null, hist1, histSize, range, accumulate);
    CvInvoke.CalcHist(mat2, channels, null, hist2, histSize, range, accumulate);
    // Correlation of the two histograms: 1.0 means identical.
    return(CvInvoke.CompareHist(hist1, hist2, HistogramCompMethod.Correl));
}
public Mat Tracker() { Mat hsv_roi = new Mat(); CvInvoke.CvtColor(selection, hsv_roi, ColorConversion.Bgr2Hsv); // imageBox1.Image = selection; Mat mask_roi = new Mat(); CvInvoke.InRange(hsv_roi, new ScalarArray(new MCvScalar(0, 60, 32)), new ScalarArray(new MCvScalar(180, 255, 255)), mask_roi); Mask = mask_roi; imageBox1.Image = hsv_roi; Mat hist_roi = new Mat(); VectorOfMat vhsv_roi = new VectorOfMat(hsv_roi); //Mat hue = new Mat(); // var vhue = new VectorOfMat(hue); int[] Chn = { 0 }; int[] size = { 24 }; float[] range = { 0, 180 }; //imageBox1.Image = hsv_roi; //vInvoke.MixChannels(vhsv_roi,vhue); CvInvoke.CalcHist(vhsv_roi, Chn, mask_roi, hist_roi, size, range, true); // imageBox1.Image = hist_roi; CvInvoke.Normalize(hist_roi, hist_roi, 0, 255, NormType.MinMax); // double min = 0;double max = 0;Point minloc = new Point(0); Point maxloc = new Point(0); // CvInvoke.MinMaxLoc(hist_roi, ref min, ref max, ref minloc,ref maxloc); int histSize = hist_roi.Rows; Mat histimg = new Mat(histSize, histSize, DepthType.Cv8U, 255); double hpt = 0.9 * histSize; imageBox1.Image = hist_roi; return(hist_roi); }
/// <summary> /// Calculate a mask for the pieces. The function calculates a histogram to find the piece background color. /// Everything within a specific HSV range around the piece background color is regarded as foreground. The rest is regarded as background. /// </summary> /// <param name="inputImg">Color input image</param> /// <returns>Mask image</returns> /// see: https://docs.opencv.org/2.4/modules/imgproc/doc/histograms.html?highlight=calchist public override Image <Gray, byte> GetMask(Image <Rgba, byte> inputImg) { Image <Gray, byte> mask; using (Image <Hsv, byte> hsvSourceImg = inputImg.Convert <Hsv, byte>()) //Convert input image to HSV color space { Mat hsvImgMat = new Mat(); hsvSourceImg.Mat.ConvertTo(hsvImgMat, DepthType.Cv32F); VectorOfMat vm = new VectorOfMat(hsvImgMat); // Calculate histograms for each channel of the HSV image (H, S, V) Mat histOutH = new Mat(), histOutS = new Mat(), histOutV = new Mat(); int hbins = 32, sbins = 32, vbins = 32; CvInvoke.CalcHist(vm, new int[] { 0 }, new Mat(), histOutH, new int[] { hbins }, new float[] { 0, 179 }, false); CvInvoke.CalcHist(vm, new int[] { 1 }, new Mat(), histOutS, new int[] { sbins }, new float[] { 0, 255 }, false); CvInvoke.CalcHist(vm, new int[] { 2 }, new Mat(), histOutV, new int[] { vbins }, new float[] { 0, 255 }, false); hsvImgMat.Dispose(); vm.Dispose(); // Draw the histograms for debugging purposes if (PluginFactory.GetGeneralSettingsPlugin().SolverShowDebugResults) { PluginFactory.LogHandle?.Report(new LogEventImage("Hist H", Utils.DrawHist(histOutH, hbins, 30, 1024, new MCvScalar(255, 0, 0)).Bitmap)); PluginFactory.LogHandle?.Report(new LogEventImage("Hist S", Utils.DrawHist(histOutS, sbins, 30, 1024, new MCvScalar(0, 255, 0)).Bitmap)); PluginFactory.LogHandle?.Report(new LogEventImage("Hist V", Utils.DrawHist(histOutV, vbins, 30, 1024, new MCvScalar(0, 0, 255)).Bitmap)); } //#warning Use border color //int borderHeight = 10; //Image<Hsv, byte> borderImg = hsvSourceImg.Copy(new Rectangle(0, hsvSourceImg.Height - borderHeight, hsvSourceImg.Width, borderHeight)); //MCvScalar meanBorderColorScalar = CvInvoke.Mean(borderImg); //Hsv meanBorderColor = new Hsv(meanBorderColorScalar.V0, meanBorderColorScalar.V1, meanBorderColorScalar.V2); //if (PuzzleSolverParameters.Instance.SolverShowDebugResults) //{ // Image<Hsv, byte> borderColorImg = new Image<Hsv, byte>(12, 12); // borderColorImg.SetValue(meanBorderColor); // _logHandle.Report(new LogBox.LogEventImage("HSV Border Color (" + meanBorderColor.Hue + " ; " + meanBorderColor.Satuation + "; " + meanBorderColor.Value + ")", borderColorImg.Bitmap)); //} // Find the peaks in the histograms and use them as piece background color. Black and white areas are ignored. 
Hsv pieceBackgroundColor = new Hsv { Hue = Utils.HighestBinValInRange(histOutH, MainHueSegment - HueDiffHist, MainHueSegment + HueDiffHist, 179), //25, 179, 179); Satuation = Utils.HighestBinValInRange(histOutS, 50, 205, 255), //50, 255, 255); Value = Utils.HighestBinValInRange(histOutV, 75, 205, 255) //75, 255, 255); }; histOutH.Dispose(); histOutS.Dispose(); histOutV.Dispose(); // Show the found piece background color if (PluginFactory.GetGeneralSettingsPlugin().SolverShowDebugResults) { Image <Hsv, byte> pieceBgColorImg = new Image <Hsv, byte>(4, 12); Image <Hsv, byte> lowPieceBgColorImg = new Image <Hsv, byte>(4, 12); Image <Hsv, byte> highPieceBgColorImg = new Image <Hsv, byte>(4, 12); pieceBgColorImg.SetValue(pieceBackgroundColor); lowPieceBgColorImg.SetValue(new Hsv(pieceBackgroundColor.Hue - HueDiff, pieceBackgroundColor.Satuation - SaturationDiff, pieceBackgroundColor.Value - ValueDiff)); highPieceBgColorImg.SetValue(new Hsv(pieceBackgroundColor.Hue + HueDiff, pieceBackgroundColor.Satuation + SaturationDiff, pieceBackgroundColor.Value + ValueDiff)); PluginFactory.LogHandle?.Report(new LogEventImage("HSV Piece Bg Color (" + pieceBackgroundColor.Hue + " ; " + pieceBackgroundColor.Satuation + "; " + pieceBackgroundColor.Value + ")", Utils.Combine2ImagesHorizontal(Utils.Combine2ImagesHorizontal(lowPieceBgColorImg.Convert <Rgb, byte>(), pieceBgColorImg.Convert <Rgb, byte>(), 0), highPieceBgColorImg.Convert <Rgb, byte>(), 0).Bitmap)); pieceBgColorImg.Dispose(); lowPieceBgColorImg.Dispose(); highPieceBgColorImg.Dispose(); } // do HSV segmentation and keep only the meanColor areas with some hysteresis as pieces mask = hsvSourceImg.InRange(new Hsv(pieceBackgroundColor.Hue - HueDiff, pieceBackgroundColor.Satuation - SaturationDiff, pieceBackgroundColor.Value - ValueDiff), new Hsv(pieceBackgroundColor.Hue + HueDiff, pieceBackgroundColor.Satuation + SaturationDiff, pieceBackgroundColor.Value + ValueDiff)); // close small black gaps with morphological closing operation Mat kernel = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(5, 5), new Point(-1, -1)); CvInvoke.MorphologyEx(mask, mask, MorphOp.Close, kernel, new Point(-1, -1), 5, BorderType.Default, new MCvScalar(0)); } return(mask); }
private void compareToolStripMenuItem_Click(object sender, EventArgs e) { try { if (pictureBox1.Image == null) { return; } var img = new Bitmap(pictureBox1.Image) .ToImage <Gray, byte>(); Image <Gray, byte> img1 = null; OpenFileDialog dialog = new OpenFileDialog(); if (dialog.ShowDialog() == DialogResult.OK) { img1 = new Image <Gray, byte>(dialog.FileName); } Mat hist = new Mat(); Mat hist1 = new Mat(); float[] ranges = new float[] { 0, 256 }; int[] channel = { 0 }; int[] histSize = { 256 }; VectorOfMat ms = new VectorOfMat(); ms.Push(img); VectorOfMat ms1 = new VectorOfMat(); ms1.Push(img1); CvInvoke.CalcHist(ms, channel, null, hist, histSize, ranges, false); CvInvoke.CalcHist(ms1, channel, null, hist1, histSize, ranges, false); CvInvoke.Normalize(hist, hist); CvInvoke.Normalize(hist1, hist1); HistogramViewer viewer = new HistogramViewer(); viewer.Text = "Image Histogram"; viewer.ShowIcon = false; viewer.HistogramCtrl.AddHistogram("Image1 Histogram", Color.Blue, hist, 256, ranges); viewer.HistogramCtrl.Refresh(); viewer.Show(); HistogramViewer viewer1 = new HistogramViewer(); viewer1.Text = "Image Histogram"; viewer1.ShowIcon = false; viewer1.HistogramCtrl.AddHistogram("Image2 Histogram", Color.Blue, hist1, 256, ranges); viewer1.HistogramCtrl.Refresh(); viewer1.Show(); var result1 = CvInvoke.CompareHist(hist, hist, Emgu.CV.CvEnum.HistogramCompMethod.Correl); var result2 = CvInvoke.CompareHist(hist1, hist1, Emgu.CV.CvEnum.HistogramCompMethod.Correl); var result3 = CvInvoke.CompareHist(hist, hist1, Emgu.CV.CvEnum.HistogramCompMethod.Correl); lblBGR.Text = "Hist vs Hist = " + result1.ToString() + "\n" + "Hist1 vs Hist1 = " + result2.ToString() + "\n" + "Hist vs Hist1 = " + result3.ToString() + "\n"; //pictureBox1.Image = CreateGraph(hist).GetImage(); } catch (Exception ex) { MessageBox.Show(ex.Message); } }
/// <summary> /// Generate histograms for the image. One histogram is generated for each color channel. /// You will need to call the Refresh function to do the painting afterward. /// </summary> /// <param name="image">The image to generate histogram from</param> /// <param name="numberOfBins">The number of bins for each histogram</param> public void GenerateHistograms(IInputArray image, int numberOfBins) { using (InputArray iaImage = image.GetInputArray()) { int channelCount = iaImage.GetChannels(); Mat[] channels = new Mat[channelCount]; Type imageType; if ((imageType = Toolbox.GetBaseType(image.GetType(), "Image`2")) != null || (imageType = Toolbox.GetBaseType(image.GetType(), "Mat")) != null || (imageType = Toolbox.GetBaseType(image.GetType(), "UMat")) != null) { for (int i = 0; i < channelCount; i++) { Mat channel = new Mat(); CvInvoke.ExtractChannel(image, channel, i); channels[i] = channel; } } else if ((imageType = Toolbox.GetBaseType(image.GetType(), "CudaImage`2")) != null) { using (Mat img = imageType.GetMethod("ToMat").Invoke(image, null) as Mat) for (int i = 0; i < channelCount; i++) { Mat channel = new Mat(); CvInvoke.ExtractChannel(img, channel, i); channels[i] = channel; } } else { throw new ArgumentException(String.Format("The input image type of {0} is not supported", image.GetType().ToString())); } Type[] genericArguments = imageType.GetGenericArguments(); String[] channelNames; Color[] colors; Type typeOfDepth; if (genericArguments.Length > 0) { IColor typeOfColor = Activator.CreateInstance(genericArguments[0]) as IColor; channelNames = Reflection.ReflectColorType.GetNamesOfChannels(typeOfColor); colors = Reflection.ReflectColorType.GetDisplayColorOfChannels(typeOfColor); typeOfDepth = imageType.GetGenericArguments()[1]; } else { channelNames = new String[channelCount]; colors = new Color[channelCount]; for (int i = 0; i < channelCount; i++) { channelNames[i] = String.Format("Channel {0}", i); colors[i] = Color.Red; } if (image is Mat) { typeOfDepth = CvInvoke.GetDepthType(((Mat)image).Depth); } else if (image is UMat) { typeOfDepth = CvInvoke.GetDepthType(((UMat)image).Depth); } else { throw new ArgumentException(String.Format( "Unable to get the type of depth from image of type {0}", image.GetType().ToString())); } } float minVal, maxVal; #region Get the maximum and minimum color intensity values if (typeOfDepth == typeof(Byte)) { minVal = 0.0f; maxVal = 256.0f; } else { #region obtain the maximum and minimum color value double[] minValues, maxValues; Point[] minLocations, maxLocations; using (InputArray ia = image.GetInputArray()) using (Mat m = ia.GetMat()) { m.MinMax(out minValues, out maxValues, out minLocations, out maxLocations); double min = minValues[0], max = maxValues[0]; for (int i = 1; i < minValues.Length; i++) { if (minValues[i] < min) { min = minValues[i]; } if (maxValues[i] > max) { max = maxValues[i]; } } minVal = (float)min; maxVal = (float)max; } #endregion } #endregion Mat[] histograms = new Mat[channels.Length]; for (int i = 0; i < channels.Length; i++) { //using (DenseHistogram hist = new DenseHistogram(numberOfBins, new RangeF(minVal, maxVal))) using (Mat hist = new Mat()) using (Util.VectorOfMat vm = new Util.VectorOfMat()) { vm.Push(channels[i]); float[] ranges = new float[] { minVal, maxVal }; CvInvoke.CalcHist(vm, new int[] { 0 }, null, hist, new int[] { numberOfBins }, ranges, false); //hist.Calculate(new IImage[1] { channels[i] }, true, null); histograms[i] = GenerateHistogram(channelNames[i], colors[i], hist, numberOfBins, ranges); } } 
if (histograms.Length == 1) { this.Image = histograms[0]; } else { int maxWidth = 0; int totalHeight = 0; for (int i = 0; i < histograms.Length; i++) { maxWidth = Math.Max(maxWidth, histograms[i].Width); totalHeight += histograms[i].Height; } Mat concated = new Mat(new Size(maxWidth, totalHeight), histograms[0].Depth, histograms[0].NumberOfChannels); int currentY = 0; for (int i = 0; i < histograms.Length; i++) { using (Mat roi = new Mat(concated, new Rectangle(new Point(0, currentY), histograms[i].Size))) { histograms[i].CopyTo(roi); } currentY += histograms[i].Height; histograms[i].Dispose(); } this.Image = concated; } } }
private void backProjectionToolStripMenuItem_Click(object sender, EventArgs e) { try { if (pictureBox1.Image == null) { return; } var img = new Bitmap(pictureBox1.Image) .ToImage <Gray, byte>(); Image <Gray, byte> img1 = null; OpenFileDialog dialog = new OpenFileDialog(); if (dialog.ShowDialog() == DialogResult.OK) { img1 = new Image <Gray, byte>(dialog.FileName); } Mat hist = new Mat(); Mat hist1 = new Mat(); float[] ranges = new float[] { 0, 256 }; int[] channel = { 0 }; int[] histSize = { 256 }; VectorOfMat ms = new VectorOfMat(); ms.Push(img); VectorOfMat ms1 = new VectorOfMat(); ms1.Push(img1); CvInvoke.CalcHist(ms, channel, null, hist, histSize, ranges, false); CvInvoke.CalcHist(ms1, channel, null, hist1, histSize, ranges, false); CvInvoke.Normalize(hist, hist); CvInvoke.Normalize(hist1, hist1); Mat proj = new Mat(); CvInvoke.CalcBackProject(ms, channel, hist, proj, ranges); HistogramViewer viewer = new HistogramViewer(); viewer.Text = "Image Histogram"; viewer.ShowIcon = false; viewer.HistogramCtrl.AddHistogram("Image1 Histogram", Color.Blue, hist, 256, ranges); viewer.HistogramCtrl.Refresh(); viewer.Show(); HistogramViewer viewer1 = new HistogramViewer(); viewer1.Text = "Image Histogram"; viewer1.ShowIcon = false; viewer1.HistogramCtrl.AddHistogram("Image2 Histogram", Color.Blue, hist1, 256, ranges); viewer1.HistogramCtrl.Refresh(); viewer1.Show(); pictureBox1.Image = proj.ToBitmap(); } catch (Exception ex) { MessageBox.Show(ex.Message); } }
/// <summary> /// Generate histograms for the image. One histogram is generated for each color channel. /// You will need to call the Refresh function to do the painting afterward. /// </summary> /// <param name="image">The image to generate histogram from</param> /// <param name="numberOfBins">The number of bins for each histogram</param> public void GenerateHistograms(IImage image, int numberOfBins) { Mat[] channels = new Mat[image.NumberOfChannels]; Type imageType; if ((imageType = Toolbox.GetBaseType(image.GetType(), "Image`2")) != null || (imageType = Toolbox.GetBaseType(image.GetType(), "Mat")) != null || (imageType = Toolbox.GetBaseType(image.GetType(), "UMat")) != null) { for (int i = 0; i < image.NumberOfChannels; i++) { Mat channel = new Mat(); CvInvoke.ExtractChannel(image, channel, i); channels[i] = channel; } } else if ((imageType = Toolbox.GetBaseType(image.GetType(), "CudaImage`2")) != null) { IImage img = imageType.GetMethod("ToImage").Invoke(image, null) as IImage; for (int i = 0; i < img.NumberOfChannels; i++) { Mat channel = new Mat(); CvInvoke.ExtractChannel(img, channel, i); channels[i] = channel; } } else { throw new ArgumentException(String.Format("The input image type of {0} is not supported", image.GetType().ToString())); } Type[] genericArguments = imageType.GetGenericArguments(); String[] channelNames; Color[] colors; Type typeOfDepth; if (genericArguments.Length > 0) { IColor typeOfColor = Activator.CreateInstance(genericArguments[0]) as IColor; channelNames = Emgu.CV.Reflection.ReflectColorType.GetNamesOfChannels(typeOfColor); colors = Emgu.CV.Reflection.ReflectColorType.GetDisplayColorOfChannels(typeOfColor); typeOfDepth = imageType.GetGenericArguments()[1]; } else { channelNames = new String[image.NumberOfChannels]; colors = new Color[image.NumberOfChannels]; for (int i = 0; i < image.NumberOfChannels; i++) { channelNames[i] = String.Format("Channel {0}", i); colors[i] = Color.Red; } if (image is Mat) { typeOfDepth = CvInvoke.GetDepthType(((Mat)image).Depth); } else if (image is UMat) { typeOfDepth = CvInvoke.GetDepthType(((UMat)image).Depth); } else { throw new ArgumentException(String.Format("Unable to get the type of depth from image of type {0}", image.GetType().ToString())); } } float minVal, maxVal; #region Get the maximum and minimum color intensity values if (typeOfDepth == typeof(Byte)) { minVal = 0.0f; maxVal = 256.0f; } else { #region obtain the maximum and minimum color value double[] minValues, maxValues; Point[] minLocations, maxLocations; image.MinMax(out minValues, out maxValues, out minLocations, out maxLocations); double min = minValues[0], max = maxValues[0]; for (int i = 1; i < minValues.Length; i++) { if (minValues[i] < min) { min = minValues[i]; } if (maxValues[i] > max) { max = maxValues[i]; } } #endregion minVal = (float)min; maxVal = (float)max; } #endregion for (int i = 0; i < channels.Length; i++) { //using (DenseHistogram hist = new DenseHistogram(numberOfBins, new RangeF(minVal, maxVal))) using (Mat hist = new Mat()) using (Emgu.CV.Util.VectorOfMat vm = new Emgu.CV.Util.VectorOfMat()) { vm.Push(channels[i]); float[] ranges = new float[] { minVal, maxVal }; CvInvoke.CalcHist(vm, new int[] { 0 }, null, hist, new int[] { numberOfBins }, ranges, false); //hist.Calculate(new IImage[1] { channels[i] }, true, null); AddHistogram(channelNames[i], colors[i], hist, numberOfBins, ranges); } } }
private void StartMatching() { Image <Bgr, Byte> target = Image_Target.Clone().Resize(scale, Inter.Linear); Image <Bgr, Byte> texture = Image_Texture.Clone(); Image <Gray, Byte> target_gray = target.Convert <Gray, Byte>(); Image <Gray, Byte> texture_gray = texture.Convert <Gray, Byte>(); Image <Gray, Byte> target_hist_matched = new Image <Gray, Byte>(target.Size); Image <Gray, Byte> target_hist_matched_weighted = new Image <Gray, Byte>(target.Size); Image <Gray, float> target_sobel_x = target_gray.Sobel(1, 0, 3); Image <Gray, float> target_sobel_y = target_gray.Sobel(0, 1, 3); Image <Gray, float> texture_sobel_x = texture_gray.Sobel(1, 0, 3); Image <Gray, float> texture_sobel_y = texture_gray.Sobel(0, 1, 3); Image <Gray, float> target_sobel_mag = new Image <Gray, float>(target_gray.Size); Image <Gray, float> texture_sobel_mag = new Image <Gray, float>(texture_gray.Size); Image_Result = new Image <Bgr, byte>(target.Width, target.Height, new Bgr(0, 0, 0)); imageBox_Result.Image = Image_Result; this.Invoke(new MethodInvoker(() => { progressBar_match.Value = 0; progressBar_match.Maximum = (target.Width / size) * (target.Height / size); })); Matrix <byte> histLUT = new Matrix <byte>(1, 256); Mat hist_target = new Mat(); Mat hist_texture = new Mat(); VectorOfMat vm_target = new VectorOfMat(); VectorOfMat vm_texture = new VectorOfMat(); vm_target.Push(target_gray); vm_texture.Push(texture_gray); CvInvoke.CalcHist(vm_target, new int[] { 0 }, null, hist_target, new int[] { 256 }, new float[] { 0, 255 }, false); CvInvoke.CalcHist(vm_texture, new int[] { 0 }, null, hist_texture, new int[] { 256 }, new float[] { 0, 255 }, false); float[] CDF_hist_target = new float[256]; float[] CDF_hist_texture = new float[256]; Marshal.Copy(hist_target.DataPointer, CDF_hist_target, 0, 256); Marshal.Copy(hist_texture.DataPointer, CDF_hist_texture, 0, 256); for (int i = 1; i < 256; i++) { CDF_hist_target[i] += CDF_hist_target[i - 1]; CDF_hist_texture[i] += CDF_hist_texture[i - 1]; } for (int i = 0; i < 256; i++) { histLUT.Data[0, i] = 0; for (int j = 0; j < 256; j++) { if (CDF_hist_texture[j] >= CDF_hist_target[i]) { histLUT.Data[0, i] = (byte)j; break; } } } CvInvoke.LUT(target_gray, histLUT, target_hist_matched); target_hist_matched_weighted = target_hist_matched * weight_hist + target_gray * (1.0 - weight_hist); CvInvoke.CartToPolar(target_sobel_x, target_sobel_y, target_sobel_mag, new Mat()); CvInvoke.CartToPolar(texture_sobel_x, texture_sobel_y, texture_sobel_mag, new Mat()); List <Matrix <float> > transformation_matrixs = new List <Matrix <float> >(); List <Matrix <float> > transformation_matrixs_invert = new List <Matrix <float> >(); List <RectangleF> rotatedRects = new List <RectangleF>(); for (int i = 1; i < rotations; i++) { double angle = i * (360.0 / (float)rotations); RectangleF rotatedRect = new RotatedRect(new PointF(), texture.Size, (float)angle).MinAreaRect(); PointF center = new PointF(0.5f * texture.Width, 0.5f * texture.Height); Matrix <float> transformation_matrix = new Matrix <float>(2, 3); Matrix <float> transformation_matrix_invert = new Matrix <float>(2, 3); CvInvoke.GetRotationMatrix2D(center, angle, 1.0, transformation_matrix); transformation_matrix.Data[0, 2] += (rotatedRect.Width - texture.Width) / 2; transformation_matrix.Data[1, 2] += (rotatedRect.Height - texture.Height) / 2; CvInvoke.InvertAffineTransform(transformation_matrix, transformation_matrix_invert); transformation_matrixs.Add(transformation_matrix); transformation_matrixs_invert.Add(transformation_matrix_invert); 
rotatedRects.Add(rotatedRect); } List <Image <Bgr, byte> > texture_rotations = new List <Image <Bgr, byte> >(rotations) { }; List <Image <Gray, byte> > texture_gray_rotations = new List <Image <Gray, byte> >(rotations) { }; List <Image <Gray, float> > texture_sobel_rotations = new List <Image <Gray, float> >(rotations) { }; List <Image <Gray, byte> > texture_mask_rotations = new List <Image <Gray, byte> >(rotations) { }; texture_rotations.Add(texture); texture_gray_rotations.Add(texture_gray); texture_sobel_rotations.Add(texture_sobel_mag); texture_mask_rotations.Add(new Image <Gray, byte>(texture.Width, texture.Height, new Gray(255))); for (int i = 1; i < rotations; i++) { texture_mask_rotations.Add(new Image <Gray, byte>(rotatedRects[i - 1].Size.ToSize())); texture_rotations.Add(new Image <Bgr, byte>(rotatedRects[i - 1].Size.ToSize())); texture_gray_rotations.Add(new Image <Gray, byte>(rotatedRects[i - 1].Size.ToSize())); texture_sobel_rotations.Add(new Image <Gray, float>(rotatedRects[i - 1].Size.ToSize())); } for (int i = 1; i < rotations; i++) { CvInvoke.WarpAffine(texture, texture_rotations[i], transformation_matrixs[i - 1], rotatedRects[i - 1].Size.ToSize(), Inter.Nearest, Warp.Default, BorderType.Constant, new MCvScalar(0)); CvInvoke.WarpAffine(texture_gray, texture_gray_rotations[i], transformation_matrixs[i - 1], rotatedRects[i - 1].Size.ToSize(), Inter.Nearest, Warp.Default, BorderType.Constant, new MCvScalar(0)); CvInvoke.WarpAffine(texture_sobel_mag, texture_sobel_rotations[i], transformation_matrixs[i - 1], rotatedRects[i - 1].Size.ToSize(), Inter.Nearest, Warp.Default, BorderType.Constant, new MCvScalar(0)); CvInvoke.WarpAffine(texture_mask_rotations[0], texture_mask_rotations[i], transformation_matrixs[i - 1], rotatedRects[i - 1].Size.ToSize(), Inter.Nearest, Warp.Default, BorderType.Constant, new MCvScalar(0)); } // Directory.SetCurrentDirectory(path); String current_path = path + @"\matched_patches"; if (Directory.Exists(current_path)) { DirectoryInfo di = new DirectoryInfo(current_path); foreach (FileInfo file in di.GetFiles()) { file.Delete(); } foreach (DirectoryInfo dir in di.GetDirectories()) { dir.Delete(true); } } else { DirectoryInfo di = Directory.CreateDirectory(current_path); } for (int y = 0; y < target_hist_matched_weighted.Height / size; y++) { for (int x = 0; x < target_hist_matched_weighted.Width / size; x++) { Image <Bgr, byte> template; Image <Gray, byte> template_gray; Image <Gray, float> template_sobel; template = target.Clone(); template_gray = target_hist_matched_weighted.Clone(); template_sobel = target_sobel_mag.Clone(); template.ROI = new Rectangle(x * size, y * size, size, size); template_gray.ROI = new Rectangle(x * size, y * size, size, size); template_sobel.ROI = new Rectangle(x * size, y * size, size, size); template = template.Clone(); template_gray = template_gray.Copy(); template_sobel = template_sobel.Copy(); int minMatchIndex = -1; double minMatchValue = double.MaxValue; Point minMatchLoc = new Point(); Object _lock = new Object(); Parallel.For(0, rotations, i => { Image <Gray, float> match_gray = new Image <Gray, float>(texture_gray.Size); Image <Gray, float> match_sobel = new Image <Gray, float>(texture_sobel_mag.Size); Image <Gray, float> match_sum = new Image <Gray, float>(texture.Size); double minVal = 0, maxVal = 0; Point minLoc = new Point(), maxLoc = new Point(); CvInvoke.MatchTemplate(texture_gray_rotations[i], template_gray, match_gray, TemplateMatchingType.Sqdiff); CvInvoke.MatchTemplate(texture_sobel_rotations[i], 
template_sobel, match_sobel, TemplateMatchingType.Sqdiff); match_sum = match_gray + match_sobel; //CvInvoke.MinMaxLoc(match_sum, ref minVal, ref maxVal, ref minLoc, ref maxLoc); //CudaInvoke.MinMaxLoc(match_sum, ref minVal, ref maxVal, ref minLoc, ref maxLoc, GetMinMaxLocMask(texture_mask_rotations[i], size)); CvInvoke.MinMaxLoc(match_sum, ref minVal, ref maxVal, ref minLoc, ref maxLoc, GetMinMaxLocMask(texture_mask_rotations[i], size)); lock (_lock) { if (minVal < minMatchValue && minVal > 0) { minMatchValue = minVal; minMatchIndex = i; minMatchLoc = minLoc; } } match_gray.Dispose(); match_sobel.Dispose(); match_sum.Dispose(); }); Console.WriteLine($"minMatchValue = {minMatchValue}\r\nminMatchIndex = {minMatchIndex}\r\nminMatchLoc = {minMatchLoc}"); if (minMatchIndex < 0) { MessageBox.Show("Out of textures!!!!"); return; } texture_mask_rotations[minMatchIndex].Draw(new Rectangle(minMatchLoc.X, minMatchLoc.Y, size - 1, size - 1), new Gray(0), -1); if (minMatchIndex > 0) { Image <Gray, byte> mask = new Image <Gray, byte>(texture_mask_rotations[0].Size); CvInvoke.WarpAffine(texture_mask_rotations[minMatchIndex], mask, transformation_matrixs_invert[minMatchIndex - 1], mask.Size, Inter.Nearest, Warp.Default, BorderType.Constant, new MCvScalar(0)); texture_mask_rotations[0] = texture_mask_rotations[0] - (255 - mask); } for (int i = 1; i < rotations; i++) { Image <Gray, byte> mask_rot = new Image <Gray, byte>(texture_mask_rotations[i].Size); CvInvoke.WarpAffine(texture_mask_rotations[0], mask_rot, transformation_matrixs[i - 1], mask_rot.Size, Inter.Nearest, Warp.Default, BorderType.Constant, new MCvScalar(0)); mask_rot.CopyTo(texture_mask_rotations[i]); } texture_rotations[minMatchIndex].ROI = new Rectangle(minMatchLoc, new Size(size, size)); CvInvoke.Imwrite($@"{current_path}\{x}_{y}.bmp", texture_rotations[minMatchIndex].Copy()); imageBox_match.Image = texture_rotations[minMatchIndex].Copy(); imageBox_template.Image = template.Copy(); Image_Result.ROI = new Rectangle(x * size, y * size, size, size); texture_rotations[minMatchIndex].CopyTo(Image_Result); Image_Result.ROI = Rectangle.Empty; texture_rotations[minMatchIndex].ROI = Rectangle.Empty; imageBox_Result.Image = Image_Result; if (minMatchIndex > 0) { PointF[] maskPoints = { minMatchLoc, new PointF(minMatchLoc.X + size - 1, minMatchLoc.Y), new PointF(minMatchLoc.X, minMatchLoc.Y + size - 1), new PointF(minMatchLoc.X + size - 1, minMatchLoc.Y + size - 1) }; PointF[] maskPoints_rot = new PointF[4]; for (int i = 0; i < 4; i++) { maskPoints_rot[i].X = maskPoints[i].X * transformation_matrixs_invert[minMatchIndex - 1].Data[0, 0] + maskPoints[i].Y * transformation_matrixs_invert[minMatchIndex - 1].Data[0, 1] + 1.0f * transformation_matrixs_invert[minMatchIndex - 1].Data[0, 2]; maskPoints_rot[i].Y = maskPoints[i].X * transformation_matrixs_invert[minMatchIndex - 1].Data[1, 0] + maskPoints[i].Y * transformation_matrixs_invert[minMatchIndex - 1].Data[1, 1] + 1.0f * transformation_matrixs_invert[minMatchIndex - 1].Data[1, 2]; } Point[] maskPoints_rot_round = { Point.Round(maskPoints_rot[0]), Point.Round(maskPoints_rot[1]), Point.Round(maskPoints_rot[3]), Point.Round(maskPoints_rot[2]) }; texture.FillConvexPoly(maskPoints_rot_round, new Bgr(0, 0, 0)); } else { //texture.DrawPolyline(new Point[] { minMatchLoc, new Point(minMatchLoc.X + size, minMatchLoc.Y), new Point(minMatchLoc.X + size, minMatchLoc.Y + size), new Point(minMatchLoc.X, minMatchLoc.Y + size) }, true, new Bgr(0, 0, 0), 0); texture.Draw(new Rectangle(minMatchLoc, new Size(size - 1, size 
- 1)), new Bgr(0, 0, 0), -1); } imageBox_Texture.Image = texture; template.Dispose(); template_gray.Dispose(); template_sobel.Dispose(); GC.Collect(); this.Invoke(new MethodInvoker(() => { progressBar_match.Value++; })); } } }