/// <summary>
/// Visualize a dense optical-flow field as a BGR image:
/// hue encodes flow direction, saturation encodes normalized magnitude, value is 1.
/// </summary>
/// <param name="flow">Two-channel (x, y) flow field (split into two planes below).</param>
/// <param name="bgr">Output BGR visualization (newly allocated; caller owns it).</param>
private static void drawHsv(Mat flow, out Mat bgr)
{
    Mat[] xy = new Mat[2];
    Cv2.Split(flow, out xy);
    try
    {
        using (Mat magnitude = new Mat())
        using (Mat angle = new Mat())
        using (Mat hsv = new Mat())
        {
            // angleInDegrees = true → angle is in [0, 360), which is the hue range
            // CvtColor expects for 32-bit float HSV images.
            Cv2.CartToPolar(xy[0], xy[1], magnitude, angle, true);

            // Translate magnitude to range [0;1].
            double mag_max, mag_min;
            Cv2.MinMaxLoc(magnitude, out mag_min, out mag_max);
            if (mag_max > 0) // guard: an all-zero flow field would otherwise divide by zero → NaNs
            {
                magnitude.ConvertTo(
                    magnitude,     // output matrix
                    -1,            // type of the output matrix, if negative same type as input matrix
                    1.0 / mag_max  // scaling factor
                );
            }

            // Build HSV image: H = direction, S = normalized magnitude, V = 1.
            using (Mat ones = Mat.Ones(angle.Size(), MatType.CV_32F))
            {
                Mat[] _hsv = { angle, magnitude, ones };
                Cv2.Merge(_hsv, hsv);
            }
            bgr = new Mat();
            Cv2.CvtColor(hsv, bgr, ColorConversionCodes.HSV2BGR);
        }
    }
    finally
    {
        // Split allocated the plane mats; release them (previously leaked).
        xy[0].Dispose();
        xy[1].Dispose();
    }
}
/// <summary>
/// Create a template: extract edge points from the template image and store
/// per-point gradient info (position, derivative, direction, reciprocal magnitude)
/// in <c>results</c> for later NCC matching. Overlays edges (green) and the
/// gravity center (red) on the template image.
/// </summary>
private void TrainTemplate()
{
    try
    {
        results.Clear();
        using (Mat src = new Mat())
        using (Mat output = new Mat())
        using (Mat gx = new Mat())
        using (Mat gy = new Mat())
        using (Mat magnitude = new Mat())
        using (Mat direction = new Mat())
        {
            // Convert to gray image.
            Cv2.CvtColor(template, src, ColorConversionCodes.RGB2GRAY);

            // Use the Canny algorithm to get edges, then trace them as contours.
            Cv2.Canny(src, output, TrainParame.Threshold1, TrainParame.Threshold2, TrainParame.ApertureSize, TrainParame.L2gradient);
            Cv2.FindContours(output, out var contours, out var hierarchy, TrainParame.Mode, TrainParame.Method);

            // Sobel filter on the template image returns the gradients in the X (Gx) and Y (Gy) direction.
            Cv2.Sobel(src, gx, MatType.CV_64F, 1, 0, 3);
            Cv2.Sobel(src, gy, MatType.CV_64F, 0, 1, 3);

            // Compute the magnitude and direction (radians).
            Cv2.CartToPolar(gx, gy, magnitude, direction);

            // Save edge info for every contour point; accumulate the position sum
            // so the gravity center can be computed afterwards.
            var sum = new Point2d(0, 0);
            for (int i = 0, m = contours.Length; i < m; i++)
            {
                for (int j = 0, n = contours[i].Length; j < n; j++)
                {
                    var cur = contours[i][j];
                    var fdx = gx.At<double>(cur.Y, cur.X, 0);        // dx
                    var fdy = gy.At<double>(cur.Y, cur.X, 0);        // dy
                    var der = new Point2d(fdx, fdy);                 // (dx,dy)
                    var mag = magnitude.At<double>(cur.Y, cur.X, 0); // √(dx²+dy²)
                    var dir = direction.At<double>(cur.Y, cur.X, 0); // atan2(dy,dx)
                    results.Add(new PointInfo
                    {
                        Point = cur,
                        Derivative = der,
                        Direction = dir,
                        // Store the reciprocal so the matcher multiplies instead of divides.
                        Magnitude = mag == 0 ? 0 : 1 / mag,
                    });
                    sum += cur;
                }
            }

            // Guard: no edge points found — dividing by results.Count below would
            // produce a NaN center. Leave results empty and bail out.
            if (results.Count == 0)
            {
                Trace.TraceWarning("TrainTemplate: no edge points found; check Canny thresholds.");
                return;
            }

            // Update Center and Offset in PointInfo.
            var center = new Point2d(sum.X / results.Count, sum.Y / results.Count);
            foreach (var item in results)
            {
                item.Update(center);
            }

            // Overlay display: origin image, edges (green) and center point (red).
            Cv2.DrawContours(template, new[] { results.Select(_ => _.Point) }, -1, Scalar.LightGreen, 2);
            Cv2.Circle(template, center.ToPoint(), 2, Scalar.Red, -1);
        }

        // Update UI.
        RaisePropertyChanged(nameof(Template));
    }
    catch (Exception ex)
    {
        Trace.TraceError(ex.Message);
        Trace.TraceError(ex.StackTrace);
    }
}
/// <summary>
/// NCC (normalized cross correlation) search of the trained edge template over
/// the destination image. Scans every pixel as a candidate position, correlating
/// template gradient directions against image gradients, with a greediness-based
/// early termination; the best-scoring position is marked on the image.
/// </summary>
private void MatchSearch()
{
    Stopwatch stopwatch = new Stopwatch();
    try
    {
        Trace.TraceInformation("NCC matching start");
        stopwatch.Start();

        // Guard: without a trained template there is nothing to match, and
        // noOfCordinates == 0 would make the normalized scores meaningless.
        if (results.Count == 0)
        {
            Trace.TraceWarning("MatchSearch: no template points; run TrainTemplate first.");
            return;
        }

        using (Mat src = new Mat())
        using (Mat gx = new Mat())
        using (Mat gy = new Mat())
        using (Mat direction = new Mat())
        using (Mat magnitude = new Mat())
        {
            // Convert to gray image.
            Cv2.CvtColor(destination, src, ColorConversionCodes.RGB2GRAY);

            // Sobel filter on the source image returns the gradients in the X (Gx) and Y (Gy) direction.
            Cv2.Sobel(src, gx, MatType.CV_64F, 1, 0, 3);
            Cv2.Sobel(src, gy, MatType.CV_64F, 0, 1, 3);

            // Compute the magnitude and direction.
            Cv2.CartToPolar(gx, gy, magnitude, direction);

            var minScore = SearchParame.MinScore;
            var greediness = SearchParame.Greediness;

            // NCC match search.
            long noOfCordinates = results.Count;
            double normMinScore = minScore / noOfCordinates; // normalized min score
            double normGreediness = (1 - greediness * minScore) / (1 - greediness) / noOfCordinates;
            double resultScore = 0;
            Point center = new Point();
            for (int i = 0, h = src.Height; i < h; i++)
            {
                for (int j = 0, w = src.Width; j < w; j++)
                {
                    double partialSum = 0;
                    // BUGFIX: reset per candidate position. Previously declared once
                    // outside the pixel loops, so a position whose first template
                    // points all fell outside the image inherited the previous
                    // position's score in the comparison below.
                    double partialScore = 0;
                    for (var m = 0; m < noOfCordinates; m++)
                    {
                        var item = results[m];
                        var curX = (int)(j + item.Offset.X);
                        var curY = (int)(i + item.Offset.Y);
                        var iTx = item.Derivative.X;
                        var iTy = item.Derivative.Y;
                        // Skip template points that land outside the image.
                        if (curX < 0 || curY < 0 || curY > src.Height - 1 || curX > src.Width - 1)
                        {
                            continue;
                        }
                        var iSx = gx.At<double>(curY, curX, 0);
                        var iSy = gy.At<double>(curY, curX, 0);
                        if ((iSx != 0 || iSy != 0) && (iTx != 0 || iTy != 0))
                        {
                            var mag = magnitude.At<double>(curY, curX, 0);
                            var matGradMag = mag == 0 ? 0 : 1 / mag; // 1/√(dx²+dy²)
                            // Dot product of normalized gradients (cosine of the angle between them).
                            partialSum += ((iSx * iTx) + (iSy * iTy)) * (item.Magnitude * matGradMag);
                        }
                        var sumOfCoords = m + 1;
                        partialScore = partialSum / sumOfCoords;
                        // Check termination criteria:
                        // if the partial score is less than the score needed to still
                        // reach the required minimum at this position, break searching
                        // at this coordinate.
                        if (partialScore < Math.Min((minScore - 1) + normGreediness * sumOfCoords, normMinScore * sumOfCoords))
                        {
                            break;
                        }
                    }
                    if (partialScore > resultScore)
                    {
                        resultScore = partialScore;
                        center.X = j;
                        center.Y = i;
                    }
                }
            }

            // Overlay display: origin image, edges (green) and center point (red).
            Cv2.DrawContours(destination, new[] { results.Select(_ => _.Offset.ToPoint()) }, -1, Scalar.LightGreen, 2, offset: center);
            Cv2.Circle(destination, center, 5, Scalar.Red, -1);
            Trace.TraceInformation($"NCC matching score {resultScore}. time: {stopwatch.Elapsed.TotalMilliseconds} ms");
        }
        RaisePropertyChanged(nameof(Destination));
    }
    catch (Exception ex)
    {
        Trace.TraceError(ex.Message);
        Trace.TraceError(ex.StackTrace);
    }
    finally
    {
        stopwatch.Stop();
    }
}
/// <summary>
/// Apply a cubic radial distortion (r' = r + k·r³) to <paramref name="src"/> for
/// Cardboard-style viewing, then write the image twice side by side into
/// <paramref name="dst"/> as a stereo pair. Assumes a square n×n input
/// (only <c>src.Cols</c> is used for sizing) — TODO confirm with callers.
/// </summary>
private void Cardboardize(ref Mat src, ref Mat dst)
{
    // Calculate center pixel index of the input image.
    int n = src.Cols;
    int c = n / 2;

    // Create a cartesian coordinate mesh of pixels:
    // generates 2 matrices xi and yi, each of size n*n.
    Mat xi = new Mat(n, n, MatType.CV_32FC1);
    Mat yi = new Mat(n, n, MatType.CV_32FC1);
    meshgrid(ref xi, ref yi);

    // Normalize indices to be centered on the image.
    Mat xt = xi - c;
    Mat yt = yi - c;

    // Transpose and flatten to single-column mats for CartToPolar.
    xt = xt.T();
    yt = yt.T();
    if (!xt.IsContinuous()) { xt = xt.Clone(); } // Reshape requires continuous data
    if (!yt.IsContinuous()) { yt = yt.Clone(); }
    xt = xt.Reshape(0, xt.Rows * xt.Cols);
    yt = yt.Reshape(0, yt.Rows * yt.Cols);

    // Convert to polar, apply the radial distortion to the radius,
    // then convert back to cartesian sampling coordinates.
    Mat r = new Mat();
    Mat theta = new Mat();
    Cv2.CartToPolar(xt, yt, r, theta);
    Mat s = r + 0.00001f * r.Mul(r.Mul(r)); // k = 1e-5 distortion strength
    Mat ut = new Mat();
    Mat vt = new Mat();
    Cv2.PolarToCart(s, theta, ut, vt);

    // Un-flatten back to n x n, shift back to pixel coordinates, transpose back.
    if (!ut.IsContinuous()) { ut = ut.Clone(); }
    if (!vt.IsContinuous()) { vt = vt.Clone(); }
    Mat u = ut.Reshape(0, n) + c;
    Mat v = vt.Reshape(0, n) + c;
    u = u.T();
    v = v.T();

    // Warp the source image through the distortion map.
    Mat dist = new Mat();
    Cv2.Remap(src, dist, u, v, InterpolationFlags.Linear, BorderTypes.Constant);

    Debug.Log("Number of Cameras Found: " + WebCamTexture.devices.Length);

    // Side-by-side stereo pair (same distorted image for both eyes).
    Cv2.HConcat(dist, dist, dst);

    // Release intermediates (previously all leaked). Note: the reassignments
    // above drop references to some temporaries, which are left to the
    // finalizer; these are the Mats still live at this point, all of which
    // are independent allocations from dst.
    foreach (var mat in new[] { xi, yi, xt, yt, r, theta, s, ut, vt, u, v, dist })
    {
        mat.Dispose();
    }
}