/// <summary> /// Create virtual sensors for specified image /// </summary>
/// <param name="hsv">The work area in HSV</param>
/// <param name="indexer">The indexer of work area in HSV (for fast access to pixels' values)</param>
/// <returns>The list of sensors</returns>
static List<Sensor> GetSensors(Mat hsv, MatIndexer<Vec3b> indexer)
{
    List<Sensor> result = new List<Sensor>();

    // Scan rows bottom-up, every 3rd row, starting 5 px above the bottom edge.
    for (int y = hsv.Height - 5; y >= 0; y -= 3)
    {
        // Expected lane width (in pixels) at this row; presumably shrinks with
        // distance from the camera — see CalculateLaneThickness.
        int laneThickness = CalculateLaneThickness(y);

        // Don't create sensors if the lane thickness too small
        if (laneThickness < 10)
        {
            break;
        }

        // Slide a candidate sensor window across the row, every 5 px.
        for (int x = 0; x < hsv.Width - laneThickness - 10; x += 5)
        {
            // Don't create sensors if its start is out of trapeze range
            // (three consecutive black pixels mark the masked-out region).
            if (indexer[y, x] == COLOR_BLACK && indexer[y, x + 1] == COLOR_BLACK && indexer[y, x + 2] == COLOR_BLACK)
            {
                continue;
            }

            // Sensor spans the expected lane width plus a 10 px margin.
            result.Add(new Sensor(x, x + laneThickness + 10, y));
        }
    }

    return(result);
}
/// <summary>
/// Reads one character's worth of bits from the monochrome QR mat by sampling
/// the precomputed search points.
/// </summary>
/// <param name="indexer">Indexer over the monochrome QR mat.</param>
/// <param name="charCounter">Zero-based index of the character to read.</param>
/// <param name="bitsPerChar">Number of bits that encode one character.</param>
/// <returns>The bit string that was read, or an empty string when only zeros were read.</returns>
private string ReadMonochromePixels(MatIndexer<byte> indexer, int charCounter, int bitsPerChar)
{
    string bits = string.Empty;
    int firstBit = charCounter * bitsPerChar;

    for (int offset = 0; offset < bitsPerChar; offset++)
    {
        int sampleIndex = firstBit + offset;

        // Ignore bit positions beyond the precomputed sample points.
        if (sampleIndex >= matSearchPixels.Count)
        {
            continue;
        }

        // Sample the pixel intensity at the precomputed search point.
        float pixelIntensity = indexer[matSearchPixels[sampleIndex].Y, matSearchPixels[sampleIndex].X];

        // Max intensity encodes a '1' bit, min intensity a '0' bit;
        // anything else contributes nothing.
        if (pixelIntensity == LUM_INTENSITY_MAX)
        {
            bits += '1';
        }
        else if (pixelIntensity == LUM_INTENSITY_MIN)
        {
            bits += '0';
        }
    }

    // A run of all zeros means nothing was encoded at this position.
    if (bits == CHAR_ZEROS_7)
    {
        return("");
    }

    return(bits);
}
/// <summary>
/// Gets a green-red representation of which frames has had FindPupils run over them.
/// DeepPink columns mark frames whose pupil location is NaN (not processed);
/// LimeGreen columns mark processed frames.
/// </summary>
/// <param name="width">width of image to get</param>
/// <param name="height">height of image to get</param>
/// <returns>The preview image, or null when no pupil data is available.</returns>
public BitmapImage GetFramesProcessedPreviewImage(int width = 1920, int height = 6)
{
    if (pupilLocations == null)
    {
        return(null);
    }

    // One column per frame; resized to the requested width below.
    // FIX: the Mat is now disposed, and the previously unused indexer local
    // was removed; the per-frame color is computed once instead of per pixel.
    using (Mat representation = new Mat(height, frameCount, MatType.CV_8UC3))
    {
        for (int i = 0; i < frameCount; i++)
        {
            Vec3b columnColor = Double.IsNaN(pupilLocations[i, 0]) ? Scalar.DeepPink.ToVec3b() : Scalar.LimeGreen.ToVec3b();
            for (int j = 0; j < height; j++)
            {
                representation.Set(j, i, columnColor);
            }
        }

        // Nearest-neighbour keeps the per-frame columns as hard color bands.
        representation.Resize(new Size(width, height), 0, 0, InterpolationFlags.Nearest).ToBitmap().Save(BMPConvertMemeory, ImageFormat.Bmp);
    }

    BMPConvertMemeory.Position = 0;
    bitmapFrame = new BitmapImage();
    bitmapFrame.BeginInit();
    bitmapFrame.StreamSource = BMPConvertMemeory;
    // OnLoad: decode now so the shared memory stream can be reset afterwards.
    bitmapFrame.CacheOption = BitmapCacheOption.OnLoad;
    bitmapFrame.EndInit();
    BMPConvertMemeory.SetLength(0);
    return(bitmapFrame);
}
/// <summary> /// Select sensors which cover contours from both sides /// </summary>
/// <param name="sensors">The list of sensors</param>
/// <param name="contIndexer">The indexer for HSV image of work area</param>
/// <returns>Filtered sensors</returns>
static List<Sensor> FilterByContours(List<Sensor> sensors, MatIndexer<Vec3b> contIndexer)
{
    List<Sensor> filteredByContours = new List<Sensor>();

    foreach (var sensor in sensors)
    {
        // Look for a red contour pixel within 10 px inward of the left edge.
        bool leftHit = false;
        for (int x = sensor.x1; x <= sensor.x1 + 10 && !leftHit; x++)
        {
            leftHit = contIndexer[sensor.y, x] == COLOR_RED;
        }

        // Look for a red contour pixel within 10 px inward of the right edge.
        bool rightHit = false;
        for (int x = sensor.x2; x >= sensor.x2 - 10 && !rightHit; x--)
        {
            rightHit = contIndexer[sensor.y, x] == COLOR_RED;
        }

        // Keep only sensors bounded by a contour on both sides.
        if (leftHit && rightHit)
        {
            filteredByContours.Add(sensor);
        }
    }

    return(filteredByContours);
}
/// <summary> /// Filter sensors by count of pixels with bad color and changing of average color /// </summary>
/// <param name="sensors">The list of sensors</param>
/// <param name="indexer">The indexer for HSV image of work area</param>
/// <returns>Filtered sensors</returns>
static List<Sensor> FilterByColorAndChangeColor(List<Sensor> sensors, MatIndexer<Vec3b> indexer)
{
    List<Sensor> result = new List<Sensor>();
    foreach (var sensor in sensors)
    {
        // Count the average components of HSV inside the sensor
        int s0Sum = 0, v0Sum = 0, badColorsCnt = 0;
        for (int x = sensor.x1; x <= sensor.x2; x++)
        {
            Vec3b color = indexer[sensor.y, x];
            if (isBadColor(color))
            {
                badColorsCnt++;
            }
            // Stop early once 10 bad pixels were seen; the sensor is rejected below.
            if (badColorsCnt == 10)
            {
                break;
            }
            // Accumulate channels 1 and 2 (presumably saturation and value — TODO confirm HSV layout).
            s0Sum += color[1];
            v0Sum += color[2];
        }
        // Delete sensors where much of pixels with bad color
        if (badColorsCnt == 10)
        {
            continue;
        }
        // NOTE(review): the inclusive loop above visits x2 - x1 + 1 pixels, so
        // dividing by x2 - x1 slightly overstates the averages. The thresholds
        // below may be tuned against this — confirm before changing.
        double n = sensor.x2 - sensor.x1;
        double s0Avg = s0Sum / n, v0Avg = v0Sum / n;
        // Count the average components of HSV outside the sensor
        int sSum = 0, vSum = 0;
        // 10 pixels immediately left of the sensor...
        for (int x = sensor.x1 - 10; x < sensor.x1; x++)
        {
            Vec3b color = indexer[sensor.y, x];
            sSum += color[1];
            vSum += color[2];
        }
        // ...and 10 pixels immediately right of it. NOTE(review): assumes the
        // sensor is at least 10 px from the image border — no bounds check here.
        for (int x = sensor.x2 + 1; x <= sensor.x2 + 10; x++)
        {
            Vec3b color = indexer[sensor.y, x];
            sSum += color[1];
            vSum += color[2];
        }
        // 20 samples total: 10 per side.
        double sAvg = sSum / 20.0, vAvg = vSum / 20.0;
        // If difference between these values much, it may be a lane
        if (Math.Abs(s0Avg - sAvg) > 4 || Math.Abs(v0Avg - vAvg) > 9)
        {
            result.Add(sensor);
        }
    }
    return(result);
}
/// <summary>
/// Splits a combined BGR QR mat into three single-channel mats (red, green,
/// blue intensities) and converts each to a displayable image.
/// </summary>
/// <param name="combinedMat">The combined color QR mat (8UC3, BGR order).</param>
/// <returns>Dictionary mapping the red/green/blue output keys to their images.</returns>
public Dictionary<string, BitmapSource> SplitColoredQR(Mat combinedMat)
{
    Dictionary<string, BitmapSource> outputSplitImages = new Dictionary<string, BitmapSource>();
    Size size = new Size(combinedMat.Width, combinedMat.Height);

    // One single-channel mat per color component.
    // FIX: removed the unused 'depth' local present in the original.
    Mat redComponent = Mat.Zeros(size, MatType.CV_8UC1);
    Mat grnComponent = Mat.Zeros(size, MatType.CV_8UC1);
    Mat bluComponent = Mat.Zeros(size, MatType.CV_8UC1);

    // Get mat indexers for fast per-pixel access.
    MatOfByte3 mobComb = new MatOfByte3(combinedMat);
    MatOfByte mobRed = new MatOfByte(redComponent);
    MatOfByte mobGrn = new MatOfByte(grnComponent);
    MatOfByte mobBlu = new MatOfByte(bluComponent);
    MatIndexer<Vec3b> indexerComb = mobComb.GetIndexer();
    MatIndexer<byte> indexerRed = mobRed.GetIndexer();
    MatIndexer<byte> indexerGrn = mobGrn.GetIndexer();
    MatIndexer<byte> indexerBlu = mobBlu.GetIndexer();

    for (int y = 0; y < combinedMat.Height; y++)
    {
        for (int x = 0; x < combinedMat.Width; x++)
        {
            // Read the BGR pixel once (the original read it three times) and
            // route each channel into its component mat.
            Vec3b pixel = indexerComb[y, x];
            indexerRed[y, x] = pixel.Item2; // red channel
            indexerGrn[y, x] = pixel.Item1; // green channel
            indexerBlu[y, x] = pixel.Item0; // blue channel
        }
    }

    outputSplitImages.Add(QR_TYPE_RED_OUT, Utils.MatToImage(redComponent));
    outputSplitImages.Add(QR_TYPE_GREEN_OUT, Utils.MatToImage(grnComponent));
    outputSplitImages.Add(QR_TYPE_BLUE_OUT, Utils.MatToImage(bluComponent));
    return(outputSplitImages);
}
/// <summary>
/// Custom reflection removal (自作反射光除去): for every pixel, sorts the
/// first-channel values taken from the four input images and writes the mean
/// of the three smallest into DST's first channel, discarding the largest
/// (presumably specular) sample.
/// </summary>
/// <param name="images">Source images; exactly images[0..3] are read.</param>
/// <param name="DST">Destination image; its first channel is overwritten in place.</param>
public void 自作反射光除去(Mat[] images, ref Mat DST)
{
    int width = images[0].Width;
    int height = images[0].Height;
    MatIndexer<Vec3b>[] indexers = new MatIndexer<Vec3b>[4];
    // Indexer over DST; the MatOfByte3 wrapper presumably shares DST's pixel
    // buffer, so writes below modify DST directly — TODO confirm wrapper semantics.
    var indexer = new MatOfByte3(DST).GetIndexer();
    for (int i = 0; i < 4; i++)
    {
        indexers[i] = new MatOfByte3(images[i]).GetIndexer(); //images[i].GetGenericIndexer<Vec3b>();
    }
    for (int x = 0; x < width; x++)
    {
        for (int y = 0; y < height; y++)
        {
            // Gather this pixel's value from each of the four images.
            Vec3b[] colors = new Vec3b[4];
            Vec3b color = indexer[y, x];
            for (int i = 0; i < 4; i++)
            {
                colors[i] = indexers[i][y, x];
            }
            double[] vals = { 0, 0, 0, 0 };
            for (int num = 0; num < 4; num++)
            {
                vals[num] = colors[num].Item0;
            }
            Array.Sort(vals); // sort ascending: min = vals[0]
            // Average the three smallest samples; the brightest is dropped.
            color.Item0 = (byte)((vals[0] + vals[1] + vals[2]) / 3.0);
            indexer[y, x] = color;
            colors = null;
        }
    }
    // Nulling locals has no observable effect; kept from the original.
    indexers = null;
    indexer = null;
}
/// <summary>
/// Filters feature matches by voting on scale and rotation consistency:
/// builds a 2-D histogram over log10-scale and rotation of the candidate
/// matches, thresholds it at half its peak, back-projects it, and clears
/// mask entries whose match falls outside the dominant bins.
/// </summary>
/// <param name="modelKeyPoints">Keypoints of the model (reference) image.</param>
/// <param name="observedKeyPoints">Keypoints of the observed (test) image; indexed by match row.</param>
/// <param name="matches">KNN matches; only matches[i][0] is used.</param>
/// <param name="mask">In/out match mask (CV_8U, one row per match); non-zero rows are candidates.</param>
/// <param name="scaleIncrement">Growth factor used to size the scale bins.</param>
/// <param name="rotationBins">Number of rotation bins in the histogram.</param>
/// <returns>Number of matches that survived the voting.</returns>
static int VoteForSizeAndOrientation(KeyPoint[] modelKeyPoints, KeyPoint[] observedKeyPoints, DMatch[][] matches, Mat mask, float scaleIncrement, int rotationBins)
{
    int idx = 0;
    int nonZeroCount = 0;
    // Copy of the mask as a pinned managed array so the Mat 'm' below can wrap
    // it without copying; edits to maskMat are visible through 'm'.
    // NOTE(review): maskHandle.Free() is not in a finally block — the pinned
    // handle leaks if an exception escapes; confirm and consider try/finally.
    byte[] maskMat = new byte[mask.Rows];
    GCHandle maskHandle = GCHandle.Alloc(maskMat, GCHandleType.Pinned);
    using (Mat m = new Mat(mask.Rows, 1, MatType.CV_8U, maskHandle.AddrOfPinnedObject()))
    {
        mask.CopyTo(m);
        List<float> logScale = new List<float>();
        List<float> rotations = new List<float>();
        double s, maxS, minS, r;
        maxS = -1.0e-10f;
        minS = 1.0e10f;
        //if you get an exception here, it's because you're passing in the model and observed keypoints backwards. Just switch the order.
        for (int i = 0; i < maskMat.Length; i++)
        {
            if (maskMat[i] > 0)
            {
                KeyPoint observedKeyPoint = observedKeyPoints[i];
                KeyPoint modelKeyPoint = modelKeyPoints[matches[i][0].TrainIdx];
                // Log-ratio of keypoint sizes approximates the relative scale.
                s = Math.Log10(observedKeyPoint.Size / modelKeyPoint.Size);
                logScale.Add((float)s);
                maxS = s > maxS ? s : maxS;
                minS = s < minS ? s : minS;
                // Rotation difference normalized into [0, 360).
                r = observedKeyPoint.Angle - modelKeyPoint.Angle;
                r = r < 0.0f ? r + 360.0f : r;
                rotations.Add((float)r);
            }
        }
        // At least 2 scale bins, otherwise sized by the observed scale spread.
        int scaleBinSize = (int)Math.Ceiling((maxS - minS) / Math.Log10(scaleIncrement));
        if (scaleBinSize < 2)
        {
            scaleBinSize = 2;
        }
        // NOTE(review): '+' between scaleBinSize and Log10(scaleIncrement) looks
        // odd (mixes a bin count with a log value) — confirm against the
        // original EmguCV sample this was ported from before changing.
        float[] scaleRanges = { (float)minS, (float)(minS + scaleBinSize + Math.Log10(scaleIncrement)) };
        using (var scalesMat = new Mat<float>(rows: logScale.Count, cols: 1, data: logScale.ToArray()))
        using (var rotationsMat = new Mat<float>(rows: rotations.Count, cols: 1, data: rotations.ToArray()))
        using (var flagsMat = new Mat<float>(logScale.Count, 1))
        using (Mat hist = new Mat())
        {
            flagsMat.SetTo(new Scalar(0.0f));
            // NOTE(review): flagsMatFloat1 and rotationRanges are never used below.
            float[] flagsMatFloat1 = flagsMat.ToArray();
            int[] histSize = { scaleBinSize, rotationBins };
            float[] rotationRanges = { 0.0f, 360.0f };
            int[] channels = { 0, 1 };
            // with infrared left and right, rotation max = min and calchist fails. Adding 1 to max enables all this to work!
            if (rotations.Count > 0)
            {
                Rangef[] ranges = { new Rangef(scaleRanges[0], scaleRanges[1]), new Rangef(rotations.Min(), rotations.Max() + 1) };
                double minVal, maxVal;
                Mat[] arrs = { scalesMat, rotationsMat };
                // 2-D histogram over (log-scale, rotation), thresholded at half
                // its peak so only dominant (scale, rotation) combinations remain.
                Cv2.CalcHist(arrs, channels, null, hist, 2, histSize, ranges);
                Cv2.MinMaxLoc(hist, out minVal, out maxVal);
                Cv2.Threshold(hist, hist, maxVal * 0.5, 0, ThresholdTypes.Tozero);
                // Back-project: flagsMat[k] != 0 iff candidate k lies in a kept bin.
                Cv2.CalcBackProject(arrs, channels, hist, flagsMat, ranges);
                MatIndexer<float> flagsMatIndexer = flagsMat.GetIndexer();
                for (int i = 0; i < maskMat.Length; i++)
                {
                    if (maskMat[i] > 0)
                    {
                        // idx walks the compacted candidate list in the same
                        // order the logScale/rotations lists were built.
                        if (flagsMatIndexer[idx++] != 0.0f)
                        {
                            nonZeroCount++;
                        }
                        else
                        {
                            maskMat[i] = 0;
                        }
                    }
                }
                // Write the updated mask back to the caller's Mat.
                m.CopyTo(mask);
            }
        }
    }
    maskHandle.Free();
    return(nonZeroCount);
}
/// <summary>
/// Paints one color component of the QR code: walks the QR cells row by row
/// and fills each cell whose message bit is '1' with the given color.
/// </summary>
/// <param name="message">Bit string ('0'/'1') to encode into this component.</param>
/// <param name="qrCodeProps">Geometry of the QR code (cell size and cells per dimension).</param>
/// <param name="indexerComponent">Indexer of the component mat being painted.</param>
/// <param name="color">Color used for cells whose bit is '1'.</param>
/// <returns>The same indexer, after painting.</returns>
MatIndexer<Vec3b> WriteColorComponent(string message, QRCodeProperties qrCodeProps, MatIndexer<Vec3b> indexerComponent, Vec3b color)
{
    int gridSpan = qrCodeProps.CellsPerDim * qrCodeProps.CellSize;
    int bitIndex = 0;

    for (int cellY = 0; cellY < gridSpan; cellY += qrCodeProps.CellSize)
    {
        for (int cellX = 0; cellX < gridSpan; cellX += qrCodeProps.CellSize)
        {
            // Stop once the message is exhausted; remaining cells stay empty.
            if (bitIndex >= message.Length)
            {
                break;
            }

            char bit = message[bitIndex++];

            // Only a '1' paints the cell; '0' (or any other char) leaves it untouched.
            if (bit == '1')
            {
                for (int i = cellY; i < cellY + qrCodeProps.CellSize; i++)
                {
                    for (int j = cellX; j < cellX + qrCodeProps.CellSize; j++)
                    {
                        indexerComponent[i, j] = color;
                    }
                }
            }
        }
    }

    return(indexerComponent);
}
//img1:test image; img2:ref img
/// <summary>
/// Matches img1 (test) against img2 (reference) using KAZE features: KNN
/// matching, uniqueness voting, size/orientation voting, then a RANSAC
/// homography. Returns a match rate in [0, 1]; optionally displays the
/// matches side by side in a window.
/// </summary>
/// <param name="img1">Test image.</param>
/// <param name="img2">Reference image.</param>
/// <param name="ishowImageMatchTemplate">When true, shows a debug window with the drawn matches.</param>
/// <param name="s">Window title for the debug view.</param>
/// <returns>Match rate; 0.0 when fewer than 4 matches survive voting.</returns>
public float MatchTemplate(Mat img1, Mat img2, bool ishowImageMatchTemplate, string s = "Match")
{
    float matchRate = 0.0f;
    using (var descriptors1 = new Mat())
    using (var descriptors2 = new Mat())
    using (var matcher = new BFMatcher(NormTypes.L2SQR))
    using (var kaze = KAZE.Create())
    {
        KeyPoint[] keypoints1, keypoints2;
        kaze.DetectAndCompute(img1, null, out keypoints1, descriptors1);
        kaze.DetectAndCompute(img2, null, out keypoints2, descriptors2);
        // k = 2 nearest neighbours: needed by the uniqueness (ratio) vote.
        DMatch[][] matches = matcher.KnnMatch(descriptors1, descriptors2, 2);
        using (Mat mask = new Mat(matches.Length, 1, MatType.CV_8U))
        {
            // Start with every match marked valid, then let each vote clear rows.
            mask.SetTo(new Scalar(255));
            int nonZero = Cv2.CountNonZero(mask);
            VoteForUniqueness(matches, mask);
            nonZero = Cv2.CountNonZero(mask);
            nonZero = VoteForSizeAndOrientation(keypoints2, keypoints1, matches, mask, 1.5f, 20);
            List<Point2f> obj = new List<Point2f>();
            List<Point2f> scene = new List<Point2f>();
            List<DMatch> goodMatchesList = new List<DMatch>();
            //iterate through the mask only pulling out nonzero items because they're matches
            for (int i = 0; i < mask.Rows; i++)
            {
                // NOTE(review): the indexer is re-created on every iteration;
                // it could be hoisted out of the loop (cf. Run(), which hoists it).
                MatIndexer<byte> maskIndexer = mask.GetGenericIndexer<byte>();
                if (maskIndexer[i] > 0)
                {
                    obj.Add(keypoints1[matches[i][0].QueryIdx].Pt);
                    scene.Add(keypoints2[matches[i][0].TrainIdx].Pt);
                    goodMatchesList.Add(matches[i][0]);
                }
            }
            List<Point2d> objPts = obj.ConvertAll(Point2fToPoint2d);
            List<Point2d> scenePts = scene.ConvertAll(Point2fToPoint2d);
            // A homography needs at least 4 point correspondences.
            if (nonZero >= 4)
            {
                Mat homography = Cv2.FindHomography(objPts, scenePts, HomographyMethods.Ransac, 1.5, mask);
                nonZero = Cv2.CountNonZero(mask);
                //calculate match rate by how many match points exist
                //matchRate = (float)nonZero / keypoints2.Count();
                matchRate = 1 - (float)(keypoints2.Count() - nonZero) / (keypoints2.Count() + nonZero);
                if (homography != null && ishowImageMatchTemplate == true)
                {
                    Point2f[] objCorners = { new Point2f(0, 0), new Point2f(img1.Cols, 0), new Point2f(img1.Cols, img1.Rows), new Point2f(0, img1.Rows) };
                    Point2d[] sceneCorners = MyPerspectiveTransform3(objCorners, homography);
                    //This is a good concat horizontal
                    using (Mat img3 = new Mat(Math.Max(img1.Height, img2.Height), img2.Width + img1.Width, MatType.CV_8UC3))
                    using (Mat left = new Mat(img3, new Rect(0, 0, img1.Width, img1.Height)))
                    using (Mat right = new Mat(img3, new Rect(img1.Width, 0, img2.Width, img2.Height)))
                    {
                        img1.CopyTo(left);
                        img2.CopyTo(right);
                        byte[] maskBytes = new byte[mask.Rows * mask.Cols];
                        mask.GetArray(0, 0, maskBytes);
                        Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, goodMatchesList, img3, Scalar.All(-1), Scalar.All(-1), maskBytes, DrawMatchesFlags.NotDrawSinglePoints);
                        //List<List<Point>> listOfListOfPoint2D = new List<List<Point>>();
                        //List<Point> listOfPoint2D = new List<Point>();
                        //listOfPoint2D.Add(new Point(sceneCorners[0].X + img1.Cols, sceneCorners[0].Y));
                        //listOfPoint2D.Add(new Point(sceneCorners[1].X + img1.Cols, sceneCorners[1].Y));
                        //listOfPoint2D.Add(new Point(sceneCorners[2].X + img1.Cols, sceneCorners[2].Y));
                        //listOfPoint2D.Add(new Point(sceneCorners[3].X + img1.Cols, sceneCorners[3].Y));
                        //listOfListOfPoint2D.Add(listOfPoint2D);
                        //img3.Polylines(listOfListOfPoint2D, true, Scalar.LimeGreen, 2);
                        // NOTE(review): Size(Rows/2, Cols/2) swaps width and height
                        // — Size expects (width, height); confirm the intent.
                        Cv2.ImShow(s, img3.Resize(new Size(img3.Rows / 2, img3.Cols / 2)));
                        Cv2.WaitKey(0);
                        Cv2.DestroyWindow(s);
                        //Window.ShowImages(img3.Resize(new Size(img3.Rows / 2, img3.Cols / 2)));
                        //Window.WaitKey(0);
                        //Window.DestroyAllWindows();
                    }
                }
            }
        }
    }
    return(matchRate);
}
/// <summary>
/// Reflection removal: for each pixel, takes the first-channel values from
/// the four input images, discards the largest, and writes the mean of the
/// remaining three into DST's first channel.
/// </summary>
/// <param name="images">Source images; exactly images[0..3] are read.</param>
/// <param name="DST">Destination image; its first channel is overwritten in place.</param>
public void 自作反射光除去(Mat[] images, ref Mat DST)
{
    int width = images[0].Width;
    int height = images[0].Height;

    // Per-image indexers for fast pixel access.
    var dstIndexer = new MatOfByte3(DST).GetIndexer();
    var srcIndexers = new MatIndexer<Vec3b>[4];
    for (int i = 0; i < 4; i++)
    {
        srcIndexers[i] = new MatOfByte3(images[i]).GetIndexer();
    }

    for (int x = 0; x < width; x++)
    {
        for (int y = 0; y < height; y++)
        {
            // Collect the four candidate first-channel intensities.
            double[] samples = new double[4];
            for (int i = 0; i < 4; i++)
            {
                samples[i] = srcIndexers[i][y, x].Item0;
            }

            // Sort ascending so samples[3] is the maximum.
            Array.Sort(samples);

            // Average the three smallest values; drop the brightest one.
            Vec3b pixel = dstIndexer[y, x];
            pixel.Item0 = (byte)((samples[0] + samples[1] + samples[2]) / 3.0);
            dstIndexer[y, x] = pixel;
        }
    }
}
/// <summary>
/// Yoshioka reflection removal (吉岡反射光除去処理): for each pixel, sorts the
/// four input images' first-channel values and writes the mean of the two
/// middle values into DST's first channel, rejecting both the darkest and
/// the brightest (presumably specular) sample.
/// </summary>
/// <param name="images">Source images; exactly images[0..3] are read.</param>
/// <param name="DST">Destination image; its first channel is overwritten in place.</param>
/// <param name="th_l">Lower threshold; currently unused — the clamping code was disabled.</param>
/// <param name="th_h">Upper threshold; currently unused — the clamping code was disabled.</param>
public void 吉岡反射光除去処理(Mat[] images, ref Mat DST, int th_l, int th_h)
{
    int width = images[0].Width;
    int height = images[0].Height;
    MatIndexer<Vec3b>[] indexers = new MatIndexer<Vec3b>[4];
    var indexer = new MatOfByte3(DST).GetIndexer();
    for (int i = 0; i < 4; i++)
    {
        indexers[i] = new MatOfByte3(images[i]).GetIndexer();
    }

    // FIX: removed a dead loop that contained only commented-out threshold
    // code, the redundant per-pixel 'colors' copy, and no-op null assignments.
    for (int x = 0; x < width; x++)
    {
        for (int y = 0; y < height; y++)
        {
            // Gather this pixel's first-channel value from all four images.
            double[] vals = new double[4];
            for (int num = 0; num < 4; num++)
            {
                vals[num] = indexers[num][y, x].Item0;
            }

            // Sort ascending; vals[1] and vals[2] are the two middle samples.
            Array.Sort(vals);

            // Median-of-four average.
            Vec3b color = indexer[y, x];
            color.Item0 = (byte)((vals[1] + vals[2]) / 2.0);
            indexer[y, x] = color;
        }
    }
}
/// <summary>
/// Decodes the previously generated QR mats back into bit strings.
/// Grayscale mode reads the monochrome mat; color mode reads the red, green
/// and blue channels of the combined mat and also emits their interleaving.
/// </summary>
/// <param name="qrCodeProps">QR geometry; NOTE(review): not referenced in this body — confirm it is still needed.</param>
/// <param name="colorMode">Which decoding path to take.</param>
/// <returns>Decoded bit strings keyed by QR type, or null for an unknown mode.</returns>
public Dictionary<string, string> DecodeQRCode(QRCodeProperties qrCodeProps, QRColorMode colorMode)
{
    outputMessages = new Dictionary<string, string>();
    if (colorMode == QRColorMode.Grayscale)
    {
        string decodedBinary = string.Empty;
        outputMats.TryGetValue(QR_TYPE_MONOCHROME, out Mat qrMatMono);
        // Get mat indexer
        MatOfByte mob1 = new MatOfByte(qrMatMono);
        MatIndexer<byte> indexerByte = mob1.GetIndexer();
        // Read decoded strings
        int bitsPerChar = 7;
        int messageLength = inputMessage.Length;
        // NOTE(review): messageLength / bitsPerChar is INTEGER division before
        // the widening to decimal, so a trailing partial character is dropped.
        // This matches GenerateQRCore (which also floors) — confirm intended.
        decimal messageChars = messageLength / bitsPerChar;
        // Read decoded binary, one 7-bit character at a time.
        for (int i = 0; i < messageChars; i++)
        {
            decodedBinary += ReadMonochromePixels(indexerByte, i, bitsPerChar);
        }
        // Add decoded message to output list
        outputMessages.Add(QR_TYPE_MONOCHROME, decodedBinary);
        return(outputMessages);
    }
    else if (colorMode == QRColorMode.Color)
    {
        string decodedBinaryRed = string.Empty;
        string decodedBinaryGreen = string.Empty;
        string decodedBinaryBlue = string.Empty;
        string decodedBinaryCombined = string.Empty;
        outputMats.TryGetValue(QR_TYPE_COMBINED, out Mat qrMatCombined);
        // Get mat indexer
        MatOfByte3 mobComb = new MatOfByte3(qrMatCombined);
        MatIndexer<Vec3b> indexerMobComb = mobComb.GetIndexer();
        // Read decoded strings
        int bitsPerChar = 7;
        int messageLength = inputMessage.Length;
        // Same floor-then-widen arithmetic as the grayscale branch above.
        decimal messageChars = messageLength / bitsPerChar;
        // Characters are distributed round-robin across the 3 channels, so
        // each channel holds at most ceil(messageChars / 3) characters.
        int coloredMessageLength = (int)Math.Ceiling(messageChars / 3);
        for (int i = 0; i < coloredMessageLength; i++)
        {
            string tempRed = ReadColorPixels(indexerMobComb, QR_TYPE_RED, i, bitsPerChar);
            string tempGreen = ReadColorPixels(indexerMobComb, QR_TYPE_GREEN, i, bitsPerChar);
            string tempBlue = ReadColorPixels(indexerMobComb, QR_TYPE_BLUE, i, bitsPerChar);
            decodedBinaryRed += tempRed;
            decodedBinaryGreen += tempGreen;
            decodedBinaryBlue += tempBlue;
            // Combined output re-interleaves the channels in R, G, B order.
            decodedBinaryCombined += tempRed;
            decodedBinaryCombined += tempGreen;
            decodedBinaryCombined += tempBlue;
        }
        // Add output messages
        outputMessages.Add(QR_TYPE_RED, decodedBinaryRed);
        outputMessages.Add(QR_TYPE_GREEN, decodedBinaryGreen);
        outputMessages.Add(QR_TYPE_BLUE, decodedBinaryBlue);
        outputMessages.Add(QR_TYPE_COMBINED, decodedBinaryCombined);
        return(outputMessages);
    }
    return(null);
}
/// <summary>
/// Loads an image, binarizes and dilates it, flattens the pixels into a
/// column-major int array and runs box detection (HasBoxes) over it.
/// Optionally shows debug windows with the pre/post images.
/// </summary>
/// <param name="filename">Path of the image to process.</param>
/// <param name="options">Extraction options; defaults are used when null.</param>
/// <returns>The detection result, with its Duration field populated.</returns>
public static FormExtractionResult ProcessImage(string filename, FormExtractionOptions options = null)
{
    if (options == null)
    {
        // Assume recommanded parameters.
        options = new FormExtractionOptions();
    }

    // FIX: Mats are now released via using blocks, so they are disposed even
    // when HasBoxes or an OpenCV call throws (the original only disposed on
    // the success path).
    using (var orig = new Mat(filename))
    using (var image = new Mat(filename, ImreadModes.GrayScale))
    {
        Cv2.AdaptiveThreshold(image, image, 255, AdaptiveThresholdTypes.MeanC, ThresholdTypes.Binary, 9, 4);

        // Resize image if too large.
        if (image.Width > options.ResizeWidth)
        {
            var height = options.ResizeWidth * image.Height / image.Width;
            Cv2.Resize(image, image, new Size(options.ResizeWidth, height));
        }

        Cv2.BitwiseNot(image, image);
        Cv2.Dilate(image, image, Cv2.GetStructuringElement(MorphShapes.Cross, new Size(2, 2)));

        using (MatOfByte mat = new MatOfByte(image))
        using (Mat newImage = new Mat(image.Height, image.Width, MatType.CV_8UC3))
        {
            MatIndexer<byte> indexer = mat.GetIndexer();
            var row = image.Height;
            var col = image.Width;
            newImage.SetTo(Scalar.Black);

            // We must determine if it "may" be an interesting blob.
            Stopwatch watch = new Stopwatch();
            watch.Start();

            // Flatten pixels column-major (y + x * row) — the layout HasBoxes expects.
            int[] imgData = new int[row * col];
            for (int y = 0; y < row; y++)
            {
                for (int x = 0; x < col; x++)
                {
                    imgData[y + x * row] = indexer[y, x];
                }
            }

            var result = HasBoxes(imgData, row, col, options);
            watch.Stop();
            result.Duration = watch.Elapsed;

            // Preview
            if (result.Boxes.Any() && image.Width != 0 && options.ShowDebugImage)
            {
                var img = CreateImage(result.DebugImg, hasColor: true);
                Cv2.BitwiseOr(newImage, img, newImage);
                Cv2.BitwiseNot(image, image);
                int width = 400;
                var height = width * image.Height / image.Width;
                Cv2.Resize(orig, orig, new Size(width, height));
                Cv2.Resize(image, image, new Size(width, height));
                Cv2.Resize(newImage, newImage, new Size(width, height));
                using (new Window("orig", orig))
                using (new Window("pre", image))
                using (new Window("post", newImage))
                {
                    Cv2.WaitKey();
                    Cv2.DestroyAllWindows();
                }
            }

            return(result);
        }
    }
}
/// <summary>
/// Matches img1 against img2 with KAZE features, filters matches by
/// uniqueness and size/orientation voting plus a RANSAC homography, and
/// returns a side-by-side composite with the surviving matches drawn and
/// img1's projected outline traced in the right half.
/// </summary>
/// <param name="img1">Left/query image.</param>
/// <param name="img2">Right/train image.</param>
/// <returns>The composite image (black when too few matches were found).</returns>
public Mat Run(Mat img1, Mat img2)
{
    // Composite canvas: both images side by side, initially black.
    Mat img3 = new Mat(Math.Max(img1.Height, img2.Height), img2.Width + img1.Width, MatType.CV_8UC3).SetTo(0);
    using (var descriptors1 = new Mat())
    using (var descriptors2 = new Mat())
    using (var matcher = new BFMatcher(NormTypes.L2SQR))
    using (var kaze = KAZE.Create())
    {
        // keypoints1/keypoints2 are fields on the enclosing class.
        kaze.DetectAndCompute(img1, null, out keypoints1, descriptors1);
        kaze.DetectAndCompute(img2, null, out keypoints2, descriptors2);
        if (descriptors1.Width > 0 && descriptors2.Width > 0)
        {
            // k = 2 nearest neighbours: needed by the uniqueness (ratio) vote.
            DMatch[][] matches = matcher.KnnMatch(descriptors1, descriptors2, 2);
            using (Mat mask = new Mat(matches.Length, 1, MatType.CV_8U))
            {
                // Start with every match valid; each vote clears rows it rejects.
                mask.SetTo(Scalar.White);
                int nonZero = Cv2.CountNonZero(mask);
                VoteForUniqueness(matches, mask);
                nonZero = Cv2.CountNonZero(mask);
                nonZero = VoteForSizeAndOrientation(keypoints2, keypoints1, matches, mask, 1.5f, 10);
                List<Point2f> obj = new List<Point2f>();
                List<Point2f> scene = new List<Point2f>();
                List<DMatch> goodMatchesList = new List<DMatch>();
                //iterate through the mask only pulling out nonzero items because they're matches
                MatIndexer<byte> maskIndexer = mask.GetGenericIndexer<byte>();
                for (int i = 0; i < mask.Rows; i++)
                {
                    if (maskIndexer[i] > 0)
                    {
                        obj.Add(keypoints1[matches[i][0].QueryIdx].Pt);
                        scene.Add(keypoints2[matches[i][0].TrainIdx].Pt);
                        goodMatchesList.Add(matches[i][0]);
                    }
                }
                List<Point2d> objPts = obj.ConvertAll(Point2fToPoint2d);
                List<Point2d> scenePts = scene.ConvertAll(Point2fToPoint2d);
                // A homography needs at least 4 point correspondences.
                if (nonZero >= 4)
                {
                    Mat homography = Cv2.FindHomography(objPts, scenePts, HomographyMethods.Ransac, 1.5, mask);
                    nonZero = Cv2.CountNonZero(mask);
                    if (homography != null && homography.Width > 0)
                    {
                        // Project img1's corners into img2's coordinate frame.
                        Point2f[] objCorners = { new Point2f(0, 0), new Point2f(img1.Cols, 0), new Point2f(img1.Cols, img1.Rows), new Point2f(0, img1.Rows) };
                        Point2d[] sceneCorners = MyPerspectiveTransform3(objCorners, homography);
                        //This is a good concat horizontal
                        using (Mat left = new Mat(img3, new Rect(0, 0, img1.Width, img1.Height)))
                        using (Mat right = new Mat(img3, new Rect(img1.Width, 0, img2.Width, img2.Height)))
                        {
                            img1.CopyTo(left);
                            img2.CopyTo(right);
                            byte[] maskBytes = new byte[mask.Rows * mask.Cols];
                            mask.GetArray(out maskBytes);
                            Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, goodMatchesList, img3, Scalar.All(-1), Scalar.All(-1), maskBytes, DrawMatchesFlags.NotDrawSinglePoints);
                            // Trace img1's projected outline in the right half
                            // (offset by img1.Cols because of the concat).
                            List<List<Point>> listOfListOfPoint2D = new List<List<Point>>();
                            List<Point> listOfPoint2D = new List<Point>();
                            listOfPoint2D.Add(new Point(sceneCorners[0].X + img1.Cols, sceneCorners[0].Y));
                            listOfPoint2D.Add(new Point(sceneCorners[1].X + img1.Cols, sceneCorners[1].Y));
                            listOfPoint2D.Add(new Point(sceneCorners[2].X + img1.Cols, sceneCorners[2].Y));
                            listOfPoint2D.Add(new Point(sceneCorners[3].X + img1.Cols, sceneCorners[3].Y));
                            listOfListOfPoint2D.Add(listOfPoint2D);
                            img3.Polylines(listOfListOfPoint2D, true, Scalar.LimeGreen, 2);
                            //This works too
                            //Cv2.Line(img3, scene_corners[0] + new Point2d(img1.Cols, 0), scene_corners[1] + new Point2d(img1.Cols, 0), Scalar.LimeGreen);
                            //Cv2.Line(img3, scene_corners[1] + new Point2d(img1.Cols, 0), scene_corners[2] + new Point2d(img1.Cols, 0), Scalar.LimeGreen);
                            //Cv2.Line(img3, scene_corners[2] + new Point2d(img1.Cols, 0), scene_corners[3] + new Point2d(img1.Cols, 0), Scalar.LimeGreen);
                            //Cv2.Line(img3, scene_corners[3] + new Point2d(img1.Cols, 0), scene_corners[0] + new Point2d(img1.Cols, 0), Scalar.LimeGreen);
                        }
                    }
                }
            }
        }
        return(img3);
    }
}
/// <summary>
/// Reads one character's worth of bits from the combined color QR mat,
/// sampling the channel that corresponds to the requested component.
/// </summary>
/// <param name="indexer">Indexer over the combined BGR QR mat.</param>
/// <param name="qrType">Component to read: QR_TYPE_RED, QR_TYPE_GREEN or QR_TYPE_BLUE.</param>
/// <param name="charCounter">Zero-based index of the character to read.</param>
/// <param name="bitsPerChar">Number of bits that encode one character.</param>
/// <returns>The bit string read; empty for an unknown type or an all-zero read.</returns>
private string ReadColorPixels(MatIndexer<Vec3b> indexer, string qrType, int charCounter, int bitsPerChar)
{
    // FIX: the original duplicated the whole read loop in three switch
    // branches; this folds them into one loop with a per-type channel pick.
    string tempStr = string.Empty;

    for (int i = charCounter * bitsPerChar; i < (charCounter * bitsPerChar) + bitsPerChar; i++)
    {
        // Ignore bit positions beyond the precomputed sample points.
        if (i < matSearchPixels.Count)
        {
            // Get pixel value
            Vec3b pixelValue = indexer[matSearchPixels[i].Y, matSearchPixels[i].X];

            // Select the channel intensity for the requested component.
            // Vec3b is BGR: Item0 = blue, Item1 = green, Item2 = red.
            byte intensity;
            switch (qrType)
            {
                case QR_TYPE_RED:
                    intensity = pixelValue.Item2;
                    break;
                case QR_TYPE_GREEN:
                    intensity = pixelValue.Item1;
                    break;
                case QR_TYPE_BLUE:
                    intensity = pixelValue.Item0;
                    break;
                default:
                    // Unknown component: nothing is read (matches original behavior).
                    return string.Empty;
            }

            // Max intensity encodes '1', min intensity '0'; anything else is skipped.
            if (intensity == LUM_INTENSITY_MAX)
            {
                tempStr += '1';
            }
            else if (intensity == LUM_INTENSITY_MIN)
            {
                tempStr += '0';
            }
        }
    }

    // If nothing was read
    if (tempStr == CHAR_ZEROS_7)
    {
        return("");
    }

    return(tempStr);
}
/// <summary>
/// Generates the QR mats for the given bit string. Grayscale mode produces a
/// single monochrome mat; color mode splits the message round-robin across
/// red, green and blue component mats and also emits their sum.
/// </summary>
/// <param name="inputMessage">Bit string ('0'/'1') to encode, 7 bits per character.</param>
/// <param name="qrCodeProps">QR geometry (image size, cell size, cells per dimension).</param>
/// <param name="colorMode">Which generation path to take.</param>
/// <returns>Generated images keyed by QR type, or null for an unknown mode.</returns>
public Dictionary<string, BitmapSource> GenerateQRCore(string inputMessage, QRCodeProperties qrCodeProps, QRColorMode colorMode)
{
    // Reset output containers and remember the message for later decoding.
    outputImages = new Dictionary<string, BitmapSource>();
    outputMats = new Dictionary<string, Mat>();
    matSearchPixels = new List<Point>();
    this.inputMessage = inputMessage;
    // Precompute the pixel positions later used when reading the QR back.
    CalculateSearchPoint(qrCodeProps);
    if (colorMode == QRColorMode.Grayscale)
    {
        // Generate empty mat and set all pixels to white
        qrMatMono = Mat.Zeros(new Size(qrCodeProps.ImgSize.Width, qrCodeProps.ImgSize.Height), MatType.CV_8UC1);
        //qrMat.SetTo(Scalar.White);
        // Get mat indexer
        MatOfByte mob1 = new MatOfByte(qrMatMono);
        MatIndexer<byte> indexerByte = mob1.GetIndexer();
        int stringIndex = -1;
        // Walk the QR cells row by row, consuming one message bit per cell.
        for (int y = 0; y < qrCodeProps.CellsPerDim * qrCodeProps.CellSize; y += qrCodeProps.CellSize)
        {
            for (int x = 0; x < qrCodeProps.CellsPerDim * qrCodeProps.CellSize; x += qrCodeProps.CellSize)
            {
                // If message is done reading
                if (++stringIndex + 1 > inputMessage.Length)
                {
                    break;
                }
                // If bit is 0, skip this cell
                if (inputMessage[stringIndex].Equals('0'))
                {
                    continue;
                }
                // If bit is 1, color the cell
                else if (inputMessage[stringIndex].Equals('1'))
                {
                    for (int i = y; i < y + qrCodeProps.CellSize; i++)
                    {
                        for (int j = x; j < x + qrCodeProps.CellSize; j++)
                        {
                            indexerByte[i, j] = LUM_INTENSITY_MAX;
                        }
                    }
                }
            }
        }
        // Add image and mat to output lists
        outputImages.Add(QR_TYPE_MONOCHROME, Utils.MatToImage(qrMatMono));
        outputMats.Add(QR_TYPE_MONOCHROME, qrMatMono);
        // Return images to UI
        return(outputImages);
    }
    else if (colorMode == QRColorMode.Color)
    {
        // Generate empty mats and fill with white
        Mat qrRedMat = Mat.Zeros(new Size(qrCodeProps.ImgSize.Width, qrCodeProps.ImgSize.Height), MatType.CV_8UC3);
        Mat qrGreenMat = Mat.Zeros(new Size(qrCodeProps.ImgSize.Width, qrCodeProps.ImgSize.Height), MatType.CV_8UC3);
        Mat qrBlueMat = Mat.Zeros(new Size(qrCodeProps.ImgSize.Width, qrCodeProps.ImgSize.Height), MatType.CV_8UC3);
        //qrCyanMat.SetTo(Scalar.White);
        //qrMagentaMat.SetTo(Scalar.White);
        //qrYellowMat.SetTo(Scalar.White);
        // Get mat indexers
        MatOfByte3 mobRed = new MatOfByte3(qrRedMat);
        MatOfByte3 mobGreen = new MatOfByte3(qrGreenMat);
        MatOfByte3 mobBlue = new MatOfByte3(qrBlueMat);
        MatIndexer<Vec3b> indexerMobRed = mobRed.GetIndexer();
        MatIndexer<Vec3b> indexerMobGreen = mobGreen.GetIndexer();
        MatIndexer<Vec3b> indexerMobBlue = mobBlue.GetIndexer();
        // Split message thrice: characters are dealt round-robin to the three
        // channels (char 0 -> red, char 1 -> green, char 2 -> blue, ...).
        // NOTE: integer division floors, so a trailing partial character is
        // dropped — mirrored by DecodeQRCode.
        int bitsPerChar = 7;
        int messageChars = inputMessage.Length / bitsPerChar;
        string messageForRed = string.Empty;
        string messageForGreen = string.Empty;
        string messageForBlue = string.Empty;
        for (int i = 0; i < messageChars; i++)
        {
            if (i % 3 == 0)
            {
                for (int j = 0; j < bitsPerChar; j++)
                {
                    messageForRed += inputMessage[(i * bitsPerChar) + j];
                }
            }
            else if (i % 3 == 1)
            {
                for (int j = 0; j < bitsPerChar; j++)
                {
                    messageForGreen += inputMessage[(i * bitsPerChar) + j];
                }
            }
            else if (i % 3 == 2)
            {
                for (int j = 0; j < bitsPerChar; j++)
                {
                    messageForBlue += inputMessage[(i * bitsPerChar) + j];
                }
            }
        }
        // Paint each component mat with its share of the message.
        indexerMobRed = WriteColorComponent(messageForRed, qrCodeProps, indexerMobRed, COLOR_RED);
        indexerMobGreen = WriteColorComponent(messageForGreen, qrCodeProps, indexerMobGreen, COLOR_GREEN);
        indexerMobBlue = WriteColorComponent(messageForBlue, qrCodeProps, indexerMobBlue, COLOR_BLUE);
        // Per-pixel sum of the three disjoint components forms the combined QR.
        Mat combinedMat = qrRedMat + qrGreenMat + qrBlueMat;
        // Add image and mats to output lists
        outputImages.Add(QR_TYPE_COMBINED, Utils.MatToImage(combinedMat));
        outputImages.Add(QR_TYPE_RED, Utils.MatToImage(qrRedMat));
        outputImages.Add(QR_TYPE_GREEN, Utils.MatToImage(qrGreenMat));
        outputImages.Add(QR_TYPE_BLUE, Utils.MatToImage(qrBlueMat));
        outputMats.Add(QR_TYPE_COMBINED, combinedMat);
        outputMats.Add(QR_TYPE_RED, qrRedMat);
        outputMats.Add(QR_TYPE_GREEN, qrGreenMat);
        outputMats.Add(QR_TYPE_BLUE, qrBlueMat);
        return(outputImages);
    }
    return(null);
}
/// <summary>
/// Entry point: runs native form extraction over the sample images,
/// optionally showing debug windows. One native handle is kept per thread
/// via ThreadLocal since the workload can run in parallel.
/// </summary>
static void Main(string[] args)
{
    // Used to check memory leak
    //for (int i = 0; i < 1000; i++)
    using (var state = new ThreadLocal<FormExtractionHandle>(NativeFormExtraction.CreateFormExtraction))
    {
        GC.Collect();
        List<string> pathFiles = GetSamplesAndCleanUpResults();
        // For testing:
        pathFiles = pathFiles.Where(m => m.Contains("form9")).ToList();
        int numThread = 1; // Environment.ProcessorCount;
        var showDebugImage = true; // If true, you may want to use: numThread = 1.
        Parallel.ForEach(pathFiles, new ParallelOptions
        {
            MaxDegreeOfParallelism = numThread
        }, pathFile =>
        {
            // Per-thread native handle, created lazily by ThreadLocal.
            FormExtractionHandle handle = state.Value;
            NativeFormExtraction.SetOptions(handle, 800, 25, 15, 5, 20000, 50000, showDebugImage);
            var resizeWidth = 800;
            var orig = new Mat(pathFile);
            var image = new Mat(pathFile, ImreadModes.GrayScale);
            Cv2.AdaptiveThreshold(image, image, 255, AdaptiveThresholdTypes.MeanC, ThresholdTypes.Binary, 9, 4);
            // Resize image if too large.
            if (image.Width > resizeWidth)
            {
                var height = resizeWidth * image.Height / image.Width;
                Cv2.Resize(image, image, new Size(resizeWidth, height));
            }
            Cv2.BitwiseNot(image, image);
            Cv2.Dilate(image, image, Cv2.GetStructuringElement(MorphShapes.Cross, new Size(2, 2)));
            MatOfByte mat = new MatOfByte(image);
            MatIndexer<byte> indexer = mat.GetIndexer();
            var row = image.Height;
            var col = image.Width;
            Mat newImage = new Mat(row, col, MatType.CV_8UC3);
            newImage.SetTo(Scalar.Black);
            // We must determine if it "may" be an interesting blob.
            Stopwatch watch = new Stopwatch();
            watch.Start();
            // Flatten pixels column-major (y + x * row) — the layout the
            // native RunFormExtraction expects.
            int[] imgData = new int[row * col];
            for (int y = 0; y < row; y++)
            {
                for (int x = 0; x < col; x++)
                {
                    imgData[y + x * row] = indexer[y, x];
                }
            }
            var result = NativeFormExtraction.RunFormExtraction(handle, imgData, row, col);
            if (result != 0)
            {
                throw new Exception("Unknown error occured with the function: RunFormExtraction");
            }
            watch.Stop();
            Console.WriteLine("Duration: " + watch.Elapsed);
            if (showDebugImage)
            {
                var debugImg = NativeFormExtraction.GetDebugImage(handle, row * col);
                var img = CreateImage(debugImg, row, col, hasColor: true);
                Cv2.BitwiseOr(newImage, img, newImage);
                Cv2.BitwiseNot(image, image);
                int width = 400;
                var height = width * image.Height / image.Width;
                Cv2.Resize(orig, orig, new Size(width, height));
                Cv2.Resize(image, image, new Size(width, height));
                Cv2.Resize(newImage, newImage, new Size(width, height));
                using (new Window("orig", orig))
                using (new Window("pre", image))
                using (new Window("post", newImage))
                {
                    Cv2.WaitKey();
                    Cv2.DestroyAllWindows();
                }
            }
            // Dispose.
            // NOTE(review): these are skipped if an exception is thrown above;
            // consider using blocks — confirm before restructuring.
            orig.Dispose();
            image.Dispose();
            newImage.Dispose();
            mat.Dispose();
        });
    }
    Console.WriteLine("End");
    Console.ReadLine();
}
/// <summary>
/// Runs form extraction on an image and writes each detected character box
/// out as its own JPG, optionally skipping boxes that look empty (mostly
/// white pixels after thresholding).
/// </summary>
/// <param name="pathFile">Path of the source image.</param>
/// <param name="resultDir">Directory receiving the per-character JPGs.</param>
/// <param name="removeEmptyBoxes">When true, boxes that are ≥ 95% white are not written.</param>
public static void ExtractCharacters(string pathFile, string resultDir, bool removeEmptyBoxes = false)
{
    int groupId = 0;
    try
    {
        var filename = Path.GetFileNameWithoutExtension(pathFile);
        var result = FormExtraction.ProcessImage(pathFile);
        if (result.ReturnCode != 0)
        {
            throw new Exception("ReturnCode: " + result.ReturnCode);
        }

        Console.WriteLine("Processing: " + Path.GetFileNameWithoutExtension(pathFile) + ", Duration: " + result.Duration);

        using (var image = new Mat(pathFile, ImreadModes.GrayScale))
        {
            Cv2.AdaptiveThreshold(image, image, 255, AdaptiveThresholdTypes.MeanC, ThresholdTypes.Binary, 9, 4);

            // TODO: we should not resize. (keep maximum quality)
            if (image.Width > 800)
            {
                var height = 800 * image.Height / image.Width;
                Cv2.Resize(image, image, new Size(800, height));
            }

            // You may want to use ".OrderBy(m => m.Min(x => x.TopLeft.Y)).Take(1)" to select the first box on top.
            foreach (var group in result.Boxes)
            {
                groupId++;
                int characterNum = 1;
                foreach (var box in group)
                {
                    // Axis-aligned bounding rectangle of the (possibly skewed) box.
                    var xTopLeft = Math.Min(box.TopLeft.X, box.BottomLeft.X);
                    var yTopLeft = Math.Min(box.TopLeft.Y, box.TopRight.Y);
                    var xBottomRight = Math.Max(box.TopRight.X, box.BottomRight.X);
                    var yBottomRight = Math.Max(box.BottomLeft.Y, box.BottomRight.Y);
                    var estimatedWidth = xBottomRight - xTopLeft;
                    var estimatedHeight = yBottomRight - yTopLeft;

                    try
                    {
                        using (var subImg = new Mat(image, new Rect(xTopLeft, yTopLeft, estimatedWidth, estimatedHeight)))
                        {
                            // Ignore a small border on every side when sampling.
                            // FIX: Y bounds previously used borderPixelX, and the
                            // X upper bound used borderPixelY (no visible effect
                            // while both are 4, but latent if either changes).
                            int borderPixelX = 4;
                            int borderPixelY = 4;
                            var minY = Math.Min(borderPixelY, subImg.Height);
                            var maxY = Math.Max(0, subImg.Height - borderPixelY);
                            var minX = Math.Min(borderPixelX, subImg.Width);
                            var maxX = Math.Max(0, subImg.Width - borderPixelX);

                            var outputFilename = filename + "_g-" + groupId + "_n-" + characterNum;

                            if (removeEmptyBoxes)
                            {
                                // Basic empty box detection.
                                int whitePixelCounter = 0;
                                int pixelCounter = 0;

                                // FIX: the indexer wrapper is now disposed on every
                                // path (it previously leaked when this branch was skipped).
                                using (MatOfByte3 mat3 = new MatOfByte3(subImg))
                                {
                                    MatIndexer<Vec3b> indexer = mat3.GetIndexer();
                                    for (int y = minY; y <= maxY; y++)
                                    {
                                        for (int x = minX; x <= maxX; x++)
                                        {
                                            var pixel = indexer[y, x].Item0; // Grayscale only

                                            if (pixel == 255)
                                            {
                                                whitePixelCounter++;
                                            }
                                            pixelCounter++;
                                        }
                                    }
                                }

                                // NOTE(review): for degenerate boxes pixelCounter can be 0;
                                // the resulting DivideByZeroException is swallowed by the
                                // catch below (matching original behavior) — confirm intent.
                                int percentRatio = 100 * whitePixelCounter / pixelCounter;

                                // Exclude empty boxes.
                                if (percentRatio < 95)
                                {
                                    Cv2.ImWrite(resultDir + Path.DirectorySeparatorChar + outputFilename + ".jpg", subImg);
                                }
                            }
                            else
                            {
                                Cv2.ImWrite(resultDir + Path.DirectorySeparatorChar + outputFilename + ".jpg", subImg);
                            }
                        }
                    }
                    catch (Exception)
                    {
                        // Ignore it. Outside image.
                    }

                    characterNum++;
                }
            }
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine("Processing: " + Path.GetFileNameWithoutExtension(pathFile) + ", Error: " + ex.Message);
        Console.WriteLine(ex.StackTrace);
        Console.ReadLine();
    }
}