/// <summary>
/// Runs the YOLO model over a base64-encoded image and returns the detected bounding boxes.
/// </summary>
/// <param name="base64Image">The input image encoded as a base64 string.</param>
/// <returns>Bounding boxes parsed from the model output.</returns>
public static IList<BoundingBox> ObtainBoundingBoxes(string base64Image)
{
    // Dispose the decoded Mat (it was leaked before).
    using (var opencvImage = Image.ConvertFromBase64ToMat(base64Image))
    {
        // CHW layout; 'channels' is a file-level constant (presumably 3 for BGR — confirm).
        var tensor = new DenseTensor<float>(new int[] { channels, opencvImage.Height, opencvImage.Width });
        using (var mat = new MatOfByte3(opencvImage))
        {
            var indexer = mat.GetIndexer();
            for (int y = 0; y < opencvImage.Height; y++)
            {
                for (int x = 0; x < opencvImage.Width; x++)
                {
                    // OpenCV stores BGR; write channels in R,G,B order.
                    Vec3b color = indexer[y, x];
                    tensor[0, y, x] = (float)color.Item2;
                    tensor[1, y, x] = (float)color.Item1;
                    tensor[2, y, x] = (float)color.Item0;
                }
            }
        }
        // Flatten to one dimension before feeding the model.
        var transform = tensor.Reshape(new ReadOnlySpan<int>(new[] { channels * opencvImage.Height * opencvImage.Width }));
        var yoloParser = new YoloOutputParser();
        var yoloModel = YoloModel.Instance;
        var results = yoloModel.Evaluate(new[] { transform });
        return yoloParser.ParseOutputs(results.First().ToArray());
    }
}
/// <summary>
/// Lazily computes and caches the per-channel median color of this block's pixel data.
/// </summary>
/// <returns>A Scalar of (median channel 0, median channel 1, median channel 2).</returns>
public Scalar getColor()
{
    // Val0 == -1 marks "not yet computed"; otherwise return the cached value.
    if (average.Val0 != -1)
    {
        return average;
    }
    // Dispose the typed wrapper (it was leaked before).
    using (MatOfByte3 mat3 = new MatOfByte3(data))
    {
        var indexer = mat3.GetIndexer();
        // Names suggest an HLS image (Item0=H, Item1=L, Item2=S) — TODO confirm upstream conversion.
        ArrayList hue = new ArrayList();
        ArrayList light = new ArrayList();
        ArrayList sat = new ArrayList();
        for (int y = 0; y < data.Height; y++)
        {
            for (int x = 0; x < data.Width; x++)
            {
                Vec3b color = indexer[y, x];
                hue.Add(color.Item0);
                light.Add(color.Item1);
                sat.Add(color.Item2);
            }
        }
        byte medHue = getMedian(hue);
        byte medLight = getMedian(light);
        byte medSat = getMedian(sat);
        average = new Scalar(medHue, medLight, medSat);
        return average;
    }
}
/// <summary>
/// Pixel-access demo: swaps the blue and red channels of lena.jpg in place
/// via a typed indexer, then displays the result until a key is pressed.
/// </summary>
public static void AccessPixel()
{
    using (Mat mat = new Mat("Images/lena.jpg", ImreadModes.Color))
    {
        // Pixels are stored in BGR order.
        using (MatOfByte3 mat3 = new MatOfByte3(mat))
        {
            var indexer = mat3.GetIndexer();
            int rows = mat.Height;
            int cols = mat.Width;
            for (int r = 0; r < rows; r++)
            {
                for (int c = 0; c < cols; c++)
                {
                    Vec3b px = indexer[r, c];
                    byte blue = px.Item0;
                    px.Item0 = px.Item2;   // B <-- R
                    px.Item2 = blue;       // R <-- B
                    indexer[r, c] = px;
                }
            }
        }
        using (new Window("window", mat))
        {
            Cv2.WaitKey();
        }
    }
}
/// <summary>
/// Runs the YOLO model over a base64-encoded image and returns the detected bounding boxes.
/// </summary>
/// <param name="base64Image">The input image encoded as a base64 string.</param>
/// <returns>Bounding boxes parsed from the model output.</returns>
public static IList<YoloBoundingBox> ObtainBoundingBoxes(string base64Image)
{
    var opencvImage = Image.ConvertFromBase64ToMat(base64Image);
    // BUG FIX: dimensions must be { channels, height, width } to match the [c, y, x]
    // indexing below — the old { 3, Width, Height } order broke non-square images.
    Tensor<float> imageData = new DenseTensor<float>(new[] { 3, opencvImage.Height, opencvImage.Width });
    // Dispose the typed wrapper (it was leaked before).
    using (MatOfByte3 mat3 = new MatOfByte3(opencvImage))
    {
        var indexer = mat3.GetIndexer();
        for (int y = 0; y < opencvImage.Height; y++)
        {
            for (int x = 0; x < opencvImage.Width; x++)
            {
                // BGR -> RGB channel swap.
                Vec3b color = indexer[y, x];
                imageData[0, y, x] = (float)color.Item2;
                imageData[1, y, x] = (float)color.Item1;
                imageData[2, y, x] = (float)color.Item0;
            }
        }
    }
    var yoloParser = new YoloParser();
    // Flatten using the actual image size instead of the hard-coded 3 * 416 * 416.
    var resultTransform = imageData.Reshape(new ReadOnlySpan<int>(new[] { 3 * opencvImage.Height * opencvImage.Width }));
    var yoloModel = new YoloModel();
    var results = yoloModel.Evaluate(new[] { resultTransform });
    var boundingBoxes = yoloParser.ParseOutputs(results.First().ToArray());
    return boundingBoxes;
}
/// <summary>
/// Calculate the Block Mean Value Based Image Perceptual Hash
/// </summary>
/// <param name="image">image to hash (OpenCV Mat); NOTE: resized in place to 8*bits x 8*bits when needed</param>
/// <param name="bits">number of blocks to divide the image by horizontally and vertically</param>
/// <returns>hash bitstring</returns>
/// <remarks>
/// based on blockhash_quick https://github.com/commonsmachinery/blockhash/blob/master/blockhash.c
/// </remarks>
/// <seealso cref="http://dx.doi.org/10.1109/IIH-MSP.2006.265125"/>
private static string BlockHash(Mat image, int bits = 8)
{
    int x, y, ix, iy;
    int value;
    int block_width;
    int block_height;
    int[] blocks;

    // use smaller image to speedup calculation (default 64x64)
    if (image.Width != 8 * bits || image.Height != 8 * bits)
    {
        var smaller = new OpenCvSharp.Size(8 * bits, 8 * bits);
        var simage = image.Resize(smaller, 0, 0, InterpolationFlags.Linear);
        simage.CopyTo(image);
        simage.Dispose();
    }

    block_width = image.Width / bits;
    block_height = image.Height / bits;
    blocks = new int[bits * bits];

    // Dispose the typed wrapper (it was leaked before).
    using (var image_byte3 = new MatOfByte3(image))
    {
        var indexer = image_byte3.GetIndexer();
        for (y = 0; y < bits; y++)
        {
            for (x = 0; x < bits; x++)
            {
                // Sum B+G+R over the whole block.
                value = 0;
                for (iy = 0; iy < block_height; iy++)
                {
                    // BUG FIX: 'value' was reset to 0 at the top of this loop, so only
                    // the LAST row of each block contributed to the block sum —
                    // diverging from the blockhash_quick reference implementation.
                    for (ix = 0; ix < block_width; ix++)
                    {
                        var iix = (x * block_width + ix);
                        var iiy = (y * block_height + iy);
                        var px = indexer[iiy, iix];
                        value += px.Item2 + px.Item1 + px.Item0;
                    }
                }
                blocks[y * bits + x] = value;
            }
        }
    }

    var hash = translate_blocks_to_bits(blocks, bits * bits, block_width * block_height);
    return hash;
}
/// <summary>
/// Reads the BGR color of a single pixel from a CV_8UC3 image.
/// </summary>
/// <param name="image">Source image; must be of type CV_8UC3.</param>
/// <param name="p">Pixel coordinates (X = column, Y = row).</param>
/// <returns>The pixel's Vec3b value.</returns>
/// <exception cref="ArgumentException">The image is not CV_8UC3.</exception>
public Vec3b GetPixelColor(Mat image, Point p)
{
    if (image.Type() != MatType.CV_8UC3)
    {
        throw new ArgumentException("Only CV_8UC3 images are supported...");
    }
    // Dispose the typed wrapper (it was leaked before); Vec3b is a value copy, so
    // returning it after disposal is safe.
    using (MatOfByte3 mob3 = new MatOfByte3(image))
    {
        var indexer = mob3.GetIndexer();
        return indexer[p.Y, p.X];
    }
}
/// <summary>
/// Copies this Mat's pixels into a flat float array, interleaved per pixel,
/// optionally swapping the channel order from BGR to RGB.
/// </summary>
/// <param name="self">Source image.</param>
/// <param name="buffer">Optional destination buffer; a new one is allocated when null.</param>
/// <param name="bgr2rgb">When true, channels are written in R,G,B order instead of B,G,R.</param>
/// <returns>The filled float array.</returns>
/// <exception cref="ArgumentOutOfRangeException">The supplied buffer is too small.</exception>
public static float[] GetArray(this Mat self, float[] buffer = null, bool bgr2rgb = true)
{
    int width = self.Width;
    int height = self.Height;
    float[] dest;
    if (buffer == null)
    {
        dest = new float[width * height * self.Channel];
    }
    else if (buffer.Length < width * height * self.Channel)
    {
        throw new ArgumentOutOfRangeException(nameof(buffer));
    }
    else
    {
        dest = buffer;
    }
    using (MatOfByte3 typed = new MatOfByte3())
    {
        self.CopyTo(typed);
        var indexer = typed.GetIndexer();
        int pos = 0;
        for (int row = 0; row < height; row++)
        {
            for (int col = 0; col < width; col++)
            {
                Vec3b px = indexer[row, col];
                if (bgr2rgb)
                {
                    dest[pos++] = px.Item2;
                    dest[pos++] = px.Item1;
                    dest[pos++] = px.Item0;
                }
                else
                {
                    dest[pos++] = px.Item0;
                    dest[pos++] = px.Item1;
                    dest[pos++] = px.Item2;
                }
            }
        }
    }
    return dest;
}
// Copies in-memory pixels out of an OpenCV Mat into a PixData that Tesseract / Leptonica understands.
// Each destination row starts at Data + y * WordsPerLine (rows measured in 32-bit words);
// every pixel is packed into a 4-byte word with a constant 255 alpha.
// NOTE(review): Item0/Item1/Item2 are passed straight through to EncodeAsRGBA, so the
// BGR-vs-RGB mapping is whatever BitmapHelper.EncodeAsRGBA expects — confirm there.
private unsafe void TransferData(MatOfByte3 mat3, PixData pixData)
{
    var indexer = mat3.GetIndexer();
    for (int y = 0; y < mat3.Height; y++)
    {
        // Pointer to the start of destination row y.
        uint *pixLine = (uint *)pixData.Data + (y * pixData.WordsPerLine);
        for (int x = 0; x < mat3.Width; x++)
        {
            Vec3b color = indexer[y, x];
            PixData.SetDataFourByte(pixLine, x, BitmapHelper.EncodeAsRGBA(color.Item0, color.Item1, color.Item2, 255));
        }
    }
}
/// <summary>
/// Paints this rectangle's pixel data into the target mat, decoding one pixel
/// at a time from the raw buffer starting at m_offset.
/// </summary>
public override void Draw(IVncPixelGetter a_pixelGetter, MatOfByte3 a_mat)
{
    var indexer = a_mat.GetIndexer();
    int bytesPerPixel = a_pixelGetter.GetPixelByteSize();
    int cursor = m_offset;
    int bottom = Y + Height;
    int right = X + Width;
    for (int row = Y; row < bottom; ++row)
    {
        for (int col = X; col < right; ++col)
        {
            indexer[row, col] = a_pixelGetter.GetPixelVec3b(m_pixelData, cursor);
            cursor += bytesPerPixel;
        }
    }
}
/// <summary>
/// Paints one hextile-encoded tile into the target mat, decoding pixels
/// sequentially from RawPixel starting at Offset.
/// </summary>
public void DrawHexTile(int a_x, int a_y, int a_width, int a_height, IVncPixelGetter a_pixelGetter, MatOfByte3 a_mat)
{
    var indexer = a_mat.GetIndexer();
    int bytesPerPixel = a_pixelGetter.GetPixelByteSize();
    int cursor = Offset;
    int bottom = a_y + a_height;
    int right = a_x + a_width;
    for (int row = a_y; row < bottom; ++row)
    {
        for (int col = a_x; col < right; ++col)
        {
            indexer[row, col] = a_pixelGetter.GetPixelVec3b(RawPixel, cursor);
            cursor += bytesPerPixel;
        }
    }
}
/// <summary>
/// Flattens an image into a row-major boolean array, one entry per pixel,
/// using GetIndexerValue to classify each pixel.
/// </summary>
/// <param name="mat">Source image (read as 3-channel bytes).</param>
/// <returns>Array of mat.Height * mat.Width booleans, indexed as width * y + x.</returns>
public static bool[] BrainInputFromMat(Mat mat)
{
    var result = new bool[mat.Height * mat.Width];
    // Dispose the typed wrapper (it was leaked before).
    using (var byte3Col = new MatOfByte3(mat))
    {
        var indexer = byte3Col.GetIndexer();
        // Removed the unused probe read of indexer[0, 1], which served no purpose
        // and could throw on a 1-pixel-wide mat.
        for (int x = 0; x < mat.Width; x++)
        {
            for (int y = 0; y < mat.Height; y++)
            {
                result[(mat.Width * y) + x] = GetIndexerValue(indexer[y, x]);
            }
        }
    }
    return result;
}
/// <summary>
/// Converts the image to grayscale in place, setting every channel of each
/// pixel to the average of its three channels.
/// </summary>
/// <param name="mat">Image modified in place.</param>
public static void GrayScale(Mat mat)
{
    // Dispose the typed wrapper (it was leaked before).
    using (var mat3 = new MatOfByte3(mat))
    {
        var indexer = mat3.GetIndexer();
        for (int y = 0; y < mat.Height; y++)
        {
            for (int x = 0; x < mat.Width; x++)
            {
                Vec3b color = indexer[y, x];
                // Compute the average once instead of three times per pixel.
                byte gray = (byte)((color.Item0 + color.Item1 + color.Item2) / 3);
                color.Item0 = gray;
                color.Item1 = gray;
                color.Item2 = gray;
                indexer[y, x] = color;
            }
        }
    }
}
/// <summary>
/// Splits a combined BGR QR image into three single-channel mats (red, green, blue)
/// and returns them as BitmapSources keyed by output type.
/// </summary>
/// <param name="combinedMat">The combined 3-channel input image.</param>
/// <returns>Dictionary mapping QR_TYPE_*_OUT keys to the per-channel images.</returns>
public Dictionary<string, BitmapSource> SplitColoredQR(Mat combinedMat)
{
    Dictionary<string, BitmapSource> outputSplitImages = new Dictionary<string, BitmapSource>();
    Size size = new Size(combinedMat.Width, combinedMat.Height);
    Mat redComponent = Mat.Zeros(size, MatType.CV_8UC1);
    Mat grnComponent = Mat.Zeros(size, MatType.CV_8UC1);
    Mat bluComponent = Mat.Zeros(size, MatType.CV_8UC1);

    // Typed wrappers are now disposed (they were leaked before); the old unused
    // 'depth' local was removed.
    using (MatOfByte3 mobComb = new MatOfByte3(combinedMat))
    using (MatOfByte mobRed = new MatOfByte(redComponent))
    using (MatOfByte mobGrn = new MatOfByte(grnComponent))
    using (MatOfByte mobBlu = new MatOfByte(bluComponent))
    {
        MatIndexer<Vec3b> indexerComb = mobComb.GetIndexer();
        MatIndexer<byte> indexerRed = mobRed.GetIndexer();
        MatIndexer<byte> indexerGrn = mobGrn.GetIndexer();
        MatIndexer<byte> indexerBlu = mobBlu.GetIndexer();
        for (int y = 0; y < combinedMat.Height; y++)
        {
            for (int x = 0; x < combinedMat.Width; x++)
            {
                // Read the combined pixel once (was three indexer hits per pixel).
                Vec3b px = indexerComb[y, x];
                indexerRed[y, x] = px.Item2;   // red channel intensity
                indexerGrn[y, x] = px.Item1;   // green channel intensity
                indexerBlu[y, x] = px.Item0;   // blue channel intensity
            }
        }
    }
    outputSplitImages.Add(QR_TYPE_RED_OUT, Utils.MatToImage(redComponent));
    outputSplitImages.Add(QR_TYPE_GREEN_OUT, Utils.MatToImage(grnComponent));
    outputSplitImages.Add(QR_TYPE_BLUE_OUT, Utils.MatToImage(bluComponent));
    return outputSplitImages;
}
/// <summary>
/// Converts a BGR Mat into a dlib-style Array2D of RGB pixels.
/// </summary>
private Array2D<RgbPixel> ToArray(Mat mat)
{
    var array = new Array2D<RgbPixel>(mat.Rows, mat.Cols);
    using (var typed = new MatOfByte3(mat))
    {
        var indexer = typed.GetIndexer();
        for (var row = 0; row < array.Rows; row++)
        {
            var destRow = array[row];
            for (var col = 0; col < array.Columns; col++)
            {
                // Mat stores BGR; RgbPixel wants R,G,B.
                var bgr = indexer[row, col];
                destRow[col] = new RgbPixel(bgr.Item2, bgr.Item1, bgr.Item0);
            }
        }
    }
    return array;
}
/// <summary>
/// Faster pixel access demo: swaps the B and R channels of Lenna in place
/// through a typed MatOfByte3 indexer. (Display code is intentionally disabled.)
/// </summary>
private void TypeSpecificMat()
{
    using (Mat mat = new Mat(FilePath.Lenna, LoadMode.Color))
    {
        MatOfByte3 mat3 = new MatOfByte3(mat);
        var indexer = mat3.GetIndexer();
        int rows = mat.Height;
        int cols = mat.Width;
        for (int r = 0; r < rows; r++)
        {
            for (int c = 0; c < cols; c++)
            {
                Vec3b px = indexer[r, c];
                Swap(ref px.Item0, ref px.Item2);   // B <-> R
                indexer[r, c] = px;
            }
        }
        //Cv2.ImShow("TypeSpecificMat", mat);
        //Cv2.WaitKey(0);
        //Cv2.DestroyAllWindows();
    }
}
/// <summary>
/// Binarizes the image in place: each pixel becomes pure black or pure white
/// depending on whether its channel average is below or above the threshold.
/// </summary>
/// <param name="mat">Image modified in place.</param>
/// <param name="threshold">Normalized threshold in [0, 1], scaled to the byte range.</param>
public static void Binarization(Mat mat, float threshold)
{
    // Dispose the typed wrapper (it was leaked before).
    using (var mat3 = new MatOfByte3(mat))
    {
        var indexer = mat3.GetIndexer();
        var bthreshold = threshold * byte.MaxValue;
        for (int y = 0; y < mat.Height; y++)
        {
            for (int x = 0; x < mat.Width; x++)
            {
                Vec3b color = indexer[y, x];
                var sum = color.Item0 + color.Item1 + color.Item2;
                // Compute the thresholded value once instead of three times per pixel.
                byte binary = (byte)(sum / 3) <= bthreshold ? byte.MinValue : byte.MaxValue;
                color.Item0 = binary;
                color.Item1 = binary;
                color.Item2 = binary;
                indexer[y, x] = color;
            }
        }
    }
}
/// <summary>
/// Converts a BGR Mat into a row-major array of normalized RGB colors
/// (each channel scaled from [0, 255] to [0, 1]).
/// </summary>
/// <param name="mat">Source image.</param>
/// <returns>Array of width * height colors, indexed as y * width + x.</returns>
public static Color[] ConvertMatToColorArray(Mat mat)
{
    var width = mat.Width;
    var height = mat.Height;   // also fixed the old 'heigth' misspelling
    var colorArray = new Color[width * height];
    // Dispose the typed wrapper (it was leaked before).
    using (var mat3b = new MatOfByte3(mat))
    {
        var indexer = mat3b.GetIndexer();
        for (var y = 0; y < height; y++)
        {
            for (var x = 0; x < width; x++)
            {
                // Read the pixel once (was three indexer hits per pixel).
                var px = indexer[y, x];
                var newColor = new Color();
                newColor.r = px.Item2 / 255.0f;   // BGR -> RGB
                newColor.g = px.Item1 / 255.0f;
                newColor.b = px.Item0 / 255.0f;
                colorArray[y * width + x] = newColor;
            }
        }
    }
    return colorArray;
}
/// <summary>
/// Convert an image to a 1-dimensional float array. All color frames
/// are laid out sequentially, one after the other (planar layout).
/// </summary>
/// <param name="mat">The input image as an OpenCv Mat object.</param>
/// <param name="offsets">Offsets to apply to each color channel.</param>
/// <returns>An 1-dimensional float array containing the image data.</returns>
public static float[] FlattenByChannel(Mat mat, float[] offsets)
{
    var pixelCount = mat.Size().Height * mat.Size().Width;
    var result = new float[pixelCount * 3];
    using (var typed = new MatOfByte3(mat))
    {
        var indexer = typed.GetIndexer();
        var index = 0;
        for (int row = 0; row < mat.Height; row++)
        {
            for (int col = 0; col < mat.Width; col++)
            {
                // Channel c of pixel i lands at c * pixelCount + i.
                var px = indexer[row, col];
                result[index] = px.Item0 - offsets[0];
                result[index + pixelCount] = px.Item1 - offsets[1];
                result[index + 2 * pixelCount] = px.Item2 - offsets[2];
                index++;
            }
        }
    }
    return result;
}
/// <summary>
/// Serializes this rectangle as a Raw-encoded VNC update body: big-endian
/// x, y, width, height and encode type, followed by one B,G,R,255 quadruple per pixel.
/// </summary>
public void WriteBackwardBody(MatOfByte3 a_mat, MemoryStream a_output)
{
    // Header: 12 == x(2) + y(2) + w(2) + h(2) + encodeType(4)
    a_output.Write(BigEndianBitConverter.GetBytes(X), 0, 2);
    a_output.Write(BigEndianBitConverter.GetBytes(Y), 0, 2);
    a_output.Write(BigEndianBitConverter.GetBytes(Width), 0, 2);
    a_output.Write(BigEndianBitConverter.GetBytes(Height), 0, 2);
    a_output.Write(BigEndianBitConverter.GetBytes((Int32)VncEnum.EncodeType.Raw), 0, 4);
    var indexer = a_mat.GetIndexer();
    int bottom = Y + Height;
    int right = X + Width;
    for (int row = Y; row < bottom; ++row)
    {
        for (int col = X; col < right; ++col)
        {
            var px = indexer[row, col];
            a_output.WriteByte(px.Item0);
            a_output.WriteByte(px.Item1);
            a_output.WriteByte(px.Item2);
            a_output.WriteByte(255);   // constant fourth byte
        }
    }
}
/// <summary>
/// Flattens an image into a channel-first float array: all channel-0 values,
/// then channel-1, then channel-2, each shifted down by the matching offset.
/// </summary>
/// <param name="mat">Source image.</param>
/// <param name="offsets">Per-channel offsets subtracted from each value.</param>
/// <returns>A float array of length pixels * 3 in planar layout.</returns>
static float[] convert_to_channel_first(Mat mat, float[] offsets)
{
    var num_pixels = mat.Size().Height * mat.Size().Width;
    float[] result = new float[num_pixels * 3];
    // 'using' replaces the old manual Dispose/null so the wrapper is released
    // even if an exception is thrown mid-loop (and matches FlattenByChannel's style).
    using (MatOfByte3 mat3 = new MatOfByte3(mat))
    {
        var indexer = mat3.GetIndexer();
        var pos = 0;
        for (int y = 0; y < mat.Height; y++)
        {
            for (int x = 0; x < mat.Width; x++)
            {
                var color = indexer[y, x];
                result[pos] = color.Item0 - offsets[0];
                result[pos + num_pixels] = color.Item1 - offsets[1];
                result[pos + 2 * num_pixels] = color.Item2 - offsets[2];
                pos++;
            }
        }
    }
    return result;
}
/// <summary>
/// Returns a modified copy of the input where pixels in a heuristic color band
/// (50 &lt; Item2 &lt; 110 and 10 &lt; Item1 &lt; 100) are pushed to Item2=255, Item1=0.
/// </summary>
/// <param name="mat">Source image; no longer modified.</param>
/// <returns>The altered copy.</returns>
private Mat AlterMat(Mat mat)
{
    var copy = new Mat();
    mat.CopyTo(copy);
    // BUG FIX: wrap the copy, not the original — the old code wrapped 'mat',
    // mutating the caller's image and leaving the defensive 'copy' unused.
    var fasterCopy = new MatOfByte3(copy);
    var indexer = fasterCopy.GetIndexer();
    for (int row = 0; row < fasterCopy.Height; row++)
    {
        for (int col = 0; col < fasterCopy.Width; col++)
        {
            var pixel = indexer[row, col];
            if (50 < pixel.Item2 && pixel.Item2 < 110 && 10 < pixel.Item1 && pixel.Item1 < 100)
            {
                pixel.Item2 = 255;
                pixel.Item1 = 0;
            }
            indexer[row, col] = pixel;
        }
    }
    return fasterCopy;
}
/// <summary>
/// Collects the coordinates (shifted by <paramref name="Offset"/>) of every pixel
/// whose first channel equals <paramref name="Value"/>.
/// </summary>
/// <param name="Dst">Image to scan.</param>
/// <param name="Value">Channel-0 value to match.</param>
/// <param name="Offset">Translation added to every matching coordinate.</param>
/// <returns>List of matching points.</returns>
private List<Point> getPoints(Mat Dst, int Value, Point Offset)
{
    List<Point> points = new List<Point>();
    // Removed the unused 'nl'/'nc' locals; dispose the typed wrapper (was leaked).
    using (MatOfByte3 mat3 = new MatOfByte3(Dst)) // cv::Mat_<cv::Vec3b>
    {
        var indexer = mat3.GetIndexer();
        for (int y = 0; y < Dst.Height; y++)
        {
            for (int x = 0; x < Dst.Width; x++)
            {
                Vec3b color = indexer[y, x];
                if (color.Item0 == Value)
                {
                    points.Add(new Point(x + Offset.X, y + Offset.Y));
                }
            }
        }
    }
    return points;
}
/// <summary>
/// Clones the white-light image and recolors every white (255) pixel of the
/// edge mask purple on the clone.
/// </summary>
/// <param name="whiteLightImage">Base image; cloned, not modified.</param>
/// <param name="edgeMaskImage">Grayscale mask where 255 marks outline pixels.</param>
/// <param name="mergedImage">The clone with purple outlines drawn in.</param>
/// <returns>true on success; false if any exception was thrown.</returns>
public bool MaskOutline(Mat whiteLightImage, Mat edgeMaskImage, out Mat mergedImage)
{
    bool result = false;
    mergedImage = whiteLightImage.Clone();
    try
    {
        // Convert white mask outlines to purple outlines on the merged image.
        // Wrappers are disposed now (they were leaked before).
        using (MatOfByte matb = new MatOfByte(edgeMaskImage))
        using (MatOfByte3 mat3 = new MatOfByte3(mergedImage))
        {
            var indexerGray = matb.GetIndexer();
            var indexerColor = mat3.GetIndexer();
            for (int y = 0; y < edgeMaskImage.Height; y++)
            {
                for (int x = 0; x < edgeMaskImage.Width; x++)
                {
                    if (indexerGray[y, x] == 255)                    // white
                    {
                        indexerColor[y, x] = new Vec3b(255, 128, 255); // purple
                    }
                }
            }
        }
        result = true;
    }
    catch (Exception)
    {
        // Best-effort by design: failure is reported via the false return value.
        // (Previously an empty catch with an unused 'ex' variable.)
    }
    return result;
}
// Processes every other frame of the video at 'path': crops each frame to the camera
// work area, intersects HSV-based and gray-based Canny edges, draws contours, extracts
// "sensor" points, filters them (by contour, color, proximity), groups them (by angle,
// distance, covering, union), selects lane groups against the previous frame's result,
// draws the selected groups as blue lines on the full frame, and shows it half-size.
// Cv2.WaitKey(0) blocks on every frame; Esc (27) exits the loop.
// NOTE(review): SaveImage("image.png"/"res.png") on every frame looks like debug
// leftovers — confirm. Per-frame intermediates are Release()d at the bottom of the
// loop, but 'image' and 'workAreaMask' are never released; confirm whether that
// matters for long runs.
static void FindLaneInTheVideo(string path) { VideoCapture capture = new VideoCapture(path); Mat workAreaMask = CreateMask(); using (Window win1 = new Window("test1")) { Mat image = new Mat(); // We will save previous results here List <List <Sensor> > oldResultGroups = null; int[] countTaked = new int[2] { 0, 0 }; while (true) { DateTime dt1 = DateTime.Now; capture.Read(image); if (image.Empty()) { break; } if (capture.PosFrames % 2 != 0) { continue; } // Get the work area Mat image_s = image.SubMat(Camera.vert_frame[0], Camera.vert_frame[1], Camera.hor_frame[0], Camera.hor_frame[1]); Mat workArea = new Mat(); image_s.CopyTo(workArea, workAreaMask); // Get HSV, grat and canny Mat hsv_image = workArea.CvtColor(ColorConversionCodes.RGB2HSV); Mat canny1 = hsv_image.Canny(40, 60); Mat gray = workArea.CvtColor(ColorConversionCodes.BGR2GRAY); Mat canny2 = gray.Canny(40, 60); Mat canny = new Mat(); Cv2.BitwiseAnd(canny1, canny2, canny); // Get, filter and draw contours Mat hsv_contoures = new Mat(); hsv_image.CopyTo(hsv_contoures); var contoures = FindContoures(canny); hsv_contoures.DrawContours(contoures, -1, Scalar.Red); // Get indexers MatOfByte3 hsv_cont_ind = new MatOfByte3(hsv_contoures); MatOfByte3 hsv_ind = new MatOfByte3(hsv_image); var hsv_cont_indexer = hsv_cont_ind.GetIndexer(); var hsv_indexer = hsv_ind.GetIndexer(); // Make steps of the algorithm List <Sensor> sensors = GetSensors(hsv_contoures, hsv_cont_indexer); List <Sensor> filteredByContours = FilterByContours(sensors, hsv_cont_indexer); List <Sensor> filteredByColors = FilterByColorAndChangeColor(filteredByContours, hsv_indexer); List <Sensor> filteredByNearSensors = FilterByNearSensors(filteredByColors); List <List <Sensor> > groupedByAngle = GroupByAngle(filteredByNearSensors).Where(g => g.Count > 2).ToList(); List <List <Sensor> > groupedByDistance = GroupByDistance(groupedByAngle).Where(g => g.Count > 2).ToList(); List <List <Sensor> > groupedWithoudCovering = DeleteCovering(groupedByDistance); 
List <List <Sensor> > unionGroups = UnionGroups(groupedWithoudCovering).Where(g => g.Count > 2).ToList(); List <List <Sensor> > resultGroups = SelectGroups(unionGroups, oldResultGroups, ref countTaked); image.SaveImage("image.png"); // Draw the result foreach (var group in resultGroups) { if (group != null) { foreach (var line in GetLinesForGroup(group)) { image.Line(line.x1 + Camera.hor_frame[0], line.y1 + Camera.vert_frame[0], line.x2 + Camera.hor_frame[0], line.y2 + Camera.vert_frame[0], Scalar.Blue, 5); } } } image.SaveImage("res.png"); Mat imageForDisplay = image.Resize(new Size(0, 0), 0.5, 0.5); win1.ShowImage(imageForDisplay); oldResultGroups = resultGroups; DateTime dt2 = DateTime.Now; Console.WriteLine("{0}\tms", (dt2 - dt1).TotalMilliseconds); int key = Cv2.WaitKey(0); if (key == 27) { break; //escape } // Free resourses image_s.Release(); workArea.Release(); hsv_ind.Release(); hsv_cont_ind.Release(); gray.Release(); canny1.Release(); canny2.Release(); canny.Release(); hsv_image.Release(); hsv_contoures.Release(); } } }
// Heuristically decides that a one-pixel-high row slice is NOT an arm: the row must be
// sparsely filled (non-zero fill ratio <= _notArmMaxFill) AND contain a single run of
// pixels matching CheckPixel that both begins and ends within the row. On the first
// detected run start, 'index' is set near that column.
// NOTE(review): the '+ 2' and '- 1' adjustments to 'index' look empirical — confirm intent.
// NOTE(review): the MatOfByte3 wrapper is not disposed here.
private bool IsNotArm(Mat row, ref int?index)
{
    var fillCheck = false;
    var shapeCheck = false;
    // Fraction of non-zero pixels across the row.
    var fill = (double)Cv2.CountNonZero(row) / (double)row.Width;
    fillCheck = fill <= _notArmMaxFill;
    if (fillCheck)
    {
        var byte3Col = new MatOfByte3(row);
        var indexer = byte3Col.GetIndexer();
        // start: a matching run has begun; buffer: one non-matching pixel seen after the
        // run; end: the run is confirmed terminated (a later match clears it again).
        bool start = false, end = false, buffer = false;
        for (var col = 0; col < row.Cols; col++)
        {
            var pixel = indexer[0, col];
            if (!start)
            {
                // Still scanning for the first matching pixel.
                if (!CheckPixel(pixel))
                {
                    continue;
                }
                start = true;
                if (index == null)
                {
                    index = col + 2;
                }
            }
            else if (!end)
            {
                if (!buffer)
                {
                    // Inside the run; keep scanning until it stops matching.
                    if (CheckPixel(pixel))
                    {
                        continue;
                    }
                    buffer = true;
                    if (index == col)
                    {
                        index = col - 1;
                    }
                }
                else
                {
                    // A second consecutive non-match confirms the run ended.
                    if (!CheckPixel(pixel))
                    {
                        end = true;
                    }
                }
            }
            else
            {
                // A match after the run ended re-opens it (multiple runs -> shape fails).
                if (CheckPixel(pixel))
                {
                    end = false;
                }
            }
        }
        shapeCheck = start && end;
    }
    return(shapeCheck && fillCheck);
}
/// <summary>
/// Convert Mat to byte vector index for faster access to pixels
/// https://github.com/shimat/opencvsharp/wiki/%5BCpp%5D-Accessing-Pixel
/// (The wrapper is intentionally not disposed: the returned indexer needs it alive.)
/// </summary>
/// <param name="image">image OpenCv matrix</param>
/// <returns>byte vector index</returns>
private static MatIndexer<Vec3b> GetByteVectorIndexer(Mat image) =>
    new MatOfByte3(image).GetIndexer();
/// <summary>
/// Poisson Image Editing: detects faces in srcMat, resizes putMat to each face
/// rectangle, samples a color from around putMat's middle, fills a full-size canvas
/// with it, pastes the resized face into the face region, and seamless-clones the
/// result onto srcMat inside a rectangular polygon mask centered on the face.
/// (Original Japanese notes, translated: srcMat = "the image that has the face";
/// putMat = "the face to overlay"; "enlarging the overlay slightly might be better";
/// "roughly sample the middle color — could be improved, e.g. averaging the skin
/// region"; "fill with the sampled color"; "copy into the overlay range";
/// "the center is here".)
/// NOTE(review): indexer[(int)(put0.Width * 0.5), (int)(put0.Height * 0.5)] passes
/// Width as the row index — looks transposed, confirm. The Sauvola 'binaryMat' is
/// computed and inverted but never used afterward — confirm whether it is dead code.
/// </summary>
public Mat PutMaskOnFace(Mat srcMat, Mat putMat) { var grayMat = new Mat(); Cv2.CvtColor(srcMat, grayMat, ColorConversionCodes.BGR2GRAY); Cv2.EqualizeHist(grayMat, grayMat); var faces = Cascade.DetectMultiScale(grayMat); if (faces == null) return srcMat; var binaryMat = new Mat(); int blockSize = 7; double k = 0.15; double R = 32; Binarizer.Sauvola(grayMat, binaryMat, blockSize, k, R); Cv2.BitwiseNot(binaryMat, binaryMat); var polygons = new List<List<Point>>(); var faceCount = faces.Count(); // O(n) for (int d = 0; d < faceCount; d++) { polygons = new List<List<Point>>(); int x1 = faces[d].X; int y1 = faces[d].Y; int width = faces[d].Width; int heigh = faces[d].Height; int x2 = x1 + width; int y2 = y1 + heigh; polygons.Add(new List<Point>() { new Point(x1,y1), new Point(x2,y1), new Point(x2,y2), new Point(x1,y2), }); var pwidth = putMat.Width; var pheight = putMat.Height; /* Mat put0 = putMat[(int)(pwidth * 0.1) , (int)(pwidth * 0.9), (int)(pheight * 0.1), (int)(pheight * 0.9)] .Resize(new Size(width, heigh), 0, 0, InterpolationFlags.Lanczos4); */ Mat put0 = putMat.Resize(new Size(width, heigh), 0, 0, InterpolationFlags.Lanczos4); 
MatOfByte3 mat3 = new MatOfByte3(put0); // cv::Mat_<cv::Vec3b> var indexer = mat3.GetIndexer(); Vec3b color = indexer[(int)(put0.Width * 0.5), (int)(put0.Height * 0.5)]; Mat put1 = new Mat(srcMat.Size(), MatType.CV_8UC3, new Scalar(color.Item0, color.Item1, color.Item2)); put1[y1, y2, x1, x2] = put0; Mat mask = Mat.Zeros(srcMat.Size(), MatType.CV_8UC3); Cv2.FillPoly(mask, polygons, new Scalar(255, 255, 255)); var center = new Point(faces[d].X + faces[d].Width * 0.5, faces[d].Y + faces[d].Height * 0.5); Cv2.SeamlessClone(put1, srcMat, mask, center, srcMat, SeamlessCloneMethods.NormalClone); } return srcMat; }
/// <summary>
/// Eye-based tilt detection variant of the face overlay ("detect tilt by looking at
/// the eyes"): detects faces, resizes putMat to the face, samples its middle-ish
/// color, pastes it onto a color-filled canvas, then detects eyes inside the face
/// region. With two or more eyes it estimates the tilt angle from the eye pair via
/// MinAreaRect, rotates the overlay accordingly, and seamless-clones it inside a
/// rotated ellipse mask; otherwise it clones with an unrotated ellipse.
/// (Original Japanese notes translated inline where they appear: "sample the middle
/// color roughly — could be improved"; "fill with the sampled color"; "copy into the
/// overlay range"; "the center is here"; "rotate the image".)
/// NOTE(review): 'orderedEyes' is sorted by area, but the loop builds eyePpints from
/// 'eyes[i]', not 'orderedEyes[i]' — likely a bug, confirm. Same possible
/// [Width, Height] indexer transposition as the sibling overlay methods.
/// The inner while(true) exits via break on every path.
/// </summary>
public Mat PutEllipseEyeMaskOnFace(Mat srcMat, Mat putMat) { var grayMat = new Mat(); Cv2.CvtColor(srcMat, grayMat, ColorConversionCodes.BGR2GRAY); Cv2.EqualizeHist(grayMat, grayMat); var faces = Cascade.DetectMultiScale(grayMat); if (faces == null) return srcMat; var polygons = new List<List<Point>>(); var faceCount = faces.Count(); // O(n) for (int d = 0; d < faceCount; d++) { polygons = new List<List<Point>>(); int x1 = faces[d].X; int y1 = faces[d].Y; int width = faces[d].Width; int height = faces[d].Height; int x2 = x1 + width; int y2 = y1 + height; int pwidth = putMat.Width; int pheight = putMat.Height; int srcWidth = srcMat.Width; int srcHeight = srcMat.Height; polygons.Add(new List<Point>() { new Point(x1,y1), new Point(x2,y1), new Point(x2,y2), new Point(x1,y2), }); var faceSize = new Size(width, height); /* Mat put0 = putMat[(int)(pwidth * 0.1) , (int)(pwidth * 0.9), (int)(pheight * 0.1), (int)(pheight * 0.9)] .Resize(new Size(width, heigh), 0, 0, InterpolationFlags.Lanczos4); */ Mat put0 = putMat.Resize(faceSize, 0, 0, InterpolationFlags.Lanczos4); 
MatOfByte3 mat3 = new MatOfByte3(put0); // cv::Mat_<cv::Vec3b> var indexer = mat3.GetIndexer(); Vec3b color = indexer[(int)(put0.Width * 0.5), (int)(put0.Height * 0.5)]; Mat put1 = new Mat(srcMat.Size(), MatType.CV_8UC3, new Scalar(color.Item0, color.Item1, color.Item2)); put1[y1, y2, x1, x2] = put0; Mat put1gray = Mat.Zeros(srcMat.Size(), MatType.CV_8UC1); put1gray[y1, y2, x1, x2] = grayMat[y1, y2, x1, x2]; var eyes = EyeCascade.DetectMultiScale(put1gray); /* Debug.WriteLine(eyes.Count()); var cccc = new Point(eyes[0].X + eyes[0].Width * 0.5, eyes[0].Y + eyes[0].Height * 0.5); put1gray.Circle(cccc,(int)(eyes[0].Width * 0.5), new Scalar(0, 255, 255)); return put1gray;*/ var eyeCount = eyes.Count(); if (eyeCount >= 2) { var eyePpints = new List<Point>(); var orderedEyes = eyes.OrderByDescending(x => x.Width * x.Height).ToArray(); while (true) { for (int i = 0; i < 2; i++) { eyePpints.Add(new Point(eyes[i].X + eyes[i].Width * 0.5, eyes[i].Y + eyes[i].Height * 0.5)); } var wrapRect = Cv2.MinAreaRect(eyePpints); if (Math.Abs(wrapRect.Angle % 180) < 20) { var scale = 1.0; var angle = -wrapRect.Angle % 180; var eyedx = (eyePpints[0].X + eyePpints[1].X) * 0.5 - wrapRect.Center.X; var eyedy = (eyePpints[0].Y + eyePpints[1].Y) * 0.5 - wrapRect.Center.Y; var center = new Point( (faces[d].X + faces[d].Width * 0.5) + eyedx, (faces[d].Y + faces[d].Height * 0.5) + eyedy); Mat matrix = Cv2.GetRotationMatrix2D(center, angle, scale); Cv2.WarpAffine(put1, put1, matrix, put1.Size()); var faceAvgWidth = (int)((wrapRect.Size.Width + faceSize.Width) * 0.6); var rotateRect = new RotatedRect(center, new Size2f(faceAvgWidth, faceSize.Height * 0.9), angle); Mat mask = Mat.Zeros(srcMat.Size(), MatType.CV_8UC3); Cv2.Ellipse(mask, rotateRect, new Scalar(255, 255, 255), -1, LineTypes.AntiAlias); // Cv2.FillPoly(mask, polygons, new Scalar(255, 255, 255)); Cv2.SeamlessClone(put1, srcMat, mask, center, srcMat, SeamlessCloneMethods.NormalClone); break; } 
else { if (orderedEyes.Count() > 2) { orderedEyes = orderedEyes.Skip(1).ToArray(); } else { var angle = 0; var center = new Point(faces[d].X + faces[d].Width * 0.5, faces[d].Y + faces[d].Height * 0.5); var rotateRect = new RotatedRect(center, new Size2f(faceSize.Width * 0.8, faceSize.Height * 0.9), angle); Mat mask = Mat.Zeros(srcMat.Size(), MatType.CV_8UC3); Cv2.Ellipse(mask, rotateRect, new Scalar(255, 255, 255), -1, LineTypes.AntiAlias); // Cv2.FillPoly(mask, polygons, new Scalar(255, 255, 255)); Cv2.SeamlessClone(put1, srcMat, mask, center, srcMat, SeamlessCloneMethods.NormalClone); break; } } } } else { var angle = 0; var center = new Point(faces[d].X + faces[d].Width * 0.5, faces[d].Y + faces[d].Height * 0.5); var rotateRect = new RotatedRect(center, new Size2f(faceSize.Width * 0.8, faceSize.Height * 0.9), angle); Mat mask = Mat.Zeros(srcMat.Size(), MatType.CV_8UC3); Cv2.Ellipse(mask, rotateRect, new Scalar(255, 255, 255), -1, LineTypes.AntiAlias); // Cv2.FillPoly(mask, polygons, new Scalar(255, 255, 255)); Cv2.SeamlessClone(put1, srcMat, mask, center, srcMat, SeamlessCloneMethods.NormalClone); } } return srcMat; }
/// <summary>
/// Draws all blobs to the specified image.
/// </summary>
/// <param name="img">The target image to be drawn.</param>
/// <exception cref="ArgumentNullException">img is null.</exception>
/// <exception cref="OpenCvSharpException">Blobs or Labels is empty.</exception>
public void RenderBlobs(Mat img)
{
    if (img == null)
        throw new ArgumentNullException("img");
    if (Blobs == null || Blobs.Count == 0)
        throw new OpenCvSharpException("Blobs is empty");
    if (Labels == null)
        throw new OpenCvSharpException("Labels is empty");

    int height = Labels.GetLength(0);
    int width = Labels.GetLength(1);
    img.Create(new Size(width, height), MatType.CV_8UC3);

    // Label 0 (background) is black; every other label gets a random color.
    Scalar[] palette = new Scalar[Blobs.Count];
    palette[0] = Scalar.All(0);
    for (int i = 1; i < palette.Length; i++)
        palette[i] = Scalar.RandomColor();

    using (var typed = new MatOfByte3(img))
    {
        var indexer = typed.GetIndexer();
        for (int y = 0; y < height; y++)
        {
            for (int x = 0; x < width; x++)
            {
                indexer[y, x] = palette[Labels[y, x]].ToVec3b();
            }
        }
    }
}
// Work-in-progress variant of the ellipse face overlay. As written, it returns early:
// after Sauvola-binarizing the grayscale image it executes 'return binaryMat;', so
// everything below (face overlay, contour-based tilt detection, seamless clone) is
// unreachable dead code. There is also a second early 'return conMat;' and a
// 'continue;' placed before the Cv2.Ellipse/SeamlessClone calls inside the loop.
// These look like debug checkpoints left in — confirm intent before cleaning up.
// NOTE(review): 'points.Union(dc)' discards its result (LINQ Union is non-mutating),
// so 'points' would stay empty even if that code were reachable — confirm.
public Mat PutEllipseMaskOnFace2(Mat srcMat, Mat putMat) { var grayMat = new Mat(); Cv2.CvtColor(srcMat, grayMat, ColorConversionCodes.BGR2GRAY); Cv2.EqualizeHist(grayMat, grayMat); var faces = Cascade.DetectMultiScale(grayMat); if (faces == null) return srcMat; var binaryMat = new Mat(); // binaryMat = ColorExtractor.ExtractMask(srcMat,ColorConversionCodes.BGR2HSV,ColorVariation.Skin); // return binaryMat; int blockSize = 7; double k = 1.5; double R = 100; Binarizer.Sauvola(grayMat, binaryMat, blockSize, k, R); Cv2.BitwiseNot(binaryMat, binaryMat); return binaryMat; var polygons = new List<List<Point>>(); var faceCount = faces.Count(); // O(n) for (int d = 0; d < faceCount; d++) { polygons = new List<List<Point>>(); int x1 = faces[d].X; int y1 = faces[d].Y; int width = faces[d].Width; int height = faces[d].Height; int x2 = x1 + width; int y2 = y1 + height; int pwidth = putMat.Width; int pheight = putMat.Height; int srcWidth = srcMat.Width; int srcHeight = srcMat.Height; polygons.Add(new List<Point>() { new Point(x1,y1), new Point(x2,y1), new Point(x2,y2), new Point(x1,y2), }); // f = fixed /* int fx1 = (int)(x1 - width * 0.01); fx1 = fx1 > 0 ? fx1 : 0; int fx2 = (int)(x2 + width * 0.01); fx2 = fx2 < srcWidth ? fx2 : srcWidth; int fy1 = (int)(y1 - height * 0.01); fy1 = fy1 > 0 ? fy1 : 0; int fy2 = (int)(y2 + height * 0.01); fy2 = fy2 < srcHeight ? fy2 : srcHeight; */ int fx1 = (int)(x1 + width * 0.1); int fx2 = (int)(x2 - width * 0.1); int fy1 = (int)(y1 + height * 0.1); int fy2 = (int)(y2 - height * 0.1); int fwidth = x2 - x1; int fheight = y2 - y1; var faceSize = new Size(fwidth, fheight); /* Mat put0 = putMat[(int)(pwidth * 0.1) , (int)(pwidth * 0.9), (int)(pheight * 0.1), (int)(pheight * 0.9)] .Resize(new Size(width, heigh), 0, 0, InterpolationFlags.Lanczos4); */ Mat put0 = putMat.Resize(faceSize, 0, 0, InterpolationFlags.Lanczos4); 
MatOfByte3 mat3 = new MatOfByte3(put0); // cv::Mat_<cv::Vec3b> var indexer = mat3.GetIndexer(); Vec3b color = indexer[(int)(put0.Width * 0.5), (int)(put0.Height * 0.5)]; Mat put1 = new Mat(srcMat.Size(), MatType.CV_8UC3, new Scalar(color.Item0, color.Item1, color.Item2)); put1[y1, y2, x1, x2] = put0; Mat mask = Mat.Zeros(srcMat.Size(), MatType.CV_8UC3); var center = new Point(faces[d].X + faces[d].Width * 0.5, faces[d].Y + faces[d].Height * 0.5); Mat faceAroundMat = Mat.Zeros(srcMat.Size(), MatType.CV_8UC1); faceAroundMat[fy1, fy2, fx1, fx2] = binaryMat[fy1, fy2, fx1, fx2]; // faceAroundMat[y1, y2, x1, x2] = binaryMat[y1, y2, x1, x2]; //var countours = new // contour extraction alone does not reveal the tilt; the original API is destructive, hence the Clone http://opencv.jp/opencv-2svn/cpp/imgproc_structural_analysis_and_shape_descriptors.html#cv-findcontours var contours = faceAroundMat.Clone().FindContoursAsArray(RetrievalModes.List, ContourApproximationModes.ApproxNone); // keep only contours with a large element count var detectedContours = contours.Where(c => /*Cv2.ContourArea(c) > Cv2.ContourArea(polygons[0]) * 0.05 &&*/ Cv2.ContourArea(c) < Cv2.ContourArea(polygons[0]) * 0.1); Mat conMat = Mat.Zeros(srcMat.Size(), MatType.CV_8UC1); Cv2.DrawContours(conMat, detectedContours, -1, new Scalar(255, 255, 255)); return conMat; var points = new List<Point>(); foreach (var dc in detectedContours) { points.Union(dc); } var detectedRotateRect = Cv2.MinAreaRect(points); float angle = detectedRotateRect.Angle = Math.Abs(detectedRotateRect.Angle) > 20 ? 
detectedRotateRect.Angle % 20 : detectedRotateRect.Angle; float scale = 1.0f; // rotation Mat matrix = Cv2.GetRotationMatrix2D(center, angle, scale); Debug.WriteLine(detectedRotateRect.Angle); // rotate the image Cv2.WarpAffine(put1, put1, matrix, put1.Size()); var rotateRect = new RotatedRect(center, new Size2f(faceSize.Width, faceSize.Height), detectedRotateRect.Angle); continue; Cv2.Ellipse(mask, detectedRotateRect, new Scalar(255, 255, 255), -1, LineTypes.AntiAlias); // Cv2.FillPoly(mask, polygons, new Scalar(255, 255, 255)); Cv2.SeamlessClone(put1, srcMat, mask, center, srcMat, SeamlessCloneMethods.NormalClone); } return srcMat; }
/// <summary>
/// Detects lane markings in the image at <paramref name="path"/> and displays the
/// result (detected lines drawn in blue) in an OpenCV window until a key is pressed.
/// Pipeline: crop to the camera work area, mask it, run Canny on both the HSV and
/// gray versions, AND the edges, find/filter contours, then group candidate sensors
/// by angle and distance to select the lane groups.
/// </summary>
/// <param name="path">Path of the image file to analyze.</param>
static void FindLaneInTheImage(string path)
{
    // 'using' guarantees every Mat is released even if a pipeline step throws;
    // the previous manual Release() calls leaked on exception and never released
    // workAreaMask or the resized display image.
    using (Mat workAreaMask = CreateMask())
    using (Window win1 = new Window("test1"))
    using (Mat image = new Mat(path))
    // Get the work area (sub-rectangle defined by the camera frame bounds).
    using (Mat imageS = image.SubMat(Camera.vert_frame[0], Camera.vert_frame[1], Camera.hor_frame[0], Camera.hor_frame[1]))
    using (Mat workArea = new Mat())
    {
        imageS.CopyTo(workArea, workAreaMask);

        // Get HSV, gray and canny; keep only edges present in BOTH edge maps.
        // NOTE(review): CvtColor uses RGB2HSV while the gray conversion uses
        // BGR2GRAY — OpenCV Mats are normally BGR, so the HSV channels may be
        // swapped; preserved as-is since downstream filters were tuned to it.
        using (Mat hsvImage = workArea.CvtColor(ColorConversionCodes.RGB2HSV))
        using (Mat canny1 = hsvImage.Canny(40, 60))
        using (Mat gray = workArea.CvtColor(ColorConversionCodes.BGR2GRAY))
        using (Mat canny2 = gray.Canny(40, 60))
        using (Mat canny = new Mat())
        using (Mat hsvContoures = new Mat())
        {
            Cv2.BitwiseAnd(canny1, canny2, canny);

            // Get, filter and draw contours on a copy of the HSV image.
            hsvImage.CopyTo(hsvContoures);
            var contoures = FindContoures(canny);
            hsvContoures.DrawContours(contoures, -1, Scalar.Red);

            // Pixel indexers over the contour overlay and the raw HSV image.
            using (MatOfByte3 hsvContInd = new MatOfByte3(hsvContoures))
            using (MatOfByte3 hsvInd = new MatOfByte3(hsvImage))
            {
                var hsvContIndexer = hsvContInd.GetIndexer();
                var hsvIndexer = hsvInd.GetIndexer();

                // Make steps of the algorithm: progressively filter sensors,
                // then group them; groups smaller than 3 sensors are noise.
                List<Sensor> sensors = GetSensors(hsvContoures, hsvContIndexer);
                List<Sensor> filteredByContours = FilterByContours(sensors, hsvContIndexer);
                List<Sensor> filteredByColors = FilterByColorAndChangeColor(filteredByContours, hsvIndexer);
                List<Sensor> filteredByNearSensors = FilterByNearSensors(filteredByColors);
                List<List<Sensor>> groupedByAngle = GroupByAngle(filteredByNearSensors).Where(g => g.Count > 2).ToList();
                List<List<Sensor>> groupedByDistance = GroupByDistance(groupedByAngle).Where(g => g.Count > 2).ToList();
                List<List<Sensor>> groupedWithoudCovering = DeleteCovering(groupedByDistance);
                List<List<Sensor>> unionGroups = UnionGroups(groupedWithoudCovering).Where(g => g.Count > 2).ToList();
                List<List<Sensor>> resultGroups = SelectGroups(unionGroups);

                // Draw the result lines back onto the full image, translating
                // work-area coordinates to full-image coordinates.
                foreach (var group in resultGroups)
                {
                    if (group != null)
                    {
                        foreach (var line in GetLinesForGroup(group))
                        {
                            image.Line(line.x1 + Camera.hor_frame[0], line.y1 + Camera.vert_frame[0],
                                       line.x2 + Camera.hor_frame[0], line.y2 + Camera.vert_frame[0],
                                       Scalar.Blue, 5);
                        }
                    }
                }

                // Show at half size and wait for a key press.
                using (Mat imageForDisplay = image.Resize(new Size(0, 0), 0.5, 0.5))
                {
                    win1.ShowImage(imageForDisplay);
                    Cv2.WaitKey(0);
                }
            }
        }
    }
}
public static Mat ExtractMask(Mat srcMat, ColorConversionCodes code, int ch1Lower, int ch1Upper, int ch2Lower, int ch2Upper, int ch3Lower, int ch3Upper) { if (srcMat == null) throw new ArgumentNullException("srcMat"); var colorMat = srcMat.CvtColor(code); var lut = new Mat(256, 1, MatType.CV_8UC3); var lower = new int[3] { ch1Lower, ch2Lower, ch3Lower }; var upper = new int[3] { ch1Upper, ch2Upper, ch3Upper }; // cv::Mat_<cv::Vec3b> var mat3 = new MatOfByte3(lut); var indexer = mat3.GetIndexer(); for (int i = 0; i < 256; i++) { var color = indexer[i]; byte temp; for (int k = 0; k < 3; k++) { if (lower[k] <= upper[k]) { if ((lower[k] <= i) && (i <= upper[k])) { temp = 255; } else { temp = 0; } } else { if ((i <= upper[k]) || (lower[k] <= i)) { temp = 255; } else { temp = 0; } } color[k] = temp; } indexer[i] = color; } Cv2.LUT(colorMat, lut, colorMat); var channelMat = colorMat.Split(); var maskMat = new Mat(); Cv2.BitwiseAnd(channelMat[0], channelMat[1], maskMat); Cv2.BitwiseAnd(maskMat, channelMat[2], maskMat); return maskMat; }
public override void Draw(IVncPixelGetter a_pixelGetter, MatOfByte3 a_mat) { if (m_unzippedData == null) { m_unzippedData = m_zrleReader.Read(m_zlibData, m_offset, m_length); } int readPos = 0; for (int y = Y; y < Y + Height; y += 64) { int h = (Y + Height - y) > 64 ? 64 : (Y + Height - y); for (int x = X; x < X + Width; x += 64) { int w = (X + Width - x) > 64 ? 64 : (X + Width - x); int subencodingType = m_unzippedData[readPos++]; if (subencodingType == 0) { var indexer = a_mat.GetIndexer(); int byteSize = a_pixelGetter.GetPixelByteSize(); for (int posY = y; posY < y + h; ++posY) { for (int posX = x; posX < x + w; ++posX) { indexer[posY, posX] = a_pixelGetter.GetPixelVec3b(m_unzippedData, readPos); readPos += byteSize; } } } else if (subencodingType == 1) { Scalar background = (Scalar)a_pixelGetter.GetPixelVec3b(m_unzippedData, readPos); readPos += a_pixelGetter.GetPixelByteSize(); a_mat.Rectangle(new Rect(x, y, w, h), background, -1 /* Fill */); } else if (2 <= subencodingType && subencodingType <= 16) { Vec3b[] palette = new Vec3b[subencodingType]; for (int i = 0; i < subencodingType; ++i) { palette[i] = a_pixelGetter.GetPixelVec3b(m_unzippedData, readPos); readPos += a_pixelGetter.GetPixelByteSize(); } var indexer = a_mat.GetIndexer(); int byteSize = a_pixelGetter.GetPixelByteSize(); int[] ppa = createPackedPixelsArray(w, h, m_unzippedData, readPos, subencodingType); for (int i = 0, posY = y; posY < y + h; ++posY) { for (int posX = x; posX < x + w; ++posX) { indexer[posY, posX] = palette[ppa[i]]; ++i; } } readPos += getPackedPixelsBytesSize(w, h, subencodingType); } else if (17 <= subencodingType && subencodingType <= 127) { throw new NotSupportedException($"SubencodingType ({subencodingType}) is not supported."); } else if (subencodingType == 128) { var indexer = a_mat.GetIndexer(); int posX = x; int posY = y; int totalLen = 0; int size = w * h; while (totalLen < size) { Vec3b pixel = a_pixelGetter.GetPixelVec3b(m_unzippedData, readPos); readPos += 
a_pixelGetter.GetPixelByteSize(); // count length int b; int len = 1; do { b = m_unzippedData[readPos++]; len += b; } while (b == 255); // set pixel for (int i = 0; i < len; ++i) { indexer[posY, posX] = pixel; // to next position ++posX; if (posX >= x + w) { posX = x; ++posY; } } totalLen += len; } } else if (subencodingType == 129) { throw new NotSupportedException($"SubencodingType ({subencodingType}) is not supported."); } else if (130 <= subencodingType && subencodingType <= 255) { // create palette int paletteSize = subencodingType - 128; Vec3b[] palette = new Vec3b[paletteSize]; for (int i = 0; i < paletteSize; ++i) { palette[i] = a_pixelGetter.GetPixelVec3b(m_unzippedData, readPos); readPos += a_pixelGetter.GetPixelByteSize(); } var indexer = a_mat.GetIndexer(); int posX = x; int posY = y; int totalLen = 0; int size = w * h; while (totalLen < size) { int paletteIndex = m_unzippedData[readPos++]; if ((paletteIndex & 0b10000000) == 0) { // length is 1 indexer[posY, posX] = palette[paletteIndex]; ++totalLen; // to next position ++posX; if (posX >= x + w) { posX = x; ++posY; } } else { int b; int len = 1; do { b = m_unzippedData[readPos++]; len += b; } while (b == 255); for (int i = 0; i < len; ++i) { indexer[posY, posX] = palette[paletteIndex & 0x7F]; // to next position ++posX; if (posX >= x + w) { posX = x; ++posY; } } totalLen += len; } } }
/// <summary>
/// Wires up the camera/GUI update loop, the periodic GC trigger and the FPS label
/// updater (as local functions), then starts the garbage-collector service and
/// the depth-sensor thread.
/// </summary>
public MainForm()
{
    InitializeComponent();
    initCameraThread();
    initGC();
    initFpsUpdater();
    _garbageCollector.Start();
    _depthSensorThread.Start();

    // Registers the per-tick handler that renders the depth map and the camera frame.
    void initCameraThread()
    {
        _guiUpdater.Update += new EventHandler<ThreadService>((o, service) =>
        {
            var frame = new Mat();
            short[,] depthMap = _depthSensorThread.GetDepthMap();
            if (depthMap != null)
            {
                // 480x640 BGR mat built in HSV space, converted to BGR below.
                var mat = new MatOfByte3(480, 640);
                var indexer = mat.GetIndexer();
                //short maxValue = 1;
                //foreach (short value in depthMap)
                //{
                //    if (Math.Abs(value) < 20000 && value > maxValue)
                //    {
                //        maxValue = value;
                //    }
                //}
                //maxValue = 1500;
                for (int row = 0; row < 480; row++)
                {
                    for (int col = 0; col < 640; col++)
                    {
                        var pixel = indexer[row, col];
                        // Each depth value covers a 2x2 pixel block, so depthMap
                        // must be at least 240x320 — presumably half resolution
                        // of the display; TODO confirm against the sensor thread.
                        short value = depthMap[row / 2, col / 2];
                        // Depth -> hue (value/3 wrapped into 0..255); full
                        // saturation and brightness for valid readings.
                        pixel.Item0 = (byte)(value / 3 % 256);
                        pixel.Item1 = 255;
                        pixel.Item2 = 255;//(byte)(255 - value * 220 / maxValue);
                        // |value| > 20000 is treated as invalid: render black.
                        if (Math.Abs(value) > 20000)
                        {
                            pixel.Item1 = 0;
                            pixel.Item2 = 0;
                        }
                        indexer[row, col] = pixel;
                    }
                }
                Cv2.CvtColor(mat, mat, ColorConversionCodes.HSV2BGR);
                // Marshal the bitmap assignment onto the UI thread.
                BeginInvoke(new MethodInvoker(() => { PictureBox_DepthMap.Image = mat.ToBitmap(); }));
            }
            if (_capture.Read(frame) && frame.Width > 0)
            {
                //Cv2.Flip(frame, frame, FlipMode.Y);
                //frame = AlterMat(frame);
                BeginInvoke(new MethodInvoker(() =>
                {
                    // Label format is "<width>x<height> <fps>"; keep the old fps part.
                    string[] oldInfo = Label_FrameInfo.Text.Split(' ');
                    string fpsInfo = oldInfo.Length > 1 ? oldInfo[1] : "?";
                    Label_FrameInfo.Text = $"{frame.Width}x{frame.Height} {fpsInfo}";
                    PictureBox_Camera.Image = frame.ToBitmap();
                }));
                _framesCounter.Update();
            }
        });
        _guiUpdater.ThreadCompleted += new EventHandler<ThreadService>((o, service) =>
        {
            BeginInvoke(new MethodInvoker(() => { PictureBox_Camera.Image = null; }));
            _framesCounter.Reset();
            // NOTE(review): this Text assignment is not marshalled via BeginInvoke
            // like the others — confirm ThreadCompleted fires on the UI thread.
            Label_FrameInfo.Text = "N/A";
        });
        // 0 = run the update handler as often as possible.
        _guiUpdater.UpdateInterval = 0;
    }

    // Forces a GC pass every second (presumably to bound Mat/Bitmap pressure).
    void initGC()
    {
        _garbageCollector.Update += new EventHandler<ThreadService>((obj, service) => { GC.Collect(); });
        _garbageCollector.UpdateInterval = 1000;
    }

    // Refreshes the fps part of the frame-info label twice per second.
    void initFpsUpdater()
    {
        _fpsUpdater.Update += new EventHandler<ThreadService>((obj, service) =>
        {
            BeginInvoke(new MethodInvoker(() =>
            {
                int fps = _framesCounter.GetFps();
                // -1 means no fps value is available yet.
                if (fps != -1)
                {
                    string[] oldInfo = Label_FrameInfo.Text.Split(' ');
                    string resInfo = oldInfo.Length > 0 ? oldInfo[0] : "?x?";
                    Label_FrameInfo.Text = $"{resInfo} {fps}";
                }
            }));
        });
        _fpsUpdater.UpdateInterval = 500;
    }
}
/// <summary>
/// Renders the bit string <paramref name="inputMessage"/> as a QR-style image.
/// Grayscale mode paints one cell per bit into a single monochrome mat.
/// Color mode distributes the message's 7-bit characters round-robin across
/// red, green and blue component mats and also produces their combined image.
/// </summary>
/// <param name="inputMessage">Message as a string of '0'/'1' characters.</param>
/// <param name="qrCodeProps">Image size, cell size and cells-per-dimension.</param>
/// <param name="colorMode">Grayscale or Color generation mode.</param>
/// <returns>Generated images keyed by type, or null for an unknown mode.</returns>
public Dictionary<string, BitmapSource> GenerateQRCore(string inputMessage, QRCodeProperties qrCodeProps, QRColorMode colorMode)
{
    outputImages = new Dictionary<string, BitmapSource>();
    outputMats = new Dictionary<string, Mat>();
    matSearchPixels = new List<Point>();
    this.inputMessage = inputMessage;
    CalculateSearchPoint(qrCodeProps);

    if (colorMode == QRColorMode.Grayscale)
    {
        // Generate empty (all-zero) single-channel mat.
        qrMatMono = Mat.Zeros(new Size(qrCodeProps.ImgSize.Width, qrCodeProps.ImgSize.Height), MatType.CV_8UC1);

        // Get mat indexer for direct pixel writes.
        MatOfByte mob1 = new MatOfByte(qrMatMono);
        MatIndexer<byte> indexerByte = mob1.GetIndexer();

        int stringIndex = 0;
        int gridSpan = qrCodeProps.CellsPerDim * qrCodeProps.CellSize;
        // Walk the cell grid row by row, one message bit per cell. Both loops
        // stop once the message is exhausted (the previous inner-loop break
        // pointlessly re-entered and re-broke every remaining row).
        for (int y = 0; y < gridSpan && stringIndex < inputMessage.Length; y += qrCodeProps.CellSize)
        {
            for (int x = 0; x < gridSpan && stringIndex < inputMessage.Length; x += qrCodeProps.CellSize)
            {
                char bit = inputMessage[stringIndex++];
                // '1' fills the cell with max luminance; anything else leaves it black.
                if (bit.Equals('1'))
                {
                    for (int i = y; i < y + qrCodeProps.CellSize; i++)
                    {
                        for (int j = x; j < x + qrCodeProps.CellSize; j++)
                        {
                            indexerByte[i, j] = LUM_INTENSITY_MAX;
                        }
                    }
                }
            }
        }

        // Add image and mat to output lists, then return images to UI.
        outputImages.Add(QR_TYPE_MONOCHROME, Utils.MatToImage(qrMatMono));
        outputMats.Add(QR_TYPE_MONOCHROME, qrMatMono);
        return outputImages;
    }
    else if (colorMode == QRColorMode.Color)
    {
        // Generate one empty 3-channel mat per color component.
        Mat qrRedMat = Mat.Zeros(new Size(qrCodeProps.ImgSize.Width, qrCodeProps.ImgSize.Height), MatType.CV_8UC3);
        Mat qrGreenMat = Mat.Zeros(new Size(qrCodeProps.ImgSize.Width, qrCodeProps.ImgSize.Height), MatType.CV_8UC3);
        Mat qrBlueMat = Mat.Zeros(new Size(qrCodeProps.ImgSize.Width, qrCodeProps.ImgSize.Height), MatType.CV_8UC3);

        // Get mat indexers for the three component mats.
        MatOfByte3 mobRed = new MatOfByte3(qrRedMat);
        MatOfByte3 mobGreen = new MatOfByte3(qrGreenMat);
        MatOfByte3 mobBlue = new MatOfByte3(qrBlueMat);
        MatIndexer<Vec3b> indexerMobRed = mobRed.GetIndexer();
        MatIndexer<Vec3b> indexerMobGreen = mobGreen.GetIndexer();
        MatIndexer<Vec3b> indexerMobBlue = mobBlue.GetIndexer();

        // Split the message character-wise (7 bits per character) round-robin
        // over the three components. StringBuilder avoids the O(n^2) cost of
        // repeated string concatenation in the original loop.
        int bitsPerChar = 7;
        int messageChars = inputMessage.Length / bitsPerChar;
        var redBits = new System.Text.StringBuilder();
        var greenBits = new System.Text.StringBuilder();
        var blueBits = new System.Text.StringBuilder();
        for (int i = 0; i < messageChars; i++)
        {
            var target = i % 3 == 0 ? redBits : i % 3 == 1 ? greenBits : blueBits;
            target.Append(inputMessage, i * bitsPerChar, bitsPerChar);
        }
        string messageForRed = redBits.ToString();
        string messageForGreen = greenBits.ToString();
        string messageForBlue = blueBits.ToString();

        indexerMobRed = WriteColorComponent(messageForRed, qrCodeProps, indexerMobRed, COLOR_RED);
        indexerMobGreen = WriteColorComponent(messageForGreen, qrCodeProps, indexerMobGreen, COLOR_GREEN);
        indexerMobBlue = WriteColorComponent(messageForBlue, qrCodeProps, indexerMobBlue, COLOR_BLUE);

        // Per-channel mats are disjoint, so addition merges them losslessly.
        Mat combinedMat = qrRedMat + qrGreenMat + qrBlueMat;

        // Add image and mats to output lists.
        outputImages.Add(QR_TYPE_COMBINED, Utils.MatToImage(combinedMat));
        outputImages.Add(QR_TYPE_RED, Utils.MatToImage(qrRedMat));
        outputImages.Add(QR_TYPE_GREEN, Utils.MatToImage(qrGreenMat));
        outputImages.Add(QR_TYPE_BLUE, Utils.MatToImage(qrBlueMat));
        outputMats.Add(QR_TYPE_COMBINED, combinedMat);
        outputMats.Add(QR_TYPE_RED, qrRedMat);
        outputMats.Add(QR_TYPE_GREEN, qrGreenMat);
        outputMats.Add(QR_TYPE_BLUE, qrBlueMat);
        return outputImages;
    }

    return null;
}