/// <summary>
/// Detects faces in <paramref name="frame"/>, publishes a 150px aligned chip of each
/// face to <c>MainWindow.main.Statusa1</c>, and publishes the full frame with face
/// rectangles drawn to <c>MainWindow.main.Statusa</c>.
/// </summary>
/// <param name="frame">The frame to scan. The caller's bitmap is not modified.</param>
private void buscarrosto(Bitmap frame)
{
    // BUG FIX: the original never disposed the Emgu image, the dlib detections,
    // the chip details or the extracted chips, leaking native memory per frame.
    // NOTE(review): Image<Rgb, byte> appears to give RGB-ordered bytes matching
    // dlib's RgbPixel — confirm against Emgu's Bitmap constructor semantics.
    using (Image<Rgb, Byte> imageCV = new Image<Rgb, byte>(frame))
    {
        Emgu.CV.Mat mat = imageCV.Mat;
        // Copy the pixel data into a managed buffer that dlib can wrap.
        var array = new byte[mat.Width * mat.Height * mat.ElementSize];
        mat.CopyTo(array);

        using (Array2D<RgbPixel> image = Dlib.LoadImageData<RgbPixel>(array, (uint)mat.Height, (uint)mat.Width, (uint)(mat.Width * mat.ElementSize)))
        using (FrontalFaceDetector fd = Dlib.GetFrontalFaceDetector())
        {
            foreach (DlibDotNet.Rectangle face in fd.Operator(image))
            {
                // Align and crop the face to a 150px chip with 25% padding.
                using (FullObjectDetection shape = _ShapePredictor.Detect(image, face))
                using (ChipDetails faceChipDetail = Dlib.GetFaceChipDetails(shape, 150, 0.25))
                using (Array2D<RgbPixel> faceChip = Dlib.ExtractImageChip<RgbPixel>(image, faceChipDetail))
                {
                    MainWindow.main.Statusa1 = faceChip.ToBitmap<RgbPixel>();
                }
                // Mark the detection on the full frame (drawn after chip
                // extraction so the rectangle is not part of the chip).
                Dlib.DrawRectangle(image, face, color: new RgbPixel(0, 255, 255), thickness: 4);
            }
            // Publish the annotated frame. (The original reassigned the by-value
            // `frame` parameter first, which had no effect on the caller.)
            MainWindow.main.Statusa = image.ToBitmap<RgbPixel>();
        }
    }
}
/// <summary>
/// Runs Tesseract OCR on <paramref name="image"/>. If the colour pass finds no
/// characters, retries on a grey-scaled copy, and finally on a binary-thresholded
/// copy (threshold 65).
/// </summary>
/// <param name="_ocr">An initialized Tesseract engine.</param>
/// <param name="image">The input image; 1-channel input is converted to BGR first.</param>
/// <returns>The recognized UTF-8 text with trailing newline characters trimmed.</returns>
/// <exception cref="Exception">Thrown when Tesseract reports a recognition failure.</exception>
public static string OcrImage(Emgu.CV.OCR.Tesseract _ocr, Emgu.CV.Mat image)
{
    using (var imageColor = new Mat())
    using (Mat imgGrey = new Mat())
    using (Mat imgThresholded = new Mat())
    {
        // Normalize to a 3-channel image for the first (colour) pass.
        if (image.NumberOfChannels == 1)
        {
            CvInvoke.CvtColor(image, imageColor, ColorConversion.Gray2Bgr);
        }
        else
        {
            image.CopyTo(imageColor);
        }

        var characters = RunOcrPass(_ocr, imageColor);
        Log.Debug("GetCharacters found " + characters.Length + " with colors");

        if (characters.Length == 0)
        {
            // Second pass: grey-scale. BUG FIX: convert from imageColor (always
            // 3-channel); converting the original `image` with Bgr2Gray throws
            // when the input was already single-channel.
            CvInvoke.CvtColor(imageColor, imgGrey, ColorConversion.Bgr2Gray);
            characters = RunOcrPass(_ocr, imgGrey);
            Log.Debug("GetCharacters found " + characters.Length + " with grey scaled");

            if (characters.Length == 0)
            {
                // Final pass: hard binary threshold for low-contrast input.
                CvInvoke.Threshold(imgGrey, imgThresholded, 65, 255, ThresholdType.Binary);
                characters = RunOcrPass(_ocr, imgThresholded);
                Log.Debug("GetCharacters found " + characters.Length + " thresholded");
            }
        }

        return _ocr.GetUTF8Text().TrimEnd(Environment.NewLine.ToCharArray());
    }
}

/// <summary>
/// Feeds <paramref name="input"/> to the engine, runs layout analysis and
/// recognition, and returns the characters found.
/// </summary>
/// <exception cref="Exception">Thrown when recognition returns a non-zero status.</exception>
private static Emgu.CV.OCR.Tesseract.Character[] RunOcrPass(Emgu.CV.OCR.Tesseract ocr, Mat input)
{
    ocr.SetImage(input);
    ocr.AnalyseLayout();
    if (ocr.Recognize() != 0)
    {
        // BUG FIX: message typo corrected ("recognizer" -> "recognize").
        throw new Exception("Failed to recognize image");
    }
    return ocr.GetCharacters();
}
/// <summary>
/// Copies a Unity Texture2D's pixels into an Emgu CV output array.
/// Primary path pins the Color32 buffer and wraps it as an 8-bit 4-channel Mat;
/// on failure, falls back to JPEG-encoding the texture and decoding with Imdecode
/// for the formats Unity can encode.
/// </summary>
/// <param name="texture">The source texture.</param>
/// <param name="result">The destination array receiving the pixel data.</param>
/// <exception cref="Exception">Thrown when the fallback cannot handle the texture format.</exception>
public static void Texture2dToOutputArray(Texture2D texture, IOutputArray result)
{
    int width = texture.width;
    int height = texture.height;
    try
    {
        // NOTE(review): GetPixels32 yields RGBA-ordered data; the copy does not
        // reorder channels — confirm callers expect RGBA here.
        Color32[] colors = texture.GetPixels32();
        GCHandle handle = GCHandle.Alloc(colors, GCHandleType.Pinned);
        try
        {
            using (Mat rgba = new Mat(height, width, DepthType.Cv8U, 4, handle.AddrOfPinnedObject(), width * 4))
            {
                rgba.CopyTo(result);
            }
        }
        finally
        {
            // BUG FIX: Free was not in a finally block, so a throwing CopyTo
            // leaked the pinned handle (and kept the array pinned forever).
            handle.Free();
        }
    }
    catch (Exception excpt)
    {
        // Fallback: round-trip through JPEG for formats Unity can encode.
        if (texture.format == TextureFormat.ARGB32 ||
            texture.format == TextureFormat.RGBA32 ||
            texture.format == TextureFormat.RGB24 ||
            texture.format == TextureFormat.Alpha8)
        {
            byte[] jpgBytes = texture.EncodeToJPG();
            using (Mat tmp = new Mat())
            {
                CvInvoke.Imdecode(jpgBytes, LoadImageType.AnyColor, tmp);
                tmp.CopyTo(result);
            }
        }
        else
        {
            throw new Exception(String.Format("We are not able to handle Texture format of {0} type", texture.format), excpt);
        }
    }
}
/// <summary>
/// Captures frames from the default camera, runs pupil detection, and shows the
/// mirrored, annotated frames until 'c' is pressed or capture ends.
/// </summary>
/// <param name="args">Optional: args[0] overrides the default Haar cascade path.</param>
static void Main(string[] args)
{
    // Generalized: cascade path can be supplied on the command line; falls back
    // to the original hard-coded path, so existing invocations behave the same.
    string cascadePath = (args != null && args.Length > 0)
        ? args[0]
        : "C:\\Users\\Phoenix\\Documents\\Visual Studio 2013\\Projects\\testPupilDetection\\res\\haarcascade_frontalface_alt.xml";
    PupilDetector pupilDetector = new PupilDetector(cascadePath);

    // BUG FIX: capture and the working mats are now disposed; the original also
    // leaked the Mat returned by QueryFrame on every iteration.
    using (Mat frameCopy = new Mat())
    using (Capture capture = new Capture(0))
    {
        while (true)
        {
            using (Mat frame = capture.QueryFrame())
            {
                // BUG FIX: QueryFrame can return null/empty at end of stream;
                // the original dereferenced it before checking.
                if (frame == null || frame.IsEmpty)
                {
                    Console.WriteLine(" --(!) No captured frame -- Break!");
                    break;
                }
                frame.CopyTo(frameCopy);
            }

            // Mirror the image so the preview behaves like a mirror.
            CvInvoke.Flip(frameCopy, frameCopy, FlipType.Horizontal);

            Rectangle face = new Rectangle(new Point(0, 0), new Size(0, 0));
            Point leftPupil = new Point(0, 0);
            Point rightPupil = new Point(0, 0);
            pupilDetector.Detect(frameCopy, ref face, ref leftPupil, ref rightPupil);

            CvInvoke.Rectangle(frameCopy, face, new MCvScalar(1234));
            CvInvoke.Circle(frameCopy, leftPupil, 3, new MCvScalar(1234));
            CvInvoke.Circle(frameCopy, rightPupil, 3, new MCvScalar(1234));
            CvInvoke.Imshow("Pupil Detection", frameCopy);

            // ~10ms pump for the HighGui window; 'c' quits.
            int c = CvInvoke.WaitKey(10);
            if ((char)c == 'c')
            {
                break;
            }
        }
    }
}
/// <summary>
/// Convert raw data to bitmap
/// </summary>
/// <param name="scan0">The pointer to the raw data</param>
/// <param name="step">The step (stride in bytes of one row of the source data)</param>
/// <param name="size">The size of the image</param>
/// <param name="srcColorType">The source image color type</param>
/// <param name="numberOfChannels">The number of channels</param>
/// <param name="srcDepthType">The source image depth type</param>
/// <param name="tryDataSharing">Try to create Bitmap that shares the data with the image
/// (no copy; the returned Bitmap is only valid while scan0 stays alive)</param>
/// <returns>The Bitmap</returns>
public static Bitmap RawDataToBitmap(IntPtr scan0, int step, Size size, Type srcColorType, int numberOfChannels, Type srcDepthType, bool tryDataSharing = false)
{
    if (tryDataSharing)
    {
        // Data-sharing fast paths: wrap scan0 directly in a Bitmap without copying.
        // Only specific (color, depth) combinations map onto GDI+ pixel formats.
        if (srcColorType == typeof(Gray) && srcDepthType == typeof(Byte))
        {
            // Grayscale of Bytes -> 8bpp indexed with a grayscale palette.
            Bitmap bmpGray = new Bitmap(
                size.Width,
                size.Height,
                step,
                System.Drawing.Imaging.PixelFormat.Format8bppIndexed,
                scan0
                );
            bmpGray.Palette = GrayscalePalette;
            return(bmpGray);
        }
        // Mono in Linux doesn't support scan0 constructor with Format24bppRgb, use ToBitmap instead
        // See https://bugzilla.novell.com/show_bug.cgi?id=363431
        // TODO: check mono buzilla Bug 363431 to see when it will be fixed
        else if (
            Emgu.Util.Platform.OperationSystem == Emgu.Util.Platform.OS.Windows &&
            Emgu.Util.Platform.ClrType == Emgu.Util.Platform.Clr.DotNet &&
            srcColorType == typeof(Bgr) && srcDepthType == typeof(Byte) &&
            (step & 3) == 0)  // GDI+ requires the stride to be a multiple of 4 for sharing
        {
            // Bgr byte
            return(new Bitmap(
                size.Width,
                size.Height,
                step,
                System.Drawing.Imaging.PixelFormat.Format24bppRgb,
                scan0));
        }
        else if (srcColorType == typeof(Bgra) && srcDepthType == typeof(Byte))
        {
            // Bgra byte
            return(new Bitmap(
                size.Width,
                size.Height,
                step,
                System.Drawing.Imaging.PixelFormat.Format32bppArgb,
                scan0));
        }
        // PixelFormat.Format16bppGrayScale is not supported in .NET, so 16-bit
        // gray cannot be shared and falls through to the copying path below.
    }

    // Copying path: pick the GDI+ pixel format for the source color type.
    System.Drawing.Imaging.PixelFormat format;  //= System.Drawing.Imaging.PixelFormat.Undefined;
    if (srcColorType == typeof(Gray))   // if this is a gray scale image
    {
        format = System.Drawing.Imaging.PixelFormat.Format8bppIndexed;
    }
    else if (srcColorType == typeof(Bgra))   // if this is Bgra image
    {
        format = System.Drawing.Imaging.PixelFormat.Format32bppArgb;
    }
    else if (srcColorType == typeof(Bgr))   // if this is a Bgr Byte image
    {
        format = System.Drawing.Imaging.PixelFormat.Format24bppRgb;
    }
    else
    {
        // Any other color space: convert to Bgr first, then recurse once with
        // tryDataSharing=false (the converted mat owns its own storage).
        using (Mat m = new Mat(size.Height, size.Width, CvInvoke.GetDepthType(srcDepthType), numberOfChannels, scan0, step))
        using (Mat m2 = new Mat())
        {
            CvInvoke.CvtColor(m, m2, srcColorType, typeof(Bgr));
            return(RawDataToBitmap(m2.DataPointer, m2.Step, m2.Size, typeof(Bgr), 3, srcDepthType, false));
        }
    }

    // Allocate the target bitmap and wrap both buffers as mats to copy/convert.
    Bitmap bmp = new Bitmap(size.Width, size.Height, format);
    System.Drawing.Imaging.BitmapData data = bmp.LockBits(
        new Rectangle(Point.Empty, size),
        System.Drawing.Imaging.ImageLockMode.WriteOnly,
        format);
    using (Mat bmpMat = new Mat(size.Height, size.Width, CvEnum.DepthType.Cv8U, numberOfChannels, data.Scan0, data.Stride))
    using (Mat dataMat = new Mat(size.Height, size.Width, CvInvoke.GetDepthType(srcDepthType), numberOfChannels, scan0, step))
    {
        if (srcDepthType == typeof(Byte))
        {
            // Same depth: straight copy.
            dataMat.CopyTo(bmpMat);
        }
        else
        {
            // Non-byte depth: linearly rescale values into [0, 255] when they
            // fall outside it, then convert to 8-bit.
            double scale = 1.0, shift = 0.0;
            RangeF range = dataMat.GetValueRange();
            if (range.Max > 255.0 || range.Min < 0)
            {
                // Degenerate case: constant-valued image maps to scale 0 / shift Min.
                scale = range.Max.Equals(range.Min) ? 0.0 : 255.0 / (range.Max - range.Min);
                shift = scale.Equals(0) ? range.Min : -range.Min * scale;
            }
            CvInvoke.ConvertScaleAbs(dataMat, bmpMat, scale, shift);
        }
    }
    bmp.UnlockBits(data);
    // 8bpp indexed bitmaps need an explicit grayscale palette to render correctly.
    if (format == System.Drawing.Imaging.PixelFormat.Format8bppIndexed)
    {
        bmp.Palette = GrayscalePalette;
    }
    return(bmp);
}
/// <summary>
/// Runs Tesseract OCR on a grey-scaled copy of <paramref name="image"/> and groups
/// the recognized characters into whitespace-separated <c>ImageElement</c>s.
/// When <paramref name="wordlimit"/> is non-empty, instead returns only elements
/// whose consecutive characters exactly spell <paramref name="wordlimit"/>.
/// </summary>
/// <param name="_ocr">An initialized Tesseract engine.</param>
/// <param name="image">The input image; 1-channel input is handled.</param>
/// <param name="wordlimit">Optional exact word to search for; empty/null returns all words.</param>
/// <param name="casesensitive">Whether the wordlimit match is case sensitive.</param>
/// <returns>The matched elements (wordlimit mode) or all whitespace-separated elements.</returns>
/// <exception cref="Exception">Thrown when Tesseract reports a recognition failure.</exception>
public static ImageElement[] OcrImage2(Emgu.CV.OCR.Tesseract _ocr, Emgu.CV.Mat image, string wordlimit, bool casesensitive)
{
    using (var imageColor = new Mat())
    using (Mat imgGrey = new Mat())
    {
        // Normalize to 3 channels first so the grey conversion below is valid.
        if (image.NumberOfChannels == 1)
        {
            CvInvoke.CvtColor(image, imageColor, ColorConversion.Gray2Bgr);
        }
        else
        {
            image.CopyTo(imageColor);
        }
        // BUG FIX: convert from imageColor (guaranteed 3-channel); converting the
        // original `image` with Bgr2Gray throws when the input is single-channel.
        CvInvoke.CvtColor(imageColor, imgGrey, ColorConversion.Bgr2Gray);
        _ocr.SetImage(imgGrey);
        _ocr.AnalyseLayout();
        if (_ocr.Recognize() != 0)
        {
            // BUG FIX: message typo corrected ("recognizer" -> "recognize").
            throw new Exception("Failed to recognize image");
        }
        Emgu.CV.OCR.Tesseract.Character[] characters = _ocr.GetCharacters();

        var index = 0;
        var wordlimitindex = 0;                    // next char of wordlimit expected
        var chars = new List<Emgu.CV.OCR.Tesseract.Character>();      // current word
        var result = new List<ImageElement>();     // all words found
        var wordresult = new List<ImageElement>(); // wordlimit matches
        var wordchars = new List<Emgu.CV.OCR.Tesseract.Character>();  // partial wordlimit match
        Rectangle desktop = new Rectangle(0, 0, System.Windows.Forms.Screen.PrimaryScreen.Bounds.Width, System.Windows.Forms.Screen.PrimaryScreen.Bounds.Height);
        Rectangle imagerect = new Rectangle(0, 0, image.Width, image.Height);

        while (index < characters.Length)
        {
            if (!string.IsNullOrEmpty(wordlimit))
            {
                bool matchesNext =
                    characters[index].Text == wordlimit[wordlimitindex].ToString() ||
                    (!casesensitive && characters[index].Text.ToLower() == wordlimit[wordlimitindex].ToString().ToLower());
                if (matchesNext)
                {
                    wordchars.Add(characters[index]);
                    wordlimitindex++;
                    if (wordchars.Count == wordlimit.Length)
                    {
                        ImageElement res = BuildElement(wordchars);
                        wordresult.Add(res);
                        wordchars.Clear();
                        wordlimitindex = 0;
                        // Sanity checks: OCR geometry should fall inside both bounds.
                        if (!desktop.Contains(res.Rectangle))
                        {
                            Log.Error("Found element outside desktop !!!!!");
                        }
                        if (!imagerect.Contains(res.Rectangle))
                        {
                            // BUG FIX: this check is against the image bounds; the
                            // original copy-pasted the "outside desktop" message.
                            Log.Error("Found element outside image !!!!!");
                        }
                        Log.Debug("Found: " + res.Text + " at " + res.Rectangle.ToString());
                    }
                }
                else
                {
                    // Mismatch: restart the wordlimit match from scratch.
                    wordchars.Clear();
                    wordlimitindex = 0;
                }
            }

            // Whitespace ends the current word.
            if (characters[index].Text == " " || characters[index].Text == "\r" || characters[index].Text == "\n")
            {
                if (chars.Count > 0)
                {
                    result.Add(BuildElement(chars));
                }
                index++;
                chars.Clear();
                continue;
            }
            chars.Add(characters[index]);
            index++;
        }
        // Flush the final word if the text did not end with whitespace.
        if (chars.Count > 0)
        {
            result.Add(BuildElement(chars));
        }

        if (!string.IsNullOrEmpty(wordlimit))
        {
            return(wordresult.ToArray());
        }
        return(result.ToArray());
    }
}

/// <summary>
/// Builds an ImageElement whose text is the concatenation of the characters,
/// whose confidence is the first character's cost, and whose rectangle spans
/// from the first character's region to the last character's region.
/// </summary>
private static ImageElement BuildElement(List<Emgu.CV.OCR.Tesseract.Character> chars)
{
    var res = new ImageElement(Rectangle.Empty);
    chars.ForEach(x => res.Text += x.Text);
    res.Confidence = chars[0].Cost;
    Rectangle first = chars[0].Region;
    Rectangle last = chars[chars.Count - 1].Region;
    res.Rectangle = new Rectangle(
        first.X,
        first.Y,
        (last.X - first.X) + last.Width,
        (last.Y - first.Y) + last.Height);
    return res;
}
/// <summary>
/// Renders a CGImage into an Emgu CV output array, converting from the RGBA
/// render buffer according to <paramref name="modes"/>.
/// </summary>
/// <param name="cgImage">The source CoreGraphics image.</param>
/// <param name="mat">The destination array.</param>
/// <param name="modes">The imread-style conversion mode (default AnyColor -> BGRA).</param>
/// <exception cref="NotImplementedException">Thrown for the Reduced*4/8 and LoadGdal modes.</exception>
/// <exception cref="Exception">Thrown for any other unhandled mode.</exception>
internal static void ToArray(this CGImage cgImage, IOutputArray mat, ImreadModes modes = ImreadModes.AnyColor)
{
    Size sz = new Size((int)cgImage.Width, (int)cgImage.Height);
    using (Mat m = new Mat(sz, DepthType.Cv8U, 4))
    {
        // Draw the CGImage directly into m's buffer as 8-bit premultiplied RGBA.
        RectangleF rect = new RectangleF(PointF.Empty, new SizeF(cgImage.Width, cgImage.Height));
        using (CGColorSpace cspace = CGColorSpace.CreateDeviceRGB())
        using (CGBitmapContext context = new CGBitmapContext(
            m.DataPointer,
            sz.Width, sz.Height,
            8,
            sz.Width * 4,
            cspace,
            CGImageAlphaInfo.PremultipliedLast))
            context.DrawImage(rect, cgImage);

        if (modes == ImreadModes.Unchanged)
        {
            // BUG FIX: this was a standalone `if`, so after copying, execution
            // fell into the chain below and hit the final `else` throw.
            m.CopyTo(mat);
        }
        else if (modes == ImreadModes.Grayscale)
        {
            CvInvoke.CvtColor(m, mat, ColorConversion.Rgba2Gray);
        }
        else if (modes == ImreadModes.AnyColor)
        {
            CvInvoke.CvtColor(m, mat, ColorConversion.Rgba2Bgra);
        }
        else if (modes == ImreadModes.Color)
        {
            CvInvoke.CvtColor(m, mat, ColorConversion.Rgba2Bgr);
        }
        else if (modes == ImreadModes.ReducedColor2)
        {
            // Half-size colour: pyramid downsample once, then to BGR.
            using (Mat tmp = new Mat())
            {
                CvInvoke.PyrDown(m, tmp);
                CvInvoke.CvtColor(tmp, mat, ColorConversion.Rgba2Bgr);
            }
        }
        else if (modes == ImreadModes.ReducedGrayscale2)
        {
            // Half-size grayscale.
            using (Mat tmp = new Mat())
            {
                CvInvoke.PyrDown(m, tmp);
                CvInvoke.CvtColor(tmp, mat, ColorConversion.Rgba2Gray);
            }
        }
        else if (modes == ImreadModes.ReducedColor4
            || modes == ImreadModes.ReducedColor8
            || modes == ImreadModes.ReducedGrayscale4
            || modes == ImreadModes.ReducedGrayscale8
            || modes == ImreadModes.LoadGdal)
        {
            throw new NotImplementedException(String.Format("Conversion from PNG using mode {0} is not supported", modes));
        }
        else
        {
            throw new Exception(String.Format("ImreadModes of {0} is not implemented.", modes.ToString()));
        }
    }
}
/// <summary>
/// Workaround hook: when invoked with the exact magic message, allocates two
/// mats, copies one into the other and disposes both; any other message is
/// logged and ignored.
/// </summary>
/// <param name="message">Must equal "Magical Mystery Heap fix" for the mat exercise to run.</param>
public static void DoMatMagic(string message = null)
{
    if (message != "Magical Mystery Heap fix")
    {
        // Unexpected caller: record where the call came from and bail out.
        Log.InfoFormat("Magical Mystery Heap fix from '{0}'", message);
        return;
    }

    Log.InfoFormat("Mat test! {0}", message);
    using (var destination = new Mat())
    using (var source = new Mat())
    {
        source.CopyTo(destination);
    }
}
/// <summary>
/// Create a Mat from Bitmap
/// </summary>
/// <param name="bitmap">The Bitmap to be converted to Mat</param>
/// <param name="mat">The Mat converted from Bitmap (BGR/BGRA channel order)</param>
public static void ToMat(this Bitmap bitmap, Mat mat)
{
    Size size = bitmap.Size;

    switch (bitmap.PixelFormat)
    {
        case PixelFormat.Format32bppRgb:
            // 32bpp with unused alpha byte: copy only B, G, R into a 3-channel mat.
            // NOTE(review): MixChannels requires `mat` to already be allocated as
            // 3-channel 8-bit of the right size — confirm callers guarantee this.
            BitmapData data32bppRgb = bitmap.LockBits(
                new Rectangle(Point.Empty, size),
                ImageLockMode.ReadOnly,
                bitmap.PixelFormat);
            try
            {
                using (Mat tmp = new Mat(bitmap.Size, DepthType.Cv8U, 4, data32bppRgb.Scan0, data32bppRgb.Stride))
                {
                    CvInvoke.MixChannels(tmp, mat, new[] { 0, 0, 1, 1, 2, 2 });
                }
            }
            finally
            {
                bitmap.UnlockBits(data32bppRgb);
            }
            return;

        case PixelFormat.Format32bppArgb:
            // BGRA layout matches a 4-channel 8-bit mat: straight copy.
            BitmapData data32bppArgb = bitmap.LockBits(
                new Rectangle(Point.Empty, size),
                ImageLockMode.ReadOnly,
                bitmap.PixelFormat);
            try
            {
                using (Mat tmp = new Mat(bitmap.Size, DepthType.Cv8U, 4, data32bppArgb.Scan0, data32bppArgb.Stride))
                {
                    tmp.CopyTo(mat);
                }
            }
            finally
            {
                bitmap.UnlockBits(data32bppArgb);
            }
            return;

        case PixelFormat.Format8bppIndexed:
            // Palettized image: translate each index through per-channel lookup
            // tables built from the palette, then merge the planes to BGRA.
            Matrix<Byte> bTable, gTable, rTable, aTable;
            ColorPaletteToLookupTable(bitmap.Palette, out bTable, out gTable, out rTable, out aTable);
            BitmapData data8bppIndexed = bitmap.LockBits(
                new Rectangle(Point.Empty, size),
                ImageLockMode.ReadOnly,
                bitmap.PixelFormat);
            try
            {
                using (Mat indexValue = new Mat(bitmap.Size, DepthType.Cv8U, 1, data8bppIndexed.Scan0, data8bppIndexed.Stride))
                {
                    using (Mat b = new Mat())
                    using (Mat g = new Mat())
                    using (Mat r = new Mat())
                    using (Mat a = new Mat())
                    {
                        CvInvoke.LUT(indexValue, bTable, b);
                        CvInvoke.LUT(indexValue, gTable, g);
                        CvInvoke.LUT(indexValue, rTable, r);
                        CvInvoke.LUT(indexValue, aTable, a);
                        using (VectorOfMat mv = new VectorOfMat(new Mat[] { b, g, r, a }))
                        {
                            CvInvoke.Merge(mv, mat);
                        }
                    }
                }
            }
            finally
            {
                bTable.Dispose();
                gTable.Dispose();
                rTable.Dispose();
                aTable.Dispose();
                bitmap.UnlockBits(data8bppIndexed);
            }
            return;

        case PixelFormat.Format24bppRgb:
            // BGR layout matches a 3-channel 8-bit mat: straight copy.
            BitmapData data24bppRgb = bitmap.LockBits(
                new Rectangle(Point.Empty, size),
                ImageLockMode.ReadOnly,
                bitmap.PixelFormat);
            try
            {
                using (Mat tmp = new Mat(bitmap.Size, DepthType.Cv8U, 3, data24bppRgb.Scan0, data24bppRgb.Stride))
                {
                    tmp.CopyTo(mat);
                }
            }
            finally
            {
                bitmap.UnlockBits(data24bppRgb);
            }
            return;

        case PixelFormat.Format1bppIndexed:
            // 1bpp: unpack each row bit-by-bit into a byte matrix of 0/255.
            int rows = size.Height;
            int cols = size.Width;
            BitmapData data1bppIndexed = bitmap.LockBits(
                new Rectangle(Point.Empty, size),
                ImageLockMode.ReadOnly,
                bitmap.PixelFormat);

            int fullByteCount = cols >> 3;      // whole bytes per row
            int partialBitCount = cols & 7;     // leftover bits in the last byte

            int mask = 1 << 7;                  // test the most significant bit

            Int64 srcAddress = data1bppIndexed.Scan0.ToInt64();
            Byte[,] imagedata = new byte[rows, cols];

            Byte[] row = new byte[fullByteCount + (partialBitCount == 0 ? 0 : 1)];

            int v = 0;
            for (int i = 0; i < rows; i++, srcAddress += data1bppIndexed.Stride)
            {
                Marshal.Copy((IntPtr)srcAddress, row, 0, row.Length);

                for (int j = 0; j < cols; j++, v <<= 1)
                {
                    if ((j & 7) == 0)
                    {
                        //fetch the next byte
                        v = row[j >> 3];
                    }
                    imagedata[i, j] = (v & mask) == 0 ? (Byte)0 : (Byte)255;
                }
            }

            GCHandle imageDataHandle = GCHandle.Alloc(imagedata, GCHandleType.Pinned);
            try
            {
                using (Mat tmp = new Mat(new int[] { rows, cols }, DepthType.Cv8U, imageDataHandle.AddrOfPinnedObject()))
                {
                    tmp.CopyTo(mat);
                }
            }
            finally
            {
                imageDataHandle.Free();
                bitmap.UnlockBits(data1bppIndexed);
            }
            return;

        default:
            #region Handle other image type
            // Fallback for any other pixel format: read pixel-by-pixel via
            // GetPixel into a BGRA byte cube, then copy. Slow, but universal.
            Byte[,,] data = new byte[size.Height, size.Width, 4];

            for (int i = 0; i < size.Width; i++)
            {
                for (int j = 0; j < size.Height; j++)
                {
                    Color color = bitmap.GetPixel(i, j);
                    data[j, i, 0] = color.B;
                    data[j, i, 1] = color.G;
                    data[j, i, 2] = color.R;
                    data[j, i, 3] = color.A;
                }
            }

            GCHandle dataHandle = GCHandle.Alloc(data, GCHandleType.Pinned);
            try
            {
                using (Mat tmp = new Mat(new int[] { size.Height, size.Width, 4 }, DepthType.Cv8U, dataHandle.AddrOfPinnedObject()))
                {
                    tmp.CopyTo(mat);
                }
            }
            finally
            {
                dataHandle.Free();
            }
            return;
            #endregion
    }
}