/// <summary>
/// Pre-processes a captcha image with a sharpen/morphology/colour-filter
/// pipeline and runs it through OCR.
/// </summary>
/// <param name="img">Source captcha image.</param>
/// <returns>The text recognized by <see cref="OCR"/>.</returns>
private string reconhecerCaptcha(Image img)
{
    // Convert to a 24bpp copy so the AForge filters accept the pixel format;
    // dispose the intermediate bitmap (the original code leaked it).
    Bitmap imagem;
    using (Bitmap original = new Bitmap(img))
    {
        imagem = original.Clone(new Rectangle(0, 0, img.Width, img.Height),
                                System.Drawing.Imaging.PixelFormat.Format24bppRgb);
    }

    // Keep only near-white pixels; everything outside the range goes black.
    ColorFiltering cor = new ColorFiltering();
    cor.Blue = new AForge.IntRange(200, 255);
    cor.Red = new AForge.IntRange(200, 255);
    cor.Green = new AForge.IntRange(200, 255);

    Invert inverter = new Invert();
    Opening open = new Opening();
    GaussianSharpen gs = new GaussianSharpen();
    ContrastCorrection cc = new ContrastCorrection();

    // Drop blobs that are not tall enough to be captcha glyphs (noise).
    BlobsFiltering bc = new BlobsFiltering();
    bc.MinHeight = 10;

    // NOTE: the original also constructed Erosion, Dilatation and Closing
    // filters but never placed them in the sequence — removed as unused.
    FiltersSequence seq = new FiltersSequence(gs, inverter, open, inverter, bc,
                                              inverter, open, cc, cor, bc, inverter);
    pictureBox.Image = seq.Apply(imagem);

    string reconhecido = OCR((Bitmap)pictureBox.Image);
    return reconhecido;
}
/// <summary>
/// Manipulate (and resize) an image.
/// </summary>
/// <param name="source">Source image object handed to <c>Resize</c>.</param>
/// <param name="parameters">Adjustment settings; only values that are set are applied.</param>
/// <returns>The resized image with any requested filters applied.</returns>
public override Bitmap Manipulate(object source, ImageManipulationSettings parameters)
{
    var resized = Resize(source, parameters);

    // Build the adjustment pipeline from whichever settings were provided.
    var pipeline = new FiltersSequence();
    if (parameters.Gamma.HasValue)
        pipeline.Add(new GammaCorrection(parameters.Gamma.Value));
    if (parameters.Sharpen.HasValue)
        pipeline.Add(new Sharpen { Threshold = (int)parameters.Sharpen.Value });
    if (parameters.Hue.HasValue)
        pipeline.Add(new HueModifier((int)parameters.Hue.Value));
    if (parameters.Saturation.HasValue)
        pipeline.Add(new SaturationCorrection((float)parameters.Saturation.Value));
    if (parameters.Brightness.HasValue)
        pipeline.Add(new BrightnessCorrection((int)parameters.Brightness.Value));
    if (parameters.Contrast.HasValue)
        pipeline.Add(new ContrastCorrection((int)parameters.Contrast.Value));

    // Skip Apply entirely when no adjustment was requested.
    if (pipeline.Count == 0)
        return resized;
    return pipeline.Apply(resized);
}
/// <summary>
/// Apply corrections and crop a specific part of an image in order to perform OCR.
/// </summary>
/// <param name="SourceImage">Image to binarize.</param>
/// <param name="rect">Region of interest to crop before filtering.</param>
/// <returns>A binarized (black/white, inverted) bitmap of the cropped region.</returns>
public static Bitmap PreprocessOCR(Bitmap SourceImage, Rectangle rect)
{
    try
    {
        // binarization filtering sequence:
        // crop -> median denoise -> contrast -> blur -> grayscale -> threshold -> invert
        FiltersSequence filter = new FiltersSequence(
            new Crop(rect),
            new Median(),
            new ContrastCorrection(),
            new AForge.Imaging.Filters.Blur(),
            new GrayscaleBT709(),
            new Threshold(),
            new Invert()
        );

        // load image
        Bitmap image = SourceImage;

        // NOTE(review): the result of this Clone is discarded, so it has no
        // effect on 'image'; kept for behavioral compatibility — confirm
        // whether the intent was to reassign the formatted clone to 'image'.
        AForge.Imaging.Image.Clone(image, image.PixelFormat);

        // lock the source image for read-only access
        BitmapData sourceData = image.LockBits(
            new Rectangle(0, 0, image.Width, image.Height),
            ImageLockMode.ReadOnly, image.PixelFormat);

        // apply filters and binarize the image
        UnmanagedImage binarySource = filter.Apply(new UnmanagedImage(sourceData));
        Bitmap binarizedImage = binarySource.ToManagedImage();

        // unlock source image
        image.UnlockBits(sourceData);

        // dispose temporary binary source image
        binarySource.Dispose();

        return binarizedImage;
    }
    catch (Exception)
    {
        // Rethrow preserving the stack trace ('throw ex;' would reset it).
        throw;
    }
}//preprocess
/// <summary>
/// Converts the picture box image to a black-and-white (thresholded) bitmap
/// and displays the result in the same picture box.
/// </summary>
private void thresholding() // Threshold Image
{
    // Grab the currently displayed image.
    var bmp = new Bitmap(pictureBox1.Image);

    // Pipeline: BT.601-weighted grayscale followed by a fixed 128 threshold.
    var filter = new AForge.Imaging.Filters.FiltersSequence();
    filter.Add(new Grayscale(0.299, 0.587, 0.114));
    filter.Add(new Threshold(128));

    // Run the pipeline and show the binarized result.
    pictureBox1.Image = filter.Apply(bmp);
}
/// <summary>
/// Recognizes a single button from the given region of the source image.
/// </summary>
/// <param name="source">Image containing the button.</param>
/// <param name="rect">Button bounds within the source image.</param>
/// <param name="filePath">Unused; kept for interface compatibility (was used to dump unrecognized buttons).</param>
/// <param name="id">Unused; kept for interface compatibility.</param>
/// <returns>A <see cref="Button"/> with its <c>Rect</c> set and its <c>Tip</c> scanned.</returns>
public Button RecognizeOneButton(Bitmap source, Rectangle rect, string filePath, int id)
{
    var card = new Button(source); //Create Card Object
    card.Rect = rect;

    // Grayscale + Otsu binarization before scanning the tip.
    // (The original called seq.Clear() on a freshly created empty sequence — a no-op, removed.)
    var seq = new FiltersSequence();
    seq.Add(Grayscale.CommonAlgorithms.BT709);
    seq.Add(new OtsuThreshold());
    source = seq.Apply(source);

    card.Tip = ScanButtonTip(source); //Scan tip of the button
    return card;
}
// TODO: bet recognition quality is still poor — needs tuning.
/// <summary>
/// Runs Tesseract OCR over a bet-amount region and returns the raw text.
/// </summary>
/// <param name="source">Image of the bet area.</param>
/// <returns>The recognized text, or "" when processing failed.</returns>
public string RecognizeBet(Bitmap source)
{
    try
    {
        // Grayscale using the green channel only. FiltersSequence.Apply returns
        // a new bitmap, so 'source' is left untouched — the original's
        // source.Clone() was immediately overwritten (leaked) and is removed.
        var seq = new FiltersSequence();
        seq.Add(new Grayscale(0, 1, 0));
        Bitmap temp = seq.Apply(source);

        using (var page = _engine.Process(temp))
        {
            var text = page.GetText();
            var conf = page.GetMeanConfidence(); // currently unused; kept for debugging
            return text;
        }
    }
    catch (Exception e)
    {
        Trace.TraceError(e.ToString());
        Ex.Report(e);
        return "";
    }
}
/// <summary>
/// Detects and recognizes cards from source image.
/// </summary>
/// <param name="source">Source image to be scanned.</param>
/// <param name="filePath">Unused; kept for interface compatibility (was used to dump unrecognized cards).</param>
/// <param name="id">Unused; kept for interface compatibility.</param>
/// <param name="minSize">Minimum blob width/height (pixels) for a region to count as a card.</param>
/// <param name="suitRect">Region within a card image that contains the suit.</param>
/// <param name="rankRect">Region within a card image that contains the rank.</param>
/// <returns>Recognized cards, sorted by rank.</returns>
public CardCollection Recognize(Bitmap source, string filePath, int id, int minSize,
                                Rectangle suitRect, Rectangle rankRect)
{
    CardCollection collection = new CardCollection(); //Collection that will hold cards

    // Grayscale + Otsu binarization. Apply returns a new bitmap, so 'source'
    // is never modified — the original's source.Clone() was immediately
    // overwritten (leaked) and has been removed.
    FiltersSequence seq = new FiltersSequence();
    seq.Add(Grayscale.CommonAlgorithms.BT709); //First add grayscaling filter
    seq.Add(new OtsuThreshold());              //Then add binarization (thresholding) filter
    Bitmap temp = seq.Apply(source);

    // Extract blobs whose width and height are at least minSize.
    BlobCounter extractor = new BlobCounter();
    extractor.FilterBlobs = true;
    extractor.MinWidth = extractor.MinHeight = minSize; //TODO card size
    extractor.ProcessImage(temp);

    foreach (Blob blob in extractor.GetObjectsInformation())
    {
        var cardImg = source.Clone(blob.Rectangle, PixelFormat.DontCare);
        Card card = new Card(cardImg); //Create Card Object

        // Scan the colour on the raw suit part, then binarize it and scan the suit shape.
        Bitmap suitBmp = card.GetPart(suitRect);
        char color = ScanColor(suitBmp);
        seq.Clear();
        seq.Add(Grayscale.CommonAlgorithms.BT709);
        seq.Add(new OtsuThreshold());
        suitBmp = seq.Apply(suitBmp);
        card.Suit = ScanSuit(suitBmp, color); //Scan suit of face card

        // Binarize the rank part and scan the rank.
        Bitmap rankBmp = card.GetPart(rankRect);
        seq.Clear();
        seq.Add(Grayscale.CommonAlgorithms.BT709);
        seq.Add(new OtsuThreshold());
        rankBmp = seq.Apply(rankBmp);
        card.Rank = ScanRank(rankBmp); //Scan rank

        // Keep only fully recognized cards.
        if (card.Rank != Rank.NOT_RECOGNIZED && card.Suit != Suit.NOT_RECOGNIZED)
            collection.Add(card); //Add card to collection
    }

    collection.SortByRank();
    return collection;
}
/// <summary>
/// Webcam frame handler: maintains an adaptive background frame, extracts
/// moving-object edges from the difference with the current frame, overlays
/// them on the red channel, and forwards the tracked cursor position.
/// </summary>
/// <param name="source">Event sender (the webcam wrapper).</param>
/// <param name="e">Carries the captured frame in <c>e.WebCamImage</c>.</param>
void webcam_ImageCaptured(object source, WebcamEventArgs e)
{
    _FrameImage.Image = e.WebCamImage;
    // NOTE(review): MaskImage is created but never used in this handler.
    Bitmap MaskImage = new Bitmap(640, 480);

    // Skip the first Frames2Ignore frames, then seed the background frame.
    if (backgroundFrame == null)
    {
        Frames2Ignore--;
        if (Frames2Ignore == 0)
        {
            backgroundFrame = (Bitmap)e.WebCamImage;
            backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
        }
        return;
    }

    // Save current image
    CurrentFrame = (Bitmap)e.WebCamImage;
    CurrentFrameGray = grayscaleFilter.Apply(CurrentFrame);

    /*
    // create filter
    IFilter pixellateFilter = new Pixellate();
    // apply the filter
    backgroundFrame = pixellateFilter.Apply(backgroundFrame);
    backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
    CurrentFrame = pixellateFilter.Apply(CurrentFrame);
    CurrentFrameGray = grayscaleFilter.Apply(CurrentFrame);
    */

    MoveTowards moveTowardsFilter = new MoveTowards();
    moveTowardsFilter.OverlayImage = CurrentFrameGray;
    // move background towards current frame
    Bitmap tmp = moveTowardsFilter.Apply(backgroundFrameGray);
    // dispose old background
    backgroundFrame.Dispose();
    backgroundFrame = tmp;

    // create processing filters sequence: difference -> threshold -> opening
    // (noise removal) -> edges -> difference edge detector
    FiltersSequence processingFilter = new FiltersSequence();
    processingFilter.Add(new Difference(backgroundFrameGray));
    processingFilter.Add(new Threshold(15));
    processingFilter.Add(new Opening());
    processingFilter.Add(new Edges());
    processingFilter.Add(new DifferenceEdgeDetector());

    // apply the filter
    Bitmap tmp1 = processingFilter.Apply(CurrentFrameGray);

    // extract red channel from the original image
    IFilter extrachChannel = new ExtractChannel(RGB.R);
    Bitmap redChannel = extrachChannel.Apply(backgroundFrame);

    // merge red channel with moving object borders
    Merge mergeFilter = new Merge();
    mergeFilter.OverlayImage = tmp1;
    Bitmap tmp2 = mergeFilter.Apply(redChannel);

    // replace red channel in the original image
    ReplaceChannel replaceChannel = new ReplaceChannel(RGB.R, tmp2);
    replaceChannel.ChannelImage = tmp2;
    Bitmap tmp3 = replaceChannel.Apply(backgroundFrame);

    // Let the state machine consume the motion mask and update cursor state.
    StateMgr.Execute(tmp1);
    if (eChangedCursorEvent != null && StateMgr.Val == Webcam.ValidLocation.TRUE)
    {
        //Console.WriteLine("X={0} , Y={1}", StateMgr.CurrState.CurrX, StateMgr.CurrState.CurrY);
        eChangedCursorEvent(StateMgr.CurrState.CurrX, StateMgr.CurrState.CurrY);
        // Paint a 9x9 blue square at the tracked position for visual feedback.
        for (int i = -4; i <= 4; i++)
            for (int j = -4; j <= 4; j++)
                tmp3.SetPixel(StateMgr.CurrState.CurrX + i, StateMgr.CurrState.CurrY + j, Color.Blue);
        //eChangedCursorEvent(StateMgr.CurrState.CurrX, 100);
        //eChangedCursorEvent(100, StateMgr.CurrState.CurrY);
    }

    _CaptureImage.Image = tmp1;
    _CaptureImage2.Image = tmp3;

    // Reset the background to the raw captured frame for the next iteration.
    backgroundFrame = (Bitmap)e.WebCamImage;
    backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
}
/// <summary>
/// Detects and recognizes cards from source image.
/// </summary>
/// <param name="source">Source image to be scanned.</param>
/// <returns>Recognized Cards</returns>
public List<Card> Recognize(Bitmap source)
{
    List<Card> collection = new List<Card>();

    // NOTE(review): this clone is immediately overwritten by seq.Apply below,
    // so it is never used (and leaks) — confirm whether it can be removed.
    Bitmap temp = source.Clone(source.PixelFormat) as Bitmap; //Clone image to keep original image

    FiltersSequence seq = new FiltersSequence();
    seq.Add(Grayscale.CommonAlgorithms.BT709); //First add grayScaling filter
    seq.Add(new OtsuThreshold()); //Then add binarization(thresholding) filter
    temp = seq.Apply(source); // Apply filters on source image

    //Extract blobs from image whose width and height are between 150 and 350 px
    BlobCounter extractor = new BlobCounter();
    extractor.FilterBlobs = true;
    extractor.MinWidth = extractor.MinHeight = 150;
    extractor.MaxWidth = extractor.MaxHeight = 350;
    extractor.ProcessImage(temp);

    //Will be used transform(extract) cards on source image
    QuadrilateralTransformation quadTransformer = new QuadrilateralTransformation();
    //Will be used resize(scaling) cards to the normalized CardWidth x CardHeight
    ResizeBilinear resizer = new ResizeBilinear(CardWidth, CardHeight);

    foreach (Blob blob in extractor.GetObjectsInformation())
    {
        //Get Edge points of card
        List<IntPoint> edgePoints = extractor.GetBlobsEdgePoints(blob);
        //Calculate/Find corners of card on source image from edge points
        List<IntPoint> corners = PointsCloud.FindQuadrilateralCorners(edgePoints);

        quadTransformer.SourceQuadrilateral = corners; //Set corners for transforming card
        quadTransformer.AutomaticSizeCalculaton = true;
        Bitmap cardImg = quadTransformer.Apply(source); //Extract(transform) card image

        if (cardImg.Width > cardImg.Height) //If card is positioned horizontally
        {
            // NOTE(review): System.Drawing.Bitmap and WPF WriteableBitmap are
            // unrelated types; these casts rely on project-specific conversion
            // operators or aliases — confirm they exist, otherwise this throws.
            WriteableBitmap wbmp = (WriteableBitmap)cardImg;
            wbmp = wbmp.Rotate(90);
            cardImg = (Bitmap)wbmp; //Rotate
        }

        cardImg = resizer.Apply(cardImg); //Normalize card size

        Card card = new Card(cardImg, corners.ToArray()); //Create Card Object
        char color = ScanColor(card.GetTopLeftPart()); //Scan color
        bool faceCard = IsFaceCard(cardImg); //Determine type of card(face or not)

        if (!faceCard)
        {
            card.Suit = ScanSuit(cardImg, color); //Scan Suit of non-face card
            card.Rank = ScanRank(cardImg); //Scan Rank of non-face card
        }
        else
        {
            // Face cards: binarize the top-left corner with an adaptive
            // (Bradley) threshold and filter out blobs too small/large to be glyphs.
            Bitmap topLeft = card.GetTopLeftPart();
            seq = null;
            seq = new FiltersSequence();
            seq.Add(Grayscale.CommonAlgorithms.BT709);
            seq.Add(new BradleyLocalThresholding());
            topLeft = seq.Apply(topLeft);
            BlobsFiltering bFilter = new BlobsFiltering(5, 5, 150, 150);
            bFilter.ApplyInPlace(topLeft); //Filter blobs that can not be a suit
            //topLeft.Save("topleft.bmp", ImageFormat.Bmp);
            card.Suit = ScanFaceSuit(topLeft, color); //Scan suit of face card
            card.Rank = ScanFaceRank(topLeft); //Scan rank of face card
        }
        collection.Add(card); //Add card to collection
    }
    return collection;
}
/// <summary>
/// Older webcam frame handler (kept for reference): adapts a background frame,
/// builds a motion mask, then isolates the largest moving blob and displays it.
/// </summary>
/// <param name="source">Event sender (the webcam wrapper).</param>
/// <param name="e">Carries the captured frame in <c>e.WebCamImage</c>.</param>
void webcam_ImageCaptured_Back(object source, WebcamEventArgs e)
{
    _FrameImage.Image = e.WebCamImage;
    // NOTE(review): MaskImage is created but never used in this handler.
    Bitmap MaskImage = new Bitmap(640, 480);

    // Skip the first Frames2Ignore frames, then seed the background frame.
    if (backgroundFrame == null)
    {
        Frames2Ignore--;
        if (Frames2Ignore == 0)
        {
            backgroundFrame = (Bitmap)e.WebCamImage;
            backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
        }
        return;
    }

    // Save current image
    CurrentFrame = (Bitmap)e.WebCamImage;
    CurrentFrameGray = grayscaleFilter.Apply(CurrentFrame);

    /*
    // create filter
    IFilter pixellateFilter = new Pixellate();
    // apply the filter
    backgroundFrame = pixellateFilter.Apply(backgroundFrame);
    backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
    CurrentFrame = pixellateFilter.Apply(CurrentFrame);
    CurrentFrameGray = grayscaleFilter.Apply(CurrentFrame);
    */

    MoveTowards moveTowardsFilter = new MoveTowards();
    moveTowardsFilter.OverlayImage = CurrentFrameGray;
    // move background towards current frame
    Bitmap tmp = moveTowardsFilter.Apply(backgroundFrameGray);
    // dispose old background
    backgroundFrame.Dispose();
    backgroundFrame = tmp;

    // create processing filters sequence: difference -> threshold -> opening
    // (noise removal) -> edges -> difference edge detector
    FiltersSequence processingFilter = new FiltersSequence();
    processingFilter.Add(new Difference(backgroundFrameGray));
    processingFilter.Add(new Threshold(15));
    processingFilter.Add(new Opening());
    processingFilter.Add(new Edges());
    processingFilter.Add(new DifferenceEdgeDetector());

    // apply the filter
    Bitmap tmp1 = processingFilter.Apply(CurrentFrameGray);

    // extract red channel from the original image
    IFilter extrachChannel = new ExtractChannel(RGB.R);
    Bitmap redChannel = extrachChannel.Apply(backgroundFrame);
    // merge red channel with moving object borders
    Merge mergeFilter = new Merge();
    mergeFilter.OverlayImage = tmp1;
    Bitmap tmp2 = mergeFilter.Apply(redChannel);
    // replace red channel in the original image
    ReplaceChannel replaceChannel = new ReplaceChannel(RGB.R,tmp2);
    replaceChannel.ChannelImage = tmp2;
    Bitmap tmp3 = replaceChannel.Apply(backgroundFrame);

    // Label connected components of at least 75x75 px for display (tmp4).
    ConnectedComponentsLabeling CCL = new ConnectedComponentsLabeling();
    CCL.MinWidth = 75;
    CCL.MinHeight = 75;
    CCL.CoupledSizeFiltering = true;
    Bitmap tmp4 = CCL.Apply(tmp1);

    // Extract blobs (>= 75x75) from the motion mask.
    blobCounter.MinHeight = 75;
    blobCounter.MinWidth = 75;
    blobCounter.CoupledSizeFiltering = true;
    blobCounter.ProcessImage(tmp1);
    Blob[] blobs = blobCounter.GetObjects(tmp1);

    int maxSize = 0;
    Blob maxObject = new Blob(0, new Rectangle(0, 0, 0, 0));
    // find biggest blob
    Bitmap Masked = new Bitmap(320, 240);
    if (blobs != null)
    {
        foreach (Blob blob in blobs)
        {
            int blobSize = blob.Rectangle.Width * blob.Rectangle.Height;
            if (blobSize > maxSize)
            {
                maxSize = blobSize;
                maxObject = blob;
            }
        }
        // Copy the biggest blob's pixels into the mask bitmap (Masked itself
        // is not used further — presumably for debugging).
        for (int i = maxObject.Rectangle.Left; i < maxObject.Rectangle.Right; i++)
        {
            for (int j = maxObject.Rectangle.Top; j < maxObject.Rectangle.Bottom; j++)
            {
                Masked.SetPixel(i, j, maxObject.Image.GetPixel(i - maxObject.Rectangle.Left, j - maxObject.Rectangle.Top));
            }
        }
    }

    // NOTE(review): several large commented-out experiments followed here in the
    // original source (horizontal/vertical intensity histograms of the motion
    // mask, and a histogram-based cursor-position variant with distance gating
    // on LastX/LastY). Removed as dead code — recover from version control if needed.

    _CaptureImage.Image = maxObject.Image;
    //_CaptureImage.Image = tmp3;
    _CaptureImage2.Image = tmp4;
    backgroundFrame = (Bitmap)e.WebCamImage;
    backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
}
/// <summary>
/// Based off www.codeproject.com/Articles/10248/Motion-Detection-Algorithms
/// Produces a binary motion mask: moves the previous frame towards the current
/// one, then thresholds the pixellated grayscale difference of the two.
/// </summary>
/// <param name="prevImage">Previous frame; mutated in place towards <paramref name="image"/>.</param>
/// <param name="image">Current frame.</param>
/// <returns>Thresholded (binary) difference image.</returns>
private Bitmap ThresholdImage(Bitmap prevImage, Bitmap image)
{
    // Adapt the stored previous frame towards the current frame.
    new MoveTowards(image).ApplyInPlace(prevImage);

    // Difference -> pixellate -> BT.709 grayscale -> binary threshold at 45.
    var pipeline = new FiltersSequence(
        new Difference(prevImage),
        new Pixellate(),
        new Grayscale(0.2125, 0.7154, 0.0721),
        new Threshold(45));

    return pipeline.Apply(image);
}
/// <summary>
/// Fetches a live-view frame from the camera, publishes a star-window crop and
/// a full preview, applies optional brightness/edge-detection filters, draws
/// the grid overlay and an optional zoom crop, and publishes the final bitmap.
/// </summary>
public override void GetLiveImage()
{
    // Frame grab can fail transiently; just skip this tick.
    try
    {
        LiveViewData = LiveViewManager.GetLiveViewImage(CameraDevice);
    }
    catch (Exception)
    {
        return;
    }
    if (LiveViewData == null || LiveViewData.ImageData == null)
        return;

    // Wrap the JPEG payload (starting at ImageDataPosition) in a stream.
    // NOTE(review): the stream is not disposed; GDI+ requires the stream to
    // stay alive for the Bitmap's lifetime, so do not add a 'using' here
    // without restructuring.
    MemoryStream stream = new MemoryStream(LiveViewData.ImageData,
        LiveViewData.ImageDataPosition,
        LiveViewData.ImageData.Length - LiveViewData.ImageDataPosition);

    using (var bmp = new Bitmap(stream))
    {
        Bitmap res = bmp;
        var preview = BitmapFactory.ConvertToPbgra32Format(BitmapSourceConvert.ToBitmapSource(res));

        // Crop a StarWindowSize square around the central point for star focusing.
        var zoow = preview.Crop((int)(CentralPoint.X - (StarWindowSize / 2)),
                                (int)(CentralPoint.Y - (StarWindowSize / 2)),
                                StarWindowSize, StarWindowSize);
        CalculateStarSize(zoow);
        zoow.Freeze();
        StarWindow = zoow;

        preview.Freeze();
        Preview = preview;

        // Optional brightness adjustment.
        if (Brightness != 0)
        {
            BrightnessCorrection filter = new BrightnessCorrection(Brightness);
            res = filter.Apply(res);
        }

        // Optional edge-detection view (grayscale + homogeneity edge detector).
        if (EdgeDetection)
        {
            var filter = new FiltersSequence(
                Grayscale.CommonAlgorithms.BT709,
                new HomogenityEdgeDetector()
            );
            res = filter.Apply(res);
        }

        var _bitmap = BitmapFactory.ConvertToPbgra32Format(BitmapSourceConvert.ToBitmapSource(res));
        DrawGrid(_bitmap);

        // Digital zoom: crop the central 1/ZoomFactor region.
        if (ZoomFactor > 1)
        {
            double d = _bitmap.PixelWidth/(double) ZoomFactor;
            double h = _bitmap.PixelHeight/(double) ZoomFactor;
            _bitmap = _bitmap.Crop((int) (CentralPoint.X - (d/2)),
                                   (int) (CentralPoint.Y - (h/2)),
                                   (int) d, (int) h);
        }

        _bitmap.Freeze();
        Bitmap = _bitmap;
    }
}
/// <summary>
/// Heuristically determines that the given region does NOT show a number by
/// template-matching it against the "player empty" and "player missing" templates.
/// </summary>
/// <param name="source">Image region to test.</param>
/// <returns>true when either template matches with >= 0.9 similarity.</returns>
private bool IsNotNumber(Bitmap source)
{
    // Grayscale the source once; each template is grayscaled on demand.
    // (The original's source.Clone() was immediately overwritten — leaked —
    // and its matching logic was duplicated verbatim for both templates.)
    var seq = new FiltersSequence();
    seq.Add(Grayscale.CommonAlgorithms.BT709);
    Bitmap temp = seq.Apply(source);

    // ExhaustiveTemplateMatching requires the template to be the smaller
    // image, so swap the arguments when the source region is smaller.
    bool Matches(Bitmap template)
    {
        var templ = seq.Apply(template);
        var matcher = new ExhaustiveTemplateMatching(0.9f);
        TemplateMatch[] found =
            (temp.Width < template.Width || temp.Height < template.Height)
                ? matcher.ProcessImage(templ, temp)
                : matcher.ProcessImage(temp, templ);
        return found.Length > 0;
    }

    // Same result as the original OR of both checks; short-circuits when the
    // first template already matched.
    return Matches(Resources.PlayerEmpty) || Matches(Resources.PlayerMissing);
}
/// <summary>
/// Processes a live-view frame: shows the post-capture preview while it is
/// active, then runs motion detection, periodic histograms, under/over-exposure
/// highlighting, brightness/edge/B&amp;W filters, grid/focus overlays, rotation,
/// flipping and cropping, and finally publishes the frame (and its JPEG).
/// </summary>
/// <param name="bmp">The captured live-view frame; exposure highlighting mutates it in place.</param>
public void ProcessLiveView(Bitmap bmp)
{
    // While the post-capture preview window is active, show the captured photo
    // instead of the live frame.
    if (PreviewTime > 0 && (DateTime.Now - _photoCapturedTime).TotalSeconds <= PreviewTime)
    {
        var bitmap = ServiceProvider.Settings.SelectedBitmap.DisplayImage.Clone();
        // flip image only if the preview is not flipped
        if (FlipImage && !ServiceProvider.Settings.FlipPreview)
            bitmap = bitmap.Flip(WriteableBitmapExtensions.FlipMode.Vertical);
        bitmap.Freeze();
        ServiceProvider.DeviceManager.LiveViewImage[CameraDevice] = SaveJpeg(bitmap);
        Bitmap = bitmap;
        return;
    }

    if (DetectMotion)
    {
        ProcessMotionDetection(bmp);
    }

    // Recompute histograms only every DesiredFrameRate-th frame (they are costly).
    if (_totalframes%DesiredFrameRate == 0 && ShowHistogram)
    {
        ImageStatisticsHSL hslStatistics = new ImageStatisticsHSL(bmp);
        LuminanceHistogramPoints = ConvertToPointCollection(hslStatistics.Luminance.Values);
        ImageStatistics statistics = new ImageStatistics(bmp);
        RedColorHistogramPoints = ConvertToPointCollection(statistics.Red.Values);
        GreenColorHistogramPoints = ConvertToPointCollection(statistics.Green.Values);
        BlueColorHistogramPoints = ConvertToPointCollection(statistics.Blue.Values);
    }

    // Paint near-black pixels blue to flag under-exposure (mutates bmp).
    if (HighlightUnderExp)
    {
        ColorFiltering filtering = new ColorFiltering();
        filtering.Blue = new IntRange(0, 5);
        filtering.Red = new IntRange(0, 5);
        filtering.Green = new IntRange(0, 5);
        filtering.FillOutsideRange = false;
        filtering.FillColor = new RGB(Color.Blue);
        filtering.ApplyInPlace(bmp);
    }

    // Paint near-white pixels red to flag over-exposure (mutates bmp).
    if (HighlightOverExp)
    {
        ColorFiltering filtering = new ColorFiltering();
        filtering.Blue = new IntRange(250, 255);
        filtering.Red = new IntRange(250, 255);
        filtering.Green = new IntRange(250, 255);
        filtering.FillOutsideRange = false;
        filtering.FillColor = new RGB(Color.Red);
        filtering.ApplyInPlace(bmp);
    }

    var preview = BitmapFactory.ConvertToPbgra32Format(BitmapSourceConvert.ToBitmapSource(bmp));
    DrawFocusPoint(preview, true);

    if (Brightness != 0)
    {
        BrightnessCorrection filter = new BrightnessCorrection(Brightness);
        bmp = filter.Apply(bmp);
    }

    Bitmap newbmp = bmp;
    if (EdgeDetection)
    {
        var filter = new FiltersSequence(
            Grayscale.CommonAlgorithms.BT709,
            new HomogenityEdgeDetector()
        );
        newbmp = filter.Apply(bmp);
    }

    WriteableBitmap writeableBitmap;
    if (BlackAndWhite)
    {
        // BT.601-weighted grayscale for the black-and-white view.
        Grayscale filter = new Grayscale(0.299, 0.587, 0.114);
        writeableBitmap = BitmapFactory.ConvertToPbgra32Format(
            BitmapSourceConvert.ToBitmapSource(filter.Apply(newbmp)));
    }
    else
    {
        writeableBitmap = BitmapFactory.ConvertToPbgra32Format(
            BitmapSourceConvert.ToBitmapSource(newbmp));
    }

    DrawGrid(writeableBitmap);

    // Map the UI rotation selection to degrees; index 4 follows the camera's
    // own reported rotation.
    switch (RotationIndex)
    {
        case 0:
            Rotation = 0;
            break;
        case 1:
            Rotation = 90;
            break;
        case 2:
            Rotation = 180;
            break;
        case 3:
            Rotation = 270;
            break;
        case 4:
            Rotation = LiveViewData.Rotation;
            break;
    }

    if (CameraDevice.LiveViewImageZoomRatio.Value == "All")
    {
        preview.Freeze();
        Preview = preview;
        if (ShowFocusRect)
            DrawFocusPoint(writeableBitmap);
    }

    if (FlipImage)
    {
        writeableBitmap = writeableBitmap.Flip(WriteableBitmapExtensions.FlipMode.Vertical);
    }

    // Symmetric crop: CropRatio is a percentage of half the frame on each side.
    if (CropRatio > 0)
    {
        CropOffsetX = (int) ((writeableBitmap.PixelWidth/2.0)*CropRatio/100);
        CropOffsetY = (int) ((writeableBitmap.PixelHeight/2.0)*CropRatio/100);
        writeableBitmap = writeableBitmap.Crop(CropOffsetX, CropOffsetY,
            writeableBitmap.PixelWidth - (2*CropOffsetX),
            writeableBitmap.PixelHeight - (2*CropOffsetY));
    }

    writeableBitmap.Freeze();
    Bitmap = writeableBitmap;

    //if (_totalframes%DesiredWebFrameRate == 0)
    ServiceProvider.DeviceManager.LiveViewImage[CameraDevice] = SaveJpeg(writeableBitmap);
}
/// <summary>
/// Grabs a live-view frame, binarizes it with a morphological pipeline, counts
/// line objects via connected-components labeling, publishes the labeled image,
/// and derives the magnification for the given sensor size.
/// </summary>
/// <param name="sensor">Sensor scale factor used in the magnification formula.</param>
public void MagiCalc(double sensor)
{
    // get image from LiveView
    LiveViewData = SelectedCameraDevice.GetLiveViewImage();
    MemoryStream stream = new MemoryStream(
        LiveViewData.ImageData,
        LiveViewData.ImageDataPosition,
        LiveViewData.ImageData.Length - LiveViewData.ImageDataPosition);

    using (var tempImage = new Bitmap(stream))
    {
        Bitmap bmp = tempImage;
        var preview = BitmapFactory.ConvertToPbgra32Format(
            BitmapSourceConvert.ToBitmapSource(bmp));

        // 11x11 all-ones structuring element for the morphological filters.
        short[,] se = new short[11, 11];
        for (int row = 0; row < 11; row++)
            for (int col = 0; col < 11; col++)
                se[row, col] = 1;

        // Binarize: grayscale -> fixed threshold -> closing -> 3x opening -> invert.
        var filter = new FiltersSequence(
            Grayscale.CommonAlgorithms.BT709,
            new Threshold(100),
            new Closing(se),
            new Opening(se),
            new Opening(se),
            new Opening(se),
            new Invert()
        );
        Bitmap binaryimage = filter.Apply(bmp);

        // Pass ConnectedComponentsLabeling filter and publish the labeled view.
        ConnectedComponentsLabeling filter2 = new ConnectedComponentsLabeling();
        WriteableBitmap writeableBitmap = BitmapFactory.ConvertToPbgra32Format(
            BitmapSourceConvert.ToBitmapSource(filter2.Apply(binaryimage)));

        // Check objects count
        int objectCount = filter2.ObjectCount;
        StepperManager.Instance.LinesNumber = objectCount;

        writeableBitmap.Freeze();
        ColorBitmapSource = writeableBitmap;

        // Magnification: 59 * sensor divided by the number of detected lines,
        // rounded "up" (away from zero) to one decimal place.
        double magnification = Math.Round(59 * sensor / objectCount, 1,
                                          MidpointRounding.AwayFromZero);
        StepperManager.Instance.Magni = magnification;
    }
}
/// <summary>
/// Fetches and processes a live-view frame: shows the post-capture preview
/// while active, otherwise decodes the frame, updates level angles, periodic
/// histograms, exposure highlighting, brightness/edge/B&amp;W filters and
/// grid/focus overlays, then publishes the result. Guarded by _operInProgress.
/// </summary>
private void GetLiveImage()
{
    // Re-entrancy guard: skip this tick if a previous one is still running.
    if (_operInProgress)
        return;
    try
    {
        LiveViewData = SelectedCameraDevice.GetLiveViewImage();
    }
    catch (Exception ex)
    {
        Log.Error("Error geting lv", ex);
        _operInProgress = false;
        return;
    }
    if (LiveViewData == null)
    {
        _operInProgress = false;
        return;
    }
    try
    {
        // While the post-capture preview window is active, show the captured
        // photo instead of the live frame.
        if (PreviewTime > 0 && (DateTime.Now - _photoCapturedTime).TotalSeconds <= PreviewTime)
        {
            Bitmap = ServiceProvider.Settings.SelectedBitmap.DisplayImage;
            _operInProgress = false;
            Console.WriteLine("Previeving");
            return;
        }
        if (LiveViewData != null && LiveViewData.ImageData != null)
        {
            // Wrap the JPEG payload (starting at ImageDataPosition) in a stream.
            MemoryStream stream = new MemoryStream(LiveViewData.ImageData,
                LiveViewData.ImageDataPosition,
                LiveViewData.ImageData.Length - LiveViewData.ImageDataPosition);

            // Camera orientation readouts for the level indicator.
            LevelAngle = (int)LiveViewData.LevelAngleRolling;
            AngleLevelPitching = LiveViewData.LevelAnglePitching;
            AngleLevelYawing = LiveViewData.LevelAngleYawing;

            using (var res = new Bitmap(stream))
            {
                Bitmap bmp = res;

                // Recompute histograms only every DesiredFrameRate-th frame.
                if (_totalframes%DesiredFrameRate == 0 && ShowHistogram)
                {
                    ImageStatisticsHSL hslStatistics = new ImageStatisticsHSL(bmp);
                    LuminanceHistogramPoints = ConvertToPointCollection(hslStatistics.Luminance.Values);
                    ImageStatistics statistics = new ImageStatistics(bmp);
                    RedColorHistogramPoints = ConvertToPointCollection(statistics.Red.Values);
                    GreenColorHistogramPoints = ConvertToPointCollection(statistics.Green.Values);
                    BlueColorHistogramPoints = ConvertToPointCollection(statistics.Blue.Values);
                }

                // Paint near-black pixels blue to flag under-exposure (mutates bmp).
                if (HighlightUnderExp)
                {
                    ColorFiltering filtering = new ColorFiltering();
                    filtering.Blue = new IntRange(0, 5);
                    filtering.Red = new IntRange(0, 5);
                    filtering.Green = new IntRange(0, 5);
                    filtering.FillOutsideRange = false;
                    filtering.FillColor = new RGB(System.Drawing.Color.Blue);
                    filtering.ApplyInPlace(bmp);
                }

                // Paint near-white pixels red to flag over-exposure (mutates bmp).
                if (HighlightOverExp)
                {
                    ColorFiltering filtering = new ColorFiltering();
                    filtering.Blue = new IntRange(250, 255);
                    filtering.Red = new IntRange(250, 255);
                    filtering.Green = new IntRange(250, 255);
                    filtering.FillOutsideRange = false;
                    filtering.FillColor = new RGB(System.Drawing.Color.Red);
                    filtering.ApplyInPlace(bmp);
                }

                // NOTE(review): 'preview' is computed but never used in this
                // method (unlike the related ProcessLiveView) — confirm intent.
                var preview = BitmapFactory.ConvertToPbgra32Format(
                    BitmapSourceConvert.ToBitmapSource(bmp));

                if (Brightness != 0)
                {
                    BrightnessCorrection filter = new BrightnessCorrection(Brightness);
                    bmp = filter.Apply(bmp);
                }

                Bitmap newbmp = bmp;
                if (EdgeDetection)
                {
                    var filter = new FiltersSequence(
                        Grayscale.CommonAlgorithms.BT709,
                        new HomogenityEdgeDetector()
                    );
                    newbmp = filter.Apply(bmp);
                }

                WriteableBitmap writeableBitmap;
                if (BlackAndWhite)
                {
                    // BT.601-weighted grayscale for the black-and-white view.
                    Grayscale filter = new Grayscale(0.299, 0.587, 0.114);
                    writeableBitmap = BitmapFactory.ConvertToPbgra32Format(
                        BitmapSourceConvert.ToBitmapSource(filter.Apply(newbmp)));
                }
                else
                {
                    writeableBitmap = BitmapFactory.ConvertToPbgra32Format(
                        BitmapSourceConvert.ToBitmapSource(newbmp));
                }

                DrawGrid(writeableBitmap);
                DrawFocusPoint(writeableBitmap);
                writeableBitmap.Freeze();
                Bitmap = writeableBitmap;
                _operInProgress = false;
            }
            return;
        }
    }
    catch (Exception ex)
    {
        Log.Error("Error geting lv", ex);
        _operInProgress = false;
        return;
    }
}
/// <summary>
/// Crops each configured section out of the source image, normalizes it
/// (grayscale, fixed-size resize, border trim, invert) and saves it as
/// crop{i}.tif in the data directory for later OCR.
/// </summary>
/// <param name="source">Full image to cut the sections from.</param>
private void fetchBitmapNames(Bitmap source)
{
    for (var i = 0; i < data.Sections.Length; i++)
    {
        System.Drawing.Rectangle section = data.Sections[i];

        // The original leaked the Graphics and both Bitmaps on every
        // iteration; 'using' blocks release them deterministically.
        using (Bitmap bmp = new Bitmap(section.Width, section.Height))
        {
            // Copy the section of the source image into the working bitmap.
            using (Graphics g = Graphics.FromImage(bmp))
            {
                g.DrawImage(source, 0, 0, section, GraphicsUnit.Pixel);
            }

            // Grayscale (BT.709 weights) -> 500x47 resize -> trim black border -> invert.
            FiltersSequence filters = new FiltersSequence(
                new Grayscale(0.2125, 0.7154, 0.0721),
                new ResizeBicubic(500, 47),
                new Shrink(System.Drawing.Color.Black),
                new Invert()
            );

            using (Bitmap processed = filters.Apply(bmp))
            {
                processed.Save(data.DataDir + @"\crop" + i + ".tif", ImageFormat.Tiff);
            }
        }
    }
}
// Process max 200 frames (5 min) in 320x240 resolution. So 76KB memory per frame
// (grayscale). 1200 frames is max 93 MB of RAM (normally less because of area).
/// <summary>
/// Runs motion detection over the buffered frame stack: adapts the background
/// frame towards each frame, thresholds the difference, and records with the
/// tracker every frame whose count of changed (non-black) pixels exceeds
/// <c>objectsize</c>. Disposes the frame stack when done.
/// </summary>
private void processFilePart()
{
    int nrofframes = imageStack.Length;

    // create filters
    Morph morphFilter = new Morph();                // filter for adapting background
    morphFilter.SourcePercent = 0.8;
    Difference differenceFilter = new Difference(); // filter for subtracting two frames
    Threshold thresholdFilter = new Threshold();    // filter for thresholding
    thresholdFilter.ThresholdValue = threshold;

    FiltersSequence filters = new FiltersSequence(); // all filters in one
    filters.Add(morphFilter);
    filters.Add(differenceFilter);
    filters.Add(thresholdFilter);

    // Loop-invariant: pure black, used to detect "changed" pixels below.
    // (The original constructed this Color once per pixel.)
    Color black = Color.FromArgb(255, 0, 0, 0);

    for (int i = 0; i < nrofframes; i++)
    {
        // move background towards current frame
        morphFilter.OverlayImage = imageStack[i];
        Bitmap Temp = morphFilter.Apply(backgroundFrame);
        backgroundFrame.Dispose(); // release the previous background (the original leaked it)
        backgroundFrame = Temp.Clone(new Rectangle(0, 0, Temp.Width, Temp.Height), Temp.PixelFormat);
        Temp.Dispose();

        // apply rest of the filters
        differenceFilter.OverlayImage = imageStack[i];
        Bitmap Temp2 = filters.Apply(backgroundFrame);

        // Calculate sum of white (changed) pixels.
        // NOTE(review): GetPixel per pixel is slow; LockBits would be much
        // faster if this ever becomes a bottleneck.
        int sum = 0;
        for (int j = 0; j < Temp2.Width; j++)
        {
            for (int k = 0; k < Temp2.Height; k++)
            {
                if (Temp2.GetPixel(j, k) != black)
                {
                    sum += 1;
                }
            }
        }
        Temp2.Dispose();

        // Enough motion in this frame — record it with the tracker.
        if (sum > objectsize)
        {
            tracker.addFrame(currentFrame);
        }
        currentFrame += 1;
    }

    // Discard array
    for (int i = 0; i < nrofframes; i++)
    {
        imageStack[i].Dispose();
    }
}
/// <summary>
/// Runs OCR over a small image: converts it to BT709 grayscale, inverts it,
/// then feeds it to the shared Tesseract engine in single-line mode.
/// </summary>
/// <param name="source">Image to recognize; the caller keeps ownership of it.</param>
/// <returns>The recognized text, or an empty string when recognition fails.</returns>
public string RecognizeTextSmall(Bitmap source)
{
    try
    {
        // Preprocess: grayscale + invert. Apply() returns a new bitmap, so the
        // caller's image is untouched. The previous Clone() of source was dead
        // code (overwritten immediately) and leaked; the filtered bitmap is now
        // disposed deterministically as well.
        var seq = new FiltersSequence();
        seq.Add(Grayscale.CommonAlgorithms.BT709);
        seq.Add(new Invert());

        using (var temp = seq.Apply(source))
        using (var page = _engine.Process(temp, PageSegMode.SingleLine))
        {
            var text = page.GetText();
            // Mean confidence kept available for future filtering of weak
            // results (a 0.5 cut-off was previously experimented with).
            var conf = page.GetMeanConfidence();
            return text;
        }
    }
    catch (Exception e)
    {
        Trace.TraceError(e.ToString());
        Ex.Report(e);
        return "";
    }
}
/// <summary>
/// Reports whether <paramref name="template"/> occurs in <paramref name="source"/>
/// using exhaustive template matching at a 0.9 similarity threshold.
/// The source is grayscaled (BT709) and Otsu-binarized before matching.
/// </summary>
/// <param name="source">Image to search in; not modified.</param>
/// <param name="template">Template to look for; used as-is (not filtered).</param>
/// <returns>True when at least one match above the threshold is found.</returns>
public bool ScanByTemplate(Bitmap source, Bitmap template)
{
    // Binarize the source so matching runs on clean black/white data.
    // The previous Clone() of source was dead code (immediately overwritten
    // by seq.Apply) and leaked; the filtered bitmap is now disposed.
    var seq = new FiltersSequence();
    seq.Add(Grayscale.CommonAlgorithms.BT709);
    seq.Add(new OtsuThreshold());

    using (var binarized = seq.Apply(source))
    {
        // NOTE(review): the template is matched unfiltered — presumably it is
        // already binarized; confirm against the template assets.
        var templateMatchin = new ExhaustiveTemplateMatching(0.9f);
        TemplateMatch[] templates = templateMatchin.ProcessImage(binarized, template);
        return templates.Length > 0;
    }
}
// Webcam frame-captured handler: maintains an adaptive grayscale background
// model, derives an edge image of moving regions, builds intensity profiles
// from it, and forwards the tracked cursor position when valid.
// NOTE(review): MaskImage, Hor and Ver are allocated every frame; Hor/Ver are
// displayed but never drawn into because the histogram painting is disabled.
void webcam_ImageCaptured_Back2(object source, WebcamEventArgs e)
{
    _FrameImage.Image = e.WebCamImage;
    Bitmap MaskImage = new Bitmap(640, 480);

    // Warm-up: skip Frames2Ignore frames, then capture the initial background.
    if (backgroundFrame == null)
    {
        Frames2Ignore--;
        if (Frames2Ignore == 0)
        {
            backgroundFrame = (Bitmap)e.WebCamImage;
            backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
        }
        return;
    }

    //Save curent image
    CurrentFrame = (Bitmap)e.WebCamImage;
    CurrentFrameGray = grayscaleFilter.Apply(CurrentFrame);

    // (A previously used Pixellate pre-filtering step was removed here.)

    // Move the background towards the current frame so gradual lighting
    // changes are absorbed into the background model.
    MoveTowards moveTowardsFilter = new MoveTowards();
    moveTowardsFilter.OverlayImage = CurrentFrameGray;
    Bitmap tmp = moveTowardsFilter.Apply(backgroundFrameGray);
    // dispose old background
    backgroundFrame.Dispose();
    backgroundFrame = tmp;

    // create processing filters sequence:
    // difference vs. background -> threshold -> opening (noise) -> edge detection
    FiltersSequence processingFilter = new FiltersSequence();
    processingFilter.Add(new Difference(backgroundFrameGray));
    processingFilter.Add(new Threshold(15));
    processingFilter.Add(new Opening());
    processingFilter.Add(new Edges());
    processingFilter.Add(new DifferenceEdgeDetector());

    // apply the filter
    Bitmap tmp1 = processingFilter.Apply(CurrentFrameGray);

    // (Disabled experiment: red-channel merge visualization and selection of
    // the largest moving blob via ConnectedComponentsLabeling/BlobCounter.)

    Bitmap Hor = new Bitmap(320, 240);
    Bitmap Ver = new Bitmap(320, 240);

    // Vertical/horizontal intensity profiles of the motion-edge image;
    // these feed the state machine that tracks the pointer position.
    AForge.Imaging.VerticalIntensityStatistics VIS = new VerticalIntensityStatistics(tmp1);
    int[] HistVer = VIS.Gray.Values;
    AForge.Imaging.HorizontalIntensityStatistics HIS = new HorizontalIntensityStatistics(tmp1);
    int[] HistHor = HIS.Gray.Values;
    //StateMgr.Execute(HistHor,HistVer);

    // Forward the cursor position when the tracker reports a valid location.
    if (eChangedCursorEvent != null && StateMgr.Val == Webcam.ValidLocation.TRUE)
    {
        eChangedCursorEvent(StateMgr.CurrState.CurrX, StateMgr.CurrState.CurrY);
    }

    #region Paint Hist
    // (Disabled: painting of the horizontal/vertical histograms into Hor/Ver.)
    #endregion

    _CaptureImage.Image = Hor;
    _CaptureImage2.Image = Ver;

    // Reset the background model to the raw current frame for the next tick.
    backgroundFrame = (Bitmap)e.WebCamImage;
    backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
}
/// <summary>
/// Loads an image from disk and reduces it to its centerline skeleton:
/// grayscale -> invert -> SIS threshold -> skeletonization -> thinning -> invert.
/// </summary>
/// <param name="filename">Path of the image file to process.</param>
/// <returns>A new bitmap containing the skeleton (black on white).</returns>
Bitmap processImageCenterline(string filename)
{
    using (Bitmap SampleImage = (Bitmap)System.Drawing.Image.FromFile(filename))
    {
        // We must convert to grayscale because the filters below
        // accept 8 bpp grayscale images only.
        Grayscale GF = new Grayscale(0.2125, 0.7154, 0.0721);
        using (Bitmap GSampleImage = GF.Apply(SampleImage))
        {
            // (An unused CannyEdgeDetector local was removed here; its
            // ApplyInPlace call had long been commented out.)
            FiltersSequence filterSequence = new FiltersSequence();
            // Invert so the drawing is white-on-black, as skeletonization expects.
            filterSequence.Add(new Invert());
            filterSequence.Add(new SISThreshold());
            // Finding skeleton
            filterSequence.Add(new SimpleSkeletonization());
            // Thinning structuring element: cleans single-pixel scratches/spurs.
            short[,] se = new short[,] { { -1, -1, -1 }, { 0, 1, 0 }, { -1, -1, -1 } };
            filterSequence.Add(new HitAndMiss(se, HitAndMiss.Modes.Thinning));
            // Invert back to black-on-white.
            filterSequence.Add(new Invert());
            // apply the filter and return the result
            return filterSequence.Apply(GSampleImage);
        }
    }
}
/// <summary>
/// Converts an image to a binary (black-and-white) image using BT709
/// grayscaling followed by Otsu thresholding.
/// </summary>
/// <param name="original">Image to convert; must be a <see cref="Bitmap"/>.</param>
/// <returns>A new binarized image; the original is left unmodified.</returns>
public static Image ToBlackAndWhite(this Image original)
{
    var pipeline = new FiltersSequence();
    pipeline.Add(Grayscale.CommonAlgorithms.BT709);
    pipeline.Add(new OtsuThreshold());
    return pipeline.Apply((Bitmap)original);
}
/// <summary>
/// Polls the camera for one live-view frame and publishes it to the UI:
/// decodes the JPEG payload, then optionally applies motion detection,
/// histogram statistics, under/over-exposure highlighting, brightness,
/// edge detection, black-and-white conversion, grid and focus-point
/// overlays, rotation selection, flipping and cropping.
/// Re-entrancy is prevented via the _operInProgress flag.
/// </summary>
public virtual void GetLiveImage()
{
    // Drop this tick if a previous frame is still being processed,
    // a delayed restart is pending, or the display is frozen.
    if (_operInProgress)
    {
        return;
    }
    if (DelayedStart)
    {
        return;
    }
    if (FreezeImage)
        return;
    _operInProgress = true;
    _totalframes++;
    // Rolling FPS measured since _framestart.
    if ((DateTime.Now - _framestart).TotalSeconds > 0)
        Fps = (int)(_totalframes / (DateTime.Now - _framestart).TotalSeconds);
    try
    {
        LiveViewData = LiveViewManager.GetLiveViewImage(CameraDevice);
    }
    catch (Exception ex)
    {
        Log.Error("Error geting lv", ex);
        _operInProgress = false;
        return;
    }
    if (LiveViewData == null)
    {
        _operInProgress = false;
        return;
    }
    // Live view stopped on the camera side: schedule a delayed restart.
    if (!LiveViewData.IsLiveViewRunning && !IsFocusStackingRunning)
    {
        DelayedStart = true;
        _restartTimerStartTime = DateTime.Now;
        _restartTimer.Start();
        _operInProgress = false;
        return;
    }
    if (LiveViewData.ImageData == null)
    {
        _operInProgress = false;
        return;
    }
    Recording = LiveViewData.MovieIsRecording;
    try
    {
        if (LiveViewData != null && LiveViewData.ImageData != null)
        {
            // Wrap the JPEG payload (skipping the header bytes) in a stream.
            MemoryStream stream = new MemoryStream(LiveViewData.ImageData,
                LiveViewData.ImageDataPosition,
                LiveViewData.ImageData.Length - LiveViewData.ImageDataPosition);
            LevelAngle = (int)LiveViewData.LevelAngleRolling;
            SoundL = LiveViewData.SoundL;
            SoundR = LiveViewData.SoundR;
            PeakSoundL = LiveViewData.PeakSoundL;
            PeakSoundR = LiveViewData.PeakSoundR;
            HaveSoundData = LiveViewData.HaveSoundData;
            MovieTimeRemain = decimal.Round(LiveViewData.MovieTimeRemain, 2);
            // Fast path: publish the raw frame without any image processing.
            if (NoProcessing)
            {
                BitmapImage bi = new BitmapImage();
                bi.BeginInit();
                bi.CacheOption = BitmapCacheOption.OnLoad;
                bi.StreamSource = stream;
                bi.EndInit();
                bi.Freeze();
                Bitmap = bi;
                ServiceProvider.DeviceManager.LiveViewImage[CameraDevice] = stream.ToArray();
                _operInProgress = false;
                return;
            }
            using (var res = new Bitmap(stream))
            {
                Bitmap bmp = res;
                // For PreviewTime seconds after a capture, show the captured
                // photo instead of the live feed.
                if (PreviewTime > 0 && (DateTime.Now - _photoCapturedTime).TotalSeconds <= PreviewTime)
                {
                    var bitmap = ServiceProvider.Settings.SelectedBitmap.DisplayImage.Clone();
                    // flip image only if the prview not fliped
                    if (FlipImage && !ServiceProvider.Settings.FlipPreview)
                        bitmap = bitmap.Flip(WriteableBitmapExtensions.FlipMode.Vertical);
                    bitmap.Freeze();
                    ServiceProvider.DeviceManager.LiveViewImage[CameraDevice] = SaveJpeg(bitmap);
                    Bitmap = bitmap;
                    return;
                }
                if (DetectMotion)
                {
                    ProcessMotionDetection(bmp);
                }
                // Histogram statistics are sampled once every DesiredFrameRate frames.
                if (_totalframes % DesiredFrameRate == 0 && ShowHistogram)
                {
                    ImageStatisticsHSL hslStatistics = new ImageStatisticsHSL(bmp);
                    LuminanceHistogramPoints = ConvertToPointCollection(
                        hslStatistics.Luminance.Values);
                    ImageStatistics statistics = new ImageStatistics(bmp);
                    RedColorHistogramPoints = ConvertToPointCollection(
                        statistics.Red.Values);
                    GreenColorHistogramPoints = ConvertToPointCollection(
                        statistics.Green.Values);
                    BlueColorHistogramPoints = ConvertToPointCollection(
                        statistics.Blue.Values);
                }
                // Paint near-black pixels blue to flag underexposure.
                if (HighlightUnderExp)
                {
                    ColorFiltering filtering = new ColorFiltering();
                    filtering.Blue = new IntRange(0, 5);
                    filtering.Red = new IntRange(0, 5);
                    filtering.Green = new IntRange(0, 5);
                    filtering.FillOutsideRange = false;
                    filtering.FillColor = new RGB(Color.Blue);
                    filtering.ApplyInPlace(bmp);
                }
                // Paint near-white pixels red to flag overexposure.
                if (HighlightOverExp)
                {
                    ColorFiltering filtering = new ColorFiltering();
                    filtering.Blue = new IntRange(250, 255);
                    filtering.Red = new IntRange(250, 255);
                    filtering.Green = new IntRange(250, 255);
                    filtering.FillOutsideRange = false;
                    filtering.FillColor = new RGB(Color.Red);
                    filtering.ApplyInPlace(bmp);
                }
                var preview = BitmapFactory.ConvertToPbgra32Format(
                    BitmapSourceConvert.ToBitmapSource(bmp));
                DrawFocusPoint(preview, true);
                if (Brightness != 0)
                {
                    BrightnessCorrection filter = new BrightnessCorrection(Brightness);
                    bmp = filter.Apply(bmp);
                }
                Bitmap newbmp = bmp;
                if (EdgeDetection)
                {
                    var filter = new FiltersSequence(
                        Grayscale.CommonAlgorithms.BT709,
                        new HomogenityEdgeDetector()
                        );
                    newbmp = filter.Apply(bmp);
                }
                WriteableBitmap writeableBitmap;
                if (BlackAndWhite)
                {
                    Grayscale filter = new Grayscale(0.299, 0.587, 0.114);
                    writeableBitmap = BitmapFactory.ConvertToPbgra32Format(
                        BitmapSourceConvert.ToBitmapSource(
                            filter.Apply(newbmp)));
                }
                else
                {
                    writeableBitmap = BitmapFactory.ConvertToPbgra32Format(
                        BitmapSourceConvert.ToBitmapSource(newbmp));
                }
                DrawGrid(writeableBitmap);
                // Map the rotation selector to degrees (4 = follow the camera sensor).
                switch (RotationIndex)
                {
                    case 0:
                        Rotation = 0;
                        break;
                    case 1:
                        Rotation = 90;
                        break;
                    case 2:
                        Rotation = 180;
                        break;
                    case 3:
                        Rotation = 270;
                        break;
                    case 4:
                        Rotation = LiveViewData.Rotation;
                        break;
                }
                if (CameraDevice.LiveViewImageZoomRatio.Value == "All")
                {
                    preview.Freeze();
                    Preview = preview;
                    if (ShowFocusRect)
                        DrawFocusPoint(writeableBitmap);
                }
                if (FlipImage)
                {
                    writeableBitmap = writeableBitmap.Flip(WriteableBitmapExtensions.FlipMode.Vertical);
                }
                // Symmetric crop by CropRatio percent of each half-dimension.
                if (CropRatio > 0)
                {
                    CropOffsetX = (int)((writeableBitmap.PixelWidth / 2.0) * CropRatio / 100);
                    CropOffsetY = (int)((writeableBitmap.PixelHeight / 2.0) * CropRatio / 100);
                    writeableBitmap = writeableBitmap.Crop(CropOffsetX, CropOffsetY,
                        writeableBitmap.PixelWidth - (2 * CropOffsetX),
                        writeableBitmap.PixelHeight - (2 * CropOffsetY));
                }
                writeableBitmap.Freeze();
                Bitmap = writeableBitmap;
                ServiceProvider.DeviceManager.LiveViewImage[CameraDevice] = SaveJpeg(writeableBitmap);
            }
            stream.Close();
        }
    }
    catch (Exception exception)
    {
        Log.Error(exception);
        _operInProgress = false;
    }
    finally
    {
        _operInProgress = false;
    }
    _operInProgress = false;
}
/// <summary>
/// Produces a BT709-grayscale copy of an image.
/// </summary>
/// <param name="original">Image to convert; must be a <see cref="Bitmap"/>.</param>
/// <returns>A new grayscale image; the original is left unmodified.</returns>
public static Image ToGrayScale(this Image original)
{
    var pipeline = new FiltersSequence();
    pipeline.Add(Grayscale.CommonAlgorithms.BT709);
    return pipeline.Apply((Bitmap)original);
}
/// <summary>
/// Prepares an image for OCR: copies it into a fresh bitmap, grayscales it
/// (BT709) and binarizes it with Otsu thresholding.
/// (An earlier pipeline using BlobsFiltering/Sharpen/Threshold(50) did not
/// perform well here and was removed.)
/// </summary>
/// <param name="b">Source image; the caller keeps ownership of it.</param>
/// <returns>A new binarized bitmap; the caller must dispose it.</returns>
public Bitmap DealImg(System.Drawing.Image b)
{
    // Copy the source into a working bitmap. Graphics is a GDI resource, so
    // release it deterministically even if DrawImage throws (previously it
    // leaked on the exception path).
    Bitmap copy = new Bitmap(b);
    using (Graphics g = Graphics.FromImage(copy))
    {
        g.DrawImage(b, 0, 0);
    }
    // Grayscale + Otsu binarization; Apply returns a new bitmap, so the
    // intermediate copy can (and now does) get disposed instead of leaking.
    FiltersSequence seq = new FiltersSequence();
    seq.Add(Grayscale.CommonAlgorithms.BT709);
    seq.Add(new OtsuThreshold());
    try
    {
        return seq.Apply(copy);
    }
    finally
    {
        copy.Dispose();
    }
}
/// <summary>
/// Polls the camera for one live-view frame and publishes it to the UI:
/// decodes the JPEG payload, then optionally applies motion detection,
/// histogram statistics, exposure highlighting, brightness, edge detection,
/// black-and-white conversion, grid/focus overlays and rotation.
/// Failures bump the _retries counter; a fully handled tick resets it.
/// Re-entrancy is prevented via the _operInProgress flag.
/// </summary>
public virtual void GetLiveImage()
{
    // Drop this tick if a previous frame is in flight or a restart is pending.
    if (_operInProgress)
        return;
    if (DelayedStart)
        return;
    _operInProgress = true;
    _totalframes++;
    // Rolling FPS measured since _framestart.
    if ((DateTime.Now - _framestart).TotalSeconds > 0)
        Fps = (int) (_totalframes/(DateTime.Now - _framestart).TotalSeconds);
    try
    {
        LiveViewData = LiveViewManager.GetLiveViewImage(CameraDevice);
    }
    catch (Exception)
    {
        _retries++;
        _operInProgress = false;
        return;
    }
    if (LiveViewData == null )
    {
        _retries++;
        _operInProgress = false;
        return;
    }
    // Live view stopped on the camera side: schedule a delayed restart.
    if (!LiveViewData.IsLiveViewRunning)
    {
        DelayedStart = true;
        _restartTimerStartTime = DateTime.Now;
        _restartTimer.Start();
        _operInProgress = false;
        return;
    }
    if (LiveViewData.ImageData == null)
    {
        _retries++;
        _operInProgress = false;
        return;
    }
    Recording = LiveViewData.MovieIsRecording;
    try
    {
        WriteableBitmap preview;
        if (LiveViewData != null && LiveViewData.ImageData != null)
        {
            // Wrap the JPEG payload (skipping the header bytes) in a stream.
            MemoryStream stream = new MemoryStream(LiveViewData.ImageData,
                LiveViewData.ImageDataPosition,
                LiveViewData.ImageData.Length - LiveViewData.ImageDataPosition);
            using (var res = new Bitmap(stream))
            {
                Bitmap bmp = res;
                if (DetectMotion)
                {
                    ProcessMotionDetection(bmp);
                }
                // Histogram statistics are sampled once every DesiredFrameRate frames.
                if (_totalframes % DesiredFrameRate == 0 && ShowHistogram)
                {
                    ImageStatisticsHSL hslStatistics = new ImageStatisticsHSL(bmp);
                    LuminanceHistogramPoints = ConvertToPointCollection(
                        hslStatistics.Luminance.Values);
                    ImageStatistics statistics = new ImageStatistics(bmp);
                    RedColorHistogramPoints = ConvertToPointCollection(
                        statistics.Red.Values);
                    GreenColorHistogramPoints = ConvertToPointCollection(
                        statistics.Green.Values);
                    BlueColorHistogramPoints = ConvertToPointCollection(
                        statistics.Blue.Values);
                }
                // Paint near-black pixels blue to flag underexposure.
                if (HighlightUnderExp)
                {
                    ColorFiltering filtering = new ColorFiltering();
                    filtering.Blue = new IntRange(0, 5);
                    filtering.Red = new IntRange(0, 5);
                    filtering.Green = new IntRange(0, 5);
                    filtering.FillOutsideRange = false;
                    filtering.FillColor = new RGB(System.Drawing.Color.Blue);
                    filtering.ApplyInPlace(bmp);
                }
                // Paint near-white pixels red to flag overexposure.
                if (HighlightOverExp)
                {
                    ColorFiltering filtering = new ColorFiltering();
                    filtering.Blue = new IntRange(250, 255);
                    filtering.Red = new IntRange(250, 255);
                    filtering.Green = new IntRange(250, 255);
                    filtering.FillOutsideRange = false;
                    filtering.FillColor = new RGB(System.Drawing.Color.Red);
                    filtering.ApplyInPlace(bmp);
                }
                if (Brightness != 0)
                {
                    BrightnessCorrection filter = new BrightnessCorrection(Brightness);
                    bmp = filter.Apply(bmp);
                }
                preview = BitmapFactory.ConvertToPbgra32Format(
                    BitmapSourceConvert.ToBitmapSource(bmp));
                DrawFocusPoint(preview);
                Bitmap newbmp = bmp;
                if (EdgeDetection)
                {
                    var filter = new FiltersSequence(
                        Grayscale.CommonAlgorithms.BT709,
                        new HomogenityEdgeDetector()
                        );
                    newbmp = filter.Apply(bmp);
                }
                WriteableBitmap writeableBitmap;
                if (BlackAndWhite)
                {
                    Grayscale filter = new Grayscale(0.299, 0.587, 0.114);
                    writeableBitmap = BitmapFactory.ConvertToPbgra32Format(
                        BitmapSourceConvert.ToBitmapSource(
                            filter.Apply(newbmp)));
                }
                else
                {
                    writeableBitmap = BitmapFactory.ConvertToPbgra32Format(
                        BitmapSourceConvert.ToBitmapSource(newbmp));
                }
                DrawGrid(writeableBitmap);
                // Rotate the display image (4 = follow the camera's reported angle).
                if (RotationIndex != 0)
                {
                    switch (RotationIndex)
                    {
                        case 1:
                            writeableBitmap = writeableBitmap.Rotate(90);
                            break;
                        case 2:
                            writeableBitmap = writeableBitmap.Rotate(180);
                            break;
                        case 3:
                            writeableBitmap = writeableBitmap.Rotate(270);
                            break;
                        case 4:
                            if (LiveViewData.Rotation != 0)
                                writeableBitmap = writeableBitmap.RotateFree(
                                    LiveViewData.Rotation, false);
                            break;
                    }
                }
                if (CameraDevice.LiveViewImageZoomRatio.Value == "All")
                {
                    preview.Freeze();
                    Preview = preview;
                    if (ShowFocusRect)
                        DrawFocusPoint(writeableBitmap);
                }
                writeableBitmap.Freeze();
                Bitmap = writeableBitmap;
                // The web preview is published at a lower rate than the UI image.
                if (_totalframes%DesiredWebFrameRate == 0)
                    ServiceProvider.DeviceManager.LiveViewImage[CameraDevice] = SaveJpeg(writeableBitmap);
            }
            stream.Close();
        }
    }
    catch (Exception exception)
    {
        Log.Error(exception);
        _retries++;
        _operInProgress = false;
    }
    // Frame handled: clear retry counter and allow the next tick.
    _retries = 0;
    _operInProgress = false;
}
/// <summary>
/// processes Frame for Motion Detection based on background generation
/// </summary>
/// <param name="frame">
/// frame[0] is the current frame; any additional entries are ignored
/// (the background model is kept internally, not passed in).
/// </param>
/// <returns>
/// frame in which motion is marked (edges painted into the red channel)
/// </returns>
public Bitmap processFrame(params Bitmap[] frame)
{
    Bitmap currentFrame = frame[0];
    // create grayscale filter (BT709)
    Grayscale filter = new Grayscale(0.2125, 0.7154, 0.0721);
    Bitmap GScurrentFrame = filter.Apply(currentFrame);
    // First frame seen becomes the initial background model.
    if (this.backgroundFrame == null)
    {
        this.backgroundFrame = (Bitmap)GScurrentFrame.Clone();
        GScurrentFrame.Dispose();
        return currentFrame;
    }
    // Blend the background towards the current frame (SourcePercent = 0.75)
    // so gradual scene changes are absorbed into the background model.
    Morph filterx = new Morph(GScurrentFrame);
    filterx.SourcePercent = 0.75;
    Bitmap tmp = filterx.Apply(backgroundFrame);
    // dispose old background
    backgroundFrame.Dispose();
    backgroundFrame = tmp;
    // create processing filters sequence:
    // difference vs. background -> threshold -> opening (noise) -> edges
    FiltersSequence processingFilter = new FiltersSequence();
    processingFilter.Add(new Difference(backgroundFrame));
    processingFilter.Add(new Threshold(threshold_val));
    processingFilter.Add(new Opening());
    processingFilter.Add(new Edges());
    // apply the filter
    Bitmap tmp1 = processingFilter.Apply(GScurrentFrame);
    // Merge the motion edges into the red channel of the original frame.
    IFilter extractChannel = new ExtractChannel(RGB.R);
    Bitmap redChannel = extractChannel.Apply(currentFrame);
    Merge mergeFilter = new Merge();
    mergeFilter.OverlayImage = tmp1;
    Bitmap mergedRed = mergeFilter.Apply(redChannel);
    ReplaceChannel rc = new ReplaceChannel(RGB.R, mergedRed);
    Bitmap result = rc.Apply(currentFrame);
    // BUG FIX: the merged channel bitmap was previously leaked because its
    // reference was overwritten by the ReplaceChannel result.
    mergedRed.Dispose();
    redChannel.Dispose();
    tmp1.Dispose();
    GScurrentFrame.Dispose();
    return result;
}
/// <summary>
/// Applies a filter sequence to a bitmap without modifying the input.
/// </summary>
/// <param name="bmp">Source bitmap; left untouched, caller keeps ownership.</param>
/// <param name="fs">Filter sequence to apply.</param>
/// <returns>A new, filtered bitmap; the caller must dispose it.</returns>
Bitmap preprocess(Bitmap bmp, FiltersSequence fs)
{
    // FiltersSequence.Apply already returns a new bitmap, so the defensive
    // Clone() the original code made was dead (overwritten immediately) and
    // leaked a GDI bitmap on every call.
    return fs.Apply(bmp);
}
/// <summary>
/// Detects and recognizes cards from source image: binarizes the image,
/// extracts card-sized blobs, perspective-corrects each card, then classifies
/// its suit and rank from the normalized top-left corner glyphs.
/// </summary>
/// <param name="source">Source image to be scanned</param>
/// <returns>Recognized Cards</returns>
public CardCollection Recognize(Bitmap source)
{
    CardCollection collection = new CardCollection(); //Collection that will hold cards
    Bitmap temp = source.Clone() as Bitmap; //Clone image to keep original image
    FiltersSequence seq = new FiltersSequence();
    seq.Add(Grayscale.CommonAlgorithms.BT709); //First add grayScaling filter
    seq.Add(new OtsuThreshold()); //Then add binarization(thresholding) filter
    temp = seq.Apply(source); // Apply filters on source image
    //Extract blobs from image whose size width and height larger than 150
    BlobCounter extractor = new BlobCounter();
    extractor.FilterBlobs = true;
    extractor.MinWidth = extractor.MinHeight = 150;
    extractor.MaxWidth = extractor.MaxHeight = 350;
    extractor.ProcessImage(temp);
    //Will be used transform(extract) cards on source image
    QuadrilateralTransformation quadTransformer = new QuadrilateralTransformation();
    //Will be used resize(scaling) cards
    ResizeBilinear resizer = new ResizeBilinear(CardWidth, CardHeight);
    foreach (Blob blob in extractor.GetObjectsInformation())
    {
        //Get Edge points of card
        List<IntPoint> edgePoints = extractor.GetBlobsEdgePoints(blob);
        //Calculate/Find corners of card on source image from edge points
        List<IntPoint> corners = PointsCloud.FindQuadrilateralCorners(edgePoints);
        quadTransformer.SourceQuadrilateral = corners; //Set corners for transforming card
        quadTransformer.AutomaticSizeCalculaton = true;
        Bitmap cardImg = quadTransformer.Apply(source); //Extract(transform) card image
        if (cardImg.Width > cardImg.Height) //If card is positioned horizontally
            cardImg.RotateFlip(RotateFlipType.Rotate90FlipNone); //Rotate
        cardImg = resizer.Apply(cardImg); //Normalize card size
        Card card = new Card(cardImg, corners.ToArray()); //Create Card Object
        bool faceCard = IsFaceCard(cardImg); //Determine type of card(face or not)
        // Binarize the top-left corner crops (suit and rank glyphs), trim
        // the white margins, then resize them to the sizes the scanners expect.
        ResizeBicubic res;
        seq.Clear();
        seq.Add(Grayscale.CommonAlgorithms.BT709);
        seq.Add(new OtsuThreshold());
        Bitmap topLeftSuit = card.GetTopLeftSuitPart();
        Bitmap bmp = seq.Apply(topLeftSuit);
        bmp = CutWhiteSpaces(bmp);
        res = new ResizeBicubic(32, 40);
        bmp = res.Apply(bmp);
        Bitmap topLeftRank = card.GetTopLeftRankPart();
        Bitmap bmp2 = seq.Apply(topLeftRank);
        bmp2 = CutWhiteSpaces(bmp2);
        // Re-threshold: the bicubic resize re-introduced gray levels.
        seq.Clear();
        seq.Add(new OtsuThreshold());
        bmp = seq.Apply(bmp);
        card.Suit = ScanSuit(bmp);
        if (!faceCard)
        {
            // Number cards use a narrower rank glyph (26x40).
            res = new ResizeBicubic(26, 40);
            bmp2 = res.Apply(bmp2);
            seq.Clear();
            seq.Add(new OtsuThreshold());
            bmp2 = seq.Apply(bmp2);
            card.Rank = ScanRank(bmp2);
        }
        else
        {
            // Face cards use a wider rank glyph (32x40).
            res = new ResizeBicubic(32, 40);
            bmp2 = res.Apply(bmp2);
            seq.Clear();
            seq.Add(new OtsuThreshold());
            bmp2 = seq.Apply(bmp2);
            card.Rank = ScanFaceRank(bmp2);
        }
        collection.Add(card); //Add card to collection
    }
    return collection;
}
/// <summary>
/// Lets the user pick the card-suit rectangle: loads the screenshot, crops the
/// "my cards" area, binarizes it, extracts the first card-sized blob, shows it
/// at 5x zoom in a selector dialog, and persists the chosen rectangle scaled
/// back to card coordinates.
/// </summary>
private void btnCardSuit_Click(object sender, EventArgs e)
{
    var image = Bitmap.FromFile(tbFile.Text) as Bitmap;
    Crop crop = new Crop(_settings.MyCardsRect); //TODO card identity
    var source = crop.Apply(image);
    // Binarize so the blob extractor sees clean card outlines.
    FiltersSequence seq = new FiltersSequence();
    seq.Add(Grayscale.CommonAlgorithms.BT709); //First add grayScaling filter
    seq.Add(new OtsuThreshold()); //Then add binarization(thresholding) filter
    var temp = seq.Apply(source); // Apply filters on source image
    BlobCounter extractor = new BlobCounter();
    extractor.FilterBlobs = true;
    extractor.MinWidth = extractor.MinHeight = (int)_settings.MinCardSize; //TODO card size
    extractor.ProcessImage(temp);
    // Take the first card-sized blob found.
    Bitmap cardImg = null;
    foreach (Blob blob in extractor.GetObjectsInformation())
    {
        cardImg = source.Clone(blob.Rectangle, PixelFormat.DontCare);
        break;
    }
    // BUG FIX: previously a null cardImg (no blob found) was passed straight
    // to the selector form, causing a NullReferenceException.
    if (cardImg == null)
        return;
    var r = _settings.CardSuitRect;
    // The selector works on a 5x magnified view of the card; dispose the
    // modal dialog deterministically (ShowDialog does not dispose it).
    using (var f = new AreaSelectorForm(new Rectangle(r.X * 5, r.Y * 5, r.Width * 5, r.Height * 5)))
    {
        f.Text = "Select CardSuitRect rect";
        f.SetImage(cardImg, 5);
        if (f.ShowDialog() == DialogResult.OK)
        {
            // Scale the selection back down from the 5x view to card coordinates.
            var newRect = new Rectangle(
                (int)Math.Round(f.Rect.X / 5.0),
                (int)Math.Round(f.Rect.Y / 5.0),
                (int)Math.Round(f.Rect.Width / 5.0),
                (int)Math.Round(f.Rect.Height / 5.0));
            lblCardSuit.Text = newRect.ToString();
            _settings.CardSuitRect = newRect;
            AppSettingsManager.Save(_settings);
        }
    }
}