// Applies the selected noise-reduction filter to the frame.
// The frame is converted to grayscale first (the filters below work on
// 8bpp images) and converted back to RGB afterwards so callers keep a
// colour-format bitmap. NoiseReduceValue selects: 1=bilateral, 2=median,
// 3=mean, anything else=median.
private void NoiseReduction(ref Bitmap frame)
{
    frame = Grayscale.CommonAlgorithms.RMY.Apply(frame);

    switch (NoiseReduceValue)
    {
        case 1:
        {
            BilateralSmoothing bilateral = new BilateralSmoothing();
            bilateral.KernelSize = 7;
            bilateral.SpatialFactor = 10;
            bilateral.ColorFactor = 30;
            bilateral.ColorPower = 0.5;
            bilateral.ApplyInPlace(frame);
            break;
        }
        case 2:
        {
            new Median().ApplyInPlace(frame);
            break;
        }
        case 3:
        {
            new Mean().ApplyInPlace(frame);
            break;
        }
        default:
        {
            // Unrecognised setting: fall back to the median filter.
            new Median().ApplyInPlace(frame);
            break;
        }
    }

    // Back to colour format.
    frame = new GrayscaleToRGB().Apply(frame);
}
// Converts the frame to a grayscale-looking RGB image. When a proper,
// non-empty subset of the R/G/B flags is set, a green-centred colour
// filter plus a channel-weighted grayscale conversion is used; otherwise
// (none or all selected) a plain RMY grayscale conversion is applied.
private void GrayscaleImg(ref Bitmap frame, bool R, bool G, bool B)
{
    bool anySelected = R || G || B;
    bool allSelected = R && G && B;

    if (anySelected && !allSelected)
    {
        // Keep only colours close to green before the weighted conversion.
        EuclideanColorFiltering colorFilter = new EuclideanColorFiltering();
        colorFilter.CenterColor = new AForge.Imaging.RGB(Color.FromArgb(30, 200, 30));
        colorFilter.Radius = 150;
        colorFilter.ApplyInPlace(frame);

        // Only the selected channels contribute to the grayscale weights.
        Grayscale grayFilter = new Grayscale(R ? 0.3 : 0, G ? .9 : 0, B ? 0.3 : 0);
        Bitmap grayFrame = grayFilter.Apply(frame);
        frame = new GrayscaleToRGB().Apply(grayFrame);
    }
    else
    {
        frame = Grayscale.CommonAlgorithms.RMY.Apply(frame);
        frame = new GrayscaleToRGB().Apply(frame);
    }
}
// Applies the edge detector selected by EdgeDetectValue (1=Sobel,
// 2=Difference, 3=Homogenity, 4=Canny, anything else=Homogenity).
// The frame is grayscaled first and returned in 24bpp colour format.
private void EdgeDetectImg(ref Bitmap frame)
{
    frame = Grayscale.CommonAlgorithms.RMY.Apply(frame);

    switch (EdgeDetectValue)
    {
        case 1:
            new SobelEdgeDetector().ApplyInPlace(frame);
            break;
        case 2:
            new DifferenceEdgeDetector().ApplyInPlace(frame);
            break;
        case 3:
            new HomogenityEdgeDetector().ApplyInPlace(frame);
            break;
        case 4:
            new CannyEdgeDetector().ApplyInPlace(frame);
            break;
        default:
            new HomogenityEdgeDetector().ApplyInPlace(frame);
            break;
    }

    // Back to colour format.
    frame = new GrayscaleToRGB().Apply(frame);
}
// Converts a Kinect 16bpp grayscale frame into a 24bpp RGB bitmap.
// Returns null when the frame is null or any conversion step fails
// (failures are logged to the "KinectStream" log).
static Bitmap GrayScaleImageToBitmap(ColorImageFrame image)
{
    try
    {
        if (image != null)
        {
            var pixeldata = new byte[image.PixelDataLength];
            image.CopyPixelDataTo(pixeldata);

            // The 16bpp staging bitmap and the 8bpp conversion are disposed
            // here; the original leaked both on every frame.
            using (var bitmapFrame = new Bitmap(image.Width, image.Height, PixelFormat.Format16bppGrayScale))
            {
                // Copy the raw pixel buffer straight into the bitmap bits.
                BitmapData bmapdata = bitmapFrame.LockBits(
                    new Rectangle(0, 0, image.Width, image.Height),
                    ImageLockMode.WriteOnly,
                    bitmapFrame.PixelFormat);
                Marshal.Copy(pixeldata, 0, bmapdata.Scan0, image.PixelDataLength);
                bitmapFrame.UnlockBits(bmapdata);

                // Down-convert to 8bpp grayscale, then expand to RGB.
                using (var gray8 = AForge.Imaging.Image.Convert16bppTo8bpp(bitmapFrame))
                {
                    return new GrayscaleToRGB().Apply(gray8);
                }
            }
        }
    }
    catch (Exception ex)
    {
        Logger.LogExceptionToFile(ex, "KinectStream");
    }
    return null;
}
// On new video frame: ensures the frame is in RGB format, runs glyph
// recognition on it and raises the frameProcessed event with the results.
private void videoSourcePlayer_NewFrame(object sender, ref Bitmap image)
{
    if (activeGlyphDatabase == null)
    {
        return;
    }

    if (image.PixelFormat == PixelFormat.Format8bppIndexed)
    {
        // Glyph processing needs an RGB image; convert grayscale frames.
        Bitmap rgbImage = new GrayscaleToRGB().Apply(image);
        image.Dispose();
        image = rgbImage;
    }

    lock (sync)
    {
        List<ExtractedGlyphData> glyphs = imageProcessor.ProcessImage(image);

        EventHandler<FrameData> handler = frameProcessed;
        if (handler != null)
        {
            handler(this, new FrameData(glyphs, image));
        }
    }
}
/// <summary>
/// Creates a comic rendered copy of the input image.
/// </summary>
/// <param name="sourceImage">Source image; 8bpp indexed input is converted to RGB first.</param>
/// <returns>A new bitmap with the comic effect; the caller's bitmap is not modified.</returns>
public override Bitmap Render(Bitmap sourceImage)
{
    // Converters
    GrayscaleY convertGray = new GrayscaleY();
    GrayscaleToRGB convertColor = new GrayscaleToRGB();

    // Convert grayscale input; remember so the temporary copy can be disposed.
    bool converted = false;
    if (sourceImage.PixelFormat == PixelFormat.Format8bppIndexed)
    {
        sourceImage = convertColor.Apply(sourceImage);
        converted = true;
    }

    Bitmap comicImage = AForge.Imaging.Image.Clone(sourceImage);
    if (converted)
    {
        // Dispose our RGB copy (the original leaked it).
        sourceImage.Dispose();
    }

    // Glow for smooth colors.
    GaussianBlur filterBlur = new GaussianBlur();
    filterBlur.Sigma = 2.0;
    filterBlur.Size = 4;
    Bitmap glowLayer = filterBlur.Apply(comicImage);

    ContrastCorrection filterContrast = new ContrastCorrection(1 - (-this.Coloring * 0.1));
    filterContrast.ApplyInPlace(glowLayer);
    BrightnessCorrection filterBrightness = new BrightnessCorrection((-this.Coloring * 0.1) + 0.1);
    filterBrightness.ApplyInPlace(glowLayer);
    new Screen(glowLayer).ApplyInPlace(comicImage);
    glowLayer.Dispose(); // intermediate layers were leaked in the original

    // Edge layer: convolution -> grayscale -> threshold -> back to RGB.
    Bitmap convolved = new Convolution(ConvolutionKernel).Apply(comicImage);
    Bitmap edgeGray = convertGray.Apply(convolved);
    convolved.Dispose();
    // Threshold controls edge thickness.
    new Threshold((byte)(this.Edging * 255 / 100)).ApplyInPlace(edgeGray);
    Bitmap edgeLayer = convertColor.Apply(edgeGray);
    edgeGray.Dispose();

    // Intersect comic with edge layer (darken blend).
    new Intersect(edgeLayer).ApplyInPlace(comicImage);
    edgeLayer.Dispose();

    return comicImage;
}
// Returns a copy of the image grayscaled everywhere except a colour
// circle of radius 20 at the centre. Returns null when the input is null.
public static Bitmap ConvertToGrayScale(this Bitmap me)
{
    if (me == null)
    {
        return null;
    }

    int radius = 20, x = me.Width / 2, y = me.Height / 2;

    // Mask with the same size as the source: 0xFF everywhere, 0 inside the circle.
    byte[,] mask = new byte[me.Height, me.Width];
    Initialize(mask, 0xFF);
    DrawCircle(mask, x, y, radius, 0);

    var grayFilter = new Grayscale(0.2125, 0.7154, 0.0721); // BT709 weights
    var rgbFilter = new GrayscaleToRGB();
    var maskFilter = new ApplyMask(mask);

    // Grayscale everything outside the circle, then back to RGB.
    // Intermediates are disposed; the original leaked every one of them.
    Bitmap masked = maskFilter.Apply(me);
    Bitmap gray = grayFilter.Apply(masked);
    masked.Dispose();
    Bitmap img = rgbFilter.Apply(gray);
    gray.Dispose();

    // Invert the mask and take only the circle, in colour, from the original.
    Invert(mask);
    Bitmap circleImg = new ApplyMask(mask).Apply(me);

    // Merge the grayscale background with the colour circle.
    Bitmap merged = new Merge(img).Apply(circleImg);
    img.Dispose();
    circleImg.Dispose();
    return merged;
}
// Applies the supplied Gabor filter to a grayscale copy of the image and
// returns the result converted back to RGB.
// NOTE(review): the original also built a second grayscale copy and ran
// gf.Apply() on it with the result discarded; that dead work (and its
// leaked bitmaps) has been removed — the returned image is unchanged.
public static Bitmap gaborFilter(Bitmap b, GaborFilter gf)
{
    Bitmap gray = ImageSupporter.ColorToGrayscale(b);
    Bitmap filtered = gf.Apply(gray);
    gray.Dispose();

    GrayscaleToRGB grayscaleToRGB = new GrayscaleToRGB();
    Bitmap result = grayscaleToRGB.Apply(filtered);
    filtered.Dispose();
    return result;
}
// Configures a Gabor filter with the given parameters, shows a preview
// window of the filtered grayscale image, then returns a filtered RGB
// image. (Removed unused Random/flag locals from the original.)
public Bitmap gaborFilter(Bitmap b, float gamma, float lambda, float psi, float sigma, float theta)
{
    GaborFilter filter = new GaborFilter();
    filter.Gamma = gamma;
    filter.Lambda = lambda;
    filter.Psi = psi;
    filter.Sigma = sigma;
    filter.Theta = theta;

    // Preview window showing the response with the caller's parameters.
    Bitmap bx = ImageSupporter.ColorToGrayscale(b);
    var okno = new ImageWindow(ImageSupporter.Bitmap2BitmapImage(filter.Apply(ImageSupporter.ColorToGrayscale(bx))));
    okno.Title = filter.Gamma + " " + filter.Lambda + " " + filter.Psi + " " + filter.Sigma + " " + filter.Theta;
    okno.Show();

    // NOTE(review): Gamma/Theta are overridden after the preview, so the
    // RETURNED image ignores the caller's gamma/theta — this looks like
    // leftover debug code; confirm before relying on it. Behaviour kept.
    filter.Gamma = 3.0;
    filter.Theta = 0.0;

    GrayscaleToRGB grayscaleToRGB = new GrayscaleToRGB();
    return grayscaleToRGB.Apply(filter.Apply(ImageSupporter.ColorToGrayscale(b)));
}
// ========================================================= GrayscaleFunc
// Replaces the frame with a BT709-weighted grayscale version, converted
// back to 24bpp RGB so downstream filters keep receiving colour frames.
private void GrayscaleFunc(ref Bitmap frame)
{
    Grayscale grayscale = new Grayscale(0.2125, 0.7154, 0.0721); // BT709 weights
    Bitmap grayFrame = grayscale.Apply(frame);
    frame = new GrayscaleToRGB().Apply(grayFrame);
}
// Binarizes the frame: RMY grayscale, threshold at ThresholdValue,
// then back to 24bpp colour format.
private void ThresholdImg(ref Bitmap frame)
{
    frame = Grayscale.CommonAlgorithms.RMY.Apply(frame);
    new Threshold(ThresholdValue).ApplyInPlace(frame);
    frame = new GrayscaleToRGB().Apply(frame);
}
/// <summary>
/// Creates a comic rendered copy of the input image.
/// </summary>
/// <param name="sourceImage">Source image; 8bpp indexed input is converted to RGB first.</param>
/// <returns>A new bitmap with the comic effect applied.</returns>
public override Bitmap Render(Bitmap sourceImage)
{
    GrayscaleToRGB convertColor = new GrayscaleToRGB();
    // The filters below need an RGB image; convert grayscale input.
    if (sourceImage.PixelFormat == PixelFormat.Format8bppIndexed)
    {
        sourceImage = convertColor.Apply(sourceImage);
    }
    // Smooth colours while keeping edges sharp.
    BilateralBlur blur = new BilateralBlur(3, 0.1);
    Bitmap comic = blur.Apply(sourceImage);
    // Edges: Sobel on a grayscale copy, back to RGB, then inverted so
    // edges become dark lines.
    Bitmap grayscale = Grayscale.CommonAlgorithms.Y.Apply(comic);
    SobelEdgeDetector sobelEdge = new SobelEdgeDetector();
    sobelEdge.ScaleIntensity = true;
    Bitmap edgeLayer = sobelEdge.Apply(grayscale);
    edgeLayer = convertColor.Apply(edgeLayer);
    Invert invertEdge = new Invert();
    invertEdge.ApplyInPlace(edgeLayer);
    // Stretch luminance to strengthen edge contrast.
    HSLLinear edgeLinear = new HSLLinear();
    edgeLinear.InLuminance.Min = 0;
    edgeLinear.InLuminance.Max = 0.8;
    edgeLinear.ApplyInPlace(edgeLayer);
    // Highlights: dilated, darkened copy of the re-inverted edges,
    // colour-dodged onto the working image.
    Bitmap highlightLayer = invertEdge.Apply(edgeLayer);
    Dilatation highlightDilitation = new Dilatation();
    highlightDilitation.ApplyInPlace(highlightLayer);
    BrightnessCorrection highlightBright = new BrightnessCorrection(-0.35);
    highlightBright.ApplyInPlace(highlightLayer);
    ColorDodge highlightBlend = new ColorDodge(highlightLayer);
    highlightBlend.ApplyInPlace(comic);
    // Merge edges with working layer (multiply darkens along the edges).
    Multiply multEdge = new Multiply(edgeLayer);
    multEdge.ApplyInPlace(comic);
    return(comic);
}
// =========================================================
// Thresholds the frame: RMY grayscale, binary threshold at (int)par_d,
// then back to 24bpp RGB. Best-effort: on any filter error the frame is
// left in whatever state it reached.
private void ThresholdFunct(ref Bitmap frame, double par_d)
{
    try
    {
        frame = Grayscale.CommonAlgorithms.RMY.Apply(frame);
        new Threshold((int)par_d).ApplyInPlace(frame);
        frame = new GrayscaleToRGB().Apply(frame);
    }
    catch (Exception)
    {
        // Deliberately swallowed to keep the video pipeline running.
        // (The original used a bare `catch { }`; consider logging here.)
    }
}
// Binarizes the image: grayscale conversion followed by a fixed
// threshold at `limit`, returned as a 24bpp RGB bitmap.
// NOTE(review): the original cloned the input and ran ColorToGrayscale
// TWICE (on the clone, then on the already-gray result) and leaked the
// clone; a single conversion is used here — confirm ColorToGrayscale was
// idempotent on grayscale input, which the old code relied on.
public static Bitmap Binaryzation(Bitmap b, int limit)
{
    Threshold th = new Threshold();
    th.ThresholdValue = limit;

    Bitmap gray = ImageSupporter.ColorToGrayscale(b);
    Bitmap binary = th.Apply(gray);
    gray.Dispose();

    GrayscaleToRGB grayscaleToRGB = new GrayscaleToRGB();
    Bitmap result = grayscaleToRGB.Apply(binary);
    binary.Dispose();
    return result;
}
// Resizes the stored mask to the input's dimensions, converts it to RGB,
// applies the brightness correction, and adds the result onto the input.
public Bitmap Apply(Bitmap input)
{
    var resizeFilter = new Resize_filter(input.Width, input.Height);
    var scaledMask = resizeFilter.Apply(_mask);

    scaledMask = new GrayscaleToRGB().Apply(scaledMask);
    scaledMask = _brightnessCorrection.Apply(scaledMask);

    // Additive blend of the prepared mask over the input image.
    var addFilter = new Add(scaledMask);
    return addFilter.Apply(input);
}
// Runs difference-edge detection on the source image and returns the
// result as a 24bpp RGB bitmap. Takes ownership of _src and disposes it.
private Bitmap ProcessSingleImage(Bitmap _src)
{
    // Grayscale copy for the edge detector; disposed afterwards
    // (the original leaked it).
    Bitmap gray = Grayscale.CommonAlgorithms.BT709.Apply(_src);
    _src.Dispose();

    Bitmap edges = new DifferenceEdgeDetector().Apply(gray);
    gray.Dispose();

    // Back to 24bpp.
    Bitmap result = new GrayscaleToRGB().Apply(edges);
    edges.Dispose();
    return result;
}
// On new video frame: normalises the frame to RGB and runs glyph
// processing under the shared lock.
private void videoSourcePlayer_NewFrame(object sender, ref Bitmap image)
{
    if (image.PixelFormat == PixelFormat.Format8bppIndexed)
    {
        // The processor expects an RGB frame; convert grayscale input.
        var rgbImage = new GrayscaleToRGB().Apply(image);
        image.Dispose();
        image = rgbImage;
    }

    lock (_sync)
    {
        var glyphs = _imageProcessor.ProcessImage(image);
    }
}
// Regression test for BinaryWatershed: applies the filter to the bundled
// "water" resource, checks the output size and 8bpp pixel format, pins
// the nine expected watershed maximum points, and writes both the raw
// result and a red-marker-annotated RGB copy to the test directory for
// manual inspection. (strX/strY regenerate the expected-value literals.)
public void ApplyTest1() { string basePath = Path.Combine(NUnit.Framework.TestContext.CurrentContext.TestDirectory, "watershed"); Directory.CreateDirectory(basePath); Bitmap shapes = Accord.Imaging.Image.Clone(Resources.water); shapes.Save(Path.Combine(basePath, "shapes.jpg")); var bw = new BinaryWatershed(); Bitmap result = bw.Apply(shapes); Assert.AreEqual(746, result.Width); Assert.AreEqual(643, result.Height); Assert.AreEqual(PixelFormat.Format8bppIndexed, result.PixelFormat); Assert.AreEqual(9, bw.MaxPoints.Count); string strX = bw.MaxPoints.Select(i => i.X).ToArray().ToCSharp(); string strY = bw.MaxPoints.Select(i => i.Y).ToArray().ToCSharp(); double[] x = new double[] { 310, 546, 136, 254, 429, 612, 398, 345, 498 }; double[] y = new double[] { 436, 153, 392, 201, 336, 339, 242, 183, 319 }; Assert.AreEqual(x, bw.MaxPoints.Select(i => i.X).ToArray()); Assert.AreEqual(y, bw.MaxPoints.Select(i => i.Y).ToArray()); result.Save(Path.Combine(basePath, "watershed.jpg")); GrayscaleToRGB toRGB = new GrayscaleToRGB(); result = toRGB.Apply(result); PointsMarker marker = new PointsMarker(Color.Red, 5); marker.Points = bw.MaxPoints; Bitmap marked = marker.Apply(result); marked.Save(Path.Combine(basePath, "watershed-marks.jpg")); Assert.IsNotNull(result); Assert.IsNotNull(marked); }
// Runs difference-edge detection on the source image (converting to
// 24bpp first when needed) and returns the result as a 24bpp RGB bitmap.
// Takes ownership of _src and disposes it.
private Bitmap ProcessSingleImage(Bitmap _src)
{
    // Capture the format up front: the original read _src.PixelFormat
    // AFTER _src.Dispose(), which throws on a disposed GDI+ bitmap.
    bool srcIs24bpp = _src.PixelFormat == PixelFormat.Format24bppRgb;
    Bitmap img = srcIs24bpp ? _src : CloneTo24bpp(_src);

    Bitmap gray = Grayscale.CommonAlgorithms.BT709.Apply(img);
    Bitmap edges = new DifferenceEdgeDetector().Apply(gray);
    gray.Dispose(); // was leaked in the original

    _src.Dispose();
    if (!srcIs24bpp)
    {
        img.Dispose(); // only dispose the clone, never _src twice
    }

    // Back to 24bpp.
    Bitmap result = new GrayscaleToRGB().Apply(edges);
    edges.Dispose();
    return result;
}
// On new video frame: converts grayscale frames to RGB, runs glyph
// recognition under the shared lock, then — when an AR form is attached —
// collects a VirtualModel for every recognized glyph that has
// GlyphVisualizationData and a detected 3D transformation, and pushes the
// frame plus models to the AR form for rendering.
private void videoSourcePlayer_NewFrame(object sender, ref Bitmap image) { if (activeGlyphDatabase != null) { if (image.PixelFormat == PixelFormat.Format8bppIndexed) { // convert image to RGB if it is grayscale GrayscaleToRGB filter = new GrayscaleToRGB( ); Bitmap temp = filter.Apply(image); image.Dispose( ); image = temp; } lock ( sync ) { List <ExtractedGlyphData> glyphs = imageProcessor.ProcessImage(image); if (arForm != null) { List <VirtualModel> modelsToDisplay = new List <VirtualModel>( ); foreach (ExtractedGlyphData glyph in glyphs) { if ((glyph.RecognizedGlyph != null) && (glyph.RecognizedGlyph.UserData != null) && (glyph.RecognizedGlyph.UserData is GlyphVisualizationData) && (glyph.IsTransformationDetected)) { modelsToDisplay.Add(new VirtualModel( ((GlyphVisualizationData)glyph.RecognizedGlyph.UserData).ModelName, glyph.TransformationMatrix, imageProcessor.GlyphSize)); } } arForm.UpdateScene(image, modelsToDisplay); } } } }
// Builds the snapshot overlay from a camera frame: Sobel edges on a
// grayscale copy, conversion back to RGB, suppression of near-black
// pixels, recolouring of every remaining pixel to blue, and a transparent
// black background.
// NOTE(review): GetPixel/SetPixel per pixel is very slow on large frames;
// SnapshotImage and SnapshotOriginalImage end up referencing the SAME
// bitmap object — confirm the "original" copy is meant to be shared.
private void TakeSnapshot_funct(Bitmap img)
{
    Bitmap image = Grayscale.CommonAlgorithms.RMY.Apply(img);
    // find edges
    SobelEdgeDetector EdgeFilter = new SobelEdgeDetector();
    EdgeFilter.ApplyInPlace(image);
    // back to color format
    GrayscaleToRGB RGBfilter = new GrayscaleToRGB();
    image = RGBfilter.Apply(image);
    // get rid of grays: keep only pixels within radius 200 of near-black
    EuclideanColorFiltering filter = new EuclideanColorFiltering();
    filter.CenterColor.Red = 20;
    filter.CenterColor.Green = 20;
    filter.CenterColor.Blue = 20;
    filter.FillOutside = false;
    filter.Radius = 200;
    filter.ApplyInPlace(image);
    Color peek;
    // Recolour every pixel with a non-zero red component to solid blue.
    for (int y = 0; y < image.Height; y++)
    {
        for (int x = 0; x < image.Width; x++)
        {
            peek = image.GetPixel(x, y);
            if (peek.R != 0)
            {
                image.SetPixel(x, y, Color.Blue);
            }
        }
    }
    image.MakeTransparent(Color.Black);
    SnapshotImage = image;
    SnapshotOriginalImage = image;
}
// =========================================================
// Applies the edge detector selected by par_int (1=Sobel, 2=Difference,
// 3=Homogenity, 4=Canny, anything else=Homogenity). The frame is
// grayscaled first and returned in 24bpp colour format.
private void Edge_detectFunc(ref Bitmap frame, int par_int)
{
    frame = Grayscale.CommonAlgorithms.RMY.Apply(frame);

    switch (par_int)
    {
        case 1:
            new SobelEdgeDetector().ApplyInPlace(frame);
            break;
        case 2:
            new DifferenceEdgeDetector().ApplyInPlace(frame);
            break;
        case 3:
            new HomogenityEdgeDetector().ApplyInPlace(frame);
            break;
        case 4:
            new CannyEdgeDetector().ApplyInPlace(frame);
            break;
        default:
            new HomogenityEdgeDetector().ApplyInPlace(frame);
            break;
    }

    // Back to colour format.
    frame = new GrayscaleToRGB().Apply(frame);
}
// Applies a Gabor filter with the given parameters to a grayscale copy
// of the image and returns the result converted back to RGB.
// NOTE(review): the original built a second grayscale copy and ran the
// filter on it with the result discarded, and declared unused
// Random/flag locals; that dead work (and its leaked bitmaps) has been
// removed — the returned image is unchanged.
public static Bitmap gaborFilter(Bitmap b, float gamma, float lambda, float psi, float sigma, float theta)
{
    GaborFilter filter = new GaborFilter();
    filter.Gamma = gamma;
    filter.Lambda = lambda;
    filter.Psi = psi;
    filter.Sigma = sigma;
    filter.Theta = theta;

    Bitmap gray = ImageSupporter.ColorToGrayscale(b);
    Bitmap filtered = filter.Apply(gray);
    gray.Dispose();

    GrayscaleToRGB grayscaleToRGB = new GrayscaleToRGB();
    Bitmap result = grayscaleToRGB.Apply(filtered);
    filtered.Dispose();
    return result;
}
// Converts the bitmap to an 8-bit grayscale image and then back to a
// 24bpp RGB bitmap (grayscale appearance, colour pixel format).
public static Bitmap GrayScaleToColor(Bitmap bitmap)
{
    var toRgb = new GrayscaleToRGB();
    return toRgb.Apply(ImageSupporter.ColorToGrayscale(bitmap));
}
// Detects rectangular markers in the bitmap: blur -> BT709 grayscale ->
// difference-edge detection -> Otsu threshold -> blob analysis ->
// quadrilateral/angle/border tests, then per-marker perspective
// correction, colour (RGB or black/white) and rotation classification,
// coordinate recentring, and pose estimation via the core service.
// Returns the detected markers and updates BackgroundBitmap with the
// overlay selected by ViewConfig (raw camera / edge / binary image).
// NOTE(review): _blobCounter.MinHeight is assigned twice below — the
// second assignment uses BlobMinWidth and should almost certainly set
// MinWidth instead; confirm and fix.
// NOTE(review): assumes _configService's ViewConfig/ImageProcessorConfig
// and _coreService are initialised; no null checks are performed here.
public ImageProcessorResult Process(Bitmap bitmap, bool rgb) { var result = new ImageProcessorResult(); _viewConfig = _configService.ViewConfig; _imageProcessorConfig = _configService.ImageProcessorConfig; new Blur().ApplyInPlace(bitmap); Bitmap overlay = bitmap; if (_viewConfig.BackgroundImage == ViewConfigBackgroundImage.CameraRaw) { // 카메라 원본 overlay = bitmap; } // 그레이 스케일 var grayscale = Grayscale.CommonAlgorithms.BT709.Apply(bitmap); // 경계 검출 var edges = new DifferenceEdgeDetector().Apply(grayscale); if (_viewConfig.BackgroundImage == ViewConfigBackgroundImage.Edge) { overlay = new GrayscaleToRGB().Apply(edges); } // 이진화 // var threshold = new Threshold(_imageProcessorConfig.Threshold).Apply(edges); var threshold = new OtsuThreshold().Apply(edges); if (_viewConfig.BackgroundImage == ViewConfigBackgroundImage.Binary) { overlay = new GrayscaleToRGB().Apply(threshold); } // 오버레이 복제 overlay = overlay.CloneBitmap(); // 오버레이 데이터 var overlayData = overlay.LockBits(overlay.GetRectangle(), ImageLockMode.ReadWrite, overlay.PixelFormat); _blobCounter = new BlobCounter(); _blobCounter.MinHeight = _imageProcessorConfig.BlobMinHeight; _blobCounter.MinHeight = _imageProcessorConfig.BlobMinWidth; _blobCounter.FilterBlobs = true; _blobCounter.ObjectsOrder = ObjectsOrder.XY; _blobCounter.ProcessImage(threshold); var blobs = _blobCounter.GetObjectsInformation(); var shapeChecker = new SimpleShapeChecker(); // 각 영역에 대해 처리 수행 foreach (var blob in blobs) { // 현재 시도하는 마커 Marker marker = new Marker(); var edgePoints = _blobCounter.GetBlobsEdgePoints(blob); // 사각형 판정 var points = new List <IntPoint>(); if (shapeChecker.IsQuadrilateral(edgePoints, out points)) { marker.Points = points; List <IntPoint> leftEdge, rightEdge; _blobCounter.GetBlobsLeftAndRightEdges(blob, out leftEdge, out rightEdge); // 각도와 길이 판정 if (MoreQuadTest(blob, marker, leftEdge, rightEdge)) { // 검은색 테두리 판정 if (BorderTest(blob, marker, grayscale, leftEdge, rightEdge)) { // 판정 완료 result.Markers.Add(marker); // 무게 중심 
좌표 marker.X = (int)(blob.CenterOfGravity.X - (threshold.Width / 2)); marker.Y = (int)(-(blob.CenterOfGravity.Y - (threshold.Height / 2))); // 프레임워크에서 계산한 넓이 marker.FrameworkArea = blob.Area; // 오버레이 ApplyOverlay(overlayData, marker.Points); } } } } overlay.UnlockBits(overlayData); foreach (var marker in result.Markers) { var points = marker.Points; // 방향 보정 var sideLength = points[0].DistanceTo(points[1]); if (points[2].Y - points[1].Y < sideLength / 1.6) { points = new List <IntPoint>( new IntPoint[] { points[1], points[2], points[3], points[0] }); marker.Points = points; } // 변형 복구 var quadrilateralTransformation = new QuadrilateralTransformation(points, _imageProcessorConfig.QuadrilateralTransformationWidth, _imageProcessorConfig.QuadrilateralTransformationHeight); var transformed = quadrilateralTransformation.Apply(bitmap); // 회전 및 색상 판정 시작 int halfWidth = _imageProcessorConfig.QuadrilateralTransformationWidth / 2, halfHeight = _imageProcessorConfig.QuadrilateralTransformationHeight / 2; // x => x + 1 사분면 var crops = new[] { new Crop(new Rectangle(halfWidth, 0, halfWidth, halfHeight)), new Crop(new Rectangle(0, 0, halfWidth, halfHeight)), new Crop(new Rectangle(0, halfHeight, halfWidth, halfHeight)), new Crop(new Rectangle(halfWidth, halfHeight, halfWidth, halfHeight)) }; var quadImage = new[] { crops[0].Apply(transformed), crops[1].Apply(transformed), crops[2].Apply(transformed), crops[3].Apply(transformed) }; var filteredResult = new[] { new { Img = quadImage[0], Red = Filter(quadImage[0], MarkerColor.Red).Luminance(), Green = Filter(quadImage[0], MarkerColor.Green).Luminance(), Blue = Filter(quadImage[0], MarkerColor.Blue).Luminance(), White = Filter(quadImage[0], MarkerColor.White).Luminance() }, new { Img = quadImage[1], Red = Filter(quadImage[1], MarkerColor.Red).Luminance(), Green = Filter(quadImage[1], MarkerColor.Green).Luminance(), Blue = Filter(quadImage[1], MarkerColor.Blue).Luminance(), White = Filter(quadImage[1], MarkerColor.White).Luminance() }, 
new { Img = quadImage[2], Red = Filter(quadImage[2], MarkerColor.Red).Luminance(), Green = Filter(quadImage[2], MarkerColor.Green).Luminance(), Blue = Filter(quadImage[2], MarkerColor.Blue).Luminance(), White = Filter(quadImage[2], MarkerColor.White).Luminance() }, new { Img = quadImage[3], Red = Filter(quadImage[3], MarkerColor.Red).Luminance(), Green = Filter(quadImage[3], MarkerColor.Green).Luminance(), Blue = Filter(quadImage[3], MarkerColor.Blue).Luminance(), White = Filter(quadImage[3], MarkerColor.White).Luminance() } }; var whiteDesc = filteredResult.OrderByDescending(a => a.White).ToArray(); if (rgb) { // RGB 색상 판별 var colorQuad = whiteDesc.Skip(1); var red = colorQuad.Sum(a => a.Red); var green = colorQuad.Sum(a => a.Green); var blue = colorQuad.Sum(a => a.Blue); Console.WriteLine("{0}: {1} {2} {3}", colorQuad.Count(), red, green, blue); var max = Math.Max(red, Math.Max(green, blue)); if (red == max) { marker.Color = MarkerColor.Red; } else if (green == max) { marker.Color = MarkerColor.Green; } else if (blue == max) { marker.Color = MarkerColor.Blue; } } else { // 흑백 색상 판별 var whiteMax = whiteDesc[0].White; var whiteRest = (whiteDesc[1].White + whiteDesc[2].White + whiteDesc[3].White) / 3; if (whiteMax - whiteRest < _imageProcessorConfig.ColorTestWhite) { // White marker.Color = MarkerColor.White; } else { // Black marker.Color = MarkerColor.Black; } } // 회전 판별 for (int i = 0; i < 4; i++) { if (filteredResult[i].White == whiteDesc.First().White) { marker.Rotate = (MarkerRotate)(i + 1); break; } } // 백색 마커에는 회전 방향이 없습니다. if (marker.Color == MarkerColor.White) { // 지정되지 않습니다. 
marker.Rotate = MarkerRotate.None; } // 화상 중심으로 좌표 변환 for (int i = 0; i < marker.Points.Count; i++) { marker.Points[i] = new IntPoint { X = marker.Points[i].X - _configService.DeviceConfig.PixelWidth / 2, Y = marker.Points[i].Y - _configService.DeviceConfig.PixelHeight / 2 }; } // 코어 서비스에서 기하학적 방법으로 거리 계산 var coreResult = _coreService.Query(marker.Points, _imageProcessorConfig.MarkerSize); marker.EuclideanDistance = coreResult.Distance; marker.TiltAngle = Math.Asin(coreResult.TranslationVector[1] / marker.EuclideanDistance); marker.PanAngle = Math.Asin(coreResult.TranslationVector[0] / marker.EuclideanDistance); if (marker.PanAngle > Math.PI) { // 음수 marker.PanAngle = 2 * Math.PI - marker.PanAngle; } marker.TransX = coreResult.TranslationVector[0]; marker.TransY = coreResult.TranslationVector[1]; marker.TransZ = coreResult.TranslationVector[2]; } BackgroundBitmap = overlay; Console.WriteLine(); foreach (var marker in result.Markers) { Console.WriteLine(marker.Color); } return(result); }
// Blob analysis helper: counts blobs constrained to the given width and
// height intervals, filters the image to those blobs, and returns a tuple
// of (filtered image, connected-components colour labelling, copy with
// red blob rectangles drawn, copy with detected shapes outlined
// [circle/triangle/quadrilateral, colour-coded by sub-type], and a list
// of per-blob cropped bitmaps).
// NOTE(review): the Graphics objects (g, gg) and intermediate bitmaps are
// never disposed — wrap them in using-blocks if this runs per frame.
public static Tuple <Bitmap, Bitmap, Bitmap, Bitmap, List <Bitmap> > blob(Bitmap img, Interval w, Interval h, Boolean couple) { Bitmap sourceImage = (Bitmap)(img); //////////////////////////////////////////////////// BlobCounter blobCounter = new BlobCounter(); blobCounter.FilterBlobs = true; blobCounter.MinHeight = (int)h.Min; blobCounter.MaxHeight = (int)h.Max; blobCounter.MinWidth = (int)w.Min; blobCounter.MaxWidth = (int)w.Max; blobCounter.CoupledSizeFiltering = couple; //blobCounter.BackgroundThreshold. blobCounter.ProcessImage(sourceImage); Blob[] blobs = blobCounter.GetObjectsInformation(); //bitmap.UnlockBits(sourceImage); //////////////////////////////////////////////////// Bitmap newBlobImg = (Bitmap)(sourceImage).Clone(); // create filter BlobsFiltering filterBlob = new BlobsFiltering(); // configure filter filterBlob.CoupledSizeFiltering = couple; filterBlob.MinHeight = (int)h.Min; filterBlob.MaxHeight = (int)h.Max; filterBlob.MinWidth = (int)w.Min; filterBlob.MaxWidth = (int)w.Max; // apply the filter newBlobImg = filterBlob.Apply(newBlobImg); //////////////////////////////////////////////////// Bitmap rectImage = (Bitmap)(newBlobImg).Clone(); GrayscaleToRGB convertToColor = new GrayscaleToRGB(); if (rectImage.PixelFormat == System.Drawing.Imaging.PixelFormat.Format8bppIndexed) { rectImage = convertToColor.Apply(newBlobImg); } Graphics g = Graphics.FromImage(rectImage); Pen myPen = new Pen(Color.Red, 1); List <Bitmap> blobIMGs = new List <Bitmap>(); for (int i = 0; i < blobs.Length; i++) { blobIMGs.Add((Bitmap)(newBlobImg).Clone(blobs[i].Rectangle, PixelFormat.Format32bppArgb)); g.DrawRectangle(myPen, blobs[i].Rectangle); } //////////////////////////////////////////////////// Bitmap colorImage = (Bitmap)(newBlobImg).Clone(); ConnectedComponentsLabeling filter = new ConnectedComponentsLabeling(); // apply the filter colorImage = filter.Apply(colorImage); ////////////////////////////////////////////////// SimpleShapeChecker shapeChecker = new 
SimpleShapeChecker(); Bitmap shapeImage = (Bitmap)(newBlobImg).Clone(); Graphics gg = Graphics.FromImage(shapeImage); Pen yellowPen = new Pen(Color.Yellow, 2); // circles Pen redPen = new Pen(Color.Red, 2); // quadrilateral Pen brownPen = new Pen(Color.Brown, 2); // quadrilateral with known sub-type Pen greenPen = new Pen(Color.Green, 2); // known triangle Pen bluePen = new Pen(Color.Blue, 2); // triangle for (int i = 0, n = blobs.Length; i < n; i++) { List <IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]); AForge.Point center; float radius; // is circle ? if (shapeChecker.IsCircle(edgePoints, out center, out radius)) { gg.DrawEllipse(yellowPen, (float)(center.X - radius), (float)(center.Y - radius), (float)(radius * 2), (float)(radius * 2)); } else { List <IntPoint> corners; // is triangle or quadrilateral if (shapeChecker.IsConvexPolygon(edgePoints, out corners)) { // get sub-type PolygonSubType subType = shapeChecker.CheckPolygonSubType(corners); Pen pen; if (subType == PolygonSubType.Unknown) { pen = (corners.Count == 4) ? redPen : bluePen; } else { pen = (corners.Count == 4) ? brownPen : greenPen; } gg.DrawPolygon(pen, ToPointsArray(corners)); } } } return(new Tuple <Bitmap, Bitmap, Bitmap, Bitmap, List <Bitmap> >(newBlobImg, colorImage, rectImage, shapeImage, blobIMGs)); }
// Template-matching experiment: thresholds both images with a Bradley
// local threshold (after grayscaling), converts them back to RGB,
// downscales both by 'divisor' (textBox1), runs exhaustive template
// matching, and — when a match is found — extracts the matched region
// expanded by CellsizeLength (textBox2) via a quadrilateral
// transformation, resizes it to the template size and shows it.
// NOTE(review): int.Parse on the text boxes throws on non-numeric input;
// the "Orignal != Format24bppRgb" conversion block appears twice; the
// trailing GC.Collect() is a code smell — confirm intent before cleanup.
private void button2_Click(object sender, EventArgs e) { Orignal = ConvertTo24bpp(CDC.Apply(Orignal, 0.6)); if (Template.PixelFormat != PixelFormat.Format8bppIndexed) { Template = Grayscale.CommonAlgorithms.BT709.Apply(Template); } if (Orignal.PixelFormat != PixelFormat.Format8bppIndexed) { Orignal = Grayscale.CommonAlgorithms.BT709.Apply(Orignal); } BradleyLocalThresholding FilterBrad = new BradleyLocalThresholding(); FilterBrad.WindowSize = 100; FilterBrad.PixelBrightnessDifferenceLimit = 0.2f; Template = FilterBrad.Apply(Template); Orignal = FilterBrad.Apply(Orignal); //TemplatImage.Save( @"D:\Temp.jpg" ); //SourceImage.Save( @"D:\src.jpg" ); if (Template.PixelFormat != PixelFormat.Format24bppRgb) { GrayscaleToRGB FilterRGB = new GrayscaleToRGB(); Template = FilterRGB.Apply(Template); } if (Orignal.PixelFormat != PixelFormat.Format24bppRgb) { GrayscaleToRGB FilterRGB = new GrayscaleToRGB(); Orignal = FilterRGB.Apply(Orignal); } int divisor = int.Parse(textBox1.Text); int CellsizeLength = int.Parse(textBox2.Text); ExhaustiveTemplateMatching etm = new ExhaustiveTemplateMatching(0.1f); if (Orignal.PixelFormat != PixelFormat.Format24bppRgb) { GrayscaleToRGB FilterRGB = new GrayscaleToRGB(); Orignal = FilterRGB.Apply(Orignal); } ResizeNearestNeighbor Resize_filter2 = new ResizeNearestNeighbor(Orignal.Width / divisor, Orignal.Height / divisor); Bitmap Resize_Org_Image = Resize_filter2.Apply(AForge.Imaging.Image.Clone(Orignal)); ResizeNearestNeighbor Resize_filter3 = new ResizeNearestNeighbor(Template.Width / divisor, Template.Height / divisor); Bitmap Resize_Template = Resize_filter3.Apply(AForge.Imaging.Image.Clone(Template)); TemplateMatch[] tm = etm.ProcessImage(Resize_Org_Image, Resize_Template); if (tm.Length > 0) { List <IntPoint> cornersRect = new List <IntPoint> { new IntPoint(tm[0].Rectangle.X * divisor - CellsizeLength, tm[0].Rectangle.Y * divisor - CellsizeLength), new IntPoint((tm[0].Rectangle.X * divisor) + (tm[0].Rectangle.Width * divisor) + 
CellsizeLength, tm[0].Rectangle.Y * divisor - CellsizeLength), new IntPoint((tm[0].Rectangle.X * divisor) + (tm[0].Rectangle.Width * divisor) + CellsizeLength, (tm[0].Rectangle.Y * divisor) + (tm[0].Rectangle.Height * divisor) + CellsizeLength), new IntPoint(tm[0].Rectangle.X * divisor - CellsizeLength, (tm[0].Rectangle.Y * divisor) + (tm[0].Rectangle.Height * divisor) + CellsizeLength) }; SimpleQuadrilateralTransformation squadtran = new SimpleQuadrilateralTransformation(cornersRect, Orignal.Width + CellsizeLength * 2, Orignal.Height + CellsizeLength * 2) { AutomaticSizeCalculaton = true }; Bitmap ExhaustiveTemplate24bit = squadtran.Apply(AForge.Imaging.Image.Clone(Orignal)); ExhaustiveTemplate24bit = new ResizeNearestNeighbor(Template.Width, Template.Height).Apply(ExhaustiveTemplate24bit); pictureBox3.Image = ExhaustiveTemplate24bit; } GC.Collect(); }
/// <summary>
/// Handles a DICOM upload: saves the file, decodes it, applies the DICOM
/// window to the 16-bit pixel data, segments candidate regions by
/// thresholding + blob analysis, overlays their edges in red on the image,
/// saves the annotated PNG and inserts a record (patient name/weight,
/// region area and location in mm) into the database.
/// </summary>
protected void ImageButton1_Click(object sender, ImageClickEventArgs e)
{
    if (FileUpload1.HasFile)
    {
        // NOTE(review): absolute developer-machine paths; these should come
        // from configuration or Server.MapPath.
        string filedir = "C:\\Users\\darwesh\\Documents\\Visual Studio 2010\\WebSites\\WebSite1\\";
        string filename = filedir + "Images\\" + FileUpload1.FileName;
        // Reuse the already-built path instead of repeating the literal.
        FileUpload1.SaveAs(filename);

        pixels16 = new List<ushort>();
        Imagemri im = new Imagemri();
        DicomDecoder dd = new DicomDecoder();
        dd.DicomFileName = filename;
        imageWidth = dd.width;
        imageHeight = dd.height;
        bitDepth = dd.bitsAllocated;
        winCentre = dd.windowCentre;
        winWidth = dd.windowWidth;
        bool result = dd.dicomFileReadSuccess;
        if (result)
        {
            im.NewImage = true;
            if (bitDepth == 16)
            {
                pixels16.Clear();
                dd.GetPixels16(ref pixels16);
                // (Removed a dead pixels16 -> byte[] conversion loop whose
                // buffer was never read afterwards.)
                if (winCentre == 0 && winWidth == 0)
                {
                    // No window in the file: fall back to the full 12-bit range.
                    winWidth = 4095;
                    winCentre = 4095 / 2;
                }
            }
            im.SetParameters(ref pixels16, imageWidth, imageHeight, winWidth, winCentre, true);

            // Extract patient weight and name from the decoded DICOM tag dump.
            // NOTE(review): if a tag is absent, Split(':')[1] throws — assumes
            // both tags are always present; verify against DicomDecoder.
            string index = "";
            foreach (string stt in dd.dicomInfo)
            {
                if (stt.Contains("Patient's Weight"))
                {
                    index = stt;
                }
            }
            string wii = index.Split(':')[1];
            foreach (string stt in dd.dicomInfo)
            {
                if (stt.Contains("Patient's Name"))
                {
                    index = stt;
                }
            }
            string pn = index.Split(':')[1];

            // Grayscale with BT709 weights, then threshold using the DICOM
            // window to isolate bright structures.
            AForge.Imaging.Filters.Grayscale g1 = new Grayscale(0.2125, 0.7154, 0.0721);
            Bitmap imagew = g1.Apply(im.bmp);
            int thresholding = (int)((dd.windowWidth - dd.windowCentre) * 255 / dd.windowWidth);
            AForge.Imaging.Filters.Threshold thf = new AForge.Imaging.Filters.Threshold(thresholding);
            Bitmap ther = thf.Apply(imagew);
            BlobCounter blobCounter = new BlobCounter(ther);
            Blob[] blobs = blobCounter.GetObjects(ther, false);
            ImageStatistics img;
            AForge.Imaging.Filters.GrayscaleToRGB d1 = new GrayscaleToRGB();
            Bitmap bm = d1.Apply(imagew);
            Edges s = new Edges();
            Graphics gg = Graphics.FromImage(bm);
            string ss = null;
            Bitmap myImage = null;
            Blob b;
            int count = 0;
            string locc = "";
            foreach (Blob blob in blobs)
            {
                img = new ImageStatistics(blob.Image);
                double perc = ((double)img.PixelsCountWithoutBlack / (double)img.PixelsCount) * 100;
                // Keep only blobs that are reasonably large and mostly non-black.
                if (blob.Image.Size.Height > 20 && blob.Image.Size.Width > 20 && perc > 35)
                {
                    b = blob;
                    ImageStatistics st = new ImageStatistics(b.Image);
                    // Edge-detect the blob, tint the outline red (keep only the
                    // red channel), and composite it over the full image.
                    Bitmap pp = s.Apply(b.Image);
                    ChannelFiltering c = new ChannelFiltering(new IntRange(0, 255), new IntRange(0, 0), new IntRange(0, 0));
                    Bitmap pp2 = d1.Apply(pp);
                    c.ApplyInPlace(pp2);
                    pp2.MakeTransparent(Color.Black);
                    gg.DrawImage(pp2, b.Rectangle);
                    gg.Flush();
                    // NOTE(review): myImage is produced but never consumed here.
                    myImage = im.bmp.Clone(b.Rectangle, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
                    // Physical area and location derived from DICOM pixel spacing.
                    ss = ((double)(st.PixelsCountWithoutBlack) * (double)dd.pixelHeight * dd.pixelWidth).ToString();
                    locc = (b.Rectangle.Location.X * dd.pixelWidth).ToString() + "mm," + (b.Rectangle.Location.Y * dd.pixelHeight).ToString() + "mm";
                    count++;
                }
            }//end foreach
            bm.Save(filedir + FileUpload1.FileName + ".png", ImageFormat.Png);
            // (Removed an unused 'records r = new records();' local.)
            recordsTableAdapters.recordsTableAdapter ta = new recordsTableAdapters.recordsTableAdapter();
            ta.InsertRecord(pn, wii, FileUpload1.FileName, FileUpload1.FileName + ".png", "", ss, locc);
        }
    }
}
/// <summary>
/// Hand-detection demo: binarizes an input photo, extracts the largest blob,
/// finds its contour, convex hull and convexity defects, then visualizes the
/// contour (blue), hull (red), palm center (red circle), finger-valley
/// defects (green), hull points (yellow) and wrist points (powder blue).
/// </summary>
static void Main(string[] args)
{
    Threshold thresh = new Threshold(10);
    Erosion3x3 erode = new Erosion3x3();
    Dilatation3x3 dilate = new Dilatation3x3();
    GrahamConvexHull hullFinder = new GrahamConvexHull();
    BorderFollowing contourFinder = new BorderFollowing();
    GrayscaleToRGB rgb = new GrayscaleToRGB();
    ConvexHullDefects defectFinder = new ConvexHullDefects(10);
    // (Removed unused Median and ConnectedComponentsLabeling locals.)

    Bitmap img = (Bitmap)Bitmap.FromFile("hand3.jpg");
    Bitmap image = Grayscale.CommonAlgorithms.BT709.Apply(img);
    thresh.ApplyInPlace(image);
    // Morphological open (erode then dilate) to drop small binarization noise.
    erode.ApplyInPlace(image);
    dilate.ApplyInPlace(image);

    BlobCounter counter = new BlobCounter(image);
    counter.ObjectsOrder = ObjectsOrder.Area;
    Blob[] blobs = counter.GetObjectsInformation();
    if (blobs.Length > 0)
    {
        // Blobs are sorted by area, so blobs[0] is assumed to be the hand.
        counter.ExtractBlobsImage(image, blobs[0], true);
        UnmanagedImage hand = blobs[0].Image;
        var contour = contourFinder.FindContour(hand);
        if (contour.Count() > 0)
        {
            var initialHull = hullFinder.FindHull(contour);
            var defects = defectFinder.FindDefects(contour, initialHull);
            var filteredHull = initialHull.ClusterHullPoints().FilterLinearHullPoints();
            var palmCenter = defects.Centroid(contour);
            var wristPoints = filteredHull.SelectWristPoints(defects, contour);

            Bitmap color = rgb.Apply(hand).ToManagedImage();
            // Dispose the Graphics and hoist pens out of the loops: the
            // original leaked a Pen per drawn ellipse.
            using (Graphics gr = Graphics.FromImage(color))
            using (Pen thinRed = new Pen(Brushes.Red, 3))
            using (Pen thinBlue = new Pen(Brushes.Blue, 3))
            using (Pen thickGreen = new Pen(Brushes.Green, 6))
            using (Pen thickYellow = new Pen(Brushes.Yellow, 6))
            using (Pen thickPowderBlue = new Pen(Brushes.PowderBlue, 6))
            {
                gr.DrawPolygon(thinRed, filteredHull.ToPtArray());
                gr.DrawLines(thinBlue, contour.ToPtArray());
                gr.DrawEllipse(thinRed, palmCenter.X - 10, palmCenter.Y - 10, 20, 20);
                foreach (ConvexityDefect defect in defects)
                {
                    gr.DrawEllipse(thickGreen, contour[defect.Point].X - 10, contour[defect.Point].Y - 10, 20, 20);
                }
                foreach (AForge.IntPoint pt in filteredHull)
                {
                    gr.DrawEllipse(thickYellow, pt.X - 10, pt.Y - 10, 20, 20);
                }
                foreach (AForge.IntPoint pt in wristPoints)
                {
                    gr.DrawEllipse(thickPowderBlue, pt.X - 10, pt.Y - 10, 20, 20);
                }
            }
            ImageBox.Show(color);
        }
    }
}
/// <summary>
/// Applies this node's filter to the incoming frame, promotes 8bpp results
/// back to RGB, and forwards the processed frame as a new VisionMessage.
/// </summary>
/// <param name="message">Incoming message; expected to be a VisionMessage.</param>
/// <param name="msgID">Message identifier (unused here).</param>
public override void consumeMessage(IMessage message, int msgID)
{
    VisionMessage vm = message as VisionMessage;
    // Guard the 'as' cast: the original dereferenced vm unconditionally and
    // would throw NullReferenceException on an unexpected message type.
    if (vm == null)
    {
        return;
    }
    Bitmap rgb = vm.Bitmap;
    rgb = filter.Apply(rgb);
    // Downstream consumers expect RGB, so re-colorize grayscale output.
    if (rgb.PixelFormat == PixelFormat.Format8bppIndexed)
    {
        rgb = new GrayscaleToRGB().Apply(rgb);
    }
    vm = new VisionMessage(rgb.Height, rgb.Width, ByteTools.pixelFormatToBPP(rgb.PixelFormat), rgb);
    msgService.sendMsg(vm);
}
/// <summary>
/// Applies the line filter to the incoming frame, draws the currently known
/// Hough lines and the sub-image grid onto an RGB copy, kicks off a
/// background line-finding pass (at most one at a time), and forwards the
/// annotated frame.
/// </summary>
/// <param name="message">Incoming message; assumed to be a VisionMessage —
/// TODO confirm: no null check after the 'as' cast.</param>
/// <param name="msgID">Message identifier (unused here).</param>
public override void consumeMessage(IMessage message, int msgID)
{
    VisionMessage vm = message as VisionMessage;
    // Bypass flag: forward the frame untouched (resetting the flag).
    if (vm.bypass)
    {
        vm.bypass = false;
        msgService.sendMsg(vm);
        return;
    }
    // Bitmap bitmap = ByteTools.BytesToBmp(vm.Bitmap, vm.BmpWidth, vm.BmpHeight, ByteTools.bppToPixelFormat(vm.BytePerPixel));
    Bitmap bitmap = vm.Bitmap;
    Bitmap rgb = bitmap;
    bitmap = lf.Apply(bitmap);
    // Annotations are drawn on an RGB conversion of the filtered frame.
    rgb = new GrayscaleToRGB().Apply(bitmap);
    Graphics g = Graphics.FromImage(rgb);
    // NOTE(review): Graphics.FromImage does not return null — it throws on
    // failure — so this branch looks unreachable; it is also never disposed.
    if (g == null)
    {
        Console.WriteLine("Null Graphics!");
        return;
    }
    // NOTE(review): lock(this) is an anti-pattern (callers could lock the
    // same object); a private lock object would be safer — confirm no
    // external code synchronizes on this instance.
    lock (this)
    {
        // Only draw + launch a new background pass when none is running;
        // threadRunning is presumably reset by findLines — verify.
        if (!threadRunning)
        {
            threadRunning = true;
            /*for (int i = 0; i < 3; i++) { if (g != null && currentStateProbs[i] > double.MinValue) { drawLine(g, new HoughLine((double)currentThetaStates[i], (short)currentRStates[i], 0, 0d), //new Rectangle(0, heightRes[0], bitmap.Width, heightRes[heightRes.Length - 1]-heightRes[0]) new Rectangle(0,0,bitmap.Width, bitmap.Height*2)); } } */
            // Draw the lines found by the previous background pass.
            for (int i = 0; i < hlidx; i++)
            {
                drawLine(g, hls[i], new Rectangle(0, 0, bitmap.Width, bitmap.Height * 2));
            }
            // Walk the sub-image grid defined by heightRes/widthRes; each cell
            // is cloned, processed, and outlined.
            for (int i = 0; i < heightRes.Length - 1; i++)
            {
                int y = heightRes[i];
                int yDelta = heightRes[i + 1] - heightRes[i];
                int xDelta = widthRes[i + 1] - widthRes[i];
                for (int x = 0; x < bitmap.Width; x += xDelta)
                {
                    Rectangle sub = new Rectangle(x, y, xDelta, yDelta);
                    Bitmap subImg = (Bitmap)bitmap.Clone(sub, bitmap.PixelFormat);
                    // NOTE(review): the returned HoughLine is never used, and
                    // subImg is never disposed — confirm whether
                    // processSubImage has required side effects.
                    HoughLine hl = processSubImage(subImg);
                    g.DrawRectangle(p2, sub);
                }
            }
            // Run the expensive line search off the message thread; the
            // delegate captures the filtered bitmap.
            ThreadStart starter = delegate { findLines(bitmap); };
            Thread innerThread = new Thread(starter);
            innerThread.Start();
        }
    }
    // Forward the annotated RGB frame.
    vm = new VisionMessage(rgb.Height, rgb.Width, ByteTools.pixelFormatToBPP(rgb.PixelFormat), rgb);
    msgService.sendMsg(vm);
}
/// <summary>
/// Process the filter on the specified image.
/// </summary>
///
/// <param name="sourceData">Source image data.</param>
/// <param name="destinationData">Destination image data.</param>
///
/// <exception cref="InvalidImagePropertiesException">Texture size does not match image size.</exception>
/// <exception cref="ApplicationException">Filters should not change image dimension.</exception>
///
protected override unsafe void ProcessFilter( UnmanagedImage sourceData, UnmanagedImage destinationData )
{
    // get source image dimension
    int width  = sourceData.Width;
    int height = sourceData.Height;

    // if generator was specified, then generate a texture
    // otherwise use provided texture
    if ( textureGenerator != null )
    {
        texture = textureGenerator.Generate( width, height );
    }
    else
    {
        // check existing texture: texture[row, column] must match the image
        if ( ( texture.GetLength( 0 ) != height ) || ( texture.GetLength( 1 ) != width ) )
        {
            // sorry, but source image must have the same dimension as texture
            throw new InvalidImagePropertiesException( "Texture size does not match image size." );
        }
    }

    // apply first filter
    UnmanagedImage filteredImage1 = filter1.Apply( sourceData );

    // check size of the result image
    if ( ( width != filteredImage1.Width ) || ( height != filteredImage1.Height ) )
    {
        filteredImage1.Dispose( );
        throw new ApplicationException( "Filters should not change image dimension." );
    }

    // convert 1st image to RGB if required, so the blending loop below can
    // always assume 3 bytes per pixel
    if ( filteredImage1.PixelFormat == PixelFormat.Format8bppIndexed )
    {
        GrayscaleToRGB coloringFilter = new GrayscaleToRGB( );
        UnmanagedImage temp = coloringFilter.Apply( filteredImage1 );
        filteredImage1.Dispose( );
        filteredImage1 = temp;
    }

    UnmanagedImage filteredImage2 = null;

    // apply second filter, if it was specified
    if ( filter2 != null )
    {
        filteredImage2 = filter2.Apply( sourceData );

        // check size of the result image
        if ( ( width != filteredImage2.Width ) || ( height != filteredImage2.Height ) )
        {
            filteredImage1.Dispose( );
            filteredImage2.Dispose( );
            // we are not handling such situations yet
            throw new ApplicationException( "Filters should not change image dimension." );
        }

        // convert 2nd image to RGB if required
        if ( filteredImage2.PixelFormat == PixelFormat.Format8bppIndexed )
        {
            GrayscaleToRGB coloringFilter = new GrayscaleToRGB( );
            UnmanagedImage temp = coloringFilter.Apply( filteredImage2 );
            filteredImage2.Dispose( );
            filteredImage2 = temp;
        }
    }

    // use source image as a second image, if second filter is not set
    // (in that case sourceData is presumably already 24bpp RGB — the 3-byte
    // stride math below depends on it; TODO confirm against callers)
    if ( filteredImage2 == null )
    {
        filteredImage2 = sourceData;
    }

    // do the job: per-pixel blend of the two filtered images weighted by the
    // texture value t1 (and 1 - t1 for the second image)
    // (the inner 'unsafe' block is redundant — the method is already unsafe)
    unsafe
    {
        byte* dst  = (byte*) destinationData.ImageData.ToPointer( );
        byte* src1 = (byte*) filteredImage1.ImageData.ToPointer( );
        byte* src2 = (byte*) filteredImage2.ImageData.ToPointer( );

        // per-row padding: stride minus 3 bytes per pixel (24bpp RGB)
        int dstOffset  = destinationData.Stride - 3 * width;
        int src1Offset = filteredImage1.Stride - 3 * width;
        int src2Offset = filteredImage2.Stride - 3 * width;

        if ( preserveLevel != 0.0 )
        {
            // blend plus an extra preserveLevel fraction of the second image
            // for each line
            for ( int y = 0; y < height; y++ )
            {
                // for each pixel
                for ( int x = 0; x < width; x++ )
                {
                    double t1 = texture[y, x];
                    double t2 = 1 - t1;

                    // all three color channels get the same weights
                    for ( int i = 0; i < 3; i++, src1++, src2++, dst++ )
                    {
                        *dst = (byte) Math.Min( 255.0f, filterLevel * ( t1 * ( *src1 ) + t2 * ( *src2 ) ) + preserveLevel * ( *src2 ) );
                    }
                }
                src1 += src1Offset;
                src2 += src2Offset;
                dst  += dstOffset;
            }
        }
        else
        {
            // plain weighted blend (no preserved component)
            // for each line
            for ( int y = 0; y < height; y++ )
            {
                // for each pixel
                for ( int x = 0; x < width; x++ )
                {
                    double t1 = texture[y, x];
                    double t2 = 1 - t1;

                    for ( int i = 0; i < 3; i++, src1++, src2++, dst++ )
                    {
                        *dst = (byte) Math.Min( 255.0f, t1 * *src1 + t2 * *src2 );
                    }
                }
                src1 += src1Offset;
                src2 += src2Offset;
                dst  += dstOffset;
            }
        }
    }

    // dispose temp images (never dispose sourceData — the caller owns it)
    filteredImage1.Dispose( );
    if ( filteredImage2 != sourceData )
    {
        filteredImage2.Dispose( );
    }
}
/// <summary>
/// Full OCR preprocessing pipeline for one image file: builds a grayscale /
/// binarized / masked version, estimates the median word size, smooths the
/// mask into text areas, extracts per-word blobs (with a 1px border), and
/// writes out debug PNGs, an annotated overview image, and a JSON metadata
/// file (areas + word blobs) next to the input file.
/// </summary>
/// <param name="fileName">Path of the image to process; all outputs are
/// written alongside it with derived extensions.</param>
static void Process2(string fileName)
{
    Console.WriteLine(fileName);
    // Metadata accumulator that is serialized to <fileName>.meta.json at the end.
    var meta = new OCR.Shared.ImageInfo
    {
        FileName = fileName,
        MaskName = fileName + ".mask.png",
        FilteredName = fileName + ".filtered.png",
        Words = new List<OCR.Shared.Blob>(),
        Areas = new List<OCR.Shared.Rect>()
    };
    using (var img = System.Drawing.Image.FromFile(fileName))
    using (var bmp = new Bitmap(img))
    {
        // Grayscale + sharpen + contrast boost.
        // NOTE(review): the intermediate UnmanagedImages (gray, bw, mask,
        // filtered, rgb, blobImg) are never disposed.
        var gray = new FiltersSequence(Grayscale.CommonAlgorithms.BT709, new GaussianSharpen(), new ContrastCorrection(20))
            .Apply(UnmanagedImage.FromManagedImage(bmp));
        gray.ToManagedImage().Save(fileName + ".gray.png", System.Drawing.Imaging.ImageFormat.Png);
        // Adaptive binarization, inverted so text becomes white on black.
        var bw = new FiltersSequence(new BradleyLocalThresholding { PixelBrightnessDifferenceLimit = 0.20f }, new Invert())
            .Apply(gray);
        bw.ToManagedImage().Save(fileName + ".bw.png", System.Drawing.Imaging.ImageFormat.Png);
        // Dilate and drop blobs smaller than 5x5 or larger than half the image.
        var mask = new FiltersSequence(new Dilation(), new BlobsFiltering(5, 5, gray.Width / 2, gray.Height / 2, false))
            .Apply(bw);
        mask.ToManagedImage().Save(meta.MaskName, System.Drawing.Imaging.ImageFormat.Png);
        {
            // Median connected-component width/height == estimated word size.
            // NOTE(review): indexing sortedw[Length / 2] throws if no blobs
            // survive the filtering — consider guarding the empty case.
            var ccs = new BlobCounter(mask).GetObjectsRectangles();
            var sortedw = ccs.Select(p => p.Width).OrderBy(p => p).ToArray();
            var sortedh = ccs.Select(p => p.Height).OrderBy(p => p).ToArray();
            meta.MedianWordSize = new OCR.Shared.Point { X = sortedw[sortedw.Length / 2], Y = sortedh[sortedh.Length / 2] };
            Console.WriteLine($"Median: {meta.MedianWordSize.X}, {meta.MedianWordSize.Y}");
        }
        // Keep only the grayscale pixels under the mask (double-invert trick:
        // Intersect works on white, the content here is dark-on-light).
        var filtered = new FiltersSequence(new Invert(), new Intersect(mask), new Invert())
            .Apply(gray);
        var filteredBmp = filtered.ToManagedImage();
        filteredBmp.Save(meta.FilteredName, System.Drawing.Imaging.ImageFormat.Png);
        // RGB copy used for the annotated ".marked.png" overview.
        var rgb = new GrayscaleToRGB().Apply(filtered);
        // Run-length smoothing (2x the median word size) merges nearby words
        // into contiguous text areas.
        mask = new FiltersSequence(new HorizontalRunLengthSmoothing(meta.MedianWordSize.X * 2), new VerticalRunLengthSmoothing(meta.MedianWordSize.Y * 2))
            .Apply(mask);
        mask.ToManagedImage().Save(fileName + ".area.png", System.Drawing.Imaging.ImageFormat.Png);
        // Fill each area's bounding box solid so areas become rectangles.
        foreach (Rectangle rect in new BlobCounter(mask).GetObjectsRectangles())
        {
            Drawing.FillRectangle(mask, rect, Color.White);
        }
        mask.ToManagedImage().Save(fileName + ".rect.png", System.Drawing.Imaging.ImageFormat.Png);
        // Record the (possibly merged) rectangular areas and outline them red.
        foreach (Rectangle rect in new BlobCounter(mask).GetObjectsRectangles())
        {
            meta.Areas.Add(new OCR.Shared.Rect { X = rect.X, Y = rect.Y, W = rect.Width, H = rect.Height });
            Drawing.Rectangle(rgb, rect, Color.Red);
        }
        // Restrict the binarized image to the detected areas, then extract
        // each word blob.
        new Intersect(mask).ApplyInPlace(bw);
        foreach (var blob in new BlobCounter(bw).GetObjects(bw, false))
        {
            // Grow the blob rectangle by 1px on every side...
            var outRect = new OCR.Shared.Rect { X = blob.Rectangle.X - 1, Y = blob.Rectangle.Y - 1, W = blob.Rectangle.Width + 2, H = blob.Rectangle.Height + 2 };
            // ...then clamp back to the image bounds.
            if (outRect.X < 0) { outRect.X = 0; outRect.W--; }
            if (outRect.Y < 0) { outRect.Y = 0; outRect.H--; }
            if (outRect.X + outRect.W > bw.Width) { outRect.W = bw.Width - outRect.X; }
            if (outRect.Y + outRect.H > bw.Height) { outRect.H = bw.Height - outRect.Y; }
            var gravityCenter = new OCR.Shared.Point { X = (int)blob.CenterOfGravity.X, Y = (int)blob.CenterOfGravity.Y };
            var geometryCenter = new OCR.Shared.Point { X = blob.Rectangle.X + blob.Rectangle.Width / 2, Y = blob.Rectangle.Y + blob.Rectangle.Height / 2 };
            // Copy the blob pixels into a buffer padded to outRect's size.
            // NOTE(review): columns are shifted by one (i + 1) but rows are
            // not (j starts writing at row 0), so the 1px border is
            // asymmetric vertically — verify this is intended.
            var bytedata = blob.Image.ToByteArray();
            var newbytedata = new byte[outRect.W * outRect.H];
            for (var j = 0; j < outRect.H - 2; j++)
            {
                for (var i = 0; i < outRect.W - 2; i++)
                {
                    newbytedata[j * outRect.W + i + 1] = bytedata[j * (outRect.W - 2) + i];
                }
            }
            // Dilated blob mask used to cut the word out of the gray image.
            var blobImg = new FiltersSequence(new Dilation()).Apply(UnmanagedImage.FromByteArray(newbytedata, outRect.W, outRect.H, System.Drawing.Imaging.PixelFormat.Format8bppIndexed));
            var area = new Rectangle(outRect.X, outRect.Y, outRect.W, outRect.H);
            // Annotate the overview: blue word box, magenta geometric center,
            // cyan center of gravity.
            Drawing.Rectangle(rgb, area, Color.Blue);
            Drawing.FillRectangle(rgb, new Rectangle(geometryCenter.X - 2, geometryCenter.Y - 2, 5, 5), Color.Magenta);
            Drawing.FillRectangle(rgb, new Rectangle(gravityCenter.X - 2, gravityCenter.Y - 2, 5, 5), Color.Cyan);
            // NOTE(review): LockBits/UnlockBits are not in a try/finally — an
            // exception while building the word leaves filteredBmp locked.
            var bits = filteredBmp.LockBits(area, System.Drawing.Imaging.ImageLockMode.ReadOnly, System.Drawing.Imaging.PixelFormat.Format8bppIndexed);
            meta.Words.Add(
                new OCR.Shared.Blob
                {
                    Id = blob.ID,
                    // Gray pixels masked by the dilated blob (same double-invert trick).
                    Data = new FiltersSequence(new Invert(), new Intersect(blobImg), new Invert()).Apply(new UnmanagedImage(bits)).ToByteArray(),
                    Position = outRect,
                    GravityCenter = gravityCenter,
                    GeometryCenter = geometryCenter
                });
            filteredBmp.UnlockBits(bits);
        }
        rgb.ToManagedImage().Save(fileName + ".marked.png", System.Drawing.Imaging.ImageFormat.Png);
        File.WriteAllText(fileName + ".meta.json", Newtonsoft.Json.JsonConvert.SerializeObject(meta, Newtonsoft.Json.Formatting.Indented));
    }
}