// Converts the frame to grayscale, runs the edge detector selected by
// EdgeDetectValue, and converts the result back to a 24bpp color bitmap.
private void EdgeDetectImg(ref Bitmap frame)
{
    // Edge detectors require an 8bpp grayscale source.
    frame = Grayscale.CommonAlgorithms.RMY.Apply(frame);

    // Pick the detector for the configured mode; anything unrecognized
    // falls back to the homogeneity detector.
    IInPlaceFilter detector;
    switch (EdgeDetectValue)
    {
        case 1:
            detector = new SobelEdgeDetector();
            break;
        case 2:
            detector = new DifferenceEdgeDetector();
            break;
        case 3:
            detector = new HomogenityEdgeDetector();
            break;
        case 4:
            detector = new CannyEdgeDetector();
            break;
        default:
            detector = new HomogenityEdgeDetector();
            break;
    }
    detector.ApplyInPlace(frame);

    // Back to color format so the caller can display the result.
    frame = new GrayscaleToRGB().Apply(frame);
}
// Configures this filter wrapper to run the AForge difference edge detector.
// The detector only accepts 8bpp grayscale input, hence the BT709 bitmap type.
public mEdgeDifference()
{
    BitmapType = mFilter.BitmapTypes.GrayscaleBT709;
    Effect = new DifferenceEdgeDetector();
    // 'filter' aliases 'Effect' — presumably the base class applies 'filter';
    // confirm against the mFilter base implementation.
    filter = Effect;
}
// Menu handler: grayscale (BT709) pictureBox1's image, run the difference
// edge detector, and display the result in pictureBox2.
// Fix: the intermediate grayscale bitmap was previously assigned to the
// picture box and then overwritten without ever being disposed (GDI+ leak).
private void differenceToolStripMenuItem_Click(object sender, EventArgs e)
{
    Bitmap gray = new GrayscaleBT709().Apply((Bitmap)pictureBox1.Image);
    try
    {
        pictureBox2.Image = new DifferenceEdgeDetector().Apply(gray);
    }
    finally
    {
        // Release the intermediate; the edge detector returned a new bitmap.
        gray.Dispose();
    }
    // NOTE(review): the bitmap previously shown in pictureBox2 is not disposed
    // here because other code may still reference it — confirm ownership.
}
// Takes ownership of _src: grayscales it (BT709), runs the difference edge
// detector, and returns the result converted back to 24bpp RGB.
// Fix: the grayscale intermediate produced by BT709.Apply was never disposed.
private Bitmap ProcessSingleImage(Bitmap _src)
{
    Bitmap gray = Grayscale.CommonAlgorithms.BT709.Apply(_src);
    _src.Dispose();

    // Apply filter.
    Bitmap tmp;
    try
    {
        tmp = new DifferenceEdgeDetector().Apply(gray);
    }
    finally
    {
        gray.Dispose(); // previously leaked
    }

    // Back to 24bpp.
    Bitmap tmp2 = new GrayscaleToRGB().Apply(tmp);
    tmp.Dispose();
    return (tmp2);
}
// BackgroundWorker handler: picks the image source (input or output box),
// runs the selected edge detector, and swaps the result into outputImageBox.
// Fixes: NullReferenceException when neither source radio button is checked;
// the three near-identical detector branches collapsed into one apply path;
// the cloned source and unmanaged intermediates are now disposed.
// NOTE(review): this touches UI controls from a DoWork thread — the original
// did too; confirm the form marshals access appropriately.
public void ApplyEdge(object sender, DoWorkEventArgs e)
{
    Bitmap raw_image = null;
    if (edgeInputRB.Checked)
    {
        raw_image = Accord.Imaging.Filters.Grayscale.CommonAlgorithms.BT709.Apply((Bitmap)input_PB.Image.Clone());
    }
    else if (edgeOutputRb.Checked)
    {
        raw_image = (Bitmap)outputImageBox.Image.Clone();
    }
    if (raw_image == null)
    {
        // No source selected — previously this fell through to a
        // NullReferenceException inside the detector branches.
        return;
    }

    // Choose the detector. NOTE(review): the "Prewitt" option actually runs
    // DifferenceEdgeDetector — kept as-is to preserve existing behavior.
    IFilter detector = null;
    if (sobelRb.Checked)
    {
        detector = new SobelEdgeDetector();
    }
    else if (prewittRb.Checked)
    {
        detector = new DifferenceEdgeDetector();
    }
    else if (CannyRb.Checked)
    {
        // Parse the Canny parameters in the original order so a bad value
        // surfaces against the same text box as before.
        byte High = byte.Parse(textBox3.Text);
        byte Low = byte.Parse(textBox2.Text);
        double GaussSigma = double.Parse(textBox1.Text);
        int GaussSize = int.Parse(textBox4.Text);
        CannyEdgeDetector canny = new CannyEdgeDetector();
        canny.GaussianSize = GaussSize;
        canny.HighThreshold = High;
        canny.LowThreshold = Low;
        canny.GaussianSigma = GaussSigma;
        detector = canny;
    }
    if (detector == null)
    {
        raw_image.Dispose();
        return; // no detector selected — nothing to do
    }

    // FromManagedImage copies the pixels, so the managed source and the
    // unmanaged result can both be released after ToManagedImage().
    using (UnmanagedImage source = UnmanagedImage.FromManagedImage(raw_image))
    using (UnmanagedImage res = detector.Apply(source))
    {
        outputImageBox.Image.Dispose();
        outputImageBox.Image = res.ToManagedImage();
    }
    raw_image.Dispose();
}
// Loads an image, boosts its contrast, saves the result, then saves the
// difference-edge-detected version.
// Fixes: bitmaps are now disposed (three GDI+ leaks); an unused, fully
// configured BlobCounter was dead code and has been removed.
private static void function()
{
    string path = "filePath";
    using (Bitmap image = (Bitmap)Bitmap.FromFile(path))
    using (Bitmap newImage = new ContrastCorrection(200).Apply(image))
    {
        newImage.Save("filterresult.png");

        // NOTE(review): DifferenceEdgeDetector requires 8bpp grayscale input;
        // if the loaded file is color this throws, exactly as before — confirm
        // the intended source format. The output name "sobel.png" is also
        // misleading (this is the difference detector) but kept for compatibility.
        using (var myFile = new DifferenceEdgeDetector().Apply(newImage))
        {
            myFile.Save("sobel.png");
        }
    }
}
// Camera new-frame handler: clones the frame (the driver reuses the event's
// bitmap), grayscales it, runs in-place difference edge detection, and shows
// the result. Fix: the previously displayed bitmap is now disposed, which was
// a one-bitmap-per-frame GDI+ leak.
void chosendevice_newframe(object sender, NewFrameEventArgs eventargs)
{
    // NOTE(review): the previous value of the 'image' field is never disposed;
    // if nothing else references it, that is still a per-frame leak — confirm.
    image = (Bitmap)eventargs.Frame.Clone();

    // create grayscale filter (BT709 weights)
    Grayscale filter = new Grayscale(0.2125, 0.7154, 0.0721);
    Bitmap grayImage = filter.Apply(image);

    // edge detection, in place on the grayscale copy
    DifferenceEdgeDetector filters = new DifferenceEdgeDetector();
    filters.ApplyInPlace(grayImage);

    // Swap the displayed bitmap and release the old one.
    Image previous = pictureBox1.Image;
    pictureBox1.Image = grayImage;
    if (previous != null)
    {
        previous.Dispose();
    }
}
// Loads the image at 'path', grayscales it (Y), runs the difference edge
// detector, and shows the result in a new ImageAView smart part (closing any
// previously open one). Fix: the loaded source and the grayscale intermediate
// were leaked on every click; they are now disposed.
public void DifferenceClickHandler(object sender, EventArgs e)
{
    if (image != null)
    {
        Bitmap source = new Bitmap(path);
        Bitmap gray = Grayscale.CommonAlgorithms.Y.Apply(source);
        source.Dispose(); // previously leaked
        DifferenceEdgeDetector gb = new DifferenceEdgeDetector();
        Bitmap imx = gb.Apply(gray);
        gray.Dispose(); // previously leaked
        if (mov != null)
        {
            this.WorkItem.Workspaces[WorkspaceNames.TabWorkspace].Close(mov);
        }
        mov = this.WorkItem.SmartParts.AddNew <ImageAView>();
        mov.panAndZoomPictureBox1.Image = imx;
        SmartPartInfo spi = new SmartPartInfo("Difference", "MyOwnDescription");
        this.WorkItem.Workspaces[WorkspaceNames.TabWorkspace].Show(mov, spi);
    }
}
// Takes ownership of _src: normalizes to 24bpp if needed, grayscales (BT709),
// runs the difference edge detector, and returns the result as 24bpp RGB.
// Fixes: the original read _src.PixelFormat AFTER _src.Dispose() (use of a
// disposed object); the BT709 grayscale intermediate was also leaked.
private Bitmap ProcessSingleImage(Bitmap _src)
{
    // Capture the format decision BEFORE _src is disposed.
    bool srcWas24bpp = _src.PixelFormat == PixelFormat.Format24bppRgb;
    Bitmap img = srcWas24bpp ? _src : CloneTo24bpp(_src);

    // Apply filter.
    Bitmap gray = Grayscale.CommonAlgorithms.BT709.Apply(img);
    Bitmap tmp = new DifferenceEdgeDetector().Apply(gray);
    gray.Dispose(); // previously leaked

    _src.Dispose();
    if (!srcWas24bpp)
    {
        img.Dispose(); // dispose the conversion copy only when one was made
    }

    // Back to 24bpp.
    Bitmap tmp2 = new GrayscaleToRGB().Apply(tmp);
    tmp.Dispose();
    return (tmp2);
}
// Button handler: runs a three-stage pipeline (grayscale, difference edge
// detection, thresholding) over ImgInput and shows the result in ImgOutput,
// advancing the progress bar after each stage. Errors are shown in a dialog.
private void BtnDetectEdges_Click(object sender, EventArgs e)
{
    try
    {
        ProgressBar.Maximum = 3;
        ProgressBar.Value = 0;

        UnmanagedImage source = UnmanagedImage.FromManagedImage((Bitmap)ImgInput.Image);

        // Stage 1: ensure an 8bpp grayscale image (skip if already indexed).
        UnmanagedImage gray;
        if (source.PixelFormat == PixelFormat.Format8bppIndexed)
        {
            gray = source;
        }
        else
        {
            gray = UnmanagedImage.Create(source.Width, source.Height, PixelFormat.Format8bppIndexed);
            Grayscale.CommonAlgorithms.BT709.Apply(source, gray);
        }
        ProgressBar.Value++;

        // Stage 2: difference edge detection.
        UnmanagedImage edges = new DifferenceEdgeDetector().Apply(gray);
        ProgressBar.Value++;

        // Stage 3: binarize edges at the user-selected threshold.
        new Threshold((int)NumericTrashold.Value).ApplyInPlace(edges);
        ProgressBar.Value++;

        ImgOutput.Image = edges.ToManagedImage();
    }
    catch (Exception exception)
    {
        MessageBox.Show(exception.Message);
    }
}
// Finds the quadrilateral outline of the dominant shape in 'image' (edge
// detection + thresholding + quadrilateral finding on a grayscale copy),
// stores its corners in the Corners property, and returns the shape warped
// into a bounding square via quadrilateral transformation.
public static Bitmap TranformImage(Bitmap image)
{
    List <IntPoint> corners;
    using (Bitmap grayscaleBitmap = ConvertToGreyscale(image))
    {
        // Outline the shape, then binarize so only strong edges remain.
        new DifferenceEdgeDetector().ApplyInPlace(grayscaleBitmap);
        // todo: threshold value is hard-coded; consider making it configurable
        new Threshold(20).ApplyInPlace(grayscaleBitmap);

        corners = new QuadrilateralFinder().ProcessImage(grayscaleBitmap);
    }

    int boundingSquareSideLength = GetBoundingSquareSideLength(corners);
    QuadrilateralTransformation quadrilateralTransformation =
        new QuadrilateralTransformation(corners, boundingSquareSideLength, boundingSquareSideLength);
    Corners = corners;
    return quadrilateralTransformation.Apply(image);
}
// =========================================================
// Grayscales the frame, applies the edge detector chosen by par_int
// (1 = Sobel, 2 = Difference, 4 = Canny, anything else = Homogeneity),
// then converts the result back to a 24bpp color bitmap.
private void Edge_detectFunc(ref Bitmap frame, int par_int)
{
    // Detectors need an 8bpp grayscale source.
    frame = Grayscale.CommonAlgorithms.RMY.Apply(frame);

    IInPlaceFilter edgeFilter;
    if (par_int == 1)
    {
        edgeFilter = new SobelEdgeDetector();
    }
    else if (par_int == 2)
    {
        edgeFilter = new DifferenceEdgeDetector();
    }
    else if (par_int == 4)
    {
        edgeFilter = new CannyEdgeDetector();
    }
    else
    {
        // Covers par_int == 3 and every unrecognized value alike.
        edgeFilter = new HomogenityEdgeDetector();
    }
    edgeFilter.ApplyInPlace(frame);

    // Back to color format for display.
    frame = new GrayscaleToRGB().Apply(frame);
}
// Deskews and crops a screenshot-like image.
// Pipeline: rotate 90°, then estimate the residual skew angle from the
// strongest Hough lines of the thresholded/edge-detected image, rotate by
// that angle, and crop to the union of the detected blobs.
// Returns (cropped bitmap or null, total rotation applied, crop rectangle).
public static Tuple <Bitmap, double, Rectangle> smate_rotate(Bitmap src)
{
    Bitmap retB = null;
    Rectangle retR = Rectangle.Empty;
    double retAngle = 90.0; // initial 90° rotation; skew correction is added later
    double angle = 0.0;
    RotateBicubic filter = new RotateBicubic(retAngle);
    Bitmap src1 = filter.Apply(src);
    //Bitmap src1 = (Bitmap)src.Clone();
    Bitmap g = Grayscale.CommonAlgorithms.BT709.Apply(src1);
    Invert it = new Invert();
    it.ApplyInPlace(g);
    //g.Save("temp_1.jpg");
    // Adaptive threshold: one standard deviation below the mean gray level.
    ImageStatistics stat = new ImageStatistics(g);
    Threshold t = new Threshold((int)(stat.Gray.Mean - stat.Gray.StdDev));
    t.ApplyInPlace(g);
    //g.Save("temp_2.jpg");
    stat = new ImageStatistics(g);
    DifferenceEdgeDetector edgeDetector = new DifferenceEdgeDetector();
    edgeDetector.ApplyInPlace(g);
    //g.Save("temp_3.jpg");
    // Estimate skew from the strongest Hough lines (>= 80% relative intensity).
    HoughLineTransformation lineTransform = new HoughLineTransformation();
    lineTransform.ProcessImage(g);
    HoughLine[] lines = lineTransform.GetLinesByRelativeIntensity(0.8);
    foreach (HoughLine l in lines)
    {
        Program.logIt(string.Format("Intensity={0}, Radius={1}, Theta={2}", l.Intensity, l.Radius, l.Theta));
        // NOTE(review): both branches of this radius test compute the same
        // angle, so the Radius sign currently has no effect — confirm intent.
        if (l.Radius < 0)
        {
            if (l.Theta < 90) { angle = -l.Theta; } else { angle = 180.0 - l.Theta; }
        }
        else
        {
            if (l.Theta < 90) { angle = -l.Theta; } else { angle = 180.0 - l.Theta; }
        }
        // Accept the first line whose implied skew is under 45 degrees.
        if (Math.Abs(angle) < 45.0) { break; }
    }
    Program.logIt(string.Format("angle={0}", angle));
    retAngle += angle;
    RotateBicubic r_filter = new RotateBicubic(angle);
    Bitmap rotated = r_filter.Apply(src1);
    // crop
    if (rotated != null)
    {
        // Grayscale (BT709 weights), blur, and Otsu-binarize before blob analysis.
        Grayscale g_filter = new Grayscale(0.2125, 0.7154, 0.0721);
        Bitmap grayImage = g_filter.Apply(rotated);
        Blur bf = new Blur();
        bf.ApplyInPlace(grayImage);
        OtsuThreshold o_filter = new OtsuThreshold();
        o_filter.ApplyInPlace(grayImage);
        BlobCounter blobCounter = new BlobCounter();
        blobCounter.MinHeight = 20;
        blobCounter.MinWidth = 20;
        blobCounter.FilterBlobs = false;
        blobCounter.BlobsFilter = null;
        blobCounter.ObjectsOrder = ObjectsOrder.YX;
        blobCounter.ProcessImage(grayImage);
        Blob[] blobs = blobCounter.GetObjectsInformation();
        Program.logIt(string.Format("blobs={0}", blobCounter.ObjectsCount));
        Rectangle r = Rectangle.Empty;
        // NOTE(review): loop starts at i = 1, deliberately skipping blobs[0] —
        // presumably the background/top-most blob in YX order; confirm.
        for (int i = 1; i < blobs.Length; i++)
        {
            Blob b = blobs[i];
            Program.logIt(string.Format("{0}: {1}", b.ID, b.Rectangle));
            // Grow the crop rectangle to cover every remaining blob.
            if (r == Rectangle.Empty) { r = b.Rectangle; } else { r = Rectangle.Union(r, b.Rectangle); }
        }
        Program.logIt(string.Format("rect: {0}", r));
        retR = r;
        Crop c_filter = new Crop(r);
        retB = c_filter.Apply(rotated);
    }
    return (new Tuple <Bitmap, double, Rectangle>(retB, retAngle, retR));
}
// Applies the AForge filter identified by 'filter' (an id resolved through
// the Filters.filters map) to 'img' and returns the result normalized to
// 32bpp ARGB. Unrecognized ids return the (converted) source unchanged.
// Some filters need a specific input format, so the source is converted to
// grayscale or 24bpp RGB in the matching branch before applying.
public static Bitmap FilterImage(Bitmap img, int filter)
{
    Bitmap sourceImage = img;
    // Normalize up front so most filters can run on a known pixel format.
    sourceImage = ImageUtil.convert(sourceImage, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
    IFilter myFilter;
    Bitmap filteredImage = sourceImage; // default result if no branch matches
    if (filter == Filters.filters["Greyscale"])
    {
        sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
        filteredImage = sourceImage;
    }
    else if (filter == Filters.filters["Sepia"])
    {
        myFilter = new Sepia();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["Invert"])
    {
        // Invert requires 24bpp RGB input.
        sourceImage = ImageUtil.convert(sourceImage, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
        myFilter = new Invert();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["RotateChannel"])
    {
        myFilter = new RotateChannels();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["Threshold"])
    {
        // Threshold operates on grayscale only.
        sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
        myFilter = new Threshold();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["FloydFilter"])
    {
        FloydSteinbergColorDithering myReduction = new FloydSteinbergColorDithering();
        filteredImage = myReduction.Apply(sourceImage);
    }
    else if (filter == Filters.filters["OrderedDithering"])
    {
        sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
        myFilter = new OrderedDithering();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["Sharpen"])
    {
        myFilter = new Sharpen();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["DifferenceEdgeDetector"])
    {
        // Edge detectors operate on grayscale only.
        sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
        myFilter = new DifferenceEdgeDetector();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["HomogenityEdgeDetector"])
    {
        sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
        myFilter = new HomogenityEdgeDetector();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["Sobel"])
    {
        sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
        myFilter = new SobelEdgeDetector();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["Jitter"])
    {
        myFilter = new Jitter(); //Needs Expand
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["OilPainting"])
    {
        sourceImage = ImageUtil.convert(sourceImage, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
        myFilter = new OilPainting(); //Needs Expand
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["TextureFiltering"])
    {
        sourceImage = ImageUtil.convert(sourceImage, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
        myFilter = new Texturer(new TextileTexture(), 1.0, 0.8); //Needs Expand
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["Median"])
    {
        sourceImage = ImageUtil.convert(sourceImage, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
        myFilter = new Median();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["Mean"])
    {
        myFilter = new Mean();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["Blur"])
    {
        myFilter = new GaussianBlur();
        filteredImage = myFilter.Apply(sourceImage);
    }
    //Console.Write(filteredImage.PixelFormat.ToString());
    //Console.Write(sourceImage.PixelFormat.ToString());
    // Normalize the output format for callers regardless of which branch ran.
    filteredImage = ImageUtil.convert(filteredImage, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
    return (filteredImage);
}
/// <summary>
/// Detects square markers in <paramref name="bitmap"/>: blur → grayscale →
/// difference edge detection → Otsu binarization → blob/quadrilateral
/// analysis. Each accepted marker is classified by color and rotation, and
/// its distance/angles are computed via the core service.
/// Fix: the blob counter previously assigned <c>MinHeight</c> twice — the
/// second assignment (fed <c>BlobMinWidth</c>) now correctly sets
/// <c>MinWidth</c>.
/// </summary>
/// <param name="bitmap">Camera frame to analyze (blurred in place).</param>
/// <param name="rgb">True to classify markers as R/G/B; false for black/white.</param>
/// <returns>The detection result with all accepted markers.</returns>
public ImageProcessorResult Process(Bitmap bitmap, bool rgb)
{
    var result = new ImageProcessorResult();
    _viewConfig = _configService.ViewConfig;
    _imageProcessorConfig = _configService.ImageProcessorConfig;
    new Blur().ApplyInPlace(bitmap);
    Bitmap overlay = bitmap;
    if (_viewConfig.BackgroundImage == ViewConfigBackgroundImage.CameraRaw)
    {
        // Raw camera image as background.
        overlay = bitmap;
    }
    // Grayscale.
    var grayscale = Grayscale.CommonAlgorithms.BT709.Apply(bitmap);
    // Edge detection.
    var edges = new DifferenceEdgeDetector().Apply(grayscale);
    if (_viewConfig.BackgroundImage == ViewConfigBackgroundImage.Edge)
    {
        overlay = new GrayscaleToRGB().Apply(edges);
    }
    // Binarization.
    // var threshold = new Threshold(_imageProcessorConfig.Threshold).Apply(edges);
    var threshold = new OtsuThreshold().Apply(edges);
    if (_viewConfig.BackgroundImage == ViewConfigBackgroundImage.Binary)
    {
        overlay = new GrayscaleToRGB().Apply(threshold);
    }
    // Clone the overlay so drawing does not touch the source.
    overlay = overlay.CloneBitmap();
    // Lock the overlay for direct pixel access while markers are drawn.
    var overlayData = overlay.LockBits(overlay.GetRectangle(), ImageLockMode.ReadWrite, overlay.PixelFormat);
    _blobCounter = new BlobCounter();
    _blobCounter.MinHeight = _imageProcessorConfig.BlobMinHeight;
    // FIX: was "_blobCounter.MinHeight = ...BlobMinWidth" — MinWidth was never set.
    _blobCounter.MinWidth = _imageProcessorConfig.BlobMinWidth;
    _blobCounter.FilterBlobs = true;
    _blobCounter.ObjectsOrder = ObjectsOrder.XY;
    _blobCounter.ProcessImage(threshold);
    var blobs = _blobCounter.GetObjectsInformation();
    var shapeChecker = new SimpleShapeChecker();
    // Examine every blob as a marker candidate.
    foreach (var blob in blobs)
    {
        // Candidate marker for this blob.
        Marker marker = new Marker();
        var edgePoints = _blobCounter.GetBlobsEdgePoints(blob);
        // Quadrilateral test.
        var points = new List <IntPoint>();
        if (shapeChecker.IsQuadrilateral(edgePoints, out points))
        {
            marker.Points = points;
            List <IntPoint> leftEdge, rightEdge;
            _blobCounter.GetBlobsLeftAndRightEdges(blob, out leftEdge, out rightEdge);
            // Angle/side-length test.
            if (MoreQuadTest(blob, marker, leftEdge, rightEdge))
            {
                // Black-border test.
                if (BorderTest(blob, marker, grayscale, leftEdge, rightEdge))
                {
                    // Accepted.
                    result.Markers.Add(marker);
                    // Center-of-gravity coordinates relative to image center.
                    marker.X = (int)(blob.CenterOfGravity.X - (threshold.Width / 2));
                    marker.Y = (int)(-(blob.CenterOfGravity.Y - (threshold.Height / 2)));
                    // Area as computed by the framework.
                    marker.FrameworkArea = blob.Area;
                    // Draw onto the overlay.
                    ApplyOverlay(overlayData, marker.Points);
                }
            }
        }
    }
    overlay.UnlockBits(overlayData);
    foreach (var marker in result.Markers)
    {
        var points = marker.Points;
        // Orientation correction: rotate the corner list if the quad is "lying down".
        var sideLength = points[0].DistanceTo(points[1]);
        if (points[2].Y - points[1].Y < sideLength / 1.6)
        {
            points = new List <IntPoint>(
                new IntPoint[] { points[1], points[2], points[3], points[0] });
            marker.Points = points;
        }
        // Undo the perspective distortion.
        var quadrilateralTransformation = new QuadrilateralTransformation(points,
            _imageProcessorConfig.QuadrilateralTransformationWidth,
            _imageProcessorConfig.QuadrilateralTransformationHeight);
        var transformed = quadrilateralTransformation.Apply(bitmap);
        // Begin rotation and color classification.
        int halfWidth = _imageProcessorConfig.QuadrilateralTransformationWidth / 2,
            halfHeight = _imageProcessorConfig.QuadrilateralTransformationHeight / 2;
        // crops[x] => quadrant x + 1 of the rectified marker.
        var crops = new[]
        {
            new Crop(new Rectangle(halfWidth, 0, halfWidth, halfHeight)),
            new Crop(new Rectangle(0, 0, halfWidth, halfHeight)),
            new Crop(new Rectangle(0, halfHeight, halfWidth, halfHeight)),
            new Crop(new Rectangle(halfWidth, halfHeight, halfWidth, halfHeight))
        };
        var quadImage = new[]
        {
            crops[0].Apply(transformed), crops[1].Apply(transformed),
            crops[2].Apply(transformed), crops[3].Apply(transformed)
        };
        // Per-quadrant luminance after filtering for each candidate color.
        var filteredResult = new[]
        {
            new { Img = quadImage[0], Red = Filter(quadImage[0], MarkerColor.Red).Luminance(), Green = Filter(quadImage[0], MarkerColor.Green).Luminance(), Blue = Filter(quadImage[0], MarkerColor.Blue).Luminance(), White = Filter(quadImage[0], MarkerColor.White).Luminance() },
            new { Img = quadImage[1], Red = Filter(quadImage[1], MarkerColor.Red).Luminance(), Green = Filter(quadImage[1], MarkerColor.Green).Luminance(), Blue = Filter(quadImage[1], MarkerColor.Blue).Luminance(), White = Filter(quadImage[1], MarkerColor.White).Luminance() },
            new { Img = quadImage[2], Red = Filter(quadImage[2], MarkerColor.Red).Luminance(), Green = Filter(quadImage[2], MarkerColor.Green).Luminance(), Blue = Filter(quadImage[2], MarkerColor.Blue).Luminance(), White = Filter(quadImage[2], MarkerColor.White).Luminance() },
            new { Img = quadImage[3], Red = Filter(quadImage[3], MarkerColor.Red).Luminance(), Green = Filter(quadImage[3], MarkerColor.Green).Luminance(), Blue = Filter(quadImage[3], MarkerColor.Blue).Luminance(), White = Filter(quadImage[3], MarkerColor.White).Luminance() }
        };
        var whiteDesc = filteredResult.OrderByDescending(a => a.White).ToArray();
        if (rgb)
        {
            // RGB classification: sum channel luminances over the three
            // non-whitest quadrants and pick the dominant channel.
            var colorQuad = whiteDesc.Skip(1);
            var red = colorQuad.Sum(a => a.Red);
            var green = colorQuad.Sum(a => a.Green);
            var blue = colorQuad.Sum(a => a.Blue);
            Console.WriteLine("{0}: {1} {2} {3}", colorQuad.Count(), red, green, blue);
            var max = Math.Max(red, Math.Max(green, blue));
            if (red == max)
            {
                marker.Color = MarkerColor.Red;
            }
            else if (green == max)
            {
                marker.Color = MarkerColor.Green;
            }
            else if (blue == max)
            {
                marker.Color = MarkerColor.Blue;
            }
        }
        else
        {
            // Black/white classification: compare the whitest quadrant
            // against the average of the remaining three.
            var whiteMax = whiteDesc[0].White;
            var whiteRest = (whiteDesc[1].White + whiteDesc[2].White + whiteDesc[3].White) / 3;
            if (whiteMax - whiteRest < _imageProcessorConfig.ColorTestWhite)
            {
                // White.
                marker.Color = MarkerColor.White;
            }
            else
            {
                // Black.
                marker.Color = MarkerColor.Black;
            }
        }
        // Rotation: index of the whitest quadrant determines orientation.
        for (int i = 0; i < 4; i++)
        {
            if (filteredResult[i].White == whiteDesc.First().White)
            {
                marker.Rotate = (MarkerRotate)(i + 1);
                break;
            }
        }
        // White markers carry no rotation information.
        if (marker.Color == MarkerColor.White)
        {
            marker.Rotate = MarkerRotate.None;
        }
        // Translate corner coordinates so the image center is the origin.
        for (int i = 0; i < marker.Points.Count; i++)
        {
            marker.Points[i] = new IntPoint
            {
                X = marker.Points[i].X - _configService.DeviceConfig.PixelWidth / 2,
                Y = marker.Points[i].Y - _configService.DeviceConfig.PixelHeight / 2
            };
        }
        // Geometric distance calculation via the core service.
        var coreResult = _coreService.Query(marker.Points, _imageProcessorConfig.MarkerSize);
        marker.EuclideanDistance = coreResult.Distance;
        marker.TiltAngle = Math.Asin(coreResult.TranslationVector[1] / marker.EuclideanDistance);
        marker.PanAngle = Math.Asin(coreResult.TranslationVector[0] / marker.EuclideanDistance);
        if (marker.PanAngle > Math.PI)
        {
            // Negative pan.
            marker.PanAngle = 2 * Math.PI - marker.PanAngle;
        }
        marker.TransX = coreResult.TranslationVector[0];
        marker.TransY = coreResult.TranslationVector[1];
        marker.TransZ = coreResult.TranslationVector[2];
    }
    BackgroundBitmap = overlay;
    Console.WriteLine();
    foreach (var marker in result.Markers)
    {
        Console.WriteLine(marker.Color);
    }
    return (result);
}
/// <summary>
/// This is the method that actually does the work.
/// Reads a bitmap and a filter name from the inputs, applies the matching
/// AForge filter (converting to grayscale/24bpp first where the filter
/// requires it), and outputs the result normalized to 32bpp ARGB.
/// </summary>
/// <param name="DA">The DA object can be used to retrieve data from input parameters and
/// to store data in output parameters.</param>
protected override void SolveInstance(IGH_DataAccess DA)
{
    Bitmap sourceImage = null;
    DA.GetData(0, ref sourceImage);
    string filter = "";
    DA.GetData(1, ref filter);
    // Normalize the input so most filters can run on a known pixel format.
    sourceImage = ImageUtilities.convert(sourceImage, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
    IFilter myFilter;
    // Default: an unrecognized filter name returns the (converted) source.
    Bitmap filteredImage = sourceImage;
    //Grayscale.CommonAlgorithms.Y.Apply
    switch (filter)
    {
        case "Greyscale":
            Console.Write("Applying: " + filter);
            sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
            filteredImage = sourceImage;
            break;
        case "Sepia":
            Console.Write("Applying: " + filter);
            myFilter = new Sepia();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "Invert":
            Console.Write("Applying: " + filter);
            // Invert requires 24bpp RGB input.
            sourceImage = ImageUtilities.convert(sourceImage, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
            myFilter = new Invert();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "RotateChannel":
            Console.Write("Applying: " + filter);
            myFilter = new RotateChannels();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "Threshold": //Need Extended Version
            Console.Write("Applying: " + filter);
            // Threshold operates on grayscale only.
            sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
            myFilter = new Threshold();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "FloydFilter":
            Console.Write("Applying: " + filter);
            //sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
            //myFilter = new FloydSteinbergColorDithering();
            FloydSteinbergColorDithering myReduction = new FloydSteinbergColorDithering();
            filteredImage = myReduction.Apply(sourceImage);
            //filteredImage = myFilter.Apply(sourceImage);
            break;
        case "OrderedDithering":
            Console.Write("Applying: " + filter);
            sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
            myFilter = new OrderedDithering();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "Sharpen":
            Console.Write("Applying: " + filter);
            myFilter = new Sharpen();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "DifferenceEdgeDetector":
            Console.Write("Applying: " + filter);
            // Edge detectors operate on grayscale only.
            sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
            myFilter = new DifferenceEdgeDetector();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "HomogenityEdgeDetector":
            Console.Write("Applying: " + filter);
            sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
            myFilter = new HomogenityEdgeDetector();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "Sobel":
            Console.Write("Applying: " + filter);
            sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
            myFilter = new SobelEdgeDetector();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "Jitter":
            Console.Write("Applying: " + filter);
            myFilter = new Jitter(); //Needs Expand
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "OilPainting":
            Console.Write("Applying: " + filter);
            myFilter = new OilPainting(); //Needs Expand
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "TextureFiltering":
            Console.Write("Applying: " + filter);
            sourceImage = ImageUtilities.convert(sourceImage, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
            myFilter = new Texturer(new TextileTexture(), 1.0, 0.8); //Needs Expand
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "Median":
            Console.Write("Applying: " + filter);
            myFilter = new Median();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "Mean":
            Console.Write("Applying: " + filter);
            myFilter = new Mean();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "Blur": //Need Extended Version
            Console.Write("Applying: " + filter);
            myFilter = new GaussianBlur();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        default:
            Console.Write("No Filter");
            break;
    }
    Console.Write(filteredImage.PixelFormat.ToString());
    Console.Write(sourceImage.PixelFormat.ToString());
    // Normalize the output format for downstream components.
    filteredImage = ImageUtilities.convert(filteredImage, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
    DA.SetData(0, filteredImage);
}
// Menu handler: grayscales the source image (kaynak), runs the difference
// edge detector, and shows the result. Fix: the intermediate grayscale
// bitmap was overwritten without being disposed (GDI+ leak).
private void differenceEdgeToolStripMenuItem_Click(object sender, EventArgs e)
{
    Bitmap gray = new GrayscaleBT709().Apply(kaynak);
    islem = new DifferenceEdgeDetector().Apply(gray);
    gray.Dispose(); // previously leaked
    // NOTE(review): the previous value of 'islem' is not disposed here since
    // it may still be displayed elsewhere — confirm ownership.
    islemBox.Image = islem;
}
// Scans 'grayImage' for glyph-like quadrilaterals: edge detection ->
// thresholding -> blob analysis -> quadrilateral/contrast tests, then
// rectifies each candidate, binarizes it, and compares the recognized 5x5
// cell grid against the four known marker patterns (logging on a match).
// Collected corners of all quadrilaterals are published via 'Corners'.
// Fixes: the four 5x5 patterns were built from 100 individual element
// assignments inside the blob loop — now array initializers hoisted out of
// the loop; the four copy-pasted comparison loops (whose 'break' only exited
// the inner loop) are now a single early-exiting pattern loop.
private void scan_code()
{
    List <IntPoint> TempCorners = new List <IntPoint>();

    // Known 5x5 glyph patterns (row, column); values unchanged from before.
    byte[,] LeftUpMarker = new byte[5, 5]
    {
        { 0, 0, 0, 0, 0 },
        { 0, 0, 1, 0, 0 },
        { 0, 1, 0, 1, 0 },
        { 0, 0, 1, 0, 0 },
        { 0, 0, 0, 0, 0 }
    };
    byte[,] RightUpMarker = new byte[5, 5]
    {
        { 0, 0, 0, 0, 0 },
        { 0, 1, 0, 1, 0 },
        { 0, 0, 0, 0, 0 },
        { 0, 1, 0, 1, 0 },
        { 0, 0, 0, 0, 0 }
    };
    byte[,] LeftDownMarker = new byte[5, 5]
    {
        { 0, 0, 0, 0, 0 },
        { 0, 0, 1, 0, 0 },
        { 0, 1, 1, 1, 0 },
        { 0, 0, 1, 0, 0 },
        { 0, 0, 0, 0, 0 }
    };
    byte[,] ReightDownMarker = new byte[5, 5]
    {
        { 0, 0, 0, 0, 0 },
        { 0, 1, 1, 1, 0 },
        { 0, 1, 0, 1, 0 },
        { 0, 1, 1, 1, 0 },
        { 0, 0, 0, 0, 0 }
    };
    byte[][,] markerPatterns = { LeftUpMarker, RightUpMarker, LeftDownMarker, ReightDownMarker };

    // 2 - Edge detection
    DifferenceEdgeDetector edgeDetector = new DifferenceEdgeDetector();
    UnmanagedImage edgesImage = edgeDetector.Apply(grayImage);

    // 3 - Threshold edges
    Threshold thresholdFilter = new Threshold(40);
    thresholdFilter.ApplyInPlace(edgesImage);

    // create and configure blob counter
    BlobCounter blobCounter = new BlobCounter();
    blobCounter.MinHeight = 32;
    blobCounter.MinWidth = 32;
    blobCounter.FilterBlobs = true;
    blobCounter.ObjectsOrder = ObjectsOrder.Size;

    // 4 - find all stand alone blobs
    blobCounter.ProcessImage(edgesImage);
    Blob[] blobs = blobCounter.GetObjectsInformation();

    // 5 - check each blob
    for (int i = 0, n = blobs.Length; i < n; i++)
    {
        List <IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);
        List <IntPoint> corners = null;

        // does it look like a quadrilateral ?
        SimpleShapeChecker shapeChecker = new SimpleShapeChecker();
        if (!shapeChecker.IsQuadrilateral(edgePoints, out corners))
        {
            continue;
        }
        TempCorners.AddRange(corners);

        // get edge points on the left and on the right side
        List <IntPoint> leftEdgePoints, rightEdgePoints;
        blobCounter.GetBlobsLeftAndRightEdges(blobs[i], out leftEdgePoints, out rightEdgePoints);

        // average brightness difference between just outside and just inside
        // of the shape — glyphs have a light border around a dark interior.
        float diff = CalculateAverageEdgesBrightnessDifference(
            leftEdgePoints, rightEdgePoints, grayImage);
        if (diff <= 20)
        {
            continue;
        }

        // Rectify the candidate to a 100x100 patch and binarize it (Otsu).
        QuadrilateralTransformation quadrilateralTransformation =
            new QuadrilateralTransformation(corners, 100, 100);
        UnmanagedImage glyphImage = quadrilateralTransformation.Apply(grayImage);
        OtsuThreshold otsuThresholdFilter = new OtsuThreshold();
        otsuThresholdFilter.ApplyInPlace(glyphImage);
        image = glyphImage;

        // recognize raw glyph
        float confidence;
        byte[,] glyphValues = Recognize(glyphImage,
            new System.Drawing.Rectangle(0, 0, glyphImage.Width, glyphImage.Height), out confidence);

        // Compare the recognized grid against every known pattern; each match
        // is logged exactly once, as before.
        foreach (byte[,] pattern in markerPatterns)
        {
            bool matches = true;
            for (int l = 0; l < 5 && matches; l++)
            {
                for (int m = 0; m < 5; m++)
                {
                    if (pattern[l, m] != glyphValues[l, m])
                    {
                        matches = false;
                        break;
                    }
                }
            }
            if (matches)
            {
                Debug.Log("Marker erkannt");
            }
        }
    }

    if (TempCorners.Count > 0)
    {
        Corners = TempCorners;
    }
}
// Wraps AForge's difference edge detector; the instance is created once here
// and presumably reused by this class's apply/processing methods — confirm.
public DifferenceEdgeDetectorFilter()
{
    differenceEdgeDetector = new DifferenceEdgeDetector();
}
// Detects square markers in the given frame. Pipeline: BT709 grayscale ->
// difference edge detection -> threshold (binThreshold) -> blob analysis.
// Blobs that pass isSquare() and whose average left/right outside-vs-inside
// brightness difference exceeds 50 are accepted as markers; each marker is
// optionally drawn and then reported to the LocationSourceManager keyed by its
// color. The frame (ref) is replaced with the original, the edge image, or the
// thresholded edge image depending on the checked UI options.
// NOTE(review): grayImage and edgesImage (UnmanagedImage) are never disposed
// here - confirm whether cleanup happens elsewhere or this leaks per frame.
public void Detect(ref Bitmap image)
{
    List<List<IntPoint>> markers = new List<List<IntPoint>>();
    Bitmap tmp = image;

    // Wrap the bitmap's pixels without copying, then build an 8bpp grayscale copy.
    BitmapData bitmapData = image.LockBits(new Rectangle(0, 0, image.Width, image.Height), ImageLockMode.ReadOnly, image.PixelFormat);
    UnmanagedImage unmanagedImage = new UnmanagedImage(bitmapData);
    UnmanagedImage grayImage = UnmanagedImage.Create(unmanagedImage.Width, unmanagedImage.Height, PixelFormat.Format8bppIndexed);
    Grayscale.CommonAlgorithms.BT709.Apply(unmanagedImage, grayImage);

    // Edge detection works on the grayscale copy, so the source can be unlocked after.
    DifferenceEdgeDetector edgeDetector = new DifferenceEdgeDetector();
    UnmanagedImage edgesImage = edgeDetector.Apply(grayImage);
    image.UnlockBits(bitmapData);

    // Optionally display the raw edge image instead of the camera frame.
    if (this.edgeImage.Checked)
    {
        tmp = edgesImage.ToManagedImage().Clone(new Rectangle(0, 0, edgesImage.Width, edgesImage.Height), PixelFormat.Format24bppRgb);
    }

    // Binarize the edges so the blob counter sees clean contours.
    Threshold thresholdFilter = new Threshold(this.binThreshold);
    thresholdFilter.ApplyInPlace(edgesImage);

    // Optionally display the thresholded edge image.
    if (this.thresholdEdgeImage.Checked)
    {
        tmp = edgesImage.ToManagedImage().Clone(new Rectangle(0, 0, edgesImage.Width, edgesImage.Height), PixelFormat.Format24bppRgb);
    }

    // Collect square blobs whose outside is clearly brighter than the inside.
    this.blobCounter.ProcessImage(edgesImage);
    Blob[] blobs = blobCounter.GetObjectsInformation();
    for (int i = 0, n = blobs.Length; i < n; i++)
    {
        List<IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);
        List<IntPoint> corners = null;
        if (this.isSquare(edgePoints, out corners))
        {
            List<IntPoint> leftEdgePoints, rightEdgePoints;
            blobCounter.GetBlobsLeftAndRightEdges(blobs[i], out leftEdgePoints, out rightEdgePoints);
            float diff = calculateAverageEdgesBrightnessDifference(leftEdgePoints, rightEdgePoints, grayImage);
            if (diff > 50)
            {
                markers.Add(corners);
            }
        }
    }

    foreach (List<IntPoint> marker in markers)
    {
        // Orientation and color are derived from the original (color) frame.
        Color markerColor;
        IntPoint markerOrientation = this.markerOrientation(image, marker, out markerColor);

        // Center = midpoint of the diagonal corners marker[0]..marker[2].
        IntPoint center = marker[2] - marker[0];
        center.X = marker[0].X + Convert.ToInt32(center.X * 0.5);
        center.Y = marker[0].Y + Convert.ToInt32(center.Y * 0.5);

        if (this.drawMarkersOnVideo.Checked)
        {
            // Draw on whichever image is about to be displayed.
            if ((this.edgeImage.Checked) || (this.thresholdEdgeImage.Checked))
            {
                this.drawMarker(tmp, marker, markerOrientation, markerColor);
            }
            else
            {
                this.drawMarker(image, marker, markerOrientation, markerColor);
            }
        }

        // Publish the marker position keyed by its color.
        ColorDiscriminator discriminator = new ColorDiscriminator();
        discriminator.Color = markerColor;
        LocationSourceManager.Instance.updateLocationSource(discriminator, center);
    }

    image = tmp;
}
/// <summary>
/// New-frame handler: stops capture, shows the raw frame in pictureBox1 and
/// applies the filter selected by <c>choice</c> (0 = none, 1-25 = the filters
/// below) to a copy held in <c>image</c> via applyfilter/applyfilter1.
/// The previously 12x-duplicated BT709 grayscale pre-step is factored into
/// <see cref="ApplyBt709Grayscale"/>; behavior per case is unchanged.
/// </summary>
private void video_NewFrame1(object sender, NewFrameEventArgs eventArgs)
{
    videoCapture.Stop();
    pictureBox1.SizeMode = PictureBoxSizeMode.CenterImage;
    pictureBox2.SizeMode = PictureBoxSizeMode.CenterImage;
    pictureBox1.Image = (Bitmap)eventArgs.Frame.Clone();
    //pictureBox2.Image = (Bitmap)eventArgs.Frame.Clone();
    image = (Bitmap)eventArgs.Frame.Clone();

    // 4x4 threshold matrix used by OrderedDithering (case 2).
    byte[,] matrix = new byte[4, 4]
    {
        { 95, 233, 127, 255 },
        { 159, 31, 191, 63 },
        { 111, 239, 79, 207 },
        { 175, 47, 143, 15 }
    };

    switch (choice)
    {
        case 0: break; // no processing
        case 1: image = ApplyBt709Grayscale(image); applyfilter(new Threshold(100)); break;
        case 2: image = ApplyBt709Grayscale(image); applyfilter(new OrderedDithering(matrix)); break;
        case 3: image = ApplyBt709Grayscale(image); applyfilter(new BayerDithering()); break;
        case 4: image = ApplyBt709Grayscale(image); applyfilter(new FloydSteinbergDithering()); break;
        case 5: image = ApplyBt709Grayscale(image); applyfilter(new BurkesDithering()); break;
        case 6: image = ApplyBt709Grayscale(image); applyfilter(new JarvisJudiceNinkeDithering()); break;
        case 7: image = ApplyBt709Grayscale(image); applyfilter(new SierraDithering()); break;
        case 8: image = ApplyBt709Grayscale(image); applyfilter(new StuckiDithering()); break;
        case 9: image = ApplyBt709Grayscale(image); applyfilter(new HomogenityEdgeDetector()); break;
        case 10: applyfilter(new RotateChannels()); break;
        case 11: image = ApplyBt709Grayscale(image); applyfilter(new SobelEdgeDetector()); break;
        case 12: image = ApplyBt709Grayscale(image); applyfilter(new CannyEdgeDetector()); break;
        case 13: applyfilter(new YCbCrFiltering()); break;
        case 14: applyfilter(new HueModifier(180)); break;
        case 15: image = ApplyBt709Grayscale(image); applyfilter(new SISThreshold()); break;
        case 16: image = ApplyBt709Grayscale(image); applyfilter(new DifferenceEdgeDetector()); break;
        case 17: applyfilter(new Mirror(false, true)); break;
        case 18: applyfilter(new RotateBilinear(180, true)); break;
        case 19: applyfilter1(new Erosion()); break;
        case 20: applyfilter1(new Dilatation()); break;
        case 21: applyfilter1(new Opening()); break;
        case 22: applyfilter1(new Closing()); break;
        case 23: applyfilter(new Jitter(15)); break;
        case 24: applyfilter(new OilPainting(10)); break;
        case 25: applyfilter(new Pixellate(10)); break;
    }
}

/// <summary>
/// BT709-weighted grayscale conversion (0.2125/0.7154/0.0721) - the shared
/// pre-step for filters that require an 8bpp source. Returns a new bitmap.
/// </summary>
private static Bitmap ApplyBt709Grayscale(Bitmap source)
{
    return new Grayscale(0.2125, 0.7154, 0.0721).Apply(source);
}
// Re-creates a previously saved annotation/processing session for the current
// image (ImageName) from the XML document: shape annotations (lines, rectangles,
// ellipses, points, polygons, text boxes) are re-added to the draw area, and the
// <Process> child node re-applies the stored image filters in document order.
public void GetImageProcess() // Read back saved edits when the image is displayed.
{
    XmlNode node = xmlDoc.SelectSingleNode("PatientBackImage/Image[@Name='" + ImageName + "'] ");
    if (node != null)
    {
        if (node.HasChildNodes)
        {
            XmlNodeList xmlNolist = node.ChildNodes;
            foreach (XmlNode xn in xmlNolist)
            {
                XmlElement xmlE = (XmlElement)xn;
                switch (xmlE.Name)
                {
                    case "DrawLine":
                        // Line annotation: endpoints stored as attributes.
                        DrawLine dl = new DrawLine(Convert.ToInt32(xmlE.GetAttribute("StartPointX")), Convert.ToInt32(xmlE.GetAttribute("StartPointY")), Convert.ToInt32(xmlE.GetAttribute("EndPointX")), Convert.ToInt32(xmlE.GetAttribute("EndPointY")));
                        dl.ID = Convert.ToInt32(xmlE.GetAttribute("ID"));
                        frmImgProcess.imgProcess.drawArea.GraphicsList.UnselectAll();
                        frmImgProcess.imgProcess.drawArea.GraphicsList.Add(dl);
                        frmImgProcess.imgProcess.drawArea.Capture = true;
                        frmImgProcess.imgProcess.drawArea.Refresh();
                        break;
                    case "DrawRectangle":
                        // Rectangle annotation: origin + size stored as attributes.
                        DrawRectangle dr = new DrawRectangle(Convert.ToInt32(xmlE.GetAttribute("X")), Convert.ToInt32(xmlE.GetAttribute("Y")), Convert.ToInt32(xmlE.GetAttribute("Width")), Convert.ToInt32(xmlE.GetAttribute("Height")));
                        dr.ID = Convert.ToInt32(xmlE.GetAttribute("ID"));
                        frmImgProcess.imgProcess.drawArea.GraphicsList.UnselectAll();
                        frmImgProcess.imgProcess.drawArea.GraphicsList.Add(dr);
                        frmImgProcess.imgProcess.drawArea.Capture = true;
                        frmImgProcess.imgProcess.drawArea.Refresh();
                        break;
                    case "DrawEllipse":
                        // Ellipse annotation: bounding box stored as attributes.
                        DrawEllipse de = new DrawEllipse(Convert.ToInt32(xmlE.GetAttribute("X")), Convert.ToInt32(xmlE.GetAttribute("Y")), Convert.ToInt32(xmlE.GetAttribute("Width")), Convert.ToInt32(xmlE.GetAttribute("Height")));
                        de.ID = Convert.ToInt32(xmlE.GetAttribute("ID"));
                        frmImgProcess.imgProcess.drawArea.GraphicsList.UnselectAll();
                        frmImgProcess.imgProcess.drawArea.GraphicsList.Add(de);
                        frmImgProcess.imgProcess.drawArea.Capture = true;
                        frmImgProcess.imgProcess.drawArea.Refresh();
                        break;
                    case "DrawPoint":
                        DrawPoint dp = new DrawPoint(Convert.ToInt32(xmlE.GetAttribute("X")), Convert.ToInt32(xmlE.GetAttribute("Y")));
                        dp.ID = Convert.ToInt32(xmlE.GetAttribute("ID"));
                        frmImgProcess.imgProcess.drawArea.GraphicsList.UnselectAll();
                        frmImgProcess.imgProcess.drawArea.GraphicsList.Add(dp);
                        frmImgProcess.imgProcess.drawArea.Capture = true;
                        frmImgProcess.imgProcess.drawArea.Refresh();
                        break;
                    case "DrawPolygon":
                        // Polygon vertices serialized as "x,y$x,y$...": split on '$'
                        // for points, then on ',' for coordinates; empty entries skipped.
                        DrawPolygon dpy = new DrawPolygon();
                        string pointStr = xmlE.GetAttribute("pointStr");
                        string[] poList = pointStr.Split('$');
                        string[] p = { };
                        for (int i = 0; i < poList.Length; i++)
                        {
                            if (poList[i].ToString() != "")
                            {
                                p = poList[i].Split(',');
                                Point point = new Point(Convert.ToInt32(p[0]), Convert.ToInt32(p[1]));
                                dpy.pointArray.Add(point);
                            }
                        }
                        dpy.ID = Convert.ToInt32(xmlE.GetAttribute("ID"));
                        frmImgProcess.imgProcess.drawArea.GraphicsList.UnselectAll();
                        frmImgProcess.imgProcess.drawArea.GraphicsList.Add(dpy);
                        frmImgProcess.imgProcess.drawArea.Capture = true;
                        frmImgProcess.imgProcess.drawArea.Refresh();
                        break;
                    case "TextBox":
                        // Text annotation: recreated as a movable/resizable ToolText control.
                        ToolText tx = new ToolText(frmImgProcess.imgProcess.drawArea);
                        tx.Location = new Point(Convert.ToInt32(xmlE.GetAttribute("X")), Convert.ToInt32(xmlE.GetAttribute("Y")));
                        tx.Width = Convert.ToInt32(xmlE.GetAttribute("Width"));
                        tx.Height = Convert.ToInt32(xmlE.GetAttribute("Height"));
                        tx.Name = xmlE.GetAttribute("ID");
                        tx.Min = true;
                        tx.Max = true;
                        tx.IsChangeSize = true;
                        tx.ReadOnly = false;
                        tx.IsMove = true;
                        tx.Text = xmlE.GetAttribute("Content");
                        tx.ForeColor = System.Drawing.Color.Red;
                        frmImgProcess.imgProcess.drawArea.Controls.Add(tx);
                        break;
                    case "Process":
                        // Saved filter chain: re-applied in the order it was stored.
                        XmlNode nodeProcess = xmlDoc.SelectSingleNode("PatientBackImage/Image[@Name='" + ImageName + "']/Process ");
                        if (nodeProcess != null)
                        {
                            if (nodeProcess.HasChildNodes)
                            {
                                XmlNodeList xmlNodeProcesslist = nodeProcess.ChildNodes;
                                foreach (XmlNode xn2 in xmlNodeProcesslist)
                                {
                                    XmlElement xmlE2 = (XmlElement)xn2;
                                    switch (xmlE2.Name)
                                    {
                                        case "BrightnessCorrection": // brightness
                                            BrightnessCorrection brightnesscorr = new BrightnessCorrection();
                                            brightnesscorr.AdjustValue = double.Parse(xmlE2.GetAttribute("Value"));
                                            frmImgProcess.imgProcess.ApplyFilter(brightnesscorr);
                                            break;
                                        case "ContrastCorrection": // contrast
                                            ContrastCorrection contrastCorr = new ContrastCorrection();
                                            contrastCorr.Factor = double.Parse(xmlE2.GetAttribute("Value"));
                                            frmImgProcess.imgProcess.ApplyFilter(contrastCorr);
                                            break;
                                        case "HueModifier": // hue
                                            HueModifier huemodifier = new HueModifier();
                                            huemodifier.Hue = int.Parse(xmlE2.GetAttribute("Value"));
                                            frmImgProcess.imgProcess.ApplyFilter(huemodifier);
                                            break;
                                        case "Saturation": // saturation
                                            SaturationCorrection saturationcorr = new SaturationCorrection();
                                            saturationcorr.AdjustValue = double.Parse(xmlE2.GetAttribute("Value"));
                                            frmImgProcess.imgProcess.ApplyFilter(saturationcorr);
                                            break;
                                        case "GrayscaleBT709": // grayscale
                                            GrayscaleBT709 grayscalebt = new GrayscaleBT709();
                                            frmImgProcess.imgProcess.ApplyFilter(grayscalebt);
                                            break;
                                        case "Filter": // color filtering
                                            // Per-channel keep-ranges plus a fill color for pixels
                                            // outside (or inside, per FillType) the ranges.
                                            ColorFiltering colorfilter = new ColorFiltering();
                                            IntRange red = new IntRange(0, 255);
                                            IntRange green = new IntRange(0, 255);
                                            IntRange blue = new IntRange(0, 255);
                                            byte fillR = 0, fillG = 0, fillB = 0;
                                            string fillType = "";
                                            red.Min = int.Parse(xmlE2.GetAttribute("RedMin"));
                                            red.Max = int.Parse(xmlE2.GetAttribute("RedMax"));
                                            green.Min = int.Parse(xmlE2.GetAttribute("GreenMin"));
                                            green.Max = int.Parse(xmlE2.GetAttribute("GreenMax"));
                                            blue.Min = int.Parse(xmlE2.GetAttribute("BlueMin"));
                                            blue.Max = int.Parse(xmlE2.GetAttribute("BlueMax"));
                                            fillR = byte.Parse(xmlE2.GetAttribute("FillRed"));
                                            fillG = byte.Parse(xmlE2.GetAttribute("FillGreen"));
                                            fillB = byte.Parse(xmlE2.GetAttribute("FillBlue"));
                                            fillType = xmlE2.GetAttribute("FillType");
                                            colorfilter.Red = red;
                                            colorfilter.Green = green;
                                            colorfilter.Blue = blue;
                                            colorfilter.FillColor = new RGB(fillR, fillG, fillB);
                                            if (fillType == "OutSide")
                                            {
                                                colorfilter.FillOutsideRange = true;
                                            }
                                            else
                                            {
                                                colorfilter.FillOutsideRange = false;
                                            }
                                            frmImgProcess.imgProcess.ApplyFilter(colorfilter);
                                            break;
                                        case "Gaussian": // soften / blur
                                            GaussianBlur gaussianBlur = new GaussianBlur();
                                            gaussianBlur.Sigma = double.Parse(xmlE2.GetAttribute("Sigma"));
                                            gaussianBlur.Size = int.Parse(xmlE2.GetAttribute("Size"));
                                            frmImgProcess.imgProcess.ApplyFilter(gaussianBlur);
                                            break;
                                        case "DifferenceEdgeDetector": // edge enhancement
                                            DifferenceEdgeDetector differenceEdgeD = new DifferenceEdgeDetector();
                                            frmImgProcess.imgProcess.ApplyFilter(differenceEdgeD);
                                            break;
                                        case "RotateFlip": // mirror
                                            frmImgProcess.imgProcess.drawArea.Image.RotateFlip(RotateFlipType.RotateNoneFlipX);
                                            frmImgProcess.imgProcess.drawArea.Refresh();
                                            break;
                                        case "PerlinNoise": // texture overlay
                                            string value = "";
                                            value = xmlE2.GetAttribute("Value");
                                            // NOTE(review): imageWidth/imageHeight stay 0, so the
                                            // Marble branch passes 0 periods to MarbleTexture -
                                            // confirm whether real image dimensions were intended.
                                            float imageWidth = 0;
                                            float imageHeight = 0;
                                            switch (value)
                                            {
                                                case "Marble": filter = new Texturer(new MarbleTexture(imageWidth / 96, imageHeight / 48), 0.7f, 0.3f); break;
                                                case "Wood": filter = new Texturer(new WoodTexture(), 0.7f, 0.3f); break;
                                                case "Clouds": filter = new Texturer(new CloudsTexture(), 0.7f, 0.3f); break;
                                                case "Labyrinth": filter = new Texturer(new LabyrinthTexture(), 0.7f, 0.3f); break;
                                                case "Textile": filter = new Texturer(new TextileTexture(), 0.7f, 0.3f); break;
                                                case "Dirty":
                                                    TexturedFilter f = new TexturedFilter(new CloudsTexture(), new Sepia());
                                                    f.PreserveLevel = 0.30f;
                                                    f.FilterLevel = 0.90f;
                                                    filter = f;
                                                    break;
                                                case "Rusty": filter = new TexturedFilter(new CloudsTexture(), new Sepia(), new GrayscaleBT709()); break;
                                            }
                                            frmImgProcess.imgProcess.ApplyFilter(filter);
                                            break;
                                        case "Sharpen":
                                            Sharpen sharpen = new Sharpen();
                                            frmImgProcess.imgProcess.ApplyFilter(sharpen);
                                            break;
                                        case "Mean":
                                            Mean mean = new Mean();
                                            frmImgProcess.imgProcess.ApplyFilter(mean);
                                            break;
                                    }
                                }
                            }
                        }
                        break;
                }
            }
        }
    }
}
/// <summary>
/// Applies difference edge detection in place to the given unmanaged image.
/// </summary>
public static void DifferenceEdgeFilter(UnmanagedImage img)
{
    var detector = new DifferenceEdgeDetector();
    detector.ApplyInPlace(img);
}
// Process specified image trying to recognize counter's image.
// Pipeline: grayscale -> difference edges -> threshold(40) -> blob analysis;
// quadrilateral blobs whose outside is brighter than the inside (diff > 20)
// are treated as potential glyphs, warped to 250x250, Otsu-binarized, and read
// as a 5x5 binary glyph. Every stage and result is written to the log.
public void Process(Bitmap image, IImageProcessingLog log)
{
    log.AddMessage("Image size: " + image.Width + " x " + image.Height);

    // 1 - Grayscale
    Bitmap grayImage = Grayscale.CommonAlgorithms.BT709.Apply(image);
    log.AddImage("Grayscale", grayImage);

    // 2 - Edge detection
    DifferenceEdgeDetector edgeDetector = new DifferenceEdgeDetector();
    Bitmap edges = edgeDetector.Apply(grayImage);
    log.AddImage("Edges", edges);

    // 3 - Threshold edges
    Threshold thresholdFilter = new Threshold(40);
    thresholdFilter.ApplyInPlace(edges);
    log.AddImage("Thresholded Edges", edges);

    // 4 - Blob Counter (largest first; ignore blobs smaller than 32x32)
    BlobCounter blobCounter = new BlobCounter();
    blobCounter.MinHeight = 32;
    blobCounter.MinWidth = 32;
    blobCounter.FilterBlobs = true;
    blobCounter.ObjectsOrder = ObjectsOrder.Size;
    blobCounter.ProcessImage(edges);
    Blob[] blobs = blobCounter.GetObjectsInformation();

    // create unmanaged copy of source image, so we could draw on it
    UnmanagedImage imageData = UnmanagedImage.FromManagedImage(image);

    // Get unmanaged copy of grayscale image, so we could access it's pixel values
    UnmanagedImage grayUI = UnmanagedImage.FromManagedImage(grayImage);

    // list of found dark/black quadrilaterals surrounded by white area
    List<List<IntPoint>> foundObjects = new List<List<IntPoint>>();

    // shape checker for checking quadrilaterals
    SimpleShapeChecker shapeChecker = new SimpleShapeChecker();

    // 5 - check each blob
    for (int i = 0, n = blobs.Length; i < n; i++)
    {
        List<IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);
        List<IntPoint> corners = null;

        // does it look like a quadrilateral ?
        if (shapeChecker.IsQuadrilateral(edgePoints, out corners))
        {
            // do some more checks to filter so unacceptable shapes
            // if ( CheckIfShapeIsAcceptable( corners ) )
            {
                log.AddMessage("Blob size: " + blobs[i].Rectangle.Width + " x " + blobs[i].Rectangle.Height);

                // get edge points on the left and on the right side
                List<IntPoint> leftEdgePoints, rightEdgePoints;
                blobCounter.GetBlobsLeftAndRightEdges(blobs[i], out leftEdgePoints, out rightEdgePoints);

                // calculate average difference between pixel values from outside of the shape and from inside
                float diff = this.CalculateAverageEdgesBrightnessDifference(leftEdgePoints, rightEdgePoints, grayUI);
                log.AddMessage("Avg Diff: " + diff);

                // check average difference, which tells how much outside is lighter than inside on the average
                if (diff > 20)
                {
                    Drawing.Polygon(imageData, corners, Color.FromArgb(255, 255, 0, 0));
                    // add the object to the list of interesting objects for further processing
                    foundObjects.Add(corners);
                }
            }
        }
    }

    // "glyps" (sic) - keep the label as-is since logs may be matched on it.
    log.AddImage("Potential glyps", imageData.ToManagedImage());

    int counter = 1;

    // further processing of each potential glyph
    foreach (List<IntPoint> corners in foundObjects)
    {
        log.AddMessage("Glyph #" + counter);
        log.AddMessage(string.Format("Corners: ({0}), ({1}), ({2}), ({3})", corners[0], corners[1], corners[2], corners[3]));

        // 6 - do quadrilateral transformation (normalize to 250x250)
        QuadrilateralTransformation quadrilateralTransformation = new QuadrilateralTransformation(corners, 250, 250);
        Bitmap transformed = quadrilateralTransformation.Apply(grayImage);
        log.AddImage("Transformed #" + counter, transformed);

        // 7 - otsu thresholding
        OtsuThreshold otsuThresholdFilter = new OtsuThreshold();
        Bitmap transformedOtsu = otsuThresholdFilter.Apply(transformed);
        log.AddImage("Transformed Otsu #" + counter, transformedOtsu);

        // Read the binarized patch as a 5x5 glyph.
        int glyphSize = 5;
        SquareBinaryGlyphRecognizer gr = new SquareBinaryGlyphRecognizer(glyphSize);
        bool[,] glyphValues = gr.Recognize(ref transformedOtsu, new Rectangle(0, 0, 250, 250));
        log.AddImage("Glyph lines #" + counter, transformedOtsu);

        // output recognize glyph to log
        log.AddMessage(string.Format("glyph: {0:F2}%", gr.confidence * 100));
        for (int i = 0; i < glyphSize; i++)
        {
            StringBuilder sb = new StringBuilder(" ");
            for (int j = 0; j < glyphSize; j++)
            {
                sb.Append((glyphValues[i, j]) ? "1 " : "0 ");
            }
            log.AddMessage(sb.ToString());
        }
        counter++;
    }
}
// Runs the processing mode selected in comboBox4 on the picture boxes:
// 0 = oil painting, 1 = edge/blob/quadrilateral analysis, 2 = template matching
// of two fixed screenshot files, 3 = blob filtering of a fixed Canny image,
// 4 = Canny edge detection. Button text is set to "处理中" (processing) while
// running and restored to "处理" (process) afterwards.
private void button2_Click(object sender, EventArgs e)
{
    button2.Text = "处理中";
    switch (comboBox4.SelectedIndex)
    {
        case 0:
        {
            // Oil painting effect.
            // NOTE(review): ApplyInPlace mutates the bitmap currently displayed
            // in pictureBox1 - confirm that is intended.
            Bitmap temp = (Bitmap)pictureBox1.Image;
            OilPainting filter3 = new OilPainting(10);
            // apply the filter
            filter3.ApplyInPlace(temp);
            this.pictureBox2.Image = ResizeBitmap(temp);
            break;
        }
        case 1:
        {
            // Grayscale -> difference edges -> threshold (numericUpDown1), then
            // blob analysis: the first quadrilateral blobs' left/right edge
            // points are bound to listBox1/listBox2.
            Bitmap temp = (Bitmap)pictureBox1.Image;
            temp = new Grayscale(0.2125, 0.7154, 0.0721).Apply(temp);
            DifferenceEdgeDetector edgeDetector = new DifferenceEdgeDetector();
            temp = edgeDetector.Apply(temp);
            temp = new Threshold((int)numericUpDown1.Value).Apply(temp);
            //FillHoles filter2 = new FillHoles();
            //filter2.MaxHoleHeight = MinHeight;
            //filter2.MaxHoleWidth = MaxWidth;
            //filter2.CoupledSizeFiltering = false;
            // apply the filter
            //temp = filter2.Apply(temp);
            //HorizontalRunLengthSmoothing hrls = new HorizontalRunLengthSmoothing(40);
            // apply the filter
            //hrls.ApplyInPlace(temp);
            /*AForge.Imaging.Filters.BlobsFiltering filter = new AForge.Imaging.Filters.BlobsFiltering();
             * // filter condition (objects at least 70 wide/high)
             * filter.CoupledSizeFiltering = true;
             * filter.MaxWidth = (int)numericUpDown3.Value;
             * filter.MaxHeight = (int)numericUpDown4.Value;
             * filter.MinWidth = (int)numericUpDown5.Value;
             * filter.MinHeight = (int)numericUpDown6.Value;
             * filter.ApplyInPlace(temp);*/
            BlobCounter blobCounter = new BlobCounter();
            blobCounter.MinHeight = 32;
            blobCounter.MinWidth = 32;
            blobCounter.FilterBlobs = true;
            blobCounter.ObjectsOrder = ObjectsOrder.Size;
            // 4 - find all stand alone blobs
            blobCounter.ProcessImage(temp);
            Blob[] blobs = blobCounter.GetObjectsInformation();
            SimpleShapeChecker shapeChecker = new SimpleShapeChecker();
            List<IntPoint> corners = null;
            // NOTE(review): corners2 is never assigned; only referenced by the
            // commented-out DataSource lines below.
            List<IntPoint> corners2 = null;
            for (int i = 0, n = blobs.Length; i < n; i++)
            {
                List<IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);
                // does it look like a quadrilateral ?
                if (shapeChecker.IsQuadrilateral(edgePoints, out corners))
                {
                    // get edge points on the left and on the right side
                    List<IntPoint> leftEdgePoints, rightEdgePoints;
                    blobCounter.GetBlobsLeftAndRightEdges(blobs[i], out leftEdgePoints, out rightEdgePoints);
                    listBox1.DataSource = leftEdgePoints;
                    listBox2.DataSource = rightEdgePoints;
                }
            }
            //listBox1.DataSource = corners;
            //listBox2.DataSource = corners2;
            // NOTE(review): result goes back to pictureBox1 (the source box)
            // while other cases target pictureBox2 - confirm intended.
            this.pictureBox1.Image = temp;
            break;
        }
        case 2:
        {
            // Exhaustive template matching between two fixed screenshot files;
            // matches above 0.80 similarity are outlined in red on bt1.
            Bitmap bt2 = new Bitmap(@"D:\TCL条码\截图01.bmp");
            Bitmap bt1 = new Bitmap(@"D:\TCL条码\截图03.bmp");
            //Bitmap bt1 = new Bitmap(pictureBox2.Image);
            ExhaustiveTemplateMatching tm = new ExhaustiveTemplateMatching(0.80f); // keep matches above the similarity threshold
            TemplateMatch[] matchings = tm.ProcessImage(bt1, bt2);
            BitmapData data = bt1.LockBits(new Rectangle(0, 0, bt1.Width, bt1.Height), ImageLockMode.ReadWrite, bt1.PixelFormat);
            foreach (TemplateMatch m in matchings)
            {
                Drawing.Rectangle(data, m.Rectangle, Color.Red);
            }
            bt1.UnlockBits(data);
            pictureBox2.Image = bt1;
            break;
        }
        case 3:
        {
            // Blob size filtering on a fixed Canny-result file, using the
            // min/max width/height limits from the numeric up-downs.
            Bitmap bt2 = new Bitmap(@"D:\TCL条码\Canny算法.png");
            AForge.Imaging.Filters.BlobsFiltering filter = new AForge.Imaging.Filters.BlobsFiltering();
            // filter condition (per original note: objects at least 70 wide/high)
            filter.CoupledSizeFiltering = true;
            filter.MaxWidth = (int)numericUpDown3.Value;
            filter.MaxHeight = (int)numericUpDown4.Value;
            filter.MinWidth = (int)numericUpDown5.Value;
            filter.MinHeight = (int)numericUpDown6.Value;
            filter.ApplyInPlace(bt2);
            pictureBox1.Image = bt2;
            // NOTE(review): RESULT is computed but never used here.
            byte[] RESULT = BitmapToBytes(bt2);
            break;
        }
        case 4:
        {
            // Grayscale followed by in-place Canny edge detection.
            Bitmap temp = (Bitmap)pictureBox1.Image;
            temp = new Grayscale(0.2125, 0.7154, 0.0721).Apply(temp);
            AForge.Imaging.Filters.CannyEdgeDetector filter = new AForge.Imaging.Filters.CannyEdgeDetector();
            filter.ApplyInPlace(temp);
            pictureBox2.Image = temp;
            break;
        }
    }
    button2.Text = "处理";
}
// Detects card-shaped rectangles in the bitmap. Pipeline: BT709 grayscale ->
// optional smoothing (_smoothMode) -> optional edge detection (_edgeMode) ->
// optional threshold (_threshold; 0 disables) -> blob analysis. Convex
// four-corner parallelograms/rectangles that are far enough from previously
// accepted cards (_minDistance) and large enough (_minArea) are outlined in
// blue; other convex shapes are dotted orange and non-convex ones red.
// Returns a new 24bpp bitmap whose backdrop is chosen by _drawMode.
public Bitmap Detect(Bitmap bitmap)
{
    Bitmap grayscaleBitmap = Grayscale.CommonAlgorithms.BT709.Apply(bitmap);

    // Optional smoothing pass selected by name.
    IFilter smoothingFilter = null;
    switch (_smoothMode)
    {
        case "None": smoothingFilter = null; break;
        case "Mean": smoothingFilter = new Mean(); break;
        case "Median": smoothingFilter = new Median(); break;
        case "Conservative": smoothingFilter = new ConservativeSmoothing(); break;
        case "Adaptive": smoothingFilter = new AdaptiveSmoothing(); break;
        case "Bilateral": smoothingFilter = new BilateralSmoothing(); break;
    }
    Bitmap smoothBitmap = smoothingFilter != null ? smoothingFilter.Apply(grayscaleBitmap) : grayscaleBitmap;

    // Optional edge-detection pass selected by name.
    IFilter edgeFilter = null;
    switch (_edgeMode)
    {
        case "Homogenity": edgeFilter = new HomogenityEdgeDetector(); break;
        case "Difference": edgeFilter = new DifferenceEdgeDetector(); break;
        case "Sobel": edgeFilter = new SobelEdgeDetector(); break;
        case "Canny": edgeFilter = new CannyEdgeDetector(); break;
    }
    Bitmap edgeBitmap = edgeFilter != null ? edgeFilter.Apply(smoothBitmap) : smoothBitmap;

    // A threshold of 0 means "skip thresholding".
    IFilter threshholdFilter = new Threshold(_threshold);
    Bitmap thresholdBitmap = _threshold == 0 ? edgeBitmap : threshholdFilter.Apply(edgeBitmap);

    BlobCounter blobCounter = new BlobCounter();
    blobCounter.FilterBlobs = true;
    blobCounter.MinHeight = _minHeight;
    blobCounter.MinWidth = _minWidth;
    blobCounter.ProcessImage(thresholdBitmap);
    Blob[] blobs = blobCounter.GetObjectsInformation();

    // Compose the result on a fresh 24bpp canvas; the backdrop is whichever
    // intermediate _drawMode names (unknown values leave a black canvas).
    Bitmap outputBitmap = new Bitmap(thresholdBitmap.Width, thresholdBitmap.Height, PixelFormat.Format24bppRgb);
    Graphics bitmapGraphics = Graphics.FromImage(outputBitmap);
    Bitmap inputBitmap = null;
    switch (_drawMode)
    {
        case "Original": inputBitmap = bitmap; break;
        case "Grayscale": inputBitmap = grayscaleBitmap; break;
        case "Smooth": inputBitmap = smoothBitmap; break;
        case "Edge": inputBitmap = edgeBitmap; break;
        case "Threshold": inputBitmap = thresholdBitmap; break;
    }
    if (inputBitmap != null)
    {
        bitmapGraphics.DrawImage(inputBitmap, 0, 0);
    }

    Pen nonConvexPen = new Pen(Color.Red, 2);
    Pen nonRectPen = new Pen(Color.Orange, 2);
    Pen cardPen = new Pen(Color.Blue, 2);
    SimpleShapeChecker shapeChecker = new SimpleShapeChecker();
    List<IntPoint> cardPositions = new List<IntPoint>();

    for (int i = 0; i < blobs.Length; i++)
    {
        List<IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);
        List<IntPoint> corners;
        if (shapeChecker.IsConvexPolygon(edgePoints, out corners))
        {
            PolygonSubType subType = shapeChecker.CheckPolygonSubType(corners);
            if ((subType == PolygonSubType.Parallelogram || subType == PolygonSubType.Rectangle) && corners.Count == 4)
            {
                // Check if its sideways, if so rearrange the corners so it's vertical.
                RearrangeCorners(corners);

                // Prevent detecting the same card twice by comparing distance against other detected cards.
                bool sameCard = false;
                foreach (IntPoint point in cardPositions)
                {
                    if (corners[0].DistanceTo(point) < _minDistance)
                    {
                        sameCard = true;
                        break;
                    }
                }
                if (sameCard)
                {
                    continue;
                }

                // Hack to prevent it from detecting smaller sections of the card instead of the whole card.
                if (GetArea(corners) < _minArea)
                {
                    continue;
                }

                cardPositions.Add(corners[0]);
                bitmapGraphics.DrawPolygon(cardPen, ToPointsArray(corners));
            }
            else
            {
                // Convex but not a 4-corner parallelogram/rectangle: dot the
                // first 300 edge points in orange.
                foreach (IntPoint point in edgePoints.Take(300))
                {
                    bitmapGraphics.DrawEllipse(nonRectPen, point.X, point.Y, 1, 1);
                }
            }
        }
        else
        {
            // Non-convex outline: dot the first 300 edge points in red.
            foreach (IntPoint point in edgePoints.Take(300))
            {
                bitmapGraphics.DrawEllipse(nonConvexPen, point.X, point.Y, 1, 1);
            }
        }
    }

    bitmapGraphics.Dispose();
    nonConvexPen.Dispose();
    nonRectPen.Dispose();
    cardPen.Dispose();
    return (outputBitmap);
}
// Counts "marker-like" blobs in picSource: grayscale -> difference edges ->
// threshold(40) -> blob analysis; a blob counts when the average brightness
// outside its left/right edges exceeds the inside by >= 50. Per-blob
// differences are appended to txtOut, the total shown in lblCount, and the
// thresholded edge image displayed in picResult.
private void AugmentedMethod2()
{
    UnmanagedImage image = UnmanagedImage.FromManagedImage(new Bitmap(picSource.Image));

    // 1 - grayscaling (skipped when the source is already 8bpp indexed)
    UnmanagedImage grayImage = null;
    if (image.PixelFormat == PixelFormat.Format8bppIndexed)
    {
        grayImage = image;
    }
    else
    {
        grayImage = UnmanagedImage.Create(image.Width, image.Height, PixelFormat.Format8bppIndexed);
        Grayscale.CommonAlgorithms.BT709.Apply(image, grayImage);
    }

    // 2 - Edge detection
    DifferenceEdgeDetector edgeDetector = new DifferenceEdgeDetector();
    UnmanagedImage edgesImage = edgeDetector.Apply(grayImage);

    // 3 - Threshold edges
    Threshold thresholdFilter = new Threshold(40);
    thresholdFilter.ApplyInPlace(edgesImage);

    // create and configure blob counter
    BlobCounter blobCounter = new BlobCounter();
    blobCounter.MinHeight = 32;
    blobCounter.MinWidth = 32;
    blobCounter.FilterBlobs = true;
    blobCounter.ObjectsOrder = ObjectsOrder.Size;

    // 4 - find all stand alone blobs
    blobCounter.ProcessImage(edgesImage);
    Blob[] blobs = blobCounter.GetObjectsInformation();

    // Only referenced by the commented-out quadrilateral check below.
    SimpleShapeChecker shapeChecker = new SimpleShapeChecker();
    int counter = 0;

    // 5 - check each blob
    for (int i = 0, n = blobs.Length; i < n; i++)
    {
        // get edge points on the left and on the right side
        List<IntPoint> leftEdgePoints, rightEdgePoints;
        blobCounter.GetBlobsLeftAndRightEdges(blobs[i], out leftEdgePoints, out rightEdgePoints);

        // calculate average difference between pixel values from outside of the
        // shape and from inside
        float diff = CalculateAverageEdgesBrightnessDifference(leftEdgePoints, rightEdgePoints, grayImage);

        // check average difference, which tells how much outside is lighter than
        // inside on the average
        if (diff >= 50)
        {
            ++counter;
        }
        txtOut.AppendText(diff + ",");

        /*List<IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);
         * List<IntPoint> corners = null;
         *
         * // does it look like a quadrilateral ?
         * if (shapeChecker.IsQuadrilateral(edgePoints, out corners))
         * { ++counter;
         * }*/
    }
    txtOut.AppendText(Environment.NewLine);
    lblCount.Text = counter.ToString();
    picResult.Image = edgesImage.ToManagedImage();
}
/// <summary>
/// Applies difference edge detection to the first bitmap in the list.
/// The source is converted to 8bpp indexed first, as the detector
/// requires a grayscale input.
/// </summary>
public override Bitmap ApplyFilter(List<Bitmap> bitmaps)
{
    Bitmap grayscale = bitmaps[0].ConvertPixelFormat(PixelFormat.Format8bppIndexed);
    return new DifferenceEdgeDetector().Apply(grayscale);
}
// Event handler invoked when a new camera frame is ready. Every 10th frame it
// updates the FPS/resolution labels, then runs the processing chain: grayscale
// -> difference edges -> glyph threshold, plus HSL-based channel isolation into
// red/green/blue variants with their own edge/threshold passes, and finally
// marshals the five result images onto the UI thread for display.
private void video_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    Bitmap img = (Bitmap)eventArgs.Frame.Clone();

    // Update the FPS/resolution readout once every 10 frames.
    if (counterImg == 10)
    {
        double delaisImage = DateTime.Now.TimeOfDay.TotalMilliseconds - _mill_last_pic;
        _mill_last_pic = DateTime.Now.TimeOfDay.TotalMilliseconds;
        // NOTE(review): precedence makes this (1/delais)*1000*counterImg + 1;
        // if (counterImg + 1) frames were intended, parentheses are missing.
        double FPS = 1 / delaisImage * 1000 * counterImg + 1;
        // txt_nb_fps.Text = FPS.ToString() ;
        //txt_resolution.Text = "" + videoSource.DesiredFrameSize.Height + " * " + videoSource.DesiredFrameSize.Width;
        string resolutionTxt = "" + img.Width + " * " + img.Height;
        if (this != null && (!this.IsDisposed))
        {
            try
            {
                this.Invoke((ProcessNewFPS)UpdateNewFPS, FPS);
                this.Invoke((ProcessNewResolution)UpdateNewResolution, resolutionTxt);
            }
            catch (ObjectDisposedException) // the window was in the middle of closing
            {
            }
        }
        counterImg = 0;
    }
    counterImg++;

    //Rectangle rect = new Rectangle(0,0,eventArgs.Frame.Width,eventArgs.Frame.Height);

    // 1 - grayscaling (plus per-channel working copies)
    UnmanagedImage image = UnmanagedImage.FromManagedImage(img);
    UnmanagedImage imageRouge = image.Clone();
    UnmanagedImage imageBleu = image.Clone();
    UnmanagedImage imageVert = image.Clone();
    UnmanagedImage grayImage = null;

    // Report hue/brightness/saturation of the probed pixel (posX, posY).
    Color colorPoint = image.GetPixel(posX, posY);
    this.Invoke((ProcessLalbelText)ChangeLabelText, new object[] { colorPoint.GetHue().ToString(), lbl_hue });
    this.Invoke((ProcessLalbelText)ChangeLabelText, new object[] { colorPoint.GetBrightness().ToString(), lbl_lum });
    this.Invoke((ProcessLalbelText)ChangeLabelText, new object[] { colorPoint.GetSaturation().ToString(), lbl_sat });

    if (image.PixelFormat == PixelFormat.Format8bppIndexed)
    {
        grayImage = image;
    }
    else
    {
        grayImage = UnmanagedImage.Create(image.Width, image.Height, PixelFormat.Format8bppIndexed);
        Grayscale.CommonAlgorithms.BT709.Apply(image, grayImage);
    }

    // 2 - Edge detection
    DifferenceEdgeDetector edgeDetector = new DifferenceEdgeDetector();
    UnmanagedImage edgesImage = edgeDetector.Apply(grayImage);

    // 3 - Threshold edges (glyph and color thresholds come from the numeric up-downs)
    Threshold thresholdFilterGlyph = new Threshold((short)numericUpDown3.Value);
    Threshold thresholdFilterCouleur = new Threshold((short)numericUpDown2.Value);
    thresholdFilterGlyph.ApplyInPlace(edgesImage);

    /*
     *
     * Bitmap image = (Bitmap)eventArgs.Frame.Clone();
     *
     * //Reference : http://www.aforgenet.com/framework/docs/html/743311a9-6c27-972d-39d2-ddc383dd1dd4.htm
     *
     * private HSLFiltering filter = new HSLFiltering();
     * // set color ranges to keep red-orange
     * filter.Hue = new IntRange(0, 20);
     * filter.Saturation = new DoubleRange(0.5, 1);
     *
     * // apply the filter
     * filter.ApplyInPlace(image);
     *
     */
    /*RGB colorRed = new RGB(215, 30, 30);
     * RGB colorBlue = new RGB(10, 10, 215);
     * RGB colorVert = new RGB(30, 215, 30);
     * RGB colorBlanc = new RGB(225, 219, 160);*/

    // Isolate each color channel by HSL hue range (the same filter object is
    // reused with a different Hue range per channel).
    HSLFiltering filter = new HSLFiltering(); // create filter
    // EuclideanColorFiltering filter = new EuclideanColorFiltering();
    //filter.Radius = (short)numericUpDown1.Value;
    filter.Hue = new IntRange(40, 140);
    filter.Saturation = new Range(0.5f, 1.0f);
    filter.Luminance = new Range(0.2f, 1.0f);
    //filter.CenterColor = colorRed;
    filter.ApplyInPlace(imageRouge);
    filter.Hue = new IntRange(100, 180);
    //filter.CenterColor = colorBlanc;
    filter.ApplyInPlace(imageVert);
    filter.Hue = new IntRange(0, 40);
    //filter.CenterColor = colorBlue;
    filter.ApplyInPlace(imageBleu);

    // Channel-weighted grayscale conversions.
    // NOTE(review): filterVert is created but never applied (no grayVertImage /
    // edgesVertImage below) - confirm whether the green channel was meant to
    // get the same edge/threshold treatment as red and blue.
    Grayscale filterRouge = new Grayscale(0.800, 0.200, 0.200);
    Grayscale filterVert = new Grayscale(0.200, 0.800, 0.200);
    Grayscale filterBleu = new Grayscale(0.200, 0.200, 0.800);
    UnmanagedImage grayRougeImage = filterRouge.Apply(imageRouge);
    UnmanagedImage grayBleuImage = filterBleu.Apply(imageBleu);
    // NOTE(review): edgesRougeImage / edgesBleuImage are computed and
    // thresholded but never displayed or stored - confirm intended.
    UnmanagedImage edgesRougeImage = edgeDetector.Apply(grayRougeImage);
    UnmanagedImage edgesBleuImage = edgeDetector.Apply(grayBleuImage);
    thresholdFilterCouleur.ApplyInPlace(edgesRougeImage);
    thresholdFilterCouleur.ApplyInPlace(edgesBleuImage);

    // All the image processing is done here...
    // pictureBox1.Image = image.ToManagedImage();
    if (this != null && (!this.IsDisposed)) // skip if the form is being disposed
    {
        try
        {
            this.Invoke((ProcessNewImage)DisplayNewImage, new object[] { image, pic_ImageNormal });
            this.Invoke((ProcessNewImage)DisplayNewImage, new object[] { edgesImage, pic_ImageEdge });
            this.Invoke((ProcessNewImage)DisplayNewImage, new object[] { imageRouge, pic_ImageRouge });
            this.Invoke((ProcessNewImage)DisplayNewImage, new object[] { imageBleu, pic_ImageBleu });
            this.Invoke((ProcessNewImage)DisplayNewImage, new object[] { imageVert, pic_ImageVert });
        }
        catch (ObjectDisposedException) // the window was in the middle of closing
        {
        }
    }
    /*pictureBox2.Image = grayImage.ToManagedImage();
     * pictureBox3.Image = edgesImage.ToManagedImage();
     * pictureBox4.Image = imageRouge.ToManagedImage();*/
}
/// <summary>
/// Applies the filters currently selected in the UI to a copy of <paramref name="image"/>
/// and shows the results in the capture and shapes picture boxes.
/// </summary>
/// <param name="image">Source frame. Declared <c>ref</c> for historical reasons; this
/// method never reassigns it, so callers see no change through the parameter.</param>
private void FillPictureBoxes(ref Bitmap image)
{
    Bitmap tmpImg = image;
    Bitmap tmpImg2 = image;
    try
    {
        bool hasFilter = false;

        // Resize each working copy to fit its target picture box.
        // NOTE(review): Apply() returns new bitmaps and the intermediates are never
        // disposed, so repeated calls leak GDI handles — worth auditing with the callers.
        ResizeBicubic resizeFilter = new ResizeBicubic(pbCapture.Width, pbCapture.Height);
        tmpImg = resizeFilter.Apply(tmpImg);
        resizeFilter = new ResizeBicubic(pbShapes.Width, pbShapes.Height);
        tmpImg2 = resizeFilter.Apply(tmpImg2);

        FiltersSequence processingFilter = new FiltersSequence();

        // Candidate filters; only those enabled by the UI flags are added below.
        IFilter ConservativeSmoothingFilter = new AForge.Imaging.Filters.ConservativeSmoothing();
        IFilter InvertFilter = new AForge.Imaging.Filters.Invert();
        IFilter HSLFilteringFilter = new AForge.Imaging.Filters.HSLFiltering();
        IFilter SepiaFilter = new AForge.Imaging.Filters.Sepia();
        IFilter grayscaleFilter = new AForge.Imaging.Filters.GrayscaleBT709();
        IFilter SkeletonizationFilter = new AForge.Imaging.Filters.SimpleSkeletonization();

        if (ConservativeSmoothing)
        {
            processingFilter.Add(ConservativeSmoothingFilter);
            hasFilter = true;
        }
        if (Invert)
        {
            processingFilter.Add(InvertFilter);
            hasFilter = true;
        }
        if (HSLswitch)
        {
            processingFilter.Add(HSLFilteringFilter);
            hasFilter = true;
        }
        if (sepiaSwitch)
        {
            processingFilter.Add(SepiaFilter);
            hasFilter = true;
        }
        if (Skeletonization)
        {
            // Skeletonization needs a grayscale input, so convert first.
            processingFilter.Add(grayscaleFilter);
            processingFilter.Add(SkeletonizationFilter);
            hasFilter = true;
        }

        // The assembled filter sequence is applied to the shapes preview only.
        if (hasFilter)
        {
            tmpImg2 = processingFilter.Apply(tmpImg2);
        }
        processingFilter.Clear();

        if (bwSwitch)
        {
            switchBandW(ref tmpImg);
        }

        // Edge detectors require an 8bpp grayscale image, hence the BT709 conversion
        // before each ApplyInPlace. When several flags are set, the detectors run in
        // sequence on the already-processed image (original behavior, preserved).
        if (CannyEdgeDetector)
        {
            CannyEdgeDetector filter = new CannyEdgeDetector();
            tmpImg = Grayscale.CommonAlgorithms.BT709.Apply(tmpImg);
            filter.ApplyInPlace(tmpImg);
        }
        if (DifferenceEdgeDetector)
        {
            DifferenceEdgeDetector dFilter = new DifferenceEdgeDetector();
            tmpImg = Grayscale.CommonAlgorithms.BT709.Apply(tmpImg);
            dFilter.ApplyInPlace(tmpImg);
        }
        if (HomogenityEdgeDetector)
        {
            HomogenityEdgeDetector hFilter = new HomogenityEdgeDetector();
            tmpImg = Grayscale.CommonAlgorithms.BT709.Apply(tmpImg);
            hFilter.ApplyInPlace(tmpImg);
        }
        if (SobelEdgeDetector)
        {
            SobelEdgeDetector hFilter = new SobelEdgeDetector();
            tmpImg = Grayscale.CommonAlgorithms.BT709.Apply(tmpImg);
            hFilter.ApplyInPlace(tmpImg);
        }

        if (findShapes)
        {
            // Presumably FindShapes updates the picture boxes itself — TODO confirm;
            // otherwise nothing is displayed when this flag is set (original behavior).
            tmpImg = FindShapes(tmpImg, ref tmpImg2);
        }
        else
        {
            pbCapture.Image = tmpImg;
            pbShapes.Image = tmpImg2;
        }
    }
    catch (Exception ex)
    {
        // Notify rather than crash, so the capture loop survives a bad frame.
        MessageBox.Show(ex.Message);
    }
}