public Filters()
{
    InitializeComponent();
    this.imageFilters = new ImageFilters();

    if (FilterSettings.war3IconType == War3IconType.None)
    {
        radioBtnNone.Checked = true;
    }
    if (FilterSettings.war3IconType == War3IconType.ClassicIcon)
    {
        radioBtnClassic.Checked = true;
    }
    if (FilterSettings.war3IconType == War3IconType.ReforgedIcon)
    {
        radioBtnReforged.Checked = true;
    }

    checkBoxButton.Checked = FilterSettings.isIconBTN;
    checkBoxPassive.Checked = FilterSettings.isIconPAS;
    checkBoxAutocast.Checked = FilterSettings.isIconATC;
    checkBoxDisabled.Checked = FilterSettings.isIconDISBTN;
    checkBoxResize.Checked = FilterSettings.isResized;
    upDownSizeX.Text = FilterSettings.resizeX.ToString();
    upDownSizeY.Text = FilterSettings.resizeY.ToString();
}
public void ApplyFilter_InvalidArguments_ReturnsArgumentException()
{
    // Arrange
    var sourceBitmap = Properties.Resources.unitTestPropImage1;

    // Act
    var resultBitmap = ImageFilters.ApplyFilter(sourceBitmap, -1, -1, -1, -1);
}
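The test above performs the Act step but never verifies that an exception is actually thrown; presumably an [ExpectedException] attribute (not shown in this excerpt) handles that. If the suite uses MSTest v2, a minimal sketch with an explicit assertion could look like the following (the Assert.ThrowsException call and the renamed method are assumptions, not the project's actual code):

[TestMethod]
public void ApplyFilter_InvalidArguments_ThrowsArgumentException()
{
    // Arrange
    var sourceBitmap = Properties.Resources.unitTestPropImage1;

    // Act & Assert: the negative channel arguments are expected to throw.
    // Assert.ThrowsException is an MSTest v2 API; adjust for NUnit/xUnit if needed.
    Assert.ThrowsException<ArgumentException>(
        () => { ImageFilters.ApplyFilter(sourceBitmap, -1, -1, -1, -1); });
}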
static void Main(string[] args)
{
    // Create ByteScout.ImageFilters.ImageFilters object instance.
    ImageFilters imageFilters = new ImageFilters();

    // Load document.
    imageFilters.LoadDocument(@".\fax.tif");

    // APPLY FILTERS:

    // Fix orientation of rotated and flipped pages by analyzing text orientation.
    imageFilters.FixRotation();
    // Unrotate skewed pages.
    imageFilters.Deskew();
    // Remove speckles and noise.
    imageFilters.RemoveNoise();
    // Remove lines. This can improve the quality of optical character recognition (OCR).
    imageFilters.RemoveHorizontalLines();
    imageFilters.RemoveVerticalLines();

    // You can get a preview image after each modification to display to a user.
    Image previewImage = imageFilters.GetPreview();

    // Save improved image.
    imageFilters.SaveDocument(@".\fax.png");

    // Cleanup.
    imageFilters.Dispose();
}
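In the version above, Dispose() is skipped if any filter step throws. A minimal sketch that guarantees cleanup, assuming ImageFilters implements IDisposable (its Dispose() method suggests it does, but this is not confirmed here):

// Assumes ImageFilters implements IDisposable.
using (var imageFilters = new ImageFilters())
{
    imageFilters.LoadDocument(@".\fax.tif");
    imageFilters.FixRotation();
    imageFilters.Deskew();
    imageFilters.RemoveNoise();
    imageFilters.SaveDocument(@".\fax.png");
} // Dispose() runs even if a filter step throws.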
public void FilterTestBlackAndWhite()
{
    // Load the original bitmap
    bitmapOriginal = ImageUtils.loadOriginalPicture();

    // Apply the ImageFilters black-and-white filter
    Bitmap bitmapOriginalWithImageFilters = ImageFilters.BlackWhite(bitmapOriginal);

    // Apply the filter manually to build the expected bitmap.
    // Work on a copy so the original bitmap is not modified in place.
    Bitmap bitmapExpected = new Bitmap(bitmapOriginal);
    int rgb;
    Color c;
    for (int y = 0; y < bitmapExpected.Height; y++)
    {
        for (int x = 0; x < bitmapExpected.Width; x++)
        {
            c = bitmapExpected.GetPixel(x, y);
            rgb = (c.R + c.G + c.B) / 3;
            bitmapExpected.SetPixel(x, y, Color.FromArgb(rgb, rgb, rgb));
        }
    }

    // Check that the pixels of the two images are the same
    bool result = ImageUtils.comparePixelImages(bitmapOriginalWithImageFilters, bitmapExpected);
    Assert.IsTrue(result);
}
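ImageUtils.comparePixelImages is not included in this excerpt. A plausible pixel-by-pixel implementation, offered purely as a hypothetical sketch of what the helper is assumed to do:

// Hypothetical helper: returns true when both bitmaps have the same
// dimensions and identical ARGB values at every pixel.
public static bool comparePixelImages(Bitmap first, Bitmap second)
{
    if (first.Width != second.Width || first.Height != second.Height)
    {
        return false;
    }

    for (int y = 0; y < first.Height; y++)
    {
        for (int x = 0; x < first.Width; x++)
        {
            if (first.GetPixel(x, y).ToArgb() != second.GetPixel(x, y).ToArgb())
            {
                return false;
            }
        }
    }

    return true;
}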
public static Bitmap RecognizeDocumentInImage(Bitmap originalBmp)
{
    var grayscale = new GrayscaleImage(originalBmp);
    var compressionBmp = BitmapProcessing.ImageCompression(originalBmp);
    var processedImage = ImageFilters.ImageFiltering(compressionBmp);

    var resultSearch = SearchSingularPoints.SerchSPForImage(originalBmp, compressionBmp, processedImage);
    var spPoints = resultSearch.Item1;
    var equations = resultSearch.Item2;

    BitmapProcessing.SelectBackground(grayscale, equations);

    var widthAndHeight = MakeWidthAndHeightDocument(spPoints);
    var documentWidth = widthAndHeight.Item1;
    var documentHeight = widthAndHeight.Item2;

    var anglePoints = MakeAnglePoints(documentWidth, documentHeight, spPoints);
    var H = GetMatrixHomography(spPoints, anglePoints);
    var inverseH = H.Inverse();

    var correctImage = ImageCorrection(inverseH, grayscale);
    var correctSpPoints = TransformPoints(inverseH, spPoints);
    var correctEquations = MakeEquationsLines(correctSpPoints);

    var angle = correctEquations[2].AngleDeviationOX();
    correctImage = BitmapProcessing.RotateGrayscaleImage(correctImage, angle);

    var document = ImageCutter.CutDocument(correctImage);

    return BitmapProcessing.MakeBitmap(document.Width, document.Height, document.Colors);
    //return BitmapProcessing.MakeBitmap(correctImage.Width, correctImage.Height, correctImage.Colors);
}
public void RainbowFilter_ArgumentIsNull_ReturnsNullReferenceException()
{
    // Arrange
    Bitmap sourceBitmap = new Bitmap(100, 100);
    sourceBitmap = null;

    // Act
    var resultBitmap = ImageFilters.RainbowFilter(sourceBitmap);
}
public void extensionTest()
{
    Bitmap imageFiltreeTest = ImageFilters.BlackWhite(imageInitiale);
    String extensionImageInitiale = Path.GetExtension(imageInitiale.ToString());
    String extensionImageFiltree = Path.GetExtension(imageFiltreeTest.ToString());
    Assert.AreEqual(extensionImageInitiale, extensionImageFiltree);
}
public void TestCrazyFilter()
{
    Bitmap temp = new Bitmap(ResourceTestImages.original_for_filters);
    Bitmap expectedResult = new Bitmap(ResourceTestImages.crazy_filter);

    temp = ImageFilters.ApplyFilterSwapDivide(new Bitmap(temp), 1, 1, 2, 1);
    Bitmap actualResult = ImageFilters.ApplyFilterSwap(new Bitmap(temp));

    Assert.IsTrue(UtilImageComparison.CompareImageTheSame(actualResult, expectedResult, 0.02), "The images did not match.");
}
public void pixelIntegrityTest()
{
    Bitmap imageFiltree = ImageFilters.BlackWhite(imageInitiale);
    for (int x = 0; x < imageFiltree.Height; x++)
    {
        for (int y = 0; y < imageFiltree.Width; y++)
        {
            Assert.AreEqual(imageInitiale.GetPixel(y, x), imageFiltree.GetPixel(y, x));
        }
    }
}
public void ValidFilter()
{
    Color origine = Color.FromArgb(100, 140, 200, 60);
    Color wanted = Color.FromArgb(100, 70, 50, 60);
    Bitmap imageBase = getImage(origine);

    Bitmap filtered = ImageFilters.ApplyFilter(imageBase, 1, 2, 1, 4);

    for (int width = 0; width < filtered.Width; width++)
    {
        for (int height = 0; height < filtered.Height; height++)
        {
            Assert.AreEqual(filtered.GetPixel(width, height), wanted);
        }
    }
}
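The getImage helper used by this test (and by the invalid-argument tests below) is not shown. A minimal sketch of what it is assumed to do, namely produce a small solid-colour bitmap; the size is arbitrary:

// Hypothetical helper: builds a small bitmap filled with a single colour.
private static Bitmap getImage(Color color)
{
    var bitmap = new Bitmap(10, 10);
    for (int x = 0; x < bitmap.Width; x++)
    {
        for (int y = 0; y < bitmap.Height; y++)
        {
            bitmap.SetPixel(x, y, color);
        }
    }
    return bitmap;
}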
public void colorPixelTest()
{
    Bitmap imageFiltreeTest = ImageFilters.BlackWhite(imageInitiale);
    for (int i = 0; i < imageFiltreeTest.Width; i++)
    {
        for (int j = 0; j < imageFiltreeTest.Height; j++)
        {
            Color couleurPixelSouhaite = imageFiltreeBlackWhite.GetPixel(i, j);
            Color couleurPixelTest = imageFiltreeTest.GetPixel(i, j);
            Assert.AreEqual(couleurPixelSouhaite, couleurPixelTest);
        }
    }
}
public void SepiaTest()
{
    foreach (var a in Enumerable.Range(0, 256))
    {
        foreach (var r in Enumerable.Range(0, 256))
        {
            foreach (var g in Enumerable.Range(0, 256))
            {
                foreach (var b in Enumerable.Range(0, 256))
                {
                    var t = Color.FromArgb(a, r, g, b);
                    Assert.AreEqual(ImageFilters.SepiaFilter((uint)t.ToArgb()), SepiaFilter(t).ToArgb());
                }
            }
        }
    }
}
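The managed SepiaFilter(Color) reference that this test compares against is not shown in the excerpt. The following sketch uses the commonly quoted sepia coefficients and is offered only as an assumed reference implementation; the project's actual helper may differ:

// Assumed reference implementation (hypothetical): standard sepia matrix,
// alpha preserved, channels clamped to 255.
private static Color SepiaFilter(Color c)
{
    int r = Math.Min(255, (int)(0.393 * c.R + 0.769 * c.G + 0.189 * c.B));
    int g = Math.Min(255, (int)(0.349 * c.R + 0.686 * c.G + 0.168 * c.B));
    int b = Math.Min(255, (int)(0.272 * c.R + 0.534 * c.G + 0.131 * c.B));
    return Color.FromArgb(c.A, r, g, b);
}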
public void BlackWhiteTest()
{
    // Custom image used for test
    Bitmap TestImg = new Bitmap(100, 100);
    // Method result for comparison
    Bitmap Result;

    for (int y = 0; y < TestImg.Height; y++)
    {
        for (int x = 0; x < TestImg.Width; x++)
        {
            TestImg.SetPixel(x, y, Color.FromArgb(120, 90, 150));
        }
    }

    Result = ImageFilters.BlackWhite(TestImg);
    Assert.IsTrue(IsPixelColorEqual(Result));
}
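IsPixelColorEqual is not part of this excerpt; given the test's intent, it presumably verifies that every pixel of the black-and-white result is a grey tone (equal R, G and B). A hypothetical version of that check:

// Hypothetical helper: a greyscale result should have R == G == B for every pixel.
private static bool IsPixelColorEqual(Bitmap bitmap)
{
    for (int y = 0; y < bitmap.Height; y++)
    {
        for (int x = 0; x < bitmap.Width; x++)
        {
            Color c = bitmap.GetPixel(x, y);
            if (c.R != c.G || c.G != c.B)
            {
                return false;
            }
        }
    }
    return true;
}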
private void Input_DoubleClick(object sender, EventArgs e)
{
    if (calibrateUiState != CalibrateUIState.DetectBoard && calibrateUiState != CalibrateUIState.DetectTiles)
    {
        return;
    }

    MouseEventArgs args = e as MouseEventArgs;

    Bitmap scaledImage = new Bitmap(Input.ClientSize.Width, Input.ClientSize.Height);
    Input.DrawToBitmap(scaledImage, Input.ClientRectangle);

    var sample = ImageFilters.SampleHSBAverage(scaledImage, args.X, args.Y);
    hueVal.Value = sample.Hue;
    satVal.Value = sample.Saturation;
    briVal.Value = sample.Brightness;

    // board calibration
    if (modeCombo.SelectedIndex == 0)
    {
        hueTol.Value = 10;
        satTol.Value = 0.2m;
        briTol.Value = 0.2m;
        UpdateBoardCheck.Checked = true;
    }

    // tiles calibration
    if (modeCombo.SelectedIndex == 1)
    {
        // offset as tile hue is too close to triple word score
        hueVal.Value += 10;
        hueTol.Value = 30;
        satTol.Value = 1m;
        briTol.Value = 1m;
        UpdateBoardCheck.Checked = false;
    }

    updateDetector();
}
/// <summary>
/// Applies a filter to the image.
/// </summary>
/// <param name="imageFilters">The filter to apply to the image.</param>
/// <returns>The resulting image.</returns>
public Image ApplyFilter(ImageFilters imageFilters)
{
    _editor.Load(_tempImage);

    IMatrixFilter filter;
    switch (imageFilters)
    {
        case ImageFilters.BlackWhite:
            filter = MatrixFilters.BlackWhite;
            break;
        case ImageFilters.Comic:
            filter = MatrixFilters.Comic;
            break;
        case ImageFilters.Gotham:
            filter = MatrixFilters.Gotham;
            break;
        case ImageFilters.Invert:
            filter = MatrixFilters.Invert;
            break;
        case ImageFilters.Polaroid:
            filter = MatrixFilters.Polaroid;
            break;
        case ImageFilters.Sepia:
            filter = MatrixFilters.Sepia;
            break;
        default:
            throw new ArgumentOutOfRangeException(nameof(imageFilters), imageFilters, null);
    }

    _editor.Filter(filter);
    return _editor.Image;
}
public void ThresholdTest()
{
    foreach (var lvl in Enumerable.Range(1, 101).Select(i => (byte)i))
    {
        foreach (var a in Enumerable.Range(0, 256))
        {
            foreach (var r in Enumerable.Range(0, 256))
            {
                foreach (var g in Enumerable.Range(0, 256))
                {
                    foreach (var b in Enumerable.Range(0, 256))
                    {
                        var t = Color.FromArgb(a, r, g, b);
                        Assert.AreEqual(
                            ImageFilters.ThresholdFilter((uint)t.ToArgb(), (byte)(255 * lvl / 100)),
                            ThresholdFilter(t, lvl).ToArgb());
                    }
                }
            }
        }
    }
}
public async Task<IHttpActionResult> Filter(string flt, string crd)
{
    var tracer = Request.GetConfiguration().Services.GetTraceWriter();

    if (!Request.TryToBitmap(out var img) || img.PixelFormat != PixelFormat.Format32bppArgb || img.Width > 1000 || img.Height > 1000)
    {
        tracer.Info(Request, ControllerContext.ControllerDescriptor.ControllerType.FullName, "Incorrect PNG");
        return BadRequest();
    }

    var rect = (Rectangle)(new RectangleConverter().ConvertFromInvariantString(crd) ?? Rectangle.Empty);
    rect = Rectangle.Intersect(rect.Normalise(), new Rectangle(0, 0, img.Width, img.Height));
    if (rect.IsEmpty || rect.Width == 0 || rect.Height == 0)
    {
        tracer.Info(Request, ControllerContext.ControllerDescriptor.ControllerType.FullName, "Empty rectangle");
        return StatusCode(HttpStatusCode.NoContent);
    }

    tracer.Info(Request, ControllerContext.ControllerDescriptor.ControllerType.FullName, "Filter begin");

    img = img.Clone(rect, img.PixelFormat);
    rect.X = 0;
    rect.Y = 0;

    var bytes = rect.Width * rect.Height;
    var argbValues = img.ToArray(rect);
    var filter = ImageFilters.FromString(flt, out var byteLevel);
    for (var i = 0; i < bytes; i++)
    {
        argbValues[i] = filter((uint)argbValues[i], byteLevel);
    }

    tracer.Info(Request, ControllerContext.ControllerDescriptor.ControllerType.FullName, "Filter end");

    return await Task.FromResult(new OkResult(argbValues.ToBitmap(rect.Width, rect.Height)));
}
public static Image<Rgba32> SetFilter(Image<Rgba32> source, ImageFilters imageFilters)
{
    var image = source.Clone();
    var w = image.Width;
    var h = image.Height;

    for (int x = 0; x < w; x++)
    {
        for (int y = 0; y < h; y++)
        {
            var p = image[x, y];
            if (imageFilters == ImageFilters.Red)
            {
                p.B = 0;
                p.G = 0;
            }
            else if (imageFilters == ImageFilters.Green)
            {
                p.R = 0;
                p.B = 0;
            }
            else if (imageFilters == ImageFilters.Blue)
            {
                p.R = 0;
                p.G = 0;
            }
            else if (imageFilters == ImageFilters.Gray)
            {
                var gray = (byte)((0.299 * p.R) + (0.587 * p.G) + (0.114 * p.B));
                p.R = gray;
                p.G = gray;
                p.B = gray;
            }
            image[x, y] = p;
        }
    }

    return image;
}
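A short usage sketch for SetFilter, assuming SixLabors.ImageSharp and the ImageFilters enum values referenced above; the file names are placeholders:

using (Image<Rgba32> source = Image.Load<Rgba32>("input.png"))
using (Image<Rgba32> grey = SetFilter(source, ImageFilters.Gray))
{
    // Clone() inside SetFilter leaves the source image untouched.
    grey.SaveAsPng("output-gray.png");
}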
public override Bitmap Apply(Bitmap bitmap) { return(ImageFilters.ApplyFilterSwapDivide(new Bitmap(bitmap), 1, 1, 2, 1)); }
public UnityEngine.Color[] ApplyFilter(UnityEngine.Color[] sourceColors, int width, int height) { ImageFilters filters = new ImageFilters(); return filters.PrewittFilter(sourceColors, width, height); }
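A possible call site for the Unity wrapper above, shown only as a sketch; the surrounding class, the texture being readable, and the behaviour of PrewittFilter are assumptions:

// Hypothetical helper: runs the Prewitt edge filter over a readable Texture2D
// and writes the result back into the same texture.
public Texture2D ApplyPrewittToTexture(Texture2D texture)
{
    UnityEngine.Color[] pixels = texture.GetPixels();
    UnityEngine.Color[] filtered = ApplyFilter(pixels, texture.width, texture.height);
    texture.SetPixels(filtered);
    texture.Apply(); // upload the modified pixels back to the GPU
    return texture;
}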
public override Bitmap Apply(Bitmap bitmap) { Color c = Color.Green; return(ImageFilters.ApplyFilterMega(new Bitmap(bitmap), 230, 110, c)); }
public void InvalidFilterMinusOne()
{
    Bitmap image = getImage(Color.FromArgb(100, 140, 200, 60));
    ImageFilters.ApplyFilter(image, -1, -1, -1, -1);
}
public override Bitmap Apply(Bitmap bitmap) { return(ImageFilters.ApplyFilter(new Bitmap(bitmap), 1, 10, 1, 1)); }
private bool GetFitInMatrix(ITrackedObjectPsfFit gaussian, ref int matirxSize, float preselectedAperture)
{
    rbGuidingStar.Text = "Guiding/Comparison Star";
    m_IsBrightEnoughForAutoGuidingStar = false;

    if (m_Aperture == null)
    {
        if (gaussian != null && !double.IsNaN(gaussian.FWHM) &&
            TangraConfig.Settings.Photometry.SignalApertureUnitDefault == TangraConfig.SignalApertureUnit.FWHM)
        {
            m_Aperture = (float)(gaussian.FWHM * TangraConfig.Settings.Photometry.DefaultSignalAperture);
        }
        else
        {
            m_Aperture = (float)(TangraConfig.Settings.Photometry.DefaultSignalAperture);
        }
    }
    else if (gaussian != null && !double.IsNaN(gaussian.FWHM) &&
             TangraConfig.Settings.Photometry.SignalApertureUnitDefault == TangraConfig.SignalApertureUnit.FWHM &&
             m_Aperture < (float)(gaussian.FWHM * TangraConfig.Settings.Photometry.DefaultSignalAperture))
    {
        m_Aperture = (float)(gaussian.FWHM * TangraConfig.Settings.Photometry.DefaultSignalAperture);
    }

    nudFitMatrixSize.ValueChanged -= nudFitMatrixSize_ValueChanged;
    try
    {
        uint[,] autoStarsPixels = m_AstroImage.GetMeasurableAreaPixels(m_Center.X, m_Center.Y, 35);
        m_AutoStarsInLargerArea = StarFinder.GetStarsInArea(
            ref autoStarsPixels,
            m_AstroImage.Pixelmap.BitPixCamera,
            m_AstroImage.Pixelmap.MaxSignalValue,
            m_AstroImage.MedianNoise,
            LightCurveReductionContext.Instance.DigitalFilter);

        m_ProcessingPixels = ImageFilters.CutArrayEdges(autoStarsPixels, 9);
        m_DisplayPixels = m_AstroImage.GetMeasurableAreaDisplayBitmapPixels(m_Center.X, m_Center.Y, 17);

        m_AutoStarsInArea = new List<PSFFit>();
        foreach (PSFFit autoStar in m_AutoStarsInLargerArea)
        {
            if (autoStar.XCenter > 9 && autoStar.XCenter < 9 + 17 && autoStar.YCenter > 9 && autoStar.YCenter < 9 + 17)
            {
                // Don't change original star so use a clone
                PSFFit clone = autoStar.Clone();
                clone.SetNewFieldCenterFrom35PixMatrix(8, 8);
                m_AutoStarsInArea.Add(clone);
            }
        }

        int oldMatirxSize = matirxSize;

        if (m_AutoStarsInArea.Count == 0)
        {
            rbGuidingStar.Text = "Guiding/Comparison Star";

            // There are no stars that are bright enough. Simply let the user do what they want,
            // but still try to default to a sensible aperture size
            MeasurementsHelper measurement = ReduceLightCurveOperation.DoConfiguredMeasurement(
                m_ProcessingPixels, m_Aperture.Value, m_AstroImage.Pixelmap.BitPixCamera,
                m_AstroImage.Pixelmap.MaxSignalValue, 3.0, ref matirxSize);

            if (measurement.FoundBestPSFFit != null && measurement.FoundBestPSFFit.IsSolved && measurement.FoundBestPSFFit.Certainty > 0.1)
            {
                m_X0 = measurement.XCenter;
                m_Y0 = measurement.YCenter;
                m_FWHM = (float)measurement.FoundBestPSFFit.FWHM;
            }
            else
            {
                m_X0 = 8;
                m_Y0 = 8;
                m_FWHM = 6;
            }

            m_Gaussian = null;
            nudFitMatrixSize.SetNUDValue(11);
        }
        else if (m_AutoStarsInArea.Count == 1)
        {
            // There is exactly one good star found. Go and do a fit in a wider area
            double bestFindTolerance = 3.0;
            for (int i = 0; i < 2; i++)
            {
                MeasurementsHelper measurement = ReduceLightCurveOperation.DoConfiguredMeasurement(
                    m_ProcessingPixels, m_Aperture.Value, m_AstroImage.Pixelmap.BitPixCamera,
                    m_AstroImage.Pixelmap.MaxSignalValue, bestFindTolerance, ref matirxSize);

                if (measurement != null && matirxSize != -1)
                {
                    if (matirxSize < 5)
                    {
                        // Do a centroid in the full area, and get another matrix centered at the centroid
                        ImagePixel centroid = new ImagePixel(m_Center.X, m_Center.Y);
                        m_ProcessingPixels = m_AstroImage.GetMeasurableAreaPixels(centroid);
                        m_DisplayPixels = m_AstroImage.GetMeasurableAreaDisplayBitmapPixels(centroid);
                        m_X0 = centroid.X;
                        m_Y0 = centroid.Y;
                        m_FWHM = 6;
                        m_Gaussian = null;
                        nudFitMatrixSize.SetNUDValue(11);
                    }
                    else
                    {
                        m_X0 = measurement.XCenter;
                        m_Y0 = measurement.YCenter;
                        if (measurement.FoundBestPSFFit != null)
                        {
                            m_FWHM = (float)measurement.FoundBestPSFFit.FWHM;
                            m_Gaussian = measurement.FoundBestPSFFit;
                        }
                        else
                        {
                            m_FWHM = 6;
                            m_Gaussian = null;
                        }
                        m_ProcessingPixels = measurement.PixelData;
                        nudFitMatrixSize.SetNUDValue(matirxSize);
                    }
                }
                else
                {
                    matirxSize = oldMatirxSize;
                    return false;
                }

                if (m_Gaussian != null)
                {
                    if (IsBrightEnoughtForGuidingStar())
                    {
                        rbGuidingStar.Text = "Guiding/Comparison Star";
                        m_IsBrightEnoughForAutoGuidingStar = true;
                    }
                    break;
                }
            }
        }
        else if (m_AutoStarsInArea.Count > 1)
        {
            rbGuidingStar.Text = "Guiding/Comparison Star";

            // There are more stars found.
            double xBest = m_Gaussian != null ? m_Gaussian.XCenter : m_IsEdit ? ObjectToAdd.ApertureMatrixX0 : 8.5;
            double yBest = m_Gaussian != null ? m_Gaussian.YCenter : m_IsEdit ? ObjectToAdd.ApertureMatrixY0 : 8.5;

            // by default use the one closest to the original location
            PSFFit closestFit = m_AutoStarsInArea[0];
            double closestDist = double.MaxValue;
            foreach (PSFFit star in m_AutoStarsInArea)
            {
                double dist = Math.Sqrt((star.XCenter - xBest) * (star.XCenter - xBest) + (star.YCenter - yBest) * (star.YCenter - yBest));
                if (closestDist > dist)
                {
                    closestDist = dist;
                    closestFit = star;
                }
            }

            m_X0 = (float)closestFit.XCenter;
            m_Y0 = (float)closestFit.YCenter;
            m_FWHM = (float)closestFit.FWHM;
            m_Gaussian = closestFit;
            nudFitMatrixSize.SetNUDValue(m_IsEdit ? ObjectToAdd.PsfFitMatrixSize : closestFit.MatrixSize);
        }

        //if (m_Gaussian == null && gaussian.Certainty > 0.1 && ImagePixel.ComputeDistance(gaussian.X0_Matrix, 8, gaussian.Y0_Matrix, 8) < 3)
        //{
        //    // If we failed to locate a bright enough autostar, but the default Gaussian is still certain enough
        //    // and close enough to the center, we present it as a starting point
        //    m_X0 = (float)gaussian.X0_Matrix;
        //    m_Y0 = (float)gaussian.Y0_Matrix;
        //    m_FWHM = (float)gaussian.FWHM;
        //    m_Gaussian = gaussian;
        //}

        decimal appVal;
        if (float.IsNaN(preselectedAperture))
        {
            if (float.IsNaN(m_Aperture.Value))
            {
                appVal = Convert.ToDecimal(TangraConfig.Settings.Photometry.DefaultSignalAperture);
            }
            else
            {
                appVal = Convert.ToDecimal(m_Aperture.Value);
                if (nudAperture1.Maximum < appVal)
                {
                    nudAperture1.Maximum = appVal + 1;
                }
            }
        }
        else
        {
            appVal = (decimal)preselectedAperture;
        }

        if ((float)appVal > m_Aperture)
        {
            m_Aperture = (float)appVal;
        }

        nudAperture1.SetNUDValue(Math.Round(appVal, 2));

        PlotSingleTargetPixels();
        PlotGaussian();

        return true;
    }
    finally
    {
        nudFitMatrixSize.ValueChanged += nudFitMatrixSize_ValueChanged;
    }
}
/*
 * The pipeline
 *
 * This big ugly Process method takes a webcam frame and pushes it through the module pipeline.
 * The module instances each process the frame, updating their own internal state in real-time and in-place.
 *
 * HINDSIGHT: a functional, stateless approach may have been better? (but probably more verbose)
 *
 * TODO: make this less ugly
 */
public void Process(Bitmap frame, bool tileDetectionEnabled, bool pauseRealtime, bool tileRecognitionEnabled,
                    bool drawOcrResults, bool drawTileRegions, bool drawTileExtractions, bool placeDetectedTiles,
                    bool drawBoardRegion, bool boardDetectionEnabled)
{
    Graphics graphics;

    // Flatten lighting if enabled
    if (BoardDetector.FlattenLighting)
    {
        ImageFilters.FlattenLighting(frame);
    }

    // Board detection if enabled
    if (boardDetectionEnabled)
    {
        BoardDetector.Process(frame);
    }

    // Board rectification
    BoardDetector.Rectify(frame);

    // Tile detection
    TileDetector.Process(BoardDetector.RectifiedBoard);

    // Display the board region overlay if enabled
    if (BoardDetector.HasDetected && drawBoardRegion)
    {
        graphics = Graphics.FromImage(frame);
        PointF[] corners = new PointF[4];
        var padding = 3;
        for (int i = 0; i < 4; i++)
        {
            var xx = (i == 0 || i == 3) ? -padding : padding;
            var yy = (i == 0 || i == 1) ? -padding : padding;
            corners[i] = new PointF(BoardDetector.BoardCorners[i].X + xx, BoardDetector.BoardCorners[i].Y + yy);
        }
        graphics.DrawPolygon(new Pen(Color.Black, 6f), corners);
        graphics.DrawPolygon(new Pen(Color.Red, 4f), corners);
        graphics.Dispose();
    }

    // Clear the unplaced previous frame's tile detections
    Game.Board.ClearUnplacedCells();

    // Get a graphics context for the board image after rectification
    graphics = Graphics.FromImage(BoardDetector.RectifiedBoard);

    // For every detected tile
    foreach (Blob blob in TileDetector.TileBlobs)
    {
        // Upscale the detected tile region but make it slightly larger
        var border = 5;
        var br = blob.Rectangle;
        var x = Math.Max(0, -border + br.X * TileDetector.Scale);
        var y = Math.Max(0, -border + br.Y * TileDetector.Scale);
        var width = Math.Min(BoardDetector.RectifiedBoard.Width, border + br.Width * TileDetector.Scale);
        var height = Math.Min(BoardDetector.RectifiedBoard.Height, border + br.Height * TileDetector.Scale);
        var region = new Rectangle(x, y, width, height);

        // Search for tiles if enabled
        if (tileDetectionEnabled)
        {
            try
            {
                // Get a copy of the rectified board image
                Bitmap extract = BoardDetector.RectifiedBoard.Clone(region, PixelFormat.Format24bppRgb);

                // Extract the current blob's image
                TileDetector.BlobCounter.ExtractBlobsImage(TileDetector.FilteredBoard, blob, false);
                Bitmap blobImage = blob.Image.ToManagedImage();

                // Resize the blob image
                Bitmap rectifiedBlobImage = (new ResizeNearestNeighbor(extract.Width, extract.Height)).Apply(blobImage);

                // Apply the tile mask
                ImageFilters.Mask(extract, rectifiedBlobImage);

                // Perform letter blob extraction (and recognition if enabled)
                if (pauseRealtime)
                {
                    TileOcr.Recognise = true;
                }
                TileOcr.Process(extract);
                TileOcr.Recognise = tileRecognitionEnabled;

                // For every OCR result (recognised or not)
                foreach (OCRResult result in TileOcr.Results)
                {
                    // Get the absolute blob position
                    var xx = region.X + result.Blob.Rectangle.X;
                    var yy = region.Y + result.Blob.Rectangle.Y;

                    // Display OCR result overlay if enabled
                    if (drawOcrResults || pauseRealtime)
                    {
                        graphics.FillRectangle(new SolidBrush(Color.DarkGray), xx, yy, 32, 32);
                        graphics.DrawString(result.Letter, new Font("Verdana", 20, FontStyle.Bold), new SolidBrush(Color.Black), xx + 1, yy + 1);
                        graphics.DrawString(result.Letter, new Font("Verdana", 20, FontStyle.Bold), new SolidBrush(Color.White), xx, yy);
                    }

                    // Display tile region if enabled
                    if (drawTileRegions)
                    {
                        graphics.DrawRectangle(new Pen(Color.Red, 2), xx - 5, yy - 5, result.Blob.Rectangle.Width + 10, result.Blob.Rectangle.Height + 10);
                    }

                    // Place the tile on to the virtual Scrabble board if required
                    if (placeDetectedTiles)
                    {
                        Game.Board.PlaceTile(Game.CurrentPlayer, result.Letter, xx + 6, yy + 6, BoardDetector.RectifiedBoard.Width, BoardDetector.RectifiedBoard.Height, result.Blob.Rectangle);
                    }

                    // Display the raw tile extractions if enabled
                    if (drawTileExtractions)
                    {
                        graphics.DrawImageUnscaled(result.Image, new Point(xx, yy));
                    }
                }
            }
            catch
            {
                // sometimes this may fail :(
            }
        }
    }

    graphics.Dispose();
}
internal static List<PotentialStarStruct> GetPeakPixelsInArea(
    uint[,] data,
    out uint[,] lpdData,
    int bpp,
    uint maxSignalValue,
    uint aboveNoiseLevelRequired,
    double minDistanceInPixels,
    bool useLPDFilter,
    Rectangle excludeArea)
{
    if (useLPDFilter)
    {
        lpdData = ImageFilters.LowPassDifferenceFilter(data, bpp, false);
    }
    else
    {
        lpdData = data;
    }

    int nWidth = lpdData.GetLength(0);
    int nHeight = lpdData.GetLength(1);

    List<PotentialStarStruct> potentialStars = new List<PotentialStarStruct>();

    ExaminePeakPixelCandidate examinePixelCallback = delegate(int x, int y, uint z)
    {
        bool tooClose = false;

        // Local maximum, test for a star
        foreach (PotentialStarStruct prevStar in potentialStars)
        {
            double dist = Math.Sqrt((prevStar.X - x) * (prevStar.X - x) + (prevStar.Y - y) * (prevStar.Y - y));
            if (dist <= minDistanceInPixels)
            {
                tooClose = true;
                if (prevStar.Z < z)
                {
                    prevStar.Z = z;
                    prevStar.X = x;
                    prevStar.Y = y;
                }
                break;
            }
        }

        if (!tooClose)
        {
            potentialStars.Add(new PotentialStarStruct() { X = x, Y = y, Z = z });
        }

        // An early return if too many peak pixels have been found
        return potentialStars.Count <= TangraConfig.Settings.Special.StarFinderMaxNumberOfPotentialStars;
    };

    if (useLPDFilter)
    {
        CheckAllPixels(lpdData, nWidth, nHeight, aboveNoiseLevelRequired, excludeArea, examinePixelCallback);
    }
    else
    {
        CheckPixelsFromBrightToFaint(lpdData, nWidth, nHeight, bpp, maxSignalValue, aboveNoiseLevelRequired, excludeArea, examinePixelCallback);
    }

    return potentialStars;
}
public static void RenderPreview(String filePath)
{
    if (imagePreview != null)
    {
        imagePreview.Dispose();
    }
    imagePreview = null;

    Reader.ReadFile(filePath);
    fileSizeString = Reader.GetFileSizeString(filePath);
    Bitmap image = Reader.image;

    if (image != null)
    {
        ImageFilters filters = new ImageFilters();

        int iconsChecked = 0;
        if (FilterSettings.isIconBTN) { iconsChecked++; }
        if (FilterSettings.isIconPAS) { iconsChecked++; }
        if (FilterSettings.isIconATC) { iconsChecked++; }
        if (FilterSettings.isIconDISBTN) { iconsChecked++; }
        if (FilterSettings.isIconDISPAS) { iconsChecked++; }
        if (FilterSettings.isIconDISATC) { iconsChecked++; }
        if (FilterSettings.isIconATT) { iconsChecked++; }
        if (FilterSettings.isIconUPG) { iconsChecked++; }

        if (iconsChecked == 1)
        {
            if (FilterSettings.isIconBTN) { image = filters.AddIconBorder(image, IconTypes.BTN); }
            if (FilterSettings.isIconPAS) { image = filters.AddIconBorder(image, IconTypes.PAS); }
            if (FilterSettings.isIconATC) { image = filters.AddIconBorder(image, IconTypes.ATC); }
            if (FilterSettings.isIconDISBTN) { image = filters.AddIconBorder(image, IconTypes.DISBTN); }
            if (FilterSettings.isIconDISPAS) { image = filters.AddIconBorder(image, IconTypes.DISPAS); }
            if (FilterSettings.isIconDISATC) { image = filters.AddIconBorder(image, IconTypes.DISATC); }
            if (FilterSettings.isIconATT) { image = filters.AddIconBorder(image, IconTypes.ATT); }
            if (FilterSettings.isIconUPG) { image = filters.AddIconBorder(image, IconTypes.UPG); }
            errorMsg = "";
        }
        else
        {
            if (FilterSettings.war3IconType == War3IconType.ClassicIcon && iconsChecked > 1 && image.Width == 64 && image.Height == 64)
            {
                errorMsg = "Cannot display multiple icon filters";
            }
            else if (FilterSettings.war3IconType == War3IconType.ReforgedIcon && iconsChecked > 1 && image.Width == 256 && image.Height == 256)
            {
                errorMsg = "Cannot display multiple icon filters";
            }
            else
            {
                errorMsg = "";
            }
        }

        if (FilterSettings.isResized)
        {
            image = ImageFilters.ResizeBitmap(image, FilterSettings.resizeX, FilterSettings.resizeY);
        }
    }
    else
    {
        errorMsg = Reader.errorMsg;
    }

    imagePreview = image;
}
public void InvalidFilterZero()
{
    Bitmap image = getImage(Color.FromArgb(100, 140, 200, 60));
    ImageFilters.ApplyFilter(image, 0, 0, 0, 0);
}
public Matrix Apply(Matrix input)
{
    var guassian = ImageFilters.Gaussian(0.6f, 5);

    var r = new ImageData(input.Width, input.Height, PixelFormats.Gray8);

    var blurred = ImageProcessing.Convolve(input, guassian);
    r[0] = ImageProcessing.Scale(blurred, 0, 255);
    r.Save<JpegBitmapEncoder>(@"C:\imageprocessing\blurred.jpg");

    var gx = ImageProcessing.Convolve(input, new float[,] { { -1, 0, 1 }, { -2, 0, 2 }, { -1, 0, 1 } });
    r[0] = ImageProcessing.Scale(gx, 0, 255);
    r.Save<JpegBitmapEncoder>(@"C:\imageprocessing\gx.jpg");

    var gy = ImageProcessing.Convolve(input, new float[,] { { 1, 2, 1 }, { 0, 0, 0 }, { -1, -2, -1 } });
    r[0] = ImageProcessing.Scale(gy, 0, 255);
    r.Save<JpegBitmapEncoder>(@"C:\imageprocessing\gy.jpg");

    var gradient = Matrix.Sqrt(Matrix.Pow(gx, 2) + Matrix.Pow(gy, 2));
    r[0] = ImageProcessing.Scale(gradient, 0, 255);
    r.Save<JpegBitmapEncoder>(@"C:\imageprocessing\gradient.jpg");

    var angle = new Matrix(input.Width, input.Height);
    for (var x = 1; x < gradient.Width - 1; x++)
    {
        for (var y = 1; y < gradient.Height - 1; y++)
        {
            var orientation = (float)(Math.Atan2(gy[x, y], gx[x, y]) * 180f / Math.PI);
            if (orientation < 0)
            {
                orientation += 180f;
            }
            angle[x, y] = orientation;
        }
    }
    r[0] = ImageProcessing.Scale(angle, 0, 255);
    r.Save<JpegBitmapEncoder>(@"C:\imageprocessing\angle.jpg");

    var result = new Matrix(input.Width, input.Height);
    for (var x = 1; x < gradient.Width - 1; x++)
    {
        for (var y = 1; y < gradient.Height - 1; y++)
        {
            var orientation = angle[x, y];
            var val = gradient[x, y];

            // N-S
            if (orientation <= 22.5 || orientation >= 157.5)
            {
                if (gradient[x, y] > gradient[x, y - 1] && gradient[x, y] > gradient[x, y + 1])
                {
                    result[x, y] = val;
                }
            }
            // E-W
            if (orientation >= 67.5 && orientation <= 112.5)
            {
                if (gradient[x, y] > gradient[x - 1, y] && gradient[x, y] > gradient[x + 1, y])
                {
                    result[x, y] = val;
                }
            }
            // NE-SW
            if (orientation >= 22.5 && orientation <= 67.5)
            {
                if (gradient[x, y] > gradient[x + 1, y - 1] && gradient[x, y] > gradient[x - 1, y + 1])
                {
                    result[x, y] = val;
                }
            }
            // SE-NW
            if (orientation <= 157.5 && orientation >= 112.5)
            {
                if (gradient[x, y] > gradient[x - 1, y - 1] && gradient[x, y] > gradient[x + 1, y + 1])
                {
                    result[x, y] = val;
                }
            }
        }
    }

    return result;
}
public override Bitmap Apply(Bitmap bitmap) { return(ImageFilters.ApplyFilterSwap(new Bitmap(bitmap))); }
public override Bitmap Apply(Bitmap bitmap) { return(ImageFilters.RainbowFilter(new Bitmap(bitmap))); }
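The Apply overrides in this section (swap, swap-divide, mega, rainbow and the parameterised variant) look like implementations of a small filter-strategy hierarchy. The following composition sketch assumes a shared base class with an abstract Apply(Bitmap) method; the FilterBase name is hypothetical and not taken from the source:

// Hypothetical composition helper: applies a sequence of filter strategies in order,
// disposing intermediate bitmaps along the way.
public static Bitmap ApplyAll(Bitmap source, IEnumerable<FilterBase> filters)
{
    Bitmap current = new Bitmap(source);
    foreach (FilterBase filter in filters)
    {
        Bitmap next = filter.Apply(current);
        current.Dispose();
        current = next;
    }
    return current;
}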