/// <summary>
/// Filters the input image with the spreading filter.
/// If the input image has an alpha channel, its colors are pre-multiplied
/// by alpha first so that semi-transparent pixels combine correctly.
/// </summary>
/// <param name="inputImage">Image to filter; may be null.</param>
/// <param name="outputImage">Optional target image; a new one is allocated
/// when null.</param>
/// <returns>The filtered image, or null for a null or empty input.</returns>
public FloatMapImage FilterImage(FloatMapImage inputImage, FloatMapImage outputImage)
{
    if (inputImage == null) return null;

    // The pre-multiplied copy is a temporary owned by this method; it was
    // previously leaked, so track it and dispose it on every exit path.
    FloatMapImage premultiplied = null;
    if (inputImage.PixelFormat.HasAlpha())
    {
        premultiplied = inputImage.PremultiplyByAlpha();
        inputImage = premultiplied;
    }
    try
    {
        uint width = inputImage.Width;
        uint height = inputImage.Height;
        if (width < 1 || height < 1) return null;
        if (outputImage == null)
        {
            outputImage = new FloatMapImage(width, height, inputImage.PixelFormat);
        }
        FloatMapImage spreadingTable = new FloatMapImage(width, height, inputImage.PixelFormat);
        FloatMapImage normalizationTable = new FloatMapImage(width, height, PixelFormat.Greyscale);
        // Spread PSFs into the working tables, then normalize the spread
        // intensities into the output image.
        Filter(inputImage, spreadingTable, normalizationTable);
        Normalize(spreadingTable, normalizationTable, outputImage);
        spreadingTable.Dispose();
        normalizationTable.Dispose();
        return outputImage;
    }
    finally
    {
        if (premultiplied != null)
        {
            premultiplied.Dispose();
        }
    }
}
/// <summary>
/// Builds a min/max pyramid over the depth map: level i stores, for each
/// pixel, the minimum/maximum depth within its 2^i-sized neighborhood.
/// </summary>
/// <param name="depthMap">Source depth map; only its first channel is used.</param>
public NeighborhoodBuffer(FloatMapImage depthMap)
{
    UndefinedValue = 1.0f;
    Width = (int)depthMap.Width;
    Height = (int)depthMap.Height;
    // For simple evaluation with a single bounding power-of-2 square
    // there is the Ceiling function; otherwise, when evaluating with four
    // smaller rectangles, Floor is ok (we need one less level).
    LevelCount = (int)Math.Ceiling(Math.Log(Math.Max(Width, Height), 2));
    minLevels = new FloatMapImage[LevelCount];
    maxLevels = new FloatMapImage[LevelCount];
    // The first level has no neighborhood, so it is a copy of the
    // original depth map. NOTE(review): minLevels[0] and maxLevels[0]
    // alias the SAME image instance - beware of double disposal.
    minLevels[0] = CopySingleChannelDepthMap(depthMap);
    maxLevels[0] = minLevels[0];
    // Each subsequent level takes the min/max over a 2^i neighborhood,
    // built in a single pass from the previous level. Out-of-range
    // samples contribute the neutral element (1.0 for min, 0.0 for max).
    int offset = 1;
    for (int i = 1; i < LevelCount; i++)
    {
        minLevels[i] = ConstructLevel(offset, i, Math.Min, 1.0f, minLevels[i - 1]);
        maxLevels[i] = ConstructLevel(offset, i, Math.Max, 0.0f, maxLevels[i - 1]);
        offset *= 2;
    }
}
/// <summary>
/// Saves a series of images visualizing the thresholded filter-selection
/// criterion of the hybrid spreading filter for several PSF radii.
/// One PNG ("chessRGB_criterion_NNN.png") is written per radius.
/// </summary>
public void MakeCriterionImages()
{
    FloatMapImage origImage = ((Bitmap)Bitmap.FromFile("chessRGB.jpg")).ToFloatMap();
    HybridSpreadingFilter.FilterSelectionCriterion criterion =
        new HybridSpreadingFilter.FilterSelectionCriterion()
        {
            Threshold = CRITERION_THRESHOLD
        };
    criterion.OriginalImage = origImage;
    // The criterion queries a summed-area table of the original image.
    criterion.OriginalImageSAT = origImage.Integrate();
    int width = (int)origImage.Width;
    int height = (int)origImage.Height;
    FloatMapImage criterionImage = new FloatMapImage(origImage.Width, origImage.Height, PixelFormat.Greyscale);
    int minPsfRadius = 1;
    int maxPsfRadius = 100;
    int step = 10;
    for (int psfRadius = minPsfRadius; psfRadius < maxPsfRadius; psfRadius += step)
    {
        for (int x = 0; x < width; x++)
        {
            for (int y = 0; y < height; y++)
            {
                criterionImage.Image[x, y, 0] = criterion.SelectFilter(x, y, psfRadius);
            }
        }
        criterionImage.ToBitmap().Save(
            string.Format("chessRGB_criterion_{0:000}.png", psfRadius),
            System.Drawing.Imaging.ImageFormat.Png);
    }
    // The originals never disposed these images; release them here.
    criterionImage.Dispose();
    origImage.Dispose();
}
/// <summary>
/// Renders the image of the light source on the sensor by tracing
/// jittered lens samples and splatting their sensor intersections.
/// </summary>
/// <param name="imageSize">Raster size of the output image.</param>
/// <returns>The rendered greyscale image.</returns>
public FloatMapImage RenderImage(Size imageSize)
{
    int width = imageSize.Width;
    int height = imageSize.Height;
    FloatMapImage outputImage = new FloatMapImage((uint)width, (uint)height, PixelFormat.Greyscale);
    Sensor.RasterSize = imageSize;

    Sampler sampler = new Sampler();
    int samplesPerDim = (int)Math.Sqrt(SampleCount);
    foreach (Vector2d sample in sampler.GenerateJitteredSamples(samplesPerDim))
    {
        // Sample a point on the back lens surface; force it onto the z = 0 plane.
        Vector3d lensPos = Lens.GetBackSurfaceSample(sample);
        lensPos.Z = 0;

        // Transfer the ray from the light source through the lens sample;
        // rays that do not make it through the lens are skipped.
        Ray outgoingRay = Lens.Transfer(LightSourcePosition, lensPos);
        if (outgoingRay == null)
        {
            continue;
        }

        // Rays that miss the sensor contribute nothing.
        Intersection intersection = Sensor.Intersect(outgoingRay);
        if (intersection == null)
        {
            continue;
        }

        // Splat the light intensity at the sensor-space intersection pixel.
        Vector2d pixelPos = Sensor.CameraToImage(intersection.Position);
        Splat(outputImage, LightIntensity, pixelPos);
    }
    return outputImage;
}
/// <summary>
/// Multiplies the color channels by the alpha channel, which is required
/// before combining semi-transparent pixels (compositing, blurring, etc.).
/// The alpha channel itself is copied unchanged. A new image is created
/// for the result.
/// </summary>
/// <param name="image">Source image; must have an alpha channel.</param>
/// <returns>A new image with alpha-pre-multiplied colors.</returns>
public static FloatMapImage PremultiplyByAlpha(this FloatMapImage image)
{
    if (!image.PixelFormat.HasAlpha())
    {
        throw new ArgumentException(String.Format(
            "The image must have an alpha channel. Pixel format: {0}",
            image.PixelFormat));
    }
    uint width = image.Width;
    uint height = image.Height;
    // The alpha channel is stored right after the color channels.
    uint colorBands = image.PixelFormat.GetColorChannelsCount();
    uint alphaBand = colorBands;
    FloatMapImage result = new FloatMapImage(width, height, image.PixelFormat);
    float[, ,] source = image.Image;
    float[, ,] target = result.Image;
    for (uint row = 0; row < height; row++)
    {
        for (uint col = 0; col < width; col++)
        {
            float alpha = source[col, row, alphaBand];
            target[col, row, alphaBand] = alpha;
            for (uint channel = 0; channel < colorBands; channel++)
            {
                target[col, row, channel] = alpha * source[col, row, channel];
            }
        }
    }
    return result;
}
/// <summary>
/// Composites images with the "over" operator: A over B. Both A and B are
/// expected to be alpha pre-multiplied. Assumes that image A has an alpha
/// channel and image B is completely opaque (has no alpha channel).
/// </summary>
/// <param name="imageA">Foreground image (with alpha).</param>
/// <param name="imageB">Opaque background image.</param>
/// <returns>A new image without an alpha channel.</returns>
public static FloatMapImage Over(this FloatMapImage imageA, FloatMapImage imageB)
{
    CheckImageCompatibility(imageA, imageB);
    uint width = imageA.Width;
    uint height = imageA.Height;
    uint colorBands = imageA.PixelFormat.GetColorChannelsCount();
    uint alphaBand = colorBands;
    // B is opaque, so the composite needs no alpha channel either.
    FloatMapImage result = new FloatMapImage(width, height, imageA.PixelFormat.RemoveAlpha());
    float[, ,] foreground = imageA.Image;
    float[, ,] background = imageB.Image;
    float[, ,] composite = result.Image;
    for (uint row = 0; row < height; row++)
    {
        for (uint col = 0; col < width; col++)
        {
            // Pre-multiplied "over": out = A + (1 - alphaA) * B.
            float transmittance = 1 - foreground[col, row, alphaBand];
            for (uint channel = 0; channel < colorBands; channel++)
            {
                composite[col, row, channel] =
                    foreground[col, row, channel] + transmittance * background[col, row, channel];
            }
        }
    }
    return result;
}
/// <summary>
/// Copy constructor: clones the pixel data and copies all metadata
/// (dimensions, pixel format, derived channel counts, scale) from the
/// given image.
/// </summary>
/// <param name="image">Image to copy.</param>
protected FloatMapImage(FloatMapImage image)
{
    Width = image.Width;
    Height = image.Height;
    PixelFormat format = image.PixelFormat;
    PixelFormat = format;
    // Channel counts are derived from the pixel format.
    ColorChannelsCount = format.GetColorChannelsCount();
    TotalChannelsCount = format.GetTotalChannelsCount();
    Scale = image.Scale;
    // Deep copy of the pixel data so the images do not share storage.
    Image = (float[, ,])image.Image.Clone();
}
/// <summary>
/// Intersects a single-pixel height field (height 0.5) with a ray going
/// perpendicular to it (straight along the z axis through the pixel).
/// </summary>
public void TrySimpleHeightfield1x1WithPerpendicularRay()
{
    FloatMapImage data = new FloatMapImage(1, 1, PixelFormat.Greyscale);
    data.Image[0, 0, 0] = 0.5f;
    HeightField heightfield = new HeightField(new[] { data });
    // The ray crosses the z = 0.5 surface level from below.
    Vector3 rayStart = new Vector3(0.1f, 0.1f, 0.25f);
    Vector3 rayEnd = new Vector3(0.1f, 0.1f, 0.75f);
    IntersectAndReport(heightfield, rayStart, rayEnd);
}
/// <summary>
/// Spreading filter pass: deposits per-pixel PSF corner differences into
/// the working tables, then accumulates them along each row.
/// </summary>
/// <param name="inputImage">Image to be filtered.</param>
/// <param name="spreadingTable">Accumulator for spread intensities.</param>
/// <param name="normalizationTable">Accumulator for normalization weights.</param>
protected override void Filter(FloatMapImage inputImage, FloatMapImage spreadingTable, FloatMapImage normalizationTable)
{
    // Phase 1: distribute the differences into the tables.
    Spread(inputImage, spreadingTable, normalizationTable);
    // Phase 2: horizontal prefix sum turns the differences into spans.
    IntegrateHorizontally(spreadingTable, normalizationTable);
}
/// <summary>
/// Intersects a uniform 2x2 height field (height 0.5) with an oblique ray.
/// </summary>
public void TrySimpleHeightfield2x2()
{
    FloatMapImage data = new FloatMapImage(2, 2, PixelFormat.Greyscale);
    // Fill all four pixels with the same height.
    for (int x = 0; x < 2; x++)
    {
        for (int y = 0; y < 2; y++)
        {
            data.Image[x, y, 0] = 0.5f;
        }
    }
    HeightField heightfield = new HeightField(new[] { data });
    Vector3 rayStart = new Vector3(0.5f, 1.5f, 0.0f);
    Vector3 rayEnd = new Vector3(2, 2, 1);
    IntersectAndReport(heightfield, rayStart, rayEnd);
}
/// <summary>
/// Saves a series of images visualizing the unthresholded filter-selection
/// criterion: the signed difference between each pixel and the average of
/// its PSF neighborhood (computed via a summed-area table), for several
/// PSF radii. One PNG ("chessRGB_diff_nonabs_NNN.png") is written per radius.
/// </summary>
public void MakeUnthresholdedCriterionImages()
{
    FloatMapImage origImage = ((Bitmap)Bitmap.FromFile("chessRGB.jpg")).ToFloatMap();
    HybridSpreadingFilter.FilterSelectionCriterion criterion =
        new HybridSpreadingFilter.FilterSelectionCriterion()
        {
            Threshold = CRITERION_THRESHOLD
        };
    criterion.OriginalImage = origImage;
    criterion.OriginalImageSAT = origImage.Integrate();
    int width = (int)origImage.Width;
    int height = (int)origImage.Height;
    // NOTE: the original code also allocated (and leaked) a criterionImage
    // that was only referenced from commented-out code; it has been removed.
    FloatMapImage diffImage = new FloatMapImage(origImage.Width, origImage.Height, PixelFormat.Greyscale);
    float[, ,] sat = criterion.OriginalImageSAT.Image;
    int minPsfRadius = 1;
    int maxPsfRadius = 100;
    int step = 10;
    for (int psfRadius = minPsfRadius; psfRadius < maxPsfRadius; psfRadius += step)
    {
        for (int x = 0; x < width; x++)
        {
            for (int y = 0; y < height; y++)
            {
                float sourceIntensity = origImage.Image[x, y, 0];
                // Clamped PSF rectangle corners for the summed-area-table query.
                int left = MathHelper.Clamp<int>(x - psfRadius - 1, 0, width - 1);
                int right = MathHelper.Clamp<int>(x + psfRadius, 0, width - 1);
                int top = MathHelper.Clamp<int>(y - psfRadius - 1, 0, height - 1);
                int bottom = MathHelper.Clamp<int>(y + psfRadius, 0, height - 1);
                float psfArea = (right - left) * (bottom - top);
                float psfSum = sat[right, bottom, 0] + sat[left, top, 0]
                    - sat[left, bottom, 0] - sat[right, top, 0];
                // Average over the neighborhood of the current pixel within
                // the PSF radius (excluding the current pixel itself).
                // NOTE(review): assumes psfArea > 1; a degenerate 1-pixel PSF
                // would divide by zero (infinite float) - confirm radii used.
                float averageOverPsf = (psfSum - sourceIntensity) / (psfArea - 1);
                diffImage.Image[x, y, 0] = sourceIntensity - averageOverPsf;
            }
        }
    }
    diffImage.ToBitmap(false).Save(
        string.Format("chessRGB_diff_nonabs_{0:000}.png", psfRadius),
        System.Drawing.Imaging.ImageFormat.Png);
    }
    // Release the working images (the original leaked them).
    diffImage.Dispose();
    origImage.Dispose();
}
/// <summary>
/// Spreading filter pass: deposits PSF corner differences into the working
/// tables, then accumulates them into filled rectangles with a horizontal
/// and a vertical prefix sum.
/// </summary>
/// <param name="inputImage">Image to be filtered.</param>
/// <param name="spreadingTable">Accumulator for spread intensities.</param>
/// <param name="normalizationTable">Accumulator for normalization weights.</param>
protected override void Filter(FloatMapImage inputImage, FloatMapImage spreadingTable, FloatMapImage normalizationTable)
{
    // Phase 1: distribute the rectangle corners into the tables.
    Spread(inputImage, spreadingTable, normalizationTable);
    // Phase 2: integrate in both directions to fill the rectangles.
    IntegrateHorizontally(spreadingTable, normalizationTable);
    IntegrateVertically(spreadingTable, normalizationTable);
}
/// <summary>
/// Converts an 8-bit-per-channel LDR bitmap to a float map image,
/// scaling byte values into [0; 1] and reordering channels from GDI+'s
/// BGR(A) layout to RGB(A).
/// </summary>
/// <param name="ldrImage">Source bitmap.</param>
/// <returns>A new float map image with the same dimensions.</returns>
public static FloatMapImage ToFloatMap(this System.Drawing.Bitmap ldrImage)
{
    Bitmap inputImage = ldrImage;
    int width = ldrImage.Width;
    int height = ldrImage.Height;
    PixelFormat pixelFormat = PixelFormatExtensions.FromBitmapFormat(ldrImage.PixelFormat);
    bool shouldWriteAlpha = pixelFormat.HasAlpha();
    FloatMapImage hdrImage = new FloatMapImage((uint)width, (uint)height, pixelFormat);
    // Lock the whole bitmap for direct (unsafe) read access.
    BitmapData inputData = inputImage.LockBits(new Rectangle(0, 0, width, height), ImageLockMode.ReadOnly, inputImage.PixelFormat);
    float conversionFactor = 1 / 255.0f;
    float[, ,] floatMap = hdrImage.Image;
    unsafe
    {
        int colorBands = (int)hdrImage.ColorChannelsCount;
        int maxColorBand = colorBands - 1;
        // The alpha channel (if any) follows the color channels.
        int alphaBand = colorBands;
        int bands = (int)hdrImage.TotalChannelsCount;
        for (int y = 0; y < height; y++)
        {
            // Rows may be padded, so advance by the stride, not by width.
            byte* inputRow = (byte*)inputData.Scan0 + (y * inputData.Stride);
            for (int x = 0; x < width; x++)
            {
                // BGR -> RGB or BGRA -> RGBA
                for (int band = maxColorBand; band >= 0; band--)
                {
                    // Mirror the band index to translate the BGR input
                    // image to an RGB output image (no-op for greyscale).
                    int hdrBand = maxColorBand - band;
                    floatMap[x, y, hdrBand] = inputRow[x * bands + band] * conversionFactor;
                }
                if (shouldWriteAlpha)
                {
                    // Alpha occupies the same index in both layouts.
                    floatMap[x, y, alphaBand] = inputRow[x * bands + alphaBand] * conversionFactor;
                }
            }
        }
    }
    inputImage.UnlockBits(inputData);
    return hdrImage;
}
/// <summary>
/// Makes a single-channel copy of the depth map: a plain deep copy when it
/// is already greyscale, otherwise a copy of its first channel only.
/// </summary>
/// <param name="depthMap">Source depth map.</param>
/// <returns>A new greyscale image.</returns>
private FloatMapImage CopySingleChannelDepthMap(FloatMapImage depthMap)
{
    return (depthMap.PixelFormat == PixelFormat.Greyscale)
        ? (FloatMapImage)depthMap.Clone()
        : depthMap.ExtractChannel(0);
}
/// <summary>
/// Lets the user pick a depth map image file and loads it, replacing
/// (and disposing) any previously loaded depth map.
/// </summary>
private void loadDepthMapButton_Click(object sender, EventArgs e)
{
    // The dialog is a disposable Component; the original leaked it.
    using (OpenFileDialog ofd = new OpenFileDialog())
    {
        ofd.Title = "Open depth map";
        ofd.Filter = "PNG Files|*.png"
            + "|PFM Files|*.pfm"
            + "|Bitmap Files|*.bmp"
            + "|Gif Files|*.gif"
            + "|JPEG Files|*.jpg"
            + "|TIFF Files|*.tif"
            + "|All Image types|*.png;*.pfm;*.bmp;*.gif;*.jpg;*.tif";
        ofd.FilterIndex = 7;
        ofd.FileName = "";
        if (ofd.ShowDialog() != DialogResult.OK)
        {
            return;
        }
        if (depthMap != null)
        {
            depthMap.Dispose();
        }
        // Case-insensitive extension check: files named "*.PFM" must also
        // go through the PFM loader (the original ordinal check missed them).
        if (ofd.FileName.EndsWith(".pfm", StringComparison.OrdinalIgnoreCase))
        {
            depthMap = PortableFloatMap.LoadImage(ofd.FileName);
        }
        else
        {
            // LDR formats are decoded by GDI+ and converted to a float map.
            Bitmap depthMapLdr = (Bitmap)Image.FromFile(ofd.FileName);
            depthMap = depthMapLdr.ToFloatMap();
            depthMapLdr.Dispose();
        }
    }
}
/// <summary>
/// Blurs one frame: installs the frame's depth map as the source for the
/// thin-lens blur and runs the spreading filter on the color image.
/// </summary>
/// <param name="color">Color image of the frame.</param>
/// <param name="depth">Depth map driving the per-pixel blur radius.</param>
/// <returns>A newly allocated filtered image.</returns>
private FloatMapImage SpreadFrame(FloatMapImage color, FloatMapImage depth)
{
    thinLensBlur.DepthMap = depth;
    // A null output image makes the filter allocate the result itself.
    return spreadingFilter.FilterImage(color, null);
}
/// <summary>
/// Reads the OpenGL color texture back into an RGB float map image.
/// </summary>
/// <returns>A new RGB image of the texture contents.</returns>
private FloatMapImage GetColorTexture()
{
    int bands = 3; // RGB
    // Temporary unmanaged buffer to receive the texture as floats.
    // The original leaked this allocation on every call; free it in finally.
    IntPtr colorTextureFloatPtr = Marshal.AllocHGlobal(
        Marshal.SizeOf(typeof(float)) * bands * TextureSize.Width * TextureSize.Height);
    try
    {
        GL.BindTexture(TextureTarget.Texture2D, ColorTexture);
        GL.GetTexImage(TextureTarget.Texture2D, 0, OpenTK.Graphics.OpenGL.PixelFormat.Rgb, PixelType.Float, colorTextureFloatPtr);
        GL.BindTexture(TextureTarget.Texture2D, 0);
        FloatMapImage colorImage = new FloatMapImage((uint)TextureSize.Width, (uint)TextureSize.Height, BokehLab.FloatMap.PixelFormat.RGB);
        var image = colorImage.Image;
        unsafe
        {
            int inputStride = bands * TextureSize.Width;
            for (int y = 0; y < TextureSize.Height; y++)
            {
                float* inputRow = (float*)colorTextureFloatPtr + (y * inputStride);
                int xIndex = 0;
                for (int x = 0; x < TextureSize.Width; x++)
                {
                    // RGB triplets are stored contiguously in the row.
                    for (int band = 0; band < bands; band++)
                    {
                        image[x, y, band] = inputRow[xIndex];
                        xIndex++;
                    }
                }
            }
        }
        return colorImage;
    }
    finally
    {
        Marshal.FreeHGlobal(colorTextureFloatPtr);
    }
}
/// <summary>
/// Copies a single channel of the input image into a new greyscale image,
/// preserving the input's scale.
/// </summary>
/// <param name="inputImage">Source image.</param>
/// <param name="band">Zero-based index of the channel to extract.</param>
/// <returns>A new greyscale image.</returns>
public static FloatMapImage ExtractChannel(this FloatMapImage inputImage, int band)
{
    int width = (int)inputImage.Width;
    int height = (int)inputImage.Height;
    FloatMapImage result = new FloatMapImage((uint)width, (uint)height, PixelFormat.Greyscale, inputImage.Scale);
    float[, ,] source = inputImage.Image;
    float[, ,] target = result.Image;
    for (int x = 0; x < width; x++)
    {
        for (int y = 0; y < height; y++)
        {
            target[x, y, 0] = source[x, y, band];
        }
    }
    return result;
}
/// <summary>
/// Creates the blur function: depth-dependent thin-lens blur when a depth
/// map is available, otherwise a constant blur with the radius taken from
/// the UI control.
/// </summary>
/// <param name="depthMap">Optional depth map; may be null.</param>
/// <returns>The blur map to use for filtering.</returns>
private BlurMap CreateBlurFunction(FloatMapImage depthMap)
{
    if (depthMap == null)
    {
        // No depth information - fall back to a uniform blur radius.
        int maxBlurRadius = (int)blurRadiusNumeric.Value;
        return new ConstantBlur(maxBlurRadius - 1);
    }
    thinLensBlur.DepthMap = depthMap;
    return thinLensBlur;
}
/// <summary>
/// Divides one image by another, channel by channel. Only the first
/// channel of the divisor image is used (e.g. a greyscale normalization
/// table).
/// </summary>
/// <param name="inputImage">Dividend image.</param>
/// <param name="divisorImage">Divisor image; only channel 0 is read.</param>
/// <param name="outputImage">Optional target image; when supplied, the
/// processed area is clipped to its dimensions.</param>
/// <param name="inPlace">When no output image is given: if true the input
/// image is overwritten, otherwise a new image is allocated.</param>
/// <returns>The quotient image.</returns>
public static FloatMapImage DivideBy(this FloatMapImage inputImage, FloatMapImage divisorImage, FloatMapImage outputImage, bool inPlace)
{
    uint bands = inputImage.TotalChannelsCount;
    int width = (int)inputImage.Width;
    int height = (int)inputImage.Height;
    if (outputImage == null)
    {
        PrepareOutputImage(inputImage, inPlace, out outputImage);
    }
    else
    {
        // Clip to the caller-supplied output image.
        width = Math.Min(width, (int)outputImage.Width);
        height = Math.Min(height, (int)outputImage.Height);
    }
    float[, ,] source = inputImage.Image;
    float[, ,] target = outputImage.Image;
    float[, ,] div = divisorImage.Image;
    for (int row = 0; row < height; row++)
    {
        for (int col = 0; col < width; col++)
        {
            // Compute the reciprocal once per pixel and reuse it for
            // every channel.
            float factor = 1 / div[col, row, 0];
            for (int band = 0; band < bands; band++)
            {
                target[col, row, band] = source[col, row, band] * factor;
            }
        }
    }
    return outputImage;
}
/// <summary>
/// Divide one image by another. Only the first channel of the divisor
/// image is used.
/// </summary>
/// <param name="inputImage">Dividend image.</param>
/// <param name="divisorImage">Divisor image.</param>
/// <param name="inPlace">if true put the output to the inputImage; otherwise create a new image</param>
/// <returns>The quotient image.</returns>
public static FloatMapImage DivideBy(this FloatMapImage inputImage, FloatMapImage divisorImage, bool inPlace)
{
    // Delegate to the full overload with no caller-supplied output image.
    return DivideBy(inputImage, divisorImage, null, inPlace);
}
/// <summary>
/// Divide one image by another. Create a new image for the output.
/// Only the first channel of the divisor image is used.
/// </summary>
/// <param name="inputImage">Dividend image.</param>
/// <param name="divisorImage">Divisor image.</param>
/// <returns>A newly allocated quotient image.</returns>
public static FloatMapImage DivideBy(this FloatMapImage inputImage, FloatMapImage divisorImage)
{
    // Delegate to the overload that allocates a fresh output image.
    return DivideBy(inputImage, divisorImage, false);
}
/// <summary>
/// Lets the user pick an input image (HDR .pfm or an LDR format), loads it
/// into both the HDR and LDR input slots, and resets the output images and
/// the UI state.
/// </summary>
private void buttonLoad_Click(object sender, EventArgs e)
{
    // The dialog is a disposable Component; the original leaked it.
    using (OpenFileDialog ofd = new OpenFileDialog())
    {
        ofd.Title = "Open Image File";
        ofd.Filter = "PNG Files|*.png"
            + "|PFM Files|*.pfm"
            + "|Bitmap Files|*.bmp"
            + "|Gif Files|*.gif"
            + "|JPEG Files|*.jpg"
            + "|TIFF Files|*.tif"
            + "|All Image types|*.png;*.pfm;*.bmp;*.gif;*.jpg;*.tif";
        ofd.FilterIndex = 7;
        ofd.FileName = "";
        if (ofd.ShowDialog() != DialogResult.OK)
        {
            return;
        }
        // Case-insensitive extension check: "*.PFM" files must also go
        // through the PFM loader (the original ordinal check missed them).
        if (ofd.FileName.EndsWith(".pfm", StringComparison.OrdinalIgnoreCase))
        {
            if (inputHdrImage != null)
            {
                inputHdrImage.Dispose();
            }
            inputHdrImage = PortableFloatMap.LoadImage(ofd.FileName);
            // Derive the LDR preview from the HDR image.
            ReplaceLdrImage(ref inputLdrImage, inputHdrImage.ToBitmap(ToneMappingEnabled));
        }
        else
        {
            // Derive the HDR image from the decoded LDR bitmap.
            ReplaceLdrImage(ref inputLdrImage, (Bitmap)Image.FromFile(ofd.FileName));
            if (inputHdrImage != null)
            {
                inputHdrImage.Dispose();
            }
            inputHdrImage = inputLdrImage.ToFloatMap();
        }
    }
    imageTypeComboBox.SelectedIndex = 0; // TODO: select original better
    updatePictureBoxImage();
    // Any previously filtered output no longer matches the new input.
    if (outputHdrImage != null)
    {
        outputHdrImage.Dispose();
    }
    outputHdrImage = null;
    ReplaceLdrImage(ref outputLdrImage, null);
    imageSizeLabel.Text = String.Format("{0}x{1}", inputHdrImage.Width, inputHdrImage.Height);
}
/// <summary>
/// Returns an image consisting only of the color channels, removing
/// the alpha channel if it was present in the original image.
/// NOTE: when the input has no alpha channel, the SAME instance is
/// returned (no copy is made) - callers must not assume a fresh image.
/// </summary>
/// <param name="inputImage">Source image.</param>
/// <returns>The image with only color channels.</returns>
public static FloatMapImage ExtractColorChannels(this FloatMapImage inputImage)
{
    if (!inputImage.PixelFormat.HasAlpha())
    {
        // Nothing to strip - return the input itself.
        return inputImage;
    }
    int width = (int)inputImage.Width;
    int height = (int)inputImage.Height;
    FloatMapImage colorImage = new FloatMapImage((uint)width, (uint)height, inputImage.PixelFormat.RemoveAlpha(), inputImage.Scale);
    int bands = (int)inputImage.PixelFormat.GetColorChannelsCount();
    for (int y = 0; y < height; y++)
    {
        for (int x = 0; x < width; x++)
        {
            // Copy only the leading color channels; the trailing alpha
            // channel is dropped.
            for (int band = 0; band < bands; band++)
            {
                colorImage.Image[x, y, band] = inputImage.Image[x, y, band];
            }
        }
    }
    return colorImage;
}
/// <summary>
/// Writes the PFM header (signature, dimensions, scale/endianness) for the
/// given image to the stream.
/// </summary>
/// <param name="fs">Destination stream; left open for the pixel data that
/// the caller writes afterwards.</param>
/// <param name="image">Image whose metadata is written.</param>
/// <param name="endianness">Byte order of the pixel data to follow;
/// encoded in the sign of the scale field.</param>
/// <exception cref="ArgumentException">For unsupported pixel formats.</exception>
private static void SaveHeader(FileStream fs, FloatMapImage image, Endianness endianness)
{
    // The writer is deliberately not disposed: disposing it would close
    // the underlying stream which the caller still needs.
    StreamWriter writer = new StreamWriter(fs);
    // write PFM signature with information on the number of color channels
    // RGB, greyscale
    string signature;
    switch (image.PixelFormat)
    {
        case PixelFormat.RGB:
            signature = "PF";
            break;
        case PixelFormat.Greyscale:
            signature = "Pf";
            break;
        // NOTE(review): "PFA"/"PfA" signatures for alpha-carrying images
        // appear to be a project-specific extension rather than standard
        // PFM - confirm against the corresponding loader.
        case PixelFormat.RGBA:
            signature = "PFA";
            break;
        case PixelFormat.GreyscaleA:
            signature = "PfA";
            break;
        default:
            throw new ArgumentException(String.Format("Unsupported pixel format: {0}", image.PixelFormat));
    }
    writer.Write("{0}\n", signature);
    // write image dimensions - width, height
    writer.Write("{0} {1}\n", image.Width, image.Height);
    // write image scale and endianness
    // (a negative scale marks little-endian pixel data)
    float scale = image.Scale;
    if (endianness == Endianness.LittleEndian)
    {
        scale = -scale;
    }
    // The en-US culture guarantees a '.' decimal separator in the header.
    writer.Write(String.Format(CultureInfo.CreateSpecificCulture("en-US"), "{0:F6}\n", scale));
    // Flush so the header bytes precede the pixel data written to fs.
    writer.Flush();
}
/// <summary>
/// Prepares the output image for an image operation: the input image
/// itself when working in place, otherwise a fresh image with the same
/// dimensions and pixel format.
/// </summary>
/// <param name="inputImage">Source image.</param>
/// <param name="inPlace">Whether to reuse the input image as output.</param>
/// <param name="outputImage">The prepared output image.</param>
private static void PrepareOutputImage(FloatMapImage inputImage, bool inPlace, out FloatMapImage outputImage)
{
    if (inPlace)
    {
        outputImage = inputImage;
    }
    else
    {
        outputImage = new FloatMapImage(
            (uint)inputImage.Width,
            (uint)inputImage.Height,
            inputImage.PixelFormat);
    }
}
/// <summary>
/// Reads the OpenGL depth texture back into a greyscale float map image,
/// normalizing the 32-bit unsigned integer depth values into [0; 1].
/// </summary>
/// <returns>A new greyscale image of the depth buffer contents.</returns>
private FloatMapImage GetDepthTexture()
{
    // Allocate a 32-bit uint unmanaged array to grab the depth buffer.
    // The original leaked this allocation on every call; free it in finally.
    IntPtr depthBufferUInt32Ptr = Marshal.AllocHGlobal(
        Marshal.SizeOf(typeof(UInt32)) * TextureSize.Width * TextureSize.Height);
    try
    {
        GL.BindTexture(TextureTarget.Texture2D, DepthTexture);
        GL.GetTexImage(TextureTarget.Texture2D, 0, OpenTK.Graphics.OpenGL.PixelFormat.DepthComponent, PixelType.UnsignedInt, depthBufferUInt32Ptr);
        GL.BindTexture(TextureTarget.Texture2D, 0);
        FloatMapImage depthImage = new FloatMapImage((uint)TextureSize.Width, (uint)TextureSize.Height, BokehLab.FloatMap.PixelFormat.Greyscale);
        var image = depthImage.Image;
        unsafe
        {
            int inputStride = TextureSize.Width;
            // Map the full uint range onto [0; 1].
            float conversionFactor = 1 / (float)UInt32.MaxValue;
            for (int y = 0; y < TextureSize.Height; y++)
            {
                UInt32* inputRow = (UInt32*)depthBufferUInt32Ptr + (y * inputStride);
                for (int x = 0; x < TextureSize.Width; x++)
                {
                    image[x, y, 0] = inputRow[x] * conversionFactor;
                }
            }
        }
        return depthImage;
    }
    finally
    {
        Marshal.FreeHGlobal(depthBufferUInt32Ptr);
    }
}
/// <summary>
/// Builds one pyramid level by combining, for each pixel, the four samples
/// of the previous level spaced 'offset' pixels apart (the pixel itself and
/// its right, bottom and bottom-right neighbors) with the given function.
/// Samples outside the image contribute the neutral defaultValue.
/// </summary>
/// <param name="offset">Neighbor spacing in pixels (2^(i-1)).</param>
/// <param name="i">Level index (currently unused here).</param>
/// <param name="func">Combining function (e.g. Math.Min or Math.Max).</param>
/// <param name="defaultValue">Neutral element used for out-of-range samples.</param>
/// <param name="prevLevel">The previously constructed level.</param>
/// <returns>The newly constructed greyscale level.</returns>
private FloatMapImage ConstructLevel(int offset, int i, Func<float, float, float> func, float defaultValue, FloatMapImage prevLevel)
{
    var level = new FloatMapImage((uint)Width, (uint)Height, PixelFormat.Greyscale);
    float[, ,] dst = level.Image;
    float[, ,] src = prevLevel.Image;
    for (int y = 0; y < Height; y++)
    {
        for (int x = 0; x < Width; x++)
        {
            bool hasRight = x + offset < Width;
            bool hasBottom = y + offset < Height;
            float topLeft = src[x, y, 0];
            float bottomLeft = hasBottom ? src[x, y + offset, 0] : defaultValue;
            float topRight = hasRight ? src[x + offset, y, 0] : defaultValue;
            float bottomRight = (hasRight && hasBottom)
                ? src[x + offset, y + offset, 0]
                : defaultValue;
            dst[x, y, 0] = func(func(topLeft, bottomLeft), func(topRight, bottomRight));
        }
    }
    return level;
}
/// <summary>
/// Returns a new greyscale image consisting only of the alpha channel.
/// </summary>
/// <param name="inputImage">Source image.</param>
/// <returns>The image with the alpha channel, or null if the original
/// image contained no alpha channel.</returns>
public static FloatMapImage ExtractAlphaChannel(this FloatMapImage inputImage)
{
    if (!inputImage.PixelFormat.HasAlpha())
    {
        return null;
    }
    int width = (int)inputImage.Width;
    int height = (int)inputImage.Height;
    FloatMapImage alphaImage = new FloatMapImage((uint)width, (uint)height, PixelFormat.Greyscale, inputImage.Scale);
    // The alpha channel is the last channel of the input image.
    int sourceAlphaBand = (int)inputImage.PixelFormat.GetTotalChannelsCount() - 1;
    float[, ,] source = inputImage.Image;
    float[, ,] target = alphaImage.Image;
    for (int x = 0; x < width; x++)
    {
        for (int y = 0; y < height; y++)
        {
            target[x, y, 0] = source[x, y, sourceAlphaBand];
        }
    }
    return alphaImage;
}
/// <summary>
/// Runs the selected spreading filter on the loaded HDR input image,
/// updates the output images and the preview, and reports the elapsed
/// time. Errors are shown in a message box.
/// </summary>
private void filterImage()
{
    // BUG FIX: the original tested inputLdrImage twice; the second test
    // was clearly meant for inputHdrImage, which is dereferenced below.
    if ((inputLdrImage == null) || (inputHdrImage == null)) return;
    Cursor.Current = Cursors.WaitCursor;
    Stopwatch sw = new Stopwatch();
    sw.Start();
    try
    {
        uint width = inputHdrImage.Width;
        uint height = inputHdrImage.Height;
        // The depth map (if any) must match the input image dimensions.
        if ((depthMap != null) && ((depthMap.Width != width) || (depthMap.Height != height)))
        {
            throw new ArgumentException(String.Format(
                "Depth map must have the same dimensions as the input image" +
                " {0}x{1}, but it's size was {2}x{3}.",
                width, height, depthMap.Width, depthMap.Height));
        }
        AbstractSpreadingFilter filter = GetSpreadingFilter();
        filter.Blur = CreateBlurFunction(depthMap);
        // Reuse the previous output image buffer when possible.
        outputHdrImage = filter.FilterImage(inputHdrImage, outputHdrImage);
        ReplaceLdrImage(ref outputLdrImage, outputHdrImage.ToBitmap(ToneMappingEnabled));
        imageTypeComboBox.SelectedIndex = 1; // TODO: select the filtered image better
        updatePictureBoxImage();
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message + "\n" + ex.StackTrace, "Error");
    }
    sw.Stop();
    labelElapsed.Text = String.Format("Elapsed time: {0:f}s", 1.0e-3 * sw.ElapsedMilliseconds);
    Cursor.Current = Cursors.Default;
}