/// <summary>
/// Builds a resampling span generator over the given source accessor, interpolator and filter LUT.
/// </summary>
/// <param name="src">Accessor for the source image pixels.</param>
/// <param name="inter">Interpolator mapping destination spans back to source coordinates.</param>
/// <param name="filter">Filter lookup table used when sampling.</param>
public span_image_resample(IImageBufferAccessor src, ISpanInterpolator inter, ImageFilterLookUpTable filter)
	: base(src, inter, filter)
{
	// Cap on how far the source may be minified before sampling is clamped.
	m_scale_limit = 20;

	// One subpixel unit on each axis means no extra blur is applied by default.
	int noBlur = (int)image_subpixel_scale_e.image_subpixel_scale;
	m_blur_x = noBlur;
	m_blur_y = noBlur;
}
//--------------------------------------------------------------------
/// <summary>
/// Resampling span generator specialized for 24-bit RGB source images.
/// </summary>
/// <param name="src">Accessor for the source image; its blender must be 24 bits per pixel.</param>
/// <param name="inter">Interpolator mapping destination spans back to source coordinates.</param>
/// <param name="filter">Filter lookup table used when sampling.</param>
/// <exception cref="System.FormatException">The source image blender is not 24 bits per pixel.</exception>
public span_image_resample_rgb(IImageBufferAccessor src, ISpanInterpolator inter, ImageFilterLookUpTable filter)
	: base(src, inter, filter)
{
	// This generator reads pixels as packed RGB, so anything but a 24-bit blender is rejected.
	bool hasRgbBlender = src.SourceImage.GetRecieveBlender().NumPixelBits == 24;
	if (!hasRgbBlender)
	{
		throw new System.FormatException("You have to use a rgb blender with span_image_resample_rgb");
	}
}
/// <summary>
/// Base span generator constructor: stores the source accessor, interpolator and filter LUT,
/// and centers sampling on pixel centers (a half-pixel offset).
/// </summary>
/// <param name="src">Accessor for the source image pixels.</param>
/// <param name="interpolator">Interpolator mapping destination spans back to source coordinates.</param>
/// <param name="filter">Filter lookup table used when sampling.</param>
public span_image_filter(IImageBufferAccessor src, ISpanInterpolator interpolator, ImageFilterLookUpTable filter)
{
	imageBufferAccessor = src;
	m_interpolator = interpolator;
	m_filter = filter;

	// Half-pixel offset in floating point so samples land on pixel centers.
	m_dx_dbl = 0.5;
	m_dy_dbl = 0.5;

	// The same half-pixel offset expressed in fixed-point subpixel units.
	int halfSubpixel = (int)image_subpixel_scale_e.image_subpixel_scale / 2;
	m_dx_int = halfSubpixel;
	m_dy_int = halfSubpixel;
}
/// <summary>
/// Initializes the span filter base with its source, interpolator and filter table,
/// seeding the sampling offsets at the middle of a pixel.
/// </summary>
/// <param name="src">Source image accessor.</param>
/// <param name="interpolator">Destination-to-source coordinate interpolator.</param>
/// <param name="filter">Sampling filter lookup table.</param>
public span_image_filter(IImageBufferAccessor src, ISpanInterpolator interpolator, ImageFilterLookUpTable filter)
{
	imageBufferAccessor = src;
	m_interpolator = interpolator;
	m_filter = filter;

	// Offsets of 0.5 (and the equivalent fixed-point half of the subpixel scale)
	// align samples with pixel centers rather than pixel corners.
	m_dx_dbl = 0.5;
	m_dy_dbl = 0.5;
	m_dx_int = (int)image_subpixel_scale_e.image_subpixel_scale / 2;
	m_dy_int = (int)image_subpixel_scale_e.image_subpixel_scale / 2;
}
/// <summary>
/// Renders m_RotatedImage into m_TempDestImage rotated by <paramref name="angle"/> degrees,
/// clipped to a circle centered in the destination, using the image filter currently
/// selected in filterSelectionButtons.
/// </summary>
/// <param name="angle">Rotation angle in degrees.</param>
private void transform_image(double angle)
{
	double width = m_TempDestImage.Width;
	double height = m_TempDestImage.Height;

#if SourceDepthFloat
	ImageClippingProxyFloat clippedDest = new ImageClippingProxyFloat(m_TempDestImage);
#else
	ImageClippingProxy clippedDest = new ImageClippingProxy(m_TempDestImage);
#endif

	// Start from a white background each frame.
	clippedDest.clear(new ColorF(1.0, 1.0, 1.0));

	// Rotate about the image center: move center to origin, rotate, move back.
	Affine src_mtx = Affine.NewIdentity();
	src_mtx *= Affine.NewTranslation(-width / 2.0, -height / 2.0);
	src_mtx *= Affine.NewRotation(angle * Math.PI / 180.0);
	src_mtx *= Affine.NewTranslation(width / 2.0, height / 2.0);

	// Sampling needs the inverse (destination -> source) transform.
	Affine img_mtx = new Affine(src_mtx);
	img_mtx.invert();

	// Clip-circle radius: half the smaller dimension, inset by 4 pixels.
	double r = width;
	if (height < r)
	{
		r = height;
	}

	r *= 0.5;
	r -= 4.0;
	VertexSource.Ellipse ell = new MatterHackers.Agg.VertexSource.Ellipse(width / 2.0, height / 2.0, r, r, 200);
	VertexSourceApplyTransform tr = new VertexSourceApplyTransform(ell, src_mtx);
	// Accumulate the circle's area so callers can report pixels/second.
	m_num_pix += r * r * Math.PI;

#if SourceDepthFloat
	span_interpolator_linear_float interpolator = new span_interpolator_linear_float(img_mtx);
#else
	span_interpolator_linear interpolator = new span_interpolator_linear(img_mtx);
#endif
	ImageFilterLookUpTable filter = new ImageFilterLookUpTable();
	bool norm = m_normalize.Checked;
#if SourceDepthFloat
	ImageBufferAccessorClipFloat source = new ImageBufferAccessorClipFloat(m_RotatedImage, RGBA_Floats.rgba_pre(0, 0, 0, 0).ToColorF());
#else
	// Reads outside the source image resolve to premultiplied transparent black.
	ImageBufferAccessorClip source = new ImageBufferAccessorClip(m_RotatedImage, ColorF.rgba_pre(0, 0, 0, 0).ToColor());
#endif
	IImageFilterFunction filterFunction = null;
	ScanlineRenderer scanlineRenderer = new ScanlineRenderer();
	switch (filterSelectionButtons.SelectedIndex)
	{
		// 0: nearest neighbor — no filter LUT required.
		case 0:
			{
#if SourceDepthFloat
				span_image_filter_float spanGenerator;
#else
				span_image_filter spanGenerator;
#endif
				switch (source.SourceImage.BitDepth)
				{
					case 24:
#if SourceDepthFloat
						throw new NotImplementedException();
#else
						spanGenerator = new span_image_filter_rgb_nn(source, interpolator);
#endif
						break;

					case 32:
#if SourceDepthFloat
						throw new NotImplementedException();
#else
						spanGenerator = new span_image_filter_rgba_nn(source, interpolator);
#endif
						break;

					default:
						throw new NotImplementedException("only support 24 and 32 bit");
				}

				m_Rasterizer.add_path(tr);
				scanlineRenderer.GenerateAndRender(m_Rasterizer, m_ScanlineUnpacked, clippedDest, m_SpanAllocator, spanGenerator);
			}
			break;

		// 1: bilinear interpolation.
		case 1:
			{
#if SourceDepthFloat
				span_image_filter_float spanGenerator;
#else
				span_image_filter spanGenerator;
#endif
				switch (source.SourceImage.BitDepth)
				{
					case 24:
#if SourceDepthFloat
						throw new NotImplementedException();
#else
						spanGenerator = new span_image_filter_rgb_bilinear(source, interpolator);
#endif
						break;

					case 32:
#if SourceDepthFloat
						throw new NotImplementedException();
#else
						spanGenerator = new span_image_filter_rgba_bilinear(source, interpolator);
#endif
						break;

#if SourceDepthFloat
					case 128:
						spanGenerator = new span_image_filter_rgba_bilinear_float(source, interpolator);
						break;
#endif

					default:
						throw new NotImplementedException("only support 24 and 32 bit");
				}

				m_Rasterizer.add_path(tr);
				scanlineRenderer.GenerateAndRender(m_Rasterizer, m_ScanlineUnpacked, clippedDest, m_SpanAllocator, spanGenerator);
			}
			break;

		// 5-7: 2x2 kernels (hanning / hamming / hermite) via the rgb 2x2 span generator.
		case 5:
		case 6:
		case 7:
			{
				switch (filterSelectionButtons.SelectedIndex)
				{
					case 5: filter.calculate(new image_filter_hanning(), norm); break;
					case 6: filter.calculate(new image_filter_hamming(), norm); break;
					case 7: filter.calculate(new image_filter_hermite(), norm); break;
				}

#if SourceDepthFloat
				throw new NotImplementedException();
#else
				span_image_filter_rgb_2x2 spanGenerator = new span_image_filter_rgb_2x2(source, interpolator, filter);
#endif
				m_Rasterizer.add_path(tr);
#if SourceDepthFloat
				throw new NotImplementedException();
#else
				scanlineRenderer.GenerateAndRender(m_Rasterizer, m_ScanlineUnpacked, clippedDest, m_SpanAllocator, spanGenerator);
#endif
			}
			break;

		// Remaining selections: full-kernel filters built into the LUT.
		case 2:
		case 3:
		case 4:
		case 8:
		case 9:
		case 10:
		case 11:
		case 12:
		case 13:
		case 14:
		case 15:
		case 16:
			{
				switch (filterSelectionButtons.SelectedIndex)
				{
					case 2: filter.calculate(new image_filter_bicubic(), norm); break;
					case 3: filter.calculate(new image_filter_spline16(), norm); break;
					case 4: filter.calculate(new image_filter_spline36(), norm); break;
					case 8: filter.calculate(new image_filter_kaiser(), norm); break;
					case 9: filter.calculate(new image_filter_quadric(), norm); break;
					case 10: filter.calculate(new image_filter_catrom(), norm); break;
					case 11: filter.calculate(new image_filter_gaussian(), norm); break;
					case 12: filter.calculate(new image_filter_bessel(), norm); break;
					case 13: filter.calculate(new image_filter_mitchell(), norm); break;
					case 14: filter.calculate(new image_filter_sinc(m_radius.Value), norm); break;
					case 15: filter.calculate(new image_filter_lanczos(m_radius.Value), norm); break;
					case 16:
						// NOTE(review): filterFunction is only assigned in this case; in the
						// SourceDepthFloat 128-bit branch below it would still be null for
						// cases 2-15 — confirm before enabling that build configuration.
						filterFunction = new image_filter_blackman(m_radius.Value);
						//filterFunction = new image_filter_bilinear();
						filter.calculate(filterFunction, norm);
						break;
				}

#if SourceDepthFloat
				span_image_filter_float spanGenerator;
#else
				span_image_filter spanGenerator;
#endif
				switch (source.SourceImage.BitDepth)
				{
					case 24:
#if SourceDepthFloat
						throw new NotImplementedException();
#else
						spanGenerator = new span_image_filter_rgb(source, interpolator, filter);
#endif
						break;

					case 32:
#if SourceDepthFloat
						throw new NotImplementedException();
#else
						spanGenerator = new span_image_filter_rgba(source, interpolator, filter);
#endif
						break;

#if SourceDepthFloat
					case 128:
						spanGenerator = new span_image_filter_rgba_float(source, interpolator, filterFunction);
						break;
#endif

					default:
						throw new NotImplementedException("only support 24 and 32 bit");
				}

				m_Rasterizer.add_path(tr);
				scanlineRenderer.GenerateAndRender(m_Rasterizer, m_ScanlineUnpacked, clippedDest, m_SpanAllocator, spanGenerator);
			}
			break;
	}
}
/// <summary>
/// Draws <paramref name="source"/> into the destination image at (destX, destY), applying the
/// graphics transform plus the requested rotation and scale. Chooses between a resampled path
/// (bilinear or blackman-filtered, per ImageRenderQuality) and a fast nearest-neighbor path
/// when no resampling is needed.
/// </summary>
/// <param name="source">Image to draw.</param>
/// <param name="destX">Destination x position.</param>
/// <param name="destY">Destination y position.</param>
/// <param name="angleRadians">Rotation in radians; treated as 0 when below ~0.1 degree.</param>
/// <param name="inScaleX">Horizontal scale factor.</param>
/// <param name="inScaleY">Vertical scale factor.</param>
public override void Render(IImageByte source, double destX, double destY, double angleRadians, double inScaleX, double inScaleY)
{
	Affine graphicsTransform = GetTransform();

	// exit early if the dest and source bounds don't touch.
	// TODO: <BUG> make this do rotation and scaling
	// NOTE(review): the early-out below is disabled (//return;), so this currently only
	// computes bounds without acting on them — confirm whether that is intentional.
	RectangleInt sourceBounds = source.GetBounds();
	RectangleInt destBounds = this.destImageByte.GetBounds();
	sourceBounds.Offset((int)(destX + graphicsTransform.tx), (int)(destY + graphicsTransform.ty));
	if (!RectangleInt.DoIntersect(sourceBounds, destBounds))
	{
		if (inScaleX != 1 || inScaleY != 1 || angleRadians != 0)
		{
			//throw new NotImplementedException();
		}
		//return;
	}

	double scaleX = inScaleX;
	double scaleY = inScaleY;

	if (!graphicsTransform.is_identity())
	{
		if (scaleX != 1 || scaleY != 1 || angleRadians != 0)
		{
			//throw new NotImplementedException();
		}

		// Only the translation portion of the graphics transform is applied here.
		graphicsTransform.transform(ref destX, ref destY);
	}

#if false // this is an optimization that eliminates the drawing of images that have their alpha set to all 0 (happens with generated images like explosions).
	MaxAlphaFrameProperty maxAlphaFrameProperty = MaxAlphaFrameProperty::GetMaxAlphaFrameProperty(source);
	if ((maxAlphaFrameProperty.GetMaxAlpha() * color.A_Byte) / 256 <= ALPHA_CHANNEL_BITS_DIVISOR)
	{
		m_OutFinalBlitBounds.SetRect(0, 0, 0, 0);
	}
#endif
	bool isScaled = scaleX != 1 || scaleY != 1;

	// Treat rotations below roughly a tenth of a degree as no rotation at all.
	bool isRotated = true;
	if (Math.Abs(angleRadians) < (0.1 * MathHelper.Tau / 360))
	{
		isRotated = false;
		angleRadians = 0;
	}

	// bool IsMipped = false;
	double sourceOriginOffsetX = source.OriginOffset.X;
	double sourceOriginOffsetY = source.OriginOffset.Y;

	// Mip maps would only help when minifying; computed but unused while the
	// mip-map code below stays compiled out.
	bool canUseMipMaps = isScaled;
	if (scaleX > 0.5 || scaleY > 0.5)
	{
		canUseMipMaps = false;
	}

	// Sampling is required for any scale, rotation, or non-integer destination position.
	bool renderRequriesSourceSampling = isScaled || isRotated || destX != (int)destX || destY != (int)destY;

	// this is the fast drawing path
	if (renderRequriesSourceSampling)
	{
#if false // if the scaling is small enough the results can be improved by using mip maps
		if (CanUseMipMaps)
		{
			CMipMapFrameProperty *pMipMapFrameProperty = CMipMapFrameProperty::GetMipMapFrameProperty(source);
			double OldScaleX = scaleX;
			double OldScaleY = scaleY;
			const CFrameInterface *pMippedFrame = pMipMapFrameProperty.GetMipMapFrame(ref scaleX, ref scaleY);
			if (pMippedFrame != source)
			{
				IsMipped = true;
				source = pMippedFrame;
				sourceOriginOffsetX *= (OldScaleX / scaleX);
				sourceOriginOffsetY *= (OldScaleY / scaleY);
			}

			HotspotOffsetX *= (inScaleX / scaleX);
			HotspotOffsetY *= (inScaleY / scaleY);
		}
#endif
		switch (ImageRenderQuality)
		{
			case TransformQuality.Fastest:
				{
					DrawImageGetDestBounds(source, destX, destY, sourceOriginOffsetX, sourceOriginOffsetY, scaleX, scaleY, angleRadians, out Affine destRectTransform);

					var sourceRectTransform = new Affine(destRectTransform);
					// We invert it because it is the transform to make the image go to the same position as the polygon. LBB [2/24/2004]
					sourceRectTransform.invert();

					span_image_filter spanImageFilter;
					var interpolator = new span_interpolator_linear(sourceRectTransform);
					var sourceAccessor = new ImageBufferAccessorClip(source, ColorF.rgba_pre(0, 0, 0, 0).ToColor());

					// Bilinear with clipping: fast, adequate quality.
					spanImageFilter = new span_image_filter_rgba_bilinear_clip(sourceAccessor, ColorF.rgba_pre(0, 0, 0, 0), interpolator);

					DrawImage(spanImageFilter, destRectTransform);
				}
				break;

			case TransformQuality.Best:
				{
					DrawImageGetDestBounds(source, destX, destY, sourceOriginOffsetX, sourceOriginOffsetY, scaleX, scaleY, angleRadians, out Affine destRectTransform);

					var sourceRectTransform = new Affine(destRectTransform);
					// We invert it because it is the transform to make the image go to the same position as the polygon. LBB [2/24/2004]
					sourceRectTransform.invert();

					var interpolator = new span_interpolator_linear(sourceRectTransform);
					var sourceAccessor = new ImageBufferAccessorClip(source, ColorF.rgba_pre(0, 0, 0, 0).ToColor());

					// spanImageFilter = new span_image_filter_rgba_bilinear_clip(sourceAccessor, RGBA_Floats.rgba_pre(0, 0, 0, 0), interpolator);

					// High quality: blackman windowed-sinc filter with radius 4.
					IImageFilterFunction filterFunction = null;
					filterFunction = new image_filter_blackman(4);
					var filter = new ImageFilterLookUpTable();
					filter.calculate(filterFunction, true);

					span_image_filter spanGenerator = new span_image_filter_rgba(sourceAccessor, interpolator, filter);

					DrawImage(spanGenerator, destRectTransform);
				}
				break;
		}
#if false // this is some debug you can enable to visualize the dest bounding box
		LineFloat(BoundingRect.left, BoundingRect.top, BoundingRect.right, BoundingRect.top, WHITE);
		LineFloat(BoundingRect.right, BoundingRect.top, BoundingRect.right, BoundingRect.bottom, WHITE);
		LineFloat(BoundingRect.right, BoundingRect.bottom, BoundingRect.left, BoundingRect.bottom, WHITE);
		LineFloat(BoundingRect.left, BoundingRect.bottom, BoundingRect.left, BoundingRect.top, WHITE);
#endif
	}
	else // TODO: this can be even faster if we do not use an intermediate buffer
	{
		// No sampling needed: nearest-neighbor step-by-one filters chosen by bit depth.
		DrawImageGetDestBounds(source, destX, destY, sourceOriginOffsetX, sourceOriginOffsetY, scaleX, scaleY, angleRadians, out Affine destRectTransform);

		var sourceRectTransform = new Affine(destRectTransform);
		// We invert it because it is the transform to make the image go to the same position as the polygon. LBB [2/24/2004]
		sourceRectTransform.invert();

		var interpolator = new span_interpolator_linear(sourceRectTransform);
		var sourceAccessor = new ImageBufferAccessorClip(source, ColorF.rgba_pre(0, 0, 0, 0).ToColor());

		span_image_filter spanImageFilter = null;
		switch (source.BitDepth)
		{
			case 32:
				spanImageFilter = new span_image_filter_rgba_nn_stepXby1(sourceAccessor, interpolator);
				break;

			case 24:
				spanImageFilter = new span_image_filter_rgb_nn_stepXby1(sourceAccessor, interpolator);
				break;

			case 8:
				spanImageFilter = new span_image_filter_gray_nn_stepXby1(sourceAccessor, interpolator);
				break;

			default:
				throw new NotImplementedException();
		}
		// spanImageFilter = new span_image_filter_rgba_nn(sourceAccessor, interpolator);

		DrawImage(spanImageFilter, destRectTransform);
		DestImage.MarkImageChanged();
	}
}
/// <summary>
/// Replaces the filter lookup table used for subsequent span generation.
/// </summary>
/// <param name="v">The new filter lookup table.</param>
public void filter(ImageFilterLookUpTable v)
{
	m_filter = v;
}
//--------------------------------------------------------------------
/// <summary>
/// Constructs an RGB resampling span generator; only 24-bit blenders are accepted.
/// </summary>
/// <param name="src">Source image accessor (must have a 24-bit receive blender).</param>
/// <param name="inter">Destination-to-source coordinate interpolator.</param>
/// <param name="filter">Sampling filter lookup table.</param>
/// <exception cref="System.FormatException">The source blender is not 24 bits per pixel.</exception>
public span_image_resample_rgb(IImageBufferAccessor src, ISpanInterpolator inter, ImageFilterLookUpTable filter)
	: base(src, inter, filter)
{
	// Reject anything that is not a packed 24-bit RGB blender up front.
	if (src.SourceImage.GetRecieveBlender().NumPixelBits != 24)
	{
		throw new System.FormatException("You have to use a rgb blender with span_image_resample_rgb");
	}
}
//--------------------------------------------------------------------
/// <summary>
/// 2x2-kernel filtered RGB span generator; all state is held by the base class.
/// </summary>
/// <param name="src">Source image accessor.</param>
/// <param name="inter">Destination-to-source coordinate interpolator.</param>
/// <param name="filter">Sampling filter lookup table.</param>
public span_image_filter_rgb_2x2(IImageBufferAccessor src, ISpanInterpolator inter, ImageFilterLookUpTable filter)
	: base(src, inter, filter)
{
	// Intentionally empty — the base constructor stores everything this type needs.
}
/// <summary>
/// Swaps in a new filter lookup table; takes effect on the next generated span.
/// </summary>
/// <param name="v">Filter lookup table to use from now on.</param>
public void filter(ImageFilterLookUpTable v)
{
	m_filter = v;
}
/// <summary>
/// Draws <paramref name="source"/> into the destination image at (destX, destY) with the
/// requested rotation and scale. Picks a resampled path (bilinear or blackman-filtered,
/// per ImageRenderQuality) when sampling is needed, otherwise a fast nearest-neighbor path.
/// </summary>
/// <param name="source">Image to draw.</param>
/// <param name="destX">Destination x position.</param>
/// <param name="destY">Destination y position.</param>
/// <param name="angleRadians">Rotation in radians; treated as 0 when below ~0.1 degree.</param>
/// <param name="inScaleX">Horizontal scale factor.</param>
/// <param name="inScaleY">Vertical scale factor.</param>
/// <exception cref="NotImplementedException">
/// Scale/rotation combined with an off-screen source or a non-identity graphics transform,
/// or an unsupported source bit depth in the fast path.
/// </exception>
public override void Render(IImageByte source, double destX, double destY, double angleRadians, double inScaleX, double inScaleY)
{
	{
		// exit early if the dest and source bounds don't touch.
		// TODO: <BUG> make this do rotation and scaling
		RectangleInt sourceBounds = source.GetBounds();
		RectangleInt destBounds = this.destImageByte.GetBounds();
		sourceBounds.Offset((int)destX, (int)destY);
		if (!RectangleInt.DoIntersect(sourceBounds, destBounds))
		{
			if (inScaleX != 1 || inScaleY != 1 || angleRadians != 0)
			{
				throw new NotImplementedException();
			}
			return;
		}
	}

	double scaleX = inScaleX;
	double scaleY = inScaleY;

	Affine graphicsTransform = GetTransform();
	if (!graphicsTransform.is_identity())
	{
		if (scaleX != 1 || scaleY != 1 || angleRadians != 0)
		{
			throw new NotImplementedException();
		}

		// Only translation is supported here; it is folded into the dest position.
		graphicsTransform.transform(ref destX, ref destY);
	}

#if false // this is an optimization that eliminates the drawing of images that have their alpha set to all 0 (happens with generated images like explosions).
	MaxAlphaFrameProperty maxAlphaFrameProperty = MaxAlphaFrameProperty::GetMaxAlphaFrameProperty(source);
	if((maxAlphaFrameProperty.GetMaxAlpha() * color.A_Byte) / 256 <= ALPHA_CHANNEL_BITS_DIVISOR)
	{
		m_OutFinalBlitBounds.SetRect(0,0,0,0);
	}
#endif
	bool IsScaled = (scaleX != 1 || scaleY != 1);

	// Rotations below roughly a tenth of a degree are snapped to zero.
	bool IsRotated = true;
	if (Math.Abs(angleRadians) < (0.1 * MathHelper.Tau / 360))
	{
		IsRotated = false;
		angleRadians = 0;
	}

	//bool IsMipped = false;
	double sourceOriginOffsetX = source.OriginOffset.x;
	double sourceOriginOffsetY = source.OriginOffset.y;

	// Computed but unused while the mip-map block below remains compiled out.
	bool CanUseMipMaps = IsScaled;
	if (scaleX > 0.5 || scaleY > 0.5)
	{
		CanUseMipMaps = false;
	}

	// Any scale, rotation, or fractional destination position requires source sampling.
	bool renderRequriesSourceSampling = IsScaled || IsRotated || destX != (int)destX || destY != (int)destY;

	// this is the fast drawing path
	if (renderRequriesSourceSampling)
	{
#if false // if the scaling is small enough the results can be improved by using mip maps
		if(CanUseMipMaps)
		{
			CMipMapFrameProperty* pMipMapFrameProperty = CMipMapFrameProperty::GetMipMapFrameProperty(source);
			double OldScaleX = scaleX;
			double OldScaleY = scaleY;
			const CFrameInterface* pMippedFrame = pMipMapFrameProperty.GetMipMapFrame(ref scaleX, ref scaleY);
			if(pMippedFrame != source)
			{
				IsMipped = true;
				source = pMippedFrame;
				sourceOriginOffsetX *= (OldScaleX / scaleX);
				sourceOriginOffsetY *= (OldScaleY / scaleY);
			}

			HotspotOffsetX *= (inScaleX / scaleX);
			HotspotOffsetY *= (inScaleY / scaleY);
		}
#endif
		switch (ImageRenderQuality)
		{
			case TransformQuality.Fastest:
				{
					Affine destRectTransform;
					DrawImageGetDestBounds(source, destX, destY, sourceOriginOffsetX, sourceOriginOffsetY, scaleX, scaleY, angleRadians, out destRectTransform);

					Affine sourceRectTransform = new Affine(destRectTransform);
					// We invert it because it is the transform to make the image go to the same position as the polygon. LBB [2/24/2004]
					sourceRectTransform.invert();

					span_image_filter spanImageFilter;
					span_interpolator_linear interpolator = new span_interpolator_linear(sourceRectTransform);
					ImageBufferAccessorClip sourceAccessor = new ImageBufferAccessorClip(source, RGBA_Floats.rgba_pre(0, 0, 0, 0).GetAsRGBA_Bytes());

					// Bilinear with clipping: fast path, adequate quality.
					spanImageFilter = new span_image_filter_rgba_bilinear_clip(sourceAccessor, RGBA_Floats.rgba_pre(0, 0, 0, 0), interpolator);

					DrawImage(source, spanImageFilter, destRectTransform);
				}
				break;

			case TransformQuality.Best:
				{
					Affine destRectTransform;
					DrawImageGetDestBounds(source, destX, destY, sourceOriginOffsetX, sourceOriginOffsetY, scaleX, scaleY, angleRadians, out destRectTransform);

					Affine sourceRectTransform = new Affine(destRectTransform);
					// We invert it because it is the transform to make the image go to the same position as the polygon. LBB [2/24/2004]
					sourceRectTransform.invert();

					span_interpolator_linear interpolator = new span_interpolator_linear(sourceRectTransform);
					ImageBufferAccessorClip sourceAccessor = new ImageBufferAccessorClip(source, RGBA_Floats.rgba_pre(0, 0, 0, 0).GetAsRGBA_Bytes());

					//spanImageFilter = new span_image_filter_rgba_bilinear_clip(sourceAccessor, RGBA_Floats.rgba_pre(0, 0, 0, 0), interpolator);

					// High quality: blackman windowed-sinc filter with radius 4.
					IImageFilterFunction filterFunction = null;
					filterFunction = new image_filter_blackman(4);
					ImageFilterLookUpTable filter = new ImageFilterLookUpTable();
					filter.calculate(filterFunction, true);

					span_image_filter spanGenerator = new span_image_filter_rgba(sourceAccessor, interpolator, filter);

					DrawImage(source, spanGenerator, destRectTransform);
				}
				break;
		}
#if false // this is some debug you can enable to visualize the dest bounding box
		LineFloat(BoundingRect.left, BoundingRect.top, BoundingRect.right, BoundingRect.top, WHITE);
		LineFloat(BoundingRect.right, BoundingRect.top, BoundingRect.right, BoundingRect.bottom, WHITE);
		LineFloat(BoundingRect.right, BoundingRect.bottom, BoundingRect.left, BoundingRect.bottom, WHITE);
		LineFloat(BoundingRect.left, BoundingRect.bottom, BoundingRect.left, BoundingRect.top, WHITE);
#endif
	}
	else // TODO: this can be even faster if we do not use an intermediate buffer
	{
		// No sampling needed: nearest-neighbor step-by-one filters chosen by bit depth.
		Affine destRectTransform;
		DrawImageGetDestBounds(source, destX, destY, sourceOriginOffsetX, sourceOriginOffsetY, scaleX, scaleY, angleRadians, out destRectTransform);

		Affine sourceRectTransform = new Affine(destRectTransform);
		// We invert it because it is the transform to make the image go to the same position as the polygon. LBB [2/24/2004]
		sourceRectTransform.invert();

		span_interpolator_linear interpolator = new span_interpolator_linear(sourceRectTransform);
		ImageBufferAccessorClip sourceAccessor = new ImageBufferAccessorClip(source, RGBA_Floats.rgba_pre(0, 0, 0, 0).GetAsRGBA_Bytes());

		span_image_filter spanImageFilter = null;
		switch (source.BitDepth)
		{
			case 32:
				spanImageFilter = new span_image_filter_rgba_nn_stepXby1(sourceAccessor, interpolator);
				break;

			case 24:
				spanImageFilter = new span_image_filter_rgb_nn_stepXby1(sourceAccessor, interpolator);
				break;

			case 8:
				spanImageFilter = new span_image_filter_gray_nn_stepXby1(sourceAccessor, interpolator);
				break;

			default:
				throw new NotImplementedException();
		}
		//spanImageFilter = new span_image_filter_rgba_nn(sourceAccessor, interpolator);

		DrawImage(source, spanImageFilter, destRectTransform);
		DestImage.MarkImageChanged();
	}
}
//--------------------------------------------------------------------
/// <summary>
/// Constructs a 2x2-kernel filtered RGB span generator. No extra state beyond the base.
/// </summary>
/// <param name="src">Source image accessor.</param>
/// <param name="inter">Destination-to-source coordinate interpolator.</param>
/// <param name="filter">Sampling filter lookup table.</param>
public span_image_filter_rgb_2x2(IImageBufferAccessor src, ISpanInterpolator inter, ImageFilterLookUpTable filter)
	: base(src, inter, filter)
{
	// Nothing to do — base class keeps the source, interpolator and filter.
}
//--------------------------------------------------------------------
/// <summary>
/// Filtered RGB span generator; the source must be packed 24-bit (3 bytes per pixel).
/// </summary>
/// <param name="src">Source image accessor; must report 3 bytes between pixels.</param>
/// <param name="inter">Destination-to-source coordinate interpolator.</param>
/// <param name="filter">Sampling filter lookup table.</param>
/// <exception cref="System.NotSupportedException">The source is not 3 bytes per pixel.</exception>
public span_image_filter_rgb(IImageBufferAccessor src, ISpanInterpolator inter, ImageFilterLookUpTable filter)
	: base(src, inter, filter)
{
	// This generator indexes pixel bytes in RGB triples, so anything else would misread.
	bool isPackedRgb = src.SourceImage.GetBytesBetweenPixelsInclusive() == 3;
	if (!isPackedRgb)
	{
		throw new System.NotSupportedException("span_image_filter_rgb must have a 24 bit DestImage");
	}
}
/// <summary>
/// Initializes the resampling base: default scale limit and no extra blur on either axis.
/// </summary>
/// <param name="src">Source image accessor.</param>
/// <param name="inter">Destination-to-source coordinate interpolator.</param>
/// <param name="filter">Sampling filter lookup table.</param>
public span_image_resample(IImageBufferAccessor src, ISpanInterpolator inter, ImageFilterLookUpTable filter)
	: base(src, inter, filter)
{
	// Maximum minification factor before sampling is clamped.
	m_scale_limit = 20;

	// Blur of exactly one subpixel unit per axis == identity (no added blur).
	m_blur_x = (int)image_subpixel_scale_e.image_subpixel_scale;
	m_blur_y = (int)image_subpixel_scale_e.image_subpixel_scale;
}
/// <summary>
/// Demo draw handler: renders "spheres.bmp" through the perspective/affine transform
/// selected by m_trans_type onto a checkerboard background, stroking a timing readout
/// and the draggable quad control on top.
/// </summary>
/// <param name="graphics2D">Target graphics context for this frame.</param>
public override void OnDraw(Graphics2D graphics2D)
{
	ImageBuffer widgetsSubImage = ImageBuffer.NewSubImageReference(graphics2D.DestImage, graphics2D.GetClippingRect());

	// One-time lazy initialization on the first draw.
	if (!didInit)
	{
		didInit = true;
		OnInitialize();
	}

	// Reload the source image whenever the gamma slider changes.
	if (m_gamma.Value != m_old_gamma)
	{
		m_gamma_lut.SetGamma(m_gamma.Value);
		ImageIO.LoadImageData("spheres.bmp", m_SourceImage);
		//m_SourceImage.apply_gamma_dir(m_gamma_lut);
		m_old_gamma = m_gamma.Value;
	}

	// Attach a blender that matches the destination's bit depth.
	ImageBuffer pixf = new ImageBuffer();
	switch (widgetsSubImage.BitDepth)
	{
		case 24:
			pixf.Attach(widgetsSubImage, new BlenderBGR());
			break;

		case 32:
			pixf.Attach(widgetsSubImage, new BlenderBGRA());
			break;

		default:
			throw new NotImplementedException();
	}

	ImageClippingProxy clippingProxy = new ImageClippingProxy(pixf);
	clippingProxy.clear(new RGBA_Floats(1, 1, 1));

	if (m_trans_type.SelectedIndex < 2)
	{
		// For the affine parallelogram transformations we
		// calculate the 4-th (implicit) point of the parallelogram
		m_quad.SetXN(3, m_quad.GetXN(0) + (m_quad.GetXN(2) - m_quad.GetXN(1)));
		m_quad.SetYN(3, m_quad.GetYN(0) + (m_quad.GetYN(2) - m_quad.GetYN(1)));
	}

	ScanlineRenderer scanlineRenderer = new ScanlineRenderer();

	// draw a background to show how the alpha is working
	int RectWidth = 70;
	int xoffset = 50;
	int yoffset = 50;
	for (int i = 0; i < 7; i++)
	{
		for (int j = 0; j < 7; j++)
		{
			// Checkerboard: only the alternating cells are filled.
			if ((i + j) % 2 != 0)
			{
				VertexSource.RoundedRect rect = new VertexSource.RoundedRect(i * RectWidth + xoffset, j * RectWidth + yoffset, (i + 1) * RectWidth + xoffset, (j + 1) * RectWidth + yoffset, 2);
				rect.normalize_radius();
				g_rasterizer.add_path(rect);
				scanlineRenderer.RenderSolid(clippingProxy, g_rasterizer, g_scanline, new RGBA_Bytes(.2, .2, .2));
			}
		}
	}

	//--------------------------
	// Render the "quad" tool and controls
	g_rasterizer.add_path(m_quad);
	scanlineRenderer.RenderSolid(clippingProxy, g_rasterizer, g_scanline, new RGBA_Bytes(0, 0.3, 0.5, 0.1));

	// Prepare the polygon to rasterize. Here we need to fill
	// the destination (transformed) polygon.
	g_rasterizer.SetVectorClipBox(0, 0, Width, Height);
	g_rasterizer.reset();
	int b = 0;
	g_rasterizer.move_to_d(m_quad.GetXN(0) - b, m_quad.GetYN(0) - b);
	g_rasterizer.line_to_d(m_quad.GetXN(1) + b, m_quad.GetYN(1) - b);
	g_rasterizer.line_to_d(m_quad.GetXN(2) + b, m_quad.GetYN(2) + b);
	g_rasterizer.line_to_d(m_quad.GetXN(3) - b, m_quad.GetYN(3) + b);

	//typedef agg::span_allocator<color_type> span_alloc_type;
	span_allocator sa = new span_allocator();
	image_filter_bilinear filter_kernel = new image_filter_bilinear();
	ImageFilterLookUpTable filter = new ImageFilterLookUpTable(filter_kernel, true);

	// Clamp accessor: out-of-range reads return the nearest edge pixel.
	ImageBufferAccessorClamp source = new ImageBufferAccessorClamp(m_SourceImage);

	stopwatch.Restart();

	// NOTE(review): only case 4 is currently implemented in C#; the other cases are the
	// original AGG C++ kept as reference and do nothing.
	switch (m_trans_type.SelectedIndex)
	{
		case 0:
			{
				/*
				agg::trans_affine tr(m_quad.polygon(), g_x1, g_y1, g_x2, g_y2);

				typedef agg::span_interpolator_linear<agg::trans_affine> interpolator_type;
				interpolator_type interpolator(tr);

				typedef image_filter_2x2_type<source_type, interpolator_type> span_gen_type;
				span_gen_type sg(source, interpolator, filter);
				agg::render_scanlines_aa(g_rasterizer, g_scanline, rb_pre, sa, sg);
				*/
				break;
			}

		case 1:
			{
				/*
				agg::trans_affine tr(m_quad.polygon(), g_x1, g_y1, g_x2, g_y2);

				typedef agg::span_interpolator_linear<agg::trans_affine> interpolator_type;
				typedef image_resample_affine_type<source_type> span_gen_type;

				interpolator_type interpolator(tr);
				span_gen_type sg(source, interpolator, filter);
				sg.blur(m_blur.Value);
				agg::render_scanlines_aa(g_rasterizer, g_scanline, rb_pre, sa, sg);
				*/
				break;
			}

		case 2:
			{
				/*
				agg::trans_perspective tr(m_quad.polygon(), g_x1, g_y1, g_x2, g_y2);
				if(tr.is_valid())
				{
					typedef agg::span_interpolator_linear_subdiv<agg::trans_perspective> interpolator_type;
					interpolator_type interpolator(tr);

					typedef image_filter_2x2_type<source_type, interpolator_type> span_gen_type;
					span_gen_type sg(source, interpolator, filter);
					agg::render_scanlines_aa(g_rasterizer, g_scanline, rb_pre, sa, sg);
				}
				*/
				break;
			}

		case 3:
			{
				/*
				agg::trans_perspective tr(m_quad.polygon(), g_x1, g_y1, g_x2, g_y2);
				if(tr.is_valid())
				{
					typedef agg::span_interpolator_trans<agg::trans_perspective> interpolator_type;
					interpolator_type interpolator(tr);

					typedef image_filter_2x2_type<source_type, interpolator_type> span_gen_type;
					span_gen_type sg(source, interpolator, filter);
					agg::render_scanlines_aa(g_rasterizer, g_scanline, rb_pre, sa, sg);
				}
				*/
				break;
			}

		case 4:
			{
				//typedef agg::span_interpolator_persp_lerp<> interpolator_type;
				//typedef agg::span_subdiv_adaptor<interpolator_type> subdiv_adaptor_type;
				span_interpolator_persp_lerp interpolator = new span_interpolator_persp_lerp(m_quad.polygon(), g_x1, g_y1, g_x2, g_y2);
				span_subdiv_adaptor subdiv_adaptor = new span_subdiv_adaptor(interpolator);

				span_image_resample sg = null;
				if (interpolator.is_valid())
				{
					// NOTE(review): sg stays null for bit depths other than 24/32, which would
					// NRE at sg.blur below — confirm inputs are always 24- or 32-bit here.
					switch (source.SourceImage.BitDepth)
					{
						case 24:
							sg = new span_image_resample_rgb(source, subdiv_adaptor, filter);
							break;

						case 32:
							sg = new span_image_resample_rgba(source, subdiv_adaptor, filter);
							break;
					}

					sg.blur(m_blur.Value);
					scanlineRenderer.GenerateAndRender(g_rasterizer, g_scanline, clippingProxy, sa, sg);
				}
				break;
			}

		case 5:
			{
				/*
				typedef agg::span_interpolator_persp_exact<> interpolator_type;
				typedef agg::span_subdiv_adaptor<interpolator_type> subdiv_adaptor_type;

				interpolator_type interpolator(m_quad.polygon(), g_x1, g_y1, g_x2, g_y2);
				subdiv_adaptor_type subdiv_adaptor(interpolator);

				if(interpolator.is_valid())
				{
					typedef image_resample_type<source_type, subdiv_adaptor_type> span_gen_type;
					span_gen_type sg(source, subdiv_adaptor, filter);
					sg.blur(m_blur.Value);
					agg::render_scanlines_aa(g_rasterizer, g_scanline, rb_pre, sa, sg);
				}
				*/
				break;
			}
	}
	double tm = stopwatch.ElapsedMilliseconds;

	//pixf.apply_gamma_inv(m_gamma_lut);

	// Stroke the frame time in the corner.
	gsv_text t = new gsv_text();
	t.SetFontSize(10.0);

	Stroke pt = new Stroke(t);
	pt.width(1.5);

	string buf = string.Format("{0:F2} ms", tm);
	t.start_point(10.0, 70.0);
	t.text(buf);

	g_rasterizer.add_path(pt);
	scanlineRenderer.RenderSolid(clippingProxy, g_rasterizer, g_scanline, new RGBA_Bytes(0, 0, 0));

	//--------------------------
	//m_trans_type.Render(g_rasterizer, g_scanline, clippingProxy);
	//m_gamma.Render(g_rasterizer, g_scanline, clippingProxy);
	//m_blur.Render(g_rasterizer, g_scanline, clippingProxy);
	base.OnDraw(graphics2D);
}
/// <summary>
/// Demo draw handler (Color/ColorF API variant): renders "spheres.bmp" through the
/// transform selected by m_trans_type onto a checkerboard background, then strokes a
/// timing readout and the draggable quad control on top.
/// </summary>
/// <param name="graphics2D">Target graphics context for this frame.</param>
public override void OnDraw(Graphics2D graphics2D)
{
	ImageBuffer widgetsSubImage = ImageBuffer.NewSubImageReference(graphics2D.DestImage, graphics2D.GetClippingRect());

	// One-time lazy initialization on the first draw.
	if (!didInit)
	{
		didInit = true;
		OnInitialize();
	}

	// Reload the source image whenever the gamma slider changes.
	if (m_gamma.Value != m_old_gamma)
	{
		m_gamma_lut.SetGamma(m_gamma.Value);
		ImageIO.LoadImageData("spheres.bmp", m_SourceImage);
		//m_SourceImage.apply_gamma_dir(m_gamma_lut);
		m_old_gamma = m_gamma.Value;
	}

	// Attach a blender that matches the destination's bit depth.
	ImageBuffer pixf = new ImageBuffer();
	switch (widgetsSubImage.BitDepth)
	{
		case 24:
			pixf.Attach(widgetsSubImage, new BlenderBGR());
			break;

		case 32:
			pixf.Attach(widgetsSubImage, new BlenderBGRA());
			break;

		default:
			throw new NotImplementedException();
	}

	ImageClippingProxy clippingProxy = new ImageClippingProxy(pixf);
	clippingProxy.clear(new ColorF(1, 1, 1));

	if (m_trans_type.SelectedIndex < 2)
	{
		// For the affine parallelogram transformations we
		// calculate the 4-th (implicit) point of the parallelogram
		m_quad.SetXN(3, m_quad.GetXN(0) + (m_quad.GetXN(2) - m_quad.GetXN(1)));
		m_quad.SetYN(3, m_quad.GetYN(0) + (m_quad.GetYN(2) - m_quad.GetYN(1)));
	}

	ScanlineRenderer scanlineRenderer = new ScanlineRenderer();

	// draw a background to show how the alpha is working
	int RectWidth = 70;
	int xoffset = 50;
	int yoffset = 50;
	for (int i = 0; i < 7; i++)
	{
		for (int j = 0; j < 7; j++)
		{
			// Checkerboard: only the alternating cells are filled.
			if ((i + j) % 2 != 0)
			{
				VertexSource.RoundedRect rect = new VertexSource.RoundedRect(i * RectWidth + xoffset, j * RectWidth + yoffset, (i + 1) * RectWidth + xoffset, (j + 1) * RectWidth + yoffset, 2);
				rect.normalize_radius();
				g_rasterizer.add_path(rect);
				scanlineRenderer.RenderSolid(clippingProxy, g_rasterizer, g_scanline, new Color(.2, .2, .2));
			}
		}
	}

	//--------------------------
	// Render the "quad" tool and controls
	g_rasterizer.add_path(m_quad);
	scanlineRenderer.RenderSolid(clippingProxy, g_rasterizer, g_scanline, new Color(0, 0.3, 0.5, 0.1));

	// Prepare the polygon to rasterize. Here we need to fill
	// the destination (transformed) polygon.
	g_rasterizer.SetVectorClipBox(0, 0, Width, Height);
	g_rasterizer.reset();
	int b = 0;
	g_rasterizer.move_to_d(m_quad.GetXN(0) - b, m_quad.GetYN(0) - b);
	g_rasterizer.line_to_d(m_quad.GetXN(1) + b, m_quad.GetYN(1) - b);
	g_rasterizer.line_to_d(m_quad.GetXN(2) + b, m_quad.GetYN(2) + b);
	g_rasterizer.line_to_d(m_quad.GetXN(3) - b, m_quad.GetYN(3) + b);

	//typedef agg::span_allocator<color_type> span_alloc_type;
	span_allocator sa = new span_allocator();
	image_filter_bilinear filter_kernel = new image_filter_bilinear();
	ImageFilterLookUpTable filter = new ImageFilterLookUpTable(filter_kernel, true);

	// Clamp accessor: out-of-range reads return the nearest edge pixel.
	ImageBufferAccessorClamp source = new ImageBufferAccessorClamp(m_SourceImage);

	stopwatch.Restart();

	// NOTE(review): only case 4 is currently implemented in C#; the other cases are the
	// original AGG C++ kept as reference and do nothing.
	switch (m_trans_type.SelectedIndex)
	{
		case 0:
			{
				/*
				 * agg::trans_affine tr(m_quad.polygon(), g_x1, g_y1, g_x2, g_y2);
				 *
				 * typedef agg::span_interpolator_linear<agg::trans_affine> interpolator_type;
				 * interpolator_type interpolator(tr);
				 *
				 * typedef image_filter_2x2_type<source_type,
				 * interpolator_type> span_gen_type;
				 * span_gen_type sg(source, interpolator, filter);
				 * agg::render_scanlines_aa(g_rasterizer, g_scanline, rb_pre, sa, sg);
				 */
				break;
			}

		case 1:
			{
				/*
				 * agg::trans_affine tr(m_quad.polygon(), g_x1, g_y1, g_x2, g_y2);
				 *
				 * typedef agg::span_interpolator_linear<agg::trans_affine> interpolator_type;
				 * typedef image_resample_affine_type<source_type> span_gen_type;
				 *
				 * interpolator_type interpolator(tr);
				 * span_gen_type sg(source, interpolator, filter);
				 * sg.blur(m_blur.Value);
				 * agg::render_scanlines_aa(g_rasterizer, g_scanline, rb_pre, sa, sg);
				 */
				break;
			}

		case 2:
			{
				/*
				 * agg::trans_perspective tr(m_quad.polygon(), g_x1, g_y1, g_x2, g_y2);
				 * if(tr.is_valid())
				 * {
				 * typedef agg::span_interpolator_linear_subdiv<agg::trans_perspective> interpolator_type;
				 * interpolator_type interpolator(tr);
				 *
				 * typedef image_filter_2x2_type<source_type,
				 * interpolator_type> span_gen_type;
				 * span_gen_type sg(source, interpolator, filter);
				 * agg::render_scanlines_aa(g_rasterizer, g_scanline, rb_pre, sa, sg);
				 * }
				 */
				break;
			}

		case 3:
			{
				/*
				 * agg::trans_perspective tr(m_quad.polygon(), g_x1, g_y1, g_x2, g_y2);
				 * if(tr.is_valid())
				 * {
				 * typedef agg::span_interpolator_trans<agg::trans_perspective> interpolator_type;
				 * interpolator_type interpolator(tr);
				 *
				 * typedef image_filter_2x2_type<source_type,
				 * interpolator_type> span_gen_type;
				 * span_gen_type sg(source, interpolator, filter);
				 * agg::render_scanlines_aa(g_rasterizer, g_scanline, rb_pre, sa, sg);
				 * }
				 */
				break;
			}

		case 4:
			{
				//typedef agg::span_interpolator_persp_lerp<> interpolator_type;
				//typedef agg::span_subdiv_adaptor<interpolator_type> subdiv_adaptor_type;
				span_interpolator_persp_lerp interpolator = new span_interpolator_persp_lerp(m_quad.polygon(), g_x1, g_y1, g_x2, g_y2);
				span_subdiv_adaptor subdiv_adaptor = new span_subdiv_adaptor(interpolator);

				span_image_resample sg = null;
				if (interpolator.is_valid())
				{
					// NOTE(review): sg stays null for bit depths other than 24/32, which would
					// NRE at sg.blur below — confirm inputs are always 24- or 32-bit here.
					switch (source.SourceImage.BitDepth)
					{
						case 24:
							sg = new span_image_resample_rgb(source, subdiv_adaptor, filter);
							break;

						case 32:
							sg = new span_image_resample_rgba(source, subdiv_adaptor, filter);
							break;
					}

					sg.blur(m_blur.Value);
					scanlineRenderer.GenerateAndRender(g_rasterizer, g_scanline, clippingProxy, sa, sg);
				}
				break;
			}

		case 5:
			{
				/*
				 * typedef agg::span_interpolator_persp_exact<> interpolator_type;
				 * typedef agg::span_subdiv_adaptor<interpolator_type> subdiv_adaptor_type;
				 *
				 * interpolator_type interpolator(m_quad.polygon(), g_x1, g_y1, g_x2, g_y2);
				 * subdiv_adaptor_type subdiv_adaptor(interpolator);
				 *
				 * if(interpolator.is_valid())
				 * {
				 * typedef image_resample_type<source_type,
				 * subdiv_adaptor_type> span_gen_type;
				 * span_gen_type sg(source, subdiv_adaptor, filter);
				 * sg.blur(m_blur.Value);
				 * agg::render_scanlines_aa(g_rasterizer, g_scanline, rb_pre, sa, sg);
				 * }
				 */
				break;
			}
	}
	double tm = stopwatch.ElapsedMilliseconds;

	//pixf.apply_gamma_inv(m_gamma_lut);

	// Stroke the frame time in the corner.
	gsv_text t = new gsv_text();
	t.SetFontSize(10.0);

	Stroke pt = new Stroke(t);
	pt.Width = 1.5;

	string buf = string.Format("{0:F2} ms", tm);
	t.start_point(10.0, 70.0);
	t.text(buf);

	g_rasterizer.add_path(pt);
	scanlineRenderer.RenderSolid(clippingProxy, g_rasterizer, g_scanline, new Color(0, 0, 0));

	//--------------------------
	//m_trans_type.Render(g_rasterizer, g_scanline, clippingProxy);
	//m_gamma.Render(g_rasterizer, g_scanline, clippingProxy);
	//m_blur.Render(g_rasterizer, g_scanline, clippingProxy);
	base.OnDraw(graphics2D);
}
//--------------------------------------------------------------------
/// <summary>
/// Constructs a filtered RGB span generator over a packed 24-bit source.
/// </summary>
/// <param name="src">Source image accessor; must be 3 bytes per pixel.</param>
/// <param name="inter">Destination-to-source coordinate interpolator.</param>
/// <param name="filter">Sampling filter lookup table.</param>
/// <exception cref="System.NotSupportedException">The source is not 3 bytes per pixel.</exception>
public span_image_filter_rgb(IImageBufferAccessor src, ISpanInterpolator inter, ImageFilterLookUpTable filter)
	: base(src, inter, filter)
{
	// Guard against non-RGB layouts — byte indexing below assumes RGB triples.
	if (src.SourceImage.GetBytesBetweenPixelsInclusive() != 3)
	{
		throw new System.NotSupportedException("span_image_filter_rgb must have a 24 bit DestImage");
	}
}