/// <summary>
/// Handles a click on one of the render-size menu items: renders the currently
/// selected scene at the size stored in the item's Tag and shows the result.
/// </summary>
private void RenderMenu_Click(object sender, EventArgs e)
{
    // Disable the render menu while rendering so a second render
    // cannot be started concurrently.
    RenderMenu.Enabled = false;
    try
    {
        // The render size is stored as an object in the clicked menu item's Tag property.
        int size = int.Parse((sender as ToolStripMenuItem).Tag.ToString());

        // Create a new Bitmap object where we will render to.
        _bitmap = new Bitmap(size, size);

        // Update the size and location of the picture box that will show the final result.
        RenderedImage.Size = new Size(size, size);
        RenderedImage.BackgroundImage = null;
        CenterRenderedImage();

        // Print out a "please wait" message on the picture box.
        using (Graphics g = RenderedImage.CreateGraphics())
        {
            g.FillRectangle(SystemBrushes.Control, 0, 0, RenderedImage.Width, RenderedImage.Height);
            g.DrawString("Rendering.\r\nPlease wait...", this.Font, SystemBrushes.WindowText, 0f, 0f);
        }

        // Create the ray tracer and render the scene (synchronously, on the UI thread).
        RayTracerEngine rayTracer = new RayTracerEngine(_bitmap.Width, _bitmap.Height);
        RenderedImage.BackgroundImage = rayTracer.Render(_scenes.GetByName(_selectedScene));
    }
    finally
    {
        // BUG FIX: re-enable the menu even when rendering throws; previously an
        // exception left the render menu permanently disabled.
        RenderMenu.Enabled = true;
    }
}
/**
 * @see Graphics2D#drawRenderedImage(RenderedImage, AffineTransform)
 */
public void drawRenderedImage(RenderedImage img, AffineTransform xform)
{
    BufferedImage image = null;
    if (img is BufferedImage)
    {
        // Already a BufferedImage — draw it directly.
        image = (BufferedImage)img;
    }
    else
    {
        // Materialize the generic RenderedImage into a BufferedImage by copying
        // its color model, properties and pixel data into a compatible raster.
        ColorModel cm = img.getColorModel();
        int width = img.getWidth();
        int height = img.getHeight();
        WritableRaster raster = cm.createCompatibleWritableRaster(width, height);
        boolean isAlphaPremultiplied = cm.isAlphaPremultiplied();
        Hashtable properties = new Hashtable();
        String[] keys = img.getPropertyNames();
        if (keys != null)
        {
            for (int i = 0; i < keys.length; i++)
            {
                properties.put(keys[i], img.getProperty(keys[i]));
            }
        }
        BufferedImage result = new BufferedImage(cm, raster, isAlphaPremultiplied, properties);
        img.copyData(raster);
        // BUG FIX: the converted image was previously discarded (`result` was never
        // assigned to `image`), so drawImage was called with null for any source
        // that was not already a BufferedImage.
        image = result;
    }
    drawImage(image, xform, null);
}
/// <summary> /// Creates a RenderedImage which represents this /// RenderableImageOp (including its Renderable sources) rendered /// according to the given RenderContext. /// /// <para> This method supports chaining of either Renderable or /// RenderedImage operations. If sources in /// the ParameterBlock used to construct the RenderableImageOp are /// RenderableImages, then a three step process is followed: /// /// <ol> /// <li> mapRenderContext() is called on the associated CRIF for /// each RenderableImage source; /// <li> createRendering() is called on each of the RenderableImage sources /// using the backwards-mapped RenderContexts obtained in step 1, /// resulting in a rendering of each source; /// <li> ContextualRenderedImageFactory.create() is called /// with a new ParameterBlock containing the parameters of /// the RenderableImageOp and the RenderedImages that were created by the /// createRendering() calls. /// </ol> /// /// </para> /// <para> If the elements of the source Vector of /// the ParameterBlock used to construct the RenderableImageOp are /// instances of RenderedImage, then the CRIF.create() method is /// called immediately using the original ParameterBlock. /// This provides a basis case for the recursion. /// /// </para> /// <para> The created RenderedImage may have a property identified /// by the String HINTS_OBSERVED to indicate which RenderingHints /// (from the RenderContext) were used to create the image. /// In addition any RenderedImages /// that are obtained via the getSources() method on the created /// RenderedImage may have such a property. /// /// </para> /// </summary> /// <param name="renderContext"> The RenderContext to use to perform the rendering. </param> /// <returns> a RenderedImage containing the desired output image. 
/// </returns>
public virtual RenderedImage CreateRendering(RenderContext renderContext)
{
    // Work on a clone of the original ParameterBlock: any RenderableImage
    // sources it contains are replaced below by their renderings.
    ParameterBlock renderedBlock = (ParameterBlock)ParamBlock.Clone();
    ArrayList renderables = RenderableSources;

    try
    {
        // No renderable sources means the ParameterBlock already holds rendered
        // sources, so the CRIF can be invoked directly (recursion base case).
        if (renderables != null)
        {
            ArrayList renderings = new ArrayList();
            int count = renderables.Count;
            for (int index = 0; index < count; index++)
            {
                // Backwards-map the render context through the CRIF for this
                // source, then render the source with the mapped context.
                RenderContext mapped = MyCRIF.MapRenderContext(index, renderContext, ParamBlock, this);
                RenderedImage rendering = ((RenderableImage)renderables[index]).CreateRendering(mapped);
                if (rendering == null)
                {
                    return(null);
                }
                // Collect the rendering for the ParameterBlock's source list.
                renderings.Add(rendering);
            }
            if (renderings.Count > 0)
            {
                renderedBlock.Sources = renderings;
            }
        }
        return(MyCRIF.Create(renderContext, renderedBlock));
    }
    catch (ArrayIndexOutOfBoundsException)
    {
        // This should never happen.
        return(null);
    }
}
/// <summary>
/// Copies the latest frame from <c>NodeModel.Data</c> into the Bgr32
/// <see cref="RenderedImage"/> WriteableBitmap, recreating the bitmap when the
/// frame size changes.
/// </summary>
/// <param name="message">The tick message that triggered this update.</param>
public override void Handle(TickRenderedMessage message)
{
    base.Handle(message);

    // Nothing rendered yet.
    if (NodeModel.Data == null)
    {
        return;
    }

    int width = NodeModel.Data.Size.Width;
    int height = NodeModel.Data.Size.Height;
    if (width == 0 || height == 0)
    {
        return;
    }

    // (Re)create the target bitmap when it does not exist yet or the frame
    // size changed since the last tick.
    if (RenderedImage == null || width != widthOld || height != heightOld)
    {
        RenderedImage = new WriteableBitmap(width, height, 96, 96, PixelFormats.Bgr32, null);
        widthOld = width;
        heightOld = height;
        NotifyOfPropertyChange(() => RenderedImage);
    }

    RenderedImage.Lock();
    try
    {
        unsafe
        {
            int *backBuffer = (int *)RenderedImage.BackBuffer;
            for (int y = 0; y < height; y++)
            {
                for (int x = 0; x < width; x++)
                {
                    // Compute colors in pixel format. That is sRGB:
                    // MSDN: Bgr32 is a sRGB format with 32 bits per pixel (BPP).
                    // Each color channel (blue, green, and red) is allocated 8 bits per pixel (BPP).
                    int rgb = ((NodeModel.Data[x, y].R << 16) |
                               (NodeModel.Data[x, y].G << 8) |
                               (NodeModel.Data[x, y].B));
                    // Set the pixel at the current position to the BGR of the frame.
                    *backBuffer++ = rgb;
                }
            }
        }
        RenderedImage.AddDirtyRect(new Int32Rect(0, 0, width, height));
    }
    finally
    {
        // BUG FIX: always unlock, even if copying pixels throws; otherwise the
        // bitmap's back buffer would remain locked forever.
        RenderedImage.Unlock();
    }
}
/// <summary>
/// Queues an asynchronous send of the given frame: on a pool thread the GL
/// context is made current, the frame is decoded, and the decoded pixels are
/// pushed to the sender as RGBA.
/// NOTE(review): the shared 'buffer' field and the GL context are touched from
/// the queued work item — assumes _sendPool serializes work items; confirm.
/// </summary>
/// <param name="image">The rendered frame to decode and send.</param>
public unsafe void Send(RenderedImage image)
{
    _sendPool.QueueUserWorkItem(() =>
    {
        // The GL context must be current on the worker thread before decoding.
        _deviceContext.MakeCurrent(_glContext);
        // Decode returns the decoded frame plus a (possibly reallocated)
        // scratch buffer, which replaces the shared one for the next call.
        var(decoded, nBuffer) = AssRenderCore.Decode(buffer, image);
        buffer = nBuffer;
        // Pin the decoded pixels while handing them to the native sender.
        fixed(byte *unmanaged = decoded.Buffer)
        {
            _sender.SendImage(
                unmanaged,
                _width,
                _height,
                Gl.RGBA,
                false,
                0);
        }
    });
}
/// <summary>
/// Renders a <A HREF="../../java/awt/image/RenderedImage.html" title="interface in java.awt.image"><CODE>RenderedImage</CODE></A>,
/// applying a transform from image
/// space into user space before drawing.
/// </summary>
/// <param name="img">The image to be rendered.</param>
/// <param name="xform">The transformation from image space into user space.</param>
abstract public void drawRenderedImage(RenderedImage @img, AffineTransform @xform);
/// <summary>
/// Causes a full render of the frame with the current parameters. The <see cref="RenderedImage"/> is updated with the new
/// rendered frame.
/// </summary>
/// <param name="frameNum">If this is a multiframe image, this determines which frame is rendered. If this is a single frame
/// image, this parameter is ignored.</param>
/// <param name="filter">True for bilinear filtering (slower but prettier), false for nearest neighbor filtering (faster but ugly).</param>
public void RenderFrame(int frameNum, bool filter)
{
    int dataOffset, dataLength;
    // Raw source bytes plus the offset/length of the requested frame within them.
    byte[] sourceData = GetFrameData(frameNum, out dataOffset, out dataLength);
    float pixelPitch = GetPixelPitch();
    // Source-space coordinate of the top-left output pixel (centering/pan applied).
    float topleftX = -pixelPitch * (size.Width / 2f) - CenterX + imWidth / 2f;
    float topleftY = -pixelPitch * (size.Height / 2f) - CenterY + imHeight / 2f;
    if (filter) {
        // Half-pixel shift so bilinear sampling is centered between texels.
        topleftX -= 0.5f;
        topleftY -= 0.5f;
    }
    // Write directly into the output bitmap's 32bpp buffer.
    BitmapData bdata = RenderedImage.LockBits(new Rectangle(Point.Empty, RenderedImage.Size), ImageLockMode.WriteOnly, PixelFormat.Format32bppPArgb);
    unsafe {
        fixed(byte *byteData = &sourceData[dataOffset]) {
            fixed(byte *p1 = palettes[0], p2 = palettes[1], p3 = palettes[2]) {
                fixed(ushort *p16r = palettes16[0], p16g = palettes16[1], p16b = palettes16[2]) {
                    // Per-channel palette lookup tables (8-bit and 16-bit variants).
                    byte *[] pal8 = new byte *[3] { (byte *)p1, (byte *)p2, (byte *)p3 };
                    ushort *[] pal16 = new ushort *[3] { p16r, p16g, p16b };
                    // 16-bit view over the same source bytes for 2-byte samples.
                    short *shortData = (short *)byteData;
                    // Size in samples of one colour plane (used by planar RGB below).
                    int frameOffset = imWidth * imHeight * bytes;
                    //BS vars to init for filtering later
                    float yf = 0, w1 = 0, w2 = 0, w3 = 0, w4 = 0; // bilinear fractions/weights
                    ushort psrc10 = 0, psrc01 = 0, psrc11 = 0;    // neighbour palette indices
                    int pX1 = 0, pY1 = 0;                         // clamped neighbour coordinates
                    float yCoord = topleftY;
                    for (int y = 0; y < size.Height; y++, yCoord += pixelPitch) {
                        byte *pBMPData = (byte *)bdata.Scan0 + y * bdata.Stride;
                        int pY = (int)yCoord;
                        if (filter) {
                            yf = yCoord - pY;
                            pY1 = pY + 1;
                            if (pY1 >= imHeight) { pY1 = pY; } // clamp at bottom edge
                        }
                        float xCoord = topleftX;
                        for (int x = 0; x < size.Width; x++, xCoord += pixelPitch) {
                            // Default every output pixel to opaque black before sampling.
                            *((uint *)(pBMPData + 4 * x)) = 0xFF000000;
                            int pX = (int)xCoord;
                            // Outside the source image: leave the pixel black.
                            if (pX < 0 || pX >= imWidth || pY < 0 || pY >= imHeight) { continue; }
                            if (filter) {
                                float xf = xCoord - pX;
                                // Bilinear weights for the 2x2 neighbourhood.
                                w1 = (1.0f - xf) * (1.0f - yf);
                                w2 = (xf) * (1.0f - yf);
                                w3 = (1.0f - xf) * (yf);
                                w4 = (xf) * (yf);
                                pX1 = pX + 1;
                                if (pX1 >= imWidth) { pX1 = pX; } // clamp at right edge
                            }
                            //Check for buffer overruns using the worst case scenario
                            if (filter) {
                                //Lower right pixel for filter
                                if (bytes * (pY1 * imWidth + pX1) >= dataLength) { continue; }
                            } else {
                                if (bytes * (pY * imWidth + pX) >= dataLength) { continue; }
                            }
                            if (bPalette) {
                                //palette lookup. 1 byte -> 3 after lookup
                                // NOTE(review): psrc and friends are ushort, so the `< 0` guards
                                // below can never fire; underflow wraps to a large value and is
                                // caught by the upper clamp instead — confirm this is intended.
                                ushort psrc = (ushort)(((bytes == 2) ? (ushort)shortData[pY * imWidth + pX] : byteData[pY * imWidth + pX]) - paletteFirstEntry);
                                if (psrc < 0) { psrc = 0; } else if (psrc >= paletteNumEntries) { psrc = (ushort)(paletteNumEntries - 1); }
                                if (filter) {
                                    // Palette indices of the other three neighbours, clamped identically.
                                    psrc10 = (ushort)(((bytes == 2) ? shortData[pY * imWidth + pX1] : byteData[pY * imWidth + pX1]) - paletteFirstEntry);
                                    if (psrc10 < 0) { psrc10 = 0; } else if (psrc10 >= paletteNumEntries) { psrc10 = (ushort)(paletteNumEntries - 1); }
                                    psrc01 = (ushort)(((bytes == 2) ? shortData[(pY1) * imWidth + pX] : byteData[(pY1) * imWidth + pX]) - paletteFirstEntry);
                                    if (psrc01 < 0) { psrc01 = 0; } else if (psrc01 >= paletteNumEntries) { psrc01 = (ushort)(paletteNumEntries - 1); }
                                    psrc11 = (ushort)(((bytes == 2) ? shortData[(pY1) * imWidth + pX1] : byteData[(pY1) * imWidth + pX1]) - paletteFirstEntry);
                                    if (psrc11 < 0) { psrc11 = 0; } else if (psrc11 >= paletteNumEntries) { psrc11 = (ushort)(paletteNumEntries - 1); }
                                }
                                // Look up each channel, optionally blend, then rescale/window and store.
                                for (int b = 0; b < 3; b++) {
                                    int src = palette16 ? pal16[b][psrc] : pal8[b][psrc];
                                    if (filter) {
                                        int src10 = palette16 ? pal16[b][psrc10] : pal8[b][psrc10];
                                        int src01 = palette16 ? pal16[b][psrc01] : pal8[b][psrc01];
                                        int src11 = palette16 ? pal16[b][psrc11] : pal8[b][psrc11];
                                        src = (int)((src * w1) + (src10 * w2) + (src01 * w3) + (src11 * w4));
                                    }
                                    if (bRescaling) { src = (int)(src * adjM + adjB); } // linear rescale
                                    src = (int)((src - Level) * (255f / Window) + 128); // window/level to 8-bit
                                    if (src < 0) { src = 0; } else if (src > 255) { src = 255; }
                                    if (bFlipMono) { src = (255 - src); } // inverted monochrome
                                    pBMPData[4 * x + (2 - b)] = (byte)src; // store as B,G,R byte order
                                }
                            } else if (bRGB) {
                                if (bPlanarOne) {
                                    //RRR GGG BBB
                                    for (int b = 0; b < 3; b++) {
                                        int src = byteData[frameOffset * b + pY * imWidth + pX];
                                        if (filter) {
                                            int src10 = byteData[frameOffset * b + pY * imWidth + pX1];
                                            int src01 = byteData[frameOffset * b + pY1 * imWidth + pX];
                                            int src11 = byteData[frameOffset * b + pY1 * imWidth + pX1];
                                            src = (int)((src * w1) + (src10 * w2) + (src01 * w3) + (src11 * w4));
                                        }
                                        if (bRescaling) { src = (int)(src * adjM + adjB); }
                                        src = (int)((src - Level) * (255f / Window) + 128);
                                        if (src < 0) { src = 0; } else if (src > 255) { src = 255; }
                                        if (bFlipMono) { src = (255 - src); }
                                        pBMPData[4 * x + (2 - b)] = (byte)src;
                                    }
                                } else {
                                    //RGB RGB
                                    for (int b = 0; b < 3; b++) {
                                        int src = byteData[3 * (pY * imWidth + pX) + b];
                                        if (filter) {
                                            int src10 = byteData[3 * (pY * imWidth + pX1) + b];
                                            int src01 = byteData[3 * (pY1 * imWidth + pX) + b];
                                            int src11 = byteData[3 * (pY1 * imWidth + pX1) + b];
                                            src = (int)((src * w1) + (src10 * w2) + (src01 * w3) + (src11 * w4));
                                        }
                                        if (bRescaling) { src = (int)(src * adjM + adjB); }
                                        src = (int)((src - Level) * (255f / Window) + 128);
                                        if (src < 0) { src = 0; } else if (src > 255) { src = 255; }
                                        if (bFlipMono) { src = (255 - src); }
                                        pBMPData[4 * x + (2 - b)] = (byte)src;
                                    }
                                }
                            } else {
                                //grayscale
                                int src = (bytes == 2) ? (signedData ? shortData[pY * imWidth + pX] : (int)(ushort)shortData[pY * imWidth + pX]) : byteData[pY * imWidth + pX];
                                if (filter) {
                                    int src10 = (bytes == 2) ? (signedData ? shortData[pY * imWidth + pX1] : (int)(ushort)shortData[pY * imWidth + pX1]) : byteData[pY * imWidth + pX1];
                                    int src01 = (bytes == 2) ? (signedData ? shortData[(pY1) * imWidth + pX] : (int)(ushort)shortData[(pY1) * imWidth + pX]) : byteData[(pY1) * imWidth + pX];
                                    int src11 = (bytes == 2) ? (signedData ? shortData[(pY1) * imWidth + pX1] : (int)(ushort)shortData[(pY1) * imWidth + pX1]) : byteData[(pY1) * imWidth + pX1];
                                    src = (int)((src * w1) + (src10 * w2) + (src01 * w3) + (src11 * w4));
                                }
                                if (bRescaling) { src = (int)(src * adjM + adjB); }
                                src = (int)((src - Level) * (255f / Window) + 128);
                                if (src < 0) { src = 0; } else if (src > 255) { src = 255; }
                                if (bFlipMono) { src = (255 - src); }
                                // Replicate the gray value into the B, G and R bytes.
                                pBMPData[4 * x] = (byte)src;
                                pBMPData[4 * x + 1] = (byte)src;
                                pBMPData[4 * x + 2] = (byte)src;
                            }
                        }
                    }
                }
            }
        }
    }
    RenderedImage.UnlockBits(bdata);
}
/// <summary>
/// Draws a RenderedImage by first materializing it into a BufferedImage and
/// then delegating to DrawImage with the given transform.
/// </summary>
/// <param name="renderedimage">The source image to draw.</param>
/// <param name="affinetransform">Transform from image space into user space.</param>
public void DrawRenderedImage(RenderedImage renderedimage, AffineTransform affinetransform)
{
    // Fetch the pixel data once; the original called GetData() twice, which
    // typically produces two independent raster copies.
    var data = renderedimage.GetData();
    BufferedImage bufferedimage = new BufferedImage(renderedimage.GetColorModel(), data.CreateCompatibleWritableRaster(), false, null);
    bufferedimage.SetData(data);
    DrawImage(bufferedimage, affinetransform, null);
}
/**
 * Renders a
 * {@link RenderableImage},
 * applying a transform from image space into user space before drawing.
 * The transformation from user space into device space is done with
 * the current <code>Transform</code> in the <code>Graphics2D</code>.
 * The specified transformation is applied to the image before the
 * transform attribute in the <code>Graphics2D</code> context is applied.
 * The rendering attributes applied include the <code>Clip</code>,
 * <code>Transform</code>, and <code>Composite</code> attributes. Note
 * that no rendering is done if the specified transform is
 * noninvertible.
 *<p>
 * Rendering hints set on the <code>Graphics2D</code> object might
 * be used in rendering the <code>RenderableImage</code>.
 * If explicit control is required over specific hints recognized by a
 * specific <code>RenderableImage</code>, or if knowledge of which hints
 * are used is required, then a <code>RenderedImage</code> should be
 * obtained directly from the <code>RenderableImage</code>
 * and rendered using
 *{@link #drawRenderedImage(RenderedImage, AffineTransform) DrawRenderedImage}.
 * @param img the image to be rendered. This method does
 * nothing if <code>img</code> is null.
 * @param xform the transformation from image space into user space
 * @see #_transform
 * @see #setTransform
 * @see #setComposite
 * @see #clip
 * @see #setClip
 * @see #drawRenderedImage
 */
public void DrawRenderedImage(RenderedImage img, AffineTransform xform)
{
    // Stub: this backend does not support drawing RenderedImages; a warning
    // is logged and the call is otherwise a no-op.
    log.log(POILogger.WARN, "Not implemented");
}