/// <summary>
/// Saves the specified color data as an image with the specified format.
/// </summary>
/// <param name="data">Pixel data, one Color per pixel, row-major (width * height entries).</param>
/// <param name="width">Image width in pixels.</param>
/// <param name="height">Image height in pixels.</param>
/// <param name="stream">Destination stream that receives the encoded bytes.</param>
/// <param name="asPng">true to encode as PNG; false to encode as JPEG.</param>
private void Save(Color[] data, Int32 width, Int32 height, Stream stream, Boolean asPng)
{
    // 8 bits per sample, 4 samples (RGBA), alpha enabled, interleaved (not planar).
    using (var rep = new NSBitmapImageRep(IntPtr.Zero, width, height, 8, 4, true, false, "NSCalibratedRGBColorSpace", 0, 0))
    {
        fixed (Color* pData = data)
        {
            for (int y = 0; y < height; y++)
            {
                var pSrc = pData + (y * width);
                // Destination rows are addressed via BytesPerRow, which may include padding.
                var pDst = (Byte*)rep.BitmapData + (y * rep.BytesPerRow);
                for (int x = 0; x < width; x++)
                {
                    var color = *pSrc++;
                    *pDst++ = color.R;
                    *pDst++ = color.G;
                    *pDst++ = color.B;
                    *pDst++ = color.A;
                }
            }
        }

        var filetype = asPng ? NSBitmapImageFileType.Png : NSBitmapImageFileType.Jpeg;
        // FIX: dispose the properties dictionary deterministically instead of
        // leaving the native NSDictionary to the GC/finalizer.
        using (var properties = new NSDictionary())
        using (var imgData = rep.RepresentationUsingTypeProperties(filetype, properties))
        using (var imgStream = imgData.AsStream())
        {
            imgStream.CopyTo(stream);
        }
    }
}
// Converts an NSBitmapImageRep to a Gdk.Pixbuf, scaling it down proportionally
// so that its largest dimension does not exceed the requested width/height.
// The conversion goes through the rep's TIFF encoding, which Gdk.Pixbuf can load.
static Gdk.Pixbuf GetPixbufFromNSBitmapImageRep (NSBitmapImageRep bitmap, int width, int height)
{
    byte[] data;
    // Copy the TIFF bytes out of the native NSData buffer into managed memory.
    using (var tiff = bitmap.TiffRepresentation) {
        data = new byte[tiff.Length];
        System.Runtime.InteropServices.Marshal.Copy (tiff.Bytes, data, 0, data.Length);
    }
    int pw = (int)bitmap.PixelsWide, ph = (int)bitmap.PixelsHigh;
    var pixbuf = new Gdk.Pixbuf (data, pw, ph);
    // if one dimension matches, and the other is same or smaller, use as-is
    if ((pw == width && ph <= height) || (ph == height && pw <= width))
        return pixbuf;
    // otherwise scale proportionally such that the largest dimension matches the desired size
    if (pw == ph) {
        // NOTE(review): a square source is stretched to width x height here, which is
        // only proportional when width == height — TODO confirm callers always pass a square target.
        pw = width;
        ph = height;
    } else if (pw > ph) {
        ph = (int) (width * ((float) ph / pw));
        pw = width;
    } else {
        pw = (int) (height * ((float) pw / ph));
        ph = height;
    }
    var scaled = pixbuf.ScaleSimple (pw, ph, Gdk.InterpType.Bilinear);
    // The intermediate full-size pixbuf is no longer needed once scaled.
    pixbuf.Dispose ();
    return scaled;
}
/// <summary>
/// Creates the backing bitmap representation, wires it to the image view,
/// and starts the computation worker on a background thread.
/// </summary>
public void ApplicationDidFinishLaunching(NSNotification notification)
{
    // Tell Cocoa we are multithreaded before any secondary thread touches AppKit.
    NSThread.MakeMultiThreaded ();

    // 8 bits per sample, 4 samples (RGBA), packed rows (4 * width bytes), 32 bpp.
    this.imageRep = new NSBitmapImageRep (IntPtr.Zero, width, height, 8, 4, true, false, "NSCalibratedRGBColorSpace", 4 * width, 32);

    var backingImage = new NSImage (new NSSize (width, height));
    backingImage.AddRepresentation (this.imageRep);
    this.imageView.Image = backingImage;

    // Background worker so it never keeps the process alive on its own.
    var worker = new Thread (this.DoComputation) { IsBackground = true };
    worker.Start ();
}
// Converts an NSImage to a Gdk.Pixbuf at (up to) the requested size.
// Prefers the image's best representation if it already is a bitmap;
// otherwise renders the image to a CGImage and wraps that in a bitmap rep.
// Returns null when no CGImage can be produced.
public static Gdk.Pixbuf GetPixbufFromNSImage (NSImage icon, int width, int height)
{
    var rect = new CGRect (0, 0, width, height);
    var rep = icon.BestRepresentation (rect, null, null);
    var bitmap = rep as NSBitmapImageRep;
    try {
        if (bitmap == null) {
            // The non-bitmap rep is not usable here; release it before re-rendering.
            if (rep != null)
                rep.Dispose ();
            using (var cgi = icon.AsCGImage (ref rect, null, null)) {
                if (cgi == null)
                    return null;
                bitmap = new NSBitmapImageRep (cgi);
            }
        }
        return GetPixbufFromNSBitmapImageRep (bitmap, width, height);
    } finally {
        // Disposes either the best-representation bitmap or the one created above.
        if (bitmap != null)
            bitmap.Dispose ();
    }
}
/// <summary>
/// Invokes the bitmap rep's set-property selector through the Objective-C
/// runtime, associating <paramref name="value"/> with <paramref name="property"/>.
/// </summary>
static void SetValueForProperty(NSBitmapImageRep rep, NSString property, NSObject value)
{
    var target = rep.Handle;
    var propertyHandle = property.Handle;
    var valueHandle = value.Handle;
    Messaging.void_objc_msgSend_IntPtr_IntPtr(target, selSetPropertyWithValueHandle, propertyHandle, valueHandle);
}
/// <summary>
/// Reads a property value from the bitmap rep through the Objective-C runtime
/// and wraps the returned native handle as a managed NSObject (null handle → null).
/// </summary>
static NSObject GetValueForProperty(NSBitmapImageRep rep, NSString property)
{
    var resultHandle = Messaging.IntPtr_objc_msgSend_IntPtr(rep.Handle, selValueForPropertyHandle, property.Handle);
    return Runtime.GetNSObject(resultHandle);
}
/// <summary>
/// Wraps an NSImage, its bitmap representation, and its frame list
/// in a new adapter instance. The arguments are stored as-is.
/// </summary>
MacImageAdapter(NSImage img, NSBitmapImageRep rep, List <RImageFrame> frames)
{
    _frames = frames;
    _rep = rep;
    _img = img;
}
// Ensures both the generic representation (rep) and a concrete NSBitmapImageRep
// (bmprep) are populated for this image, working around macOS Big Sur returning
// proxy representation objects instead of concrete bitmap reps.
protected void EnsureRep()
{
    if (rep == null) {
        rep = GetBestRepresentation();
    }
    // on Big Sur, rep is usually going to be a proxy, so let's find the concrete NSBitmapImageRep class the slow way..
    if (bmprep != null) {
        return;
    }
    if (rep is IconFrameHandler.LazyImageRep lazyRep) {
        bmprep = lazyRep.Rep;
    } else {
        bmprep = rep as NSBitmapImageRep ?? GetBestRepresentation() as NSBitmapImageRep;
    }
    if (bmprep != null) {
        return;
    }
    // go through concrete representations as we might have a proxy (Big Sur)
    // this is fixed with MonoMac, but not Xamarin.Mac.
    var representations = Control.Representations();
    for (int i = 0; i < representations.Length; i++) {
        // NOTE: this local deliberately shadows the 'rep' field inside the loop.
        NSImageRep rep = representations[i];
        if (rep is NSBitmapImageRep brep) {
            bmprep = brep;
            return;
        }
    }
    // create a new bitmap rep and copy the contents
    var size = Size;
    // 3 or 4 samples of 8 bits each depending on whether the source has alpha.
    int numComponents = rep.HasAlpha ? 4 : 3;
    int bitsPerComponent = 8;
    int bitsPerPixel = numComponents * bitsPerComponent;
    int bytesPerPixel = bitsPerPixel / 8;
    int bytesPerRow = bytesPerPixel * size.Width;
    bmprep = new NSBitmapImageRep(IntPtr.Zero, size.Width, size.Height, bitsPerComponent, numComponents, rep.HasAlpha, false, rep.ColorSpaceName, bytesPerRow, bitsPerPixel);
    // Render the image into the new bitmap rep, saving/restoring the global
    // graphics state around the redirected current context.
    var graphicsContext = NSGraphicsContext.FromBitmap(bmprep);
    NSGraphicsContext.GlobalSaveGraphicsState();
    NSGraphicsContext.CurrentContext = graphicsContext;
    Control.Draw(CGPoint.Empty, new CGRect(CGPoint.Empty, size.ToNS()), NSCompositingOperation.Copy, 1);
    NSGraphicsContext.GlobalRestoreGraphicsState();
    // remove all existing representations
    for (int i = 0; i < representations.Length; i++) {
        NSImageRep rep = representations[i];
        Control.RemoveRepresentation(rep);
    }
    // add the new one back
    Control.AddRepresentation(bmprep);
}
// Resolves a file's icon via NSWorkspace and converts it to a Gdk.Pixbuf,
// scaling proportionally to the menu icon size. Falls back to the base
// implementation when the native lookup is unavailable or fails.
protected override Gdk.Pixbuf OnGetPixbufForFile(string filename, Gtk.IconSize size)
{
    //this only works on MacOS 10.6.0 and greater
    if (systemVersion < 0x1060) {
        return (base.OnGetPixbufForFile(filename, size));
    }
    NSImage icon = null;
    if (Path.IsPathRooted(filename) && File.Exists(filename)) {
        // Existing absolute path: ask for the file's own icon.
        icon = NSWorkspace.SharedWorkspace.IconForFile(filename);
    } else {
        // Otherwise fall back to the generic icon for the file extension.
        string extension = Path.GetExtension(filename);
        if (!string.IsNullOrEmpty(extension)) {
            icon = NSWorkspace.SharedWorkspace.IconForFileType(extension);
        }
    }
    if (icon == null) {
        return (base.OnGetPixbufForFile(filename, size));
    }
    int w, h;
    if (!Gtk.Icon.SizeLookup(Gtk.IconSize.Menu, out w, out h)) {
        // 22px is a reasonable default when the menu icon size cannot be resolved.
        w = h = 22;
    }
    var rect = new System.Drawing.RectangleF(0, 0, w, h);
    var arep = icon.BestRepresentation(rect, null, null);
    if (arep == null) {
        return (base.OnGetPixbufForFile(filename, size));
    }
    var rep = arep as NSBitmapImageRep;
    if (rep == null) {
        // Non-bitmap representation: rasterize via CGImage, then drop the original.
        using (var cgi = arep.AsCGImage(rect, null, null))
            rep = new NSBitmapImageRep(cgi);
        arep.Dispose();
    }
    try {
        byte[] arr;
        // Copy the TIFF bytes into managed memory for the Pixbuf loader.
        using (var tiff = rep.TiffRepresentation) {
            arr = new byte[tiff.Length];
            System.Runtime.InteropServices.Marshal.Copy(tiff.Bytes, arr, 0, arr.Length);
        }
        int pw = rep.PixelsWide, ph = rep.PixelsHigh;
        var px = new Gdk.Pixbuf(arr, pw, ph);
        //if one dimension matches, and the other is same or smaller, use as-is
        if ((pw == w && ph <= h) || (ph == h && pw <= w)) {
            return (px);
        }
        //else scale proportionally such that the largest dimension matches the desired size
        if (pw == ph) {
            pw = w;
            ph = h;
        } else if (pw > ph) {
            ph = (int)(w * ((float)ph / pw));
            pw = w;
        } else {
            pw = (int)(h * ((float)pw / ph));
            ph = h;
        }
        var scaled = px.ScaleSimple(pw, ph, Gdk.InterpType.Bilinear);
        px.Dispose();
        return (scaled);
    } finally {
        // rep is either arep itself or the bitmap created above; dispose either way.
        if (rep != null) {
            rep.Dispose();
        }
    }
}
/// <summary>
/// Wraps an existing NSImage, caching its device-best representation and,
/// when that representation is a bitmap, the concrete NSBitmapImageRep.
/// </summary>
public BitmapHandler(NSImage image)
{
    Control = image;
    var best = image.BestRepresentationForDevice(null);
    rep = best;
    bmprep = best as NSBitmapImageRep;
}
// Converts an image description into a bitmap-backed NSImage at the given
// scale factor. Custom (vector) images are rasterized into a CGBitmapContext;
// native NSImage backends are returned as-is when they already carry a bitmap
// representation, otherwise one is synthesized from their TIFF data.
public override object ConvertToBitmap(ImageDescription idesc, double scaleFactor, ImageFormat format)
{
    double width = idesc.Size.Width;
    double height = idesc.Size.Height;
    int pixelWidth = (int)(width * scaleFactor);
    int pixelHeight = (int)(height * scaleFactor);
    if (idesc.Backend is CustomImage) {
        var flags = CGBitmapFlags.ByteOrderDefault;
        int bytesPerRow;
        switch (format) {
        case ImageFormat.ARGB32:
            bytesPerRow = pixelWidth * 4;
            flags |= CGBitmapFlags.PremultipliedFirst;
            break;
        case ImageFormat.RGB24:
            bytesPerRow = pixelWidth * 3;
            flags |= CGBitmapFlags.None;
            break;
        default:
            throw new NotImplementedException("ImageFormat: " + format.ToString());
        }
        var bmp = new CGBitmapContext(IntPtr.Zero, pixelWidth, pixelHeight, 8, bytesPerRow, Util.DeviceRGBColorSpace, flags);
        // Flip vertically: CG has a bottom-left origin, drawing expects top-left.
        bmp.TranslateCTM(0, pixelHeight);
        bmp.ScaleCTM((float)scaleFactor, (float)-scaleFactor);
        var ctx = new CGContextBackend {
            Context = bmp,
            Size = new CGSize((nfloat)width, (nfloat)height),
            InverseViewTransform = bmp.GetCTM().Invert(),
            ScaleFactor = scaleFactor
        };
        var ci = (CustomImage)idesc.Backend;
        ci.DrawInContext(ctx, idesc);
        var img = new NSImage(((CGBitmapContext)bmp).ToImage(), new CGSize(pixelWidth, pixelHeight));
        // Round-trip through TIFF to obtain a concrete NSBitmapImageRep,
        // then rebuild an NSImage at the logical (unscaled) size.
        var imageData = img.AsTiff();
        var imageRep = (NSBitmapImageRep)NSBitmapImageRep.ImageRepFromData(imageData);
        var im = new NSImage();
        im.AddRepresentation(imageRep);
        im.Size = new CGSize((nfloat)width, (nfloat)height);
        bmp.Dispose();
        return (im);
    } else {
        NSImage img = (NSImage)idesc.Backend;
        NSBitmapImageRep bitmap = img.Representations().OfType <NSBitmapImageRep> ().FirstOrDefault();
        if (bitmap == null) {
            // No bitmap rep present: synthesize one from the TIFF encoding.
            var imageData = img.AsTiff();
            var imageRep = (NSBitmapImageRep)NSBitmapImageRep.ImageRepFromData(imageData);
            var im = new NSImage();
            im.AddRepresentation(imageRep);
            im.Size = new CGSize((nfloat)width, (nfloat)height);
            return (im);
        }
        // Already bitmap-backed; hand back the original backend unchanged.
        return (idesc.Backend);
    }
}
/// <summary>
/// Read the file and draw rectangles on it.
/// </summary>
/// <param name="fileName">The name of the file.</param>
/// <param name="annotations">Annotations to be added to the image. Can consist of rectangles and labels. May be null.</param>
/// <returns>The image in Jpeg stream format</returns>
public static JpegData ImageFileToJpeg(String fileName, Annotation[] annotations = null)
{
#if __ANDROID__
    BitmapFactory.Options options = new BitmapFactory.Options();
    options.InMutable = true;
    Android.Graphics.Bitmap bmp = BitmapFactory.DecodeFile(fileName, options);
    // FIX: the parameter is optional, but this path previously dereferenced
    // annotations.Length unconditionally and threw on null (the macOS and
    // GDI+ paths already guard).
    if (annotations != null)
    {
        Android.Graphics.Paint p = new Android.Graphics.Paint();
        p.SetStyle(Paint.Style.Stroke);
        p.AntiAlias = true;
        p.Color = Android.Graphics.Color.Red;
        Canvas c = new Canvas(bmp);
        for (int i = 0; i < annotations.Length; i++)
        {
            float[] rects = ScaleLocation(annotations[i].Rectangle, bmp.Width, bmp.Height);
            Android.Graphics.Rect r = new Rect((int)rects[0], (int)rects[1], (int)rects[2], (int)rects[3]);
            c.DrawRect(r, p);
        }
    }
    using (MemoryStream ms = new MemoryStream())
    {
        bmp.Compress(Bitmap.CompressFormat.Jpeg, 90, ms);
        JpegData result = new JpegData();
        result.Raw = ms.ToArray();
        result.Width = bmp.Width;
        result.Height = bmp.Height;
        return result;
    }
#elif __MACOS__
    NSImage img = NSImage.ImageNamed(fileName);
    if (annotations != null && annotations.Length > 0)
    {
        DrawAnnotations(img, annotations);
    }
    // Round-trip through TIFF to get a concrete bitmap rep, then JPEG-encode it.
    var imageData = img.AsTiff();
    var imageRep = NSBitmapImageRep.ImageRepsWithData(imageData)[0] as NSBitmapImageRep;
    var jpegData = imageRep.RepresentationUsingTypeProperties(NSBitmapImageFileType.Jpeg, null);
    byte[] jpeg = new byte[jpegData.Length];
    System.Runtime.InteropServices.Marshal.Copy(jpegData.Bytes, jpeg, 0, (int)jpegData.Length);
    JpegData result = new JpegData();
    result.Raw = jpeg;
    result.Width = (int)img.Size.Width;
    result.Height = (int)img.Size.Height;
    return result;
#elif __IOS__
    UIImage uiimage = new UIImage(fileName);
    UIGraphics.BeginImageContextWithOptions(uiimage.Size, false, 0);
    var context = UIGraphics.GetCurrentContext();
    uiimage.Draw(new CGPoint());
    // FIX: same null-guard as the Android path; annotations is optional.
    if (annotations != null)
    {
        context.SetStrokeColor(UIColor.Red.CGColor);
        context.SetLineWidth(2);
        for (int i = 0; i < annotations.Length; i++)
        {
            float[] rects = ScaleLocation(
                annotations[i].Rectangle,
                (int)uiimage.Size.Width,
                (int)uiimage.Size.Height);
            CGRect cgRect = new CGRect(
                (nfloat)rects[0],
                (nfloat)rects[1],
                (nfloat)(rects[2] - rects[0]),
                (nfloat)(rects[3] - rects[1]));
            context.AddRect(cgRect);
            context.DrawPath(CGPathDrawingMode.Stroke);
        }
        // Flip the CTM so the label text is drawn upright.
        context.ScaleCTM(1, -1);
        context.TranslateCTM(0, -uiimage.Size.Height);
        for (int i = 0; i < annotations.Length; i++)
        {
            float[] rects = ScaleLocation(
                annotations[i].Rectangle,
                (int)uiimage.Size.Width,
                (int)uiimage.Size.Height);
            context.SelectFont("Helvetica", 18, CGTextEncoding.MacRoman);
            context.SetFillColor((nfloat)1.0, (nfloat)0.0, (nfloat)0.0, (nfloat)1.0);
            context.SetTextDrawingMode(CGTextDrawingMode.Fill);
            context.ShowTextAtPoint(rects[0], uiimage.Size.Height - rects[1], annotations[i].Label);
        }
    }
    UIImage imgWithRect = UIGraphics.GetImageFromCurrentImageContext();
    UIGraphics.EndImageContext();
    var jpegData = imgWithRect.AsJPEG();
    byte[] jpeg = new byte[jpegData.Length];
    System.Runtime.InteropServices.Marshal.Copy(jpegData.Bytes, jpeg, 0, (int)jpegData.Length);
    JpegData result = new JpegData();
    result.Raw = jpeg;
    result.Width = (int)uiimage.Size.Width;
    result.Height = (int)uiimage.Size.Height;
    return result;
#else
    Bitmap img = new Bitmap(fileName);
    if (annotations != null)
    {
        using (Graphics g = Graphics.FromImage(img))
        {
            for (int i = 0; i < annotations.Length; i++)
            {
                if (annotations[i].Rectangle != null)
                {
                    float[] rects = ScaleLocation(annotations[i].Rectangle, img.Width, img.Height);
                    PointF origin = new PointF(rects[0], rects[1]);
                    RectangleF rect = new RectangleF(origin, new SizeF(rects[2] - rects[0], rects[3] - rects[1]));
                    Pen redPen = new Pen(Color.Red, 3);
                    g.DrawRectangle(redPen, Rectangle.Round(rect));
                    String label = annotations[i].Label;
                    if (label != null)
                    {
                        g.DrawString(label, new Font(FontFamily.GenericSansSerif, 20f), Brushes.Red, origin);
                    }
                }
            }
            g.Save();
        }
    }
    using (MemoryStream ms = new MemoryStream())
    {
        img.Save(ms, System.Drawing.Imaging.ImageFormat.Jpeg);
        JpegData result = new JpegData();
        result.Raw = ms.ToArray();
        result.Width = img.Size.Width;
        result.Height = img.Size.Height;
        return result;
    }
#endif
}
// Builds the array of NSDraggingItems for a macOS drag session from the
// contents of a DataPackageView. Each supported format (bitmap, HTML, RTF,
// plain text, URI) contributes at most one dragging item; storage items are
// recognized but not yet supported.
internal static async Task <NSDraggingItem[]> CreateNativeDragDropData(
    DataPackageView data,
    Point startPoint)
{
    NSDraggingItem draggingItem;
    var items = new List <NSDraggingItem>();
    double maxFrameDimension = 300.0; // May be adjusted
    var defaultFrameRect = new CoreGraphics.CGRect(startPoint.X, startPoint.Y, 100, 30);

    /* Note that NSDraggingItems are required by the BeginDraggingSession methods.
     * Therefore, that is what is constructed here instead of pasteboard items.
     *
     * For several types such as NSString or NSImage, they implement the INSPasteboardWriting interface and
     * can therefore be used to directly construct an NSDraggingItem.
     * However, for other types (such as HTML) the full pasteboard item must be constructed first defining
     * both its type and string content.
     *
     * The dragging frame is used to represent the visual of the item being dragged. This could be a
     * preview of the image or sample text. At minimum, macOS requires the DraggingFrame property of the
     * NSDraggingItem to be set with a CGRect or the app will crash. It is however better to set both
     * the frame bounds and content at the same time with .SetDraggingFrame(). For caveats see:
     * https://developer.apple.com/documentation/appkit/nsdraggingitem/1528746-setdraggingframe
     *
     * Because Uno does not currently support the DragUI, this code only generates a real drag visual
     * for images where a visual is already defined. For other types such as text, no visual will be
     * generated. In the future, when DragUI and its corresponding image is supported, this can change.
     * */

    if (data?.Contains(StandardDataFormats.Bitmap) ?? false)
    {
        NSImage? image = null;
        // Buffer the bitmap stream into memory so NSImage can load it.
        using (var stream = (await (await data.GetBitmapAsync()).OpenReadAsync()).AsStream())
        {
            if (stream != null)
            {
                using (var ms = new MemoryStream())
                {
                    await stream.CopyToAsync(ms);
                    ms.Flush();
                    ms.Position = 0;
                    image = NSImage.FromStream(ms);
                }
            }
        }
        if (image != null)
        {
            draggingItem = new NSDraggingItem(image);
            // For an NSImage, we will use the image itself as the dragging visual.
            // The visual should be no larger than the max dimension setting and is therefore scaled.
            // NOTE(review): this rep is never disposed — consider a using block.
            NSBitmapImageRep rep = new NSBitmapImageRep(image.CGImage);
            int width = (int)rep.PixelsWide;
            int height = (int)rep.PixelsHigh;
            double scale = maxFrameDimension / Math.Max(width, height);
            // Dragging frame must be set
            draggingItem.SetDraggingFrame(
                new CoreGraphics.CGRect(startPoint.X, startPoint.Y, width * scale, height * scale),
                image);
            items.Add(draggingItem);
        }
    }
    if (data?.Contains(StandardDataFormats.Html) ?? false)
    {
        var html = await data.GetHtmlFormatAsync();
        if (!string.IsNullOrEmpty(html))
        {
            var pasteboardItem = new NSPasteboardItem();
            pasteboardItem.SetStringForType(html, NSPasteboard.NSPasteboardTypeHTML);
            draggingItem = new NSDraggingItem(pasteboardItem);
            draggingItem.DraggingFrame = defaultFrameRect; // Must be set
            items.Add(draggingItem);
        }
    }
    if (data?.Contains(StandardDataFormats.Rtf) ?? false)
    {
        var rtf = await data.GetRtfAsync();
        if (!string.IsNullOrEmpty(rtf))
        {
            // Use `NSPasteboardTypeRTF` instead of `NSPasteboardTypeRTFD` for max compatibility
            var pasteboardItem = new NSPasteboardItem();
            pasteboardItem.SetStringForType(rtf, NSPasteboard.NSPasteboardTypeRTF);
            draggingItem = new NSDraggingItem(pasteboardItem);
            draggingItem.DraggingFrame = defaultFrameRect; // Must be set
            items.Add(draggingItem);
        }
    }
    if (data?.Contains(StandardDataFormats.StorageItems) ?? false)
    {
        var storageItems = await data.GetStorageItemsAsync();
        if (storageItems.Count > 0)
        {
            // Not currently supported
        }
    }
    if (data?.Contains(StandardDataFormats.Text) ?? false)
    {
        var text = await data.GetTextAsync();
        if (!string.IsNullOrEmpty(text))
        {
            // NSString implements INSPasteboardWriting, so it can be used directly.
            draggingItem = new NSDraggingItem((NSString)text);
            draggingItem.DraggingFrame = defaultFrameRect; // Must be set
            items.Add(draggingItem);
        }
    }
    if (data != null)
    {
        // Combine the three possible link formats into a single URI string.
        var uri = DataPackage.CombineUri(
            data.Contains(StandardDataFormats.WebLink) ? (await data.GetWebLinkAsync()).ToString() : null,
            data.Contains(StandardDataFormats.ApplicationLink) ? (await data.GetApplicationLinkAsync()).ToString() : null,
            data.Contains(StandardDataFormats.Uri) ? (await data.GetUriAsync()).ToString() : null);
        if (string.IsNullOrEmpty(uri) == false)
        {
            draggingItem = new NSDraggingItem(new NSUrl(uri));
            draggingItem.DraggingFrame = defaultFrameRect; // Must be set
            items.Add(draggingItem);
        }
    }
    return (items.ToArray());
}
// Sets up the multibox people-detection page: a button caption and an image-loaded
// handler that runs the TensorFlow detection on a cold Task, then renders the
// detection results per platform (Android/macOS/iOS) and updates the UI.
public MultiboxDetectionPage() : base()
{
    var button = this.GetButton();
    button.Text = "Perform People Detection";
    button.Clicked += OnButtonClicked;
    OnImagesLoaded += async (sender, image) =>
    {
        GetLabel().Text = "Please wait...";
        SetImage();
        // Cold task; started explicitly below so the detection runs off the UI thread.
        Task <Tuple <byte[], long> > t = new Task <Tuple <byte[], long> >(
            () =>
            {
                MultiboxGraph graph = new MultiboxGraph();
                Tensor imageTensor = Emgu.TF.Models.ImageIO.ReadTensorFromImageFile(image[0], 224, 224, 128.0f, 1.0f / 128.0f);
                Stopwatch watch = Stopwatch.StartNew();
                MultiboxGraph.Result detectResult = graph.Detect(imageTensor);
                watch.Stop();
#if __ANDROID__
                BitmapFactory.Options options = new BitmapFactory.Options();
                options.InMutable = true;
                Android.Graphics.Bitmap bmp = BitmapFactory.DecodeFile(image[0], options);
                MultiboxGraph.DrawResults(bmp, detectResult, 0.2f);
                using (MemoryStream ms = new MemoryStream())
                {
                    bmp.Compress(Bitmap.CompressFormat.Jpeg, 90, ms);
                    return (new Tuple <byte[], long>(ms.ToArray(), watch.ElapsedMilliseconds));
                }
#elif __MACOS__
                NSImage img = NSImage.ImageNamed(image[0]);
                // AppKit drawing must happen on the main thread; results are
                // pushed to the UI from inside this dispatched block.
                Xamarin.Forms.Device.BeginInvokeOnMainThread(() =>
                {
                    MultiboxGraph.DrawResults(img, detectResult, 0.1f);
                    var imageData = img.AsTiff();
                    var imageRep = NSBitmapImageRep.ImageRepsWithData(imageData)[0] as NSBitmapImageRep;
                    var jpegData = imageRep.RepresentationUsingTypeProperties(NSBitmapImageFileType.Jpeg, null);
                    byte[] raw = new byte[jpegData.Length];
                    System.Runtime.InteropServices.Marshal.Copy(jpegData.Bytes, raw, 0, (int)jpegData.Length);
                    SetImage(raw);
                    GetLabel().Text = String.Format("Detected with in {0} milliseconds.", watch.ElapsedMilliseconds);
                });
                // The real result is delivered via the main-thread block above.
                return (new Tuple <byte[], long>(null, 0));
#elif __IOS__
                UIImage uiimage = new UIImage(image[0]);
                Xamarin.Forms.Device.BeginInvokeOnMainThread(() =>
                {
                    UIImage newImg = MultiboxGraph.DrawResults(uiimage, detectResult, 0.1f);
                    var jpegData = newImg.AsJPEG();
                    byte[] raw = new byte[jpegData.Length];
                    System.Runtime.InteropServices.Marshal.Copy(jpegData.Bytes, raw, 0, (int)jpegData.Length);
                    SetImage(raw);
                    GetLabel().Text = String.Format("Detected with in {0} milliseconds.", watch.ElapsedMilliseconds);
                });
                return (new Tuple <byte[], long>(null, 0));
#else
                return (new Tuple <byte[], long>(new byte[10], 0));
#endif
            });
        t.Start();
#if !(__UNIFIED__)
        // On non-unified platforms the task's own result carries the JPEG bytes.
        var result = await t;
        SetImage(t.Result.Item1);
        GetLabel().Text = String.Format("Detection took {0} milliseconds.", t.Result.Item2);
#endif
    };
}
// Wires up the native macOS plot view with a right-click context menu offering
// copy-to-pasteboard, save-to-PNG, and reset-view actions.
public PlotHandler()
{
    Control = new DWSIM.UI.Desktop.Mac.PlotView();
    // NOTE(review): leftover no-op block/statement; kept as-is.
    { };
    ContextMenu cmenu = new ContextMenu();
    var b1 = new ButtonMenuItem() { Text = "Copy" };
    cmenu.Items.Add(b1);
    b1.Click += (sender, e) =>
    {
        Console.WriteLine(sender.ToString());
        // Get the standard pasteboard
        var pasteboard = NSPasteboard.GeneralPasteboard;
        // Empty the current contents
        pasteboard.ClearContents();
        // Render the plot's layer into a new NSImage via lock-focus drawing.
        NSImage image = new NSImage(new CoreGraphics.CGSize(Control.Bounds.Width, Control.Bounds.Height));
        image.LockFocus();
        var ctx = NSGraphicsContext.CurrentContext.GraphicsPort;
        Control.Layer.RenderInContext(ctx);
        image.UnlockFocus();
        // Add the current image to the pasteboard
        pasteboard.WriteObjects(new NSImage[] { image });
    };
    var b4 = new ButtonMenuItem() { Text = "Save to File" };
    cmenu.Items.Add(b4);
    b4.Click += (sender, e) =>
    {
        Console.WriteLine(sender.ToString());
        var sfd = new SaveFileDialog();
        sfd.Title = "Save Chart to PNG";
        sfd.Filters.Add(new FileFilter("PNG File", new string[] { ".png" }));
        sfd.CurrentFilterIndex = 0;
        if (sfd.ShowDialog(this.Widget) == DialogResult.Ok)
        {
            // Same layer-render approach as the Copy action, then PNG-encode via TIFF.
            NSImage image = new NSImage(new CoreGraphics.CGSize(Control.Bounds.Width, Control.Bounds.Height));
            image.LockFocus();
            var ctx = NSGraphicsContext.CurrentContext.GraphicsPort;
            Control.Layer.RenderInContext(ctx);
            image.UnlockFocus();
            var imageRep = new NSBitmapImageRep(image.AsTiff());
            var pngData = imageRep.RepresentationUsingTypeProperties(NSBitmapImageFileType.Png);
            pngData.Save(sfd.FileName, false);
        }
    };
    var b7 = new ButtonMenuItem() { Text = "Reset to Default View" };
    cmenu.Items.Add(b7);
    b7.Click += (sender, e) =>
    {
        Console.WriteLine(sender.ToString());
        Control.Model.ResetAllAxes();
        Control.Model.InvalidatePlot(false);
    };
    // Show the context menu on right-click over the plot.
    Control.RightMouseAction = () =>
    {
        cmenu.Show(this.Widget);
    };
}
// Encodes a raw interleaved pixel buffer as JPEG bytes using the platform's
// native imaging API. Channel-count expectations differ per platform:
// Android and macOS require 4 channels, iOS requires 3.
public static byte[] PixelToJpeg(byte[] rawPixel, int width, int height, int channels)
{
#if __ANDROID__
    if (channels != 4)
    {
        throw new NotImplementedException("Only 4 channel pixel input is supported.");
    }
    using (Bitmap bitmap = Bitmap.CreateBitmap(width, height, Bitmap.Config.Argb8888))
    using (MemoryStream ms = new MemoryStream())
    {
        IntPtr ptr = bitmap.LockPixels();
        //GCHandle handle = GCHandle.Alloc(colors, GCHandleType.Pinned);
        Marshal.Copy(rawPixel, 0, ptr, rawPixel.Length);
        bitmap.UnlockPixels();
        bitmap.Compress(Bitmap.CompressFormat.Jpeg, 90, ms);
        return (ms.ToArray());
    }
#elif __IOS__
    if (channels != 3)
    {
        throw new NotImplementedException("Only 3 channel pixel input is supported.");
    }
    System.Drawing.Size sz = new System.Drawing.Size(width, height);
    // Pin the managed buffer so CGBitmapContext can draw from it directly.
    // NOTE(review): handle is not freed if an exception is thrown before
    // handle.Free() below — consider a try/finally.
    GCHandle handle = GCHandle.Alloc(rawPixel, GCHandleType.Pinned);
    using (CGColorSpace cspace = CGColorSpace.CreateDeviceRGB())
    using (CGBitmapContext context = new CGBitmapContext(
        handle.AddrOfPinnedObject(),
        sz.Width, sz.Height,
        8,
        sz.Width * 3,
        cspace,
        CGImageAlphaInfo.PremultipliedLast))
    using (CGImage cgImage = context.ToImage())
    using (UIImage newImg = new UIImage(cgImage))
    {
        // NOTE(review): the pin is released while context/cgImage are still alive;
        // presumably ToImage() has already copied the pixels — TODO confirm.
        handle.Free();
        var jpegData = newImg.AsJPEG();
        byte[] raw = new byte[jpegData.Length];
        System.Runtime.InteropServices.Marshal.Copy(jpegData.Bytes, raw, 0, (int)jpegData.Length);
        return (raw);
    }
#elif __UNIFIED__ //OSX
    if (channels != 4)
    {
        throw new NotImplementedException("Only 4 channel pixel input is supported.");
    }
    System.Drawing.Size sz = new System.Drawing.Size(width, height);
    using (CGColorSpace cspace = CGColorSpace.CreateDeviceRGB())
    using (CGBitmapContext context = new CGBitmapContext(
        rawPixel,
        sz.Width, sz.Height,
        8,
        sz.Width * 4,
        cspace,
        CGBitmapFlags.PremultipliedLast | CGBitmapFlags.ByteOrder32Big))
    using (CGImage cgImage = context.ToImage())
    using (NSBitmapImageRep newImg = new NSBitmapImageRep(cgImage))
    {
        var jpegData = newImg.RepresentationUsingTypeProperties(NSBitmapImageFileType.Jpeg);
        byte[] raw = new byte[jpegData.Length];
        System.Runtime.InteropServices.Marshal.Copy(jpegData.Bytes, raw, 0, (int)jpegData.Length);
        return (raw);
    }
#else
    throw new NotImplementedException("Not Implemented");
#endif
}
/// <summary>
/// Builds the remote (serializable) image representation for a bitmap rep
/// by delegating to the CGImage-based overload.
/// </summary>
public static XIR.Image RemoteRepresentation(this NSBitmapImageRep bitmapImageRep)
    => RemoteRepresentation(bitmapImageRep.CGImage);
// Captures the given rectangle of this (focused) view into a new NSImage.
// NOTE(review): the NSBitmapImageRep(NSRect) constructor presumably maps to
// -initWithFocusedViewRect:, reading pixels between LockFocus/UnlockFocus —
// TODO confirm against the binding; SafeAutorelease/Autorelease look like
// custom MonoMac-era helpers transferring ownership to the autorelease pool.
public NSImage Snapshot(NSRect sourceRect)
{
    NSImage snapshot = new NSImage(sourceRect.size);
    this.LockFocus();
    // Capture pixels from the currently focused view.
    NSBitmapImageRep rep = new NSBitmapImageRep(sourceRect).SafeAutorelease();
    this.UnlockFocus();
    snapshot.AddRepresentation(rep);
    snapshot.Autorelease();
    return snapshot;
}
// Create one display list based on the given image. This assumes the image
// uses 8-bit chunks to represent a sample.
// Converts the image's first sample channel into a 1-bit-per-pixel OpenGL
// bitmap (threshold: sample > 0) and compiles it into display list listNum.
// Always returns true.
bool MakeDisplayList(int listNum, NSImage theImage)
{
    NSBitmapImageRep bitmap;
    int bytesPerRow, pixelsHigh, pixelsWide, samplesPerPixel;
    byte currentBit, byteValue;
    byte[] newBuffer;
    int rowIndex, colIndex;
    bitmap = new NSBitmapImageRep(theImage.AsTiff(NSTiffCompression.None, 0));
    pixelsHigh = bitmap.PixelsHigh;
    pixelsWide = bitmap.PixelsWide;
    bytesPerRow = bitmap.BytesPerRow;
    samplesPerPixel = bitmap.SamplesPerPixel;
    newBuffer = new byte[(int)Math.Ceiling((float)bytesPerRow / 8.0) * pixelsHigh];
    // FIX: copy whole rows (bytesPerRow * pixelsHigh). The previous code copied
    // only pixelsWide * pixelsHigh * samplesPerPixel bytes but indexed the array
    // with a bytesPerRow stride below; when the rep pads its rows this either
    // read the wrong pixels or threw IndexOutOfRangeException on the last rows.
    byte[] bitmapBytesArray = new byte[bytesPerRow * pixelsHigh];
    System.Runtime.InteropServices.Marshal.Copy(bitmap.BitmapData, bitmapBytesArray, 0, bytesPerRow * pixelsHigh);
    int curIdx = 0;
    /*
     * Convert the color bitmap into a true bitmap, ie, one bit per pixel. We
     * read at last row, write to first row as Cocoa and OpenGL have opposite
     * y origins
     */
    for (rowIndex = pixelsHigh - 1; rowIndex >= 0; rowIndex--) {
        currentBit = 0x80;
        byteValue = 0;
        for (colIndex = 0; colIndex < pixelsWide; colIndex++) {
            // Any non-zero first sample sets the pixel's bit.
            if (bitmapBytesArray [rowIndex * bytesPerRow + colIndex * samplesPerPixel] > 0) {
                byteValue |= currentBit;
            }
            currentBit >>= 1;
            if (currentBit == 0) {
                newBuffer [curIdx++] = byteValue;
                currentBit = 0x80;
                byteValue = 0;
            }
        }
        /*
         * Fill out the last byte; extra is ignored by OpenGL, but each row
         * must start on a new byte
         */
        if (currentBit != 0x80) {
            newBuffer[curIdx++] = byteValue;
        }
    }
    GL.NewList(listNum, ListMode.Compile);
    GL.Bitmap(pixelsWide, pixelsHigh, 0, 0, pixelsWide, 0, newBuffer);
    GL.EndList();
    return (true);
}
/// <summary>
/// JPEG-encodes an NSImage at maximum quality (compression factor 1) by
/// round-tripping through its TIFF representation.
/// </summary>
NSData ImageAsJPEG (NSImage i)
{
    NSBitmapImageRep rep = new NSBitmapImageRep (i.AsTiff ());
    NSDictionary properties = NSDictionary.FromObjectAndKey (NSNumber.FromInt32 (1), NSBitmapImageRep.CompressionFactor);
    return rep.RepresentationUsingTypeProperties (NSBitmapImageFileType.Jpeg, properties);
}
// Create one display list based on the given image. This assumes the image
// uses 8-bit chunks to represent a sample.
// Converts the image's first sample channel into a 1-bit-per-pixel OpenGL
// bitmap (threshold: sample > 0) and compiles it into display list listNum.
// Always returns true.
bool MakeDisplayList (int listNum, NSImage theImage)
{
    NSBitmapImageRep bitmap;
    int bytesPerRow, pixelsHigh, pixelsWide, samplesPerPixel;
    byte currentBit, byteValue;
    byte[] newBuffer;
    int rowIndex, colIndex;

    bitmap = new NSBitmapImageRep ( theImage.AsTiff (NSTiffCompression.None, 0) );
    pixelsHigh = (int)bitmap.PixelsHigh;
    pixelsWide = (int)bitmap.PixelsWide;
    bytesPerRow = (int)bitmap.BytesPerRow;
    samplesPerPixel = (int) bitmap.SamplesPerPixel;
    newBuffer = new byte[(int)Math.Ceiling ((float)bytesPerRow / 8.0) * pixelsHigh];
    // FIX: copy whole rows (bytesPerRow * pixelsHigh). The previous code copied
    // only pixelsWide * pixelsHigh * samplesPerPixel bytes but indexed the array
    // with a bytesPerRow stride below; when the rep pads its rows this either
    // read the wrong pixels or threw IndexOutOfRangeException on the last rows.
    byte[] bitmapBytesArray = new byte[bytesPerRow * pixelsHigh];
    System.Runtime.InteropServices.Marshal.Copy (bitmap.BitmapData, bitmapBytesArray, 0, bytesPerRow * pixelsHigh);
    int curIdx = 0;
    /*
     * Convert the color bitmap into a true bitmap, ie, one bit per pixel. We
     * read at last row, write to first row as Cocoa and OpenGL have opposite
     * y origins
     */
    for (rowIndex = pixelsHigh - 1; rowIndex >= 0; rowIndex--) {
        currentBit = 0x80;
        byteValue = 0;
        for (colIndex = 0; colIndex < pixelsWide; colIndex++) {
            // Any non-zero first sample sets the pixel's bit.
            if (bitmapBytesArray [rowIndex * bytesPerRow + colIndex * samplesPerPixel] > 0)
                byteValue |= currentBit;
            currentBit >>= 1;
            if (currentBit == 0) {
                newBuffer [curIdx++] = byteValue;
                currentBit = 0x80;
                byteValue = 0;
            }
        }
        /*
         * Fill out the last byte; extra is ignored by OpenGL, but each row
         * must start on a new byte
         */
        if (currentBit != 0x80)
            newBuffer[curIdx++] = byteValue;
    }
    GL.NewList( listNum, ListMode.Compile);
    GL.Bitmap(pixelsWide, pixelsHigh, 0, 0, pixelsWide, 0, newBuffer);
    GL.EndList();
    return true;
}
// Allocates a fresh NSBitmapImageRep-backed NSImage of the given size for the
// requested pixel format. Supported formats: 32bppRgb (RGBX, padding byte),
// 24bppRgb (packed RGB), and 32bppRgba (RGBA).
public void Create(int width, int height, PixelFormat pixelFormat)
{
    switch (pixelFormat) {
    case PixelFormat.Format32bppRgb:
    {
        alpha = false;
        int numComponents = 4;
        int bitsPerComponent = 8;
        int bitsPerPixel = numComponents * bitsPerComponent;
        int bytesPerPixel = bitsPerPixel / 8;
        int bytesPerRow = bytesPerPixel * width;
        // NOTE(review): samples-per-pixel is 3 (RGB, no alpha) while bitsPerPixel is 32,
        // i.e. one unused padding byte per pixel — presumably intentional for 32bppRgb.
        rep = bmprep = new NSBitmapImageRep(IntPtr.Zero, width, height, bitsPerComponent, 3, false, false, NSColorSpace.DeviceRGB, bytesPerRow, bitsPerPixel);
        Control = new NSImage();
        Control.AddRepresentation(rep);
        //var provider = new CGDataProvider (data.Bytes, (int)data.Length);
        //var cgImage = new CGImage (width, height, bitsPerComponent, bitsPerPixel, bytesPerRow, CGColorSpace.CreateDeviceRGB (), CGBitmapFlags.ByteOrder32Little | CGBitmapFlags.PremultipliedFirst, provider, null, true, CGColorRenderingIntent.Default);
        //Control = new NSImage (cgImage, new System.Drawing.SizeF (width, height));
        break;
    }
    case PixelFormat.Format24bppRgb:
    {
        alpha = false;
        int numComponents = 3;
        int bitsPerComponent = 8;
        int bitsPerPixel = numComponents * bitsPerComponent;
        int bytesPerPixel = bitsPerPixel / 8;
        int bytesPerRow = bytesPerPixel * width;
        rep = bmprep = new NSBitmapImageRep(IntPtr.Zero, width, height, bitsPerComponent, numComponents, false, false, NSColorSpace.DeviceRGB, bytesPerRow, bitsPerPixel);
        Control = new NSImage();
        Control.AddRepresentation(rep);
        //var provider = new CGDataProvider (data.ClassHandle);
        //var cgImage = new CGImage (width, height, bitsPerComponent, bitsPerPixel, bytesPerRow, CGColorSpace.CreateDeviceRGB (), CGBitmapFlags.ByteOrder32Little | CGBitmapFlags.PremultipliedFirst, provider, null, true, CGColorRenderingIntent.Default);
        //Control = new NSImage (cgImage, new System.Drawing.SizeF (width, height));
        break;
    }
    case PixelFormat.Format32bppRgba:
    {
        alpha = true;
        int numComponents = 4;
        int bitsPerComponent = 8;
        int bitsPerPixel = numComponents * bitsPerComponent;
        int bytesPerPixel = bitsPerPixel / 8;
        int bytesPerRow = bytesPerPixel * width;
        rep = bmprep = new NSBitmapImageRep(IntPtr.Zero, width, height, bitsPerComponent, numComponents, true, false, NSColorSpace.DeviceRGB, bytesPerRow, bitsPerPixel);
        Control = new NSImage();
        Control.AddRepresentation(rep);
        //var provider = new CGDataProvider (data.Bytes, (int)data.Length);
        //var cgImage = new CGImage (width, height, bitsPerComponent, bitsPerPixel, bytesPerRow, CGColorSpace.CreateDeviceRGB (), CGBitmapFlags.ByteOrder32Little | CGBitmapFlags.PremultipliedFirst, provider, null, true, CGColorRenderingIntent.Default);
        //Control = new NSImage (cgImage, new System.Drawing.SizeF (width, height));
        break;
    }
    /*case PixelFormat.Format16bppRgb555:
     * control = new Gdk.Pixbuf(Gdk.Colorspace.Rgb, false, 5, width, height);
     * break;*/
    default:
        throw new ArgumentOutOfRangeException("pixelFormat", pixelFormat, "Not supported");
    }
}