/// <summary>
/// Lays out <paramref name="text"/> with DirectWrite, renders it to a WIC bitmap
/// and encodes it into <paramref name="systemStream"/> in the requested format.
/// The measured pixel size of the rendered text is returned via out parameters.
/// </summary>
public void Save(Stream systemStream, Direct2DImageFormat format, string text, string faceName, float fontSize, out int width, out int height)
{
#if BENCHMARK
    using (var handler = Benchmark.Instance.Start("DirectWrite", "Save"))
#endif
    // FIX: the TextFormat and the WICStream were previously created inline and
    // never disposed, leaking native DirectWrite/WIC resources on every call.
    using (var textFormat = new TextFormat(factoryManager.DwFactory, faceName, fontSize * 1.3f))
    using (var layout = new TextLayout(factoryManager.DwFactory, text, textFormat, 4000, 4000))
    {
        // The layout metrics determine the output bitmap size.
        width = (int)Math.Ceiling(layout.Metrics.WidthIncludingTrailingWhitespace);
        height = (int)Math.Ceiling(layout.Metrics.Height);
        using (var wicBitmap = new SharpDX.WIC.Bitmap(factoryManager.WicFactory, width, height, SharpDX.WIC.PixelFormat.Format32bppPRGBA, BitmapCreateCacheOption.CacheOnLoad))
        {
            var renderTargetProperties = new RenderTargetProperties(RenderTargetType.Default, new SharpDX.Direct2D1.PixelFormat(Format.R8G8B8A8_UNorm, SharpDX.Direct2D1.AlphaMode.Unknown), imageDpi, imageDpi, RenderTargetUsage.None, FeatureLevel.Level_DEFAULT);
            using (var renderTarget = new WicRenderTarget(factoryManager.D2DFactory, wicBitmap, renderTargetProperties))
            using (var brush = new SolidColorBrush(renderTarget, SharpDX.Color.White))
            using (var encoder = new BitmapEncoder(factoryManager.WicFactory, Direct2DConverter.ConvertImageFormat(format)))
            {
                renderTarget.BeginDraw();
                renderTarget.Clear(new Color4(1, 1, 1, 0)); // transparent background (alpha 0)
                renderTarget.DrawTextLayout(Vector2.Zero, layout, brush);
                renderTarget.EndDraw();
                using (var stream = new WICStream(factoryManager.WicFactory, systemStream))
                {
                    encoder.Initialize(stream);
                    using (var bitmapFrameEncode = new BitmapFrameEncode(encoder))
                    {
                        bitmapFrameEncode.Initialize();
                        bitmapFrameEncode.SetSize(width, height);
                        bitmapFrameEncode.WriteSource(wicBitmap);
                        bitmapFrameEncode.Commit();
                    }
                    encoder.Commit();
                }
            }
        }
    }
}
/// <summary>
/// Saves the wic texture to bitmap stream.
/// </summary>
/// <param name="deviceResource">The device resource.</param>
/// <param name="source">The source.</param>
/// <param name="bitmapStream">The bitmap stream.</param>
/// <returns>True when the texture was captured and encoded successfully.</returns>
/// <exception cref="System.NotSupportedException">Thrown when the texture format has no WIC pixel-format mapping.</exception>
public static bool SaveWICTextureToBitmapStream(IDeviceResources deviceResource, Texture2D source, System.IO.MemoryStream bitmapStream)
{
    // Pull the GPU texture into a CPU-readable staging copy first.
    Texture2D stagingTexture;
    if (!CaptureTexture(deviceResource.Device.ImmediateContext, source, out stagingTexture))
    {
        Disposer.RemoveAndDispose(ref stagingTexture);
        return false;
    }

    var description = stagingTexture.Description;
    var isSRGB = false;
    var pixelFormatGuid = GetPfGuid(description.Format, ref isSRGB);
    if (pixelFormatGuid == Guid.Empty)
    {
        // No WIC equivalent for this DXGI format; clean up before throwing.
        Disposer.RemoveAndDispose(ref stagingTexture);
        throw new NotSupportedException($"Format: {description.Format} does not support yet.");
    }

    try
    {
        // Encode the staging copy as BMP into the caller-supplied memory stream.
        using (var wicStream = new WICStream(deviceResource.WICImgFactory, bitmapStream))
        {
            return CopyTextureToWICStream(deviceResource, stagingTexture, wicStream, pixelFormatGuid, BitmapExtensions.ToWICImageFormat(Direct2DImageFormat.Bmp));
        }
    }
    finally
    {
        // The staging copy is always released, success or failure.
        Disposer.RemoveAndDispose(ref stagingTexture);
    }
}
/// <summary>
/// Saves a Direct2D bitmap to a PNG file.
/// </summary>
/// <param name="image">The bitmap to save.</param>
/// <param name="fileName">The destination file name.</param>
public void SaveBitmapToFile(Bitmap image, string fileName)
{
    using (var factory = new ImagingFactory2())
    using (var stream = new WICStream(factory, fileName, NativeFileAccess.Write))
    using (var encoder = new PngBitmapEncoder(factory))
    {
        encoder.Initialize(stream);
        using (var frameEncode = new BitmapFrameEncode(encoder))
        {
            frameEncode.Initialize();
            var pixelWidth = image.PixelSize.Width;
            var pixelHeight = image.PixelSize.Height;
            frameEncode.SetSize(pixelWidth, pixelHeight);
            // SetPixelFormat takes the guid by ref and may rewrite it with the
            // closest supported format, so it must be a local.
            var pixelFormatGuid = WICPixelFormat;
            frameEncode.SetPixelFormat(ref pixelFormatGuid);
            // ImageEncoder bridges the D2D device image into the WIC frame.
            using (var imageEncoder = new ImageEncoder(factory, this.d2DDevice))
            {
                imageEncoder.WriteFrame(image, frameEncode, new ImageParameters(D2PixelFormat, 96, 96, 0, 0, pixelWidth, pixelHeight));
                frameEncode.Commit();
                encoder.Commit();
            }
        }
    }
}
// Continuously pulls the latest JPEG color frame from the camera, decodes it
// into nextColorData (24 bit BGR expanded to 32 bpp BGR) and pushes it to the
// GPU texture. Runs forever on its own thread.
void ColorCameraLoop()
{
    while (true)
    {
        var encodedColorData = camera.Client.LatestJPEGImage();

        // FIX: the WIC objects were disposed manually at the end of the loop
        // body, so any exception while decoding leaked all of them; using
        // blocks make the cleanup exception-safe.
        using (var memoryStream = new MemoryStream(encodedColorData))
        using (var stream = new WICStream(imagingFactory, memoryStream))
        // decodes to 24 bit BGR
        using (var decoder = new SharpDX.WIC.BitmapDecoder(imagingFactory, stream, SharpDX.WIC.DecodeOptions.CacheOnLoad))
        using (var bitmapFrameDecode = decoder.GetFrame(0))
        using (var formatConverter = new FormatConverter(imagingFactory))
        {
            // convert to 32 bpp
            formatConverter.Initialize(bitmapFrameDecode, SharpDX.WIC.PixelFormat.Format32bppBGR);
            formatConverter.CopyPixels(nextColorData, Kinect2Calibration.colorImageWidth * 4);
        }

        // TODO: consider copying directly to texture native memory
        //lock (colorData)
        //    Swap<byte[]>(ref colorData, ref nextColorData);
        lock (renderLock) // necessary?
        {
            UpdateColorImage(device.ImmediateContext, nextColorData);
        }
    }
}
/// <summary>
/// Encodes a WIC bitmap into a new <see cref="MemoryStream"/> using the given
/// image container format. Returns null when <paramref name="bitmap"/> is null.
/// </summary>
public static MemoryStream ToMemoryStream(this global::SharpDX.WIC.Bitmap bitmap, IDevice2DResources deviceResources, Direct2DImageFormat imageType = Direct2DImageFormat.Bmp)
{
    if (bitmap == null)
    {
        return null;
    }

    var outputStream = new MemoryStream();
    using (var wicStream = new WICStream(deviceResources.WICImgFactory, outputStream))
    using (var encoder = new BitmapEncoder(deviceResources.WICImgFactory, imageType.ToWICImageFormat()))
    {
        encoder.Initialize(wicStream);
        using (var frameEncoder = new BitmapFrameEncode(encoder))
        {
            frameEncoder.Initialize();
            frameEncoder.SetSize(bitmap.Size.Width, bitmap.Size.Height);
            frameEncoder.WriteSource(bitmap);
            frameEncoder.Commit();
            encoder.Commit();
        }
    }
    // The stream now holds the fully committed image bytes.
    return outputStream;
}
/// <summary>
/// Finishes the current draw and encodes the backing WIC bitmap into
/// <paramref name="systemStream"/> using the requested container format.
/// </summary>
public void Save(Stream systemStream, Direct2DImageFormat format)
{
    // Flush any pending drawing before encoding.
    renderTarget.EndDraw();

    // FIX: the stream, encoder and frame encoder were created without
    // using/try-finally, so any exception in the encode path leaked them all;
    // the catch around Commit also swallowed the error into a dead local.
    using (var stream = new WICStream(factoryManager.WicFactory, systemStream))
    using (var encoder = new BitmapEncoder(factoryManager.WicFactory, Direct2DConverter.ConvertImageFormat(format)))
    {
        encoder.Initialize(stream);
        using (var bitmapFrameEncode = new BitmapFrameEncode(encoder))
        {
            bitmapFrameEncode.Initialize();
            bitmapFrameEncode.SetSize(imageWidth, imageHeight);
            // Let WIC pick the closest supported pixel format.
            Guid fdc = SharpDX.WIC.PixelFormat.FormatDontCare;
            bitmapFrameEncode.SetPixelFormat(ref fdc);
            bitmapFrameEncode.WriteSource(wicBitmap);
            bitmapFrameEncode.Commit();
            try
            {
                encoder.Commit();
            }
            catch (Exception ex)
            {
                // Preserve the original best-effort behavior (a failed commit
                // is swallowed), but surface the error for debugging instead of
                // assigning the message to an unused local.
                System.Diagnostics.Debug.WriteLine(ex.Message);
            }
        }
    }
}
// Decodes a bitmap from a pooled DataStream into an unmanaged 32bpp BGRA
// pixel buffer (FBuffer), recording Width/Height/FStride/FLength for the
// texture upload that follows.
public BitmapFrameDecoder(Func <Device, int, int, int, Format, Usage, Texture> textureFactory, MemoryPool memoryPool, Stream stream) : base(textureFactory, memoryPool)
{
    // FIX: the original dereferenced the result of "stream as DataStream"
    // without a null check, throwing an uninformative NullReferenceException
    // for any other Stream type; fail fast with a clear message instead.
    var dataStream = stream as SharpDX.DataStream;
    if (dataStream == null)
    {
        throw new ArgumentException("Stream must be a SharpDX.DataStream.", nameof(stream));
    }

    using (var wicStream = new WICStream(Factory, new SharpDX.DataPointer(dataStream.DataPointer, (int)dataStream.Length)))
    using (var decoder = new BitmapDecoder(Factory, wicStream, DecodeOptions.CacheOnLoad))
    using (var frame = decoder.GetFrame(0))
    {
        var dstPixelFormat = PixelFormat.Format32bppBGRA;
        Width = frame.Size.Width;
        Height = frame.Size.Height;
        FStride = PixelFormat.GetStride(dstPixelFormat, Width);
        FLength = FStride * Height;
        FBuffer = memoryPool.UnmanagedPool.GetMemory(FLength);

        if (frame.PixelFormat != dstPixelFormat)
        {
            // Source layout differs from BGRA: convert while copying.
            using (var converter = new FormatConverter(Factory))
            {
                converter.Initialize(frame, dstPixelFormat);
                converter.CopyPixels(FStride, FBuffer);
            }
        }
        else
        {
            frame.CopyPixels(FStride, FBuffer);
        }
    }
    // The pixel data now lives in FBuffer; return the stream to its pool.
    memoryPool.StreamPool.PutStream(stream);
}
// Used for debugging purposes: dumps the current render target bitmap to a
// PNG file.
private void SaveToFile(string fileName)
{
    // FIX: the PNG encoder and frame encoder were never disposed (only the
    // stream was); all three native objects are now in using blocks. The
    // unused encodingFormat local was removed.
    using (var pStream = new WICStream(FactoryImaging, fileName, SharpDX.IO.NativeFileAccess.Write))
    using (var encoder = new PngBitmapEncoder(FactoryImaging, pStream))
    using (var pFrameEncode = new BitmapFrameEncode(encoder))
    {
        // Let WIC pick the closest supported pixel format.
        //var format = SharpDX.WIC.PixelFormat.Format32bppPRGBA;
        var format = SharpDX.WIC.PixelFormat.FormatDontCare;
        pFrameEncode.Initialize();
        pFrameEncode.SetSize((int)_renderTarget.Size.Width, (int)_renderTarget.Size.Height);
        pFrameEncode.SetPixelFormat(ref format);
        pFrameEncode.WriteSource(_bitmap);
        pFrameEncode.Commit();
        encoder.Commit();
    }
}
/// <summary>
/// Decodes a PNG stream into a 32bpp BGRA WIC bitmap suitable for Direct2D.
/// </summary>
public static Bitmap LoadPngFromStream(Stream stream)
{
    // FIX: the original constructed an empty Bitmap from just width/height and
    // never copied the decoded pixels into it, so a blank image was returned;
    // building the Bitmap directly from the format converter preserves the
    // decoded content. The FormatConverter (and other WIC objects) were also
    // not reliably disposed.
    using (var wicStream = new WICStream(DXGraphicsService.FactoryImaging, stream))
    using (var decoder = new PngBitmapDecoder(DXGraphicsService.FactoryImaging))
    {
        decoder.Initialize(wicStream, DecodeOptions.CacheOnDemand);
        using (var bitmapFrameDecode = decoder.GetFrame(0))
        using (var formatConverter = new FormatConverter(DXGraphicsService.FactoryImaging))
        {
            // Convert the WIC pixel format to a D2D1-compatible one.
            formatConverter.Initialize(bitmapFrameDecode, PixelFormat.Format32bppBGRA, BitmapDitherType.None, null, 0f, BitmapPaletteType.MedianCut);
            // CacheOnLoad pulls the pixels in immediately, so all source
            // objects can be disposed before the bitmap is returned.
            return new Bitmap(DXGraphicsService.FactoryImaging, formatConverter, BitmapCreateCacheOption.CacheOnLoad);
        }
    }
}
// Draws a rounded rectangle into an off-screen WIC bitmap via Direct2D,
// encodes it to output.jpg and opens the file with the shell.
private static void Main()
{
    string filename = "output.jpg";
    const int width = 512;
    const int height = 512;

    // FIX: none of the native SharpDX objects (factories, geometry, bitmap,
    // render target, brush) were ever disposed; everything disposable is now
    // in using blocks.
    using (var wicFactory = new ImagingFactory())
    using (var d2dFactory = new SharpDX.Direct2D1.Factory())
    using (var rectangleGeometry = new RoundedRectangleGeometry(d2dFactory, new RoundedRectangle() { RadiusX = 32, RadiusY = 32, Rect = new RectangleF(128, 128, width - 128 * 2, height - 128 * 2) }))
    using (var wicBitmap = new Bitmap(wicFactory, width, height, SharpDX.WIC.PixelFormat.Format32bppBGR, BitmapCreateCacheOption.CacheOnLoad))
    {
        var renderTargetProperties = new RenderTargetProperties(RenderTargetType.Default, new PixelFormat(Format.Unknown, AlphaMode.Unknown), 0, 0, RenderTargetUsage.None, FeatureLevel.Level_DEFAULT);
        using (var d2dRenderTarget = new WicRenderTarget(d2dFactory, wicBitmap, renderTargetProperties))
        using (var solidColorBrush = new SolidColorBrush(d2dRenderTarget, Color.White))
        {
            d2dRenderTarget.BeginDraw();
            d2dRenderTarget.Clear(Color.Black);
            d2dRenderTarget.FillGeometry(rectangleGeometry, solidColorBrush, null);
            d2dRenderTarget.EndDraw();
        }

        if (File.Exists(filename))
        {
            File.Delete(filename);
        }

        using (var stream = new WICStream(wicFactory, filename, NativeFileAccess.Write))
        using (var encoder = new JpegBitmapEncoder(wicFactory))
        {
            // Initialize a Jpeg encoder with this stream
            encoder.Initialize(stream);
            // Create a Frame encoder
            using (var bitmapFrameEncode = new BitmapFrameEncode(encoder))
            {
                bitmapFrameEncode.Initialize();
                bitmapFrameEncode.SetSize(width, height);
                var pixelFormatGuid = SharpDX.WIC.PixelFormat.FormatDontCare;
                bitmapFrameEncode.SetPixelFormat(ref pixelFormatGuid);
                bitmapFrameEncode.WriteSource(wicBitmap);
                bitmapFrameEncode.Commit();
            }
            encoder.Commit();
        }
    }

    System.Diagnostics.Process.Start(Path.GetFullPath(Path.Combine(Environment.CurrentDirectory, filename)));
}
/// <summary>
/// Renders a WPF <see cref="FrameworkElement"/> into an off-screen WIC bitmap
/// and returns the PNG-encoded result as a rewound <see cref="MemoryStream"/>.
/// </summary>
public MemoryStream RenderToPngStream(FrameworkElement fe)
{
    var width = (int)Math.Ceiling(fe.ActualWidth);
    var height = (int)Math.Ceiling(fe.ActualHeight);

    // pixel format with transparency/alpha channel and RGB values premultiplied by alpha
    var pixelFormat = WicPixelFormat.Format32bppPRGBA;
    // pixel format without transparency, but one that works with Cleartype antialiasing
    //var pixelFormat = WicPixelFormat.Format32bppBGR;

    // FIX: the bitmap, render target, stream, encoder and frame encoder were
    // never disposed (or only on the success path); all native objects are now
    // in using blocks so cleanup is exception-safe.
    using (var wicBitmap = new Bitmap(this.WicFactory, width, height, pixelFormat, BitmapCreateCacheOption.CacheOnLoad))
    {
        var renderTargetProperties = new RenderTargetProperties(
            RenderTargetType.Default,
            new D2DPixelFormat(Format.R8G8B8A8_UNorm, AlphaMode.Premultiplied),
            //new D2DPixelFormat(Format.Unknown, AlphaMode.Unknown), // use this for non-alpha, cleartype antialiased text
            0, 0,
            RenderTargetUsage.None,
            FeatureLevel.Level_DEFAULT);
        using (var renderTarget = new WicRenderTarget(this.D2DFactory, wicBitmap, renderTargetProperties)
        {
            //TextAntialiasMode = TextAntialiasMode.Cleartype // this only works with the pixel format with no alpha channel
            TextAntialiasMode = TextAntialiasMode.Grayscale // this is the best we can do for bitmaps with alpha channels
        })
        {
            Compose(renderTarget, fe);

            // TODO: There is no need to encode the bitmap to PNG - we could just copy the texture pixel buffer to a WriteableBitmap pixel buffer.
            var ms = new MemoryStream();
            using (var stream = new WICStream(this.WicFactory, ms))
            using (var encoder = new PngBitmapEncoder(WicFactory))
            {
                encoder.Initialize(stream);
                using (var frameEncoder = new BitmapFrameEncode(encoder))
                {
                    frameEncoder.Initialize();
                    frameEncoder.SetSize(width, height);
                    var format = WicPixelFormat.Format32bppBGRA;
                    //var format = WicPixelFormat.FormatDontCare;
                    frameEncoder.SetPixelFormat(ref format);
                    frameEncoder.WriteSource(wicBitmap);
                    frameEncoder.Commit();
                }
                encoder.Commit();
            }
            // Rewind so callers can read the PNG from the start.
            ms.Position = 0;
            return ms;
        }
    }
}
// Reads the pixel dimensions of the first frame of an encoded image without
// fully decoding it, then rewinds the stream for the caller.
public override Size GetTextureSize(Stream stream)
{
    using var wicStream = new WICStream(_wicFactory, stream);
    using var decoder = new BitmapDecoder(_wicFactory, wicStream, DecodeOptions.CacheOnDemand);
    // Do NOT dispose the frame as it might lead to a crash.
    // Seems like it's owned by the decoder, so hopefully there should be no leaks.
    BitmapFrameDecode frame = decoder.GetFrame(0);
    var frameSize = frame.Size;
    // Rewind so the same stream can be decoded again later.
    stream.Seek(0, SeekOrigin.Begin);
    return new Size((uint)frameSize.Width, (uint)frameSize.Height);
}
/// <summary>
/// Creates a texture resource from a <see cref="System.IO.Stream"/> (non-DDS).
/// The container format is sniffed from the file signature (JPEG/PNG/BMP/GIF);
/// unrecognized or out-of-range streams leave the out parameters null.
/// </summary>
/// <param name="device">D3D Device</param>
/// <param name="stream">Source image stream.</param>
/// <param name="texture">Receives the created texture, or null on failure.</param>
/// <param name="textureView">Receives the created view, or null on failure.</param>
/// <param name="d3dContext">If a Direct3D 11 device context is provided and the current device supports it for the given pixel format, it will auto-generate mipmaps.</param>
public static void CreateWICTextureFromStream(Device device, Stream stream, out Resource texture, out ShaderResourceView textureView, DeviceContext d3dContext = null)
{
    texture = null;
    textureView = null;
    if (!stream.CanRead)
    {
        return;
    }
    // FIX: when this length guard failed, the original fell through and used
    // an unassigned containerFormatGuid; bail out early instead.
    // Reject streams >= 100 MiB or too short to hold a signature.
    if (stream.Length >= 104857600 || stream.Length < 8)
    {
        return;
    }

    var temp = new byte[8];
    stream.Read(temp, 0, 8);
    stream.Seek(0, SeekOrigin.Begin);
    // https://en.wikipedia.org/wiki/List_of_file_signatures
    Guid containerFormatGuid;
    if (temp[0] == 0xFF && temp[1] == 0xD8 && temp[2] == 0xFF)
    {
        containerFormatGuid = ContainerFormatGuids.Jpeg;
    }
    else if (temp[0] == 0x89 && temp[1] == 0x50 && temp[2] == 0x4E && temp[3] == 0x47 && temp[4] == 0x0D && temp[5] == 0x0A && temp[6] == 0x1A && temp[7] == 0x0A)
    {
        containerFormatGuid = ContainerFormatGuids.Png;
    }
    else if (temp[0] == 0x42 && temp[1] == 0x4D)
    {
        containerFormatGuid = ContainerFormatGuids.Bmp;
    }
    else if (temp[0] == 0x47 && temp[1] == 0x49 && temp[2] == 0x46 && temp[3] == 0x38 && (temp[4] == 0x37 || temp[4] == 0x39) && temp[5] == 0x61)
    {
        containerFormatGuid = ContainerFormatGuids.Gif;
    }
    else
    {
        // Unknown signature: not a supported container.
        return;
    }

    using (var decoder = new BitmapDecoder(ImagingFactory, containerFormatGuid))
    using (var wicstream = new WICStream(ImagingFactory, stream))
    {
        try
        {
            decoder.Initialize(wicstream, DecodeOptions.CacheOnDemand);
            using (var frame = decoder.GetFrame(0))
            {
                CreateWICTexture(device, d3dContext, frame, 0, ResourceUsage.Default, BindFlags.ShaderResource, CpuAccessFlags.None, ResourceOptionFlags.None, LoadFlags.Default, out texture, out textureView);
            }
        }
        catch (SharpDXException e)
        {
            // Best-effort: decoding failures leave texture/textureView null.
            System.Diagnostics.Debug.WriteLine(e.ToString());
        }
    }
}
// Decodes a bitmap from a pooled stream into an unmanaged pixel buffer
// (FBuffer), choosing a texture format compatible with the preferred one.
// NOTE(review): assumes `stream` is a SharpDX.DataStream — any other Stream
// makes `dataStream` null and the next dereference throws NRE; confirm callers
// only ever pass DataStream instances.
public BitmapFrameDecoder(Func <Device, int, int, int, Format, Usage, Texture> textureFactory, MemoryPool memoryPool, Stream stream, Format preferedFormat) : base(textureFactory, memoryPool, preferedFormat)
{
    var dataStream = stream as SharpDX.DataStream;
    using (var wicStream = new WICStream(Factory, new SharpDX.DataPointer(dataStream.DataPointer, (int)dataStream.Length)))
    using (var decoder = new BitmapDecoder(Factory, wicStream, DecodeOptions.CacheOnLoad))
    using (var frame = decoder.GetFrame(0))
    {
        // Choose pixel format from file
        if (preferedFormat == Format.Unknown)
        {
            try
            {
                FChosenFormat = PixelToTextureFormat(frame.PixelFormat);
            }
            catch (NotSupportedException)
            {
                // The format as given by the file is not supported by DirectX
                FChosenFormat = Format.A8R8G8B8;
            }
        }
        // Downgrade to a supported format if the chosen/preferred one is not.
        FChosenFormat = GetFallbackFormatIfPreferedFormatIsNotSupported(FChosenFormat);
        var chosenPixelFormat = TextureToPixelFormat(FChosenFormat);
        Width = frame.Size.Width;
        Height = frame.Size.Height;
        FStride = PixelFormat.GetStride(chosenPixelFormat, Width);
        var length = FStride * Height;
        FBuffer = memoryPool.UnmanagedPool.GetMemory(length);
        if (frame.PixelFormat != chosenPixelFormat)
        {
            // Source layout differs from the chosen one: convert while copying.
            using (var converter = new FormatConverter(Factory))
            {
                converter.Initialize(frame, chosenPixelFormat);
                converter.CopyPixels(FStride, FBuffer);
            }
        }
        else
        {
            frame.CopyPixels(FStride, FBuffer);
        }
    }
    // The pixel data now lives in FBuffer; return the stream to its pool.
    memoryPool.StreamPool.PutStream(stream);
}
/// <summary>
/// Encodes the bitmap as a JPEG (24bpp RGB, quality 0.8) into the given
/// stream. Width/height default to the bitmap's own dimensions when &lt;= 0.
/// </summary>
public static void WriteJpegToStream(this Bitmap bitmap, Stream stream, int width = -1, int height = -1)
{
    if (width <= 0)
    {
        width = bitmap.Size.Width;
    }
    if (height <= 0)
    {
        height = bitmap.Size.Height;
    }

    // ------------------------------------------------------
    // Encode a JPEG image
    // ------------------------------------------------------
    // FIX: disposal previously happened only on the success path; using blocks
    // make the cleanup exception-safe.
    using (var wicStream = new WICStream(DXGraphicsService.FactoryImaging, stream))
    using (var encoder = new JpegBitmapEncoder(DXGraphicsService.FactoryImaging))
    {
        encoder.Initialize(wicStream);
        using (var bitmapFrameEncode = new BitmapFrameEncode(encoder))
        {
            bitmapFrameEncode.Options.CompressionQuality = .8f;
            bitmapFrameEncode.Initialize();
            bitmapFrameEncode.SetSize(width, height);
            var guid = PixelFormat.Format24bppRGB;
            bitmapFrameEncode.SetPixelFormat(ref guid);
            bitmapFrameEncode.WriteSource(bitmap);
            // Commit changes
            bitmapFrameEncode.Commit();
            encoder.Commit();
            // Release the native property bag backing Options.
            bitmapFrameEncode.Options.Dispose();
        }
    }
}
// Decodes the first frame of an in-memory image into WICBitmapSource. Pins the
// MemoryStream's backing buffer so WIC can read it without copying.
public BblBitmapSource(MemoryStream stream)
{
    try
    {
        using (var fac = new ImagingFactory())
        {
            // Pin the stream's internal buffer for the native decoder.
            // NOTE(review): _pinnedArray is not freed in this constructor —
            // presumably released in Dispose elsewhere; verify, otherwise the
            // buffer stays pinned for the object's lifetime.
            _pinnedArray = GCHandle.Alloc(stream.GetBuffer(), GCHandleType.Pinned);
            IntPtr pointer = _pinnedArray.AddrOfPinnedObject();
            SharpDX.DataPointer p = new SharpDX.DataPointer(pointer, (int)stream.Length);
            using (WICStream wstream = new WICStream(fac, p))
            {
                // Guid.Empty = no preferred vendor; the container format is
                // sniffed from the data.
                using (SharpDX.WIC.BitmapDecoder dec = new SharpDX.WIC.BitmapDecoder(fac, wstream, Guid.Empty, DecodeOptions.CacheOnDemand))
                {
                    // NOTE(review): the frame outlives the disposed decoder,
                    // stream and factory, and CacheOnDemand may not have pulled
                    // the pixels yet — confirm this usage is safe.
                    WICBitmapSource = dec.GetFrame(0);
                }
            }
        }
    }
    catch (Exception e)
    {
        // Best-effort: construction failures leave WICBitmapSource null.
        Console.WriteLine(e.Message);
    }
}
// Decodes an image stream into a freshly created CPU-staging texture
// (R8G8B8A8_UNorm), converting the pixels to 32bpp RGBA on the way in.
protected override Texture LoadStaging(Stream stream)
{
    using var wicStream = new WICStream(_wicFactory, stream);
    using var decoder = new BitmapDecoder(_wicFactory, wicStream, DecodeOptions.CacheOnDemand);
    using var formatConv = new FormatConverter(_wicFactory);
    // Do NOT dispose the frame as it might lead to a crash.
    // Seems like it's owned by the decoder, so hopefully there should be no leaks.
    BitmapFrameDecode frame = decoder.GetFrame(0);
    formatConv.Initialize(frame, SharpDX.WIC.PixelFormat.Format32bppRGBA);
    uint width = (uint)frame.Size.Width;
    uint height = (uint)frame.Size.Height;
    Texture stagingTexture = _rf.CreateTexture(TextureDescription.Texture2D(
        width, height, mipLevels: 1, arrayLayers: 1,
        PixelFormat.R8_G8_B8_A8_UNorm, TextureUsage.Staging
    ));

    MappedResource map = _gd.Map(stagingTexture, MapMode.Write);
    // Tightly packed row size of the decoded image (4 bytes per pixel).
    uint rowWidth = width * 4;
    if (rowWidth == map.RowPitch)
    {
        // The mapped texture has no row padding: copy everything in one call.
        formatConv.CopyPixels((int)map.RowPitch, map.Data, (int)map.SizeInBytes);
    }
    else
    {
        // Row pitch differs (driver padding): copy one scanline at a time to
        // the start of each mapped row.
        for (uint y = 0; y < height; y++)
        {
            byte *dstStart = (byte *)map.Data + y * map.RowPitch;
            formatConv.CopyPixels(
                new RawBox(x: 0, (int)y, (int)width, height: 1),
                (int)map.RowPitch,
                new SharpDX.DataPointer(dstStart, (int)map.RowPitch)
            );
        }
    }
    _gd.Unmap(stagingTexture);
    return(stagingTexture);
}
/// <summary>
/// Runs the supplied drawing callback against an off-screen WIC render target
/// and returns the encoded image as a <see cref="MemoryStream"/>.
/// </summary>
public static MemoryStream CreateBitmapStream(IDevice2DResources deviceResources, int width, int height, Direct2DImageFormat imageType, Action <RenderTarget> drawingAction)
{
    using (var wicBitmap = new global::SharpDX.WIC.Bitmap(deviceResources.WICImgFactory, (int)width, (int)height, global::SharpDX.WIC.PixelFormat.Format32bppBGR, BitmapCreateCacheOption.CacheOnDemand))
    {
        var targetProperties = new RenderTargetProperties()
        {
            DpiX = 96,
            DpiY = 96,
            MinLevel = FeatureLevel.Level_DEFAULT,
            PixelFormat = new global::SharpDX.Direct2D1.PixelFormat(global::SharpDX.DXGI.Format.Unknown, AlphaMode.Unknown)
        };
        // Let the caller draw into the bitmap through a D2D render target.
        using (var renderTarget = new WicRenderTarget(deviceResources.Factory2D, wicBitmap, targetProperties))
        {
            renderTarget.Transform = Matrix3x2.Identity;
            renderTarget.BeginDraw();
            drawingAction(renderTarget);
            renderTarget.EndDraw();
        }

        // Encode the drawn bitmap into an in-memory stream.
        var outputStream = new MemoryStream();
        using (var wicStream = new WICStream(deviceResources.WICImgFactory, outputStream))
        using (var encoder = new BitmapEncoder(deviceResources.WICImgFactory, imageType.ToWICImageFormat()))
        {
            encoder.Initialize(wicStream);
            using (var frameEncoder = new BitmapFrameEncode(encoder))
            {
                frameEncoder.Initialize();
                frameEncoder.SetSize((int)width, (int)height);
                frameEncoder.WriteSource(wicBitmap);
                frameEncoder.Commit();
                encoder.Commit();
            }
        }
        return outputStream;
    }
}
// Fetches the latest JPEG color frame from the Kinect server, decodes it into
// nextColorData (32 bpp BGR) and uploads it to the GPU texture. Connection
// failures disable the live feed rather than propagate.
private async Task UpdateColorTexture()
{
    try
    {
        var encodedColorData = await _camera.Client.LatestJPEGImageAsync();

        // FIX: the WIC objects were disposed manually at the end of the try
        // block, so any exception while decoding leaked all of them; using
        // blocks make the cleanup exception-safe.
        using (var memoryStream = new MemoryStream(encodedColorData))
        using (var stream = new WICStream(_imagingFactory, memoryStream))
        // decodes to 24 bit BGR
        using (var decoder = new SharpDX.WIC.BitmapDecoder(_imagingFactory, stream, SharpDX.WIC.DecodeOptions.CacheOnLoad))
        using (var bitmapFrameDecode = decoder.GetFrame(0))
        using (var formatConverter = new FormatConverter(_imagingFactory))
        {
            // convert to 32 bpp
            formatConverter.Initialize(bitmapFrameDecode, SharpDX.WIC.PixelFormat.Format32bppBGR);
            formatConverter.CopyPixels(nextColorData, RoomAliveToolkit.Kinect2Calibration.colorImageWidth * 4);
        }

        UpdateColorImage(GraphicsDevice, nextColorData);
    }
    catch (System.ServiceModel.EndpointNotFoundException)
    {
        // TODO Message
        LiveColor = false;
        Console.WriteLine("Could not connect to Kinect for live color. Start Kinect server.");
    }
    catch (System.ServiceModel.CommunicationException)
    {
        Console.WriteLine("Connection to Kinect server for live color was lost. Restart Kinect server and the application.");
        // NOTE(review): this sets LiveDepth although the failure is on the
        // color path — looks like it should be LiveColor; confirm before changing.
        LiveDepth = false;
    }
}
/// <summary>
/// Encodes the bitmap as a PNG (32bpp RGBA) into the given stream.
/// </summary>
public static void WritePngToStream(Bitmap bitmap, Stream stream)
{
    int width = bitmap.Size.Width;
    int height = bitmap.Size.Height;

    // ------------------------------------------------------
    // Encode a PNG image
    // ------------------------------------------------------
    // FIX: disposal previously happened only on the success path; using blocks
    // make the cleanup exception-safe.
    using (var wicStream = new WICStream(DXGraphicsService.FactoryImaging, stream))
    using (var encoder = new PngBitmapEncoder(DXGraphicsService.FactoryImaging))
    {
        encoder.Initialize(wicStream);
        using (var bitmapFrameEncode = new BitmapFrameEncode(encoder))
        {
            bitmapFrameEncode.Initialize();
            bitmapFrameEncode.SetSize(width, height);
            Guid guid = PixelFormat.Format32bppRGBA;
            bitmapFrameEncode.SetPixelFormat(ref guid);
            bitmapFrameEncode.WriteSource(bitmap);
            // Commit changes
            bitmapFrameEncode.Commit();
            encoder.Commit();
            // Release the native property bag backing Options.
            bitmapFrameEncode.Options.Dispose();
        }
    }
}
/// <summary>
/// Saves the wic texture to file.
/// </summary>
/// <param name="deviceResource">The device resource.</param>
/// <param name="source">The source.</param>
/// <param name="fileName">Name of the file.</param>
/// <param name="containerFormat">The container format.</param>
/// <returns>True when the texture was captured and encoded successfully.</returns>
/// <exception cref="System.NotSupportedException"></exception>
public static bool SaveWICTextureToFile(IDeviceResources deviceResource, Texture2D source, string fileName, Guid containerFormat)
{
    Texture2D staging;
    if (!CaptureTexture(deviceResource.Device.ImmediateContext, source, out staging))
    {
        return false;
    }
    var desc = staging.Description;
    var sRGB = false;
    var pfGuid = GetPfGuid(desc.Format, ref sRGB);
    if (pfGuid == Guid.Empty)
    {
        staging.Dispose();
        throw new NotSupportedException($"Format: {desc.Format} does not support yet.");
    }
    try
    {
        using (var stream = new WICStream(deviceResource.WICImgFactory, fileName, global::SharpDX.IO.NativeFileAccess.Write))
        {
            return CopyTextureToWICStream(deviceResource, staging, stream, pfGuid, containerFormat);
        }
    }
    finally
    {
        // FIX: the staging texture was leaked on the success path; dispose it
        // in a finally block like the sibling SaveWICTextureToBitmapStream does.
        staging.Dispose();
    }
}
// Decodes an in-memory image into a premultiplied 32bpp RGBA pixel buffer and
// reports its size. Despite the name the input is a JPEG stream.
internal SharpDX.DataStream CreateBitmapFromJpeg(MemoryStream stream, out SharpDX.Size2 size)
{
    using (var istream = new WICStream(factory, stream))
    // FIX: the original used PngBitmapDecoder to decode the JPEG input; the
    // container-agnostic BitmapDecoder sniffs the actual format from the data,
    // so JPEG (as the method name promises) and PNG both decode correctly.
    // The frame returned by GetFrame(0) was also never disposed.
    using (var decoder = new BitmapDecoder(factory, istream, DecodeOptions.CacheOnDemand))
    using (var frame = decoder.GetFrame(0))
    using (var formatConverter = new FormatConverter(factory))
    {
        formatConverter.Initialize(frame, PixelFormat.Format32bppPRGBA);
        // 4 bytes per pixel, tightly packed rows.
        var stride = formatConverter.Size.Width * 4;
        var dataStream = new SharpDX.DataStream(formatConverter.Size.Height * stride, true, true);
        formatConverter.CopyPixels(stride, dataStream);
        size = formatConverter.Size;
        return dataStream;
    }
}
//
// https://docs.microsoft.com/en-us/windows/desktop/direct3d11/overviews-direct3d-11-resources-textures-how-to
//
// Decodes an encoded image into a 32bpp BGRA buffer and hands it to the
// callback together with the matching DXGI format and dimensions. The buffer
// is only valid for the duration of the callback.
public static void LoadImage(ImageBytes image, Action <DataStream, Format, int, int> callback)
{
    var bytes = image.Bytes;
    using (var s = new MemoryStream(bytes.Array, bytes.Offset, bytes.Count, false))
    using (var factory = new ImagingFactory())
    using (var stream = new WICStream(factory, s))
    using (var decoder = GetDecoder(factory, image.Format))
    {
        decoder.Initialize(stream, DecodeOptions.CacheOnDemand);
        using (var frame = decoder.GetFrame(0))
        {
            var stride = frame.Size.Width * 4;
            using (var buffer = new DataStream(frame.Size.Height * stride, true, true))
            {
                if (frame.PixelFormat == PixelFormat.Format32bppBGRA)
                {
                    // Already in the target layout: copy directly.
                    frame.CopyPixels(stride, buffer);
                }
                else
                {
                    // FIX: the converter was never disposed, and it converted
                    // to Format32bppBGR while the callback was told the data
                    // was B8G8R8A8_UNorm, leaving the alpha byte undefined;
                    // convert to BGRA so the declared format matches the pixels.
                    using (var fc = new FormatConverter(factory))
                    {
                        fc.Initialize(frame, PixelFormat.Format32bppBGRA);
                        fc.CopyPixels(stride, buffer);
                    }
                }
                callback(buffer, Format.B8G8R8A8_UNorm, frame.Size.Width, frame.Size.Height);
            }
        }
    }
}
// Kinect color-frame handler: captures camera settings, then fans the frame
// out to whichever consumer groups (raw YUV / converted BGRA / encoded JPEG)
// currently have waiters registered, waking each group's AutoResetEvents.
void colorFrameReader_FrameArrived(object sender, ColorFrameArrivedEventArgs e)
{
    var colorFrame = e.FrameReference.AcquireFrame();
    if (colorFrame != null)
    {
        using (colorFrame)
        {
            lastColorGain = colorFrame.ColorCameraSettings.Gain;
            lastColorExposureTimeTicks = colorFrame.ColorCameraSettings.ExposureTime.Ticks;

            // Raw YUV consumers: copy under lock, then wake every waiter.
            if (yuvFrameReady.Count > 0)
            {
                lock (yuvByteBuffer)
                    colorFrame.CopyRawFrameDataToArray(yuvByteBuffer);
                lock (yuvFrameReady)
                    foreach (var autoResetEvent in yuvFrameReady) { autoResetEvent.Set(); }
            }

            // BGRA consumers; this buffer is also the source for JPEG encoding below.
            if ((rgbFrameReady.Count > 0) || (jpegFrameReady.Count > 0))
            {
                lock (rgbByteBuffer)
                    colorFrame.CopyConvertedFrameDataToArray(rgbByteBuffer, ColorImageFormat.Bgra);
                lock (rgbFrameReady)
                    foreach (var autoResetEvent in rgbFrameReady) { autoResetEvent.Set(); }
            }

            if (jpegFrameReady.Count > 0)
            {
                // should be put in a separate thread?
                stopWatch.Restart();

                // Copy the BGRA bytes into a WIC bitmap (declared 32bppBGR).
                // NOTE(review): rgbByteBuffer is read here without holding its
                // lock — a concurrent copy into it could tear the image.
                var bitmapSource = new Bitmap(imagingFactory, Kinect2Calibration.colorImageWidth, Kinect2Calibration.colorImageHeight, SharpDX.WIC.PixelFormat.Format32bppBGR, BitmapCreateCacheOption.CacheOnLoad);
                var bitmapLock = bitmapSource.Lock(BitmapLockFlags.Write);
                Marshal.Copy(rgbByteBuffer, 0, bitmapLock.Data.DataPointer, Kinect2Calibration.colorImageWidth * Kinect2Calibration.colorImageHeight * 4);
                bitmapLock.Dispose();

                // Encode the bitmap to JPEG (quality 0.5) into an in-memory stream.
                var memoryStream = new MemoryStream();
                //var fileStream = new FileStream("test" + frame++ + ".jpg", FileMode.Create);
                //var stream = new WICStream(imagingFactory, "test" + frame++ + ".jpg", SharpDX.IO.NativeFileAccess.Write);
                var stream = new WICStream(imagingFactory, memoryStream);
                var jpegBitmapEncoder = new JpegBitmapEncoder(imagingFactory);
                jpegBitmapEncoder.Initialize(stream);
                var bitmapFrameEncode = new BitmapFrameEncode(jpegBitmapEncoder);
                bitmapFrameEncode.Options.ImageQuality = 0.5f;
                bitmapFrameEncode.Initialize();
                bitmapFrameEncode.SetSize(Kinect2Calibration.colorImageWidth, Kinect2Calibration.colorImageHeight);
                var pixelFormatGuid = PixelFormat.FormatDontCare;
                bitmapFrameEncode.SetPixelFormat(ref pixelFormatGuid);
                bitmapFrameEncode.WriteSource(bitmapSource);
                bitmapFrameEncode.Commit();
                jpegBitmapEncoder.Commit();
                //fileStream.Close();
                //fileStream.Dispose();
                //Console.WriteLine(stopWatch.ElapsedMilliseconds + "ms " + memoryStream.Length + " bytes");

                // Publish the encoded bytes, then wake the JPEG waiters.
                lock (jpegByteBuffer)
                {
                    nJpegBytes = (int)memoryStream.Length;
                    memoryStream.Seek(0, SeekOrigin.Begin);
                    memoryStream.Read(jpegByteBuffer, 0, nJpegBytes);
                }
                lock (jpegFrameReady)
                    foreach (var autoResetEvent in jpegFrameReady) { autoResetEvent.Set(); }

                //var file = new FileStream("test" + frame++ + ".jpg", FileMode.Create);
                //file.Write(jpegByteBuffer, 0, nJpegBytes);
                //file.Close();

                // NOTE(review): these disposals are skipped if encoding throws —
                // consider using blocks.
                bitmapSource.Dispose();
                memoryStream.Close();
                memoryStream.Dispose();
                stream.Dispose();
                jpegBitmapEncoder.Dispose();
                bitmapFrameEncode.Dispose();
            }
        }
    }
}
// Encodes a CPU-staging texture into the given WIC stream using the requested
// container format. pfGuid describes the staging texture's pixel layout; a WIC
// format converter bridges to the encoder's target format when they differ.
private static bool CopyTextureToWICStream(IDeviceResources deviceResource, Texture2D staging, WICStream stream, Guid pfGuid, Guid containerFormat)
{
    using (var encoder = new BitmapEncoder(deviceResource.WICImgFactory, containerFormat))
    {
        var desc = staging.Description;
        encoder.Initialize(stream);
        var targetGuid = Guid.Empty;
        using (var frame = new BitmapFrameEncode(encoder))
        {
            frame.Initialize();
            frame.SetSize(desc.Width, desc.Height);
            frame.SetResolution(72, 72);
            // Map the DXGI source format to the pixel format written to the file.
            // NOTE(review): several of these mappings look questionable — e.g.
            // 16F RGBA mapped to 96bpp RGB float and single-channel formats
            // mapped to 48bpp BGR — confirm they are intentional.
            switch (desc.Format)
            {
                case global::SharpDX.DXGI.Format.R32G32B32A32_Float:
                case global::SharpDX.DXGI.Format.R16G16B16A16_Float:
                    targetGuid = PixelFormat.Format96bppRGBFloat;
                    break;
                case global::SharpDX.DXGI.Format.R16G16B16A16_UNorm:
                    targetGuid = PixelFormat.Format48bppBGR;
                    break;
                case global::SharpDX.DXGI.Format.R32_Float:
                case global::SharpDX.DXGI.Format.R16_Float:
                case global::SharpDX.DXGI.Format.R16_UNorm:
                case global::SharpDX.DXGI.Format.R8_UNorm:
                case global::SharpDX.DXGI.Format.A8_UNorm:
                    targetGuid = PixelFormat.Format48bppBGR;
                    break;
                default:
                    targetGuid = PixelFormat.Format24bppBGR;
                    break;
            }
            // The encoder may rewrite targetGuid with the closest supported format.
            frame.SetPixelFormat(ref targetGuid);
            // Map the staging texture so WIC can read the raw rows directly.
            var databox = deviceResource.Device.ImmediateContext.MapSubresource(staging, 0, MapMode.Read, MapFlags.None);
            try
            {
                if (targetGuid != pfGuid)
                {
                    // Wrap the mapped memory in a WIC bitmap and convert it to
                    // the target format before writing the frame.
                    using (var bitmap = new Bitmap(deviceResource.WICImgFactory, desc.Width, desc.Height, pfGuid, new global::SharpDX.DataRectangle(databox.DataPointer, databox.RowPitch)))
                    {
                        using (var converter = new FormatConverter(deviceResource.WICImgFactory))
                        {
                            if (converter.CanConvert(pfGuid, targetGuid))
                            {
                                converter.Initialize(bitmap, targetGuid, BitmapDitherType.None, null, 0, BitmapPaletteType.MedianCut);
                                frame.WriteSource(converter);
                            }
                            else
                            {
                                // NOTE(review): an unconvertible combination
                                // still commits an empty frame and returns true.
                                Debug.WriteLine("Cannot convert");
                            }
                        }
                    }
                }
                else
                {
                    // Formats already match: write the mapped rows directly.
                    frame.WritePixels(desc.Height, new global::SharpDX.DataRectangle(databox.DataPointer, databox.RowPitch), databox.RowPitch * desc.Height);
                }
            }
            finally
            {
                // The map must be released regardless of the encode outcome.
                deviceResource.Device.ImmediateContext.UnmapSubresource(staging, 0);
            }
            frame.Commit();
            encoder.Commit();
            return(true);
        }
    }
}
/// <summary>
/// Saves a GPU texture to <paramref name="stream"/> in the requested image file format by
/// copying it into a CPU-readable staging texture and encoding it through WIC.
/// </summary>
/// <param name="res">The source resource; must be a <see cref="Texture2D"/>.</param>
/// <param name="stream">Destination stream receiving the encoded image bytes.</param>
/// <param name="fmt">Target file format (PNG, JPG or BMP).</param>
private static void Save(Resource res, Stream stream, ImageFileFormat fmt)
{
    var texture = res as Texture2D;
    // FIX: every resource below used to leak when any intermediate call threw
    // (no using/try-finally), and the encoder was only disposed on the happy path.
    using (var textureCopy = new Texture2D(MyRender11.Device, new Texture2DDescription
    {
        Width = texture.Description.Width,
        Height = texture.Description.Height,
        MipLevels = 1,
        ArraySize = 1,
        Format = texture.Description.Format,
        Usage = ResourceUsage.Staging,
        SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0),
        BindFlags = BindFlags.None,
        CpuAccessFlags = CpuAccessFlags.Read,
        OptionFlags = ResourceOptionFlags.None
    }))
    {
        // Bring the pixels into the CPU-readable staging copy.
        RC.CopyResource(res, textureCopy);

        DataStream dataStream;
        var dataBox = RC.MapSubresource(
            textureCopy,
            0,
            0,
            MapMode.Read,
            MapFlags.None,
            out dataStream);
        try
        {
            var dataRectangle = new DataRectangle
            {
                DataPointer = dataStream.DataPointer,
                Pitch = dataBox.RowPitch
            };

            // The WIC bitmap aliases the mapped memory, so it must be used (and disposed)
            // before the subresource is unmapped in the finally below.
            using (var bitmap = new Bitmap(
                MyRender11.WIC,
                textureCopy.Description.Width,
                textureCopy.Description.Height,
                PixelFormatFromFormat(textureCopy.Description.Format), // TODO: should use some conversion from textureCopy.Description.Format
                dataRectangle))
            using (var wicStream = new WICStream(MyRender11.WIC, stream))
            {
                BitmapEncoder bitmapEncoder;
                switch (fmt)
                {
                    case ImageFileFormat.Png:
                        bitmapEncoder = new PngBitmapEncoder(MyRender11.WIC, wicStream);
                        break;
                    case ImageFileFormat.Jpg:
                        bitmapEncoder = new JpegBitmapEncoder(MyRender11.WIC, wicStream);
                        break;
                    case ImageFileFormat.Bmp:
                        bitmapEncoder = new BmpBitmapEncoder(MyRender11.WIC, wicStream);
                        break;
                    default:
                        MyRenderProxy.Assert(false, "Unsupported file format.");
                        bitmapEncoder = null;
                        break;
                }

                if (bitmapEncoder != null)
                {
                    try
                    {
                        using (var bitmapFrameEncode = new BitmapFrameEncode(bitmapEncoder))
                        {
                            bitmapFrameEncode.Initialize();
                            bitmapFrameEncode.SetSize(bitmap.Size.Width, bitmap.Size.Height);
                            // Let WIC pick the closest pixel format the encoder supports.
                            var pixelFormat = PixelFormat.FormatDontCare;
                            bitmapFrameEncode.SetPixelFormat(ref pixelFormat);
                            bitmapFrameEncode.WriteSource(bitmap);
                            bitmapFrameEncode.Commit();
                            bitmapEncoder.Commit();
                        }
                    }
                    finally
                    {
                        bitmapEncoder.Dispose();
                    }
                }
            }
        }
        finally
        {
            // Always release the CPU mapping, even if encoding failed.
            RC.UnmapSubresource(textureCopy, 0);
        }
    }
}
/// <summary>
/// Decodes an image from <paramref name="stream"/>, applies the EXIF orientation, scales it
/// down to fit within <paramref name="width"/> x <paramref name="height"/> and re-encodes it
/// as PNG (for PNG/alpha sources) or JPEG.
/// </summary>
/// <param name="stream">Source image stream; rewound first when seekable.</param>
/// <param name="width">Maximum thumbnail width; ignored unless both bounds are positive.</param>
/// <param name="height">Maximum thumbnail height; ignored unless both bounds are positive.</param>
/// <param name="cachedOnly">This provider has no cache, so true always yields false.</param>
/// <param name="imageData">Receives the encoded thumbnail bytes, or null on failure.</param>
/// <param name="imageType">Receives the output format (Png/Jpeg), or Unknown on failure.</param>
/// <returns>true when a thumbnail was produced; false on cache-only requests or any error.</returns>
public bool GetThumbnail(Stream stream, int width, int height, bool cachedOnly, out byte[] imageData, out ImageType imageType)
{
    imageData = null;
    imageType = ImageType.Unknown;
    // No support for cache
    if (cachedOnly)
    {
        return(false);
    }
    Bitmap cachedBitmap = null; // used only for rotation
    try
    {
        if (stream.CanSeek)
        {
            stream.Seek(0, SeekOrigin.Begin);
        }
        // open the image file for reading
        using (var factory = new ImagingFactory2())
        using (var inputStream = new WICStream(factory, stream))
        using (var decoder = new BitmapDecoder(factory, inputStream, DecodeOptions.CacheOnLoad))
        using (var rotator = new BitmapFlipRotator(factory))
        using (var scaler = new BitmapScaler(factory))
        using (var output = new MemoryStream())
        {
            // decode the loaded image to a format that can be consumed by D2D
            BitmapSource source = decoder.GetFrame(0);
            // Prefer PNG output for source PNG and for source formats with Alpha channel
            var usePngOutput = decoder.DecoderInfo.FriendlyName.StartsWith("PNG") || PixelFormat.GetBitsPerPixel(source.PixelFormat) == 32;
            BitmapTransformOptions bitmapTransformationOptions = BitmapTransformOptions.Rotate0;
            BitmapFrameDecode frame = source as BitmapFrameDecode;
            if (frame != null)
            {
                const string EXIF_ORIENTATION_TAG = "/app1/{ushort=0}/{ushort=274}";
                ushort? orientation = null;
                try
                {
                    // Not supported on all input types, i.e. BMP will fail here
                    orientation = (ushort?)frame.MetadataQueryReader.TryGetMetadataByName(EXIF_ORIENTATION_TAG); //0x0112
                }
                catch { }
                // If the EXIF orientation specifies that the image needs to be flipped or rotated before display, set that up to happen
                if (orientation.HasValue)
                {
                    switch (orientation.Value)
                    {
                        case 1: break; // No rotation required.
                        case 2: bitmapTransformationOptions = BitmapTransformOptions.Rotate0 | BitmapTransformOptions.FlipHorizontal; break;
                        case 3: bitmapTransformationOptions = BitmapTransformOptions.Rotate180; break;
                        case 4: bitmapTransformationOptions = BitmapTransformOptions.Rotate180 | BitmapTransformOptions.FlipHorizontal; break;
                        case 5: bitmapTransformationOptions = BitmapTransformOptions.Rotate270 | BitmapTransformOptions.FlipHorizontal; break;
                        case 6: bitmapTransformationOptions = BitmapTransformOptions.Rotate90; break;
                        case 7: bitmapTransformationOptions = BitmapTransformOptions.Rotate90 | BitmapTransformOptions.FlipHorizontal; break;
                        case 8: bitmapTransformationOptions = BitmapTransformOptions.Rotate270; break;
                    }
                }
            }
            // Scale down larger images
            int sourceWidth = source.Size.Width;
            int sourceHeight = source.Size.Height;
            if (width > 0 && height > 0 && (sourceWidth > width || sourceHeight > height))
            {
                // FIX: the aspect-ratio math previously used `height` where `width` belongs
                // (`sourceWidth <= height` and `sourceHeight * height / sourceWidth`), which
                // produced distorted thumbnails (e.g. a 4000x3000 source into a 200x100 box
                // came out 200x75 instead of 133x100).
                if (sourceWidth <= width)
                {
                    width = sourceWidth;
                }
                int newHeight = sourceHeight * width / sourceWidth;
                if (newHeight > height)
                {
                    // Resize with height instead
                    width = sourceWidth * height / sourceHeight;
                    newHeight = height;
                }
                scaler.Initialize(source, width, newHeight, BitmapInterpolationMode.Fant);
                source = scaler;
            }
            // Rotate
            if (bitmapTransformationOptions != BitmapTransformOptions.Rotate0)
            {
                // For fast rotation a cached bitmap is needed, otherwise only per-pixel-decoding happens which makes the process extremly slow.
                // See https://social.msdn.microsoft.com/Forums/windowsdesktop/en-US/5ff2b52b-602f-4b22-9fb2-371539ff5ebb/hang-in-createbitmapfromwicbitmap-when-using-iwicbitmapfliprotator?forum=windowswic
                cachedBitmap = new Bitmap(factory, source, BitmapCreateCacheOption.CacheOnLoad);
                rotator.Initialize(cachedBitmap, bitmapTransformationOptions);
                source = rotator;
            }
            Guid formatGuid = ContainerFormatGuids.Jpeg;
            imageType = ImageType.Jpeg;
            if (usePngOutput)
            {
                formatGuid = ContainerFormatGuids.Png;
                imageType = ImageType.Png;
            }
            using (var encoder = new BitmapEncoder(factory, formatGuid))
            {
                encoder.Initialize(output);
                using (var bitmapFrameEncode = new BitmapFrameEncode(encoder))
                {
                    // Create image encoder
                    var wicPixelFormat = PixelFormat.FormatDontCare;
                    bitmapFrameEncode.Initialize();
                    bitmapFrameEncode.SetSize(source.Size.Width, source.Size.Height);
                    bitmapFrameEncode.SetPixelFormat(ref wicPixelFormat);
                    bitmapFrameEncode.WriteSource(source);
                    bitmapFrameEncode.Commit();
                    encoder.Commit();
                }
            }
            imageData = output.ToArray();
            return(true);
        }
    }
    catch (Exception)
    {
        //ServiceRegistration.Get<ILogger>().Warn("WICThumbnailProvider: Error loading bitmapSource from file data stream", ex);
        return(false);
    }
    finally
    {
        cachedBitmap?.Dispose();
    }
}
// // http://stackoverflow.com/questions/9151615/how-does-one-use-a-memory-stream-instead-of-files-when-rendering-direct2d-images // // Identical to above SO question, except that we are rendering to MemoryStream because it was added to the API // private MemoryStream RenderStaticTextToBitmap() { var width = 400; var height = 100; var pixelFormat = WicPixelFormat.Format32bppBGR; var wicFactory = new ImagingFactory(); var dddFactory = new SharpDX.Direct2D1.Factory(); var dwFactory = new SharpDX.DirectWrite.Factory(); var wicBitmap = new Bitmap( wicFactory, width, height, pixelFormat, BitmapCreateCacheOption.CacheOnLoad); var renderTargetProperties = new RenderTargetProperties( RenderTargetType.Default, new D2DPixelFormat(Format.Unknown, AlphaMode.Unknown), 0, 0, RenderTargetUsage.None, FeatureLevel.Level_DEFAULT); var renderTarget = new WicRenderTarget( dddFactory, wicBitmap, renderTargetProperties) { TextAntialiasMode = TextAntialiasMode.Cleartype }; renderTarget.BeginDraw(); var textFormat = new TextFormat(dwFactory, "Consolas", 48) { TextAlignment = SharpDX.DirectWrite.TextAlignment.Center, ParagraphAlignment = ParagraphAlignment.Center }; var textBrush = new SharpDX.Direct2D1.SolidColorBrush( renderTarget, SharpDX.Colors.Blue); renderTarget.Clear(Colors.White); renderTarget.DrawText( "Hi, mom!", textFormat, new RectangleF(0, 0, width, height), textBrush); renderTarget.EndDraw(); var ms = new MemoryStream(); var stream = new WICStream( wicFactory, ms); var encoder = new PngBitmapEncoder(wicFactory); encoder.Initialize(stream); var frameEncoder = new BitmapFrameEncode(encoder); frameEncoder.Initialize(); frameEncoder.SetSize(width, height); frameEncoder.PixelFormat = WicPixelFormat.FormatDontCare; frameEncoder.WriteSource(wicBitmap); frameEncoder.Commit(); encoder.Commit(); frameEncoder.Dispose(); encoder.Dispose(); stream.Dispose(); ms.Position = 0; return(ms); }
//------------------------------------------------------------------------------------- // Load a WIC-supported file in memory //------------------------------------------------------------------------------------- internal static Image LoadFromWICMemory(IntPtr pSource, int size, bool makeACopy, GCHandle?handle) { var flags = WICFlags.AllFrames; Image image = null; // Create input stream for memory using (var stream = new WICStream(Factory, new DataPointer(pSource, size))) { // If the decoder is unable to decode the image, than return null BitmapDecoder decoder = null; try { decoder = new BitmapDecoder(Factory, stream, DecodeOptions.CacheOnDemand); using (var frame = decoder.GetFrame(0)) { // Get metadata Guid convertGuid; var tempDesc = DecodeMetadata(flags, decoder, frame, out convertGuid); // If not supported. if (!tempDesc.HasValue) { return(null); } var mdata = tempDesc.Value; if ((mdata.ArraySize > 1) && (flags & WICFlags.AllFrames) != 0) { return(DecodeMultiframe(flags, mdata, decoder)); } image = DecodeSingleFrame(flags, mdata, convertGuid, frame); } } catch { image = null; } finally { if (decoder != null) { decoder.Dispose(); } } } // For WIC, we are not keeping the original buffer. if (image != null && !makeACopy) { if (handle.HasValue) { handle.Value.Free(); } else { Utilities.FreeMemory(pSource); } } return(image); }
public bool GetThumbnail(Stream stream, int width, int height, bool cachedOnly, out byte[] imageData, out ImageType imageType) { imageData = null; imageType = ImageType.Unknown; // No support for cache if (cachedOnly) { return(false); } try { if (stream.CanSeek) { stream.Seek(0, SeekOrigin.Begin); } // open the image file for reading using (var factory = new ImagingFactory2()) using (var inputStream = new WICStream(factory, stream)) using (var decoder = new BitmapDecoder(factory, inputStream, DecodeOptions.CacheOnLoad)) using (var scaler = new BitmapScaler(factory)) using (var output = new MemoryStream()) using (var encoder = new BitmapEncoder(factory, ContainerFormatGuids.Jpeg)) { // decode the loaded image to a format that can be consumed by D2D BitmapSource source = decoder.GetFrame(0); // Scale down larger images int sourceWidth = source.Size.Width; int sourceHeight = source.Size.Height; if (width > 0 && height > 0 && (sourceWidth > width || sourceHeight > height)) { if (sourceWidth <= height) { width = sourceWidth; } int newHeight = sourceHeight * height / sourceWidth; if (newHeight > height) { // Resize with height instead width = sourceWidth * height / sourceHeight; newHeight = height; } scaler.Initialize(source, width, newHeight, BitmapInterpolationMode.Fant); source = scaler; } encoder.Initialize(output); using (var bitmapFrameEncode = new BitmapFrameEncode(encoder)) { // Create image encoder var wicPixelFormat = PixelFormat.FormatDontCare; bitmapFrameEncode.Initialize(); bitmapFrameEncode.SetSize(source.Size.Width, source.Size.Height); bitmapFrameEncode.SetPixelFormat(ref wicPixelFormat); bitmapFrameEncode.WriteSource(source); bitmapFrameEncode.Commit(); encoder.Commit(); } imageData = output.ToArray(); imageType = ImageType.Jpeg; return(true); } } catch (Exception e) { // ServiceRegistration.Get<ILogger>().Warn("WICThumbnailProvider: Error loading bitmapSource from file data stream", e); return(false); } }