Code example #1
File: Program.cs Project: rbwhitaker/SharpDX
        private static void Main()
        {
            // WIC factory for the target bitmap and encoder, Direct2D factory for geometry and rendering
            var wicFactory = new ImagingFactory();
            var d2dFactory = new SharpDX.Direct2D1.Factory();

            string filename = "output.jpg";
            const int width = 512;
            const int height = 512;

            // Rounded-rectangle geometry that will be filled into the bitmap
            var rectangleGeometry = new RoundedRectangleGeometry(d2dFactory, new RoundedRectangle()
            {
                RadiusX = 32,
                RadiusY = 32,
                Rect = new RectangleF(128, 128, width - 128, height - 128)
            });

            // CPU-side WIC bitmap that Direct2D will render into
            var wicBitmap = new Bitmap(wicFactory, width, height, SharpDX.WIC.PixelFormat.Format32bppBGR, BitmapCreateCacheOption.CacheOnLoad);

            var renderTargetProperties = new RenderTargetProperties(RenderTargetType.Default, new PixelFormat(Format.Unknown, AlphaMode.Unknown), 0, 0, RenderTargetUsage.None, FeatureLevel.Level_DEFAULT);

            // Direct2D render target backed by the WIC bitmap
            var d2dRenderTarget = new WicRenderTarget(d2dFactory, wicBitmap, renderTargetProperties);

            var solidColorBrush = new SolidColorBrush(d2dRenderTarget, Color.White);

            // Draw a white rounded rectangle on a black background
            d2dRenderTarget.BeginDraw();
            d2dRenderTarget.Clear(Color.Black);
            d2dRenderTarget.FillGeometry(rectangleGeometry, solidColorBrush, null);
            d2dRenderTarget.EndDraw();

            if (File.Exists(filename))
                File.Delete(filename);

            // Create a WIC stream over the output file
            var stream = new WICStream(wicFactory, filename, NativeFileAccess.Write);
            // Initialize a Jpeg encoder with this stream
            var encoder = new JpegBitmapEncoder(wicFactory);
            encoder.Initialize(stream);

            // Create a Frame encoder
            var bitmapFrameEncode = new BitmapFrameEncode(encoder);
            bitmapFrameEncode.Initialize();
            bitmapFrameEncode.SetSize(width, height);
            var pixelFormatGuid = SharpDX.WIC.PixelFormat.FormatDontCare;
            bitmapFrameEncode.SetPixelFormat(ref pixelFormatGuid);
            bitmapFrameEncode.WriteSource(wicBitmap);

            bitmapFrameEncode.Commit();
            encoder.Commit();

            bitmapFrameEncode.Dispose();
            encoder.Dispose();
            stream.Dispose();

            System.Diagnostics.Process.Start(Path.GetFullPath(Path.Combine(Environment.CurrentDirectory,filename)));
        }
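The save step above does not release the WIC objects if WriteSource or Commit throws. Below is a minimal sketch of the same encode step with deterministic disposal, reusing only the SharpDX.WIC calls already shown in the sample; the SaveAsJpeg name and parameter list are illustrative, not part of the original project.

        private static void SaveAsJpeg(ImagingFactory wicFactory, SharpDX.WIC.Bitmap wicBitmap, string filename, int width, int height)
        {
            using (var stream = new WICStream(wicFactory, filename, NativeFileAccess.Write))
            using (var encoder = new JpegBitmapEncoder(wicFactory))
            {
                encoder.Initialize(stream);
                using (var frame = new BitmapFrameEncode(encoder))
                {
                    frame.Initialize();
                    frame.SetSize(width, height);
                    var pixelFormatGuid = SharpDX.WIC.PixelFormat.FormatDontCare;
                    frame.SetPixelFormat(ref pixelFormatGuid);
                    frame.WriteSource(wicBitmap);   // copy the rendered WIC bitmap into the frame
                    frame.Commit();
                    encoder.Commit();
                }
            }
        }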
Code example #2
        void colorFrameReader_FrameArrived(object sender, ColorFrameArrivedEventArgs e)
        {
            var colorFrame = e.FrameReference.AcquireFrame();
            if (colorFrame != null)
            {
                using (colorFrame)
                {
                    lastColorGain = colorFrame.ColorCameraSettings.Gain;
                    lastColorExposureTimeTicks = colorFrame.ColorCameraSettings.ExposureTime.Ticks;

                    if (yuvFrameReady.Count > 0)
                    {
                        lock (yuvByteBuffer)
                            colorFrame.CopyRawFrameDataToArray(yuvByteBuffer);
                        lock (yuvFrameReady)
                            foreach (var autoResetEvent in yuvFrameReady)
                                autoResetEvent.Set();
                    }

                    if ((rgbFrameReady.Count > 0) || (jpegFrameReady.Count > 0))
                    {
                        lock (rgbByteBuffer)
                            colorFrame.CopyConvertedFrameDataToArray(rgbByteBuffer, ColorImageFormat.Bgra);
                        lock (rgbFrameReady)
                            foreach (var autoResetEvent in rgbFrameReady)
                                autoResetEvent.Set();
                    }

                    if (jpegFrameReady.Count > 0)
                    {
                        // should be put in a separate thread?

                        stopWatch.Restart();

                        var bitmapSource = new Bitmap(imagingFactory, Kinect2Calibration.colorImageWidth, Kinect2Calibration.colorImageHeight, SharpDX.WIC.PixelFormat.Format32bppBGR, BitmapCreateCacheOption.CacheOnLoad);
                        var bitmapLock = bitmapSource.Lock(BitmapLockFlags.Write);
                        Marshal.Copy(rgbByteBuffer, 0, bitmapLock.Data.DataPointer, Kinect2Calibration.colorImageWidth * Kinect2Calibration.colorImageHeight * 4);
                        bitmapLock.Dispose();

                        var memoryStream = new MemoryStream();

                        //var fileStream = new FileStream("test" + frame++ + ".jpg", FileMode.Create);
                        //var stream = new WICStream(imagingFactory, "test" + frame++ + ".jpg", SharpDX.IO.NativeFileAccess.Write);

                        var stream = new WICStream(imagingFactory, memoryStream);

                        var jpegBitmapEncoder = new JpegBitmapEncoder(imagingFactory);
                        jpegBitmapEncoder.Initialize(stream);

                        var bitmapFrameEncode = new BitmapFrameEncode(jpegBitmapEncoder);
                        bitmapFrameEncode.Options.ImageQuality = 0.5f;
                        bitmapFrameEncode.Initialize();
                        bitmapFrameEncode.SetSize(Kinect2Calibration.colorImageWidth, Kinect2Calibration.colorImageHeight);
                        var pixelFormatGuid = PixelFormat.FormatDontCare;
                        bitmapFrameEncode.SetPixelFormat(ref pixelFormatGuid);
                        bitmapFrameEncode.WriteSource(bitmapSource);

                        bitmapFrameEncode.Commit();
                        jpegBitmapEncoder.Commit();

                        //fileStream.Close();
                        //fileStream.Dispose();

                        //Console.WriteLine(stopWatch.ElapsedMilliseconds + "ms " + memoryStream.Length + " bytes");

                        lock (jpegByteBuffer)
                        {
                            nJpegBytes = (int)memoryStream.Length;
                            memoryStream.Seek(0, SeekOrigin.Begin);
                            memoryStream.Read(jpegByteBuffer, 0, nJpegBytes);
                        }
                        lock (jpegFrameReady)
                            foreach (var autoResetEvent in jpegFrameReady)
                                autoResetEvent.Set();

                        //var file = new FileStream("test" + frame++ + ".jpg", FileMode.Create);
                        //file.Write(jpegByteBuffer, 0, nJpegBytes);
                        //file.Close();

                        bitmapSource.Dispose();
                        memoryStream.Close();
                        memoryStream.Dispose();
                        stream.Dispose();
                        jpegBitmapEncoder.Dispose();
                        bitmapFrameEncode.Dispose();
                    }
                }
            }
        }
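The handler signals waiting consumers through the AutoResetEvent collections rather than pushing data to them. A hypothetical consumer of the JPEG path could look like the sketch below; GetNextJpegFrame is an illustrative name, and it is assumed that jpegFrameReady is a List<AutoResetEvent> and that jpegByteBuffer/nJpegBytes are the fields used by the handler above.

        byte[] GetNextJpegFrame()
        {
            // Register an event so the handler above wakes this caller on the next encoded frame
            var frameReady = new AutoResetEvent(false);
            lock (jpegFrameReady)
                jpegFrameReady.Add(frameReady);
            try
            {
                frameReady.WaitOne();
                // Copy the JPEG bytes out under the same lock the producer uses
                lock (jpegByteBuffer)
                {
                    var jpeg = new byte[nJpegBytes];
                    Buffer.BlockCopy(jpegByteBuffer, 0, jpeg, 0, nJpegBytes);
                    return jpeg;
                }
            }
            finally
            {
                lock (jpegFrameReady)
                    jpegFrameReady.Remove(frameReady);
            }
        }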
Code example #3
        private static void Save(IResource res, Stream stream, ImageFileFormat fmt)
        {
            var texture = res.Resource as Texture2D;
            var textureCopy = new Texture2D(MyRender11.Device, new Texture2DDescription
            {
                Width = (int)texture.Description.Width,
                Height = (int)texture.Description.Height,
                MipLevels = 1,
                ArraySize = 1,
                Format = texture.Description.Format,
                Usage = ResourceUsage.Staging,
                SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0),
                BindFlags = BindFlags.None,
                CpuAccessFlags = CpuAccessFlags.Read,
                OptionFlags = ResourceOptionFlags.None
            });
            RC.CopyResource(res, textureCopy);

            DataStream dataStream;
            var dataBox = RC.MapSubresource(
                textureCopy,
                0,
                0,
                MapMode.Read,
                MapFlags.None,
                out dataStream);

            var dataRectangle = new DataRectangle
            {
                DataPointer = dataStream.DataPointer,
                Pitch = dataBox.RowPitch
            };

            var bitmap = new Bitmap(
                MyRender11.WIC,
                textureCopy.Description.Width,
                textureCopy.Description.Height,
                PixelFormatFromFormat(textureCopy.Description.Format), // map the DXGI texture format to the matching WIC pixel format
                dataRectangle);

            using (var wicStream = new WICStream(MyRender11.WIC, stream))
            {
                BitmapEncoder bitmapEncoder;
                switch (fmt)
                {
                    case ImageFileFormat.Png:
                        bitmapEncoder = new PngBitmapEncoder(MyRender11.WIC, wicStream);
                        break;
                    case ImageFileFormat.Jpg:
                        bitmapEncoder = new JpegBitmapEncoder(MyRender11.WIC, wicStream);
                        break;
                    case ImageFileFormat.Bmp:
                        bitmapEncoder = new BmpBitmapEncoder(MyRender11.WIC, wicStream);
                        break;
                    default:
                        MyRenderProxy.Assert(false, "Unsupported file format.");
                        bitmapEncoder = null;
                        break;
                }
                if (bitmapEncoder != null)
                {
                    using (var bitmapFrameEncode = new BitmapFrameEncode(bitmapEncoder))
                    {
                        bitmapFrameEncode.Initialize();
                        bitmapFrameEncode.SetSize(bitmap.Size.Width, bitmap.Size.Height);
                        var pixelFormat = PixelFormat.FormatDontCare;
                        bitmapFrameEncode.SetPixelFormat(ref pixelFormat);
                        bitmapFrameEncode.WriteSource(bitmap);
                        bitmapFrameEncode.Commit();
                        bitmapEncoder.Commit();
                    }
                    bitmapEncoder.Dispose();
                }
            }

            RC.UnmapSubresource(textureCopy, 0);
            textureCopy.Dispose();
            bitmap.Dispose();
        }
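PixelFormatFromFormat is a project helper that is not shown here; it has to map the DXGI texture format to a WIC pixel format GUID for the Bitmap constructor. A plausible sketch, assuming the usual 32-bit render-target formats, might look like this:

        static Guid PixelFormatFromFormat(SharpDX.DXGI.Format format)
        {
            switch (format)
            {
                case SharpDX.DXGI.Format.R8G8B8A8_UNorm:
                case SharpDX.DXGI.Format.R8G8B8A8_UNorm_SRgb:
                    return PixelFormat.Format32bppRGBA;
                case SharpDX.DXGI.Format.B8G8R8A8_UNorm:
                case SharpDX.DXGI.Format.B8G8R8A8_UNorm_SRgb:
                    return PixelFormat.Format32bppBGRA;
                default:
                    return PixelFormat.FormatDontCare; // unknown formats would need an explicit conversion
            }
        }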
Code example #4
File: TextureExtensions.cs Project: Altaxo/Altaxo
		/// <summary>
		/// Saves a texture to a stream as an image.
		/// </summary>
		/// <param name="texture">The texture to save.</param>
		/// <param name="imageFormat">The image format of the saved image.</param>
		/// <param name="imageResolutionInDpi">The image resolution in dpi.</param>
		/// <param name="toStream">The stream to save the texture to.</param>
		public static void SaveToStream(this Texture2D texture, System.Drawing.Imaging.ImageFormat imageFormat, double imageResolutionInDpi, System.IO.Stream toStream)
		{
			Texture2D textureCopy = null;
			ImagingFactory imagingFactory = null;
			Bitmap bitmap = null;
			BitmapEncoder bitmapEncoder = null;

			try
			{
				textureCopy = new Texture2D(texture.Device, new Texture2DDescription
				{
					Width = (int)texture.Description.Width,
					Height = (int)texture.Description.Height,
					MipLevels = 1,
					ArraySize = 1,
					Format = texture.Description.Format,
					Usage = ResourceUsage.Staging,
					SampleDescription = new SampleDescription(1, 0),
					BindFlags = BindFlags.None,
					CpuAccessFlags = CpuAccessFlags.Read,
					OptionFlags = ResourceOptionFlags.None
				});

				texture.Device.CopyResource(texture, textureCopy);

				DataRectangle dataRectangle = textureCopy.Map(0, MapMode.Read, SharpDX.Direct3D10.MapFlags.None);

				imagingFactory = new ImagingFactory();
				bitmap = new Bitmap(
						imagingFactory,
						textureCopy.Description.Width,
						textureCopy.Description.Height,
						PixelFormat.Format32bppBGRA,
						dataRectangle);

				toStream.Position = 0;

				if (imageFormat == System.Drawing.Imaging.ImageFormat.Png)
					bitmapEncoder = new PngBitmapEncoder(imagingFactory, toStream);
				else if (imageFormat == System.Drawing.Imaging.ImageFormat.Bmp)
					bitmapEncoder = new BmpBitmapEncoder(imagingFactory, toStream);
				else if (imageFormat == System.Drawing.Imaging.ImageFormat.Gif)
					bitmapEncoder = new GifBitmapEncoder(imagingFactory, toStream);
				else if (imageFormat == System.Drawing.Imaging.ImageFormat.Jpeg)
					bitmapEncoder = new JpegBitmapEncoder(imagingFactory, toStream);
				else if (imageFormat == System.Drawing.Imaging.ImageFormat.Tiff)
					bitmapEncoder = new TiffBitmapEncoder(imagingFactory, toStream);
				else
					bitmapEncoder = new PngBitmapEncoder(imagingFactory, toStream);

				using (var bitmapFrameEncode = new BitmapFrameEncode(bitmapEncoder))
				{
					bitmapFrameEncode.Initialize();
					bitmapFrameEncode.SetSize(bitmap.Size.Width, bitmap.Size.Height);
					var pixelFormat = PixelFormat.FormatDontCare;
					bitmapFrameEncode.SetPixelFormat(ref pixelFormat);
					bitmapFrameEncode.SetResolution(imageResolutionInDpi, imageResolutionInDpi);
					bitmapFrameEncode.WriteSource(bitmap);
					bitmapFrameEncode.Commit();
					bitmapEncoder.Commit();
				}
			}
			finally
			{
				bitmapEncoder?.Dispose();
				textureCopy?.Unmap(0);
				textureCopy?.Dispose();
				bitmap?.Dispose();
				imagingFactory?.Dispose();
			}
		}
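Since SaveToStream is an extension method on Texture2D, a typical call site might look like the following sketch; the texture variable and the output path are assumptions for illustration.

		using (var file = System.IO.File.Create("texture.png"))
		{
			// Save the texture as a 96-dpi PNG; the target stream must be writable and seekable
			texture.SaveToStream(System.Drawing.Imaging.ImageFormat.Png, 96.0, file);
		}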
Code example #5
File: Direct2D.cs Project: BEEden/GdiBench
        public static MemoryStream Resize(System.IO.Stream source, int maxwidth, int maxheight, Action beforeDrawImage, Action afterDrawImage)
        {
            // initialize the D3D device, which allows rendering any graphics (2D or 3D) to an image
            var defaultDevice = new SharpDX.Direct3D11.Device(SharpDX.Direct3D.DriverType.Warp,
                                                              d3d.DeviceCreationFlags.BgraSupport | d3d.DeviceCreationFlags.SingleThreaded | d3d.DeviceCreationFlags.PreventThreadingOptimizations);

            var d3dDevice = defaultDevice.QueryInterface<d3d.Device1>(); // get a reference to the Direct3D 11.1 device
            var dxgiDevice = d3dDevice.QueryInterface<dxgi.Device>(); // get a reference to DXGI device

            var d2dDevice = new d2.Device(dxgiDevice); // initialize the D2D device

            var imagingFactory = new wic.ImagingFactory2(); // initialize the WIC factory

            // initialize the DeviceContext - it will be the D2D render target and will allow all rendering operations
            var d2dContext = new d2.DeviceContext(d2dDevice, d2.DeviceContextOptions.None);

            var dwFactory = new dw.Factory();

            // specify a pixel format that is supported by both D2D and WIC
            var d2PixelFormat = new d2.PixelFormat(dxgi.Format.R8G8B8A8_UNorm, d2.AlphaMode.Premultiplied);
            // since an R-G-B-A format was specified for D2D, use the matching WIC format
            var wicPixelFormat = wic.PixelFormat.Format32bppPRGBA;

            // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
            // IMAGE LOADING ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
            var decoder = new wic.BitmapDecoder(imagingFactory, source, wic.DecodeOptions.CacheOnLoad);

            // decode the loaded image to a format that can be consumed by D2D
            var formatConverter = new wic.FormatConverter(imagingFactory);
            formatConverter.Initialize(decoder.GetFrame(0), wicPixelFormat);

            // store the image size - output will be of the same size
            var inputImageSize = formatConverter.Size;

            // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
            // RENDER TARGET SETUP ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

            // create the d2d bitmap description using default flags (from SharpDX samples) and 96 DPI
            var d2dBitmapProps = new d2.BitmapProperties1(d2PixelFormat, 96, 96, d2.BitmapOptions.Target | d2.BitmapOptions.CannotDraw);

            //Calculate size
            var resultSize = MathUtil.ScaleWithin(inputImageSize.Width, inputImageSize.Height, maxwidth, maxheight);
            var newWidth = resultSize.Item1;
            var newHeight = resultSize.Item2;

            // the render target
            var d2dRenderTarget = new d2.Bitmap1(d2dContext, new Size2(newWidth, newHeight), d2dBitmapProps);
            d2dContext.Target = d2dRenderTarget; // associate bitmap with the d2d context

            var bitmapSourceEffect = new d2.Effects.BitmapSourceEffect(d2dContext);
            bitmapSourceEffect.WicBitmapSource = formatConverter;

            // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
            // DRAWING ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
            beforeDrawImage();
            // slow preparations - fast drawing:
            d2dContext.BeginDraw();
            d2dContext.Transform = Matrix3x2.Scaling(new Vector2(newWidth / (float)inputImageSize.Width, newHeight / (float)inputImageSize.Height));
            d2dContext.DrawImage(bitmapSourceEffect, d2.InterpolationMode.HighQualityCubic);
            d2dContext.EndDraw();
            afterDrawImage();

            // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
            // IMAGE SAVING ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

            var ms = new MemoryStream();

            // use the appropriate WICStream overload to write either to a stream or to a file
            var stream = new wic.WICStream(imagingFactory,ms);

            // select the image encoding format HERE
            var encoder = new wic.JpegBitmapEncoder(imagingFactory);
            encoder.Initialize(stream);

            var bitmapFrameEncode = new wic.BitmapFrameEncode(encoder);
            bitmapFrameEncode.Initialize();
            bitmapFrameEncode.SetSize(newWidth, newHeight);
            bitmapFrameEncode.SetPixelFormat(ref wicPixelFormat);

            // this is the trick to write D2D1 bitmap to WIC
            var imageEncoder = new wic.ImageEncoder(imagingFactory, d2dDevice);
            imageEncoder.WriteFrame(d2dRenderTarget, bitmapFrameEncode, new wic.ImageParameters(d2PixelFormat, 96, 96, 0, 0, newWidth, newHeight));

            bitmapFrameEncode.Commit();
            encoder.Commit();

            // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
            // CLEANUP ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

            // dispose everything and free used resources

            bitmapFrameEncode.Dispose();
            encoder.Dispose();
            stream.Dispose();
            formatConverter.Dispose();
            bitmapSourceEffect.Dispose();
            d2dRenderTarget.Dispose();
            decoder.Dispose();
            d2dContext.Dispose();
            dwFactory.Dispose();
            imagingFactory.Dispose();
            d2dDevice.Dispose();
            dxgiDevice.Dispose();
            d3dDevice.Dispose();
            defaultDevice.Dispose();
            return ms;
        }
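A possible call site for this helper, assuming the containing class is named Direct2D (as the file name suggests) and that the before/after callbacks are only used for timing, so no-ops are passed:

            using (var source = File.OpenRead("input.jpg"))
            using (var resized = Direct2D.Resize(source, 800, 600, () => { }, () => { }))
            {
                // the returned MemoryStream holds the re-encoded JPEG
                File.WriteAllBytes("resized.jpg", resized.ToArray());
            }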
Code example #6
File: Program.cs Project: MaybeMars/SharpDX-Samples
        /// <summary>
        /// SharpDX WIC sample. Encode to JPG and decode.
        /// </summary>
        static void Main()
        {
            const int width = 512;
            const int height = 512;
            const string filename = "output.jpg";

            var factory = new ImagingFactory();

            WICStream stream = null;

            // ------------------------------------------------------
            // Encode a JPG image
            // ------------------------------------------------------

            // Create a WIC outputstream 
            if (File.Exists(filename))
                File.Delete(filename);

            stream = new WICStream(factory, filename, NativeFileAccess.Write);

            // Initialize a Jpeg encoder with this stream
            var encoder = new JpegBitmapEncoder(factory);
            encoder.Initialize(stream);

            // Create a Frame encoder
            var bitmapFrameEncode = new BitmapFrameEncode(encoder);
            bitmapFrameEncode.Options.ImageQuality = 0.8f;
            bitmapFrameEncode.Initialize();
            bitmapFrameEncode.SetSize(width, height);
            var guid = PixelFormat.Format24bppBGR;
            bitmapFrameEncode.SetPixelFormat(ref guid);

            // Write a pseudo-plasma to a buffer
            int stride = PixelFormat.GetStride(PixelFormat.Format24bppBGR, width);
            var bufferSize = height * stride;
            var buffer = new DataStream(bufferSize, true, true);
            for (int y = 0; y < height; y++)
            {
                for (int x = 0; x < width; x++)
                {
                    buffer.WriteByte((byte)(x / 2.0 + 20.0 * Math.Sin(y / 40.0)));
                    buffer.WriteByte((byte)(y / 2.0 + 30.0 * Math.Sin(x / 80.0)));
                    buffer.WriteByte((byte)(x / 2.0));
                }
            }

            // Copy the pixels from the buffer to the WIC bitmap frame encoder (height rows of stride bytes each)
            bitmapFrameEncode.WritePixels(height, new DataRectangle(buffer.DataPointer, stride));

            // Commit changes
            bitmapFrameEncode.Commit();
            encoder.Commit();
            bitmapFrameEncode.Dispose();
            encoder.Dispose();
            stream.Dispose();

            // ------------------------------------------------------
            // Decode the previous JPG image
            // ------------------------------------------------------

            // Read input
            stream = new WICStream(factory, filename, NativeFileAccess.Read);
            var decoder = new JpegBitmapDecoder(factory);
            decoder.Initialize(stream, DecodeOptions.CacheOnDemand);
            var bitmapFrameDecode = decoder.GetFrame(0);
            var queryReader = bitmapFrameDecode.MetadataQueryReader;

            // Dump the metadata query reader contents to the console
            queryReader.Dump(Console.Out);
            queryReader.Dispose();

            bitmapFrameDecode.Dispose();
            decoder.Dispose();
            stream.Dispose();

            // Dispose
            factory.Dispose();

            System.Diagnostics.Process.Start(Path.GetFullPath(Path.Combine(Environment.CurrentDirectory, filename)));
        }
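If the decoded pixels are needed in managed memory (rather than only dumping metadata), they could be copied out before bitmapFrameDecode is disposed. This is a sketch only, assuming the CopyPixels(int stride, byte[] output) overload of SharpDX.WIC.BitmapSource and the FormatConverter shown in code example #5:

            // Normalize the decoded frame to 32bppBGRA, then copy the pixels into a byte array
            var converter = new FormatConverter(factory);
            converter.Initialize(bitmapFrameDecode, PixelFormat.Format32bppBGRA);
            int decodeStride = PixelFormat.GetStride(PixelFormat.Format32bppBGRA, converter.Size.Width);
            var pixels = new byte[decodeStride * converter.Size.Height];
            converter.CopyPixels(decodeStride, pixels);
            converter.Dispose();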