protected static void FromD3DLock(PixelBox rval, DX.DataRectangle lrect)
{
    var bpp = PixelUtil.GetNumElemBytes(rval.Format);
    var size = 0;

    if (bpp != 0)
    {
        rval.RowPitch = lrect.Pitch / bpp;
        rval.SlicePitch = rval.RowPitch * rval.Height;
        Debug.Assert((lrect.Pitch % bpp) == 0);
        size = lrect.Pitch * rval.Height;
    }
    else if (PixelUtil.IsCompressed(rval.Format))
    {
        rval.RowPitch = rval.Width;
        rval.SlicePitch = rval.Width * rval.Height;
        size = rval.Width * rval.Height;
    }
    else
    {
        throw new AxiomException("Invalid pixel format");
    }

    rval.Data = BufferBase.Wrap(lrect.DataPointer, size);
}
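The conversion above turns the byte pitch reported by Direct3D 9 into an element pitch for the PixelBox. A quick worked example with hypothetical numbers, assuming a 640x480 lock of a 4-byte-per-element format:

// Hypothetical values for illustration only.
int pitchInBytes = 2560;                                    // lrect.Pitch reported by the driver
int bytesPerElement = 4;                                    // e.g. an A8R8G8B8-style format
int rowPitchInElements = pitchInBytes / bytesPerElement;    // 640 elements per row
int sliceSizeInBytes = pitchInBytes * 480;                  // 1,228,800 bytes wrapped by BufferBase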
/// <summary>
/// Creates a dynamic infrared texture, allocates GPU resources
/// </summary>
/// <param name="device">Direct3D Device</param>
/// <param name="frameData">Initial frame data</param>
public ImmutableInfraredTexture(Device device, InfraredFrameData frameData)
{
    if (device == null)
        throw new ArgumentNullException("device");
    if (frameData == null)
        throw new ArgumentNullException("frameData");

    DataRectangle rect = new DataRectangle(frameData.DataPointer, Consts.DepthWidth * sizeof(ushort));

    this.texture = new Texture2D(device, InfraredTextureDescriptors.DynamicResource, rect);
    this.shaderView = new ShaderResourceView(device, this.texture);
}
/// <summary>
/// Function to create a WIC bitmap from a System.Drawing.Image object.
/// </summary>
/// <param name="image">Image to convert.</param>
public WIC.Bitmap CreateWICImageFromImage(Image image)
{
    if (image == null)
    {
        throw new ArgumentNullException("image");
    }

    BitmapData bmpData = null;
    var imageBitmap = image as Bitmap;
    bool bitmapClone = false;

    // If the image being passed is not a bitmap, then make it into one.
    // Or, if the image is a 32bpp RGB (without alpha), or if the image is indexed.
    // ReSharper disable once BitwiseOperatorOnEnumWithoutFlags
    if ((imageBitmap == null)
        || (image.PixelFormat == PixelFormat.Format32bppRgb)
        || ((image.PixelFormat & PixelFormat.Indexed) == PixelFormat.Indexed))
    {
        imageBitmap = new Bitmap(image);
        bitmapClone = true;
    }

    try
    {
        // Try to get a compatible WIC format.
        Guid guid = GetGUID(imageBitmap.PixelFormat);

        if (guid == Guid.Empty)
        {
            throw new GorgonException(GorgonResult.FormatNotSupported,
                                      string.Format(Resources.GORGFX_FORMAT_NOT_SUPPORTED, image.PixelFormat));
        }

        bmpData = imageBitmap.LockBits(new Rectangle(0, 0, imageBitmap.Width, imageBitmap.Height),
                                       ImageLockMode.ReadOnly,
                                       imageBitmap.PixelFormat);

        var pointer = new DX.DataRectangle(bmpData.Scan0, bmpData.Stride);
        var result = new WIC.Bitmap(Factory, imageBitmap.Width, imageBitmap.Height, guid, pointer, bmpData.Stride * bmpData.Height);

        result.SetResolution(image.HorizontalResolution, image.VerticalResolution);

        return result;
    }
    finally
    {
        if (bmpData != null)
        {
            imageBitmap.UnlockBits(bmpData);
        }

        if (bitmapClone)
        {
            imageBitmap.Dispose();
        }
    }
}
/// <summary>
/// Convenience factory to create a table from a Kinect point list
/// </summary>
/// <param name="device">Direct3D Device</param>
/// <param name="initialData">Initial points array</param>
/// <returns>Ray table texture</returns>
public static unsafe RayTableTexture FromPoints(Device device, PointF[] initialData)
{
    if (initialData.Length != Consts.DepthPixelCount)
        throw new ArgumentException("Initial data length should be same size as depth frame pixel count", "initialData");

    fixed (PointF* ptr = &initialData[0])
    {
        // Each PointF is two 32-bit floats, hence a pitch of 8 bytes per pixel.
        DataRectangle rect = new DataRectangle(new IntPtr(ptr), Consts.DepthWidth * 8);

        var texture = new Texture2D(device, LookupTableTextureDescriptors.DepthToCameraRayTable, rect);
        var view = new ShaderResourceView(device, texture);

        return new RayTableTexture(texture, view);
    }
}
/// <summary>
/// Creates an immutable depth texture, allocates GPU resources
/// </summary>
/// <param name="device">Direct3D Device</param>
/// <param name="data">Depth frame data</param>
public ImmutableDepthTexture(Device device, DepthFrameData data)
{
    if (device == null)
        throw new ArgumentNullException("device");
    if (data == null)
        throw new ArgumentNullException("data");

    DataRectangle dr = new DataRectangle(data.DataPointer, Consts.DepthWidth * sizeof(ushort));

    this.texture = new Texture2D(device, DepthTextureDescriptors.ImmutableResource, dr);
    this.rawView = new ShaderResourceView(device, this.texture, DepthTextureDescriptors.RawView);
    this.normalizedView = new ShaderResourceView(device, this.texture, DepthTextureDescriptors.NormalizedView);
}
public IImage CreateImage(Color[] colors, int width, double scale = 1.0)
{
    var factories = Direct2DFactories.Shared;
    var pf = WIC.PixelFormat.Format32bppBGRA;

    unsafe
    {
        fixed (Color* p = colors)
        {
            var data = new DataRectangle
            {
                Pitch = width * 4,
                DataPointer = (IntPtr)p,
            };
            var bmp = new WIC.Bitmap(factories.WICFactory, width, colors.Length / width, pf, data);
            return new WICBitmapSourceImage(bmp, factories);
        }
    }
}
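A minimal usage sketch, assuming the pixel buffer is laid out row-major with one 4-byte BGRA Color per pixel (matching the Pitch = width * 4 above), and that graphics is a hypothetical name for whatever object exposes CreateImage:

// Hypothetical 2x2 image; populate the buffer with BGRA pixel values.
var pixels = new Color[2 * 2];
// ... fill pixels ...
IImage image = graphics.CreateImage(pixels, width: 2);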
/// <summary>
/// Function to create a WIC bitmap from a Gorgon image buffer.
/// </summary>
/// <param name="buffer">Image buffer containing the image data to convert.</param>
/// <returns>The WIC bitmap.</returns>
public WIC.Bitmap CreateWICBitmapFromImageBuffer(GorgonImageBuffer buffer)
{
    var pointer = new DX.DataRectangle(buffer.Data.BasePointer, buffer.PitchInformation.RowPitch);
    Guid bitmapFormat = GetGUID(buffer.Format);

    if (bitmapFormat == Guid.Empty)
    {
        throw new GorgonException(GorgonResult.FormatNotSupported,
                                  string.Format(Resources.GORGFX_FORMAT_NOT_SUPPORTED, buffer.Format));
    }

    return new WIC.Bitmap(Factory, buffer.Width, buffer.Height, bitmapFormat, pointer, pointer.Pitch * buffer.Height);
}
private Bitmap GetDX9ScreenShot()
{
    try
    {
        screenShot = null;
        screenShot = new System.Drawing.Bitmap(WIDTH, HEIGHT, pixelFormat);

        dx9ScreenSurface = SharpDX.Direct3D9.Surface.CreateOffscreenPlain(
            dx9Device, WIDTH, HEIGHT, SharpDX.Direct3D9.Format.A8R8G8B8, Pool.SystemMemory);
        dx9Device.GetFrontBufferData(0, dx9ScreenSurface);

        dx9Map = dx9ScreenSurface.LockRectangle(LockFlags.None);
        bmpData = screenShot.LockBits(boundsRect, System.Drawing.Imaging.ImageLockMode.WriteOnly, screenShot.PixelFormat);

        var sourcePtr = dx9Map.DataPointer;
        var destPtr = bmpData.Scan0;

        for (int y = 0; y < HEIGHT; y++)
        {
            // Copy a single line
            Utilities.CopyMemory(destPtr, sourcePtr, ARGB_WIDTH);

            // Advance pointers
            sourcePtr = IntPtr.Add(sourcePtr, dx9Map.Pitch);
            destPtr = IntPtr.Add(destPtr, bmpData.Stride);
        }

        screenShot.UnlockBits(bmpData);
        dx9ScreenSurface.UnlockRectangle();
        dx9ScreenSurface.Dispose();
        bmpData = null;
        GC.Collect();

        return screenShot;
    }
    catch (Exception ex)
    {
        LdpLog.Error("GetDX9ScreenShot error.\n" + ex.Message);
        return screenShot = null;
    }
}
/// <summary>
/// Function to create a list of WIC bitmaps from Gorgon image data.
/// </summary>
/// <param name="data">Data to convert to the list of WIC bitmaps.</param>
/// <returns>The list of WIC bitmaps.</returns>
public WIC.Bitmap[] CreateWICBitmapsFromImageData(GorgonImageData data)
{
    int bitmapIndex = 0;
    Guid bitmapFormat = GetGUID(data.Settings.Format);

    if (bitmapFormat == Guid.Empty)
    {
        throw new GorgonException(GorgonResult.FormatNotSupported,
                                  string.Format(Resources.GORGFX_FORMAT_NOT_SUPPORTED, data.Settings.Format));
    }

    // Make room for all the buffers.
    var bitmaps = new WIC.Bitmap[data.Buffers.Count];

    // Copy to the bitmap.
    foreach (var buffer in data.Buffers)
    {
        var pointer = new DX.DataRectangle(buffer.Data.BasePointer, buffer.PitchInformation.RowPitch);
        bitmaps[bitmapIndex] = new WIC.Bitmap(Factory, buffer.Width, buffer.Height, bitmapFormat, pointer, pointer.Pitch * buffer.Height);
        bitmapIndex++;
    }

    return bitmaps;
}
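A minimal usage sketch, assuming a wrapper instance (hypothetically named wic) and that the caller disposes the returned bitmaps once they have been consumed:

// Hypothetical caller; disposes every bitmap created for the image buffers.
WIC.Bitmap[] bitmaps = wic.CreateWICBitmapsFromImageData(imageData);
try
{
    // ... encode or otherwise consume each buffer's bitmap ...
}
finally
{
    foreach (var bitmap in bitmaps)
    {
        bitmap.Dispose();
    }
}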
public unsafe Texture2D GetTexture(Imaging.ImageFile source)
{
    var key = source.Path;
    Texture2D tex;
    if (_Textures.TryGetValue(key, out tex))
        return tex;

    byte[] buffer;
    var desc = new Texture2DDescription
    {
        ArraySize = 1,
        BindFlags = BindFlags.ShaderResource,
        CpuAccessFlags = CpuAccessFlags.None,
        Height = source.Height,
        Width = source.Width,
        MipLevels = 1,
        OptionFlags = ResourceOptionFlags.None,
        SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0),
        Usage = ResourceUsage.Immutable
    };

    if (source.Format == Imaging.ImageFormat.A16R16G16B16Float)
    {
        buffer = source.GetData();
        desc.Format = SharpDX.DXGI.Format.R16G16B16A16_Float;
    }
    else
    {
        buffer = Imaging.ImageConverter.GetA8R8G8B8(source);

        // Swap the first and third byte of each pixel so the layout matches R8G8B8A8_UNorm.
        for (var i = 0; i < buffer.Length; i += 4)
        {
            var r = buffer[i + 0];
            var b = buffer[i + 2];
            buffer[i + 0] = b;
            buffer[i + 2] = r;
        }

        desc.Format = SharpDX.DXGI.Format.R8G8B8A8_UNorm;

        if (source.Format == Imaging.ImageFormat.A8R8G8B8_Cube)
            desc.ArraySize = 6;
    }

    fixed (byte* p = buffer)
    {
        var ptr = (IntPtr)p;
        var pitch = SharpDX.DXGI.FormatHelper.SizeOfInBytes(desc.Format) * source.Width;
        var dataRects = new DataRectangle[desc.ArraySize];

        // One DataRectangle per array slice (6 faces for a cube map), each offset by a full face.
        for (var i = 0; i < desc.ArraySize; ++i)
            dataRects[i] = new DataRectangle(ptr + i * pitch * source.Height, pitch);

        tex = new Texture2D(_Engine.Device, desc, dataRects);
    }

    _Textures.Add(key, tex);
    return tex;
}
private Bitmap GetDX11ScreenShot()
{
    try
    {
        screenShot = null;
        screenShot = new Bitmap(WIDTH, HEIGHT, this.pixelFormat);

        if (dx11DuplicatedOutput != null)
        {
            dx11DuplicatedOutput.AcquireNextFrame(NEXT_FRAME_TIMEOUT, out dx11DuplFrameInfo, out dx11ScreenResource);
            dx11Device.ImmediateContext
                .CopyResource(dx11ScreenResource.QueryInterface<SharpDX.Direct3D11.Resource>(), dx11ScreenTexture);

            // cast from texture to surface, so we can access its bytes
            dx11ScreenSurface = dx11ScreenTexture.QueryInterface<SharpDX.DXGI.Surface>();

            // map the resource to access it
            dx11Map = dx11ScreenSurface.Map(SharpDX.DXGI.MapFlags.Read);

            bmpData = screenShot.LockBits(boundsRect, ImageLockMode.WriteOnly, screenShot.PixelFormat);

            var sourcePtr = dx11Map.DataPointer;
            var destPtr = bmpData.Scan0;

            for (int y = 0; y < HEIGHT; y++)
            {
                // Copy a single line
                Utilities.CopyMemory(destPtr, sourcePtr, ARGB_WIDTH);

                // Advance pointers
                sourcePtr = IntPtr.Add(sourcePtr, dx11Map.Pitch);
                destPtr = IntPtr.Add(destPtr, bmpData.Stride);
            }

            dx11Device.ImmediateContext.UnmapSubresource(dx11ScreenTexture, 0);
            screenShot.UnlockBits(bmpData);
            dx11ScreenSurface.Unmap();
            dx11ScreenSurface.Dispose();
            dx11ScreenResource.Dispose();
            dx11DuplicatedOutput.ReleaseFrame();
        }
        else
        {
            return screenShot = null;
        }

        dx11ScreenSurface = null;
        bmpData = null;
        GC.Collect();

        return screenShot;
    }
    catch (SharpDX.SharpDXException e)
    {
        if (e.ResultCode.Code == SharpDX.DXGI.ResultCode.WaitTimeout.Result.Code)
        {
            // screen has not changed
            LdpLog.Warning("DX11 surface timeout.. Recursion is coming:)");
            return GetDX11ScreenShot();
        }
        else
        {
            return screenShot = null;
        }
    }
    catch (Exception ex)
    {
        LdpLog.Error("GetDX11ScreenShot\n" + ex.Message);
        return screenShot = null;
    }
}
private static void Save(IResource res, Stream stream, ImageFileFormat fmt)
{
    var texture = res.Resource as Texture2D;
    var textureCopy = new Texture2D(MyRender11.Device, new Texture2DDescription
    {
        Width = (int)texture.Description.Width,
        Height = (int)texture.Description.Height,
        MipLevels = 1,
        ArraySize = 1,
        Format = texture.Description.Format,
        Usage = ResourceUsage.Staging,
        SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0),
        BindFlags = BindFlags.None,
        CpuAccessFlags = CpuAccessFlags.Read,
        OptionFlags = ResourceOptionFlags.None
    });
    RC.CopyResource(res, textureCopy);

    DataStream dataStream;
    var dataBox = RC.MapSubresource(
        textureCopy,
        0, 0,
        MapMode.Read,
        MapFlags.None,
        out dataStream);

    var dataRectangle = new DataRectangle
    {
        DataPointer = dataStream.DataPointer,
        Pitch = dataBox.RowPitch
    };

    var bitmap = new Bitmap(
        MyRender11.WIC,
        textureCopy.Description.Width,
        textureCopy.Description.Height,
        PixelFormatFromFormat(textureCopy.Description.Format), // TODO: should use some conversion from textureCopy.Description.Format
        dataRectangle);

    using (var wicStream = new WICStream(MyRender11.WIC, stream))
    {
        BitmapEncoder bitmapEncoder;
        switch (fmt)
        {
            case ImageFileFormat.Png:
                bitmapEncoder = new PngBitmapEncoder(MyRender11.WIC, wicStream);
                break;
            case ImageFileFormat.Jpg:
                bitmapEncoder = new JpegBitmapEncoder(MyRender11.WIC, wicStream);
                break;
            case ImageFileFormat.Bmp:
                bitmapEncoder = new BmpBitmapEncoder(MyRender11.WIC, wicStream);
                break;
            default:
                MyRenderProxy.Assert(false, "Unsupported file format.");
                bitmapEncoder = null;
                break;
        }

        if (bitmapEncoder != null)
        {
            using (var bitmapFrameEncode = new BitmapFrameEncode(bitmapEncoder))
            {
                bitmapFrameEncode.Initialize();
                bitmapFrameEncode.SetSize(bitmap.Size.Width, bitmap.Size.Height);
                var pixelFormat = PixelFormat.FormatDontCare;
                bitmapFrameEncode.SetPixelFormat(ref pixelFormat);
                bitmapFrameEncode.WriteSource(bitmap);
                bitmapFrameEncode.Commit();
                bitmapEncoder.Commit();
            }

            bitmapEncoder.Dispose();
        }
    }

    RC.UnmapSubresource(textureCopy, 0);
    textureCopy.Dispose();
    bitmap.Dispose();
}
private async void context_DeviceReset(object sender, DeviceResetEventArgs e)
{
    this.ReleaseResources();

    string assetsPath = Package.Current.InstalledLocation.Path + "/Assets/Render/";

    byte[] vertexShaderByteCode = NativeFile.ReadAllBytes(assetsPath + "MiniCubeTexture_VS.fxo");
    this.vertexShader = new VertexShader(this.parentContext.D3DDevice, vertexShaderByteCode);

    byte[] pixelShaderByteCode = NativeFile.ReadAllBytes(assetsPath + "MiniCubeTexture_PS.fxo");
    this.pixelShader = new PixelShader(this.parentContext.D3DDevice, pixelShaderByteCode);

    this.vertexLayout = new InputLayout(this.parentContext.D3DDevice, vertexShaderByteCode, new[]
    {
        new InputElement("POSITION", 0, SharpDX.DXGI.Format.R32G32B32A32_Float, 0, 0),
        new InputElement("TEXCOORD", 0, SharpDX.DXGI.Format.R32G32_Float, 16, 0)
    });

    SharpDX.Direct3D11.Buffer vertices = SharpDX.Direct3D11.Buffer.Create(this.parentContext.D3DDevice, BindFlags.VertexBuffer, new[]
    {
        -0.5f, -0.5f, -0.5f, 0.5f, 0.0f, 1.0f,
        -0.5f,  0.5f, -0.5f, 0.5f, 0.0f, 0.0f,
         0.5f,  0.5f, -0.5f, 0.5f, 1.0f, 0.0f,
        -0.5f, -0.5f, -0.5f, 0.5f, 0.0f, 1.0f,
         0.5f,  0.5f, -0.5f, 0.5f, 1.0f, 0.0f,
         0.5f, -0.5f, -0.5f, 0.5f, 1.0f, 1.0f,
    });

    this.vertexBufferBinding = new VertexBufferBinding(vertices, sizeof(float) * 6, 0);

    this.constantBuffer = new SharpDX.Direct3D11.Buffer(this.parentContext.D3DDevice, Utilities.SizeOf<Matrix>(), ResourceUsage.Default, BindFlags.ConstantBuffer, CpuAccessFlags.None, ResourceOptionFlags.None, 0);

    this.sampler = new SamplerState(this.parentContext.D3DDevice, new SamplerStateDescription()
    {
        Filter = Filter.MinMagMipLinear,
        AddressU = TextureAddressMode.Wrap,
        AddressV = TextureAddressMode.Wrap,
        AddressW = TextureAddressMode.Wrap,
        BorderColor = Color.Black,
        ComparisonFunction = Comparison.Never,
        MaximumAnisotropy = 16,
        MipLodBias = 0,
        MinimumLod = -float.MaxValue,
        MaximumLod = float.MaxValue
    });

#if SILVERLIGHT
    Deployment.Current.Dispatcher.BeginInvoke(() =>
#else
    await CoreWindow.GetForCurrentThread().Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
#endif
    {
        using (MemoryStream sourceStream = new MemoryStream(NativeFile.ReadAllBytes(assetsPath + "sharpdx.png")))
        {
#if SILVERLIGHT
            BitmapImage image = new BitmapImage();
            image.CreateOptions = BitmapCreateOptions.None;
            image.SetSource(sourceStream);

            WriteableBitmap bitmap = new WriteableBitmap(image);

            using (DataStream dataStream = new DataStream(bitmap.Pixels.Length * 4, true, true))
            {
                dataStream.WriteRange<int>(bitmap.Pixels);
#else
            BitmapDecoder decoder = await BitmapDecoder.CreateAsync(sourceStream.AsRandomAccessStream());
            BitmapFrame bitmap = await decoder.GetFrameAsync(0);
            PixelDataProvider dataProvider = await bitmap.GetPixelDataAsync(BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied, new BitmapTransform(), ExifOrientationMode.IgnoreExifOrientation, ColorManagementMode.DoNotColorManage);
            byte[] pixelData = dataProvider.DetachPixelData();

            using (DataStream dataStream = new DataStream(pixelData.Length, true, true))
            {
                dataStream.WriteRange<byte>(pixelData);
#endif
                dataStream.Seek(0, SeekOrigin.Begin);

                DataRectangle dataRectangle = new DataRectangle(dataStream.DataPointer, (int)(bitmap.PixelWidth * 4));

                this.texture = new Texture2D(this.parentContext.D3DDevice, new Texture2DDescription()
                {
                    Format = SharpDX.DXGI.Format.B8G8R8A8_UNorm,
                    Width = (int)bitmap.PixelWidth,
                    Height = (int)bitmap.PixelHeight,
                    ArraySize = 1,
                    MipLevels = 1,
                    BindFlags = BindFlags.ShaderResource,
                    Usage = ResourceUsage.Default,
                    CpuAccessFlags = CpuAccessFlags.None,
                    OptionFlags = ResourceOptionFlags.None,
                    SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0)
                }, dataRectangle);

                this.textureView = new ShaderResourceView(this.parentContext.D3DDevice, this.texture);
            }

#if SILVERLIGHT
            bitmap = null;
            image = null;
#else
            pixelData = null;
            dataProvider = null;
            bitmap = null;
            decoder = null;
#endif
        }
    });
}
/// <summary>
/// Implementation of capturing from the render target of the Direct3D9 Device (or DeviceEx)
/// </summary>
/// <param name="device"></param>
void DoCaptureRenderTarget(Device device, string hook)
{
    this.Frame();

    try
    {
        #region Screenshot Request

        // If we have issued the command to copy data to our render target, check if it is complete
        bool qryResult;
        if (_queryIssued && _requestCopy != null && _query.GetData(out qryResult, false))
        {
            // The GPU has finished copying data to _renderTargetCopy, we can now lock
            // the data and access it on another thread.
            _queryIssued = false;

            // Lock the render target
            SharpDX.Rectangle rect;
            SharpDX.DataRectangle lockedRect = LockRenderTarget(_renderTargetCopy, out rect);
            _renderTargetCopyLocked = true;

            // Copy the data from the render target
            System.Threading.Tasks.Task.Factory.StartNew(() =>
            {
                lock (_lockRenderTarget)
                {
                    ProcessCapture(rect.Width, rect.Height, lockedRect.Pitch, _renderTargetCopy.Description.Format.ToPixelFormat(), lockedRect.DataPointer, _requestCopy);
                }
            });
        }

        // Single frame capture request
        if (this.Request != null)
        {
            DateTime start = DateTime.Now;
            try
            {
                using (Surface renderTarget = device.GetRenderTarget(0))
                {
                    int width, height;

                    // If resizing of the captured image, determine correct dimensions
                    if (Request.Resize != null && (renderTarget.Description.Width > Request.Resize.Value.Width || renderTarget.Description.Height > Request.Resize.Value.Height))
                    {
                        if (renderTarget.Description.Width > Request.Resize.Value.Width)
                        {
                            width = Request.Resize.Value.Width;
                            height = (int)Math.Round((renderTarget.Description.Height * ((double)Request.Resize.Value.Width / (double)renderTarget.Description.Width)));
                        }
                        else
                        {
                            height = Request.Resize.Value.Height;
                            width = (int)Math.Round((renderTarget.Description.Width * ((double)Request.Resize.Value.Height / (double)renderTarget.Description.Height)));
                        }
                    }
                    else
                    {
                        width = renderTarget.Description.Width;
                        height = renderTarget.Description.Height;
                    }

                    // If existing _renderTargetCopy, ensure that it is the correct size and format
                    if (_renderTargetCopy != null && (_renderTargetCopy.Description.Width != width || _renderTargetCopy.Description.Height != height || _renderTargetCopy.Description.Format != renderTarget.Description.Format))
                    {
                        // Cleanup resources
                        Cleanup();
                    }

                    // Ensure that we have something to put the render target data into
                    if (!_resourcesInitialised || _renderTargetCopy == null)
                    {
                        CreateResources(device, width, height, renderTarget.Description.Format);
                    }

                    // Resize from render target Surface to resolvedSurface (also deals with resolving multi-sampling)
                    device.StretchRectangle(renderTarget, _resolvedTarget, TextureFilter.None);
                }

                // If the render target is locked from a previous request unlock it
                if (_renderTargetCopyLocked)
                {
                    // Wait for the ProcessCapture thread to finish with it
                    lock (_lockRenderTarget)
                    {
                        if (_renderTargetCopyLocked)
                        {
                            _renderTargetCopy.UnlockRectangle();
                            _renderTargetCopyLocked = false;
                        }
                    }
                }

                // Copy data from resolved target to our render target copy
                device.GetRenderTargetData(_resolvedTarget, _renderTargetCopy);

                _requestCopy = Request.Clone();
                _query.Issue(Issue.End);
                _queryIssued = true;
            }
            finally
            {
                // We have completed the request - mark it as null so we do not continue to try to capture the same request
                // Note: If you are after high frame rates, consider implementing buffers here to capture more frequently
                //       and send back to the host application as needed. The IPC overhead significantly slows down
                //       the whole process if sending frame by frame.
                Request = null;
            }

            DateTime end = DateTime.Now;
            this.DebugMessage(hook + ": Capture time: " + (end - start).ToString());
        }

        #endregion

        if (this.Config.ShowOverlay)
        {
            #region Draw Overlay

            // Check if overlay needs to be initialised
            if (_overlayEngine == null || _overlayEngine.Device.NativePointer != device.NativePointer)
            {
                // Cleanup if necessary
                if (_overlayEngine != null)
                {
                    RemoveAndDispose(ref _overlayEngine);
                }

                _overlayEngine = ToDispose(new DX9.DXOverlayEngine());

                // Create Overlay
                _overlayEngine.Overlays.Add(new Capture.Hook.Common.Overlay
                {
                    Elements =
                    {
                        // Add frame rate
                        new Capture.Hook.Common.FramesPerSecond(new System.Drawing.Font("Arial", 16, FontStyle.Bold))
                        {
                            Location = new System.Drawing.Point(5, 5),
                            Color = System.Drawing.Color.Red,
                            AntiAliased = true
                        },
                        // Example of adding an image to overlay (can implement semi-transparency with Tint, e.g. Tint = Color.FromArgb(127, 255, 255, 255))
                        //new Capture.Hook.Common.ImageElement(@"C:\Temp\test.bmp") { Location = new System.Drawing.Point(20, 20) }
                    }
                });

                _overlayEngine.Initialise(device);
            }
            // Draw Overlay(s)
            else if (_overlayEngine != null)
            {
                foreach (var overlay in _overlayEngine.Overlays)
                {
                    overlay.Frame();
                }
                _overlayEngine.Draw();
            }

            #endregion
        }
    }
    catch (Exception e)
    {
        DebugMessage(e.ToString());
    }
}
public Texture2D GetTexture2D(Device device)
{
    IntPtr unmanagedPtr = Marshal.AllocHGlobal(PixelCount);
    Marshal.Copy(GetPixelData(), 0, unmanagedPtr, PixelCount);

    DataRectangle data = new DataRectangle();
    data.DataPointer = unmanagedPtr;
    data.Pitch = Width * 4;

    Texture2DDescription textureDesc = new Texture2DDescription
    {
        ArraySize = 1,
        BindFlags = BindFlags.ShaderResource,
        Usage = ResourceUsage.Dynamic,
        CpuAccessFlags = CpuAccessFlags.Write,
        Format = SharpDX.DXGI.Format.B8G8R8A8_UNorm,
        Height = Height,
        Width = Width,
        MipLevels = 1,
        OptionFlags = ResourceOptionFlags.None,
        SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0)
    };

    Texture2D texture = new Texture2D(device, textureDesc, data);

    // The initial data is copied into the texture at creation, so the unmanaged buffer can be released here.
    Marshal.FreeHGlobal(unmanagedPtr);

    return texture;
}
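A minimal usage sketch, assuming this method lives on a frame/image wrapper (hypothetically named frame) whose pixels should end up bound to a pixel shader:

// Hypothetical caller: build the texture, expose it to shaders, then release it.
using (Texture2D texture = frame.GetTexture2D(device))
using (var view = new ShaderResourceView(device, texture))
{
    device.ImmediateContext.PixelShader.SetShaderResource(0, view);
    // ... issue draw calls ...
}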
private static Texture2D CreateTexture2DFromBitmapSource(Device device, BitmapSource bitmapSource, TextureLoadOptions options)
{
    // Allocate DataStream to receive the WIC image pixels
    var stride = bitmapSource.Size.Width * 4;
    using (var buffer = new DataStream(bitmapSource.Size.Height * stride, true, true))
    {
        // Copy the content of the WIC to the buffer
        bitmapSource.CopyPixels(stride, buffer);

        var texture2DDescription = new Texture2DDescription()
        {
            Width = bitmapSource.Size.Width,
            Height = bitmapSource.Size.Height,
            ArraySize = 1,
            BindFlags = options.BindFlags,
            Usage = options.ResourceUsage,
            CpuAccessFlags = options.CpuAccessFlags,
            Format = options.Format,
            MipLevels = options.MipLevels,
            OptionFlags = ResourceOptionFlags.None,
            SampleDescription = new SampleDescription(1, 0),
        };

        bitmapSource.Dispose();

        var dataRectangle = new DataRectangle(buffer.DataPointer, stride);
        return new Texture2D(device, texture2DDescription, dataRectangle);
    }
}
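A minimal sketch of how the BitmapSource argument might be produced with SharpDX.WIC, assuming the image is converted to 32 bits per pixel so the stride = width * 4 assumption above holds (e.g. Format32bppPRGBA paired with an R8G8B8A8_UNorm options.Format); fileName and options are hypothetical caller-supplied values:

// Decode an image file and convert it to a 32bpp premultiplied-RGBA source.
using (var wicFactory = new SharpDX.WIC.ImagingFactory())
using (var decoder = new SharpDX.WIC.BitmapDecoder(wicFactory, fileName, SharpDX.WIC.DecodeOptions.CacheOnDemand))
using (var frame = decoder.GetFrame(0))
{
    var converter = new SharpDX.WIC.FormatConverter(wicFactory);
    converter.Initialize(frame, SharpDX.WIC.PixelFormat.Format32bppPRGBA);

    // CreateTexture2DFromBitmapSource disposes the converter for us.
    return CreateTexture2DFromBitmapSource(device, converter, options);
}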
Texture2D createTextureFromFile(string filename)
{
    BitmapImage loadedImage = new BitmapImage();
    loadedImage.BeginInit();
    loadedImage.CacheOption = BitmapCacheOption.OnLoad;
    loadedImage.UriSource = new Uri(filename);
    loadedImage.EndInit();
    loadedImage.Freeze();

    int stride = loadedImage.PixelWidth * (loadedImage.Format.BitsPerPixel / 8);
    byte[] pixels = new byte[loadedImage.PixelHeight * stride];
    loadedImage.CopyPixels(pixels, stride, 0);

    // Pin the managed pixel array so its address can be handed to Direct3D.
    // The handle is kept in a field; it can be freed once the texture has been
    // created, because initial data for a Default-usage texture is copied at creation.
    pinnedArray = GCHandle.Alloc(pixels, GCHandleType.Pinned);
    IntPtr pixelPtr = pinnedArray.AddrOfPinnedObject();

    DataRectangle data = new DataRectangle(pixelPtr, stride);

    var texDesc = new Texture2DDescription
    {
        ArraySize = 1,
        BindFlags = BindFlags.ShaderResource,
        CpuAccessFlags = CpuAccessFlags.None,
        Format = Format.B8G8R8A8_UNorm,
        Height = loadedImage.PixelHeight,
        MipLevels = 1,
        OptionFlags = ResourceOptionFlags.None,
        SampleDescription = new SampleDescription(1, 0),
        Usage = ResourceUsage.Default,
        Width = loadedImage.PixelWidth
    };

    Texture2D texture = new Texture2D(Host.Device, texDesc, data);
    return texture;
}
private static DxTexture ReadTextureCubeFromStream(GtexData gtex, Stream input)
{
    Texture2DDescription descriptor = GetTextureCubeDescription(gtex);

    using (SafeUnmanagedArray array = new SafeUnmanagedArray(gtex.MipMapData.Sum(d => d.Length)))
    {
        DataRectangle[] rects = new DataRectangle[gtex.MipMapData.Length];

        using (UnmanagedMemoryStream io = array.OpenStream(FileAccess.Write))
        {
            byte[] buff = new byte[32 * 1024];
            for (int index = 0; index < gtex.MipMapData.Length; index++)
            {
                GtexMipMapLocation mimMap = gtex.MipMapData[index];
                Int32 pitch = GetPitch(descriptor, index);
                rects[index] = CreateDataRectangle(array, io, pitch);
                input.SetPosition(mimMap.Offset);
                input.CopyToStream(io, mimMap.Length, buff);
            }
        }

        Texture2D texture = new Texture2D(_device, descriptor, rects);

        // Workaround
        _textureCreatingWorkaround(_device.ImmediateContext, texture, ImageFileFormat.Dds);

        return new DxTexture(texture, descriptor);
    }
}