/// <summary>
/// Locks a region of this pixel buffer and returns a <see cref="PixelBox"/>
/// describing the locked memory.
/// </summary>
/// <param name="lockBox">Region of the buffer to lock.</param>
/// <param name="options">Locking mode (read, write, discard, ...).</param>
/// <returns>A PixelBox giving access to the locked data.</returns>
/// <exception cref="AxiomException">
/// If this buffer is a render target, or no device resources are attached.
/// </exception>
protected override PixelBox LockImpl(BasicBox lockBox, BufferLocking options)
{
    // Check for misuse before taking the device lock: render targets can never
    // be locked, so there is no need to enter the critical section for this.
    if (((int)usage & (int)TextureUsage.RenderTarget) != 0)
    {
        throw new AxiomException(
            "DirectX does not allow locking of or directly writing to RenderTargets. Use BlitFromMemory if you need the contents.");
    }

    //Entering critical section
    LockDeviceAccess();
    try
    {
        // Set locking flags according to options
        var flags = D3D9Helper.ConvertEnum(options, usage);

        if (this.mapDeviceToBufferResources.Count == 0)
        {
            throw new AxiomException("There are no resources attached to this pixel buffer !!");
        }

        lockedBox = lockBox;
        this.lockFlags = flags;

        var bufferResources = this.mapDeviceToBufferResources.First().Value;

        // Lock the source buffer.
        return LockBuffer(bufferResources, lockBox, flags);
    }
    finally
    {
        //Leaving critical section — the finally block guarantees the device
        //access lock is released even when LockBuffer or the checks throw;
        //previously an exception here left the device permanently locked.
        UnlockDeviceAccess();
    }
}
/// <summary>
/// Associates a D3D9 volume (and the mip-level texture that owns it) with this
/// pixel buffer for the given device, and refreshes the buffer's cached
/// dimensions, format, pitches and size from the volume's description.
/// </summary>
/// <param name="dev">Device that owns <paramref name="volume"/>.</param>
/// <param name="volume">The volume level this buffer wraps.</param>
/// <param name="mipTex">Owning texture, kept for later mipmap generation.</param>
public void Bind(D3D9.Device dev, D3D9.Volume volume, D3D9.BaseTexture mipTex)
{
    //Entering critical section
    LockDeviceAccess();

    // Reuse the per-device resource slot if one already exists for this device.
    var bufferResources = GetBufferResources(dev);
    var isNewBuffer = false;
    if (bufferResources == null)
    {
        bufferResources = new BufferResources();
        this.mapDeviceToBufferResources.Add(dev, bufferResources);
        isNewBuffer = true;
    }

    bufferResources.MipTex = mipTex;
    bufferResources.Volume = volume;

    // Cache geometry and format information from the volume description.
    var desc = volume.Description;
    width = desc.Width;
    height = desc.Height;
    depth = desc.Depth;
    format = D3D9Helper.ConvertEnum(desc.Format);

    // Default pitches assume a tightly packed buffer (no row/slice padding).
    rowPitch = Width;
    slicePitch = Height * Width;
    sizeInBytes = PixelUtil.GetMemorySize(Width, Height, Depth, Format);

    // For a freshly created per-device buffer of a manually loaded texture,
    // seed its contents from another device's already-populated volume so all
    // devices hold the same data. Only attempted when both the source device
    // and the new device report a healthy cooperative level.
    if (isNewBuffer && this.ownerTexture.IsManuallyLoaded)
    {
        foreach (var it in this.mapDeviceToBufferResources)
        {
            if (it.Value != bufferResources && it.Value.Volume != null &&
                it.Key.TestCooperativeLevel().Success && dev.TestCooperativeLevel().Success)
            {
                var fullBufferBox = new BasicBox(0, 0, 0, Width, Height, Depth);
                var dstBox = new PixelBox(fullBufferBox, Format);

                var data = new byte[sizeInBytes];
                using (var d = BufferBase.Wrap(data))
                {
                    dstBox.Data = d;
                    // Round-trip through system memory: read the source device's
                    // volume, then write that data into the new device's volume.
                    BlitToMemory(fullBufferBox, dstBox, it.Value, it.Key);
                    BlitFromMemory(dstBox, fullBufferBox, bufferResources);
                    // NOTE(review): zeroing the temp array just before it goes out
                    // of scope appears redundant — confirm whether this is a
                    // deliberate scrub or leftover code.
                    Array.Clear(data, 0, sizeInBytes);
                }
                // Only one source device is needed; stop after the first copy.
                break;
            }
        }
    }

    //Leaving critical section
    UnlockDeviceAccess();
}
/// <summary>
/// Returns the native pixel format the hardware will actually use for a
/// texture of the given type, requested format and usage, by asking D3DX
/// (via CheckRequirements) to adjust the format.
/// </summary>
/// <param name="ttype">Kind of texture (1D/2D, 3D, cube).</param>
/// <param name="format">Requested Axiom pixel format.</param>
/// <param name="usage">Intended texture usage flags.</param>
/// <returns>The closest natively supported Axiom pixel format.</returns>
public override Axiom.Media.PixelFormat GetNativeFormat(TextureType ttype, PixelFormat format, TextureUsage usage)
{
    // Basic filtering: start from the closest format the helper knows about.
    var nativeFormat = D3D9Helper.ConvertEnum(D3D9Helper.GetClosestSupported(format));

    // Work out the D3D usage flags and the memory pool they imply.
    var usageFlags = D3D9.Usage.None;
    var memoryPool = D3D9.Pool.Managed;
    if ((usage & TextureUsage.RenderTarget) != 0)
    {
        usageFlags |= D3D9.Usage.RenderTarget;
        memoryPool = D3D9.Pool.Default;
    }
    if ((usage & TextureUsage.Dynamic) != 0)
    {
        usageFlags |= D3D9.Usage.Dynamic;
        memoryPool = D3D9.Pool.Default;
    }

    var device = D3D9RenderSystem.ActiveD3D9Device;

    // Use D3DX to adjust the pixel format for the concrete texture type.
    switch (ttype)
    {
        case TextureType.OneD:
        case TextureType.TwoD:
            nativeFormat = D3D9.Texture.CheckRequirements(device, 0, 0, 0, usageFlags,
                                                          D3D9Helper.ConvertEnum(format), memoryPool).Format;
            break;

        case TextureType.ThreeD:
            nativeFormat = D3D9.VolumeTexture.CheckRequirements(device, 0, 0, 0, 0, usageFlags,
                                                                D3D9Helper.ConvertEnum(format), memoryPool).Format;
            break;

        case TextureType.CubeMap:
            nativeFormat = D3D9.CubeTexture.CheckRequirements(device, 0, 0, usageFlags,
                                                              D3D9Helper.ConvertEnum(format), memoryPool).Format;
            break;
    }

    return D3D9Helper.ConvertEnum(nativeFormat);
}
/// <summary>
/// Pushes the contents of the system-memory shadow buffer into the device
/// index buffer over the currently recorded dirty region, then resets the
/// dirty-region bookkeeping.
/// </summary>
/// <param name="systemMemoryBuffer">Shadow copy of the index data.</param>
/// <param name="bufferResources">Per-device resources to refresh; must be out of date.</param>
/// <returns>Always true on success.</returns>
/// <exception cref="AxiomException">If the D3D9 buffer cannot be locked or unlocked.</exception>
private bool _updateBufferResources(BufferBase systemMemoryBuffer, ref BufferResources bufferResources)
{
    Contract.RequiresNotNull(bufferResources, "Cannot update BufferResources in D3D9HardwareIndexBuffer!");
    Contract.RequiresNotNull(bufferResources.IndexBuffer, "Cannot update BufferResources in D3D9HardwareIndexBuffer!");
    Contract.Requires(bufferResources.IsOutOfDate);

    // Lock the device buffer over the stale region.
    DX.DataStream lockedStream;
    try
    {
        lockedStream = bufferResources.IndexBuffer.Lock(bufferResources.LockOffset, bufferResources.LockLength,
                                                        D3D9Helper.ConvertEnum(bufferResources.LockOptions, usage));
    }
    catch (Exception ex)
    {
        throw new AxiomException("Cannot lock D3D9 index buffer!", ex);
    }

    // Copy the shadow data into the locked region.
    using (var source = systemMemoryBuffer + bufferResources.LockOffset)
    using (var destination = BufferBase.Wrap(lockedStream.DataPointer, (int)lockedStream.Length))
    {
        Memory.Copy(source, destination, bufferResources.LockLength);
    }

    // Unlock the buffer.
    var unlockResult = bufferResources.IndexBuffer.Unlock();
    if (unlockResult.Failure)
    {
        throw new AxiomException("Cannot unlock D3D9 index buffer: {0}", unlockResult.ToString());
    }

    // Reset the dirty-region bookkeeping: nothing pending, offset parked at end.
    bufferResources.IsOutOfDate = false;
    bufferResources.LockOffset = sizeInBytes;
    bufferResources.LockLength = 0;
    bufferResources.LockOptions = BufferLocking.Normal;

    return true;
}
/// <summary>
/// (Re)creates the D3D9 index buffer for the given device in the given pool,
/// marking the whole buffer out of date so it gets refilled from system memory.
/// </summary>
/// <param name="d3d9Device">Device to create the buffer on.</param>
/// <param name="ePool">Memory pool to place the buffer in.</param>
/// <exception cref="AxiomException">If the D3D9 index buffer cannot be created.</exception>
public void CreateBuffer(D3D9.Device d3d9Device, D3D9.Pool ePool)
{
    //Entering critical section
    this.LockDeviceAccess();

    // Reuse this device's resource slot if one exists, disposing the stale
    // buffer first; otherwise register a fresh slot.
    BufferResources resources;
    if (!this._mapDeviceToBufferResources.TryGetValue(d3d9Device, out resources))
    {
        resources = new BufferResources();
        this._mapDeviceToBufferResources.Add(d3d9Device, resources);
    }
    else
    {
        resources.IndexBuffer.SafeDispose();
    }

    // Flag the entire buffer as needing a refresh from the shadow copy.
    resources.IndexBuffer = null;
    resources.IsOutOfDate = true;
    resources.LockOffset = 0;
    resources.LockLength = sizeInBytes;
    resources.LockOptions = BufferLocking.Normal;
    resources.LastUsedFrame = Root.Instance.NextFrameNumber;

    // Create the Index buffer
    try
    {
        resources.IndexBuffer = new D3D9.IndexBuffer(d3d9Device, sizeInBytes, D3D9Helper.ConvertEnum(usage), ePool,
                                                     D3D9Helper.ConvertEnum(type));
    }
    catch (Exception ex)
    {
        throw new AxiomException("Cannot create D3D9 Index buffer", ex);
    }

    this._bufferDesc = resources.IndexBuffer.Description;

    //Leaving critical section
    this.UnlockDeviceAccess();
}
/// <summary>
/// Probes which pixel formats can be bound as vertex textures against the
/// render window's back-buffer format, logging every supported format.
/// </summary>
/// <param name="renderWindow">Window whose back buffer sets the adapter format.</param>
/// <returns>true when at least one format is usable as a vertex texture.</returns>
private bool _checkVertexTextureFormats(D3D9RenderWindow renderWindow)
{
    var foundSupported = false;

    var backBuffers = (D3D9.Surface[])renderWindow["DDBACKBUFFER"];
    var backBufferDesc = backBuffers[0].Description;

    for (var pf = PixelFormat.L8; pf < PixelFormat.Count; ++pf)
    {
        var d3dFormat = D3D9Helper.ConvertEnum(D3D9Helper.GetClosestSupported(pf));

        if (this._pD3D.CheckDeviceFormat(this._activeD3DDriver.AdapterNumber, D3D9.DeviceType.Hardware,
                                         backBufferDesc.Format, D3D9.Usage.QueryVertexTexture,
                                         D3D9.ResourceType.Texture, d3dFormat))
        {
            // cool, at least one supported
            foundSupported = true;
            LogManager.Instance.Write("D3D9: Vertex texture format supported - {0}", PixelUtil.GetFormatName(pf));
        }
    }

    return foundSupported;
}
/// <summary>
/// Copies a region of the device-side surface or volume into the caller's
/// system-memory <see cref="PixelBox"/>, converting pixel format if needed.
/// Works by blitting into a scratch texture, locking it, and bulk-converting
/// the locked data into <paramref name="dst"/>.
/// </summary>
/// <param name="srcBox">Source region within this buffer.</param>
/// <param name="dst">Destination system-memory pixel box (its Data receives the pixels).</param>
/// <param name="srcBufferResources">Per-device resources holding the source surface/volume.</param>
/// <param name="d3d9Device">Device that owns the source resources.</param>
/// <exception cref="AxiomException">If the D3D surface/volume copy fails.</exception>
protected void BlitToMemory(BasicBox srcBox, PixelBox dst, BufferResources srcBufferResources, D3D9.Device d3d9Device)
{
    // Decide on pixel format of temp surface: use the destination's format when
    // D3D has a native equivalent, otherwise fall back to this buffer's format.
    PixelFormat tmpFormat = Format;
    if (D3D9Helper.ConvertEnum(dst.Format) != D3D9.Format.Unknown)
    {
        tmpFormat = dst.Format;
    }

    if (srcBufferResources.Surface != null)
    {
        // 2D path: surfaces have no depth.
        Debug.Assert(srcBox.Depth == 1 && dst.Depth == 1);

        var srcDesc = srcBufferResources.Surface.Description;
        var temppool = D3D9.Pool.Scratch;

        // If we're going to try to use GetRenderTargetData (the fast path for
        // render targets), the temp surface must live in the system-memory pool
        // and the copy must be a full-size, same-format transfer.
        var tryGetRenderTargetData = false;
        if (((srcDesc.Usage & D3D9.Usage.RenderTarget) != 0) && (srcBox.Width == dst.Width) &&
            (srcBox.Height == dst.Height) && (srcBox.Width == Width) && (srcBox.Height == Height) &&
            (Format == tmpFormat))
        {
            tryGetRenderTargetData = true;
            temppool = D3D9.Pool.SystemMemory;
        }

        // Create temp texture
        var tmp = new D3D9.Texture(d3d9Device, dst.Width, dst.Height, 1, // 1 mip level ie topmost, generate no mipmaps
                                   0, D3D9Helper.ConvertEnum(tmpFormat), temppool);
        var surface = tmp.GetSurfaceLevel(0);

        // Copy texture to this temp surface
        var srcRect = ToD3DRectangle(srcBox);
        var destRect = ToD3DRectangle(dst);

        // Get the real temp surface format — D3D may have adjusted it during
        // creation, and the later conversion must use the actual format.
        var dstDesc = surface.Description;
        tmpFormat = D3D9Helper.ConvertEnum(dstDesc.Format);

        // Use fast GetRenderTargetData if we are in its usage conditions
        var fastLoadSuccess = false;
        if (tryGetRenderTargetData)
        {
            var result = d3d9Device.GetRenderTargetData(srcBufferResources.Surface, surface);
            fastLoadSuccess = result.Success;
        }
        if (!fastLoadSuccess)
        {
            // Slow path: let D3DX stretch/convert into the temp surface.
            var res = D3D9.Surface.FromSurface(surface, srcBufferResources.Surface, D3D9.Filter.Default, 0, srcRect,
                                               destRect);
            if (res.Failure)
            {
                surface.SafeDispose();
                tmp.SafeDispose();
                throw new AxiomException("D3D9.Surface.FromSurface failed in D3D9HardwarePixelBuffer.BlitToMemory");
            }
        }

        // Lock temp surface and copy it to memory
        var lrect = surface.LockRectangle(D3D9.LockFlags.ReadOnly);

        // Copy it
        var locked = new PixelBox(dst.Width, dst.Height, dst.Depth, tmpFormat);
        FromD3DLock(locked, lrect);
        PixelConverter.BulkPixelConversion(locked, dst);
        surface.UnlockRectangle();

        // Release temporary surface and texture
        surface.SafeDispose();
        tmp.SafeDispose();
    }
    else if (srcBufferResources.Volume != null)
    {
        // 3D path: create a scratch volume texture sized like the destination.
        var tmp = new D3D9.VolumeTexture(d3d9Device, dst.Width, dst.Height, dst.Depth, 0, 0,
                                         D3D9Helper.ConvertEnum(tmpFormat), D3D9.Pool.Scratch);
        var surface = tmp.GetVolumeLevel(0);

        // Volume
        var ddestBox = ToD3DBoxExtent(dst);
        var dsrcBox = ToD3DBox(srcBox);

        var res = D3D9.Volume.FromVolume(surface, srcBufferResources.Volume, D3D9.Filter.Default, 0, dsrcBox, ddestBox);
        if (res.Failure)
        {
            surface.SafeDispose();
            tmp.SafeDispose();
            throw new AxiomException("D3D9.Surface.FromVolume failed in D3D9HardwarePixelBuffer.BlitToMemory");
        }

        // Lock temp surface and copy it to memory
        var lbox = surface.LockBox(D3D9.LockFlags.ReadOnly); // Filled in by D3D

        // Copy it
        var locked = new PixelBox(dst.Width, dst.Height, dst.Depth, tmpFormat);
        FromD3DLock(locked, lbox);
        PixelConverter.BulkPixelConversion(locked, dst);
        surface.UnlockBox();

        // Release temporary surface and texture
        surface.SafeDispose();
        tmp.SafeDispose();
    }
}
/// <summary>
/// Writes a system-memory <see cref="PixelBox"/> into a region of the
/// device-side surface or volume, converting to this buffer's native format
/// first when D3D has no native equivalent of the source format. Regenerates
/// mipmaps afterwards when enabled.
/// </summary>
/// <param name="src">Source system-memory pixels.</param>
/// <param name="dstBox">Destination region within this buffer.</param>
/// <param name="dstBufferResources">Per-device resources holding the destination surface/volume.</param>
/// <exception cref="AxiomException">If the D3D upload fails.</exception>
protected void BlitFromMemory(PixelBox src, BasicBox dstBox, BufferResources dstBufferResources)
{
    // for scoped deletion of conversion buffer
    var converted = src;
    var bufSize = 0;

    // convert to pixelbuffer's native format if necessary
    if (D3D9Helper.ConvertEnum(src.Format) == D3D9.Format.Unknown)
    {
        bufSize = PixelUtil.GetMemorySize(src.Width, src.Height, src.Depth, Format);
        var newBuffer = new byte[bufSize];
        using (var data = BufferBase.Wrap(newBuffer))
        {
            converted = new PixelBox(src.Width, src.Height, src.Depth, Format, data);
        }
        // NOTE(review): the BufferBase wrapper is disposed at the end of the
        // using block but `converted` still references it here and below —
        // this only works if BufferBase.Dispose is a no-op for wrapped managed
        // arrays; confirm against BufferBase's implementation.
        PixelConverter.BulkPixelConversion(src, converted);
    }

    // D3D wants the width of one row of cells in bytes.
    int rowWidth = 0;
    if (PixelUtil.IsCompressed(converted.Format))
    {
        // RowPitch counts pixels; blocks are 4 pixels wide.
        rowWidth = converted.RowPitch / 4;
        if (converted.Format == PixelFormat.DXT1)
        {
            // 64 bits (8 bytes) per 4x4 block
            rowWidth *= 8;
        }
        else
        {
            // 128 bits (16 bytes) per 4x4 block
            rowWidth *= 16;
        }
    }
    else
    {
        rowWidth = converted.RowPitch * PixelUtil.GetNumElemBytes(converted.Format);
    }

    if (dstBufferResources.Surface != null)
    {
        // 2D path: stage the pixels in a managed byte[] and let D3DX upload.
        var srcRect = ToD3DRectangle(converted);
        var destRect = ToD3DRectangle(dstBox);

        bufSize = PixelUtil.GetMemorySize(converted.Width, converted.Height, converted.Depth, converted.Format);
        var data = new byte[bufSize];
        using (var dest = BufferBase.Wrap(data))
        {
            Memory.Copy(converted.Data, dest, bufSize);
        }

        try
        {
            D3D9.Surface.FromMemory(dstBufferResources.Surface, data, D3D9.Filter.Default, 0,
                                    D3D9Helper.ConvertEnum(converted.Format), rowWidth, srcRect, destRect);
        }
        catch (Exception e)
        {
            throw new AxiomException("D3D9.Surface.FromMemory failed in D3D9HardwarePixelBuffer.BlitFromMemory", e);
        }
    }
    else if (dstBufferResources.Volume != null)
    {
        // 3D path.
        var srcBox = ToD3DBox(converted);
        var destBox = ToD3DBox(dstBox);

        // D3D wants the width of one slice of cells in bytes.
        var sliceWidth = 0;
        if (PixelUtil.IsCompressed(converted.Format))
        {
            // SlicePitch counts pixels; blocks cover 4x4 = 16 pixels.
            sliceWidth = converted.SlicePitch / 16;
            if (converted.Format == PixelFormat.DXT1)
            {
                // 64 bits (8 bytes) per 4x4 block
                sliceWidth *= 8;
            }
            else
            {
                // 128 bits (16 bytes) per 4x4 block
                sliceWidth *= 16;
            }
        }
        else
        {
            sliceWidth = converted.SlicePitch * PixelUtil.GetNumElemBytes(converted.Format);
        }

        bufSize = PixelUtil.GetMemorySize(converted.Width, converted.Height, converted.Depth, converted.Format);
        var data = new byte[bufSize];
        using (var dest = BufferBase.Wrap(data))
        {
            Memory.Copy(converted.Data, dest, bufSize);
        }

        //TODO note sliceWidth and rowWidth are ignored..
        // NOTE(review): FromFileInMemory is being fed raw pixel data, not an
        // image file, because the D3DX9 LoadVolumeFromMemory entry point is not
        // publicly accessible in this SharpDX build — verify this workaround
        // actually succeeds for non-file data.
        D3D9.ImageInformation info;
        try
        {
            //D3D9.D3DX9.LoadVolumeFromMemory() not accessible 'cause D3D9.D3DX9 static class is not public
            D3D9.Volume.FromFileInMemory(dstBufferResources.Volume, data, D3D9.Filter.Default, 0, srcBox, destBox,
                                         null, out info);
        }
        catch (Exception e)
        {
            throw new AxiomException("D3D9.Volume.FromFileInMemory failed in D3D9HardwarePixelBuffer.BlitFromMemory", e);
        }
    }

    // Rebuild the mip chain if this buffer owns mipmap generation.
    if (this.doMipmapGen)
    {
        GenMipmaps(dstBufferResources.MipTex);
    }
}
/// <summary>
/// Applies a texture-coordinate transform matrix to the given texture stage,
/// adjusting it for the active auto texture-coordinate generation mode
/// (env-map, cubic reflection, projective texturing) and converting it to the
/// layout D3D's fixed-function texture transform expects. Disabled entirely
/// when a vertex program is bound, since the program owns texcoord generation.
/// </summary>
/// <param name="stage">Texture stage index to configure.</param>
/// <param name="xform">Texture matrix in Axiom (column-major) convention.</param>
public override void SetTextureMatrix(int stage, Matrix4 xform)
{
    // the matrix we'll apply after conv. to D3D format
    var newMat = xform;

    // cache this since it's used often
    var autoTexCoordType = this._texStageDesc[stage].AutoTexCoordType;

    // if a vertex program is bound, we mustn't set texture transforms
    if (vertexProgramBound)
    {
        _setTextureStageState(stage, D3D9.TextureStage.TextureTransformFlags, (int)TextureTransform.Disable);
        return;
    }

    if (autoTexCoordType == TexCoordCalcMethod.EnvironmentMap)
    {
        if ((this._deviceManager.ActiveDevice.D3D9DeviceCaps.VertexProcessingCaps &
             D3D9.VertexProcessingCaps.TexGenSphereMap) == D3D9.VertexProcessingCaps.TexGenSphereMap)
        {
            // inverts the texture for a spheremap
            var matEnvMap = Matrix4.Identity;
            // set env_map values
            matEnvMap.m11 = -1.0f;
            // concatenate
            newMat = newMat * matEnvMap;
        }
        else
        {
            /* If envmap is applied, but device doesn't support spheremap,
             * then we have to use texture transform to make the camera space
             * normal reference the envmap properly. This isn't exactly the
             * same as spheremap (it looks nasty on flat areas because the
             * camera space normals are the same) but it's the best
             * approximation we have in the absence of a proper spheremap. */
            // concatenate with the xform
            newMat = newMat * Matrix4.ClipSpace2DToImageSpace;
        }
    }

    // If this is a cubic reflection, we need to modify using the view matrix
    if (autoTexCoordType == TexCoordCalcMethod.EnvironmentMapReflection)
    {
        // Get transposed 3x3, ie since D3D is transposed just copy.
        // We want to transpose since that will invert an orthonormal matrix
        // (ie a rotation); translation is dropped (bottom row zeroed).
        var viewTransposed = Matrix4.Identity;
        viewTransposed.m00 = this._viewMatrix.m00;
        viewTransposed.m01 = this._viewMatrix.m10;
        viewTransposed.m02 = this._viewMatrix.m20;
        viewTransposed.m03 = 0.0f;

        viewTransposed.m10 = this._viewMatrix.m01;
        viewTransposed.m11 = this._viewMatrix.m11;
        viewTransposed.m12 = this._viewMatrix.m21;
        viewTransposed.m13 = 0.0f;

        viewTransposed.m20 = this._viewMatrix.m02;
        viewTransposed.m21 = this._viewMatrix.m12;
        viewTransposed.m22 = this._viewMatrix.m22;
        viewTransposed.m23 = 0.0f;

        viewTransposed.m30 = 0;
        viewTransposed.m31 = 0;
        viewTransposed.m32 = 0;
        viewTransposed.m33 = 1.0f;

        // concatenate
        newMat = newMat * viewTransposed;
    }

    if (autoTexCoordType == TexCoordCalcMethod.ProjectiveTexture)
    {
        // Derive camera space to projector space transform
        // To do this, we need to undo the camera view matrix, then
        // apply the projector view & projection matrices
        newMat = this._viewMatrix.Inverse();

        if (texProjRelative)
        {
            Matrix4 viewMatrix;
            this._texStageDesc[stage].Frustum.CalcViewMatrixRelative(texProjRelativeOrigin, out viewMatrix);
            newMat = viewMatrix * newMat;
        }
        else
        {
            newMat = this._texStageDesc[stage].Frustum.ViewMatrix * newMat;
        }
        newMat = this._texStageDesc[stage].Frustum.ProjectionMatrix * newMat;
        newMat = Matrix4.ClipSpace2DToImageSpace * newMat;
        newMat = xform * newMat;
    }

    // need this if texture is a cube map, to invert D3D's z coord
    if (autoTexCoordType != TexCoordCalcMethod.None && autoTexCoordType != TexCoordCalcMethod.ProjectiveTexture)
    {
        newMat.m20 = -newMat.m20;
        newMat.m21 = -newMat.m21;
        newMat.m22 = -newMat.m22;
        newMat.m23 = -newMat.m23;
    }

    // convert our matrix to D3D format
    DX.Mathematics.Interop.RawMatrix d3dMat = D3D9Helper.MakeD3DMatrix(newMat);

    // set the matrix if it is not the identity
    if (!D3D9Helper.IsIdentity(d3dMat))
    {
        //It seems D3D automatically adds a texture coordinate with value 1,
        //and fills up the remaining texture coordinates with 0 for the input
        //texture coordinates before passing them to the texture coordinate
        //transformation.
        //NOTE: This differs from the D3DDECLTYPE enumerated type expansion in
        //the DirectX SDK documentation!
        //So we should prepare the texcoord transform to behave like a
        //standardized vector expansion: fill w with 1 and the others with 0.
        if (autoTexCoordType == TexCoordCalcMethod.None)
        {
            //FIXME: The actual input texture coordinate dimensions should
            //be determined by the texture coordinate vertex element. For now,
            //just trust that the user-supplied texture type matches the
            //texture coordinate vertex element.
            if (this._texStageDesc[stage].TexType == D3D9TextureType.Normal)
            {
                /* It's a 2D input texture coordinate:
                 *
                 * texcoord in vertex buffer     D3D expands to     we adjust to
                 *   (u, v)               -->    (u, v, 1, 0)  -->  (u, v, 0, 1)
                 */
                Utility.Swap(ref d3dMat.M31, ref d3dMat.M41);
                Utility.Swap(ref d3dMat.M32, ref d3dMat.M42);
                Utility.Swap(ref d3dMat.M33, ref d3dMat.M43);
                Utility.Swap(ref d3dMat.M34, ref d3dMat.M44);
            }
        }
        //else
        //{
        //    // All texgen generates 3D input texture coordinates.
        //}

        // tell D3D the dimension of tex. coord
        var texCoordDim = TextureTransform.Count2;
        if (autoTexCoordType == TexCoordCalcMethod.ProjectiveTexture)
        {
            //We want texcoords (u, v, w, q) to always be divided by q, but
            //D3D divides projected texcoords by the LAST element (for a 2D
            //texcoord, that is w). So we tweak the transform matrix to emit
            //the texcoords with w and q swapped — (u, v, q, w) — so that D3D
            //divides u, v by q. The w and q are then ignored, as the
            //rasterizer does not use them.
            switch (this._texStageDesc[stage].TexType)
            {
                case D3D9TextureType.Normal:
                    Utility.Swap(ref d3dMat.M13, ref d3dMat.M14);
                    Utility.Swap(ref d3dMat.M23, ref d3dMat.M24);
                    Utility.Swap(ref d3dMat.M33, ref d3dMat.M34);
                    Utility.Swap(ref d3dMat.M43, ref d3dMat.M44);

                    texCoordDim = TextureTransform.Projected | TextureTransform.Count3;
                    break;

                case D3D9TextureType.Cube:
                case D3D9TextureType.Volume:
                    // Yes, we support 3D projective texture.
                    texCoordDim = TextureTransform.Projected | TextureTransform.Count4;
                    break;
            }
        }
        else
        {
            switch (this._texStageDesc[stage].TexType)
            {
                case D3D9TextureType.Normal:
                    texCoordDim = TextureTransform.Count2;
                    break;

                case D3D9TextureType.Cube:
                case D3D9TextureType.Volume:
                    texCoordDim = TextureTransform.Count3;
                    break;
            }
        }

        // note: int values of D3D.TextureTransform correspond directly with tex dimension, so direct conversion is possible
        // i.e. Count1 = 1, Count2 = 2, etc
        _setTextureStageState(stage, D3D9.TextureStage.TextureTransformFlags, (int)texCoordDim);

        // set the manually calculated texture matrix
        var d3DTransType = (D3D9.TransformState)((int)(D3D9.TransformState.Texture0) + stage);
        ActiveD3D9Device.SetTransform(d3DTransType, d3dMat);
    }
    else
    {
        // disable texture transformation
        _setTextureStageState(stage, D3D9.TextureStage.TextureTransformFlags, (int)TextureTransform.Disable);

        // Needless to set the texture transform here; it's never used at all.
    }
}
/// <summary>
/// (Re)creates the D3D9 vertex buffer for the given device in the given pool,
/// marking the whole buffer out of date so it gets refilled from system memory.
/// Mirrors the index-buffer counterpart's CreateBuffer.
/// </summary>
/// <param name="d3d9Device">Device to create the buffer on.</param>
/// <param name="ePool">Memory pool to place the buffer in.</param>
/// <exception cref="AxiomException">If the D3D9 vertex buffer cannot be created.</exception>
public void CreateBuffer(D3D9.Device d3d9Device, D3D9.Pool ePool)
{
    //Entering critical section — the index-buffer counterpart guards device
    //access around buffer (re)creation; this path was inconsistently unguarded.
    this.LockDeviceAccess();

    // Find the vertex buffer of this device.
    BufferResources bufferResources;
    if (this._mapDeviceToBufferResources.TryGetValue(d3d9Device, out bufferResources))
    {
        bufferResources.VertexBuffer.SafeDispose();
    }
    else
    {
        bufferResources = new BufferResources();
        this._mapDeviceToBufferResources.Add(d3d9Device, bufferResources);
    }

    // Flag the entire buffer as needing a refresh from the shadow copy.
    bufferResources.VertexBuffer = null;
    bufferResources.IsOutOfDate = true;
    bufferResources.LockOffset = 0;
    bufferResources.LockLength = sizeInBytes;
    bufferResources.LockOptions = BufferLocking.Normal;
    bufferResources.LastUsedFrame = Root.Instance.NextFrameNumber;

    // Create the vertex buffer
    try
    {
        bufferResources.VertexBuffer = new D3D9.VertexBuffer(d3d9Device, sizeInBytes, D3D9Helper.ConvertEnum(usage),
                                                             0, // No FVF here, thank you.
                                                             ePool);
    }
    catch (Exception ex)
    {
        // Message fixed: this is a creation path, not a restore path.
        throw new AxiomException("Cannot create D3D9 vertex buffer", ex);
    }

    this._bufferDesc = bufferResources.VertexBuffer.Description;

    //Leaving critical section
    this.UnlockDeviceAccess();
}