/// <summary>
/// Draws the ball mesh with the active effect technique, applying the given clip plane
/// (used by the reflection/refraction passes) and view matrix.
/// </summary>
/// <param name="clipPlane">Clip plane uploaded to the shader; reset to (0,0,0,1) after each pass.</param>
/// <param name="view">View matrix combined with the ball's world matrix and the stored projection.</param>
public void DrawBall(Vector4 clipPlane, Matrix view) {
    ImmediateContext.InputAssembler.SetVertexBuffers(0, new D3D11.VertexBufferBinding(_ballVB, VertexPN.Stride, 0));
    ImmediateContext.InputAssembler.SetIndexBuffer(_ballIB, DXGI.Format.R32_UInt, 0);

    // FIX: removed the unused 'viewProj' local, and hoisted the pass-invariant shader
    // constants out of the pass loop — they were recomputed and re-set every pass.
    var wvp = _ballWorld * view * _proj;
    _fxWVP.SetMatrix(wvp);
    // World inverse-transpose transforms normals correctly under non-uniform scaling.
    var invTranspose = Matrix.Invert(Matrix.Transpose(_ballWorld));
    _fxWIT.SetMatrix(invTranspose);
    _fxWorld.SetMatrix(_ballWorld);
    int len;
    var array = Util.GetArray(_landMaterial, out len);
    _fxMaterial.SetRawValue(DataStream.Create<byte>(array, false, false), len);

    for (int p = 0; p < _tech.Description.PassCount; p++) {
        // The clip plane is reset at the end of each pass, so it must be re-set per pass.
        _fxClipPlane.Set(clipPlane);
        var pass = _tech.GetPassByIndex(p);
        pass.Apply(ImmediateContext);
        ImmediateContext.DrawIndexed(_ballIndexCount, 0, 0);
        // Unbind the diffuse map and disable clipping so later draws are unaffected.
        _fxDiffuseMap.SetResource(null);
        _fxClipPlane.Set(new float[] { 0, 0, 0, 1 });
    }
}
/// <summary>
/// Renders one frame: refreshes the water reflection/refraction maps, clears the
/// back/depth buffers, uploads light data and the eye position, draws the scene
/// objects, then presents the swap chain.
/// </summary>
public override void DrawScene() {
    base.DrawScene();

    // The reflection/refraction render targets must be current before the water draws.
    RenderRefractionMap();
    RenderReflectionMap();

    ImmediateContext.ClearRenderTargetView(RenderTargetView, Color.LightSteelBlue);
    ImmediateContext.ClearDepthStencilView(DepthStencilView, D3D11.DepthStencilClearFlags.Depth | D3D11.DepthStencilClearFlags.Stencil, 1.0f, 0);
    ImmediateContext.InputAssembler.InputLayout = _inputLayout;
    ImmediateContext.InputAssembler.PrimitiveTopology = D3D.PrimitiveTopology.TriangleList;

    // Upload light parameters and the eye position (used for specular lighting).
    int size;
    var raw = Util.GetArray(_dirLight, out size);
    _fxDirLight.SetRawValue(DataStream.Create<byte>(raw, false, false), size);
    raw = Util.GetArray(_pointLight, out size);
    _fxPointLight.SetRawValue(DataStream.Create<byte>(raw, false, false), size);
    raw = Util.GetArray(_spotLight, out size);
    _fxSpotLight.SetRawValue(DataStream.Create<byte>(raw, false, false), size);
    _fxEyePosW.Set(_eyePosW);

    DrawLand();
    DrawWater();
    DrawBall();

    SwapChain.Present(0, DXGI.PresentFlags.None);
}
/// <summary>
/// Creates the GPU buffers for the wave grid: a CPU-writable dynamic vertex buffer
/// (the simulation rewrites positions every frame) and an immutable index buffer
/// describing two triangles per grid cell.
/// </summary>
private void BuildWavesGeometryBuffers() {
    var vertexDesc = new D3D11.BufferDescription(VertexPN.Stride * _awave.VertexCount, D3D11.ResourceUsage.Dynamic, D3D11.BindFlags.VertexBuffer, D3D11.CpuAccessFlags.Write, D3D11.ResourceOptionFlags.None, 0);
    _wavesVB = new D3D11.Buffer(Device, vertexDesc);

    var rows = _awave.RowCount;
    var cols = _awave.ColumnCount;
    // 6 indices (two triangles) per interior cell of the grid.
    var indices = new List<int>(6 * (rows - 1) * (cols - 1));
    for (int row = 0; row < rows - 1; row++) {
        for (int col = 0; col < cols - 1; col++) {
            var topLeft = row * cols + col;
            var topRight = topLeft + 1;
            var bottomLeft = topLeft + cols;
            var bottomRight = bottomLeft + 1;
            indices.Add(topLeft);
            indices.Add(topRight);
            indices.Add(bottomLeft);
            indices.Add(bottomLeft);
            indices.Add(topRight);
            indices.Add(bottomRight);
        }
    }

    var indexDesc = new D3D11.BufferDescription(sizeof(int) * indices.Count, D3D11.ResourceUsage.Immutable, D3D11.BindFlags.IndexBuffer, D3D11.CpuAccessFlags.None, D3D11.ResourceOptionFlags.None, 0);
    _wavesIB = new D3D11.Buffer(Device, DataStream.Create<int>(indices.ToArray(), false, false), indexDesc);
}
/// <summary>
/// Probes the container's media and appends one stream entry per discovered
/// audio, video and data stream.
/// </summary>
/// <param name="resource">Container to enrich; returned for fluent chaining.</param>
private static IContainer LoadMetadataFromFFprobe(this IContainer resource) {
    var loader = new MediaLoader(resource);

    if (loader.HasAudio) {
        var audioStreams = loader.BaseData.Streams
            .OfType<AudioStreamMetadata>()
            .Select(metadata => AudioStream.Create(MetadataInfo.Create(metadata)));
        resource.Streams.AddRange(audioStreams);
    }

    if (loader.HasVideo) {
        var videoStreams = loader.BaseData.Streams
            .OfType<VideoStreamMetadata>()
            .Select(metadata => VideoStream.Create(MetadataInfo.Create(metadata)));
        resource.Streams.AddRange(videoStreams);
    }

    if (loader.HasData) {
        var dataStreams = loader.BaseData.Streams
            .OfType<DataStreamMetadata>()
            .Select(metadata => DataStream.Create(MetadataInfo.Create(metadata)));
        resource.Streams.AddRange(dataStreams);
    }

    return resource;
}
/// <summary>
/// Stores the wave format and wraps the raw PCM data in two XAudio2 buffer views over
/// the same stream: one for single-shot playback and one that loops indefinitely.
/// </summary>
/// <param name="format">Wave format describing the PCM data.</param>
/// <param name="buffer">Raw PCM sample bytes.</param>
/// <param name="offset">NOTE(review): not applied to the stream below — confirm callers always pass 0.</param>
/// <param name="count">Number of valid audio bytes in the buffer.</param>
/// <param name="loopStart">Start of the play/loop region.</param>
/// <param name="loopLength">Length of the play/loop region.</param>
private void Initialize(WaveFormat format, byte[] buffer, int offset, int count, int loopStart, int loopLength)
{
    _format = format;
    _dataStream = DataStream.Create<byte>(buffer, true, false);

    // Use the loopStart and loopLength also as the range
    // when playing this SoundEffect a single time / unlooped.
    _buffer = new AudioBuffer()
    {
        Stream = _dataStream,
        AudioBytes = count,
        Flags = BufferFlags.EndOfStream,
        PlayBegin = loopStart,
        PlayLength = loopLength,
        Context = new IntPtr(42),   // arbitrary marker carried through to buffer callbacks
    };

    _loopedBuffer = new AudioBuffer()
    {
        Stream = _dataStream,
        AudioBytes = count,
        Flags = BufferFlags.EndOfStream,
        LoopBegin = loopStart,
        LoopLength = loopLength,
        LoopCount = AudioBuffer.LoopInfinite,   // loop until explicitly stopped
        Context = new IntPtr(42),
    };
}
/// <summary>
/// (Re)creates the immutable vertex buffer for this mesh, using the skinned vertex
/// set when the mesh is animated and the static set otherwise.
/// </summary>
/// <param name="device">Direct3D device used to allocate the buffer.</param>
public void CreateVertexBuffer(Device device) {
    // FIX: collapsed null-check + Dispose and the redundant 'count = 0' pre-assignment.
    // Release the previous buffer before replacing it to avoid leaking GPU memory.
    VertexBuffer?.Dispose();

    var count = Animated ? SkinnedVertices.Count : Vertices.Count;
    var desc = new BufferDescription() {
        BindFlags = BindFlags.VertexBuffer,
        CpuAccessFlags = CpuAccessFlags.None,
        OptionFlags = ResourceOptionFlags.None,
        Usage = ResourceUsage.Immutable,
        SizeInBytes = VertexStride * count
    };

    VertexBuffer = Animated
        ? new Buffer(device, DataStream.Create(SkinnedVertices.ToArray(), false, false), desc)
        : new Buffer(device, DataStream.Create(Vertices.ToArray(), false, false), desc);
}
/// <summary>
/// Converts the caller's sample data to the voice's 16-bit format (if needed), copies it
/// into a pooled buffer, and submits it to the XAudio2 source voice. The pooled buffer
/// and audio buffer are queued so they can be returned/released after playback.
/// </summary>
/// <param name="data">Caller-owned samples; copied, never pinned.</param>
/// <param name="depth">Bit depth of <paramref name="data"/> (16-bit int or 32-bit float).</param>
private unsafe void PlatformSubmitBuffer<T>(ReadOnlySpan<T> data, AudioDepth depth) where T : unmanaged
{
    int dataByteCount = data.Length * sizeof(T);
    // depth is the bit depth, so depth/8 is the byte size of one source sample.
    int sampleCount = dataByteCount / ((int)depth / 8);

    // The XAudio voice is always 16-bit, but we support 16-bit and 32-bit data.
    int bufferByteCount = sampleCount * sizeof(short);
    byte[] pooledBuffer = _pool.Rent(bufferByteCount);

    // we need to copy so datastream does not pin the buffer that the user might modify later
    if (depth == AudioDepth.Float)
    {
        // we need to convert to 16-bit
        var srcSpan = MemoryMarshal.Cast<T, float>(data);
        var dstSpan = MemoryMarshal.Cast<byte, short>(pooledBuffer.AsSpan(0, bufferByteCount));
        AudioLoader.ConvertSingleToInt16(srcSpan, dstSpan);
    }
    else
    {
        // the data was 16-bit, so just copy over
        var srcSpan = MemoryMarshal.AsBytes(data);
        srcSpan.CopyTo(pooledBuffer);
    }

    var stream = DataStream.Create(pooledBuffer, true, false, 0, true);
    var audioBuffer = new AudioBuffer(stream) { AudioBytes = bufferByteCount };
    _voice.SubmitSourceBuffer(audioBuffer, null);
    // Track the pooled array with its buffer so it can be returned when playback completes.
    _queuedItems.Enqueue(new DataItem(audioBuffer, pooledBuffer));
}
/// <summary>
/// Builds the sky-sphere vertex/index buffers and names the cubemap resource
/// so it is identifiable in graphics debuggers.
/// </summary>
protected override void InitializeInternal()
{
    base.InitializeInternal();
    var device = D3DApp11.I.D3DDevice;

    using (var resource = CubeMapSRV.Resource)
    {
        resource.DebugName = "sky cubemap";
    }

    var sphere = GeometryGenerator.CreateSphere(_skySphereRadius, 30, 30);

    // Only vertex positions are kept for the sky sphere.
    var positions = sphere.Vertices.Select(vertex => vertex.Position).ToArray();
    var vertexDesc = new BufferDescription(
        VertPos.Stride * positions.Length,
        ResourceUsage.Immutable,
        BindFlags.VertexBuffer,
        CpuAccessFlags.None,
        ResourceOptionFlags.None,
        0
    );
    _vb = new Buffer(device, DataStream.Create(positions, false, false), vertexDesc);

    _indexCount = sphere.Indices.Count;
    var indexDesc = new BufferDescription(
        _indexCount * sizeof(int),
        ResourceUsage.Immutable,
        BindFlags.IndexBuffer,
        CpuAccessFlags.None,
        ResourceOptionFlags.None,
        0
    );
    _ib = new Buffer(device, DataStream.Create(sphere.Indices.ToArray(), false, false), indexDesc);
}
/// <summary>
/// Static initialization: sets up two double-buffered sample/audio buffer pairs
/// (one plays while the other is filled) and creates the XAudio2 voice chain.
/// </summary>
static AudioPlayer() {
    // FIX: the two buffer setups were copy-pasted; collapsed into a loop.
    for (int i = 0; i < 2; i++) {
        SampleBuffers[i] = new byte[AudioFrequency / AudioSlices];
        var stream = DataStream.Create(SampleBuffers[i], true, true);
        AudioBuffers[i] = new AudioBuffer {
            Stream = stream,
            AudioBytes = (int)stream.Length,
            Flags = BufferFlags.None,
            Context = new IntPtr(i)   // identifies which buffer completed in callbacks
        };
        BufferEvents[i] = new AutoResetEvent(false);
    }

    // 8-bit mono PCM at the configured frequency.
    var waveFormat = new WaveFormat(AudioFrequency, 8, 1);
    XAudio2 xaudio = new XAudio2();
    // NOTE(review): the mastering voice is never stored in a field — presumably kept
    // alive by the native XAudio2 graph; confirm it is not disposed/collected early.
    MasteringVoice masteringVoice = new MasteringVoice(xaudio);
    SourceVoice = new SourceVoice(xaudio, waveFormat, true);
    SourceVoice.BufferStart += SourceVoice_BufferStart;
}
/// <summary>
/// Background playback loop: drains queued PCM chunks and submits each one to a
/// freshly created source voice, alternating between two voice slots. Exits when
/// the XAudio device or the audio source disappears.
/// </summary>
private void playSound() {
    while (true) {
        if (xaDevice == null || audioSource == null) {
            return;
        }
        if (buffer.Count == 0) {
            // FIX: previously this was a tight busy-wait that burned a whole core
            // whenever no audio was queued; yield briefly instead.
            System.Threading.Thread.Sleep(1);
            continue;
        }

        DataStream dataStream = DataStream.Create(buffer.Dequeue(), true, false);
        AudioBuffer audioBuffer = new AudioBuffer() {
            AudioBytes = (int)dataStream.Length,
            Stream = dataStream,
            LoopCount = XAudio2.NoLoopRegion,
            LoopBegin = 0,
            LoopLength = 0,
            PlayBegin = 0,
            PlayLength = 0,
            Flags = BufferFlags.EndOfStream
        };
        // 32-bit samples at the source's native rate and channel count.
        WaveFormat waveFormat = new WaveFormat(audioSource.SampleRate, 32, audioSource.Channels);

        int slot = whichVoice ? 0 : 1;
        // NOTE(review): the previous voice in this slot is replaced without Stop()/Dispose();
        // confirm voices are cleaned up elsewhere, otherwise native voices leak.
        sourceVoices[slot] = new SourceVoice(xaDevice, waveFormat, false);
        sourceVoices[slot].SubmitSourceBuffer(audioBuffer, null);
        sourceVoices[slot].Start();
        whichVoice = !whichVoice;
    }
}
/// <summary>Creates the XAudio buffers covering the full extent of the supplied PCM data.</summary>
private void PlatformInitialize(byte[] buffer, int sampleRate, AudioChannels channels) {
    var format = new WaveFormat(sampleRate, (int)channels);
    var stream = DataStream.Create(buffer, true, false);
    CreateBuffers(format, stream, 0, buffer.Length);
}
/// <summary>
/// Compiles the grid shader, builds the XYZ axis vertex list (one colored line per axis),
/// uploads it to a vertex buffer, and creates the matching input layout.
/// </summary>
private void _バッファを作成する()
{
    // Load the effect.
    using (ShaderBytecode byteCode = ShaderBytecode.CompileFromFile(@"Shader\grid.fx", "fx_5_0"))
    {
        _エフェクト = new Effect(RenderContext.Instance.DeviceManager.D3DDevice, byteCode);
    }

    // First, collect the vertices in a list: one axis line each for X (red), Y (green), Z (blue).
    List<float> axisVector = new List<float>();
    _軸として頂点を格納する(axisVector, _軸の長さ, 0, 0, new Vector4(1, 0, 0, 1));
    _軸として頂点を格納する(axisVector, 0, _軸の長さ, 0, new Vector4(0, 1, 0, 1));
    _軸として頂点を格納する(axisVector, 0, 0, _軸の長さ, new Vector4(0, 0, 1, 1));

    // Create the vertex buffer from the collected floats.
    using (var vs = DataStream.Create(axisVector.ToArray(), true, true))
    {
        BufferDescription bufDesc = new BufferDescription
        {
            BindFlags = BindFlags.VertexBuffer,
            SizeInBytes = (int)vs.Length
        };
        _軸の頂点バッファ = new Buffer(RenderContext.Instance.DeviceManager.D3DDevice, vs, bufDesc);
    }
    // NOTE(review): this stores the float count, not the vertex count (each vertex spans
    // several floats) — confirm the draw call downstream expects this value.
    _軸の頂点数 = axisVector.Count;

    // Create the input layout from the pass signature.
    var v = _エフェクト.GetTechniqueByIndex(0).GetPassByIndex(1).Description.Signature;
    _軸の入力レイアウト = new InputLayout(RenderContext.Instance.DeviceManager.D3DDevice, v, AxisGridLayout.VertexElements);
    頂点数 = _軸の頂点数;
}
/// <summary>Creates XAudio buffers over the given PCM region with an explicit loop range.</summary>
/// <remarks>NOTE(review): <paramref name="count"/> is not used here — the stream covers from
/// <paramref name="offset"/> to the end of the array; confirm callers rely on that.</remarks>
private void PlatformInitialize(byte[] buffer, int offset, int count, int sampleRate, AudioChannels channels, int loopStart, int loopLength) {
    var format = new WaveFormat(sampleRate, (int)channels);
    var stream = DataStream.Create(buffer, true, false, offset);
    CreateBuffers(format, stream, loopStart, loopLength);
}
/// <summary>Uploads a material struct to the effect as a raw byte blob of Material.Stride bytes.</summary>
/// <param name="m">Material whose fields are marshalled into the shader constant.</param>
public void SetMaterial(Material m)
{
    // Improper release of this memory can very easily cause a memory leak!
    // (the using block guarantees the temporary DataStream is disposed)
    using (var dataStream = DataStream.Create(NoireUtilities.StructureToBytes(m), false, false))
    {
        _mat.SetRawValue(dataStream, Material.Stride);
    }
}
/// <summary>
/// (Re)builds the water quad: four textured vertices at the water level spanning the
/// terrain footprint, indexed as two triangles.
/// </summary>
public void BuildMesh() {
    // Release the previous GPU buffers before rebuilding.
    if (indexBuffer != null) {
        indexBuffer.Dispose();
        vertexBuffer.Dispose();
    }

    VertexTexture[] waterVertices = new VertexTexture[4];
    int i = 0;
    float x, y, width, height;
    x = 0;
    y = 0;
    width = info.width;
    height = info.height;
    float size = 1;
    waterVertices[i++] = new VertexTexture(new Vector3(x, info.waterLevel, y), new Vector2(0, 0));
    waterVertices[i++] = new VertexTexture(new Vector3(x, info.waterLevel, height), new Vector2(0, size));
    waterVertices[i++] = new VertexTexture(new Vector3(width, info.waterLevel, y), new Vector2(size, 0));
    waterVertices[i++] = new VertexTexture(new Vector3(width, info.waterLevel, height), new Vector2(size, size));

    indices = new short[] { 0, 3, 1, 0, 2, 3 };
    IndicesLength = indices.Length;
    // BUG FIX: the buffer size previously used indices.Length * 4, but the indices are
    // 16-bit shorts, so the declared buffer size exceeded the supplied initial data.
    indexBuffer = new Buffer(Display.device, DataStream.Create(indices, false, false),
        new BufferDescription(indices.Length * sizeof(short), ResourceUsage.Default, BindFlags.IndexBuffer, CpuAccessFlags.None, ResourceOptionFlags.None, 0));
    vertexBuffer = Buffer.Create<VertexTexture>(Display.device, waterVertices,
        new BufferDescription(4 * VertexTexture.SizeInBytes, ResourceUsage.Default, BindFlags.VertexBuffer, CpuAccessFlags.None, ResourceOptionFlags.None, 0));
}
/// <summary>
/// Writes the mesh indices into the model index buffer, packing them as 16-bit values
/// when the referenced vertex range fits in a ushort, and 32-bit otherwise.
/// </summary>
void WriteIndices(Mesh assimpMesh, ModelData.MeshPart meshPart, ModelData.IndexBuffer indexBuffer)
{
    var indices = assimpMesh.GetIntIndices();
    var count = indices.Length;
    indexBuffer.Count = count;
    meshPart.IndexBufferRange.Count = count;

    bool fitsInUShort = meshPart.VertexBufferRange.Count < 65536;
    if (fitsInUShort)
    {
        // Compact path: every index fits in 16 bits.
        indexBuffer.Buffer = new byte[count * 2];
        using (var indexStream = DataStream.Create(indexBuffer.Buffer, true, true))
        {
            foreach (int index in indices)
            {
                indexStream.Write((ushort)index);
            }
        }
    }
    else
    {
        // Full 32-bit precision to store indices.
        indexBuffer.Buffer = new byte[count * 4];
        using (var indexStream = DataStream.Create(indexBuffer.Buffer, true, true))
        {
            indexStream.WriteRange(indices);
        }
    }

    // Track the largest buffer needed to load this model.
    if (indexBuffer.Buffer.Length > model.MaximumBufferSizeInBytes)
    {
        model.MaximumBufferSizeInBytes = indexBuffer.Buffer.Length;
    }
}
/// <summary>
/// Converts a stream of 16-bit PCM samples into a new stream of IEEE-float samples.
/// (Method and parameter names keep their original misspellings for interface compatibility.)
/// </summary>
private DataStream ConverStreamToIeeeFloat(DataStream dataSream)
{
    var sampleCount = dataSream.Length / sizeof(short);
    var pcmSamples = dataSream.ReadRange<short>((int)sampleCount);
    var floatSamples = Utils.MakeFloatFromShortSoundArray(pcmSamples);
    return DataStream.Create(floatSamples, true, true);
}
/// <summary>
/// Initializes a new instance of the <see cref="SoundBank"/> class from a soundbank stream.
/// </summary>
/// <param name="audioEngine">The engine.</param>
/// <param name="stream">The soundbank stream stream.</param>
/// <unmanaged>HRESULT IXACT3Engine::CreateSoundBank([In] const void* pvBuffer,[In] unsigned int dwSize,[In] unsigned int dwFlags,[In] unsigned int dwAllocAttributes,[Out, Fast] IXACT3SoundBank** ppSoundBank)</unmanaged>
public SoundBank(AudioEngine audioEngine, Stream stream)
{
    this.audioEngine = audioEngine;
    isAudioEngineReadonly = true;
    // Reuse the stream directly if it is already a DataStream; otherwise read it fully
    // into memory so the native engine can be handed a stable pointer.
    soundBankSourceStream = stream as DataStream ?? DataStream.Create(Utilities.ReadStream(stream), true, true);
    // Pass the current position's pointer and the remaining byte count to the engine.
    audioEngine.CreateSoundBank(soundBankSourceStream.PositionPointer,
        (int)(soundBankSourceStream.Length - soundBankSourceStream.Position), 0, 0, this);
    callback = OnNotificationDelegate;
}
/// <summary>
/// Creates the composition render target (sized to the capture) plus a fixed 1920x1080
/// target texture, and an opaque-white "blank" texture of the same fixed size.
/// </summary>
/// <param name="size">Pixel size of the composition texture.</param>
private void InitializeComposeTexture(SizeInt32 size)
{
    var description = new SharpDX.Direct3D11.Texture2DDescription
    {
        Width = size.Width,
        Height = size.Height,
        MipLevels = 1,
        ArraySize = 1,
        Format = SharpDX.DXGI.Format.B8G8R8A8_UNorm,
        SampleDescription = new SharpDX.DXGI.SampleDescription() { Count = 1, Quality = 0 },
        Usage = SharpDX.Direct3D11.ResourceUsage.Default,
        BindFlags = SharpDX.Direct3D11.BindFlags.ShaderResource | SharpDX.Direct3D11.BindFlags.RenderTarget,
        CpuAccessFlags = SharpDX.Direct3D11.CpuAccessFlags.None,
        OptionFlags = SharpDX.Direct3D11.ResourceOptionFlags.None
    };
    // NOTE(review): the target/blank textures are hard-coded to 1920x1080 while the
    // compose texture follows 'size' — confirm this asymmetry is intentional.
    var tardescription = new SharpDX.Direct3D11.Texture2DDescription
    {
        Width = 1920,
        Height = 1080,
        MipLevels = 1,
        ArraySize = 1,
        Format = SharpDX.DXGI.Format.B8G8R8A8_UNorm,
        SampleDescription = new SharpDX.DXGI.SampleDescription() { Count = 1, Quality = 0 },
        Usage = SharpDX.Direct3D11.ResourceUsage.Default,
        BindFlags = SharpDX.Direct3D11.BindFlags.ShaderResource | SharpDX.Direct3D11.BindFlags.RenderTarget,
        CpuAccessFlags = SharpDX.Direct3D11.CpuAccessFlags.None,
        OptionFlags = SharpDX.Direct3D11.ResourceOptionFlags.None
    };
    _tarComposeTexture = new SharpDX.Direct3D11.Texture2D(_d3dDevice, tardescription);
    _composeTexture = new SharpDX.Direct3D11.Texture2D(_d3dDevice, description);
    _composeRenderTargetView = new SharpDX.Direct3D11.RenderTargetView(_d3dDevice, _composeTexture);

    // FIX: the original conditional set 255 in both branches (dead conditional);
    // fill every BGRA byte with 255 => opaque white.
    byte[] data = new byte[1920 * 1080 * 4];
    for (int i = 0; i < data.Length; i++)
    {
        data[i] = 255;
    }
    // FIX: the DataStream was never disposed; the initial data is copied at texture
    // creation, so the stream can be released immediately afterwards.
    using (DataStream s = DataStream.Create(data, true, true))
    {
        DataRectangle rect = new DataRectangle(s.DataPointer, 1920 * 4);
        _blankComposeTexture = new SharpDX.Direct3D11.Texture2D(_d3dDevice, tardescription, rect);
    }
}
/// <summary>Marshals a managed struct into an effect variable as a raw byte blob.</summary>
/// <typeparam name="T">Struct type whose marshalled layout matches the shader constant.</typeparam>
protected void WriteStruct<T>(EffectVariable variable, T value) where T : struct
{
    var size = Marshal.SizeOf(typeof(T));
    var raw = InteropHelper.StructureToBytes(value);
    using (var stream = DataStream.Create(raw, false, false))
    {
        variable.SetRawValue(stream, size);
    }
}
/// <summary>
/// Expands the mesh into per-index vertices with barycentric coordinates and writes them
/// into the model vertex buffer, collecting bounding points along the way.
/// </summary>
void WriteBarycentricVertices(Mesh assimpMesh, ModelData.MeshPart meshPart, ModelData.VertexBuffer vertexBuffer, int vertexBufferElementSize)
{
    int[] indices = assimpMesh.GetIntIndices();
    int indexCount = indices.Length;

    // One output vertex per index.
    meshPart.VertexBufferRange.Count = indexCount;
    vertexBuffer.Count = indexCount;
    vertexBuffer.Buffer = new byte[vertexBufferElementSize * indexCount];

    // Update the MaximumBufferSizeInBytes needed to load this model.
    if (vertexBuffer.Buffer.Length > model.MaximumBufferSizeInBytes)
    {
        model.MaximumBufferSizeInBytes = vertexBuffer.Buffer.Length;
    }

    boundingPoints = new Vector3[indexCount];
    var newVertices = new VertexPositionNormalTexture[indices.Length];
    for (int i = 0; i < indexCount; i++)
    {
        try
        {
            int i0 = indices[i];
            var v0 = assimpMesh.Vertices[i0];
            var v = new Vector3(v0.X, v0.Y, v0.Z);
            var n0 = assimpMesh.Normals[i0];
            var n = new Vector3(n0.X, n0.Y, n0.Z);
            var uv = assimpMesh.GetTextureCoords(0)[i0];
            var t = new Vector2(uv.X, uv.Y);
            // Store bounding points for BoundingSphere pre-calculation.
            boundingPoints[currentBoundingPointIndex++] = v;
            newVertices[i0] = new VertexPositionNormalTexture(v, n, t);
        }
        catch (Exception ex)
        {
            // Best-effort: a vertex missing normals/UVs is logged, not fatal.
            Debug.WriteLine("!" + ex);
        }
    }

    var barycentricVertices = ModelEditor.ConvertToBarycentricEdgeNormalVertices(newVertices, indices);
    // FIX: the stream was disposed manually; an exception while writing would leak it.
    using (var vertexStream = DataStream.Create(vertexBuffer.Buffer, true, true))
    {
        foreach (var vertex in barycentricVertices)
        {
            vertexStream.Write(vertex.Position);
            vertexStream.Write(vertex.Normal);
            vertexStream.Write(vertex.TextureUV);
            vertexStream.Write(vertex.Barycentric);
        }
    }
}
/// <summary>
/// Builds a textured plane board (two triangles) of the given size: loads the sprite
/// effect, creates the input layout and draw pass, fills the vertex buffer, and sets
/// the initial model state.
/// </summary>
/// <param name="resourceView">Weak reference to the texture shown on the board.</param>
/// <param name="プレーンボードのサイズ">Width/height of the board; vertices are centered on the origin.</param>
public プレーンボードシェイプ(WeakReference<ShaderResourceView> resourceView, Vector2 プレーンボードのサイズ)
{
    _シェーダーリソースビュー = resourceView;
    表示中 = true;
    スプライトの描画に利用するエフェクト = CGHelper.EffectFx5を作成するFromResource("MikuMikuFlex.Resource.Shader.SpriteShader.fx", RenderContext.Instance.DeviceManager.D3DDevice);
    VertexInputLayout = new InputLayout(
        RenderContext.Instance.DeviceManager.D3DDevice,
        スプライトの描画に利用するエフェクト.GetTechniqueByIndex(0).GetPassByIndex(0).Description.Signature,
        スプライトの頂点レイアウト.InputElements);
    _描画パス = スプライトの描画に利用するエフェクト.GetTechniqueByIndex(0).GetPassByIndex(0);

    // Build the vertex list (interleaved position + UV as raw bytes).
    float width = プレーンボードのサイズ.X / 2f;
    float height = プレーンボードのサイズ.Y / 2f;
    var 頂点リスト = new List<byte>();

    // Triangle 1
    CGHelper.AddListBuffer(new Vector3(-width, height, 0), 頂点リスト);    // x, y, z
    CGHelper.AddListBuffer(new Vector2(0, 0), 頂点リスト);                 // u, v (same pattern below)
    CGHelper.AddListBuffer(new Vector3(width, height, 0), 頂点リスト);
    CGHelper.AddListBuffer(new Vector2(1, 0), 頂点リスト);
    CGHelper.AddListBuffer(new Vector3(-width, -height, 0), 頂点リスト);
    CGHelper.AddListBuffer(new Vector2(0, 1), 頂点リスト);
    // Triangle 2
    CGHelper.AddListBuffer(new Vector3(width, height, 0), 頂点リスト);
    CGHelper.AddListBuffer(new Vector2(1, 0), 頂点リスト);
    CGHelper.AddListBuffer(new Vector3(width, -height, 0), 頂点リスト);
    CGHelper.AddListBuffer(new Vector2(1, 1), 頂点リスト);
    CGHelper.AddListBuffer(new Vector3(-width, -height, 0), 頂点リスト);
    CGHelper.AddListBuffer(new Vector2(0, 1), 頂点リスト);

    // Create the vertex buffer from the vertex list.
    using (DataStream ds = DataStream.Create(頂点リスト.ToArray(), true, true))
    {
        var bufDesc = new BufferDescription()
        {
            BindFlags = BindFlags.VertexBuffer,
            SizeInBytes = (int)ds.Length
        };
        D3D頂点バッファ = new SharpDX.Direct3D11.Buffer(RenderContext.Instance.DeviceManager.D3DDevice, ds, bufDesc);
    }

    モデル状態 = new モデル状態既定実装();
    モデル状態.率 = new Vector3(0.2f);   // scaled to 0.2x from the start? (original author's open question)
}
/// <summary>Copies a slice of <paramref name="buffer"/> into a fresh array and wraps it in a DataStream.</summary>
private static DataStream ToDataStream(int offset, byte[] buffer, int length)
{
    // NOTE: We make a copy here because old versions of
    // DataStream.Create didn't work correctly for offsets.
    var slice = new byte[length];
    Buffer.BlockCopy(buffer, offset, slice, 0, length);
    return DataStream.Create(slice, true, false);
}
/// <summary>
/// Wraps the given PCM bytes in a DataStream, queues the stream, and submits it to the
/// source voice for playback.
/// </summary>
/// <param name="buffer">Raw audio bytes to play.</param>
/// <param name="size">NOTE(review): unused — the whole buffer is submitted regardless; confirm callers always pass buffer.Length.</param>
public void SubmitBuffer(byte[] buffer, int size)
{
    var dataStream = DataStream.Create(buffer, true, false);
    AudioBuffer buff = new AudioBuffer(dataStream);
    buff.Flags = BufferFlags.None;
    // Keep the stream alive while queued — presumably disposed later via this queue; TODO confirm.
    m_dataStreams.Enqueue(dataStream);
    m_voice.SubmitSourceBuffer(buff, null);
}
/// <summary>
/// Converts this stream to a DataStream by loading all the data from the source stream.
/// </summary>
/// <returns>A writable DataStream containing the full contents of this stream.</returns>
public DataStream ToDataStream()
{
    var data = new byte[Length];
    var bytesRead = Read(data, 0, (int)Length);
    if (bytesRead != Length)
    {
        throw new InvalidOperationException("Unable to get a valid DataStream");
    }
    return DataStream.Create(data, true, true);
}
/// <summary>
/// Submits raw PCM bytes to the source voice, retaining the backing stream in the
/// queue until it can be released.
/// </summary>
public void SubmitBuffer(byte[] buffer, int size)
{
    Debug.Assert(m_dataStreams != null, "SourceVoice wasnt created with buffer support");
    var stream = DataStream.Create(buffer, true, false);
    var audioBuffer = new AudioBuffer(stream) { Flags = BufferFlags.None };
    m_dataStreams.Enqueue(stream);
    m_voice.SubmitSourceBuffer(audioBuffer, null);
}
/// <summary>
/// Allocates a reusable sample array of the given size and an XAudio2 buffer
/// that streams from it.
/// </summary>
public XAudioBuffer(int size)
{
    _samples = new byte[size];
    _dataStream = DataStream.Create(_samples, true, true);
    var audioBuffer = new AudioBuffer
    {
        Stream = _dataStream,
        AudioBytes = (int)_dataStream.Length,
        Flags = BufferFlags.EndOfStream
    };
    Buffer = audioBuffer;
}
/// <summary>
/// Imports the model file via Assimp and builds vertex/index buffers from its meshes.
/// </summary>
/// <param name="device">Direct3D device used to allocate the buffers.</param>
public void Create(Device1 device)
{
    PrimitiveTopology = PrimitiveTopology.TriangleList;
    VertexStride = Marshal.SizeOf<VertexPosColNormTex>();
    var verts = new List<VertexPosColNormTex>();
    var importer = new AssimpContext();
    // FIX: removed the unused 'GetSupportedImportFormats()' local.
    if (!importer.IsImportFormatSupported(Path.GetExtension(_FileName)))
    {
        // FIX: the old message contained a never-substituted "{1}" placeholder.
        throw new ArgumentException(
            "Model format " + Path.GetExtension(_FileName) + " is not supported! Cannot load " + _FileName,
            "filename");
    }

    var model = importer.ImportFile(_FileName,
        PostProcessSteps.GenerateSmoothNormals | PostProcessSteps.CalculateTangentSpace | PostProcessSteps.Triangulate);

    // NOTE(review): 'verts' accumulates across meshes while VertexBuffer/IndexBuffer are
    // overwritten each iteration (earlier buffers are never disposed, and only the last
    // mesh's index buffer survives) — confirm multi-mesh models are handled upstream.
    foreach (var mesh in model.Meshes)
    {
        for (int i = 0; i < mesh.VertexCount; i++)
        {
            var pos = new Vector3(mesh.Vertices[i].X, mesh.Vertices[i].Y, mesh.Vertices[i].Z);
            var color = Color.AliceBlue;
            var norm = new Vector3(mesh.Normals[i].X, mesh.Normals[i].Y, mesh.Normals[i].Z);
            // V is flipped to match the renderer's texture-coordinate origin.
            var tx = new Vector2(mesh.TextureCoordinateChannels[0][i].X, -mesh.TextureCoordinateChannels[0][i].Y);
            verts.Add(new VertexPosColNormTex(pos, color, norm, tx));
        }

        var bufferDesc = new BufferDescription
        {
            Usage = ResourceUsage.Immutable,
            BindFlags = BindFlags.VertexBuffer,
            SizeInBytes = VertexStride * verts.Count,
            CpuAccessFlags = CpuAccessFlags.None,
            OptionFlags = ResourceOptionFlags.None
        };
        VertexBuffer = new Buffer(device, DataStream.Create(verts.ToArray(), false, false), bufferDesc);

        IndexCount = mesh.GetIndices().Length;
        bufferDesc = new BufferDescription
        {
            Usage = ResourceUsage.Immutable,
            BindFlags = BindFlags.IndexBuffer,
            SizeInBytes = sizeof(uint) * IndexCount,
            CpuAccessFlags = CpuAccessFlags.None,
            OptionFlags = ResourceOptionFlags.None
        };
        IndexBuffer = new Buffer(device, DataStream.Create(mesh.GetIndices(), false, false), bufferDesc);
    }
}
/// <summary>
/// Copies the full contents of this stream into a new writable DataStream
/// (created with a zero offset and pinned backing array).
/// </summary>
/// <exception cref="InvalidOperationException">Thrown when the stream cannot be read in full.</exception>
public DataStream ToDataStream()
{
    byte[] data = new byte[this.Length];
    // FIX: dropped the redundant (long) cast and the unreachable 'else' after the throw.
    if (this.Read(data, 0, (int)this.Length) != this.Length)
    {
        throw new InvalidOperationException("Unable to get a valid DataStream");
    }
    return DataStream.Create<byte>(data, true, true, 0, true);
}
/// <summary>
/// Creates the immutable vertex/index buffers for a full-screen quad.
/// </summary>
/// <param name="device">Direct3D device used to allocate the buffers.</param>
private void BuildScreenQuadGeometryBuffers(Device device)
{
    var quad = GeometryGenerator.CreateFullScreenQuad();

    var vertices = quad.Vertices
        .Select(v => new VertPosNormTex(v.Position, v.Normal, v.TexCoords))
        .ToArray();
    var vertexDesc = new BufferDescription(VertPosNormTex.Stride * vertices.Length, ResourceUsage.Immutable, BindFlags.VertexBuffer, CpuAccessFlags.None, ResourceOptionFlags.None, 0);
    _screenQuadVB = new Buffer(device, DataStream.Create(vertices, false, false), vertexDesc);

    var indexDesc = new BufferDescription(sizeof(int) * quad.Indices.Count, ResourceUsage.Immutable, BindFlags.IndexBuffer, CpuAccessFlags.None, ResourceOptionFlags.None, 0);
    _screenQuadIB = new Buffer(device, DataStream.Create(quad.Indices.ToArray(), false, false), indexDesc);
}