/// <summary>
/// Creates a new source voice for the given wave, begins playback on it,
/// and records the voice in the tracked collection.
/// </summary>
/// <param name="wave">The wave to play.</param>
public void AddSound(Wave wave)
{
    var source = new SourceVoice(audio, wave.Data.Format);
    // The voice is started before the buffer is queued (same call order as before);
    // XAudio2 plays the buffer as soon as it is submitted.
    source.Start();
    source.SubmitSourceBuffer(wave.Buffer);
    sources.Add(source);
}
// Plays a PCM wave file on the given XAudio2 device, blocking until the sound
// finishes or the user presses Escape (polled via GetAsyncKeyState).
static void PlayPCM(XAudio2 device, string fileName)
{
    //WaveStream stream = new WaveStream(fileName);
    var s = System.IO.File.OpenRead(fileName);
    WaveStream stream = new WaveStream(s);
    // NOTE(review): the raw file stream is closed immediately after wrapping it.
    // This assumes WaveStream has fully buffered the wave data by now — confirm
    // against the SlimDX WaveStream documentation.
    s.Close();
    // One buffer holds the entire decoded stream; EndOfStream lets BuffersQueued
    // drop to zero when playback completes.
    AudioBuffer buffer = new AudioBuffer();
    buffer.AudioData = stream;
    buffer.AudioBytes = (int)stream.Length;
    buffer.Flags = BufferFlags.EndOfStream;
    SourceVoice sourceVoice = new SourceVoice(device, stream.Format);
    sourceVoice.SubmitSourceBuffer(buffer);
    sourceVoice.Start();
    // loop until the sound is done playing
    while (sourceVoice.State.BuffersQueued > 0)
    {
        if (GetAsyncKeyState(VK_ESCAPE) != 0)
            break;
        Thread.Sleep(10);
    }
    // wait until the escape key is released
    while (GetAsyncKeyState(VK_ESCAPE) != 0)
        Thread.Sleep(10);
    // cleanup the voice
    buffer.Dispose();
    sourceVoice.Dispose();
    stream.Dispose();
}
/// <summary>
/// Plays the audio.
/// </summary>
/// <param name="stop">If true, will stop the sound and return its position to 0 before playing it. Passing false will have the effect of resuming the sound from the last position it was stopped at.</param>
/// <param name="loop">Whether or not to loop the sound.</param>
public void play(bool stop, bool loop)
{
    this.looping = loop;
    isInitializingPlayback = true;
    if (loop)
    {
        buffer.LoopCount = AudioBuffer.LoopInfinite;
    }
    else
    {
        // Fix: clear any loop count left over from a previous looping play.
        // Without this, a buffer that was once played with loop == true keeps
        // LoopInfinite set and loops forever even when the caller asks for
        // non-looping playback.
        buffer.LoopCount = 0;
    }
    // We'll start the buffer from the beginning if we've never played this buffer before so that the sound can be loaded.
    // Otherwise, the sound might start from a random position in the buffer.
    if (stop || hasNeverPlayed)
    {
        hasNeverPlayed = false;
        voice.Stop();
        voice.FlushSourceBuffers();
        buffer.Stream.Position = 0;
        voice.SubmitSourceBuffer(buffer, null);
    }
    voice.Start();
    isStopped = false;
    isInitializingPlayback = false;
}
/// <summary>
/// Computes the buffer sizing from the configured latency, creates the source
/// voice with the PCM format the sound core produces, and starts it.
/// </summary>
public void StartSound()
{
    BufferSizeSamples = Sound.MillisecondsToSamples(GlobalWin.Config.SoundBufferSizeMs);
    MaxSamplesDeficit = BufferSizeSamples;

    // Describe the interleaved PCM stream, one property at a time.
    var format = new WaveFormat();
    format.FormatTag = WaveFormatTag.Pcm;
    format.SamplesPerSecond = Sound.SampleRate;
    format.BitsPerSample = Sound.BytesPerSample * 8;
    format.Channels = Sound.ChannelCount;
    format.BlockAlignment = Sound.BlockAlign;
    format.AverageBytesPerSecond = Sound.SampleRate * Sound.BlockAlign;

    _sourceVoice = new SourceVoice(_device, format);
    _bufferPool = new BufferPool();
    _runningSamplesQueued = 0;
    _sourceVoice.Start();
}
/// <summary>
/// Loads the given sound file, plays it to completion on a freshly created
/// XAudio2 engine, then tears the whole pipeline down. Any failure is logged
/// to the console and swallowed.
/// </summary>
/// <param name="soundFile">Path to the sound file, passed as object (e.g. for use as a thread-start argument).</param>
public static void PlayXAudioSound(object soundFile)
{
    try
    {
        xaudio2 = new XAudio2();
        masteringVoice = new MasteringVoice(xaudio2);
        var stream = new SoundStream(File.OpenRead(soundFile as string));
        var waveFormat = stream.Format;
        buffer = new AudioBuffer
        {
            Stream = stream.ToDataStream(),
            AudioBytes = (int)stream.Length,
            Flags = BufferFlags.EndOfStream
        };
        stream.Close();
        sourceVoice = new SourceVoice(xaudio2, waveFormat, true);
        sourceVoice.SubmitSourceBuffer(buffer, stream.DecodedPacketsInfo);
        sourceVoice.Start();
        // Poll until the single submitted buffer has fully drained.
        while (sourceVoice.State.BuffersQueued > 0)
        {
            Thread.Sleep(1);
        }
        sourceVoice.DestroyVoice();
        sourceVoice.Dispose();
        sourceVoice = null;
        buffer.Stream.Dispose();
        // Fix: dispose the mastering voice BEFORE the engine. The original code
        // disposed xaudio2 first, tearing down the engine while one of its
        // voices was still alive; XAudio2 requires voices to be destroyed
        // before the engine that owns them.
        masteringVoice.Dispose();
        xaudio2.Dispose();
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
    }
}
/// <summary>
/// Tears down the current source voice, writer and buffer (if any), then
/// rebuilds them empty, reapplies the volume, and resumes playback.
/// </summary>
public void Reset()
{
    // Release the previous playback objects, if they exist.
    if (sVoice != null)
    {
        sVoice.Stop();
        sVoice.Dispose();
    }
    audioWriter?.Close();
    audioBuffer?.Dispose();

    // Recreate the voice and an empty in-memory buffer to stream into.
    sVoice = new SourceVoice(device, audioFormat, VoiceFlags.None);
    audioBuffer = new AudioBuffer { AudioData = new MemoryStream() };
    audioWriter = new BinaryWriter(audioBuffer.AudioData);
    mVoice.Volume = volume;
    sVoice.Start();
}
/// <summary>
/// Sizes the buffer from the configured latency, opens a source voice with the
/// mixer's PCM format, and begins playback.
/// </summary>
public void StartSound()
{
    BufferSizeSamples = Sound.MillisecondsToSamples(Global.Config.SoundBufferSizeMs);
    MaxSamplesDeficit = BufferSizeSamples;

    // PCM format description matching the mixer's output.
    var waveFormat = new WaveFormat
    {
        FormatTag = WaveFormatTag.Pcm,
        Channels = Sound.ChannelCount,
        SamplesPerSecond = Sound.SampleRate,
        BitsPerSample = Sound.BytesPerSample * 8,
        BlockAlignment = Sound.BlockAlign,
        AverageBytesPerSecond = Sound.SampleRate * Sound.BlockAlign
    };

    _sourceVoice = new SourceVoice(_device, waveFormat);
    _bufferPool = new BufferPool();
    _runningSamplesQueued = 0;
    _sourceVoice.Start();
}
/// <summary>
/// Serializes a block of 16-bit samples to bytes and queues it on the source
/// voice, creating the voice on first use.
/// </summary>
/// <param name="buffer">Sample data.</param>
/// <param name="length">Number of samples from <paramref name="buffer"/> to queue.</param>
/// <param name="sampleRate">Sample rate used when the voice is first created.</param>
public void QueueDataBlock(short[] buffer, int length, int sampleRate)
{
    int byteCount = length * BYTES_PER_SAMPLE;

    // Lazily create the voice with a PCM format built from the given rate.
    if (source == null)
    {
        var fmt = new WaveFormat
        {
            FormatTag = WaveFormatTag.Pcm,
            SamplesPerSecond = sampleRate,
            AverageBytesPerSecond = sampleRate * BYTES_PER_SAMPLE,
            BitsPerSample = BITS_PER_SAMPLE,
            Channels = NUM_CHANNELS,
            BlockAlignment = BLOCK_ALIGN
        };
        source = new SourceVoice(device, fmt);
    }

    // Write the samples into a little-endian byte stream.
    using (var ms = new MemoryStream(byteCount))
    {
        using (var writer = new BinaryWriter(ms, Encoding.Default, true))
        {
            for (int sampleIndex = 0; sampleIndex < length; sampleIndex++)
            {
                writer.Write(buffer[sampleIndex]);
            }
            writer.Flush();
        }
        ms.Position = 0;

        // Hand the block to XAudio2.
        source.SubmitSourceBuffer(new AudioBuffer { AudioData = ms, AudioBytes = byteCount });
    }

    // Ensure playback is running.
    source.Start();
}
// Thread body: plays WaveDaten as an endlessly looping signal until another
// thread clears Globals.bPlaySignal, then releases all audio objects and
// terminates its own thread.
void SoundPlayerThread()
{
    Globals.bPlaySignal = true;
    WaveMemStream = new MemoryStream(WaveDaten);
    WaveBuffer = new AudioBuffer();
    WaveBuffer.Flags = BufferFlags.EndOfStream;
    WaveBuffer.AudioData = WaveMemStream;
    // WaveBuffer.AudioBytes = clGlobals.BytesProSekunde;
    WaveBuffer.AudioBytes = (int)WaveMemStream.Length;
    // Loop the single buffer forever; total playback time is governed by the
    // bPlaySignal flag below, not by the buffer length.
    WaveBuffer.LoopCount = XAudio2.LoopInfinite;
    WaveSourceVoice = new SourceVoice(AudioDevice, SignalFormat);
    WaveSourceVoice.SubmitSourceBuffer(WaveBuffer);
    WaveSourceVoice.Start();
    // Poll (10 ms granularity) until playback is cancelled externally.
    while (Globals.bPlaySignal)
    {
        Thread.Sleep(10);
    }
    WaveSourceVoice.Stop();
    // Short pause before freeing the buffer the voice was reading from —
    // presumably to let the stop take effect; TODO confirm this is sufficient.
    Thread.Sleep(10);
    WaveMemStream.Close();
    WaveMemStream.Dispose();
    WaveMemStream = null;
    WaveBuffer.Dispose();
    WaveBuffer = null;
    WaveSourceVoice.Dispose();
    WaveSourceVoice = null;
    // Thread.Sleep(100);
    soundThreadStart = null;
    // this.ClearWaveContainer();
    // this.InitWaveContainer();
    // NOTE(review): Thread.Abort on the current thread is obsolete (and throws
    // PlatformNotSupportedException on modern .NET) — simply returning from
    // this method would end the thread; verify before changing.
    this.m_soundThread.Abort();
}
/// <summary>
/// Builds the browser window: seeds the tree view with the Examples folder
/// and brings up the XAudio2 playback chain (engine, mastering voice, and a
/// 32 kHz / 16-bit / mono source voice with a 4x frequency-ratio ceiling).
/// </summary>
public BrowserForm()
{
    InitializeComponent();

    // Root node pointing at the bundled examples directory.
    var examplesNode = new Folder
    {
        Text = "Examples",
        Path = @"..\..\Examples"
    };
    treeView.Nodes.Add(examplesNode);

    // Audio pipeline: engine -> mastering voice -> source voice.
    Audio = new XAudio2();
    Audio.StartEngine();
    Master = new MasteringVoice(Audio);
    Voice = new SourceVoice(Audio, new WaveFormat(32000, 16, 1), VoiceFlags.None, 4.0f);
    Voice.BufferEnd += Voice_BufferEnd;
    Voice.Start();
}
/// <summary>
/// Starts playback unless it is already in progress, lazily loading the sound
/// first. Non-background sounds block the caller on m_PlaySync until playback
/// is signalled complete.
/// </summary>
public void Play()
{
    if (IsPlaying)
    {
        return;
    }

    IsPlaying = true;
    if (!Loaded)
    {
        Load();
    }

    bool blocking = !Background;
    if (blocking)
    {
        m_PlaySync.Reset();
    }

    m_Audio.SubmitSourceBuffer(m_AudioBuffer, m_SoundStream.DecodedPacketsInfo);
    m_Audio.Start();

    if (blocking)
    {
        m_PlaySync.Wait();
    }
}
// Spins up playback: creates the source voice, restores the buffer semaphore
// to its full count, and launches the long-running feeder task.
public void Start()
{
    Debug.Assert(sourceVoice == null);
    sourceVoice = new SourceVoice(xaudio2, waveFormat);
    sourceVoice.BufferEnd += SourceVoice_BufferEnd;
    sourceVoice.Start();
    quitEvent.Reset();
    try
    {
        // Deliberate pattern: release until the semaphore hits its maximum;
        // the SemaphoreFullException is the loop-exit condition and restores
        // the full buffer quota regardless of its previous count.
        while (true)
        {
            bufferSemaphore.Release();
        }
    }
    catch (SemaphoreFullException)
    {
    }
    playingTask = Task.Factory.StartNew(PlayAsync, TaskCreationOptions.LongRunning);
}
/// <summary>
/// Pre-renders one minute of a 224 Hz cosine tone (stereo, 44.1 kHz, 32-bit
/// samples at half amplitude), plays it, and prints a progress dot each
/// second for 60 seconds before releasing all audio resources.
/// </summary>
public void Generate()
{
    var xaudio2 = new XAudio2();
    var masteringVoice = new MasteringVoice(xaudio2);
    var waveFormat = new WaveFormat(44100, 32, 2);
    var sourceVoice = new SourceVoice(xaudio2, waveFormat);

    // Render 60 s of samples into a single buffer up front.
    int bufferSize = waveFormat.ConvertLatencyToByteSize(60000);
    var dataStream = new DataStream(bufferSize, true, true);
    int numberOfSamples = bufferSize / waveFormat.BlockAlign;
    for (int i = 0; i < numberOfSamples; i++)
    {
        float value = (float)(Math.Cos(2 * Math.PI * (220.0 + 4.0) * i / waveFormat.SampleRate) * 0.5);
        // Same sample written twice: left and right channels.
        dataStream.Write(value);
        dataStream.Write(value);
    }
    dataStream.Position = 0;

    var audioBuffer = new AudioBuffer
    {
        Stream = dataStream,
        Flags = BufferFlags.EndOfStream,
        AudioBytes = bufferSize
    };
    sourceVoice.SubmitSourceBuffer(audioBuffer, null);
    sourceVoice.Start();

    Console.WriteLine("Play sound");
    for (int i = 0; i < 60; i++)
    {
        Console.Write(".");
        Console.Out.Flush();
        Thread.Sleep(1000);
    }

    // Fix: the original leaked the engine, both voices, and the data stream.
    // Release them now that playback is done — voices before the engine that
    // owns them.
    sourceVoice.DestroyVoice();
    sourceVoice.Dispose();
    masteringVoice.Dispose();
    xaudio2.Dispose();
    dataStream.Dispose();
}
/// <summary>Begins (or resumes) playback on the underlying XAudio2 source voice.</summary>
internal override void PlayImpl()
{
    SourceVoice.Start();
}
// Entry point: initializes OpenVR and a D3D11 renderer on the adapter OpenVR
// recommends, loads each controller's render model and texture, attaches a
// looping 3D-positioned noise voice to every controller, and then runs the
// render loop (left eye, right eye, desktop mirror window) until the window
// closes.
static void Main()
{
    // --- OpenVR initialization ---
    var initError = EVRInitError.None;
    system = OpenVR.Init(ref initError);
    if (initError != EVRInitError.None)
    {
        return;
    }
    compositor = OpenVR.Compositor;
    compositor.CompositorBringToFront();
    compositor.FadeGrid(5.0f, false);
    // Per-tracked-device state arrays, indexed by device slot.
    count = OpenVR.k_unMaxTrackedDeviceCount;
    currentPoses = new TrackedDevicePose_t[count];
    nextPoses = new TrackedDevicePose_t[count];
    controllers = new List<uint>();
    controllerModels = new RenderModel_t[count];
    controllerTextures = new RenderModel_TextureMap_t[count];
    controllerTextureViews = new ShaderResourceView[count];
    controllerVertexBuffers = new SharpDX.Direct3D11.Buffer[count];
    controllerIndexBuffers = new SharpDX.Direct3D11.Buffer[count];
    controllerVertexBufferBindings = new VertexBufferBinding[count];
    controllerEmitters = new Emitter[count];
    controllerVoices = new SourceVoice[count];
    // Classify every device slot as the headset or a controller.
    for (uint device = 0; device < count; device++)
    {
        var deviceClass = system.GetTrackedDeviceClass(device);
        switch (deviceClass)
        {
            case ETrackedDeviceClass.HMD: headset = device; break;
            case ETrackedDeviceClass.Controller: controllers.Add(device); break;
        }
    }
    uint width = 0;
    uint height = 0;
    system.GetRecommendedRenderTargetSize(ref width, ref height);
    headsetSize = new Size((int)width, (int)height);
    windowSize = new Size(960, 540);
    var leftEyeProjection = Convert(system.GetProjectionMatrix(EVREye.Eye_Left, 0.01f, 1000.0f));
    var rightEyeProjection = Convert(system.GetProjectionMatrix(EVREye.Eye_Right, 0.01f, 1000.0f));
    var leftEyeView = Convert(system.GetEyeToHeadTransform(EVREye.Eye_Left));
    var rightEyeView = Convert(system.GetEyeToHeadTransform(EVREye.Eye_Right));
    // Fetch each controller's render model and texture, busy-polling the async
    // OpenVR loaders until they finish.
    foreach (var controller in controllers)
    {
        var modelName = new StringBuilder(255, 255);
        var propertyError = ETrackedPropertyError.TrackedProp_Success;
        var length = system.GetStringTrackedDeviceProperty(controller, ETrackedDeviceProperty.Prop_RenderModelName_String, modelName, 255, ref propertyError);
        if (propertyError == ETrackedPropertyError.TrackedProp_Success)
        {
            var modelName2 = modelName.ToString();
            while (true)
            {
                var pointer = IntPtr.Zero;
                var modelError = EVRRenderModelError.None;
                modelError = OpenVR.RenderModels.LoadRenderModel_Async(modelName2, ref pointer);
                if (modelError == EVRRenderModelError.Loading)
                {
                    continue;
                }
                if (modelError == EVRRenderModelError.None)
                {
                    var renderModel = System.Runtime.InteropServices.Marshal.PtrToStructure<RenderModel_t>(pointer);
                    controllerModels[controller] = renderModel;
                    break;
                }
            }
            while (true)
            {
                var pointer = IntPtr.Zero;
                var textureError = EVRRenderModelError.None;
                textureError = OpenVR.RenderModels.LoadTexture_Async(controllerModels[controller].diffuseTextureId, ref pointer);
                if (textureError == EVRRenderModelError.Loading)
                {
                    continue;
                }
                if (textureError == EVRRenderModelError.None)
                {
                    var texture = System.Runtime.InteropServices.Marshal.PtrToStructure<RenderModel_TextureMap_t>(pointer);
                    controllerTextures[controller] = texture;
                    break;
                }
            }
        }
    }
    // --- D3D11 device + swap chain on the adapter OpenVR points at ---
    int adapterIndex = 0;
    system.GetDXGIOutputInfo(ref adapterIndex);
    using (var form = new Form())
    using (var factory = new Factory4())
    {
        form.ClientSize = windowSize;
        var adapter = factory.GetAdapter(adapterIndex);
        var swapChainDescription = new SwapChainDescription
        {
            BufferCount = 1,
            Flags = SwapChainFlags.None,
            IsWindowed = true,
            ModeDescription = new ModeDescription
            {
                Format = Format.B8G8R8A8_UNorm,
                Width = form.ClientSize.Width,
                Height = form.ClientSize.Height,
                RefreshRate = new Rational(60, 1)
            },
            OutputHandle = form.Handle,
            SampleDescription = new SampleDescription(1, 0),
            SwapEffect = SwapEffect.Discard,
            Usage = Usage.RenderTargetOutput
        };
        SharpDX.Direct3D11.Device.CreateWithSwapChain(adapter, DeviceCreationFlags.None, swapChainDescription, out device, out swapChain);
        factory.MakeWindowAssociation(form.Handle, WindowAssociationFlags.None);
        context = device.ImmediateContext;
        using (var backBuffer = swapChain.GetBackBuffer<Texture2D>(0))
            backBufferView = new RenderTargetView(device, backBuffer);
        var depthBufferDescription = new Texture2DDescription
        {
            Format = Format.D16_UNorm,
            ArraySize = 1,
            MipLevels = 1,
            Width = form.ClientSize.Width,
            Height = form.ClientSize.Height,
            SampleDescription = new SampleDescription(1, 0),
            Usage = ResourceUsage.Default,
            BindFlags = BindFlags.DepthStencil,
            CpuAccessFlags = CpuAccessFlags.None,
            OptionFlags = ResourceOptionFlags.None
        };
        using (var depthBuffer = new Texture2D(device, depthBufferDescription))
            depthStencilView = new DepthStencilView(device, depthBuffer);
        // Create Eye Textures
        var eyeTextureDescription = new Texture2DDescription
        {
            ArraySize = 1,
            BindFlags = BindFlags.RenderTarget,
            CpuAccessFlags = CpuAccessFlags.None,
            Format = Format.B8G8R8A8_UNorm,
            Width = headsetSize.Width,
            Height = headsetSize.Height,
            MipLevels = 1,
            OptionFlags = ResourceOptionFlags.None,
            SampleDescription = new SampleDescription(1, 0),
            Usage = ResourceUsage.Default
        };
        var leftEyeTexture = new Texture2D(device, eyeTextureDescription);
        var rightEyeTexture = new Texture2D(device, eyeTextureDescription);
        var leftEyeTextureView = new RenderTargetView(device, leftEyeTexture);
        var rightEyeTextureView = new RenderTargetView(device, rightEyeTexture);
        // Create Eye Depth Buffer (shared between both eyes)
        eyeTextureDescription.BindFlags = BindFlags.DepthStencil;
        eyeTextureDescription.Format = Format.D32_Float;
        var eyeDepth = new Texture2D(device, eyeTextureDescription);
        var eyeDepthView = new DepthStencilView(device, eyeDepth);
        Shapes.Cube.Load(device);
        Shapes.Sphere.Load(device);
        Shaders.Load(device);
        // Load Controller Models
        foreach (var controller in controllers)
        {
            var model = controllerModels[controller];
            controllerVertexBuffers[controller] = new SharpDX.Direct3D11.Buffer(device, model.rVertexData, new BufferDescription { BindFlags = BindFlags.VertexBuffer, SizeInBytes = (int)model.unVertexCount * 32 });
            controllerVertexBufferBindings[controller] = new VertexBufferBinding(controllerVertexBuffers[controller], 32, 0);
            controllerIndexBuffers[controller] = new SharpDX.Direct3D11.Buffer(device, model.rIndexData, new BufferDescription { BindFlags = BindFlags.IndexBuffer, SizeInBytes = (int)model.unTriangleCount * 3 * 2 });
            var texture = controllerTextures[controller];
            using (var texture2d = new Texture2D(device, new Texture2DDescription { ArraySize = 1, BindFlags = BindFlags.ShaderResource, Format = Format.R8G8B8A8_UNorm, Width = texture.unWidth, Height = texture.unHeight, MipLevels = 1, SampleDescription = new SampleDescription(1, 0) }, new DataRectangle(texture.rubTextureMapData, texture.unWidth * 4)))
                controllerTextureViews[controller] = new ShaderResourceView(device, texture2d);
        }
        // Controller shaders + input layout (position / normal / texcoord).
        var controllerVertexShaderByteCode = SharpDX.D3DCompiler.ShaderBytecode.Compile(Properties.Resources.NormalTextureShader, "VS", "vs_5_0");
        controllerVertexShader = new VertexShader(device, controllerVertexShaderByteCode);
        controllerPixelShader = new PixelShader(device, SharpDX.D3DCompiler.ShaderBytecode.Compile(Properties.Resources.NormalTextureShader, "PS", "ps_5_0"));
        var controllerLayout = new InputLayout(device, SharpDX.D3DCompiler.ShaderSignature.GetInputSignature(controllerVertexShaderByteCode), new InputElement[] { new InputElement("POSITION", 0, Format.R32G32B32_Float, 0, 0), new InputElement("NORMAL", 0, Format.R32G32B32_Float, 12, 0), new InputElement("TEXCOORD", 0, Format.R32G32_Float, 24, 0) });
        worldViewProjectionBuffer = new SharpDX.Direct3D11.Buffer(device, Utilities.SizeOf<Matrix>(), ResourceUsage.Default, BindFlags.ConstantBuffer, CpuAccessFlags.None, ResourceOptionFlags.None, 0);
        // Fixed pipeline state objects.
        var rasterizerStateDescription = RasterizerStateDescription.Default();
        //rasterizerStateDescription.FillMode = FillMode.Wireframe;
        rasterizerStateDescription.IsFrontCounterClockwise = true;
        //rasterizerStateDescription.CullMode = CullMode.None;
        rasterizerState = new RasterizerState(device, rasterizerStateDescription);
        var blendStateDescription = BlendStateDescription.Default();
        blendStateDescription.RenderTarget[0].BlendOperation = BlendOperation.Add;
        blendStateDescription.RenderTarget[0].SourceBlend = BlendOption.SourceAlpha;
        blendStateDescription.RenderTarget[0].DestinationBlend = BlendOption.InverseSourceAlpha;
        blendStateDescription.RenderTarget[0].IsBlendEnabled = false;
        blendState = new BlendState(device, blendStateDescription);
        var depthStateDescription = DepthStencilStateDescription.Default();
        depthStateDescription.DepthComparison = Comparison.LessEqual;
        depthStateDescription.IsDepthEnabled = true;
        depthStateDescription.IsStencilEnabled = false;
        depthStencilState = new DepthStencilState(device, depthStateDescription);
        var samplerStateDescription = SamplerStateDescription.Default();
        samplerStateDescription.Filter = Filter.MinMagMipLinear;
        samplerStateDescription.AddressU = TextureAddressMode.Wrap;
        samplerStateDescription.AddressV = TextureAddressMode.Wrap;
        samplerState = new SamplerState(device, samplerStateDescription);
        startTime = DateTime.Now;
        frame = 0;
        windowSize = form.ClientSize;
        backgroundColor = new RawColor4(0.1f, 0.1f, 0.1f, 1);
        var vrEvent = new VREvent_t();
        var eventSize = (uint)Utilities.SizeOf<VREvent_t>();
        head = Matrix.Identity;
        // Initialize Audio
        audio = new XAudio2();
        var voice = new MasteringVoice(audio);
        audio3d = new X3DAudio(Speakers.Stereo);
        foreach (var controller in controllers)
        {
            controllerEmitters[controller] = new Emitter
            {
                ChannelCount = 1,
                CurveDistanceScaler = 0.15f,
                OrientFront = Vector3.ForwardLH,
                OrientTop = Vector3.Up,
                Position = new Vector3(0, 0, 1000),
                //Velocity = Vector3.Zero
            };
        }
        listener = new Listener
        {
            OrientFront = Vector3.ForwardLH,
            OrientTop = Vector3.Up,
            Position = new Vector3(0, 0, 1000)
        };
        // One second of random noise, looped, shared by all controller voices.
        var audioFormat = new WaveFormat(44100, 32, 1);
        //var audioSource = new SourceVoice(audio, audioFormat);
        var audioBufferSize = audioFormat.ConvertLatencyToByteSize(1000);
        var audioStream = new DataStream(audioBufferSize, true, true);
        var audioSamples = audioBufferSize / audioFormat.BlockAlign;
        var random = new Random();
        for (var sample = 0; sample < audioSamples; sample++)
        {
            audioStream.Write((float)random.NextFloat(-1, 1));
        }
        audioStream.Position = 0;
        var audioBuffer = new AudioBuffer
        {
            Stream = audioStream,
            AudioBytes = audioBufferSize,
            LoopCount = 255
        };
        var audioSettings = new DspSettings(1, 2);
        // One positional voice per connected controller.
        foreach (var controller in controllers)
        {
            var audioSource = new SourceVoice(audio, audioFormat);
            audioSource.SubmitSourceBuffer(audioBuffer, null);
            audio3d.Calculate(listener, controllerEmitters[controller], CalculateFlags.Matrix, audioSettings);
            audioSource.SetOutputMatrix(1, 2, audioSettings.MatrixCoefficients);
            audioSource.Start();
            controllerVoices[controller] = audioSource;
        }
        // --- Per-frame loop ---
        RenderLoop.Run(form, () =>
        {
            // Handle VR events: hot-plugged controllers get model/texture/audio
            // set up exactly as in the startup path above.
            while (system.PollNextEvent(ref vrEvent, eventSize))
            {
                switch ((EVREventType)vrEvent.eventType)
                {
                    case EVREventType.VREvent_TrackedDeviceActivated:
                        var controller = vrEvent.trackedDeviceIndex;
                        controllers.Add(controller);
                        var modelName = new StringBuilder(255, 255);
                        var propertyError = ETrackedPropertyError.TrackedProp_Success;
                        var length = system.GetStringTrackedDeviceProperty(controller, ETrackedDeviceProperty.Prop_RenderModelName_String, modelName, 255, ref propertyError);
                        if (propertyError == ETrackedPropertyError.TrackedProp_Success)
                        {
                            var modelName2 = modelName.ToString();
                            while (true)
                            {
                                var pointer = IntPtr.Zero;
                                var modelError = EVRRenderModelError.None;
                                modelError = OpenVR.RenderModels.LoadRenderModel_Async(modelName2, ref pointer);
                                if (modelError == EVRRenderModelError.Loading)
                                {
                                    continue;
                                }
                                if (modelError == EVRRenderModelError.None)
                                {
                                    var renderModel = System.Runtime.InteropServices.Marshal.PtrToStructure<RenderModel_t>(pointer);
                                    controllerModels[controller] = renderModel;
                                    // Load Controller Model
                                    var model = controllerModels[controller];
                                    controllerVertexBuffers[controller] = new SharpDX.Direct3D11.Buffer(device, model.rVertexData, new BufferDescription { BindFlags = BindFlags.VertexBuffer, SizeInBytes = (int)model.unVertexCount * 32 });
                                    controllerVertexBufferBindings[controller] = new VertexBufferBinding(controllerVertexBuffers[controller], 32, 0);
                                    controllerIndexBuffers[controller] = new SharpDX.Direct3D11.Buffer(device, model.rIndexData, new BufferDescription { BindFlags = BindFlags.IndexBuffer, SizeInBytes = (int)model.unTriangleCount * 3 * 2 });
                                    break;
                                }
                            }
                            while (true)
                            {
                                var pointer = IntPtr.Zero;
                                var textureError = EVRRenderModelError.None;
                                textureError = OpenVR.RenderModels.LoadTexture_Async(controllerModels[controller].diffuseTextureId, ref pointer);
                                if (textureError == EVRRenderModelError.Loading)
                                {
                                    continue;
                                }
                                if (textureError == EVRRenderModelError.None)
                                {
                                    var textureMap = System.Runtime.InteropServices.Marshal.PtrToStructure<RenderModel_TextureMap_t>(pointer);
                                    controllerTextures[controller] = textureMap;
                                    using (var texture2d = new Texture2D(device, new Texture2DDescription { ArraySize = 1, BindFlags = BindFlags.ShaderResource, Format = Format.R8G8B8A8_UNorm, Width = textureMap.unWidth, Height = textureMap.unHeight, MipLevels = 1, SampleDescription = new SampleDescription(1, 0) }, new DataRectangle(textureMap.rubTextureMapData, textureMap.unWidth * 4)))
                                        controllerTextureViews[controller] = new ShaderResourceView(device, texture2d);
                                    break;
                                }
                            }
                            controllerEmitters[controller] = new Emitter
                            {
                                ChannelCount = 1,
                                CurveDistanceScaler = 0.15f,
                                OrientFront = Vector3.ForwardLH,
                                OrientTop = Vector3.Up,
                                Position = new Vector3(0, 0, 1000),
                                //Velocity = Vector3.Zero
                            };
                            var audioSource = new SourceVoice(audio, audioFormat);
                            audioSource.SubmitSourceBuffer(audioBuffer, null);
                            audio3d.Calculate(listener, controllerEmitters[controller], CalculateFlags.Matrix, audioSettings);
                            audioSource.SetOutputMatrix(1, 2, audioSettings.MatrixCoefficients);
                            audioSource.Start();
                            controllerVoices[controller] = audioSource;
                        }
                        break;
                    case EVREventType.VREvent_TrackedDeviceDeactivated:
                        controllers.RemoveAll(c => c == vrEvent.trackedDeviceIndex);
                        break;
                    default:
                        System.Diagnostics.Debug.WriteLine((EVREventType)vrEvent.eventType);
                        break;
                }
            }
            // Recreate the back buffer view when the window is resized.
            if (form.ClientSize != windowSize)
            {
                Utilities.Dispose(ref backBufferView);
                if (form.ClientSize.Width != 0 && form.ClientSize.Height != 0)
                {
                    swapChain.ResizeBuffers(1, form.ClientSize.Width, form.ClientSize.Height, Format.B8G8R8A8_UNorm, SwapChainFlags.None);
                    using (var backBuffer = swapChain.GetBackBuffer<Texture2D>(0))
                        backBufferView = new RenderTargetView(device, backBuffer);
                }
                windowSize = form.ClientSize;
            }
            // Update Device Tracking
            compositor.WaitGetPoses(currentPoses, nextPoses);
            if (currentPoses[headset].bPoseIsValid)
            {
                Convert(ref currentPoses[headset].mDeviceToAbsoluteTracking, ref head);
                // Update Audio Listener
                listener.Position = head.TranslationVector * new Vector3(1, 1, -1);
                listener.OrientFront = head.Forward * new Vector3(1, 1, -1);
                listener.OrientTop = head.Up * new Vector3(1, 1, -1);
            }
            // Re-aim each controller's 3D audio emitter at its tracked position.
            foreach (var controller in controllers)
            {
                var controllerMatrix = Matrix.Identity;
                Convert(ref currentPoses[controller].mDeviceToAbsoluteTracking, ref controllerMatrix);
                var position = controllerMatrix.TranslationVector * new Vector3(1, 1, -1);
                controllerEmitters[controller].Position = position;
                audio3d.Calculate(listener, controllerEmitters[controller], CalculateFlags.Matrix, audioSettings);
                controllerVoices[controller].SetOutputMatrix(1, 2, audioSettings.MatrixCoefficients);
            }
            // Render Left Eye
            context.Rasterizer.SetViewport(0, 0, headsetSize.Width, headsetSize.Height);
            context.OutputMerger.SetTargets(eyeDepthView, leftEyeTextureView);
            context.OutputMerger.SetDepthStencilState(depthStencilState);
            context.ClearRenderTargetView(leftEyeTextureView, backgroundColor);
            context.ClearDepthStencilView(eyeDepthView, DepthStencilClearFlags.Depth, 1.0f, 0);
            Shaders.Apply(context);
            context.Rasterizer.State = rasterizerState;
            context.OutputMerger.SetBlendState(blendState);
            context.OutputMerger.SetDepthStencilState(depthStencilState);
            context.PixelShader.SetSampler(0, samplerState);
            var ratio = (float)headsetSize.Width / (float)headsetSize.Height;
            var projection = leftEyeProjection;
            var view = Matrix.Invert(leftEyeView * head);
            var world = Matrix.Scaling(0.5f) * Matrix.Translation(0, 1.0f, 0);
            worldViewProjection = world * view * projection;
            context.UpdateSubresource(ref worldViewProjection, worldViewProjectionBuffer);
            context.VertexShader.SetConstantBuffer(0, worldViewProjectionBuffer);
            //Shapes.Cube.Begin(context);
            //Shapes.Cube.Draw(context);
            Shapes.Sphere.Begin(context);
            Shapes.Sphere.Draw(context);
            // Draw Controllers
            context.InputAssembler.InputLayout = controllerLayout;
            context.InputAssembler.PrimitiveTopology = PrimitiveTopology.TriangleList;
            context.VertexShader.Set(controllerVertexShader);
            context.PixelShader.Set(controllerPixelShader);
            context.GeometryShader.Set(null);
            context.DomainShader.Set(null);
            context.HullShader.Set(null);
            context.PixelShader.SetSampler(0, samplerState);
            foreach (var controller in controllers)
            {
                context.InputAssembler.SetVertexBuffers(0, controllerVertexBufferBindings[controller]);
                context.InputAssembler.SetIndexBuffer(controllerIndexBuffers[controller], Format.R16_UInt, 0);
                context.PixelShader.SetShaderResource(0, controllerTextureViews[controller]);
                Convert(ref currentPoses[controller].mDeviceToAbsoluteTracking, ref world);
                worldViewProjection = world * view * projection;
                context.UpdateSubresource(ref worldViewProjection, worldViewProjectionBuffer);
                context.VertexShader.SetConstantBuffer(0, worldViewProjectionBuffer);
                context.DrawIndexed((int)controllerModels[controller].unTriangleCount * 3 * 4, 0, 0);
            }
            // Hand the finished left-eye texture to the compositor.
            var texture = new Texture_t { eType = ETextureType.DirectX, eColorSpace = EColorSpace.Gamma, handle = leftEyeTextureView.Resource.NativePointer };
            var bounds = new VRTextureBounds_t { uMin = 0.0f, uMax = 1.0f, vMin = 0.0f, vMax = 1.0f, };
            var submitError = compositor.Submit(EVREye.Eye_Left, ref texture, ref bounds, EVRSubmitFlags.Submit_Default);
            if (submitError != EVRCompositorError.None)
            {
                System.Diagnostics.Debug.WriteLine(submitError);
            }
            // Render Right Eye
            context.Rasterizer.SetViewport(0, 0, headsetSize.Width, headsetSize.Height);
            context.OutputMerger.SetTargets(eyeDepthView, rightEyeTextureView);
            context.OutputMerger.SetDepthStencilState(depthStencilState);
            context.ClearRenderTargetView(rightEyeTextureView, backgroundColor);
            context.ClearDepthStencilView(eyeDepthView, DepthStencilClearFlags.Depth, 1.0f, 0);
            Shaders.Apply(context);
            context.Rasterizer.State = rasterizerState;
            context.OutputMerger.SetBlendState(blendState);
            context.OutputMerger.SetDepthStencilState(depthStencilState);
            context.PixelShader.SetSampler(0, samplerState);
            projection = rightEyeProjection;
            view = Matrix.Invert(rightEyeView * head);
            world = Matrix.Scaling(0.5f) * Matrix.Translation(0, 1.0f, 0);
            worldViewProjection = world * view * projection;
            context.UpdateSubresource(ref worldViewProjection, worldViewProjectionBuffer);
            context.VertexShader.SetConstantBuffer(0, worldViewProjectionBuffer);
            //Shapes.Cube.Begin(context);
            //Shapes.Cube.Draw(context);
            Shapes.Sphere.Begin(context);
            Shapes.Sphere.Draw(context);
            // Draw Controllers
            context.InputAssembler.InputLayout = controllerLayout;
            context.InputAssembler.PrimitiveTopology = PrimitiveTopology.TriangleList;
            context.VertexShader.Set(controllerVertexShader);
            context.PixelShader.Set(controllerPixelShader);
            context.GeometryShader.Set(null);
            context.DomainShader.Set(null);
            context.HullShader.Set(null);
            context.PixelShader.SetSampler(0, samplerState);
            foreach (var controller in controllers)
            {
                context.InputAssembler.SetVertexBuffers(0, controllerVertexBufferBindings[controller]);
                context.InputAssembler.SetIndexBuffer(controllerIndexBuffers[controller], Format.R16_UInt, 0);
                context.PixelShader.SetShaderResource(0, controllerTextureViews[controller]);
                Convert(ref currentPoses[controller].mDeviceToAbsoluteTracking, ref world);
                worldViewProjection = world * view * projection;
                context.UpdateSubresource(ref worldViewProjection, worldViewProjectionBuffer);
                context.VertexShader.SetConstantBuffer(0, worldViewProjectionBuffer);
                context.DrawIndexed((int)controllerModels[controller].unTriangleCount * 3 * 4, 0, 0);
            }
            texture.handle = rightEyeTextureView.Resource.NativePointer;
            submitError = compositor.Submit(EVREye.Eye_Right, ref texture, ref bounds, EVRSubmitFlags.Submit_Default);
            if (submitError != EVRCompositorError.None)
            {
                System.Diagnostics.Debug.WriteLine(submitError);
            }
            // Render Window
            context.Rasterizer.SetViewport(0, 0, windowSize.Width, windowSize.Height);
            context.OutputMerger.SetTargets(depthStencilView, backBufferView);
            context.OutputMerger.SetDepthStencilState(depthStencilState);
            context.ClearRenderTargetView(backBufferView, backgroundColor);
            context.ClearDepthStencilView(depthStencilView, DepthStencilClearFlags.Depth, 1.0f, 0);
            Shaders.Apply(context);
            context.Rasterizer.State = rasterizerState;
            context.OutputMerger.SetBlendState(blendState);
            context.OutputMerger.SetDepthStencilState(depthStencilState);
            context.PixelShader.SetSampler(0, samplerState);
            ratio = (float)form.ClientSize.Width / (float)form.ClientSize.Height;
            projection = Matrix.PerspectiveFovRH(3.14F / 3.0F, ratio, 0.01f, 1000);
            view = Matrix.Invert(head);
            world = Matrix.Scaling(0.5f) * Matrix.Translation(0, 1.0f, 0);
            worldViewProjection = world * view * projection;
            context.UpdateSubresource(ref worldViewProjection, worldViewProjectionBuffer);
            context.VertexShader.SetConstantBuffer(0, worldViewProjectionBuffer);
            //Shapes.Cube.Begin(context);
            //Shapes.Cube.Draw(context);
            Shapes.Sphere.Begin(context);
            Shapes.Sphere.Draw(context);
            // Draw Controllers
            context.InputAssembler.InputLayout = controllerLayout;
            context.InputAssembler.PrimitiveTopology = PrimitiveTopology.TriangleList;
            context.VertexShader.Set(controllerVertexShader);
            context.PixelShader.Set(controllerPixelShader);
            context.GeometryShader.Set(null);
            context.DomainShader.Set(null);
            context.HullShader.Set(null);
            context.PixelShader.SetSampler(0, samplerState);
            foreach (var controller in controllers)
            {
                context.InputAssembler.SetVertexBuffers(0, controllerVertexBufferBindings[controller]);
                context.InputAssembler.SetIndexBuffer(controllerIndexBuffers[controller], Format.R16_UInt, 0);
                Convert(ref currentPoses[controller].mDeviceToAbsoluteTracking, ref world);
                worldViewProjection = world * view * projection;
                context.UpdateSubresource(ref worldViewProjection, worldViewProjectionBuffer);
                context.VertexShader.SetConstantBuffer(0, worldViewProjectionBuffer);
                context.DrawIndexed((int)controllerModels[controller].unTriangleCount * 3 * 4, 0, 0);
            }
            // Show Backbuffer
            swapChain.Present(0, PresentFlags.None);
        });
        audio.Dispose();
    }
}
/// <summary>Starts playback on the source voice, if one has been created.</summary>
public override void Play()
{
    _voice?.Start();
}
/// <summary>Resumes playback by starting the underlying source voice.</summary>
public void Resume()
{
    _sourceVoice.Start();
}
/// <summary>
/// Decodes the Ogg Vorbis track at index <paramref name="playPointer"/> and streams it
/// through the shared <c>sourceVoice</c> on a background thread. When several file names
/// are configured, tracks are chained: this method recurses onto the next index when the
/// current track finishes, and only the final track honors looping.
/// </summary>
/// <param name="playPointer">Index into <c>fileNames</c> of the track to decode.</param>
/// <param name="loop">Whether playback should loop; overridden for multi-track playlists (see below).</param>
private void process(int playPointer, bool loop)
{
    bool noMoreData = false;
    VorbisReader vorbis = new VorbisReader(fileNames[playPointer]);
    // In a playlist, every track except the last chains forward instead of looping;
    // the last track always loops so the playlist as a whole repeats its tail.
    if (fileNames.Length > 1)
    {
        if (playPointer < fileNames.Length - 1)
        {
            loop = false;
        }
        else
        {
            loop = true;
        }
    }
    // One decode chunk = 1/5th of a second of float samples across all channels.
    float[] outBuffer = new float[vorbis.Channels * vorbis.SampleRate / 5];
    // If this is a consecutive track, we've already initialized the sourceVoice so we don't need to do it again.
    // We can just fill the already-playing voice with data from the new track.
    if (playPointer == 0)
    {
        WaveFormat waveFormat = new WaveFormat(vorbis.SampleRate, bitsPerSample, vorbis.Channels);
        sourceVoice = new SourceVoice(device, waveFormat, false);
    }
    // Scale factor from normalized float samples [-1, 1] to signed 16-bit PCM.
    const int rescaleFactor = 32767;
    // Decodes up to `howMany` chunks of PCM and returns them as DataStreams.
    // Sets noMoreData once the reader runs dry so later calls return immediately.
    Func<int, List<DataStream>> getAtLeast = howMany =>
    {
        List<DataStream> samples = new List<DataStream>();
        if (noMoreData)
        {
            return (samples);
        }
        int PcmBytes = 0;
        int howManySoFar = 0;
        // NOTE(review): ReadSamples returns a sample count, not a byte count —
        // the PcmBytes name is historical.
        while ((PcmBytes = vorbis.ReadSamples(outBuffer, 0, outBuffer.Length)) > 0)
        {
            short[] intData = new short[PcmBytes];
            byte[] data = new byte[PcmBytes * 2];
            // Convert each float sample to little-endian 16-bit PCM.
            for (int index = 0; index < PcmBytes; index++)
            {
                intData[index] = (short)(outBuffer[index] * rescaleFactor);
                byte[] b = BitConverter.GetBytes(intData[index]);
                b.CopyTo(data, index * 2);
            }
            samples.Add(DataStream.Create<byte>(data, true, false));
            if (++howManySoFar == howMany)
            {
                break;
            }
        }
        if (howManySoFar < howMany)
        {
            noMoreData = true;
        }
        return (samples);
    };
    // Wraps raw PCM DataStreams in XAudio2 AudioBuffers.
    Func<List<DataStream>, List<AudioBuffer>> convertToAudioBuffers = dataStreams =>
    {
        List<AudioBuffer> audioBuffers = new List<AudioBuffer>();
        foreach (DataStream s in dataStreams)
        {
            audioBuffers.Add(new AudioBuffer { Stream = s, Flags = BufferFlags.None, AudioBytes = (int)s.Length });
        }
        return (audioBuffers);
    };
    // Queues the given buffers on the shared source voice.
    Action<List<AudioBuffer>> submitToSourceVoice = (audioBuffers) =>
    {
        foreach (AudioBuffer a in audioBuffers)
        {
            sourceVoice.SubmitSourceBuffer(a, null);
        }
    };
    // Streaming pump: keeps the voice topped up with decoded buffers until the data
    // runs out (or stopNow is set), then either chains to the next track or shuts down.
    new Thread(() =>
    {
        VoiceState state;
        List<DataStream> streams = getAtLeast(minimumNumberOfBuffers);
        List<AudioBuffer> buffers = convertToAudioBuffers(streams);
        memories.AddRange(buffers);
        submitToSourceVoice(buffers);
        // If this isn't the first consecutive track, we've already started playing this sourceVoice and are just filling it with data from the new track.
        if (playPointer == 0)
        {
            sourceVoice.Start();
        }
        started = true;
        while (true)
        {
            if (stopNow)
            {
                break;
            }
            state = sourceVoice.State;
            if (state.BuffersQueued < minimumNumberOfBuffers)
            {
                // Fill the source with more samples since we're running low.
                List<DataStream> moreStreams = getAtLeast(minimumNumberOfBuffers);
                if (moreStreams.Count < minimumNumberOfBuffers && loop)
                {
                    // Looping: rewind the decoder and allow getAtLeast to produce again.
                    vorbis.DecodedPosition = 0;
                    noMoreData = false;
                }
                if (state.BuffersQueued == 0 && moreStreams.Count == 0)
                {
                    break; // Nothing remaining to fill the source with and we've played everything.
                }
                List<AudioBuffer> moreBuffers = convertToAudioBuffers(moreStreams);
                // The buffers that are already played can now be removed.
                for (int i = 0; i < memories.Count - state.BuffersQueued; i++)
                {
                    memories[i].Stream.Close();
                }
                memories.RemoveRange(0, memories.Count - state.BuffersQueued);
                memories.AddRange(moreBuffers);
                submitToSourceVoice(moreBuffers);
            }
            Thread.Sleep(10);
        }
        // If we're transitioning to the next track and haven't received a stop signal.
        if (!stopNow && playPointer < (fileNames.Length - 1))
        {
            process(playPointer + 1, loop);
            vorbis.Dispose();
            return;
        }
        // Final track (or stop requested): tear everything down.
        sourceVoice.Stop();
        sourceVoice.FlushSourceBuffers();
        for (int i = 0; i < memories.Count; i++)
        {
            memories[i].Stream.Close();
        }
        memories.Clear();
        vorbis.Dispose();
        m_status = Status.stopped;
    }).Start();
}
/// <summary>
/// Tears down the current source voice and write buffer (if any) and rebuilds them
/// from scratch, then starts the fresh voice with an empty memory-backed buffer.
/// </summary>
public void Reset()
{
    // Dispose the previous voice before replacing it.
    if (sVoice != null)
    {
        sVoice.Stop();
        sVoice.Dispose();
    }
    // Closing the writer also releases the MemoryStream it wraps.
    if (audioWriter != null)
        audioWriter.Close();
    if (audioBuffer != null)
        audioBuffer.Dispose();
    sVoice = new SourceVoice(device, audioFormat, VoiceFlags.None);
    audioBuffer = new AudioBuffer();
    audioBuffer.AudioData = new MemoryStream();
    audioWriter = new BinaryWriter(audioBuffer.AudioData);
    // NOTE(review): volume is applied to mVoice here, not the newly created sVoice —
    // presumably mVoice is the mastering voice; confirm this is intentional.
    mVoice.Volume = volume;
    sVoice.Start();
}
/// <summary>
/// Creates a fresh source voice for this sound, submits its buffer, and starts playback,
/// logging each step and the total elapsed time to the console.
/// </summary>
public void Play()
{
    DateTime start = DateTime.Now;
    Console.WriteLine("Play() start");
    sourceVoice = new SourceVoice(Program.audioDevice, Format);
    Console.WriteLine("Create source voice");
    // Hook completion events before submitting so none are missed.
    sourceVoice.BufferEnd += new EventHandler<ContextEventArgs>(sourceVoice_BufferEnd);
    sourceVoice.StreamEnd += new EventHandler(sourceVoice_StreamEnd);
    sourceVoice.SubmitSourceBuffer(Buffer);
    Console.WriteLine("Submitted source buffers");
    sourceVoice.Start();
    Console.WriteLine("Started source voice");
    // NOTE(review): this Channel is created but never stored or returned —
    // it is discarded at method end; confirm whether its constructor has side effects
    // or whether this line is dead code.
    var channel = new Channel { SourceVoice = sourceVoice };
    DateTime end = DateTime.Now;
    Console.WriteLine("Play() end (" + (end - start).TotalMilliseconds + " ms)");
}
/// <summary>
/// Rebuilds the XAudio2 buffer and source voice from the current raw audio buffer:
/// wraps the raw bytes in a looping DataStream-backed buffer, fills in the PCM wave
/// format (stereo, current BitsPerSample/Frequency), then creates and starts the voice.
/// </summary>
protected override void EndBufferChange()
{
    if (AudioBuffer != null)
    {
        if (audioBuffer == null)
            audioBuffer = new XAudioBuffer();
        audioBuffer.AudioData = new DataStream(AudioBuffer.RawBuffer, true, true);
        audioBuffer.AudioBytes = (int)audioBuffer.AudioData.Length;
        // Loop over the whole buffer, measured in 16-bit frames.
        audioBuffer.LoopLength = AudioBuffer.RawBuffer.Length / 2;
        audioBuffer.LoopCount = XAudio2.LoopInfinite;
        // PCM format fields, written out step by step (the original packed these
        // into one chained assignment; evaluation order is preserved here).
        waveFormat.BitsPerSample = (short)BitsPerSample;
        waveFormat.Channels = 2;
        waveFormat.BlockAlignment = (short)(waveFormat.BitsPerSample / 8 * waveFormat.Channels);
        waveFormat.SamplesPerSecond = Frequency;
        waveFormat.AverageBytesPerSecond = waveFormat.BlockAlignment * waveFormat.SamplesPerSecond;
        sourceVoice = new SourceVoice(xAudio, waveFormat);
        sourceVoice.SubmitSourceBuffer(audioBuffer);
        sourceVoice.Start();
    }
}
/// <summary>
/// DotRocket/SlimDX demo entry point: creates a D3D device and swap chain, starts
/// xWMA audio playback through XAudio2, connects to a Rocket sync tracker, and runs
/// a render loop that clears the backbuffer with colors driven by the tracker rows.
/// </summary>
static void Main()
{
    var form = new RenderForm("DotRocket/SlimDX example");
    var description = new SwapChainDescription()
    {
        BufferCount = 1,
        Usage = Usage.RenderTargetOutput,
        OutputHandle = form.Handle,
        IsWindowed = true,
        ModeDescription = new ModeDescription(0, 0, new Rational(60, 1), Format.R8G8B8A8_UNorm),
        SampleDescription = new SampleDescription(1, 0),
        Flags = SwapChainFlags.AllowModeSwitch,
        SwapEffect = SwapEffect.Discard
    };

    // Setup rendering
    Device device;
    SwapChain swapChain;
    Device.CreateWithSwapChain(DriverType.Hardware, DeviceCreationFlags.None, description, out device, out swapChain);
    RenderTargetView renderTarget;
    using (var resource = Resource.FromSwapChain<Texture2D>(swapChain, 0))
        renderTarget = new RenderTargetView(device, resource);
    var context = device.ImmediateContext;
    var viewport = new Viewport(0.0f, 0.0f, form.ClientSize.Width, form.ClientSize.Height);
    context.OutputMerger.SetTargets(renderTarget);
    context.Rasterizer.SetViewports(viewport);

    // Prevent alt+enter (broken on WinForms)
    using (var factory = swapChain.GetParent<Factory>())
        factory.SetWindowAssociation(form.Handle, WindowAssociationFlags.IgnoreAltEnter);

    // Setup audio-streaming: the whole xWMA file is submitted as one buffer.
    XAudio2 xaudio2 = new XAudio2();
    stream = new XWMAStream("tune.xwma");
    MasteringVoice masteringVoice = new MasteringVoice(xaudio2);
    sourceVoice = new SourceVoice(xaudio2, stream.Format);
    audioBuffer = new AudioBuffer();
    audioBuffer.AudioData = stream;
    audioBuffer.AudioBytes = (int)stream.Length;
    audioBuffer.Flags = BufferFlags.EndOfStream;
    sourceVoice.SubmitSourceBuffer(audioBuffer, stream.DecodedPacketsInfo);
    sourceVoice.Start();

    // Setup DotRocket: live tracker connection in DEBUG, baked data in release.
#if DEBUG
    DotRocket.Device rocket = new DotRocket.ClientDevice("sync");
    rocket.OnPause += Pause;
    rocket.OnSetRow += SetRow;
    rocket.OnIsPlaying += IsPlaying;
    rocket.Connect("localhost", 1338);
#else
    DotRocket.Device rocket = new DotRocket.PlayerDevice("sync");
#endif

    // Get our belowed tracks!
    DotRocket.Track clear_r = rocket.GetTrack("clear.r");
    DotRocket.Track clear_g = rocket.GetTrack("clear.g");
    DotRocket.Track clear_b = rocket.GetTrack("clear.b");

    MessagePump.Run(form, () =>
    {
        // Hammertime. Current tracker row derived from audio playback position.
        double row = ((double)(sourceVoice.State.SamplesPlayed - samplesBias) / stream.Format.SamplesPerSecond) * rowRate;

        // Paint some stuff.
        rocket.Update((int)System.Math.Floor(row));
        context.ClearRenderTargetView(renderTarget, new Color4(
            clear_r.GetValue(row),
            clear_g.GetValue(row),
            clear_b.GetValue(row)));
        swapChain.Present(0, PresentFlags.None);
    });

    // clean up all resources
    // anything we missed will show up in the debug output
    renderTarget.Dispose();
    swapChain.Dispose();
    device.Dispose();
}
/// <summary>
/// Probes for the default audio render device (first via the legacy XAudio2 2.7
/// enumeration, falling back to MMDevice enumeration), then creates the XAudio2
/// engine, mastering voice, source voice, and the unmanaged mix buffers used for
/// double-buffered streaming.
/// </summary>
void OpenDevice()
{
    try
    {
        // Device enumeration was removed in XAudio2 2.8, but keeping this code path is
        // still recommended; prefer XAudio2 itself when available.
        // See https://docs.microsoft.com/en-us/windows/win32/xaudio2/xaudio2-versions
        _xaudio2 = new XAudio2(XAudio2Version.Version27);
        for (int i = 0; i < _xaudio2.DeviceCount; i++)
        {
            DeviceDetails device = _xaudio2.GetDeviceDetails(i);
            if (device.Role == DeviceRole.GlobalDefaultDevice)
            {
                _dev = device.DeviceID;
                break;
            }
        }
        _xaudio2.Dispose();
    }
    catch
    {
        // XAudio2 2.7 unavailable: fall back to the Core Audio endpoint enumerator.
        var enumerator = new MMDeviceEnumerator();
        var device = enumerator.GetDefaultAudioEndpoint(DataFlow.Render, Role.Console);
        _dev = device?.ID;
    }
    Debug.Assert(_xaudio2 == null || _xaudio2.IsDisposed); // otherwise this could leak memory
    if (_dev == null)
    {
        // Passing NULL as szDeviceId to CreateMasteringVoice would make XAudio2 pick the
        // global default audio device, but since we already tried to fetch the device ID
        // ourselves, just throw here to avoid surprises.
        throw new NotSupportedException("没有扬声器");
    }
    _xaudio2 = new XAudio2(XAudio2Flags.None, ProcessorSpecifier.DefaultProcessor);
    /*
     * We use XAUDIO2_DEFAULT_CHANNELS instead of _channels. On
     * Xbox360, this means 5.1 output, but on Windows, it means "figure out
     * what the system has." It might be preferable to let XAudio2 blast
     * stereo output to appropriate surround sound configurations
     * instead of clamping to 2 channels, even though we'll configure the
     * Source Voice for whatever number of channels you supply.
     */
    _masteringVoice = new MasteringVoice(_xaudio2, XAUDIO2_DEFAULT_CHANNELS, _sampleRate, _dev);
    _waveFormat = new WaveFormatEx(SDL_AudioFormat.F32, _channels, _sampleRate);
    _sourceVoice = new SourceVoice(_xaudio2
        , _waveFormat
        , VoiceFlags.NoSampleRateConversion | VoiceFlags.NoPitch
        , 1.0f //, Callbacks.Instance);
        , true);
    _sourceVoice.BufferEnd += OnBufferEnd;
    _sourceVoice.VoiceError += OnVoiceError;
    _bufferSize = _waveFormat.BlockAlign * _samples;
    //_hidden.handle = GCHandle.Alloc(this);
    //_hidden.device = GCHandle.ToIntPtr(_hidden.handle);
    _hidden.semaphore = new Semaphore(1, 1);
    // We feed a Source, it feeds the Mastering, which feeds the device.
    // Two halves of one unmanaged allocation are used as a ping-pong mix buffer.
    _hidden.mixlen = _bufferSize;
    _hidden.mixbuf = (byte *)Marshal.AllocHGlobal(2 * _hidden.mixlen);
    _hidden.nextbuf = _hidden.mixbuf;
    _hidden.mixlen = _bufferSize;
    Native.SetMemory(_hidden.mixbuf, _waveFormat.Silence, (size_t)(2 * _hidden.mixlen));
    // Pre-allocate buffers
    _hidden.audioBuffersRing = new AudioBuffer[2];
    for (int i = 0; i < _hidden.audioBuffersRing.Length; i++)
    {
        _hidden.audioBuffersRing[i] = new AudioBuffer();
    }
    // Start everything playing!
    _xaudio2.StartEngine();
    _sourceVoice.Start(XAUDIO2_COMMIT_NOW);
}
/// <summary>
/// Marks this instance as playing, then starts the source voice.
/// </summary>
private void Play()
{
    _isPlaying = true;
    _voice.Start();
}
/// <summary>
/// Plays this sound. The wave data is loaded from the Assets folder on first use and
/// cached in the sound manager's dictionary; subsequent plays reuse the cached stream.
/// Honors the Looping and Volume properties of this instance.
/// </summary>
public void Play()
{
    WaveStream waveData;
    if (soundManager.SoundDictionary.ContainsKey(filename))
    {
        // Already loaded once — reuse the cached wave data.
        waveData = soundManager.SoundDictionary[filename];
    }
    else
    {
        // First playback: read the file, wrap it, and cache it in the sound library.
        var file = System.IO.File.OpenRead(Path.Combine("Assets", filename));
        waveData = new WaveStream(file);
        file.Close();
        soundManager.SoundDictionary[filename] = waveData;
    }
    WaveFormat format = waveData.Format;

    buffer = new AudioBuffer();
    buffer.AudioData = waveData;
    buffer.AudioBytes = (int)waveData.Length;
    buffer.Flags = BufferFlags.EndOfStream;
    // Rewind in case the cached stream was consumed by an earlier playback.
    buffer.AudioData.Position = 0;

    if (Looping)
    {
        buffer.LoopCount = XAudio2.LoopInfinite;
        buffer.LoopLength = 0;
    }

    currentlyPlaying = new SourceVoice(soundManager.device, format);
    currentlyPlaying.Volume = this.Volume;
    currentlyPlaying.BufferEnd += (s, e) => playing = false;
    currentlyPlaying.Start();
    currentlyPlaying.SubmitSourceBuffer(buffer);
    playing = true;
}
/// <summary>
/// Starts (or resumes) the wrapped XAudio2 source voice.
/// </summary>
public void Start()
{
    sourceVoice.Start();
}
/// <summary>
/// Diagnostic helper: creates a voice for the given sound and prints its output-matrix
/// info at four points — before starting, after starting, after a volume change, and
/// after 300 ms of playback — to observe how the matrix behaves over the voice lifecycle.
/// NOTE(review): the voice created here is never stopped or disposed; acceptable for a
/// throwaway diagnostic, but confirm this is not used on a hot path.
/// </summary>
private void TestOutputMatrixBehaviour(Sound sound)
{
    int inputChannels = sound.Format.Channels;
    int outputChannels = audioDevice.GetDeviceDetails(0).OutputFormat.Channels;
    SourceVoice sourceVoice = new SourceVoice(audioDevice, sound.Format);
    sourceVoice.SubmitSourceBuffer(sound.Buffer);
    Console.WriteLine("Pre: ");
    PrintVoiceInfo(inputChannels, outputChannels, sourceVoice);
    sourceVoice.Start();
    Console.WriteLine("Started: ");
    PrintVoiceInfo(inputChannels, outputChannels, sourceVoice);
    sourceVoice.Volume = 0.7f;
    Console.WriteLine("Volume set: ");
    PrintVoiceInfo(inputChannels, outputChannels, sourceVoice);
    System.Threading.Thread.Sleep(300);
    PrintVoiceInfo(inputChannels, outputChannels, sourceVoice);
}
/// <summary>
/// Software-synth entry point: opens every MIDI input device, acquires the DirectInput
/// keyboard, sets up a mono 16-bit/44.1 kHz XAudio2 voice with two ping-pong buffers,
/// then loops forever generating audio — each iteration waits for a buffer to finish,
/// reads the keyboard, applies attack/release envelopes per note, mixes sine+sawtooth
/// oscillators, and submits the rendered 1024-byte block.
/// </summary>
static void Main(string[] args)
{
    Keys = BassKeys;
    KeyNotes = BassKeyNotes;
    KeyOctaves = BassKeyOctaves;

    // Open and start every attached MIDI input device, routing events to MidiProc.
    var devices = Midi.midiInGetNumDevs();
    var deviceHandle = IntPtr.Zero;
    var deviceCaps = new Midi.MidiInCaps();
    for (var device = 0U; device < devices; device++)
    {
        Midi.midiInOpen(out deviceHandle, device, MidiProc, IntPtr.Zero, Midi.CALLBACK_FUNCTION);
        Midi.midiInGetDevCaps(deviceHandle, ref deviceCaps, (uint)Marshal.SizeOf(deviceCaps));
        Console.WriteLine(deviceCaps.name);
        Midi.midiInStart(deviceHandle);
    }

    var input = new DirectInput();
    var keyboard = new Keyboard(input);
    keyboard.Acquire();

    // Mono 16-bit 44.1 kHz output; BufferEnd signals the render loop to produce more.
    var audio = new XAudio2();
    audio.StartEngine();
    var master = new MasteringVoice(audio);
    var format = new WaveFormat(44100, 16, 1);
    var source = new SourceVoice(audio, format);
    BufferEnd = new AutoResetEvent(false);
    source.BufferEnd += Source_BufferEnd;
    source.Start();

    // Two pre-submitted 1024-byte buffers, refilled alternately (ping-pong).
    var buffers = new AudioBuffer[2];
    var pointers = new DataPointer[buffers.Length];
    for (int buffer = 0; buffer < buffers.Length; buffer++)
    {
        pointers[buffer] = new DataPointer(Utilities.AllocateClearedMemory(1024), 1024);
        buffers[buffer] = new AudioBuffer(pointers[buffer]);
        source.SubmitSourceBuffer(buffers[buffer], null);
    }

    var index = 0;
    var data = new byte[1024];
    var time = 0.0;
    var keyboardState = new KeyboardState();
    while (true)
    {
        // Block until XAudio2 finishes a buffer, then render its replacement.
        BufferEnd.WaitOne();
        keyboard.GetCurrentState(ref keyboardState);
        for (int x = 0; x < data.Length; x += 2)
        {
            var delta = 1.0 / format.SampleRate;
            var value = 0d;
            var count = 0;
            // Notes 0-23 are reserved for MIDI input; clear and re-derive 24+ from the keyboard.
            for (var note = 24; note < MidiNotes.Length; note++)
            {
                MidiNotes[note] = false;
            }
            for (var key = 0; key < Keys.Length; key++)
            {
                var noteIndex = 24 + (KeyOctaves[key] * 12) + KeyNotes[key];
                if (keyboardState.IsPressed(Keys[key]))
                {
                    MidiNotes[noteIndex] = true;
                    MidiVelocity[noteIndex] = 1.0f;
                }
            }
            // Per-sample attack/release envelope: ramp toward 1 while held, toward 0 when released.
            for (var note = 24; note < MidiNotes.Length; note++)
            {
                if (MidiNotes[note])
                {
                    if (NoteVelocity[note] >= 1.0 - (Attack * delta))
                    {
                        NoteVelocity[note] = 1.0f;
                    }
                    else
                    {
                        NoteVelocity[note] += (Attack * delta);
                    }
                }
                else
                {
                    if (NoteVelocity[note] <= (Release * delta))
                    {
                        NoteVelocity[note] = 0.0f;
                    }
                    else
                    {
                        NoteVelocity[note] -= (Release * delta);
                    }
                }
            }
            // Mix every audible note: sine + sawtooth oscillators scaled by both velocities.
            for (var octave = 0; octave < 8; octave++)
            {
                for (var note = 0; note < 12; note++)
                {
                    var noteIndex = 24 + (octave * 12) + note;
                    if (NoteVelocity[noteIndex] != 0.0)
                    {
                        value += Waves.Sine(time, Notes[note] * MidiOctaves[octave], 0.0) * MidiVelocity[noteIndex] * NoteVelocity[noteIndex];
                        //value += Waves.Square(time, Notes[note] * MidiOctaves[octave], 0.0) * MidiVelocity[noteIndex] * NoteVelocity[noteIndex];
                        //value += Waves.Triangle(time, Notes[note] * MidiOctaves[octave], 0.0) * MidiVelocity[noteIndex] * NoteVelocity[noteIndex];
                        value += Waves.Sawtooth(time, Notes[note] * MidiOctaves[octave], 0.0) * MidiVelocity[noteIndex] * NoteVelocity[noteIndex];
                        count++;
                    }
                }
            }
            // Scale the mix down (headroom for ~10 voices) and write little-endian 16-bit PCM.
            var value2 = (short)((value / 10.0) * short.MaxValue);
            data[x] = (byte)(value2 & 0xff);
            data[x + 1] = (byte)(value2 >> 8);
            time += delta;
        }
        pointers[index].CopyFrom(data);
        source.SubmitSourceBuffer(buffers[index], null);
        index++;
        if (index == buffers.Length)
        {
            index = 0;
        }
    }
}
/// <summary>
/// Starts the source voice and records that playback is in progress.
/// </summary>
public void Start()
{
    m_voice.Start();
    m_isPlaying = true;
}
/// <summary>
/// Plays the system sound scheme entry named by <c>Name</c>, with per-screen channel
/// balances: finds the screen that overlaps <paramref name="on"/> the most and, on
/// triple-monitor setups with a configured balance, applies that balance to the voice.
/// The voice, buffer and stream dispose themselves (via the form's message loop) when
/// the buffer finishes.
/// </summary>
/// <param name="on">Form used both to pick the best screen and to marshal cleanup onto the UI thread.</param>
public void Play( Form on )
{
    var screens = Screen.AllScreens;
    var screens_left = screens.Min( screen => screen.Bounds.Left );
    var screens_right = screens.Max( screen => screen.Bounds.Right );
    var screens_width = screens_right-screens_left;
    // The screen sharing the largest intersection area with the form wins.
    var bestScreen = screens.OrderByDescending( screen => {
        var area = screen.Bounds;
        area.Intersect( on.Bounds );
        return area.Width*area.Height;
    }).First();
    var balances = new[]{1.5f,1.5f};
    if ( screens.Length==3 && DisplayBalances.ContainsKey(bestScreen.DeviceName) )
        balances = DisplayBalances[bestScreen.DeviceName];
    // Resolve the .wav path for this sound event from the current user's sound scheme.
    var path = Registry.CurrentUser.OpenSubKey(@"AppEvents\Schemes\Apps\.Default\"+Name+@"\.Current").GetValue(null) as string;
    var stream = new WaveStream(path);
    var buffer = new AudioBuffer() { AudioBytes=(int)stream.Length, AudioData=stream, Flags=BufferFlags.EndOfStream };
    var voice = new SourceVoice( XAudio2, stream.Format );
    voice.SubmitSourceBuffer( buffer );
    voice.SetChannelVolumes( balances.Length, balances );
    // Self-cleanup once playback completes, marshalled to the UI thread.
    voice.BufferEnd += (sender,ctx) => {
        try {
            on.BeginInvoke(new Action(()=>{
                voice.Dispose();
                buffer.Dispose();
                stream.Dispose();
            }));
        } catch ( InvalidOperationException ) {
            // herp derp on must be disposed/gone
        }
    };
    voice.Start();
}
/// <summary>
/// Clears the mute flag and starts the source voice.
/// </summary>
public void Play()
{
    isMute = false;
    sourceVoice.Start();
}
/// <summary>
/// DotRocket demo entry point: creates a D3D device and swap chain, starts xWMA audio
/// playback through XAudio2, connects to a Rocket sync tracker (live in DEBUG, baked
/// data otherwise), and runs a render loop that clears the backbuffer with colors
/// driven by the tracker rows derived from the audio position.
/// </summary>
static void Main()
{
    var form = new RenderForm("DotRocket/SlimDX example");
    var description = new SwapChainDescription()
    {
        BufferCount = 1,
        Usage = Usage.RenderTargetOutput,
        OutputHandle = form.Handle,
        IsWindowed = true,
        ModeDescription = new ModeDescription(0, 0, new Rational(60, 1), Format.R8G8B8A8_UNorm),
        SampleDescription = new SampleDescription(1, 0),
        Flags = SwapChainFlags.AllowModeSwitch,
        SwapEffect = SwapEffect.Discard
    };

    // Setup rendering
    Device device;
    SwapChain swapChain;
    Device.CreateWithSwapChain(DriverType.Hardware, DeviceCreationFlags.None, description, out device, out swapChain);
    RenderTargetView renderTarget;
    using (var resource = Resource.FromSwapChain<Texture2D>(swapChain, 0))
        renderTarget = new RenderTargetView(device, resource);
    var context = device.ImmediateContext;
    var viewport = new Viewport(0.0f, 0.0f, form.ClientSize.Width, form.ClientSize.Height);
    context.OutputMerger.SetTargets(renderTarget);
    context.Rasterizer.SetViewports(viewport);

    // Prevent alt+enter (broken on WinForms)
    using (var factory = swapChain.GetParent<Factory>())
        factory.SetWindowAssociation(form.Handle, WindowAssociationFlags.IgnoreAltEnter);

    // Setup audio-streaming: the whole xWMA file is submitted as a single buffer.
    XAudio2 xaudio2 = new XAudio2();
    stream = new XWMAStream("tune.xwma");
    MasteringVoice masteringVoice = new MasteringVoice(xaudio2);
    sourceVoice = new SourceVoice(xaudio2, stream.Format);
    audioBuffer = new AudioBuffer();
    audioBuffer.AudioData = stream;
    audioBuffer.AudioBytes = (int)stream.Length;
    audioBuffer.Flags = BufferFlags.EndOfStream;
    sourceVoice.SubmitSourceBuffer(audioBuffer, stream.DecodedPacketsInfo);
    sourceVoice.Start();

    // Setup DotRocket
#if DEBUG
    DotRocket.Device rocket = new DotRocket.ClientDevice("sync");
    rocket.OnPause += Pause;
    rocket.OnSetRow += SetRow;
    rocket.OnIsPlaying += IsPlaying;
    rocket.Connect("localhost", 1338);
#else
    DotRocket.Device rocket = new DotRocket.PlayerDevice("sync");
#endif

    // Get our belowed tracks!
    DotRocket.Track clear_r = rocket.GetTrack("clear.r");
    DotRocket.Track clear_g = rocket.GetTrack("clear.g");
    DotRocket.Track clear_b = rocket.GetTrack("clear.b");

    MessagePump.Run(form, () =>
    {
        // Hammertime. Current tracker row derived from audio playback position.
        double row = ((double)(sourceVoice.State.SamplesPlayed - samplesBias) / stream.Format.SamplesPerSecond) * rowRate;

        // Paint some stuff.
        rocket.Update((int)System.Math.Floor(row));
        context.ClearRenderTargetView(renderTarget, new Color4(
            clear_r.GetValue(row),
            clear_g.GetValue(row),
            clear_b.GetValue(row)));
        swapChain.Present(0, PresentFlags.None);
    });

    // clean up all resources
    // anything we missed will show up in the debug output
    renderTarget.Dispose();
    swapChain.Dispose();
    device.Dispose();
}
/// <summary>
/// SharpDX X3DAudio sample. Plays a generated sound rotating around the listener.
/// Generates 60 seconds of a 220 Hz cosine tone into one buffer, then for ~60 seconds
/// rotates a virtual emitter around a fixed listener, recomputing the 1-in/2-out
/// output matrix and Doppler ratio every 50 ms.
/// </summary>
static void Main(string[] args)
{
    var xaudio2 = new XAudio2();
    using (var masteringVoice = new MasteringVoice(xaudio2))
    {
        // Instantiate X3DAudio
        var x3dAudio = new X3DAudio(Speakers.Stereo);
        var emitter = new Emitter
        {
            ChannelCount = 1,
            CurveDistanceScaler = float.MinValue,
            OrientFront = new Vector3(0, 0, 1),
            OrientTop = new Vector3(0, 1, 0),
            Position = new Vector3(0, 0, 0),
            Velocity = new Vector3(0, 0, 0)
        };
        var listener = new Listener
        {
            OrientFront = new Vector3(0, 0, 1),
            OrientTop = new Vector3(0, 1, 0),
            Position = new Vector3(0, 0, 0),
            Velocity = new Vector3(0, 0, 0)
        };

        // Mono 32-bit float source; one buffer holding 60 s of audio.
        var waveFormat = new WaveFormat(44100, 32, 1);
        var sourceVoice = new SourceVoice(xaudio2, waveFormat);
        int bufferSize = waveFormat.ConvertLatencyToByteSize(60000);
        var dataStream = new DataStream(bufferSize, true, true);
        int numberOfSamples = bufferSize / waveFormat.BlockAlign;
        for (int i = 0; i < numberOfSamples; i++)
        {
            // 220 Hz cosine at half amplitude.
            float value = (float)(Math.Cos(2 * Math.PI * 220.0 * i / waveFormat.SampleRate) * 0.5);
            dataStream.Write(value);
        }
        dataStream.Position = 0;
        var audioBuffer = new AudioBuffer { Stream = dataStream, Flags = BufferFlags.EndOfStream, AudioBytes = bufferSize };

        //var reverb = new Reverb();
        //var effectDescriptor = new EffectDescriptor(reverb);
        //sourceVoice.SetEffectChain(effectDescriptor);
        //sourceVoice.EnableEffect(0);

        sourceVoice.SubmitSourceBuffer(audioBuffer, null);
        sourceVoice.Start();

        Console.WriteLine("Play a sound rotating around the listener");
        for (int i = 0; i < 1200; i++)
        {
            // Rotates the emitter around the listener at radius 1000.
            var rotateEmitter = Matrix.RotationY(i / 5.0f);
            var newPosition = Vector3.Transform(new Vector3(0, 0, 1000), rotateEmitter);
            var newPositionVector3 = new Vector3(newPosition.X, newPosition.Y, newPosition.Z);
            // Velocity derived from position delta over the 50 ms step (for Doppler).
            emitter.Velocity = (newPositionVector3 - emitter.Position) / 0.05f;
            emitter.Position = newPositionVector3;

            // Calculate X3DAudio settings
            var dspSettings = x3dAudio.Calculate(listener, emitter, CalculateFlags.Matrix | CalculateFlags.Doppler, 1, 2);

            // Modify XAudio2 source voice settings
            sourceVoice.SetOutputMatrix(1, 2, dspSettings.MatrixCoefficients);
            sourceVoice.SetFrequencyRatio(dspSettings.DopplerFactor);

            // Wait for 50ms
            Thread.Sleep(50);
        }
    }
}
/// <summary>
/// Decodes the Ogg Vorbis source (file or in-memory stream) into PCM on the calling
/// thread and streams it through an XAudio2 source voice using a small ring of
/// buffers, honoring <c>loop</c> and the <c>stopNow</c> flag. When playback finishes
/// (or is stopped) all voices, buffers and streams are released and <c>stopEvent</c>
/// is raised so the caller knows the buffer has fully shut down.
/// </summary>
private void process()
{
    lock (lockObject)
        preparing = true;
    byte[] outBuffer = new Byte[4096];
    // Source is either a file on disk (fileNames[playPointer]) or an in-memory byte stream.
    if (byteStream == null)
    {
        oggFile = new OggVorbisFileStream(fileNames[playPointer]);
    }
    else
    {
        oggStream = new OggVorbisEncodedStream(byteStream);
    }
    MemoryStream PcmStream = null;
    int PcmBytes = -1;
    PcmStream = new MemoryStream();
    WaveFormat waveFormat = new WaveFormat();
    AudioBuffer[] theBuffers = new AudioBuffer[maxBuffers];
    int nextBuffer = 0;
    bool firstLoop = true;
    bool startedSourceVoice = false;
    // Decode the Ogg Vorbis data into its PCM data
    while (PcmBytes != 0)
    {
        // Get the next chunk of PCM data, pin these so the GC can't
        while (true)
        {
            PcmBytes = (oggStream == null)
                ? oggFile.Read(outBuffer, 0, outBuffer.Length)
                : oggStream.Read(outBuffer, 0, outBuffer.Length);
            if (PcmBytes == 0) //Reached the end
            {
                break;
            }
            PcmStream.Flush();
            PcmStream.Position = 0;
            PcmStream.Write(outBuffer, 0, PcmBytes);
            PcmStream.Position = 0;
            // Recycle the ring slot: release the previously submitted buffer's stream first.
            if (theBuffers[nextBuffer] != null)
            {
                theBuffers[nextBuffer].Stream.Dispose();
                theBuffers[nextBuffer] = null;
            }
            theBuffers[nextBuffer] = new AudioBuffer(SharpDX.DataStream.Create <byte>(PcmStream.ToArray(), true, false));
            theBuffers[nextBuffer].AudioBytes = PcmBytes;
            theBuffers[nextBuffer].LoopCount = 0;
            if (firstLoop)
            {
                // First chunk: now that the stream headers are parsed we know the format,
                // so create the source voice.
                VorbisInfo info = (oggStream == null) ? oggFile.Info : oggStream.Info;
                waveFormat = new WaveFormat(info.Rate, bitsPerSample, info.Channels);
                sourceVoice = new SourceVoice(device, waveFormat);
                sourceVoice.SetVolume(volume);
            } //if first time looping, create sourcevoice
            sourceVoice.SubmitSourceBuffer(theBuffers[nextBuffer], null);
            if (nextBuffer == theBuffers.Length - 1)
            {
                nextBuffer = 0;
            }
            else
            {
                nextBuffer++;
            }
            //If we're done filling the buffer for the first time
            if (!startedSourceVoice && sourceVoice.State.BuffersQueued == maxBuffers)
            {
                sourceVoice.Start();
                startedSourceVoice = true;
                lock (lockObject)
                {
                    playing = true;
                    preparing = false;
                } //lock
            }
            firstLoop = false;
            if (startedSourceVoice)
            {
                // Throttle decoding: wait until a ring slot frees up.
                while (sourceVoice.State.BuffersQueued > maxBuffers - 1)
                {
                    if (stopNow)
                    {
                        break;
                    }
                    Thread.Sleep(5);
                }
            } //if started source voice
            if (stopNow)
            {
                break;
            }
        } //while
        if (stopNow)
        {
            break;
        }
        //We don't have any more data but file could still be playing the remaining data.
        if (PcmBytes == 0)
        {
            if (!stopNow)
            {
                // Drain whatever is still queued on the voice.
                while (sourceVoice.State.BuffersQueued > 0 && !stopNow)
                {
                    Thread.Sleep(10);
                }
            } //if doesn't want to stop ogg
            if (!loop)
            {
                break; //exit the loop since we ran out of data and don't want to loop back
            }
        } //if we ran out of data
        if (PcmBytes == 0 && loop)
        {
            // Rewind the source and keep decoding from the top.
            PcmBytes = -1;
            if (oggFile != null)
            {
                oggFile.Position = 0;
            }
            if (oggStream != null)
            {
                oggStream.Position = 0;
            }
        } //if we ran out of data but want to loop back
    } //while more data
    //Done playing, or file requested stop, so clean up and tell the calling thread
    //that the buffer has stopped and been cleaned up (it doesn't know until we do).
    //Clean up the resources
    if (sourceVoice != null)
    {
        sourceVoice.ExitLoop(); //stop looping if looping
        sourceVoice.Stop();
        // FIX: Dispose/null-assignment were previously OUTSIDE this null guard and threw
        // NullReferenceException whenever the voice was never created (empty source, or
        // stopNow set before the first chunk decoded).
        sourceVoice.Dispose();
        sourceVoice = null;
    }
    if (oggFile != null)
    {
        oggFile.Close();
        oggFile = null;
    }
    outBuffer = null;
    for (int i = 0; i < theBuffers.Length; i++)
    {
        if (theBuffers[i] != null)
        {
            theBuffers[i].Stream.Dispose();
            theBuffers[i] = null;
        }
    }
    theBuffers = null;
    if (oggStream != null)
    {
        oggStream.Close();
        oggStream = null;
    }
    PcmStream.Dispose();
    PcmStream = null;
    if (stopEvent != null)
    {
        stopEvent();
    }
} //method
/// <summary>
/// Generates a PPM (pulse-position modulation) RC signal as 16-bit stereo PCM at
/// 192 kHz, initializes all eight control channels to neutral (throttle low), and
/// plays the generated frame through a fresh XAudio2 device/voice.
/// </summary>
/// <param name="win">Window handle; currently unused by this implementation.</param>
public void PlayPPM(IntPtr win)
{
    Rate = 192000; //44100 on cheapo, 96000 on AC97, 192000 on HD Audio
    // its the number of samples that exist for each second of audio
    channels = 2; // 1 = mono, 2 = stereo
    PPMSamples = (int)(0.0225 * Rate * channels); // 22 or 22.5ms in samples, rounded up
    // no. of bytes per second = channels * rate * bytes in one sample
    microsec = Rate / 10000.0; // 192 = 1ms, 19.2 = 0.1ms or 1mis @ 192khz
    PPMchannels = new Dictionary<int, double>();
    frame = new List<short>();
    Amplitude = 32760;
    /*WaveFile wFile;
    wFile = new WaveFile(channels, 16, Rate); */
    //Set channels to neutral except throttle, throttle = zero.
    PPMchannels.Add(1, 10.0); //Throttle
    PPMchannels.Add(2, 50.0); //Ailerons
    PPMchannels.Add(3, 50.0); //Stab
    PPMchannels.Add(4, 50.0); //Rudder
    PPMchannels.Add(5, 50.0);
    PPMchannels.Add(6, 50.0);
    PPMchannels.Add(7, 50.0);
    PPMchannels.Add(8, 50.0);
    byte[] data = GenPPM();
    /*wFile.SetData(data, data.Length);
    wFile.WriteFile(@"C:\Users\kang\Desktop\test.wav"); */
    // Copy the generated PPM bytes into a rewound MemoryStream for the audio buffer.
    ms = new MemoryStream();
    ms.SetLength(0);
    ms.Write(data, 0, data.Length);
    ms.Position = 0;
    // Plain 16-bit PCM format matching the generator settings above.
    wf = new WaveFormat();
    wf.FormatTag = WaveFormatTag.Pcm;
    wf.BitsPerSample = (short)16;
    wf.Channels = channels;
    wf.SamplesPerSecond = Rate;
    wf.BlockAlignment = (short)(wf.Channels * wf.BitsPerSample / 8);
    wf.AverageBytesPerSecond = wf.SamplesPerSecond * wf.BlockAlignment;
    device = new XAudio2();
    device.StartEngine();
    masteringVoice = new MasteringVoice(device);
    srcVoice = new SourceVoice(device, wf);
    buffer = new AudioBuffer();
    buffer.AudioData = ms;
    buffer.AudioBytes = (int)data.Length;
    buffer.Flags = SlimDX.XAudio2.BufferFlags.None;
    // BufferStart is used to resubmit/refresh frames as playback proceeds.
    srcVoice.BufferStart += new EventHandler<ContextEventArgs>(srcVoice_BufferStart);
    srcVoice.FrequencyRatio = 1;
    srcVoice.SubmitSourceBuffer(buffer);
    srcVoice.Start();
}
/// <summary>
/// Starts playback on the source voice; a no-op when no voice exists.
/// </summary>
public override void Play()
{
    // Read the property once so the null test and the call use the same reference
    // (matches the single-evaluation semantics of the null-conditional operator).
    var voice = SourceVoice;
    if (voice != null)
    {
        voice.Start();
    }
}
/// <summary>
/// Plays this sound on an idle pooled voice (creating and pooling a new one if none
/// is free), applying a constant-power-style left/right pan via the output matrix.
/// </summary>
/// <param name="pan">-1.0 = fully left, 1.0 = fully right, 0.0 = centered.</param>
/// <returns>The source voice that is now playing the sound.</returns>
internal SourceVoice Start(float pan = 0)
{
    // Reuse a voice with nothing queued, otherwise grow the pool.
    if (_voices.Any(v => v.State.BuffersQueued <= 0))
    {
        s = _voices.First(v => v.State.BuffersQueued <= 0);
    }
    else
    {
        s = new SourceVoice(Device, Stream.Format, true);
        _voices.Add(s);
    }
    var b = new AudioBuffer
    {
        Stream = Stream.ToDataStream(),
        AudioBytes = (int)Stream.Length,
        LoopCount = Loop ? AudioBuffer.LoopInfinite : 0,
        Flags = BufferFlags.EndOfStream
    };
    s.SubmitSourceBuffer(b, Stream.DecodedPacketsInfo);
    // NOTE(review): the matrix is sized for at most 8 coefficients, which assumes a
    // mono source (source channels x output channels <= 8) — confirm for multi-channel sources.
    float[] outputMatrix = new float[8];
    for (int i = 0; i < 8; i++)
    {
        outputMatrix[i] = 0;
    }
    // pan of -1.0 indicates all left speaker,
    // 1.0 is all right speaker, 0.0 is split between left and right
    float left = 0.5f - pan / 2;
    float right = 0.5f + pan / 2;
    // Route left/right gains to the front speakers of whatever layout the mastering voice has.
    switch (Master.ChannelMask)
    {
    case (int)Speakers.Mono:
        outputMatrix[0] = 1.0f;
        break;

    case (int)Speakers.Stereo:
    case (int)Speakers.TwoPointOne:
    case (int)Speakers.Surround:
        outputMatrix[0] = left;
        outputMatrix[1] = right;
        break;

    case (int)Speakers.Quad:
        outputMatrix[0] = outputMatrix[2] = left;
        outputMatrix[1] = outputMatrix[3] = right;
        break;

    case (int)Speakers.FourPointOne:
        outputMatrix[0] = outputMatrix[3] = left;
        outputMatrix[1] = outputMatrix[4] = right;
        break;

    case (int)Speakers.FivePointOne:
    case (int)Speakers.SevenPointOne:
    case (int)Speakers.FivePointOneSurround:
        outputMatrix[0] = outputMatrix[4] = left;
        outputMatrix[1] = outputMatrix[5] = right;
        break;

    case (int)Speakers.SevenPointOneSurround:
        outputMatrix[0] = outputMatrix[4] = outputMatrix[6] = left;
        outputMatrix[1] = outputMatrix[5] = outputMatrix[7] = right;
        break;
    }
    s.SetOutputMatrix(null, s.VoiceDetails.InputChannelCount, Master.VoiceDetails.InputChannelCount, outputMatrix);
    s.Start();
    return (s);
}
/// <summary>
/// (Re)starts playback: stops the voice first so Start resumes cleanly, then
/// flags the instance as playing.
/// </summary>
public override void Play()
{
    mVoice.Stop();
    mVoice.Start();
    mIsPlaying = true;
}