/// <summary>
/// Creates the sound wrapper. When a DirectSound device is supplied, a
/// 44.1 kHz / stereo / 16-bit PCM secondary buffer is created on it and the
/// configured volume applied; without a device only the managed sample
/// buffer is allocated.
/// </summary>
/// <param name="handle">Window handle used for the cooperative level.</param>
/// <param name="device">Output device; may be null (no hardware buffer).</param>
public Sound(IntPtr handle, DirectSound device)
{
    if (device != null)
    {
        device.SetCooperativeLevel(handle, CooperativeLevel.Priority);

        // 2 channels * 16 bits => 4-byte frames.
        var waveFormat = new WaveFormat
        {
            SamplesPerSecond = 44100,
            BitsPerSample = 16,
            Channels = 2,
            FormatTag = WaveFormatTag.Pcm,
            BlockAlignment = 4
        };
        waveFormat.AverageBytesPerSecond =
            waveFormat.SamplesPerSecond * waveFormat.Channels * (waveFormat.BitsPerSample / 8);

        var bufferDescription = new SoundBufferDescription
        {
            Format = waveFormat,
            Flags = BufferFlags.GlobalFocus
                  | BufferFlags.Software
                  | BufferFlags.GetCurrentPosition2
                  | BufferFlags.ControlVolume,
            SizeInBytes = BufferSize
        };
        DSoundBuffer = new SecondarySoundBuffer(device, bufferDescription);
        ChangeVolume(Global.Config.SoundVolume);
    }

    SoundBuffer = new byte[BufferSize];
    disposed = false;
}
/// <summary>
/// Creates the DirectSound device and a five-second looping secondary
/// buffer (mono / 44.1 kHz / 16-bit PCM), then initialises the channels.
/// </summary>
/// <param name="handle">Window handle passed to SetCooperativeLevel.</param>
void InitDirectSound(IntPtr handle)
{
    // Create the device; Priority level lets the primary buffer format be set.
    _SoundDevice = new DirectSound();
    _SoundDevice.SetCooperativeLevel(handle, CooperativeLevel.Priority);

    // Create the wave format: mono / 44.1 kHz / 16-bit PCM.
    // TODO: support more wave formats.
    WaveFormat wav = new WaveFormat();
    wav.FormatTag = WaveFormatTag.Pcm;
    wav.SamplesPerSecond = 44100;
    wav.Channels = 1; // mono
    wav.BitsPerSample = 16;
    // Derive the dependent fields instead of hard-coding 2 and 88200 (as the
    // original did) so they can never drift out of sync with the settings
    // above. Values are identical: blockAlign = 1 * (16/8) = 2,
    // avgBytes = 44100 * 2 = 88200.
    wav.BlockAlignment = (short)(wav.Channels * (wav.BitsPerSample / 8));
    wav.AverageBytesPerSecond = wav.SamplesPerSecond * wav.BlockAlignment;

    // Five seconds of audio (5 * 88200 = 441000 bytes, as before).
    BufferSize = wav.AverageBytesPerSecond * 5;

    // Description
    SoundBufferDescription des = new SoundBufferDescription();
    des.Format = wav;
    des.SizeInBytes = BufferSize;
    des.Flags = BufferFlags.GlobalFocus | BufferFlags.Software;

    // Buffer plays immediately and loops forever; DATA is the staging array.
    buffer = new SecondarySoundBuffer(_SoundDevice, des);
    DATA = new byte[BufferSize];
    buffer.Play(0, PlayFlags.Looping);

    // Channels
    InitChannels();
}
/// <summary>
/// Creates the DirectSound output, opening the device whose description
/// matches the configured sound device, or the default device when no
/// match exists.
/// </summary>
public DirectSoundSoundOutput(Sound sound, IntPtr mainWindowHandle)
{
    _sound = sound;

    var configuredName = Global.Config.SoundDevice;
    var selected = DirectSound.GetDevices().FirstOrDefault(d => d.Description == configuredName);

    _device = selected == null ? new DirectSound() : new DirectSound(selected.DriverGuid);
    _device.SetCooperativeLevel(mainWindowHandle, CooperativeLevel.Priority);
}
/// <summary>Releases the DirectSound device; subsequent calls are no-ops.</summary>
public void Dispose()
{
    if (!_disposed)
    {
        _device.Dispose();
        _device = null;
        _disposed = true;
    }
}
/// <summary>
/// Opens the default DirectSound device, creates the primary buffer with
/// the configured audio format, and resets playback state.
/// </summary>
public void Create()
{
    device = new DirectSound();
    // Priority cooperative level is required to set the primary buffer format below.
    device.SetCooperativeLevel(handle, CooperativeLevel.Priority);

    pBufferDescription.Flags = BufferFlags.PrimaryBuffer;
    pBuffer = new PrimarySoundBuffer(device, pBufferDescription);
    pBuffer.Format = audioFormat;

    Reset();
}
/// <summary>
/// (Re)initialises DirectSound output from the current user settings:
/// opens the default device, builds a mono PCM format from the configured
/// frequency and bit depth, allocates the secondary buffer and applies the
/// stored volume.
/// </summary>
/// <param name="handle">Window handle passed to SetCooperativeLevel.</param>
public void Initialize(IntPtr handle)
{
    // Tear down any previous initialisation before starting over.
    if (isInitialized)
    {
        Dispose();
    }
    isInitialized = false;
    LoadSettings();

    // Create the device
    Console.WriteLine("DirectSound: Initializing directSound ...");
    _SoundDevice = new DirectSound();
    _SoundDevice.SetCooperativeLevel(handle, CooperativeLevel.Normal);

    // Create the wave format: mono PCM, rate and bit depth from settings.
    WaveFormat wav = new WaveFormat();
    wav.FormatTag = WaveFormatTag.Pcm;
    wav.SamplesPerSecond = Program.Settings.Audio_Frequency;
    wav.Channels = 1;
    wav.BitsPerSample = Program.Settings.Audio_BitsPerSample;
    wav.AverageBytesPerSecond = wav.SamplesPerSecond * wav.Channels * (wav.BitsPerSample / 8);
    wav.BlockAlignment = (short)(wav.Channels * wav.BitsPerSample / 8);

    //BufferSize = (int)(wav.AverageBytesPerSecond * ((double)Program.Settings.Audio_BufferSizeInMilliseconds) / (double)1000);
    BufferSize = Program.Settings.Audio_BufferSizeInBytes;
    //latency_in_bytes = (int)((double)wav.AverageBytesPerSecond * (double)(Program.Settings.Audio_LatencyInPrecentage / (double)1000));
    // Latency is configured as a percentage of the whole buffer.
    latency_in_bytes = (Program.Settings.Audio_LatencyInPrecentage * BufferSize) / 100;
    // Divides by two — presumably 16-bit (2-byte) samples; confirm against
    // Audio_BitsPerSample usage elsewhere.
    latency_in_samples = latency_in_bytes / 2;
    Console.WriteLine("DirectSound: BufferSize = " + BufferSize + " Byte");
    Console.WriteLine("DirectSound: Latency in bytes = " + latency_in_bytes + " Byte");

    // Description
    SoundBufferDescription des = new SoundBufferDescription();
    des.Format = wav;
    des.SizeInBytes = BufferSize;
    des.Flags = BufferFlags.ControlVolume | BufferFlags.ControlFrequency | BufferFlags.ControlPan | BufferFlags.Software;
    buffer = new SecondarySoundBuffer(_SoundDevice, des);
    //buffer.Play(0, PlayFlags.Looping);

    // Set volume
    SetVolume(volume);
    Console.WriteLine("DirectSound: DirectSound initialized OK.");
    isInitialized = true;
    // NOTE(review): calling Shutdown() immediately after a successful
    // initialisation looks suspicious — confirm it only halts playback
    // (leaving the device and buffer usable) rather than undoing the setup.
    Shutdown();
}
/// <summary>
/// Lazily creates the shared DirectSound device and its mono / 192 kHz /
/// 16-bit PCM wave format, then (re)asserts Priority cooperative level
/// against the current foreground window. Errors are routed to the
/// application's error handler.
/// </summary>
private static void Initialise()
{
    try
    {
        if (directSound == null)
        {
            // First call: open the default device and build the playback format.
            directSound = new DirectSound();

            waveFormat = new WaveFormat
            {
                BitsPerSample = 16,
                Channels = 1,
                FormatTag = WaveFormatTag.Pcm,
                SamplesPerSecond = 192000
            };
            waveFormat.BlockAlignment = (short)(waveFormat.BitsPerSample / 8);
            waveFormat.AverageBytesPerSecond = waveFormat.SamplesPerSecond * waveFormat.BlockAlignment;
        }

        // Set the priority of the device with the rest of the operating system.
        directSound.SetCooperativeLevel(User32.GetForegroundWindow(), CooperativeLevel.Priority);
    }
    catch (Exception ex)
    {
        Utilities.OnError(Utilities.GetCurrentMethod(), ex);
    }
}
/// <summary>
/// Creates the DirectSound device and a one-second looping secondary
/// buffer (stereo or mono depending on STEREO), then initialises the APU
/// channels.
/// </summary>
/// <param name="parent">Control whose parent window hosts the device.</param>
private void InitDirectSound(Control parent)
{
    Debug.WriteLine(this, "Initializing APU ....", DebugStatus.None);

    // Create the device, bound to the parent window of the given control.
    _SoundDevice = new DirectSound();
    _SoundDevice.SetCooperativeLevel(parent.Parent.Handle, CooperativeLevel.Normal);

    // 44.1 kHz / 16-bit PCM; channel count follows the STEREO flag.
    var wav = new WaveFormat();
    wav.FormatTag = WaveFormatTag.Pcm;
    wav.SamplesPerSecond = 44100;
    wav.Channels = (short)(STEREO ? 2 : 1);
    AD = STEREO ? 4 : 2; // 4 bytes in stereo, 2 in mono
    wav.BitsPerSample = 16;
    wav.AverageBytesPerSecond = wav.SamplesPerSecond * wav.Channels * (wav.BitsPerSample / 8);
    wav.BlockAlignment = (short)(wav.Channels * wav.BitsPerSample / 8);

    // One second of audio.
    BufferSize = wav.AverageBytesPerSecond;

    var des = new SoundBufferDescription
    {
        Format = wav,
        SizeInBytes = BufferSize,
        Flags = BufferFlags.ControlVolume | BufferFlags.ControlFrequency | BufferFlags.ControlPan | BufferFlags.ControlEffects
    };

    // Staging array plus the looping hardware buffer.
    DATA = new byte[BufferSize];
    buffer = new SecondarySoundBuffer(_SoundDevice, des);
    buffer.Play(0, PlayFlags.Looping);

    InitChannels();

    Debug.WriteLine(this, "APU initialized ok !!", DebugStatus.Cool);
}
/// <summary>
/// Constructs a new Audio Output Device.
/// </summary>
///
/// <param name="device">Global identifier of the audio output device.</param>
/// <param name="owner">The owner window handle.</param>
/// <param name="samplingRate">The sampling rate of the device.</param>
/// <param name="channels">The number of channels of the device.</param>
///
public AudioOutputDevice(Guid device, IntPtr owner, int samplingRate, int channels)
{
    this.owner = owner;
    this.samplingRate = samplingRate;
    this.channels = channels;
    this.device = device;

    DirectSound ds = new DirectSound(device);
    ds.SetCooperativeLevel(owner, CooperativeLevel.Priority);

    // Set the output format: 32-bit IEEE float at the requested rate/channels.
    WaveFormat waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(samplingRate, channels);
    // Eight seconds of audio.
    bufferSize = 8 * waveFormat.AverageBytesPerSecond;

    // Setup the secondary buffer
    SoundBufferDescription desc2 = new SoundBufferDescription();
    desc2.Flags = BufferFlags.GlobalFocus | BufferFlags.ControlPositionNotify | BufferFlags.GetCurrentPosition2;
    desc2.BufferBytes = bufferSize;
    desc2.Format = waveFormat;

    buffer = new SecondarySoundBuffer(ds, desc2);

    var list = new List<NotificationPosition>();
    int numberOfPositions = 32;

    // Set notification for buffer percentiles: one auto-reset event every
    // 1/32nd of the buffer; each offset is shifted forward by one byte.
    for (int i = 0; i < numberOfPositions; i++)
    {
        list.Add(new NotificationPosition()
        {
            WaitHandle = new AutoResetEvent(false),
            Offset = i * bufferSize / numberOfPositions + 1,
        });
    }

    // Set notification for end of buffer
    list.Add(new NotificationPosition()
    {
        Offset = bufferSize - 1,
        WaitHandle = new AutoResetEvent(false)
    });

    // Indices into 'notifications' of the mid-buffer and end-of-buffer
    // markers (validated by the asserts below).
    firstHalfBufferIndex = numberOfPositions / 2;
    secondHalfBufferIndex = numberOfPositions;

    notifications = list.ToArray();
    System.Diagnostics.Debug.Assert(notifications[firstHalfBufferIndex].Offset == bufferSize / 2 + 1);
    System.Diagnostics.Debug.Assert(notifications[secondHalfBufferIndex].Offset == bufferSize - 1);

    // Make a copy of the wait handles
    waitHandles = new WaitHandle[notifications.Length];
    for (int i = 0; i < notifications.Length; i++)
    {
        waitHandles[i] = notifications[i].WaitHandle;
    }

    // Store all notification positions
    buffer.SetNotificationPositions(notifications);
}
// Virtual Methods
/// <summary>
/// Loads a RIFF/WAVE file into a new secondary sound buffer. Only
/// PCM / stereo / 44.1 kHz / 16-bit files are accepted.
/// </summary>
/// <param name="audioFile">Path of the wave file to load.</param>
/// <param name="directSound">Device the buffer is created on.</param>
/// <returns>true on success; false for unsupported files or any I/O error.</returns>
protected override bool LoadAudioFile(string audioFile, DirectSound directSound)
{
    try
    {
        // 'using' guarantees the file handle is released on every path —
        // the original leaked the reader on the early "unsupported format"
        // return below.
        using (BinaryReader reader = new BinaryReader(File.OpenRead(audioFile)))
        {
            // Read in the wave file header.
            chunkId = new string(reader.ReadChars(4));
            chunkSize = reader.ReadInt32();
            format = new string(reader.ReadChars(4));
            subChunkId = new string(reader.ReadChars(4));
            subChunkSize = reader.ReadInt32();
            audioFormat = (WaveFormatEncoding)reader.ReadInt16();
            numChannels = reader.ReadInt16();
            sampleRate = reader.ReadInt32();
            bytesPerSecond = reader.ReadInt32();
            blockAlign = reader.ReadInt16();
            bitsPerSample = reader.ReadInt16();
            dataChunkId = new string(reader.ReadChars(4));
            dataSize = reader.ReadInt32();

            // Reject anything that is not a RIFF/WAVE file containing
            // PCM / stereo / 44.1 kHz / 16-bit samples with a data chunk.
            if (chunkId != "RIFF" || format != "WAVE" || subChunkId.Trim() != "fmt" || audioFormat != WaveFormatEncoding.Pcm || numChannels != 2 || sampleRate != 44100 || bitsPerSample != 16 || dataChunkId != "data")
            {
                return (false);
            }

            // Buffer description matching the (fixed) wave layout above.
            SoundBufferDescription secondaryBufferDesc = new SoundBufferDescription()
            {
                Flags = BufferFlags.ControlVolume,
                BufferBytes = dataSize,
                Format = new WaveFormat(44100, 16, 2),
                AlgorithmFor3D = Guid.Empty
            };

            _SecondarySoundBuffer = new SecondarySoundBuffer(directSound, secondaryBufferDesc);

            // Read the sample data and copy it into the locked buffer.
            byte[] waveData = reader.ReadBytes(dataSize);

            DataStream waveBufferData2;
            DataStream waveBufferData1 = _SecondarySoundBuffer.Lock(0, dataSize, LockFlags.None, out waveBufferData2);
            waveBufferData1.Write(waveData, 0, dataSize);
            _SecondarySoundBuffer.Unlock(waveBufferData1, waveBufferData2);
        }
    }
    catch
    {
        return (false);
    }
    return (true);
}
/// <summary>
/// Starts playback of the given file: lazily creates the DirectSound
/// device, builds a notification-driven secondary buffer in the file's
/// format, pre-fills it with the first chunk of audio and, when more data
/// remains, starts the background thread that keeps streaming into it.
/// </summary>
/// <param name="FI">File whose decoded audio stream is played.</param>
// NOTE(review): lock (this) is an anti-pattern — any external code can take
// the same lock. A private lock object would be safer; confirm nothing else
// synchronises on this instance before changing it.
private void PlayFile(FileInfo FI)
{
    lock (this)
    {
        // The device is created on first use and then reused.
        if (this.DS == null)
        {
            this.DS = new DirectSound();
            this.DS.SetCooperativeLevel(this.Handle, CooperativeLevel.Normal);
        }

        this.StopPlayback();

        // Buffer uses the source file's sample rate and channel count at 16 bits.
        var bd = new SoundBufferDescription
        {
            Format = new WaveFormat(FI.AudioFile.SampleRate, 16, FI.AudioFile.Channels),
            BufferBytes = this.AudioBufferSize,
            Flags = BufferFlags.GlobalFocus | BufferFlags.StickyFocus | BufferFlags.ControlVolume | BufferFlags.GetCurrentPosition2 | BufferFlags.ControlPositionNotify
        };
        this.CurrentBuffer = new SecondarySoundBuffer(this.DS, bd);

        if (this.AudioUpdateTrigger == null)
        {
            this.AudioUpdateTrigger = new AutoResetEvent(false);
        }

        // One shared event is signalled at each of AudioBufferMarkers evenly
        // spaced positions; the streaming thread refills on each signal.
        var chunkSize = this.AudioBufferSize / this.AudioBufferMarkers;
        var updatePositions = new NotificationPosition[this.AudioBufferMarkers];
        for (var i = 0; i < this.AudioBufferMarkers; ++i)
        {
            updatePositions[i] = new NotificationPosition()
            {
                WaitHandle = this.AudioUpdateTrigger,
                Offset = chunkSize * i
            };
        }
        this.CurrentBuffer.SetNotificationPositions(updatePositions);

        this.CurrentStream = FI.AudioFile.OpenStream();
        {
            // Pre-fill the whole buffer; zero-pad if the file is shorter.
            var bytes = new byte[this.CurrentBuffer.Capabilities.BufferBytes];
            var readbytes = this.CurrentStream.Read(bytes, 0, this.CurrentBuffer.Capabilities.BufferBytes);
            if (readbytes < this.CurrentBuffer.Capabilities.BufferBytes)
            {
                Array.Clear(bytes, readbytes, this.CurrentBuffer.Capabilities.BufferBytes - readbytes);
            }

            DataStream audiodata2;
            var audiodata1 = this.CurrentBuffer.Lock(0, this.CurrentBuffer.Capabilities.BufferBytes, LockFlags.EntireBuffer, out audiodata2);
            audiodata1.Write(bytes, 0, this.CurrentBuffer.Capabilities.BufferBytes);
            this.CurrentBuffer.Unlock(audiodata1, audiodata2);
        }

        if (this.CurrentStream.Position < this.CurrentStream.Length)
        {
            // More data remains: loop the buffer and stream into it from a
            // background thread; enable the playback controls.
            this.AudioUpdateTrigger.Reset();
            this.AudioUpdateThread = new Thread(this.AudioUpdate);
            this.AudioUpdateThread.Start();
            this.btnPause.Enabled = true;
            this.btnStop.Enabled = true;
            this.AudioIsLooping = true;
        }
        else
        {
            // Whole file fit in the buffer: play it once, no streaming needed.
            this.CurrentStream.Close();
            this.CurrentStream = null;
            this.AudioIsLooping = false;
        }

        this.CurrentBuffer.Play(0, (this.AudioIsLooping ? PlayFlags.Looping : PlayFlags.None));
    }
}
/// <summary>Initialises the sound system on the control's window.</summary>
public override void Initialise()
{
    _directSound = new DirectSound();
    // Priority cooperative level allows the primary buffer format to be changed.
    _directSound.SetCooperativeLevel(_control.Handle, CooperativeLevel.Priority);
}
/// <summary>
/// Opens the DirectSound device with the given id and raises the
/// cooperative level to Priority so the primary sound buffer's format can
/// be modified.
/// </summary>
void CreateDevice(IntPtr windowHandle, Guid audioDeviceId)
{
    _directSound = new DirectSound(audioDeviceId);
    _directSound.SetCooperativeLevel(windowHandle, CooperativeLevel.Priority);
}
/// <summary>
/// Stops playback and releases the secondary buffer and the DirectSound
/// device. Safe to call repeatedly: already-released members are skipped
/// (the original dereferenced them unconditionally and would throw on a
/// second call).
/// </summary>
public void Dispose()
{
    isInitialized = false;
    Stop();

    if (buffer != null)
    {
        buffer.Dispose();
        buffer = null;
    }

    if (_SoundDevice != null)
    {
        _SoundDevice.Dispose();
        _SoundDevice = null;
    }

    // The forced GC.Collect() the original issued here was removed: forcing
    // a full collection on dispose only introduces pauses and is discouraged
    // in production code.
}
/// <summary>
/// Releases every loaded sound buffer and the DirectSound device. Skipped
/// entirely when initialisation previously failed (nothing was created).
/// </summary>
public void uninit_directsound()
{
    if (critical_failure)
    {
        return;
    }

    for (int index = 0; index < sound_buffers.Length; index++)
    {
        var current = sound_buffers[index];
        if (current == null)
        {
            continue;
        }
        current.Dispose();
        sound_buffers[index] = null;
    }

    if (device != null)
    {
        device.Dispose();
        device = null;
    }
}
/// <summary>Disposes resources.</summary>
public override void Dispose()
{
    _control = null;

    if (_directSound == null)
    {
        return;
    }

    try
    {
        _directSound.Dispose();
    }
    catch
    {
        // Best effort: a failing Dispose during shutdown is deliberately ignored.
    }
    _directSound = null;
}
/// <summary>
/// Opens the default playback device and loads the game's ten sound
/// effects into their fixed buffer slots. On any failure the user is
/// notified and <c>critical_failure</c> is set so later audio calls are
/// skipped.
/// </summary>
private void init_directsound()
{
    try
    {
        device = new DirectSound(DirectSoundGuid.DefaultPlaybackDevice);
        device.SetCooperativeLevel(((Video)(video_reference)).Screen_Handle, CooperativeLevel.Normal);

        // One slot per effect. The index order below must be preserved —
        // the rest of the code addresses the buffers by these indices.
        string[] wave_names =
        {
            "Ufo.wav", "Shot.wav", "BaseHit.wav", "InvHit.wav", "Walk1.wav",
            "Walk2.wav", "Walk3.wav", "Walk4.wav", "UfoHit.wav", "ELife.wav"
        };

        sound_buffers = new SecondarySoundBuffer[wave_names.Length];
        sound_files = new WaveStream[wave_names.Length];
        sound_statuses = new bool[wave_names.Length];

        // Data-driven loop replaces the ten copy-pasted calls of the
        // original; the calls and their order are identical.
        for (int i = 0; i < wave_names.Length; i++)
        {
            load_sound_file(ref stream, ref sound_files[i], ref sound_data, ref buf_desc, ref sound_buffers[i], wave_names[i], ref sound_statuses[i]);
        }
    }
    catch
    {
        MessageBox.Show("A failure has been detected during DirectSound initialization, please contact the author for assistance.", "Error!", MessageBoxButtons.OK);
        critical_failure = true;
    }
}
/// <summary>
/// (Re)initialises the audio renderer: lazily creates the shared
/// DirectSound device, releases any previous resources, and allocates a
/// new PCM secondary buffer for the given format. Errors are logged (and,
/// for DirectSound errors, shown to the user) instead of being thrown.
/// </summary>
/// <param name="samplesPerSecond">Sample rate in Hz.</param>
/// <param name="bytesPerSample">Bytes per sample (e.g. 2 for 16-bit).</param>
/// <param name="nrChannels">Number of audio channels.</param>
/// <param name="bufferSizeBytes">Playback buffer size in bytes.</param>
public void initialize(int samplesPerSecond, int bytesPerSample, int nrChannels, int bufferSizeBytes)
{
    try
    {
        if (directSound == null)
        {
            directSound = new DirectSound();
            directSound.SetCooperativeLevel(owner.Handle, CooperativeLevel.Priority);
        }

        releaseResources();

        this.bufferSizeBytes = bufferSizeBytes;
        this.bytesPerSample = bytesPerSample;
        this.samplesPerSecond = samplesPerSecond;
        this.nrChannels = nrChannels;

        SoundBufferDescription desc = new SoundBufferDescription();
        desc.BufferBytes = bufferSizeBytes;
        desc.Flags = BufferFlags.Defer | BufferFlags.GlobalFocus | BufferFlags.ControlVolume | BufferFlags.ControlFrequency | BufferFlags.GetCurrentPosition2;

        // Derive the PCM format from the arguments.
        int blockAlign = nrChannels * bytesPerSample;
        int averageBytesPerSecond = samplesPerSecond * blockAlign;
        WaveFormat format = WaveFormat.CreateCustomFormat(WaveFormatEncoding.Pcm, samplesPerSecond, nrChannels, averageBytesPerSecond, blockAlign, bytesPerSample * 8);
        desc.Format = format;

        // A freshly allocated byte[] is already zero-filled, so the explicit
        // Array.Clear of the original was redundant and has been removed.
        silence = new byte[bufferSizeBytes];

        audioBuffer = new SecondarySoundBuffer(directSound, desc);
        Volume = volume;

        // Reset play-position bookkeeping.
        offsetBytes = 0;
        prevPlayPos = 0;
        ptsPos = 0;
        prevPtsPos = 0;
        playLoops = 0;
        ptsLoops = 0;

        log.Info("Direct Sound Initialized");
    }
    catch (SharpDX.SharpDXException e)
    {
        log.Error("Error initializing Direct Sound", e);
        MessageBox.Show("Error initializing Direct Sound: " + e.Message, "Direct Sound Error");
    }
    catch (Exception e)
    {
        log.Error("Error initializing Direct Sound", e);
    }
}
/// <summary>
/// Updates the 3D listener (delegates to DirectSound.UpdateListener3d).
/// </summary>
protected void UpdateSounds3D() => DirectSound.UpdateListener3d();
/// <summary>Routes the device to the audio subsystem's buffer setup.</summary>
public void SetupAudio(DirectSound device) => Audio.SetupSoundBuffer(device);
/// <summary>Returns the descriptions of all available DirectSound devices.</summary>
public static IEnumerable<string> GetDeviceNames()
{
    var names = new List<string>();
    foreach (var deviceInformation in DirectSound.GetDevices())
    {
        names.Add(deviceInformation.Description);
    }
    return names;
}
/// <summary>
/// Constructs a new Audio Output Device.
/// </summary>
///
/// <param name="device">Global identifier of the audio output device.</param>
/// <param name="owner">The owner window handle.</param>
/// <param name="samplingRate">The sampling rate of the device.</param>
/// <param name="channels">The number of channels of the device.</param>
///
public AudioOutputDevice(Guid device, IntPtr owner, int samplingRate, int channels)
{
    this.owner = owner;
    this.samplingRate = samplingRate;
    this.channels = channels;
    this.device = device;

    DirectSound ds = new DirectSound(device);
    ds.SetCooperativeLevel(owner, CooperativeLevel.Priority);

    // Set the output format: 32-bit IEEE float at the requested rate/channels.
    WaveFormat waveFormat = new WaveFormat();
    waveFormat.FormatTag = WaveFormatTag.IeeeFloat;
    waveFormat.BitsPerSample = 32;
    waveFormat.BlockAlignment = (short)(waveFormat.BitsPerSample * channels / 8);
    waveFormat.Channels = (short)channels;
    waveFormat.SamplesPerSecond = samplingRate;
    waveFormat.AverageBytesPerSecond = waveFormat.SamplesPerSecond * waveFormat.BlockAlignment;

    // Eight seconds of audio.
    bufferSize = 8 * waveFormat.AverageBytesPerSecond;

    // Setup the secondary buffer
    SoundBufferDescription desc2 = new SoundBufferDescription();
    desc2.Flags = BufferFlags.GlobalFocus | BufferFlags.ControlPositionNotify | BufferFlags.GetCurrentPosition2;
    desc2.SizeInBytes = bufferSize;
    desc2.Format = waveFormat;

    buffer = new SecondarySoundBuffer(ds, desc2);

    var list = new List<NotificationPosition>();
    int numberOfPositions = 32;

    // Set notification for buffer percentiles: one auto-reset event every
    // 1/32nd of the buffer; each offset is shifted forward by one byte.
    for (int i = 0; i < numberOfPositions; i++)
    {
        list.Add(new NotificationPosition()
        {
            Event = new AutoResetEvent(false),
            Offset = i * bufferSize / numberOfPositions + 1,
        });
    }

    // Set notification for end of buffer
    list.Add(new NotificationPosition()
    {
        Offset = bufferSize - 1,
        Event = new AutoResetEvent(false)
    });

    // Indices into 'notifications' of the mid-buffer and end-of-buffer
    // markers (validated by the asserts below).
    firstHalfBufferIndex = numberOfPositions / 2;
    secondHalfBufferIndex = numberOfPositions;

    notifications = list.ToArray();
    System.Diagnostics.Debug.Assert(notifications[firstHalfBufferIndex].Offset == bufferSize / 2 + 1);
    System.Diagnostics.Debug.Assert(notifications[secondHalfBufferIndex].Offset == bufferSize - 1);

    // Make a copy of the wait handles
    waitHandles = new WaitHandle[notifications.Length];
    for (int i = 0; i < notifications.Length; i++)
        waitHandles[i] = notifications[i].Event;

    // Store all notification positions
    buffer.SetNotificationPositions(notifications);
}
/// <summary>
/// Base implementation: loads nothing and always reports success.
/// Subclasses override this to actually read the file.
/// </summary>
protected virtual bool LoadAudioFile(string audioFile, DirectSound directSound) => true;
/// <summary>
/// Demo/test: fills a one-minute looping secondary buffer with a 220 Hz
/// cosine tone carrying a 10 Hz vibrato and plays it until the form is
/// closed.
/// </summary>
public void TestPlaySoundDirectSound()
{
    DirectSound directSound = new DirectSound();

    var form = new Form();
    form.Text = "SharpDX - DirectSound Demo";

    // Set Cooperative Level to PRIORITY (priority level can call the SetFormat and Compact methods)
    //
    directSound.SetCooperativeLevel(form.Handle, CooperativeLevel.Priority);

    // Create PrimarySoundBuffer
    var primaryBufferDesc = new SoundBufferDescription();
    primaryBufferDesc.Flags = SharpDX.DirectSound.BufferFlags.PrimaryBuffer;
    primaryBufferDesc.AlgorithmFor3D = Guid.Empty;

    var primarySoundBuffer = new PrimarySoundBuffer(directSound, primaryBufferDesc);

    // Play the PrimarySound Buffer
    primarySoundBuffer.Play(0, SharpDX.DirectSound.PlayFlags.Looping);

    // Default WaveFormat Stereo 44100 16 bit
    WaveFormat waveFormat = new WaveFormat();

    // Create SecondarySoundBuffer sized for 60 seconds of audio.
    var secondaryBufferDesc = new SoundBufferDescription();
    secondaryBufferDesc.BufferBytes = waveFormat.ConvertLatencyToByteSize(60000);
    secondaryBufferDesc.Format = waveFormat;
    secondaryBufferDesc.Flags = SharpDX.DirectSound.BufferFlags.GetCurrentPosition2 | SharpDX.DirectSound.BufferFlags.ControlPositionNotify | SharpDX.DirectSound.BufferFlags.GlobalFocus | SharpDX.DirectSound.BufferFlags.ControlVolume | SharpDX.DirectSound.BufferFlags.StickyFocus;
    secondaryBufferDesc.AlgorithmFor3D = Guid.Empty;

    var secondarySoundBuffer = new SecondarySoundBuffer(directSound, secondaryBufferDesc);

    // Get Capabilties from secondary sound buffer
    var capabilities = secondarySoundBuffer.Capabilities;

    // Lock the whole buffer for writing.
    DataStream dataPart2;
    var dataPart1 = secondarySoundBuffer.Lock(0, capabilities.BufferBytes, LockFlags.EntireBuffer, out dataPart2);

    // Fill the buffer: a cosine carrier at 220 Hz whose frequency wobbles
    // +/-4 Hz at a 10 Hz rate (vibrato); the same sample is written twice
    // per frame (left and right channel).
    int numberOfSamples = capabilities.BufferBytes / waveFormat.BlockAlign;
    for (int i = 0; i < numberOfSamples; i++)
    {
        double vibrato = Math.Cos(2 * Math.PI * 10.0 * i / waveFormat.SampleRate);
        short value = (short)(Math.Cos(2 * Math.PI * (220.0 + 4.0 * vibrato) * i / waveFormat.SampleRate) * 16384); // Not too loud
        dataPart1.Write(value);
        dataPart1.Write(value);
    }

    // Unlock the buffer
    secondarySoundBuffer.Unlock(dataPart1, dataPart2);

    // Play the song
    secondarySoundBuffer.Play(0, SharpDX.DirectSound.PlayFlags.Looping);

    Application.Run(form);
}
/// <summary>
/// Initialize DirectSound for the specified control's window.
/// </summary>
/// <param name="Parent">Control whose handle DirectSound is bound to.</param>
public static void Initialize(Control Parent)
{
    device = new DirectSound();
    device.SetCooperativeLevel(Parent.Handle, CooperativeLevel.Priority);
    device.IsDefaultPool = false;
}
/// <summary>Loads the configured audio file onto the given device.</summary>
public bool LoadAudio(DirectSound directSound) => LoadAudioFile(_AudioFileName, directSound);
/// <summary>
/// Opens the default DirectSound device and loads a wave file into a new
/// secondary buffer. Only PCM / stereo / 44.1 kHz / 16-bit RIFF WAVE data
/// is accepted; anything else yields null.
/// </summary>
/// <param name="fileName">NOTE(review): currently unused — the body reads
/// the <c>FileName</c> member instead. Confirm which is intended before
/// relying on this parameter.</param>
/// <param name="handle">Window handle for the cooperative level.</param>
/// <returns>The populated buffer, or null when the file is unsupported.</returns>
private SecondarySoundBuffer initializeSecondaryBuffer(string fileName, IntPtr handle)
{
    var directSound = new DirectSound();
    directSound.SetCooperativeLevel(handle, CooperativeLevel.Exclusive);

    // 'using' guarantees the file handle is released on every path — the
    // original leaked the reader on the early "unsupported format" return.
    using (var reader = new BinaryReader(File.OpenRead(FileName)))
    {
        // RIFF header.
        var chunkId = new string(reader.ReadChars(4));
        var chunkSize = reader.ReadInt32();
        var format = new string(reader.ReadChars(4));
        // "fmt " sub-chunk describing the sample layout.
        var subChunkId = new string(reader.ReadChars(4));
        var subChunkSize = reader.ReadInt32();
        var audioFormat = (WaveFormatEncoding)reader.ReadInt16();
        var numChannels = reader.ReadInt16();
        var sampleRate = reader.ReadInt32();
        var bytesPerSecond = reader.ReadInt32();
        var blockAlign = reader.ReadInt16();
        var bitsPerSample = reader.ReadInt16();
        // "data" sub-chunk header.
        var dataChunkId = new string(reader.ReadChars(4));
        var dataSize = reader.ReadInt32();

        // Reject anything that is not a RIFF/WAVE file containing
        // PCM / stereo / 44.1 kHz / 16-bit samples with a data chunk.
        if (chunkId != "RIFF" || format != "WAVE" || subChunkId.Trim() != "fmt" || audioFormat != WaveFormatEncoding.Pcm || numChannels != 2 || sampleRate != 44100 || bitsPerSample != 16 || dataChunkId != "data")
        {
            return (null);
        }

        // Buffer description matching the (fixed) wave layout above.
        var buffer = new SoundBufferDescription
        {
            Flags = BufferFlags.ControlVolume,
            BufferBytes = dataSize,
            Format = new WaveFormat(44100, 16, 2),
            AlgorithmFor3D = Guid.Empty
        };

        var secondaryBuffer = new SecondarySoundBuffer(directSound, buffer);

        // Read the sample data and copy it into the locked buffer.
        var waveData = reader.ReadBytes(dataSize);
        var waveBufferData1 = secondaryBuffer.Lock(0, dataSize, LockFlags.None, out var waveBufferData2);
        waveBufferData1.Write(waveData, 0, dataSize);
        secondaryBuffer.Unlock(waveBufferData1, waveBufferData2);

        return (secondaryBuffer);
    }
}