/// <summary>
/// Record sound data for the specified number of seconds using the given wave format.
/// The stream will be a properly formatted RIFF (WAV) file.
/// </summary>
/// <param name="st">Stream into which recorded samples are written</param>
/// <param name="Seconds">Seconds of data to record</param>
/// <param name="SoundFormat">Sound format to record in.</param>
/// <exception cref="InvalidOperationException">Thrown if a recording session is already in progress.</exception>
public void RecordFor(Stream st, short Seconds, SoundFormats SoundFormat)
{
    m_hWaveIn = IntPtr.Zero;

    // only allow 1 recording session at a time
    if (recording)
    {
        throw new InvalidOperationException("Already recording");
    }

    // set our global flag
    recording = true;

    // lazily create the buffer queue and the native-handle -> header map
    if (m_qBuffers == null)
    {
        m_qBuffers = new Queue(MaxBuffers);
    }
    if (m_HandleMap == null)
    {
        m_HandleMap = new Hashtable(MaxBuffers);
    }

    m_recformat = new WaveFormatEx();

#if !NDOC
    // create the callback message window that receives WIM_* notifications
    m_recmw = new SoundMessageWindow();
    m_recmw.WaveDoneMessage += new WaveDoneHandler(m_recmw_WaveDoneMessage);
    m_recmw.WaveCloseMessage += new WaveCloseHandler(mw_WaveCloseMessage);
    m_recmw.WaveOpenMessage += new WaveOpenHandler(mw_WaveOpenMessage);
#endif

    // set up format
    #region long format switch()
    switch (SoundFormat)
    {
        case SoundFormats.Mono16bit11kHz:
            m_recformat.Channels = 1;
            m_recformat.SamplesPerSec = 11025;
            m_recformat.BitsPerSample = 16;
            break;
        case SoundFormats.Mono16bit22kHz:
            m_recformat.Channels = 1;
            m_recformat.SamplesPerSec = 22050;
            m_recformat.BitsPerSample = 16;
            break;
        case SoundFormats.Mono16bit44kHz:
            m_recformat.Channels = 1;
            m_recformat.SamplesPerSec = 44100;
            m_recformat.BitsPerSample = 16;
            break;
        case SoundFormats.Mono8bit11kHz:
            m_recformat.Channels = 1;
            m_recformat.SamplesPerSec = 11025;
            m_recformat.BitsPerSample = 8;
            break;
        case SoundFormats.Mono8bit22kHz:
            m_recformat.Channels = 1;
            m_recformat.SamplesPerSec = 22050;
            m_recformat.BitsPerSample = 8;
            break;
        case SoundFormats.Mono8bit44kHz:
            m_recformat.Channels = 1;
            m_recformat.SamplesPerSec = 44100;
            m_recformat.BitsPerSample = 8;
            break;
        case SoundFormats.Stereo16bit11kHz:
            m_recformat.Channels = 2;
            m_recformat.SamplesPerSec = 11025;
            m_recformat.BitsPerSample = 16;
            break;
        case SoundFormats.Stereo16bit22kHz:
            m_recformat.Channels = 2;
            m_recformat.SamplesPerSec = 22050;
            m_recformat.BitsPerSample = 16;
            break;
        case SoundFormats.Stereo16bit44kHz:
            m_recformat.Channels = 2;
            m_recformat.SamplesPerSec = 44100;
            m_recformat.BitsPerSample = 16;
            break;
        case SoundFormats.Stereo8bit11kHz:
            m_recformat.Channels = 2;
            m_recformat.SamplesPerSec = 11025;
            m_recformat.BitsPerSample = 8;
            break;
        case SoundFormats.Stereo8bit22kHz:
            m_recformat.Channels = 2;
            m_recformat.SamplesPerSec = 22050;
            m_recformat.BitsPerSample = 8;
            break;
        case SoundFormats.Stereo8bit44kHz:
            m_recformat.Channels = 2;
            m_recformat.SamplesPerSec = 44100;
            m_recformat.BitsPerSample = 8;
            break;
    }
    #endregion long format switch()

    m_recformat.FormatTag = WAVE_FORMAT_PCM;
    // BUG FIX: compute BlockAlign first, then derive the data rate from it.
    // The PCM WAVEFORMATEX contract is nAvgBytesPerSec = nSamplesPerSec * nBlockAlign;
    // the old code used SamplesPerSec * Channels, which omits the bytes-per-sample
    // factor and understates the rate by half for every 16-bit format.
    m_recformat.BlockAlign = (short)((m_recformat.Channels * m_recformat.BitsPerSample) / 8);
    m_recformat.AvgBytesPerSec = m_recformat.SamplesPerSec * m_recformat.BlockAlign;
    m_recformat.Size = 0; // no extra format bytes for plain PCM

    // build the RIFF -> WAVE -> fmt/data chunk scaffolding in the output stream
    m_ck = new RiffChunk(st);
    m_ck.BeginWrite();
    WaveChunk wck = m_ck.CreateSubChunk(FourCC.Wave, st) as WaveChunk;
    wck.BeginWrite();
    FmtChunk fck = wck.CreateSubChunk(FourCC.Fmt, st) as FmtChunk;
    fck.BeginWrite();
    fck.WaveFormat = m_recformat;
    fck.Write(fck.WaveFormat.GetBytes());
    fck.EndWrite();
    DataChunk dck = wck.CreateSubChunk(FourCC.Data, st) as DataChunk;
    m_streamRecord = dck.BeginWrite();

#if !NDOC
    // check for support of selected format
    CheckWaveError(Wave.waveInOpen(out m_hWaveIn, WAVE_MAPPER, m_recformat, IntPtr.Zero, 0, WAVE_FORMAT_QUERY));

    // open wave device
    CheckWaveError(Wave.waveInOpen(out m_hWaveIn, (uint)m_deviceID, m_recformat, m_recmw.Hwnd, 0, CALLBACK_WINDOW));

    // size each capture buffer to hold min(Seconds, BufferLen) seconds of audio.
    // BUG FIX: use the true byte rate; the old SamplesPerSec * Channels formula
    // undersized buffers by half for 16-bit formats.
    m_recBufferSize = (int)(Math.Min((int)Seconds, BufferLen) * m_recformat.AvgBytesPerSec);

    // queue two buffers so the driver always has a spare while one is draining
    for (int i = 0; i < 2; i++)
    {
        WaveHeader hdr = GetNewRecordBuffer(m_recBufferSize);
        // send the buffer to the device
        CheckWaveError(Wave.waveInAddBuffer(m_hWaveIn, hdr.Header, hdr.HeaderLength));
    }

    // begin recording
    CheckWaveError(Wave.waveInStart(m_hWaveIn));
    recordingFinished = false;

    // one-shot timer stops the capture after the requested duration
    m_recTimer = new Timer(new TimerCallback(RecTimerCallback), this, Seconds * 1000, Timeout.Infinite);
#endif
}
/// <summary>
/// Record sound data for the specified number of seconds using the given wave format.
/// The stream will be a properly formatted RIFF (WAV) file.
/// </summary>
/// <param name="st">Stream into which recorded samples are written</param>
/// <param name="Seconds">Seconds of data to record</param>
/// <param name="SoundFormat">Sound format to record in.</param>
/// <exception cref="InvalidOperationException">Thrown if a recording session is already in progress.</exception>
public void RecordFor(Stream st, short Seconds, SoundFormats SoundFormat)
{
    m_hWaveIn = IntPtr.Zero;

    // only allow 1 recording session at a time
    if (recording)
    {
        throw new InvalidOperationException("Already recording");
    }

    // set our global flag
    recording = true;

    // lazily create the buffer queue and the native-handle -> header map
    if (m_qBuffers == null)
    {
        m_qBuffers = new Queue(MaxBuffers);
    }
    if (m_HandleMap == null)
    {
        m_HandleMap = new Hashtable(MaxBuffers);
    }

    m_recformat = new WaveFormatEx();

#if !NDOC
    // create the callback message window that receives WIM_* notifications
    m_recmw = new SoundMessageWindow();
    m_recmw.WaveDoneMessage += new WaveDoneHandler(m_recmw_WaveDoneMessage);
    m_recmw.WaveCloseMessage += new WaveCloseHandler(mw_WaveCloseMessage);
    m_recmw.WaveOpenMessage += new WaveOpenHandler(mw_WaveOpenMessage);
#endif

    // set up format
    #region long format switch()
    switch (SoundFormat)
    {
        case SoundFormats.Mono16bit11kHz:
            m_recformat.Channels = 1;
            m_recformat.SamplesPerSec = 11025;
            m_recformat.BitsPerSample = 16;
            break;
        case SoundFormats.Mono16bit22kHz:
            m_recformat.Channels = 1;
            m_recformat.SamplesPerSec = 22050;
            m_recformat.BitsPerSample = 16;
            break;
        case SoundFormats.Mono16bit44kHz:
            m_recformat.Channels = 1;
            m_recformat.SamplesPerSec = 44100;
            m_recformat.BitsPerSample = 16;
            break;
        case SoundFormats.Mono8bit11kHz:
            m_recformat.Channels = 1;
            m_recformat.SamplesPerSec = 11025;
            m_recformat.BitsPerSample = 8;
            break;
        case SoundFormats.Mono8bit22kHz:
            m_recformat.Channels = 1;
            m_recformat.SamplesPerSec = 22050;
            m_recformat.BitsPerSample = 8;
            break;
        case SoundFormats.Mono8bit44kHz:
            m_recformat.Channels = 1;
            m_recformat.SamplesPerSec = 44100;
            m_recformat.BitsPerSample = 8;
            break;
        case SoundFormats.Stereo16bit11kHz:
            m_recformat.Channels = 2;
            m_recformat.SamplesPerSec = 11025;
            m_recformat.BitsPerSample = 16;
            break;
        case SoundFormats.Stereo16bit22kHz:
            m_recformat.Channels = 2;
            m_recformat.SamplesPerSec = 22050;
            m_recformat.BitsPerSample = 16;
            break;
        case SoundFormats.Stereo16bit44kHz:
            m_recformat.Channels = 2;
            m_recformat.SamplesPerSec = 44100;
            m_recformat.BitsPerSample = 16;
            break;
        case SoundFormats.Stereo8bit11kHz:
            m_recformat.Channels = 2;
            m_recformat.SamplesPerSec = 11025;
            m_recformat.BitsPerSample = 8;
            break;
        case SoundFormats.Stereo8bit22kHz:
            m_recformat.Channels = 2;
            m_recformat.SamplesPerSec = 22050;
            m_recformat.BitsPerSample = 8;
            break;
        case SoundFormats.Stereo8bit44kHz:
            m_recformat.Channels = 2;
            m_recformat.SamplesPerSec = 44100;
            m_recformat.BitsPerSample = 8;
            break;
    }
    #endregion long format switch()

    m_recformat.FormatTag = WAVE_FORMAT_PCM;
    // BUG FIX: compute BlockAlign first, then derive the data rate from it.
    // The PCM WAVEFORMATEX contract is nAvgBytesPerSec = nSamplesPerSec * nBlockAlign;
    // the old code used SamplesPerSec * Channels, which omits the bytes-per-sample
    // factor and understates the rate by half for every 16-bit format.
    m_recformat.BlockAlign = (short)((m_recformat.Channels * m_recformat.BitsPerSample) / 8);
    m_recformat.AvgBytesPerSec = m_recformat.SamplesPerSec * m_recformat.BlockAlign;
    m_recformat.Size = 0; // no extra format bytes for plain PCM

    // build the RIFF -> WAVE -> fmt/data chunk scaffolding in the output stream
    m_ck = new RiffChunk(st);
    m_ck.BeginWrite();
    WaveChunk wck = m_ck.CreateSubChunk(FourCC.Wave, st) as WaveChunk;
    wck.BeginWrite();
    FmtChunk fck = wck.CreateSubChunk(FourCC.Fmt, st) as FmtChunk;
    fck.BeginWrite();
    fck.WaveFormat = m_recformat;
    fck.Write(fck.WaveFormat.GetBytes());
    fck.EndWrite();
    DataChunk dck = wck.CreateSubChunk(FourCC.Data, st) as DataChunk;
    m_streamRecord = dck.BeginWrite();

#if !NDOC
    // check for support of selected format
    CheckWaveError(Wave.waveInOpen(out m_hWaveIn, WAVE_MAPPER, m_recformat, IntPtr.Zero, 0, WAVE_FORMAT_QUERY));

    // open wave device
    CheckWaveError(Wave.waveInOpen(out m_hWaveIn, (uint)m_deviceID, m_recformat, m_recmw.Hwnd, 0, CALLBACK_WINDOW));

    // size each capture buffer to hold min(Seconds, BufferLen) seconds of audio.
    // BUG FIX: use the true byte rate; the old SamplesPerSec * Channels formula
    // undersized buffers by half for 16-bit formats.
    m_recBufferSize = (int)(Math.Min((int)Seconds, BufferLen) * m_recformat.AvgBytesPerSec);

    // queue two buffers so the driver always has a spare while one is draining
    for (int i = 0; i < 2; i++)
    {
        WaveHeader hdr = GetNewRecordBuffer(m_recBufferSize);
        // send the buffer to the device
        CheckWaveError(Wave.waveInAddBuffer(m_hWaveIn, hdr.Header, hdr.HeaderLength));
    }

    // begin recording
    CheckWaveError(Wave.waveInStart(m_hWaveIn));
    recordingFinished = false;

    // one-shot timer stops the capture after the requested duration
    m_recTimer = new Timer(new TimerCallback(RecTimerCallback), this, Seconds * 1000, Timeout.Infinite);
#endif
}
/// <summary>
/// Record sound data for the specified number of seconds, translating the
/// canned <paramref name="SoundFormat"/> value into a PCM wave format and
/// delegating to the wave-format overload of RecordFor.
/// </summary>
/// <param name="st">Stream into which recorded samples are written</param>
/// <param name="Seconds">Seconds of data to record</param>
/// <param name="SoundFormat">Sound format to record in.</param>
public void RecordFor(Stream st, short Seconds, SoundFormats SoundFormat)
{
    // resolve the enum to a concrete PCM format, then hand off
    WaveFormat2 pcmFormat = WaveFormat2.GetPCMWaveFormat(SoundFormat);
    RecordFor(st, Seconds, pcmFormat);
}
/// <summary>
/// Build a PCM <see cref="WaveFormat2"/> describing one of the predefined sound formats.
/// </summary>
/// <param name="SoundFormat">Predefined format selecting channel count, sample rate and sample size.</param>
/// <returns>A fully populated PCM wave format.</returns>
/// <exception cref="ArgumentException">Thrown if <paramref name="SoundFormat"/> is not a recognized value.</exception>
public static WaveFormat2 GetPCMWaveFormat(SoundFormats SoundFormat)
{
    WaveFormat2 fmt = new WaveFormat2();

    #region long format switch()
    switch (SoundFormat)
    {
        case SoundFormats.Mono16bit11kHz:
            fmt.Channels = 1;
            fmt.SamplesPerSec = 11025;
            fmt.BitsPerSample = 16;
            break;
        case SoundFormats.Mono16bit22kHz:
            fmt.Channels = 1;
            fmt.SamplesPerSec = 22050;
            fmt.BitsPerSample = 16;
            break;
        case SoundFormats.Mono16bit44kHz:
            fmt.Channels = 1;
            fmt.SamplesPerSec = 44100;
            fmt.BitsPerSample = 16;
            break;
        case SoundFormats.Mono8bit11kHz:
            fmt.Channels = 1;
            fmt.SamplesPerSec = 11025;
            fmt.BitsPerSample = 8;
            break;
        case SoundFormats.Mono8bit22kHz:
            fmt.Channels = 1;
            fmt.SamplesPerSec = 22050;
            fmt.BitsPerSample = 8;
            break;
        case SoundFormats.Mono8bit44kHz:
            fmt.Channels = 1;
            fmt.SamplesPerSec = 44100;
            fmt.BitsPerSample = 8;
            break;
        case SoundFormats.Stereo16bit11kHz:
            fmt.Channels = 2;
            fmt.SamplesPerSec = 11025;
            fmt.BitsPerSample = 16;
            break;
        case SoundFormats.Stereo16bit22kHz:
            fmt.Channels = 2;
            fmt.SamplesPerSec = 22050;
            fmt.BitsPerSample = 16;
            break;
        case SoundFormats.Stereo16bit44kHz:
            fmt.Channels = 2;
            fmt.SamplesPerSec = 44100;
            fmt.BitsPerSample = 16;
            break;
        case SoundFormats.Stereo8bit11kHz:
            fmt.Channels = 2;
            fmt.SamplesPerSec = 11025;
            fmt.BitsPerSample = 8;
            break;
        case SoundFormats.Stereo8bit22kHz:
            fmt.Channels = 2;
            fmt.SamplesPerSec = 22050;
            fmt.BitsPerSample = 8;
            break;
        case SoundFormats.Stereo8bit44kHz:
            fmt.Channels = 2;
            fmt.SamplesPerSec = 44100;
            fmt.BitsPerSample = 8;
            break;
        default:
            // FIX: previously an unknown value fell through the switch and a
            // zeroed (0 Hz, 0 channel) format was silently returned; fail fast.
            throw new ArgumentException("Unrecognized SoundFormats value", "SoundFormat");
    }
    #endregion long format switch()

    return (GetPCMWaveFormat(fmt.SamplesPerSec, fmt.Channels, fmt.BitsPerSample));
}