Example #1
		/// <summary>
		/// Plays the waveform contained in the given stream. The stream is expected to contain a full RIFF header.
		/// </summary>
		/// <param name="playStream">Stream with the waveform</param>
		public void Play( Stream playStream )
		{
			if(m_playing) return;

			
			MMChunk ck = MMChunk.FromStream(playStream);
			if ( ck != null && ck.FourCC == FourCC.Riff )
			{
				if ( ck[0] is WaveChunk )
				{
					DataChunk dck = ck[0].FindChunk(FourCC.Data) as DataChunk;
					if ( dck != null )
					{
						playStream = dck.BeginRead();
					}
					FmtChunk fck = ck[0].FindChunk(FourCC.Fmt) as FmtChunk;
					if ( fck != null )
					{
						m_format = fck.WaveFormat;
					}
				}
			}
			if ( playStream == null )
			{
				throw new Exception("No valid WAV file has been opened");
			}

#if !NDOC

			if ( m_qBuffers == null )
				m_qBuffers = new Queue(MaxBuffers);
			if ( m_HandleMap == null )
				m_HandleMap = new Hashtable(MaxBuffers);

			// create a window to catch waveOutxxx messages
			SoundMessageWindow	mw = new SoundMessageWindow();

			// wire in events
			mw.WaveOpenMessage += new WaveOpenHandler(mw_WaveOpenMessage);
			mw.WaveCloseMessage += new WaveCloseHandler(mw_WaveCloseMessage);
			mw.WaveDoneMessage += new WaveDoneHandler(mw_WaveDoneMessage);

			// add it to the global array
			int i = m_mwArray.Add(mw);
			m_streamArray.Add(playStream);

			// open the waveOut device and register the callback
			CheckWaveError(Wave.waveOutOpen(out m_hWaveOut, m_deviceID, m_format, ((SoundMessageWindow)m_mwArray[i]).Hwnd, 0, CALLBACK_WINDOW));

			RefillPlayBuffers();

			Monitor.Enter(m_qBuffers.SyncRoot);
			WaveHeader hdr = m_qBuffers.Dequeue() as WaveHeader;
			Monitor.Exit(m_qBuffers.SyncRoot);

			// play the file
			int ret = Wave.waveOutWrite(m_hWaveOut, hdr.Header, hdr.HeaderLength);
			CheckWaveError(ret);

			m_playing = true;
			
#endif
		}
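
A minimal usage sketch for Play: the example above shows only the method body, so the owning class is not named here. "Player" below is a placeholder for whatever class exposes Play(Stream); everything except the Play call itself is an assumption.

// Hypothetical caller; "Player" stands in for the class that exposes Play(Stream).
using System.IO;

public static class PlayDemo
{
    public static void PlayFile(string path)
    {
        Player player = new Player();            // assumed class name, not from the example
        FileStream fs = File.OpenRead(path);     // complete .wav file, RIFF header included

        // Play parses the RIFF header itself, locates the 'fmt ' and 'data' chunks,
        // opens the waveOut device and starts asynchronous playback.
        player.Play(fs);

        // Playback is driven by waveOut window messages, so the stream must stay
        // open until the "done" callback fires; dispose it there rather than here.
    }
}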
Example #2
        /// <summary>
        /// Records sound data for the specified number of seconds using the given wave format.
        /// The stream will contain a properly formatted RIFF (WAV) file.
        /// </summary>
        /// <param name="st">Stream into which recorded samples are written</param>
        /// <param name="Seconds">Seconds of data to record</param>
        /// <param name="SoundFormat">Sound format to record in.</param>
        public void RecordFor(Stream st, short Seconds, SoundFormats SoundFormat)
        {
            m_hWaveIn = IntPtr.Zero;

            // only allow 1 recording session at a time
            if (recording)
            {
                throw new InvalidOperationException("Already recording");
            }

            // set our global flag
            recording = true;

            if (m_qBuffers == null)
            {
                m_qBuffers = new Queue(MaxBuffers);
            }
            if (m_HandleMap == null)
            {
                m_HandleMap = new Hashtable(MaxBuffers);
            }

            m_recformat = new WaveFormatEx();
#if !NDOC
            // create the callback message window
            m_recmw = new SoundMessageWindow();
            m_recmw.WaveDoneMessage  += new WaveDoneHandler(m_recmw_WaveDoneMessage);
            m_recmw.WaveCloseMessage += new WaveCloseHandler(mw_WaveCloseMessage);
            m_recmw.WaveOpenMessage  += new WaveOpenHandler(mw_WaveOpenMessage);
#endif
            // set up format
            #region long format switch()
            switch (SoundFormat)
            {
            case SoundFormats.Mono16bit11kHz:
                m_recformat.Channels      = 1;
                m_recformat.SamplesPerSec = 11025;
                m_recformat.BitsPerSample = 16;
                break;

            case SoundFormats.Mono16bit22kHz:
                m_recformat.Channels      = 1;
                m_recformat.SamplesPerSec = 22050;
                m_recformat.BitsPerSample = 16;
                break;

            case SoundFormats.Mono16bit44kHz:
                m_recformat.Channels      = 1;
                m_recformat.SamplesPerSec = 44100;
                m_recformat.BitsPerSample = 16;
                break;

            case SoundFormats.Mono8bit11kHz:
                m_recformat.Channels      = 1;
                m_recformat.SamplesPerSec = 11025;
                m_recformat.BitsPerSample = 8;
                break;

            case SoundFormats.Mono8bit22kHz:
                m_recformat.Channels      = 1;
                m_recformat.SamplesPerSec = 22050;
                m_recformat.BitsPerSample = 8;
                break;

            case SoundFormats.Mono8bit44kHz:
                m_recformat.Channels      = 1;
                m_recformat.SamplesPerSec = 44100;
                m_recformat.BitsPerSample = 8;
                break;

            case SoundFormats.Stereo16bit11kHz:
                m_recformat.Channels      = 2;
                m_recformat.SamplesPerSec = 11025;
                m_recformat.BitsPerSample = 16;
                break;

            case SoundFormats.Stereo16bit22kHz:
                m_recformat.Channels      = 2;
                m_recformat.SamplesPerSec = 22050;
                m_recformat.BitsPerSample = 16;
                break;

            case SoundFormats.Stereo16bit44kHz:
                m_recformat.Channels      = 2;
                m_recformat.SamplesPerSec = 44100;
                m_recformat.BitsPerSample = 16;
                break;

            case SoundFormats.Stereo8bit11kHz:
                m_recformat.Channels      = 2;
                m_recformat.SamplesPerSec = 11025;
                m_recformat.BitsPerSample = 8;
                break;

            case SoundFormats.Stereo8bit22kHz:
                m_recformat.Channels      = 2;
                m_recformat.SamplesPerSec = 22050;
                m_recformat.BitsPerSample = 8;
                break;

            case SoundFormats.Stereo8bit44kHz:
                m_recformat.Channels      = 2;
                m_recformat.SamplesPerSec = 44100;
                m_recformat.BitsPerSample = 8;
                break;
            }
            #endregion long format switch()

            // derived PCM fields: BlockAlign is one sample frame (all channels at the
            // chosen bit depth), AvgBytesPerSec = SamplesPerSec * BlockAlign
            m_recformat.FormatTag      = WAVE_FORMAT_PCM;
            m_recformat.BlockAlign     = (short)((m_recformat.Channels * m_recformat.BitsPerSample) / 8);
            m_recformat.AvgBytesPerSec = m_recformat.SamplesPerSec * m_recformat.BlockAlign;
            m_recformat.Size           = 0;

            m_ck = new RiffChunk(st);
            m_ck.BeginWrite();
            WaveChunk wck = m_ck.CreateSubChunk(FourCC.Wave, st) as WaveChunk;
            wck.BeginWrite();
            FmtChunk fck = wck.CreateSubChunk(FourCC.Fmt, st) as FmtChunk;
            fck.BeginWrite();
            fck.WaveFormat = m_recformat;
            fck.Write(fck.WaveFormat.GetBytes());
            fck.EndWrite();
            DataChunk dck = wck.CreateSubChunk(FourCC.Data, st) as DataChunk;
            m_streamRecord = dck.BeginWrite();
#if !NDOC
            // check for support of selected format
            CheckWaveError(Wave.waveInOpen(out m_hWaveIn, WAVE_MAPPER, m_recformat, IntPtr.Zero, 0, WAVE_FORMAT_QUERY));

            // open wave device
            CheckWaveError(Wave.waveInOpen(out m_hWaveIn, (uint)m_deviceID, m_recformat, m_recmw.Hwnd, 0, CALLBACK_WINDOW));

            m_recBufferSize = (int)(Math.Min((int)Seconds, BufferLen) * m_recformat.SamplesPerSec * m_recformat.Channels);

            for (int i = 0; i < 2; i++)
            {
                WaveHeader hdr = GetNewRecordBuffer(m_recBufferSize);

                // send the buffer to the device
                CheckWaveError(Wave.waveInAddBuffer(m_hWaveIn, hdr.Header, hdr.HeaderLength));
            }

            // begin recording
            CheckWaveError(Wave.waveInStart(m_hWaveIn));
            recordingFinished = false;
            m_recTimer        = new Timer(new TimerCallback(RecTimerCallback), this, Seconds * 1000, Timeout.Infinite);
#endif
        }
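
A hedged usage sketch for RecordFor: "Recorder" is a placeholder for the owning class, and the sleep at the end stands in for the class's own completion notification; only the RecordFor signature and the SoundFormats value come from the example above.

// Hypothetical caller; "Recorder" stands in for the class that exposes RecordFor.
using System.IO;
using System.Threading;

public static class RecordDemo
{
    public static void RecordClip(string path)
    {
        Recorder recorder = new Recorder();      // assumed class name, not from the example
        FileStream fs = File.Create(path);       // RecordFor writes a complete RIFF/WAV file into this stream

        // Record five seconds of 16-bit mono audio at 11.025 kHz.
        recorder.RecordFor(fs, 5, SoundFormats.Mono16bit11kHz);

        // Recording finishes asynchronously (waveIn callbacks plus the timer set up
        // in RecordFor), so wait before closing the stream. A real caller would hook
        // the class's completion notification instead of sleeping.
        Thread.Sleep(6000);
        fs.Close();
    }
}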
Example #3
		/// <summary>
		/// Records sound data for the specified number of seconds using the given wave format.
		/// The stream will contain a properly formatted RIFF (WAV) file.
		/// </summary>
		/// <param name="st">Stream into which recorded samples are written</param>
		/// <param name="Seconds">Seconds of data to record</param>
		/// <param name="SoundFormat">Sound format to record in.</param>
		public void RecordFor(Stream st, short Seconds, SoundFormats SoundFormat)
		{
			m_hWaveIn	= IntPtr.Zero;

			// only allow 1 recording session at a time
			if(recording)
			{
				throw new InvalidOperationException("Already recording");
			}

			// set our global flag
			recording = true;

			if ( m_qBuffers == null )
				m_qBuffers = new Queue(MaxBuffers);
			if ( m_HandleMap  == null )
				m_HandleMap = new Hashtable(MaxBuffers);

			m_recformat	= new WaveFormatEx();
#if !NDOC
			// create the callback message window
			m_recmw = new SoundMessageWindow();
			m_recmw.WaveDoneMessage  += new WaveDoneHandler(m_recmw_WaveDoneMessage);
			m_recmw.WaveCloseMessage += new WaveCloseHandler(mw_WaveCloseMessage);
			m_recmw.WaveOpenMessage  += new WaveOpenHandler(mw_WaveOpenMessage);
#endif
			// set up format
			#region long format switch()
			switch(SoundFormat)
			{
				case SoundFormats.Mono16bit11kHz:
					m_recformat.Channels = 1;
					m_recformat.SamplesPerSec = 11025;
					m_recformat.BitsPerSample = 16;
					break;
				case SoundFormats.Mono16bit22kHz:
					m_recformat.Channels = 1;
					m_recformat.SamplesPerSec = 22050;
					m_recformat.BitsPerSample = 16;
					break;
				case SoundFormats.Mono16bit44kHz:
					m_recformat.Channels = 1;
					m_recformat.SamplesPerSec = 44100;
					m_recformat.BitsPerSample = 16;
					break;
				case SoundFormats.Mono8bit11kHz:
					m_recformat.Channels = 1;
					m_recformat.SamplesPerSec = 11025;
					m_recformat.BitsPerSample = 8;
					break;
				case SoundFormats.Mono8bit22kHz:
					m_recformat.Channels = 1;
					m_recformat.SamplesPerSec = 22050;
					m_recformat.BitsPerSample = 8;
					break;
				case SoundFormats.Mono8bit44kHz:
					m_recformat.Channels = 1;
					m_recformat.SamplesPerSec = 44100;
					m_recformat.BitsPerSample = 8;
					break;
				case SoundFormats.Stereo16bit11kHz:
					m_recformat.Channels = 2;
					m_recformat.SamplesPerSec = 11025;
					m_recformat.BitsPerSample = 16;
					break;
				case SoundFormats.Stereo16bit22kHz:
					m_recformat.Channels = 2;
					m_recformat.SamplesPerSec = 22050;
					m_recformat.BitsPerSample = 16;
					break;
				case SoundFormats.Stereo16bit44kHz:
					m_recformat.Channels = 2;
					m_recformat.SamplesPerSec = 44100;
					m_recformat.BitsPerSample = 16;
					break;
				case SoundFormats.Stereo8bit11kHz:
					m_recformat.Channels = 2;
					m_recformat.SamplesPerSec = 11025;
					m_recformat.BitsPerSample = 8;
					break;
				case SoundFormats.Stereo8bit22kHz:
					m_recformat.Channels = 2;
					m_recformat.SamplesPerSec = 22050;
					m_recformat.BitsPerSample = 8;
					break;
				case SoundFormats.Stereo8bit44kHz:
					m_recformat.Channels = 2;
					m_recformat.SamplesPerSec = 44100;
					m_recformat.BitsPerSample = 8;
					break;
			}
			#endregion long format switch()

			// derived PCM fields: BlockAlign is one sample frame (all channels at the
			// chosen bit depth), AvgBytesPerSec = SamplesPerSec * BlockAlign
			m_recformat.FormatTag = WAVE_FORMAT_PCM;
			m_recformat.BlockAlign = (short)((m_recformat.Channels * m_recformat.BitsPerSample) / 8);
			m_recformat.AvgBytesPerSec = m_recformat.SamplesPerSec * m_recformat.BlockAlign;
			m_recformat.Size = 0;

			m_ck = new RiffChunk(st);
			m_ck.BeginWrite();
			WaveChunk wck = m_ck.CreateSubChunk(FourCC.Wave, st) as WaveChunk;
			wck.BeginWrite();
			FmtChunk fck = wck.CreateSubChunk(FourCC.Fmt, st) as FmtChunk;
			fck.BeginWrite();
			fck.WaveFormat = m_recformat;
			fck.Write(fck.WaveFormat.GetBytes());
			fck.EndWrite();
			DataChunk dck = wck.CreateSubChunk(FourCC.Data, st) as DataChunk;
			m_streamRecord = dck.BeginWrite();
#if !NDOC
			// check for support of selected format
			CheckWaveError(Wave.waveInOpen(out m_hWaveIn, WAVE_MAPPER, m_recformat, IntPtr.Zero, 0, WAVE_FORMAT_QUERY));

			// open wave device
			CheckWaveError(Wave.waveInOpen(out m_hWaveIn, (uint)m_deviceID, m_recformat, m_recmw.Hwnd, 0, CALLBACK_WINDOW));

			m_recBufferSize = (int)(Math.Min( (int)Seconds, BufferLen ) * m_recformat.SamplesPerSec * m_recformat.Channels);
			
			for ( int i = 0; i < 2; i ++ )
			{
				WaveHeader hdr = GetNewRecordBuffer( m_recBufferSize );
		
				// send the buffer to the device
				CheckWaveError(Wave.waveInAddBuffer(m_hWaveIn, hdr.Header, hdr.HeaderLength));
			}

			// begin recording
			CheckWaveError(Wave.waveInStart(m_hWaveIn));
			recordingFinished = false;
			m_recTimer = new Timer(new TimerCallback(RecTimerCallback), this, Seconds * 1000, Timeout.Infinite);
#endif
		}
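
The long switch in both RecordFor examples only selects Channels, SamplesPerSec and BitsPerSample; the remaining PCM fields are derived from those three. A standalone sketch of that arithmetic (the helper name is illustrative, not part of the library):

// Illustrative helper showing the standard PCM (WAVEFORMATEX) derivations.
public static class PcmMath
{
    public static void Derive(short channels, int samplesPerSec, short bitsPerSample,
                              out short blockAlign, out int avgBytesPerSec)
    {
        // One block (sample frame) holds one sample per channel at the chosen bit depth.
        blockAlign = (short)(channels * bitsPerSample / 8);

        // Average data rate is frames per second times bytes per frame.
        avgBytesPerSec = samplesPerSec * blockAlign;
    }
}

For example, Stereo16bit44kHz gives BlockAlign = 4 and AvgBytesPerSec = 176,400 bytes per second.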
Example #4
        /// <summary>
        /// Plays the waveform contained in the given stream. The stream is expected to contain a full RIFF header.
        /// </summary>
        /// <param name="playStream">Stream with the waveform</param>
        public void Play(Stream playStream)
        {
            if (m_playing)
            {
                return;
            }


            MMChunk ck = MMChunk.FromStream(playStream);

            if (ck != null && ck.FourCC == FourCC.Riff)
            {
                if (ck[0] is WaveChunk)
                {
                    DataChunk dck = ck[0].FindChunk(FourCC.Data) as DataChunk;
                    if (dck != null)
                    {
                        playStream = dck.BeginRead();
                    }
                    FmtChunk fck = ck[0].FindChunk(FourCC.Fmt) as FmtChunk;
                    if (fck != null)
                    {
                        m_format = fck.WaveFormat;
                    }
                }
            }
            if (playStream == null)
            {
                throw new Exception("No valid WAV file has been opened");
            }

#if !NDOC
            if (m_qBuffers == null)
            {
                m_qBuffers = new Queue(MaxBuffers);
            }
            if (m_HandleMap == null)
            {
                m_HandleMap = new Hashtable(MaxBuffers);
            }

            // create a window to catch waveOutxxx messages
            SoundMessageWindow mw = new SoundMessageWindow();

            // wire in events
            mw.WaveOpenMessage  += new WaveOpenHandler(mw_WaveOpenMessage);
            mw.WaveCloseMessage += new WaveCloseHandler(mw_WaveCloseMessage);
            mw.WaveDoneMessage  += new WaveDoneHandler(mw_WaveDoneMessage);

            // add it to the global array
            int i = m_mwArray.Add(mw);
            m_streamArray.Add(playStream);

            // open the waveOut device and register the callback
            CheckWaveError(Wave.waveOutOpen(out m_hWaveOut, m_deviceID, m_format, ((SoundMessageWindow)m_mwArray[i]).Hwnd, 0, CALLBACK_WINDOW));

            RefillPlayBuffers();

            Monitor.Enter(m_qBuffers.SyncRoot);
            WaveHeader hdr = m_qBuffers.Dequeue() as WaveHeader;
            Monitor.Exit(m_qBuffers.SyncRoot);

            // play the file
            int ret = Wave.waveOutWrite(m_hWaveOut, hdr.Header, hdr.HeaderLength);
            CheckWaveError(ret);

            m_playing = true;
#endif
        }
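
For reference, Play expects the standard RIFF/WAVE layout that MMChunk.FromStream walks: a RIFF container holding a WAVE form with at least a 'fmt ' and a 'data' chunk. A minimal, independent sketch that merely lists the top-level chunks of such a stream (it does not use or replace the MMChunk classes above):

using System;
using System.IO;
using System.Text;

// Minimal sketch of the RIFF/WAVE layout Play expects; for illustration only.
public static class RiffDump
{
    public static void Dump(Stream s)
    {
        BinaryReader r = new BinaryReader(s);
        string riff = Encoding.ASCII.GetString(r.ReadBytes(4));   // "RIFF"
        int riffSize = r.ReadInt32();                              // total size minus 8 bytes
        string wave = Encoding.ASCII.GetString(r.ReadBytes(4));   // "WAVE"
        if (riff != "RIFF" || wave != "WAVE")
            throw new InvalidDataException("Not a RIFF/WAVE stream");

        // Each sub-chunk: 4-byte id, 4-byte little-endian size, then the data.
        while (s.Position + 8 <= s.Length)
        {
            string id = Encoding.ASCII.GetString(r.ReadBytes(4)); // e.g. "fmt " or "data"
            int size = r.ReadInt32();
            Console.WriteLine("{0}: {1} bytes at offset {2}", id, size, s.Position);
            s.Seek(size + (size & 1), SeekOrigin.Current);         // chunks are word-aligned
        }
    }
}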