Example #1
		virtual public MMChunk CreateSubChunk(FourCC fourCC, Stream st)
		{
			if ( fourCC == FourCC.Riff )
			{
				MMChunk ck = new RiffChunk(st);
				m_chunkList.Add(ck);
				return ck;
			}
			else if ( fourCC == FourCC.Wave )
			{
				MMChunk ck = new WaveChunk(st);
				m_chunkList.Add(ck);
				return ck;
			}
			else if ( fourCC == FourCC.Fmt )
			{
				MMChunk ck = new FmtChunk(st, null);
				m_chunkList.Add(ck);
				return ck;
			}
			else if ( fourCC == FourCC.Data )
			{
				MMChunk ck = new DataChunk(st);
				m_chunkList.Add(ck);
				return ck;
			}
			
			return null;
		}
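
The factory above recognizes only the four standard chunk types and falls through to null for anything else, so callers should check the result. A minimal usage sketch, assuming the chunk classes behave as they do in the RecordFor examples below (the helper itself is hypothetical, not part of the library):

		// Hypothetical helper, for illustration only: writes a bare RIFF/WAVE/fmt skeleton.
		static void WriteWaveSkeleton(Stream st, WaveFormatEx format)
		{
			RiffChunk riff = new RiffChunk(st);
			riff.BeginWrite();

			// CreateSubChunk returns the base MMChunk, so cast to the concrete chunk type.
			WaveChunk wave = riff.CreateSubChunk(FourCC.Wave, st) as WaveChunk;
			wave.BeginWrite();

			FmtChunk fmt = wave.CreateSubChunk(FourCC.Fmt, st) as FmtChunk;
			if (fmt == null)
			{
				// An unrecognized FourCC would have fallen through to "return null" above.
				throw new InvalidOperationException("fmt chunk not supported");
			}
			fmt.BeginWrite();
			fmt.WaveFormat = format;
			fmt.Write(fmt.WaveFormat.GetBytes());
			fmt.EndWrite();
		}
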
Example #2
        static public MMChunk FromStream(Stream st)
        {
            if (!st.CanRead)
            {
                return(null);
            }
            if (st.Position > st.Length - 4)
            {
                return(null);
            }
            byte[] data = new byte[4];
            st.Read(data, 0, 4);
            FourCC fourCC = BitConverter.ToInt32(data, 0);

            if (fourCC == FourCC.Riff)
            {
                RiffChunk ck = new RiffChunk(st);
                ck.LoadFromStream(st);
                return(ck);
            }
            else if (fourCC == FourCC.Wave)
            {
                WaveChunk ck = new WaveChunk(st);
                ck.LoadFromStream(st);
                return(ck);
            }
            else if (fourCC == FourCC.Fmt)
            {
                FmtChunk ck = new FmtChunk(st, null);
                ck.LoadFromStream(st);
                return(ck);
            }
            else if (fourCC == FourCC.Data)
            {
                DataChunk ck = new DataChunk(st);
                ck.LoadFromStream(st);
                return(ck);
            }
            else
            {
                return(new MMChunk(fourCC, st));
            }
        }
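
FromStream peeks at the next four bytes, maps the FourCC to a concrete chunk class (or a generic MMChunk for unknown codes) and loads it, so a stream can be walked chunk by chunk. A hedged sketch of such a loop, assuming FromStream is declared on MMChunk as the final new MMChunk(fourCC, st) suggests:

        // Hypothetical caller, for illustration only: lists the chunk types found in a stream.
        static void DumpChunks(Stream st)
        {
            MMChunk ck;

            // FromStream returns null once the stream is unreadable or fewer than
            // four bytes remain, which ends the walk.
            while ((ck = MMChunk.FromStream(st)) != null)
            {
                Console.WriteLine(ck.GetType().Name);
            }
        }
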
Example #3
		/// <summary>
		/// Records sound data for the specified number of seconds using the given wave format.
		/// The resulting stream will be a properly formatted RIFF file.
		/// </summary>
		/// <param name="st">Stream into which the recorded samples are written.</param>
		/// <param name="Seconds">Number of seconds of data to record.</param>
		/// <param name="SoundFormat">Sound format to record in.</param>
		public void RecordFor(Stream st, short Seconds, SoundFormats SoundFormat)
		{
			m_hWaveIn	= IntPtr.Zero;

			// only allow 1 recording session at a time
			if(recording)
			{
				throw new InvalidOperationException("Already recording");
			}

			// set our global flag
			recording = true;

			if ( m_qBuffers == null )
				m_qBuffers = new Queue(MaxBuffers);
			if ( m_HandleMap  == null )
				m_HandleMap = new Hashtable(MaxBuffers);

			m_recformat	= new WaveFormatEx();
#if !NDOC
			// create the callback message window
			m_recmw = new SoundMessageWindow();
			m_recmw.WaveDoneMessage += new WaveDoneHandler(m_recmw_WaveDoneMessage);
			m_recmw.WaveCloseMessage += new WaveCloseHandler(mw_WaveCloseMessage);
			m_recmw.WaveOpenMessage += new WaveOpenHandler(mw_WaveOpenMessage);
#endif
			// set up format
			#region long format switch()
			switch(SoundFormat)
			{
				case SoundFormats.Mono16bit11kHz:
					m_recformat.Channels = 1;
					m_recformat.SamplesPerSec = 11025;
					m_recformat.BitsPerSample = 16;
					break;
				case SoundFormats.Mono16bit22kHz:
					m_recformat.Channels = 1;
					m_recformat.SamplesPerSec = 22050;
					m_recformat.BitsPerSample = 16;
					break;
				case SoundFormats.Mono16bit44kHz:
					m_recformat.Channels = 1;
					m_recformat.SamplesPerSec = 44100;
					m_recformat.BitsPerSample = 16;
					break;
				case SoundFormats.Mono8bit11kHz:
					m_recformat.Channels = 1;
					m_recformat.SamplesPerSec = 11025;
					m_recformat.BitsPerSample = 8;
					break;
				case SoundFormats.Mono8bit22kHz:
					m_recformat.Channels = 1;
					m_recformat.SamplesPerSec = 22050;
					m_recformat.BitsPerSample = 8;
					break;
				case SoundFormats.Mono8bit44kHz:
					m_recformat.Channels = 1;
					m_recformat.SamplesPerSec = 44100;
					m_recformat.BitsPerSample = 8;
					break;
				case SoundFormats.Stereo16bit11kHz:
					m_recformat.Channels = 2;
					m_recformat.SamplesPerSec = 11025;
					m_recformat.BitsPerSample = 16;
					break;
				case SoundFormats.Stereo16bit22kHz:
					m_recformat.Channels = 2;
					m_recformat.SamplesPerSec = 22050;
					m_recformat.BitsPerSample = 16;
					break;
				case SoundFormats.Stereo16bit44kHz:
					m_recformat.Channels = 2;
					m_recformat.SamplesPerSec = 44100;
					m_recformat.BitsPerSample = 16;
					break;
				case SoundFormats.Stereo8bit11kHz:
					m_recformat.Channels = 2;
					m_recformat.SamplesPerSec = 11025;
					m_recformat.BitsPerSample = 8;
					break;
				case SoundFormats.Stereo8bit22kHz:
					m_recformat.Channels = 2;
					m_recformat.SamplesPerSec = 22050;
					m_recformat.BitsPerSample = 8;
					break;
				case SoundFormats.Stereo8bit44kHz:
					m_recformat.Channels = 2;
					m_recformat.SamplesPerSec = 44100;
					m_recformat.BitsPerSample = 8;
					break;
			}
			#endregion long format switch()

			m_recformat.FormatTag = WAVE_FORMAT_PCM;
			// average data rate = samples/sec * channels * bytes per sample
			m_recformat.AvgBytesPerSec = (m_recformat.SamplesPerSec * m_recformat.Channels * m_recformat.BitsPerSample) / 8;
			m_recformat.BlockAlign = (short)((m_recformat.Channels * m_recformat.BitsPerSample) / 8);
			m_recformat.Size = 0;

			m_ck = new RiffChunk(st);
			m_ck.BeginWrite();
			WaveChunk wck = m_ck.CreateSubChunk(FourCC.Wave, st) as WaveChunk;
			wck.BeginWrite();
			FmtChunk fck = wck.CreateSubChunk(FourCC.Fmt, st) as FmtChunk;
			fck.BeginWrite();
			fck.WaveFormat = m_recformat;
			fck.Write(fck.WaveFormat.GetBytes());
			fck.EndWrite();
			DataChunk dck = wck.CreateSubChunk(FourCC.Data, st) as DataChunk;
			m_streamRecord = dck.BeginWrite();
#if !NDOC
			// check for support of selected format
			CheckWaveError(Wave.waveInOpen(out m_hWaveIn, WAVE_MAPPER, m_recformat, IntPtr.Zero, 0, WAVE_FORMAT_QUERY));

			// open wave device
			CheckWaveError(Wave.waveInOpen(out m_hWaveIn, (uint)m_deviceID, m_recformat, m_recmw.Hwnd, 0, CALLBACK_WINDOW));

			m_recBufferSize = (int)(Math.Min( (int)Seconds, BufferLen ) * m_recformat.SamplesPerSec * m_recformat.Channels);
			
			for ( int i = 0; i < 2; i ++ )
			{
				WaveHeader hdr = GetNewRecordBuffer( m_recBufferSize );
		
				// send the buffer to the device
				CheckWaveError(Wave.waveInAddBuffer(m_hWaveIn, hdr.Header, hdr.HeaderLength));
			}

			// begin recording
			CheckWaveError(Wave.waveInStart(m_hWaveIn));
			recordingFinished = false;
			m_recTimer = new Timer(new TimerCallback(RecTimerCallback), this, Seconds * 1000, Timeout.Infinite);
#endif
		}
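
Calling RecordFor only requires a writable, seekable stream, a duration in seconds and one of the SoundFormats values; the chunk skeleton and buffer handling are set up internally, and the call returns while recording continues in the background. A hedged usage sketch (the recorder variable stands for an instance of whatever class defines RecordFor; that name is an assumption, not shown by the example):

			// Hypothetical usage: record five seconds of 16-bit mono audio at 22 kHz.
			// "recorder" is assumed to be an instance of the class that defines RecordFor above.
			FileStream fs = new FileStream("sample.wav", FileMode.Create, FileAccess.ReadWrite);
			recorder.RecordFor(fs, 5, SoundFormats.Mono16bit22kHz);

			// RecordFor returns immediately; the internal timer stops capture after the
			// requested duration, so the stream must stay open until recording has finished.
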
Example #4
		static public MMChunk FromStream(Stream st)
		{
			if ( !st.CanRead )
				return null;
			if ( st.Position > st.Length - 4 )
				return null;
			byte[] data = new byte[4];
			st.Read(data, 0, 4);
			FourCC fourCC = BitConverter.ToInt32(data, 0);
			if ( fourCC == FourCC.Riff )
			{
				RiffChunk ck = new RiffChunk(st);
				ck.LoadFromStream(st);
				return ck;
			}
			else if ( fourCC == FourCC.Wave )
			{
				WaveChunk ck = new WaveChunk(st);
				ck.LoadFromStream(st);
				return ck;
			}
			else if ( fourCC == FourCC.Fmt )
			{
				FmtChunk ck = new FmtChunk(st, null);
				ck.LoadFromStream(st);
				return ck;
			}
			else if ( fourCC == FourCC.Data )
			{
				DataChunk ck = new DataChunk(st);
				ck.LoadFromStream(st);
				return ck;
			}
			else
				return new MMChunk(fourCC, st);
		}
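
Both FromStream variants compare the 32-bit value read by BitConverter.ToInt32 directly against FourCC constants, which relies on a FourCC being the four ASCII characters of the chunk tag packed little-endian into an int (and on an implicit int/FourCC conversion in the library). A small worked sketch of that packing, purely for illustration:

		// Illustration only: how a four-character tag maps to the int produced by
		// BitConverter.ToInt32 on a little-endian machine, e.g. "RIFF" -> 0x46464952.
		static int PackFourCC(string tag)
		{
			byte[] b = System.Text.Encoding.ASCII.GetBytes(tag);
			return b[0] | (b[1] << 8) | (b[2] << 16) | (b[3] << 24);
		}
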
Example #5
        /// <summary>
        /// Records sound data for the specified number of seconds using the given wave format.
        /// The resulting stream will be a properly formatted RIFF file.
        /// </summary>
        /// <param name="st">Stream into which the recorded samples are written.</param>
        /// <param name="Seconds">Number of seconds of data to record.</param>
        /// <param name="SoundFormat">Sound format to record in.</param>
        public void RecordFor(Stream st, short Seconds, SoundFormats SoundFormat)
        {
            m_hWaveIn = IntPtr.Zero;

            // only allow 1 recording session at a time
            if (recording)
            {
                throw new InvalidOperationException("Already recording");
            }

            // set our global flag
            recording = true;

            if (m_qBuffers == null)
            {
                m_qBuffers = new Queue(MaxBuffers);
            }
            if (m_HandleMap == null)
            {
                m_HandleMap = new Hashtable(MaxBuffers);
            }

            m_recformat = new WaveFormatEx();
#if !NDOC
            // create the callback message window
            m_recmw = new SoundMessageWindow();
            m_recmw.WaveDoneMessage  += new WaveDoneHandler(m_recmw_WaveDoneMessage);
            m_recmw.WaveCloseMessage += new WaveCloseHandler(mw_WaveCloseMessage);
            m_recmw.WaveOpenMessage  += new WaveOpenHandler(mw_WaveOpenMessage);
#endif
            // set up format
            #region long format switch()
            switch (SoundFormat)
            {
            case SoundFormats.Mono16bit11kHz:
                m_recformat.Channels      = 1;
                m_recformat.SamplesPerSec = 11025;
                m_recformat.BitsPerSample = 16;
                break;

            case SoundFormats.Mono16bit22kHz:
                m_recformat.Channels      = 1;
                m_recformat.SamplesPerSec = 22050;
                m_recformat.BitsPerSample = 16;
                break;

            case SoundFormats.Mono16bit44kHz:
                m_recformat.Channels      = 1;
                m_recformat.SamplesPerSec = 44100;
                m_recformat.BitsPerSample = 16;
                break;

            case SoundFormats.Mono8bit11kHz:
                m_recformat.Channels      = 1;
                m_recformat.SamplesPerSec = 11025;
                m_recformat.BitsPerSample = 8;
                break;

            case SoundFormats.Mono8bit22kHz:
                m_recformat.Channels      = 1;
                m_recformat.SamplesPerSec = 22050;
                m_recformat.BitsPerSample = 8;
                break;

            case SoundFormats.Mono8bit44kHz:
                m_recformat.Channels      = 1;
                m_recformat.SamplesPerSec = 44100;
                m_recformat.BitsPerSample = 8;
                break;

            case SoundFormats.Stereo16bit11kHz:
                m_recformat.Channels      = 2;
                m_recformat.SamplesPerSec = 11025;
                m_recformat.BitsPerSample = 16;
                break;

            case SoundFormats.Stereo16bit22kHz:
                m_recformat.Channels      = 2;
                m_recformat.SamplesPerSec = 22050;
                m_recformat.BitsPerSample = 16;
                break;

            case SoundFormats.Stereo16bit44kHz:
                m_recformat.Channels      = 2;
                m_recformat.SamplesPerSec = 44100;
                m_recformat.BitsPerSample = 16;
                break;

            case SoundFormats.Stereo8bit11kHz:
                m_recformat.Channels      = 2;
                m_recformat.SamplesPerSec = 11025;
                m_recformat.BitsPerSample = 8;
                break;

            case SoundFormats.Stereo8bit22kHz:
                m_recformat.Channels      = 2;
                m_recformat.SamplesPerSec = 22050;
                m_recformat.BitsPerSample = 8;
                break;

            case SoundFormats.Stereo8bit44kHz:
                m_recformat.Channels      = 2;
                m_recformat.SamplesPerSec = 44100;
                m_recformat.BitsPerSample = 8;
                break;
            }
            #endregion long format switch()

            m_recformat.FormatTag      = WAVE_FORMAT_PCM;
            // average data rate = samples/sec * channels * bytes per sample
            m_recformat.AvgBytesPerSec = (m_recformat.SamplesPerSec * m_recformat.Channels * m_recformat.BitsPerSample) / 8;
            m_recformat.BlockAlign     = (short)((m_recformat.Channels * m_recformat.BitsPerSample) / 8);
            m_recformat.Size           = 0;

            m_ck = new RiffChunk(st);
            m_ck.BeginWrite();
            WaveChunk wck = m_ck.CreateSubChunk(FourCC.Wave, st) as WaveChunk;
            wck.BeginWrite();
            FmtChunk fck = wck.CreateSubChunk(FourCC.Fmt, st) as FmtChunk;
            fck.BeginWrite();
            fck.WaveFormat = m_recformat;
            fck.Write(fck.WaveFormat.GetBytes());
            fck.EndWrite();
            DataChunk dck = wck.CreateSubChunk(FourCC.Data, st) as DataChunk;
            m_streamRecord = dck.BeginWrite();
#if !NDOC
            // check for support of selected format
            CheckWaveError(Wave.waveInOpen(out m_hWaveIn, WAVE_MAPPER, m_recformat, IntPtr.Zero, 0, WAVE_FORMAT_QUERY));

            // open wave device
            CheckWaveError(Wave.waveInOpen(out m_hWaveIn, (uint)m_deviceID, m_recformat, m_recmw.Hwnd, 0, CALLBACK_WINDOW));

            m_recBufferSize = (int)(Math.Min((int)Seconds, BufferLen) * m_recformat.SamplesPerSec * m_recformat.Channels);

            for (int i = 0; i < 2; i++)
            {
                WaveHeader hdr = GetNewRecordBuffer(m_recBufferSize);

                // send the buffer to the device
                CheckWaveError(Wave.waveInAddBuffer(m_hWaveIn, hdr.Header, hdr.HeaderLength));
            }

            // begin recording
            CheckWaveError(Wave.waveInStart(m_hWaveIn));
            recordingFinished = false;
            m_recTimer        = new Timer(new TimerCallback(RecTimerCallback), this, Seconds * 1000, Timeout.Infinite);
#endif
        }
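
For PCM data the derived header fields follow directly from the format definition: BlockAlign is Channels * BitsPerSample / 8 (bytes per sample frame) and AvgBytesPerSec is SamplesPerSec * BlockAlign. A standalone sketch of that arithmetic (the helper name and plain int types are illustrative, not library API):

        // Illustration only: derived PCM fields for, e.g., 16-bit stereo at 44.1 kHz.
        static void PrintPcmDerivedFields(int samplesPerSec, int channels, int bitsPerSample)
        {
            int blockAlign = (channels * bitsPerSample) / 8;    // 2 * 16 / 8 = 4 bytes per frame
            int avgBytesPerSec = samplesPerSec * blockAlign;    // 44100 * 4 = 176400 bytes/sec
            Console.WriteLine("BlockAlign={0}, AvgBytesPerSec={1}", blockAlign, avgBytesPerSec);
        }
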