Example #1
        /// <summary>
        /// Cleans up any resources being used.
        /// </summary>
        public void Dispose()
        {
            if (m_IsDisposed)
            {
                return;
            }
            m_IsDisposed = true;

            try
            {
                // If recording, we need to reset wav device first.
                waveInReset(m_pWavDevHandle);

                // If there are unprepared wav headers, we need to unprepare these.
                foreach (BufferItem item in m_pBuffers.Values)
                {
                    item.Dispose();
                }

                // Close input device.
                waveInClose(m_pWavDevHandle);

                m_pInDevice     = null;
                m_pWavDevHandle = IntPtr.Zero;

                this.AudioFrameReceived = null;
            }
            catch
            {
                // Swallow cleanup errors; Dispose must not throw.
            }
        }
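Because _WaveIn exposes Dispose(), callers can trigger the waveInReset/waveInClose cleanup above deterministically with a using block. A minimal sketch, assuming _WaveIn implements IDisposable and reusing the constructor shown in Example #2 below; the device variable is assumed to come from the library's device enumeration:

        // Hedged usage sketch: "device" is an AudioInDevice obtained elsewhere.
        using (_WaveIn waveIn = new _WaveIn(device, 8000, 16, 1, 320))
        {
            // ... start capture and consume AudioFrameReceived events ...
        } // Dispose() runs here even if the body throws, releasing the native wave-in handle.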
Example #2
        /// <summary>
        /// Default constructor.
        /// </summary>
        /// <param name="device">Input device.</param>
        /// <param name="samplesPerSec">Sample rate, in samples per second (hertz). For PCM common values are
        /// 8.0 kHz, 11.025 kHz, 22.05 kHz, and 44.1 kHz.</param>
        /// <param name="bitsPerSample">Bits per sample. For PCM 8 or 16 are the only valid values.</param>
        /// <param name="channels">Number of channels.</param>
        /// <param name="bufferSize">Specifies recording buffer size.</param>
        /// <exception cref="ArgumentNullException">Is raised when <b>outputDevice</b> is null.</exception>
        /// <exception cref="ArgumentException">Is raised when any of the aruments has invalid value.</exception>
        public _WaveIn(AudioInDevice device, int samplesPerSec, int bitsPerSample, int channels, int bufferSize)
        {
            if (device == null)
            {
                throw new ArgumentNullException("device");
            }
            if (samplesPerSec < 8000)
            {
                throw new ArgumentException("Argument 'samplesPerSec' value must be >= 8000.");
            }
            if (bitsPerSample < 8)
            {
                throw new ArgumentException("Argument 'bitsPerSample' value must be >= 8.");
            }
            if (channels < 1)
            {
                throw new ArgumentException("Argument 'channels' value must be >= 1.");
            }

            m_pInDevice     = device;
            m_SamplesPerSec = samplesPerSec;
            m_BitsPerSample = bitsPerSample;
            m_Channels      = channels;
            m_BufferSize    = bufferSize;
            m_BlockSize     = m_Channels * (m_BitsPerSample / 8);
            m_pBuffers      = new Dictionary <long, BufferItem>();

            // Try to open wav device.
            WAVEFORMATEX format = new WAVEFORMATEX();

            format.wFormatTag      = WavFormat.PCM;
            format.nChannels       = (ushort)m_Channels;
            format.nSamplesPerSec  = (uint)samplesPerSec;
            format.nAvgBytesPerSec = (uint)(m_SamplesPerSec * (m_Channels * (m_BitsPerSample / 8)));
            format.nBlockAlign     = (ushort)m_BlockSize;
            format.wBitsPerSample  = (ushort)m_BitsPerSample;
            format.cbSize          = 0;
            // We must keep a reference to the delegate, otherwise the GC will collect it.
            m_pWaveInProc = new waveInProc(this.OnWaveInProc);

            int result = waveInOpen(out m_pWavDevHandle, m_pInDevice.Index, format, m_pWaveInProc, 0, WavConstants.CALLBACK_FUNCTION);

            if (result != MMSYSERR.NOERROR)
            {
                throw new Exception("Failed to open wav device, error: " + result.ToString() + ".");
            }

            CreateBuffers();
        }
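For reference, a hedged sketch of a call site for this constructor. The AudioIn.Devices enumeration is an assumption about the surrounding library; the buffer size follows the same block-size arithmetic used above (channels * bitsPerSample / 8 bytes per sample frame):

        // Hypothetical call site: 8 kHz, 16-bit, mono PCM capture.
        // AudioIn.Devices is an assumed way to enumerate AudioInDevice instances.
        AudioInDevice device = AudioIn.Devices[0];

        // Block size = 1 channel * (16 / 8) = 2 bytes, so a 20 ms buffer is 8000 * 0.020 * 2 = 320 bytes.
        _WaveIn waveIn = new _WaveIn(device, 8000, 16, 1, 320);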
Example #3
        /// <summary>
        /// Cleans up any resources being used.
        /// </summary>
        public void Dispose()
        {
            if (m_IsDisposed)
            {
                return;
            }

            Stop();

            m_IsDisposed = true;

            this.Error       = null;
            m_pAudioInDevice = null;
            m_pAudioCodecs   = null;
            m_pRTP_Stream.Session.PayloadChanged -= new EventHandler(m_pRTP_Stream_PayloadChanged);
            m_pRTP_Stream  = null;
            m_pActiveCodec = null;
        }
Example #4
        /// <summary>
        /// Default constructor.
        /// </summary>
        /// <param name="audioInDevice">Audio-in device to capture.</param>
        /// <param name="audioFrameSize">Audio frame size in milliseconds.</param>
        /// <param name="codecs">Audio codecs with RTP payload number. For example: 0-PCMU,8-PCMA.</param>
        /// <param name="stream">RTP stream to use for audio sending.</param>
        /// <exception cref="ArgumentNullException">Is raised when <b>audioInDevice</b>,<b>codecs</b> or <b>stream</b> is null reference.</exception>
        public AudioIn_RTP(AudioInDevice audioInDevice, int audioFrameSize, Dictionary <int, AudioCodec> codecs, RTP_SendStream stream)
        {
            if (audioInDevice == null)
            {
                throw new ArgumentNullException("audioInDevice");
            }
            if (codecs == null)
            {
                throw new ArgumentNullException("codecs");
            }
            if (stream == null)
            {
                throw new ArgumentNullException("stream");
            }

            m_pAudioInDevice = audioInDevice;
            m_AudioFrameSize = audioFrameSize;
            m_pAudioCodecs   = codecs;
            m_pRTP_Stream    = stream;

            m_pRTP_Stream.Session.PayloadChanged += new EventHandler(m_pRTP_Stream_PayloadChanged);
            m_pAudioCodecs.TryGetValue(m_pRTP_Stream.Session.Payload, out m_pActiveCodec);
        }
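A hedged usage sketch for the constructor above. The PCMU/PCMA codec classes, the Start() call, and the audioInDevice/rtpSendStream variables are assumptions about the surrounding library, following the 0-PCMU, 8-PCMA example from the codecs parameter documentation:

        // Hypothetical call site: map static RTP payload types to codecs (0 = PCMU, 8 = PCMA).
        Dictionary<int, AudioCodec> codecs = new Dictionary<int, AudioCodec>();
        codecs.Add(0, new PCMU());   // assumed G.711 mu-law codec implementation
        codecs.Add(8, new PCMA());   // assumed G.711 A-law codec implementation

        // audioInDevice and rtpSendStream are assumed to be created elsewhere
        // (device enumeration and an RTP session's send stream, respectively).
        AudioIn_RTP audioInRtp = new AudioIn_RTP(audioInDevice, 20, codecs, rtpSendStream);
        audioInRtp.Start();   // Start() is assumed; the constructor itself only wires up state.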