/// <summary>
/// Cleans up any resources being used. Safe to call multiple times; only the
/// first call performs the teardown. Never throws.
/// </summary>
public void Dispose()
{
    if (m_IsDisposed) {
        return;
    }
    m_IsDisposed = true;

    try{
        // Serialize teardown against the native waveIn callback so buffers are not
        // touched while we release them.
        lock (m_pLock){
            // If recording, we need to reset wav device first.
            waveInReset(m_pWavDevHandle);

            // If there are unprepared wav headers, we need to unprepare these.
            // NOTE(review): relies on BufferItem.Dispose to unprepare/free its wav
            // header — confirm, since a sibling Dispose variant in this file calls
            // waveInUnprepareHeader explicitly before disposing each item.
            foreach (BufferItem item in m_pBuffers.Values) {
                item.Dispose();
            }

            // Close input device.
            waveInClose(m_pWavDevHandle);

            m_pInDevice = null;
            m_pWavDevHandle = IntPtr.Zero;

            // Drop all subscribers so disposed instance does not keep listeners alive.
            this.AudioFrameReceived = null;
        }
    }
    catch {
        // Deliberate best-effort: native cleanup errors are swallowed because
        // Dispose must not throw.
    }
}
/// <summary>
/// Default constructor. Opens the specified wav-in device for recording and
/// allocates the recording buffers.
/// </summary>
/// <param name="device">Input device.</param>
/// <param name="samplesPerSec">Sample rate, in samples per second (hertz). For PCM common values are
/// 8.0 kHz, 11.025 kHz, 22.05 kHz, and 44.1 kHz.</param>
/// <param name="bitsPerSample">Bits per sample. For PCM 8 or 16 are the only valid values.</param>
/// <param name="channels">Number of channels.</param>
/// <param name="bufferSize">Specifies recording buffer size.</param>
/// <exception cref="ArgumentNullException">Is raised when <b>device</b> is null.</exception>
/// <exception cref="ArgumentException">Is raised when any of the arguments has invalid value.</exception>
public _WaveIn(AudioInDevice device, int samplesPerSec, int bitsPerSample, int channels, int bufferSize)
{
    if (device == null) {
        throw new ArgumentNullException("device");
    }
    if (samplesPerSec < 8000) {
        throw new ArgumentException("Argument 'samplesPerSec' value must be >= 8000.");
    }
    if (bitsPerSample < 8) {
        throw new ArgumentException("Argument 'bitsPerSample' value must be >= 8.");
    }
    if (channels < 1) {
        throw new ArgumentException("Argument 'channels' value must be >= 1.");
    }
    // FIX: bufferSize was previously not validated; an invalid value only
    // surfaced later, deep inside buffer creation.
    if (bufferSize < 1) {
        throw new ArgumentException("Argument 'bufferSize' value must be >= 1.");
    }

    m_pInDevice     = device;
    m_SamplesPerSec = samplesPerSec;
    m_BitsPerSample = bitsPerSample;
    m_Channels      = channels;
    m_BufferSize    = bufferSize;
    // Bytes per sample frame: one (bitsPerSample/8)-byte sample per channel.
    m_BlockSize     = m_Channels * (m_BitsPerSample / 8);
    m_pBuffers      = new Dictionary<long,BufferItem>();

    // Try to open wav device.
    WAVEFORMATEX format = new WAVEFORMATEX();
    format.wFormatTag      = WavFormat.PCM;
    format.nChannels       = (ushort)m_Channels;
    format.nSamplesPerSec  = (uint)samplesPerSec;
    format.nAvgBytesPerSec = (uint)(m_SamplesPerSec * m_BlockSize);
    format.nBlockAlign     = (ushort)m_BlockSize;
    format.wBitsPerSample  = (ushort)m_BitsPerSample;
    format.cbSize          = 0;
    // We must keep the callback delegate referenced in a field, otherwise the GC
    // may collect it while the native waveIn side still holds a pointer to it.
    m_pWaveInProc = new waveInProc(this.OnWaveInProc);
    int result = waveInOpen(out m_pWavDevHandle, m_pInDevice.Index, format, m_pWaveInProc, 0, WavConstants.CALLBACK_FUNCTION);
    if (result != MMSYSERR.NOERROR) {
        throw new Exception("Failed to open wav device, error: " + result.ToString() + ".");
    }

    CreateBuffers();
}
/// <summary>
/// Cleans up any resources being used. Subsequent calls are no-ops.
/// </summary>
public void Dispose()
{
    if (m_IsDisposed) {
        return;
    }

    Stop();

    m_IsDisposed = true;

    // Detach from the RTP session so this disposed object is no longer kept
    // alive (or called back) through the PayloadChanged event.
    m_pRTP_Stream.Session.PayloadChanged -= new EventHandler(m_pRTP_Stream_PayloadChanged);

    // Release references to help GC and make accidental reuse fail fast.
    this.Error       = null;
    m_pAudioInDevice = null;
    m_pAudioCodecs   = null;
    m_pRTP_Stream    = null;
    m_pActiveCodec   = null;
}
/// <summary>
/// Cleans up any resources being used. Calling Dispose more than once has no effect.
/// </summary>
public void Dispose()
{
    if (m_IsDisposed) {
        return;
    }

    Stop();

    m_IsDisposed = true;

    // Unhook the payload-change handler first, while m_pRTP_Stream is still set;
    // otherwise the session would keep a reference to this disposed instance.
    m_pRTP_Stream.Session.PayloadChanged -= new EventHandler(m_pRTP_Stream_PayloadChanged);

    // Clear all state references.
    this.Error       = null;
    m_pAudioInDevice = null;
    m_pAudioCodecs   = null;
    m_pRTP_Stream    = null;
    m_pActiveCodec   = null;
}
/// <summary>
/// Default constructor. Opens the specified wav-in device for recording and
/// allocates the recording buffers.
/// </summary>
/// <param name="device">Input device.</param>
/// <param name="samplesPerSec">Sample rate, in samples per second (hertz). For PCM common values are
/// 8.0 kHz, 11.025 kHz, 22.05 kHz, and 44.1 kHz.</param>
/// <param name="bitsPerSample">Bits per sample. For PCM 8 or 16 are the only valid values.</param>
/// <param name="channels">Number of channels.</param>
/// <param name="bufferSize">Specifies recording buffer size.</param>
/// <exception cref="ArgumentNullException">Is raised when <b>device</b> is null.</exception>
/// <exception cref="ArgumentException">Is raised when any of the arguments has invalid value.</exception>
public _WaveIn(AudioInDevice device, int samplesPerSec, int bitsPerSample, int channels, int bufferSize)
{
    if (device == null) {
        throw new ArgumentNullException("device");
    }
    if (samplesPerSec < 8000) {
        throw new ArgumentException("Argument 'samplesPerSec' value must be >= 8000.");
    }
    if (bitsPerSample < 8) {
        throw new ArgumentException("Argument 'bitsPerSample' value must be >= 8.");
    }
    if (channels < 1) {
        throw new ArgumentException("Argument 'channels' value must be >= 1.");
    }
    // FIX: bufferSize was previously not validated; an invalid value only
    // surfaced later, deep inside buffer creation.
    if (bufferSize < 1) {
        throw new ArgumentException("Argument 'bufferSize' value must be >= 1.");
    }

    m_pInDevice     = device;
    m_SamplesPerSec = samplesPerSec;
    m_BitsPerSample = bitsPerSample;
    m_Channels      = channels;
    m_BufferSize    = bufferSize;
    // Bytes per sample frame: one (bitsPerSample/8)-byte sample per channel.
    m_BlockSize     = m_Channels * (m_BitsPerSample / 8);
    m_pBuffers      = new Dictionary<long,BufferItem>();

    // Try to open wav device.
    WAVEFORMATEX format = new WAVEFORMATEX();
    format.wFormatTag      = WavFormat.PCM;
    format.nChannels       = (ushort)m_Channels;
    format.nSamplesPerSec  = (uint)samplesPerSec;
    format.nAvgBytesPerSec = (uint)(m_SamplesPerSec * m_BlockSize);
    format.nBlockAlign     = (ushort)m_BlockSize;
    format.wBitsPerSample  = (ushort)m_BitsPerSample;
    format.cbSize          = 0;
    // We must keep the callback delegate referenced in a field, otherwise the GC
    // may collect it while the native waveIn side still holds a pointer to it.
    m_pWaveInProc = new waveInProc(this.OnWaveInProc);
    int result = waveInOpen(out m_pWavDevHandle, m_pInDevice.Index, format, m_pWaveInProc, 0, WavConstants.CALLBACK_FUNCTION);
    if (result != MMSYSERR.NOERROR) {
        throw new Exception("Failed to open wav device, error: " + result.ToString() + ".");
    }

    CreateBuffers();
}
/// <summary>
/// Default constructor.
/// </summary>
/// <param name="audioInDevice">Audio-in device to capture.</param>
/// <param name="audioFrameSize">Audio frame size in milliseconds.</param>
/// <param name="codecs">Audio codecs with RTP payload number. For example: 0-PCMU,8-PCMA.</param>
/// <param name="stream">RTP stream to use for audio sending.</param>
/// <exception cref="ArgumentNullException">Is raised when <b>audioInDevice</b>,<b>codecs</b> or <b>stream</b> is null reference.</exception>
public AudioIn_RTP(AudioInDevice audioInDevice, int audioFrameSize, Dictionary<int,AudioCodec> codecs, RTP_SendStream stream)
{
    if (audioInDevice == null) {
        throw new ArgumentNullException("audioInDevice");
    }
    if (codecs == null) {
        throw new ArgumentNullException("codecs");
    }
    if (stream == null) {
        throw new ArgumentNullException("stream");
    }

    m_pAudioInDevice = audioInDevice;
    m_AudioFrameSize = audioFrameSize;
    m_pAudioCodecs   = codecs;
    m_pRTP_Stream    = stream;

    // Follow session payload changes so the active codec always matches
    // the negotiated RTP payload type.
    m_pRTP_Stream.Session.PayloadChanged += new EventHandler(m_pRTP_Stream_PayloadChanged);
    // Resolve the initial codec; m_pActiveCodec stays null when the current
    // payload has no entry in the codec map.
    m_pAudioCodecs.TryGetValue(m_pRTP_Stream.Session.Payload, out m_pActiveCodec);
}
/// <summary>
/// Default constructor.
/// </summary>
/// <param name="audioInDevice">Audio-in device to capture.</param>
/// <param name="audioFrameSize">Audio frame size in milliseconds.</param>
/// <param name="codecs">Audio codecs with RTP payload number. For example: 0-PCMU,8-PCMA.</param>
/// <param name="stream">RTP stream to use for audio sending.</param>
/// <exception cref="ArgumentNullException">Is raised when <b>audioInDevice</b>,<b>codecs</b> or <b>stream</b> is null reference.</exception>
public AudioIn_RTP(AudioInDevice audioInDevice, int audioFrameSize, Dictionary<int,AudioCodec> codecs, RTP_SendStream stream)
{
    // Guard clauses: all reference arguments are mandatory.
    if (audioInDevice == null) {
        throw new ArgumentNullException("audioInDevice");
    }
    if (codecs == null) {
        throw new ArgumentNullException("codecs");
    }
    if (stream == null) {
        throw new ArgumentNullException("stream");
    }

    m_pAudioInDevice = audioInDevice;
    m_AudioFrameSize = audioFrameSize;
    m_pAudioCodecs   = codecs;
    m_pRTP_Stream    = stream;

    // Subscribe so codec selection tracks the session's payload type.
    m_pRTP_Stream.Session.PayloadChanged += new EventHandler(m_pRTP_Stream_PayloadChanged);
    // Initial codec lookup (null when the payload is not in the map).
    m_pAudioCodecs.TryGetValue(m_pRTP_Stream.Session.Payload, out m_pActiveCodec);
}
/// <summary>
/// Default constructor. Opens the device and starts capturing immediately.
/// </summary>
/// <param name="device">Audio input device.</param>
/// <param name="samplesPerSec">Sample rate, in samples per second (hertz).</param>
/// <param name="bitsPerSample">Bits per sample. For PCM 8 or 16 are the only valid values.</param>
/// <param name="channels">Number of channels.</param>
/// <exception cref="ArgumentNullException">Is raised when <b>device</b> is null reference.</exception>
/// <exception cref="ArgumentException">Is raised when any of the arguments has invalid value.</exception>
public AudioIn(AudioInDevice device, int samplesPerSec, int bitsPerSample, int channels)
{
    if (device == null) {
        throw new ArgumentNullException("device");
    }
    if (samplesPerSec < 1) {
        throw new ArgumentException("Argument 'samplesPerSec' value must be >= 1.", "samplesPerSec");
    }
    if (bitsPerSample < 8) {
        throw new ArgumentException("Argument 'bitsPerSample' value must be >= 8.", "bitsPerSample");
    }
    if (channels < 1) {
        throw new ArgumentException("Argument 'channels' value must be >= 1.", "channels");
    }

    m_pDevice       = device;
    m_SamplesPerSec = samplesPerSec;
    m_BitsPerSample = bitsPerSample;
    m_Channels      = channels;

    // NOTE(review): 320 is the recording buffer size handed to WaveIn — presumably
    // ~20 ms of 8 kHz / 16-bit mono audio; confirm the unit against WaveIn.
    m_pWaveIn = new WaveIn(device, samplesPerSec, bitsPerSample, channels, 320);
    m_pWaveIn.Start();
}
/// <summary>
/// Initializes SIP stack: verifies audio hardware, builds the codec map,
/// probes NAT traversal options (STUN/UPnP) into the toolbar menu, then
/// creates and starts the SIP stack.
/// Returns early (stack not started) when no speakers or no microphone exist.
/// </summary>
private void InitStack()
{
    #region Init audio devices

    // Bail out early when playback or capture hardware is missing — calling
    // is impossible without both.
    if (AudioOut.Devices.Length == 0) {
        MessageBox.Show("Calling not possible, there are no speakers in computer.", "Error:", MessageBoxButtons.OK, MessageBoxIcon.Error);

        return;
    }
    if (AudioIn.Devices.Length == 0) {
        MessageBox.Show("Calling not possible, there is no microphone in computer.", "Error:", MessageBoxButtons.OK, MessageBoxIcon.Error);

        return;
    }
    // Default to the first available devices.
    m_pAudioOutDevice = AudioOut.Devices[0];
    m_pAudioInDevice = AudioIn.Devices[0];

    // RTP payload number -> codec map: 0 = PCMU, 8 = PCMA.
    m_pAudioCodecs = new Dictionary<int, AudioCodec>();
    m_pAudioCodecs.Add(0, new PCMU());
    m_pAudioCodecs.Add(8, new PCMA());

    m_pPlayer = new WavePlayer(AudioOut.Devices[0]);

    #endregion

    #region Get NAT handling methods

    m_pUPnP = new UPnP_NAT_Client();

    // Probe the STUN server (port 3478 is the standard STUN port); any network
    // failure leaves the result as UdpBlocked (worst case).
    STUN_Result stunResult = new STUN_Result(STUN_NetType.UdpBlocked, null);
    try {
        stunResult = STUN_Client.Query(m_StunServer, 3478, new IPEndPoint(IPAddress.Any, 0));
    }
    catch {
        // Best effort: STUN query errors are ignored; stunResult stays UdpBlocked.
    }

    // Add the STUN menu item; disable it when the detected NAT type cannot be
    // traversed with STUN (symmetric NAT or UDP blocked entirely).
    if (stunResult.NetType == STUN_NetType.Symmetric || stunResult.NetType == STUN_NetType.UdpBlocked) {
        ToolStripMenuItem item_stun = new ToolStripMenuItem("STUN (" + stunResult.NetType + ")");
        item_stun.Name = "stun";
        item_stun.Enabled = false;
        ((ToolStripDropDownButton)m_pToolbar.Items["nat"]).DropDownItems.Add(item_stun);
    }
    else {
        ToolStripMenuItem item_stun = new ToolStripMenuItem("STUN (" + stunResult.NetType + ")");
        item_stun.Name = "stun";
        ((ToolStripDropDownButton)m_pToolbar.Items["nat"]).DropDownItems.Add(item_stun);
    }

    // Add the UPnP menu item; disabled when the gateway doesn't support UPnP.
    if (m_pUPnP.IsSupported) {
        ToolStripMenuItem item_upnp = new ToolStripMenuItem("UPnP");
        item_upnp.Name = "upnp";
        ((ToolStripDropDownButton)m_pToolbar.Items["nat"]).DropDownItems.Add(item_upnp);
    }
    else {
        ToolStripMenuItem item_upnp = new ToolStripMenuItem("UPnP Not Supported");
        item_upnp.Name = "upnp";
        item_upnp.Enabled = false;
        ((ToolStripDropDownButton)m_pToolbar.Items["nat"]).DropDownItems.Add(item_upnp);
    }

    //if(!((ToolStripDropDownButton)m_pToolbar.Items["nat"]).DropDownItems["stun"].Enabled && !((ToolStripDropDownButton)m_pToolbar.Items["nat"]).DropDownItems["upnp"].Enabled){
    //MessageBox.Show("Calling may not possible, your firewall or router blocks STUN and doesn't support UPnP.\r\n\r\nSTUN Net Type: " + stunResult.NetType + "\r\n\r\nUPnP Supported: " + m_pUPnP.IsSupported,"Error:",MessageBoxButtons.OK,MessageBoxIcon.Error);
    //}

    // "No NAT handling" is always available as a fallback option.
    ToolStripMenuItem item_no_nat = new ToolStripMenuItem("No NAT handling");
    item_no_nat.Name = "no_nat";
    ((ToolStripDropDownButton)m_pToolbar.Items["nat"]).DropDownItems.Add(item_no_nat);

    // Select first enabled item.
    foreach (ToolStripItem it in ((ToolStripDropDownButton)m_pToolbar.Items["nat"]).DropDownItems) {
        if (it.Enabled) {
            ((ToolStripMenuItem)it).Checked = true;
            m_NatHandlingType = it.Name;

            break;
        }
    }

    #endregion

    // Create the SIP stack, bind UDP on all interfaces at m_SipPort, wire
    // error/request handlers, and start it.
    m_pStack = new SIP_Stack();
    m_pStack.UserAgent = "GSDR";
    m_pStack.BindInfo = new IPBindInfo[] { new IPBindInfo("", BindInfoProtocol.UDP, IPAddress.Any, m_SipPort) };
    //m_pStack.Allow
    m_pStack.Error += new EventHandler<ExceptionEventArgs>(m_pStack_Error);
    m_pStack.RequestReceived += new EventHandler<SIP_RequestReceivedEventArgs>(m_pStack_RequestReceived);
    m_pStack.Start();

    // Optionally show the SIP message debug window.
    if (m_IsDebug) {
        wfrm_SIP_Debug debug = new wfrm_SIP_Debug(m_pStack);
        debug.Show();
    }
}
/// <summary>
/// Handles microphone-selection menu clicks: checks the clicked item
/// (unchecking the rest), remembers the selected source, and retargets any
/// active call's audio-in to the newly chosen device.
/// </summary>
/// <param name="sender">The drop-down menu raising the event.</param>
/// <param name="e">Click event data carrying the clicked item.</param>
private void m_pToolbar_Mic_ItemClicked(object sender, ToolStripItemClickedEventArgs e)
{
    try {
        // Radio-button behavior: exactly the clicked item ends up checked.
        foreach (ToolStripMenuItem menuItem in ((ToolStripDropDownMenu)sender).Items) {
            bool isClicked = menuItem.Equals(e.ClickedItem);
            menuItem.Checked = isClicked;
            if (isClicked) {
                mic_source = menuItem.Text;
            }
        }

        // The device object is stashed in the menu item's Tag.
        m_pAudioInDevice = (AudioInDevice)e.ClickedItem.Tag;

        // Update active call audio-in device.
        if (m_pCall != null && m_pCall.LocalSDP != null) {
            foreach (SDP_MediaDescription media in m_pCall.LocalSDP.MediaDescriptions) {
                if (media.Tags.ContainsKey("rtp_audio_in")) {
                    ((AudioIn_RTP)media.Tags["rtp_audio_in"]).AudioInDevice = m_pAudioInDevice;
                }
            }
        }
    }
    catch (Exception x) {
        MessageBox.Show("Error: " + x.Message, "Error:", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
}
/// <summary>
/// Cleans up any resources being used. Safe to call multiple times; only the
/// first call performs the teardown. Never throws.
/// </summary>
public void Dispose()
{
    if(m_IsDisposed){
        return;
    }
    m_IsDisposed = true;

    try{
        // If recording, we need to reset wav device first.
        waveInReset(m_pWavDevHandle);

        // If there are unprepared wav headers, we need to unprepare these.
        // Snapshot via ToArray so disposing items can't invalidate the enumeration.
        foreach(BufferItem item in m_pBuffers.ToArray()){
            // Unprepare the pinned WAVEHDR before freeing the buffer item; the
            // header must not still be registered with the driver when released.
            waveInUnprepareHeader(m_pWavDevHandle,item.HeaderHandle.AddrOfPinnedObject(),Marshal.SizeOf(item.Header));
            item.Dispose();
        }

        // Close input device.
        waveInClose(m_pWavDevHandle);

        m_pInDevice = null;
        m_pWavDevHandle = IntPtr.Zero;
    }
    catch{
        // Deliberate best-effort: native cleanup errors are swallowed because
        // Dispose must not throw.
    }
}
/// <summary>
/// Cleans up any resources being used. Safe to call multiple times; only the
/// first call performs the teardown. Never throws.
/// </summary>
public void Dispose()
{
    if(m_IsDisposed){
        return;
    }
    m_IsDisposed = true;

    try{
        // If recording, we need to reset wav device first.
        waveInReset(m_pWavDevHandle);

        // If there are unprepared wav headers, we need to unprepare these.
        // NOTE(review): relies on BufferItem.Dispose to unprepare/free its wav
        // header — confirm, since a sibling Dispose variant in this file calls
        // waveInUnprepareHeader explicitly before disposing each item.
        foreach(BufferItem item in m_pBuffers.Values){
            item.Dispose();
        }

        // Close input device.
        waveInClose(m_pWavDevHandle);

        m_pInDevice = null;
        m_pWavDevHandle = IntPtr.Zero;

        // Drop all subscribers so disposed instance does not keep listeners alive.
        this.AudioFrameReceived = null;
    }
    catch{
        // Deliberate best-effort: native cleanup errors are swallowed because
        // Dispose must not throw.
    }
}