/// <summary>
/// Toggles microphone sending: starts the capture + RTP send pipeline, or stops it and
/// releases all related resources.
/// </summary>
/// <param name="sender">Event source.</param>
/// <param name="e">Event data.</param>
private void m_pToggleSend_Click(object sender,EventArgs e)
{
    bool starting = (m_pAudioInRTP == null);

    if(starting){
        // Begin sending: create the RTP send stream and hook the selected capture
        // device to it (20 ms audio frames).
        m_pSendStream = m_pSession.CreateSendStream();
        m_pAudioInRTP = new AudioIn_RTP(AudioIn.Devices[m_pInDevices.SelectedIndex],20,m_pMainUI.AudioCodecs,m_pSendStream);
        m_pAudioInRTP.Start();

        // Refresh the send statistics in the UI twice a second.
        m_pTimer = new Timer();
        m_pTimer.Interval = 500;
        m_pTimer.Tick += delegate(object s1,EventArgs e1){
            m_pCodec.Text       = m_pAudioInRTP.AudioCodec.Name;
            m_pPacketsSent.Text = m_pSendStream.RtpPacketsSent.ToString();
            m_pKBSent.Text      = Convert.ToString(m_pSendStream.RtpBytesSent / 1000);
        };
        m_pTimer.Start();

        m_pToggleSend.Text = "Stop";
    }
    else{
        // Stop sending: tear down the stats timer, the capture pipeline and the RTP stream.
        m_pTimer.Dispose();
        m_pTimer = null;
        m_pAudioInRTP.Dispose();
        m_pAudioInRTP = null;
        m_pSendStream.Close();
        m_pSendStream = null;

        m_pToggleSend.Text = "Send";
    }
}
/// <summary>
/// Starts the RTP multimedia session and, when targets are already configured,
/// begins streaming video to them.
/// </summary>
/// <exception cref="ObjectDisposedException">Is raised when this object is disposed and this method is accessed.</exception>
public void Start()
{
    if (IsDisposed) {
        throw new ObjectDisposedException(this.GetType().Name);
    }
    if (Started) {
        // Already running — nothing to do.
        return;
    }

    _hTrigger.Init();

    // Create the RTP session (RTCP on data port + 1) and mark its payload as H.264.
    _multiSession = new RTP_MultimediaSession(RTP_Utils.GenerateCNAME());
    RTP_Session session = _multiSession.CreateSession(
        new RTP_Address(IPAddress.Parse(_localIP), _localPort, _localPort + 1),
        new RTP_Clock(0, VideoRate));
    session.Payload = RTP_PayloadTypes.H264;
    session.Start();
    _sendStream = session.CreateSendStream();

    // NOTE(review): "_vSoure" looks like a typo for "_vSource" — kept as declared elsewhere.
    _vSoure = VideoSourceCreator.Instance.GetVideoSource(_videoId);

    // Register every configured receiver (RTCP on its port + 1) and start playing
    // only when at least one target exists.
    if (_targets.Count > 0) {
        foreach (string key in _targets.Keys) {
            TargetItem ti = _targets[key];
            session.AddTarget(new RTP_Address(ti.IP, ti.Port, ti.Port + 1));
        }
        startPlay();
    }

    Started = true;
}
/// <summary>
/// Default constructor.
/// </summary>
/// <param name="stream">RTP send stream.</param>
/// <exception cref="ArgumentNullException">Is raised when <b>stream</b> is null reference.</exception>
public RTP_SendStreamInfo(RTP_SendStream stream)
{
    if (stream == null) {
        // nameof keeps the parameter name refactor-safe; compiles to the same "stream" string.
        throw new ArgumentNullException(nameof(stream));
    }

    m_pStream = stream;
}
/// <summary>
/// Sends the cached PPS NAL unit over the given RTP send stream.
/// Does nothing when no PPS has been captured yet.
/// </summary>
/// <param name="sender">RTP send stream to send through.</param>
/// <param name="timestamp">RTP timestamp to stamp the packet with.</param>
public void SendPPS(RTP_SendStream sender, uint timestamp)
{
    if (_pps == null) {
        return;
    }

    RTP_Packet packet = new RTP_Packet();
    packet.Timestamp = timestamp;
    packet.Data = _pps.NaluBytes();
    sender.Send(packet);

    // Record when the PPS was last sent (milliseconds since system start).
    _ppsStamp = Environment.TickCount;
}
/// <summary>
/// Form-closing handler: releases the stats timer, the audio capture pipeline and
/// the RTP send stream, if any of them are still alive.
/// </summary>
/// <param name="sender">Event source.</param>
/// <param name="e">Event data.</param>
private void wfrm_SendMic_FormClosing(object sender, FormClosingEventArgs e)
{
    // Tear down in the same order as the send toggle: timer first, then capture, then stream.
    if(m_pTimer != null){
        m_pTimer.Dispose();
        m_pTimer = null;
    }

    if(m_pAudioInRTP != null){
        m_pAudioInRTP.Dispose();
        m_pAudioInRTP = null;
    }

    if(m_pSendStream != null){
        m_pSendStream.Close();
        m_pSendStream = null;
    }
}
/// <summary>
/// Sends audio to RTP session target(s).
/// Reads <b>m_SendFile</b> in fixed-size frames, encodes each frame with the currently
/// active codec and sends it as an RTP packet, pacing roughly one frame per 25 ms.
/// </summary>
private void SendAudio()
{
    try{
        using (FileStream fs = File.OpenRead(m_SendFile)){
            RTP_SendStream sendStream = m_pSession.CreateSendStream();
            try{
                byte[] buffer = new byte[400];
                long packetsSent = 0;
                long totalSent = 0;

                int readedCount = fs.Read(buffer, 0, buffer.Length);
                while (readedCount > 0) {
                    // Snapshot the codec so the UI callback below sees the one actually used.
                    var codec = m_pMainUI.ActiveCodec;
                    if (codec != null) {
                        // BUG FIX: encode only the bytes actually read; the original always
                        // encoded buffer.Length bytes, sending stale tail bytes on the last frame.
                        byte[] encodedData = codec.Encode(buffer, 0, readedCount);

                        // Send audio frame.
                        RTP_Packet packet = new RTP_Packet();
                        packet.Timestamp = m_pSession.RtpClock.RtpTimestamp;
                        packet.Data = encodedData;
                        sendStream.Send(packet);

                        totalSent += encodedData.Length;
                        packetsSent++;

                        // Report progress on the UI thread.
                        this.BeginInvoke(new MethodInvoker(delegate(){
                            m_pCodec.Text = codec.Name;
                            m_pPacketsSent.Text = packetsSent.ToString();
                            m_pKBSent.Text = Convert.ToString(totalSent / 1000);
                        }));
                    }

                    // BUG FIX: always advance to the next frame. The original only read
                    // inside the codec check, looping forever when ActiveCodec was null.
                    readedCount = fs.Read(buffer, 0, buffer.Length);

                    // Pace sending to roughly the audio frame rate.
                    Thread.Sleep(25);
                }
            }
            finally{
                // Ensure the stream is closed even if encoding/sending throws.
                sendStream.Close();
            }
        }
    }
    catch (Exception x) {
        // Best-effort background sender: swallow errors (e.g. form closed mid-send)
        // rather than crash the thread. NOTE(review): consider surfacing x to a log.
        string dummy = x.Message;
    }
}
/// <summary>
/// Call button handler: creates an RTP send stream when idle, otherwise tears down
/// the active audio capture pipeline and its send stream.
/// </summary>
/// <param name="sender">Event source.</param>
/// <param name="e">Event data.</param>
private void btnCall_Click_1(object sender, RoutedEventArgs e)
{
    IsConnected = false;
    m_pSession = m_pRtpSession.Sessions[0];

    bool idle = (m_pAudioInRTP == null);
    if (idle) {
        m_pSendStream = m_pSession.CreateSendStream();
    }
    else {
        m_pAudioInRTP.Dispose();
        m_pAudioInRTP = null;
        m_pSendStream.Close();
        m_pSendStream = null;
    }

    // _soundSender.Start();
    // _soundReceiver.Start();
}
/// <summary>
/// Cleans up any resources being used.
/// </summary>
public void Dispose()
{
    if (m_IsDisposed) {
        return;
    }

    Stop();
    m_IsDisposed = true;

    // Drop event subscribers and external references so they can be collected.
    this.Error = null;
    m_pAudioInDevice = null;
    m_pAudioCodecs = null;
    // Method-group unsubscribe is equivalent to the explicit new EventHandler(...) form.
    m_pRTP_Stream.Session.PayloadChanged -= m_pRTP_Stream_PayloadChanged;
    m_pRTP_Stream = null;
    m_pActiveCodec = null;
}
/// <summary>
/// Default constructor.
/// </summary>
/// <param name="audioInDevice">Audio-in device to capture.</param>
/// <param name="audioFrameSize">Audio frame size in milliseconds.</param>
/// <param name="codecs">Audio codecs with RTP payload number. For example: 0-PCMU,8-PCMA.</param>
/// <param name="stream">RTP stream to use for audio sending.</param>
/// <exception cref="ArgumentNullException">Is raised when <b>audioInDevice</b>,<b>codecs</b> or <b>stream</b> is null reference.</exception>
public AudioIn_RTP(AudioInDevice audioInDevice, int audioFrameSize, Dictionary<int, AudioCodec> codecs, RTP_SendStream stream)
{
    // nameof keeps argument names refactor-safe; each compiles to the same string literal.
    if (audioInDevice == null) {
        throw new ArgumentNullException(nameof(audioInDevice));
    }
    if (codecs == null) {
        throw new ArgumentNullException(nameof(codecs));
    }
    if (stream == null) {
        throw new ArgumentNullException(nameof(stream));
    }

    m_pAudioInDevice = audioInDevice;
    m_AudioFrameSize = audioFrameSize;
    m_pAudioCodecs = codecs;
    m_pRTP_Stream = stream;

    // Track payload changes so the active codec follows the session's negotiated payload.
    m_pRTP_Stream.Session.PayloadChanged += new EventHandler(m_pRTP_Stream_PayloadChanged);
    // Active codec may legitimately stay null if no codec is mapped to the current payload.
    m_pAudioCodecs.TryGetValue(m_pRTP_Stream.Session.Payload, out m_pActiveCodec);
}