private static async Task StartFfmpegListener(string sdpPath, CancellationToken cancel)
{
    while (!File.Exists(FFMPEG_SDP_FILE) && !cancel.IsCancellationRequested)
    {
        await Task.Delay(500);
    }

    if (!cancel.IsCancellationRequested)
    {
        var sdp = SDP.ParseSDPDescription(File.ReadAllText(FFMPEG_SDP_FILE));

        // The SDP is only expected to contain a single video media announcement.
        var videoAnn = sdp.Media.Single(x => x.Media == SDPMediaTypesEnum.video);
        _ffmpegVideoFormat = videoAnn.MediaFormats.Values.First();

        _ffmpegListener = new RTPSession(false, false, false, IPAddress.Loopback, FFMPEG_DEFAULT_RTP_PORT);
        _ffmpegListener.AcceptRtpFromAny = true;
        MediaStreamTrack videoTrack = new MediaStreamTrack(SDPMediaTypesEnum.video, false, new List<SDPAudioVideoMediaFormat> { _ffmpegVideoFormat }, MediaStreamStatusEnum.RecvOnly);
        _ffmpegListener.addTrack(videoTrack);
        _ffmpegListener.SetRemoteDescription(SIP.App.SdpType.answer, sdp);

        // Set a dummy destination end point or the RTP session will end up sending RTCP reports
        // to itself.
        var dummyIPEndPoint = new IPEndPoint(IPAddress.Loopback, 0);
        _ffmpegListener.SetDestination(SDPMediaTypesEnum.video, dummyIPEndPoint, dummyIPEndPoint);

        await _ffmpegListener.Start();
    }
}
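The listener above only starts once ffmpeg has written its SDP file. A minimal sketch of launching ffmpeg from the same program is shown below; the input file name, codec, port and SDP file name are illustrative assumptions and need to match the program's FFMPEG_SDP_FILE and FFMPEG_DEFAULT_RTP_PORT values.

// Hypothetical ffmpeg launch. ffmpeg's -sdp_file option writes the SDP that
// StartFfmpegListener polls for; all paths and ports here are placeholders.
using System.Diagnostics;

Process.Start(new ProcessStartInfo
{
    FileName = "ffmpeg",
    Arguments = "-re -i input.mp4 -an -vcodec h264 -f rtp rtp://127.0.0.1:5020 -sdp_file ffmpeg.sdp",
    UseShellExecute = false
});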
/// <summary>
/// RtpFramer constructor.
/// </summary>
/// <param name="RTPipAddress">IP address to listen on for RTP.</param>
/// <param name="RTPport">Port to listen on for RTP.</param>
/// <param name="RTCPipAddress">IP address to listen on for RTCP.</param>
/// <param name="RTCPport">Port to listen on for RTCP.</param>
/// <param name="forwardIP">IP address to forward RTP to.</param>
/// <param name="forwardPort">Port to forward RTP to.</param>
public RtpFramer(String RTPipAddress, int RTPport, String RTCPipAddress, int RTCPport, String forwardIP, int forwardPort)
{
    // Create the RTP session, sender and receiver.
    Session = new RTPSession();
    Sender = new RTPSender();
    Receiver = new RTPReceiver();

    // Create the forwarding end point and wrap it in an RTP participant.
    var senderEp = new IPEndPoint(IPAddress.Parse(forwardIP), forwardPort);
    senderParticipant = new RTPParticipant(senderEp);

    // Attach the sending participant to the sender and the sender to the session.
    Sender.AddParticipant(senderParticipant);
    Session.AddSender(Sender);

    // Create the RTP and RTCP listening end points and bind them to a receiving participant.
    var rtpEp = new IPEndPoint(IPAddress.Parse(RTPipAddress), RTPport);
    var rtcpEp = new IPEndPoint(IPAddress.Parse(RTCPipAddress), RTCPport);
    participant = new RTPParticipant(rtpEp, rtcpEp);

    Session.NewRTPPacket = NewRTPPacket;
    Session.NewRTCPPacket = NewRTCPPacket;
    Session.NewSSRC = NewSSRC;

    Receiver.AddParticipant(participant);
    Session.AddReceiver(Receiver);
}
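A hypothetical construction call for the class above, with all addresses and ports invented for illustration: listen for RTP on 5004 and RTCP on 5005, and forward received packets to 192.168.1.20:6000.

// All values below are placeholders.
var framer = new RtpFramer("192.168.1.10", 5004, "192.168.1.10", 5005, "192.168.1.20", 6000);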
public void CheckDuplicateBindPortFailsUnitTest()
{
    logger.LogDebug("--> " + System.Reflection.MethodBase.GetCurrentMethod().Name);
    logger.BeginScope(System.Reflection.MethodBase.GetCurrentMethod().Name);

    // Create two RTP sessions. The first binds to a port. The second then attempts
    // to bind to the same port, which should fail.
    RTPSession localSession = new RTPSession(false, false, false, IPAddress.Loopback);
    MediaStreamTrack localAudioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, new List<SDPAudioVideoMediaFormat> { new SDPAudioVideoMediaFormat(SDPWellKnownMediaFormatsEnum.PCMU) });
    localSession.addTrack(localAudioTrack);

    var rtpEndPoint = localSession.GetRtpChannel(SDPMediaTypesEnum.audio).RTPLocalEndPoint;

    logger.LogDebug($"RTP session local end point {rtpEndPoint}.");

    // Now attempt to create a second RTP session on the same port as the previous one.
    RTPSession duplicateSession = new RTPSession(false, false, false, IPAddress.Loopback, rtpEndPoint.Port);
    MediaStreamTrack duplicateTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, new List<SDPAudioVideoMediaFormat> { new SDPAudioVideoMediaFormat(SDPWellKnownMediaFormatsEnum.PCMU) });
    Assert.Throws<ApplicationException>(() => duplicateSession.addTrack(duplicateTrack));

    localSession.Close(null);
}
public void CheckSelectedAudioFormatAttributeUnitTest()
{
    logger.LogDebug("--> " + System.Reflection.MethodBase.GetCurrentMethod().Name);
    logger.BeginScope(System.Reflection.MethodBase.GetCurrentMethod().Name);

    string remoteSdp =
@"v=0
o=- 1986548327 0 IN IP4 127.0.0.1
s=-
c=IN IP4 127.0.0.1
t=0 0
m=audio 60640 RTP/AVP 0 111 8
a=rtpmap:0 PCMU/8000
a=rtpmap:111 OPUS/48000/2";

    // Create a local session with an audio track.
    RTPSession rtpSession = new RTPSession(false, false, false);
    MediaStreamTrack localAudioTrack = new MediaStreamTrack(SDPWellKnownMediaFormatsEnum.PCMA, SDPWellKnownMediaFormatsEnum.G723);
    rtpSession.addTrack(localAudioTrack);

    var offer = SDP.ParseSDPDescription(remoteSdp);

    logger.LogDebug($"Remote offer: {offer}");

    var result = rtpSession.SetRemoteDescription(SIP.App.SdpType.offer, offer);

    logger.LogDebug($"Set remote description on local session result {result}.");

    Assert.Equal(SetDescriptionResultEnum.OK, result);
    Assert.Equal(8, rtpSession.AudioLocalTrack.Capabilities.Single(x => x.Name() == "PCMA").ID);
    Assert.Equal("PCMA", rtpSession.GetSendingFormat(SDPMediaTypesEnum.audio).Name());

    rtpSession.Close("normal");
}
/// <summary>
/// Sends the sounds of silence. If the destination is on the other side of a NAT this is useful to open
/// a pinhole and hopefully get the remote RTP stream through.
/// </summary>
/// <param name="rtpSocket">The socket we're using to send from.</param>
/// <param name="rtpSendSession">Our RTP sending session.</param>
/// <param name="cts">Cancellation token source to stop the call.</param>
private static async void SendRtp(Socket rtpSocket, RTPSession rtpSendSession, CancellationTokenSource cts)
{
    uint bufferSize = (uint)SILENCE_SAMPLE_PERIOD * 8; // PCMU transmission rate is 64kbit/s, i.e. 8 bytes per millisecond.
    uint rtpSamplePeriod = (uint)(1000 / SILENCE_SAMPLE_PERIOD);
    uint rtpSendTimestamp = 0;
    uint packetSentCount = 0;
    uint bytesSentCount = 0;

    while (cts.IsCancellationRequested == false)
    {
        // Fill the whole outgoing buffer with the PCMU silence byte pattern.
        byte[] sample = new byte[bufferSize / 2];
        for (int index = 0; index + 1 < sample.Length; index += 2)
        {
            sample[index] = PCMU_SILENCE_BYTE_ZERO;
            sample[index + 1] = PCMU_SILENCE_BYTE_ONE;
        }

        if (_remoteRtpEndPoint != null)
        {
            rtpSendSession.SendAudioFrame(rtpSocket, _remoteRtpEndPoint, rtpSendTimestamp, sample);
            rtpSendTimestamp += rtpSamplePeriod;
            packetSentCount++;
            bytesSentCount += (uint)sample.Length;
        }

        await Task.Delay((int)rtpSamplePeriod);
    }
}
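The silence constants used above are defined elsewhere in the example program. A plausible sketch of their definitions follows; 0xFF is the mu-law encoding of a zero amplitude sample, so a 0x7F/0xFF pair approximates silence. Treat the exact byte values and the 20 ms period as assumptions to verify against the real program.

private const int SILENCE_SAMPLE_PERIOD = 20;      // Milliseconds between silence payloads (assumed).
private const byte PCMU_SILENCE_BYTE_ZERO = 0x7F;  // Assumed PCMU silence pattern bytes.
private const byte PCMU_SILENCE_BYTE_ONE = 0xFF;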
public RTPFactory(String RTPipAddress, int RTPport, String RTCPipAddress, int RTCPport, String forwardIP, int forwardPort)
{
    // Create the session, sender and receiver.
    Session = new RTPSession();
    Sender = new RTPSender();
    Receiver = new RTPReceiver();

    var senderEp = new IPEndPoint(IPAddress.Parse(forwardIP), forwardPort);
    // Bind the sending participant to the destination end point.
    senderParticipant = new RTPParticipant(senderEp);
    // Add the sending participant to the sender.
    Sender.AddParticipant(senderParticipant);
    // Add the sender to the session.
    Session.AddSender(Sender);

    var rtpEp = new IPEndPoint(IPAddress.Parse(RTPipAddress), RTPport);
    var rtcpEp = new IPEndPoint(IPAddress.Parse(RTCPipAddress), RTCPport);
    // Bind the receiving participant to the RTP and RTCP end points.
    participant = new RTPParticipant(rtpEp, rtcpEp);
    // Add the receiving participant to the receiver.
    Receiver.AddParticipant(participant);
    // Add the receiver to the session.
    Session.AddReceiver(Receiver);
}
public void NoLocalTracksTest()
{
    logger.LogDebug("--> " + System.Reflection.MethodBase.GetCurrentMethod().Name);
    logger.BeginScope(System.Reflection.MethodBase.GetCurrentMethod().Name);

    // Create two RTP sessions. First one acts as the local session to receive the offer.
    // Second one acts as the remote session to generate it.
    // A local session is created but NO media tracks are added to it.
    RTPSession localSession = new RTPSession(false, false, false);

    // Create a remote session WITH an audio track.
    RTPSession remoteSession = new RTPSession(false, false, false);
    // The track for the remote session is still local relative to the session it's being added to.
    MediaStreamTrack remoteAudioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, new List<SDPAudioVideoMediaFormat> { new SDPAudioVideoMediaFormat(SDPWellKnownMediaFormatsEnum.PCMU) });
    remoteSession.addTrack(remoteAudioTrack);

    var offer = remoteSession.CreateOffer(IPAddress.Loopback);

    // Give the offer to the local session that is missing any media tracks.
    var result = localSession.SetRemoteDescription(SIP.App.SdpType.offer, offer);

    logger.LogDebug($"Set remote description on local session result {result}.");

    Assert.Equal(SetDescriptionResultEnum.NoMatchingMediaType, result);

    localSession.Close("normal");
    remoteSession.Close("normal");
}
public async Task SortChecklistUnitTest()
{
    logger.LogDebug("--> " + System.Reflection.MethodBase.GetCurrentMethod().Name);
    logger.BeginScope(System.Reflection.MethodBase.GetCurrentMethod().Name);

    RTPSession rtpSession = new RTPSession(true, true, true);
    MediaStreamTrack dummyTrack = new MediaStreamTrack(null, SDPMediaTypesEnum.audio, false, new List<SDPMediaFormat> { new SDPMediaFormat(SDPMediaFormatsEnum.PCMU) });
    rtpSession.addTrack(dummyTrack);
    var iceSession = new IceSession(rtpSession.GetRtpChannel(SDPMediaTypesEnum.audio), RTCIceComponent.rtp, null);
    iceSession.StartGathering();

    Assert.NotNull(iceSession);
    Assert.NotEmpty(iceSession.Candidates);

    foreach (var hostCandidate in iceSession.Candidates)
    {
        logger.LogDebug(hostCandidate.ToString());
    }

    var remoteCandidate = RTCIceCandidate.Parse("candidate:408132416 1 udp 2113937151 192.168.11.50 51268 typ host generation 0 ufrag CI7o network-cost 999");
    await iceSession.AddRemoteCandidate(remoteCandidate);

    var remoteCandidate2 = RTCIceCandidate.Parse("candidate:408132417 1 udp 2113937150 192.168.11.51 51268 typ host generation 0 ufrag CI7o network-cost 999");
    await iceSession.AddRemoteCandidate(remoteCandidate2);

    foreach (var entry in iceSession._checklist)
    {
        logger.LogDebug($"checklist entry priority {entry.Priority}.");
    }
}
public void GetHostCandidatesForRTPBindUnitTest()
{
    logger.LogDebug("--> " + System.Reflection.MethodBase.GetCurrentMethod().Name);
    logger.BeginScope(System.Reflection.MethodBase.GetCurrentMethod().Name);

    var localAddress = NetServices.InternetDefaultAddress;
    RTPSession rtpSession = new RTPSession(true, true, true, localAddress);

    // Add a track to the session in order to initialise the RTPChannel.
    MediaStreamTrack dummyTrack = new MediaStreamTrack(null, SDPMediaTypesEnum.audio, false, new List<SDPMediaFormat> { new SDPMediaFormat(SDPMediaFormatsEnum.PCMU) });
    rtpSession.addTrack(dummyTrack);

    RTPChannel rtpChannel = rtpSession.GetRtpChannel(SDPMediaTypesEnum.audio);
    logger.LogDebug($"RTP channel RTP socket local end point {rtpChannel.RTPLocalEndPoint}.");

    var iceSession = new IceSession(rtpChannel, RTCIceComponent.rtp, null);
    iceSession.StartGathering();

    Assert.NotNull(iceSession);
    Assert.NotEmpty(iceSession.Candidates);
    Assert.True(localAddress.Equals(IPAddress.Parse(iceSession.Candidates.Single().address)));

    foreach (var hostCandidate in iceSession.Candidates)
    {
        logger.LogDebug(hostCandidate.ToString());
    }
}
public void NoMatchingMediaTest()
{
    logger.LogDebug("--> " + System.Reflection.MethodBase.GetCurrentMethod().Name);
    logger.BeginScope(System.Reflection.MethodBase.GetCurrentMethod().Name);

    // Create a local session with an audio track only.
    RTPSession localSession = new RTPSession(false, false, false);
    MediaStreamTrack localAudioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, new List<SDPAudioVideoMediaFormat> { new SDPAudioVideoMediaFormat(SDPWellKnownMediaFormatsEnum.PCMU) });
    localSession.addTrack(localAudioTrack);

    // Create a remote session with a video track only.
    RTPSession remoteSession = new RTPSession(false, false, false);
    // The track for the remote session is still local relative to the session it's being added to.
    MediaStreamTrack remoteVideoTrack = new MediaStreamTrack(SDPMediaTypesEnum.video, false, new List<SDPAudioVideoMediaFormat> { new SDPAudioVideoMediaFormat(SDPMediaTypesEnum.video, 96, "VP8", 90000) });
    remoteSession.addTrack(remoteVideoTrack);

    var result = localSession.SetRemoteDescription(SIP.App.SdpType.offer, remoteSession.CreateOffer(IPAddress.Loopback));

    logger.LogDebug($"Set remote description on local session result {result}.");

    Assert.Equal(SetDescriptionResultEnum.NoMatchingMediaType, result);

    localSession.Close("normal");
    remoteSession.Close("normal");
}
/// <summary>
/// Sends the sounds of silence. If the destination is on the other side of a NAT this is useful to open
/// a pinhole and hopefully get the remote RTP stream through.
/// </summary>
/// <param name="rtpSession">The RTP session we're sending from.</param>
/// <param name="cts">Cancellation token source to stop the call.</param>
private static async void SendSilence(RTPSession rtpSession, CancellationTokenSource cts)
{
    int samplingFrequency = rtpSession.MediaFormat.GetClockRate();
    uint rtpTimestampStep = (uint)(samplingFrequency * SILENCE_SAMPLE_PERIOD / 1000);
    uint bufferSize = (uint)SILENCE_SAMPLE_PERIOD;
    uint rtpSampleTimestamp = 0;

    while (cts.IsCancellationRequested == false)
    {
        if (rtpSession.DestinationEndPoint != null)
        {
            // Fill the whole buffer with the PCMU silence byte pattern.
            byte[] sample = new byte[bufferSize / 2];
            for (int index = 0; index + 1 < sample.Length; index += 2)
            {
                sample[index] = PCMU_SILENCE_BYTE_ZERO;
                sample[index + 1] = PCMU_SILENCE_BYTE_ONE;
            }

            rtpSession.SendAudioFrame(rtpSampleTimestamp, sample);
            rtpSampleTimestamp += rtpTimestampStep;
        }

        await Task.Delay(SILENCE_SAMPLE_PERIOD);
    }
}
private static void SendRtp(Socket rtpSocket, RTPSession rtpSendSession, CancellationTokenSource cts)
{
    // The input audio format. The 16 bit PCM samples get mu-law encoded to PCMU before being sent.
    WaveFormat waveFormat = new WaveFormat(8000, 16, 1);

    // Set up the input device that will provide audio samples that can be encoded, packaged into RTP
    // and sent to the remote end of the call.
    if (WaveInEvent.DeviceCount == 0)
    {
        Log.LogWarning("No audio input devices available. No audio will be sent.");
    }
    else
    {
        DateTime lastSendReportAt = DateTime.Now;
        uint rtpSendTimestamp = 0;
        uint packetSentCount = 0;
        uint bytesSentCount = 0;

        // Device used to get audio samples from, e.g. microphone.
        using (WaveInEvent waveInEvent = new WaveInEvent())
        {
            waveInEvent.BufferMilliseconds = 20; // This sets the frequency of the RTP packets.
            waveInEvent.NumberOfBuffers = 1;
            waveInEvent.DeviceNumber = 0;
            waveInEvent.WaveFormat = waveFormat;

            waveInEvent.DataAvailable += (object sender, WaveInEventArgs args) =>
            {
                byte[] sample = new byte[args.Buffer.Length / 2];
                int sampleIndex = 0;

                for (int index = 0; index < args.BytesRecorded; index += 2)
                {
                    var ulawByte = NAudio.Codecs.MuLawEncoder.LinearToMuLawSample(BitConverter.ToInt16(args.Buffer, index));
                    sample[sampleIndex++] = ulawByte;
                }

                if (_remoteRtpEndPoint != null)
                {
                    rtpSendSession.SendAudioFrame(rtpSocket, _remoteRtpEndPoint, rtpSendTimestamp, sample);
                    // At an 8 kHz clock a 20 ms buffer holds 160 samples, so advance the timestamp by that much.
                    rtpSendTimestamp += (uint)(8000 * waveInEvent.BufferMilliseconds / 1000);
                    packetSentCount++;
                    bytesSentCount += (uint)sample.Length;
                }

                if (DateTime.Now.Subtract(lastSendReportAt).TotalSeconds > RTP_REPORTING_PERIOD_SECONDS)
                {
                    // This is typically where RTCP sender (SR) reports would be sent. Omitted here for brevity.
                    lastSendReportAt = DateTime.Now;
                    var remoteRtpEndPoint = _remoteRtpEndPoint as IPEndPoint;
                    Log.LogDebug($"RTP send report {rtpSocket.LocalEndPoint}->{remoteRtpEndPoint} pkts {packetSentCount} bytes {bytesSentCount}");
                }
            };

            waveInEvent.StartRecording();

            cts.Token.WaitHandle.WaitOne();
        }
    }
}
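The timestamp arithmetic above is worth spelling out: the RTP timestamp advances in sample units of the codec clock, so a 20 ms buffer at an 8 kHz clock must advance it by 160, not by 8000 / 20 = 400. A small helper makes the calculation explicit (the name is illustrative):

// Timestamp units to advance per packet: clockRateHz * bufferMs / 1000.
static uint TimestampStep(int clockRateHz, int bufferMilliseconds) =>
    (uint)(clockRateHz * bufferMilliseconds / 1000);

// TimestampStep(8000, 20) == 160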
public void MediaOrderMatchesRemoteOfferUnitTest()
{
    logger.LogDebug("--> " + System.Reflection.MethodBase.GetCurrentMethod().Name);
    logger.BeginScope(System.Reflection.MethodBase.GetCurrentMethod().Name);

    // By default offers made by us always put audio first. Create a remote SDP offer
    // with the video first.
    string remoteSdp =
@"v=0
o=- 1986548327 0 IN IP4 127.0.0.1
s=-
c=IN IP4 127.0.0.1
t=0 0
m=video 60638 RTP/AVP 100
a=rtpmap:100 VP8/90000
a=sendrecv
m=audio 60640 RTP/AVP 0 111
a=rtpmap:0 PCMU/8000
a=rtpmap:111 OPUS/48000/2
a=sendrecv";

    // Create a local session and add the audio track first.
    RTPSession rtpSession = new RTPSession(false, false, false);
    MediaStreamTrack localAudioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, new List<SDPAudioVideoMediaFormat> { new SDPAudioVideoMediaFormat(SDPWellKnownMediaFormatsEnum.PCMU), new SDPAudioVideoMediaFormat(SDPMediaTypesEnum.audio, 110, "OPUS/48000/2") });
    rtpSession.addTrack(localAudioTrack);
    MediaStreamTrack localVideoTrack = new MediaStreamTrack(SDPMediaTypesEnum.video, false, new List<SDPAudioVideoMediaFormat> { new SDPAudioVideoMediaFormat(SDPMediaTypesEnum.video, 96, "VP8", 90000) });
    rtpSession.addTrack(localVideoTrack);

    var offer = SDP.ParseSDPDescription(remoteSdp);

    logger.LogDebug($"Remote offer: {offer}");

    var result = rtpSession.SetRemoteDescription(SIP.App.SdpType.offer, offer);

    logger.LogDebug($"Set remote description on local session result {result}.");

    Assert.Equal(SetDescriptionResultEnum.OK, result);

    var answer = rtpSession.CreateAnswer(null);

    logger.LogDebug($"Local answer: {answer}");

    // The local capabilities should have been re-mapped to the dynamic IDs used in the remote offer.
    Assert.Equal(111, rtpSession.AudioLocalTrack.Capabilities.Single(x => x.Name() == "OPUS").ID);
    Assert.Equal(100, rtpSession.VideoLocalTrack.Capabilities.Single(x => x.Name() == "VP8").ID);

    rtpSession.Close("normal");
}
/// <summary>
/// Handles packets received on the RTP socket. One of the simplest, if not the simplest, cases is
/// PCMU audio packets. The handling can get substantially more complicated if the RTP socket is being
/// used to multiplex different protocols. This is what WebRTC does with STUN, RTP and RTCP.
/// </summary>
/// <param name="rtpSocket">The raw RTP socket.</param>
/// <param name="rtpRecvSession">The session info for the RTP packets being received.</param>
/// <param name="cts">Cancellation token source to stop the call.</param>
private static async void RecvRtp(Socket rtpSocket, RTPSession rtpRecvSession, CancellationTokenSource cts)
{
    try
    {
        DateTime lastRecvReportAt = DateTime.Now;
        uint packetReceivedCount = 0;
        uint bytesReceivedCount = 0;
        byte[] buffer = new byte[512];

        IPEndPoint anyEndPoint = new IPEndPoint((rtpSocket.AddressFamily == AddressFamily.InterNetworkV6) ? IPAddress.IPv6Any : IPAddress.Any, 0);

        Log.LogDebug($"Listening on RTP socket {rtpSocket.LocalEndPoint}.");

        using (var waveOutEvent = new WaveOutEvent())
        {
            var waveProvider = new BufferedWaveProvider(new WaveFormat(8000, 16, 1));
            waveProvider.DiscardOnBufferOverflow = true;
            waveOutEvent.Init(waveProvider);
            waveOutEvent.Play();

            var recvResult = await rtpSocket.ReceiveFromAsync(buffer, SocketFlags.None, anyEndPoint);

            Log.LogDebug($"Initial RTP packet received from {recvResult.RemoteEndPoint}.");

            while (recvResult.ReceivedBytes > 0 && !cts.IsCancellationRequested)
            {
                var rtpPacket = new RTPPacket(buffer.Take(recvResult.ReceivedBytes).ToArray());

                packetReceivedCount++;
                bytesReceivedCount += (uint)rtpPacket.Payload.Length;

                // Decode the mu-law payload back to 16 bit PCM and queue it for playback.
                for (int index = 0; index < rtpPacket.Payload.Length; index++)
                {
                    short pcm = NAudio.Codecs.MuLawDecoder.MuLawToLinearSample(rtpPacket.Payload[index]);
                    byte[] pcmSample = new byte[] { (byte)(pcm & 0xFF), (byte)(pcm >> 8) };
                    waveProvider.AddSamples(pcmSample, 0, 2);
                }

                recvResult = await rtpSocket.ReceiveFromAsync(buffer, SocketFlags.None, anyEndPoint);

                if (DateTime.Now.Subtract(lastRecvReportAt).TotalSeconds > RTP_REPORTING_PERIOD_SECONDS)
                {
                    // This is typically where RTCP receiver (RR) reports would be sent. Omitted here for brevity.
                    lastRecvReportAt = DateTime.Now;
                    var remoteRtpEndPoint = recvResult.RemoteEndPoint as IPEndPoint;
                    Log.LogDebug($"RTP recv report {rtpSocket.LocalEndPoint}<-{remoteRtpEndPoint} pkts {packetReceivedCount} bytes {bytesReceivedCount}");
                }
            }
        }
    }
    catch (ObjectDisposedException) { } // This is how .Net deals with an in use socket being closed. Safe to ignore.
    catch (Exception excp)
    {
        Log.LogError($"Exception processing RTP. {excp}");
    }
}
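A sketch of how the receive loop above and a matching send loop might be wired together. The socket setup here is a simplification (the full example program creates the RTP/RTCP socket pair through the library's helpers), and the RTPSession constructor is the (payload type, null, null) form used elsewhere in these examples; the port is a placeholder.

// Simplified wiring; port choice and constructor arguments are assumptions.
Socket rtpSocket = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, ProtocolType.Udp);
rtpSocket.Bind(new IPEndPoint(IPAddress.Any, 49000)); // placeholder port

var cts = new CancellationTokenSource();
var rtpRecvSession = new RTPSession((int)RTPPayloadTypesEnum.PCMU, null, null);
var rtpSendSession = new RTPSession((int)RTPPayloadTypesEnum.PCMU, null, null);

RecvRtp(rtpSocket, rtpRecvSession, cts); // fire-and-forget async void loops
SendRtp(rtpSocket, rtpSendSession, cts);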
/// <summary>
/// Sends two separate RTP streams to an application like ffplay.
///
/// ffplay -i ffplay_av.sdp -protocol_whitelist "file,rtp,udp" -loglevel debug
///
/// The SDP that describes the streams is:
///
/// v=0
/// o=- 1129870806 2 IN IP4 127.0.0.1
/// s=-
/// c=IN IP4 192.168.11.50
/// t=0 0
/// m=audio 4040 RTP/AVP 0
/// a=rtpmap:0 PCMU/8000
/// m=video 4042 RTP/AVP 100
/// a=rtpmap:100 VP8/90000
/// </summary>
private void SendSamplesAsRtp(IPEndPoint dstBaseEndPoint)
{
    try
    {
        Socket videoSrcRtpSocket = null;
        Socket videoSrcControlSocket = null;
        Socket audioSrcRtpSocket = null;
        Socket audioSrcControlSocket = null;

        // WebRTC multiplexes all the RTP and RTCP sessions onto a single UDP connection.
        // The approach needed for ffplay is the original way, where each media type has its own UDP
        // connection and RTCP also requires a separate UDP connection on RTP port + 1.
        IPAddress localIPAddress = IPAddress.Any;
        IPEndPoint audioRtpEP = dstBaseEndPoint;
        IPEndPoint audioRtcpEP = new IPEndPoint(dstBaseEndPoint.Address, dstBaseEndPoint.Port + 1);
        IPEndPoint videoRtpEP = new IPEndPoint(dstBaseEndPoint.Address, dstBaseEndPoint.Port + 2);
        IPEndPoint videoRtcpEP = new IPEndPoint(dstBaseEndPoint.Address, dstBaseEndPoint.Port + 3);

        RTPSession audioRtpSession = new RTPSession((int)RTPPayloadTypesEnum.PCMU, null, null);
        RTPSession videoRtpSession = new RTPSession(VP8_PAYLOAD_TYPE_ID, null, null);
        DateTime lastRtcpSenderReportSentAt = DateTime.Now;

        NetServices.CreateRtpSocket(localIPAddress, RAW_RTP_START_PORT_RANGE, RAW_RTP_END_PORT_RANGE, true, out audioSrcRtpSocket, out audioSrcControlSocket);
        NetServices.CreateRtpSocket(localIPAddress, ((IPEndPoint)audioSrcRtpSocket.LocalEndPoint).Port, RAW_RTP_END_PORT_RANGE, true, out videoSrcRtpSocket, out videoSrcControlSocket);

        OnMediaSampleReady += (mediaType, timestamp, sample) =>
        {
            if (mediaType == MediaSampleTypeEnum.VP8)
            {
                videoRtpSession.SendVp8Frame(videoSrcRtpSocket, videoRtpEP, timestamp, sample);
            }
            else
            {
                audioRtpSession.SendAudioFrame(audioSrcRtpSocket, audioRtpEP, timestamp, sample);
            }

            // Deliver periodic RTCP sender reports. This helps the receiver to sync the audio and video stream timestamps.
            // If there are gaps in the media, silence suppression etc. then the sender reports shouldn't be triggered from
            // the media samples. In this case the samples are from an mp4 file which provides a constant uninterrupted stream.
            if (DateTime.Now.Subtract(lastRtcpSenderReportSentAt).TotalSeconds >= RTCP_SR_PERIOD_SECONDS)
            {
                videoRtpSession.SendRtcpSenderReport(videoSrcControlSocket, videoRtcpEP, _vp8Timestamp);
                audioRtpSession.SendRtcpSenderReport(audioSrcControlSocket, audioRtcpEP, _mulawTimestamp);
                lastRtcpSenderReportSentAt = DateTime.Now;
            }
        };
    }
    catch (Exception excp)
    {
        logger.Error("Exception SendSamplesAsRtp. " + excp);
    }
}
public void InitEncryptionDisabledSession(IceCandidate iceCandidate, IPEndPoint remoteEndPoint)
{
    if (_audioRtpSession == null || _videoRtpSession == null)
    {
        logger.Debug($"Initialising non-encrypted WebRTC session for remote end point {remoteEndPoint}.");

        iceCandidate.RemoteRtpEndPoint = remoteEndPoint;

        _audioRtpSession = new RTPSession((int)RTPPayloadTypesEnum.PCMU, null, null);
        _videoRtpSession = new RTPSession(VP8_PAYLOAD_TYPE_ID, null, null);
    }
}
private static RTPSession CreateRtpSession(List<SDPMediaFormat> videoFormats)
{
    var rtpSession = new RTPSession(false, false, false, IPAddress.Loopback);

    MediaStreamTrack videoTrack = new MediaStreamTrack(SDPMediaTypesEnum.video, false, videoFormats, MediaStreamStatusEnum.SendRecv);
    rtpSession.addTrack(videoTrack);

    rtpSession.SetDestination(SDPMediaTypesEnum.video, new IPEndPoint(IPAddress.Loopback, FFPLAY_DEFAULT_VIDEO_PORT), new IPEndPoint(IPAddress.Loopback, FFPLAY_DEFAULT_VIDEO_PORT + 1));

    return rtpSession;
}
private static RTPSession CreateRtpSession(List<SDPAudioVideoMediaFormat> audioFormats, List<SDPAudioVideoMediaFormat> videoFormats)
{
    var rtpSession = new RTPSession(false, false, false, IPAddress.Loopback);

    bool hasAudio = false;
    bool hasVideo = false;

    if (audioFormats != null && audioFormats.Count > 0)
    {
        MediaStreamTrack audioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, audioFormats, MediaStreamStatusEnum.SendRecv);
        rtpSession.addTrack(audioTrack);
        hasAudio = true;
    }

    if (videoFormats != null && videoFormats.Count > 0)
    {
        MediaStreamTrack videoTrack = new MediaStreamTrack(SDPMediaTypesEnum.video, false, videoFormats, MediaStreamStatusEnum.SendRecv);
        rtpSession.addTrack(videoTrack);
        hasVideo = true;
    }

    var sdpOffer = rtpSession.CreateOffer(null);

    // Because the SDP being written to the file is the input to ffplay, the connection ports need to be
    // changed to the ones ffplay will be listening on.
    if (hasAudio)
    {
        sdpOffer.Media.Single(x => x.Media == SDPMediaTypesEnum.audio).Port = FFPLAY_DEFAULT_AUDIO_PORT;
    }
    if (hasVideo)
    {
        sdpOffer.Media.Single(x => x.Media == SDPMediaTypesEnum.video).Port = FFPLAY_DEFAULT_VIDEO_PORT;
    }

    Console.WriteLine(sdpOffer);

    using (StreamWriter sw = new StreamWriter(FFPLAY_DEFAULT_SDP_PATH))
    {
        sw.Write(sdpOffer);
    }

    string ffplayCommand = String.Format(FFPLAY_DEFAULT_COMMAND, FFPLAY_DEFAULT_SDP_PATH);
    Console.WriteLine("Start ffplay using the command below:");
    Console.WriteLine(ffplayCommand);
    Console.WriteLine("To request the remote peer to send a video key frame press 'k'");

    rtpSession.Start();
    rtpSession.SetDestination(SDPMediaTypesEnum.audio, new IPEndPoint(IPAddress.Loopback, FFPLAY_DEFAULT_AUDIO_PORT), new IPEndPoint(IPAddress.Loopback, FFPLAY_DEFAULT_AUDIO_PORT + 1));
    rtpSession.SetDestination(SDPMediaTypesEnum.video, new IPEndPoint(IPAddress.Loopback, FFPLAY_DEFAULT_VIDEO_PORT), new IPEndPoint(IPAddress.Loopback, FFPLAY_DEFAULT_VIDEO_PORT + 1));

    return rtpSession;
}
public void MediaOrderMatchesRemoteOfferUnitTest()
{
    logger.LogDebug("--> " + System.Reflection.MethodBase.GetCurrentMethod().Name);
    logger.BeginScope(System.Reflection.MethodBase.GetCurrentMethod().Name);

    // By default offers made by us always put audio first. Create a remote SDP offer
    // with the video first.
    string remoteSdp =
@"v=0
o=- 1986548327 0 IN IP4 127.0.0.1
s=-
c=IN IP4 127.0.0.1
t=0 0
m=video 60638 RTP/AVP 100
a=rtpmap:100 VP8/90000
a=sendrecv
m=audio 60640 RTP/AVP 0
a=rtpmap:0 PCMU/8000
a=sendrecv";

    // Create a local session and add the audio track first.
    RTPSession localSession = new RTPSession(false, false, false);
    MediaStreamTrack localAudioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, new List<SDPMediaFormat> { new SDPMediaFormat(SDPMediaFormatsEnum.PCMU) });
    localSession.addTrack(localAudioTrack);
    MediaStreamTrack localVideoTrack = new MediaStreamTrack(SDPMediaTypesEnum.video, false, new List<SDPMediaFormat> { new SDPMediaFormat(SDPMediaFormatsEnum.VP8) });
    localSession.addTrack(localVideoTrack);

    var offer = SDP.ParseSDPDescription(remoteSdp);

    logger.LogDebug($"Remote offer: {offer}");

    var result = localSession.SetRemoteDescription(SIP.App.SdpType.offer, offer);

    logger.LogDebug($"Set remote description on local session result {result}.");

    Assert.Equal(SetDescriptionResultEnum.OK, result);

    var answer = localSession.CreateAnswer(null);

    logger.LogDebug($"Local answer: {answer}");

    // The answer must list the media announcements in the same order as the remote offer (video first).
    Assert.Equal(offer.Media.First().Media, answer.Media.First().Media);
    Assert.Equal(offer.Media.Last().Media, answer.Media.Last().Media);

    localSession.Close("normal");
}
/// <summary>
/// Requests the SIP device to send a video key frame.
/// </summary>
/// <param name="rtpSession">The RTP session for the established SIP call.</param>
private static void RequestSIPAgentKeyFrame(RTPSession rtpSession)
{
    if (rtpSession != null && !rtpSession.IsClosed)
    {
        var localVideoSsrc = rtpSession.VideoLocalTrack.Ssrc;
        var remoteVideoSsrc = rtpSession.VideoRemoteTrack.Ssrc;

        Console.WriteLine($"Requesting key frame from SIP connection for remote ssrc {remoteVideoSsrc}.");

        RTCPFeedback pli = new RTCPFeedback(localVideoSsrc, remoteVideoSsrc, PSFBFeedbackTypesEnum.PLI);
        rtpSession.SendRtcpFeedback(SDPMediaTypesEnum.video, pli);
    }
}
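An illustrative way to drive the helper above from a console host, assuming rtpSession holds the RTP session for the established SIP call (matching the "press 'k'" hint printed by CreateRtpSession earlier). The console loop and quit key are assumptions of this sketch, not part of the library.

while (true)
{
    var keyPress = Console.ReadKey(true);
    if (keyPress.KeyChar == 'k')
    {
        RequestSIPAgentKeyFrame(rtpSession); // send an RTCP PLI to the remote peer
    }
    else if (keyPress.KeyChar == 'q')
    {
        break;
    }
}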
public void AudioOnlyOfferAnswerTest()
{
    logger.LogDebug("--> " + System.Reflection.MethodBase.GetCurrentMethod().Name);
    logger.BeginScope(System.Reflection.MethodBase.GetCurrentMethod().Name);

    // Create two RTP sessions. First one acts as the local session to generate the offer.
    // Second one acts as the remote session to generate the answer.
    RTPSession localSession = new RTPSession(false, false, false);
    MediaStreamTrack localAudioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, new List<SDPAudioVideoMediaFormat> { new SDPAudioVideoMediaFormat(SDPWellKnownMediaFormatsEnum.PCMU) });
    localSession.addTrack(localAudioTrack);

    // Generate the offer to send to the remote party.
    var offer = localSession.CreateOffer(IPAddress.Loopback);

    logger.LogDebug("Local offer: " + offer.ToString());

    RTPSession remoteSession = new RTPSession(false, false, false);
    // The track for the remote session is still local relative to the session it's being added to.
    MediaStreamTrack remoteAudioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, new List<SDPAudioVideoMediaFormat> { new SDPAudioVideoMediaFormat(SDPWellKnownMediaFormatsEnum.PCMU) });
    remoteSession.addTrack(remoteAudioTrack);

    var result = remoteSession.SetRemoteDescription(SIP.App.SdpType.offer, offer);

    logger.LogDebug($"Set remote description on remote session result {result}.");

    Assert.Equal(SetDescriptionResultEnum.OK, result);

    // Get the answer from the remote session.
    var answer = remoteSession.CreateAnswer(IPAddress.Loopback);

    logger.LogDebug("Remote answer: " + answer.ToString());

    // Provide the answer back to the local session.
    result = localSession.SetRemoteDescription(SIP.App.SdpType.answer, answer);

    logger.LogDebug($"Set remote description on local session result {result}.");

    Assert.Equal(SetDescriptionResultEnum.OK, result);

    localSession.Close("normal");
    remoteSession.Close("normal");
}
public void CreateInstanceUnitTest()
{
    logger.LogDebug("--> " + System.Reflection.MethodBase.GetCurrentMethod().Name);
    logger.BeginScope(System.Reflection.MethodBase.GetCurrentMethod().Name);

    RTPSession rtpSession = new RTPSession(true, true, true);

    // Add a track to the session in order to initialise the RTPChannel.
    MediaStreamTrack dummyTrack = new MediaStreamTrack(null, SDPMediaTypesEnum.audio, false, new List<SDPMediaFormat> { new SDPMediaFormat(SDPMediaFormatsEnum.PCMU) });
    rtpSession.addTrack(dummyTrack);

    var iceSession = new IceSession(rtpSession.GetRtpChannel(SDPMediaTypesEnum.audio), RTCIceComponent.rtp);

    Assert.NotNull(iceSession);
}
public void Init()
{
    doRead = true;
    TempSessions = new Dictionary<string, List<string>>();
    session = new RTPSession();
    rawSessionData = new List<string>();
    sessionId = "";
    currentFrameNumber = 0;

    chart.ChartAreas[0].AxisY.Maximum = 0.3;
    chart.ChartAreas[0].AxisY.Minimum = -0.1;
    chart.ChartAreas[0].AxisY2.Maximum = 30;
    chart.ChartAreas[0].AxisY2.Minimum = -10;
}
public void AudioVideoOfferNoLocalAudioUnitTest()
{
    logger.LogDebug("--> " + System.Reflection.MethodBase.GetCurrentMethod().Name);
    logger.BeginScope(System.Reflection.MethodBase.GetCurrentMethod().Name);

    // Create two RTP sessions. The remote session generates the offer and the local
    // session, which only has a video track added to it, generates the answer.
    RTPSession localSession = new RTPSession(false, false, false);
    MediaStreamTrack localVideoTrack = new MediaStreamTrack(SDPMediaTypesEnum.video, false, new List<SDPAudioVideoMediaFormat> { new SDPAudioVideoMediaFormat(SDPMediaTypesEnum.video, 96, "VP8", 90000) });
    localSession.addTrack(localVideoTrack);

    // Create a remote session with both audio and video tracks.
    RTPSession remoteSession = new RTPSession(false, false, false);
    // The tracks for the remote session are still local relative to the session they're being added to.
    MediaStreamTrack remoteAudioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, new List<SDPAudioVideoMediaFormat> { new SDPAudioVideoMediaFormat(SDPWellKnownMediaFormatsEnum.PCMU) });
    remoteSession.addTrack(remoteAudioTrack);
    MediaStreamTrack remoteVideoTrack = new MediaStreamTrack(SDPMediaTypesEnum.video, false, new List<SDPAudioVideoMediaFormat> { new SDPAudioVideoMediaFormat(SDPMediaTypesEnum.video, 96, "VP8", 90000) });
    remoteSession.addTrack(remoteVideoTrack);

    var offer = remoteSession.CreateOffer(IPAddress.Loopback);

    // Give the offer to the local session that is missing an audio track.
    var result = localSession.SetRemoteDescription(SIP.App.SdpType.offer, offer);

    logger.LogDebug($"Set remote description on local session result {result}.");

    Assert.Equal(SetDescriptionResultEnum.OK, result);

    var answer = localSession.CreateAnswer(null);

    // The audio announcement in the answer should be marked inactive since there is no local audio track.
    Assert.Equal(MediaStreamStatusEnum.Inactive, answer.Media.Where(x => x.Media == SDPMediaTypesEnum.audio).Single().MediaStreamStatus);
    Assert.Equal(MediaStreamStatusEnum.SendRecv, answer.Media.Where(x => x.Media == SDPMediaTypesEnum.video).Single().MediaStreamStatus);

    localSession.Close("normal");
    remoteSession.Close("normal");
}
public async Task HandleInvalidSdpPortOnAnswerUnitTest()
{
    logger.LogDebug("--> " + System.Reflection.MethodBase.GetCurrentMethod().Name);
    logger.BeginScope(System.Reflection.MethodBase.GetCurrentMethod().Name);

    SIPTransport transport = new SIPTransport();
    SIPUserAgent userAgent = new SIPUserAgent(transport, null);

    // The audio port of 79762 is purposefully invalid (greater than the maximum UDP port of 65535).
    string inviteReqStr =
@"INVITE sip:[email protected] SIP/2.0
Via: SIP/2.0/UDP 0.0.0.0;branch=z9hG4bK57441c4980b94e1686a06ae080be2935;rport
To: <sip:[email protected]>
From: <sip:0.0.0.0:0>;tag=MYILIYPHQD
Call-ID: ddf0e5a9687b4745925438da9000445d
CSeq: 1 INVITE
Max-Forwards: 70
Allow: ACK, BYE, CANCEL, INFO, INVITE, NOTIFY, OPTIONS, PRACK, REFER, REGISTER, SUBSCRIBE
Content-Length: 0

v=0
o=- 1838015445 0 IN IP4 127.0.0.1
s=-
c=IN IP4 127.0.0.1
t=0 0
m=audio 79762 RTP/AVP 0
a=rtpmap:0 PCMU/8000
a=sendrecv";

    SIPEndPoint dummySipEndPoint = new SIPEndPoint(new IPEndPoint(IPAddress.Any, 0));
    SIPMessageBuffer sipMessageBuffer = SIPMessageBuffer.ParseSIPMessage(inviteReqStr, dummySipEndPoint, dummySipEndPoint);
    SIPRequest inviteReq = SIPRequest.ParseSIPRequest(sipMessageBuffer);

    var uas = userAgent.AcceptCall(inviteReq);

    RTPSession rtpSession = new RTPSession(false, false, false);
    MediaStreamTrack audioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, new List<SDPMediaFormat> { new SDPMediaFormat(SDPMediaFormatsEnum.PCMU) });
    rtpSession.addTrack(audioTrack);

    var result = await userAgent.Answer(uas, rtpSession);

    Assert.False(result);

    rtpSession.Close("normal");
}
/// <summary>
/// Sends queued DTMF events, or otherwise the sounds of silence. If the destination is on the other
/// side of a NAT this is useful to open a pinhole and hopefully get the remote RTP stream through.
/// </summary>
/// <param name="rtpSocket">The socket we're using to send from.</param>
/// <param name="rtpSendSession">Our RTP sending session.</param>
/// <param name="cts">Cancellation token source to stop the call.</param>
private static async void SendRtp(Socket rtpSocket, RTPSession rtpSendSession, CancellationTokenSource cts)
{
    int samplingFrequency = RTPPayloadTypes.GetSamplingFrequency((RTPPayloadTypesEnum)rtpSendSession.PayloadType);
    uint rtpTimestampStep = (uint)(samplingFrequency * SILENCE_SAMPLE_PERIOD / 1000);
    uint bufferSize = (uint)SILENCE_SAMPLE_PERIOD;
    uint rtpSendTimestamp = 0;
    uint packetSentCount = 0;
    uint bytesSentCount = 0;

    while (cts.IsCancellationRequested == false)
    {
        if (_remoteRtpEndPoint != null)
        {
            // Check if there are any DTMF events to send. The dequeue is combined with the
            // empty check so the timestamp is only advanced when an event was actually sent.
            if (!_dtmfEvents.IsEmpty && _dtmfEvents.TryDequeue(out var rtpEvent))
            {
                await rtpSendSession.SendDtmfEvent(rtpSocket, _remoteRtpEndPoint, rtpEvent, rtpSendTimestamp, (ushort)SILENCE_SAMPLE_PERIOD, (ushort)rtpTimestampStep, cts);
                rtpSendTimestamp += rtpEvent.TotalDuration + rtpTimestampStep;
            }
            else
            {
                // If there are no DTMF events to send we'll send silence.
                byte[] sample = new byte[bufferSize / 2];
                for (int index = 0; index + 1 < sample.Length; index += 2)
                {
                    sample[index] = PCMU_SILENCE_BYTE_ZERO;
                    sample[index + 1] = PCMU_SILENCE_BYTE_ONE;
                }

                rtpSendSession.SendAudioFrame(rtpSocket, _remoteRtpEndPoint, rtpSendTimestamp, sample);
                rtpSendTimestamp += rtpTimestampStep;
                packetSentCount++;
                bytesSentCount += (uint)sample.Length;
            }
        }

        await Task.Delay(SILENCE_SAMPLE_PERIOD);
    }
}
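A sketch of the producer side of the _dtmfEvents queue consumed above. The RTPEvent constructor arguments (event ID, end-of-event flag, volume, total duration in timestamp units, payload type ID) follow the sipsorcery event class, but treat the exact signature, the 1200 duration and the payload type 101 as assumptions to check against the library version in use.

using System.Collections.Concurrent;

private static readonly ConcurrentQueue<RTPEvent> _dtmfEvents = new ConcurrentQueue<RTPEvent>();

private static void QueueDtmf(byte digit)
{
    // 1200 timestamp units is 150 ms at an 8 kHz clock; 101 is a commonly used
    // dynamic payload type for telephone-event (both assumed values).
    _dtmfEvents.Enqueue(new RTPEvent(digit, false, RTPEvent.DEFAULT_VOLUME, 1200, 101));
}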
public void ModifiedWellKnownFormatIDUnitTest()
{
    logger.LogDebug("--> " + System.Reflection.MethodBase.GetCurrentMethod().Name);
    logger.BeginScope(System.Reflection.MethodBase.GetCurrentMethod().Name);

    // A remote offer that re-assigns well-known format IDs: 8 (normally PCMA) is used
    // for OPUS and 12 is used for PCMA.
    string remoteSdp =
@"v=0
o=- 1986548327 0 IN IP4 127.0.0.1
c=IN IP4 127.0.0.1
m=audio 60640 RTP/AVP 8 12
a=rtpmap:8 OPUS/48000/2
a=rtpmap:12 PCMA/8000";

    // Create a local session with an audio track.
    RTPSession rtpSession = new RTPSession(false, false, false);
    MediaStreamTrack localAudioTrack = new MediaStreamTrack(
        SDPWellKnownMediaFormatsEnum.PCMU,
        SDPWellKnownMediaFormatsEnum.PCMA,
        SDPWellKnownMediaFormatsEnum.G722);
    rtpSession.addTrack(localAudioTrack);

    var offer = SDP.ParseSDPDescription(remoteSdp);

    logger.LogDebug($"Remote offer: {offer}");

    var result = rtpSession.SetRemoteDescription(SIP.App.SdpType.offer, offer);

    logger.LogDebug($"Set remote description on local session result {result}.");

    Assert.Equal(SetDescriptionResultEnum.OK, result);
    Assert.Equal(12, rtpSession.AudioLocalTrack.Capabilities.Single(x => x.Name() == "PCMA").ID);
    Assert.Equal("PCMA", rtpSession.GetSendingFormat(SDPMediaTypesEnum.audio).Name());

    var answer = rtpSession.CreateAnswer(null);

    logger.LogDebug($"Local answer: {answer}");

    Assert.Equal(12, answer.Media.Single().MediaFormats.Single().Key);
    Assert.Equal("PCMA", answer.Media.Single().MediaFormats.Single().Value.Name());

    rtpSession.Close("normal");
}
/// <summary>
/// Sends audio samples read from a PCMU encoded file, looping back to the start when the end of the
/// file is reached. If the destination is on the other side of a NAT this also helps to open a pinhole
/// and hopefully get the remote RTP stream through.
/// </summary>
/// <param name="rtpSocket">The socket we're using to send from.</param>
/// <param name="rtpSendSession">Our RTP sending session.</param>
/// <param name="cts">Cancellation token source to stop the call.</param>
private static async void SendRtp(Socket rtpSocket, RTPSession rtpSendSession, CancellationTokenSource cts)
{
    try
    {
        while (cts.IsCancellationRequested == false)
        {
            uint timestamp = 0;

            using (StreamReader sr = new StreamReader(AUDIO_FILE_PCMU))
            {
                DateTime lastSendReportAt = DateTime.Now;
                uint packetsSentCount = 0;
                uint bytesSentCount = 0;
                byte[] buffer = new byte[320];

                int bytesRead = sr.BaseStream.Read(buffer, 0, buffer.Length);

                while (bytesRead > 0 && !cts.IsCancellationRequested)
                {
                    if (rtpSendSession.DestinationEndPoint != null)
                    {
                        packetsSentCount++;
                        bytesSentCount += (uint)bytesRead;
                        rtpSendSession.SendAudioFrame(rtpSocket, rtpSendSession.DestinationEndPoint, timestamp, buffer);
                    }

                    // 320 PCMU bytes at an 8 kHz clock is 40 ms of audio, matching the delay below.
                    timestamp += (uint)buffer.Length;

                    if (DateTime.Now.Subtract(lastSendReportAt).TotalSeconds > RTP_REPORTING_PERIOD_SECONDS)
                    {
                        lastSendReportAt = DateTime.Now;
                        SIPSorcery.Sys.Log.Logger.LogDebug($"RTP send report {rtpSocket.LocalEndPoint}->{rtpSendSession.DestinationEndPoint} pkts {packetsSentCount} bytes {bytesSentCount}");
                    }

                    await Task.Delay(40, cts.Token);

                    bytesRead = sr.BaseStream.Read(buffer, 0, buffer.Length);
                }
            }
        }
    }
    catch (ObjectDisposedException) // Gets thrown when the RTP socket is closed. Can safely ignore.
    { }
}
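The AUDIO_FILE_PCMU constant above refers to a raw, headerless mu-law audio file sampled at 8 kHz. A hypothetical definition follows, along with an ffmpeg command (in the comment) that can produce such a file; the path is a placeholder.

// Any raw 8 kHz mono mu-law file works here, e.g. produced with:
//   ffmpeg -i source.mp3 -ac 1 -ar 8000 -f mulaw music.ulaw
private const string AUDIO_FILE_PCMU = "media/music.ulaw";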
public void SetRemoteSDPNoMediaStreamAttributeUnitTest()
{
    logger.LogDebug("--> " + System.Reflection.MethodBase.GetCurrentMethod().Name);
    logger.BeginScope(System.Reflection.MethodBase.GetCurrentMethod().Name);

    // A remote offer with no media stream status attribute; the default of sendrecv should be assumed.
    string remoteSdp =
@"v=0
o=- 1986548327 0 IN IP4 127.0.0.1
s=-
c=IN IP4 127.0.0.1
t=0 0
m=audio 60640 RTP/AVP 0 111
a=rtpmap:0 PCMU/8000
a=rtpmap:111 OPUS/48000/2";

    // Create a local session with an audio track.
    RTPSession rtpSession = new RTPSession(false, false, false);
    MediaStreamTrack localAudioTrack = new MediaStreamTrack(new AudioFormat(SDPWellKnownMediaFormatsEnum.PCMU));
    rtpSession.addTrack(localAudioTrack);

    var offer = SDP.ParseSDPDescription(remoteSdp);

    logger.LogDebug($"Remote offer: {offer}");

    var result = rtpSession.SetRemoteDescription(SIP.App.SdpType.offer, offer);

    logger.LogDebug($"Set remote description on local session result {result}.");

    Assert.Equal(SetDescriptionResultEnum.OK, result);

    var answer = rtpSession.CreateAnswer(null);

    logger.LogDebug($"Local answer: {answer}");

    Assert.Equal(0, rtpSession.AudioLocalTrack.Capabilities.Single(x => x.Name() == "PCMU").ID);
    Assert.Equal(MediaStreamStatusEnum.SendRecv, rtpSession.AudioLocalTrack.StreamStatus);
    Assert.NotNull(rtpSession.AudioRemoteTrack);
    Assert.Equal(MediaStreamStatusEnum.SendRecv, rtpSession.AudioRemoteTrack.StreamStatus);

    rtpSession.Close("normal");
}
public RtpFramer(String RTPipAddress, int RTPport, String RTCPipAddress, int RTCPport, String forwardIP, int forwardPort)
{
    Session = new RTPSession();
    Sender = new RTPSender();
    Receiver = new RTPReceiver();

    var senderEp = new IPEndPoint(IPAddress.Parse(forwardIP), forwardPort);
    senderParticipant = new RTPParticipant(senderEp);
    Sender.AddParticipant(senderParticipant);
    Session.AddSender(Sender);

    var rtpEp = new IPEndPoint(IPAddress.Parse(RTPipAddress), RTPport);
    var rtcpEp = new IPEndPoint(IPAddress.Parse(RTCPipAddress), RTCPport);
    participant = new RTPParticipant(rtpEp, rtcpEp);

    Session.NewRTPPacket = NewRTPPacket;
    Session.NewRTCPPacket = NewRTCPPacket;

    Receiver.AddParticipant(participant);
    Session.AddReceiver(Receiver);
}
// Process each RTSP message that is received
private void RTSP_Message_Received(object sender, RtspChunkEventArgs e)
{
    // Cast the 'sender' and 'e' into the RTSP Listener (the socket) and the RTSP Message.
    Rtsp.RtspListener listener = sender as Rtsp.RtspListener;
    Rtsp.Messages.RtspMessage message = e.Message as Rtsp.Messages.RtspMessage;

    Console.WriteLine("RTSP message received " + message);

    // Handle OPTIONS message
    if (message is Rtsp.Messages.RtspRequestOptions)
    {
        // Create the response to OPTIONS.
        Rtsp.Messages.RtspResponse options_response = (e.Message as Rtsp.Messages.RtspRequestOptions).CreateResponse();
        listener.SendMessage(options_response);
    }

    // Handle DESCRIBE message
    if (message is Rtsp.Messages.RtspRequestDescribe)
    {
        String requested_url = (message as Rtsp.Messages.RtspRequestDescribe).RtspUri.ToString();
        Console.WriteLine("Request for " + requested_url);

        // TODO. Check the requested_url is valid. In this example we accept any RTSP URL.

        // Make the Base64 SPS and PPS.
        byte[] raw_sps = h264_encoder.GetRawSPS(); // no 0x00 0x00 0x00 0x01 or 32 bit size header
        byte[] raw_pps = h264_encoder.GetRawPPS(); // no 0x00 0x00 0x00 0x01 or 32 bit size header
        String sps_str = Convert.ToBase64String(raw_sps);
        String pps_str = Convert.ToBase64String(raw_pps);

        // Generate the SDP.
        // The sprop-parameter-sets provide the SPS and PPS for H264 video.
        // The packetization-mode defines the H264 over RTP payloads used but is optional.
        StringBuilder sdp = new StringBuilder();
        sdp.Append("v=0\n");
        sdp.Append("o=user 123 0 IN IP4 0.0.0.0\n");
        sdp.Append("s=SharpRTSP Test Camera\n");
        sdp.Append("m=video 0 RTP/AVP 96\n");
        sdp.Append("c=IN IP4 0.0.0.0\n");
        sdp.Append("a=control:trackID=0\n");
        sdp.Append("a=rtpmap:96 H264/90000\n");
        sdp.Append("a=fmtp:96 profile-level-id=42A01E; sprop-parameter-sets=" + sps_str + "," + pps_str + ";\n");

        byte[] sdp_bytes = Encoding.ASCII.GetBytes(sdp.ToString());

        // Create the response to DESCRIBE. This must include the Session Description Protocol (SDP).
        Rtsp.Messages.RtspResponse describe_response = (e.Message as Rtsp.Messages.RtspRequestDescribe).CreateResponse();
        describe_response.AddHeader("Content-Base: " + requested_url);
        describe_response.AddHeader("Content-Type: application/sdp");
        describe_response.Data = sdp_bytes;
        describe_response.AdjustContentLength();
        listener.SendMessage(describe_response);
    }

    // Handle SETUP message
    if (message is Rtsp.Messages.RtspRequestSetup)
    {
        var setupMessage = message as Rtsp.Messages.RtspRequestSetup;

        // Check the RTSP transport.
        // If it is UDP or Multicast, create the sockets.
        // If it is RTP over RTSP we send data via the RTSP Listener.

        // FIXME client may send more than one possible transport. Very rare.
        Rtsp.Messages.RtspTransport transport = setupMessage.GetTransports()[0];

        // Create a 'Session' and add it to the Session List.
        // ToDo - Check the Track ID. In the SDP the H264 video track is TrackID 0.
        RTPSession new_session = new RTPSession();
        new_session.session_id = session_count.ToString();
        new_session.listener = listener;
        new_session.sequence_number = (UInt16)rnd.Next(65535); // start with a random 16 bit sequence number
        new_session.ssrc = 1;

        // Construct the Transport: reply from the Server to the client.
        Rtsp.Messages.RtspTransport transport_reply = new Rtsp.Messages.RtspTransport();

        if (transport.LowerTransport == Rtsp.Messages.RtspTransport.LowerTransportType.TCP)
        {
            // RTP over RTSP mode
            transport_reply.LowerTransport = Rtsp.Messages.RtspTransport.LowerTransportType.TCP;
            transport_reply.Interleaved = new Rtsp.Messages.PortCouple(transport.Interleaved.First, transport.Interleaved.Second);
        }

        if (transport.LowerTransport == Rtsp.Messages.RtspTransport.LowerTransportType.UDP && transport.IsMulticast == false)
        {
            // RTP over UDP mode.
            // Create a pair of UDP sockets and pass the ports of the two sockets back in the reply.
            transport_reply.LowerTransport = Rtsp.Messages.RtspTransport.LowerTransportType.UDP;
            transport_reply.IsMulticast = false;
            transport_reply.ClientPort = transport.ClientPort; // FIX

            // for now until implemented
            transport_reply = null;
        }

        if (transport.LowerTransport == Rtsp.Messages.RtspTransport.LowerTransportType.UDP && transport.IsMulticast == true)
        {
            // RTP over Multicast UDP mode.
            // Create a pair of UDP sockets in Multicast Mode and pass the ports of the two sockets back in the reply.
            transport_reply.LowerTransport = Rtsp.Messages.RtspTransport.LowerTransportType.UDP;
            transport_reply.IsMulticast = true;
            transport_reply.Port = new Rtsp.Messages.PortCouple(7000, 7001); // FIX

            // for now until implemented
            transport_reply = null;
        }

        if (transport_reply != null)
        {
            // Add the transports to the Session.
            new_session.client_transport = transport;
            new_session.transport_reply = transport_reply;

            // Add the new session to the Sessions List.
            rtp_list.Add(new_session);
            session_count++;

            Rtsp.Messages.RtspResponse setup_response = setupMessage.CreateResponse();
            setup_response.Headers[Rtsp.Messages.RtspHeaderNames.Transport] = transport_reply.ToString();
            setup_response.Session = new_session.session_id;
            listener.SendMessage(setup_response);
        }
        else
        {
            Rtsp.Messages.RtspResponse setup_response = setupMessage.CreateResponse();
            // unsupported transport
            setup_response.ReturnCode = 461;
            listener.SendMessage(setup_response);
        }
    }

    // Handle PLAY message
    if (message is Rtsp.Messages.RtspRequestPlay)
    {
        lock (rtp_list)
        {
            // Search for the Session in the Sessions List and change its state to "PLAY".
            foreach (RTPSession session in rtp_list)
            {
                if (session.session_id.Equals(message.Session))
                {
                    // found the session
                    session.play = true;
                    break;
                }
            }
        }

        // ToDo - only send back the OK response if the Session in the RTSP message was found.
        Rtsp.Messages.RtspResponse play_response = (e.Message as Rtsp.Messages.RtspRequestPlay).CreateResponse();
        listener.SendMessage(play_response);
    }

    // Handle PAUSE message
    if (message is Rtsp.Messages.RtspRequestPause)
    {
        lock (rtp_list)
        {
            // Search for the Session in the Sessions List and change its state out of "PLAY".
            foreach (RTPSession session in rtp_list)
            {
                if (session.session_id.Equals(message.Session))
                {
                    // found the session
                    session.play = false;
                    break;
                }
            }
        }

        // ToDo - only send back the OK response if the Session in the RTSP message was found.
        Rtsp.Messages.RtspResponse pause_response = (e.Message as Rtsp.Messages.RtspRequestPause).CreateResponse();
        listener.SendMessage(pause_response);
    }

    // Handle GET_PARAMETER message, often used as a Keep-Alive.
    if (message is Rtsp.Messages.RtspRequestGetParameter)
    {
        // Create the response to GET_PARAMETER.
        Rtsp.Messages.RtspResponse getparameter_response = (e.Message as Rtsp.Messages.RtspRequestGetParameter).CreateResponse();
        listener.SendMessage(getparameter_response);
    }
}
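For completeness, this is roughly how the handler above gets attached in a SharpRTSP style server: accept a TCP connection, wrap it in an RtspListener and subscribe to its message event. The type and member names follow the SharpRTSP library but should be treated as assumptions to verify against the version in use.

// Accept one client and hook up the RTSP message handler (sketch).
TcpListener serverListener = new TcpListener(IPAddress.Any, 8554);
serverListener.Start();
TcpClient oneClient = serverListener.AcceptTcpClient();

Rtsp.RtspListener newListener = new Rtsp.RtspListener(new Rtsp.RtspTcpTransport(oneClient));
newListener.MessageReceived += RTSP_Message_Received;
newListener.Start();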