Example #1
        /// <summary>
        /// Adds an additional RTP stream to this session. The effect of this is to multiplex
        /// two or more RTP sessions on a single socket. Multiplexing is used by WebRTC.
        /// </summary>
        /// <param name="payloadTypeID">The payload type ID for this RTP stream. It's what gets set in the payload
        /// type ID field in the RTP header.</param>
        /// <param name="mediaAnnouncement">The SDP media announcement describing this stream.</param>
        /// <returns>The ID of the stream of this media type. It must be supplied when
        /// doing a send for this stream.</returns>
        public int AddStream(int payloadTypeID, SDPMediaAnnouncement mediaAnnouncement)
        {
            int nextID        = m_sessionStreams.OrderByDescending(x => x.ID).First().ID + 1;
            var sessionStream = new RTPSessionStream(nextID, payloadTypeID);

            m_sessionStreams.Add(sessionStream);
            return sessionStream.ID;
        }
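A minimal usage sketch, assuming the RTPSession constructor from Example #2 below; the payload type values and the vp8Announcement variable are hypothetical:

        // Create a session whose initial stream (ID 0) carries PCMU audio (payload type 0),
        // then multiplex a VP8 video stream (payload type 100) onto the same socket.
        var session = new RTPSession(0, AddressFamily.InterNetwork, false);
        int videoStreamID = session.AddStream(100, vp8Announcement); // vp8Announcement: hypothetical SDPMediaAnnouncement.
        // videoStreamID must be supplied to the send methods when sending on this stream.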
Example #2
        /// <summary>
        /// Creates a new RTP session. The synchronisation source and sequence number are initialised to
        /// pseudo-random values.
        /// </summary>
        /// <param name="payloadTypeID">The payload type ID for this RTP stream. It's what gets set in the payload
        /// type ID field in the RTP header.</param>
        /// <param name="addrFamily">Determines whether the RTP channel will use an IPv4 or IPv6 socket.</param>
        /// <param name="isRtcpMultiplexed">If true RTCP reports will be multiplexed with RTP on a single channel.
        /// If false (standard mode) then a separate socket is used to send and receive RTCP reports.</param>
        public RTPSession(
            int payloadTypeID,
            AddressFamily addrFamily,
            bool isRtcpMultiplexed)
        {
            var sessionStream = new RTPSessionStream(0, payloadTypeID);

            m_sessionStreams.Add(sessionStream);
            InitialiseRtpChannel(addrFamily, isRtcpMultiplexed);
        }
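A construction sketch showing the two RTCP modes described in the doc comment; the payload type values are illustrative:

        // Standard mode: RTCP reports travel over their own socket.
        var sipSession = new RTPSession(0, AddressFamily.InterNetwork, isRtcpMultiplexed: false);
        // WebRTC-style mode: RTCP reports share the single RTP socket.
        var webRtcSession = new RTPSession(96, AddressFamily.InterNetwork, isRtcpMultiplexed: true);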
Example #3
        /// <summary>
        /// Sends a VP8 encoded frame, splitting it across as many RTP packets as required.
        /// </summary>
        /// <param name="timestamp">The RTP timestamp to set on every packet in the frame.</param>
        /// <param name="buffer">The VP8 encoded payload to transmit.</param>
        /// <param name="streamID">For multiplexed sessions the ID of the stream to send on. Defaults to 0.</param>
        public void SendVp8Frame(uint timestamp, byte[] buffer, int streamID = 0)
        {
            if (m_isClosed || m_rtpEventInProgress || DestinationEndPoint == null)
            {
                return;
            }

            try
            {
                RTPSessionStream sessionStream = m_sessionStreams.Single(x => x.ID == streamID);

                for (int index = 0; index * RTP_MAX_PAYLOAD < buffer.Length; index++)
                {
                    // Wrap the sequence number back to zero once it reaches the ushort maximum.
                    sessionStream.SeqNum = (ushort)(sessionStream.SeqNum % UInt16.MaxValue);

                    int offset               = index * RTP_MAX_PAYLOAD;
                    int payloadLength        = (offset + RTP_MAX_PAYLOAD < buffer.Length) ? RTP_MAX_PAYLOAD : buffer.Length - offset;
                    int srtpProtectionLength = (SrtpProtect != null) ? SRTP_AUTH_KEY_LENGTH : 0;

                    // Single byte VP8 payload descriptor: 0x10 sets the S bit (start of VP8 partition) on the first packet.
                    byte[] vp8HeaderBytes = (index == 0) ? new byte[] { 0x10 } : new byte[] { 0x00 };

                    RTPPacket rtpPacket = new RTPPacket(payloadLength + vp8HeaderBytes.Length + srtpProtectionLength);
                    rtpPacket.Header.SyncSource     = sessionStream.Ssrc;
                    rtpPacket.Header.SequenceNumber = sessionStream.SeqNum++;
                    rtpPacket.Header.Timestamp      = timestamp;
                    rtpPacket.Header.MarkerBit      = ((offset + payloadLength) >= buffer.Length) ? 1 : 0; // Set marker bit for the last packet in the frame.
                    rtpPacket.Header.PayloadType    = sessionStream.PayloadTypeID;

                    Buffer.BlockCopy(vp8HeaderBytes, 0, rtpPacket.Payload, 0, vp8HeaderBytes.Length);
                    Buffer.BlockCopy(buffer, offset, rtpPacket.Payload, vp8HeaderBytes.Length, payloadLength);

                    OnRtpPacketSent?.Invoke(rtpPacket);

                    var rtpBuffer = rtpPacket.GetBytes();

                    int rtperr = SrtpProtect == null ? 0 : SrtpProtect(rtpBuffer, rtpBuffer.Length - srtpProtectionLength);
                    if (rtperr != 0)
                    {
                        logger.LogError("SendVp8Frame SRTP packet protection failed, result " + rtperr + ".");
                    }
                    else
                    {
                        RtpChannel.SendAsync(RTPChannelSocketsEnum.RTP, DestinationEndPoint, rtpBuffer);
                    }

                    m_lastRtpTimestamp = timestamp;
                }
            }
            catch (SocketException sockExcp)
            {
                logger.LogError("SocketException SendVp8Frame. " + sockExcp.Message);
            }
        }
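A send-loop sketch for the method above, assuming the standard 90 kHz RTP video clock at 30 frames per second; session and encodedVp8Frames are hypothetical:

        uint vp8Timestamp = 0;
        const uint VP8_TIMESTAMP_SPACING = 90000 / 30; // 90 kHz video clock at 30 fps = 3000 ticks per frame.
        foreach (byte[] encodedFrame in encodedVp8Frames) // encodedVp8Frames: hypothetical encoder output.
        {
            // All packets in a frame share one timestamp; the step is applied per frame.
            session.SendVp8Frame(vp8Timestamp, encodedFrame);
            vp8Timestamp += VP8_TIMESTAMP_SPACING;
        }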
Example #4
        /// <summary>
        /// Sends an audio sample, splitting it across as many RTP packets as required.
        /// </summary>
        /// <param name="timestamp">The RTP timestamp to set on every packet in the sample.</param>
        /// <param name="buffer">The audio payload to transmit.</param>
        /// <param name="streamID">For multiplexed sessions the ID of the stream to send on. Defaults to 0.</param>
        public void SendAudioFrame(uint timestamp, byte[] buffer, int streamID = 0)
        {
            if (m_isClosed || m_rtpEventInProgress || DestinationEndPoint == null)
            {
                return;
            }

            try
            {
                RTPSessionStream sessionStream = m_sessionStreams.Single(x => x.ID == streamID);

                for (int index = 0; index * RTP_MAX_PAYLOAD < buffer.Length; index++)
                {
                    sessionStream.SeqNum = (ushort)(sessionStream.SeqNum % UInt16.MaxValue);

                    int offset               = index * RTP_MAX_PAYLOAD;
                    int payloadLength        = (offset + RTP_MAX_PAYLOAD < buffer.Length) ? RTP_MAX_PAYLOAD : buffer.Length - offset;
                    int srtpProtectionLength = (SrtpProtect != null) ? SRTP_AUTH_KEY_LENGTH : 0;

                    RTPPacket rtpPacket = new RTPPacket(payloadLength + srtpProtectionLength);
                    rtpPacket.Header.SyncSource     = sessionStream.Ssrc;
                    rtpPacket.Header.SequenceNumber = sessionStream.SeqNum++;
                    rtpPacket.Header.Timestamp      = timestamp;
                    // RFC3551 specifies that for audio the marker bit should always be 0 except for when returning
                    // from silence suppression. For video the marker bit DOES get set to 1 for the last packet
                    // in a frame.
                    rtpPacket.Header.MarkerBit   = 0;
                    rtpPacket.Header.PayloadType = sessionStream.PayloadTypeID;

                    Buffer.BlockCopy(buffer, offset, rtpPacket.Payload, 0, payloadLength);

                    OnRtpPacketSent?.Invoke(rtpPacket);

                    var rtpBuffer = rtpPacket.GetBytes();

                    int rtperr = SrtpProtect == null ? 0 : SrtpProtect(rtpBuffer, rtpBuffer.Length - srtpProtectionLength);
                    if (rtperr != 0)
                    {
                        logger.LogError("SendAudioFrame SRTP packet protection failed, result " + rtperr + ".");
                    }
                    else
                    {
                        RtpChannel.SendAsync(RTPChannelSocketsEnum.RTP, DestinationEndPoint, rtpBuffer);
                    }

                    m_lastRtpTimestamp = timestamp;
                }
            }
            catch (SocketException sockExcp)
            {
                logger.LogError("SocketException SendAudioFrame. " + sockExcp.Message);
            }
        }
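A send-loop sketch for 20 ms PCMU samples on the standard 8 kHz audio clock; session and pcmuSamples are hypothetical:

        uint audioTimestamp = 0;
        const uint PCMU_TIMESTAMP_SPACING = 8000 / 50; // 8 kHz clock, 20 ms per sample = 160 ticks.
        foreach (byte[] pcmuSample in pcmuSamples) // pcmuSamples: hypothetical source of 160-byte blocks.
        {
            session.SendAudioFrame(audioTimestamp, pcmuSample);
            audioTimestamp += PCMU_TIMESTAMP_SPACING;
        }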
Example #5
        /// <summary>
        /// Helper method to send a low quality JPEG image over RTP. This method supports a very abbreviated version of RFC 2435 "RTP Payload Format for JPEG-compressed Video".
        /// It's intended as a quick convenient way to send something like a test pattern image over an RTSP connection. More than likely it won't be suitable when a high
        /// quality image is required since the header used in this method does not support quantization tables.
        /// </summary>
        /// <param name="jpegBytes">The raw encoded bytes of the JPEG image to transmit.</param>
        /// <param name="jpegQuality">The encoder quality of the JPEG image.</param>
        /// <param name="jpegWidth">The width of the JPEG image.</param>
        /// <param name="jpegHeight">The height of the JPEG image.</param>
        /// <param name="framesPerSecond">The rate at which the JPEG frames are being transmitted at. used to calculate the timestamp.</param>
        public void SendJpegFrame(uint timestamp, byte[] jpegBytes, int jpegQuality, int jpegWidth, int jpegHeight, int streamID = 0)
        {
            if (m_isClosed || m_rtpEventInProgress || DestinationEndPoint == null)
            {
                return;
            }

            try
            {

                RTPSessionStream sessionStream = m_sessionStreams.Single(x => x.ID == streamID);

                for (int index = 0; index * RTP_MAX_PAYLOAD < jpegBytes.Length; index++)
                {
                    uint offset        = Convert.ToUInt32(index * RTP_MAX_PAYLOAD);
                    int  payloadLength = ((index + 1) * RTP_MAX_PAYLOAD < jpegBytes.Length) ? RTP_MAX_PAYLOAD : jpegBytes.Length - index * RTP_MAX_PAYLOAD;

                    byte[] jpegHeader = CreateLowQualityRtpJpegHeader(offset, jpegQuality, jpegWidth, jpegHeight);

                    List<byte> packetPayload = new List<byte>();
                    packetPayload.AddRange(jpegHeader);
                    packetPayload.AddRange(jpegBytes.Skip(index * RTP_MAX_PAYLOAD).Take(payloadLength));

                    RTPPacket rtpPacket = new RTPPacket(packetPayload.Count);
                    rtpPacket.Header.SyncSource     = sessionStream.Ssrc;
                    rtpPacket.Header.SequenceNumber = sessionStream.SeqNum++;
                    rtpPacket.Header.Timestamp      = timestamp;
                    rtpPacket.Header.MarkerBit      = ((index + 1) * RTP_MAX_PAYLOAD < jpegBytes.Length) ? 0 : 1;
                    rtpPacket.Header.PayloadType    = (int)SDPMediaFormatsEnum.JPEG;
                    rtpPacket.Payload = packetPayload.ToArray();

                    OnRtpPacketSent?.Invoke(rtpPacket);

                    byte[] rtpBytes = rtpPacket.GetBytes();

                    RtpChannel.SendAsync(RTPChannelSocketsEnum.RTP, DestinationEndPoint, rtpBytes);
                }
            }
            catch (SocketException sockExcp)
            {
                logger.LogError("SocketException SendJpegFrame. " + sockExcp.Message);
            }
        }
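A usage sketch for the method above, sending a pre-encoded test pattern at 5 fps on the 90 kHz video clock; session and testPatternJpegBytes are hypothetical:

        uint jpegTimestamp = 0;
        // Quality 75, 640 x 480 image. At 5 fps on the 90 kHz video clock the timestamp
        // steps by 90000 / 5 = 18000 ticks per frame.
        session.SendJpegFrame(jpegTimestamp, testPatternJpegBytes, 75, 640, 480);
        jpegTimestamp += 90000 / 5;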
Example #6
 /// <summary>
 /// Adds an additional RTP stream to this session. The effect of this is to multiplex
 /// two or more RTP sessions on a single socket. Multiplexing is used by WebRTC.
 /// If a stream already exists for the media type its remote payload IDs are updated instead.
 /// </summary>
 /// <param name="mediaType">The type of media for this stream. When multiplexing streams on an
 /// RTP session there can be only one stream per media type.</param>
 /// <param name="payloadTypeID">The payload type ID for this RTP stream. It's what gets set in the payload
 /// type ID field in the RTP header.</param>
 /// <param name="remotePayloadIDs">A list of the payload IDs the remote party can set in their RTP headers.</param>
 public void AddStream(SDPMediaTypesEnum mediaType, int payloadTypeID, List<int> remotePayloadIDs)
 {
     if (!(mediaType == SDPMediaTypesEnum.audio || mediaType == SDPMediaTypesEnum.video))
     {
         throw new ApplicationException($"The RTPSession does not know how to transmit media type {mediaType}.");
     }
     else if (mediaType == SDPMediaTypesEnum.audio && m_audioStream != null)
     {
         m_audioStream.UpdateRemotePayloadIDs(remotePayloadIDs);
     }
     else if (mediaType == SDPMediaTypesEnum.video && m_videoStream != null)
     {
         m_videoStream.UpdateRemotePayloadIDs(remotePayloadIDs);
     }
     else
     {
         if (mediaType == SDPMediaTypesEnum.audio)
         {
             m_audioStream      = new RTPSessionStream(SDPMediaTypesEnum.audio, payloadTypeID, remotePayloadIDs);
             m_audioRtcpSession = new RTCPSession(SDPMediaTypesEnum.audio, m_audioStream.Ssrc);
             m_audioRtcpSession.OnReportReadyToSend += SendRtcpReport;
             if (!IsSecure)
             {
                 m_audioRtcpSession.Start();
             }
         }
         else if (mediaType == SDPMediaTypesEnum.video)
         {
             m_videoStream      = new RTPSessionStream(SDPMediaTypesEnum.video, payloadTypeID, remotePayloadIDs);
             m_videoRtcpSession = new RTCPSession(SDPMediaTypesEnum.video, m_videoStream.Ssrc);
             m_videoRtcpSession.OnReportReadyToSend += SendRtcpReport;
             if (!IsSecure)
             {
                 m_videoRtcpSession.Start();
             }
         }
     }
 }
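A usage sketch for this overload; the payload IDs are illustrative (0 = PCMU, 101 = telephone-event, 100 = VP8) and session is a hypothetical RTPSession instance:

     // One stream per media type. Calling AddStream again for an existing media type
     // only updates the remote payload IDs.
     session.AddStream(SDPMediaTypesEnum.audio, 0, new List<int> { 0, 101 });
     session.AddStream(SDPMediaTypesEnum.video, 100, new List<int> { 100 });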
Example #7
        /// <summary>
        /// Sends an RTP event for a DTMF tone as per RFC2833. Sending the event requires multiple packets to be sent.
        /// This method will hold onto the socket until all the packets required for the event have been sent. The send
        /// can be cancelled using the cancellation token.
        /// </summary>
        /// <param name="rtpEvent">The RTP event to send.</param>
        /// <param name="cancellationToken">CancellationToken to allow the operation to be cancelled prematurely.</param>
        /// <param name="clockRate">To send an RTP event the clock rate of the underlying stream needs to be known.</param>
        /// <param name="streamID">For multiplexed sessions the ID of the stream to send the event on. Defaults to 0
        /// for single stream sessions.</param>
        public async Task SendDtmfEvent(
            RTPEvent rtpEvent,
            CancellationToken cancellationToken,
            int clockRate = DEFAULT_AUDIO_CLOCK_RATE,
            int streamID  = 0)
        {
            if (m_isClosed || m_rtpEventInProgress || DestinationEndPoint == null)
            {
                logger.LogWarning("SendDtmfEvent request ignored as the session is closed, no destination is set or an RTP event is already in progress.");
                return;
            }

            try
            {
                RTPSessionStream sessionStream = m_sessionStreams.Single(x => x.ID == streamID);

                m_rtpEventInProgress = true;
                uint startTimestamp = m_lastRtpTimestamp;

                // The sample period in milliseconds being used for the media stream that the event
                // is being inserted into. Should be set to 50ms if main media stream is dynamic or
                // sample period is unknown.
                int samplePeriod = RTP_EVENT_DEFAULT_SAMPLE_PERIOD_MS;

                // The RTP timestamp step corresponding to the sampling period. This can change depending
                // on the codec being used. For example using PCMU with a sampling frequency of 8000Hz and a sample period of 50ms
                // the timestamp step is 400 (8000 / (1000 / 50)). For a sample period of 20ms it's 160 (8000 / (1000 / 20)).
                ushort rtpTimestampStep = (ushort)(clockRate * samplePeriod / 1000);

                // If only the minimum number of packets are being sent then they are both the start and end of the event.
                rtpEvent.EndOfEvent = (rtpEvent.TotalDuration <= rtpTimestampStep);
                // The DTMF tone is generally multiple RTP events. Each event has a duration of the RTP timestamp step.
                rtpEvent.Duration = rtpTimestampStep;

                // Send the start of event packets.
                for (int i = 0; i < RTPEvent.DUPLICATE_COUNT && !cancellationToken.IsCancellationRequested; i++)
                {
                    byte[] buffer = rtpEvent.GetEventPayload();

                    int markerBit = (i == 0) ? 1 : 0;  // Set marker bit for the first packet in the event.
                    SendRtpPacket(RtpChannel, DestinationEndPoint, buffer, startTimestamp, markerBit, rtpEvent.PayloadTypeID, sessionStream.Ssrc, sessionStream.SeqNum);

                    sessionStream.SeqNum++;
                }

                await Task.Delay(samplePeriod, cancellationToken);

                if (!rtpEvent.EndOfEvent)
                {
                    // Send the progressive event packets
                    while ((rtpEvent.Duration + rtpTimestampStep) < rtpEvent.TotalDuration && !cancellationToken.IsCancellationRequested)
                    {
                        rtpEvent.Duration += rtpTimestampStep;
                        byte[] buffer = rtpEvent.GetEventPayload();

                        SendRtpPacket(RtpChannel, DestinationEndPoint, buffer, startTimestamp, 0, rtpEvent.PayloadTypeID, sessionStream.Ssrc, sessionStream.SeqNum);

                        sessionStream.SeqNum++;

                        await Task.Delay(samplePeriod, cancellationToken);
                    }

                    // Send the end of event packets.
                    for (int j = 0; j < RTPEvent.DUPLICATE_COUNT && !cancellationToken.IsCancellationRequested; j++)
                    {
                        rtpEvent.EndOfEvent = true;
                        rtpEvent.Duration   = rtpEvent.TotalDuration;
                        byte[] buffer = rtpEvent.GetEventPayload();

                        SendRtpPacket(RtpChannel, DestinationEndPoint, buffer, startTimestamp, 0, rtpEvent.PayloadTypeID, sessionStream.Ssrc, sessionStream.SeqNum);

                        sessionStream.SeqNum++;
                    }
                }
            }
            catch (SocketException sockExcp)
            {
                logger.LogError("SocketException SendDtmfEvent. " + sockExcp.Message);
            }
            catch (TaskCanceledException)
            {
                logger.LogWarning("SendDtmfEvent was cancelled by caller.");
            }
            finally
            {
                m_rtpEventInProgress = false;
            }
        }
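A usage sketch for sending the DTMF digit 5; the RTPEvent constructor arguments shown (event ID, end-of-event flag, volume, total duration, payload type) are an assumption, so check the RTPEvent class for the exact signature:

        // Event ID 5 represents the DTMF digit '5'. A total duration of 1200 timestamp
        // units is 150 ms at the 8 kHz clock. Payload type 101 is the conventional
        // RFC2833 telephone-event payload ID. Constructor shape is assumed here.
        var dtmfEvent = new RTPEvent(5, false, 10, 1200, 101);
        await session.SendDtmfEvent(dtmfEvent, CancellationToken.None);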
Example #8
        /// <summary>
        /// H264 frames need a two byte header when transmitted over RTP.
        /// </summary>
        /// <param name="frame">The H264 encoded frame to transmit.</param>
        /// <param name="frameSpacing">The increment to add to the RTP timestamp for each new frame.</param>
        /// <param name="payloadType">The payload type to set on the RTP packet.</param>
        public void SendH264Frame(uint timestamp, byte[] frame, uint frameSpacing, int payloadType, int streamID = 0)
        {
            if (m_isClosed || m_rtpEventInProgress || DestinationEndPoint == null)
            {
                return;
            }

            try
            {

                RTPSessionStream sessionStream = m_sessionStreams.Single(x => x.ID == streamID);

                for (int index = 0; index * RTP_MAX_PAYLOAD < frame.Length; index++)
                {
                    int payloadLength = ((index + 1) * RTP_MAX_PAYLOAD < frame.Length) ? RTP_MAX_PAYLOAD : frame.Length - index * RTP_MAX_PAYLOAD;

                    RTPPacket rtpPacket = new RTPPacket(payloadLength + H264_RTP_HEADER_LENGTH);
                    rtpPacket.Header.SyncSource     = sessionStream.Ssrc;
                    rtpPacket.Header.SequenceNumber = sessionStream.SeqNum++;
                    rtpPacket.Header.Timestamp      = timestamp;
                    rtpPacket.Header.MarkerBit      = 0;
                    rtpPacket.Header.PayloadType    = payloadType;

                    // Start RTP packet in frame 0x1c 0x89
                    // Middle RTP packet in frame 0x1c 0x09
                    // Last RTP packet in frame 0x1c 0x49

                    byte[] h264Header = new byte[] { 0x1c, 0x09 };

                    if (index == 0 && frame.Length <= RTP_MAX_PAYLOAD)
                    {
                        // First and last RTP packet in the frame.
                        h264Header = new byte[] { 0x1c, 0x49 };
                        rtpPacket.Header.MarkerBit = 1;
                    }
                    else if (index == 0)
                    {
                        h264Header = new byte[] { 0x1c, 0x89 };
                    }
                    else if ((index + 1) * RTP_MAX_PAYLOAD >= frame.Length)
                    {
                        h264Header = new byte[] { 0x1c, 0x49 };
                        rtpPacket.Header.MarkerBit = 1;
                    }

                    var h264Stream = frame.Skip(index * RTP_MAX_PAYLOAD).Take(payloadLength).ToList();
                    h264Stream.InsertRange(0, h264Header);
                    rtpPacket.Payload = h264Stream.ToArray();

                    OnRtpPacketSent?.Invoke(rtpPacket);

                    byte[] rtpBytes = rtpPacket.GetBytes();

                    RtpChannel.SendAsync(RTPChannelSocketsEnum.RTP, DestinationEndPoint, rtpBytes);
                }
            }
            catch (SocketException sockExcp)
            {
                logger.LogError("SocketException SendH264Frame. " + sockExcp.Message);
            }
        }
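A send-loop sketch for the method above, assuming the 90 kHz video clock at 30 fps and a dynamic payload type of 96; session and encodedH264Frames are hypothetical:

        uint h264Timestamp = 0;
        const uint H264_FRAME_SPACING = 90000 / 30; // 90 kHz video clock at 30 fps.
        foreach (byte[] encodedFrame in encodedH264Frames) // encodedH264Frames: hypothetical encoder output.
        {
            session.SendH264Frame(h264Timestamp, encodedFrame, H264_FRAME_SPACING, 96);
            h264Timestamp += H264_FRAME_SPACING;
        }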