Example No. 1
        public void SendVp8Frame(uint timestamp, byte[] buffer, int streamID = 0)
        {
            if (m_isClosed || m_rtpEventInProgress || DestinationEndPoint == null)
            {
                return;
            }

            try
            {
                RTPSessionStream sessionStream = m_sessionStreams.Where(x => x.ID == streamID).Single();

                for (int index = 0; index * RTP_MAX_PAYLOAD < buffer.Length; index++)
                {
                    // Reset the sequence number to zero once it reaches the ushort maximum.
                    sessionStream.SeqNum = (ushort)(sessionStream.SeqNum % UInt16.MaxValue);

                    int offset               = (index == 0) ? 0 : (index * RTP_MAX_PAYLOAD);
                    int payloadLength        = (offset + RTP_MAX_PAYLOAD < buffer.Length) ? RTP_MAX_PAYLOAD : buffer.Length - offset;
                    int srtpProtectionLength = (SrtpProtect != null) ? SRTP_AUTH_KEY_LENGTH : 0;

                    byte[] vp8HeaderBytes = (index == 0) ? new byte[] { 0x10 } : new byte[] { 0x00 };

                    RTPPacket rtpPacket = new RTPPacket(payloadLength + vp8HeaderBytes.Length + srtpProtectionLength);
                    rtpPacket.Header.SyncSource     = sessionStream.Ssrc;
                    rtpPacket.Header.SequenceNumber = sessionStream.SeqNum++;
                    rtpPacket.Header.Timestamp      = timestamp;
                    rtpPacket.Header.MarkerBit      = ((offset + payloadLength) >= buffer.Length) ? 1 : 0; // Set marker bit for the last packet in the frame.
                    rtpPacket.Header.PayloadType    = sessionStream.PayloadTypeID;

                    Buffer.BlockCopy(vp8HeaderBytes, 0, rtpPacket.Payload, 0, vp8HeaderBytes.Length);
                    Buffer.BlockCopy(buffer, offset, rtpPacket.Payload, vp8HeaderBytes.Length, payloadLength);

                    OnRtpPacketSent?.Invoke(rtpPacket);

                    var rtpBuffer = rtpPacket.GetBytes();

                    int rtperr = SrtpProtect == null ? 0 : SrtpProtect(rtpBuffer, rtpBuffer.Length - srtpProtectionLength);
                    if (rtperr != 0)
                    {
                        logger.LogError("SendVp8Frame SRTP packet protection failed, result " + rtperr + ".");
                    }
                    else
                    {
                        RtpChannel.SendAsync(RTPChannelSocketsEnum.RTP, DestinationEndPoint, rtpBuffer);
                    }

                    m_lastRtpTimestamp = timestamp;
                }
            }
            catch (SocketException sockExcp)
            {
                logger.LogError("SocketException SendVp8Frame. " + sockExcp.Message);
            }
        }
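
The chunking arithmetic in the loop above is easy to read in isolation. The following is a minimal sketch (not part of the library) of the same offset/length/marker calculation, assuming an RTP_MAX_PAYLOAD constant of 1400 bytes.

        // Illustrative sketch only: the chunking arithmetic used in SendVp8Frame, lifted into
        // a helper. RTP_MAX_PAYLOAD = 1400 is an assumed value.
        // Requires: using System.Collections.Generic;
        const int RTP_MAX_PAYLOAD = 1400;

        static IEnumerable<(int Offset, int Length, bool IsLast)> ChunkFrame(byte[] buffer)
        {
            for (int index = 0; index * RTP_MAX_PAYLOAD < buffer.Length; index++)
            {
                int offset = index * RTP_MAX_PAYLOAD;
                int length = (offset + RTP_MAX_PAYLOAD < buffer.Length) ? RTP_MAX_PAYLOAD : buffer.Length - offset;

                // IsLast mirrors the marker bit logic: true only for the final packet of the frame.
                yield return (offset, length, offset + length >= buffer.Length);
            }
        }
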
Example No. 2
        public void SendAudioFrame(uint timestamp, byte[] buffer, int streamID = 0)
        {
            if (m_isClosed || m_rtpEventInProgress || DestinationEndPoint == null)
            {
                return;
            }

            try
            {
                RTPSessionStream sessionStream = m_sessionStreams.Where(x => x.ID == streamID).Single();

                for (int index = 0; index * RTP_MAX_PAYLOAD < buffer.Length; index++)
                {
                    // Reset the sequence number to zero once it reaches the ushort maximum.
                    sessionStream.SeqNum = (ushort)(sessionStream.SeqNum % UInt16.MaxValue);

                    int offset               = (index == 0) ? 0 : (index * RTP_MAX_PAYLOAD);
                    int payloadLength        = (offset + RTP_MAX_PAYLOAD < buffer.Length) ? RTP_MAX_PAYLOAD : buffer.Length - offset;
                    int srtpProtectionLength = (SrtpProtect != null) ? SRTP_AUTH_KEY_LENGTH : 0;

                    RTPPacket rtpPacket = new RTPPacket(payloadLength + srtpProtectionLength);
                    rtpPacket.Header.SyncSource     = sessionStream.Ssrc;
                    rtpPacket.Header.SequenceNumber = sessionStream.SeqNum++;
                    rtpPacket.Header.Timestamp      = timestamp;
                    // RFC3551 specifies that for audio the marker bit should always be 0 except for when returning
                    // from silence suppression. For video the marker bit DOES get set to 1 for the last packet
                    // in a frame.
                    rtpPacket.Header.MarkerBit   = 0;
                    rtpPacket.Header.PayloadType = sessionStream.PayloadTypeID;

                    Buffer.BlockCopy(buffer, offset, rtpPacket.Payload, 0, payloadLength);

                    OnRtpPacketSent?.Invoke(rtpPacket);

                    var rtpBuffer = rtpPacket.GetBytes();

                    int rtperr = SrtpProtect == null ? 0 : SrtpProtect(rtpBuffer, rtpBuffer.Length - srtpProtectionLength);
                    if (rtperr != 0)
                    {
                        logger.LogError("SendAudioFrame SRTP packet protection failed, result " + rtperr + ".");
                    }
                    else
                    {
                        RtpChannel.SendAsync(RTPChannelSocketsEnum.RTP, DestinationEndPoint, rtpBuffer);
                    }

                    m_lastRtpTimestamp = timestamp;
                }
            }
            catch (SocketException sockExcp)
            {
                logger.LogError("SocketException SendAudioFrame. " + sockExcp.Message);
            }
        }
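
A hypothetical usage sketch: the method above expects the caller to advance the RTP timestamp itself. For 20 ms G.711 frames on the standard 8 kHz audio clock that means 160 timestamp units per frame. The session variable and GetPcmuFrames source below are illustrative assumptions, not part of the example.

        // Hypothetical usage, assuming "session" exposes SendAudioFrame as above and
        // GetPcmuFrames() yields 160-byte, 20 ms G.711 payloads (both names are assumptions).
        uint audioTimestamp = 0;
        foreach (byte[] pcmuFrame in GetPcmuFrames())
        {
            session.SendAudioFrame(audioTimestamp, pcmuFrame);
            audioTimestamp += 160;   // 8,000 Hz clock * 0.020 s = 160 timestamp units per frame.
        }
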
Example No. 3
        /// <summary>
        /// Helper method to send a low quality JPEG image over RTP. This method supports a very abbreviated version of RFC 2435 "RTP Payload Format for JPEG-compressed Video".
        /// It's intended as a quick convenient way to send something like a test pattern image over an RTSP connection. More than likely it won't be suitable when a high
        /// quality image is required since the header used in this method does not support quantization tables.
        /// </summary>
        /// <param name="jpegBytes">The raw encoded bytes of the JPEG image to transmit.</param>
        /// <param name="jpegQuality">The encoder quality of the JPEG image.</param>
        /// <param name="jpegWidth">The width of the JPEG image.</param>
        /// <param name="jpegHeight">The height of the JPEG image.</param>
        /// <param name="framesPerSecond">The rate at which the JPEG frames are being transmitted at. used to calculate the timestamp.</param>
        public void SendJpegFrame(uint timestamp, byte[] jpegBytes, int jpegQuality, int jpegWidth, int jpegHeight, int streamID = 0)
        {
            if (m_isClosed || m_rtpEventInProgress || DestinationEndPoint == null)
            {
                return;
            }

            try
            {
                //System.Diagnostics.Debug.WriteLine("Sending " + jpegBytes.Length + " encoded bytes to client, timestamp " + _timestamp + ", starting sequence number " + _sequenceNumber + ", image dimensions " + jpegWidth + " x " + jpegHeight + ".");

                RTPSessionStream sessionStream = m_sessionStreams.Where(x => x.ID == streamID).Single();

                for (int index = 0; index * RTP_MAX_PAYLOAD < jpegBytes.Length; index++)
                {
                    uint offset        = Convert.ToUInt32(index * RTP_MAX_PAYLOAD);
                    int  payloadLength = ((index + 1) * RTP_MAX_PAYLOAD < jpegBytes.Length) ? RTP_MAX_PAYLOAD : jpegBytes.Length - index * RTP_MAX_PAYLOAD;

                    byte[] jpegHeader = CreateLowQualityRtpJpegHeader(offset, jpegQuality, jpegWidth, jpegHeight);

                    List<byte> packetPayload = new List<byte>();
                    packetPayload.AddRange(jpegHeader);
                    packetPayload.AddRange(jpegBytes.Skip(index * RTP_MAX_PAYLOAD).Take(payloadLength));

                    RTPPacket rtpPacket = new RTPPacket(packetPayload.Count);
                    rtpPacket.Header.SyncSource     = sessionStream.Ssrc;
                    rtpPacket.Header.SequenceNumber = sessionStream.SeqNum++;
                    rtpPacket.Header.Timestamp      = timestamp;
                    rtpPacket.Header.MarkerBit      = ((index + 1) * RTP_MAX_PAYLOAD < jpegBytes.Length) ? 0 : 1;
                    rtpPacket.Header.PayloadType    = (int)SDPMediaFormatsEnum.JPEG;
                    rtpPacket.Payload = packetPayload.ToArray();

                    OnRtpPacketSent?.Invoke(rtpPacket);

                    byte[] rtpBytes = rtpPacket.GetBytes();

                    RtpChannel.SendAsync(RTPChannelSocketsEnum.RTP, DestinationEndPoint, rtpBytes);
                }
            }
            catch (SocketException sockExcp)
            {
                logger.LogError("SocketException SendJpegFrame. " + sockExcp.Message);
            }
        }
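
The example relies on a CreateLowQualityRtpJpegHeader helper that is not shown. For reference, a minimal RFC 2435 main header (with no quantization table section) is eight bytes; the sketch below is an assumption about what such a helper could look like, not the library's actual implementation.

        // Sketch of a minimal RFC 2435 main JPEG header (8 bytes, no quantization table section).
        // This is an assumed stand-in for CreateLowQualityRtpJpegHeader, not the real helper.
        static byte[] BuildRtpJpegHeader(uint fragmentOffset, int quality, int width, int height)
        {
            return new byte[]
            {
                0x00,                                    // Type-specific.
                (byte)((fragmentOffset >> 16) & 0xff),   // Fragment offset, 24 bits, big-endian.
                (byte)((fragmentOffset >> 8) & 0xff),
                (byte)(fragmentOffset & 0xff),
                0x00,                                    // Type 0: baseline, no restart markers.
                (byte)quality,                           // Q value (1-99: tables derived from Q, none sent in-band).
                (byte)(width / 8),                       // Width in 8-pixel units.
                (byte)(height / 8)                       // Height in 8-pixel units.
            };
        }
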
Example No. 4
        /// <summary>
        /// Sends the RTCP report to the remote call party.
        /// </summary>
        /// <param name="report">RTCP report to send.</param>
        private void SendRtcpReport(SDPMediaTypesEnum mediaType, RTCPCompoundPacket report)
        {
            if (IsSecure && !IsSecureContextReady)
            {
                logger.LogWarning("SendRtcpReport cannot be called on a secure session before calling SetSecurityContext.");
            }
            else if (ControlDestinationEndPoint != null)
            {
                var reportBytes = report.GetBytes();

                //logger.LogDebug($"SendRtcpReport: {reportBytes.HexStr()}");

                var sendOnSocket = (m_isRtcpMultiplexed) ? RTPChannelSocketsEnum.RTP : RTPChannelSocketsEnum.Control;

                if (m_srtcpControlProtect == null)
                {
                    RtpChannel.SendAsync(sendOnSocket, ControlDestinationEndPoint, reportBytes);
                }
                else
                {
                    byte[] sendBuffer = new byte[reportBytes.Length + SRTP_MAX_PREFIX_LENGTH];
                    Buffer.BlockCopy(reportBytes, 0, sendBuffer, 0, reportBytes.Length);

                    int outBufLen = 0;
                    int rtperr    = m_srtcpControlProtect(sendBuffer, sendBuffer.Length - SRTP_MAX_PREFIX_LENGTH, out outBufLen);
                    if (rtperr != 0)
                    {
                        logger.LogWarning("SRTP RTCP packet protection failed, result " + rtperr + ".");
                    }
                    else
                    {
                        RtpChannel.SendAsync(sendOnSocket, ControlDestinationEndPoint, sendBuffer.Take(outBufLen).ToArray());
                    }
                }

                OnSendReport?.Invoke(mediaType, report);
            }
        }
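
For clarity, the SRTCP branch above can be read as a standalone pattern: allocate headroom for the authentication data, protect the buffer in place, then trim to the length the protect function reports. The sketch below restates that pattern with a delegate signature inferred from the call site; it is an assumption, not the library's API.

        // Assumed delegate shape, modelled on the call above: protect the buffer in place
        // and report the protected length via the out parameter.
        delegate int ProtectRtcpDelegate(byte[] payload, int length, out int outLength);

        static byte[] ProtectRtcpReport(ProtectRtcpDelegate protect, byte[] reportBytes, int srtpMaxPrefixLength)
        {
            // Leave headroom for the SRTCP auth tag/prefix beyond the plain report bytes.
            byte[] sendBuffer = new byte[reportBytes.Length + srtpMaxPrefixLength];
            Buffer.BlockCopy(reportBytes, 0, sendBuffer, 0, reportBytes.Length);

            int rtperr = protect(sendBuffer, sendBuffer.Length - srtpMaxPrefixLength, out int outBufLen);
            if (rtperr != 0)
            {
                throw new InvalidOperationException("SRTCP protection failed, result " + rtperr + ".");
            }

            // Only the first outBufLen bytes are valid after protection.
            byte[] protectedReport = new byte[outBufLen];
            Buffer.BlockCopy(sendBuffer, 0, protectedReport, 0, outBufLen);
            return protectedReport;
        }
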
Example No. 5
        /// <summary>
        /// H264 frames need a two byte header when transmitted over RTP.
        /// </summary>
        /// <param name="frame">The H264 encoded frame to transmit.</param>
        /// <param name="frameSpacing">The increment to add to the RTP timestamp for each new frame.</param>
        /// <param name="payloadType">The payload type to set on the RTP packet.</param>
        public void SendH264Frame(uint timestamp, byte[] frame, uint frameSpacing, int payloadType)
        {
            if (m_rtpEventInProgress || DestinationEndPoint == null)
            {
                return;
            }

            try
            {
                //System.Diagnostics.Debug.WriteLine("Sending " + frame.Length + " H264 encoded bytes to client, timestamp " + _timestamp + ", starting sequence number " + _sequenceNumber + ".");

                for (int index = 0; index * RTP_MAX_PAYLOAD < frame.Length; index++)
                {
                    uint offset        = Convert.ToUInt32(index * RTP_MAX_PAYLOAD);
                    int  payloadLength = ((index + 1) * RTP_MAX_PAYLOAD < frame.Length) ? RTP_MAX_PAYLOAD : frame.Length - index * RTP_MAX_PAYLOAD;

                    RTPPacket rtpPacket = new RTPPacket(payloadLength + H264_RTP_HEADER_LENGTH);
                    rtpPacket.Header.SyncSource     = Ssrc;
                    rtpPacket.Header.SequenceNumber = SeqNum++;
                    rtpPacket.Header.Timestamp      = timestamp;
                    rtpPacket.Header.MarkerBit      = 0;
                    rtpPacket.Header.PayloadType    = payloadType;

                    // Start RTP packet in frame 0x1c 0x89
                    // Middle RTP packet in frame 0x1c 0x09
                    // Last RTP packet in frame 0x1c 0x49

                    byte[] h264Header = new byte[] { 0x1c, 0x09 };

                    if (index == 0 && frame.Length <= RTP_MAX_PAYLOAD)
                    {
                        // First and last RTP packet in the frame.
                        h264Header = new byte[] { 0x1c, 0x49 };
                        rtpPacket.Header.MarkerBit = 1;
                    }
                    else if (index == 0)
                    {
                        h264Header = new byte[] { 0x1c, 0x89 };
                    }
                    else if ((index + 1) * RTP_MAX_PAYLOAD >= frame.Length)
                    {
                        h264Header = new byte[] { 0x1c, 0x49 };
                        rtpPacket.Header.MarkerBit = 1;
                    }

                    var h264Stream = frame.Skip(index * RTP_MAX_PAYLOAD).Take(payloadLength).ToList();
                    h264Stream.InsertRange(0, h264Header);
                    rtpPacket.Payload = h264Stream.ToArray();

                    OnRtpPacketSent?.Invoke(rtpPacket);

                    byte[] rtpBytes = rtpPacket.GetBytes();

                    RtpChannel.SendAsync(RTPChannelSocketsEnum.RTP, DestinationEndPoint, rtpBytes);
                }
            }
            catch (SocketException sockExcp)
            {
                logger.LogError("SocketException SendH264Frame. " + sockExcp.Message);
            }
        }
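
A hypothetical usage sketch: video RTP normally runs on a 90 kHz clock, so at 30 frames per second the timestamp advances by 3,000 per frame. The session variable, the GetEncodedH264Frames source, and payload type 96 below are illustrative assumptions.

        // Hypothetical usage, assuming "session" exposes SendH264Frame as above and
        // GetEncodedH264Frames() yields one encoded access unit per call (both are assumptions).
        const uint H264_FRAME_SPACING = 90000 / 30;   // 90 kHz video clock at 30 fps = 3,000 units per frame.
        uint videoTimestamp = 0;

        foreach (byte[] accessUnit in GetEncodedH264Frames())
        {
            session.SendH264Frame(videoTimestamp, accessUnit, H264_FRAME_SPACING, 96);   // 96 = dynamic payload type.
            videoTimestamp += H264_FRAME_SPACING;
        }
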