Example #1
 /// <summary>
 /// Forwards media from the WebRTC Peer Connection to the remote SIP user agent.
 /// </summary>
 /// <param name="remote">The remote endpoint the RTP packet was received from.</param>
 /// <param name="mediaType">The type of media.</param>
 /// <param name="rtpPacket">The RTP packet received on the SIP session.</param>
 private static void ForwardAudioToSIP(IPEndPoint remote, SDPMediaTypesEnum mediaType, RTPPacket rtpPacket)
 {
     if (_rtpSession != null && !_rtpSession.IsClosed && mediaType == SDPMediaTypesEnum.audio)
     {
         _rtpSession.SendAudio((uint)rtpPacket.Payload.Length, rtpPacket.Payload);
     }
 }
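The duration passed to SendAudio above is the payload length in bytes, which is only valid for G.711-style codecs where the RTP clock is 8 kHz and each payload byte encodes exactly one sample. A minimal sketch making that assumption explicit (the helper is illustrative, not part of the library):

 /// <summary>
 /// Illustrative helper only. For G.711 (PCMU/PCMA) the RTP clock is 8 kHz and each
 /// payload byte is exactly one sample, so the duration in RTP timestamp units equals
 /// the payload length. This does NOT hold for codecs such as G.722 or Opus.
 /// </summary>
 private static uint GetG711DurationRtpUnits(RTPPacket rtpPacket)
 {
     return (uint)rtpPacket.Payload.Length;
 }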
Example #2
 /// <summary>
 /// Event handler for receiving RTP packets.
 /// </summary>
 /// <param name="type">The media type of the RTP packet (audio or video).</param>
 /// <param name="rtpPacket">The RTP packet received from the remote party.</param>
 private static void OnRtpPacketReceived(SDPMediaTypesEnum type, RTPPacket rtpPacket)
 {
     // The raw audio data is available in rtpPacket.Payload.
     Log.LogDebug($"rtp pkt received ssrc {rtpPacket.Header.SyncSource} seqnum {rtpPacket.Header.SequenceNumber}.");
 }
Example #3
 /// <summary>
 /// Decodes the packet. Doesn't return any bytes until all of the fragments have been received.
 /// </summary>
 /// <param name="packet">The RTP packet to decode.</param>
 /// <returns>The decoded bytes once all fragments have been received.</returns>
 public override byte[] DecodeToBytes(RTPPacket packet)
 {
     return base.DecodeToBytes(packet);
 }
 /// <summary>
 /// Depacketizes a packet.
 /// </summary>
 /// <param name="packet">The packet.</param>
 /// <returns>The depacketized payload bytes.</returns>
 public override byte[] Depacketize(RTPPacket packet)
 {
     CurrentRTPSequenceNumber = packet.SequenceNumber;

     return _Padep.Depacketize(packet);
 }
Example #5
        private static void RtpSession_OnRtpPacketReceived(IPEndPoint remoteEndPoint, SDPMediaTypesEnum kind, RTPPacket pkt)
        {
            //Log.LogDebug($"{kind} RTP packet received {pkt.Header.SequenceNumber}.");

            if (kind == SDPMediaTypesEnum.audio)
            {
                var sample = pkt.Payload;

                for (int index = 0; index < sample.Length; index++)
                {
                    short pcm = NAudio.Codecs.MuLawDecoder.MuLawToLinearSample(sample[index]);
                    float pcmFloat = pcm / 32768f; // Normalise the 16 bit sample to [-1.0, 1.0).

                    // Write each sample _ratio times to up-sample from the 8KHz G.711
                    // source rate to the wave file's sample rate.
                    for (int i = 0; i < _ratio; i++)
                    {
                        _waveFile.WriteSample(pcmFloat);
                    }
                }
            }
        }
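The handler above relies on a _waveFile writer and an up-sampling factor _ratio defined elsewhere in the class. A minimal setup sketch using NAudio, assuming a 16 kHz IEEE float output file; the file name and target rate are assumptions:

        // Assumed field definitions; only the names come from the example above.
        private static WaveFileWriter _waveFile;
        private static int _ratio;

        private static void InitWaveFile()
        {
            // G.711 mu-law decodes to 8 kHz PCM. Writing a 16 kHz float WAV means each
            // decoded sample is written twice, hence a ratio of 16000 / 8000 = 2.
            _waveFile = new WaveFileWriter("output.wav", WaveFormat.CreateIeeeFloatWaveFormat(16000, 1));
            _ratio = 2;
        }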
Example #6
 /// <summary>
 /// Depacketizes a packet.
 /// </summary>
 /// <param name="packet">The packet.</param>
 /// <returns>The depacketized payload bytes.</returns>
 public override byte[] Depacketize(RTPPacket packet)
 {
     return _Padep.Depacketize(packet);
 }
Example #7
 /// <summary>
 /// Event handler for receiving RTP packets.
 /// </summary>
 /// <param name="ua">The SIP user agent associated with the RTP session.</param>
 /// <param name="type">The media type of the RTP packet (audio or video).</param>
 /// <param name="rtpPacket">The RTP packet received from the remote party.</param>
 private static void OnRtpPacketReceived(SIPUserAgent ua, SDPMediaTypesEnum type, RTPPacket rtpPacket)
 {
     // The raw audio data is available in rtpPacket.Payload.
 }
        /// <summary>
        /// Event handler for receiving RTP packets from the remote party.
        /// </summary>
        /// <param name="mediaType">The media type of the packets.</param>
        /// <param name="rtpPacket">The RTP packet with the media sample.</param>
        private void RtpPacketReceived(SDPMediaTypesEnum mediaType, RTPPacket rtpPacket)
        {
            if (mediaType == SDPMediaTypesEnum.audio)
            {
                bool wants8kSamples  = OnRemoteAudioSampleReady != null;
                bool wants16kSamples = OnRemote16KHzPcmSampleReady != null;

                if (wants8kSamples || wants16kSamples)
                {
                    var sample = rtpPacket.Payload;

                    if (_sendingFormat.FormatCodec == SDPMediaFormatsEnum.G722)
                    {
                        short[] decodedPcm16k      = new short[sample.Length * 2];
                        int     decodedSampleCount =
                            _g722Decoder.Decode(_g722DecoderState, decodedPcm16k, sample, sample.Length);

                        // The decoder provides short samples but streams and devices generally seem to want
                        // byte samples so convert them.
                        byte[] pcm8kBuffer  = (wants8kSamples) ? new byte[decodedSampleCount] : null;
                        byte[] pcm16kBuffer = (wants16kSamples) ? new byte[decodedSampleCount * 2] : null;

                        for (int i = 0; i < decodedSampleCount; i++)
                        {
                            var bufferSample = BitConverter.GetBytes(decodedPcm16k[i]);

                            // For 8K samples the crude re-sampling to get from 16K to 8K is to skip
                            // every second sample.
                            if (pcm8kBuffer != null && i % 2 == 0)
                            {
                                pcm8kBuffer[(i / 2) * 2]     = bufferSample[0];
                                pcm8kBuffer[(i / 2) * 2 + 1] = bufferSample[1];
                            }

                            // G722 provides 16k samples.
                            if (pcm16kBuffer != null)
                            {
                                pcm16kBuffer[i * 2]     = bufferSample[0];
                                pcm16kBuffer[i * 2 + 1] = bufferSample[1];
                            }
                        }

                        OnRemoteAudioSampleReady?.Invoke(pcm8kBuffer);
                        OnRemote16KHzPcmSampleReady?.Invoke(pcm16kBuffer);
                    }
                    else if (_sendingFormat.FormatCodec == SDPMediaFormatsEnum.PCMA ||
                             _sendingFormat.FormatCodec == SDPMediaFormatsEnum.PCMU)
                    {
                        Func<byte, short> decode = (_sendingFormat.FormatCodec == SDPMediaFormatsEnum.PCMA)
                            ? (Func<byte, short>)ALawDecoder.ALawToLinearSample
                            : MuLawDecoder.MuLawToLinearSample;

                        byte[] pcm8kBuffer  = (wants8kSamples) ? new byte[sample.Length * 2] : null;
                        byte[] pcm16kBuffer = (wants16kSamples) ? new byte[sample.Length * 4] : null;

                        for (int i = 0; i < sample.Length; i++)
                        {
                            var bufferSample = BitConverter.GetBytes(decode(sample[i]));

                            // G711 samples at 8KHz.
                            if (pcm8kBuffer != null)
                            {
                                pcm8kBuffer[i * 2]     = bufferSample[0];
                                pcm8kBuffer[i * 2 + 1] = bufferSample[1];
                            }

                            // The crude up-sampling approach to get 16K samples from G711 is to
                            // duplicate each 8K sample.
                            // TODO: This re-sampling approach introduces artifacts. Applying a low pass
                            // filter seems to be recommended.
                            if (pcm16kBuffer != null)
                            {
                                pcm16kBuffer[i * 4]     = bufferSample[0];
                                pcm16kBuffer[i * 4 + 1] = bufferSample[1];
                                pcm16kBuffer[i * 4 + 2] = bufferSample[0];
                                pcm16kBuffer[i * 4 + 3] = bufferSample[1];
                            }
                        }

                        OnRemoteAudioSampleReady?.Invoke(pcm8kBuffer);
                        OnRemote16KHzPcmSampleReady?.Invoke(pcm16kBuffer);
                    }
                    else
                    {
                        // Ignore the sample. It's for an unsupported codec. It will be up to the application
                        // to decode.
                    }
                }
            }
        }
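The TODO in the G711 branch above notes that duplicating or skipping samples introduces artifacts. For the 16 kHz to 8 kHz direction, a sketch of a decimator that averages each pair of samples before discarding one; this is an illustration, not part of the original class, and a proper low-pass filter is still preferable:

        private static short[] DecimateByAveraging(short[] pcm16k, int sampleCount)
        {
            short[] pcm8k = new short[sampleCount / 2];

            for (int i = 0; i < pcm8k.Length; i++)
            {
                // Averaging adjacent samples acts as a crude two-tap low-pass filter
                // before the 2:1 decimation, reducing (but not removing) aliasing.
                pcm8k[i] = (short)((pcm16k[2 * i] + pcm16k[2 * i + 1]) / 2);
            }

            return pcm8k;
        }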
Example #9
        private static async Task SendRecvRtp(Socket rtpSocket, RTPSession rtpSession, IPEndPoint dstRtpEndPoint, string audioFileName, CancellationTokenSource cts)
        {
            try
            {
                SIPSorcery.Sys.Log.Logger.LogDebug($"Sending from RTP socket {rtpSocket.LocalEndPoint} to {dstRtpEndPoint}.");

                // Nothing is done with the data received from the remote party, but the
                // destination RTP end point gets switched to the sender's socket if it
                // differs from the one in the SDP. This helps cope with NAT.
                var rtpRecvTask = Task.Run(async () =>
                {
                    DateTime lastRecvReportAt = DateTime.Now;
                    uint packetReceivedCount  = 0;
                    uint bytesReceivedCount   = 0;
                    byte[] buffer             = new byte[512];
                    EndPoint remoteEP         = new IPEndPoint(IPAddress.Any, 0);

                    SIPSorcery.Sys.Log.Logger.LogDebug($"Listening on RTP socket {rtpSocket.LocalEndPoint}.");

                    var recvResult = await rtpSocket.ReceiveFromAsync(buffer, SocketFlags.None, remoteEP);

                    while (recvResult.ReceivedBytes > 0 && !cts.IsCancellationRequested)
                    {
                        RTPPacket rtpPacket = new RTPPacket(buffer.Take(recvResult.ReceivedBytes).ToArray());

                        packetReceivedCount++;
                        bytesReceivedCount += (uint)rtpPacket.Payload.Length;

                        recvResult = await rtpSocket.ReceiveFromAsync(buffer, SocketFlags.None, remoteEP);

                        // Switch sends to the socket the remote party is actually sending from
                        // (see the comment above), not only when a report happens to be due.
                        dstRtpEndPoint = recvResult.RemoteEndPoint as IPEndPoint;

                        if (DateTime.Now.Subtract(lastRecvReportAt).TotalSeconds > RTP_REPORTING_PERIOD_SECONDS)
                        {
                            lastRecvReportAt = DateTime.Now;

                            SIPSorcery.Sys.Log.Logger.LogDebug($"RTP recv {rtpSocket.LocalEndPoint}<-{dstRtpEndPoint} pkts {packetReceivedCount} bytes {bytesReceivedCount}");
                        }
                    }
                });

                switch (Path.GetExtension(audioFileName).ToLower())
                {
                case ".ulaw":
                {
                    uint timestamp = 0;
                    using (FileStream fs = new FileStream(audioFileName, FileMode.Open, FileAccess.Read))
                    {
                        DateTime lastSendReportAt = DateTime.Now;
                        uint     packetSentCount  = 0;
                        uint     bytesSentCount   = 0;
                        byte[]   buffer    = new byte[320];
                        int      bytesRead = fs.Read(buffer, 0, buffer.Length);

                        while (bytesRead > 0 && !cts.IsCancellationRequested)
                        {
                            packetSentCount++;
                            bytesSentCount += (uint)bytesRead;

                            if (!dstRtpEndPoint.Address.Equals(IPAddress.Any))
                            {
                                // Only send the bytes actually read so a short final read isn't padded.
                                rtpSession.SendAudioFrame(rtpSocket, dstRtpEndPoint, timestamp, buffer.Take(bytesRead).ToArray());
                            }

                            timestamp += (uint)bytesRead;

                            if (DateTime.Now.Subtract(lastSendReportAt).TotalSeconds > RTP_REPORTING_PERIOD_SECONDS)
                            {
                                lastSendReportAt = DateTime.Now;
                                SIPSorcery.Sys.Log.Logger.LogDebug($"RTP send {rtpSocket.LocalEndPoint}->{dstRtpEndPoint} pkts {packetSentCount} bytes {bytesSentCount}");
                            }

                            await Task.Delay(40, cts.Token);

                            bytesRead = fs.Read(buffer, 0, buffer.Length);
                        }
                    }
                }
                break;

                case ".mp3":
                {
                    DateTime lastSendReportAt = DateTime.Now;
                    uint     packetSentCount  = 0;
                    uint     bytesSentCount   = 0;
                    var      pcmFormat        = new WaveFormat(8000, 16, 1);
                    var      ulawFormat       = WaveFormat.CreateMuLawFormat(8000, 1);

                    uint timestamp = 0;

                    using (WaveFormatConversionStream pcmStm = new WaveFormatConversionStream(pcmFormat, new Mp3FileReader(audioFileName)))
                    {
                        using (WaveFormatConversionStream ulawStm = new WaveFormatConversionStream(ulawFormat, pcmStm))
                        {
                            byte[] buffer    = new byte[320];
                            int    bytesRead = ulawStm.Read(buffer, 0, buffer.Length);

                            while (bytesRead > 0 && !cts.IsCancellationRequested)
                            {
                                packetSentCount++;
                                bytesSentCount += (uint)bytesRead;

                                byte[] sample = new byte[bytesRead];
                                Array.Copy(buffer, sample, bytesRead);

                                // Use Equals for the address check (reference comparison with != is a bug)
                                // and send the trimmed sample rather than the full, possibly padded, buffer.
                                if (!dstRtpEndPoint.Address.Equals(IPAddress.Any))
                                {
                                    rtpSession.SendAudioFrame(rtpSocket, dstRtpEndPoint, timestamp, sample);
                                }

                                timestamp += (uint)bytesRead;

                                if (DateTime.Now.Subtract(lastSendReportAt).TotalSeconds > RTP_REPORTING_PERIOD_SECONDS)
                                {
                                    lastSendReportAt = DateTime.Now;
                                    SIPSorcery.Sys.Log.Logger.LogDebug($"RTP send {rtpSocket.LocalEndPoint}->{dstRtpEndPoint} pkts {packetReceivedCount} bytes {bytesReceivedCount}");
                                }

                                await Task.Delay(40, cts.Token);

                                bytesRead = ulawStm.Read(buffer, 0, buffer.Length);
                            }
                        }
                    }
                }
                break;

                default:
                    throw new NotImplementedException("Only ulaw and mp3 files are understood by this example.");
                }
            }
            catch (OperationCanceledException) { }
            catch (Exception excp)
            {
                SIPSorcery.Sys.Log.Logger.LogError($"Exception sending RTP. {excp.Message}");
            }
        }
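Both file branches above rely on the same G.711 arithmetic: at the 8 kHz RTP clock each payload byte is one timestamp tick, so a 320 byte buffer covers 40 ms, matching the Task.Delay(40) pacing. A sketch of that arithmetic with illustrative helper names:

        // Illustrative helpers for the G.711 packet arithmetic used above.
        private const int G711_SAMPLE_RATE = 8000; // RTP clock rate; one byte per sample.

        private static int G711PacketDurationMs(int payloadBytes)
        {
            // 320 bytes * 1000 / 8000 = 40 ms, matching the send loop's delay.
            return payloadBytes * 1000 / G711_SAMPLE_RATE;
        }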
Example #10
        /// <summary>
        /// Handles packets received on the RTP socket. PCMU audio packets are one of the
        /// simplest cases, if not the simplest. The handling can get substantially more
        /// complicated if the RTP socket is being used to multiplex different protocols,
        /// which is what WebRTC does with STUN, RTP and RTCP.
        /// </summary>
        /// <param name="rtpSocket">The raw RTP socket.</param>
        /// <param name="rtpRecvSession">The session info for the RTP packets being received.</param>
        /// <param name="cts">Cancellation token source used to stop the receive loop.</param>
        private static async void RecvRtp(Socket rtpSocket, RTPSession rtpRecvSession, CancellationTokenSource cts)
        {
            try
            {
                DateTime lastRecvReportAt    = DateTime.Now;
                uint     packetReceivedCount = 0;
                uint     bytesReceivedCount  = 0;
                byte[]   buffer = new byte[512];

                IPEndPoint anyEndPoint = new IPEndPoint((rtpSocket.AddressFamily == AddressFamily.InterNetworkV6) ? IPAddress.IPv6Any : IPAddress.Any, 0);

                Log.LogDebug($"Listening on RTP socket {rtpSocket.LocalEndPoint}.");

                using (var waveOutEvent = new WaveOutEvent())
                {
                    var waveProvider = new BufferedWaveProvider(new WaveFormat(8000, 16, 1));
                    waveProvider.DiscardOnBufferOverflow = true;
                    waveOutEvent.Init(waveProvider);
                    waveOutEvent.Play();

                    var recvResult = await rtpSocket.ReceiveFromAsync(buffer, SocketFlags.None, anyEndPoint);

                    Log.LogDebug($"Initial RTP packet recieved from {recvResult.RemoteEndPoint}.");

                    if (_remoteRtpEndPoint == null || !recvResult.RemoteEndPoint.Equals(_remoteRtpEndPoint))
                    {
                        _remoteRtpEndPoint = recvResult.RemoteEndPoint as IPEndPoint;
                        Log.LogDebug($"Adjusting remote RTP end point for sends adjusted to {_remoteRtpEndPoint}.");
                    }

                    while (recvResult.ReceivedBytes > 0 && !cts.IsCancellationRequested)
                    {
                        var rtpPacket = new RTPPacket(buffer.Take(recvResult.ReceivedBytes).ToArray());

                        packetReceivedCount++;
                        bytesReceivedCount += (uint)rtpPacket.Payload.Length;

                        for (int index = 0; index < rtpPacket.Payload.Length; index++)
                        {
                            short  pcm       = NAudio.Codecs.MuLawDecoder.MuLawToLinearSample(rtpPacket.Payload[index]);
                            byte[] pcmSample = new byte[] { (byte)(pcm & 0xFF), (byte)(pcm >> 8) };
                            waveProvider.AddSamples(pcmSample, 0, 2);
                        }

                        recvResult = await rtpSocket.ReceiveFromAsync(buffer, SocketFlags.None, anyEndPoint);

                        if (DateTime.Now.Subtract(lastRecvReportAt).TotalSeconds > RTP_REPORTING_PERIOD_SECONDS)
                        {
                            // This is typically where RTCP receiver reports (RRs) would be sent. Omitted here for brevity.
                            lastRecvReportAt = DateTime.Now;
                            var remoteRtpEndPoint = recvResult.RemoteEndPoint as IPEndPoint;
                            Log.LogDebug($"RTP recv report {rtpSocket.LocalEndPoint}<-{remoteRtpEndPoint} pkts {packetReceivedCount} bytes {bytesReceivedCount}");
                        }
                    }
                }
            }
            catch (SocketException sockExcp)
            {
                Log.LogWarning($"RecvRTP socket error {sockExcp.SocketErrorCode}");
            }
            catch (ObjectDisposedException) { } // This is how .Net deals with an in use socket being closed. Safe to ignore.
            catch (Exception excp)
            {
                Log.LogError($"Exception RecvRTP. {excp.Message}");
            }
        }
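The summary for RecvRtp mentions that WebRTC multiplexes STUN, RTP and RTCP on a single socket. For illustration only (this example doesn't need it), a sketch of the standard first-byte demultiplexing rule from RFC 7983:

        // Classify a datagram on a multiplexed socket by its first byte (RFC 7983).
        private static string ClassifyMuxedPacket(byte firstByte)
        {
            if (firstByte <= 3) return "STUN";
            if (firstByte >= 20 && firstByte <= 63) return "DTLS";
            if (firstByte >= 128 && firstByte <= 191) return "RTP/RTCP";
            return "Unknown";
        }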
        /// <summary>
        /// Render a video RTP packet received from a remote party.
        /// </summary>
        /// <param name="rtpPacket">The RTP packet containing the video payload.</param>
        private void RenderVideo(RTPPacket rtpPacket)
        {
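            // New frames must have the VP8 payload descriptor Start bit (0x10) set in the
            // first payload byte. Tracking _currVideoFramePosn handles a VP8 frame split
            // across multiple RTP packets, as per https://tools.ietf.org/html/rfc7741#section-4.4.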
            if (_currVideoFramePosn > 0 || (rtpPacket.Payload[0] & 0x10) > 0)
            {
                RtpVP8Header vp8Header = RtpVP8Header.GetVP8Header(rtpPacket.Payload);
                Buffer.BlockCopy(rtpPacket.Payload, vp8Header.Length, _currVideoFrame, _currVideoFramePosn, rtpPacket.Payload.Length - vp8Header.Length);
                _currVideoFramePosn += rtpPacket.Payload.Length - vp8Header.Length;

                if (rtpPacket.Header.MarkerBit == 1)
                {
                    unsafe
                    {
                        fixed(byte *p = _currVideoFrame)
                        {
                            uint width = 0, height = 0;

                            byte[] i420 = null;

                            //Console.WriteLine($"Attempting vpx decode {_currVideoFramePosn} bytes.");

                            int decodeResult = _vpxDecoder.Decode(p, _currVideoFramePosn, ref i420, ref width, ref height);

                            if (decodeResult != 0)
                            {
                                Console.WriteLine("VPX decode of video sample failed.");
                            }
                            else
                            {
                                if (OnVideoSampleReady != null)
                                {
                                    fixed(byte *r = i420)
                                    {
                                        byte[] bmp     = null;
                                        int    stride  = 0;
                                        int    convRes = _imgConverter.ConvertYUVToRGB(r, VideoSubTypesEnum.I420, (int)width, (int)height, VideoSubTypesEnum.BGR24, ref bmp, ref stride);

                                        if (convRes == 0)
                                        {
                                            //fixed (byte* s = bmp)
                                            //{
                                            //    System.Drawing.Bitmap bmpImage = new System.Drawing.Bitmap((int)width, (int)height, stride, System.Drawing.Imaging.PixelFormat.Format24bppRgb, (IntPtr)s);
                                            //}
                                            OnVideoSampleReady(bmp, width, height, stride);
                                        }
                                        else
                                        {
                                            Log.LogWarning("Pixel format conversion of decoded sample failed.");
                                        }
                                    }
                                }
                            }
                        }
                    }

                    _currVideoFramePosn = 0;
                }
            }
            else
            {
                Log.LogWarning("Discarding RTP packet, VP8 header Start bit not set.");
                Log.LogWarning($"rtp video, seqnum {rtpPacket.Header.SequenceNumber}, ts {rtpPacket.Header.Timestamp}, marker {rtpPacket.Header.MarkerBit}, payload {rtpPacket.Payload.Length}, payload[0-5] {rtpPacket.Payload.HexStr(5)}.");
            }
        }
Example #12
 /// <summary>
 /// Event handler for the availability of a new RTP packet from a remote party.
 /// </summary>
 /// <param name="rtpPacket">The RTP packet from the remote party.</param>
 private void RemoteRtpPacketReceived(RTPPacket rtpPacket)
 {
     _mediaManager.EncodedAudioSampleReceived(rtpPacket.Payload);
 }
Example #13
        private static void OnRtpPacketReceived(IPEndPoint remoteEndPoint, SDPMediaTypesEnum mediaType, RTPPacket rtpPacket)
        {
            if (mediaType == SDPMediaTypesEnum.audio)
            {
                var sample = rtpPacket.Payload;

                for (int index = 0; index < sample.Length; index++)
                {
                    if (rtpPacket.Header.PayloadType == (int)SDPWellKnownMediaFormatsEnum.PCMA)
                    {
                        short  pcm       = NAudio.Codecs.ALawDecoder.ALawToLinearSample(sample[index]);
                        byte[] pcmSample = new byte[] { (byte)(pcm & 0xFF), (byte)(pcm >> 8) };
                        _waveFile.Write(pcmSample, 0, 2);
                    }
                    else
                    {
                        short  pcm       = NAudio.Codecs.MuLawDecoder.MuLawToLinearSample(sample[index]);
                        byte[] pcmSample = new byte[] { (byte)(pcm & 0xFF), (byte)(pcm >> 8) };
                        _waveFile.Write(pcmSample, 0, 2);
                    }
                }
            }
        }
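The handler writes two byte little-endian PCM samples, so the _waveFile it assumes must be an 8 kHz, 16 bit, mono writer; note it also treats any non-PCMA payload as PCMU. A minimal setup sketch using NAudio (the field name comes from the example; the file name is an assumption):

        private static WaveFileWriter _waveFile;

        private static void CreateWaveFile()
        {
            // Decoded G.711 (PCMA or PCMU) is 8 kHz, 16 bit, mono PCM, matching the
            // two byte little-endian samples written by the handler above.
            _waveFile = new WaveFileWriter("call.wav", new WaveFormat(8000, 16, 1));
        }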
Example #14
 private bool NewRTPPacket(RTPPacket packet)
 {
     return true;
 }
Example #15
 /// <summary>
 /// Forwards media from the SIP session to the WebRTC session.
 /// </summary>
 /// <param name="remote">The remote endpoint the RTP packet was received from.</param>
 /// <param name="mediaType">The type of media.</param>
 /// <param name="rtpPacket">The RTP packet received on the SIP session.</param>
 private static void ForwardAudioToPeerConnection(IPEndPoint remote, SDPMediaTypesEnum mediaType, RTPPacket rtpPacket)
 {
     if (_peerConnection != null && _peerConnection.connectionState == RTCPeerConnectionState.connected &&
         mediaType == SDPMediaTypesEnum.audio)
     {
         _peerConnection.SendAudio((uint)rtpPacket.Payload.Length, rtpPacket.Payload);
     }
 }
Example #16
        private static void RtpSession_OnRtpPacketReceived(SDPMediaTypesEnum mediaType, RTPPacket rtpPacket)
        {
            if (mediaType == SDPMediaTypesEnum.audio)
            {
                //Console.WriteLine($"rtp audio, seqnum {rtpPacket.Header.SequenceNumber}, payload type {rtpPacket.Header.PayloadType}, marker {rtpPacket.Header.MarkerBit}.");
            }
            else if (mediaType == SDPMediaTypesEnum.video)
            {
                //Console.WriteLine($"rtp video, seqnum {rtpPacket.Header.SequenceNumber}, ts {rtpPacket.Header.Timestamp}, marker {rtpPacket.Header.MarkerBit}, payload {rtpPacket.Payload.Length}, payload[0-5] {rtpPacket.Payload.HexStr(5)}.");

                // New frames must have the VP8 Payload Descriptor Start bit set.
                // The tracking of the current video frame position is to deal with a VP8 frame being split across multiple RTP packets
                // as per https://tools.ietf.org/html/rfc7741#section-4.4.
                if (_currVideoFramePosn > 0 || (rtpPacket.Payload[0] & 0x10) > 0)
                {
                    RtpVP8Header vp8Header = RtpVP8Header.GetVP8Header(rtpPacket.Payload);

                    Buffer.BlockCopy(rtpPacket.Payload, vp8Header.Length, _currVideoFrame, _currVideoFramePosn, rtpPacket.Payload.Length - vp8Header.Length);
                    _currVideoFramePosn += rtpPacket.Payload.Length - vp8Header.Length;

                    if (rtpPacket.Header.MarkerBit == 1)
                    {
                        unsafe
                        {
                            fixed(byte *p = _currVideoFrame)
                            {
                                uint width = 0, height = 0;

                                byte[] i420 = null;

                                //Console.WriteLine($"Attempting vpx decode {_currVideoFramePosn} bytes.");

                                int decodeResult = _vpxEncoder.Decode(p, _currVideoFramePosn, ref i420, ref width, ref height);

                                if (decodeResult != 0)
                                {
                                    Console.WriteLine("VPX decode of video sample failed.");
                                }
                                else
                                {
                                    //Console.WriteLine($"Video frame ready {width}x{height}.");

                                    fixed(byte *r = i420)
                                    {
                                        byte[] bmp     = null;
                                        int    stride  = 0;
                                        int    convRes = _imgConverter.ConvertYUVToRGB(r, VideoSubTypesEnum.I420, (int)width, (int)height, VideoSubTypesEnum.BGR24, ref bmp, ref stride);

                                        if (convRes == 0)
                                        {
                                            _form.BeginInvoke(new Action(() =>
                                            {
                                                fixed(byte *s = bmp)
                                                {
                                                    System.Drawing.Bitmap bmpImage = new System.Drawing.Bitmap((int)width, (int)height, stride, System.Drawing.Imaging.PixelFormat.Format24bppRgb, (IntPtr)s);
                                                    _picBox.Image = bmpImage;
                                                }
                                            }));
                                        }
                                        else
                                        {
                                            Console.WriteLine("Pixel format conversion of decoded sample failed.");
                                        }
                                    }
                                }
                            }
                        }

                        _currVideoFramePosn = 0;
                    }
                }
                else
                {
                    Console.WriteLine("Discarding RTP packet, VP8 header Start bit not set.");
                    Console.WriteLine($"rtp video, seqnum {rtpPacket.Header.SequenceNumber}, ts {rtpPacket.Header.Timestamp}, marker {rtpPacket.Header.MarkerBit}, payload {rtpPacket.Payload.Length}, payload[0-5] {rtpPacket.Payload.HexStr(5)}.");
                }
            }
        }
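A sketch of the VP8 payload descriptor check used in both video examples above. Per RFC 7741 the first payload byte is laid out as |X|R|N|S|R|PID|, and the first packet of a frame has the S bit (0x10) set with a PID of zero; the helper name is illustrative:

        private static bool IsVp8FrameStart(byte[] payload)
        {
            bool startOfPartition = (payload[0] & 0x10) != 0; // S bit.
            int partitionIndex = payload[0] & 0x07;           // PID field.

            // The first packet of each VP8 frame has S set and PID == 0 (RFC 7741).
            return startOfPartition && partitionIndex == 0;
        }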
Example #17
 public void AddData(RTPPacket rtp)
 {
 }
Example #18
 private bool NewRTPPacket(RTPPacket packet)
 {
     Console.WriteLine(packet.ToString());
     return true;
 }
Example #19
 private static void RtpAVSession_OnRtpPacketReceived(SDPMediaTypesEnum arg1, RTPPacket arg2)
 {
     throw new NotImplementedException();
 }
Example #20
        /// <summary>
        /// Depacketizes a packet.
        /// </summary>
        /// <param name="packet">The packet.</param>
        /// <returns>The depacketized payload bytes.</returns>
        public override byte[] Depacketize(RTPPacket packet)
        {
            _CurrentRTPSequenceNumber = packet.SequenceNumber;

            return _Padep.Depacketize(packet);
        }
Example #21
        public void SendRTPPacket(string sourceSocket, string destinationSocket)
        {
            try
            {
                //logger.Debug("Attempting to send RTP packet from " + sourceSocket + " to " + destinationSocket + ".");
                FireLogEvent("Attempting to send RTP packet from " + sourceSocket + " to " + destinationSocket + ".");
                
                IPEndPoint sourceEP = IPSocket.GetIPEndPoint(sourceSocket);
                IPEndPoint destEP = IPSocket.GetIPEndPoint(destinationSocket);

                RTPPacket rtpPacket = new RTPPacket(80);
                rtpPacket.Header.SequenceNumber = (UInt16)6500;
                rtpPacket.Header.Timestamp = 100000;

                UDPPacket udpPacket = new UDPPacket(sourceEP.Port, destEP.Port, rtpPacket.GetBytes());
                IPv4Header ipHeader = new IPv4Header(ProtocolType.Udp, Crypto.GetRandomInt(6), sourceEP.Address, destEP.Address);
                IPv4Packet ipPacket = new IPv4Packet(ipHeader, udpPacket.GetBytes());

                byte[] data = ipPacket.GetBytes();

                using (Socket rawSocket = new Socket(AddressFamily.InterNetwork, SocketType.Raw, ProtocolType.IP))
                {
                    rawSocket.SetSocketOption(SocketOptionLevel.IP, SocketOptionName.HeaderIncluded, 1);

                    rawSocket.SendTo(data, destEP);
                }
            }
            catch (Exception excp)
            {
                logger.Error("Exception SendRTPPacket. " + excp.Message);
            }
        }
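A usage note for the raw socket example above: HeaderIncluded sends require administrator or root privileges, the hand-built IPv4 header effectively spoofs the source address, and newer Windows versions block raw UDP sends outright. A hypothetical caller, with placeholder class name and endpoints:

        // Hypothetical usage sketch; RtpDiagnostics and the endpoint strings are
        // illustrative placeholders, and elevated privileges are required.
        public static void SendTestPacket()
        {
            var rtpTools = new RtpDiagnostics();
            rtpTools.SendRTPPacket("192.168.0.10:40000", "192.168.0.20:40002");
        }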
Example #22
 /// <summary>
 /// Decodes the packet. Doesn't return any bytes until all of the fragments have been received.
 /// </summary>
 /// <param name="packet">The RTP packet to decode.</param>
 /// <returns>The decoded bytes once all fragments have been received.</returns>
 public override byte[] DecodeToBytes(RTPPacket packet)
 {
     return base.DecodeToBytes(packet);
 }
 /// <summary>
 /// Depacketizes a packet.
 /// </summary>
 /// <param name="packet">The packet.</param>
 /// <returns>The depacketized payload bytes.</returns>
 public override byte[] Depacketize(RTPPacket packet)
 {
     return Padep.Depacketize(packet);
 }
 private static void Pc_OnRtpPacketReceived(IPEndPoint remoteEndPoint, SDPMediaTypesEnum mediaType, RTPPacket rtpPacket)
 {
 }