public RTPMediaSession(SDPMediaTypesEnum mediaType, int formatTypeID, AddressFamily addrFamily)
            : base(mediaType, formatTypeID, addrFamily, false, false)
        {
            // Construct the local SDP. There are a number of assumptions being made here:
            // PCMU audio, RTP event support etc.
            var mediaFormat       = new SDPMediaFormat(formatTypeID);
            var mediaAnnouncement = new SDPMediaAnnouncement
            {
                Media        = mediaType,
                MediaFormats = new List <SDPMediaFormat> {
                    mediaFormat
                },
                MediaStreamStatus = MediaStreamStatusEnum.SendRecv,
                Port = base.RtpChannel.RTPPort
            };

            if (mediaType == SDPMediaTypesEnum.audio)
            {
                // RTP event support.
                int            clockRate      = mediaFormat.GetClockRate();
                SDPMediaFormat rtpEventFormat = new SDPMediaFormat(DTMF_EVENT_PAYLOAD_ID);
                rtpEventFormat.SetFormatAttribute($"{TELEPHONE_EVENT_ATTRIBUTE}/{clockRate}");
                rtpEventFormat.SetFormatParameterAttribute("0-16");
                mediaAnnouncement.MediaFormats.Add(rtpEventFormat);
            }

            MediaAnnouncements.Add(mediaAnnouncement);
        }
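        // Hypothetical usage sketch (not part of the original example): creates an
        // audio-only session advertising PCMU over IPv4. Treating the
        // SDPMediaFormatsEnum.PCMU value as the integer payload ID is an assumption.
        private static RTPMediaSession CreatePcmuSession()
        {
            return new RTPMediaSession(
                SDPMediaTypesEnum.audio,
                (int)SDPMediaFormatsEnum.PCMU,   // PCMU is RTP payload type 0.
                AddressFamily.InterNetwork);
        }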
        /// <summary>
        /// Example of how to create a basic RTP session object and hook up the event handlers.
        /// </summary>
        /// <param name="ua">The suer agent the RTP session is being created for.</param>
        /// <returns>A new RTP session object.</returns>
        private static RtpAudioSession CreateRtpSession(SIPUserAgent ua)
        {
            var rtpAudioSession = new RtpAudioSession(AddressFamily.InterNetwork);

            // Add the required audio capabilities to the RTP session. These will
            // automatically get used when creating SDP offers/answers.
            var pcma = new SDPMediaFormat(SDPMediaFormatsEnum.PCMA);

            // RTP event support.
            int            clockRate      = pcma.GetClockRate();
            SDPMediaFormat rtpEventFormat = new SDPMediaFormat(DTMF_EVENT_PAYLOAD_ID);

            rtpEventFormat.SetFormatAttribute($"{RTPSession.TELEPHONE_EVENT_ATTRIBUTE}/{clockRate}");
            rtpEventFormat.SetFormatParameterAttribute("0-16");

            var audioCapabilities = new List <SDPMediaFormat> {
                pcma, rtpEventFormat
            };

            MediaStreamTrack audioTrack = new MediaStreamTrack(null, SDPMediaTypesEnum.audio, false, audioCapabilities);

            rtpAudioSession.addTrack(audioTrack);

            // Wire up the event handler for RTP packets received from the remote party.
            rtpAudioSession.OnRtpPacketReceived += (type, rtp) => OnRtpPacketReceived(ua, type, rtp);

            if (_sendSilenceTimer == null)
            {
                _sendSilenceTimer = new Timer(SendSilence, null, 0, SEND_SILENCE_PERIOD_MS);
            }

            return rtpAudioSession;
        }
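        // Hypothetical usage sketch (the destination URI parameter is a placeholder and
        // it is assumed RtpAudioSession satisfies the media session type accepted by
        // SIPUserAgent.Call): places an outbound call using the session created above.
        private static async Task<bool> PlaceCall(SIPUserAgent ua, string destination)
        {
            var rtpSession = CreateRtpSession(ua);

            // Call returns true if the call was answered by the remote party.
            return await ua.Call(destination, null, null, rtpSession);
        }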
        /// <summary>
        /// Creates a new RTP audio visual session with audio/video capturing and rendering capabilities.
        /// </summary>
        /// <param name="addrFamily">The address family to create the underlying socket on (IPv4 or IPv6).</param>
        /// <param name="audioOptions">Options for the send and receive audio streams on this session.</param>
        /// <param name="videoOptions">Options for the send and receive video streams on this session</param>
        public RtpAVSession(AddressFamily addrFamily, AudioOptions audioOptions, VideoOptions videoOptions)
            : base(addrFamily, false, false, false)
        {
            _audioOpts = audioOptions ?? DefaultAudioOptions;
            _videoOpts = videoOptions ?? DefaultVideoOptions;

            // Initialise the video decoding objects. Even if we are not sourcing video
            // we need to be ready to receive and render.
            _vpxDecoder = new VpxEncoder();
            int res = _vpxDecoder.InitDecoder();

            if (res != 0)
            {
                throw new ApplicationException("VPX decoder initialisation failed.");
            }
            _imgConverter = new ImageConvert();

            if (_audioOpts.AudioSource != AudioSourcesEnum.None)
            {
                var pcmu = new SDPMediaFormat(SDPMediaFormatsEnum.PCMU);

                // RTP event support.
                int            clockRate      = pcmu.GetClockRate();
                SDPMediaFormat rtpEventFormat = new SDPMediaFormat(DTMF_EVENT_PAYLOAD_ID);
                rtpEventFormat.SetFormatAttribute($"{TELEPHONE_EVENT_ATTRIBUTE}/{clockRate}");
                rtpEventFormat.SetFormatParameterAttribute("0-16");

                var audioCapabilities = new List <SDPMediaFormat> {
                    pcmu, rtpEventFormat
                };

                MediaStreamTrack audioTrack = new MediaStreamTrack(null, SDPMediaTypesEnum.audio, false, audioCapabilities);
                addTrack(audioTrack);
            }

            if (_videoOpts.VideoSource != VideoSourcesEnum.None)
            {
                MediaStreamTrack videoTrack = new MediaStreamTrack(null, SDPMediaTypesEnum.video, false, new List <SDPMediaFormat> {
                    new SDPMediaFormat(SDPMediaFormatsEnum.VP8)
                });
                addTrack(videoTrack);
            }

            // Where the magic (for processing received media) happens.
            base.OnRtpPacketReceived += RtpPacketReceived;
        }
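        // Hypothetical usage sketch (AudioSourcesEnum.Microphone and
        // VideoSourcesEnum.TestPattern are assumed to be available source options):
        // creates an IPv4 session that captures audio from the default microphone
        // and generates a test pattern video stream.
        private static RtpAVSession CreateAVSession()
        {
            return new RtpAVSession(
                AddressFamily.InterNetwork,
                new AudioOptions { AudioSource = AudioSourcesEnum.Microphone },
                new VideoOptions { VideoSource = VideoSourcesEnum.TestPattern });
        }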
        /// <summary>
        /// Creates a new basic RTP session that captures and renders audio to/from the default system devices.
        /// </summary>
        public NAudioRtpSession()
            : base(false, false, false)
        {
            var pcmu = new SDPMediaFormat(SDPMediaFormatsEnum.PCMU);
            var pcma = new SDPMediaFormat(SDPMediaFormatsEnum.PCMA);

            // RTP event support.
            int            clockRate      = pcmu.GetClockRate();
            SDPMediaFormat rtpEventFormat = new SDPMediaFormat(DTMF_EVENT_PAYLOAD_ID);

            rtpEventFormat.SetFormatAttribute($"{SDP.TELEPHONE_EVENT_ATTRIBUTE}/{clockRate}");
            rtpEventFormat.SetFormatParameterAttribute("0-16");

            var audioCapabilities = new List <SDPMediaFormat> {
                pcmu, pcma, rtpEventFormat
            };

            MediaStreamTrack audioTrack = new MediaStreamTrack(null, SDPMediaTypesEnum.audio, false, audioCapabilities);

            addTrack(audioTrack);

            // Where the magic (for processing received media) happens.
            base.OnRtpPacketReceived += RtpPacketReceived;
        }
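        // Hypothetical usage sketch (assumes the SIPUserAgent.Answer overload that
        // accepts a media session): answers an incoming call with a NAudioRtpSession
        // so audio is captured from and rendered to the default system devices.
        private static Task<bool> AnswerWithDefaultDevices(SIPUserAgent ua, SIPServerUserAgent uas)
        {
            var rtpSession = new NAudioRtpSession();
            return ua.Answer(uas, rtpSession);
        }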
        /// <summary>
        /// Creates a new RTP audio visual session with audio/video capturing and rendering capabilities.
        /// </summary>
        /// <param name="addrFamily">The address family to create the underlying socket on (IPv4 or IPv6).</param>
        /// <param name="audioOptions">Options for the send and receive audio streams on this session.</param>
        /// <param name="videoOptions">Options for the send and receive video streams on this session</param>
        /// <param name="bindAddress">Optional. If specified this address will be used as the bind address for any RTP
        /// and control sockets created. Generally this address does not need to be set. The default behaviour
        /// is to bind to [::] or 0.0.0.0,d depending on system support, which minimises network routing
        /// causing connection issues.</param>
        public RtpAVSession(AudioOptions audioOptions, VideoOptions videoOptions, IPAddress bindAddress = null)
            : base(false, false, false, bindAddress)
        {
            _audioOpts = audioOptions ?? DefaultAudioOptions;
            _videoOpts = videoOptions ?? DefaultVideoOptions;

            if (_audioOpts != null && _audioOpts.AudioCodecs != null &&
                _audioOpts.AudioCodecs.Any(x => !(x == SDPMediaFormatsEnum.PCMU || x == SDPMediaFormatsEnum.PCMA || x == SDPMediaFormatsEnum.G722)))
            {
                throw new ApplicationException("Only PCMA, PCMU and G722 are supported for audio codec options.");
            }

            // Initialise the video decoding objects. Even if we are not sourcing video
            // we need to be ready to receive and render.
            _vpxDecoder = new VpxEncoder();
            int res = _vpxDecoder.InitDecoder();

            if (res != 0)
            {
                throw new ApplicationException("VPX decoder initialisation failed.");
            }
            _imgConverter = new ImageConvert();

            if (_audioOpts.AudioSource != AudioSourcesEnum.None)
            {
                var pcmu = new SDPMediaFormat(SDPMediaFormatsEnum.PCMU);

                // RTP event support.
                int            clockRate      = pcmu.GetClockRate();
                SDPMediaFormat rtpEventFormat = new SDPMediaFormat(DTMF_EVENT_PAYLOAD_ID);
                rtpEventFormat.SetFormatAttribute($"{SDP.TELEPHONE_EVENT_ATTRIBUTE}/{clockRate}");
                rtpEventFormat.SetFormatParameterAttribute("0-16");

                var audioCapabilities = new List <SDPMediaFormat>();
                if (_audioOpts.AudioCodecs == null || _audioOpts.AudioCodecs.Count == 0)
                {
                    audioCapabilities.Add(pcmu);
                }
                else
                {
                    foreach (var codec in _audioOpts.AudioCodecs)
                    {
                        audioCapabilities.Add(new SDPMediaFormat(codec));
                    }
                }
                audioCapabilities.Add(rtpEventFormat);

                if (audioCapabilities.Any(x => x.FormatCodec == SDPMediaFormatsEnum.G722))
                {
                    _g722Encode      = new G722Codec();
                    _g722EncodeState = new G722CodecState(64000, G722Flags.None);
                    _g722Decode      = new G722Codec();
                    _g722DecodeState = new G722CodecState(64000, G722Flags.None);
                }

                MediaStreamTrack audioTrack = new MediaStreamTrack(null, SDPMediaTypesEnum.audio, false, audioCapabilities);
                addTrack(audioTrack);
            }

            if (_videoOpts.VideoSource != VideoSourcesEnum.None)
            {
                MediaStreamTrack videoTrack = new MediaStreamTrack(null, SDPMediaTypesEnum.video, false, new List <SDPMediaFormat> {
                    new SDPMediaFormat(SDPMediaFormatsEnum.VP8)
                });
                addTrack(videoTrack);
            }

            // Where the magic (for processing received media) happens.
            base.OnRtpPacketReceived += RtpPacketReceived;
        }
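        // Hypothetical usage sketch (assumes AudioCodecs is a List<SDPMediaFormatsEnum>
        // and that Microphone is an available audio source): restricts the audio offer
        // to G722 and PCMU, disables video and binds the RTP sockets to a specific
        // local address.
        private static RtpAVSession CreateG722Session(IPAddress localBindAddress)
        {
            var audioOpts = new AudioOptions
            {
                AudioSource = AudioSourcesEnum.Microphone,
                AudioCodecs = new List<SDPMediaFormatsEnum> { SDPMediaFormatsEnum.G722, SDPMediaFormatsEnum.PCMU }
            };
            var videoOpts = new VideoOptions { VideoSource = VideoSourcesEnum.None };

            return new RtpAVSession(audioOpts, videoOpts, localBindAddress);
        }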