/// <summary>
/// Waits for the ffmpeg SDP file to be written and then starts an RTP session listening
/// on the loopback address for the ffmpeg video stream.
/// </summary>
/// <param name="sdpPath">Path of the SDP file to wait for. Fix: this parameter was previously
/// ignored in favour of the FFMPEG_SDP_FILE constant; it is now used, with the constant as
/// the fallback when null/empty.</param>
/// <param name="cancel">Token that aborts both the file wait and the listener start up.</param>
private static async Task StartFfmpegListener(string sdpPath, CancellationToken cancel)
{
    string path = string.IsNullOrEmpty(sdpPath) ? FFMPEG_SDP_FILE : sdpPath;

    // Poll until ffmpeg has written its SDP file or cancellation is requested.
    while (!File.Exists(path) && !cancel.IsCancellationRequested)
    {
        try
        {
            // Fix: pass the token so the delay aborts promptly instead of waiting out
            // the full 500ms after cancellation.
            await Task.Delay(500, cancel);
        }
        catch (OperationCanceledException)
        {
            // Preserve the original non-throwing behaviour on cancellation.
            break;
        }
    }

    if (!cancel.IsCancellationRequested)
    {
        var sdp = SDP.ParseSDPDescription(File.ReadAllText(path));

        // The SDP is only expected to contain a single video media announcement.
        var videoAnn = sdp.Media.Single(x => x.Media == SDPMediaTypesEnum.video);
        _ffmpegVideoFormat = videoAnn.MediaFormats.First();

        _ffmpegListener = new RTPSession(false, false, false, IPAddress.Loopback, FFMPEG_DEFAULT_RTP_PORT);
        _ffmpegListener.AcceptRtpFromAny = true;
        MediaStreamTrack videoTrack = new MediaStreamTrack(SDPMediaTypesEnum.video, false,
            new List<SDPMediaFormat> { _ffmpegVideoFormat }, MediaStreamStatusEnum.RecvOnly);
        _ffmpegListener.addTrack(videoTrack);
        _ffmpegListener.SetRemoteDescription(SIP.App.SdpType.answer, sdp);

        // Set a dummy destination end point or the RTP session will end up sending RTCP reports
        // to itself.
        var dummyIPEndPoint = new IPEndPoint(IPAddress.Loopback, 0);
        _ffmpegListener.SetDestination(SDPMediaTypesEnum.video, dummyIPEndPoint, dummyIPEndPoint);

        await _ffmpegListener.Start();
    }
}
/// <summary>
/// Creates a media session and constructs its local SDP announcement.
/// A number of assumptions are made here: PCMU audio, RTP event support etc.
/// </summary>
/// <param name="mediaType">The media type (audio or video) for the session.</param>
/// <param name="formatTypeID">The payload type ID of the primary media format.</param>
/// <param name="addrFamily">The address family (IPv4/IPv6) for the underlying RTP channel.</param>
public RTPMediaSession(SDPMediaTypesEnum mediaType, int formatTypeID, AddressFamily addrFamily) :
    base(mediaType, formatTypeID, addrFamily, false, false)
{
    var primaryFormat = new SDPMediaFormat(formatTypeID);

    var announcement = new SDPMediaAnnouncement();
    announcement.Media = mediaType;
    announcement.MediaFormats = new List<SDPMediaFormat> { primaryFormat };
    announcement.MediaStreamStatus = MediaStreamStatusEnum.SendRecv;
    announcement.Port = base.RtpChannel.RTPPort;

    if (mediaType == SDPMediaTypesEnum.audio)
    {
        // Advertise RTP (telephone) event support alongside the audio codec.
        var eventFormat = new SDPMediaFormat(DTMF_EVENT_PAYLOAD_ID);
        eventFormat.SetFormatAttribute($"{TELEPHONE_EVENT_ATTRIBUTE}/{primaryFormat.GetClockRate()}");
        eventFormat.SetFormatParameterAttribute("0-16");
        announcement.MediaFormats.Add(eventFormat);
    }

    MediaAnnouncements.Add(announcement);
}
/// <summary>
/// Builds the SDP for a real-time media (play) request.
/// </summary>
/// <param name="localIp">The local IP address to advertise in the SDP.</param>
/// <param name="mediaPort">The RTP/RTCP media ports (e.g. 10000/10001); only the first entry is used.</param>
/// <returns>The SDP offer serialised to a string.</returns>
private string SetMediaReq(string localIp, int[] mediaPort)
{
    var sdp = new SDP
    {
        Version = 0,
        SessionId = "0",
        Username = _msgCore.LocalSIPId,
        SessionName = CommandType.Play.ToString(),
        Connection = new SDPConnectionInformation(localIp),
        Timing = "0 0",
        Address = localIp
    };

    // IsStandardAttribute = false: presumably suppresses the standard rtpmap line for
    // these non-standard payloads — confirm against SDPMediaFormat's serialisation.
    var psFormat = new SDPMediaFormat(SDPMediaFormatsEnum.PS) { IsStandardAttribute = false };
    var h264Format = new SDPMediaFormat(SDPMediaFormatsEnum.H264) { IsStandardAttribute = false };

    var media = new SDPMediaAnnouncement { Media = SDPMediaTypesEnum.video };
    media.MediaFormats.Add(psFormat);
    media.MediaFormats.Add(h264Format);
    media.AddExtra("a=recvonly");
    media.AddFormatParameterAttribute(psFormat.FormatID, psFormat.Name);
    media.AddFormatParameterAttribute(h264Format.FormatID, h264Format.Name);
    media.Port = mediaPort[0];
    sdp.Media.Add(media);

    return sdp.ToString();
}
/// <summary>
/// Starts the media capturing/source devices.
/// </summary>
public override async Task Start()
{
    if (_isStarted)
    {
        return;
    }

    // The sending format needs to be known before initialising some audio sources,
    // e.g. the microphone sampling rate is 8KHz for G711 and 16KHz for G722.
    _sendingAudioFormat = base.GetSendingFormat(SDPMediaTypesEnum.audio);

    _isStarted = true;

    await base.Start();

    if (_audioOpts.AudioSource != AudioSourcesEnum.None)
    {
        SetAudioSource(_audioOpts, _sendingAudioFormat);
        StartAudio();
    }

    if (_videoOpts.VideoSource != VideoSourcesEnum.None)
    {
        await SetVideoSource(_videoOpts).ConfigureAwait(false);
        StartVideo();
    }
}
/// <summary>
/// Builds the SDP for a real-time audio (play) request.
/// </summary>
/// <param name="localIp">The local IP address to advertise in the SDP.</param>
/// <param name="port">The RTP media port.</param>
/// <param name="audioId">The ID used as the SDP username.</param>
/// <returns>The SDP serialised to a string.</returns>
private string SetMediaAudio(string localIp, int port, string audioId)
{
    var sdp = new SDP
    {
        Version = 0,
        SessionId = "0",
        Username = audioId,
        SessionName = CommandType.Play.ToString(),
        Connection = new SDPConnectionInformation(localIp),
        Timing = "0 0",
        Address = localIp
    };

    // IsStandardAttribute = false: presumably suppresses the standard rtpmap line for
    // the non-standard PS payload — confirm against SDPMediaFormat's serialisation.
    var psFormat = new SDPMediaFormat(SDPMediaFormatsEnum.PS) { IsStandardAttribute = false };

    var media = new SDPMediaAnnouncement { Media = SDPMediaTypesEnum.audio };
    media.MediaFormats.Add(psFormat);
    media.AddExtra("a=sendonly");
    // "y=" line — presumably the GB28181 SSRC parameter; confirm against the peer's requirements.
    media.AddExtra("y=0100000002");
    //media.AddExtra("f=v/////a/1/8/1");
    media.AddFormatParameterAttribute(psFormat.FormatID, psFormat.Name);
    media.Port = port;
    sdp.Media.Add(media);

    return sdp.ToString();
}
/// <summary>
/// Initializes a new instance of the <see cref="T:SIPLib.SIP.SDPMedia" /> class.
/// </summary>
/// <param name="value">The input string representing the m line in the SDP</param>
/// <param name="attrDict">An optional dictionary containing the m= parameters</param>
public SDPMedia(string value = null, Dictionary<string, string> attrDict = null)
{
    Mediaformats = new List<SDPMediaFormat>();
    OtherAttributes = new Dictionary<string, string>();
    if (value != null)
    {
        // m=<media> <port> <proto> <fmt list>; split into at most 4 tokens so the
        // whole format list stays together in the final token.
        string[] values = value.Split(" ".ToCharArray(), 4);
        // Fix: guard against malformed m= lines with fewer than 4 fields, which
        // previously threw IndexOutOfRangeException. Defaults mirror the attrDict branch.
        Media = values.Length > 0 ? values[0] : null;
        Port = values.Length > 1 ? values[1] : "0";
        Proto = values.Length > 2 ? values[2] : "RTP/AVP";
        string rest = values.Length > 3 ? values[3] : string.Empty;
        Mediaformats = new List<SDPMediaFormat>();
        if (rest.Length > 0)
        {
            foreach (string s in rest.Split(' '))
            {
                SDPMediaFormat fmt = new SDPMediaFormat { Pt = s };
                Mediaformats.Add(fmt);
            }
        }
    }
    else if (attrDict != null && attrDict.ContainsKey("media"))
    {
        Media = attrDict["media"];
        Port = attrDict.ContainsKey("port") ? attrDict["port"] : "0";
        Proto = attrDict.ContainsKey("proto") ? attrDict["proto"] : "RTP/AVP";
        Mediaformats = new List<SDPMediaFormat>();
        if (attrDict.ContainsKey("fmt"))
        {
            foreach (string s in attrDict["fmt"].Split(' '))
            {
                SDPMediaFormat fmt = new SDPMediaFormat { Pt = s };
                Mediaformats.Add(fmt);
            }
        }
    }
}
/// <summary>
/// Example of how to create a basic RTP session object and hook up the event handlers.
/// </summary>
/// <param name="ua">The user agent the RTP session is being created for.</param>
/// <returns>A new RTP session object.</returns>
private static RtpAudioSession CreateRtpSession(SIPUserAgent ua)
{
    var rtpAudioSession = new RtpAudioSession(AddressFamily.InterNetwork);

    // Add the required audio capabilities to the RTP session. These will
    // automatically get used when creating SDP offers/answers.
    var pcma = new SDPMediaFormat(SDPMediaFormatsEnum.PCMA);

    // RTP event support (telephone-event, i.e. DTMF) at the PCMA clock rate.
    int clockRate = pcma.GetClockRate();
    SDPMediaFormat rtpEventFormat = new SDPMediaFormat(DTMF_EVENT_PAYLOAD_ID);
    rtpEventFormat.SetFormatAttribute($"{RTPSession.TELEPHONE_EVENT_ATTRIBUTE}/{clockRate}");
    rtpEventFormat.SetFormatParameterAttribute("0-16");

    var audioCapabilities = new List <SDPMediaFormat> { pcma, rtpEventFormat };

    MediaStreamTrack audioTrack = new MediaStreamTrack(null, SDPMediaTypesEnum.audio, false, audioCapabilities);
    rtpAudioSession.addTrack(audioTrack);

    // Wire up the event handler for RTP packets received from the remote party.
    rtpAudioSession.OnRtpPacketReceived += (type, rtp) => OnRtpPacketReceived(ua, type, rtp);

    // Lazily create a single shared timer that keeps the stream alive with silence packets.
    if (_sendSilenceTimer == null)
    {
        _sendSilenceTimer = new Timer(SendSilence, null, 0, SEND_SILENCE_PERIOD_MS);
    }

    return (rtpAudioSession);
}
/// <summary>
/// Once the video source has been initialised this method needs to be called to start it.
/// </summary>
private void StartVideo()
{
    bool canStartTestPattern =
        _videoOpts.VideoSource == VideoSourcesEnum.TestPattern &&
        _testPatternVideoSource != null;

    if (canStartTestPattern)
    {
        // Resolve the negotiated video format before the source begins producing frames.
        _sendingVideoFormat = base.GetSendingFormat(SDPMediaTypesEnum.video);
        _testPatternVideoSource.Start();
    }
}
/// <summary>
/// Creates a new RTP audio visual session with audio/video capturing and rendering capabilities.
/// </summary>
/// <param name="addrFamily">The address family to create the underlying socket on (IPv4 or IPv6).</param>
/// <param name="audioOptions">Options for the send and receive audio streams on this session.</param>
/// <param name="videoOptions">Options for the send and receive video streams on this session.</param>
public RtpAVSession(AddressFamily addrFamily, AudioOptions audioOptions, VideoOptions videoOptions) :
    base(addrFamily, false, false, false)
{
    _audioOpts = audioOptions ?? DefaultAudioOptions;
    _videoOpts = videoOptions ?? DefaultVideoOptions;

    // Initialise the video decoding objects. Even when not sourcing video the
    // session must be ready to receive and render it.
    _vpxDecoder = new VpxEncoder();
    int initResult = _vpxDecoder.InitDecoder();
    if (initResult != 0)
    {
        throw new ApplicationException("VPX decoder initialisation failed.");
    }

    _imgConverter = new ImageConvert();

    if (_audioOpts.AudioSource != AudioSourcesEnum.None)
    {
        var pcmu = new SDPMediaFormat(SDPMediaFormatsEnum.PCMU);

        // Advertise RTP telephone-event (DTMF) support at the PCMU clock rate.
        var dtmfFormat = new SDPMediaFormat(DTMF_EVENT_PAYLOAD_ID);
        dtmfFormat.SetFormatAttribute($"{TELEPHONE_EVENT_ATTRIBUTE}/{pcmu.GetClockRate()}");
        dtmfFormat.SetFormatParameterAttribute("0-16");

        var audioFormats = new List<SDPMediaFormat> { pcmu, dtmfFormat };
        addTrack(new MediaStreamTrack(null, SDPMediaTypesEnum.audio, false, audioFormats));
    }

    if (_videoOpts.VideoSource != VideoSourcesEnum.None)
    {
        var videoFormats = new List<SDPMediaFormat> { new SDPMediaFormat(SDPMediaFormatsEnum.VP8) };
        addTrack(new MediaStreamTrack(null, SDPMediaTypesEnum.video, false, videoFormats));
    }

    // Where the magic (for processing received media) happens.
    base.OnRtpPacketReceived += RtpPacketReceived;
}
/// <summary>
/// Creates a WebRTC peer connection with a single send-only video track and wires up
/// logging, ICE candidate relaying over the web socket, and forwarding of the ffmpeg
/// RTP stream once the connection is established.
/// </summary>
private static RTCPeerConnection Createpc(WebSocketContext context, SDPMediaFormat videoFormat)
{
    var pc = new RTCPeerConnection(null);

    // Single send-only video track using the supplied video format.
    MediaStreamTrack videoTrack = new MediaStreamTrack(SDPMediaTypesEnum.video, false, new List <SDPMediaFormat> { videoFormat }, MediaStreamStatusEnum.SendOnly);
    pc.addTrack(videoTrack);

    pc.onicecandidateerror += (candidate, error) => logger.LogWarning($"Error adding remote ICE candidate. {error} {candidate}");
    pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");
    //pc.OnReceiveReport += (type, rtcp) => logger.LogDebug($"RTCP {type} report received.");
    pc.OnRtcpBye += (reason) => logger.LogDebug($"RTCP BYE receive, reason: {(string.IsNullOrWhiteSpace(reason) ? "<none>" : reason)}.");
    pc.OnRtpClosed += (reason) => logger.LogDebug($"Peer connection closed, reason: {(string.IsNullOrWhiteSpace(reason) ? "<none>" : reason)}.");

    pc.onicecandidate += (candidate) =>
    {
        // Only relay candidates while an offer/answer exchange is in progress.
        if (pc.signalingState == RTCSignalingState.have_local_offer ||
            pc.signalingState == RTCSignalingState.have_remote_offer)
        {
            context.WebSocket.Send($"candidate:{candidate}");
        }
    };

    pc.onconnectionstatechange += (state) =>
    {
        logger.LogDebug($"Peer connection state changed to {state}.");

        if (state == RTCPeerConnectionState.connected)
        {
            logger.LogDebug("Creating RTP session to receive ffmpeg stream.");

            // Forward each video RTP packet from the ffmpeg listener straight to the
            // WebRTC peer, preserving timestamp, marker bit and payload type.
            // NOTE(review): this handler is registered on every transition to
            // 'connected'; repeated reconnects would register duplicate handlers —
            // confirm whether that is intended.
            _ffmpegListener.OnRtpPacketReceived += (ep, media, rtpPkt) =>
            {
                if (media == SDPMediaTypesEnum.video && pc.VideoDestinationEndPoint != null)
                {
                    //logger.LogDebug($"Forwarding {media} RTP packet to webrtc peer timestamp {rtpPkt.Header.Timestamp}.");
                    pc.SendRtpRaw(media, rtpPkt.Payload, rtpPkt.Header.Timestamp, rtpPkt.Header.MarkerBit, rtpPkt.Header.PayloadType);
                }
            };
        }
    };

    return (pc);
}
/// <summary>
/// Starts the media capturing/source devices.
/// </summary>
public override async Task Start()
{
    if (!_isStarted)
    {
        // Resolve the negotiated sending codec before the devices are initialised.
        _sendingAudioFormat = base.GetSendingFormat(SDPMediaTypesEnum.audio);

        _isStarted = true;

        await base.Start();

        // Render device.
        _waveOutEvent = new WaveOutEvent();
        _waveOutEvent.DeviceNumber = AUDIO_OUTPUTDEVICE_INDEX;
        _waveProvider = new BufferedWaveProvider(_waveFormat);
        // Drop samples rather than throw if playback falls behind.
        _waveProvider.DiscardOnBufferOverflow = true;
        _waveOutEvent.Init(_waveProvider);
        _waveOutEvent.Play();

        // Audio source: capture from the first input device if one is present.
        if (WaveInEvent.DeviceCount > 0)
        {
            _waveInEvent = new WaveInEvent();
            _waveInEvent.BufferMilliseconds = AUDIO_SAMPLE_PERIOD_MILLISECONDS;
            _waveInEvent.NumberOfBuffers = 1;
            _waveInEvent.DeviceNumber = 0;
            _waveInEvent.WaveFormat = _waveFormat;
            _waveInEvent.DataAvailable += LocalAudioSampleAvailable;
            _waveInEvent.StartRecording();
        }
        else
        {
            Log.LogWarning("No audio capture devices are available. No audio stream will be sent.");
        }

        if (_rtpAudioTimestampPeriod == 0)
        {
            // NOTE(review): dividing the clock rate by the sample period (in ms) looks
            // dimensionally off; samples-per-period is usually rate * period / 1000.
            // Confirm against how _rtpAudioTimestampPeriod is consumed.
            _rtpAudioTimestampPeriod = (uint)(SDPMediaFormatInfo.GetClockRate(SDPMediaFormatsEnum.PCMU) / AUDIO_SAMPLE_PERIOD_MILLISECONDS);
        }
    }
}
/// <summary>
/// Initialise the audio capture and render device.
/// </summary>
/// <param name="audioSourceOpts">The options that dictate the type of audio source to use.</param>
/// <param name="sendingFormat">The codec that will be used to send the audio.</param>
private void SetAudioSource(AudioOptions audioSourceOpts, SDPMediaFormat sendingFormat)
{
    // The sample clock rate drives the capture/render wave format, the RTP clock rate
    // drives the timestamp period (these can differ for some codecs — confirm against
    // SDPMediaFormatInfo).
    uint sampleRate = (uint)SDPMediaFormatInfo.GetClockRate(sendingFormat.FormatCodec);
    uint rtpTimestamptRate = (uint)SDPMediaFormatInfo.GetRtpClockRate(sendingFormat.FormatCodec);
    _rtpAudioTimestampPeriod = rtpTimestamptRate * AUDIO_SAMPLE_PERIOD_MILLISECONDS / 1000;

    WaveFormat waveFormat = new WaveFormat((int)sampleRate, BITS_PER_SAMPLE, CHANNEL_COUNT);

    // Render device (created once and reused across source switches).
    if (_waveOutEvent == null)
    {
        _waveOutEvent = new WaveOutEvent();
        _waveOutEvent.DeviceNumber = (_audioOpts != null) ? _audioOpts.OutputDeviceIndex : AudioOptions.DEFAULT_OUTPUTDEVICE_INDEX;
        _waveProvider = new BufferedWaveProvider(waveFormat);
        // Drop samples rather than throw if playback falls behind.
        _waveProvider.DiscardOnBufferOverflow = true;
        _waveOutEvent.Init(_waveProvider);
    }

    // Audio source (e.g. microphone) — skipped entirely when external audio is disabled.
    if (!_disableExternalAudioSource)
    {
        if (_waveInEvent == null)
        {
            if (WaveInEvent.DeviceCount > 0)
            {
                _waveInEvent = new WaveInEvent();
                _waveInEvent.BufferMilliseconds = AUDIO_SAMPLE_PERIOD_MILLISECONDS;
                _waveInEvent.NumberOfBuffers = INPUT_BUFFERS;
                _waveInEvent.DeviceNumber = 0;
                _waveInEvent.WaveFormat = waveFormat;
                _waveInEvent.DataAvailable += LocalAudioSampleAvailable;
            }
            else
            {
                Log.LogWarning("No audio capture devices are available. No audio stream will be sent.");
            }
        }
    }
}
/// <summary>
/// Creates an audio only RTP session that can supply an audio stream to the caller.
/// </summary>
/// <param name="audioOptions">The options that determine the type of audio to stream to the remote party. Example
/// type of audio sources are music, silence, white noise etc.</param>
/// <param name="audioCodecs">The audio codecs to support.</param>
/// <param name="bindAddress">Optional. If specified this address will be used as the bind address for any RTP
/// and control sockets created. Generally this address does not need to be set. The default behaviour
/// is to bind to [::] or 0.0.0.0,d depending on system support, which minimises network routing
/// causing connection issues.</param>
public RtpAudioSession(AudioSourceOptions audioOptions, List <SDPMediaFormatsEnum> audioCodecs, IPAddress bindAddress = null) :
    base(false, false, false, bindAddress)
{
    if (audioCodecs == null || audioCodecs.Count() == 0)
    {
        // No codecs supplied; fall back to the full supported set.
        _audioCodecs = new List<SDPMediaFormatsEnum>
        {
            SDPMediaFormatsEnum.PCMU,
            SDPMediaFormatsEnum.PCMA,
            SDPMediaFormatsEnum.G722
        };
    }
    else if (audioCodecs.Any(x => !(x == SDPMediaFormatsEnum.PCMU || x == SDPMediaFormatsEnum.PCMA || x == SDPMediaFormatsEnum.G722)))
    {
        throw new ApplicationException("Only PCMA, PCMU and G722 audio codecs are supported.");
    }

    _audioOpts = audioOptions;
    _audioCodecs = audioCodecs ?? _audioCodecs;

    var capabilities = _audioCodecs.Select(codec => new SDPMediaFormat(codec)).ToList();

    // RTP event support.
    var dtmfFormat = new SDPMediaFormat(DTMF_EVENT_PAYLOAD_ID);
    dtmfFormat.SetFormatAttribute($"{SDP.TELEPHONE_EVENT_ATTRIBUTE}/{RTP_TIMESTAMP_RATE}");
    dtmfFormat.SetFormatParameterAttribute("0-16");
    capabilities.Add(dtmfFormat);

    // Add a local audio track to the RTP session.
    base.addTrack(new MediaStreamTrack(SDPMediaTypesEnum.audio, false, capabilities));
}
/// <summary>
/// Starts the media capturing/source devices.
/// </summary>
public override async Task Start()
{
    if (!_isStarted)
    {
        // Resolve the negotiated sending codec before the devices are initialised.
        _sendingAudioFormat = base.GetSendingFormat(SDPMediaTypesEnum.audio);

        _isStarted = true;

        await base.Start();

        PortAudio.Initialize();

        var outputDevice = PortAudio.DefaultOutputDevice;
        if (outputDevice == PortAudio.NoDevice)
        {
            throw new ApplicationException("No audio output device available.");
        }
        else
        {
            // Capture (input) uses device 0; render uses the default output device.
            StreamParameters stmInParams = new StreamParameters { device = 0, channelCount = 2, sampleFormat = SampleFormat.Float32 };
            StreamParameters stmOutParams = new StreamParameters { device = outputDevice, channelCount = 2, sampleFormat = SampleFormat.Float32 };

            // Combined audio capture and render.
            _audioIOStream = new Stream(stmInParams, stmOutParams, AUDIO_SAMPLING_RATE, AUDIO_SAMPLE_BUFFER_LENGTH, StreamFlags.NoFlag, AudioSampleAvailable, null);
            _audioIOStream.Start();
        }

        if (_rtpAudioTimestampPeriod == 0)
        {
            // NOTE(review): clock rate divided by buffer length; verify this matches
            // how _rtpAudioTimestampPeriod is consumed elsewhere.
            _rtpAudioTimestampPeriod = (uint)(SDPMediaFormatInfo.GetClockRate(SDPMediaFormatsEnum.PCMU) / AUDIO_SAMPLE_BUFFER_LENGTH);
        }
    }
}
/// <summary>
/// Builds the SDP for a real-time audio (play) request.
/// </summary>
/// <param name="localIp">The local IP address to advertise in the SDP.</param>
/// <param name="port">The RTP media port.</param>
/// <param name="audioId">The ID used as the SDP username.</param>
/// <returns>The SDP serialised to a string.</returns>
private string SetMediaAudio(string localIp, int port, string audioId)
{
    SDPConnectionInformation connection = new SDPConnectionInformation(localIp);

    SDP sdp = new SDP();
    sdp.Version = 0;
    sdp.SessionId = "0";
    sdp.Username = audioId;
    sdp.SessionName = CommandType.Play.ToString();
    sdp.Connection = connection;
    sdp.Timing = "0 0";
    sdp.Address = localIp;

    // IsStandardAttribute = false: presumably suppresses the standard rtpmap line for
    // the non-standard PS payload — confirm against SDPMediaFormat's serialisation.
    SDPMediaFormat psFormat = new SDPMediaFormat(SDPMediaFormatsEnum.PS);
    psFormat.IsStandardAttribute = false;

    SDPMediaAnnouncement announcement = new SDPMediaAnnouncement();
    announcement.Media = SDPMediaTypesEnum.audio;
    announcement.MediaFormats.Add(psFormat);
    announcement.AddExtra("a=sendonly");
    // "y=" line — presumably the GB28181 SSRC parameter; confirm against the peer's requirements.
    announcement.AddExtra("y=0100000002");
    //announcement.AddExtra("f=v/////a/1/8/1");
    announcement.AddFormatParameterAttribute(psFormat.FormatID, psFormat.Name);
    announcement.Port = port;
    sdp.Media.Add(announcement);

    return sdp.ToString();
}
/// <summary>
/// Creates a dummy audio-only RTP session that generates its audio stream internally.
/// </summary>
/// <param name="audioOptions">Options determining the dummy audio source (silence, noise, music etc.).</param>
/// <param name="audioCodecs">The audio codecs to support. When null or empty a default list of
/// PCMA, PCMU and G722 is used.</param>
public RtpAudioSession(DummyAudioOptions audioOptions, List <SDPMediaFormatsEnum> audioCodecs) :
    base(false, false, false)
{
    if (audioCodecs == null || audioCodecs.Count() == 0)
    {
        _audioCodecs = new List <SDPMediaFormatsEnum> { SDPMediaFormatsEnum.PCMA, SDPMediaFormatsEnum.PCMU, SDPMediaFormatsEnum.G722 };
    }
    else if (audioCodecs.Any(x => !(x == SDPMediaFormatsEnum.PCMU || x == SDPMediaFormatsEnum.PCMA || x == SDPMediaFormatsEnum.G722)))
    {
        throw new ApplicationException("Only PCMA, PCMU and G722 audio codecs are supported.");
    }
    else
    {
        // Fix: the caller's list was previously assigned unconditionally AFTER the
        // null/empty branch, overwriting the default codec list with null (or an empty
        // list) and leaving the session with no audio codecs.
        _audioCodecs = audioCodecs;
    }

    _audioOpts = audioOptions;

    // RTP event support.
    SDPMediaFormat rtpEventFormat = new SDPMediaFormat(DTMF_EVENT_PAYLOAD_ID);
    rtpEventFormat.SetFormatAttribute($"{SDP.TELEPHONE_EVENT_ATTRIBUTE}/{SAMPLE_RATE}");
    rtpEventFormat.SetFormatParameterAttribute("0-16");

    var audioCapabilities = new List <SDPMediaFormat> { rtpEventFormat };
    foreach (var codec in _audioCodecs)
    {
        audioCapabilities.Add(new SDPMediaFormat(codec));
    }

    // Add a local audio track advertising the supported codecs plus RTP events.
    MediaStreamTrack audioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, audioCapabilities);
    base.addTrack(audioTrack);
}
/// <summary>
/// Creates a new basic RTP session that captures and renders audio to/from the default system devices.
/// </summary>
public NAudioRtpSession() :
    base(false, false, false)
{
    var pcmu = new SDPMediaFormat(SDPMediaFormatsEnum.PCMU);
    var pcma = new SDPMediaFormat(SDPMediaFormatsEnum.PCMA);

    // Advertise RTP telephone-event (DTMF) support at the PCMU clock rate.
    var dtmfFormat = new SDPMediaFormat(DTMF_EVENT_PAYLOAD_ID);
    dtmfFormat.SetFormatAttribute($"{SDP.TELEPHONE_EVENT_ATTRIBUTE}/{pcmu.GetClockRate()}");
    dtmfFormat.SetFormatParameterAttribute("0-16");

    var capabilities = new List<SDPMediaFormat> { pcmu, pcma, dtmfFormat };
    addTrack(new MediaStreamTrack(null, SDPMediaTypesEnum.audio, false, capabilities));

    // Where the magic (for processing received media) happens.
    base.OnRtpPacketReceived += RtpPacketReceived;
}
/// <summary>
/// Creates a new RTP audio visual session with audio/video capturing and rendering capabilities.
/// </summary>
/// <param name="audioOptions">Options for the send and receive audio streams on this session.</param>
/// <param name="videoOptions">Options for the send and receive video streams on this session</param>
/// <param name="bindAddress">Optional. If specified this address will be used as the bind address for any RTP
/// and control sockets created. Generally this address does not need to be set. The default behaviour
/// is to bind to [::] or 0.0.0.0, depending on system support, which minimises network routing
/// causing connection issues.</param>
/// <param name="disableExternalAudioSource">If true then no attempt will be made to use an external audio
/// source, e.g. microphone.</param>
public RtpAVSession(AudioOptions audioOptions, VideoOptions videoOptions, IPAddress bindAddress = null, bool disableExternalAudioSource = false) :
    base(false, false, false, bindAddress)
{
    _audioOpts = audioOptions ?? DefaultAudioOptions;
    _videoOpts = videoOptions ?? DefaultVideoOptions;
    _disableExternalAudioSource = disableExternalAudioSource;

    // Reject any codec outside the supported PCMU/PCMA/G722 set up front.
    if (_audioOpts != null && _audioOpts.AudioCodecs != null &&
        _audioOpts.AudioCodecs.Any(x => !(x == SDPMediaFormatsEnum.PCMU || x == SDPMediaFormatsEnum.PCMA || x == SDPMediaFormatsEnum.G722)))
    {
        throw new ApplicationException("Only PCMA, PCMU and G722 are supported for audio codec options.");
    }

    // Initialise the video decoding objects. Even if we are not sourcing video
    // we need to be ready to receive and render.
    _vpxDecoder = new VpxEncoder();
    int res = _vpxDecoder.InitDecoder();
    if (res != 0)
    {
        throw new ApplicationException("VPX decoder initialisation failed.");
    }
    _imgConverter = new ImageConvert();

    if (_audioOpts.AudioSource != AudioSourcesEnum.None)
    {
        var pcmu = new SDPMediaFormat(SDPMediaFormatsEnum.PCMU);

        //// RTP event support.
        //int clockRate = pcmu.GetClockRate();
        //SDPMediaFormat rtpEventFormat = new SDPMediaFormat(DTMF_EVENT_PAYLOAD_ID);
        //rtpEventFormat.SetFormatAttribute($"{SDP.TELEPHONE_EVENT_ATTRIBUTE}/{clockRate}");
        //rtpEventFormat.SetFormatParameterAttribute("0-16");

        // PCMU is the fallback capability when no explicit codec list was supplied.
        var audioCapabilities = new List <SDPMediaFormat>();
        if (_audioOpts.AudioCodecs == null || _audioOpts.AudioCodecs.Count == 0)
        {
            audioCapabilities.Add(pcmu);
        }
        else
        {
            foreach (var codec in _audioOpts.AudioCodecs)
            {
                audioCapabilities.Add(new SDPMediaFormat(codec));
            }
        }
        //audioCapabilities.Add(rtpEventFormat);

        // G722 requires dedicated encode and decode codec state objects.
        if (audioCapabilities.Any(x => x.FormatCodec == SDPMediaFormatsEnum.G722))
        {
            _g722Encode = new G722Codec();
            _g722EncodeState = new G722CodecState(64000, G722Flags.None);
            _g722Decode = new G722Codec();
            _g722DecodeState = new G722CodecState(64000, G722Flags.None);
        }

        MediaStreamTrack audioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, audioCapabilities);
        addTrack(audioTrack);
    }

    if (_videoOpts.VideoSource != VideoSourcesEnum.None)
    {
        MediaStreamTrack videoTrack = new MediaStreamTrack(SDPMediaTypesEnum.video, false, new List <SDPMediaFormat> { new SDPMediaFormat(SDPMediaFormatsEnum.VP8) });
        addTrack(videoTrack);
    }

    // Where the magic (for processing received media) happens.
    base.OnRtpPacketReceived += RtpPacketReceived;
}
/// <summary>
/// Initialises the audio source as required.
/// </summary>
public override Task Start()
{
    lock (this)
    {
        // NOTE(review): IsStarted is never set in this method; presumably base.Start()
        // updates it — confirm, otherwise this body re-runs on every call.
        if (!IsStarted)
        {
            if (AudioLocalTrack == null || AudioLocalTrack.Capabilities == null || AudioLocalTrack.Capabilities.Count == 0)
            {
                throw new ApplicationException(
                    "Cannot start audio session without a local audio track being available.");
            }
            else if (AudioRemoteTrack == null || AudioRemoteTrack.Capabilities == null || AudioRemoteTrack.Capabilities.Count == 0)
            {
                throw new ApplicationException(
                    "Cannot start audio session without a remote audio track being available.");
            }

            // The negotiated sending codec determines the sample and RTP clock rates.
            _sendingFormat = base.GetSendingFormat(SDPMediaTypesEnum.audio);
            _sendingAudioSampleRate = SDPMediaFormatInfo.GetClockRate(_sendingFormat.FormatCodec);
            _sendingAudioRtpRate = SDPMediaFormatInfo.GetRtpClockRate(_sendingFormat.FormatCodec);

            Log.LogDebug($"RTP audio session selected sending codec {_sendingFormat.FormatCodec}.");

            // G722 requires dedicated encoder and decoder state objects.
            if (_sendingFormat.FormatCodec == SDPMediaFormatsEnum.G722)
            {
                _g722Codec = new G722Codec();
                _g722CodecState = new G722CodecState(G722_BIT_RATE, G722Flags.None);
                _g722Decoder = new G722Codec();
                _g722DecoderState = new G722CodecState(G722_BIT_RATE, G722Flags.None);
            }

            // If required start the audio source.
            if (_audioOpts != null && _audioOpts.AudioSource != AudioSourcesEnum.None)
            {
                if (_audioOpts.AudioSource == AudioSourcesEnum.Silence)
                {
                    _audioStreamTimer = new Timer(SendSilenceSample, null, 0, AUDIO_SAMPLE_PERIOD_MILLISECONDS);
                }
                else if (_audioOpts.AudioSource == AudioSourcesEnum.PinkNoise ||
                         _audioOpts.AudioSource == AudioSourcesEnum.WhiteNoise ||
                         _audioOpts.AudioSource == AudioSourcesEnum.SineWave)
                {
                    // Signal generator produces the synthetic sample stream.
                    _signalGenerator = new SignalGenerator(_sendingAudioSampleRate, 1);

                    switch (_audioOpts.AudioSource)
                    {
                        case AudioSourcesEnum.PinkNoise:
                            _signalGenerator.Type = SignalGeneratorType.Pink;
                            break;
                        case AudioSourcesEnum.SineWave:
                            _signalGenerator.Type = SignalGeneratorType.Sin;
                            break;
                        case AudioSourcesEnum.WhiteNoise:
                        default:
                            _signalGenerator.Type = SignalGeneratorType.White;
                            break;
                    }

                    _audioStreamTimer = new Timer(SendSignalGeneratorSample, null, 0, AUDIO_SAMPLE_PERIOD_MILLISECONDS);
                }
                else if (_audioOpts.AudioSource == AudioSourcesEnum.Music)
                {
                    // Music requires a per-codec source file; warn and skip if not configured.
                    if (_audioOpts.SourceFiles == null || !_audioOpts.SourceFiles.ContainsKey(_sendingFormat.FormatCodec))
                    {
                        Log.LogWarning($"Source file not set for codec {_sendingFormat.FormatCodec}.");
                    }
                    else
                    {
                        string sourceFile = _audioOpts.SourceFiles[_sendingFormat.FormatCodec];

                        if (String.IsNullOrEmpty(sourceFile) || !File.Exists(sourceFile))
                        {
                            Log.LogWarning(
                                "Could not start audio music source as the source file does not exist.");
                        }
                        else
                        {
                            _audioStreamReader = new StreamReader(sourceFile);
                            _audioStreamTimer = new Timer(SendMusicSample, null, 0, AUDIO_SAMPLE_PERIOD_MILLISECONDS);
                        }
                    }
                }
            }

            base.OnRtpPacketReceived += RtpPacketReceived;
        }

        return (base.Start());
    }
}
/// <summary>
/// Creates a SDP answer.
/// </summary>
/// <param name="streams">A list of SDP Media objects.</param>
/// <param name="offer">The SDP offer.</param>
/// <param name="parameters">Optional parameters (session-level i/e/p lines).</param>
/// <returns>The answer SDP, or null when no offered media stream could be accepted.</returns>
public static SDP CreateAnswer(List<SDPMedia> streams, SDP offer, Dictionary<string, string> parameters = null)
{
    SDP s = new SDP();
    s.Other["v"] = "0";

    // Fix: 'parameters' is optional and defaults to null; it was previously dereferenced
    // unconditionally, throwing NullReferenceException whenever it was omitted.
    if (parameters != null)
    {
        // Copy the optional i/e/p session-level lines from the supplied parameters.
        foreach (Char a in "iep")
        {
            if (parameters.ContainsKey(a.ToString()))
            {
                s.Other[a.ToString()] = parameters[a.ToString()];
            }
        }
    }

    s.Originator = new SDPOriginator();
    s.Other["s"] = "-";
    s.Other["t"] = offer.Other["t"];

    foreach (SDPMedia yourMedia in offer.Media)
    {
        SDPMedia myMedia = null;
        foreach (SDPMedia t in streams)
        {
            if (t.Media != yourMedia.Media) continue;
            myMedia = new SDPMedia(t.ToString());
            //streams.RemoveAt(i);

            // Pair each offered format with a local format matching either by (static)
            // payload type number or by name/rate/count.
            List<KeyValuePair<SDPMediaFormat, SDPMediaFormat>> found = new List<KeyValuePair<SDPMediaFormat, SDPMediaFormat>>();
            foreach (SDPMediaFormat yourmf in yourMedia.Mediaformats)
            {
                foreach (SDPMediaFormat mymf in myMedia.Mediaformats)
                {
                    int mymfpt = -1;
                    int yourmfpt = -1;
                    try
                    {
                        mymfpt = Int32.Parse(mymf.Pt);
                        yourmfpt = Int32.Parse(yourmf.Pt);
                    }
                    catch (Exception)
                    {
                        // Non-numeric payload types fall through to the name/rate/count match.
                        mymfpt = -1;
                        yourmfpt = -1;
                    }

                    // NOTE(review): the bounds differ ('< 32' vs '<= 32'); because equality
                    // is also required the asymmetry is harmless, but confirm 32 is the
                    // intended static payload type ceiling.
                    if ((0 <= mymfpt && mymfpt < 32 && 0 <= yourmfpt && yourmfpt <= 32 && mymfpt == yourmfpt) ||
                        (mymfpt < 0 && yourmfpt < 0 && mymfpt == yourmfpt) ||
                        (mymf.Name == yourmf.Name && mymf.Rate == yourmf.Rate && mymf.Count == yourmf.Count))
                    {
                        found.Add(new KeyValuePair<SDPMediaFormat, SDPMediaFormat>(yourmf, mymf));
                        break;
                    }
                }
            }

            if (found.Count > 0)
            {
                // NOTE(review): matched formats are APPENDED to the local list rather
                // than replacing it — confirm the answer should retain the unmatched
                // local formats as well.
                foreach (KeyValuePair<SDPMediaFormat, SDPMediaFormat> kvp in found)
                {
                    myMedia.Mediaformats.Add(kvp.Key);
                }
            }
            else
            {
                // No common format: answer with a single PT 0 entry and port 0 (stream rejected).
                myMedia.Mediaformats.Clear();
                SDPMediaFormat temp = new SDPMediaFormat { Pt = "0" };
                myMedia.Mediaformats.Add(temp);
                myMedia.Port = "0";
            }
        }

        if (myMedia == null)
        {
            // Media type not supported locally; reject it by echoing the line with port 0.
            myMedia = new SDPMedia(yourMedia.ToString()) { Port = "0" };
        }

        s.Media.Add(myMedia);
    }

    // The answer is only valid if at least one offered stream was accepted.
    bool valid = s.Media.Any(myMedia => myMedia.Port != "0");
    return valid ? s : null;
}
/// <summary>
/// Sets or updates the sources of the audio and/or video streams.
/// </summary>
/// <param name="audioOptions">Optional. If audio is being switched the new source options.
/// Set to null to leave the audio source unchanged.</param>
/// <param name="videoOptions">Optional. If video is being switched the new source options.
/// Set to null to leave the video source unchanged.</param>
/// <param name="disableExternalAudioSource">If true then no attempt will be made to use an external audio
/// source, e.g. microphone.</param>
public async Task SetSources(AudioOptions audioOptions, VideoOptions videoOptions, bool disableExternalAudioSource = false)
{
    _disableExternalAudioSource = disableExternalAudioSource;

    // Check whether the underlying media session has changed which dictates whether
    // an audio or video source needs to be removed.
    if (!HasAudio)
    {
        // Overrule any application supplied options as the session does not currently support audio.
        audioOptions = new AudioOptions { AudioSource = AudioSourcesEnum.None };
    }

    if (!HasVideo)
    {
        // Overrule any application supplied options as the session does not currently support video.
        videoOptions = new VideoOptions { VideoSource = VideoSourcesEnum.None };
    }

    if (audioOptions == null)
    {
        // Do nothing, audio source not being changed.
    }
    else if (audioOptions.AudioSource == AudioSourcesEnum.None)
    {
        // Audio source no longer required: stop capture, the stream timer and the file reader.
        _waveInEvent?.StopRecording();

        if (_audioStreamTimer != null)
        {
            _audioStreamTimer?.Dispose();

            // Give any currently executing audio sampling time to complete.
            await Task.Delay(AUDIO_SAMPLE_PERIOD_MILLISECONDS * 2).ConfigureAwait(false);
        }

        _audioStreamReader?.Close();
        _audioOpts = audioOptions;
    }
    else
    {
        // Switching to a new audio source: re-resolve the sending format first.
        _sendingAudioFormat = base.GetSendingFormat(SDPMediaTypesEnum.audio);
        SetAudioSource(audioOptions, _sendingAudioFormat);
        _audioOpts = audioOptions;
        StartAudio();
    }

    if (videoOptions == null)
    {
        // Do nothing, video source not being changed.
    }
    else if (videoOptions.VideoSource == VideoSourcesEnum.None)
    {
        // Video source no longer required.
        _testPatternVideoSource?.Stop();
        if (_videoOpts.BitmapSource != null)
        {
            // Unsubscribe so the old bitmap source no longer feeds this session.
            _videoOpts.BitmapSource.OnBitmap -= LocalBitmapAvailable;
        }
        _videoOpts = videoOptions;
    }
    else
    {
        await SetVideoSource(videoOptions).ConfigureAwait(false);
        _videoOpts = videoOptions;
        StartVideo();
    }
}
/// <summary>
/// Initialises the audio source as required.
/// </summary>
public override Task Start()
{
    lock (this)
    {
        if (!_isStarted)
        {
            _isStarted = true;

            if (AudioLocalTrack == null || AudioLocalTrack.Capabilties == null || AudioLocalTrack.Capabilties.Count == 0)
            {
                throw new ApplicationException("Cannot start audio session without a local audio track being available.");
            }
            else if (AudioRemoteTrack == null || AudioRemoteTrack.Capabilties == null || AudioRemoteTrack.Capabilties.Count == 0)
            {
                throw new ApplicationException("Cannot start audio session without a remote audio track being available.");
            }

            // Choose which codec to use.
            //_sendingFormat = AudioLocalTrack.Capabilties
            //    .Where(x => x.FormatID != DTMF_EVENT_PAYLOAD_ID.ToString() && int.TryParse(x.FormatID, out _))
            //    .OrderBy(x => int.Parse(x.FormatID)).First();
            _sendingFormat = base.GetSendingFormat(SDPMediaTypesEnum.audio);

            Log.LogDebug($"RTP audio session selected sending codec {_sendingFormat.FormatCodec}.");

            // G722 requires a dedicated codec state object.
            if (_sendingFormat.FormatCodec == SDPMediaFormatsEnum.G722)
            {
                _g722Codec = new G722Codec();
                _g722CodecState = new G722CodecState(64000, G722Flags.None);
            }

            // If required start the audio source.
            if (_audioOpts != null && _audioOpts.AudioSource != DummyAudioSourcesEnum.None)
            {
                if (_audioOpts.AudioSource == DummyAudioSourcesEnum.Silence)
                {
                    _audioStreamTimer = new Timer(SendSilenceSample, null, 0, AUDIO_SAMPLE_PERIOD_MILLISECONDS);
                }
                else if (_audioOpts.AudioSource == DummyAudioSourcesEnum.PinkNoise ||
                         _audioOpts.AudioSource == DummyAudioSourcesEnum.WhiteNoise ||
                         _audioOpts.AudioSource == DummyAudioSourcesEnum.SineWave)
                {
                    // Signal generator produces the synthetic sample stream.
                    _signalGenerator = new SignalGenerator(SAMPLE_RATE, 1);

                    switch (_audioOpts.AudioSource)
                    {
                        case DummyAudioSourcesEnum.PinkNoise:
                            _signalGenerator.Type = SignalGeneratorType.Pink;
                            break;
                        case DummyAudioSourcesEnum.SineWave:
                            _signalGenerator.Type = SignalGeneratorType.Sin;
                            break;
                        case DummyAudioSourcesEnum.WhiteNoise:
                        default:
                            _signalGenerator.Type = SignalGeneratorType.White;
                            break;
                    }

                    _audioStreamTimer = new Timer(SendNoiseSample, null, 0, AUDIO_SAMPLE_PERIOD_MILLISECONDS);
                }
                else if (_audioOpts.AudioSource == DummyAudioSourcesEnum.Music)
                {
                    // Music requires a per-codec source file; warn and skip if not configured.
                    if (_audioOpts.SourceFiles == null || !_audioOpts.SourceFiles.ContainsKey(_sendingFormat.FormatCodec))
                    {
                        Log.LogWarning($"Source file not set for codec {_sendingFormat.FormatCodec}.");
                    }
                    else
                    {
                        string sourceFile = _audioOpts.SourceFiles[_sendingFormat.FormatCodec];

                        if (String.IsNullOrEmpty(sourceFile) || !File.Exists(sourceFile))
                        {
                            Log.LogWarning("Could not start audio music source as the source file does not exist.");
                        }
                        else
                        {
                            _audioStreamReader = new StreamReader(sourceFile);
                            _audioStreamTimer = new Timer(SendMusicSample, null, 0, AUDIO_SAMPLE_PERIOD_MILLISECONDS);
                        }
                    }
                }
            }
        }

        return (base.Start());
    }
}