Example 1
        /// <summary>
        /// Sends audio samples read from a file.
        /// </summary>
        private void SendMusicSample(object state)
        {
            lock (_audioStreamReader)
            {
                int    sampleSize = (SDPMediaFormatInfo.GetClockRate(_sendingAudioFormat.FormatCodec) / 1000) * AUDIO_SAMPLE_PERIOD_MILLISECONDS;
                byte[] sample     = new byte[sampleSize];
                int    bytesRead  = _audioStreamReader.BaseStream.Read(sample, 0, sample.Length);

                if (bytesRead == 0 || _audioStreamReader.EndOfStream)
                {
                    _audioStreamReader.BaseStream.Position = 0;
                    bytesRead = _audioStreamReader.BaseStream.Read(sample, 0, sample.Length);
                }

                SendAudioFrame((uint)bytesRead, Convert.ToInt32(_sendingAudioFormat.FormatID), sample.Take(bytesRead).ToArray());

                #region On hold audio scope.

                if (OnHoldAudioScopeSampleReady != null)
                {
                    Complex[] ieeeSamples = new Complex[sample.Length];

                    // Decode each µ-law byte to a 16-bit PCM sample and normalise it
                    // to an IEEE float in the range [-1, 1] for the audio scope.
                    for (int index = 0; index < sample.Length; index++)
                    {
                        short pcm = NAudio.Codecs.MuLawDecoder.MuLawToLinearSample(sample[index]);
                        ieeeSamples[index] = pcm / 32768f;
                    }

                    OnHoldAudioScopeSampleReady(ieeeSamples);
                }

                #endregion
            }
        }
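
The method above is a timer callback: each tick reads one sample period's worth of encoded audio from the file and hands it to the RTP layer. A minimal sketch of the wiring, assuming a System.Threading.Timer and the field names used throughout these examples (the same pattern appears in Example 14):

        // Hypothetical helper showing how SendMusicSample is typically scheduled.
        private Timer _audioStreamTimer;

        private void StartMusicSource(string sourceFile)
        {
            _audioStreamReader = new StreamReader(sourceFile);

            // Invoke SendMusicSample immediately and then once every sample period.
            _audioStreamTimer = new Timer(SendMusicSample, null, 0, AUDIO_SAMPLE_PERIOD_MILLISECONDS);
        }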
Example 2
        private void AudioFormatsNegotiated(List <SDPMediaFormat> audioFormats)
        {
            var audioCodec = SDPMediaFormatInfo.GetAudioCodecForSdpFormat(audioFormats.First().FormatCodec);

            logger.LogDebug($"Setting audio source format to {audioCodec}.");
            AudioExtrasSource.SetAudioSourceFormat(audioCodec);
        }
Example 3
        private static Task <RTCPeerConnection> CreatePeerConnection()
        {
            RTCConfiguration config = new RTCConfiguration
            {
                iceServers = new List <RTCIceServer> {
                    new RTCIceServer {
                        urls = STUN_URL
                    }
                }
            };
            var pc = new RTCPeerConnection(config);

            var testPatternSource    = new VideoTestPatternSource();
            var videoEncoderEndPoint = new VideoEncoderEndPoint();
            var audioSource          = new AudioExtrasSource(new AudioEncoder(), new AudioSourceOptions {
                AudioSource = AudioSourcesEnum.Music
            });

            MediaStreamTrack videoTrack = new MediaStreamTrack(videoEncoderEndPoint.GetVideoSourceFormats(), MediaStreamStatusEnum.SendRecv);

            pc.addTrack(videoTrack);
            MediaStreamTrack audioTrack = new MediaStreamTrack(audioSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendRecv);

            pc.addTrack(audioTrack);

            testPatternSource.OnVideoSourceRawSample        += videoEncoderEndPoint.ExternalVideoSourceRawSample;
            videoEncoderEndPoint.OnVideoSourceEncodedSample += pc.SendVideo;
            audioSource.OnAudioSourceEncodedSample          += pc.SendAudio;
            pc.OnVideoFormatsNegotiated += (sdpFormat) =>
                                           videoEncoderEndPoint.SetVideoSourceFormat(SDPMediaFormatInfo.GetVideoCodecForSdpFormat(sdpFormat.First().FormatCodec));
            pc.onconnectionstatechange += async(state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.connected)
                {
                    await audioSource.StartAudio();

                    await testPatternSource.StartVideo();
                }
                else if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await testPatternSource.CloseVideo();

                    await audioSource.CloseAudio();
                }
            };

            // Diagnostics.
            pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            pc.OnSendReport    += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            return(Task.FromResult(pc));
        }
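
CreatePeerConnection returns before any SDP has been generated. A typical caller creates the offer and applies it as the local description before delivering it over the signalling channel, mirroring Example 4:

        var pc = await CreatePeerConnection();

        // The offer SDP still has to reach the remote peer out of band.
        var offerSdp = pc.createOffer(null);
        await pc.setLocalDescription(offerSdp);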
Example 4
        private static async Task <RTCPeerConnection> SendSDPOffer(WebSocketContext context)
        {
            logger.LogDebug($"Web socket client connection from {context.UserEndPoint}.");

            var pc = new RTCPeerConnection(null);

            AudioExtrasSource audioSource = new AudioExtrasSource(new AudioEncoder());

            audioSource.OnAudioSourceEncodedSample += pc.SendAudio;

            MediaStreamTrack audioTrack = new MediaStreamTrack(audioSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendOnly);

            pc.addTrack(audioTrack);

            pc.OnAudioFormatsNegotiated += (sdpFormat) =>
                                           audioSource.SetAudioSourceFormat(SDPMediaFormatInfo.GetAudioCodecForSdpFormat(sdpFormat.First().FormatCodec));
            pc.OnReceiveReport            += RtpSession_OnReceiveReport;
            pc.OnSendReport               += RtpSession_OnSendReport;
            pc.OnTimeout                  += (mediaType) => pc.Close("remote timeout");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            pc.onconnectionstatechange += (state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.connected)
                {
                    audioSource.SetSource(new AudioSourceOptions {
                        AudioSource = AudioSourcesEnum.SineWave
                    });
                }
                else if (state == RTCPeerConnectionState.disconnected || state == RTCPeerConnectionState.failed)
                {
                    pc.Close("remote disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    audioSource?.CloseAudio();
                    pc.OnReceiveReport -= RtpSession_OnReceiveReport;
                    pc.OnSendReport    -= RtpSession_OnSendReport;
                }
            };

            var offerSdp = pc.createOffer(null);
            await pc.setLocalDescription(offerSdp);

            logger.LogDebug($"Sending SDP offer to client {context.UserEndPoint}.");
            logger.LogDebug(offerSdp.sdp);

            context.WebSocket.Send(offerSdp.sdp);

            return(pc);
        }
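
The offer goes out over the web socket, so the client's answer has to come back the same way. A hedged sketch of the receive side, assuming websocket-sharp's OnMessage event delivers the raw SDP answer string:

        context.WebSocket.OnMessage += (sender, e) =>
        {
            // Assumption: the only message the client sends back is its SDP answer.
            pc.setRemoteDescription(new RTCSessionDescriptionInit { type = RTCSdpType.answer, sdp = e.Data });
        };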
Example 5
        /// <summary>
        /// Sends audio samples read from a file.
        /// </summary>
        private void SendMusicSample(object state)
        {
            int sampleSize = (SDPMediaFormatInfo.GetClockRate(SDPMediaFormatsEnum.PCMU) / 1000) * AUDIO_SAMPLE_PERIOD_MILLISECONDS;

            byte[] sample    = new byte[sampleSize];
            int    bytesRead = _audioStreamReader.BaseStream.Read(sample, 0, sample.Length);

            if (bytesRead == 0 || _audioStreamReader.EndOfStream)
            {
                _audioStreamReader.BaseStream.Position = 0;
                bytesRead = _audioStreamReader.BaseStream.Read(sample, 0, sample.Length);
            }

            SendAudioFrame((uint)bytesRead, (int)SDPMediaFormatsEnum.PCMU, sample.Take(bytesRead).ToArray());
        }
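
For PCMU the clock rate is 8000Hz and each µ-law sample is one byte, so a 20ms sample period yields a frame of (8000 / 1000) * 20 = 160 bytes.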
Example 6
        /// <summary>
        /// Starts the media capturing/source devices.
        /// </summary>
        public override async Task Start()
        {
            if (!_isStarted)
            {
                _sendingAudioFormat = base.GetSendingFormat(SDPMediaTypesEnum.audio);

                _isStarted = true;

                await base.Start();

                // Render device.
                _waveOutEvent = new WaveOutEvent();
                _waveOutEvent.DeviceNumber = AUDIO_OUTPUTDEVICE_INDEX;
                _waveProvider = new BufferedWaveProvider(_waveFormat);
                _waveProvider.DiscardOnBufferOverflow = true;
                _waveOutEvent.Init(_waveProvider);
                _waveOutEvent.Play();

                // Audio source.
                if (WaveInEvent.DeviceCount > 0)
                {
                    _waveInEvent = new WaveInEvent();
                    _waveInEvent.BufferMilliseconds = AUDIO_SAMPLE_PERIOD_MILLISECONDS;
                    _waveInEvent.NumberOfBuffers    = 1;
                    _waveInEvent.DeviceNumber       = 0;
                    _waveInEvent.WaveFormat         = _waveFormat;
                    _waveInEvent.DataAvailable     += LocalAudioSampleAvailable;

                    _waveInEvent.StartRecording();
                }
                else
                {
                    Log.LogWarning("No audio capture devices are available. No audio stream will be sent.");
                }

                if (_rtpAudioTimestampPeriod == 0)
                {
                    // One sample period's worth of samples at the codec clock rate,
                    // e.g. 8000 / 1000 * 20 = 160 for PCMU at a 20ms period.
                    _rtpAudioTimestampPeriod = (uint)(SDPMediaFormatInfo.GetClockRate(SDPMediaFormatsEnum.PCMU) / 1000 * AUDIO_SAMPLE_PERIOD_MILLISECONDS);
                }
            }
        }
Example 7
        public void Start()
        {
            _stop = false;

            if (_samplesTask == null || _samplesTask.Status != TaskStatus.Running)
            {
                logger.Debug("Music on hold samples task starting.");

                _samplesTask = Task.Run(async() =>
                {
                    // Read the same file in an endless loop while samples are still required.
                    while (!_stop)
                    {
                        using (StreamReader sr = new StreamReader(AUDIO_FILE_PCMU))
                        {
                            int sampleSize = (SDPMediaFormatInfo.GetClockRate(SDPMediaFormatsEnum.PCMU) / 1000) * AUDIO_SAMPLE_PERIOD_MILLISECONDS;
                            byte[] sample  = new byte[sampleSize];
                            int bytesRead  = sr.BaseStream.Read(sample, 0, sample.Length);

                            while (bytesRead > 0 && !_stop)
                            {
                                if (OnAudioSampleReady == null)
                                {
                                    // Nobody needs music on hold so exit.
                                    logger.Debug("Music on hold has no subscribers, stopping.");
                                    return;
                                }
                                else
                                {
                                    OnAudioSampleReady(sample);
                                }

                                await Task.Delay(AUDIO_SAMPLE_PERIOD_MILLISECONDS);
                                bytesRead = sr.BaseStream.Read(sample, 0, sample.Length);
                            }
                        }
                    }
                });
            }
        }
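
The loop is governed entirely by the _stop flag, so the matching stop method, assuming the field shown above, can be as simple as:

        public void Stop()
        {
            // Both while loops in the samples task observe this flag and exit on the next pass.
            _stop = true;
        }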
Example 8
        /// <summary>
        /// Initialise the audio capture and render device.
        /// </summary>
        /// <param name="audioSourceOpts">The options that dictate the type of audio source to use.</param>
        /// <param name="sendingFormat">The codec that will be used to send the audio.</param>
        private void SetAudioSource(AudioOptions audioSourceOpts, SDPMediaFormat sendingFormat)
        {
            uint sampleRate       = (uint)SDPMediaFormatInfo.GetClockRate(sendingFormat.FormatCodec);
            uint rtpTimestampRate = (uint)SDPMediaFormatInfo.GetRtpClockRate(sendingFormat.FormatCodec);

            _rtpAudioTimestampPeriod = rtpTimestampRate * AUDIO_SAMPLE_PERIOD_MILLISECONDS / 1000;

            WaveFormat waveFormat = new WaveFormat((int)sampleRate, BITS_PER_SAMPLE, CHANNEL_COUNT);

            // Render device.
            if (_waveOutEvent == null)
            {
                _waveOutEvent = new WaveOutEvent();
                _waveOutEvent.DeviceNumber = (_audioOpts != null) ? _audioOpts.OutputDeviceIndex : AudioOptions.DEFAULT_OUTPUTDEVICE_INDEX;
                _waveProvider = new BufferedWaveProvider(waveFormat);
                _waveProvider.DiscardOnBufferOverflow = true;
                _waveOutEvent.Init(_waveProvider);
            }

            // Audio source.
            if (!_disableExternalAudioSource)
            {
                if (_waveInEvent == null)
                {
                    if (WaveInEvent.DeviceCount > 0)
                    {
                        _waveInEvent = new WaveInEvent();
                        _waveInEvent.BufferMilliseconds = AUDIO_SAMPLE_PERIOD_MILLISECONDS;
                        _waveInEvent.NumberOfBuffers    = INPUT_BUFFERS;
                        _waveInEvent.DeviceNumber       = 0;
                        _waveInEvent.WaveFormat         = waveFormat;
                        _waveInEvent.DataAvailable     += LocalAudioSampleAvailable;
                    }
                    else
                    {
                        Log.LogWarning("No audio capture devices are available. No audio stream will be sent.");
                    }
                }
            }
        }
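
The separate GetClockRate and GetRtpClockRate lookups matter for codecs such as G722, which samples at 16kHz but, per RFC 3551, advertises an 8kHz RTP timestamp clock for historical reasons. For PCMU and PCMA the two rates are identical.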
Example 9
        /// <summary>
        /// Initialise the video capture and render device.
        /// </summary>
        private async Task SetVideoSource(VideoOptions videoSourceOpts)
        {
            if (videoSourceOpts.VideoSource != VideoSourcesEnum.ExternalBitmap && _videoOpts.BitmapSource != null)
            {
                _videoOpts.BitmapSource.OnBitmap -= LocalBitmapAvailable;
            }

            if (videoSourceOpts.VideoSource != VideoSourcesEnum.TestPattern && _testPatternVideoSource != null)
            {
                _testPatternVideoSource.SampleReady -= LocalVideoSampleAvailable;
                _testPatternVideoSource.Stop();
                _testPatternVideoSource = null;
            }

            if (videoSourceOpts.VideoSource == VideoSourcesEnum.TestPattern)
            {
                if (_testPatternVideoSource == null)
                {
                    _testPatternVideoSource              = new TestPatternVideoSource(videoSourceOpts.SourceFile, videoSourceOpts.SourceFramesPerSecond);
                    _testPatternVideoSource.SampleReady += LocalVideoSampleAvailable;
                }
                else
                {
                    await _testPatternVideoSource.SetSource(videoSourceOpts.SourceFile, videoSourceOpts.SourceFramesPerSecond).ConfigureAwait(false);
                }

                if (_testPatternVideoSource.FramesPerSecond != 0)
                {
                    _rtpVideoTimestampPeriod = (uint)(SDPMediaFormatInfo.GetClockRate(SDPMediaFormatsEnum.VP8) / _testPatternVideoSource.FramesPerSecond);
                }
                else
                {
                    _rtpVideoTimestampPeriod = (uint)(SDPMediaFormatInfo.GetClockRate(SDPMediaFormatsEnum.VP8) / TestPatternVideoSource.DEFAULT_FRAMES_PER_SECOND);
                }
            }
            else if (videoSourceOpts.VideoSource == VideoSourcesEnum.ExternalBitmap)
            {
                videoSourceOpts.BitmapSource.OnBitmap += LocalBitmapAvailable;
            }
        }
Example 10
        /// <summary>
        /// Starts the media capturing/source devices.
        /// </summary>
        public override async Task Start()
        {
            if (!_isStarted)
            {
                _sendingAudioFormat = base.GetSendingFormat(SDPMediaTypesEnum.audio);

                _isStarted = true;

                await base.Start();

                PortAudio.Initialize();

                var outputDevice = PortAudio.DefaultOutputDevice;
                if (outputDevice == PortAudio.NoDevice)
                {
                    throw new ApplicationException("No audio output device available.");
                }
                else
                {
                    StreamParameters stmInParams = new StreamParameters {
                        device = 0, channelCount = 2, sampleFormat = SampleFormat.Float32
                    };
                    StreamParameters stmOutParams = new StreamParameters {
                        device = outputDevice, channelCount = 2, sampleFormat = SampleFormat.Float32
                    };

                    // Combined audio capture and render.
                    _audioIOStream = new Stream(stmInParams, stmOutParams, AUDIO_SAMPLING_RATE, AUDIO_SAMPLE_BUFFER_LENGTH, StreamFlags.NoFlag, AudioSampleAvailable, null);
                    _audioIOStream.Start();
                }

                if (_rtpAudioTimestampPeriod == 0)
                {
                    _rtpAudioTimestampPeriod = (uint)(SDPMediaFormatInfo.GetClockRate(SDPMediaFormatsEnum.PCMU) / AUDIO_SAMPLE_BUFFER_LENGTH);
                }
            }
        }
Example 11
        /// <summary>
        /// Sends audio samples read from a file.
        /// </summary>
        private void SendMusicSample(object state)
        {
            if (!_streamSendInProgress)
            {
                lock (_audioStreamTimer)
                {
                    int    sampleRate = SDPMediaFormatInfo.GetRtpClockRate(_sendingFormat.FormatCodec);
                    int    sampleSize = sampleRate / 1000 * AUDIO_SAMPLE_PERIOD_MILLISECONDS;
                    byte[] sample     = new byte[sampleSize];

                    int bytesRead = _audioStreamReader.BaseStream.Read(sample, 0, sample.Length);

                    if (bytesRead > 0)
                    {
                        // Send only the bytes actually read so a short read near the end
                        // of the file doesn't pad the frame with zeroed data.
                        SendAudioFrame((uint)bytesRead, (int)_sendingFormat.FormatCodec, sample.Take(bytesRead).ToArray());
                    }

                    if (bytesRead == 0 || _audioStreamReader.EndOfStream)
                    {
                        _audioStreamReader.BaseStream.Position = 0;
                    }
                }
            }
        }
Example 12
        private static async Task <RTCPeerConnection> SendSDPOffer(WebSocketContext context)
        {
            logger.LogDebug($"Web socket client connection from {context.UserEndPoint}.");

            var peerConnection = new RTCPeerConnection(null);

            // Sink (speaker) only audio end point.
            WindowsAudioEndPoint windowsAudioEP = new WindowsAudioEndPoint(new AudioEncoder(), -1, -1, true, false);

            MediaStreamTrack audioTrack = new MediaStreamTrack(windowsAudioEP.GetAudioSinkFormats(), MediaStreamStatusEnum.RecvOnly);

            peerConnection.addTrack(audioTrack);

            peerConnection.OnAudioFormatsNegotiated += (sdpFormat) =>
                                                       windowsAudioEP.SetAudioSinkFormat(SDPMediaFormatInfo.GetAudioCodecForSdpFormat(sdpFormat.First().FormatCodec));
            peerConnection.OnReceiveReport            += RtpSession_OnReceiveReport;
            peerConnection.OnSendReport               += RtpSession_OnSendReport;
            peerConnection.OnTimeout                  += (mediaType) => logger.LogDebug($"Timeout on media {mediaType}.");
            peerConnection.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state changed to {state}.");
            peerConnection.onconnectionstatechange    += async(state) =>
            {
                logger.LogDebug($"Peer connection state changed to {state}.");

                if (state == RTCPeerConnectionState.connected)
                {
                    await windowsAudioEP.StartAudio();
                }
                else if (state == RTCPeerConnectionState.closed || state == RTCPeerConnectionState.failed)
                {
                    peerConnection.OnReceiveReport -= RtpSession_OnReceiveReport;
                    peerConnection.OnSendReport    -= RtpSession_OnSendReport;

                    await windowsAudioEP.CloseAudio();
                }
            };

            peerConnection.OnRtpPacketReceived += (IPEndPoint rep, SDPMediaTypesEnum media, RTPPacket rtpPkt) =>
            {
                //logger.LogDebug($"RTP {media} pkt received, SSRC {rtpPkt.Header.SyncSource}.");
                if (media == SDPMediaTypesEnum.audio)
                {
                    windowsAudioEP.GotAudioRtp(rep, rtpPkt.Header.SyncSource, rtpPkt.Header.SequenceNumber, rtpPkt.Header.Timestamp, rtpPkt.Header.PayloadType, rtpPkt.Header.MarkerBit == 1, rtpPkt.Payload);
                }
            };

            var offerSdp = peerConnection.createOffer(null);
            await peerConnection.setLocalDescription(offerSdp);

            logger.LogDebug($"Sending SDP offer to client {context.UserEndPoint}.");
            logger.LogDebug(offerSdp.sdp);

            context.WebSocket.Send(offerSdp.sdp);

            return(peerConnection);
        }
Example 13
        private static async Task <RTCPeerConnection> CreatePeerConnection()
        {
            RTCConfiguration config = new RTCConfiguration
            {
                iceServers = new List <RTCIceServer> {
                    new RTCIceServer {
                        urls = STUN_URL
                    }
                }
            };
            var pc = new RTCPeerConnection(config);

            WindowsVideoEndPoint winVideoEP = new WindowsVideoEndPoint(WEBCAM_NAME);
            bool initResult = await winVideoEP.InitialiseVideoSourceDevice();

            if (!initResult)
            {
                throw new ApplicationException("Could not initialise video capture device.");
            }

            //WindowsVideoEndPoint winVideoEP = new WindowsVideoEndPoint(false, 640, 480, 30);
            //WindowsVideoEndPoint winVideoEP = new WindowsVideoEndPoint(false, 1920, 1080, 30);
            //await winVideoEP.InitialiseVideoSourceDevice();
            var audioSource = new AudioExtrasSource(new AudioEncoder(), new AudioSourceOptions {
                AudioSource = AudioSourcesEnum.Music
            });

            MediaStreamTrack videoTrack = new MediaStreamTrack(winVideoEP.GetVideoSourceFormats(), MediaStreamStatusEnum.SendRecv);

            pc.addTrack(videoTrack);
            MediaStreamTrack audioTrack = new MediaStreamTrack(audioSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendRecv);

            pc.addTrack(audioTrack);

            winVideoEP.OnVideoSourceEncodedSample  += pc.SendVideo;
            audioSource.OnAudioSourceEncodedSample += pc.SendAudio;
            pc.OnVideoFormatsNegotiated            += (sdpFormat) =>
                                                      winVideoEP.SetVideoSourceFormat(SDPMediaFormatInfo.GetVideoCodecForSdpFormat(sdpFormat.First().FormatCodec));
            pc.onconnectionstatechange += async(state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.connected)
                {
                    await audioSource.StartAudio();

                    await winVideoEP.StartVideo();
                }
                else if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await winVideoEP.CloseVideo();

                    await audioSource.CloseAudio();
                }
            };

            // Diagnostics.
            pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            pc.OnSendReport    += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            return(pc);
        }
Example 14
        /// <summary>
        /// Initialises the audio source as required.
        /// </summary>
        public override Task Start()
        {
            lock (this)
            {
                if (!IsStarted)
                {
                    if (AudioLocalTrack == null || AudioLocalTrack.Capabilities == null ||
                        AudioLocalTrack.Capabilities.Count == 0)
                    {
                        throw new ApplicationException(
                                  "Cannot start audio session without a local audio track being available.");
                    }
                    else if (AudioRemoteTrack == null || AudioRemoteTrack.Capabilities == null ||
                             AudioRemoteTrack.Capabilities.Count == 0)
                    {
                        throw new ApplicationException(
                                  "Cannot start audio session without a remote audio track being available.");
                    }

                    _sendingFormat          = base.GetSendingFormat(SDPMediaTypesEnum.audio);
                    _sendingAudioSampleRate = SDPMediaFormatInfo.GetClockRate(_sendingFormat.FormatCodec);
                    _sendingAudioRtpRate    = SDPMediaFormatInfo.GetRtpClockRate(_sendingFormat.FormatCodec);

                    Log.LogDebug($"RTP audio session selected sending codec {_sendingFormat.FormatCodec}.");

                    if (_sendingFormat.FormatCodec == SDPMediaFormatsEnum.G722)
                    {
                        _g722Codec        = new G722Codec();
                        _g722CodecState   = new G722CodecState(G722_BIT_RATE, G722Flags.None);
                        _g722Decoder      = new G722Codec();
                        _g722DecoderState = new G722CodecState(G722_BIT_RATE, G722Flags.None);
                    }

                    // If required start the audio source.
                    if (_audioOpts != null && _audioOpts.AudioSource != AudioSourcesEnum.None)
                    {
                        if (_audioOpts.AudioSource == AudioSourcesEnum.Silence)
                        {
                            _audioStreamTimer = new Timer(SendSilenceSample, null, 0, AUDIO_SAMPLE_PERIOD_MILLISECONDS);
                        }
                        else if (_audioOpts.AudioSource == AudioSourcesEnum.PinkNoise ||
                                 _audioOpts.AudioSource == AudioSourcesEnum.WhiteNoise ||
                                 _audioOpts.AudioSource == AudioSourcesEnum.SineWave)
                        {
                            _signalGenerator = new SignalGenerator(_sendingAudioSampleRate, 1);

                            switch (_audioOpts.AudioSource)
                            {
                            case AudioSourcesEnum.PinkNoise:
                                _signalGenerator.Type = SignalGeneratorType.Pink;
                                break;

                            case AudioSourcesEnum.SineWave:
                                _signalGenerator.Type = SignalGeneratorType.Sin;
                                break;

                            case AudioSourcesEnum.WhiteNoise:
                            default:
                                _signalGenerator.Type = SignalGeneratorType.White;
                                break;
                            }

                            _audioStreamTimer = new Timer(SendSignalGeneratorSample, null, 0,
                                                          AUDIO_SAMPLE_PERIOD_MILLISECONDS);
                        }
                        else if (_audioOpts.AudioSource == AudioSourcesEnum.Music)
                        {
                            if (_audioOpts.SourceFiles == null ||
                                !_audioOpts.SourceFiles.ContainsKey(_sendingFormat.FormatCodec))
                            {
                                Log.LogWarning($"Source file not set for codec {_sendingFormat.FormatCodec}.");
                            }
                            else
                            {
                                string sourceFile = _audioOpts.SourceFiles[_sendingFormat.FormatCodec];

                                if (String.IsNullOrEmpty(sourceFile) || !File.Exists(sourceFile))
                                {
                                    Log.LogWarning(
                                        "Could not start audio music source as the source file does not exist.");
                                }
                                else
                                {
                                    _audioStreamReader = new StreamReader(sourceFile);
                                    _audioStreamTimer  = new Timer(SendMusicSample, null, 0,
                                                                   AUDIO_SAMPLE_PERIOD_MILLISECONDS);
                                }
                            }
                        }
                    }

                    base.OnRtpPacketReceived += RtpPacketReceived;
                }

                return(base.Start());
            }
        }
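
Example 14 schedules three different timer callbacks; SendMusicSample appears in Example 1. A minimal sketch of SendSilenceSample for the PCMU case, assuming that a µ-law encoded zero-amplitude sample is the byte 0xFF (a real implementation would also handle PCMA and G722):

        private void SendSilenceSample(object state)
        {
            int sampleSize = _sendingAudioRtpRate / 1000 * AUDIO_SAMPLE_PERIOD_MILLISECONDS;
            byte[] silence = new byte[sampleSize];

            // 0xFF is the µ-law encoding of a zero PCM sample, i.e. silence on the wire.
            for (int index = 0; index < silence.Length; index++)
            {
                silence[index] = 0xFF;
            }

            SendAudioFrame((uint)sampleSize, (int)_sendingFormat.FormatCodec, silence);
        }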
Example 15
        private static Task <RTCPeerConnection> CreatePeerConnection()
        {
            var peerConnection = new RTCPeerConnection(null);

            var videoEP = new SIPSorceryMedia.Encoders.VideoEncoderEndPoint();

            //var videoEP = new SIPSorceryMedia.Windows.WindowsEncoderEndPoint();
            //var videoEP = new FFmpegVideoEndPoint();
            videoEP.RestrictCodecs(new List <VideoCodecsEnum> {
                VideoCodecsEnum.VP8
            });

            videoEP.OnVideoSinkDecodedSample += (byte[] bmp, uint width, uint height, int stride, VideoPixelFormatsEnum pixelFormat) =>
            {
                _form.BeginInvoke(new Action(() =>
                {
                    unsafe
                    {
                        fixed(byte *s = bmp)
                        {
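                            // NB: this Bitmap constructor does not copy the pixel data; the
                            // buffer must stay referenced for as long as the Bitmap is shown.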
                            Bitmap bmpImage = new Bitmap((int)width, (int)height, (int)(bmp.Length / height), PixelFormat.Format24bppRgb, (IntPtr)s);
                            _picBox.Image   = bmpImage;
                        }
                    }
                }));
            };

            // Sink (speaker) only audio end point.
            WindowsAudioEndPoint windowsAudioEP = new WindowsAudioEndPoint(new AudioEncoder(), -1, -1, true, false);

            MediaStreamTrack audioTrack = new MediaStreamTrack(windowsAudioEP.GetAudioSinkFormats(), MediaStreamStatusEnum.RecvOnly);

            peerConnection.addTrack(audioTrack);
            MediaStreamTrack videoTrack = new MediaStreamTrack(videoEP.GetVideoSinkFormats(), MediaStreamStatusEnum.RecvOnly);

            peerConnection.addTrack(videoTrack);

            peerConnection.OnVideoFrameReceived     += videoEP.GotVideoFrame;
            peerConnection.OnVideoFormatsNegotiated += (sdpFormat) => videoEP.SetVideoSinkFormat(SDPMediaFormatInfo.GetVideoCodecForSdpFormat(sdpFormat.First().FormatCodec));
            peerConnection.OnAudioFormatsNegotiated += (sdpFormat) =>
                                                       windowsAudioEP.SetAudioSinkFormat(SDPMediaFormatInfo.GetAudioCodecForSdpFormat(sdpFormat.First().FormatCodec));

            peerConnection.OnTimeout += (mediaType) => logger.LogDebug($"Timeout on media {mediaType}.");
            peerConnection.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state changed to {state}.");
            peerConnection.onconnectionstatechange    += async(state) =>
            {
                logger.LogDebug($"Peer connection state changed to {state}.");

                if (state == RTCPeerConnectionState.connected)
                {
                    await windowsAudioEP.StartAudio();
                }
                else if (state == RTCPeerConnectionState.closed || state == RTCPeerConnectionState.failed)
                {
                    await windowsAudioEP.CloseAudio();
                }
            };

            peerConnection.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) =>
            {
                bool hasUseCandidate = msg.Attributes.Any(x => x.AttributeType == STUNAttributeTypesEnum.UseCandidate);
                Console.WriteLine($"STUN {msg.Header.MessageType} received from {ep}, use candidate {hasUseCandidate}.");
            };

            peerConnection.OnRtpPacketReceived += (IPEndPoint rep, SDPMediaTypesEnum media, RTPPacket rtpPkt) =>
            {
                //logger.LogDebug($"RTP {media} pkt received, SSRC {rtpPkt.Header.SyncSource}.");
                if (media == SDPMediaTypesEnum.audio)
                {
                    windowsAudioEP.GotAudioRtp(rep, rtpPkt.Header.SyncSource, rtpPkt.Header.SequenceNumber, rtpPkt.Header.Timestamp, rtpPkt.Header.PayloadType, rtpPkt.Header.MarkerBit == 1, rtpPkt.Payload);
                }
            };

            return(Task.FromResult(peerConnection));
        }
Example 16
        private static Task <RTCPeerConnection> CreatePeerConnection()
        {
            RTCConfiguration config = new RTCConfiguration
            {
                iceServers = new List <RTCIceServer> {
                    new RTCIceServer {
                        urls = STUN_URL
                    }
                }
            };
            var pc = new RTCPeerConnection(config);

            var mediaFileSource = new SIPSorceryMedia.FFmpeg.FFmpegFileSource(MP4_PATH, false, new AudioEncoder());

            mediaFileSource.Initialise();
            mediaFileSource.RestrictCodecs(new List <VideoCodecsEnum> {
                VideoCodecsEnum.VP8
            });
            mediaFileSource.RestrictCodecs(new List <AudioCodecsEnum> {
                AudioCodecsEnum.PCMU
            });
            mediaFileSource.OnEndOfFile += () => pc.Close("source eof");

            MediaStreamTrack videoTrack = new MediaStreamTrack(mediaFileSource.GetVideoSourceFormats(), MediaStreamStatusEnum.SendRecv);

            pc.addTrack(videoTrack);
            MediaStreamTrack audioTrack = new MediaStreamTrack(mediaFileSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendRecv);

            pc.addTrack(audioTrack);

            mediaFileSource.OnVideoSourceEncodedSample += pc.SendVideo;
            mediaFileSource.OnAudioSourceEncodedSample += pc.SendAudio;
            pc.OnVideoFormatsNegotiated += (sdpFormat) => mediaFileSource.SetVideoSourceFormat(SDPMediaFormatInfo.GetVideoCodecForSdpFormat(sdpFormat.First().FormatCodec));
            pc.OnAudioFormatsNegotiated += (sdpFormat) => mediaFileSource.SetAudioSourceFormat(SDPMediaFormatInfo.GetAudioCodecForSdpFormat(sdpFormat.First().FormatCodec));

            pc.onconnectionstatechange += async(state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await mediaFileSource.CloseVideo();
                }
                else if (state == RTCPeerConnectionState.connected)
                {
                    await mediaFileSource.StartVideo();
                }
            };

            // Diagnostics.
            //pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            //pc.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            //pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            return(Task.FromResult(pc));
        }
Example 17
        private static Task <RTCPeerConnection> CreatePeerConnection()
        {
            RTCConfiguration config = new RTCConfiguration
            {
                iceServers = new List <RTCIceServer> {
                    new RTCIceServer {
                        urls = STUN_URL
                    }
                }
            };
            //var pc = new RTCPeerConnection(config);
            var pc = new RTCPeerConnection(null);

            var testPatternSource = new VideoTestPatternSource();

            //testPatternSource.SetFrameRate(60);
            testPatternSource.SetMaxFrameRate(true);
            var videoEndPoint = new SIPSorceryMedia.FFmpeg.FFmpegVideoEndPoint();

            videoEndPoint.RestrictCodecs(new List <VideoCodecsEnum> {
                VideoCodecsEnum.H264
            });
            //var videoEndPoint = new SIPSorceryMedia.Windows.WindowsVideoEndPoint(true);
            //var videoEndPoint = new SIPSorceryMedia.Windows.WindowsEncoderEndPoint();
            //var videoEndPoint = new SIPSorceryMedia.Encoders.VideoEncoderEndPoint();

            MediaStreamTrack track = new MediaStreamTrack(videoEndPoint.GetVideoSourceFormats(), MediaStreamStatusEnum.SendOnly);

            pc.addTrack(track);

            testPatternSource.OnVideoSourceRawSample += videoEndPoint.ExternalVideoSourceRawSample;
            testPatternSource.OnVideoSourceRawSample += TestPatternSource_OnVideoSourceRawSample;
            videoEndPoint.OnVideoSourceEncodedSample += pc.SendVideo;
            pc.OnVideoFormatsNegotiated += (sdpFormat) => videoEndPoint.SetVideoSourceFormat(SDPMediaFormatInfo.GetVideoCodecForSdpFormat(sdpFormat.First().FormatCodec));

            pc.onconnectionstatechange += async(state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await testPatternSource.CloseVideo();

                    await videoEndPoint.CloseVideo();
                }
                else if (state == RTCPeerConnectionState.connected)
                {
                    await videoEndPoint.StartVideo();

                    await testPatternSource.StartVideo();
                }
            };

            // Diagnostics.
            //pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            //pc.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            //pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            return(Task.FromResult(pc));
        }
Example 18
        private static Task <RTCPeerConnection> CreatePeerConnection()
        {
            //var videoEP = new SIPSorceryMedia.Windows.WindowsVideoEndPoint();
            var videoEP = new SIPSorceryMedia.FFmpeg.FFmpegVideoEndPoint();

            videoEP.RestrictCodecs(new List <VideoCodecsEnum> {
                VideoCodecsEnum.VP8
            });

            videoEP.OnVideoSinkDecodedSample += (byte[] bmp, uint width, uint height, int stride, VideoPixelFormatsEnum pixelFormat) =>
            {
                _form.BeginInvoke(new Action(() =>
                {
                    unsafe
                    {
                        fixed(byte *s = bmp)
                        {
                            Bitmap bmpImage = new Bitmap((int)width, (int)height, (int)(bmp.Length / height), PixelFormat.Format24bppRgb, (IntPtr)s);
                            _picBox.Image   = bmpImage;
                        }
                    }
                }));
            };

            RTCConfiguration config = new RTCConfiguration
            {
                iceServers = new List <RTCIceServer> {
                    new RTCIceServer {
                        urls = STUN_URL
                    }
                }
            };
            var pc = new RTCPeerConnection(config);

            // Add local receive only tracks. This ensures that the SDP answer includes only the codecs we support.
            MediaStreamTrack audioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, new List <SDPMediaFormat> {
                new SDPMediaFormat(SDPMediaFormatsEnum.PCMU)
            }, MediaStreamStatusEnum.RecvOnly);

            pc.addTrack(audioTrack);
            MediaStreamTrack videoTrack = new MediaStreamTrack(videoEP.GetVideoSinkFormats(), MediaStreamStatusEnum.RecvOnly);

            pc.addTrack(videoTrack);

            pc.OnVideoFrameReceived     += videoEP.GotVideoFrame;
            pc.OnVideoFormatsNegotiated += (sdpFormat) => videoEP.SetVideoSinkFormat(SDPMediaFormatInfo.GetVideoCodecForSdpFormat(sdpFormat.First().FormatCodec));

            pc.onconnectionstatechange += async(state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await videoEP.CloseVideo();
                }
            };

            // Diagnostics.
            //pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            //pc.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            //pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            return(Task.FromResult(pc));
        }
Example 19
        private Task <RTCPeerConnection> CreatePeerConnection(string url)
        {
            RTCConfiguration config = new RTCConfiguration
            {
                iceServers = new List <RTCIceServer> {
                    new RTCIceServer {
                        urls = STUN_URL
                    }
                }
            };
            var pc = new RTCPeerConnection(config);

            //mediaFileSource.OnEndOfFile += () => pc.Close("source eof");

            MediaStreamTrack videoTrack = new MediaStreamTrack(new List <VideoCodecsEnum> {
                VIDEO_CODEC
            }, MediaStreamStatusEnum.SendOnly);

            pc.addTrack(videoTrack);
            MediaStreamTrack audioTrack = new MediaStreamTrack(new List <AudioCodecsEnum> {
                AUDIO_CODEC
            }, MediaStreamStatusEnum.SendOnly);

            pc.addTrack(audioTrack);

            IVideoSource videoSource = null;
            IAudioSource audioSource = null;

            if (url == MAX_URL)
            {
                videoSource = _maxSource;
                audioSource = _maxSource;
            }
            else
            {
                videoSource = _testPatternEncoder;
                audioSource = _musicSource;
            }

            pc.OnVideoFormatsNegotiated            += (sdpFormat) => videoSource.SetVideoSourceFormat(SDPMediaFormatInfo.GetVideoCodecForSdpFormat(sdpFormat.First().FormatCodec));
            pc.OnAudioFormatsNegotiated            += (sdpFormat) => audioSource.SetAudioSourceFormat(SDPMediaFormatInfo.GetAudioCodecForSdpFormat(sdpFormat.First().FormatCodec));
            videoSource.OnVideoSourceEncodedSample += pc.SendVideo;
            audioSource.OnAudioSourceEncodedSample += pc.SendAudio;

            pc.onconnectionstatechange += async(state) =>
            {
                _logger.LogInformation($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    videoSource.OnVideoSourceEncodedSample -= pc.SendVideo;
                    audioSource.OnAudioSourceEncodedSample -= pc.SendAudio;
                    await CheckForSourceSubscribers();
                }
                else if (state == RTCPeerConnectionState.connected)
                {
                    await StartSource(url);
                }
            };

            // Diagnostics.
            //pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            //pc.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            //pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            pc.oniceconnectionstatechange += (state) => _logger.LogInformation($"ICE connection state change to {state}.");

            return(Task.FromResult(pc));
        }
Example 20
        private static Task <RTCPeerConnection> Createpc(WebSocketContext context, RTCIceServer stunServer, bool relayOnly)
        {
            if (_peerConnection != null)
            {
                _peerConnection.Close("normal");
            }

            List <RTCCertificate> presetCertificates = null;

            if (File.Exists(LOCALHOST_CERTIFICATE_PATH))
            {
                var localhostCert = new X509Certificate2(LOCALHOST_CERTIFICATE_PATH, (string)null, X509KeyStorageFlags.Exportable);
                presetCertificates = new List <RTCCertificate> {
                    new RTCCertificate {
                        Certificate = localhostCert
                    }
                };
            }

            RTCConfiguration pcConfiguration = new RTCConfiguration
            {
                certificates = presetCertificates,
                //X_RemoteSignallingAddress = (context != null) ? context.UserEndPoint.Address : null,
                iceServers = stunServer != null ? new List <RTCIceServer> {
                    stunServer
                } : null,
                //iceTransportPolicy = RTCIceTransportPolicy.all,
                iceTransportPolicy = relayOnly ? RTCIceTransportPolicy.relay : RTCIceTransportPolicy.all,
                //X_BindAddress = IPAddress.Any, // NOTE: Not reqd. Using this to filter out IPv6 addresses so can test with Pion.
            };

            _peerConnection = new RTCPeerConnection(pcConfiguration);

            //_peerConnection.GetRtpChannel().MdnsResolve = (hostname) => Task.FromResult(NetServices.InternetDefaultAddress);
            _peerConnection.GetRtpChannel().MdnsResolve = MdnsResolve;
            //_peerConnection.GetRtpChannel().OnStunMessageReceived += (msg, ep, isrelay) => logger.LogDebug($"STUN message received from {ep}, message type {msg.Header.MessageType}.");

            var dc = _peerConnection.createDataChannel(DATA_CHANNEL_LABEL, null);

            dc.onmessage += (msg) => logger.LogDebug($"data channel receive ({dc.label}-{dc.id}): {msg}");

            // Add a send-only audio track (this doesn't require any native libraries for encoding so is good for x-platform testing).
            AudioExtrasSource audioSource = new AudioExtrasSource(new AudioEncoder(), new AudioSourceOptions {
                AudioSource = AudioSourcesEnum.SineWave
            });

            audioSource.OnAudioSourceEncodedSample += _peerConnection.SendAudio;

            MediaStreamTrack audioTrack = new MediaStreamTrack(audioSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendOnly);

            _peerConnection.addTrack(audioTrack);

            _peerConnection.OnAudioFormatsNegotiated += (sdpFormat) =>
                                                        audioSource.SetAudioSourceFormat(SDPMediaFormatInfo.GetAudioCodecForSdpFormat(sdpFormat.First().FormatCodec));

            _peerConnection.onicecandidateerror     += (candidate, error) => logger.LogWarning($"Error adding remote ICE candidate. {error} {candidate}");
            _peerConnection.onconnectionstatechange += async(state) =>
            {
                logger.LogDebug($"Peer connection state changed to {state}.");

                if (state == RTCPeerConnectionState.disconnected || state == RTCPeerConnectionState.failed)
                {
                    _peerConnection.Close("remote disconnection");
                }

                if (state == RTCPeerConnectionState.connected)
                {
                    await audioSource.StartAudio();
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await audioSource.CloseAudio();
                }
            };
            _peerConnection.OnReceiveReport += (ep, type, rtcp) => logger.LogDebug($"RTCP {type} report received.");
            _peerConnection.OnRtcpBye       += (reason) => logger.LogDebug($"RTCP BYE received, reason: {(string.IsNullOrWhiteSpace(reason) ? "<none>" : reason)}.");

            _peerConnection.onicecandidate += (candidate) =>
            {
                if (_peerConnection.signalingState == RTCSignalingState.have_local_offer ||
                    _peerConnection.signalingState == RTCSignalingState.have_remote_offer)
                {
                    if (context != null &&
                        (_iceTypes.Count == 0 || _iceTypes.Any(x => x == candidate.type)))
                    {
                        var candidateInit = new RTCIceCandidateInit
                        {
                            sdpMid           = candidate.sdpMid,
                            sdpMLineIndex    = candidate.sdpMLineIndex,
                            usernameFragment = candidate.usernameFragment,
                            candidate        = "candidate:" + candidate.ToString()
                        };

                        context.WebSocket.Send(JsonConvert.SerializeObject(candidateInit));
                    }
                }
            };

            // Peer ICE connection state changes are for ICE events such as the STUN checks completing.
            _peerConnection.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            _peerConnection.ondatachannel += (dc) =>
            {
                logger.LogDebug($"Data channel opened by remote peer, label {dc.label}, stream ID {dc.id}.");
                dc.onmessage += (msg) =>
                {
                    logger.LogDebug($"data channel ({dc.label}:{dc.id}): {msg}.");
                };
            };

            return(Task.FromResult(_peerConnection));
        }
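
The onicecandidate handler above implements trickle ICE: each local candidate is serialised as an RTCIceCandidateInit JSON object and pushed to the remote peer over the web socket as it is gathered, instead of waiting for gathering to finish before exchanging the SDP.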
Example 21
        /// <summary>
        /// Initialise the audio capture and render device.
        /// </summary>
        private void SetAudioSource(AudioOptions audioSourceOpts)
        {
            // Render device.
            if (_waveOutEvent == null)
            {
                _waveOutEvent = new WaveOutEvent();
                _waveProvider = new BufferedWaveProvider(_waveFormat);
                _waveProvider.DiscardOnBufferOverflow = true;
                _waveOutEvent.Init(_waveProvider);
            }

            // Audio source.
            if (audioSourceOpts.AudioSource == AudioSourcesEnum.Microphone)
            {
                if (_waveInEvent == null)
                {
                    if (WaveInEvent.DeviceCount > 0)
                    {
                        _waveInEvent = new WaveInEvent();
                        _waveInEvent.BufferMilliseconds = AUDIO_SAMPLE_PERIOD_MILLISECONDS;
                        _waveInEvent.NumberOfBuffers    = 1;
                        _waveInEvent.DeviceNumber       = 0;
                        _waveInEvent.WaveFormat         = _waveFormat;
                        _waveInEvent.DataAvailable     += LocalAudioSampleAvailable;
                    }
                    else
                    {
                        Log.LogWarning("No audio capture devices are available. No audio stream will be sent.");
                    }
                }
            }
            else if (audioSourceOpts.AudioSource == AudioSourcesEnum.Music)
            {
                string newAudioFile = audioSourceOpts.SourceFile ?? DEFAULT_AUDIO_SOURCE_FILE;

                // Check whether this is the initial load or whether the source file is the same. If it is there's no need to do anything.
                if (_audioStreamReader == null || newAudioFile != _audioOpts.SourceFile)
                {
                    if (!File.Exists(newAudioFile))
                    {
                        if (File.Exists(DEFAULT_AUDIO_SOURCE_FILE))
                        {
                            Log.LogWarning($"The requested audio source file could not be found {newAudioFile}, falling back to default.");
                            newAudioFile = DEFAULT_AUDIO_SOURCE_FILE;
                        }
                        else
                        {
                            Log.LogError($"The requested audio source file could not be found {newAudioFile}, no audio source will be initialised.");
                            newAudioFile = null;
                        }
                    }

                    if (newAudioFile != null)
                    {
                        _audioStreamReader = new StreamReader(newAudioFile);
                    }
                }
            }

            if (_rtpAudioTimestampPeriod == 0)
            {
                // One sample period's worth of samples at the PCMU clock rate,
                // e.g. 8000 / 1000 * 20 = 160 for a 20ms period.
                _rtpAudioTimestampPeriod = (uint)(SDPMediaFormatInfo.GetClockRate(SDPMediaFormatsEnum.PCMU) / 1000 * AUDIO_SAMPLE_PERIOD_MILLISECONDS);
            }
        }