Code Example #1
        /// <summary>
        /// Sets or updates the sources of the audio and/or video streams.
        /// </summary>
        /// <param name="audioOptions">Optional. If audio is being switched the new source options.
        /// Set to null to leave the audio source unchanged.</param>
        /// <param name="videoOptions">Optional. If video is being switched the new source options.
        /// Set to null to leave the video source unchanged.</param>
        public async Task SetSources(AudioOptions audioOptions, VideoOptions videoOptions)
        {
            if (audioOptions == null)
            {
                // Do nothing, audio source not being changed.
            }
            else if (audioOptions.AudioSource == AudioSourcesEnum.None)
            {
                // Audio source no longer required.
                _waveInEvent?.StopRecording();

                if (_audioStreamTimer != null)
                {
                    _audioStreamTimer?.Dispose();

                    // Give any currently executing audio sampling time to complete.
                    await Task.Delay(AUDIO_SAMPLE_PERIOD_MILLISECONDS * 2).ConfigureAwait(false);
                }

                _audioStreamReader?.Close();
                _audioOpts = audioOptions;
            }
            else
            {
                SetAudioSource(audioOptions);
                _audioOpts = audioOptions;
                StartAudio();
            }

            if (videoOptions == null)
            {
                // Do nothing, video source not being changed.
            }
            else if (videoOptions.VideoSource == VideoSourcesEnum.None)
            {
                // Video source no longer required.
                _testPatternVideoSource?.Stop();
                _videoOpts = videoOptions;
            }
            else
            {
                await SetVideoSource(videoOptions).ConfigureAwait(false);

                _videoOpts = videoOptions;
                StartVideo();
            }
        }
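
A minimal usage sketch for the SetSources overload above. The session variable, the option values and the file path are illustrative assumptions; the AudioOptions members used are the ones referenced elsewhere in this section.

        // Hypothetical usage: switch the audio source to a music file while leaving
        // the video source unchanged (passing null skips the video branch entirely).
        await session.SetSources(
            new AudioOptions
            {
                AudioSource = AudioSourcesEnum.Music,
                SourceFile  = "media/music.raw" // illustrative path
            },
            null);

        // Passing AudioSourcesEnum.None tears the audio source down again.
        await session.SetSources(new AudioOptions { AudioSource = AudioSourcesEnum.None }, null);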
Code Example #2
        /// <summary>
        /// Creates a new RTP audio visual session with audio/video capturing and rendering capabilities.
        /// </summary>
        /// <param name="addrFamily">The address family to create the underlying socket on (IPv4 or IPv6).</param>
        /// <param name="audioOptions">Options for the send and receive audio streams on this session.</param>
        /// <param name="videoOptions">Options for the send and receive video streams on this session</param>
        public RtpAVSession(AddressFamily addrFamily, AudioOptions audioOptions, VideoOptions videoOptions)
            : base(addrFamily, false, false, false)
        {
            _audioOpts = audioOptions ?? DefaultAudioOptions;
            _videoOpts = videoOptions ?? DefaultVideoOptions;

            // Initialise the video decoding objects. Even if we are not sourcing video
            // we need to be ready to receive and render.
            _vpxDecoder = new VpxEncoder();
            int res = _vpxDecoder.InitDecoder();

            if (res != 0)
            {
                throw new ApplicationException("VPX decoder initialisation failed.");
            }
            _imgConverter = new ImageConvert();

            if (_audioOpts.AudioSource != AudioSourcesEnum.None)
            {
                var pcmu = new SDPMediaFormat(SDPMediaFormatsEnum.PCMU);

                // RTP event support.
                int            clockRate      = pcmu.GetClockRate();
                SDPMediaFormat rtpEventFormat = new SDPMediaFormat(DTMF_EVENT_PAYLOAD_ID);
                rtpEventFormat.SetFormatAttribute($"{TELEPHONE_EVENT_ATTRIBUTE}/{clockRate}");
                rtpEventFormat.SetFormatParameterAttribute("0-16");

                var audioCapabilities = new List<SDPMediaFormat> {
                    pcmu, rtpEventFormat
                };

                MediaStreamTrack audioTrack = new MediaStreamTrack(null, SDPMediaTypesEnum.audio, false, audioCapabilities);
                addTrack(audioTrack);
            }

            if (_videoOpts.VideoSource != VideoSourcesEnum.None)
            {
                MediaStreamTrack videoTrack = new MediaStreamTrack(null, SDPMediaTypesEnum.video, false, new List<SDPMediaFormat> {
                    new SDPMediaFormat(SDPMediaFormatsEnum.VP8)
                });
                addTrack(videoTrack);
            }

            // Where the magic (for processing received media) happens.
            base.OnRtpPacketReceived += RtpPacketReceived;
        }
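
A hedged construction sketch for the constructor above. Only the signature is taken from the example; the microphone/no-video option values are assumptions based on the other snippets in this section.

        // Sketch: an IPv4 session that captures microphone audio and sends no video.
        var session = new RtpAVSession(
            AddressFamily.InterNetwork,
            new AudioOptions { AudioSource = AudioSourcesEnum.Microphone },
            new VideoOptions { VideoSource = VideoSourcesEnum.None });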
Code Example #3
        /// <summary>
        /// Initialise the audio capture and render device.
        /// </summary>
        /// <param name="audioSourceOpts">The options that dictate the type of audio source to use.</param>
        /// <param name="sendingFormat">The codec that will be sued to send the audio.</param>
        private void SetAudioSource(AudioOptions audioSourceOpts, SDPMediaFormat sendingFormat)
        {
            uint sampleRate       = (uint)SDPMediaFormatInfo.GetClockRate(sendingFormat.FormatCodec);
            uint rtpTimestampRate = (uint)SDPMediaFormatInfo.GetRtpClockRate(sendingFormat.FormatCodec);

            // Clock rate (Hz) * sample period (ms) / 1000 = RTP timestamp units per sample period.
            _rtpAudioTimestampPeriod = rtpTimestampRate * AUDIO_SAMPLE_PERIOD_MILLISECONDS / 1000;

            WaveFormat waveFormat = new WaveFormat((int)sampleRate, BITS_PER_SAMPLE, CHANNEL_COUNT);

            // Render device.
            if (_waveOutEvent == null)
            {
                _waveOutEvent = new WaveOutEvent();
                _waveOutEvent.DeviceNumber = (_audioOpts != null) ? _audioOpts.OutputDeviceIndex : AudioOptions.DEFAULT_OUTPUTDEVICE_INDEX;
                _waveProvider = new BufferedWaveProvider(waveFormat);
                _waveProvider.DiscardOnBufferOverflow = true;
                _waveOutEvent.Init(_waveProvider);
            }

            // Audio source.
            if (!_disableExternalAudioSource)
            {
                if (_waveInEvent == null)
                {
                    if (WaveInEvent.DeviceCount > 0)
                    {
                        _waveInEvent = new WaveInEvent();
                        _waveInEvent.BufferMilliseconds = AUDIO_SAMPLE_PERIOD_MILLISECONDS;
                        _waveInEvent.NumberOfBuffers    = INPUT_BUFFERS;
                        _waveInEvent.DeviceNumber       = 0;
                        _waveInEvent.WaveFormat         = waveFormat;
                        _waveInEvent.DataAvailable     += LocalAudioSampleAvailable;
                    }
                    else
                    {
                        Log.LogWarning("No audio capture devices are available. No audio stream will be sent.");
                    }
                }
            }
        }
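
A worked instance of the timestamp period calculation above, assuming PCMU with its 8000Hz RTP clock and a 20ms sample period (the 20ms value is an assumption, not taken from this snippet):

        // Assumed values: PCMU RTP clock rate 8000Hz, sample period 20ms.
        uint rtpTimestampRate = 8000;
        uint samplePeriodMs   = 20;
        uint timestampPeriod  = rtpTimestampRate * samplePeriodMs / 1000; // = 160 units per packet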
Code Example #4
        /// <summary>
        /// Sets or updates the sources of the audio and/or video streams.
        /// </summary>
        /// <param name="audioOptions">Optional. If audio is being switched the new source options.
        /// Set to null to leave the audio source unchanged.</param>
        /// <param name="videoOptions">Optional. If video is being switched the new source options.
        /// Set to null to leave the video source unchanged.</param>
        /// <param name="disableExternalAudioSource">If true then no attempt will be made to use an external audio
        /// source, e.g. microphone.</param>
        public async Task SetSources(AudioOptions audioOptions, VideoOptions videoOptions, bool disableExternalAudioSource = false)
        {
            _disableExternalAudioSource = disableExternalAudioSource;

            // Check whether the underlying media session has changed which dictates whether
            // an audio or video source needs to be removed.
            if (!HasAudio)
            {
                // Overrule any application supplied options as the session does not currently support audio.
                audioOptions = new AudioOptions {
                    AudioSource = AudioSourcesEnum.None
                };
            }

            if (!HasVideo)
            {
                // Overrule any application supplied options as the session does not currently support video.
                videoOptions = new VideoOptions {
                    VideoSource = VideoSourcesEnum.None
                };
            }

            if (audioOptions == null)
            {
                // Do nothing, audio source not being changed.
            }
            else if (audioOptions.AudioSource == AudioSourcesEnum.None)
            {
                // Audio source no longer required.
                _waveInEvent?.StopRecording();

                if (_audioStreamTimer != null)
                {
                    _audioStreamTimer?.Dispose();

                    // Give any currently executing audio sampling time to complete.
                    await Task.Delay(AUDIO_SAMPLE_PERIOD_MILLISECONDS * 2).ConfigureAwait(false);
                }

                _audioStreamReader?.Close();
                _audioOpts = audioOptions;
            }
            else
            {
                _sendingAudioFormat = base.GetSendingFormat(SDPMediaTypesEnum.audio);
                SetAudioSource(audioOptions, _sendingAudioFormat);
                _audioOpts = audioOptions;
                StartAudio();
            }

            if (videoOptions == null)
            {
                // Do nothing, video source not being changed.
            }
            else if (videoOptions.VideoSource == VideoSourcesEnum.None)
            {
                // Video source no longer required.
                _testPatternVideoSource?.Stop();
                if (_videoOpts.BitmapSource != null)
                {
                    _videoOpts.BitmapSource.OnBitmap -= LocalBitmapAvailable;
                }
                _videoOpts = videoOptions;
            }
            else
            {
                await SetVideoSource(videoOptions).ConfigureAwait(false);

                _videoOpts = videoOptions;
                StartVideo();
            }
        }
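
A usage sketch for the extended overload, exercising the disableExternalAudioSource flag; the session variable and the file path are illustrative assumptions.

        // Hypothetical usage: stream audio from a file and make no attempt to open a
        // microphone, e.g. for an unattended server process.
        await session.SetSources(
            new AudioOptions { AudioSource = AudioSourcesEnum.Music, SourceFile = "media/onhold.raw" },
            null,
            disableExternalAudioSource: true);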
Code Example #5
        /// <summary>
        /// Creates a new RTP audio visual session with audio/video capturing and rendering capabilities.
        /// </summary>
        /// <param name="addrFamily">The address family to create the underlying socket on (IPv4 or IPv6).</param>
        /// <param name="audioOptions">Options for the send and receive audio streams on this session.</param>
        /// <param name="videoOptions">Options for the send and receive video streams on this session</param>
        /// <param name="bindAddress">Optional. If specified this address will be used as the bind address for any RTP
        /// and control sockets created. Generally this address does not need to be set. The default behaviour
        /// is to bind to [::] or 0.0.0.0, depending on system support, which minimises network routing
        /// causing connection issues.</param>
        /// <param name="disableExternalAudioSource">If true then no attempt will be made to use an external audio
        /// source, e.g. microphone.</param>
        public RtpAVSession(AudioOptions audioOptions, VideoOptions videoOptions, IPAddress bindAddress = null, bool disableExternalAudioSource = false)
            : base(false, false, false, bindAddress)
        {
            _audioOpts = audioOptions ?? DefaultAudioOptions;
            _videoOpts = videoOptions ?? DefaultVideoOptions;
            _disableExternalAudioSource = disableExternalAudioSource;

            if (_audioOpts != null && _audioOpts.AudioCodecs != null &&
                _audioOpts.AudioCodecs.Any(x => !(x == SDPMediaFormatsEnum.PCMU || x == SDPMediaFormatsEnum.PCMA || x == SDPMediaFormatsEnum.G722)))
            {
                throw new ApplicationException("Only PCMA, PCMU and G722 are supported for audio codec options.");
            }

            // Initialise the video decoding objects. Even if we are not sourcing video
            // we need to be ready to receive and render.
            _vpxDecoder = new VpxEncoder();
            int res = _vpxDecoder.InitDecoder();

            if (res != 0)
            {
                throw new ApplicationException("VPX decoder initialisation failed.");
            }
            _imgConverter = new ImageConvert();

            if (_audioOpts.AudioSource != AudioSourcesEnum.None)
            {
                var pcmu = new SDPMediaFormat(SDPMediaFormatsEnum.PCMU);

                //// RTP event support.
                //int clockRate = pcmu.GetClockRate();
                //SDPMediaFormat rtpEventFormat = new SDPMediaFormat(DTMF_EVENT_PAYLOAD_ID);
                //rtpEventFormat.SetFormatAttribute($"{SDP.TELEPHONE_EVENT_ATTRIBUTE}/{clockRate}");
                //rtpEventFormat.SetFormatParameterAttribute("0-16");

                var audioCapabilities = new List<SDPMediaFormat>();
                if (_audioOpts.AudioCodecs == null || _audioOpts.AudioCodecs.Count == 0)
                {
                    audioCapabilities.Add(pcmu);
                }
                else
                {
                    foreach (var codec in _audioOpts.AudioCodecs)
                    {
                        audioCapabilities.Add(new SDPMediaFormat(codec));
                    }
                }
                //audioCapabilities.Add(rtpEventFormat);

                if (audioCapabilities.Any(x => x.FormatCodec == SDPMediaFormatsEnum.G722))
                {
                    _g722Encode      = new G722Codec();
                    _g722EncodeState = new G722CodecState(64000, G722Flags.None);
                    _g722Decode      = new G722Codec();
                    _g722DecodeState = new G722CodecState(64000, G722Flags.None);
                }

                MediaStreamTrack audioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, audioCapabilities);
                addTrack(audioTrack);
            }

            if (_videoOpts.VideoSource != VideoSourcesEnum.None)
            {
                MediaStreamTrack videoTrack = new MediaStreamTrack(SDPMediaTypesEnum.video, false, new List<SDPMediaFormat> {
                    new SDPMediaFormat(SDPMediaFormatsEnum.VP8)
                });
                addTrack(videoTrack);
            }

            // Where the magic (for processing received media) happens.
            base.OnRtpPacketReceived += RtpPacketReceived;
        }
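
A construction sketch for this overload that restricts the offered audio codecs to G722. The element type of AudioCodecs is inferred from the validation logic in the constructor above; the rest is illustrative.

        // Sketch: offer only G722; any codec outside PCMU/PCMA/G722 makes the
        // constructor throw an ApplicationException.
        var session = new RtpAVSession(
            new AudioOptions
            {
                AudioSource = AudioSourcesEnum.Microphone,
                AudioCodecs = new List<SDPMediaFormatsEnum> { SDPMediaFormatsEnum.G722 }
            },
            null);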
Code Example #6
        /// <summary>
        /// Initialise the audio capture and render device.
        /// </summary>
        private void SetAudioSource(AudioOptions audioSourceOpts)
        {
            // Render device.
            if (_waveOutEvent == null)
            {
                _waveOutEvent = new WaveOutEvent();
                _waveProvider = new BufferedWaveProvider(_waveFormat);
                _waveProvider.DiscardOnBufferOverflow = true;
                _waveOutEvent.Init(_waveProvider);
            }

            // Audio source.
            if (audioSourceOpts.AudioSource == AudioSourcesEnum.Microphone)
            {
                if (_waveInEvent == null)
                {
                    if (WaveInEvent.DeviceCount > 0)
                    {
                        _waveInEvent = new WaveInEvent();
                        _waveInEvent.BufferMilliseconds = AUDIO_SAMPLE_PERIOD_MILLISECONDS;
                        _waveInEvent.NumberOfBuffers    = 1;
                        _waveInEvent.DeviceNumber       = 0;
                        _waveInEvent.WaveFormat         = _waveFormat;
                        _waveInEvent.DataAvailable     += LocalAudioSampleAvailable;
                    }
                    else
                    {
                        Log.LogWarning("No audio capture devices are available. No audio stream will be sent.");
                    }
                }
            }
            else if (audioSourceOpts.AudioSource == AudioSourcesEnum.Music)
            {
                string newAudioFile = audioSourceOpts.SourceFile ?? DEFAULT_AUDIO_SOURCE_FILE;

                // If this is not the initial load and the source file is unchanged there's nothing to do.
                if (_audioStreamReader == null || newAudioFile != _audioOpts.SourceFile)
                {
                    if (!File.Exists(newAudioFile))
                    {
                        if (File.Exists(DEFAULT_AUDIO_SOURCE_FILE))
                        {
                            Log.LogWarning($"The requested audio source file could not be found {newAudioFile}, falling back to default.");
                            newAudioFile = DEFAULT_AUDIO_SOURCE_FILE;
                        }
                        else
                        {
                            Log.LogError($"The requested audio source file could not be found {newAudioFile}, no audio source will be initialised.");
                            newAudioFile = null;
                        }
                    }

                    if (newAudioFile != null)
                    {
                        _audioStreamReader = new StreamReader(newAudioFile);
                    }
                }
            }

            if (_rtpAudioTimestampPeriod == 0)
            {
                // Clock rate (Hz) * sample period (ms) / 1000 = RTP timestamp units per sample period,
                // matching the calculation in the SetAudioSource overload in Code Example #3.
                _rtpAudioTimestampPeriod = (uint)(SDPMediaFormatInfo.GetClockRate(SDPMediaFormatsEnum.PCMU) * AUDIO_SAMPLE_PERIOD_MILLISECONDS / 1000);
            }
        }
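
The Music branch above resolves the source file with a two-level fallback. A condensed, standalone restatement of that logic, with hypothetical paths:

        // Condensed fallback: the requested file, else the default file, else no source.
        string requested = audioSourceOpts.SourceFile ?? DEFAULT_AUDIO_SOURCE_FILE;
        string resolved  = File.Exists(requested) ? requested
                         : File.Exists(DEFAULT_AUDIO_SOURCE_FILE) ? DEFAULT_AUDIO_SOURCE_FILE
                         : null; // null means no audio source is initialised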