/// <summary>
/// Initialise the audio capture and render devices for the session.
/// </summary>
/// <param name="audioSourceOpts">The options that dictate the type of audio source to use.</param>
/// <param name="sendingFormat">The codec that will be used to send the audio.</param>
private void SetAudioSource(AudioOptions audioSourceOpts, SDPMediaFormat sendingFormat)
{
    uint clockRate = (uint)SDPMediaFormatInfo.GetClockRate(sendingFormat.FormatCodec);
    uint rtpClockRate = (uint)SDPMediaFormatInfo.GetRtpClockRate(sendingFormat.FormatCodec);

    // RTP timestamp units that elapse in one sample period.
    _rtpAudioTimestampPeriod = rtpClockRate * AUDIO_SAMPLE_PERIOD_MILLISECONDS / 1000;

    WaveFormat waveFormat = new WaveFormat((int)clockRate, BITS_PER_SAMPLE, CHANNEL_COUNT);

    // Render (playback) device: created once and fed from a buffered provider.
    if (_waveOutEvent == null)
    {
        _waveOutEvent = new WaveOutEvent();
        _waveOutEvent.DeviceNumber = (_audioOpts != null)
            ? _audioOpts.OutputDeviceIndex
            : AudioOptions.DEFAULT_OUTPUTDEVICE_INDEX;

        _waveProvider = new BufferedWaveProvider(waveFormat);
        // Drop samples rather than throw if the playback buffer fills up.
        _waveProvider.DiscardOnBufferOverflow = true;
        _waveOutEvent.Init(_waveProvider);
    }

    // Capture device: only wired up when an external audio source is permitted.
    if (!_disableExternalAudioSource && _waveInEvent == null)
    {
        if (WaveInEvent.DeviceCount > 0)
        {
            _waveInEvent = new WaveInEvent
            {
                BufferMilliseconds = AUDIO_SAMPLE_PERIOD_MILLISECONDS,
                NumberOfBuffers = INPUT_BUFFERS,
                DeviceNumber = 0, // Default capture device.
                WaveFormat = waveFormat
            };
            _waveInEvent.DataAvailable += LocalAudioSampleAvailable;
        }
        else
        {
            Log.LogWarning("No audio capture devices are available. No audio stream will be sent.");
        }
    }
}
/// <summary>
/// Timer callback that sends audio samples read from a file.
/// </summary>
/// <param name="state">Timer state object, not used.</param>
private void SendMusicSample(object state)
{
    if (!_streamSendInProgress)
    {
        // NOTE(review): locking on the timer instance works, but a dedicated
        // private lock object would be the conventional choice.
        lock (_audioStreamTimer)
        {
            int sampleRate = SDPMediaFormatInfo.GetRtpClockRate(_sendingFormat.FormatCodec);
            // Number of bytes required for one sample period of audio.
            int sampleSize = sampleRate / 1000 * AUDIO_SAMPLE_PERIOD_MILLISECONDS;
            byte[] sample = new byte[sampleSize];

            int bytesRead = _audioStreamReader.BaseStream.Read(sample, 0, sample.Length);

            if (bytesRead > 0)
            {
                SendAudioFrame((uint)sampleSize, (int)_sendingFormat.FormatCodec, sample);
            }

            // Rewind to the start of the file once it has been fully consumed so
            // the music loops. Do NOT use StreamReader.EndOfStream here: it
            // performs a character read that fills the reader's internal buffer,
            // silently consuming bytes from the underlying stream and corrupting
            // subsequent BaseStream reads. A short read, or the position reaching
            // the stream length, indicates end-of-file without that side effect.
            if (bytesRead < sampleSize ||
                _audioStreamReader.BaseStream.Position >= _audioStreamReader.BaseStream.Length)
            {
                _audioStreamReader.BaseStream.Position = 0;
            }
        }
    }
}
/// <summary>
/// Initialises the audio source as required.
/// </summary>
public override Task Start()
{
    lock (this)
    {
        if (!IsStarted)
        {
            // Guard: both a local and a remote audio track must be negotiated.
            if (AudioLocalTrack == null || AudioLocalTrack.Capabilities == null ||
                AudioLocalTrack.Capabilities.Count == 0)
            {
                throw new ApplicationException(
                    "Cannot start audio session without a local audio track being available.");
            }

            if (AudioRemoteTrack == null || AudioRemoteTrack.Capabilities == null ||
                AudioRemoteTrack.Capabilities.Count == 0)
            {
                throw new ApplicationException(
                    "Cannot start audio session without a remote audio track being available.");
            }

            // The sending format must be resolved before any source timer starts,
            // since the timers below begin firing immediately (due time 0).
            _sendingFormat = base.GetSendingFormat(SDPMediaTypesEnum.audio);
            _sendingAudioSampleRate = SDPMediaFormatInfo.GetClockRate(_sendingFormat.FormatCodec);
            _sendingAudioRtpRate = SDPMediaFormatInfo.GetRtpClockRate(_sendingFormat.FormatCodec);

            Log.LogDebug($"RTP audio session selected sending codec {_sendingFormat.FormatCodec}.");

            if (_sendingFormat.FormatCodec == SDPMediaFormatsEnum.G722)
            {
                _g722Codec = new G722Codec();
                _g722CodecState = new G722CodecState(G722_BIT_RATE, G722Flags.None);
                _g722Decoder = new G722Codec();
                _g722DecoderState = new G722CodecState(G722_BIT_RATE, G722Flags.None);
            }

            // If required start the audio source.
            if (_audioOpts != null && _audioOpts.AudioSource != AudioSourcesEnum.None)
            {
                switch (_audioOpts.AudioSource)
                {
                    case AudioSourcesEnum.Silence:
                        _audioStreamTimer = new Timer(SendSilenceSample, null, 0,
                            AUDIO_SAMPLE_PERIOD_MILLISECONDS);
                        break;

                    case AudioSourcesEnum.PinkNoise:
                    case AudioSourcesEnum.WhiteNoise:
                    case AudioSourcesEnum.SineWave:
                        _signalGenerator = new SignalGenerator(_sendingAudioSampleRate, 1);
                        switch (_audioOpts.AudioSource)
                        {
                            case AudioSourcesEnum.PinkNoise:
                                _signalGenerator.Type = SignalGeneratorType.Pink;
                                break;
                            case AudioSourcesEnum.SineWave:
                                _signalGenerator.Type = SignalGeneratorType.Sin;
                                break;
                            case AudioSourcesEnum.WhiteNoise:
                            default:
                                _signalGenerator.Type = SignalGeneratorType.White;
                                break;
                        }
                        _audioStreamTimer = new Timer(SendSignalGeneratorSample, null, 0,
                            AUDIO_SAMPLE_PERIOD_MILLISECONDS);
                        break;

                    case AudioSourcesEnum.Music:
                        if (_audioOpts.SourceFiles == null ||
                            !_audioOpts.SourceFiles.ContainsKey(_sendingFormat.FormatCodec))
                        {
                            Log.LogWarning($"Source file not set for codec {_sendingFormat.FormatCodec}.");
                        }
                        else
                        {
                            string musicFile = _audioOpts.SourceFiles[_sendingFormat.FormatCodec];
                            if (String.IsNullOrEmpty(musicFile) || !File.Exists(musicFile))
                            {
                                Log.LogWarning(
                                    "Could not start audio music source as the source file does not exist.");
                            }
                            else
                            {
                                _audioStreamReader = new StreamReader(musicFile);
                                _audioStreamTimer = new Timer(SendMusicSample, null, 0,
                                    AUDIO_SAMPLE_PERIOD_MILLISECONDS);
                            }
                        }
                        break;
                }
            }

            base.OnRtpPacketReceived += RtpPacketReceived;
        }

        return base.Start();
    }
}