/// <summary>
/// Sends audio samples read from a file. Invoked periodically by a timer.
/// </summary>
/// <param name="state">Timer state parameter (not used).</param>
private void SendMusicSample(object state)
{
    // NOTE(review): locking on the stream reader works but a dedicated private
    // lock object would be the safer convention; left as-is since the lock
    // target may be shared with other methods outside this block.
    lock (_audioStreamReader)
    {
        // Bytes needed for one sample period, e.g. 8000Hz / 1000 * 20ms = 160 for PCMU.
        int sampleSize = (SDPMediaFormatInfo.GetClockRate(_sendingAudioFormat.FormatCodec) / 1000) * AUDIO_SAMPLE_PERIOD_MILLISECONDS;
        byte[] sample = new byte[sampleSize];
        int bytesRead = _audioStreamReader.BaseStream.Read(sample, 0, sample.Length);

        if (bytesRead == 0 || _audioStreamReader.EndOfStream)
        {
            // End of the file reached, wrap around and keep playing.
            _audioStreamReader.BaseStream.Position = 0;
            bytesRead = _audioStreamReader.BaseStream.Read(sample, 0, sample.Length);
        }

        SendAudioFrame((uint)bytesRead, Convert.ToInt32(_sendingAudioFormat.FormatID), sample.Take(bytesRead).ToArray());

        #region On hold audio scope.

        if (OnHoldAudioScopeSampleReady != null)
        {
            // Decode the encoded bytes to normalised IEEE float samples for the
            // audio scope. NOTE(review): this always decodes as mu-law even though
            // the sending format is taken from _sendingAudioFormat above — confirm
            // on-hold music is always PCMU.
            Complex[] ieeeSamples = new Complex[sample.Length];
            for (int index = 0; index < sample.Length; index++)
            {
                short pcm = NAudio.Codecs.MuLawDecoder.MuLawToLinearSample(sample[index]);
                // Fix: removed the unused little-endian "pcmSample" byte array that
                // was built here and never read (dead code), and the redundant
                // ieeeSamples.ToArray() copy when invoking the event below.
                ieeeSamples[index] = pcm / 32768f;
            }

            OnHoldAudioScopeSampleReady(ieeeSamples);
        }

        #endregion
    }
}
/// <summary>
/// Sends audio samples read from a file. Invoked periodically by a timer.
/// </summary>
/// <param name="state">Timer state parameter (not used).</param>
private void SendMusicSample(object state)
{
    // Bytes required for a single sample period (PCMU is one byte per sample).
    int bytesPerPeriod = SDPMediaFormatInfo.GetClockRate(SDPMediaFormatsEnum.PCMU) / 1000 * AUDIO_SAMPLE_PERIOD_MILLISECONDS;
    var buffer = new byte[bytesPerPeriod];

    var stream = _audioStreamReader.BaseStream;
    int count = stream.Read(buffer, 0, buffer.Length);

    // At the end of the file wrap back to the start so playback loops.
    if (count == 0 || _audioStreamReader.EndOfStream)
    {
        stream.Position = 0;
        count = stream.Read(buffer, 0, buffer.Length);
    }

    SendAudioFrame((uint)count, (int)SDPMediaFormatsEnum.PCMU, buffer.Take(count).ToArray());
}
/// <summary>
/// Starts the media capturing/source devices.
/// </summary>
public override async Task Start()
{
    if (!_isStarted)
    {
        _sendingAudioFormat = base.GetSendingFormat(SDPMediaTypesEnum.audio);

        _isStarted = true;

        await base.Start();

        // Render device.
        _waveOutEvent = new WaveOutEvent();
        _waveOutEvent.DeviceNumber = AUDIO_OUTPUTDEVICE_INDEX;
        _waveProvider = new BufferedWaveProvider(_waveFormat);
        _waveProvider.DiscardOnBufferOverflow = true;
        _waveOutEvent.Init(_waveProvider);
        _waveOutEvent.Play();

        // Audio source.
        if (WaveInEvent.DeviceCount > 0)
        {
            _waveInEvent = new WaveInEvent();
            _waveInEvent.BufferMilliseconds = AUDIO_SAMPLE_PERIOD_MILLISECONDS;
            _waveInEvent.NumberOfBuffers = 1;
            _waveInEvent.DeviceNumber = 0;
            _waveInEvent.WaveFormat = _waveFormat;
            _waveInEvent.DataAvailable += LocalAudioSampleAvailable;
            _waveInEvent.StartRecording();
        }
        else
        {
            Log.LogWarning("No audio capture devices are available. No audio stream will be sent.");
        }

        if (_rtpAudioTimestampPeriod == 0)
        {
            // The RTP timestamp advances by the number of samples in each period:
            // clockRate (samples/s) * periodMs / 1000, e.g. 8000 * 20 / 1000 = 160.
            // Fix: the previous calculation divided the clock rate by the period in
            // milliseconds, which is dimensionally incorrect (this matches the
            // formula used by the other SetAudioSource overload in this project).
            _rtpAudioTimestampPeriod = (uint)(SDPMediaFormatInfo.GetClockRate(SDPMediaFormatsEnum.PCMU) * AUDIO_SAMPLE_PERIOD_MILLISECONDS / 1000);
        }
    }
}
/// <summary>
/// Initialise the video capture and render device.
/// </summary>
/// <param name="videoSourceOpts">The options dictating which video source to use.</param>
private async Task SetVideoSource(VideoOptions videoSourceOpts)
{
    // Unsubscribe from the previously configured external bitmap source if the new
    // options no longer use it.
    if (videoSourceOpts.VideoSource != VideoSourcesEnum.ExternalBitmap && _videoOpts.BitmapSource != null)
    {
        _videoOpts.BitmapSource.OnBitmap -= LocalBitmapAvailable;
    }

    // Tear down the test pattern source if the new options no longer use it.
    if (videoSourceOpts.VideoSource != VideoSourcesEnum.TestPattern && _testPatternVideoSource != null)
    {
        _testPatternVideoSource.SampleReady -= LocalVideoSampleAvailable;
        _testPatternVideoSource.Stop();
        _testPatternVideoSource = null;
    }

    if (videoSourceOpts.VideoSource == VideoSourcesEnum.TestPattern)
    {
        if (_testPatternVideoSource == null)
        {
            _testPatternVideoSource = new TestPatternVideoSource(videoSourceOpts.SourceFile, videoSourceOpts.SourceFramesPerSecond);
            _testPatternVideoSource.SampleReady += LocalVideoSampleAvailable;
        }
        else
        {
            // Reuse the existing test pattern source with the new file/frame rate.
            await _testPatternVideoSource.SetSource(videoSourceOpts.SourceFile, videoSourceOpts.SourceFramesPerSecond).ConfigureAwait(false);
        }

        // RTP timestamp increment per video frame; falls back to the default frame
        // rate to avoid a divide-by-zero when the source does not report one.
        if (_testPatternVideoSource.FramesPerSecond != 0)
        {
            _rtpVideoTimestampPeriod = (uint)(SDPMediaFormatInfo.GetClockRate(SDPMediaFormatsEnum.VP8) / _testPatternVideoSource.FramesPerSecond);
        }
        else
        {
            _rtpVideoTimestampPeriod = (uint)(SDPMediaFormatInfo.GetClockRate(SDPMediaFormatsEnum.VP8) / TestPatternVideoSource.DEFAULT_FRAMES_PER_SECOND);
        }
    }
    else if (videoSourceOpts.VideoSource == VideoSourcesEnum.ExternalBitmap)
    {
        // NOTE(review): videoSourceOpts.BitmapSource is dereferenced here without a
        // null check — confirm callers always supply it when VideoSource is
        // ExternalBitmap.
        videoSourceOpts.BitmapSource.OnBitmap += LocalBitmapAvailable;
    }
}
/// <summary>
/// Initialise the audio capture and render device.
/// </summary>
/// <param name="audioSourceOpts">The options that dictate the type of audio source to use.</param>
/// <param name="sendingFormat">The codec that will be used to send the audio.</param>
private void SetAudioSource(AudioOptions audioSourceOpts, SDPMediaFormat sendingFormat)
{
    var clockRate = (uint)SDPMediaFormatInfo.GetClockRate(sendingFormat.FormatCodec);
    var rtpClockRate = (uint)SDPMediaFormatInfo.GetRtpClockRate(sendingFormat.FormatCodec);

    // The RTP timestamp advances by the number of RTP clock ticks in one sample period.
    _rtpAudioTimestampPeriod = rtpClockRate * AUDIO_SAMPLE_PERIOD_MILLISECONDS / 1000;

    var format = new WaveFormat((int)clockRate, BITS_PER_SAMPLE, CHANNEL_COUNT);

    // Render device: created once and reused on subsequent calls.
    if (_waveOutEvent == null)
    {
        _waveOutEvent = new WaveOutEvent
        {
            DeviceNumber = (_audioOpts != null) ? _audioOpts.OutputDeviceIndex : AudioOptions.DEFAULT_OUTPUTDEVICE_INDEX
        };
        _waveProvider = new BufferedWaveProvider(format) { DiscardOnBufferOverflow = true };
        _waveOutEvent.Init(_waveProvider);
    }

    // Audio capture device, unless an external source has been disabled.
    if (!_disableExternalAudioSource && _waveInEvent == null)
    {
        if (WaveInEvent.DeviceCount > 0)
        {
            _waveInEvent = new WaveInEvent
            {
                BufferMilliseconds = AUDIO_SAMPLE_PERIOD_MILLISECONDS,
                NumberOfBuffers = INPUT_BUFFERS,
                DeviceNumber = 0,
                WaveFormat = format
            };
            _waveInEvent.DataAvailable += LocalAudioSampleAvailable;
        }
        else
        {
            Log.LogWarning("No audio capture devices are available. No audio stream will be sent.");
        }
    }
}
/// <summary>
/// Starts the music-on-hold sample loop if it is not already running.
/// </summary>
public void Start()
{
    _stop = false;

    // Fix: Task.Status for an async delegate is typically WaitingForActivation
    // while it is awaiting (not Running), so the previous check of
    // "Status != TaskStatus.Running" could start a duplicate sample loop on every
    // call. IsCompleted is the reliable "no longer executing" check.
    if (_samplesTask == null || _samplesTask.IsCompleted)
    {
        logger.Debug("Music on hold samples task starting.");

        _samplesTask = Task.Run(async () =>
        {
            // Read the same file in an endless loop while samples are still required.
            while (!_stop)
            {
                using (StreamReader sr = new StreamReader(AUDIO_FILE_PCMU))
                {
                    // Bytes needed for one sample period; PCMU is one byte per sample.
                    int sampleSize = (SDPMediaFormatInfo.GetClockRate(SDPMediaFormatsEnum.PCMU) / 1000) * AUDIO_SAMPLE_PERIOD_MILLISECONDS;
                    byte[] sample = new byte[sampleSize];
                    int bytesRead = sr.BaseStream.Read(sample, 0, sample.Length);

                    while (bytesRead > 0 && !_stop)
                    {
                        if (OnAudioSampleReady == null)
                        {
                            // Nobody needs music on hold so exit.
                            logger.Debug("Music on hold has no subscribers, stopping.");
                            return;
                        }
                        else
                        {
                            // NOTE(review): the last read of the file may be partial, in
                            // which case the tail of the buffer holds data from the
                            // previous read — confirm subscribers tolerate this.
                            OnAudioSampleReady(sample);
                        }

                        await Task.Delay(AUDIO_SAMPLE_PERIOD_MILLISECONDS);

                        bytesRead = sr.BaseStream.Read(sample, 0, sample.Length);
                    }
                }
            }
        });
    }
}
/// <summary>
/// Starts the media capturing/source devices.
/// </summary>
public override async Task Start()
{
    if (!_isStarted)
    {
        _sendingAudioFormat = base.GetSendingFormat(SDPMediaTypesEnum.audio);

        _isStarted = true;

        await base.Start();

        PortAudio.Initialize();

        var outputDevice = PortAudio.DefaultOutputDevice;
        if (outputDevice == PortAudio.NoDevice)
        {
            throw new ApplicationException("No audio output device available.");
        }
        else
        {
            // NOTE(review): the capture side is hard-coded to device index 0 rather
            // than PortAudio.DefaultInputDevice, and its availability is not checked
            // the way the output device is — confirm this is intentional.
            StreamParameters stmInParams = new StreamParameters { device = 0, channelCount = 2, sampleFormat = SampleFormat.Float32 };
            StreamParameters stmOutParams = new StreamParameters { device = outputDevice, channelCount = 2, sampleFormat = SampleFormat.Float32 };

            // Combined audio capture and render.
            _audioIOStream = new Stream(stmInParams, stmOutParams, AUDIO_SAMPLING_RATE, AUDIO_SAMPLE_BUFFER_LENGTH, StreamFlags.NoFlag, AudioSampleAvailable, null);
            _audioIOStream.Start();
        }

        if (_rtpAudioTimestampPeriod == 0)
        {
            // NOTE(review): dividing the codec clock rate by the buffer length looks
            // dimensionally suspect — other code in this project computes the period
            // as rate * periodMs / 1000. Verify against AUDIO_SAMPLE_BUFFER_LENGTH's
            // units before relying on this value.
            _rtpAudioTimestampPeriod = (uint)(SDPMediaFormatInfo.GetClockRate(SDPMediaFormatsEnum.PCMU) / AUDIO_SAMPLE_BUFFER_LENGTH);
        }
    }
}
/// <summary>
/// Sends audio samples read from a file. Invoked periodically by a timer.
/// </summary>
/// <param name="state">Timer state parameter (not used).</param>
private void SendMusicSample(object state)
{
    if (!_streamSendInProgress)
    {
        // NOTE(review): locking on the timer object works but a dedicated private
        // lock object would be the safer convention.
        lock (_audioStreamTimer)
        {
            // Bytes needed for one sample period, e.g. 8000 / 1000 * 20 = 160 for PCMU.
            int sampleRate = SDPMediaFormatInfo.GetClockRate(_sendingFormat.FormatCodec);
            int sampleSize = sampleRate / 1000 * AUDIO_SAMPLE_PERIOD_MILLISECONDS;
            byte[] sample = new byte[sampleSize];

            int bytesRead = _audioStreamReader.BaseStream.Read(sample, 0, sample.Length);

            if (bytesRead > 0)
            {
                // Fix: only send the bytes actually read. The previous version always
                // sent the full buffer with a duration of sampleSize, which transmitted
                // zero padding and over-advanced the RTP timestamp on a partial
                // (end of file) read.
                byte[] payload = sample;
                if (bytesRead < sample.Length)
                {
                    payload = new byte[bytesRead];
                    Buffer.BlockCopy(sample, 0, payload, 0, bytesRead);
                }

                SendAudioFrame((uint)bytesRead, (int)_sendingFormat.FormatCodec, payload);
            }

            if (bytesRead == 0 || _audioStreamReader.EndOfStream)
            {
                // End of file, loop back to the start for the next timer callback.
                _audioStreamReader.BaseStream.Position = 0;
            }
        }
    }
}
/// <summary>
/// Initialises the audio source as required.
/// </summary>
public override Task Start()
{
    // NOTE(review): lock (this) is an anti-pattern (external code could take the
    // same lock); a private lock object would be preferable but cannot be added
    // without touching fields outside this method.
    lock (this)
    {
        if (!IsStarted)
        {
            // Both a local and a remote audio track must have been negotiated
            // before the session can start.
            if (AudioLocalTrack == null || AudioLocalTrack.Capabilities == null ||
                AudioLocalTrack.Capabilities.Count == 0)
            {
                throw new ApplicationException(
                    "Cannot start audio session without a local audio track being available.");
            }
            else if (AudioRemoteTrack == null || AudioRemoteTrack.Capabilities == null ||
                AudioRemoteTrack.Capabilities.Count == 0)
            {
                throw new ApplicationException(
                    "Cannot start audio session without a remote audio track being available.");
            }

            // Cache the negotiated sending codec and its sample/RTP clock rates.
            _sendingFormat = base.GetSendingFormat(SDPMediaTypesEnum.audio);
            _sendingAudioSampleRate = SDPMediaFormatInfo.GetClockRate(_sendingFormat.FormatCodec);
            _sendingAudioRtpRate = SDPMediaFormatInfo.GetRtpClockRate(_sendingFormat.FormatCodec);

            Log.LogDebug($"RTP audio session selected sending codec {_sendingFormat.FormatCodec}.");

            if (_sendingFormat.FormatCodec == SDPMediaFormatsEnum.G722)
            {
                // G722 needs separate codec state for the encode and decode directions.
                _g722Codec = new G722Codec();
                _g722CodecState = new G722CodecState(G722_BIT_RATE, G722Flags.None);
                _g722Decoder = new G722Codec();
                _g722DecoderState = new G722CodecState(G722_BIT_RATE, G722Flags.None);
            }

            // If required start the audio source.
            if (_audioOpts != null && _audioOpts.AudioSource != AudioSourcesEnum.None)
            {
                if (_audioOpts.AudioSource == AudioSourcesEnum.Silence)
                {
                    // Periodically send a silence payload.
                    _audioStreamTimer = new Timer(SendSilenceSample, null, 0, AUDIO_SAMPLE_PERIOD_MILLISECONDS);
                }
                else if (_audioOpts.AudioSource == AudioSourcesEnum.PinkNoise ||
                    _audioOpts.AudioSource == AudioSourcesEnum.WhiteNoise ||
                    _audioOpts.AudioSource == AudioSourcesEnum.SineWave)
                {
                    // Synthesised source generated at the sending sample rate.
                    _signalGenerator = new SignalGenerator(_sendingAudioSampleRate, 1);

                    switch (_audioOpts.AudioSource)
                    {
                        case AudioSourcesEnum.PinkNoise:
                            _signalGenerator.Type = SignalGeneratorType.Pink;
                            break;
                        case AudioSourcesEnum.SineWave:
                            _signalGenerator.Type = SignalGeneratorType.Sin;
                            break;
                        case AudioSourcesEnum.WhiteNoise:
                        default:
                            _signalGenerator.Type = SignalGeneratorType.White;
                            break;
                    }

                    _audioStreamTimer = new Timer(SendSignalGeneratorSample, null, 0, AUDIO_SAMPLE_PERIOD_MILLISECONDS);
                }
                else if (_audioOpts.AudioSource == AudioSourcesEnum.Music)
                {
                    // Music playback requires a source file pre-encoded with the
                    // negotiated sending codec.
                    if (_audioOpts.SourceFiles == null || !_audioOpts.SourceFiles.ContainsKey(_sendingFormat.FormatCodec))
                    {
                        Log.LogWarning($"Source file not set for codec {_sendingFormat.FormatCodec}.");
                    }
                    else
                    {
                        string sourceFile = _audioOpts.SourceFiles[_sendingFormat.FormatCodec];

                        if (String.IsNullOrEmpty(sourceFile) || !File.Exists(sourceFile))
                        {
                            Log.LogWarning(
                                "Could not start audio music source as the source file does not exist.");
                        }
                        else
                        {
                            // NOTE(review): the StreamReader is not disposed here;
                            // confirm it is cleaned up when the session closes.
                            _audioStreamReader = new StreamReader(sourceFile);
                            _audioStreamTimer = new Timer(SendMusicSample, null, 0, AUDIO_SAMPLE_PERIOD_MILLISECONDS);
                        }
                    }
                }
            }

            base.OnRtpPacketReceived += RtpPacketReceived;
        }

        return(base.Start());
    }
}
/// <summary>
/// Initialise the audio capture and render device.
/// </summary>
/// <param name="audioSourceOpts">The options that dictate the type of audio source to use.</param>
private void SetAudioSource(AudioOptions audioSourceOpts)
{
    // Render device: created once and reused on subsequent calls.
    if (_waveOutEvent == null)
    {
        _waveOutEvent = new WaveOutEvent();
        _waveProvider = new BufferedWaveProvider(_waveFormat);
        _waveProvider.DiscardOnBufferOverflow = true;
        _waveOutEvent.Init(_waveProvider);
    }

    // Audio source.
    if (audioSourceOpts.AudioSource == AudioSourcesEnum.Microphone)
    {
        if (_waveInEvent == null)
        {
            if (WaveInEvent.DeviceCount > 0)
            {
                _waveInEvent = new WaveInEvent();
                _waveInEvent.BufferMilliseconds = AUDIO_SAMPLE_PERIOD_MILLISECONDS;
                _waveInEvent.NumberOfBuffers = 1;
                _waveInEvent.DeviceNumber = 0;
                _waveInEvent.WaveFormat = _waveFormat;
                _waveInEvent.DataAvailable += LocalAudioSampleAvailable;
            }
            else
            {
                Log.LogWarning("No audio capture devices are available. No audio stream will be sent.");
            }
        }
    }
    else if (audioSourceOpts.AudioSource == AudioSourcesEnum.Music)
    {
        string newAudioFile = audioSourceOpts.SourceFile ?? DEFAULT_AUDIO_SOURCE_FILE;

        // Check whether this is the initial load or whether the source file is the same.
        // If it is there's no need to do anything.
        if (_audioStreamReader == null || newAudioFile != _audioOpts.SourceFile)
        {
            if (!File.Exists(newAudioFile))
            {
                if (File.Exists(DEFAULT_AUDIO_SOURCE_FILE))
                {
                    Log.LogWarning($"The requested audio source file could not be found {newAudioFile}, falling back to default.");
                    newAudioFile = DEFAULT_AUDIO_SOURCE_FILE;
                }
                else
                {
                    Log.LogError($"The requested audio source file could not be found {newAudioFile}, no audio source will be initialised.");
                    newAudioFile = null;
                }
            }

            if (newAudioFile != null)
            {
                // Fix: dispose any previous reader before replacing it; the old
                // version leaked the underlying file stream every time the source
                // file changed. NOTE(review): assumes no timer callback is reading
                // from the old reader at this moment — confirm call ordering.
                _audioStreamReader?.Dispose();
                _audioStreamReader = new StreamReader(newAudioFile);
            }
        }
    }

    if (_rtpAudioTimestampPeriod == 0)
    {
        // The RTP timestamp advances by the number of samples in each period:
        // clockRate (samples/s) * periodMs / 1000, e.g. 8000 * 20 / 1000 = 160.
        // Fix: the previous calculation divided the clock rate by the period in
        // milliseconds, which is dimensionally incorrect (this matches the formula
        // used by the two-argument SetAudioSource overload in this project).
        _rtpAudioTimestampPeriod = (uint)(SDPMediaFormatInfo.GetClockRate(SDPMediaFormatsEnum.PCMU) * AUDIO_SAMPLE_PERIOD_MILLISECONDS / 1000);
    }
}