/// <summary>
/// Reads the entire MP3 wave provider and converts it to mono 16-bit PCM at
/// <c>INPUT_SAMPLE_RATE</c>, returning the resampled audio as a byte array.
/// </summary>
/// <returns>The full resampled PCM byte stream.</returns>
private byte[] GetMP3Bytes()
{
    List<byte> resampledBytesList = new List<byte>();

    var waveProvider = GetMP3WaveProvider();

    Logger.Info($"Convert to Mono 16bit PCM {INPUT_SAMPLE_RATE}Hz from {waveProvider.WaveFormat}");

    //loop through in up to 1 second chunks, resampling as we go
    var resample = new EventDrivenResampler(waveProvider.WaveFormat, new WaveFormat(INPUT_SAMPLE_RATE, 1));

    try
    {
        // Buffer is 2x a second of source audio so a full one-second read always fits
        byte[] buffer = new byte[waveProvider.WaveFormat.AverageBytesPerSecond * 2];
        int read;

        while ((read = waveProvider.Read(buffer, 0, waveProvider.WaveFormat.AverageBytesPerSecond)) > 0)
        {
            resampledBytesList.AddRange(resample.ResampleBytes(buffer, read));
        }
    }
    finally
    {
        // Previously leaked - StopEncoding() disposes the same resampler type
        // this way, so release it here too once the stream is drained
        resample.Dispose(true);
    }

    Logger.Info($"Converted to Mono 16bit PCM {INPUT_SAMPLE_RATE}Hz from {waveProvider.WaveFormat}");

    return resampledBytesList.ToArray();
}
/// <summary>
/// Stops audio capture and playback and releases every audio resource
/// (capture device, resampler, output device, buffers, codecs, Speex
/// preprocessor and the recording wave file), then resets the VU meters.
/// Null-conditional access plus nulling each field makes this safe to call
/// repeatedly under the lock.
/// </summary>
public void StopEncoding()
{
    lock (lockob)
    {
        _wasapiCapture?.StopRecording();
        _wasapiCapture?.Dispose();
        _wasapiCapture = null;

        _resampler?.Dispose(true);
        _resampler = null;

        _waveOut?.Dispose();
        _waveOut = null;

        // Note: original code cleared/nulled _playBuffer a second time further
        // down - that was dead code (the field was already null) and is removed.
        _playBuffer?.ClearBuffer();
        _playBuffer = null;

        _encoder?.Dispose();
        _encoder = null;

        _decoder?.Dispose();
        _decoder = null;

        _speex?.Dispose();
        _speex = null;

        _waveFile?.Flush();
        _waveFile?.Dispose();
        _waveFile = null;

        // Drop the VU meters back to the silence floor
        SpeakerMax = -100;
        MicMax = -100;
    }
}
/// <summary>
/// WASAPI capture callback. Resamples the captured audio to mono 16-bit PCM at
/// <c>AudioManager.INPUT_SAMPLE_RATE</c>, accumulates samples in a queue so
/// partial frames carry over between callbacks, then for each full
/// <c>SEGMENT_FRAMES</c>-sized frame: runs Speex preprocessing, updates the mic
/// VU level, Opus-encodes the frame and decodes it straight back into the
/// playback buffer (presumably a mic loopback/monitor path - confirm with caller).
/// </summary>
private void WasapiCaptureOnDataAvailable(object sender, WaveInEventArgs e)
{
    if (_resampler == null)
    {
        // Lazy init - the capture device's format is only known once data arrives
        _resampler = new EventDrivenResampler(windowsN, _wasapiCapture.WaveFormat,
            new WaveFormat(AudioManager.INPUT_SAMPLE_RATE, 16, 1));
    }

    if (e.BytesRecorded > 0)
    {
        short[] resampledPCM16Bit = _resampler.Resample(e.Buffer, e.BytesRecorded);

        // Queue everything so leftover samples persist until the next callback
        for (var i = 0; i < resampledPCM16Bit.Length; i++)
        {
            _micInputQueue.Enqueue(resampledPCM16Bit[i]);
        }

        // Drain the queue one full segment at a time; pcmShort is reset to null
        // at the bottom of each iteration, so the first clause only keeps the
        // loop alive for the frame currently being processed
        short[] pcmShort = null;
        while ((pcmShort != null) || (_micInputQueue.Count >= AudioManager.SEGMENT_FRAMES))
        {
            if (pcmShort == null)
            {
                pcmShort = new short[AudioManager.SEGMENT_FRAMES];
                for (var i = 0; i < AudioManager.SEGMENT_FRAMES; i++)
                {
                    pcmShort[i] = _micInputQueue.Dequeue();
                }
            }

            try
            {
                //process with Speex
                _speex.Process(new ArraySegment<short>(pcmShort));

                // Peak detection over the frame. Widen to int before Math.Abs so
                // short.MinValue (-32768) cannot overflow, and use the absolute
                // value so negative excursions register - the original compared
                // raw samples and so ignored the negative half of the waveform.
                float max = 0;
                for (var i = 0; i < pcmShort.Length; i++)
                {
                    var magnitude = Math.Abs((int)pcmShort[i]);
                    if (magnitude > max)
                    {
                        max = magnitude;
                    }
                }

                //convert normalized peak to dB for the mic VU meter
                MicMax = (float)VolumeConversionHelper.ConvertFloatToDB(max / 32768F);

                var pcmBytes = new byte[pcmShort.Length * 2];
                Buffer.BlockCopy(pcmShort, 0, pcmBytes, 0, pcmBytes.Length);

                //encode as opus bytes - framing must be exactly SEGMENT_FRAMES samples
                int len;
                var buff = _encoder.Encode(pcmBytes, pcmBytes.Length, out len);

                if ((buff != null) && (len > 0))
                {
                    // Copy into a right-sized buffer - the encoder's buffer is oversized
                    var encoded = new byte[len];
                    Buffer.BlockCopy(buff, 0, encoded, 0, len);

                    // Decode the frame straight back and queue it for playback
                    var decodedLength = 0;
                    var decodedBytes = _decoder.Decode(encoded, len, out decodedLength);
                    _buffBufferedWaveProvider.AddSamples(decodedBytes, 0, decodedLength);
                }
                else
                {
                    Logger.Error(
                        $"Invalid Bytes for Encoding - {e.BytesRecorded} should be {AudioManager.SEGMENT_FRAMES} ");
                }
            }
            catch (Exception ex)
            {
                Logger.Error(ex, "Error encoding Opus! " + ex.Message);
            }

            // Frame consumed - allow the loop to pull the next full segment (if any)
            pcmShort = null;
        }
    }
}