/// <summary>
/// Disposes this stream
/// </summary>
/// <param name="disposing">true if the user called this</param>
protected override void Dispose(bool disposing)
{
    if (!disposing)
    {
        // Reached from the finalizer: the owner never called Dispose.
        System.Diagnostics.Debug.Assert(false, "WaveFormatConversionStream was not disposed");
    }
    else
    {
        // Release the managed resources this stream owns.
        if (conversionStream != null)
        {
            conversionStream.Dispose();
            conversionStream = null;
        }
        if (sourceStream != null)
        {
            sourceStream.Dispose();
            sourceStream = null;
        }
    }
    base.Dispose(disposing);
}
// Builds a loopback recorder that converts captured audio into the codec's record format.
internal WasapiLoopbackRecorder(INetworkChatCodec c)
{
    codec = c;
    // Loopback capture is not addressed via a waveIn device index.
    deviceNumber = -1;
    waveIn = new WasapiLoopbackCapture();
    // Convert from a 16-bit view of the loopback mix format (same rate/channels)
    // into the codec's record format. NOTE: the field name "convertionStream" is a
    // pre-existing typo kept for compatibility with the rest of the class.
    var capturePcmFormat = new WaveFormat(waveIn.WaveFormat.SampleRate, 16, waveIn.WaveFormat.Channels);
    convertionStream = new AcmStream(capturePcmFormat, codec.RecordFormat);
}
/// <summary>
/// Disposes of this MP3 frame decompressor
/// </summary>
public void Dispose()
{
    var stream = this.conversionStream;
    if (stream == null)
    {
        return;
    }
    stream.Dispose();
    this.conversionStream = null;
}
// Wraps a source provider in an ACM conversion to the requested target format.
public WaveFormatConversionProvider(WaveFormat targetFormat, IWaveProvider sourceProvider)
{
    this.sourceProvider = sourceProvider;
    this.WaveFormat = targetFormat;
    this.conversionStream = new AcmStream(sourceProvider.WaveFormat, targetFormat);
    // Read at most one second of source audio per pass, bounded by the ACM
    // source buffer, then rounded down to a whole number of source blocks.
    int readSize = Math.Min(sourceProvider.WaveFormat.AverageBytesPerSecond,
                            this.conversionStream.SourceBuffer.Length);
    readSize -= readSize % sourceProvider.WaveFormat.BlockAlign;
    this.preferredSourceReadSize = readSize;
}
// Converts a buffer from the record format to the encode format via the shared
// Convert helper, carrying over any source bytes the codec declined last call.
public byte[] Encode(byte[] data, int offset, int length)
{
    // Build the ACM encode stream lazily on first use.
    if (this.encodeStream == null)
    {
        this.encodeStream = new AcmStream(this.RecordFormat, this.encodeFormat);
    }
    return Convert(this.encodeStream, data, offset, length, ref this.encodeSourceBytesLeftovers);
}
// Converts a buffer from the encode format back to the record format,
// carrying over any source bytes the codec declined last call.
public byte[] Decode(byte[] data, int offset, int length)
{
    // Build the ACM decode stream lazily on first use.
    if (decodeStream == null)
    {
        decodeStream = new AcmStream(encodeFormat, RecordFormat);
    }
    return Convert(decodeStream, data, offset, length, ref decodeSourceBytesLeftovers);
}
// Releases the ACM conversion stream, if one was created.
public void Dispose()
{
    if (conversionStream == null)
    {
        return;
    }
    conversionStream.Dispose();
    conversionStream = null;
}
// Decodes compressed audio back into the record format. The decode stream is
// created on first use and reused; leftover source bytes are tracked across calls.
public byte[] Decode(byte[] data, int offset, int length)
{
    if (decodeStream == null)
    {
        decodeStream = new AcmStream(encodeFormat, RecordFormat);
    }
    return Convert(decodeStream, data, offset, length, ref decodeSourceBytesLeftovers);
}
// Encodes record-format audio into the codec's transmit format. The encode
// stream is created on first use; leftover source bytes carry across calls.
public byte[] Encode(byte[] data, int offset, int length)
{
    if (this.encodeStream == null)
    {
        this.encodeStream = new AcmStream(this.RecordFormat, this.encodeFormat);
    }
    return Convert(this.encodeStream, data, offset, length, ref this.encodeSourceBytesLeftovers);
}
// Resamples (and, if needed, channel-converts) a PCM buffer toward destPcmFormat
// using cached static AcmStreams that are rebuilt whenever the source/dest format
// pair changes. Returns the rate-converter's DestBuffer (valid for resultLength
// bytes) or null when the rate conversion did not consume all input.
// NOTE(review): the returned array is a shared internal buffer — callers must
// copy out resultLength bytes before the next call. TODO confirm against callers.
private static byte[] ResamplePcm(ref byte[] toResample, ref int sourceLength, WaveFormat sourceFormat, WaveFormat destPcmFormat, out int resultLength)
{
    Debug.Assert(destPcmFormat.Encoding == WaveFormatEncoding.Pcm, "Codec format must be PCM");
    // Drop the cached rate stream if the format pair changed since last call.
    if (resampleRateStream != null && (!lastResampleSourceFormat.Equals(sourceFormat) || !lastResampleDestFormat.Equals(destPcmFormat)))
    {
        resampleRateStream.Dispose();
        resampleRateStream = null;
    }
    if (resampleRateStream == null)
    {
        // Rate stage: same sample rate/bits as the source but the destination's
        // channel count, converted to the destination format.
        WaveFormat sourceRateFormat = new WaveFormat(sourceFormat.SampleRate, sourceFormat.BitsPerSample, destPcmFormat.Channels);
        resampleRateStream = new AcmStream(sourceRateFormat, destPcmFormat);
        if (sourceFormat.Channels != destPcmFormat.Channels)
        {
            // Channel stage: convert channel count first, at the source rate.
            WaveFormat destChanFormat = new WaveFormat(sourceFormat.SampleRate, sourceFormat.BitsPerSample, destPcmFormat.Channels);
            if (resampleChannelStream != null)
            {
                resampleChannelStream.Dispose();
            }
            resampleChannelStream = new AcmStream(sourceFormat, destChanFormat);
        }
        lastResampleSourceFormat = sourceFormat;
        lastResampleDestFormat = destPcmFormat;
    }
    int bytesConverted;
    if (sourceFormat.Channels != destPcmFormat.Channels)
    {
        if (destPcmFormat.Channels == 1 && sourceFormat.Channels == 2)
        {
            // Stereo -> mono is done by a local mixdown rather than ACM.
            toResample = MixStereoToMono(toResample, sourceLength);
            sourceLength = toResample.Length;
        }
        else
        {
            Buffer.BlockCopy(toResample, 0, resampleChannelStream.SourceBuffer, 0, sourceLength);
            // Convert returns dest bytes; bytesConverted receives source bytes consumed.
            sourceLength = resampleChannelStream.Convert(sourceLength, out bytesConverted);
            // presumably this check assumes a 2:1 source-to-dest byte ratio
            // (channel halving) — TODO confirm for other channel combinations
            if (bytesConverted >> 1 != sourceLength)
            {
                Console.WriteLine("WARNING: All input bytes were not converted.");
            }
            toResample = resampleChannelStream.DestBuffer;
        }
    }
    // Rate stage: push the (possibly channel-converted) audio through the resampler.
    Buffer.BlockCopy(toResample, 0, resampleRateStream.SourceBuffer, 0, sourceLength);
    resultLength = resampleRateStream.Convert(sourceLength, out bytesConverted);
    if (bytesConverted != sourceLength)
    {
        Console.WriteLine("WARNING: All input bytes were not converted.");
        return(null);
    }
    return(resampleRateStream.DestBuffer);
}
/// <summary>
/// Creates a stream that can convert to PCM
/// </summary>
/// <param name="sourceStream">The source stream</param>
/// <returns>A PCM stream</returns>
public static WaveStream CreatePcmStream(WaveStream sourceStream)
{
    // Already PCM: nothing to convert.
    if (sourceStream.WaveFormat.Encoding == WaveFormatEncoding.Pcm)
    {
        return sourceStream;
    }
    // Ask ACM to suggest a PCM target and wrap the source in a converter.
    var suggestedFormat = AcmStream.SuggestPcmFormat(sourceStream.WaveFormat);
    return new WaveFormatConversionStream(suggestedFormat, sourceStream);
}
/// <summary>
/// Create a new WaveFormat conversion stream
/// </summary>
/// <param name="targetFormat">Desired output format</param>
/// <param name="sourceStream">Source stream</param>
public WaveFormatConversionStream(WaveFormat targetFormat, WaveStream sourceStream)
{
    this.sourceStream = sourceStream;
    this.targetFormat = targetFormat;
    conversionStream = new AcmStream(sourceStream.WaveFormat, targetFormat);
    try
    {
        // work out how many bytes the entire input stream will convert to
        length = SourceToDest((int)sourceStream.Length);
        blockAlign = SourceToDest(sourceStream.BlockAlign);
    }
    catch
    {
        // SourceToDest can fail for streams/formats the codec rejects;
        // don't leak the unmanaged ACM handle in that case.
        conversionStream.Dispose();
        conversionStream = null;
        throw;
    }
    position = 0;
}
// Disk-writer thread body: drains the circular buffer of captured samples into a
// 48 kHz 16-bit mono WAV file until _diskWriterRunning is cleared, then flushes
// whatever the producer queued before shutdown.
private void DiskWriterThread()
{
    // Audio mode: tap the AF processor for samples while this thread runs.
    if (_recordingMode == RecordingMode.Audio)
    {
        _AFProcessor.AudioReady += AudioSamplesIn;
        _AFProcessor.Enabled = true;
    }
    int input_rate = (int)_sampleRate;
    // Output format is fixed; input is resampled down/up to match.
    WaveFormat outFormat = new WaveFormat(48000, 16, 1);
    resampleStream = new AcmStream(new WaveFormat(input_rate, 16, 1), outFormat);
    TrxwaveFile = new WaveFileWriter(FileName, outFormat);
    while (_diskWriterRunning)
    {
        // Buffer empty: block until the producer signals new data (or shutdown).
        if (_circularBufferTail == _circularBufferHead)
        {
            _bufferEvent.WaitOne();
        }
        // Re-check both flags: the event may also be set to wake us for shutdown.
        if (_diskWriterRunning && _circularBufferTail != _circularBufferHead)
        {
            if (_recordingMode == RecordingMode.Audio)
            {
                // presumably Length*2 converts the buffer length into the float
                // sample count ScaleAudio expects — TODO confirm
                ScaleAudio(_floatCircularBufferPtrs[_circularBufferTail], _circularBuffers[_circularBufferTail].Length * 2);
            }
            Write(_floatCircularBufferPtrs[_circularBufferTail], _circularBuffers[_circularBufferTail].Length);
            _circularBufferUsedCount--;
            _circularBufferTail++;
            // _bufferCount is assumed to be a power of two; masking wraps the index.
            _circularBufferTail &= (_bufferCount - 1);
        }
    }
    // Shutdown: drain remaining queued buffers without scaling.
    while (_circularBufferTail != _circularBufferHead)
    {
        if (_floatCircularBufferPtrs[_circularBufferTail] != null)
        {
            Write(_floatCircularBufferPtrs[_circularBufferTail], _circularBuffers[_circularBufferTail].Length);
        }
        _circularBufferTail++;
        _circularBufferTail &= (_bufferCount - 1);
    }
    // Detach from the AF processor before exiting.
    if (_recordingMode == RecordingMode.Audio)
    {
        _AFProcessor.Enabled = false;
        _AFProcessor.AudioReady -= AudioSamplesIn;
    }
    _diskWriterRunning = false;
}
/// <summary>
/// Creates a new ACM frame decompressor
/// </summary>
/// <param name="sourceFormat">The MP3 source format</param>
/// <param name="destFormat">The PCM format to decompress into</param>
public VbrAcmMp3FrameDecompressor(WaveFormat sourceFormat, WaveFormat destFormat)
{
    pcmFormat = destFormat;
    try
    {
        conversionStream = new AcmStream(sourceFormat, pcmFormat);
    }
    catch (Exception)
    {
        // Construction failed: mark as disposed so the finalizer has nothing to clean up.
        disposed = true;
        GC.SuppressFinalize(this);
        throw;
    }
}
// Streamer thread body: drains the circular buffer of captured samples through
// the 48 kHz resampler and Write() until _streamerRunning is cleared, then
// flushes whatever the producer queued before shutdown.
private void StreamerThread()
{
    // Audio mode: tap the audio processor for samples while this thread runs.
    if (_recordingMode == RecordingMode.Audio)
    {
        _audioProcessor.AudioReady += AudioSamplesIn;
        _audioProcessor.Enabled = true;
    }
    int input_rate = (int)_sampleRate;
    // Resample from the capture rate to fixed 48 kHz, 16-bit mono.
    resampleStream = new AcmStream(new WaveFormat(input_rate, 16, 1), new WaveFormat(48000, 16, 1));
    CurrentCounter = 0;
    LastCheck = DateTime.Now;
    while (_streamerRunning)
    {
        // Buffer empty: block until the producer signals new data (or shutdown).
        if (_circularBufferTail == _circularBufferHead)
        {
            _bufferEvent.WaitOne();
        }
        // Re-check both flags: the event may also be set to wake us for shutdown.
        if (_streamerRunning && _circularBufferTail != _circularBufferHead)
        {
            if (_recordingMode == RecordingMode.Audio)
            {
                // presumably Length*2 converts the buffer length into the float
                // sample count ScaleAudio expects — TODO confirm
                ScaleAudio(_floatCircularBufferPtrs[_circularBufferTail], _circularBuffers[_circularBufferTail].Length * 2);
            }
            Write(_floatCircularBufferPtrs[_circularBufferTail], _circularBuffers[_circularBufferTail].Length);
            _circularBufferUsedCount--;
            _circularBufferTail++;
            // _bufferCount is assumed to be a power of two; masking wraps the index.
            _circularBufferTail &= (_bufferCount - 1);
        }
    }
    // Shutdown: drain remaining queued buffers without scaling.
    while (_circularBufferTail != _circularBufferHead)
    {
        if (_floatCircularBufferPtrs[_circularBufferTail] != null)
        {
            Write(_floatCircularBufferPtrs[_circularBufferTail], _circularBuffers[_circularBufferTail].Length);
        }
        _circularBufferTail++;
        _circularBufferTail &= (_bufferCount - 1);
    }
    // Detach from the audio processor before exiting.
    if (_recordingMode == RecordingMode.Audio)
    {
        _audioProcessor.Enabled = false;
        _audioProcessor.AudioReady -= AudioSamplesIn;
    }
    _streamerRunning = false;
}
// Tears down both cached resampling streams; either may never have been created.
internal static void Dispose()
{
    var channelStream = resampleChannelStream;
    if (channelStream != null)
    {
        channelStream.Dispose();
        resampleChannelStream = null;
    }
    var rateStream = resampleRateStream;
    if (rateStream != null)
    {
        rateStream.Dispose();
        resampleRateStream = null;
    }
}
// Releases the encode and decode ACM streams, if they were created.
public void Dispose()
{
    encodeStream?.Dispose();
    encodeStream = null;
    decodeStream?.Dispose();
    decodeStream = null;
}
// Releases the encode and decode ACM streams, if they were created.
public void Dispose()
{
    _encodeStream?.Dispose();
    _encodeStream = null;
    _decodeStream?.Dispose();
    _decodeStream = null;
}
// Releases the encoder and decoder, if they were created.
public void Dispose()
{
    _encoder?.Dispose();
    _encoder = null;
    _decoder?.Dispose();
    _decoder = null;
}
// Idempotent dispose: runs at most once and releases the conversion stream.
// (Rewritten from decompiler output; 'disposing' is not consulted, matching
// the original behavior.)
protected virtual void Dispose(bool disposing)
{
    if (this.isDisposed)
    {
        return;
    }
    this.isDisposed = true;
    this.conversionStream?.Dispose();
}
// Builds an MP3 frame decompressor, letting ACM suggest the PCM output format.
public AcmMp3FrameDecompressor(WaveFormat sourceFormat)
{
    pcmFormat = AcmStream.SuggestPcmFormat(sourceFormat);
    try
    {
        conversionStream = new AcmStream(sourceFormat, pcmFormat);
    }
    catch (Exception)
    {
        // Construction failed: mark as disposed so the finalizer has nothing to clean up.
        disposed = true;
        GC.SuppressFinalize(this);
        throw;
    }
}
// Releases the encode and decode ACM streams, if they were created.
public void Dispose()
{
    if (EncodeStream != null)
    {
        EncodeStream.Dispose();
        EncodeStream = null;
    }
    if (DecodeStream != null)
    {
        DecodeStream.Dispose();
        DecodeStream = null;
    }
}
// Builds the per-source audio processing pipeline: optional ACM resampler from the
// source format to outFormat, silence/noise handling, optional MDC1200/STAR
// decoders, the root decoder (FleetSync/P25), and the signaling-triggered recorder.
public ProcessorWaveProvider(string sourceName, IWaveProvider sourceWaveProvider, string waveFilePath, WaveFormat outFormat, Common.ProcessRadioSignalingItemDelegate sigDelegate, Action <bool> hasPropertyChanged, bool recordEnabled, Common.SignalRecordingType recordType, int recordKickTime, Common.NoiseFloor noiseFloor, int customNoiseFloor, bool removeNoise, bool decodeMDC1200, bool decodeGEStar, bool decodeFleetSync, bool decodeP25) : base(sourceWaveProvider, waveFilePath)
{
    LastValidStreamTitle = string.Empty;
    _sourceName = sourceName;
    _sourceFormat = sourceWaveProvider.WaveFormat;
    _outFormat = outFormat;
    _hasPropertyChanged = hasPropertyChanged;
    // AverageBytesPerSecond / bytes-per-sample = samples per second fed to the silence helper.
    _silenceHelper = new SilenceHelper(outFormat.AverageBytesPerSecond / (outFormat.BitsPerSample / 8), noiseFloor, removeNoise, customNoiseFloor);
    // Only build an ACM resampler when the source format differs from the target.
    if (outFormat.Equals(sourceWaveProvider.WaveFormat))
    {
        _resampleStream = null;
        _useResampler = false;
    }
    else
    {
        if (Common.AppSettings.Instance.DiagnosticMode)
        {
            Common.ConsoleHelper.ColorWriteLine(ConsoleColor.Magenta, "{0}: Source Format <> Out Format [{1}] <> [{2}]", sourceName, sourceWaveProvider.WaveFormat, outFormat);
        }
        _resampleStream = new NAudio.Wave.Compression.AcmStream(sourceWaveProvider.WaveFormat, outFormat);
        _useResampler = true;
    }
    // Optional signaling decoders; null when the corresponding flag is off.
    if (decodeMDC1200)
    {
        _mdc = new Decoders.MDC1200(outFormat.SampleRate, ProcessMDC1200, sourceName);
    }
    else
    {
        _mdc = null;
    }
    if (decodeGEStar)
    {
        _star = new Decoders.STAR(outFormat.SampleRate, ProcessSTAR, Decoders.STAR.star_format.star_format_1_16383, sourceName);
    }
    else
    {
        _star = null;
    }
    _rootDecoder = new Decoders.RootDecoder(outFormat.SampleRate, decodeFleetSync, decodeP25, ProcessRootDecoder);
    _recorder = new AudioRecorder(sourceName, recordType, recordKickTime, outFormat, AudioProcessingGlobals.DefaultSaveFileWaveFormat, recordEnabled);
    _bytesPerSample = outFormat.BitsPerSample / 8;
    _encoding = outFormat.Encoding;
    _sigDelegate = sigDelegate;
}
// Pushes (leftover + new) source bytes through the ACM stream, keeps any bytes
// the codec declined to consume for the next call, and returns a fresh array
// holding exactly the converted output.
private static byte[] Convert(AcmStream conversionStream, byte[] data, int offset, int length, ref int sourceBytesLeftovers)
{
    // Append the new data after any unconsumed bytes from the previous call.
    int totalSourceBytes = length + sourceBytesLeftovers;
    Array.Copy(data, offset, conversionStream.SourceBuffer, sourceBytesLeftovers, length);
    int destBytes = conversionStream.Convert(totalSourceBytes, out var consumed);
    sourceBytesLeftovers = totalSourceBytes - consumed;
    if (sourceBytesLeftovers > 0)
    {
        // Slide unconsumed source bytes to the front of the buffer.
        Array.Copy(conversionStream.SourceBuffer, consumed, conversionStream.SourceBuffer, 0, sourceBytesLeftovers);
    }
    var result = new byte[destBytes];
    Array.Copy(conversionStream.DestBuffer, 0, result, 0, destBytes);
    return result;
}
// Captures system loopback audio for ~5 seconds, resamples it to 48 kHz 16-bit
// stereo PCM, and writes it to the Discord voice stream.
private async Task PlayAudioAsync(IAudioClient client)
{
    var discord = client.CreatePCMStream(AudioApplication.Music);
    WasapiLoopbackCapture CaptureInstance = new WasapiLoopbackCapture(WasapiLoopbackCapture.GetDefaultLoopbackCaptureDevice());
    // Create the resampler ONCE: the original built (and leaked) a new unmanaged
    // AcmStream on every DataAvailable callback.
    // The original formats (41000 -> 4800) were invalid sample rates — the author's
    // own comment said the conversion "causes demonic screeching". Discord expects
    // 48 kHz 16-bit stereo PCM.
    // NOTE(review): the source format should really match CaptureInstance.WaveFormat
    // (WASAPI loopback typically delivers IEEE float) — TODO confirm on target machine.
    var resampleStream = new AcmStream(new WaveFormat(44100, 16, 2), new WaveFormat(48000, 16, 2));
    CaptureInstance.DataAvailable += (s, a) =>
    {
        // Copy only the bytes actually recorded this callback.
        Buffer.BlockCopy(a.Buffer, 0, resampleStream.SourceBuffer, 0, a.BytesRecorded);
        // Convert the recorded byte count, not the full (larger) backing buffer.
        int convertedBytes = resampleStream.Convert(a.BytesRecorded, out int sourceBytesConverted);
        if (sourceBytesConverted != a.BytesRecorded)
        {
            // Original format string had placeholders but no arguments.
            Console.WriteLine("We didn't convert everything {0} bytes in, {1} bytes converted",
                a.BytesRecorded, sourceBytesConverted);
        }
        var converted = new byte[convertedBytes];
        Buffer.BlockCopy(resampleStream.DestBuffer, 0, converted, 0, convertedBytes);
        // Write the converted length, not the raw recorded length.
        discord.Write(converted, 0, convertedBytes);
    };
    CaptureInstance.RecordingStopped += (s, a) =>
    {
        Console.WriteLine("Stopped Recording!");
        // Release the unmanaged ACM handle along with the capture and sink.
        resampleStream.Dispose();
        CaptureInstance.Dispose();
        discord.Dispose();
    };
    CaptureInstance.StartRecording();
    await Task.Delay(5000);
    CaptureInstance.StopRecording();
    await Task.Delay(5000);
}
// Runs (leftover + new) source bytes through the ACM codec. Bytes the codec does
// not consume are shifted to the buffer front and reported back via
// sourceBytesLeftovers for the caller's next invocation.
private static byte[] Convert(AcmStream conversionStream, byte[] data, int offset, int length, ref int sourceBytesLeftovers)
{
    int pending = length + sourceBytesLeftovers;
    Array.Copy(data, offset, conversionStream.SourceBuffer, sourceBytesLeftovers, length);
    int produced = conversionStream.Convert(pending, out var sourceBytesConverted);
    int remaining = pending - sourceBytesConverted;
    if (remaining > 0)
    {
        // Shift the leftovers down for the next call.
        Array.Copy(conversionStream.SourceBuffer, sourceBytesConverted, conversionStream.SourceBuffer, 0, remaining);
    }
    sourceBytesLeftovers = remaining;
    byte[] encoded = new byte[produced];
    Array.Copy(conversionStream.DestBuffer, 0, encoded, 0, produced);
    return encoded;
}
// Wraps a non-PCM stream in a PCM converter. ACM occasionally suggests a bogus
// sub-8kHz format; only the known G.723 case is patched up, anything else throws.
public static WaveStream CreatePcmStream(WaveStream sourceStream)
{
    if (sourceStream.WaveFormat.Encoding == WaveFormatEncoding.Pcm)
    {
        return sourceStream;
    }
    var suggested = AcmStream.SuggestPcmFormat(sourceStream.WaveFormat);
    if (suggested.SampleRate >= 8000)
    {
        return new WaveFormatConversionStream(suggested, sourceStream);
    }
    if (sourceStream.WaveFormat.Encoding == WaveFormatEncoding.G723)
    {
        // Known-good substitute for G.723 input.
        return new WaveFormatConversionStream(new WaveFormat(8000, 16, 1), sourceStream);
    }
    throw new InvalidOperationException("Invalid suggested output format, please explicitly provide a target format");
}
// Denoises a WAV file with RNNoise: reads frames, resamples each to the 48 kHz
// mono format RNNoise expects, runs the model, resamples back to the original
// format, and writes "<name>_out<ext>" next to the input.
static void Main(string[] args)
{
    var fi = new FileInfo(args[0]);
    var fn = fi.FullName;
    using (var wfr = new WaveFileReader(fn))
    {
        var wf0 = wfr.WaveFormat;
        // RNNoise operates on 48 kHz mono frames.
        var wf1 = new WaveFormat(48000, 1);
        // size0: bytes of source audio per RNNoise frame; size1: bytes per 16-bit frame.
        var size0 = wf0.AverageBytesPerSecond * RNNoiseCLR.FRAME_SIZE / 48000;
        var size1 = sizeof(short) * RNNoiseCLR.FRAME_SIZE;
        using (var acmr = new AcmStream(wf0, wf1))                 // source -> 48k mono
        using (var wfw = new WaveFileWriter(fn.Substring(0, fn.Length - fi.Extension.Length) + "_out" + fi.Extension, wf0))
        using (var acmw = new AcmStream(wf1, wf0))                 // 48k mono -> source
        using (var rnn = new RNNoiseCLR())
        {
            var samples = new short[RNNoiseCLR.FRAME_SIZE];
            var _samples = new float[RNNoiseCLR.FRAME_SIZE];
            int read;
            while ((read = wfr.Read(acmr.SourceBuffer, 0, size0)) > 0)
            {
                var converted = acmr.Convert(read, out _);
                // Zero-pad a short final frame so the model sees a full frame.
                for (var i = converted; i < size1; ++i)
                {
                    acmr.DestBuffer[i] = 0;
                }
                Buffer.BlockCopy(acmr.DestBuffer, 0, samples, 0, size1);
                // presumably _32767 is a 1/32767 scale constant mapping shorts to
                // [-1, 1] floats — TODO confirm its declaration
                for (var i = 0; i < RNNoiseCLR.FRAME_SIZE; ++i)
                {
                    _samples[i] = samples[i] * _32767;
                }
                rnn.Transform(_samples, _samples);
                // Back to 16-bit integers for the write-side resampler.
                for (var i = 0; i < RNNoiseCLR.FRAME_SIZE; ++i)
                {
                    samples[i] = (short)(_samples[i] * 32767);
                }
                Buffer.BlockCopy(samples, 0, acmw.SourceBuffer, 0, converted);
                wfw.Write(acmw.DestBuffer, 0, acmw.Convert(converted, out _));
                wfw.Flush();
            }
        }
    }
}
/// <summary>
/// Create a new WaveFormat conversion stream
/// </summary>
/// <param name="targetFormat">Desired output format</param>
/// <param name="sourceStream">Source stream</param>
public WaveFormatConversionStream(WaveFormat targetFormat, WaveStream sourceStream)
{
    this.targetFormat = targetFormat;
    this.sourceStream = sourceStream;
    conversionStream = new AcmStream(sourceStream.WaveFormat, targetFormat);
    try
    {
        // Work out how many bytes the entire input stream will convert to;
        // either call can throw for formats the codec rejects.
        length = SourceToDest((int)sourceStream.Length);
        GetBlockAlign(targetFormat, sourceStream);
    }
    catch
    {
        // Don't leak the ACM handle if setup fails.
        Dispose();
        throw;
    }
    position = 0;
}
// Resamples 16-bit mono PCM from sampleRate to OutSampleRate and streams it to a
// Discord voice sink, chunking when the audio exceeds the ACM source buffer.
// NOTE(review): 'channels' is accepted but unused — both formats are hard-wired
// to mono; confirm against callers before removing.
public static async Task ConvertForDiscord(byte[] audiodata, int sampleRate, int channels, VoiceTransmitSink discordTarget, ILogger logger)
{
    var resampleStream = new AcmStream(new WaveFormat(sampleRate, 16, 1), new WaveFormat(OutSampleRate, 16, 1));
    try
    {
        if (audiodata.Length > resampleStream.SourceBuffer.Length)
        {
            int offset = 0;
            logger.LogInformation("Large audio returned, the copy will need to be streamed in");
            int remaining = audiodata.Length - offset;
            while (remaining > 0)
            {
                // Clear both buffers so a short final chunk isn't padded with stale data.
                Array.Clear(resampleStream.SourceBuffer, 0, resampleStream.SourceBuffer.Length);
                Array.Clear(resampleStream.DestBuffer, 0, resampleStream.DestBuffer.Length);
                int copyamount = remaining > resampleStream.SourceBuffer.Length ? resampleStream.SourceBuffer.Length : remaining;
                Buffer.BlockCopy(audiodata, offset, resampleStream.SourceBuffer, 0, copyamount);
                var convertedBytes = resampleStream.Convert(copyamount, out int sourceBytesConverted);
                if (sourceBytesConverted != copyamount)
                {
                    logger.LogError("Resample didn't produce correct bytestream");
                    break;
                }
                // Send only the bytes the codec produced — the original wrote the
                // whole DestBuffer, pushing trailing garbage/silence to Discord.
                await discordTarget.WriteAsync(resampleStream.DestBuffer, 0, convertedBytes);
                offset += copyamount;
                remaining = audiodata.Length - offset;
            }
        }
        else
        {
            Buffer.BlockCopy(audiodata, 0, resampleStream.SourceBuffer, 0, audiodata.Length);
            var convertedBytes = resampleStream.Convert(audiodata.Length, out int sourceBytesConverted);
            if (sourceBytesConverted != audiodata.Length)
            {
                logger.LogError("Resample didn't produce correct bytestream");
            }
            // Send only the converted bytes, not the entire backing buffer.
            await discordTarget.WriteAsync(resampleStream.DestBuffer, 0, convertedBytes);
        }
    }
    finally
    {
        // AcmStream wraps an unmanaged codec handle; the original never disposed it.
        resampleStream.Dispose();
    }
}