/// <summary>
/// WaveStream to resample using the DMO Resampler
/// </summary>
/// <param name="inputProvider">Input Stream</param>
/// <param name="outputFormat">Desired Output Format</param>
/// <exception cref="ArgumentException">The resampler DMO does not support the input or output format</exception>
public ResamplerDmoStream(IWaveProvider inputProvider, WaveFormat outputFormat)
{
    this.inputProvider = inputProvider;
    // null if the input is a raw IWaveProvider (no position/length support)
    this.inputStream = inputProvider as WaveStream;
    this.outputFormat = outputFormat;
    this.dmoResampler = new DmoResampler();
    if (!dmoResampler.MediaObject.SupportsInputWaveFormat(0, inputProvider.WaveFormat))
    {
        // FIX: paramName previously said "inputStream", which is not a parameter of this constructor
        throw new ArgumentException("Unsupported Input Stream format", nameof(inputProvider));
    }

    dmoResampler.MediaObject.SetInputWaveFormat(0, inputProvider.WaveFormat);
    if (!dmoResampler.MediaObject.SupportsOutputWaveFormat(0, outputFormat))
    {
        // FIX: paramName previously said "outputStream", which is not a parameter of this constructor
        throw new ArgumentException("Unsupported Output Stream format", nameof(outputFormat));
    }

    dmoResampler.MediaObject.SetOutputWaveFormat(0, outputFormat);
    if (inputStream != null)
    {
        // start at the output-space equivalent of the source's current position
        position = InputToOutputPosition(inputStream.Position);
    }
    // buffers sized for one second of audio on each side of the conversion
    this.inputMediaBuffer = new MediaBuffer(inputProvider.WaveFormat.AverageBytesPerSecond);
    this.outputBuffer = new DmoOutputDataBuffer(outputFormat.AverageBytesPerSecond);
}
/// <summary>
/// Create a new simple compressor stream wrapping the given source
/// </summary>
/// <param name="sourceStream">Source stream</param>
public SimpleCompressorStream(WaveStream sourceStream)
{
    this.sourceStream = sourceStream;
    var format = sourceStream.WaveFormat;
    this.channels = format.Channels;
    this.bytesPerSample = format.BitsPerSample / 8;
    // attack 5.0, release 10.0, then default compression settings
    simpleCompressor = new SimpleCompressor(5.0, 10.0, format.SampleRate)
    {
        Threshold = 16,
        Ratio = 6,
        MakeUpGain = 16
    };
}
/// <summary>
/// Creates a new Wave32To16Stream
/// </summary>
/// <param name="sourceStream">the source stream</param>
/// <exception cref="ArgumentException">The source stream is not 32 bit IEEE floating point</exception>
public Wave32To16Stream(WaveStream sourceStream)
{
    var sourceFormat = sourceStream.WaveFormat;
    if (sourceFormat.Encoding != WaveFormatEncoding.IeeeFloat)
    {
        throw new ArgumentException("Only 32 bit Floating point supported");
    }
    if (sourceFormat.BitsPerSample != 32)
    {
        throw new ArgumentException("Only 32 bit Floating point supported");
    }

    waveFormat = new WaveFormat(sourceFormat.SampleRate, 16, sourceFormat.Channels);
    this.volume = 1.0f;
    this.sourceStream = sourceStream;
    // 16 bit output is exactly half the byte count of the 32 bit source
    length = sourceStream.Length / 2;
    position = sourceStream.Position / 2;
}
/// <summary>
/// Creates a new WaveOffsetStream
/// </summary>
/// <param name="sourceStream">the source stream</param>
/// <param name="startTime">the time at which we should start reading from the source stream</param>
/// <param name="sourceOffset">amount to trim off the front of the source stream</param>
/// <param name="sourceLength">length of time to play from source stream</param>
/// <exception cref="ArgumentException">The source stream is not PCM</exception>
public WaveOffsetStream(WaveStream sourceStream, TimeSpan startTime, TimeSpan sourceOffset, TimeSpan sourceLength)
{
    // TODO: add support for IEEE float + perhaps some others -
    // anything with a fixed bytes per sample
    if (sourceStream.WaveFormat.Encoding != WaveFormatEncoding.Pcm)
    {
        throw new ArgumentException("Only PCM supported");
    }

    this.sourceStream = sourceStream;
    position = 0;
    // bytes per full sample frame (one sample across all channels)
    bytesPerSample = (sourceStream.WaveFormat.BitsPerSample / 8) * sourceStream.WaveFormat.Channels;
    StartTime = startTime;
    SourceOffset = sourceOffset;
    SourceLength = sourceLength;
}
/// <summary>
/// Creates a new WaveChannel32
/// </summary>
/// <param name="sourceStream">the source stream</param>
/// <param name="volume">stream volume (1 is 0dB)</param>
/// <param name="pan">pan control (-1 to 1)</param>
/// <exception cref="ArgumentException">No sample converter supports the source format</exception>
public WaveChannel32(WaveStream sourceStream, float volume, float pan)
{
    PadWithZeroes = true;

    // one candidate converter per supported source sample format;
    // the first that claims support wins
    var converters = new ISampleChunkConverter[]
    {
        new Mono8SampleChunkConverter(),
        new Stereo8SampleChunkConverter(),
        new Mono16SampleChunkConverter(),
        new Stereo16SampleChunkConverter(),
        new Mono24SampleChunkConverter(),
        new Stereo24SampleChunkConverter(),
        new MonoFloatSampleChunkConverter(),
        new StereoFloatSampleChunkConverter(),
    };
    for (int n = 0; n < converters.Length && sampleProvider == null; n++)
    {
        if (converters[n].Supports(sourceStream.WaveFormat))
        {
            sampleProvider = converters[n];
        }
    }
    if (sampleProvider == null)
    {
        throw new ArgumentException("Unsupported sourceStream format");
    }

    // always outputs stereo 32 bit
    waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(sourceStream.WaveFormat.SampleRate, 2);
    destBytesPerSample = 8; // includes stereo factoring

    this.sourceStream = sourceStream;
    this.volume = volume;
    this.pan = pan;
    sourceBytesPerSample = sourceStream.WaveFormat.Channels * sourceStream.WaveFormat.BitsPerSample / 8;

    length = SourceToDest(sourceStream.Length);
    position = 0;
}
/// <summary>
/// Create a new WaveFormat conversion stream
/// </summary>
/// <param name="targetFormat">Desired output format</param>
/// <param name="sourceStream">Source stream</param>
public WaveFormatConversionStream(WaveFormat targetFormat, WaveStream sourceStream)
{
    this.sourceStream = sourceStream;
    this.targetFormat = targetFormat;

    conversionStream = new AcmStream(sourceStream.WaveFormat, targetFormat);

    // estimate how many bytes the whole input will convert to
    // (an exact SourceToDest computation was previously used here)
    length = EstimateSourceToDest((int)sourceStream.Length);
    position = 0;

    // read roughly a second at a time, capped by the ACM source buffer,
    // rounded down to a whole number of source blocks
    preferredSourceReadSize = Math.Min(sourceStream.WaveFormat.AverageBytesPerSecond, conversionStream.SourceBuffer.Length);
    preferredSourceReadSize -= preferredSourceReadSize % sourceStream.WaveFormat.BlockAlign;
}
/// <summary>
/// Creates a WaveChannel32 with default settings (full volume, centre pan)
/// </summary>
/// <param name="sourceStream">The source stream</param>
public WaveChannel32(WaveStream sourceStream)
    : this(sourceStream, 1.0f, 0.0f)
{
}
/// <summary>
/// Disposes this WaveStream, disposing the wrapped source stream as well
/// </summary>
/// <param name="disposing">true when called from Dispose() rather than a finalizer</param>
protected override void Dispose(bool disposing)
{
    if (disposing && sourceStream != null)
    {
        sourceStream.Dispose();
        sourceStream = null;
    }
    base.Dispose(disposing);
}
/// <summary>
/// Disposes this stream
/// </summary>
/// <param name="disposing">true if the user called this</param>
protected override void Dispose(bool disposing)
{
    if (disposing)
    {
        // release managed resources: the ACM conversion stream first,
        // then the source it was reading from
        conversionStream?.Dispose();
        conversionStream = null;
        sourceStream?.Dispose();
        sourceStream = null;
    }
    else
    {
        // finalizer path: flag that the stream was never explicitly disposed
        System.Diagnostics.Debug.Assert(false, "WaveFormatConversionStream was not disposed");
    }
    base.Dispose(disposing);
}
/// <summary>
/// Creates a stream that can convert to PCM
/// </summary>
/// <param name="sourceStream">The source stream</param>
/// <returns>A PCM stream (the source itself if it is already PCM)</returns>
/// <exception cref="InvalidOperationException">No usable PCM format could be suggested</exception>
public static WaveStream CreatePcmStream(WaveStream sourceStream)
{
    if (sourceStream.WaveFormat.Encoding == WaveFormatEncoding.Pcm)
    {
        // already PCM - nothing to convert
        return sourceStream;
    }

    var pcmFormat = AcmStream.SuggestPcmFormat(sourceStream.WaveFormat);
    if (pcmFormat.SampleRate < 8000)
    {
        // suggested rate is implausibly low; G.723 gets a known-good override,
        // anything else must be handled by the caller explicitly
        if (sourceStream.WaveFormat.Encoding != WaveFormatEncoding.G723)
        {
            throw new InvalidOperationException("Invalid suggested output format, please explicitly provide a target format");
        }
        pcmFormat = new WaveFormat(8000, 16, 1);
    }
    return new WaveFormatConversionStream(pcmFormat, sourceStream);
}
/// <summary>
/// Disposes this stream
/// </summary>
/// <param name="disposing">true if the user called this</param>
protected override void Dispose(bool disposing)
{
    if (disposing)
    {
        // release managed resources: the wrapped source stream
        sourceStream?.Dispose();
        sourceStream = null;
    }
    base.Dispose(disposing);
}
/// <summary>
/// Creates the reader stream, supporting all filetypes in the core NAudio library,
/// and ensuring we are in PCM format
/// </summary>
/// <param name="fileName">File Name</param>
private void CreateReaderStream(string fileName)
{
    if (fileName.EndsWith(".wav", StringComparison.OrdinalIgnoreCase))
    {
        readerStream = new WaveFileReader(fileName);
        if (readerStream.WaveFormat.Encoding != WaveFormatEncoding.Pcm && readerStream.WaveFormat.Encoding != WaveFormatEncoding.IeeeFloat)
        {
            // compressed WAV: convert to PCM and fix up block alignment
            readerStream = WaveFormatConversionStream.CreatePcmStream(readerStream);
            readerStream = new BlockAlignReductionStream(readerStream);
        }
    }
    else if (fileName.EndsWith(".mp3", StringComparison.OrdinalIgnoreCase))
    {
        readerStream = new Mp3FileReader(fileName);
    }
    // FIX: aiff/aif checks were case-sensitive and culture-dependent, unlike
    // the wav/mp3 checks above - "FILE.AIFF" would fall through to the
    // MediaFoundationReader. Now consistently ordinal case-insensitive.
    else if (fileName.EndsWith(".aiff", StringComparison.OrdinalIgnoreCase) || fileName.EndsWith(".aif", StringComparison.OrdinalIgnoreCase))
    {
        readerStream = new AiffFileReader(fileName);
    }
    else
    {
        // fall back to media foundation reader, see if that can play it
        readerStream = new MediaFoundationReader(fileName);
    }
}
/// <summary>
/// Creates a WaveOffsetStream with default settings (no offset or pre-delay,
/// and whole length of source stream)
/// </summary>
/// <param name="sourceStream">The source stream</param>
public WaveOffsetStream(WaveStream sourceStream)
    : this(sourceStream, TimeSpan.Zero, TimeSpan.Zero, sourceStream.TotalTime)
{
}
/// <summary>
/// Disposes this WaveStream
/// </summary>
/// <param name="disposing">true when called from Dispose() rather than a finalizer</param>
protected override void Dispose(bool disposing)
{
    if (disposing)
    {
        // release the wrapped source stream
        sourceStream?.Dispose();
        sourceStream = null;
    }
    else
    {
        // finalizer path: flag that the stream was never explicitly disposed
        System.Diagnostics.Debug.Assert(false, "BlockAlignReductionStream was not Disposed");
    }
    base.Dispose(disposing);
}
/// <summary>
/// Disposes this WaveStream
/// </summary>
/// <param name="disposing">true when called from Dispose() rather than a finalizer</param>
protected override void Dispose(bool disposing)
{
    if (disposing)
    {
        // release the wrapped source stream
        sourceStream?.Dispose();
        sourceStream = null;
    }
    else
    {
        // finalizer path: flag that the stream was never explicitly disposed
        System.Diagnostics.Debug.Assert(false, "WaveChannel32 was not Disposed");
    }
    base.Dispose(disposing);
}
/// <summary>
/// Remove a WaveStream from the mixer
/// </summary>
/// <param name="waveStream">waveStream to remove</param>
public void RemoveInputStream(WaveStream waveStream)
{
    lock (inputsLock)
    {
        if (!inputStreams.Remove(waveStream))
        {
            // stream was not in the mixer - nothing to do
            return;
        }
        // recalculate the mixer length as the longest remaining input
        long maxLength = 0;
        foreach (var remaining in inputStreams)
        {
            if (remaining.Length > maxLength)
            {
                maxLength = remaining.Length;
            }
        }
        length = maxLength;
    }
}
/// <summary>
/// Add a new input to the mixer
/// </summary>
/// <param name="waveStream">The wave input to add</param>
/// <exception cref="ArgumentException">The input is not 32 bit IEEE float, or does not match the mixer format</exception>
public void AddInputStream(WaveStream waveStream)
{
    if (waveStream.WaveFormat.Encoding != WaveFormatEncoding.IeeeFloat)
    {
        throw new ArgumentException("Must be IEEE floating point", "waveStream");
    }
    if (waveStream.WaveFormat.BitsPerSample != 32)
    {
        throw new ArgumentException("Only 32 bit audio currently supported", "waveStream");
    }

    if (inputStreams.Count == 0)
    {
        // the first input defines the mixer's output format
        waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(waveStream.WaveFormat.SampleRate, waveStream.WaveFormat.Channels);
    }
    else if (!waveStream.WaveFormat.Equals(waveFormat))
    {
        throw new ArgumentException("All incoming channels must have the same format", "waveStream");
    }

    lock (inputsLock)
    {
        inputStreams.Add(waveStream);
        length = Math.Max(length, waveStream.Length);
        // align the new input with the mixer's current playback position
        waveStream.Position = Position;
    }
}
/// <summary>
/// Creates a new BlockAlignReductionStream
/// </summary>
/// <param name="sourceStream">the input stream</param>
public BlockAlignReductionStream(WaveStream sourceStream)
{
    this.sourceStream = sourceStream;
    // buffer up to four seconds of source audio
    int bufferSize = sourceStream.WaveFormat.AverageBytesPerSecond * 4;
    circularBuffer = new CircularBuffer(bufferSize);
}
/// <summary>
/// Disposes this AudioFileReader
/// </summary>
/// <param name="disposing">True if called from Dispose</param>
protected override void Dispose(bool disposing)
{
    if (disposing)
    {
        // release the underlying reader stream
        readerStream?.Dispose();
        readerStream = null;
    }
    base.Dispose(disposing);
}