/// <summary>
/// Helper function to go from IWaveProvider to a SampleProvider
/// Must already be PCM or IEEE float
/// </summary>
/// <param name="waveProvider">The WaveProvider to convert</param>
/// <returns>A sample provider</returns>
public static ISampleProvider ConvertWaveProviderIntoSampleProvider(IWaveProvider waveProvider)
{
    var encoding = waveProvider.WaveFormat.Encoding;
    if (encoding == WaveFormatEncoding.IeeeFloat)
    {
        // already floating point - just reinterpret the bytes
        return new WaveToSampleProvider(waveProvider);
    }
    if (encoding != WaveFormatEncoding.Pcm)
    {
        throw new ArgumentException("Unsupported source encoding");
    }
    // PCM: pick the converter matching the source bit depth
    switch (waveProvider.WaveFormat.BitsPerSample)
    {
        case 8:
            return new Pcm8BitToSampleProvider(waveProvider);
        case 16:
            return new Pcm16BitToSampleProvider(waveProvider);
        case 24:
            return new Pcm24BitToSampleProvider(waveProvider);
        default:
            throw new InvalidOperationException("Unsupported operation");
    }
}
/// <summary>
/// Remove an input from the mixer
/// </summary>
/// <param name="waveProvider">waveProvider to remove</param>
public void RemoveInputStream(IWaveProvider waveProvider)
{
    // take the same lock as AddInputStream so the list is never mutated mid-mix
    lock (inputs)
    {
        inputs.Remove(waveProvider);
    }
}
/// <summary>
/// Reads the next chunk from the source into the reusable source buffer.
/// </summary>
/// <param name="source">Provider supplying the audio bytes</param>
/// <param name="samplePairsRequired">Number of sample pairs wanted</param>
public void LoadNextChunk(IWaveProvider source, int samplePairsRequired)
{
    // 6 bytes per sample pair (presumably 24-bit stereo: 3 bytes x 2 channels) - TODO confirm
    int bytesNeeded = samplePairsRequired * 6;
    sourceBuffer = GetSourceBuffer(bytesNeeded);
    sourceBytes = source.Read(sourceBuffer, 0, bytesNeeded);
    offset = 0;
}
/// <summary>
/// Initialises the WaveOut device
/// </summary>
/// <param name="waveProvider">WaveProvider to play</param>
public void Init(IWaveProvider waveProvider)
{
    // re-initialising mid-playback is not supported
    if (playbackState != PlaybackState.Stopped)
    {
        throw new InvalidOperationException("Can't re-initialize during playback");
    }

    if (hWaveOut != IntPtr.Zero)
    {
        // normally we don't allow calling Init twice, but as experiment, see if we can clean up and go again
        // try to allow reuse of this waveOut device
        // n.b. risky if Playback thread has not exited
        DisposeBuffers();
        CloseWaveOut();
    }

    this.callbackEvent = new AutoResetEvent(false);

    this.waveStream = waveProvider;
    // split the desired latency evenly across the buffers, rounding up
    int bufferSize = waveProvider.WaveFormat.ConvertLatencyToByteSize((DesiredLatency + NumberOfBuffers - 1) / NumberOfBuffers);

    MmResult result;
    // all waveOut* interop calls are serialised on waveOutLock
    lock (waveOutLock)
    {
        result = WaveInterop.waveOutOpenWindow(out hWaveOut, (IntPtr)DeviceNumber, waveStream.WaveFormat, callbackEvent.SafeWaitHandle.DangerousGetHandle(), IntPtr.Zero, WaveInterop.WaveInOutOpenFlags.CallbackEvent);
    }
    MmException.Try(result, "waveOutOpen");

    buffers = new WaveOutBuffer[NumberOfBuffers];
    playbackState = PlaybackState.Stopped;
    for (int n = 0; n < NumberOfBuffers; n++)
    {
        buffers[n] = new WaveOutBuffer(hWaveOut, bufferSize, waveStream, waveOutLock);
    }
}
/// <summary>
/// Add a new input to the mixer
/// </summary>
/// <param name="waveProvider">The wave input to add; must be 32 bit IEEE float and match the mixer format</param>
/// <exception cref="ArgumentException">Thrown when the provider is not 32 bit IEEE float, or its format differs from the first input's</exception>
public void AddInputStream(IWaveProvider waveProvider)
{
    // FIX: "waveProvider.WaveFormat" is not a valid parameter name for
    // ArgumentException.ParamName - use the actual parameter instead
    if (waveProvider.WaveFormat.Encoding != WaveFormatEncoding.IeeeFloat)
        throw new ArgumentException("Must be IEEE floating point", nameof(waveProvider));
    if (waveProvider.WaveFormat.BitsPerSample != 32)
        throw new ArgumentException("Only 32 bit audio currently supported", nameof(waveProvider));

    if (inputs.Count == 0)
    {
        // first input - adopt its sample rate and channel count as the mixer format
        int sampleRate = waveProvider.WaveFormat.SampleRate;
        int channels = waveProvider.WaveFormat.Channels;
        this.waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channels);
    }
    else
    {
        if (!waveProvider.WaveFormat.Equals(waveFormat))
            throw new ArgumentException("All incoming channels must have the same format", nameof(waveProvider));
    }

    lock (inputs)
    {
        this.inputs.Add(waveProvider);
    }
}
/// <summary>
/// Reads the next chunk from the source into the reusable source buffer.
/// </summary>
/// <param name="source">Provider supplying the audio bytes</param>
/// <param name="samplePairsRequired">Number of sample pairs wanted</param>
public void LoadNextChunk(IWaveProvider source, int samplePairsRequired)
{
    // byte count equals the pair count here (presumably one byte per pair is
    // correct for this source's format) - TODO confirm
    int bytesNeeded = samplePairsRequired;
    sourceBuffer = BufferHelpers.Ensure(sourceBuffer, bytesNeeded);
    sourceBytes = source.Read(sourceBuffer, 0, bytesNeeded);
    offset = 0;
}
/// <summary>
/// Initialises the WaveOut device
/// </summary>
/// <param name="waveProvider">Wave provider to play</param>
public void Init(IWaveProvider waveProvider)
{
    // all waveOut interop must happen on the dedicated playback thread;
    // from any other thread, marshal the call across and return
    if (Thread.CurrentThread.ManagedThreadId != waveOutThread.ManagedThreadId)
    {
        lock (actionQueue)
        {
            // BUG FIX: previously enqueued waveStream (the old provider, possibly
            // null) instead of the waveProvider argument being initialised
            actionQueue.Enqueue(new WaveOutAction(WaveOutFunction.Init, waveProvider));
            workAvailable.Set();
        }
        return;
    }

    waveStream = waveProvider;
    int bufferSize = waveProvider.WaveFormat.ConvertLatencyToByteSize(desiredLatency);
    numBuffers = 3;

    MmException.Try(
        WaveInterop.waveOutOpen(out hWaveOut, (IntPtr)devNumber, waveStream.WaveFormat, callback, IntPtr.Zero, WaveInterop.CallbackFunction),
        "waveOutOpen");

    buffers = new WaveOutBuffer[numBuffers];
    playbackState = PlaybackState.Stopped;
    var waveOutLock = new object();
    for (int n = 0; n < numBuffers; n++)
    {
        buffers[n] = new WaveOutBuffer(hWaveOut, bufferSize, waveStream, waveOutLock);
    }
}
/// <summary>
/// Creates an AudioPlayer that immediately starts playing the given provider.
/// </summary>
/// <param name="provider">The wave provider to play</param>
public AudioPlayer(IWaveProvider provider)
{
    // FIX: subscribe before starting playback so a stop that fires immediately
    // (e.g. a device error during startup) is not missed
    _playbackDevice.PlaybackStopped += (sender, args) =>
        Console.WriteLine("Playback stopped: " + args.Exception);
    _playbackDevice.Init(provider);
    _playbackDevice.Play();
}
/// <summary>
/// Initialises playback by wiring the wave provider to the MediaElement.
/// Must be called on the GUI thread.
/// </summary>
/// <param name="waveProvider">The wave provider to play</param>
/// <returns>An already-completed task, since the source is set synchronously</returns>
public Task Init(IWaveProvider waveProvider)
{
    // do this still on the gui thread
    mediaElement.SetSource(new WaveProviderRandomAccessStream(waveProvider), "audio/wav");
    // BUG FIX: previously returned an unstarted Task (new Task(...)), which
    // never completes - any caller awaiting it would hang forever. The work is
    // already done at this point, so return a completed task.
    return Task.FromResult(true);
}
/// <summary>
/// WaveStream to resample using the DMO Resampler
/// </summary>
/// <param name="inputProvider">Input Stream</param>
/// <param name="outputFormat">Desired Output Format</param>
/// <exception cref="ArgumentException">Thrown when the resampler does not support the input or output format</exception>
public ResamplerDmoStream(IWaveProvider inputProvider, WaveFormat outputFormat)
{
    this.inputProvider = inputProvider;
    // only a WaveStream input supports repositioning; a plain provider gives null here
    this.inputStream = inputProvider as WaveStream;
    this.outputFormat = outputFormat;
    this.resampler = new Resampler();
    if (!resampler.MediaObject.SupportsInputWaveFormat(0, inputProvider.WaveFormat))
    {
        // FIX: paramName previously said "inputStream", which is not a parameter
        // of this constructor
        throw new ArgumentException("Unsupported Input Stream format", nameof(inputProvider));
    }
    resampler.MediaObject.SetInputWaveFormat(0, inputProvider.WaveFormat);
    if (!resampler.MediaObject.SupportsOutputWaveFormat(0, outputFormat))
    {
        // FIX: paramName previously said "outputStream" - same issue as above
        throw new ArgumentException("Unsupported Output Stream format", nameof(outputFormat));
    }
    resampler.MediaObject.SetOutputWaveFormat(0, outputFormat);
    if (inputStream != null)
    {
        // start at the output-time equivalent of the input's current position
        position = InputToOutputPosition(inputStream.Position);
    }
    // roughly one second of input / output buffering
    this.inputMediaBuffer = new MediaBuffer(inputProvider.WaveFormat.AverageBytesPerSecond);
    this.outputBuffer = new DmoOutputDataBuffer(outputFormat.AverageBytesPerSecond);
}
/// <summary>
/// Constructs a new MediaFoundationTransform wrapper
/// Will read one second at a time
/// </summary>
/// <param name="sourceProvider">The source provider for input data to the transform</param>
/// <param name="outputFormat">The desired output format</param>
public MediaFoundationTransform(IWaveProvider sourceProvider, WaveFormat outputFormat)
{
    this.outputWaveFormat = outputFormat;
    this.sourceProvider = sourceProvider;
    // buffer sizes are delegated to helper methods (not visible in this block) -
    // presumably around one second of audio each, per the summary above
    sourceBuffer = new byte[ComputeSourceBufferSize(sourceProvider)];
    outputBuffer = new byte[ComputeOutputBufferSize(outputFormat)]; // we will grow this buffer if needed, but try to make something big enough
}
/// <summary>
/// Constructs a new MediaFoundationTransform wrapper
/// Will read one second at a time
/// </summary>
/// <param name="sourceProvider">The source provider for input data to the transform</param>
/// <param name="outputFormat">The desired output format</param>
public MediaFoundationTransform(IWaveProvider sourceProvider, WaveFormat outputFormat)
{
    this.sourceProvider = sourceProvider;
    this.outputWaveFormat = outputFormat;
    // one second of input...
    int inputSize = sourceProvider.WaveFormat.AverageBytesPerSecond;
    sourceBuffer = new byte[inputSize];
    // ...and one second of output plus one block of alignment slack;
    // we will grow this buffer if needed, but try to make something big enough
    int outputSize = outputWaveFormat.AverageBytesPerSecond + outputWaveFormat.BlockAlign;
    outputBuffer = new byte[outputSize];
}
/// <summary>
/// Helper function to simplify encoding to MP3
/// By default, will only be available on Windows 8 and above
/// </summary>
/// <param name="inputProvider">Input provider, must be PCM</param>
/// <param name="outputFile">Output file path, should end with .mp3</param>
/// <param name="desiredBitRate">Desired bitrate. Use GetEncodeBitrates to find the possibilities for your input type</param>
/// <exception cref="InvalidOperationException">Thrown when no suitable MP3 encoder is installed</exception>
public static void EncodeToMp3(IWaveProvider inputProvider, string outputFile, int desiredBitRate = 192000)
{
    var mediaType = SelectMediaType(AudioSubtypes.MFAudioFormat_MP3, inputProvider.WaveFormat, desiredBitRate);
    // FIX: SelectMediaType returns null when no matching encoder is available
    // (the sibling overload guards for this); without the check we would crash
    // with a NullReferenceException inside MediaFoundationEncoder
    if (mediaType == null) throw new InvalidOperationException("No suitable MP3 encoders available");
    using (var encoder = new MediaFoundationEncoder(mediaType))
    {
        encoder.Encode(outputFile, inputProvider);
    }
}
/// <summary>
/// Initializes a new instance of the WaveToSampleProvider class
/// </summary>
/// <param name="source">Source wave provider, must be IEEE float</param>
public WaveToSampleProvider(IWaveProvider source)
    : base(source)
{
    // this provider only reinterprets bytes as floats, so the source
    // must already be IEEE float encoded
    bool isFloat = source.WaveFormat.Encoding == WaveFormatEncoding.IeeeFloat;
    if (!isFloat)
    {
        throw new ArgumentException("Must be already floating point");
    }
}
/// <summary>
/// Reads the next chunk from the source into the reusable source buffer.
/// </summary>
/// <param name="source">Provider supplying the audio bytes</param>
/// <param name="samplePairsRequired">Number of sample pairs wanted</param>
public void LoadNextChunk(IWaveProvider source, int samplePairsRequired)
{
    // 4 bytes per sample pair (presumably 16-bit stereo) - TODO confirm
    int bytesNeeded = samplePairsRequired * 4;
    sourceBuffer = GetSourceBuffer(bytesNeeded);
    sourceWaveBuffer = new WaveBuffer(sourceBuffer);
    // Read returns a byte count; two bytes per 16-bit sample
    sourceSamples = source.Read(sourceBuffer, 0, bytesNeeded) / 2;
    sourceSample = 0;
}
/// <summary>
/// Creates a new VolumeWaveProvider16 wrapping the given source.
/// </summary>
/// <param name="sourceProvider">Source provider, must be 16 bit PCM</param>
/// <exception cref="ArgumentException">Thrown if the source is not 16 bit PCM</exception>
public VolumeWaveProvider16(IWaveProvider sourceProvider)
{
    // FIX: validate before storing the field, and report which argument failed
    if (sourceProvider.WaveFormat.Encoding != WaveFormatEncoding.Pcm)
        throw new ArgumentException("Expecting PCM input", nameof(sourceProvider));
    if (sourceProvider.WaveFormat.BitsPerSample != 16)
        throw new ArgumentException("Expecting 16 bit", nameof(sourceProvider));
    this.sourceProvider = sourceProvider;
}
/// <summary>
/// Reads the next chunk from the source into the reusable source buffer.
/// </summary>
/// <param name="source">Provider supplying the audio bytes</param>
/// <param name="samplePairsRequired">Number of sample pairs wanted</param>
public void LoadNextChunk(IWaveProvider source, int samplePairsRequired)
{
    // 2 bytes per sample pair requested from the source
    int bytesNeeded = samplePairsRequired * 2;
    sourceSample = 0;
    sourceBuffer = BufferHelpers.Ensure(sourceBuffer, bytesNeeded);
    sourceWaveBuffer = new WaveBuffer(sourceBuffer);
    // Read returns a byte count; two bytes per 16-bit sample
    sourceSamples = source.Read(sourceBuffer, 0, bytesNeeded) / 2;
}
/// <summary>
/// Creates a provider that places a single source channel at a given
/// position within a multi-channel output stream.
/// </summary>
/// <param name="sourceProvider">The single-channel source</param>
/// <param name="channelNum">Index of the output channel to fill</param>
/// <param name="totalChannels">Total channels in the output format</param>
public SingleChannelMuxProvider(IWaveProvider sourceProvider, int channelNum, int totalChannels)
{
    _source = sourceProvider;
    _channelNum = channelNum;
    _totalChannels = totalChannels;
    var sourceFormat = sourceProvider.WaveFormat;
    _bytesPerSample = sourceFormat.BitsPerSample / 8;
    // output keeps the source's sample rate and bit depth, with totalChannels channels
    _finalFormat = new WaveFormat(sourceFormat.SampleRate, sourceFormat.BitsPerSample, totalChannels);
}
/// <summary>
/// Helper function to simplify encoding to MP3
/// By default, will only be available on Windows 8 and above
/// </summary>
/// <param name="inputProvider">Input provider, must be PCM</param>
/// <param name="outputFile">Output file path, should end with .mp3</param>
/// <param name="desiredBitRate">Desired bitrate. Use GetEncodeBitrates to find the possibilities for your input type</param>
public static void EncodeToMp3(IWaveProvider inputProvider, string outputFile, int desiredBitRate = 192000)
{
    var mediaType = SelectMediaType(AudioSubtypes.MFAudioFormat_MP3, inputProvider.WaveFormat, desiredBitRate);
    // a null media type means the platform has no encoder matching the request
    if (mediaType == null)
    {
        throw new InvalidOperationException("No suitable MP3 encoders available");
    }
    using (var encoder = new MediaFoundationEncoder(mediaType))
    {
        encoder.Encode(outputFile, inputProvider);
    }
}
/// <summary>
/// Removes an input from the mixer, stopping the output device when no
/// inputs remain.
/// </summary>
/// <param name="waveProvider">The provider to remove; ignored when null or not present</param>
public void RemoveInputStream(IWaveProvider waveProvider)
{
    // List.Remove already reports whether the item was present, so the
    // previous Contains + Remove pair (two linear scans) collapses to one call
    if (waveProvider == null || !_providers.Remove(waveProvider))
        return;

    // last input gone - stop the output device if it is still running
    if (_providers.Count == 0 && _waveOut != null && _waveOut.PlaybackState != PlaybackState.Stopped)
    {
        _waveOut.Stop();
    }
}
/// <summary>
/// Create a new WaveFormat conversion stream
/// </summary>
/// <param name="targetFormat">Desired output format</param>
/// <param name="sourceProvider">Source Provider</param>
public WaveFormatConversionProvider(WaveFormat targetFormat, IWaveProvider sourceProvider)
{
    this.sourceProvider = sourceProvider;
    this.targetFormat = targetFormat;

    conversionStream = new AcmStream(sourceProvider.WaveFormat, targetFormat);

    // read at most one second at a time, capped by the ACM source buffer size,
    // then rounded down to a whole number of blocks
    int blockAlign = sourceProvider.WaveFormat.BlockAlign;
    int readSize = Math.Min(sourceProvider.WaveFormat.AverageBytesPerSecond, conversionStream.SourceBuffer.Length);
    preferredSourceReadSize = readSize - (readSize % blockAlign);
}
/// <summary>
/// Initialises a new instance of SampleChannel
/// </summary>
/// <param name="waveProvider">Source wave provider, must be PCM or IEEE</param>
/// <param name="forceStereo">force mono inputs to become stereo</param>
public SampleChannel(IWaveProvider waveProvider, bool forceStereo)
{
    var sampleProvider = SampleProviderConverters.ConvertWaveProviderIntoSampleProvider(waveProvider);
    if (forceStereo && sampleProvider.WaveFormat.Channels == 1)
    {
        sampleProvider = new MonoToStereoSampleProvider(sampleProvider);
    }
    this.waveFormat = sampleProvider.WaveFormat;
    // let's put the meter before the volume (useful for drawing waveforms)
    this.preVolumeMeter = new MeteringSampleProvider(sampleProvider);
    this.volumeProvider = new VolumeSampleProvider(preVolumeMeter);
}
/// <summary>
/// Helper function to simplify encoding to AAC
/// By default, will only be available on Windows 7 and above
/// </summary>
/// <param name="inputProvider">Input provider, must be PCM</param>
/// <param name="outputFile">Output file path, should end with .mp4 (or .aac on Windows 8)</param>
/// <param name="desiredBitRate">Desired bitrate. Use GetEncodeBitrates to find the possibilities for your input type</param>
/// <exception cref="InvalidOperationException">Thrown when no suitable AAC encoder is installed</exception>
public static void EncodeToAac(IWaveProvider inputProvider, string outputFile, int desiredBitRate = 192000)
{
    // Information on configuring an AAC media type can be found here:
    // http://msdn.microsoft.com/en-gb/library/windows/desktop/dd742785%28v=vs.85%29.aspx
    var mediaType = SelectMediaType(AudioSubtypes.MFAudioFormat_AAC, inputProvider.WaveFormat, desiredBitRate);
    // FIX: guard against SelectMediaType returning null (no encoder installed),
    // matching the behaviour of the MP3 helper; otherwise we would crash with a
    // NullReferenceException inside MediaFoundationEncoder
    if (mediaType == null) throw new InvalidOperationException("No suitable AAC encoders available");
    using (var encoder = new MediaFoundationEncoder(mediaType))
    {
        // should AAC container have ADTS, or is that just for ADTS?
        // http://www.hydrogenaudio.org/forums/index.php?showtopic=97442
        encoder.Encode(outputFile, inputProvider);
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="Song"/> class.
/// </summary>
/// <param name="URI">The URI to be loaded.</param>
/// <param name="play">if set to <c>true</c> then the song will automatically play once loaded</param>
public Song(string URI, bool play = false)
{
    this.URI = URI;
    provider = MakeSong(this.URI);
    // 100ms desired latency for the DirectSound output device
    output = new DirectSoundOut(100);
    output.Init(provider);
    output.PlaybackStopped += output_PlaybackStopped;
    if (play)
    {
        Play();
    }
}
/// <summary>
/// Creates a new Wave16toFloatProvider
/// </summary>
/// <param name="sourceProvider">the source provider, must be 16 bit PCM</param>
/// <exception cref="ArgumentException">Thrown if the source is not 16 bit PCM</exception>
public Wave16ToFloatProvider(IWaveProvider sourceProvider)
{
    // FIX: ApplicationException is discouraged for argument validation; use
    // ArgumentException, matching the sibling WaveFloatTo16Provider in this codebase
    if (sourceProvider.WaveFormat.Encoding != WaveFormatEncoding.Pcm)
        throw new ArgumentException("Only PCM supported", nameof(sourceProvider));
    if (sourceProvider.WaveFormat.BitsPerSample != 16)
        throw new ArgumentException("Only 16 bit audio supported", nameof(sourceProvider));

    // output is IEEE float at the same sample rate and channel count
    waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(sourceProvider.WaveFormat.SampleRate, sourceProvider.WaveFormat.Channels);
    this.sourceProvider = sourceProvider;
    this.volume = 1.0f;
}
/// <summary>
/// Creates a new WaveFloatTo16Provider
/// </summary>
/// <param name="sourceProvider">the source provider</param>
public WaveFloatTo16Provider(IWaveProvider sourceProvider)
{
    var sourceFormat = sourceProvider.WaveFormat;
    if (sourceFormat.Encoding != WaveFormatEncoding.IeeeFloat)
        throw new ArgumentException("Input wave provider must be IEEE float", "sourceProvider");
    if (sourceFormat.BitsPerSample != 32)
        throw new ArgumentException("Input wave provider must be 32 bit", "sourceProvider");

    // output is 16-bit PCM at the same sample rate and channel count
    waveFormat = new WaveFormat(sourceFormat.SampleRate, 16, sourceFormat.Channels);
    this.sourceProvider = sourceProvider;
    this.volume = 1.0f;
}
/// <summary>
/// Creates a new WaveFloatTo16Provider
/// </summary>
/// <param name="sourceProvider">the source provider, must be 32 bit IEEE float</param>
/// <exception cref="ArgumentException">Thrown if the source is not 32 bit IEEE float</exception>
public WaveFloatTo16Provider(IWaveProvider sourceProvider)
{
    // FIX: the original messages ("Only PCM supported" / "Only 32 bit audio
    // supported") described the opposite of what is checked - this provider
    // requires IEEE float input. Also use ArgumentException rather than the
    // discouraged ApplicationException, matching the other WaveFloatTo16Provider
    // implementation in this codebase.
    if (sourceProvider.WaveFormat.Encoding != WaveFormatEncoding.IeeeFloat)
        throw new ArgumentException("Input wave provider must be IEEE float", nameof(sourceProvider));
    if (sourceProvider.WaveFormat.BitsPerSample != 32)
        throw new ArgumentException("Input wave provider must be 32 bit", nameof(sourceProvider));

    // output is 16-bit PCM at the same sample rate and channel count
    waveFormat = new WaveFormat(sourceProvider.WaveFormat.SampleRate, 16, sourceProvider.WaveFormat.Channels);
    this.sourceProvider = sourceProvider;
    this.volume = 1.0f;
}
/// <summary>
/// Creates a Wave file by reading all the data from a WaveProvider
/// BEWARE: the WaveProvider MUST return 0 from its Read method when it is finished,
/// or the Wave File will grow indefinitely.
/// </summary>
/// <param name="filename">The filename to use</param>
/// <param name="sourceProvider">The source WaveProvider</param>
public static void CreateWaveFile(string filename, IWaveProvider sourceProvider)
{
    using (var writer = new WaveFileWriter(filename, sourceProvider.WaveFormat))
    {
        // copy in roughly four-second chunks
        var buffer = new byte[sourceProvider.WaveFormat.AverageBytesPerSecond * 4];
        int bytesRead;
        while ((bytesRead = sourceProvider.Read(buffer, 0, buffer.Length)) != 0)
        {
            writer.Write(buffer, 0, bytesRead);
        }
    }
}
/// <summary>
/// Creates an AutoTune provider wrapping a mono 44.1kHz IEEE float source.
/// </summary>
/// <param name="source">Source provider; must be mono IEEE float at 44.1kHz</param>
/// <param name="autoTuneSettings">The auto tune settings to store</param>
public AutoTuneWaveProvider(IWaveProvider source, AutoTuneSettings autoTuneSettings)
{
    this.autoTuneSettings = autoTuneSettings;
    var format = source.WaveFormat;
    if (format.SampleRate != 44100)
        throw new ArgumentException("AutoTune only works at 44.1kHz");
    if (format.Encoding != WaveFormatEncoding.IeeeFloat)
        throw new ArgumentException("AutoTune only works on IEEE floating point audio data");
    if (format.Channels != 1)
        throw new ArgumentException("AutoTune only works on mono input sources");

    this.source = source;
    this.pitchDetector = new AutoCorrelator(format.SampleRate);
    this.pitchShifter = new SmbPitchShifter(Settings);
    this.waveBuffer = new WaveBuffer(8192);
}
/// <summary>
/// Builds the full radio-signal processing chain for one audio source:
/// optional ACM resampling to outFormat, silence/noise handling, optional
/// MDC1200 / GE-Star decoders, a root decoder (FleetSync / P25), and a recorder.
/// </summary>
/// <param name="sourceName">Display/logging name for this source</param>
/// <param name="sourceWaveProvider">The raw incoming audio</param>
/// <param name="waveFilePath">Passed through to the base class</param>
/// <param name="outFormat">Format all downstream processing operates in</param>
/// <param name="sigDelegate">Callback invoked for decoded signaling items</param>
/// <param name="hasPropertyChanged">Callback for property-change notification</param>
/// <param name="recordEnabled">Whether the recorder starts enabled</param>
/// <param name="recordType">Recording trigger type</param>
/// <param name="recordKickTime">Recorder kick time</param>
/// <param name="noiseFloor">Noise floor mode for the silence helper</param>
/// <param name="customNoiseFloor">Custom noise floor value (used with the custom mode - TODO confirm)</param>
/// <param name="removeNoise">Whether the silence helper removes noise</param>
/// <param name="decodeMDC1200">Enable the MDC1200 decoder</param>
/// <param name="decodeGEStar">Enable the GE-Star decoder</param>
/// <param name="decodeFleetSync">Enable FleetSync decoding in the root decoder</param>
/// <param name="decodeP25">Enable P25 decoding in the root decoder</param>
public ProcessorWaveProvider(string sourceName, IWaveProvider sourceWaveProvider, string waveFilePath, WaveFormat outFormat, Common.ProcessRadioSignalingItemDelegate sigDelegate, Action<bool> hasPropertyChanged, bool recordEnabled, Common.SignalRecordingType recordType, int recordKickTime, Common.NoiseFloor noiseFloor, int customNoiseFloor, bool removeNoise, bool decodeMDC1200, bool decodeGEStar, bool decodeFleetSync, bool decodeP25) : base(sourceWaveProvider, waveFilePath)
{
    LastValidStreamTitle = string.Empty;
    _sourceName = sourceName;
    _sourceFormat = sourceWaveProvider.WaveFormat;
    _outFormat = outFormat;
    _hasPropertyChanged = hasPropertyChanged;

    // first argument is samples-per-second: bytes/sec divided by bytes-per-sample
    _silenceHelper = new SilenceHelper(outFormat.AverageBytesPerSecond / (outFormat.BitsPerSample / 8), noiseFloor, removeNoise, customNoiseFloor);

    if (outFormat.Equals(sourceWaveProvider.WaveFormat))
    {
        // source already matches the processing format - no resampling needed
        _resampleStream = null;
        _useResampler = false;
    }
    else
    {
        if (Common.AppSettings.Instance.DiagnosticMode)
        {
            Common.ConsoleHelper.ColorWriteLine(ConsoleColor.Magenta, "{0}: Source Format <> Out Format [{1}] <> [{2}]", sourceName, sourceWaveProvider.WaveFormat, outFormat);
        }
        _resampleStream = new NAudio.Wave.Compression.AcmStream(sourceWaveProvider.WaveFormat, outFormat);
        _useResampler = true;
    }

    // optional signaling decoders - null when disabled
    if (decodeMDC1200)
    {
        _mdc = new Decoders.MDC1200(outFormat.SampleRate, ProcessMDC1200, sourceName);
    }
    else
    {
        _mdc = null;
    }
    if (decodeGEStar)
    {
        _star = new Decoders.STAR(outFormat.SampleRate, ProcessSTAR, Decoders.STAR.star_format.star_format_1_16383, sourceName);
    }
    else
    {
        _star = null;
    }
    // the root decoder is always created; FleetSync/P25 are enabled via flags
    _rootDecoder = new Decoders.RootDecoder(outFormat.SampleRate, decodeFleetSync, decodeP25, ProcessRootDecoder);

    _recorder = new AudioRecorder(sourceName, recordType, recordKickTime, outFormat, AudioProcessingGlobals.DefaultSaveFileWaveFormat, recordEnabled);
    _bytesPerSample = outFormat.BitsPerSample / 8;
    _encoding = outFormat.Encoding;
    _sigDelegate = sigDelegate;
}
/// <summary>
/// Converts the given input wavestream into mono or stereo for 16 bit samples
/// </summary>
/// <param name="input">The input WaveProvider to convert</param>
/// <param name="toMono">True for mono output, or false for stereo output</param>
/// <returns>A converted IWaveProvider of the original input in either mono or stereo</returns>
public IWaveProvider MonoStereoConvert16(IWaveProvider input, bool toMono)
{
    int channels = input.WaveFormat.Channels;
    if (toMono)
    {
        // already mono? pass straight through
        if (channels == 1) return input;
        return new StereoToMonoProvider16(input);
    }
    // already stereo? pass straight through
    if (channels == 2) return input;
    var mtsp = new MonoToStereoProvider16(input);
    // 0.7 on each to avoid double loud
    mtsp.LeftVolume = 0.7f;
    mtsp.RightVolume = 0.7f;
    return mtsp;
}
/// <summary>
/// Reads one buffer's worth of audio from the playback provider and hands it
/// to the WASAPI render client.
/// </summary>
/// <param name="playbackProvider">Provider supplying the audio data</param>
/// <param name="frameCount">Number of frames the device has room for</param>
private void FillBuffer(IWaveProvider playbackProvider, int frameCount)
{
    var buffer = renderClient.GetBuffer(frameCount);
    var readLength = frameCount * bytesPerFrame;
    int read = playbackProvider.Read(readBuffer, 0, readLength);
    if (read == 0)
    {
        // source exhausted - signal the playback loop to stop
        playbackState = PlaybackState.Stopped;
    }
    // copy only the bytes actually read into the device buffer
    Marshal.Copy(readBuffer, 0, buffer, read);
    // NOTE(review): a partial read that is not a whole multiple of bytesPerFrame
    // silently drops the trailing bytes here - confirm providers always return
    // whole frames
    int actualFrameCount = read / bytesPerFrame;
    /*if (actualFrameCount != frameCount)
     * {
     * Debug.WriteLine(String.Format("WASAPI wanted {0} frames, supplied {1}", frameCount, actualFrameCount ));
     * }*/
    renderClient.ReleaseBuffer(actualFrameCount, AudioClientBufferFlags.None);
}
/// <summary>
/// Creates a new mono waveprovider based on a stereo input
/// </summary>
/// <param name="sourceProvider">Stereo 16 bit PCM input</param>
public StereoToMonoProvider16(IWaveProvider sourceProvider)
{
    // default mix: half of each channel
    LeftVolume = 0.5f;
    RightVolume = 0.5f;
    var format = sourceProvider.WaveFormat;
    if (format.Encoding != WaveFormatEncoding.Pcm)
    {
        throw new ArgumentException("Source must be PCM");
    }
    if (format.Channels != 2)
    {
        throw new ArgumentException("Source must be stereo");
    }
    if (format.BitsPerSample != 16)
    {
        throw new ArgumentException("Source must be 16 bit");
    }
    this.sourceProvider = sourceProvider;
    // output format: same sample rate, single channel
    WaveFormat = new WaveFormat(format.SampleRate, 1);
}
/// <summary>
/// Wraps a 16-bit source in a SoundTouch time-stretching stream.
/// </summary>
/// <param name="source">Source provider; must be 16 bit</param>
public SoundTouchWaveStream(IWaveProvider source)
{
    if (source.WaveFormat.BitsPerSample != 16)
    {
        throw new FormatException("Can't process bit depth of " + source.WaveFormat.BitsPerSample);
    }
    _source = source;

    const int sampleCount = 32768;
    _sourceSamples = new short[sampleCount];
    // two bytes per 16-bit sample
    _sourceBuffer = new byte[sampleCount * 2];
    _stretchedSamples = new short[sampleCount];

    _stretcher = new SoundTouch<short, long>();
    _stretcher.SetSampleRate(_source.WaveFormat.SampleRate);
    _stretcher.SetChannels(_source.WaveFormat.Channels);
    // 1.0 = original speed
    _tempo = 1.0;
}
/// <summary>
/// Initializes the specified wave provider.
/// </summary>
/// <param name="waveProvider">The wave provider.</param>
/// <exception cref="InvalidOperationException">Can't re-initialize during playback</exception>
public void Init(IWaveProvider waveProvider)
{
    // re-initialising mid-playback is not supported
    if (m_PlaybackState != PlaybackState.Stopped)
    {
        throw new InvalidOperationException("Can't re-initialize during playback");
    }

    if (DeviceHandle != IntPtr.Zero)
    {
        // normally we don't allow calling Init twice, but as experiment, see if we can clean up and go again
        // try to allow reuse of this waveOut device
        // n.b. risky if Playback thread has not exited
        DisposeBuffers();
        CloseWaveOut();
    }

    CallbackEvent = new AutoResetEvent(false);

    WaveStream = waveProvider;
    // split the desired latency evenly across the buffers, rounding up
    var bufferSize = waveProvider.WaveFormat.ConvertLatencyToByteSize((DesiredLatency + NumberOfBuffers - 1) / NumberOfBuffers);

    MmResult result;
    // all waveOut* interop calls are serialised on WaveOutLock
    lock (WaveOutLock)
    {
        result = WaveInterop.NativeMethods.waveOutOpenWindow(
            out DeviceHandle,
            DeviceNumber,
            WaveStream.WaveFormat,
            CallbackEvent.SafeWaitHandle.DangerousGetHandle(),
            IntPtr.Zero,
            WaveInterop.WaveInOutOpenFlags.CallbackEvent);
    }
    MmException.Try(result, nameof(WaveInterop.NativeMethods.waveOutOpen));

    Buffers = new WaveOutBuffer[NumberOfBuffers];
    m_PlaybackState = PlaybackState.Stopped;
    for (var n = 0; n < NumberOfBuffers; n++)
    {
        Buffers[n] = new WaveOutBuffer(DeviceHandle, bufferSize, WaveStream, WaveOutLock);
    }
}
/// <summary>
/// Writes to a stream by reading all the data from a WaveProvider
/// BEWARE: the WaveProvider MUST return 0 from its Read method when it is finished,
/// or the Wave File will grow indefinitely.
/// </summary>
/// <param name="outStream">The stream the method will output to</param>
/// <param name="sourceProvider">The source WaveProvider</param>
public static void WriteWavFileToStream(Stream outStream, IWaveProvider sourceProvider)
{
    // IgnoreDisposeStream keeps the caller's stream open when the writer is disposed
    using (var writer = new WaveFileWriter(new IgnoreDisposeStream(outStream), sourceProvider.WaveFormat))
    {
        // copy in roughly four-second chunks
        var buffer = new byte[sourceProvider.WaveFormat.AverageBytesPerSecond * 4];
        int bytesRead;
        while ((bytesRead = sourceProvider.Read(buffer, 0, buffer.Length)) != 0)
        {
            writer.Write(buffer, 0, bytesRead);
        }
        // end of source provider
        outStream.Flush();
    }
}
/// <summary>
/// Creates a stereo provider from a mono 16 bit PCM input.
/// </summary>
/// <param name="sourceProvider">Mono 16 bit PCM source</param>
public MonoToStereoProvider16(IWaveProvider sourceProvider)
{
    var format = sourceProvider.WaveFormat;
    if (format.Encoding != WaveFormatEncoding.Pcm)
    {
        throw new ArgumentException("Source must be PCM");
    }
    if (format.Channels != 1)
    {
        throw new ArgumentException("Source must be Mono");
    }
    if (format.BitsPerSample != 16)
    {
        throw new ArgumentException("Source must be 16 bit");
    }
    this.sourceProvider = sourceProvider;
    // output format: same sample rate, two channels
    this.outputFormat = new WaveFormat(format.SampleRate, 2);
    // full volume on both channels by default
    RightVolume = 1f;
    LeftVolume = 1f;
}
/// <summary>
/// Decodes an in-memory MP3, optionally resampling to 8kHz/16-bit.
/// </summary>
/// <param name="inMem">Stream containing MP3 data</param>
/// <param name="resample8KHz16bit">When true, resample to 8kHz 16-bit (channel count preserved)</param>
/// <returns>Decoded audio bytes - see the review note in the resampled path about the missing WAV header</returns>
public static byte[] ConvertMP3toWAV(MemoryStream inMem, bool resample8KHz16bit = false)
{
    /* its a mp3*/
    MemoryStream outMem = new MemoryStream();
    using (var reader = new Mp3FileReader(inMem))
    {
        if (!resample8KHz16bit)
        {
            // full WAV (header + data) straight from the decoded MP3
            WriteWavFileToStream(outMem, reader);
            return(outMem.ToArray());
        }
        else
        {
            var outFormat = new WaveFormat(8000, 16, reader.WaveFormat.Channels);
            using (var resampler = new MediaFoundationResampler(reader, outFormat))
            {
                // resampler.ResamplerQuality = 60;
                IWaveProvider sourceProvider = (IWaveProvider)resampler;
                long outputLength = 0;
                // copy in roughly four-second chunks
                var buffer = new byte[sourceProvider.WaveFormat.AverageBytesPerSecond * 4];
                while (true)
                {
                    int bytesRead = sourceProvider.Read(buffer, 0, buffer.Length);
                    if (bytesRead == 0)
                    {
                        // end of source provider
                        break;
                    }
                    // guard against endless streams: accumulate and cap at 2GB
                    outputLength += bytesRead;
                    if (outputLength > Int32.MaxValue)
                    {
                        throw new InvalidOperationException("WAV File cannot be greater than 2GB. Check that sourceProvider is not an endless stream.");
                    }
                    // NOTE(review): this path writes raw resampled PCM with no WAV
                    // header, unlike the non-resampled path above - confirm intended
                    outMem.Write(buffer, 0, bytesRead);
                }
                return(outMem.ToArray());
            }
        }
    }
}
private GCHandle hHeader; // we need to pin the header structure

/// <summary>
/// creates a new wavebuffer
/// </summary>
/// <param name="hWaveOut">WaveOut device to write to</param>
/// <param name="bufferSize">Buffer size in bytes</param>
/// <param name="bufferFillStream">Stream to provide more data</param>
/// <param name="waveOutLock">Lock to protect WaveOut API's from being called on >1 thread</param>
public WaveOutBuffer(IntPtr hWaveOut, Int32 bufferSize, IWaveProvider bufferFillStream, object waveOutLock)
{
    this.bufferSize = bufferSize;
    // unmanaged audio buffer - presumably freed in Dispose (not visible in this block)
    bufferPtr = Marshal.AllocHGlobal(bufferSize);
    this.hWaveOut = hWaveOut;
    waveStream = bufferFillStream;
    this.waveOutLock = waveOutLock;

    header = new WaveHeader();
    // pin the header so the driver always sees a stable address
    hHeader = GCHandle.Alloc(header, GCHandleType.Pinned);
    header.dataBuffer = bufferPtr;
    header.bufferLength = bufferSize;
    header.loops = 1;

    // serialise the waveOut API call against other threads
    lock (waveOutLock)
    {
        MmException.Try(WaveInterop.waveOutPrepareHeader(hWaveOut, header, Marshal.SizeOf(header)), "waveOutPrepareHeader");
    }
}
/// <summary>
/// Wires up the audio pipeline: Skype input -> 32-bit float conversion ->
/// effect chain -> mixer (for extra sound effects) -> 16-bit output back to Skype.
/// </summary>
/// <param name="effects">Effect chain; changes to it are pushed into the effect stream</param>
public AudioPipeline(EffectChain effects)
{
    // Audio pipeline:
    // get the audio from Skype
    this.bufferStream = new SkypeBufferProvider(16000);
    // convert to 32 bit floating point
    var bufferStream32 = new Pcm16BitToSampleProvider(bufferStream);
    // pass through the effects
    var effectStream = new EffectStream(bufferStream32);
    // now mix in any sound effects
    mixer = new MixingSampleProvider(effectStream.WaveFormat);
    mixer.AddMixerInput(effectStream);
    // and convert back to 16 bit ready to be given back to skype
    outputProvider = new SampleToWaveProvider16(mixer);
    // re-apply the whole chain whenever the effect collection changes
    effects.Modified += (s, a) => effectStream.UpdateEffectChain(effects.ToArray());
}
/// <summary>
/// Creates a Wave file by reading all the data from a WaveProvider
/// BEWARE: the WaveProvider MUST return 0 from its Read method when it is finished,
/// or the Wave File will grow indefinitely.
/// </summary>
/// <param name="filename">The filename to use</param>
/// <param name="sourceProvider">The source WaveProvider</param>
public static void CreateWaveFile(string filename, IWaveProvider sourceProvider)
{
    using (var writer = new WaveFileWriter(filename, sourceProvider.WaveFormat))
    {
        // copy in roughly four-second chunks
        var buffer = new byte[sourceProvider.WaveFormat.AverageBytesPerSecond * 4];
        var span = buffer.AsSpan();
        int bytesRead;
        while ((bytesRead = sourceProvider.Read(span)) != 0)
        {
            // Write will throw exception if WAV file becomes too large
            writer.Write(buffer, 0, bytesRead);
        }
    }
}
/// <summary>
/// Adds an audio source to the mixer under the given name.
/// </summary>
/// <param name="waveprovider">Source to add; silently ignored unless it is IEEE float and matches the mixer's sample rate and channel count</param>
/// <param name="name">Key used to look up or replace the source later</param>
public static void AddWaveProvider(IWaveProvider waveprovider, string name)
{
    // must be converted to IeeeFloat first, e.g. with Wave16ToFloatProvider
    if (waveprovider.WaveFormat.Encoding != WaveFormatEncoding.IeeeFloat)
    {
        return;
    }
    // sample rate and channel count must match the mixer format
    if (waveprovider.WaveFormat.SampleRate != waveformat.SampleRate || waveprovider.WaveFormat.Channels != waveformat.Channels)
    {
        return;
    }
    // replace any existing provider registered under the same name
    if (DctWaveProvider.ContainsKey(name))
    {
        RemoveWaveProvider(name);
    }
    DctWaveProvider.Add(name, waveprovider);
    mixer.AddInputStream(waveprovider);
}
/// <summary>
/// Refills this buffer from the wave stream and resubmits it to the device.
/// Returns false when the stream is exhausted.
/// </summary>
internal bool OnDone()
{
    int bytesRead;
    // serialise reads against other users of the wave stream
    lock (this.waveStream)
    {
        bytesRead = this.waveStream.Read(this.buffer, 0, this.buffer.Length);
    }
    if (bytesRead == 0)
    {
        return false;
    }
    // zero-pad a partial read so stale audio is not replayed
    Array.Clear(this.buffer, bytesRead, this.buffer.Length - bytesRead);
    this.WriteToWaveOut();
    return true;
}
/// <summary>
/// Initialises the WaveOut device
/// </summary>
/// <param name="waveProvider">WaveProvider to play</param>
public void Init(IWaveProvider waveProvider)
{
    this.waveStream = waveProvider;
    // split the desired latency evenly across the buffers, rounding up
    int bufferSize = waveProvider.WaveFormat.ConvertLatencyToByteSize((DesiredLatency + NumberOfBuffers - 1) / NumberOfBuffers);

    MmResult result;
    // NOTE(review): callbackEvent is used but never created here - presumably
    // allocated elsewhere (e.g. the constructor); confirm Init cannot run before that
    lock (waveOutLock)
    {
        result = WaveInterop.waveOutOpenWindow(out hWaveOut, (IntPtr)DeviceNumber, waveStream.WaveFormat, callbackEvent.SafeWaitHandle.DangerousGetHandle(), IntPtr.Zero, WaveInterop.WaveInOutOpenFlags.CallbackEvent);
    }
    MmException.Try(result, "waveOutOpen");

    buffers = new WaveOutBuffer[NumberOfBuffers];
    playbackState = PlaybackState.Stopped;
    for (int n = 0; n < NumberOfBuffers; n++)
    {
        buffers[n] = new WaveOutBuffer(hWaveOut, bufferSize, waveStream, waveOutLock);
    }
}
/// <summary>
/// Initialises the WaveOut device
/// </summary>
/// <param name="waveProvider">WaveProvider to play</param>
public void Init(IWaveProvider waveProvider)
{
    this.waveStream = waveProvider;
    // split the desired latency evenly across the buffers, rounding up
    int bufferSize = waveProvider.WaveFormat.ConvertLatencyToByteSize((DesiredLatency + NumberOfBuffers - 1) / NumberOfBuffers);

    MmResult result;
    // callbackInfo encapsulates how the device delivers callbacks - presumably
    // window / function / event depending on construction
    lock (waveOutLock)
    {
        result = callbackInfo.WaveOutOpen(out hWaveOut, DeviceNumber, waveStream.WaveFormat, callback);
    }
    MmException.Try(result, "waveOutOpen");

    buffers = new WaveOutBuffer[NumberOfBuffers];
    playbackState = PlaybackState.Stopped;
    for (int n = 0; n < NumberOfBuffers; n++)
    {
        buffers[n] = new WaveOutBuffer(hWaveOut, bufferSize, waveStream, waveOutLock);
    }
}
/// <summary>
/// Initialises the WaveOut device
/// </summary>
/// <param name="waveProvider">WaveProvider to play</param>
public void Init(IWaveProvider waveProvider)
{
    _waveStream = waveProvider;
    // split the desired latency evenly across the buffers, rounding up
    var bufferSize = waveProvider.WaveFormat.ConvertLatencyToByteSize((DesiredLatency + NumberOfBuffers - 1) / NumberOfBuffers);

    MmResult result;
    // _callbackInfo encapsulates how the device delivers callbacks - presumably
    // window / function / event depending on construction
    lock (_waveOutLock)
    {
        result = _callbackInfo.WaveOutOpen(out _hWaveOut, DeviceNumber, _waveStream.WaveFormat, _callback);
    }
    MmException.Try(result, "waveOutOpen");

    _buffers = new WaveOutBuffer[NumberOfBuffers];
    _playbackState = PlaybackState.Stopped;
    for (var n = 0; n < NumberOfBuffers; n++)
    {
        _buffers[n] = new WaveOutBuffer(_hWaveOut, bufferSize, _waveStream, _waveOutLock);
    }
}
/// <summary>
/// Instanciate a PitchWaveProvider
/// </summary>
/// <param name="source">Stream Source</param>
/// <param name="ar">Pitch Reference</param>
public PitchWaveProvider(IWaveProvider source, Pitch ar)
{
    var format = source.WaveFormat;
    if (format.SampleRate != 44100)
    {
        throw new ArgumentException("Pitch Detection only works at 44.1kHz");
    }
    if (format.Encoding != WaveFormatEncoding.IeeeFloat)
    {
        throw new ArgumentException("Pitch Detection only works on IEEE floating point audio data");
    }
    if (format.Channels != 1)
    {
        throw new ArgumentException("Pitch Detection only works on mono input sources");
    }
    this.source = source;
    this.pitchDetector = new AutoCorrelator(format.SampleRate);
    this.waveBuffer = new WaveBuffer(8192);
    this.AR = ar;
}
/// <summary>
/// Initializes the audio renderer.
/// Call the Play Method to start reading samples
/// </summary>
private void Initialize()
{
    Destroy();

    // create the callback-driven provider only once; reuse it across re-initializations
    if (AudioSamplesProvider == null)
    {
        AudioSamplesProvider = new CallbackWaveProvider(ProvideAudioSamplesCallback);
    }

    AudioDevice = new WaveOutEvent
    {
        DesiredLatency = 200,
        NumberOfBuffers = 2,
    };

    // size the circular buffer from the device latency and the audio block capacity
    var latencyBytes = AudioSamplesProvider.WaveFormat.ConvertLatencyToByteSize(AudioDevice.DesiredLatency);
    var bufferLength = latencyBytes * MediaElement.Blocks[MediaType.Audio].Capacity / 2;
    AudioBuffer = new CircularBuffer(bufferLength);

    AudioDevice.Init(AudioSamplesProvider);
}
/// <summary>
/// Creates a new buffer for the given waveOut device, pins the managed byte array
/// and WaveHeader so the native winmm API can write to them, and prepares the header.
/// </summary>
/// <param name="hWaveOut">Handle of the waveOut device this buffer will be written to</param>
/// <param name="bufferSize">Size of the buffer in bytes</param>
/// <param name="bufferFillStream">Wave provider used to refill the buffer with audio data</param>
/// <param name="waveOutLock">Lock object guarding all waveOut API calls for this device</param>
public WaveOutBuffer(IntPtr hWaveOut, int bufferSize, IWaveProvider bufferFillStream, object waveOutLock)
{
    this.bufferSize = bufferSize;
    this.buffer = new byte[bufferSize];
    // pin the managed array so its address stays valid while native code writes to it
    this.hBuffer = GCHandle.Alloc(this.buffer, GCHandleType.Pinned);
    this.hWaveOut = hWaveOut;
    this.waveStream = bufferFillStream;
    this.waveOutLock = waveOutLock;

    this.header = new WaveHeader();
    // the header must also be pinned: winmm holds onto it until the buffer is unprepared
    this.hHeader = GCHandle.Alloc(this.header, GCHandleType.Pinned);
    this.header.dataBuffer = this.hBuffer.AddrOfPinnedObject();
    this.header.bufferLength = bufferSize;
    this.header.loops = 1;
    // a (non-pinned) handle to this instance, round-tripped through the header's
    // userData so the native callback can find the owning WaveOutBuffer again
    this.hThis = GCHandle.Alloc(this);
    this.header.userData = (IntPtr)this.hThis;

    lock (waveOutLock)
    {
        MmException.Try(WaveInterop.waveOutPrepareHeader(hWaveOut, this.header, Marshal.SizeOf(this.header)), "waveOutPrepareHeader");
    }
}
/// <summary>
/// Initialises a new instance of SampleChannel
/// </summary>
/// <param name="waveProvider">Source wave provider, must be PCM or IEEE</param>
public SampleChannel(IWaveProvider waveProvider)
{
    ISampleProvider sampleProvider;
    var encoding = waveProvider.WaveFormat.Encoding;
    if (encoding == WaveFormatEncoding.Pcm)
    {
        // go to float
        switch (waveProvider.WaveFormat.BitsPerSample)
        {
            case 8:
                sampleProvider = new Pcm8BitToSampleProvider(waveProvider);
                break;
            case 16:
                sampleProvider = new Pcm16BitToSampleProvider(waveProvider);
                break;
            case 24:
                sampleProvider = new Pcm24BitToSampleProvider(waveProvider);
                break;
            default:
                throw new InvalidOperationException("Unsupported operation");
        }
    }
    else if (encoding == WaveFormatEncoding.IeeeFloat)
    {
        sampleProvider = new WaveToSampleProvider(waveProvider);
    }
    else
    {
        throw new ArgumentException("Unsupported source encoding");
    }

    // always present a stereo stream downstream
    if (sampleProvider.WaveFormat.Channels == 1)
    {
        sampleProvider = new MonoToStereoSampleProvider(sampleProvider);
    }

    this.waveFormat = sampleProvider.WaveFormat;
    // let's put the meter before the volume (useful for drawing waveforms)
    this.preVolumeMeter = new MeteringSampleProvider(sampleProvider);
    this.volumeProvider = new VolumeSampleProvider(preVolumeMeter);
}
/// <summary>
/// Attempts to play an audio stream.
/// Tries WAV first; if that fails, falls back to MP3 (exception-driven format detection).
/// </summary>
/// <param name="memoryStream">Audio stream to play</param>
/// <param name="fromRequest">When false, waits (by polling) for the player waveOut to become available before locking it</param>
private void PlayMemoryStream(Stream memoryStream, bool fromRequest)
{
    try
    {
        if (!fromRequest)
        {
            IsStreamSongLocked = false;
            // busy-wait until the shared waveOut is free
            // NOTE(review): polling with Thread.Sleep — presumably acceptable here, but a wait handle would be cleaner; confirm intent
            while (!isPlayerWaveOutAvailable)
            {
                Thread.Sleep(100);
            }
        }
        isPlayerWaveOutAvailable = false;
        IsStreamSongLocked = true;
        isPlayerStopped = false;
        ShouldPlayNextSong = true;
        memoryStream.Position = 0;
        // cache a copy of the raw bytes if this is a different stream than last time
        // NOTE(review): comparing Length against Count only detects same-size streams; assumes equal length implies same song — confirm
        if (latestStream == null || memoryStream.Length != latestStream.Count)
        {
            latestStream = (memoryStream as MemoryStream).ToArray().ToList();
        }
        reader = new WaveFileReader(memoryStream);
        SetPlayerData(reader);
        // UI updates must happen on the dispatcher thread
        Application.Current.Dispatcher.Invoke(delegate { songDurationTimeTextBlock.Text = ((WaveFileReader)reader).TotalTime.ToString("mm\\:ss"); });
        PlayStreamSong(reader);
    }
    catch (Exception)
    {
        // WAV parsing failed — assume the stream is MP3 and retry
        Console.WriteLine("Exception@PlayerPage->PlayMemoryStream() -> Audio file is not .wav, trying with .mp3");
        try
        {
            reader = new Mp3FileReader(memoryStream);
            SetPlayerData(reader);
            Application.Current.Dispatcher.Invoke(delegate { songDurationTimeTextBlock.Text = ((Mp3FileReader)reader).TotalTime.ToString("mm\\:ss"); });
            PlayStreamSong(reader);
        }
        catch (Exception exception)
        {
            // neither format worked — log and propagate
            Console.WriteLine("Exception@PlayerPage->PlayMemoryStream() -> " + exception);
            throw;
        }
    }
}
/// <summary>
/// Handles incoming microphone data: optionally monitors it locally by playing the
/// captured buffer through a temporary WaveOut device, then forwards the raw bytes via Send.
/// </summary>
/// <param name="sender">Event source (the WaveIn stream)</param>
/// <param name="e">Captured audio buffer and byte count</param>
public static void waveInStream_DataAvailable(object sender, WaveInEventArgs e)
{
    // mute or unmute the shared output depending on the monitoring flag
    if (!MonitorAudioInput)
    {
        audioout.Volume = 0.0f;
    }
    else
    {
        audioout.Volume = 1.0f;
    }
    Task.Factory.StartNew(() =>
    {
        // NOTE(review): e.Buffer is reused by WaveIn; storing it without copying may race with the next capture — confirm
        AudioArray = e.Buffer;
        // This is the place for the conversion!
        //===================================
        Console.WriteLine("Mam mikrofon" + AudioArray.Length);
        // NOTE(review): this local 'audioout' shadows the outer field whose Volume is set above;
        // a new WaveOut per captured buffer is created and disposed each time — confirm this is intended
        using (WaveOut audioout = new WaveOut())
        using (MemoryStream ms = new MemoryStream(AudioArray))
        {
            ManualResetEvent semaphoreObject = new ManualResetEvent(false);
            audioout.DesiredLatency = 100;
            RawSourceWaveStream rsws = new RawSourceWaveStream(ms, wf);
            IWaveProvider provider = rsws;
            audioout.Init(provider);
            // signal when playback of this buffer finishes so we can block until then
            EventHandler <NAudio.Wave.StoppedEventArgs> handler = (o, k) =>
            {
                semaphoreObject.Set();
            };
            audioout.PlaybackStopped += handler;
            audioout.Play();
            //while (audioout.PlaybackState != PlaybackState.Stopped) ;
            semaphoreObject.WaitOne();
            audioout.PlaybackStopped -= handler;
            // forward the captured bytes after local monitoring completes
            Send(AudioArray);
        }
    });
}
/// <summary>
/// Converts from 32-bit Ieee Floating-point format to MuLaw 8khz 8-bit 1 channel.
/// Used for WasapiCapture and WasapiLoopbackCapture.
/// Builds the pipeline: buffered source -> float samples -> mono -> 8kHz resample -> 16-bit PCM.
/// </summary>
/// <param name="stream">The raw audio bytes to buffer and convert</param>
/// <param name="inputFormat">The input format of the raw audio.</param>
public MuLawResamplerProvider(byte[] stream, WaveFormat inputFormat)
{
    // Root buffer provider.
    waveBuffer = new BufferedWaveProvider(inputFormat);
    waveBuffer.DiscardOnBufferOverflow = false;
    waveBuffer.ReadFully = false;
    waveBuffer.AddSamples(stream, 0, stream.Length);
    var sampleStream = new WaveToSampleProvider(waveBuffer);
    // Stereo to mono filter (doubling each channel's volume to compensate for the mixdown).
    var monoStream = new StereoToMonoSampleProvider(sampleStream)
    {
        LeftVolume = 2.0f,
        RightVolume = 2.0f
    };
    // Downsample to 8000 filter.
    var resamplingProvider = new WdlResamplingSampleProvider(monoStream, 8000);
    // Convert to 16-bit in order to use ACM or MuLaw tools.
    ieeeToPcm = new SampleToWaveProvider16(resamplingProvider);
    // scratch buffer sized for one second of converted audio
    sourceBuffer = new byte[ieeeToPcm.WaveFormat.AverageBytesPerSecond];
}
/// <summary>
/// Initialises WASAPI playback for the given provider, locates this process's
/// audio session to receive volume event notifications, then starts playback.
/// </summary>
/// <param name="waveProvider">Audio source to play</param>
protected void Init(IWaveProvider waveProvider)
{
    @out = new WasapiOut();
    @out.Init(waveProvider);

    using (var deviceEnumerator = new MMDeviceEnumerator())
    {
        var sessionList = deviceEnumerator.GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia).AudioSessionManager.Sessions;
        var currentProcessId = System.Diagnostics.Process.GetCurrentProcess().Id;
        // find the audio session belonging to this process and subscribe to its events
        for (var index = 0; index < sessionList.Count; index++)
        {
            var candidate = sessionList[index];
            if (candidate.GetProcessID == currentProcessId)
            {
                appVolume = candidate;
                appVolume.RegisterEventClient(this);
                break;
            }
        }
    }

    @out.Play();
}
private GCHandle callbackHandle; // for the user callback
/// <summary>
/// Initializes a new instance of the <see cref="WaveOutBuffer"/> class.
/// Pins the data buffer and header so native winmm code can safely write to them,
/// then prepares the header with the device.
/// </summary>
/// <param name="hWaveOut">WaveOut device to write to</param>
/// <param name="bufferSize">Buffer size in bytes</param>
/// <param name="bufferFillStream">Stream to provide more data</param>
/// <param name="waveOutLock">Lock to protect WaveOut API's from being called on >1 thread</param>
public WaveOutBuffer(IntPtr hWaveOut, Int32 bufferSize, IWaveProvider bufferFillStream, object waveOutLock)
{
    this.bufferSize = bufferSize;
    buffer = new byte[bufferSize];
    // pin the managed buffer: its address must stay stable while native code writes into it
    bufferHandle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
    this.waveOutPtr = hWaveOut;
    waveStream = bufferFillStream;
    this.waveOutLock = waveOutLock;

    header = new WaveHeader();
    // the header is held by winmm until unprepared, so it must be pinned too
    headerHandle = GCHandle.Alloc(header, GCHandleType.Pinned);
    header.DataBuffer = bufferHandle.AddrOfPinnedObject();
    header.BufferLength = bufferSize;
    header.Loops = 1;
    // stash a handle to this instance in UserData so the callback can recover the owning buffer
    callbackHandle = GCHandle.Alloc(this);
    header.UserData = (IntPtr)callbackHandle;

    lock (waveOutLock)
    {
        MmException.Try(WaveInterop.NativeMethods.waveOutPrepareHeader(hWaveOut, header, Marshal.SizeOf(header)), "waveOutPrepareHeader");
    }
}
/// <summary>
/// Encodes audio from the input provider into the given output file using Media Foundation.
/// </summary>
/// <param name="outputFile">Path of the file to write</param>
/// <param name="inputProvider">Audio source; must be PCM or IEEE float</param>
/// <exception cref="ArgumentException">Thrown when the input encoding is not PCM or IEEE float</exception>
public void Encode(string outputFile, IWaveProvider inputProvider)
{
    var inputEncoding = inputProvider.WaveFormat.Encoding;
    if (inputEncoding != WaveFormatEncoding.Pcm && inputEncoding != WaveFormatEncoding.IeeeFloat)
    {
        throw new ArgumentException("Encode input format must be PCM or IEEE float");
    }

    var inputMediaType = new MediaType(inputProvider.WaveFormat);
    var sinkWriter = MediaFoundationEncoder.CreateSinkWriter(outputFile);
    try
    {
        int streamIndex;
        sinkWriter.AddStream(this.outputMediaType.MediaFoundationObject, out streamIndex);
        sinkWriter.SetInputMediaType(streamIndex, inputMediaType.MediaFoundationObject, null);
        this.PerformEncode(sinkWriter, streamIndex, inputProvider);
    }
    finally
    {
        // release COM objects even when encoding throws
        Marshal.ReleaseComObject(sinkWriter);
        Marshal.ReleaseComObject(inputMediaType.MediaFoundationObject);
    }
}
/// <summary>
/// Create a new DmoEffectWaveProvider
/// </summary>
/// <param name="inputProvider">Input Stream</param>
/// <exception cref="NotSupportedException">Thrown when the DMO effector is unavailable</exception>
/// <exception cref="ArgumentException">Thrown when the input format is not supported by the effector</exception>
public DmoEffectWaveProvider(IWaveProvider inputProvider)
{
    this.inputProvider = inputProvider;
    effector = new TDmoEffector();

    var dmo = effector.MediaObject;
    if (dmo == null)
    {
        throw new NotSupportedException(@"Dmo Effector Not Supported: " + nameof(TDmoEffector));
    }
    if (!dmo.SupportsInputWaveFormat(0, inputProvider.WaveFormat))
    {
        throw new ArgumentException(@"Unsupported Input Stream format", nameof(inputProvider));
    }

    // the effect is configured as a pass-through format-wise: output matches input
    dmo.AllocateStreamingResources();
    dmo.SetInputWaveFormat(0, this.inputProvider.WaveFormat);
    dmo.SetOutputWaveFormat(0, this.inputProvider.WaveFormat);
}
/// <summary>
/// Creates a Pitch analyser for the given source.
/// The source must be 44.1kHz mono IEEE float audio.
/// </summary>
/// <param name="source">Audio source to analyse</param>
/// <exception cref="ArgumentException">Thrown when the source format is unsupported</exception>
public Pitch(IWaveProvider source)
{
    var format = source.WaveFormat;
    if (format.SampleRate != 44100)
    {
        throw new ArgumentException("Source must be at 44.1kHz");
    }
    if (format.Encoding != WaveFormatEncoding.IeeeFloat)
    {
        throw new ArgumentException("Source must be IEEE floating point audio data");
    }
    if (format.Channels != 1)
    {
        throw new ArgumentException("Source must be a mono input source");
    }

    this.source = source;
    this.pitchDetector = new Autocorrelator(format.SampleRate);
    this.waveBuffer = new WaveBuffer(8192);
}
private GCHandle _hThis; // for the user callback
/// <summary>
/// creates a new wavebuffer
/// Pins the data buffer and header so native winmm code can write to them,
/// then prepares the header with the device.
/// </summary>
/// <param name="hWaveOut">WaveOut device to write to</param>
/// <param name="bufferSize">Buffer size in bytes</param>
/// <param name="bufferFillStream">Stream to provide more data</param>
/// <param name="waveOutLock">Lock to protect WaveOut API's from being called on >1 thread</param>
public WaveBuffer(IntPtr hWaveOut, Int32 bufferSize, IWaveProvider bufferFillStream, object waveOutLock)
{
    _bufferSize = bufferSize;
    _buffer = new byte[bufferSize];
    // pin the managed byte array so the native API sees a stable address
    _hBuffer = GCHandle.Alloc(_buffer, GCHandleType.Pinned);
    _hWaveOut = hWaveOut;
    _waveStream = bufferFillStream;
    _waveOutLock = waveOutLock;

    _header = new WaveHeader();
    // the header is retained by winmm until unprepared, so it must be pinned as well
    _hHeader = GCHandle.Alloc(_header, GCHandleType.Pinned);
    _header.dataBuffer = _hBuffer.AddrOfPinnedObject();
    _header.bufferLength = bufferSize;
    _header.loops = 1;
    // round-trip a handle to this instance through userData so the callback can find us
    _hThis = GCHandle.Alloc(this);
    _header.userData = (IntPtr)_hThis;

    lock (waveOutLock)
    {
        MmException.Try(WaveInterop.waveOutPrepareHeader(hWaveOut, _header, Marshal.SizeOf(_header)), "waveOutPrepareHeader");
    }
}
//called when data for any output pin is requested
/// <summary>
/// Per-frame evaluation: reacts to driver/wave/play pin changes by (re)creating
/// the ASIO output and starting/stopping playback, and shows the driver control
/// panel on demand.
/// </summary>
/// <param name="SpreadMax">Number of slices to produce on the output pin</param>
public void Evaluate(int SpreadMax)
{
    FOutput.SliceCount = SpreadMax;
    // driver selection changed: rebuild the ASIO device and resume playback if requested
    if (FDriverIn.IsChanged)
    {
        CreateAsio();
        if (FPlayIn[0])
        {
            FAsioOut.Play();
        }
    }
    if (FShowPanelIn[0])
    {
        FAsioOut.ShowControlPanel();
    }
    // wave source changed: rebuild as well
    // NOTE(review): if driver and wave change in the same frame, CreateAsio runs twice — confirm this is intended
    if (FWave != FWaveIn[0])
    {
        FWave = FWaveIn[0];
        CreateAsio();
        if (FPlayIn[0])
        {
            FAsioOut.Play();
        }
    }
    // play pin toggled: start or stop without recreating the device
    if (FPlayIn.IsChanged)
    {
        if (FPlayIn[0])
        {
            FAsioOut.Play();
        }
        else
        {
            FAsioOut.Stop();
        }
    }
}