/// <summary>
/// Wraps <paramref name="input"/> so that its channel count matches the mixer's.
/// Besides the pass-through case, only mono-to-stereo conversion is supported.
/// </summary>
private ISampleProvider ConvertToRightChannelCount(IWaveProvider input)
{
    var inputChannels = input.WaveFormat.Channels;
    var mixerChannels = mixer.WaveFormat.Channels;

    if (inputChannels == mixerChannels)
    {
        // Already compatible; just expose it as a sample provider.
        return input.ToSampleProvider();
    }

    if (inputChannels == 1 && mixerChannels == 2)
    {
        return new MonoToStereoSampleProvider(input.ToSampleProvider());
    }

    throw new NotImplementedException("Not yet implemented this channel count conversion");
}
/// <summary>
/// Builds a sample-provider chain that converts <paramref name="waveProvider"/>
/// to the sample rate and channel count of <paramref name="targetFormat"/>.
/// </summary>
/// <exception cref="InvalidWaveFormatException">
/// Thrown when no suitable channel-count conversion exists.
/// </exception>
public ConvertingSampleProvider(IWaveProvider waveProvider, WaveFormat targetFormat)
{
    if (waveProvider.WaveFormat.SampleRate != targetFormat.SampleRate)
    {
        // The MediaFoundationResampler is fed IEEE-float input; non-float
        // sources are widened first.
        // FIX: removed the redundant 'as MediaFoundationResampler' cast — the
        // local below already has the concrete type.
        MediaFoundationResampler mfResampler;
        if (waveProvider.WaveFormat.Encoding == WaveFormatEncoding.IeeeFloat)
        {
            mfResampler = new MediaFoundationResampler(waveProvider, targetFormat);
        }
        else
        {
            mfResampler = new MediaFoundationResampler(new Wave16ToFloatProvider(waveProvider), targetFormat);
        }
        resampler = mfResampler;
        provider = mfResampler.ToSampleProvider();
    }
    else
    {
        provider = waveProvider.ToSampleProvider();
    }

    if (provider.WaveFormat.Channels != targetFormat.Channels)
    {
        // FIX: was 'targetFormat.Channels == 1 & ...' — non-short-circuit
        // bitwise '&' where logical '&&' was intended.
        if (targetFormat.Channels == 1 && provider.WaveFormat.Channels > 1)
        {
            provider = provider.ToMono();
        }
        else if (targetFormat.Channels == 2)
        {
            provider = provider.ToStereo();
        }
        else
        {
            throw new InvalidWaveFormatException($"Couldn´t find a suitable conversion from {provider.WaveFormat.Channels} to {targetFormat.Channels} Channels");
        }
    }
}
/// <summary>
/// Scans <paramref name="reader"/> block by block for the two-tone pattern and
/// returns the elapsed whole seconds at the point of detection, or -1 if the
/// stream ends without the pattern being found.
/// </summary>
private static int SecondsUntilPatternConcluded(IWaveProvider reader, int targetFrequency1, int targetFrequency2)
{
    var sampleProvider = reader.ToSampleProvider();
    var toneDetector = new TonePatternDetector(targetFrequency1, targetFrequency2, sampleProvider.WaveFormat.SampleRate);
    var buffer = new float[BufferSize];
    long sampleCount = 0;
    while (true)
    {
        // ISampleProvider.Read returns a count of samples, not bytes.
        var samplesRead = sampleProvider.Read(buffer, 0, buffer.Length);
        sampleCount += samplesRead;
        if (samplesRead == 0)
        {
            break;
        }
        if (samplesRead < buffer.Length)
        {
            // FIX: the final partial block used to be discarded unchecked.
            // Zero the stale tail so the detector only sees fresh samples.
            Array.Clear(buffer, samplesRead, buffer.Length - samplesRead);
        }
        if (toneDetector.Detected(buffer))
        {
            return TimeStampHelper.GetElapsedSeconds(sampleProvider.WaveFormat.SampleRate, sampleCount);
        }
        if (samplesRead < buffer.Length)
        {
            break;
        }
    }
    return -1;
}
/// <summary>
/// Scans <paramref name="reader"/> block by block for the configured tone
/// pattern and returns the elapsed seconds at detection, or -1 if the stream
/// ends without the pattern being found.
/// </summary>
private int SecondsUntilPatternConcluded(IWaveProvider reader)
{
    var sampleProvider = reader.ToSampleProvider();
    var toneDetector = new TonePatternDetector(_mockConfigurationReader);
    var buffer = new float[BufferSize];
    long sampleCount = 0;
    while (true)
    {
        // ISampleProvider.Read returns a sample count, despite the name 'bytesRead'.
        var bytesRead = sampleProvider.Read(buffer, 0, buffer.Length);
        sampleCount += bytesRead;
        if (bytesRead < buffer.Length)
        {
            // NOTE(review): a final partial buffer is discarded without being
            // run through the detector — confirm a pattern ending in the last
            // (short) block is acceptable to miss.
            break;
        }
        if (toneDetector.Detected(buffer, sampleProvider.WaveFormat.SampleRate))
        {
            // NOTE(review): TimeSpan.Seconds is the 0-59 seconds *component*;
            // if detection can occur after more than a minute of audio,
            // (int)TotalSeconds is probably what was intended — verify.
            return(ElapsedTimeSpanHelper.GetElapsedTimeSpan(sampleProvider.WaveFormat.SampleRate, sampleCount).Seconds);
        }
    }
    return(-1);
}
/// <summary>
/// Builds a wave-provider chain converting <paramref name="waveProvider"/> to
/// <paramref name="targetFormat"/> in up to four links: sample rate, encoding,
/// bit depth, then channel count.
/// </summary>
/// <param name="ignoreChannels">When true, a channel-count mismatch is left unconverted.</param>
/// <exception cref="InvalidWaveFormatException">No suitable conversion exists for a link.</exception>
public ConvertingWaveProvider(IWaveProvider waveProvider, WaveFormat targetFormat, bool ignoreChannels = false)
{
    int chain = 0;
    log.Info($"New ConvertingWaveProvider for '{waveProvider}' from '{waveProvider.WaveFormat.ToStringBetter()}' to '{targetFormat.ToStringBetter()}'");

    // Link 1: sample rate.
    if (waveProvider.WaveFormat.SampleRate != targetFormat.SampleRate)
    {
        chain++;
        log.Info($"-> using MediaFoundationResampler");
        if (waveProvider.WaveFormat.Encoding == WaveFormatEncoding.IeeeFloat)
        {
            provider = new WrappedProvider(resampler = new MediaFoundationResampler(waveProvider, targetFormat));
        }
        else
        {
            // Non-float input is widened to IEEE float before resampling.
            provider = new WrappedProvider(resampler = new MediaFoundationResampler(new Wave16ToFloatProvider(waveProvider), targetFormat));
        }
    }
    else
    {
        provider = waveProvider;
    }

    // Link 2: encoding.
    if (provider.WaveFormat.Encoding != targetFormat.Encoding)
    {
        chain++;
        provider = (provider.WaveFormat.Encoding, targetFormat.Encoding) switch
        {
            (WaveFormatEncoding.Pcm, WaveFormatEncoding.IeeeFloat) => provider.ToSampleProvider().ToWaveProvider(),
            (WaveFormatEncoding.IeeeFloat, WaveFormatEncoding.Pcm) when targetFormat.BitsPerSample == 16 => new WaveFloatTo16Provider(provider),
            (WaveFormatEncoding.IeeeFloat, WaveFormatEncoding.Pcm) => new PCMConversionWaveProvider(provider.ToSampleProvider(), targetFormat.BitsPerSample / 8),
            // FIX: this message previously talked about channel counts — a
            // copy-paste from the channel branch below. Report the encodings.
            _ => throw new InvalidWaveFormatException($"Couldn´t find a suitable conversion from {provider.WaveFormat.Encoding} to {targetFormat.Encoding} Encoding")
        };
    }

    // Link 3: bit depth (PCM only).
    if (provider.WaveFormat.Encoding == WaveFormatEncoding.Pcm && provider.WaveFormat.BitsPerSample != targetFormat.BitsPerSample)
    {
        chain++;
        provider = new PCMConversionWaveProvider(provider.ToSampleProvider(), targetFormat.BitsPerSample / 8);
    }

    // Link 4: channel count.
    if (!ignoreChannels && provider.WaveFormat.Channels != targetFormat.Channels)
    {
        chain++;
        // FIX: was 'targetFormat.Channels == 1 & ...' — non-short-circuit
        // bitwise '&' where logical '&&' was intended.
        // NOTE(review): the *16 providers expect 16-bit PCM input — confirm the
        // earlier links always guarantee that here.
        if (targetFormat.Channels == 1 && provider.WaveFormat.Channels == 2)
        {
            provider = new StereoToMonoProvider16(provider);
        }
        else if (targetFormat.Channels == 2)
        {
            provider = new MonoToStereoProvider16(provider);
        }
        else
        {
            throw new InvalidWaveFormatException($"Couldn´t find a suitable conversion from {provider.WaveFormat.Channels} to {targetFormat.Channels} Channels");
        }
    }

    log.Info($"-> Conversion successfull using {chain} links");
}
/// <summary>
/// Resamples <paramref name="input"/> to the sample rate of <paramref name="format"/>;
/// the input is returned unchanged when the rates already match.
/// </summary>
public static IWaveProvider ConvertToRightSampleRate(this IWaveProvider input, WaveFormat format)
{
    var targetRate = format.SampleRate;
    if (input.WaveFormat.SampleRate == targetRate)
    {
        return input;
    }

    var resampled = new WdlResamplingSampleProvider(input.ToSampleProvider(), targetRate);
    return resampled.ToWaveProvider();
}
/// <summary>
/// Runs <paramref name="waveProvider"/> through <paramref name="effectsChain"/>,
/// or converts it straight to a sample provider when no effect is configured.
/// </summary>
public static ISampleProvider ApplyEffects(this IWaveProvider waveProvider, Effects.Effect effectsChain)
{
    var bypass = effectsChain is null || effectsChain is Effects.NoEffect;
    if (bypass)
    {
        // Nothing to apply — skip the BGC stream round-trip entirely.
        return waveProvider.ToSampleProvider();
    }

    var monoStream = waveProvider.ToBGCStream().EnsureMono();
    return effectsChain.ApplyEffects(monoStream).LimitStream().ToSampleProvider();
}
/// <summary>
/// Wraps <paramref name="source"/> as a block-wise sample source, downsampled to
/// the detector's maximum rate and (by default) mixed down to mono.
/// </summary>
public StaticSampleSource(DetectorConfig config, IWaveProvider source, bool forceMono = true)
{
    var sampleProvider = source.ToSampleProvider();
    if (forceMono)
    {
        sampleProvider = sampleProvider.AsMono();
    }

    samples = sampleProvider
        .DownsampleTo(config.MaxSampleRate)
        .Blockwise(config.SampleBlockSize);

    // Optimistically assume at least one full block of samples will be read.
    numSamplesRead = samples.BlockSize;
}
/// <summary>
/// Adapts an NAudio wave provider to a BGC stream with an optional per-channel
/// sample limit; <c>int.MaxValue</c> means "unbounded".
/// </summary>
public WaveProviderToBGCStream(
    IWaveProvider stream,
    int channelSamples = int.MaxValue)
{
    internalWaveProvider = stream;
    internalSampleProvider = stream.ToSampleProvider();
    ChannelSamples = channelSamples;

    // An unbounded per-channel limit implies an unbounded total as well.
    TotalSamples = channelSamples == int.MaxValue
        ? int.MaxValue
        : Channels * channelSamples;
}
/// <summary>
/// Wraps <paramref name="sourceProvider"/> in a SoundTouch pipeline so that
/// playback speed can be varied.
/// </summary>
public SpeedControlSampleProvider(IWaveProvider sourceProvider, int readDurationMilliseconds, SoundTouchProfile soundTouchProfile)
{
    soundTouch = new SoundTouch();
    SetSoundTouchProfile(soundTouchProfile);

    this.sourceProvider = sourceProvider.ToSampleProvider();
    soundTouch.SetSampleRate(WaveFormat.SampleRate);
    channelCount = WaveFormat.Channels;
    soundTouch.SetChannels(channelCount);

    // One source read covers readDurationMilliseconds of audio.
    sourceReadBuffer = new float[(WaveFormat.SampleRate * channelCount * (long)readDurationMilliseconds) / 1000];
    // support down to 0.1 speed: output can be up to 10x the input size.
    soundTouchReadBuffer = new float[sourceReadBuffer.Length * 10];

    PlaybackRate = 0.9f;
}
/// <summary>
/// Chains a volume stage into a simple dynamic-range compressor over the given provider.
/// </summary>
public DRCSampleProvider(IWaveProvider waveProvider)
    : base(waveProvider)
{
    var samples = waveProvider.ToSampleProvider();
    volumeSampleProvider_ = new VolumeSampleProvider(samples);
    drcSampleProvider_ = new SimpleCompressorEffect(volumeSampleProvider_);
}
// Worker-thread body: sets up resampling if the source format needs it,
// pre-fills the WASAPI buffer, then keeps topping it up until playback stops.
// Any exception is captured and forwarded via RaisePlaybackStopped.
private void PlayThread()
{
    ResamplerDmoStream resamplerDmoStream = null;
    IWaveProvider playbackProvider = sourceProvider;
    Exception exception = null;
    WdlResamplingSampleProvider resamplerWdlStream = null;
    try
    {
        if (dmoResamplerNeeded)
        {
            if (!windowsN)
            {
                resamplerDmoStream = new ResamplerDmoStream(sourceProvider, outputFormat);
                playbackProvider = resamplerDmoStream;
            }
            else
            {
                // 'windowsN' editions lack the media components the DMO
                // resampler needs; fall back to the managed WDL resampler.
                resamplerWdlStream = new WdlResamplingSampleProvider(sourceProvider.ToSampleProvider(), outputFormat.SampleRate);
                playbackProvider = resamplerWdlStream.ToWaveProvider();
            }
        }

        // fill a whole buffer
        bufferFrameCount = audioClient.BufferSize;
        bytesPerFrame = outputFormat.Channels * outputFormat.BitsPerSample / 8;
        readBuffer = new byte[bufferFrameCount * bytesPerFrame];
        FillBuffer(playbackProvider, bufferFrameCount);

        // Create WaitHandle for sync
        var waitHandles = new WaitHandle[] { frameEventWaitHandle };

        audioClient.Start();

        while (playbackState != PlaybackState.Stopped)
        {
            // If using Event Sync, Wait for notification from AudioClient or Sleep half latency
            int indexHandle = 0;
            if (isUsingEventSync)
            {
                indexHandle = WaitHandle.WaitAny(waitHandles, 3 * latencyMilliseconds, false);
            }
            else
            {
                Thread.Sleep(latencyMilliseconds / 2);
            }

            // If still playing and notification is ok
            if (playbackState == PlaybackState.Playing && indexHandle != WaitHandle.WaitTimeout)
            {
                // See how much buffer space is available.
                int numFramesPadding;
                if (isUsingEventSync)
                {
                    // In exclusive mode, always ask the max = bufferFrameCount = audioClient.BufferSize
                    numFramesPadding = (shareMode == AudioClientShareMode.Shared) ? audioClient.CurrentPadding : 0;
                }
                else
                {
                    numFramesPadding = audioClient.CurrentPadding;
                }
                int numFramesAvailable = bufferFrameCount - numFramesPadding;
                // Skip tiny refills; see https://naudio.codeplex.com/workitem/16363
                if (numFramesAvailable > 10)
                {
                    FillBuffer(playbackProvider, numFramesAvailable);
                }
            }
        }

        // Give the last written buffer time to play out before stopping.
        Thread.Sleep(latencyMilliseconds / 2);
        audioClient.Stop();
        if (playbackState == PlaybackState.Stopped)
        {
            audioClient.Reset();
        }
    }
    catch (Exception e)
    {
        exception = e;
    }
    finally
    {
        // Only the DMO stream is disposable; the WDL resampler needs no cleanup.
        if (resamplerDmoStream != null)
        {
            resamplerDmoStream.Dispose();
        }
        RaisePlaybackStopped(exception);
    }
}
/// <summary>
/// Swaps in a new source provider and records its wave format.
/// </summary>
public void Update(IWaveProvider src)
{
    this.format = src.WaveFormat;
    this.src = src.ToSampleProvider();
}
/// <summary>
/// Creates a resampler over <paramref name="src"/> and registers this object as
/// the globally accessible instance.
/// </summary>
public AudioResampler(IWaveProvider src)
{
    this.format = src.WaveFormat;
    this.src = src.ToSampleProvider();
    // Last-constructed-wins singleton registration.
    Instance = this;
}
/// <summary>
/// Adapts the given provider to a mono, 16-bit wave stream at the requested sample rate.
/// </summary>
public PullAdapter(IWaveProvider provider, int sampleRate)
{
    var mono = provider.ToSampleProvider().ToMono();
    _provider = mono.Resample(sampleRate).ToWaveProvider16();
}