/// <summary>
/// Creates a new <see cref="MonoToStereoSource"/> that up-mixes a mono source to stereo.
/// </summary>
/// <param name="source">The mono (1 channel) source to wrap.</param>
/// <exception cref="ArgumentException">Thrown if <paramref name="source"/> is not mono.</exception>
public MonoToStereoSource(IWaveStream source) : base(source)
{
    if (source.WaveFormat.Channels != 1)
    {
        // Fix: original message said "stereo(1 channel)" although a MONO source is required.
        throw new ArgumentException("format of source has to be mono (1 channel)", "source");
    }
    // Output format: 32-bit IEEE float, 2 channels, same sample rate as the input.
    _waveFormat = new WaveFormat(source.WaveFormat.SampleRate, 32, 2, AudioEncoding.IeeeFloat);
}
/// <summary>
/// Builds an <see cref="Equalizer"/> preconfigured with a 10-band filter set
/// matching the source's sample rate and channel count.
/// </summary>
/// <param name="source">The stream the equalizer will process.</param>
/// <returns>A ready-to-use 10-band equalizer.</returns>
public static Equalizer Create10BandEqualizer(IWaveStream source)
{
    var sampleRate = source.WaveFormat.SampleRate;
    var channelCount = source.WaveFormat.Channels;
    var equalizer = new Equalizer(source);
    equalizer.SampleFilters = Create10BandEqFilter(sampleRate, channelCount);
    return equalizer;
}
/// <summary>
/// Creates a new <see cref="PanSource"/>; the wrapped source must be stereo.
/// </summary>
/// <param name="source">The stereo source to pan.</param>
/// <exception cref="ArgumentException">Thrown if <paramref name="source"/> is not stereo.</exception>
public PanSource(IWaveStream source) : base(source)
{
    if (source.WaveFormat.Channels == 2)
    {
        return;
    }
    throw new ArgumentException("Source has to be stereo.", "source");
}
/// <summary>
/// Creates a new <see cref="MonoToStereoSource"/> that up-mixes a mono source to stereo.
/// </summary>
/// <param name="source">The mono (1 channel) source to wrap.</param>
/// <exception cref="ArgumentException">Thrown if <paramref name="source"/> is not mono.</exception>
public MonoToStereoSource(IWaveStream source) : base(source)
{
    if (source.WaveFormat.Channels != 1)
    {
        // Fix: original message said "stereo(1 channel)" although a MONO source is required.
        throw new ArgumentException("format of source has to be mono (1 channel)", "source");
    }
    // Output format: 32-bit IEEE float, 2 channels, same sample rate as the input.
    _waveFormat = new WaveFormat(source.WaveFormat.SampleRate, 32, 2, AudioEncoding.IeeeFloat);
}
/// <summary>
/// Creates a new <see cref="StereoToMonoSource"/> that down-mixes a stereo source to mono.
/// </summary>
/// <param name="source">The stereo (2 channel) source to wrap.</param>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="source"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown if <paramref name="source"/> is not stereo.</exception>
public StereoToMonoSource(IWaveStream source) : base(source)
{
    if (source == null)
    {
        throw new ArgumentNullException("source");
    }
    if (source.WaveFormat.Channels != 2)
    {
        throw new ArgumentException("source has to have 2 channels", "source");
    }
    // Output format: 32-bit IEEE float, single channel, input's sample rate.
    var inputFormat = source.WaveFormat;
    _waveFormat = new WaveFormat(inputFormat.SampleRate, 32, 1, AudioEncoding.IeeeFloat);
}
/// <summary>
/// Stops playback (if running) and releases the wrapped wave stream.
/// </summary>
/// <param name="disposing">
/// True when invoked from an explicit Dispose call; false when invoked from a finalizer.
/// </param>
protected override void Dispose(bool disposing)
{
    if (playing)
    {
        Stop();
    }
    if (disposing)
    {
        // Fix: the original ignored `disposing` and touched the managed stream even on
        // the finalizer path; managed resources may only be disposed on explicit Dispose.
        (waveStream as IDisposable)?.Dispose();
    }
    waveStream = null;
}
/// <summary>
/// Creates a new <see cref="SampleSourceBase"/>, converting the source to an
/// <see cref="ISampleSource"/> if necessary.
/// </summary>
/// <param name="source">An <see cref="ISampleSource"/> or <see cref="IWaveSource"/>.</param>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="source"/> is null.</exception>
/// <exception cref="ArgumentException">
/// Thrown if <paramref name="source"/> is neither an ISampleSource nor an IWaveSource.
/// </exception>
public SampleSourceBase(IWaveStream source)
{
    if (source == null)
        throw new ArgumentNullException("source");
    if (source is ISampleSource)
        _source = (ISampleSource)source;
    else if (source is IWaveSource)
        _source = WaveToSampleBase.CreateConverter((IWaveSource)source);
    else
        // Fix: the original passed `source as IWaveSource` (null here) into
        // CreateConverter, producing a confusing failure far from the fault.
        throw new ArgumentException("source has to be of type IWaveSource or ISampleSource", "source");
}
/// <summary>
/// Gets the elapsed time represented by the specified number of bytes of the stream.
/// </summary>
/// <param name="source">The stream whose format defines the conversion.</param>
/// <param name="bytes">Byte count to convert; must be non-negative.</param>
/// <returns>The duration corresponding to <paramref name="bytes"/>.</returns>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="source"/> is null.</exception>
/// <exception cref="ArgumentOutOfRangeException">Thrown if <paramref name="bytes"/> is negative.</exception>
public static TimeSpan GetTime(this IWaveStream source, long bytes)
{
    if (source == null)
    {
        throw new ArgumentNullException("source");
    }
    if (bytes < 0)
    {
        // Fix: a negative value-type argument is out of range, not null
        // (ArgumentNullException was the wrong exception type here).
        throw new ArgumentOutOfRangeException("bytes");
    }
    return TimeSpan.FromMilliseconds(GetMilliseconds(source, bytes));
}
/// <summary>
/// Creates a new <see cref="StereoToMonoSource"/> that down-mixes a stereo source to mono.
/// </summary>
/// <param name="source">The stereo (2 channel) source to wrap.</param>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="source"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown if <paramref name="source"/> is not stereo.</exception>
public StereoToMonoSource(IWaveStream source) : base(source)
{
    if (source == null)
        throw new ArgumentNullException("source");
    if (source.WaveFormat.Channels != 2)
        throw new ArgumentException("source has to have 2 channels", "source");

    // Output format: 32-bit IEEE float, single channel, input's sample rate.
    _waveFormat = new WaveFormat(source.WaveFormat.SampleRate, 32, 1, AudioEncoding.IeeeFloat);
}
/// <summary>
/// Sets the position of a WaveStream as a TimeSpan.
/// </summary>
/// <param name="source">The stream to reposition.</param>
/// <param name="position">The target position; must be non-negative.</param>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="source"/> is null.</exception>
/// <exception cref="ArgumentOutOfRangeException">Thrown if <paramref name="position"/> is negative.</exception>
public static void SetPosition(this IWaveStream source, TimeSpan position)
{
    if (source == null)
        throw new ArgumentNullException("source");
    if (position.TotalMilliseconds < 0)
        throw new ArgumentOutOfRangeException("position");

    // Translate the requested time into a byte offset for this stream's format.
    source.Position = GetBytes(source, (long)position.TotalMilliseconds);
}
/// <summary>
/// Creates a new <see cref="SampleSourceBase"/>, converting the source to an
/// <see cref="ISampleSource"/> if necessary.
/// </summary>
/// <param name="source">An <see cref="ISampleSource"/> or <see cref="IWaveSource"/>.</param>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="source"/> is null.</exception>
/// <exception cref="ArgumentException">
/// Thrown if <paramref name="source"/> is neither an ISampleSource nor an IWaveSource.
/// </exception>
public SampleSourceBase(IWaveStream source)
{
    if (source == null)
    {
        throw new ArgumentNullException("source");
    }
    if (source is ISampleSource)
    {
        _source = (ISampleSource)source;
    }
    else if (source is IWaveSource)
    {
        _source = WaveToSampleBase.CreateConverter((IWaveSource)source);
    }
    else
    {
        // Fix: the original passed `source as IWaveSource` (null here) into
        // CreateConverter, producing a confusing failure far from the fault.
        throw new ArgumentException("source has to be of type IWaveSource or ISampleSource", "source");
    }
}
/// <summary>
/// Creates a new <see cref="SampleDataProvider"/>, converting the source to an
/// <see cref="ISampleSource"/> if necessary.
/// </summary>
/// <param name="source">An <see cref="ISampleSource"/> or <see cref="IWaveSource"/>.</param>
/// <exception cref="ArgumentException">
/// Thrown if <paramref name="source"/> is neither an ISampleSource nor an IWaveSource.
/// </exception>
public SampleDataProvider(IWaveStream source)
{
    if (source is ISampleSource)
    {
        _source = (ISampleSource)source;
    }
    else if (source is IWaveSource)
    {
        _source = WaveToSampleBase.CreateConverter((IWaveSource)source);
    }
    else
    {
        // Fix: corrected the grammar of the original message ("has to of type").
        throw new ArgumentException("source has to be of type IWaveSource or ISampleSource", "source");
    }
    // 40 ms worth of sample frames per analysis block.
    BlockSize = (int)(_source.WaveFormat.SampleRate * (40.0 / 1000.0));
    _sampleBuffer = new Queue<float>();
    _sampleBuffer1 = new Queue<float>();
}
/// <summary>
/// Creates a new <see cref="SampleDataProvider"/>, converting the source to an
/// <see cref="ISampleSource"/> if necessary.
/// </summary>
/// <param name="source">An <see cref="ISampleSource"/> or <see cref="IWaveSource"/>.</param>
/// <exception cref="ArgumentException">
/// Thrown if <paramref name="source"/> is neither an ISampleSource nor an IWaveSource.
/// </exception>
public SampleDataProvider(IWaveStream source)
{
    if (source is ISampleSource)
    {
        _source = (ISampleSource)source;
    }
    else if (source is IWaveSource)
    {
        _source = WaveToSampleBase.CreateConverter((IWaveSource)source);
    }
    else
    {
        // Fix: corrected the grammar of the original message ("has to of type").
        throw new ArgumentException("source has to be of type IWaveSource or ISampleSource", "source");
    }
    // 40 ms worth of sample frames per analysis block.
    BlockSize = (int)(_source.WaveFormat.SampleRate * (40.0 / 1000.0));
    _sampleBuffer = new Queue<float>();
    _sampleBuffer1 = new Queue<float>();
}
/// <summary>
/// Binds this renderer to the given wave stream and prepares the WASAPI audio client:
/// initializes the client with the stream's format, offers subscribers a chance to
/// substitute a resampled source when the format differs from <c>OutFormat</c>, wires
/// the render client (and the event handle in event-sync mode), and caches the buffer size.
/// NOTE(review): wasapi.Initialize is invoked with waveStream.Format BEFORE the
/// resample substitution — confirm this ordering is intentional, since the client keeps
/// the original format even when e.Source replaces this.waveStream.
/// </summary>
/// <param name="waveStream">The stream to render; may be replaced by a resample handler.</param>
public void Initialize(IWaveStream waveStream) { this.waveStream = waveStream; var format = WaveFormatExtensible.Make(waveStream.Format); wasapi.Initialize(waveStream.Format); if (!format.Equals(OutFormat)) { var e = new ResampleEventArgs(waveStream, OutFormat); NotifyResample(e); this.waveStream = e.Source; } renderClient = wasapi.AudioClient.AudioRenderClient; if (wasapi.EventSync) { wasapi.AudioClient.SetEventHandle(eventObject.SafeWaitHandle.DangerousGetHandle()); } bufferFrames = wasapi.AudioClient.BufferSize; }
/// <summary>
/// Converts a duration in milliseconds to the equivalent byte count for the stream.
/// </summary>
/// <param name="source">The stream whose format defines the conversion.</param>
/// <param name="milliseconds">Duration to convert; must be non-negative.</param>
/// <returns>The byte count corresponding to <paramref name="milliseconds"/>.</returns>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="source"/> is null.</exception>
/// <exception cref="ArgumentOutOfRangeException">Thrown if <paramref name="milliseconds"/> is negative.</exception>
/// <exception cref="NotSupportedException">
/// Thrown if <paramref name="source"/> is neither an IWaveSource nor an ISampleSource.
/// </exception>
// NOTE(review): for ISampleSource the MILLISECOND value is divided by 4, not the
// resulting byte count — presumably intended to account for 4-byte float samples,
// but dividing time instead of bytes looks suspicious; verify against
// MillisecondsToBytes' semantics and the ISampleSource position unit.
public static long GetBytes(this IWaveStream source, long milliseconds) { if (source == null) { throw new ArgumentNullException("source"); } if (milliseconds < 0) { throw new ArgumentOutOfRangeException("milliseconds"); } if (source is IWaveSource) { return(source.WaveFormat.MillisecondsToBytes(milliseconds)); } else if (source is ISampleSource) { return(source.WaveFormat.MillisecondsToBytes(milliseconds / 4)); } else { throw new NotSupportedException("IWaveStream-Subtype is not supported"); } }
/// <summary>
/// Creates a new <see cref="FadeInOut"/> starting at the given volume.
/// </summary>
/// <param name="source">The source to fade.</param>
/// <param name="initialVolume">The volume the fade starts from.</param>
public FadeInOut(IWaveStream source, float initialVolume)
    : base(source)
{
    // Seed the inherited volume with the caller-supplied starting level.
    base.Volume = initialVolume;
}
/// <summary>
/// Creates a new <see cref="GainSource"/> with unity gain (no amplification).
/// </summary>
/// <param name="source">The source to apply gain to.</param>
public GainSource(IWaveStream source)
    : base(source)
{
    // Unity gain: pass samples through unchanged until the caller adjusts it.
    Gain = 1;
}
/// <summary>
/// Creates a new <see cref="SingleBlockNotificationStream"/> wrapping the given source.
/// </summary>
/// <param name="source">The source to observe.</param>
public SingleBlockNotificationStream(IWaveStream source)
    : base(source)
{
    // All initialization is handled by the base class.
}
/// <summary>
/// Creates a new <see cref="NotificationSource"/> that buffers 40 ms worth of
/// sample frames per notification block.
/// </summary>
/// <param name="source">The source to observe.</param>
public NotificationSource(IWaveStream source) : base(source)
{
    // 40 ms of frames at the source's sample rate.
    BlockCount = (int)(source.WaveFormat.SampleRate * (40.0 / 1000.0));
    var capacity = BlockCount * source.WaveFormat.Channels;
    _buffer = new Queue<float>(capacity);
}
/// <summary>
/// Creates a new <see cref="AutoGain"/> with unity gain (no amplification).
/// </summary>
/// <param name="source">The source to apply automatic gain to.</param>
public AutoGain(IWaveStream source)
    : base(source)
{
    // Start neutral; the automatic gain logic adjusts from here.
    Gain = 1f;
}
/// <summary>
/// Creates a new <see cref="PanSource"/>; the wrapped source must be stereo.
/// </summary>
/// <param name="source">The stereo source to pan.</param>
/// <exception cref="ArgumentException">Thrown if <paramref name="source"/> is not stereo.</exception>
public PanSource(IWaveStream source) : base(source)
{
    // Panning only makes sense with exactly two channels.
    if (source.WaveFormat.Channels != 2)
    {
        throw new ArgumentException("Source has to be stereo.", "source");
    }
}
/// <summary>
/// Creates a new <see cref="SimpleNotificationSource"/> wrapping the given source.
/// </summary>
/// <param name="source">The source to observe.</param>
public SimpleNotificationSource(IWaveStream source)
    : base(source)
{
    // All initialization is handled by the base class.
}
/// <summary>
/// Creates a new <see cref="NotificationSource"/> that buffers 40 ms worth of
/// sample frames per notification block.
/// </summary>
/// <param name="source">The source to observe.</param>
public NotificationSource(IWaveStream source) : base(source)
{
    // 40 ms of frames at the source's sample rate.
    var format = source.WaveFormat;
    BlockCount = (int)(format.SampleRate * (40.0 / 1000.0));
    _buffer = new Queue<float>(BlockCount * format.Channels);
}
/// <summary>
/// Builds an <see cref="Equalizer"/> preconfigured with a 10-band filter set
/// matching the source's sample rate and channel count.
/// </summary>
/// <param name="source">The stream the equalizer will process.</param>
/// <returns>A ready-to-use 10-band equalizer.</returns>
public static Equalizer Create10BandEqualizer(IWaveStream source)
{
    var format = source.WaveFormat;
    var result = new Equalizer(source);
    result.SampleFilters = Create10BandEqFilter(format.SampleRate, format.Channels);
    return result;
}
/// <summary>
/// Converts a <see cref="TimeSpan"/> to the equivalent byte count for the stream.
/// </summary>
/// <param name="source">The stream whose format defines the conversion.</param>
/// <param name="timespan">Duration to convert.</param>
/// <returns>The byte count corresponding to <paramref name="timespan"/>.</returns>
public static long GetBytes(this IWaveStream source, TimeSpan timespan)
{
    var milliseconds = (long)timespan.TotalMilliseconds;
    return GetBytes(source, milliseconds);
}
/// <summary>
/// Creates event args describing a resample request: the source to convert and
/// the format it should be converted to.
/// </summary>
/// <param name="Source">The stream that needs resampling.</param>
/// <param name="OutFormat">The desired output format.</param>
// NOTE(review): PascalCase parameter names retained — renaming would break callers
// that pass these by name.
public ResampleEventArgs(IWaveStream Source, WaveFormat OutFormat)
{
    this.Source = Source;
    this.OutFormat = OutFormat;
}
/// <summary>
/// Creates event args describing a playback-stopped event.
/// </summary>
/// <param name="waveStream">The stream whose playback stopped.</param>
/// <param name="exception">The error that ended playback, or null if it stopped normally.</param>
public PlaybackStoppedEventArgs(IWaveStream waveStream, Exception exception = null)
    : base(exception)
{
    WaveStream = waveStream;
}
/// <summary>
/// Creates a new <see cref="Equalizer"/> with an (initially empty) filter
/// collection sized to the source's channel count.
/// </summary>
/// <param name="source">The stream to equalize.</param>
public Equalizer(IWaveStream source) : base(source)
{
    var channelCount = source.WaveFormat.Channels;
    _sampleFilters = new EqFilterCollection(channelCount);
}
/// <summary>
/// Creates a new <see cref="VolumeSource"/> wrapping the given source.
/// </summary>
/// <param name="source">The source whose volume will be controlled.</param>
public VolumeSource(IWaveStream source)
    : base(source)
{
    // All initialization is handled by the base class.
}
/// <summary>
/// Gets the length of a WaveStream as a TimeSpan.
/// </summary>
/// <param name="source">The stream to measure.</param>
/// <returns>The total duration of the stream.</returns>
public static TimeSpan GetLength(this IWaveStream source)
{
    return GetTime(source, source.Length);
}
/// <summary>
/// Gets the position of a WaveStream as a TimeSpan.
/// </summary>
/// <param name="source">The stream to query.</param>
/// <returns>The current playback position as a duration.</returns>
public static TimeSpan GetPosition(this IWaveStream source)
{
    return GetTime(source, source.Position);
}
/// <summary>
/// Creates a new instance of the <see cref="PeakMeter"/> class.
/// </summary>
/// <param name="source">The stream whose peaks will be measured.</param>
public PeakMeter(IWaveStream source) : base(source)
{
    // One peak slot per output channel.
    ChannelPeakValues = new float[WaveFormat.Channels];
    // Process a quarter second of samples per peak update.
    BlocksToProcess = source.WaveFormat.SampleRate / 4;
}
/// <summary>
/// Creates a new <see cref="Mp3Stream"/>, storing its collaborators for later use.
/// </summary>
/// <param name="waveStream">The audio data to encode.</param>
/// <param name="progressReporter">Receives encoding progress updates.</param>
/// <param name="outputDirectoryProvider">Supplies the directory for the encoded output.</param>
public Mp3Stream(IWaveStream waveStream, IProgressReporter progressReporter, IOutputDirectoryProvider outputDirectoryProvider)
{
    this.waveStream = waveStream;
    this.progressReporter = progressReporter;
    this.outputDirectoryProvider = outputDirectoryProvider;
}