/// <summary>
/// Initializes a new instance of the <see cref="DirectStreamingSource"/> class.
/// </summary>
/// <param name="waveFormat">The WaveFormat of the source.</param>
/// <param name="apu">The APU that delivers the streamed samples.</param>
public DirectStreamingSource(WaveFormat waveFormat, IAPU apu)
{
    if (waveFormat == null)
    {
        throw new ArgumentNullException("waveFormat");
    }

    _waveFormat = waveFormat;
    _streamingSource = apu;
    MaxBufferSize = 88000;
}
public SimpleMixer(int channelCount, int sampleRate)
{
    if (channelCount < 1)
        throw new ArgumentOutOfRangeException("channelCount");
    if (sampleRate < 1)
        throw new ArgumentOutOfRangeException("sampleRate");

    _waveFormat = new WaveFormat(sampleRate, 32, channelCount, AudioEncoding.IeeeFloat);
}
public NVorbisSource(Stream stream)
{
    if (stream == null)
        throw new ArgumentNullException("stream");
    if (!stream.CanRead)
        throw new ArgumentException("Stream is not readable.", "stream");

    _stream = stream;
    _vorbisReader = new VorbisReader(stream, false);
    _waveFormat = new WaveFormat(_vorbisReader.SampleRate, 32, _vorbisReader.Channels, AudioEncoding.IeeeFloat);
}
public void Initialize(WaveFormat waveFormat)
{
    if (_isInitialized)
        throw new InvalidOperationException("Can't reuse SampleAnalyser.");
    if (waveFormat == null)
        throw new ArgumentNullException("waveFormat");

    // One min/max peak pair per channel.
    _peaks = new float[waveFormat.Channels, 2];
    _waveFormat = waveFormat;
    _isInitialized = true;
}
public MetronomeGenerator(double bpm, double frequency, double amplitude)
{
    if (frequency <= 0)
        throw new ArgumentOutOfRangeException("frequency");
    if (amplitude < 0 || amplitude > 1)
        throw new ArgumentOutOfRangeException("amplitude");

    Frequency = frequency;
    Amplitude = amplitude;
    Bpm = bpm;
    _waveFormat = new WaveFormat(44100, 32, 1, AudioEncoding.IeeeFloat);
}
/// <summary>
/// Initializes a new instance of the <see cref="CircularWriteableBufferingSource"/> class.
/// </summary>
/// <param name="waveFormat">The WaveFormat of the source.</param>
/// <param name="bufferSize">Buffersize in bytes. Must be a multiple of the format's BlockAlign.</param>
/// <param name="latency">Latency in milliseconds.</param>
public CircularWriteableBufferingSource(WaveFormat waveFormat, int bufferSize, int latency)
{
    if (waveFormat == null)
        throw new ArgumentNullException("waveFormat");
    if (bufferSize <= 0 || (bufferSize % waveFormat.BlockAlign) != 0)
        throw new ArgumentException("Invalid bufferSize.");

    MaxBufferSize = bufferSize;
    _waveFormat = waveFormat;
    _buffer = new byte[bufferSize];
    Latency = latency;
    LatencyInBytes = (int)_waveFormat.MillisecondsToBytes(latency);
}
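// A minimal usage sketch for the constructor above (not from the source): a
// one-second circular buffer for 44.1 kHz 16-bit stereo with 100 ms latency.
// BytesPerSecond is always a multiple of BlockAlign, so the size check passes.
var bufferFormat = new WaveFormat(44100, 16, 2);
var bufferingSource = new CircularWriteableBufferingSource(bufferFormat, bufferFormat.BytesPerSecond, 100);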
public async Task<ActionResult<BestMatch>> PostWebM()
{
    // Buffer the raw request body in memory.
    await using var ms = new MemoryStream();
    await Request.Body.CopyToAsync(ms);
    ms.Position = 0;

    // Note: 16-bit IeeeFloat is contradictory; IEEE float samples are 32-bit.
    // This should likely be (48000, 32, 2, AudioEncoding.IeeeFloat), or
    // (48000, 16, 2, AudioEncoding.Pcm) if the body carries 16-bit PCM samples.
    var format = new CSCore.WaveFormat(48000, 16, 2, AudioEncoding.IeeeFloat);
    var reader = new RawDataReader(ms, format);

    // Wrap the raw samples in a WAV container before fingerprinting.
    await using var outs = new MemoryStream();
    reader.WriteToWaveStream(outs);
    var bytes = outs.ToArray();
    return await MatchFingerprintAsWav(bytes);
}
public void StartRecordingDevice(MMDevice recordingDevice)
{
    if (recordingDevice == null)
    {
        Console.WriteLine("No devices found.");
        return;
    }

    StopRecording();

    // Capture whatever the device renders (loopback).
    soundIn = new CSCore.SoundIn.WasapiLoopbackCapture { Device = recordingDevice };
    soundIn.Initialize();

    soundInSource = new SoundInSource(soundIn) { FillWithZeros = false };

    // Normalize the capture to 44.1 kHz, 16-bit stereo.
    convertedSource = soundInSource.ChangeSampleRate(44100).ToSampleSource().ToWaveSource(16);
    convertedSource = convertedSource.ToStereo();

    soundInSource.DataAvailable += OnDataAvailable;
    soundIn.Start();

    waveFormat = convertedSource.WaveFormat;

    // Two half-second buffers (double buffering).
    buffer0 = new BufferBlock { Data = new byte[convertedSource.WaveFormat.BytesPerSecond / 2] };
    buffer1 = new BufferBlock { Data = new byte[convertedSource.WaveFormat.BytesPerSecond / 2] };

    enabled = true;

    eventThread = new Thread(EventThread)
    {
        Name = "Loopback Event Thread",
        IsBackground = true
    };
    eventThread.Start(new WeakReference<LoopbackRecorder>(this));
}
private void ReloadAPU()
{
    _apu = _gameboy.APU;
    // Bits per sample = sample size in bytes * 8.
    _waveFormat = new WaveFormat(_apu.SampleRate, _apu.SampleSize * 8, _apu.NumChannels);
    _source = new DirectStreamingSource(_waveFormat, _apu);
    _soundOut = GetSoundOut();
    _soundOut.Initialize(_source);
    _soundOut.Volume = 0.2f;
}
public static Guid SubTypeFromWaveFormat(WaveFormat waveFormat)
{
    if (waveFormat == null)
        throw new ArgumentNullException("waveFormat");
    if (waveFormat is WaveFormatExtensible)
        return ((WaveFormatExtensible)waveFormat).SubFormat;

    return MediaTypes.MediaTypeFromEncoding(waveFormat.WaveFormatTag); //todo: mp3, gsm,...?
}
/// <summary>
/// Returns the SubType-Guid of a <paramref name="waveFormat" />. If the specified <paramref name="waveFormat" />
/// does not contain a SubType-Guid, the <see cref="WaveFormat.WaveFormatTag" /> gets converted to the equivalent
/// SubType-Guid using the <see cref="AudioSubTypes.SubTypeFromEncoding" /> method.
/// </summary>
/// <param name="waveFormat"><see cref="WaveFormat" /> which gets used to determine the SubType-Guid.</param>
/// <returns>SubType-Guid of the specified <paramref name="waveFormat" />.</returns>
public static Guid SubTypeFromWaveFormat(WaveFormat waveFormat)
{
    if (waveFormat == null)
        throw new ArgumentNullException("waveFormat");
    if (waveFormat is WaveFormatExtensible)
        return ((WaveFormatExtensible)waveFormat).SubFormat;

    return AudioSubTypes.SubTypeFromEncoding(waveFormat.WaveFormatTag);
}
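// A minimal usage sketch (not from the source): a plain PCM WaveFormat carries
// no embedded SubType-Guid, so the method falls back to the encoding-based lookup.
var pcmFormat = new WaveFormat(44100, 16, 2); // WaveFormatTag defaults to Pcm
Guid subType = SubTypeFromWaveFormat(pcmFormat);
// subType is the GUID that AudioSubTypes.SubTypeFromEncoding returns for Pcm.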
public WaveFormat(WaveFormat waveFormat, int sampleRate)
    : this(sampleRate, waveFormat.BitsPerSample, waveFormat.Channels, waveFormat._encoding)
{
}
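// Hypothetical usage of the copy constructor above: derive a 48 kHz variant of
// an existing format, keeping its bits per sample, channel count, and encoding.
var original = new WaveFormat(44100, 16, 2);
var resampled = new WaveFormat(original, 48000);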
public override TimeSpan ToTimeSpan(WaveFormat waveFormat, long rawElements)
{
    return TimeSpan.FromMilliseconds(waveFormat.BytesToMilliseconds(rawElements));
}
public override long ToRawElements(WaveFormat waveFormat, TimeSpan timeSpan)
{
    return waveFormat.MillisecondsToBytes(timeSpan.TotalMilliseconds);
}
/// <summary>
/// Converts raw elements to a <see cref="TimeSpan"/> value. The unit of these raw elements depends on the
/// implementation. For more information, see <see cref="TimeConverter"/>.
/// </summary>
/// <param name="waveFormat">The <see cref="WaveFormat"/> of the source which gets used to convert the <paramref name="rawElements"/>.</param>
/// <param name="rawElements">The raw elements to convert to a <see cref="TimeSpan"/>.</param>
/// <returns>The <see cref="TimeSpan"/>.</returns>
public abstract TimeSpan ToTimeSpan(WaveFormat waveFormat, long rawElements);
/// <summary>
/// Converts a <see cref="TimeSpan"/> back to the raw elements a source works with. The unit of these raw elements
/// depends on the implementation. For more information, see <see cref="TimeConverter"/>.
/// </summary>
/// <param name="waveFormat">The <see cref="WaveFormat"/> of the source which gets used to convert the <paramref name="timeSpan"/>.</param>
/// <param name="timeSpan">The <see cref="TimeSpan"/> to convert to raw elements.</param>
/// <returns>The converted <see cref="TimeSpan"/> in raw elements.</returns>
public abstract long ToRawElements(WaveFormat waveFormat, TimeSpan timeSpan);
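// A minimal round-trip sketch, assuming the byte-based overrides shown earlier
// live in a concrete subclass (the name ByteTimeConverter is hypothetical):
TimeConverter converter = new ByteTimeConverter();
var timeFormat = new WaveFormat(44100, 16, 2);                             // 176,400 bytes per second
TimeSpan duration = converter.ToTimeSpan(timeFormat, timeFormat.BytesPerSecond); // ~1 second
long rawBytes = converter.ToRawElements(timeFormat, TimeSpan.FromSeconds(1));    // ~176,400 bytes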