private void CreateRepositionTestFile(string saveFile, IWaveProvider source, Action reposition)
{
    using (var writer = new WaveFileWriter(saveFile, source.WaveFormat))
    {
        // work in half-second chunks
        var chunk = new byte[writer.WaveFormat.AverageBytesPerSecond / 2];

        // 3.5 seconds of source audio (the extra half second ensures the
        // Resampler has some leftovers to drain)
        for (int i = 0; i < 7; i++)
        {
            writer.Write(chunk, 0, source.Read(chunk, 0, chunk.Length));
        }

        // two seconds of absolute silence
        Array.Clear(chunk, 0, chunk.Length);
        for (int i = 0; i < 4; i++)
        {
            writer.Write(chunk, 0, chunk.Length);
        }

        // perform the reposition under test
        reposition();

        // now read some more out
        for (int i = 0; i < 6; i++)
        {
            writer.Write(chunk, 0, source.Read(chunk, 0, chunk.Length));
        }
    }
}
/// <summary>
/// Converts one read's worth of audio from the input provider into a Media Foundation
/// sample and writes it to the sink writer.
/// </summary>
/// <param name="writer">The sink writer to write the sample to</param>
/// <param name="streamIndex">Index of the writer stream</param>
/// <param name="inputProvider">Source of audio data</param>
/// <param name="position">Sample time for this buffer (100ns units)</param>
/// <param name="managedBuffer">Scratch buffer the audio is read into</param>
/// <param name="seconds">Number of one-second reads consumed when <paramref name="flag"/> is set</param>
/// <param name="flag">When true, performs <paramref name="seconds"/> reads (keeping only the last), then clears itself</param>
/// <returns>Duration of the converted audio (100ns units); 0 when the source is exhausted</returns>
private long ConvertOneBuffer(IMFSinkWriter writer, int streamIndex, IWaveProvider inputProvider, long position, byte[] managedBuffer, int seconds, ref bool flag)
{
    long durationConverted = 0;
    int maxLength;
    IMFMediaBuffer buffer = MediaFoundationApi.CreateMemoryBuffer(managedBuffer.Length);
    buffer.GetMaxLength(out maxLength);
    IMFSample sample = MediaFoundationApi.CreateSample();
    sample.AddBuffer(buffer);
    IntPtr ptr;
    int currentLength;
    buffer.Lock(out ptr, out maxLength, out currentLength);
    // read one second of audio per call
    // assumes AverageBytesPerSecond <= managedBuffer.Length — TODO confirm at the call site
    int oneLength = inputProvider.WaveFormat.AverageBytesPerSecond;
    int read = 0;
    if (flag)
    {
        // NOTE(review): each iteration overwrites managedBuffer, so the first
        // (seconds - 1) reads are discarded — presumably an intentional skip;
        // confirm against the caller's intent.
        for (int i = 0; i < seconds; i++)
        {
            read = inputProvider.Read(managedBuffer, 0, oneLength);
        }
        flag = false;
    }
    else
    {
        read = inputProvider.Read(managedBuffer, 0, oneLength);
    }
    if (read > 0)
    {
        durationConverted = BytesToNsPosition(read, inputProvider.WaveFormat);
        Marshal.Copy(managedBuffer, 0, ptr, read);
        buffer.SetCurrentLength(read);
        buffer.Unlock();
        sample.SetSampleTime(position);
        sample.SetSampleDuration(durationConverted);
        writer.WriteSample(streamIndex, sample);
        //writer.Flush(streamIndex);
    }
    else
    {
        // nothing read: just release the lock; callers treat 0 as end of stream
        buffer.Unlock();
    }
    Marshal.ReleaseComObject(sample);
    Marshal.ReleaseComObject(buffer);
    return(durationConverted);
}
/// <summary>
/// Creates a Wave file by reading all the data from a WaveProvider
/// BEWARE: the WaveProvider MUST return 0 from its Read method when it is finished,
/// or the Wave File will grow indefinitely.
/// </summary>
/// <param name="filename">The filename to use</param>
/// <param name="sourceProvider">The source WaveProvider</param>
public static async Task CreateWaveFileAsync(string filename, IWaveProvider sourceProvider)
{
    StorageFile fileOperation = await StorageFile.GetFileFromPathAsync(filename);
    // NOTE(review): fileStream has no using block — presumably WaveFileWriterRT takes
    // ownership and disposes it when the writer is disposed; confirm, otherwise the
    // stream leaks if the writer constructor throws.
    Stream fileStream = await fileOperation.OpenStreamForWriteAsync();
    using (var writer = new WaveFileWriterRT(fileStream, sourceProvider.WaveFormat))
    {
        writer.filename = filename;
        long outputLength = 0;
        // ~4 seconds of audio per read
        var buffer = new byte[sourceProvider.WaveFormat.AverageBytesPerSecond * 4];
        while (true)
        {
            int bytesRead = sourceProvider.Read(buffer, 0, buffer.Length);
            if (bytesRead == 0)
            {
                // end of source provider
                break;
            }
            outputLength += bytesRead;
            // Write will throw exception if WAV file becomes too large
            writer.Write(buffer, 0, bytesRead);
        }
    }
}
/// <summary>
/// Fills the render client's buffer with audio from the playback provider.
/// </summary>
/// <param name="playbackProvider">Source of audio data</param>
/// <param name="frameCount">Number of frames the render client has space for</param>
private void FillBuffer(IWaveProvider playbackProvider, int frameCount)
{
    var buffer = renderClient.GetBuffer(frameCount);
    var readLength = frameCount * bytesPerFrame;
    int read = playbackProvider.Read(readBuffer, 0, readLength);
    if (read == 0)
    {
        // source exhausted; the playback loop is expected to observe this flag and stop
        playbackState = PlaybackState.Stopped;
    }
    Marshal.Copy(readBuffer, 0, buffer, read);
    if (this.isUsingEventSync && this.shareMode == AudioClientShareMode.Exclusive)
    {
        // NOTE(review): exclusive event-sync mode releases the full frameCount even
        // when read < readLength, so the tail of the device buffer may contain stale
        // data on a short read — confirm this is intentional (exclusive mode generally
        // requires releasing whole buffers).
        renderClient.ReleaseBuffer(frameCount, AudioClientBufferFlags.None);
    }
    else
    {
        // release only the frames we actually filled
        int actualFrameCount = read / bytesPerFrame;
        /*if (actualFrameCount != frameCount)
         * {
         *  Debug.WriteLine(String.Format("WASAPI wanted {0} frames, supplied {1}", frameCount, actualFrameCount ));
         * }*/
        renderClient.ReleaseBuffer(actualFrameCount, AudioClientBufferFlags.None);
    }
}
public override int Read(byte[] buffer, int offset, int count)
{
    // delegate to the source and keep a running byte position
    var bytesRead = source.Read(buffer, offset, count);
    position += bytesRead;
    return bytesRead;
}
/// <summary>
/// Read simply returns what the source returns, but writes to disk along the way
/// </summary>
public int Read(byte[] buffer, int offset, int count)
{
    var read = source.Read(buffer, offset, count);
    // tee only the bytes that actually came back out to the writer
    writer.WriteData(buffer, offset, read);
    return read;
}
/// <summary>
/// Pushes outgoing audio through the effects graph in place.
/// </summary>
/// <param name="buffer">Audio to process; overwritten with the processed output</param>
/// <param name="count">Number of bytes to process</param>
public void ProcessOutgoing(byte[] buffer, int count)
{
    // give the input audio to the beginning of our audio graph
    bufferStream.SetLatestInBuffer(buffer);
    // process it out through the effects
    // (the byte count returned by Read was previously stored in an unused local)
    outputProvider.Read(buffer, 0, count);
}
/// <summary>
/// driver buffer update callback to fill the wave buffer.
/// </summary>
/// <param name="bufferChannels">The buffer channels.</param>
void driver_BufferUpdate(IntPtr[] bufferChannels)
{
    // AsioDriver driver = sender as AsioDriver;
    int read = sourceStream.Read(waveBuffer, 0, waveBuffer.Length);
    if (read < waveBuffer.Length)
    {
        // we have stopped
        // NOTE(review): a short read leaves stale bytes in the tail of waveBuffer,
        // which is still handed to the convertor below — confirm this is acceptable.
    }
    // Call the convertor
    unsafe
    {
        // TODO : check if it's better to lock the buffer at initialization?
        fixed(void *pBuffer = &waveBuffer[0])
        {
            convertor(new IntPtr(pBuffer), bufferChannels, waveFormat.Channels, nbSamples);
        }
    }
    if (read == 0)
    {
        // source fully drained: stop playback
        Stop();
    }
}
/// <summary>
/// Reads from the source and, when the NATO tone profile setting is enabled,
/// mixes a stored tone into the samples in place.
/// </summary>
public int Read(byte[] buffer, int offset, int count)
{
    int read = source.Read(buffer, offset, count);
    if (Settings.GlobalSettingsStore.Instance.ProfileSettingsStore.GetClientSettingBool(ProfileSettingsKeys.NATOTone))
    {
        // NOTE(review): effectBytes is never used below; presumably GetEffect also
        // populates _audioEffectShort as a side effect — confirm, otherwise this call
        // is dead code.
        var effectBytes = GetEffect(read / 2);
        //mix together
        // read / 2 = number of two-byte samples read (assumes 16-bit PCM — TODO confirm)
        for (int i = 0; i < read / 2; i++)
        {
            // reassemble the sample from its two bytes, mix the tone, write it back
            short audio = ConversionHelpers.ToShort(buffer[(offset + i) * 2], buffer[((i + offset) * 2) + 1]);
            audio = (short)(audio + _audioEffectShort[i]);
            //buffer[i + offset] = effectBytes[i]+buffer[i + offset];
            byte byte1;
            byte byte2;
            ConversionHelpers.FromShort(audio, out byte1, out byte2);
            buffer[(offset + i) * 2] = byte1;
            buffer[((i + offset) * 2) + 1] = byte2;
        }
    }
    return(read);
}
/// <summary>
/// Reads one buffer's worth of audio from the source provider and wraps it in a
/// Media Foundation sample stamped with the running input time and its duration.
/// </summary>
/// <returns>The sample, or null when the source returns no more data</returns>
private IMFSample ReadFromSource()
{
    // we always read a full second
    int bytesRead = sourceProvider.Read(sourceBuffer, 0, sourceBuffer.Length);
    if (bytesRead == 0)
    {
        return(null);
    }
    var mediaBuffer = MediaFoundationApi.CreateMemoryBuffer(bytesRead);
    IntPtr pBuffer;
    int maxLength, currentLength;
    mediaBuffer.Lock(out pBuffer, out maxLength, out currentLength);
    Marshal.Copy(sourceBuffer, 0, pBuffer, bytesRead);
    mediaBuffer.Unlock();
    mediaBuffer.SetCurrentLength(bytesRead);
    var sample = MediaFoundationApi.CreateSample();
    sample.AddBuffer(mediaBuffer);
    // we'll set the time, I don't think it is needed for Resampler, but other MFTs might need it
    sample.SetSampleTime(inputPosition);
    long duration = BytesToNsPosition(bytesRead, sourceProvider.WaveFormat);
    sample.SetSampleDuration(duration);
    inputPosition += duration;
    // release our RCW reference; presumably the sample keeps the buffer alive via
    // AddBuffer's COM reference — confirm against MF documentation
    Marshal.ReleaseComObject(mediaBuffer);
    return(sample);
}
public void LoadNextChunk(IWaveProvider source, int samplePairsRequired)
{
    // here one requested sample pair maps to a single source byte
    offset = 0;
    int bytesNeeded = samplePairsRequired;
    sourceBuffer = BufferHelpers.Ensure(sourceBuffer, bytesNeeded);
    sourceBytes = source.Read(sourceBuffer, 0, bytesNeeded);
}
/// <summary>
/// Reads single-channel audio from _source and expands it into interleaved
/// multi-channel output: the source bytes go into channel _channelNum's slot of
/// each frame, all other channels are zero-filled.
/// </summary>
/// <returns>Number of output bytes produced (source bytes read * channel count)</returns>
public int Read(byte[] buffer, int offset, int count)
{
    // each output frame carries _totalChannels samples, one of which comes from the source
    int perChannelCount = count / _totalChannels;
    // pre-zero the output so the non-selected channels are silence
    Array.Clear(buffer, offset, count);
    _readBuffer = NAudio.Utils.BufferHelpers.Ensure(_readBuffer, perChannelCount);
    int readCnt = _source.Read(_readBuffer, 0, perChannelCount);
    int readIndex = 0;
    int outIndex = offset;
    int i, c;
    // walk output frame by frame until every source byte has been placed
    while (readIndex < readCnt)
    {
        for (c = 0; c < _totalChannels; c++)
        {
            for (i = 0; i < _bytesPerSample; i++)
            {
                // copy source bytes only into the selected channel's slot
                if (c % _totalChannels == _channelNum)
                {
                    buffer[outIndex] = _readBuffer[readIndex];
                    readIndex++;
                }
                outIndex++;
            }
        }
    }
    return(readCnt * _totalChannels);
}
/// <summary>
/// driver buffer update callback to fill the wave buffer.
/// </summary>
/// <param name="inputChannels">The input channels.</param>
/// <param name="outputChannels">The output channels.</param>
void driver_BufferUpdate(IntPtr[] inputChannels, IntPtr[] outputChannels)
{
    if (this.NumberOfInputChannels > 0)
    {
        // surface captured input audio to subscribers
        var audioAvailable = AudioAvailable;
        if (audioAvailable != null)
        {
            audioAvailable(this, new AsioAudioAvailableEventArgs(inputChannels, nbSamples, driver.Capabilities.InputChannelInfos[0].type));
        }
    }
    if (this.NumberOfOutputChannels > 0)
    {
        int read = sourceStream.Read(waveBuffer, 0, waveBuffer.Length);
        if (read < waveBuffer.Length)
        {
            // we have stopped
            // NOTE(review): a short read leaves stale bytes in the tail of waveBuffer,
            // which is still handed to the convertor below — confirm this is acceptable.
        }
        // Call the convertor
        unsafe
        {
            // TODO : check if it's better to lock the buffer at initialization?
            fixed(void *pBuffer = &waveBuffer[0])
            {
                convertor(new IntPtr(pBuffer), outputChannels, NumberOfOutputChannels, nbSamples);
            }
        }
        if (read == 0)
        {
            // source fully drained: stop playback
            Stop();
        }
    }
}
// Token: 0x06000945 RID: 2373 RVA: 0x0001B0A0 File Offset: 0x000192A0
/// <summary>
/// (Decompiled) Converts one buffer of audio from the input provider into a Media
/// Foundation sample and writes it to the sink writer.
/// </summary>
/// <returns>Duration of the converted audio (100ns units); 0 when the source is exhausted</returns>
private long ConvertOneBuffer(IMFSinkWriter writer, int streamIndex, IWaveProvider inputProvider, long position, byte[] managedBuffer)
{
    // num = duration converted, in 100ns units
    long num = 0L;
    IMFMediaBuffer imfmediaBuffer = MediaFoundationApi.CreateMemoryBuffer(managedBuffer.Length);
    int count;
    imfmediaBuffer.GetMaxLength(out count);
    IMFSample imfsample = MediaFoundationApi.CreateSample();
    imfsample.AddBuffer(imfmediaBuffer);
    IntPtr destination;
    int num2;
    imfmediaBuffer.Lock(out destination, out count, out num2);
    // read up to the media buffer's max length (count)
    int num3 = inputProvider.Read(managedBuffer, 0, count);
    if (num3 > 0)
    {
        num = MediaFoundationEncoder.BytesToNsPosition(num3, inputProvider.WaveFormat);
        Marshal.Copy(managedBuffer, 0, destination, num3);
        imfmediaBuffer.SetCurrentLength(num3);
        imfmediaBuffer.Unlock();
        imfsample.SetSampleTime(position);
        imfsample.SetSampleDuration(num);
        writer.WriteSample(streamIndex, imfsample);
    }
    else
    {
        // nothing read: just release the lock
        imfmediaBuffer.Unlock();
    }
    Marshal.ReleaseComObject(imfsample);
    Marshal.ReleaseComObject(imfmediaBuffer);
    return(num);
}
/// <summary>
/// Converts one buffer of audio from the input provider into a Media Foundation
/// sample (SharpDX wrappers) and writes it to the sink writer.
/// </summary>
/// <returns>Duration of the converted audio (100ns units); 0 when the source is exhausted</returns>
private long ConvertOneBuffer(SinkWriter writer, int streamIndex, IWaveProvider inputProvider, long position, byte[] managedBuffer)
{
    long durationConverted = 0;
    using var buffer = MediaFactory.CreateMemoryBuffer(managedBuffer.Length);
    using var sample = MediaFactory.CreateSample();
    sample.AddBuffer(buffer);
    // Lock returns the unmanaged pointer plus the buffer's capacity
    var ptr = buffer.Lock(out int maxLength, out int currentLength);
    int read = inputProvider.Read(managedBuffer, 0, maxLength);
    if (read > 0)
    {
        durationConverted = BytesToNsPosition(read, inputProvider.WaveFormat);
        Marshal.Copy(managedBuffer, 0, ptr, read);
        buffer.CurrentLength = read;
        buffer.Unlock();
        sample.SampleTime = position;
        sample.SampleDuration = durationConverted;
        writer.WriteSample(streamIndex, sample);
        //writer.Flush(streamIndex);
    }
    else
    {
        // nothing read: just release the lock; callers treat 0 as end of stream
        buffer.Unlock();
    }
    return(durationConverted);
}
/// <summary>
/// Reads one buffer's worth of audio from the source provider and wraps it in a
/// Media Foundation sample stamped with the running input time and its duration.
/// </summary>
/// <returns>The sample, or null when the source returns no more data</returns>
private Sample ReadFromSource()
{
    // we always read a full second
    int bytesRead = sourceProvider.Read(sourceBuffer, 0, sourceBuffer.Length);
    if (bytesRead == 0)
    {
        return(null);
    }
    Sample sample;
    using (var mediaBuffer = MediaFactory.CreateMemoryBuffer(bytesRead))
    {
        var pBuffer = mediaBuffer.Lock(out int maxLength, out int currentLength);
        Marshal.Copy(sourceBuffer, 0, pBuffer, bytesRead);
        mediaBuffer.Unlock();
        mediaBuffer.CurrentLength = bytesRead;
        sample = MediaFactory.CreateSample();
        sample.AddBuffer(mediaBuffer);
        // we'll set the time, I don't think it is needed for Resampler, but other MFTs might need it
        sample.SampleTime = inputPosition;
        long duration = BytesToNsPosition(bytesRead, sourceProvider.WaveFormat);
        sample.SampleDuration = duration;
        inputPosition += duration;
        // NOTE(review): mediaBuffer is disposed here while the returned sample still
        // references it — presumably AddBuffer holds its own COM reference; confirm.
    }
    return(sample);
}
/// <summary>
/// Reads bytes from this wave stream
/// </summary>
/// <param name="destBuffer">The destination buffer</param>
/// <param name="offset">Offset into the destination buffer</param>
/// <param name="numBytes">Number of bytes read</param>
/// <returns>Number of bytes read.</returns>
public int Read(byte[] destBuffer, int offset, int numBytes)
{
    // source samples are 4-byte floats, destination is 2-byte shorts:
    // twice as many source bytes are needed per output byte
    int sourceBytesRequired = numBytes * 2;
    byte[] sourceBuffer = GetSourceBuffer(sourceBytesRequired);
    int sourceBytesRead = sourceProvider.Read(sourceBuffer, 0, sourceBytesRequired);
    WaveBuffer sourceWaveBuffer = new WaveBuffer(sourceBuffer);
    WaveBuffer destWaveBuffer = new WaveBuffer(destBuffer);
    int sourceSamples = sourceBytesRead / 4;    // 4 bytes per float sample
    int destOffset = offset / 2;                // byte offset -> short index
    for (int sample = 0; sample < sourceSamples; sample++)
    {
        // adjust volume
        float sample32 = sourceWaveBuffer.FloatBuffer[sample] * volume;
        // clip to [-1, 1] before scaling to the 16-bit range
        if (sample32 > 1.0f)
        {
            sample32 = 1.0f;
        }
        if (sample32 < -1.0f)
        {
            sample32 = -1.0f;
        }
        destWaveBuffer.ShortBuffer[destOffset++] = (short)(sample32 * 32767);
    }
    // each source float became one 2-byte output sample
    return(sourceSamples * 2);
}
/// <summary>
/// Reads bytes from this WaveProvider
/// </summary>
/// <remarks>
/// Mixes interleaved 16-bit sample pairs from the source down to single 16-bit
/// samples using LeftVolume/RightVolume, hard-limiting to the short range.
/// </remarks>
public int Read(byte[] buffer, int offset, int count)
{
    // two source samples are consumed per output sample, so twice the bytes are needed
    int sourceBytesRequired = count * 2;
    sourceBuffer = BufferHelpers.Ensure(sourceBuffer, sourceBytesRequired);
    WaveBuffer sourceWaveBuffer = new WaveBuffer(sourceBuffer);
    WaveBuffer destWaveBuffer = new WaveBuffer(buffer);
    int sourceBytesRead = sourceProvider.Read(sourceBuffer, 0, sourceBytesRequired);
    int samplesRead = sourceBytesRead / 2;  // number of 16-bit samples read
    int destOffset = offset / 2;            // byte offset -> short index
    for (int sample = 0; sample < samplesRead; sample += 2)
    {
        // interleaved pair: even index = left, odd = right
        short left = sourceWaveBuffer.ShortBuffer[sample];
        short right = sourceWaveBuffer.ShortBuffer[sample + 1];
        float outSample = (left * LeftVolume) + (right * RightVolume);
        // hard limiting
        if (outSample > Int16.MaxValue)
        {
            outSample = Int16.MaxValue;
        }
        if (outSample < Int16.MinValue)
        {
            outSample = Int16.MinValue;
        }
        destWaveBuffer.ShortBuffer[destOffset++] = (short)outSample;
    }
    // every source pair produced one 2-byte output sample
    return(sourceBytesRead / 2);
}
/*
 * public void AddSamples(byte[] buffer, int offset, int count)
 * {
 *  int BytesPerSample = WaveFormat.BitsPerSample / 8; // shared mode appears to be fixed at 32-bit
 *  int nChannels = WaveFormat.Channels; //
 *  int dst_id = 0;
 *  int dst_count = (int)(count * ratio);
 *  double rraito = 1.0 / ratio;
 *  double src_frame = 0;
 *
 *  while(dst_id < dst_count)
 *  {
 *      int sr_id_tmp = offset + (int)src_frame * BytesPerSample * nChannels;
 *
 *      for (var j = 0; j < nChannels; ++j)
 *      {
 *          Array.Copy(buffer, sr_id_tmp, TempBuffer, dst_id, BytesPerSample);
 *          dst_id += BytesPerSample;
 *          sr_id_tmp += BytesPerSample;
 *      }
 *      src_frame += rraito;
 *  }
 *  BufferedProvider.AddSamples(TempBuffer, 0, dst_count);
 * }
 */
/// <summary>
/// Reads from the input provider and writes into buffer while resampling
/// (steps the source frame index by 1/ratio, copying one block per output frame).
/// </summary>
/// <param name="buffer">Destination buffer</param>
/// <param name="offset">Destination offset in bytes</param>
/// <param name="count">Requested byte count</param>
/// <returns>num read bytes.</returns>
public int Read(byte[] buffer, int offset, int count)
{
    double rratio = 1.0 / ratio;
    double src_frame = 0;
    byte[] src = TempBuffer;
    int BytesPerSample = WaveFormat.BitsPerSample / 8; // shared mode appears to be fixed at 32-bit
    int BlockAlign = WaveFormat.BlockAlign;
    int dst_id = offset;
    // source bytes needed, rounded down to a whole number of samples
    int src_count_tmp = (int)Math.Ceiling(count * rratio) / BytesPerSample * BytesPerSample;
    int src_count = InputProvider.Read(src, 0, src_count_tmp);
    // scale the destination count by how much of the request was actually satisfied
    // NOTE(review): divides by src_count_tmp — this throws if count is 0 (or small
    // enough that src_count_tmp rounds down to 0); confirm callers never pass that.
    int dst_count = count * src_count / src_count_tmp + offset;
    while (dst_id < dst_count)
    {
        // NOTE(review): src_id_tmp includes the destination offset even though src was
        // filled starting at 0 — looks suspicious; verify against callers.
        int src_id_tmp = offset + (int)src_frame * BlockAlign;
        Array.Copy(src, src_id_tmp, buffer, dst_id, BlockAlign);
        dst_id += BlockAlign;
        src_frame += rratio;
    }
    // NOTE(review): returns dst_count, which includes offset — most Read contracts
    // return only the number of bytes written; verify against callers.
    return(dst_count);
}
/// <summary>
/// driver buffer update callback to fill the wave buffer.
/// </summary>
/// <param name="inputChannels">The input channels.</param>
/// <param name="outputChannels">The output channels.</param>
void driver_BufferUpdate(IntPtr[] inputChannels, IntPtr[] outputChannels)
{
    if (this.NumberOfOutputChannels > 0)
    {
        int read = sourceStream.Read(waveBuffer, 0, waveBuffer.Length);
        if (read < waveBuffer.Length)
        {
            // we have stopped
            // NOTE(review): a short read leaves stale bytes in the tail of waveBuffer,
            // which is still handed to the convertor below — confirm this is acceptable.
        }
        // Call the convertor
        unsafe
        {
            // TODO : check if it's better to lock the buffer at initialization?
            fixed(void *pBuffer = &waveBuffer[0])
            {
                convertor(new IntPtr(pBuffer), outputChannels, NumberOfOutputChannels, nbSamples);
            }
        }
        if (read == 0)
        {
            // do not call stop from inside, or the asioOut will not be disposed and never return error
            // raising the auto stop from outside
            RaisePlaybackStopped(null);
        }
    }
    // always surface both input and output buffers to subscribers
    if (AudioAvailable != null)
    {
        var args = new MyAsioAudioAvailableEventArgs(inputChannels, outputChannels, nbSamples, driver.Capabilities.InputChannelInfos[0].type, driver.Capabilities.OutputChannelInfos[0].type);
        AudioAvailable(this, args);
    }
}
/// <summary>
/// Pulls one chunk of interleaved audio from the wave stream, applies the volume,
/// and deinterlaces it into the processing chunk's per-channel output buffers.
/// Does nothing unless playback is active and there is at least one output buffer.
/// </summary>
/// <param name="processingChunk">Host-supplied buffers describing this processing cycle</param>
void ProcessAudio(ProcessBuffer processingChunk)
{
    if (_playbackState != PlaybackState.Playing)
    {
        return;
    }
    int bufferCount = processingChunk.AudioOut.Length;
    if (bufferCount == 0)
    {
        return;
    }
    int bufferSize = processingChunk.Frames;
    int floatsCount = bufferCount * bufferSize;
    int bytesCount = floatsCount * sizeof(float);
    byte[] fromWave = new byte[bytesCount];
    // FIX: capture the number of bytes actually read instead of ignoring the
    // Stream.Read return value (CA2022). On a short read, only the valid bytes
    // are copied; the tail of fromWave is zero-initialised, so the remaining
    // floats stay 0 (silence), exactly as before.
    int bytesRead = _waveStream.Read(fromWave, 0, bytesCount);
    float[] interlacedSamples = new float[floatsCount];
    Buffer.BlockCopy(fromWave, 0, interlacedSamples, 0, bytesRead);
    // apply the master volume to every sample
    for (int i = 0; i < floatsCount; i++)
    {
        interlacedSamples[i] = interlacedSamples[i] * _volume;
    }
    BufferOperations.DeinterlaceAudio(interlacedSamples, processingChunk.AudioOut, bufferSize, bufferCount);
}
/// <summary>
/// Fills the render client's buffer: silence while paused, audio from the
/// provider while playing, and nothing (zero frames released) otherwise.
/// </summary>
private void FillBuffer(IWaveProvider playbackProvider, int frameCount)
{
    var deviceBuffer = renderClient.GetBuffer(frameCount);
    var bytesWanted = frameCount * bytesPerFrame;
    int framesSupplied = 0;
    switch (playbackState)
    {
        case PlaybackState.Paused:
        {
            // paused: push a buffer of silence so the device keeps running
            var silence = new byte[bytesWanted];
            Marshal.Copy(silence, 0, deviceBuffer, bytesWanted);
            framesSupplied = bytesWanted / bytesPerFrame;
            break;
        }
        case PlaybackState.Playing:
        {
            var bytesRead = playbackProvider.Read(readBuffer, 0, bytesWanted);
            if (bytesRead == 0)
            {
                // source exhausted: stop and notify listeners
                playbackState = PlaybackState.Stopped;
                PlaybackStopped?.Invoke(this, new StoppedEventArgs());
            }
            Marshal.Copy(readBuffer, 0, deviceBuffer, bytesRead);
            framesSupplied = bytesRead / bytesPerFrame;
            /*if (framesSupplied != frameCount)
             * {
             *  Debug.WriteLine(String.Format("WASAPI wanted {0} frames, supplied {1}", frameCount, framesSupplied ));
             * }*/
            break;
        }
    }
    renderClient.ReleaseBuffer(framesSupplied, AudioClientBufferFlags.None);
}
public void LoadNextChunk(IWaveProvider source, int samplePairsRequired)
{
    // 6 source bytes per requested sample pair
    offset = 0;
    var bytesNeeded = samplePairsRequired * 6;
    sourceBuffer = GetSourceBuffer(bytesNeeded);
    sourceBytes = source.Read(sourceBuffer, 0, bytesNeeded);
}
/// <summary>
/// Wires up WASAPI capture for the given device: loopback capture for render
/// devices, plain capture otherwise. Captured audio is resampled and pushed into
/// _remoteBuffer; optionally mirrored into a local output as well.
/// </summary>
/// <param name="device">The device to capture from</param>
/// <param name="settings">Sound settings (controls local playback)</param>
public SoundPlayer(SoundDevice device, SoundSettings settings)
{
    _soundSettings = settings;
    _inputDevice = device.Device;
    if (_inputDevice.DataFlow == DataFlow.Render)
    {
        // render device: capture its output via loopback
        _capture = new WasapiLoopbackCapture(_inputDevice)
        {
            ShareMode = AudioClientShareMode.Shared
        };
        _log.Debug($"Initialized WasapiLoopbackCapture for device {_inputDevice.FriendlyName} of type {_inputDevice.DataFlow}. ShareMode: {_capture.ShareMode}, State: {_inputDevice.State}");
        _log.Debug($"Capturing in format: {_capture.WaveFormat} {_capture.WaveFormat.BitsPerSample}bit {_capture.WaveFormat.SampleRate}Hz {_capture.WaveFormat.Channels} channels");
        _captureProvider = new WaveInProvider(_capture);
        _remoteOutput = new DefaultResampler(_capture.WaveFormat).Resample(_captureProvider);
        _remoteBuffer = new BufferedWaveProvider(SoundSettings.DiscordFormat);
        _capture.DataAvailable += (s, e) =>
        {
            // NOTE(review): the /2 presumably assumes the resampler halves the byte
            // rate — confirm against the capture/Discord formats; 'buf' is a field
            // declared elsewhere in the class.
            _remoteOutput.Read(buf, 0, e.BytesRecorded / 2);
            _remoteBuffer.AddSamples(buf, 0, e.BytesRecorded / 2);
        };
    }
    else
    {
        // capture device: read it directly
        _capture = new WasapiCapture(_inputDevice)
        {
            ShareMode = AudioClientShareMode.Shared
        };
        _log.Debug($"Initialized WasapiCapture for device {_inputDevice.FriendlyName} of type {_inputDevice.DataFlow}. 
ShareMode: {_capture.ShareMode}, State: {_inputDevice.State}.");
        _log.Debug($"Capturing in format: {_capture.WaveFormat} {_capture.WaveFormat.BitsPerSample}bit {_capture.WaveFormat.SampleRate}Hz {_capture.WaveFormat.Channels} channels");
        _captureProvider = new WaveInProvider(_capture);
        _remoteOutput = new DefaultResampler(_capture.WaveFormat).Resample(_captureProvider);
        var captureRate = _capture.WaveFormat.AverageBytesPerSecond;
        var outputRate = SoundSettings.DiscordFormat.AverageBytesPerSecond;
        _log.Info($"Capture rate is {captureRate}, output rate is {outputRate}, that gives a ratio of {(float)captureRate / (float)outputRate}");
        _log.Debug($"Outputting in format: {_remoteOutput.WaveFormat} {_remoteOutput.WaveFormat.BitsPerSample}bit {_remoteOutput.WaveFormat.SampleRate}Hz {_remoteOutput.WaveFormat.Channels} channels");
        // scale factor from captured bytes to resampled output bytes
        var rate = (float)SoundSettings.DiscordFormat.AverageBytesPerSecond / _capture.WaveFormat.AverageBytesPerSecond;
        _remoteBuffer = new BufferedWaveProvider(_remoteOutput.WaveFormat);
        _capture.DataAvailable += (s, e) =>
        {
            _remoteOutput.Read(buf, 0, (int)Math.Round(e.BytesRecorded * rate));
            _remoteBuffer.AddSamples(buf, 0, (int)Math.Round(e.BytesRecorded * rate));
        };
    }
    if (_soundSettings.PlayLocally)
    {
        // optionally mirror the captured audio to the default local output device
        _localOutput = new WasapiOut();
        _localBuffer = new BufferedWaveProvider(_remoteOutput.WaveFormat);
        _localOutput.Init(_localBuffer);
        _capture.DataAvailable += (s, e) =>
        {
            // NOTE(review): adds e.BytesRecorded / 2 bytes from 'buf' (the resampled
            // data) — verify this length matches what the other handler wrote into buf.
            _localBuffer.AddSamples(buf, 0, e.BytesRecorded / 2);
        };
    }
}
public void LoadNextChunk(IWaveProvider source, int samplePairsRequired)
{
    // 4 source bytes per requested sample pair; /2 converts bytes read to 2-byte samples
    var bytesNeeded = samplePairsRequired * 4;
    sourceBuffer = GetSourceBuffer(bytesNeeded);
    sourceWaveBuffer = new WaveBuffer(sourceBuffer);
    sourceSample = 0;
    sourceSamples = source.Read(sourceBuffer, 0, bytesNeeded) / 2;
}
public void LoadNextChunk(IWaveProvider source, int samplePairsRequired)
{
    // 2 source bytes per requested sample pair; /2 converts bytes read to 2-byte samples
    var bytesNeeded = samplePairsRequired * 2;
    sourceBuffer = BufferHelpers.Ensure(sourceBuffer, bytesNeeded);
    sourceWaveBuffer = new WaveBuffer(sourceBuffer);
    sourceSamples = source.Read(sourceBuffer, 0, bytesNeeded) / 2;
    sourceSample = 0;
}
public void LoadNextChunk(IWaveProvider source, int samplePairsRequired)
{
    // 2 source bytes per requested sample pair
    offset = 0;
    var bytesNeeded = samplePairsRequired * 2;
    sourceBuffer = GetSourceBuffer(bytesNeeded);
    sourceBytes = source.Read(sourceBuffer, 0, bytesNeeded);
}
public void LoadNextChunk(IWaveProvider source, int samplePairsRequired)
{
    // 6 source bytes per requested sample pair
    int bytesNeeded = samplePairsRequired * 6;
    sourceBuffer = BufferHelpers.Ensure(sourceBuffer, bytesNeeded);
    offset = 0;
    sourceBytes = source.Read(sourceBuffer, 0, bytesNeeded);
}
/// <summary>
/// Event handler to capture waspi device and convert to pcm16.
/// </summary>
/// <remarks>
/// see also: https://qiita.com/zufall/items/2e027a2bc996864fe4af
/// Raises ResampledDataAvailable with the converted bytes and
/// ResampledMaxValueAvailable with the peak absolute sample value.
/// </remarks>
/// <param name="sender"></param>
/// <param name="eventArgs"></param>
private void WaspiDataAvailable(object sender, WaveInEventArgs eventArgs)
{
    if (eventArgs.BytesRecorded == 0)
    {
        // nothing captured: still raise both events with empty data / zero level
        ResampledDataAvailable?.Invoke(this, new byte[0]);
        ResampledMaxValueAvailable?.Invoke(this, 0);
        return;
    }
    using (var memStream = new MemoryStream(eventArgs.Buffer, 0, eventArgs.BytesRecorded))
    {
        using (var inputStream = new RawSourceWaveStream(memStream, capture.WaveFormat))
        {
            // conversion chain: capture format -> float samples -> resample -> 16-bit PCM
            var sampleStream = new WaveToSampleProvider(inputStream);
            var resamplingProvider = new WdlResamplingSampleProvider(sampleStream, TargetWaveFormat.SampleRate);
            var pcmProvider = new SampleToWaveProvider16(resamplingProvider);
            IWaveProvider targetProvider = pcmProvider;
            if (capture.WaveFormat.Channels == 2)
            {
                // mix stereo down to mono at half volume per channel
                var stereoToMonoProvider = new StereoToMonoProvider16(pcmProvider);
                stereoToMonoProvider.RightVolume = 0.5f;
                stereoToMonoProvider.LeftVolume = 0.5f;
                targetProvider = stereoToMonoProvider;
            }
            // drain the whole chain for this capture event into outputStream
            byte[] buffer = new byte[eventArgs.BytesRecorded];
            var outputStream = new MemoryStream();
            int readBytes;
            int writeBytes = 0;
            while ((readBytes = targetProvider.Read(buffer, 0, eventArgs.BytesRecorded)) > 0)
            {
                outputStream.Write(buffer, 0, readBytes);
                writeBytes += readBytes;
            }
            var aryOutputStream = outputStream.ToArray();
            ResampledDataAvailable?.Invoke(this, aryOutputStream);
            // scan the converted 2-byte samples for the peak absolute value
            float max = 0;
            var tempBuffer = new WaveBuffer(aryOutputStream);
            for (int index = 0; index < aryOutputStream.Length / 2; index++)
            {
                var sample = (double)tempBuffer.ShortBuffer[index];
                // absolute value
                if (sample < 0.0)
                {
                    sample = -sample;
                }
                // is this the max value?
                if (sample > max)
                {
                    max = (float)sample;
                }
            }
            ResampledMaxValueAvailable?.Invoke(this, max);
        }
    }
}
public void LoadNextChunk(IWaveProvider source, int samplePairsRequired)
{
    // 8 source bytes per requested sample pair; /4 converts bytes read to 4-byte samples
    var bytesNeeded = samplePairsRequired * 8;
    sourceBuffer = BufferHelpers.Ensure(sourceBuffer, bytesNeeded);
    sourceWaveBuffer = new WaveBuffer(sourceBuffer);
    sourceSample = 0;
    sourceSamples = source.Read(sourceBuffer, 0, bytesNeeded) / 4;
}
public void LoadNextChunk(IWaveProvider source, int samplePairsRequired)
{
    // 2 source bytes per requested sample pair; /2 converts bytes read to 2-byte samples
    int bytesNeeded = samplePairsRequired * 2;
    sourceBuffer = BufferHelpers.Ensure(sourceBuffer, bytesNeeded);
    sourceWaveBuffer = new WaveBuffer(sourceBuffer);
    sourceSamples = source.Read(sourceBuffer, 0, bytesNeeded) / 2;
    sourceSample = 0;
}
/// <summary>
/// Read simply returns what the source returns, but writes to disk along the way
/// </summary>
public int Read(byte[] buffer, int offset, int count)
{
    int bytesRead = source.Read(buffer, offset, count);
    // WriteData is marked obsolete (CS0618) but is still used here; suppress the warning
#pragma warning disable CS0618 // Type or member is obsolete
    writer.WriteData(buffer, offset, bytesRead);
#pragma warning restore CS0618 // Type or member is obsolete
    return(bytesRead);
}
/// <summary>
/// Creates a Wave file by reading all the data from a WaveProvider
/// BEWARE: the WaveProvider MUST return 0 from its Read method when it is finished,
/// or the Wave File will grow indefinitely.
/// </summary>
/// <param name="filename">The filename to use</param>
/// <param name="sourceProvider">The source WaveProvider</param>
public static void CreateWaveFile(string filename, IWaveProvider sourceProvider)
{
    using (var writer = new WaveFileWriter(filename, sourceProvider.WaveFormat))
    {
        // pump ~4 seconds of audio per read until the provider reports end of stream
        var chunk = new byte[sourceProvider.WaveFormat.AverageBytesPerSecond * 4];
        int read;
        while ((read = sourceProvider.Read(chunk, 0, chunk.Length)) != 0)
        {
            writer.Write(chunk, 0, read);
        }
    }
}
/// <summary>
/// Creates a Wave file by reading all the data from a WaveProvider
/// BEWARE: the WaveProvider MUST return 0 from its Read method when it is finished,
/// or the Wave File will grow indefinitely.
/// </summary>
/// <param name="filename">The filename to use</param>
/// <param name="sourceProvider">The source WaveProvider</param>
public static void CreateWaveFile(string filename, IWaveProvider sourceProvider)
{
    using (var writer = new WaveFileWriter(filename, sourceProvider.WaveFormat))
    {
        // ~4 seconds of audio per read
        var chunk = new byte[sourceProvider.WaveFormat.AverageBytesPerSecond * 4];
        for (;;)
        {
            var read = sourceProvider.Read(chunk, 0, chunk.Length);
            if (read == 0)
            {
                // end of source provider
                break;
            }
            // Write will throw exception if WAV file becomes too large
            writer.Write(chunk, 0, read);
        }
    }
}
/// <summary>
/// Appends all the data from a WaveProvider to an existing Wave file,
/// guarding against the 2GB WAV size limit.
/// </summary>
public static void AppendWaveFile(string filename, IWaveProvider sourceProvider)
{
    using (var writer = new WaveFileWriter(filename, sourceProvider.WaveFormat, FileMode.Append))
    {
        // ~4 seconds of audio per read
        var chunk = new byte[sourceProvider.WaveFormat.AverageBytesPerSecond * 4];
        long totalWritten = 0;
        int read;
        while ((read = sourceProvider.Read(chunk, 0, chunk.Length)) != 0)
        {
            totalWritten += read;
            // reject endless streams before the file exceeds the WAV format limit
            if (totalWritten > Int32.MaxValue)
            {
                throw new InvalidOperationException("WAV File cannot be greater than 2GB. Check that sourceProvider is not an endless stream.");
            }
            writer.Write(chunk, 0, read);
        }
    }
}
/// <summary>
/// Creates a Wave file from a WaveProvider, invoking progressCallback with the
/// byte count of each chunk written.
/// </summary>
internal static void CreateWaveFile(string filename, IWaveProvider sourceProvider, Action<long> progressCallback)
{
    Debug.Assert(progressCallback != null);
    using (var writer = new WaveFileWriter(filename, sourceProvider.WaveFormat))
    {
        // ~4 seconds of audio per read
        var chunk = new byte[sourceProvider.WaveFormat.AverageBytesPerSecond * 4];
        long totalWritten = 0;
        int read;
        while ((read = sourceProvider.Read(chunk, 0, chunk.Length)) != 0)
        {
            totalWritten += read;
            // Write will throw exception if WAV file becomes too large
            writer.Write(chunk, 0, read);
            progressCallback(read);
        }
    }
}
/// <summary>
/// Converts one buffer of audio from the input provider into a Media Foundation
/// sample and writes it to the sink writer.
/// </summary>
/// <param name="writer">The sink writer to write the sample to</param>
/// <param name="streamIndex">Index of the writer stream</param>
/// <param name="inputProvider">Source of audio data</param>
/// <param name="position">Sample time for this buffer (100ns units)</param>
/// <param name="managedBuffer">Scratch buffer the audio is read into</param>
/// <returns>Duration of the converted audio (100ns units); 0 when the source is exhausted</returns>
private long ConvertOneBuffer(IMFSinkWriter writer, int streamIndex, IWaveProvider inputProvider, long position, byte[] managedBuffer)
{
    long durationConverted = 0;
    int maxLength;
    IMFMediaBuffer buffer = MediaFoundationApi.CreateMemoryBuffer(managedBuffer.Length);
    buffer.GetMaxLength(out maxLength);
    IMFSample sample = MediaFoundationApi.CreateSample();
    sample.AddBuffer(buffer);
    IntPtr ptr;
    int currentLength;
    buffer.Lock(out ptr, out maxLength, out currentLength);
    // read up to the media buffer's capacity
    int read = inputProvider.Read(managedBuffer, 0, maxLength);
    if (read > 0)
    {
        durationConverted = BytesToNsPosition(read, inputProvider.WaveFormat);
        Marshal.Copy(managedBuffer, 0, ptr, read);
        buffer.SetCurrentLength(read);
        buffer.Unlock();
        sample.SetSampleTime(position);
        sample.SetSampleDuration(durationConverted);
        writer.WriteSample(streamIndex, sample);
        //writer.Flush(streamIndex);
    }
    else
    {
        // nothing read: just release the lock; callers treat 0 as end of stream
        buffer.Unlock();
    }
    Marshal.ReleaseComObject(sample);
    Marshal.ReleaseComObject(buffer);
    return durationConverted;
}
/// <summary>
/// Fills the render client's buffer with audio from the playback provider and
/// releases exactly the number of frames that were supplied.
/// </summary>
private void FillBuffer(IWaveProvider playbackProvider, int frameCount)
{
    var deviceBuffer = renderClient.GetBuffer(frameCount);
    var bytesWanted = frameCount * bytesPerFrame;
    var bytesRead = playbackProvider.Read(readBuffer, 0, bytesWanted);
    if (bytesRead == 0)
    {
        // provider exhausted: flag playback as finished
        playbackState = PlaybackState.Stopped;
    }
    Marshal.Copy(readBuffer, 0, deviceBuffer, bytesRead);
    /*if (bytesRead / bytesPerFrame != frameCount)
    {
        Debug.WriteLine(String.Format("WASAPI wanted {0} frames, supplied {1}", frameCount, bytesRead / bytesPerFrame ));
    }*/
    renderClient.ReleaseBuffer(bytesRead / bytesPerFrame, AudioClientBufferFlags.None);
}