/// <summary>
/// Handles incoming audio data and prepares peak values for chart display.
/// </summary>
/// <param name="sender">The wave-in device that raised the event.</param>
/// <param name="args">Recorded buffer and byte count.</param>
void OnDataAvailable(object sender, WaveInEventArgs args)
{
    float max = 0;
    var buffer = new WaveBuffer(args.Buffer); // interpret as 32 bit floating point audio
    List<int> buf = new List<int>();
    for (int index = 0; index < args.BytesRecorded / 4; index++)
    {
        var sample = buffer.FloatBuffer[index];
        // absolute value
        if (sample < 0)
        {
            sample = -sample;
        }
        // track the loudest sample in this buffer
        if (sample > max)
        {
            max = sample;
        }
        buf.Add((int)(sample * 100)); // scale to integer percent for charting
    }
    picks.Clear();
    // Split the buffer into 7 equal segments and keep each segment's peak.
    // Fix: when fewer than 7 samples arrived, the old code produced empty
    // GetRange slices and pick.Max() threw InvalidOperationException.
    int segmentSize = buf.Count / 7;
    if (segmentSize > 0)
    {
        for (int i = 0; i < 7; i++)
        {
            var pick = buf.GetRange(i * buf.Count / 7, segmentSize);
            picks.Add(pick.Max());
        }
    }
}
/// <summary>
/// Captures microphone data: queues the raw bytes into the buffered provider
/// and tracks the peak sample level in <c>MicroValue</c>.
/// NOTE(review): MicroValue is assigned repeatedly inside the loop; if it is a
/// change-notifying property this fires a notification per sample — confirm.
/// NOTE(review): the <c>sample &gt; 100</c> clamp can never trigger for
/// normalized IEEE-float audio (range [-1, 1]) — verify the intended input format.
/// </summary>
private void waveIn_DataAvailable(object sender, WaveInEventArgs e)
{
    // Queue the raw recorded bytes for later consumption.
    bwp.AddSamples(e.Buffer, 0, e.BytesRecorded);
    this.MicroValue = 0;
    var buffer = new WaveBuffer(e.Buffer); // interpret as 32 bit floating point audio
    for (int index = 0; index < e.BytesRecorded / 4; index++)
    {
        var sample = buffer.FloatBuffer[index];
        // absolute value
        if (sample < 0)
        {
            sample = -sample;
        }
        // is this the max value?
        if (sample > this.MicroValue)
        {
            this.MicroValue = sample;
        }
        // clamp the reported level at 100
        if (sample > 100)
        {
            this.MicroValue = 100;
        }
    }
}
/// <summary>
/// Modifies the audio buffer in accord with the current fading status.
/// </summary>
/// <param name="buffer">The audio samples.</param>
/// <param name="bytesInBuffer">The number of bytes in the audio buffer.</param>
/// <param name="isFloatingPointAudio">If the audio is 32-bit.</param>
public void FadeBuffer(byte[] buffer, int bytesInBuffer, bool isFloatingPointAudio)
{
    _sampleCountModified += bytesInBuffer;
    var volumeAdjustmentFraction = 1 - ((float)_sampleCountModified / _sampleCountToModify);
    // Fix: once the fade point is passed, the fraction goes negative and the
    // old code multiplied samples by a negative factor, phase-inverting the
    // audio. Clamp to zero so post-fade buffers are silent instead.
    if (volumeAdjustmentFraction < 0)
    {
        volumeAdjustmentFraction = 0;
    }
    var buff = new WaveBuffer(buffer);
    if (isFloatingPointAudio)
    {
        // 32-bit IEEE float: 4 bytes per sample.
        for (var index = 0; index < bytesInBuffer / 4; ++index)
        {
            var sample = buff.FloatBuffer[index];
            buff.FloatBuffer[index] = sample * volumeAdjustmentFraction;
        }
    }
    else
    {
        // 16-bit PCM: 2 bytes per sample.
        for (var index = 0; index < bytesInBuffer / 2; ++index)
        {
            var sample = buff.ShortBuffer[index];
            buff.ShortBuffer[index] = (short)(sample * volumeAdjustmentFraction);
        }
    }
    if (volumeAdjustmentFraction <= 0)
    {
        OnFadeComplete();
    }
}
/// <summary>
/// Reads bytes from this WaveProvider, mixing stereo 16-bit source samples
/// down to mono using the configured left/right volumes.
/// </summary>
public int Read(byte[] buffer, int offset, int count)
{
    // Two source bytes are consumed for every destination byte produced.
    int sourceBytesRequired = count * 2;
    this.sourceBuffer = BufferHelpers.Ensure(this.sourceBuffer, sourceBytesRequired);
    int sourceBytesRead = sourceProvider.Read(sourceBuffer, 0, sourceBytesRequired);

    var sourceView = new WaveBuffer(sourceBuffer);
    var destView = new WaveBuffer(buffer);
    int destIndex = offset / 2;
    int sampleCount = sourceBytesRead / 2;

    for (int n = 0; n < sampleCount; n += 2)
    {
        // Weighted sum of the left/right pair.
        float mixed = (sourceView.ShortBuffer[n] * LeftVolume)
                    + (sourceView.ShortBuffer[n + 1] * RightVolume);
        // Hard-limit to the 16-bit range.
        if (mixed > Int16.MaxValue)
        {
            mixed = Int16.MaxValue;
        }
        else if (mixed < Int16.MinValue)
        {
            mixed = Int16.MinValue;
        }
        destView.ShortBuffer[destIndex++] = (short)mixed;
    }

    return sourceBytesRead / 2;
}
/// <summary>
/// Handles incoming microphone data: appends it to the active recording (when
/// recording) and publishes the buffer's peak sample level via <c>PeakValue</c>,
/// then raises a SampleAvailable event and the threshold check.
/// NOTE(review): PeakValue is written once per louder sample; if it is a
/// change-notifying property this may fire many notifications per buffer.
/// </summary>
private void AudioInDataAvailable(object s, WaveInEventArgs a)
{
    if (IsRecording)
    {
        // Persist the raw bytes to the active writer.
        _writer.Write(a.Buffer, 0, a.BytesRecorded);
    }
    PeakValue = 0;
    var buffer = new WaveBuffer(a.Buffer); // interpret as 32 bit floating point audio
    for (int index = 0; index < a.BytesRecorded / 4; index++)
    {
        var sample = buffer.FloatBuffer[index];
        // absolute value
        if (sample < 0)
        {
            sample = -sample;
        }
        // is this the max value?
        if (sample > PeakValue)
        {
            PeakValue = sample;
        }
    }
    OnAudioEventAvailable(new AudioEventArgs { State = AudioRecordState.SampleAvailable });
    // "Treshold" is a typo in the existing member's name; kept for compatibility.
    OnTresholdReached();
}
/// <summary>
/// De-interleaves captured loopback audio into per-channel buffered providers,
/// then reads one fixed-size chunk per channel and stores the pair for analysis.
/// </summary>
public void DataAvailable(object sender, WaveInEventArgs e)
{
    if (e.BytesRecorded != 0)
    {
        int channels = ((WasapiLoopbackCapture)sender).WaveFormat.Channels;
        int offset = 0;
        // De-interleave: each channel owns every Nth 4-byte (float) sample.
        while (offset < e.BytesRecorded)
        {
            for (int n = 0; n < channels; n++)
            {
                // Fix: guard against a trailing partial frame — without this,
                // AddSamples could be asked to read past the end of e.Buffer
                // when BytesRecorded is not a multiple of channels * 4.
                if (offset + 4 > e.BytesRecorded)
                {
                    offset = e.BytesRecorded;
                    break;
                }
                stereo[n].AddSamples(e.Buffer, offset, 4);
                offset += 4;
            }
        }
        WaveBuffer[] a = new WaveBuffer[2];
        byte[] buffer1 = new byte[Resolution];
        byte[] buffer2 = new byte[Resolution];
        stereo[0].Read(buffer1, 0, buffer1.Length);
        stereo[1].Read(buffer2, 0, buffer2.Length);
        a[0] = new WaveBuffer(buffer1);
        a[1] = new WaveBuffer(buffer2);
        // NOTE(review): output is hard-coded to 2 channels while the loop above
        // honors WaveFormat.Channels — confirm capture is always stereo.
        audio.Add(a);
    }
}
/// <summary>
/// Singleton constructor: sizes the mixing buffer from the configured sample
/// rate and frame rate, creates all sound channels, and starts WASAPI playback.
/// </summary>
private SoundMixer()
{
    // Samples produced per mixing pass (one buffer per inter-frame tick).
    SamplesPerBuffer = Config.Instance.SampleRate / (Engine.AGB_FPS * Config.Instance.InterFrames);
    // Reciprocals precomputed so per-sample math can multiply instead of divide.
    SampleRateReciprocal = 1f / Config.Instance.SampleRate;
    SamplesReciprocal = 1f / SamplesPerBuffer;
    dsChannels = new DirectSoundChannel[Config.Instance.DirectCount];
    for (int i = 0; i < Config.Instance.DirectCount; i++)
    {
        dsChannels[i] = new DirectSoundChannel();
    }
    gbChannels = new GBChannel[] { sq1 = new SquareChannel(), sq2 = new SquareChannel(), wave = new WaveChannel(), noise = new NoiseChannel() };
    allChannels = dsChannels.Union(gbChannels).ToArray();
    mutes = new bool[17]; // 0-15 for tracks, 16 for the program
    // Stereo output: 2 floats per frame; WaveBuffer size is in bytes (4 per float).
    int amt = SamplesPerBuffer * 2;
    audio = new WaveBuffer(amt * 4) { FloatBufferCount = amt };
    buffer = new BufferedWaveProvider(WaveFormat.CreateIeeeFloatWaveFormat(Config.Instance.SampleRate, 2)) { DiscardOnBufferOverflow = true };
    // Output device must be initialized with the provider before Play.
    @out = new WasapiOut();
    @out.Init(buffer);
    @out.Play();
}
/// <summary>
/// Reads from this provider by exposing the destination byte buffer as floats
/// and delegating to the 32-bit sample source.
/// </summary>
public int Read(byte[] buffer, int offset, int count)
{
    var floatView = new WaveBuffer(buffer);
    // 4 bytes per IEEE-float sample.
    int framesRead = source.Read(floatView.FloatBuffer, offset / 4, count / 4);
    return framesRead * 4;
}
/// <summary>
/// Handles incoming microphone data by interpreting the recorded bytes as
/// 32-bit IEEE-float samples and forwarding them for processing.
/// </summary>
private void _waveIn_DataAvailable(object sender, WaveInEventArgs e)
{
    _buff = e.Buffer;
    // Fix: the previous code allocated a fresh (all-zero) 8192-byte WaveBuffer
    // and processed that, while a LINQ query misused byte VALUES as buffer
    // OFFSETS (BitConverter.ToSingle(_buff, i)) and discarded the result.
    // Wrap the actual recorded bytes instead so real samples are processed.
    _waveBuffer = new WaveBuffer(e.Buffer);
    // NOTE(review): e.Buffer may be longer than e.BytesRecorded; trailing
    // floats are stale/zero — confirm ProcessData tolerates this.
    ProcessData(_waveBuffer.FloatBuffer);
}
/// <summary>
/// Reads floats from the source provider and converts them to the target PCM
/// sample width (8, 16, or 32 bit) in the destination byte buffer.
/// </summary>
/// <param name="buffer">Destination buffer receiving converted samples.</param>
/// <param name="offset">Byte offset into the destination buffer.</param>
/// <param name="count">Number of destination bytes requested.</param>
/// <returns>Number of destination bytes written.</returns>
public int Read(byte[] buffer, int offset, int count)
{
    float[] floatBuffer = new float[count / targetBytes];
    int read = provider.Read(floatBuffer, 0, count / targetBytes);
    WaveBuffer outWaveBuffer = new WaveBuffer(buffer);
    int calcOffset = offset / targetBytes;
    for (int i = 0; i < read; i++)
    {
        // Fix: clamp the scaled value to the target range before casting.
        // Previously a full-scale sample (+1.0f) overflowed the integer
        // conversion: 256 wraps to 0 for byte, 32768 wraps to -32768 for
        // short, and the int case produced an unspecified value.
        switch (targetBytes)
        {
            case 1:
            {
                float scaled = (floatBuffer[i] + 1) * 128f;
                if (scaled > byte.MaxValue) { scaled = byte.MaxValue; }
                if (scaled < byte.MinValue) { scaled = byte.MinValue; }
                outWaveBuffer.ByteBuffer[calcOffset + i] = (byte)scaled;
                break;
            }
            case 2:
            {
                float scaled = floatBuffer[i] * 32768f;
                if (scaled > short.MaxValue) { scaled = short.MaxValue; }
                if (scaled < short.MinValue) { scaled = short.MinValue; }
                outWaveBuffer.ShortBuffer[calcOffset + i] = (short)scaled;
                break;
            }
            case 4:
            {
                float scaled = floatBuffer[i] * 2147483648f;
                // 2147483648f is the smallest float >= int.MaxValue.
                if (scaled >= 2147483648f)
                {
                    outWaveBuffer.IntBuffer[calcOffset + i] = int.MaxValue;
                }
                else if (scaled < int.MinValue)
                {
                    outWaveBuffer.IntBuffer[calcOffset + i] = int.MinValue;
                }
                else
                {
                    outWaveBuffer.IntBuffer[calcOffset + i] = (int)scaled;
                }
                break;
            }
        }
    }
    return read * targetBytes;
}
/// <summary>
/// Writes recorded audio to the active wave file and updates the volume meter
/// with the buffer's peak (absolute) 32-bit float sample value.
/// </summary>
private void mainWaveIn_DataAvailable(object sender, WaveInEventArgs e)
{
    if (recWaveWriter == null)
    {
        return;
    }

    recWaveWriter.Write(e.Buffer, 0, e.BytesRecorded);
    recWaveWriter.Flush();

    // Interpret the raw bytes as 32-bit IEEE-float samples and find the peak.
    var floatView = new WaveBuffer(e.Buffer);
    int sampleCount = e.BytesRecorded / 4;
    float peak = 0;
    for (int n = 0; n < sampleCount; n++)
    {
        float magnitude = Math.Abs(floatView.FloatBuffer[n]);
        if (magnitude > peak)
        {
            peak = magnitude;
        }
    }
    volumeSliderMeter.Volume = peak;
}
/// <summary>
/// Renders a horizontal VU meter: green up to the current level, a red tip at
/// the level, and black for the remainder of the canvas.
/// NOTE(review): the bar length is scaled from Canvas.Height but the clearing
/// loop runs to Canvas.Width — confirm that mixed use is intentional.
/// </summary>
protected override void CaptureDataAvailable(WaveInEventArgs args)
{
    // Peak absolute value of the 32-bit float samples in this buffer.
    var floatView = new WaveBuffer(args.Buffer);
    int sampleCount = args.BytesRecorded / 4;
    float peak = 0.0f;
    for (int n = 0; n < sampleCount; n++)
    {
        peak = Math.Max(peak, Math.Abs(floatView.FloatBuffer[n]));
    }

    int barLength = Convert.ToInt32(Convert.ToDouble(Canvas.Height) * peak * 1.5);

    // Green body of the meter.
    for (int x = 0; x < barLength; x++)
    {
        Canvas.DrawLineX(x, RgbColor.Green);
    }
    // Red tip at the current level.
    Canvas.DrawLineX(barLength, RgbColor.Red);
    // Clear everything beyond the tip.
    for (int x = barLength + 1; x < Canvas.Width; x++)
    {
        for (int y = 0; y < Canvas.Height; y++)
        {
            Canvas.SetPixel(x, y, RgbColor.Black);
        }
    }
    Render();
}
/// <summary>
/// Reads bytes by viewing the destination as 32-bit float samples and
/// delegating to the float-based Read overload.
/// </summary>
public override int Read(byte[] buffer, int offset, int count)
{
    var floatView = new WaveBuffer(buffer);
    int samplesWanted = count / 4;
    int samplesGot = Read(floatView.FloatBuffer, offset / 4, samplesWanted);
    return samplesGot * 4;
}
/// <summary>
/// Downsamples the recorded 32-bit float buffer into one averaged value per
/// horizontal pixel column and hands the result to the line renderer.
/// </summary>
public override void OnData(WaveInEventArgs a)
{
    var width = this.formWidth / this.pixelsPerLine;
    // Fix: guard a zero-width layout — the old code divided by width below.
    if (width <= 0)
    {
        return;
    }
    const int bytesPerSample = 4; // 32-bit IEEE float
    var samples = a.BytesRecorded / bytesPerSample;
    int samplesPerPixel = samples / width;
    // Fix: with fewer samples than pixel columns, samplesPerPixel is 0 and
    // every average became 0/0 = NaN; skip drawing such a buffer entirely.
    if (samplesPerPixel == 0)
    {
        return;
    }
    var buffer = new WaveBuffer(a.Buffer);
    var averages = new float[width];
    for (int x = 0; x < width; x++)
    {
        float sum = 0;
        for (int s = 0; s < samplesPerPixel; s++)
        {
            sum += buffer.FloatBuffer[x * samplesPerPixel + s];
            // Runaway accumulation would indicate bad input data.
            if (sum > float.MaxValue * 0.8)
            {
                Debug.Fail("Too Close to max");
            }
        }
        averages[x] = sum / samplesPerPixel;
    }
    this.draw(averages, DrawMode.Line, this.formHeight);
}
/// <summary>
/// Initializes the mixer: derives sample rate and buffer size from the
/// frequency table, creates the PCM8 and PSG channels, allocates the
/// interleaved stereo float output buffer plus per-track scratch buffers,
/// and initializes the buffered output provider.
/// </summary>
public Mixer(Config config)
{
    Config = config;
    // Frequency table maps the configured rate to (output rate, buffer size).
    (SampleRate, SamplesPerBuffer) = Utils.FrequencyTable[config.SampleRate];
    // Reciprocals precomputed so per-sample math multiplies instead of divides.
    SampleRateReciprocal = 1f / SampleRate;
    _samplesReciprocal = 1f / SamplesPerBuffer;
    // Master volume is configured in 0-15 steps.
    PCM8MasterVolume = config.Volume / 15f;
    _pcm8Channels = new PCM8Channel[24];
    for (int i = 0; i < _pcm8Channels.Length; i++)
    {
        _pcm8Channels[i] = new PCM8Channel(this);
    }
    _psgChannels = new PSGChannel[] { _sq1 = new SquareChannel(this), _sq2 = new SquareChannel(this), _pcm4 = new PCM4Channel(this), _noise = new NoiseChannel(this) };
    // Stereo output: two floats per sample frame.
    int amt = SamplesPerBuffer * 2;
    _audio = new WaveBuffer(amt * sizeof(float)) { FloatBufferCount = amt };
    // 16 per-track scratch buffers, mixed into _audio each pass.
    _trackBuffers = new float[0x10][];
    for (int i = 0; i < _trackBuffers.Length; i++)
    {
        _trackBuffers[i] = new float[amt];
    }
    _buffer = new BufferedWaveProvider(WaveFormat.CreateIeeeFloatWaveFormat(SampleRate, 2)) { DiscardOnBufferOverflow = true, BufferLength = SamplesPerBuffer * 64 };
    Init(_buffer);
}
/// <summary>
/// Classifies a recorded buffer as VOICE, NOISE, or NOTHING by comparing each
/// 16-bit sample's magnitude against the two volume thresholds.
/// </summary>
/// <param name="buffer">Recorded audio viewed as 16-bit PCM samples.</param>
/// <param name="bytesRecorded">Number of valid bytes in the buffer.</param>
/// <param name="minVoiceRecordSampleVolume">Voice threshold; 0 means "always voice".</param>
/// <param name="minNoiseRecordSampleVolume">Noise threshold; 0 means "always noise".</param>
private static SoundType DetectSound(WaveBuffer buffer, int bytesRecorded, short minVoiceRecordSampleVolume, short minNoiseRecordSampleVolume)
{
    if (minVoiceRecordSampleVolume == 0)
    {
        return SoundType.VOICE;
    }
    if (minNoiseRecordSampleVolume == 0)
    {
        return SoundType.NOISE;
    }
    SoundType result = SoundType.NOTHING;
    // Check if the volume peaks above the minimum record volume.
    // Fix: the data is read via ShortBuffer (16-bit, 2 bytes per sample) but
    // the old loop bound of bytesRecorded / 4 — left over from a 32-bit-float
    // version, as its stale comment showed — silently skipped half the buffer.
    for (int index = 0; index < bytesRecorded / 2; index++)
    {
        var sample = buffer.ShortBuffer[index];
        // Check voice volume threshold.
        if (sample > minVoiceRecordSampleVolume || sample < -minVoiceRecordSampleVolume)
        {
            result = SoundType.VOICE;
            // Stop as soon as the voice threshold has been reached.
            break;
        }
        // Check noise volume threshold.
        else if (sample > minNoiseRecordSampleVolume || sample < -minNoiseRecordSampleVolume)
        {
            result = SoundType.NOISE;
        }
    }
    return result;
}
/// <summary>
/// Converts IEEE-float samples to 16-bit PCM bytes, clipping each sample first.
/// </summary>
private static byte[] IeeeTo16Bit(float[] bufferF, int samples, WaveFormat sourceFormat, out int newLength)
{
    if (EnableTraces)
    {
        Trace.WriteLine(string.Format("In {0} samples: {1} duration:{2}ms", sourceFormat, samples, samples / sourceFormat.Channels / (sourceFormat.SampleRate / 1000)));
    }

    // Two output bytes per 16-bit sample.
    var pcmBytes = new byte[samples * 2];
    var pcmView = new WaveBuffer(pcmBytes);
    for (int i = 0; i < samples; i++)
    {
        // Clip to the legal float range, then quantize to PCM.
        pcmView.ShortBuffer[i] = FloatToPCM(ClipFloatSample(bufferF[i]));
    }
    newLength = samples * 2;

    if (EnableTraces)
    {
        Trace.WriteLine(string.Format("Out PCM samples: {0} duration:{1}ms", newLength / (16 / 8), (newLength / (16 / 8)) / sourceFormat.Channels / (sourceFormat.SampleRate / 1000)));
    }
    return pcmBytes;
}
/// <summary>
/// Returns the peak absolute 32-bit integer sample value in the buffer.
/// </summary>
private float GetPeakValue(WaveInEventArgs args)
{
    var intView = new WaveBuffer(args.Buffer);
    int sampleCount = args.BytesRecorded / 4;
    int peak = 0;
    for (int n = 0; n < sampleCount; n++)
    {
        int value = intView.IntBuffer[n];
        // Manual negation (not Math.Abs) so int.MinValue wraps silently instead
        // of throwing, matching the original unchecked behavior.
        if (value < 0)
        {
            value = -value;
        }
        if (value > peak)
        {
            peak = value;
        }
    }
    return peak;
}
/// <summary>
/// Writes recorded data to the MP3 writer, feeds each normalized 16-bit sample
/// to the FFT aggregator, and posts the buffer's peak level back to the UI.
/// </summary>
void waveIn_DataAvailable(object sender, WaveInEventArgs e)
{
    // write recorded data to MP3 writer, if one is active
    if (wri != null)
    {
        wri.Write(e.Buffer, 0, e.BytesRecorded);
    }

    var shortView = new WaveBuffer(e.Buffer); // interpret as 16-bit PCM samples
    int sampleCount = e.BytesRecorded / 2;
    float peak = 0;
    for (int n = 0; n < sampleCount; n++)
    {
        // Normalize to [-1, 1) floating point.
        float normalized = shortView.ShortBuffer[n] / 32768f;
        // Feed the FFT aggregator with every sample, in order.
        sampleAggregator.Add(normalized);
        float magnitude = normalized < 0 ? -normalized : normalized;
        if (magnitude > peak)
        {
            peak = magnitude;
        }
    }

    // Marshal the peak back onto the UI thread.
    this.BeginInvoke(new MyDelegate(showResult), this, peak);
}
/// <summary>
/// Initializes the mixer with a provisional hard-coded sample rate and buffer
/// size (actual hardware values unknown per the TODOs), allocates the stereo
/// float output buffer, and initializes the buffered output provider.
/// NOTE(review): _trackBuffers is indexed but never allocated here — assumed
/// to be initialized elsewhere (field initializer?); confirm.
/// </summary>
public Mixer(Config config)
{
    Config = config;
    const int sampleRate = 13379; // TODO: Actual value unknown
    SamplesPerBuffer = 224; // TODO
    // Reciprocals precomputed so per-sample math multiplies instead of divides.
    SampleRateReciprocal = 1f / sampleRate;
    _samplesReciprocal = 1f / SamplesPerBuffer;
    // Stereo output: two floats per sample frame.
    int amt = SamplesPerBuffer * 2;
    _audio = new WaveBuffer(amt * sizeof(float)) { FloatBufferCount = amt };
    // One scratch buffer per track.
    for (int i = 0; i < Player.NumTracks; i++)
    {
        _trackBuffers[i] = new float[amt];
    }
    _buffer = new BufferedWaveProvider(WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, 2)) // TODO
    { DiscardOnBufferOverflow = true, BufferLength = SamplesPerBuffer * 64 };
    Init(_buffer);
}
/// <summary>
/// Returns a pooled wave buffer of the requested size, or allocates a new one
/// when the pool is empty or the pooled buffer has the wrong size.
/// </summary>
/// <param name="size">Required buffer size in bytes.</param>
private IWaveBuffer GetWaveBuffer(uint size)
{
    IWaveBuffer waveBuffer = null;
    // Fix: release the mutex in a finally block so an exception while touching
    // the pool cannot leave the mutex held forever (abandoned-mutex deadlock).
    m_waveBufferMutex.WaitOne();
    try
    {
        if (m_waveBuffers.Count > 0)
        {
            waveBuffer = m_waveBuffers[0];
            m_waveBuffers.RemoveAt(0);
        }
    }
    finally
    {
        m_waveBufferMutex.ReleaseMutex();
    }
    // Discard a pooled buffer whose size does not match the request.
    if (waveBuffer != null && waveBuffer.ByteBuffer.Length != size)
    {
        waveBuffer = null;
    }
    if (waveBuffer == null)
    {
        var byteArray = new byte[size];
        waveBuffer = new WaveBuffer(byteArray);
    }
    return waveBuffer;
}
/// <summary>
/// Reads bytes from this wave stream
/// </summary>
/// <param name="destBuffer">The destination buffer</param>
/// <param name="offset">Offset into the destination buffer</param>
/// <param name="numBytes">Number of bytes read</param>
/// <returns>Number of bytes read.</returns>
public int Read(byte[] destBuffer, int offset, int numBytes)
{
    // One float source sample per 16-bit output sample.
    int samplesRequired = numBytes / 2;
    sourceBuffer = BufferHelpers.Ensure(sourceBuffer, samplesRequired);
    int sourceSamples = sourceProvider.Read(sourceBuffer, 0, samplesRequired);

    var destView = new WaveBuffer(destBuffer);
    int destIndex = offset / 2;
    for (int n = 0; n < sourceSamples; n++)
    {
        // Apply volume, then clip to [-1, 1] before scaling to 16-bit.
        float adjusted = sourceBuffer[n] * volume;
        if (adjusted > 1.0f)
        {
            adjusted = 1.0f;
        }
        else if (adjusted < -1.0f)
        {
            adjusted = -1.0f;
        }
        destView.ShortBuffer[destIndex++] = (short)(adjusted * 32767);
    }
    return sourceSamples * 2;
}
/// <summary>
/// Reads float samples from the source provider and narrows each one to a
/// single unsigned byte in the destination buffer.
/// NOTE(review): samplesRequired and destOffset are divided by the
/// SourceBits/TargetBits ratio; if each output byte consumes exactly one
/// source sample this underfills the requested byte count — verify against
/// the declared SourceBitsPerSample/TargetBitsPerSample constants.
/// NOTE(review): a full-scale sample (+1.0) maps to 254, not 255, because the
/// scale factor is sbyte.MaxValue (127) — confirm the intended 8-bit encoding.
/// </summary>
/// <param name="buffer">Destination byte buffer.</param>
/// <param name="offset">Byte offset into the destination buffer.</param>
/// <param name="bytesCount">Number of destination bytes requested.</param>
/// <returns>The number of source samples converted.</returns>
public int Read(byte[] buffer, int offset, int bytesCount)
{
    // How many [SourceBitsPerSample] samples I need to read for reaching [TargetBitsPerSample] bytesCount
    const int ratio = SourceBitsPerSample / TargetBitsPerSample;
    var samplesRequired = bytesCount / ratio;
    //_sourceBuffer = BufferHelpers.Ensure(_sourceBuffer, samplesRequired);
    // Grow the scratch buffer only when needed; it is reused across calls.
    if (_sourceBuffer == null || _sourceBuffer.Length < samplesRequired)
    {
        _sourceBuffer = new float[samplesRequired];
    }
    var sourceSamples = _sourceProvider.Read(_sourceBuffer, 0, samplesRequired);
    var destWaveBuffer = new WaveBuffer(buffer);
    var destOffset = offset / ratio;
    for (var index = 0; index < sourceSamples; index++)
    {
        var sample = _sourceBuffer[index];
        // sample fits in [-1, 1] so we first add 1 to make it [0, 2];
        var normalizedSample = (sample + 1);
        // multiplying by sbyte.MaxValue to obtain the most significant bits
        destWaveBuffer.ByteBuffer[destOffset++] = (byte)(normalizedSample * sbyte.MaxValue);
    }
    return sourceSamples;
}
/// <summary>
/// Replays the active recording through the sample aggregator so the waveform
/// display can be rebuilt, then loads the file into the audio player.
/// </summary>
private void RenderFile()
{
    SampleAggregator.RaiseRestart();
    using (WaveFileReader reader = new WaveFileReader(this.voiceRecorderState.ActiveFile))
    {
        this.samplesPerSecond = reader.WaveFormat.SampleRate;
        // One waveform notification per tenth of a second of audio.
        SampleAggregator.NotificationCount = reader.WaveFormat.SampleRate / 10;
        byte[] buffer = new byte[1024];
        // Object initializer replaces the separate ByteBufferCount assignment.
        WaveBuffer waveBuffer = new WaveBuffer(buffer) { ByteBufferCount = buffer.Length };
        int bytesRead;
        do
        {
            bytesRead = reader.Read(waveBuffer, 0, buffer.Length);
            // 16-bit PCM: two bytes per sample, normalized to [-1, 1).
            int samples = bytesRead / 2;
            // Fix: the old per-sample "if (bytesRead > 0)" test was redundant —
            // when bytesRead is 0, samples is 0 and the loop body never runs.
            for (int sample = 0; sample < samples; sample++)
            {
                sampleAggregator.Add(waveBuffer.ShortBuffer[sample] / 32768f);
            }
        } while (bytesRead > 0);
        int totalSamples = (int)reader.Length / 2;
        TotalWaveFormSamples = totalSamples / sampleAggregator.NotificationCount;
        SelectAll();
    }
    audioPlayer.LoadFile(this.voiceRecorderState.ActiveFile);
}
/// <summary>
/// Reads bytes by presenting the destination as a float view and delegating
/// to the 32-bit sample source.
/// </summary>
public int Read(byte[] buffer, int offset, int count)
{
    var floatView = new WaveBuffer(buffer);
    int samplesCopied = this.source.Read(floatView.FloatBuffer, offset / 4, count / 4);
    return samplesCopied * 4;
}
/// <summary>
/// Computes one peak value per group of <paramref name="samplesPerPeak"/>
/// 16-bit samples, reusing a cached peak buffer and wave-buffer view.
/// </summary>
/// <param name="samples">Raw audio bytes (16-bit PCM).</param>
/// <param name="bytes">Number of valid bytes in <paramref name="samples"/>.</param>
/// <param name="samplesPerPeak">Group size over which each peak is taken.</param>
/// <returns>The shared peak buffer (valid up to the number of groups).</returns>
public short[] FindPeaks(byte[] samples, int bytes, int samplesPerPeak)
{
    if (peakBuffer == null)
    {
        // +1 slot leaves room for a trailing partial group.
        peakBuffer = new short[bytes / (2 * samplesPerPeak) + 1];
    }
    if (waveBuffer == null)
    {
        waveBuffer = new WaveBuffer(samples); // nb assumes samples is always the same
    }
    var peakOffset = 0;
    var inputSamples = bytes / 2;
    short currentMax = 0;
    var sample = 0;
    while (sample < inputSamples)
    {
        currentMax = Math.Max(waveBuffer.ShortBuffer[sample], currentMax);
        sample++;
        if (sample % samplesPerPeak == 0)
        {
            peakBuffer[peakOffset++] = currentMax;
            currentMax = 0;
        }
    }
    // Fix: flush the final partial group — previously its peak was silently
    // dropped whenever inputSamples was not a multiple of samplesPerPeak,
    // even though the buffer was sized with an extra slot for it.
    if (inputSamples % samplesPerPeak != 0)
    {
        peakBuffer[peakOffset] = currentMax;
    }
    return peakBuffer;
}
/// <summary>
/// Byte-oriented Read that forwards to the float-based overload, treating the
/// destination buffer as 32-bit IEEE-float samples.
/// </summary>
public int Read(byte[] buffer, int offset, int count)
{
    var view = new WaveBuffer(buffer);
    // 4 bytes per float sample, both for the offset and the count.
    return Read(view.FloatBuffer, offset / 4, count / 4) * 4;
}
/// <summary>
/// Fills the destination with float samples copied from the internal audio
/// buffer, reporting the number of bytes delivered.
/// </summary>
public int Read(byte[] buffer, int offset, int count)
{
    var floatView = new WaveBuffer(buffer);
    int copied = audioBuffer.CopyToArray(floatView.FloatBuffer, offset / 4, count / 4);
    return copied * 4;
}
/// <summary>
/// Base constructor for Speex-based chat codecs: creates the encoder/decoder
/// pair for the given band mode and a 16-bit mono recording format.
/// </summary>
/// <param name="bandMode">Speex band mode (narrow/wide/ultra-wide).</param>
/// <param name="sampleRate">Recording sample rate in Hz.</param>
/// <param name="description">Human-readable codec description.</param>
protected SpeexChatCodec(BandMode bandMode, int sampleRate, string description)
{
    decoder = new SpeexDecoder(bandMode);
    encoder = new SpeexEncoder(bandMode);
    // 16-bit mono input for the encoder.
    recordingFormat = new WaveFormat(sampleRate, 16, 1);
    this.description = description;
    // Sized for a full second of audio.
    encoderInputBuffer = new WaveBuffer(recordingFormat.AverageBytesPerSecond); // more than enough
}
/// <summary>
/// Pulls the next chunk of source audio into the reusable source buffer and
/// resets the read cursor.
/// NOTE(review): bytes are computed as samplePairsRequired * 2 and samples as
/// bytes / 2, i.e. one 16-bit sample per "pair" — confirm the expected source
/// format (a stereo pair of 16-bit samples would require * 4).
/// </summary>
/// <param name="source">Provider supplying the raw bytes.</param>
/// <param name="samplePairsRequired">Number of sample pairs to load.</param>
public void LoadNextChunk(IWaveProvider source, int samplePairsRequired)
{
    int sourceBytesRequired = samplePairsRequired * 2;
    sourceSample = 0;
    sourceBuffer = BufferHelpers.Ensure(sourceBuffer, sourceBytesRequired);
    sourceWaveBuffer = new WaveBuffer(sourceBuffer);
    // Convert bytes actually read into a 16-bit sample count.
    sourceSamples = source.Read(sourceBuffer, 0, sourceBytesRequired) / 2;
}
/// <summary>
/// Byte-oriented Read that forwards to the 16-bit sample overload, viewing the
/// destination buffer as shorts.
/// </summary>
public int Read(byte[] buffer, int offset, int count)
{
    var shortView = new WaveBuffer(buffer);
    // 2 bytes per 16-bit sample, both for the offset and the count.
    return Read(shortView.ShortBuffer, offset / 2, count / 2) * 2;
}
/// <summary>
/// Reads from this provider
/// </summary>
public int Read(byte[] buffer, int offset, int count)
{
    var view = new WaveBuffer(buffer);
    // Delegate to the float-sample source; 4 bytes per sample.
    int samplesDelivered = source.Read(view.FloatBuffer, offset / 4, count / 4);
    return samplesDelivered * 4;
}
/// <summary>
/// Pulls the next chunk of source audio into the reusable source buffer and
/// resets the read cursor.
/// NOTE(review): 4 bytes are read per sample pair and the sample count divides
/// by 2, i.e. two 16-bit samples (one stereo frame) per pair — confirm the
/// expected source format.
/// </summary>
/// <param name="source">Provider supplying the raw bytes.</param>
/// <param name="samplePairsRequired">Number of stereo sample pairs to load.</param>
public void LoadNextChunk(IWaveProvider source, int samplePairsRequired)
{
    // 4 bytes per stereo pair of 16-bit samples.
    int sourceBytesRequired = samplePairsRequired * 4;
    sourceBuffer = GetSourceBuffer(sourceBytesRequired);
    sourceWaveBuffer = new WaveBuffer(sourceBuffer);
    // Number of 16-bit samples actually obtained.
    sourceSamples = source.Read(sourceBuffer, 0, sourceBytesRequired) / 2;
    sourceSample = 0;
}
/// <summary>
/// Reads bytes from this wave stream
/// </summary>
/// <param name="destBuffer">The destination buffer</param>
/// <param name="offset">Offset into the destination buffer</param>
/// <param name="numBytes">Number of bytes read</param>
/// <returns>Number of bytes read.</returns>
public int Read(byte[] destBuffer, int offset, int numBytes)
{
    int wanted = numBytes / 2; // one float source sample per 16-bit output sample
    sourceBuffer = BufferHelpers.Ensure(sourceBuffer, wanted);
    int got = sourceProvider.Read(sourceBuffer, 0, wanted);

    var dest = new WaveBuffer(destBuffer);
    int outIndex = offset / 2;
    for (int i = 0; i < got; i++)
    {
        // Volume-scale, clip to [-1, 1], then quantize to 16 bits.
        float v = sourceBuffer[i] * volume;
        v = v > 1.0f ? 1.0f : (v < -1.0f ? -1.0f : v);
        dest.ShortBuffer[outIndex++] = (short)(v * 32767);
    }
    return got * 2;
}