/// <summary>
/// Builds an interleaved waveform array from two in-memory WAV buffers
/// (forward and backward variants of the same clip) and publishes a snapshot
/// of it on the UI thread.
/// </summary>
/// <param name="argument">Carries the raw WAV bytes (Path/BackPath) and the number of points to produce.</param>
/// <returns>true when generation was cancelled via the background worker; otherwise false.</returns>
private bool ProcessPairWaveForm(WaveformGenerationParams argument)
{
    bool isCancel = false;

    // 'using' guarantees the readers/channels are released even if reading
    // throws (the original manual Close/Dispose chain leaked on exceptions).
    using (WaveFileReader forwardStream = new WaveFileReader(new MemoryStream(argument.Path)))
    using (WaveFileReader backStream = new WaveFileReader(new MemoryStream(argument.BackPath)))
    using (WaveChannel32 forwardChannel = new WaveChannel32(forwardStream))
    using (WaveChannel32 backChannel = new WaveChannel32(backStream))
    {
        backChannel.Sample += waveStream_Sample;
        forwardChannel.Sample += waveStream_Sample;

        // One frame covers 2/Points of the stream, aligned down to a whole block.
        long frameLength = 2 * backChannel.Length / argument.Points;
        frameLength = frameLength - frameLength % backChannel.WaveFormat.BlockAlign;
        waveformAggregator = new SampleAggregator((int)(frameLength / backChannel.WaveFormat.BlockAlign));

        float[] numArray = new float[argument.Points];
        byte[] buffer = new byte[frameLength];
        int factPointsCount = argument.Points / 2;

        // Even slots come from the backward stream, odd slots from the forward stream.
        for (int i = 0; i < factPointsCount; i++)
        {
            backChannel.Read(buffer, 0, buffer.Length);
            numArray[i * 2] = waveformAggregator.LeftMaxVolume * verticalScale;
            forwardChannel.Read(buffer, 0, buffer.Length);
            numArray[i * 2 + 1] = waveformAggregator.LeftMaxVolume * verticalScale;

            if (this.waveformGenerateWorker.CancellationPending)
            {
                isCancel = true;
                break;
            }
        }

        // Publish a defensive copy so the UI never sees later mutations.
        float[] finalClonedData = (float[])numArray.Clone();
        Application.Current.Dispatcher.Invoke(new Action(() => this.WaveformData = finalClonedData));
    }

    return isCancel;
}
/// <summary>
/// Reads "&lt;name&gt;.wav", plots every 30th sample on chart1 ("Eroare" series),
/// runs an FFT over the collected samples and plots every 30th FFT bin on
/// chart2 ("wave" series).
/// </summary>
/// <param name="name">File name without the .wav extension.</param>
private void FFt(string name)
{
    name = $"{name}.wav";

    chart1.Series.RemoveAt(0);
    chart1.Series.Add("Eroare");
    chart1.Series["Eroare"].ChartType = System.Windows.Forms.DataVisualization.Charting.SeriesChartType.FastLine;
    chart1.Series["Eroare"].ChartArea = "ChartArea1";

    // 'using' disposes the channel (and the reader it wraps) even if reading
    // throws — the original leaked it on exceptions.
    using (WaveChannel32 wave32 = new WaveChannel32(new WaveFileReader(name)))
    {
        byte[] buffer = new byte[2048];
        while (wave32.Position < wave32.Length)
        {
            int read = wave32.Read(buffer, 0, 2048);
            // WaveChannel32 yields 4-byte float samples; take every 30th to thin the plot.
            for (int i = 0; i < read / 4; i += 30)
            {
                chart1.Series["Eroare"].Points.Add(BitConverter.ToSingle(buffer, i * 4));
                waveData.Add(BitConverter.ToSingle(buffer, i * 4));
            }
        }
    }

    fftData = FFT.GetData(waveData.ToArray());
    waveData = new List<double>();

    chart2.Series.RemoveAt(0);
    chart2.Series.Add("wave");
    chart2.Series["wave"].ChartType = System.Windows.Forms.DataVisualization.Charting.SeriesChartType.FastLine;

    // The original wrote 'i = i += 30' — a redundant self-assignment; plain
    // 'i += 30' is equivalent.
    for (int i = 0; i < fftData.Count - 1; i += 30)
    {
        chart2.Series["wave"].Points.Add(Math.Round(fftData[i], 8));
    }
}
/// <summary>
/// Reads every 32-bit float sample out of the given wave stream.
/// </summary>
/// <param name="ws">Source wave stream to sample.</param>
/// <returns>List of the waveform points.</returns>
public static List<float> GetWavePoints(WaveStream ws)
{
    var result = new List<float>();
    var channel = new WaveChannel32(ws);
    var chunk = new byte[16384];

    // Walk the stream chunk by chunk until every byte has been consumed.
    while (channel.Position < channel.Length)
    {
        int bytesRead = channel.Read(chunk, 0, chunk.Length);

        // Each 4-byte group is one IEEE-754 float sample.
        for (int offset = 0; offset + 4 <= bytesRead; offset += 4)
        {
            result.Add(BitConverter.ToSingle(chunk, offset));
        }

        // Keep the form responsive while reading large files.
        Application.DoEvents();
    }

    return result;
}
/// <summary>
/// Loads "file.mp3" and, for each 32 KB chunk, records the sample with the
/// largest absolute amplitude as a TLSample whose x-coordinate advances by 2
/// per chunk.
/// </summary>
public void LoadFile()
{
    var channel = new WaveChannel32(new Mp3FileReader(@"file.mp3"));
    var chunk = new byte[16384 * 2];
    var x = 0;

    while (channel.Position < channel.Length)
    {
        int bytesRead = channel.Read(chunk, 0, chunk.Length);

        // Track the (signed) sample whose magnitude is the largest in this chunk.
        float peak = 0f;
        float peakMagnitude = 0f;
        for (int sampleIndex = 0; sampleIndex < bytesRead / 4; sampleIndex++)
        {
            float value = BitConverter.ToSingle(chunk, sampleIndex * 4);
            float magnitude = Math.Abs(value);
            if (magnitude > peakMagnitude)
            {
                peakMagnitude = magnitude;
                peak = value;
            }
        }

        x += 2;
        Samples.Add(new TLSample(x, peak));
    }
}
/// <summary>
/// Lets the user pick a .wav file and plots a decimated waveform on chart1:
/// one 32-bit float is read every <c>sampleSize</c> (1024) bytes, i.e. every
/// 256th sample. NOTE(review): if a full-resolution plot was intended, the
/// stride should be 4 bytes — confirm before changing.
/// </summary>
private void buttonDrawChart_Click(object sender, EventArgs e)
{
    // Dispose the dialog and the wave chain deterministically (the original leaked both).
    using (OpenFileDialog open = new OpenFileDialog())
    {
        open.Filter = " Wave File (*.wav)|*.wav;";
        if (open.ShowDialog() != DialogResult.OK)
        {
            return;
        }

        using (WaveChannel32 wave = new WaveChannel32(new WaveFileReader(open.FileName)))
        {
            int sampleSize = 1024;
            var bufferSize = 16384 * sampleSize;
            var buffer = new byte[bufferSize];

            chart1.Series.Add("wave");
            chart1.Series["wave"].ChartType = System.Windows.Forms.DataVisualization.Charting.SeriesChartType.FastLine;
            chart1.Series["wave"].ChartArea = "ChartArea1";

            while (wave.Position < wave.Length)
            {
                int read = wave.Read(buffer, 0, bufferSize);
                for (int i = 0; i < read / sampleSize; i++)
                {
                    var point = BitConverter.ToSingle(buffer, i * sampleSize);
                    chart1.Series["wave"].Points.Add(point);
                }
            }
        }
    }
}
/// <summary>
/// Fills the buffer from the source stream, rewinding to the start when the
/// end is reached so playback loops (while EnableLooping is set).
/// </summary>
/// <param name="buffer">Destination buffer.</param>
/// <param name="offset">Write offset into the destination buffer.</param>
/// <param name="count">Number of bytes requested.</param>
/// <returns>Total number of bytes copied; may be less than requested.</returns>
public override int Read(byte[] buffer, int offset, int count)
{
    int copied = 0;

    while (copied < count)
    {
        int chunk = sourceStream.Read(buffer, offset + copied, count - copied);

        if (chunk == 0 || sourceStream.Position > sourceStream.Length)
        {
            // Give up when looping is off, or when the source yields nothing
            // even from position 0 (a broken or empty source stream).
            if (sourceStream.Position == 0 || !EnableLooping)
            {
                break;
            }

            // Rewind and keep filling from the top of the stream.
            sourceStream.Position = 0;
        }

        copied += chunk;
    }

    return copied;
}
/// <summary>
/// Reads the whole wave stream and plots channel 1's samples against time on
/// chart1's "Audio" series.
/// NOTE(review): the loop advances i by 'channels' but writes to index j == i,
/// so for multi-channel audio channel1Data is sparsely filled (the gaps stay 0)
/// and x-values skip frames — confirm whether per-frame indexing was intended.
/// </summary>
/// <param name="sample">32-bit channel stream whose samples are plotted.</param>
private void UpdateWaveform(WaveChannel32 sample)
{
    int channels = sample.WaveFormat.Channels;
    int bytesPer = sample.WaveFormat.BitsPerSample / 8; // bytes per single sample
    float sampleRate = sample.WaveFormat.SampleRate;

    //finding the original waveform
    byte[] data = new byte[sample.Length];
    float[] channel1Data = new float[sample.Length / (bytesPer * channels)];
    // NOTE(review): channel2Data is allocated but never filled or used below.
    float[] channel2Data = new float[sample.Length / (bytesPer * channels)];
    // NOTE(review): assumes a single Read fills the whole buffer — verify for large files.
    int read = sample.Read(data, 0, (int)sample.Length);

    //adding to the chart
    chart1.Series.Clear();
    var series1 = new Series
    {
        Name = "Audio",
        Color = Color.Red,
        ChartType = SeriesChartType.Line
    };
    chart1.Series.Add(series1);

    //wave files store bytes from separate channels alternating - therefore this will read from 1 channel
    for (int i = 0; i < (channel1Data.Length); i += channels)
    {
        int j = i;
        channel1Data[j] = (float)BitConverter.ToSingle(data, i * bytesPer);
        chart1.Series["Audio"].Points.AddXY((float)j / sampleRate, channel1Data[j]);
    }

    //resetting trim bars
}
/// <summary>
/// Lets the user pick a .wav file, plots every 32-bit float sample on chart1's
/// "wave" series, then loads the same file into waveViewer1.
/// </summary>
private void button3_Click(object sender, EventArgs e)
{
    // Dispose the dialog and the chart-reading stream deterministically
    // (the original leaked both).
    using (var openFileDialog = new OpenFileDialog { Filter = "Wave file (*.wav)|*.wav" })
    {
        if (openFileDialog.ShowDialog() != DialogResult.OK)
        {
            return;
        }

        chart1.Series.Add("wave");
        chart1.Series["wave"].ChartType = System.Windows.Forms.DataVisualization.Charting.SeriesChartType.FastLine;
        chart1.Series["wave"].ChartArea = "ChartArea1";

        using (var wave = new WaveChannel32(new WaveFileReader(openFileDialog.FileName)))
        {
            byte[] buffer = new byte[16384];
            while (wave.Position < wave.Length)
            {
                int read = wave.Read(buffer, 0, 16384);
                // WaveChannel32 output is IEEE float samples, 4 bytes each.
                for (int i = 0; i < read / 4; i++)
                {
                    chart1.Series["wave"].Points.Add(BitConverter.ToSingle(buffer, i * 4));
                }
            }
        }

        // The viewer keeps its own reader; do not dispose this one here.
        waveViewer1.WaveStream = new NAudio.Wave.WaveFileReader(openFileDialog.FileName);
    }
}
/// <summary>
/// Loads a wave file, wraps it in a 32-bit channel stream and pre-computes a
/// reversed copy of the raw sample data for backwards playback.
/// </summary>
/// <param name="fileName">Path of the .wav file to load.</param>
public AudioSample(string fileName)
{
    _fileName = fileName;
    WaveFileReader reader = new WaveFileReader(fileName);
    //offsetStream = new WaveOffsetStream(reader);
    //channelStream = new WaveChannel32(offsetStream);
    channelStream = new WaveChannel32(reader);
    muted = false;
    volume = 1.0f;

    // Reverse the sample
    NAudioBufferReverse nbr = new NAudioBufferReverse();

    // Setup a byte array which will store the reversed sample, ready for playback
    reversedSample = new byte[(int)channelStream.Length];

    // Read the channelStream sample in to the reversedSample byte array.
    // NOTE(review): assumes one Read call fills the whole buffer — verify for
    // very large files.
    channelStream.Read(reversedSample, 0, (int)channelStream.Length);

    // Calculate how many bytes are used per sample; whole samples are swapped
    // in positioning by the reverse class.
    bytesPerSample = (channelStream.WaveFormat.BitsPerSample / 8) * channelStream.WaveFormat.Channels;

    // Pass in the byte array storing a copy of the sample, and save back to the
    // reversedSample byte array
    reversedSample = nbr.reverseSample(reversedSample, (int)channelStream.Length, bytesPerSample);
}
/// <summary>
/// Opens a user-selected .wav file, points waveViewer1 at it, and plots every
/// 32-bit float sample on chart1's "wave" series.
/// </summary>
private void openToolStripMenuItem_Click(object sender, EventArgs e)
{
    // WAV File Open — dispose the dialog deterministically (the original leaked it).
    using (OpenFileDialog open = new OpenFileDialog())
    {
        open.Filter = "WAV File (*.wav)|*.wav;";
        if (open.ShowDialog() != DialogResult.OK)
        {
            return;
        }

        waveViewer1.BackColor = Color.White;
        waveViewer1.SamplesPerPixel = 400;
        waveViewer1.StartPosition = 40000;
        // The viewer owns this reader; do not dispose it here.
        waveViewer1.WaveStream = new WaveFileReader(open.FileName);

        chart1.Series.Add("wave");
        chart1.Series["wave"].ChartType = System.Windows.Forms.DataVisualization.Charting.SeriesChartType.FastLine;
        chart1.Series["wave"].ChartArea = "ChartArea1";

        // A second, chart-only reader over the same file; dispose it when done
        // (the original leaked it).
        using (WaveChannel32 wave = new WaveChannel32(new WaveFileReader(open.FileName)))
        {
            byte[] buffer = new byte[16384];
            while (wave.Position < wave.Length)
            {
                int read = wave.Read(buffer, 0, 16384);
                for (int i = 0; i < read / 4; i++)
                {
                    chart1.Series["wave"].Points.Add(BitConverter.ToSingle(buffer, i * 4));
                }
            }
        }
    }
}
/// <summary>
/// Streams the sound through in fixed-size chunks and appends each chunk's
/// amplitude extremes to the waveform display. The 'max'/'min' fields are
/// reset before every Read; presumably the Sound0_Sample handler (wired here)
/// updates them as samples pass through — confirm against that handler.
/// </summary>
/// <param name="sound">32-bit channel stream to scan; closed when done.</param>
/// <param name="index">Slot index of the sound (unused inside this method).</param>
private void LoadSound(WaveChannel32 sound, int index)
{
    int count = 0;
    int read = 0;
    sound.Sample += Sound0_Sample;

    // Chunk size scales with sample rate and channel count. Integer-division
    // order matters in this expression — keep it as written.
    bufferSize = 1024 * sampleRate * 16 / 256000 * Channels;
    byte[] buffer = new byte[bufferSize];

    while (sound.Position < sound.Length)
    {
        // Reset extremes for this chunk before Read fires the Sample events.
        max = -1;
        min = 1;
        read = sound.Read(buffer, 0, bufferSize);
        pwfc.WaveFormDisplay.AddValue(max, min);
        count++;
    }

    sound.Close();
    //Debug.WriteLine("Sound is " + sound.TotalTime.TotalMilliseconds + "ms long");
    //Debug.WriteLine("Sound is " + wfr.Length + " bytes");
    //Debug.WriteLine("Called addvalue " + count + " times");
}
/// <summary>
/// Background waveform generation for the current song: reads the MP3 one
/// frame at a time, collects per-frame left/right peak volumes, then scales
/// all peaks into 0..31 bytes and stores them in song.WaveformData.
/// Cancellation is cooperative via the 'tkn' token.
/// </summary>
private void waveformGenerateWorker_DoWork()
{
#if (MARKERS)
    var span = Markers.EnterSpan("waveformGen");
#endif
    using (Mp3FileReader waveformMp3Stream = new Mp3FileReader(song.FileName))
    using (WaveChannel32 waveformInputStream = new WaveChannel32(waveformMp3Stream))
    {
        waveformInputStream.Sample += waveStream_Sample;
        int frameCount = (int)((float)waveformInputStream.Length / frameLength);
        byte[] readBuffer = new byte[frameLength];
        waveformAggregator = new SampleAggregator(frameLength);
        int currentPointIndex = 0;
        // Two entries per frame: left peak followed by right peak.
        float[] waveformArray = new float[frameCount * 2];
        float waveformLeftMax = 0;
        float waveformRightMax = 0;
        while (currentPointIndex < frameCount * 2)
        {
            // Reading raises Sample events which presumably feed the
            // aggregator via waveStream_Sample — confirm in that handler.
            waveformInputStream.Read(readBuffer, 0, readBuffer.Length);
            var leftMaxVolume = waveformAggregator.LeftMaxVolume;
            var rightMaxVolume = waveformAggregator.RightMaxVolume;
            waveformArray[currentPointIndex++] = leftMaxVolume;
            waveformArray[currentPointIndex++] = rightMaxVolume;
            // Track global peaks for the final normalization below.
            if (leftMaxVolume > waveformLeftMax)
            {
                waveformLeftMax = leftMaxVolume;
            }
            if (rightMaxVolume > waveformRightMax)
            {
                waveformRightMax = rightMaxVolume;
            }
            waveformAggregator.Clear();
            tkn.ThrowIfCancellationRequested();
        }
        // Normalize every peak into a 0..31 (5-bit) byte value.
        byte[] waveformBytes = new byte[waveformArray.Length];
        float factor = 31f / Math.Max(Math.Abs(waveformLeftMax), Math.Abs(waveformRightMax));
        for (int ndx = 0; ndx < waveformArray.Length; ndx++)
        {
            waveformBytes[ndx] = (byte)Math.Abs(Math.Abs(waveformArray[ndx]) * factor);
        }
        song.WaveformData = waveformBytes;
    }
#if (MARKERS)
    span.Leave();
#endif
}
/// <summary>
/// Worker loop: pulls audio from _waveChannel, pushes it through SoundTouch
/// (tempo/pitch processing) and feeds the processed bytes to _provider for
/// playback, throttling whenever the provider is more than 3 buffers ahead.
/// Runs until the stream ends or _stopWorker is set.
/// </summary>
private void ProcessWave()
{
    const int bufferSize = 1024 * 10;
    byte[] inputBuffer = new byte[bufferSize * sizeof(float)];
    byte[] soundTouchOutBuffer = new byte[bufferSize * sizeof(float)];
    // Presumably these converters expose the same memory as both bytes and
    // floats (union-style) — confirm in ByteAndFloatsConverter.
    ByteAndFloatsConverter convertInputBuffer = new ByteAndFloatsConverter { Bytes = inputBuffer };
    ByteAndFloatsConverter convertOutputBuffer = new ByteAndFloatsConverter { Bytes = soundTouchOutBuffer };
    // NOTE(review): 'buffer' is only referenced by the commented-out read below.
    byte[] buffer = new byte[bufferSize];
    _stopWorker = false;
    while (!_stopWorker && _waveChannel.Position < _waveChannel.Length)
    {
        int bytesRead = _waveChannel.Read(convertInputBuffer.Bytes, 0, convertInputBuffer.Bytes.Length);
        //bytesRead = _waveChannel.Read(buffer, 0, BUFFER_SIZE);
        //bytesRead = _reader.Read(convertInputBuffer.Bytes, 0, convertInputBuffer.Bytes.Length);
        // Bytes -> per-channel sample count (4-byte floats per channel).
        int floatsRead = bytesRead / ((sizeof(float)) * _waveChannel.WaveFormat.Channels);
        _soundTouch.PutSamples(convertInputBuffer.Floats, (uint)floatsRead);
        uint receivecount;
        do
        {
            if (WaitingMode)
            {
                SetSoundTouchValues();
            }
            // Drain everything SoundTouch currently has ready.
            uint outBufferSizeFloats = (uint)convertOutputBuffer.Bytes.Length / (uint)(sizeof(float) * _waveChannel.WaveFormat.Channels);
            receivecount = _soundTouch.ReceiveSamples(convertOutputBuffer.Floats, outBufferSizeFloats);
            if (receivecount > 0)
            {
                _provider.AddSamples(convertOutputBuffer.Bytes, 0, (int)receivecount * sizeof(float) * _reader.WaveFormat.Channels, _reader.CurrentTime);
                // Back off while the playback provider is sufficiently ahead.
                while (_provider.BuffersCount > 3)
                {
                    Thread.Sleep(10);
                }
            }
        } while (!_stopWorker && receivecount != 0);
    }
    _reader.Close();
}
/// <summary>
/// Overridden Read function that returns samples processed with SoundTouch. Returns data in same format as
/// WaveChannel32 i.e. stereo float samples. Also bumps the global
/// soundParseProgress counter as a crude progress report.
/// NOTE(review): the /8 and *8 factors assume 8 bytes per frame (2 channels x
/// 4-byte floats), i.e. stereo input — confirm for mono sources.
/// </summary>
/// <param name="buffer">Buffer where to return sample data</param>
/// <param name="offset">Offset from beginning of the buffer</param>
/// <param name="count">Number of bytes to return</param>
/// <returns>Number of bytes copied to buffer</returns>
public override int Read(byte[] buffer, int offset, int count)
{
    try
    {
        // Progress reporting: OGG parsing advances slower and is capped at ~50.
        if (ArcaeaSpeedChanger.GlobalVariable.soundParseFormat == SupportedAudioFormat.OGG)
        {
            if (ArcaeaSpeedChanger.GlobalVariable.soundParseProgress <= 50)
            {
                ArcaeaSpeedChanger.GlobalVariable.soundParseProgress += 2;
            }
        }
        else
        {
            ArcaeaSpeedChanger.GlobalVariable.soundParseProgress += 5;
        }
        //Console.WriteLine("Change!" + ArcaeaSpeedChanger.GlobalVariable.soundParseProgress);

        // Iterate until enough samples available for output:
        // - read samples from input stream
        // - put samples to SoundStretch processor
        while (st.AvailableSampleCount < count)
        {
            int nbytes = inputStr.Read(bytebuffer, 0, bytebuffer.Length);
            if (nbytes == 0)
            {
                // end of stream. flush final samples from SoundTouch buffers to output
                if (endReached == false)
                {
                    endReached = true; // do only once to avoid continuous flushing
                    st.Flush();
                }
                break;
            }

            // binary copy data from "byte[]" to "float[]" buffer
            Buffer.BlockCopy(bytebuffer, 0, floatbuffer, 0, nbytes);
            st.PutSamples(floatbuffer, (uint)(nbytes / 8));
        }

        // ensure that buffer is large enough to receive desired amount of data out
        if (floatbuffer.Length < count / 4)
        {
            floatbuffer = new float[count / 4];
        }
        // get processed output samples from SoundTouch
        int numsamples = (int)st.ReceiveSamples(floatbuffer, (uint)(count / 8));
        // binary copy data from "float[]" to "byte[]" buffer
        Buffer.BlockCopy(floatbuffer, 0, buffer, offset, numsamples * 8);
        return(numsamples * 8); // number of bytes
    }
    catch (Exception)
    {
        // NOTE(review): swallowing every exception and returning 0 silently ends
        // playback — consider logging before returning.
        return(0);
    }
}
/// <summary>
/// Overridden Read function that returns samples processed with SoundTouch and
/// then shaped by the equaliser. Returns data in same format as WaveChannel32
/// i.e. stereo float samples (8 bytes per frame — the /8 and *8 factors below
/// assume stereo).
/// </summary>
/// <param name="buffer">Buffer where to return sample data</param>
/// <param name="offset">Offset from beginning of the buffer</param>
/// <param name="count">Number of bytes to return</param>
/// <returns>Number of bytes copied to buffer</returns>
public override int Read(byte[] buffer, int offset, int count)
{
    try
    {
        // Iterate until enough samples available for output:
        // - read samples from input stream
        // - put samples to SoundStretch processor
        while (st.AvailableSampleCount < count)
        {
            int nbytes = inputStr.Read(bytebuffer, 0, bytebuffer.Length);
            if (nbytes == 0)
            {
                // end of stream. flush final samples from SoundTouch buffers to output
                if (endReached == false)
                {
                    endReached = true; // do only once to avoid continuous flushing
                    st.Flush();
                }
                break;
            }

            // binary copy data from "byte[]" to "float[]" buffer
            Buffer.BlockCopy(bytebuffer, 0, floatbuffer, 0, nbytes);
            st.PutSamples(floatbuffer, (uint)(nbytes / 8));
        }

        // ensure that buffer is large enough to receive desired amount of data out
        if (floatbuffer.Length < count / 4)
        {
            floatbuffer = new float[count / 4];
        }

        // get processed output samples from SoundTouch
        int numsamples = (int)st.ReceiveSamples(floatbuffer, (uint)(count / 8));

        // FIX: equalise exactly the samples just received. floatbuffer is
        // indexed from 0 ('offset' is a byte offset into 'buffer', not a float
        // index into floatbuffer) and only numsamples frames (numsamples * 2
        // floats) are valid; the original 'floatbuffer[i + offset]' over
        // count / 4 elements transformed stale data and could throw
        // IndexOutOfRangeException whenever offset != 0.
        for (int i = 0; i < numsamples * 2; i++)
        {
            floatbuffer[i] = equaliser.TransformSample(floatbuffer[i]);
        }

        // binary copy data from "float[]" to "byte[]" buffer
        Buffer.BlockCopy(floatbuffer, 0, buffer, offset, numsamples * 8);
        return(numsamples * 8); // number of bytes
    }
    catch (Exception exp)
    {
        StatusMessage.Write("exception in WaveStreamProcessor.Read: " + exp.Message);
        return(0);
    }
}
// crashes if no file has been loaded (tempFile missing) — preserved behavior.
/// <summary>
/// Copies the working temp file out to the user's chosen file as a wave file.
/// FIX: the original routed the data through a WaveChannel32 (whose output is
/// always 32-bit stereo float) while writing a header taken from
/// tempFileReader.WaveFormat — a data/header mismatch whenever the formats
/// differ. Reading from the reader itself keeps them consistent, and 'using'
/// disposes all resources even on exceptions.
/// </summary>
private void SaveButton_Click(object sender, EventArgs e)
{
    using (AudioFileReader tempFileReader = new AudioFileReader(tempFile))
    using (WaveFileWriter fileWriter = new WaveFileWriter(file, tempFileReader.WaveFormat))
    {
        byte[] buffer = new byte[tempFileReader.Length];
        int bytesRead = tempFileReader.Read(buffer, 0, buffer.Length);
        // Write only the bytes actually read, not the whole allocation.
        fileWriter.Write(buffer, 0, bytesRead);
    }
    recentSave = true;
}
/// <summary>
/// Renders a 1000x500 min/max waveform bitmap for the given audio stream.
/// Samples are mapped to y = sample * 200 + 250 (250 is the vertical centre,
/// which is also the initial fill of the min/max arrays).
/// </summary>
/// <param name="w">Audio stream to draw; disposed before returning.</param>
/// <param name="m">Message context (not used by the drawing itself).</param>
/// <returns>The rendered waveform bitmap.</returns>
public Bitmap DrawAudio(WaveStream w, IMessage m)
{
    var c = new WaveChannel32(w);

    float[] normSamplesMin = new float[1000];
    Array.Fill(normSamplesMin, 250);
    float[] normSamplesMax = new float[1000];
    Array.Fill(normSamplesMax, 250);

    byte[] buffer = new byte[1024];
    while (c.Position < c.Length)
    {
        int reader = c.Read(buffer, 0, buffer.Length);

        // FIX: iterate only over the bytes actually read and compute the
        // chunk's start position from the true read count; the original used
        // buffer.Length, which reprocessed stale bytes and misplaced the
        // final (short) chunk.
        for (int i = 0; i < reader / 4; i++)
        {
            float sample = BitConverter.ToSingle(buffer, i * 4) * 200 + 250;
            int index = (int)(((c.Position - reader) / 4.0 + i) / (c.Length / 4.0) * 1000);
            if (index >= normSamplesMax.Length)
            {
                continue;
            }
            if (normSamplesMax[index] < sample)
            {
                normSamplesMax[index] = sample;
            }
            if (normSamplesMin[index] > sample)
            {
                normSamplesMin[index] = sample;
            }
        }
    }

    // One vertical line per column, from the column's min to its max.
    int j = 0;
    Bitmap output = new Bitmap(1000, 500);
    using (Graphics graphics = Graphics.FromImage(output))
    using (var pen = new Pen(System.Drawing.Color.White)) // FIX: dispose the pen
    {
        graphics.DrawLines(pen, Enumerable.
            Range(0, 1000).
            Select(x => new Point[]
            {
                new Point(j, (int)normSamplesMin[x]),
                new Point(j++, (int)normSamplesMax[x]),
            }).
            SelectMany(x => x).
            ToArray());
    }
    w.Dispose();
    return output;
}
/// <summary>
/// Loads a normalized (0..1) waveform of roughly 'lenght' points from an audio
/// file: the stream is read in 'step'-byte chunks and each chunk contributes
/// the mean absolute value of the floats sampled every 1024 bytes.
/// </summary>
/// <param name="filename">Audio file to load.</param>
/// <param name="lenght">Desired number of waveform points.</param>
/// <returns>Waveform values rescaled to the [0, 1] range.</returns>
public static List<float> LoadWaveform(string filename, int lenght)
{
    var reader = new WaveChannel32(new AudioFileReader(filename));
    var res = new List<float>();

    int step = (int)(reader.Length / lenght);
    step -= step % 4; // keep each chunk aligned to whole 4-byte floats

    const int sampleSize = 1024;
    var buffer = new byte[step];
    while (reader.Position < reader.Length)
    {
        int readed = reader.Read(buffer, 0, step);
        int points = readed / sampleSize;
        float avg = 0;
        for (int i = 0; i < points; i++)
        {
            var point = BitConverter.ToSingle(buffer, i * sampleSize);
            avg += Math.Abs(point);
        }
        // FIX: guard a short final chunk with no full sample — the original
        // divided by zero here and pushed NaN into the list.
        if (points > 0)
        {
            avg /= points;
        }
        res.Add(avg);
    }
    reader.Close();

    // FIX: the original tested 're < max' when updating 'min', so 'min' was
    // tracked incorrectly and the normalization below used the wrong range.
    float max = Single.MinValue;
    float min = Single.MaxValue;
    foreach (var re in res)
    {
        if (re > max)
        {
            max = re;
        }
        if (re < min)
        {
            min = re;
        }
    }

    // Rescale every point into [0, 1].
    for (int i = 0; i < res.Count; ++i)
    {
        res[i] = (res[i] - min) / (max - min);
    }
    return res;
}
/// <summary>
/// Reads every float sample out of the recorder's captured wave file and
/// shows them in a new visualisation window.
/// </summary>
private void ReadWave()
{
    var samples = new List<float>();
    var channel = new WaveChannel32(new WaveFileReader(_recorder.GetFile()));
    var chunk = new byte[16384];

    while (channel.Position < channel.Length)
    {
        int bytesRead = channel.Read(chunk, 0, chunk.Length);

        // Each 4-byte group is one IEEE-754 float sample.
        for (int offset = 0; offset + 4 <= bytesRead; offset += 4)
        {
            samples.Add(BitConverter.ToSingle(chunk, offset));
        }
    }

    new VisuliseForm(samples.ToArray()).Show();
}
// wav: making the location of the tempfile the same as that of the normal
// file would make the temp file redundant.
/// <summary>
/// Opens a .wav/.wave file via a file dialog and copies it into the working
/// temp file. If the dialog is cancelled or a non-wave path is chosen, the
/// previously loaded file is kept.
/// FIX: the original routed data through a WaveChannel32 (always 32-bit
/// stereo float output) while writing a header from fileReader.WaveFormat — a
/// data/header mismatch; reading from the reader itself keeps them
/// consistent. All disposables now use 'using'.
/// </summary>
private void LoadButton_Click(object sender, EventArgs e)
{
    //https://docs.microsoft.com/en-us/dotnet/framework/winforms/controls/how-to-open-files-using-the-openfiledialog-component
    //version 1 - use file explorer to select file - with filtered file type (.wav)
    string fileOriginal = file;
    using (OpenFileDialog open = new OpenFileDialog { Filter = "WAVE files (*.wav, *.wave)|*.wav;*.wave" })
    {
        open.ShowDialog();
        file = open.FileName;
    }

    //checking if the file type is really .wav - prevents shortcuts from being loaded
    //forced to lower to assist in checking the string (.Wav should be the same as .wav)
    string ext = Path.GetExtension(file).ToLower();

    //if the user exits out of the dialog without selecting a file, this makes it so that the file doesn't change
    //it also prevents shortcuts from being loaded as files
    if (!(ext == ".wav" || ext == ".wave"))
    {
        file = fileOriginal;
        return;
    }

    using (AudioFileReader fileReader = new AudioFileReader(file))
    using (WaveFileWriter fileWriter = new WaveFileWriter(tempFile, fileReader.WaveFormat))
    {
        byte[] buffer = new byte[fileReader.Length];
        int bytesRead = fileReader.Read(buffer, 0, buffer.Length);
        panel1.Controls.Clear();
        // Write only the bytes actually read, not the whole allocation.
        fileWriter.Write(buffer, 0, bytesRead);
    }

    recentSave = true;
}
/// <summary>
/// Converts the raw bytes of a wave channel into (time, amplitude) points,
/// including every channel's samples.
/// NOTE(review): WaveChannel32 outputs 32-bit IEEE float samples, so byteDepth
/// is expected to be 4; the 8-byte/ToDouble branch would misread a float
/// stream — confirm whether it can ever be reached.
/// </summary>
/// <param name="waveChannel">Source of the sample data.</param>
/// <param name="waveFormat">Format used for byte depth, sample rate and channel count.</param>
/// <returns>One MyPoint per sample read.</returns>
private static List<MyPoint> GetSamples(WaveChannel32 waveChannel, WaveFormat waveFormat)
{
    int byteDepth = waveFormat.BitsPerSample / 8;
    // Seconds per sample at this sample rate.
    float frequency = 1.0f / waveFormat.SampleRate;
    int bufferSize = 8192;
    byte[] buffer = new byte[bufferSize];
    int read = 0;
    int sampleNumber = 0;
    List<MyPoint> samples = new List<MyPoint>();

    while (waveChannel.Position < waveChannel.Length)
    {
        read = waveChannel.Read(buffer, 0, bufferSize);
        for (int i = 0; i < read / byteDepth; i++)
        {
            float amplitude;
            if (byteDepth == 4)
            {
                amplitude = BitConverter.ToSingle(buffer, i * 4);
            }
            else
            {
                amplitude = (float)BitConverter.ToDouble(buffer, i * 8);
            }
            // sampleNumber counts interleaved samples across all channels, so
            // dividing by Channels yields an approximate frame time (samples
            // of the same frame get slightly different times).
            float time = sampleNumber * frequency * (1.0f / waveFormat.Channels);
            samples.Add(new MyPoint(time, amplitude));
            sampleNumber++;
        }
    }
    return(samples);
}
/// <summary>
/// Double-click handler: plots the selected sound's samples on chart1 and
/// starts playback through DirectSound. Bails out while a recording is in
/// progress (waveWriter != null) or when nothing is selected.
/// </summary>
void PlaySoundDBClick(object sender, EventArgs e)
{
    if (soundList.SelectedItems.Count == 0)
    {
        return;
    }
    if (waveWriter != null)
    {
        return;
    }
    DisposeWavePlay();

    string sFile = soundList.SelectedItem.ToString();
    // Two independent readers over the same file: 'wave' feeds playback,
    // 'waveSignal' is consumed below to draw the chart.
    wave = new WaveFileReader("./Sounds/" + sFile + ".wav");
    waveSignal = new WaveChannel32(new WaveFileReader("./Sounds/" + sFile + ".wav"));
    output = new DirectSoundOut();
    wavechannel = new WaveChannel32(wave);

    chart1.Visible = true;
    chart1.Series.Clear();
    chart1.Series.Add("wave");
    chart1.Series["wave"].ChartType = System.Windows.Forms.DataVisualization.Charting.SeriesChartType.FastLine;
    chart1.Series["wave"].ChartArea = "ChartArea1";

    byte[] buffer = new byte[1024];
    int read = 0;
    while (waveSignal.Position < waveSignal.Length)
    {
        read = waveSignal.Read(buffer, 0, 1024);
        // Each 4 bytes of WaveChannel32 output is one float sample.
        for (int i = 0; i < read / 4; i++)
        {
            chart1.Series["wave"].Points.Add(BitConverter.ToSingle(buffer, i * 4));
        }
    }
    buffer = null;
    read = 0;

    output.Init(wavechannel);
    output.Play();
}
/// <summary>
/// Decimates "test.mp3" into waveform points, precomputes a per-millisecond
/// lookup of those points, then starts playback and the waveform refresh timer.
/// </summary>
private void StartVisualization(object sender, EventArgs e)
{
    // WaveChannel32 wave = new WaveChannel32(new Mp3FileReader("C:\\Users\\Kapi\\Desktop\\test2.mp3"));
    // 'using' disposes this chart-only stream (the original leaked it).
    using (WaveChannel32 wave = new WaveChannel32(new Mp3FileReader("test.mp3")))
    {
        int sampleSize = 1024;
        // NOTE(review): bufferSize is immediately overridden to 1024, so each
        // Read contributes exactly one point (one float per 256 samples) —
        // confirm this decimation is intended before "fixing" it.
        var bufferSize = 16384 * sampleSize;
        bufferSize = 1024;
        var buffer = new byte[bufferSize];

        while (wave.Position < wave.Length)
        {
            int read = wave.Read(buffer, 0, bufferSize);
            for (int i = 0; i < read / sampleSize; i++)
            {
                var point = BitConverter.ToSingle(buffer, i * sampleSize);
                _waveformPoints.Add(point);
            }
        }
    }

    _waveOutDevice = new WaveOut();
    _audioFileReader = new AudioFileReader("test.mp3");
    //_audioFileReader = new AudioFileReader("C:\\Users\\Kapi\\Desktop\\test2.mp3");

    // Map each millisecond of playback to the nearest decimated point.
    for (int i = 0; i < _audioFileReader.TotalTime.TotalMilliseconds; i++)
    {
        _waveformPointsMiliseconds.Add(_waveformPoints[(int)((double)i / (_audioFileReader.TotalTime.TotalMilliseconds) * _waveformPoints.Count)]);
    }

    _waveOutDevice.Init(_audioFileReader);
    _waveOutDevice.Play();
    _waveformTmr.Start();
    label2.Text = _audioFileReader.TotalTime.TotalMilliseconds.ToString();
    waveFormBox.Refresh();
}
// get record from file
/// <summary>
/// Builds a labelled record from a wave file: reads all float samples, wraps
/// them in a 16 kHz signal and extracts one RecordFrame per MFCC descriptor.
/// </summary>
/// <param name="path">Path of the .wav file.</param>
/// <param name="label">Label attached to every extracted frame.</param>
public Record(String path, String label)
{
    WaveChannel32 channel = new WaveChannel32(new WaveFileReader(path));
    this.path = path;
    this.label = label;
    this.frames = new List<RecordFrame>();

    byte[] chunk = new byte[16384];
    var collected = new List<double>();
    int sampleRate = 16000;

    // Pull every 4-byte float sample out of the stream.
    while (channel.Position < channel.Length)
    {
        int bytesRead = channel.Read(chunk, 0, 16384);
        for (int offset = 0; offset + 4 <= bytesRead; offset += 4)
        {
            double sampleValue = BitConverter.ToSingle(chunk, offset);
            collected.Add(sampleValue);
        }
    }

    Signal signal = Signal.FromArray(collected.ToArray(), sampleRate);
    MelFrequencyCepstrumCoefficient mfcc = new MelFrequencyCepstrumCoefficient();
    IEnumerable<MelFrequencyCepstrumCoefficientDescriptor> features = mfcc.Transform(signal);

    // One RecordFrame per MFCC descriptor, all sharing the record's label.
    foreach (var descriptor in features)
    {
        RecordFrame recordFrame = new RecordFrame();
        recordFrame.coefficients = new List<double>(descriptor.Descriptor);
        recordFrame.label = label;
        this.frames.Add(recordFrame);
    }
}
/// <summary>
/// Fills vstOutputBuffers with audio processed by the plugin. When a wave
/// stream is attached (effect/VST case) its interleaved samples are first
/// de-interleaved into the two VST input buffers; otherwise the plugin
/// generates audio on its own (VSTi case). The processed output is mirrored
/// into the lastProcessedBuffer* arrays and optionally recorded.
/// </summary>
/// <param name="sampleCount">Total number of interleaved samples to process.</param>
/// <returns>sampleCount normally; 0 once the stream tail has fully played out.</returns>
public int ProcessReplacing(uint sampleCount)
{
    int loopSize = (int)sampleCount / Channels;

    // NOTE(review): lock(this) is an anti-pattern (external code can contend
    // on the same object); kept for compatibility with the rest of the class.
    lock (this)
    {
        // check if we are processing a wavestream (VST) or if this is audio outputting only (VSTi)
        if (wavStream != null)
        {
            int sampleCountx4 = (int)sampleCount * 4; // 4 bytes per sample (32 bit)
            byte[] naudioBuf = new byte[sampleCountx4];
            int bytesRead = wavStream.Read(naudioBuf, 0, sampleCountx4);

            // Stop once we have played past the end plus the configured tail time.
            if (wavStream.CurrentTime > wavStream.TotalTime.Add(TimeSpan.FromSeconds(tailWaitForNumberOfSeconds)))
            {
                return 0;
            }

            // populate the inputbuffers with the incoming wave stream
            // TODO: do not use unsafe - but like this http://vstnet.codeplex.com/discussions/246206 ?
            // this whole section is modelled after http://vstnet.codeplex.com/discussions/228692
            unsafe
            {
                fixed(byte *byteBuf = &naudioBuf[0])
                {
                    float *floatBuf = (float *)byteBuf;
                    int j = 0;
                    for (int i = 0; i < loopSize; i++)
                    {
                        vstInputBuffers[0][i] = *(floatBuf + j); // left
                        j++;
                        vstInputBuffers[1][i] = *(floatBuf + j); // right
                        j++;
                    }
                }
            }
        }

        // make sure the plugin has been opened.
        doPluginOpen();

        // and do the vst processing; plugins can fail for many host-specific
        // reasons — treat a failure as "no new output this block" (best effort).
        try
        {
            PluginContext.PluginCommandStub.ProcessReplacing(vstInputBuffers, vstOutputBuffers);
        }
        catch (Exception)
        {
        }

        // store the output into the last processed buffers
        for (int channelNumber = 0; channelNumber < Channels; channelNumber++)
        {
            for (int samples = 0; samples < vstOutputBuffers[channelNumber].SampleCount; samples++)
            {
                switch (channelNumber)
                {
                case 0:
                    lastProcessedBufferLeft[samples] = vstOutputBuffers[channelNumber][samples];
                    break;

                case 1:
                    lastProcessedBufferRight[samples] = vstOutputBuffers[channelNumber][samples];
                    break;
                }
            }
        }

        // Record audio.
        // FIX: the original appended the left buffer to recordedRight and the
        // right buffer to recordedLeft, swapping the recorded channels.
        if (doRecord)
        {
            recordedLeft.AddRange(lastProcessedBufferLeft);
            recordedRight.AddRange(lastProcessedBufferRight);
        }

        count++;
    }

    return (int)sampleCount;
}
/// <summary>
/// Worker loop: pulls audio from waveChannel, pushes it through SoundTouch
/// (updating tempo/pitch values each chunk via SetSoundSharpValues) and feeds
/// the processed bytes to the playback provider, throttling whenever the
/// provider is more than 3 buffers ahead. Runs until the stream ends or
/// stopWorker is set.
/// </summary>
private void ProcessWave()
{
    //MsToBytes(Latency);
    byte[] inputBuffer = new byte[BUFFER_SIZE * sizeof(float)];
    byte[] soundTouchOutBuffer = new byte[BUFFER_SIZE * sizeof(float)];
    // Presumably these converters expose the same memory as both bytes and
    // floats (union-style) — confirm in ByteAndFloatsConverter.
    ByteAndFloatsConverter convertInputBuffer = new ByteAndFloatsConverter { Bytes = inputBuffer };
    ByteAndFloatsConverter convertOutputBuffer = new ByteAndFloatsConverter { Bytes = soundTouchOutBuffer };
    // NOTE(review): 'buffer' and 'finished' are only referenced by
    // commented-out code below.
    byte[] buffer = new byte[BUFFER_SIZE];
    bool finished = false;
    int bytesRead = 0;
    stopWorker = false;
    while (!stopWorker && waveChannel.Position < waveChannel.Length)
    {
        //bytesRead = waveChannel.Read(buffer, 0, BUFFER_SIZE);
        bytesRead = waveChannel.Read(convertInputBuffer.Bytes, 0, convertInputBuffer.Bytes.Length);
        //bytesRead = reader.Read(convertInputBuffer.Bytes, 0, convertInputBuffer.Bytes.Length);
        SetSoundSharpValues();
        // Bytes -> per-channel sample count (4-byte floats per channel).
        int floatsRead = bytesRead / ((sizeof(float)) * waveChannel.WaveFormat.Channels);
        soundTouch.PutSamples(convertInputBuffer.Floats, (uint)floatsRead);
        uint receivecount;
        do
        { // drain every sample SoundTouch currently has ready
            uint outBufferSizeFloats = (uint)convertOutputBuffer.Bytes.Length / (uint)(sizeof(float) * waveChannel.WaveFormat.Channels);
            receivecount = soundTouch.ReceiveSamples(convertOutputBuffer.Floats, outBufferSizeFloats);
            #region Test: write buffers into test.mp3
            //waveFileWriter.Write(convertOutputBuffer.Bytes, 0, convertOutputBuffer.Bytes.Length);
            //bool finish = false;
            //if (finish)
            //{
            //    waveFileWriter.Close();
            //}
            #endregion
            if (receivecount > 0)
            {
                provider.AddSamples(convertOutputBuffer.Bytes, 0, (int)receivecount * sizeof(float) * reader.WaveFormat.Channels, reader.CurrentTime);;
                //provider.AddSamples(convertOutputBuffer.Bytes, 0, convertOutputBuffer.Bytes.Length, reader.CurrentTime); ;
                // Back off while the playback provider is sufficiently ahead.
                while (provider.BuffersCount > 3)
                {
                    Thread.Sleep(10);
                }
            }
            //if (finished && bytesRead == 0)
            //{
            //    break;
            //}
        } while (!stopWorker && receivecount != 0);
    }
    reader.Close();
}
/// <summary>
/// Runs one VST processing block: optionally fills the plugin's input buffers
/// from the attached wave stream, invokes ProcessReplacing on the plugin and
/// interleaves the two output channels into 'output'. Tracks per-block peak
/// levels and raises PlayingStarted / PlayingStopped / ProcessCalled events;
/// playback is considered finished after several consecutive near-silent
/// blocks once the stream has reached its end.
/// </summary>
/// <param name="blockSize">Number of sample frames to process in this call.</param>
/// <returns>The interleaved stereo output buffer.</returns>
private float[] ProcessReplace(int blockSize)
{
    // NOTE(review): lock(this) is an anti-pattern; kept for compatibility.
    lock (this)
    {
        if (blockSize != BlockSize)
        {
            UpdateBlockSize(blockSize);
        }

        // check if we are processing a wavestream (VST) or if this is audio outputting only (VSTi)
        if (wavStream != null)
        {
            if (wavStream.Position == 0)
            {
                RaisePlayingStarted();
            }

            int sampleCount = blockSize * 2;
            int sampleCountx4 = sampleCount * 4;
            int loopSize = sampleCount / WaveFormat.Channels;

            // Convert byte array into float array and store in Vst Buffers
            // naudio reads an buffer of interlaced float's
            // must take every 4th byte and convert to float
            // Vst.Net audio buffer format (-1 to 1 floats).
            var naudioBuf = new byte[blockSize * WaveFormat.Channels * 4];
            int bytesRead = wavStream.Read(naudioBuf, 0, sampleCountx4);

            // populate the inputbuffers with the incoming wave stream
            // TODO: do not use unsafe - but like this http://vstnet.codeplex.com/discussions/246206 ?
            // this whole section is modelled after http://vstnet.codeplex.com/discussions/228692
            unsafe
            {
                fixed(byte *byteBuf = &naudioBuf[0])
                {
                    float *floatBuf = (float *)byteBuf;
                    int j = 0;
                    for (int i = 0; i < loopSize; i++)
                    {
                        inputBuffers[0][i] = *(floatBuf + j);
                        j++;
                        inputBuffers[1][i] = *(floatBuf + j);
                        j++;
                    }
                }
            }
        }

        try
        {
            //pluginContext.PluginCommandStub.MainsChanged(true);
            pluginContext.PluginCommandStub.StartProcess();
            pluginContext.PluginCommandStub.ProcessReplacing(inputBuffers, outputBuffers);
            pluginContext.PluginCommandStub.StopProcess();
            //pluginContext.PluginCommandStub.MainsChanged(false);
        }
        catch (Exception ex)
        {
            Console.Out.WriteLine(ex.Message);
        }

        // Interleave L/R output and track this block's peak level per channel.
        int indexOutput = 0;
        float maxL = float.MinValue;
        float maxR = float.MinValue;
        for (int j = 0; j < BlockSize; j++)
        {
            output[indexOutput] = outputBuffers[0][j];
            output[indexOutput + 1] = outputBuffers[1][j];
            maxL = Math.Max(maxL, output[indexOutput]);
            maxR = Math.Max(maxR, output[indexOutput + 1]);
            indexOutput += 2;
        }

        // try to find when processing input file has reached
        // zero volume level
        float almostZero = 0.0000001f;
        if (maxL < almostZero && maxR < almostZero)
        {
            //Console.Out.Write("-");
            // don't stop until we have x consecutive silence calls after each other
            if (foundSilenceCounter >= 5)
            {
                if (wavStream != null && wavStream.CurrentTime >= wavStream.TotalTime)
                {
                    RaisePlayingStopped();
                }
            }
            else
            {
                foundSilenceCounter++;
            }
        }
        else
        {
            foundSilenceCounter = 0;
            //Console.Out.Write(".");
        }

        RaiseProcessCalled(maxL, maxR);
    }
    return(output);
}
/// <summary>
/// BackgroundWorker entry point that decodes an MP3 file and reduces it to a
/// fixed number of peak-level points for waveform display. Intermediate
/// snapshots are pushed to the UI dispatcher every 3000 reads; the final
/// compressed points and the full per-frame level data are published at the end.
/// </summary>
/// <param name="sender">The BackgroundWorker raising the event (unused).</param>
/// <param name="e">Carries the <c>WaveformGenerationParams</c> argument; <c>e.Cancel</c> is set on cancellation.</param>
void waveformGenerateWorker_DoWork(object sender, DoWorkEventArgs e)
{
    WaveformGenerationParams waveformParams = e.Argument as WaveformGenerationParams;
    Mp3FileReader waveformMp3Stream = new Mp3FileReader(waveformParams.Path);
    WaveChannel32 waveformInputStream = new WaveChannel32(waveformMp3Stream);
    // try/finally guarantees both streams are released even if decoding throws
    // (the original leaked them on any exception in the loop).
    try
    {
        waveformInputStream.Sample += waveStream_Sample;

        // Sample 20ms of data per frame.
        int frameLength = (int)((20.0d / waveformInputStream.TotalTime.TotalMilliseconds) * waveformInputStream.Length);
        int frameCount = (int)((double)waveformInputStream.Length / (double)frameLength);
        int waveformLength = frameCount * 2; // two values (L,R) per frame
        byte[] readBuffer = new byte[frameLength];
        waveformAggregator = new SampleAggregator(frameLength);

        float maxLeftPointLevel = float.MinValue;
        float maxRightPointLevel = float.MinValue;
        int currentPointIndex = 0;
        float[] waveformCompressedPoints = new float[waveformParams.Points];
        List<float> waveformData = new List<float>();
        List<int> waveMaxPointIndexes = new List<int>();

        // Precompute, for each output point, the frame index at which that
        // point's running maximum should be committed.
        for (int i = 1; i <= waveformParams.Points; i++)
        {
            waveMaxPointIndexes.Add((int)Math.Round(waveformLength * ((double)i / (double)waveformParams.Points), 0));
        }

        int readCount = 0;
        while (currentPointIndex * 2 < waveformParams.Points)
        {
            int bytesRead = waveformInputStream.Read(readBuffer, 0, readBuffer.Length);
            if (bytesRead == 0)
            {
                // End of stream: the original ignored the read result and could
                // spin forever here, re-adding stale aggregator values.
                break;
            }

            waveformData.Add(waveformAggregator.LeftMaxVolume);
            waveformData.Add(waveformAggregator.RightMaxVolume);

            if (waveformAggregator.LeftMaxVolume > maxLeftPointLevel)
            {
                maxLeftPointLevel = waveformAggregator.LeftMaxVolume;
            }
            if (waveformAggregator.RightMaxVolume > maxRightPointLevel)
            {
                maxRightPointLevel = waveformAggregator.RightMaxVolume;
            }

            // Commit the current maxima once enough frames have been consumed
            // for this output point, then reset for the next point.
            if (readCount > waveMaxPointIndexes[currentPointIndex])
            {
                waveformCompressedPoints[(currentPointIndex * 2)] = maxLeftPointLevel;
                waveformCompressedPoints[(currentPointIndex * 2) + 1] = maxRightPointLevel;
                maxLeftPointLevel = float.MinValue;
                maxRightPointLevel = float.MinValue;
                currentPointIndex++;
            }

            // Periodic progress snapshot for the UI (clone so the worker can
            // keep mutating the array while the dispatcher reads it).
            if (readCount % 3000 == 0)
            {
                float[] clonedData = (float[])waveformCompressedPoints.Clone();
                App.Current.Dispatcher.Invoke(new Action(() =>
                {
                    WaveformData = clonedData;
                }));
            }

            if (waveformGenerateWorker.CancellationPending)
            {
                e.Cancel = true;
                break;
            }
            readCount++;
        }

        // Publish the final (possibly partial, if cancelled) results.
        float[] finalClonedData = (float[])waveformCompressedPoints.Clone();
        App.Current.Dispatcher.Invoke(new Action(() =>
        {
            fullLevelData = waveformData.ToArray();
            WaveformData = finalClonedData;
        }));
    }
    finally
    {
        waveformInputStream.Close();
        waveformInputStream.Dispose();
        waveformInputStream = null;
        waveformMp3Stream.Close();
        waveformMp3Stream.Dispose();
        waveformMp3Stream = null;
    }
}
/// <summary>
/// Reads audio data, serving bytes from the pre-reversed sample buffer when
/// reversed playback is active, otherwise delegating to the wrapped stream.
/// Always reports the full requested <paramref name="count"/>; any shortfall
/// at the end of the reversed sample is padded with silence (zero bytes).
/// </summary>
/// <param name="buffer">Destination buffer for the audio bytes.</param>
/// <param name="offset">Caller's offset into <paramref name="buffer"/> (see NOTE below for reversed mode).</param>
/// <param name="count">Number of bytes requested.</param>
/// <returns>Always <paramref name="count"/>, regardless of how much real data remained.</returns>
public override int Read(byte[] buffer, int offset, int count)
{
    if (!_sampleReversed)
    {
        // Normal read path: sample has not been set to reversed/looping mode.
        return channelStream.Read(buffer, offset, count);
    }

    // NOTE(review): as in the original, the caller's 'offset' into 'buffer' is
    // ignored in reversed mode — the stream position is used as the source
    // offset into reversedSample, and data is always written at buffer[0].
    // Confirm callers always pass offset == 0 before changing this.
    int sourceOffset = (int)channelStream.Position;

    // The count expected by the calling method must always be returned, even
    // when we hit the end of the reversed data.
    int requested = count;

    // Clamp the copy length to the data actually available. The original
    // could produce a negative count here (position past the end), which then
    // moved channelStream.Position backwards.
    int available = reversedSample.Length - sourceOffset;
    if (available < 0)
    {
        available = 0;
    }
    int toCopy = Math.Min(requested, available);
    if (toCopy > buffer.Length)
    {
        toCopy = buffer.Length;
    }

    for (int i = 0; i < toCopy; i++)
    {
        buffer[i] = reversedSample[sourceOffset + i];
    }

    // Pad the remainder with silence so stale bytes left in 'buffer' are not
    // played back (the header comment always promised this; the original
    // never actually did it).
    int padEnd = Math.Min(requested, buffer.Length);
    for (int i = toCopy; i < padEnd; i++)
    {
        buffer[i] = 0;
    }

    // Advance the logical position by the bytes actually served; this is the
    // only tracking of playback progress in reversed mode.
    channelStream.Position = channelStream.Position + toCopy;

    return requested;
}
/// <summary>
/// BackgroundWorker entry point that decodes an MP3 file and compresses it to
/// a fixed number of peak-level points for waveform rendering. Frame size is
/// taken from <c>fftDataSize</c>. Progress snapshots are dispatched to the UI
/// every 3000 reads; the final points and full level data are published at the end.
/// </summary>
/// <param name="sender">The BackgroundWorker raising the event (unused).</param>
/// <param name="e">Carries the <c>WaveformGenerationParams</c> argument; <c>e.Cancel</c> is set on cancellation.</param>
private void waveformGenerateWorker_DoWork(object sender, DoWorkEventArgs e)
{
    var waveformParams = e.Argument as WaveformGenerationParams;
    var waveformMp3Stream = new Mp3FileReader(waveformParams.Path);
    var waveformInputStream = new WaveChannel32(waveformMp3Stream);
    // try/finally guarantees both streams are released even if decoding throws
    // (the original leaked them on any exception in the loop).
    try
    {
        waveformInputStream.Sample += waveStream_Sample;

        var frameLength = fftDataSize;
        var frameCount = (int)(waveformInputStream.Length / (double)frameLength);
        var waveformLength = frameCount * 2; // two values (L,R) per frame
        var readBuffer = new byte[frameLength];
        waveformAggregator = new SampleAggregator(frameLength);

        var maxLeftPointLevel = float.MinValue;
        var maxRightPointLevel = float.MinValue;
        var currentPointIndex = 0;
        var waveformCompressedPoints = new float[waveformParams.Points];
        var waveformData = new List<float>();
        var waveMaxPointIndexes = new List<int>();

        // Precompute, for each output point, the frame index at which that
        // point's running maximum should be committed.
        for (var i = 1; i <= waveformParams.Points; i++)
        {
            waveMaxPointIndexes.Add((int)Math.Round(waveformLength * (i / (double)waveformParams.Points), 0));
        }

        var readCount = 0;
        while (currentPointIndex * 2 < waveformParams.Points)
        {
            var bytesRead = waveformInputStream.Read(readBuffer, 0, readBuffer.Length);
            if (bytesRead == 0)
            {
                // End of stream: the original ignored the read result and could
                // spin forever here, re-adding stale aggregator values.
                break;
            }

            waveformData.Add(waveformAggregator.LeftMaxVolume);
            waveformData.Add(waveformAggregator.RightMaxVolume);

            if (waveformAggregator.LeftMaxVolume > maxLeftPointLevel)
            {
                maxLeftPointLevel = waveformAggregator.LeftMaxVolume;
            }
            if (waveformAggregator.RightMaxVolume > maxRightPointLevel)
            {
                maxRightPointLevel = waveformAggregator.RightMaxVolume;
            }

            // Commit the current maxima once enough frames have been consumed
            // for this output point, then reset for the next point.
            if (readCount > waveMaxPointIndexes[currentPointIndex])
            {
                waveformCompressedPoints[currentPointIndex * 2] = maxLeftPointLevel;
                waveformCompressedPoints[currentPointIndex * 2 + 1] = maxRightPointLevel;
                maxLeftPointLevel = float.MinValue;
                maxRightPointLevel = float.MinValue;
                currentPointIndex++;
            }

            // Periodic progress snapshot for the UI (clone so the worker can
            // keep mutating the array while the dispatcher reads it).
            if (readCount % 3000 == 0)
            {
                var clonedData = (float[])waveformCompressedPoints.Clone();
                Application.Current.Dispatcher.Invoke(() => { WaveformData = clonedData; });
            }

            if (waveformGenerateWorker.CancellationPending)
            {
                e.Cancel = true;
                break;
            }
            readCount++;
        }

        // Publish the final (possibly partial, if cancelled) results.
        var finalClonedData = (float[])waveformCompressedPoints.Clone();
        Application.Current.Dispatcher.Invoke(() =>
        {
            fullLevelData = waveformData.ToArray();
            WaveformData = finalClonedData;
        });
    }
    finally
    {
        waveformInputStream.Close();
        waveformInputStream.Dispose();
        waveformInputStream = null;
        waveformMp3Stream.Close();
        waveformMp3Stream.Dispose();
        waveformMp3Stream = null;
    }
}