private void CreateOptimizedArray()
{
    // Builds mOptimizedArray, a min/max "peak file" of the source used for fast
    // waveform drawing, and simultaneously dumps the raw samples to a .raw file
    // (path recorded in mRawFileName). Progress is pushed to the UI progress bar.
    long offset = 0;
    long numSamples = mDrawSource.Length;
    int y = 0;

    // Slot N holds minVal, slot N+1 holds maxVal, so allocate double the block count.
    mOptimizedArray = new float[((numSamples / mThresholdSample) + 1) * 2];
    float[] data = new float[mThresholdSample];
    int samplesRead = 1;
    mDrawSource.Position = 0;

    string rawFilePath = System.Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments) + @"\SoundFactory\";
    if (!Directory.Exists(rawFilePath))
    {
        Directory.CreateDirectory(rawFilePath);
    }
    mRawFileName = rawFilePath + Guid.NewGuid().ToString() + ".raw";

    // BUGFIX: the original only called rawFile.Close() on the success path and never
    // disposed the BinaryWriter, leaking both on any exception. 'using' guarantees
    // flush + close on every exit path.
    using (FileStream rawFile = new FileStream(mRawFileName, FileMode.Create, FileAccess.ReadWrite))
    using (BinaryWriter bin = new BinaryWriter(rawFile))
    {
        while (offset < numSamples && samplesRead > 0)
        {
            samplesRead = mDrawSource.Read(data, 0, mThresholdSample);
            if (samplesRead > 0) // for some files file length is wrong so samplesRead may become 0 even if we did not come to the end of the file
            {
                for (int i = 0; i < samplesRead; i++)
                {
                    bin.Write(data[i]);
                }

                // Find the max & min peaks for this block.
                float maxVal = -1;
                float minVal = 1;
                for (int x = 0; x < samplesRead; x++)
                {
                    maxVal = Math.Max(maxVal, data[x]);
                    minVal = Math.Min(minVal, data[x]);
                }
                mOptimizedArray[y] = minVal;
                mOptimizedArray[y + 1] = maxVal;
                y += 2;
                offset += samplesRead;

                // Only marshal to the UI thread when the visible percentage changes.
                mProgressStatus = (int)(((float)offset / numSamples) * 100);
                if (progress.Value != mProgressStatus)
                {
                    InvokeOnUiThreadIfRequired(() => progress.Value = mProgressStatus);
                }
            }
        }
    }
}
/// <summary>
/// Reads audio from the upstream source while keeping playback in sync with
/// wall-clock time: playback rate is nudged toward the desync-corrected speed,
/// and audio is skipped outright when drift grows too large.
/// </summary>
/// <param name="samples">Destination segment; always accounted as fully read.</param>
/// <returns>True when the session completed (silence written), otherwise the upstream result.</returns>
public bool Read(ArraySegment<float> samples)
{
    // If synchroniser is not enabled just pass the request upstream with no playback rate adjustment
    if (!_enabled)
    {
        PlaybackRate = 1;
        return (_upstream.Read(samples));
    }

    // Start the timer when the first sample is read. All subsequent timing will be based off this.
    if (!_timer.IsRunning)
    {
        _timer.Reset();
        _timer.Start();
    }

    // We always read the amount of requested data, update the count of total data read now
    _totalSamplesRead += samples.Count;

    // Calculate how out of sync playback is (based on actual samples read vs time passed)
    _desync.Update(IdealPlaybackPosition, PlaybackPosition);

    // If playback rate is too fast, slow down immediately (to prevent exhausting the buffer).
    // If playback speed is too slow, ramp up over the next few frames (lerp factor 0.25).
    var corrected = _desync.CorrectedPlaybackSpeed;
    PlaybackRate = corrected < PlaybackRate
        ? corrected
        : Mathf.LerpUnclamped(PlaybackRate, _desync.CorrectedPlaybackSpeed, 0.25f);

    // Skip audio if necessary to resync the audio stream
    int skippedSamples;
    int skippedMilliseconds;
    var complete = Skip(_desync.DesyncMilliseconds, out skippedSamples, out skippedMilliseconds);
    if (skippedSamples > 0)
    {
        // Skipped audio still counts as "read" for the sync bookkeeping.
        _totalSamplesRead += skippedSamples;
        _desync.Skip(skippedMilliseconds);
    }

    // If skipping completed the session return silence
    if (complete)
    {
        // ReSharper disable once AssignNullToNotNullAttribute
        Array.Clear(samples.Array, samples.Offset, samples.Count);
        return (true);
    }

    // Read from upstream
    return (_upstream.Read(samples));
}
/// <summary>
/// Reads audio while compensating for clock drift. When the pipeline buffer runs
/// dry, the sample counter is deliberately over-counted so the desync logic slows
/// playback and lets the buffer refill.
/// </summary>
/// <param name="samples">Destination segment; always accounted as fully read.</param>
/// <returns>True when the session completed (silence written), otherwise the upstream result.</returns>
public bool Read(ArraySegment<float> samples)
{
    // Start the timer when the first sample is read. All subsequent timing will be based off this.
    if (!_timer.IsRunning)
    {
        _timer.Reset();
        _timer.Start();
    }

    // We always read the amount of requested data, update the count of total data read now
    _totalSamplesRead += samples.Count;

    // If the buffer is too small slightly increase the count of samples read (by 0.1ms). Desync compensation will think it is ahead of
    // where it should be and will slow down playback, which will cause the buffer to grow.
    if (_pipeline.BufferCount < 1)
    {
        _totalSamplesRead += WaveFormat.SampleRate / 10000;
    }

    // Calculate how out of sync playback is (based on actual samples read vs time passed)
    _desync.Update(IdealPlaybackPosition, PlaybackPosition);

    // If playback rate is too fast, slow down immediately (to prevent exhausting the buffer).
    // If playback speed is too slow, ramp up over the next few frames (lerp factor 0.25).
    var corrected = _desync.CorrectedPlaybackSpeed;
    PlaybackRate = corrected < PlaybackRate
        ? corrected
        : Mathf.LerpUnclamped(PlaybackRate, _desync.CorrectedPlaybackSpeed, 0.25f);

    // Skip audio if necessary to resync the audio stream
    int skippedSamples;
    int skippedMilliseconds;
    var complete = Skip(_desync.DesyncMilliseconds, out skippedSamples, out skippedMilliseconds);
    if (skippedSamples > 0)
    {
        // Skipped audio still counts as "read" for the sync bookkeeping.
        _totalSamplesRead += skippedSamples;
        _desync.Skip(skippedMilliseconds);
    }

    // If skipping completed the session return silence
    if (complete)
    {
        // ReSharper disable once AssignNullToNotNullAttribute
        Array.Clear(samples.Array, samples.Offset, samples.Count);
        return (true);
    }

    // Read from upstream
    return (_upstream.Read(samples));
}
/// <summary>Reads processed data into the provided buffer and notifies listeners.</summary>
/// <param name="buffer">Destination buffer.</param>
/// <param name="offset">Index of the first value to write.</param>
/// <param name="count">Number of values requested.</param>
/// <returns>The number of values actually read.</returns>
public int Read(float[] buffer, int offset, int count)
{
    // Pull from the resampler first, then raise the event describing exactly
    // the region of 'buffer' that now holds fresh samples.
    var samplesRead = resampler.Read(buffer, offset, count);
    var handler = SamplesAvailable;
    handler?.Invoke(this, new SamplesAvailableEventArgs(buffer, offset, samplesRead));
    return samplesRead;
}
/// <summary>
/// Reads from the wrapped source and routes per-channel samples into the block
/// queues according to <c>Mode</c> (Left / Right / Merge), raising the block-read
/// event whenever a full block has accumulated.
/// </summary>
public int Read(float[] buffer, int offset, int count)
{
    lock (_lockObj)
    {
        int read = _source.Read(buffer, offset, count);

        // Walk the interleaved samples one frame at a time.
        for (int n = 0; n < read; n += WaveFormat.Channels)
        {
            if (WaveFormat.Channels > 1)
            {
                // Right channel (buffer[n + 1]) goes to the secondary queue
                // unless we're in Left or Merge mode.
                if (Mode != SampleDataProviderMode.Left && Mode != SampleDataProviderMode.Merge)
                {
                    _sampleBuffer1.Enqueue(buffer[n + 1]);
                }
                else if (Mode == SampleDataProviderMode.Merge)
                {
                    // Merge: average left and right into the primary queue.
                    _sampleBuffer.Enqueue((buffer[n] + buffer[n + 1]) / 2f);
                }
            }

            // Left channel (or mono sample) goes to the primary queue unless in
            // Right or Merge mode.
            if (Mode != SampleDataProviderMode.Right && Mode != SampleDataProviderMode.Merge)
            {
                _sampleBuffer.Enqueue(buffer[n]);
            }

            // NOTE(review): mixed comparisons ('>=' for the primary queue, '>'
            // for the secondary) are preserved from the original — confirm
            // whether both were meant to be '>='.
            if (_sampleBuffer.Count >= BlockSize || _sampleBuffer1.Count > BlockSize)
            {
                RaiseBlockRead();
                // NOTE(review): self-assignment — presumably triggers side
                // effects inside the Mode property setter; verify this is
                // intentional and not a leftover.
                Mode = Mode;
            }
        }
        return (read);
    }
}
/// <summary>
/// Configures internal state from the source format, then pulls the entire
/// stream into memory and hands it to <c>LoadAudioData</c>.
/// </summary>
/// <param name="sampleSource">Source to load; fully consumed by this call.</param>
private void Initialise(ISampleSource sampleSource)
{
    Initialise(sampleSource.WaveFormat.SampleRate,
               (int)sampleSource.Length / sampleSource.WaveFormat.Channels,
               sampleSource.WaveFormat.Channels);

    float[] buffer = new float[sampleSource.Length];

    // BUGFIX: a single Read call is allowed to return fewer samples than
    // requested; the original issued one Read and could silently truncate the
    // audio. Loop until the buffer is full or the source is exhausted.
    int total = 0;
    int read;
    while (total < buffer.Length &&
           (read = sampleSource.Read(buffer, total, buffer.Length - total)) > 0)
    {
        total += read;
    }

    LoadAudioData(buffer);
}
// Helper for State.Looking.
// Builds the capture pipeline (SoundInSource -> sample source ->
// SingleBlockNotificationStream), hooks the per-sample callback, and starts
// the audio capture device.
void StartCapture()
{
    Debug.Assert(m_State == State.Looking);
    Debug.Assert(m_AudioCapture != null);

    // TODO: This starts as a WaveSource (raw bytes), converts to floats
    // so we can notify once for each sample.
    // The SingleBlockNotificationStream is very garbagey; we should use our own
    // wrapper that grabs all the samples read and pushes them into m_HotValues
    // en masse instead of one-at-a-time.
    var soundInSource = new SoundInSource(m_AudioCapture);
    var sampleSource = soundInSource.ToSampleSource();
    var singleBlockNotificationStream = new SingleBlockNotificationStream(sampleSource);
    m_FinalSource = singleBlockNotificationStream;

    // Consume and discard any bytes when they come in. We do this for
    // its side effects (firing the SingleBlockNotificationStream event).
    // buffer is closed-over by the lambda.
    // NOTE(review): BytesPerSecond / 4 presumably sizes the buffer for one
    // second of 4-byte float samples — confirm against the capture format.
    float[] buffer = new float[m_FinalSource.WaveFormat.BytesPerSecond / 4];
    soundInSource.DataAvailable += (s, e) =>
    {
        // Drain everything currently available; each Read fires the
        // single-block event for every sample it passes through.
        int read;
        do
        {
            read = m_FinalSource.Read(buffer, 0, buffer.Length);
        } while (read > 0);
    };

    singleBlockNotificationStream.SingleBlockRead += SingleBlockNotificationStreamOnSingleBlockRead;
    m_AudioCapture.Start();
}
/// <summary>Reads from the upstream source and runs the clipper over the result in place.</summary>
/// <param name="samples">Segment to fill and clip.</param>
/// <returns>The upstream read result.</returns>
public bool Read(ArraySegment<float> samples)
{
    // Read first, clip second: the clipper operates on whatever upstream wrote.
    bool complete = _upstream.Read(samples);
    _clipper.Clip(samples);
    return complete;
}
/// <summary>
/// Renders the visualization to a video file: alternates between writing rendered
/// video frames and feeding audio frames to the writer until the audio source is
/// exhausted.
/// </summary>
/// <param name="source">Audio to visualize; also written to the output's audio track.</param>
private void GenerateVideo(ISampleSource source)
{
    Console.WriteLine("Initializing video writer...");
    using (var writer = new VideoWriter(options.OutputFile.FullName))
    {
        // Fixed 1080p output; fps and audio rate come from options / the source.
        writer.Width = 1920;
        writer.Height = 1080;
        writer.Fps = options.Fps;
        writer.AudioSampleRate = source.WaveFormat.SampleRate;
        writer.Open();

        double totalFrames = FrameMath.CalculateTotalFrames(source, options);

        Console.WriteLine("Initializing renderer...");
        using (var renderer = new GLRenderer(false))
        {
            var wallpaper = Image.FromFile(options.WallpaperFile.FullName) as Bitmap;
            wallpaperTexture = renderer.LoadTexture(wallpaper);
            effectChain.Initialize(source.ToMono(), options);

            Console.WriteLine("Generating video...");
            float[] sampleBuffer = new float[writer.AudioSamplesPerFrame];
            var frameNumber = 0;
            while (true)
            {
                // writer.WriteVideo indicates which stream is currently starved:
                // render a video frame when true, otherwise push audio.
                if (writer.WriteVideo)
                {
                    renderer.Clear();
                    RenderFrame(renderer, new Frame(frameNumber, TimeSpan.FromSeconds(frameNumber / (double)options.Fps)));
                    var frame = renderer.Snapshot();
                    writer.WriteVideoFrame(frame);

                    // Report progress every 60 video frames.
                    if (frameNumber % 60 == 0)
                    {
                        ProgressHandler?.Invoke(Math.Round(frameNumber / totalFrames * 100.0, 2));
                    }
                    frameNumber++;
                }
                else
                {
                    var read = source.Read(sampleBuffer, 0, sampleBuffer.Length);
                    if (read > 0)
                    {
                        // NOTE(review): on a short final read the tail of
                        // sampleBuffer still holds the previous frame's samples —
                        // confirm WriteAudioFrame tolerates that.
                        writer.WriteAudioFrame(sampleBuffer);
                    }
                    else
                    {
                        // Audio exhausted: the video is done.
                        break;
                    }
                }
            }
            // Final progress report (may be slightly over/under 100%).
            ProgressHandler?.Invoke(Math.Round(frameNumber / totalFrames * 100.0, 2));
        }
    }
}
/// <summary>
/// Flushes any pending transfer data, reads from the output source, and fires the
/// completion handler when the output signals completion.
/// </summary>
public bool Read(ArraySegment<float> samples)
{
    FlushTransferBuffer();

    bool finished = _output.Read(samples);
    if (finished)
    {
        // Tell the owner this session has finished.
        _completionHandler(this);
    }
    return finished;
}
/// <summary>
/// Reads resampled audio from this sample provider.
/// </summary>
/// <param name="buffer">Destination buffer.</param>
/// <param name="offset">Index of the first value to write.</param>
/// <param name="count">Number of values requested (whole frames).</param>
/// <returns>The number of values written to <paramref name="buffer"/>.</returns>
public int Read(float[] buffer, int offset, int count)
{
    int wantedFrames = count / channels;

    // Ask the resampler how many input frames it needs and where to place them.
    float[] feedBuffer;
    int feedOffset;
    int neededFrames = resampler.ResamplePrepare(wantedFrames, outFormat.Channels, out feedBuffer, out feedOffset);

    // Feed source samples directly into the resampler's input buffer.
    int framesFed = source.Read(feedBuffer, feedOffset, neededFrames * channels) / channels;

    // Produce output frames and convert the frame count back to a sample count.
    int framesOut = resampler.ResampleOut(buffer, offset, framesFed, wantedFrames, channels);
    return framesOut * channels;
}
/// <summary>Reads from the audio stream, applies gain, then pitch-shifts the block in place.</summary>
public int Read(float[] buffer, int offset, int count)
{
    int samplesRead = audiostream.Read(buffer, offset, count);

    // Keep a reference to the most recently processed buffer.
    audioThisTime = buffer;

    // Scale only the samples that were actually read.
    for (int i = offset; i < offset + samplesRead; i++)
    {
        buffer[i] *= gain;
    }

    // NOTE(review): pitch shifting covers the full requested 'count', not just
    // 'samplesRead' — preserved from the original implementation.
    PitchProcessing.PitchShift(pitchFactor, offset, count, 2048, 4, audiostream.WaveFormat.SampleRate, buffer);

    return samplesRead;
}
/// <summary>
/// Polls the sample source once, decides whether any sound is present, updates the
/// global "is music playing" state, and spawns the NoSound watcher when silent.
/// </summary>
/// <param name="source">Source to drain; exits the process when null.</param>
static void NewData(ISampleSource source)
{
    newDataIsRunning = true;

    if (source == null)
    {
        WriteLog("ERROR! (newData): Something went wrong (source is null). Exiting.");
        Exit(1);
    }

    int read;
    bool res = false;
    float[] buffer = new float[source.WaveFormat.BytesPerSecond / 1000];

    // Drain the source; 'buffer' ends up holding the last chunk read.
    while ((read = source.Read(buffer, 0, buffer.Length)) > 0)
    {
    }

    // Sum the last chunk's samples; a non-zero magnitude means sound.
    float sum = 0;
    foreach (float item in buffer)
    {
        sum += item;
    }
    sum = Math.Abs(sum);

    // BUGFIX: the original tested 'sum.GetHashCode() > 0', which only worked by
    // accident (a non-negative float's hash is its non-negative bit pattern).
    // Compare the value itself.
    if (sum > 0f)
    {
        res = true;
    }

    bool ret = IsProgramRunning(appName);

    // BUGFIX: the original had two identical branches (music playing / not
    // playing, both silent, watcher not running) that started the same NoSound
    // thread — merged into one, and short-circuiting '&&' used instead of '&'.
    if (!res && !noSoundIsRunning)
    {
        Thread noSound = new Thread(NoSound);
        noSound.IsBackground = true;
        noSound.Start();
    }
    else if (!isMusicPlaying && res)
    {
        WriteLog("(newData): We have sound! Player is running: " + ret.ToString());
    }

    isMusicPlaying = res;
    Thread.Sleep(100);
}
/// <summary>
/// Reads from the source, resampling to the output format's rate when the rates
/// differ. The resampler is created lazily and rebuilt if the target rate changes.
/// </summary>
public bool Read(ArraySegment<float> samples)
{
    var sourceFormat = _source.WaveFormat;
    var targetFormat = _outputFormat;

    // Fast path: identical rates need no resampling at all.
    if (targetFormat.SampleRate == sourceFormat.SampleRate)
        return _source.Read(samples);

    // (Re)create the resampler if it doesn't exist yet or the target rate changed.
    if (_resampler == null || targetFormat.SampleRate != (int)_resampler.OutputSampleRate)
    {
        Log.Debug("Initializing resampler to resample {0}Hz source to {1}Hz output", sourceFormat.SampleRate, targetFormat.SampleRate);

        _resampler = new WdlResampler();
        _resampler.SetMode(true, 2, false);
        _resampler.SetFilterParms();
        _resampler.SetFeedMode(false); // output driven
        _resampler.SetRates(sourceFormat.SampleRate, targetFormat.SampleRate);
    }

    var channelCount = sourceFormat.Channels;

    // Ask the resampler how many input samples it needs and where to write them.
    float[] feedBuffer;
    int feedOffset;
    var framesWanted = samples.Count / channelCount;
    var framesNeeded = _resampler.ResamplePrepare(framesWanted, channelCount, out feedBuffer, out feedOffset);
    var feedSegment = new ArraySegment<float>(feedBuffer, feedOffset, framesNeeded * channelCount);

    // Pull source audio straight into the resampler's input buffer.
    var complete = _source.Read(feedSegment);

    // Produce the resampled audio into the caller's segment.
    Log.Trace("Resampling {0}Hz -> {1}Hz", sourceFormat.SampleRate, targetFormat.SampleRate);
    _resampler.ResampleOut(samples.Array, samples.Offset, framesNeeded, framesWanted, channelCount);

    return complete;
}
// Fills 'data' with decoded samples, jumping back to _loopBegin when the loop end
// is reached so playback loops seamlessly.
private void OnReadAudio(float[] data)
{
    if (_decoder == null)
    {
        throw new ObjectDisposedException("this", "This resource has already disposed.");
    }

    // NOTE(review): due to operator precedence this computes
    // _loopEnd - (Position / Channels), i.e. it treats _loopEnd as a per-channel
    // frame index but compares the result against data.Length (raw samples) —
    // confirm the units are intentional.
    var remainedLength = _loopEnd - _decoder.Position / _decoder.WaveFormat.Channels;
    if (remainedLength < data.Length)
    {
        // Near the loop end: read the tail, seek back to the loop start, then
        // fill the remainder of 'data' from there.
        var readLength = remainedLength == 0 ? 0 : _decoder.Read(data, 0, (int)remainedLength);
        _decoder.Position = _loopBegin;
        _decoder.Read(data, readLength, data.Length - readLength);
    }
    else
    {
        // Plenty of audio left before the loop point: decode into a scratch
        // buffer and copy across.
        var buffer = new float[data.Length];
        _decoder.Read(buffer, 0, buffer.Length);
        buffer.CopyTo(data, 0);
    }
}
/// <summary>
/// Reads samples from the wrapped source after validating that the offset and
/// count describe whole frames (multiples of the channel count).
/// </summary>
/// <exception cref="ArgumentOutOfRangeException">
/// Thrown when <paramref name="offset"/> or <paramref name="count"/> is not a
/// multiple of the channel count.
/// </exception>
public virtual int Read(float[] buffer, int offset, int count)
{
    // Guard clauses: reject unaligned reads before touching the source.
    if (offset % WaveFormat.Channels != 0)
        throw new ArgumentOutOfRangeException("offset");

    if (count % WaveFormat.Channels != 0)
        throw new ArgumentOutOfRangeException("count");

    return _source.Read(buffer, offset, count);
}
/// <summary>
/// Reads from the wrapped source and resamples its rate to this provider's rate
/// via src_simple_plain. When the source is exhausted, emits up to two fading
/// samples derived from the last output value so playback ends without a click.
/// </summary>
public int Read(float[] buffer, int offset, int count)
{
    // BUGFIX: the original dereferenced 'source' (source.WaveFormat) before its
    // null check, so the guard could never fire. Check first.
    if (source == null)
    {
        return 0;
    }

    int sourceSampleRate = source.WaveFormat.SampleRate;
    int mySampleRate = WaveFormat.SampleRate;

    // How many source-rate samples correspond to the requested output count.
    int numTheirSamples = (int)Mathf.Floor((count * (float)sourceSampleRate / mySampleRate));

    int res = 100;
    try
    {
        res = source.Read(tempBuffer1, 0, numTheirSamples);
        if (res == 0)
        {
            // Source exhausted: synthesize a short fade from the last sample.
            if (count == 0)
            {
                return 0;
            }
            if (count == 1)
            {
                buffer[offset] = lastThing;
                return 1;
            }
            if (count >= 2)
            {
                buffer[offset] = lastThing * 0.8f;
                buffer[offset + 1] = lastThing * 0.6f;
                return 2;
            }
        }

        int numOurSamples = (int)Mathf.Round((res * (float)mySampleRate / sourceSampleRate));
        res = src_simple_plain(tempBuffer1, tempBuffer2, res, numOurSamples, (float)mySampleRate / sourceSampleRate, quality, source.WaveFormat.Channels);
        if (res > count)
        {
            res = count;
        }

        // Copy the resampled floats (4 bytes each) into the caller's buffer.
        Buffer.BlockCopy(tempBuffer2, 0, buffer, offset, res * 4);
        lastThing = buffer[offset + res - 1];
    }
    catch (Exception e)
    {
        Debug.Log("failed read: " + e.Message);
    }
    return Mathf.Max(res, 2);
}
/// <summary>
/// Reads tempo/pitch-processed samples via SoundTouch: pulls raw audio from the
/// source, feeds it to SoundTouch, and receives processed samples into 'buffer'.
/// </summary>
public override int Read(float[] buffer, int offset, int count)
{
    lock (lockObject)
    {
        // A pending seek invalidates anything buffered inside SoundTouch.
        if (_seekRequested)
        {
            _soundTouch.Clear();
            _seekRequested = false;
        }

        var samplesRead = 0;
        var endOfSource = false;
        while (samplesRead < count)
        {
            // Refill SoundTouch whenever it has nothing buffered.
            if (_soundTouch.NumberOfSamples() == 0)
            {
                var readFromSource = _sampleSource.Read(_sourceReadBuffer, 0, _sourceReadBuffer.Length);

                // NOTE(review): returning here may discard samples still
                // buffered inside SoundTouch once the source position reaches
                // its length — confirm this early-out is intentional.
                if (_sampleSource.Position >= _sampleSource.Length)
                {
                    return (samplesRead);
                }
                if (readFromSource == 0)
                {
                    // NOTE(review): returns 0 even when samplesRead > 0, and
                    // flushes right before returning — verify both behaviors.
                    endOfSource = true;
                    _soundTouch.Flush();
                    return (0);
                }
                // PutSamples takes a frame count, hence the channel division.
                _soundTouch.PutSamples(_sourceReadBuffer, readFromSource / _sampleSource.WaveFormat.Channels);
            }

            // Pull as many processed frames as still fit in the caller's request.
            var desiredSampleFrames = (count - samplesRead) / _sampleSource.WaveFormat.Channels;
            var received = _soundTouch.ReceiveSamples(_soundTouchReadBuffer, desiredSampleFrames) * _sampleSource.WaveFormat.Channels;
            for (int n = 0; n < received; n++)
            {
                buffer[offset + samplesRead++] = _soundTouchReadBuffer[n];
            }
            if (received == 0 && endOfSource)
            {
                break;
            }
        }
        return (samplesRead);
    }
}
/// <summary>
/// Loads a song file ("chargerChanson"), converts it to mono, and returns the
/// sample rate plus the samples as doubles.
/// </summary>
/// <param name="chemin">Path of the audio file to decode.</param>
/// <param name="Fe">Receives the sample rate in Hz.</param>
/// <param name="res">Receives the mono samples.</param>
public static void chargerChanson(string chemin, out double Fe, out double[] res)
{
    IWaveSource source = CodecFactory.Instance.GetCodec(chemin);
    ISampleSource signal = source.ToSampleSource();
    Fe = source.WaveFormat.SampleRate;
    signal = signal.ToMono();

    int size = (int)signal.Length;
    float[] valeurs = new float[size];

    // BUGFIX: a single Read call may return fewer samples than requested; the
    // original issued one Read and could leave the tail of 'valeurs' as zeros.
    // Loop until the buffer is full or the source is exhausted.
    int total = 0;
    int read;
    while (total < size && (read = signal.Read(valeurs, total, size - total)) > 0)
    {
        total += read;
    }

    // Widen to doubles for the downstream DSP code.
    res = new double[size];
    for (int i = 0; i < size; i++)
    {
        res[i] = (double)valeurs[i];
    }
}
/// <summary>
/// Reads from the source, applies dB gain (hard-clamped to [-1, 1]) and
/// optionally pitch-shifts the block in place.
/// </summary>
public int Read(float[] buffer, int offset, int count)
{
    // Convert decibels to a linear amplitude factor.
    float amplitude = (float)(Math.Pow(10.0, GainDB / 20.0));

    int samplesRead = mSource.Read(buffer, offset, count);

    if (amplitude != 1.0f)
    {
        int end = offset + samplesRead;
        for (int i = offset; i < end; i++)
        {
            float amplified = buffer[i] * amplitude;
            // Clamp to the valid float-sample range.
            buffer[i] = Math.Max(Math.Min(amplified, 1), -1);
        }
    }

    if (PitchShift != 1.0f)
    {
        PitchShifter.PitchShift(PitchShift, offset, count, 2048, 4, mSource.WaveFormat.SampleRate, buffer);
    }

    return samplesRead;
}
//##########################################################################################################################################################################################################
/// <summary>
/// Read the WAV file and split the samples to the left and right channel
/// </summary>
private void ReadToList()
{
    if (!File.Exists(Filepath))
    {
        return;
    }

    // BUGFIX: the reader was only disposed on the success path; 'using'
    // guarantees disposal even when a read throws.
    using (WaveFileReader reader = new WaveFileReader(Filepath))
    {
        format = reader.WaveFormat;

        //Length_s = reader.GetLength().TotalSeconds;            // reader.GetLength() contains the FormatChunks too!
        //Length_s = (reader.Length / format.BytesPerSecond);    // reader.Length contains the FormatChunks too!

        // Only the data chunks contribute to the playable length.
        List<CSCore.Codecs.WAV.WaveFileChunk> waveFileDataChunks = reader.Chunks.Where(c => c.GetType() == typeof(DataChunk)).ToList();
        long waveFileDataChunksSizeBytes = waveFileDataChunks.Sum(c => c.ChunkDataSize);
        Length_s = (waveFileDataChunksSizeBytes / format.BytesPerSecond);

        //long _sampleCount = reader.Length / (reader.WaveFormat.BitsPerSample / 8);
        //_sampleCount /= reader.WaveFormat.Channels;       //Each sample contains the values of the right and left channel for (Waveformat.Channels == 2)

        ISampleSource source = WaveToSampleBase.CreateConverter(reader);
        format = source.WaveFormat;

        // One frame per read: sample_buffer holds one value per channel.
        float[] sample_buffer = new float[format.Channels];
        while (source.Read(sample_buffer, 0, sample_buffer.Length) > 0 && source.Position < (waveFileDataChunksSizeBytes / format.Channels))     //At least one byte read
        {
            double time_ms = ((1 / (double)source.WaveFormat.BytesPerSecond) * source.WaveFormat.BytesPerSample * source.Position) * 1000;

            AudioSample sample_left = new AudioSample(sample_buffer[0], time_ms, AudioChannels.LEFT);
            Samples.Add(sample_left);

            if (reader.WaveFormat.Channels == 2)
            {
                AudioSample sample_right = new AudioSample(sample_buffer[1], time_ms, AudioChannels.RIGHT);
                Samples.Add(sample_right);
            }
        }
    }
}
/// <summary>
/// Reads from the source, resampling to the output rate adjusted by the current
/// playback rate. Rates within 1% of normal are played back unadjusted so the
/// resampler isn't reconfigured every frame in the common case.
/// </summary>
public bool Read(ArraySegment<float> samples)
{
    var sourceFormat = _source.WaveFormat;
    var targetFormat = _outputFormat;

    // Configure the rate of the resampler based on the requested playback rate.
    // If rate adjustment is very small (<1%) play back at the base rate.
    var outputRate = (double)targetFormat.SampleRate;
    if (Mathf.Abs(_rate.PlaybackRate - 1) > 0.01f)
        outputRate = targetFormat.SampleRate * (1 / _rate.PlaybackRate);

    // ReSharper disable once CompareOfFloatsByEqualityOperator (justification: we want exact comparison)
    if (outputRate != _resampler.OutputSampleRate)
    {
        Log.Trace("Changing resampler rate to {0}Hz", outputRate);
        _resampler.SetRates(sourceFormat.SampleRate, outputRate);
    }

    var channelCount = sourceFormat.Channels;

    // Ask the resampler how many input samples it needs and where to write them.
    float[] feedBuffer;
    int feedOffset;
    var framesWanted = samples.Count / channelCount;
    var framesNeeded = _resampler.ResamplePrepare(framesWanted, channelCount, out feedBuffer, out feedOffset);
    var feedSegment = new ArraySegment<float>(feedBuffer, feedOffset, framesNeeded * channelCount);

    // Pull source audio straight into the resampler's input buffer.
    var complete = _source.Read(feedSegment);

    // Produce the resampled audio into the caller's segment.
    Log.Trace("Resampling {0}Hz -> {1}Hz", sourceFormat.SampleRate, targetFormat.SampleRate);
    _resampler.ResampleOut(samples.Array, samples.Offset, framesNeeded, framesWanted, channelCount);

    return complete;
}
/// <summary>
/// Fetch data dynamically from the SampleSource: splits [start, start+length)
/// into <paramref name="resolution"/> blocks and returns one peak value per block.
/// </summary>
/// <param name="start">First sample index to read.</param>
/// <param name="length">Number of samples to cover.</param>
/// <param name="resolution">Number of peak values to produce.</param>
/// <returns>Array of peak values, or null for invalid arguments.</returns>
public float[] GetDataRange(int start, int length, int resolution = 2048)
{
    if (start + length > _sampleSource.Length)
    {
        return null;
    }
    if (resolution <= 0)
    {
        return null;
    }
    if (length < resolution)
    {
        resolution = length;
    }

    var blockSize = length / resolution;
    var samples = new float[resolution];

    _sampleSource.Position = start;
    var buffer = new float[blockSize];
    for (var i = 0; i < resolution; i++)
    {
        int read = _sampleSource.Read(buffer, 0, blockSize);
        if (read < buffer.Length)
        {
            // BUGFIX: the original cleared 'samples' here, wiping previously
            // computed peaks. What must be zeroed is the stale tail of 'buffer'
            // before taking the peak of a short read.
            Array.Clear(buffer, read, buffer.Length - read);
        }
        samples[i] = GetPeak(buffer);
    }
    return samples;
}
/// <summary>
/// Detects onsets in the given audio by cutting it into padded slices and
/// analysing the slices in parallel buckets.
/// </summary>
/// <param name="audio">Source audio to analyse.</param>
/// <returns>Onsets ordered by time, thinned by the minimum time delta.</returns>
public List <Onset> Detect(ISampleSource audio)
{
    // Reset accumulated state from any previous run.
    _onsets.Clear();
    _completed = 0;
    _sliceCount = 0;
    _onsets = new List <float>();
    _amplitudes = new List <float>();
    var onsets = new List <Onset>();

    //init detection specific variables
    int sliceSampleSize = (int)Math.Ceiling(_options.SliceLength * audio.WaveFormat.SampleRate); //the size of each slice's sample
    int slicePaddingSize = (int)Math.Ceiling(_options.SlicePaddingLength * audio.WaveFormat.SampleRate);
    _sliceCount = (int)Math.Ceiling((float)audio.Length / audio.WaveFormat.Channels / sliceSampleSize); //the number of slices needed
    var samples = (int)audio.Length / audio.WaveFormat.Channels;

    //init parallel specific variables
    var pOptions = new ParallelOptions();
    if (_options.MaxDegreeOfParallelism != -1)
    {
        pOptions.MaxDegreeOfParallelism = _options.MaxDegreeOfParallelism;
    }
    ParallelLoopState loopState; // NOTE(review): declared but never used.

    // Cut the audio into slices; every slice except the first is left-padded by
    // slicePaddingSize samples, with Delay recording the slice's real start time.
    List <Wav> wavSlices = new List <Wav>();
    for (int i = 0; i < _sliceCount; i++)
    {
        int baseStart = i * sliceSampleSize;
        int adjustedStart = (baseStart - sliceSampleSize > 0) ? baseStart - slicePaddingSize : 0;
        int count = (sliceSampleSize + slicePaddingSize + baseStart > samples) ? samples - adjustedStart : sliceSampleSize + (baseStart - adjustedStart) + slicePaddingSize;
        float delay = (float)adjustedStart / audio.WaveFormat.SampleRate;
        float[] buffer = new float[count * audio.WaveFormat.Channels];
        audio.SetPosition(TimeConverter.SampleSourceTimeConverter.ToTimeSpan(audio.WaveFormat, adjustedStart * audio.WaveFormat.Channels));
        audio.Read(buffer, 0, count * audio.WaveFormat.Channels);
        // NOTE(review): Padding divides two ints (integer division), losing
        // fractional seconds — confirm that is intentional.
        wavSlices.Add(new Wav(buffer, audio.WaveFormat.SampleRate, count, audio.WaveFormat.Channels)
        {
            Delay = delay,
            Padding = ((delay > 0) ? slicePaddingSize : 0) / audio.WaveFormat.SampleRate
        });
    }

    // Process slices in buckets of 5, resetting the shared allocator between
    // buckets to bound memory growth.
    int bucketSize = 5;
    int bucketcount = (int)Math.Ceiling((double)wavSlices.Count / bucketSize);
    MemoryAllocator _allocator = new MemoryAllocator();
    for (int i = 0; i < bucketcount; i++)
    {
        _allocator.Reset();
        int count = bucketSize;
        if ((i + 1) * bucketSize > wavSlices.Count)
        {
            count = wavSlices.Count - i * bucketSize;
        }
        if (count < 0)
        {
            continue;
        }
        List <Wav> parallel = wavSlices.GetRange(i * bucketSize, count);
        var ploopResult = Parallel.ForEach(parallel, pOptions, (w, state) => GetOnsets(w, _allocator));
        if (!ploopResult.IsCompleted)
        {
            throw new Exception();
        }
    }

    // Pair each onset time with its amplitude, then sort chronologically.
    onsets = _onsets.Zip(_amplitudes, (onset, amplitude) => new Onset { OnsetTime = onset, OnsetAmplitude = amplitude }).ToList();
    onsets = onsets.OrderBy(f => f.OnsetTime).ToList();

    // Drop onsets closer than MinimumTimeDelta (ms) to the previous kept onset.
    float prev = 0;
    float combine = 0.03f; // NOTE(review): assigned but never used.
    var ret = new List <Onset>();
    for (int i = 0; i < onsets.Count; i++)
    {
        if (onsets[i].OnsetTime - prev < _options.MinimumTimeDelta / 1000.0f)
        {
            continue;
        }
        prev = onsets[i].OnsetTime;
        ret.Add(onsets[i]);
    }
    return (ret);
}
//public int Read(byte[] buffer, int offset, int count)
//{
//    return _audioSource.Read(buffer, 0, count);
//}

/// <summary>Reads samples from the wrapped audio source.</summary>
/// <param name="buffer">Destination buffer.</param>
/// <param name="offset">Index in <paramref name="buffer"/> at which to start writing.</param>
/// <param name="count">Number of samples to read.</param>
/// <returns>The number of samples actually read.</returns>
public int Read(float[] buffer, int offset, int count)
{
    // BUGFIX: the original passed a hard-coded 0 instead of 'offset', so any
    // caller reading at a non-zero offset had data written to the wrong place.
    return _audioSource.Read(buffer, offset, count);
}
/// <summary>
/// Returns the path of a cached ".levels" file holding one normalized loudness
/// byte per 25 ms frame (40 fps) for the given audio file, generating the cache
/// on first use.
/// </summary>
/// <param name="audioType">Subfolder under the shared files root.</param>
/// <param name="audioFile">Audio file name within that subfolder.</param>
/// <returns>Full path of the levels file.</returns>
public string GetLevelsFromAudioFX(string audioType, string audioFile)
{
    string audioFilename = Path.Combine(Executor.Current.ExpanderSharedFiles, audioType, audioFile);
    string levelsFilename = Path.Combine(Executor.Current.ExpanderSharedFiles, audioType, audioFile + ".levels");

    // Generate only when the cache file is missing.
    if (!File.Exists(levelsFilename))
    {
        using (ISampleSource source = CodecFactory.Instance.GetCodec(audioFilename).ToSampleSource())
        {
            var fftProvider = new FftProvider(source.WaveFormat.Channels, FftSize.Fft1024);

            // One analysis frame per 25 ms (40 fps).
            int millisecondsPerFrame = 1000 / 40;
            long maxBufferLengthInSamples = source.GetRawElements(millisecondsPerFrame);
            long bufferLength = Math.Min(source.Length, maxBufferLengthInSamples);
            float[] buffer = new float[bufferLength];

            int read = 0;
            int totalSamplesRead = 0;
            var fftData = new float[1024];
            var list = new List <float>();
            float highest = 0;
            do
            {
                //determine how many samples to read
                int samplesToRead = (int)Math.Min(source.Length - totalSamplesRead, buffer.Length);
                read = source.Read(buffer, 0, samplesToRead);
                if (read == 0)
                {
                    break;
                }
                totalSamplesRead += read;

                //add read data to the fftProvider
                fftProvider.Add(buffer, read);
                // NOTE(review): GetFftData's return value (whether enough data
                // was buffered) is ignored — fftData may hold stale values on
                // early frames; confirm this is acceptable.
                fftProvider.GetFftData(fftData);

                // Track the loudest bin in the lower half of the spectrum for
                // this frame, and the overall maximum for normalization.
                float highestAmplitude = 0;
                for (int i = 0; i < fftData.Length / 2; i++)
                {
                    if (fftData[i] > highestAmplitude)
                    {
                        highestAmplitude = fftData[i];
                    }
                }
                list.Add(highestAmplitude);
                if (highestAmplitude > highest)
                {
                    highest = highestAmplitude;
                }
            } while (totalSamplesRead < source.Length);

            if (highest > 0)
            {
                // Adjust to equalize
                float adjustment = 1 / highest;
                for (int i = 0; i < list.Count; i++)
                {
                    list[i] *= adjustment;
                }
            }

            // Persist one byte (0-255) per frame.
            using (var fs = File.Create(levelsFilename))
            {
                fs.Write(list.Select(x => (byte)(x * 255)).ToArray(), 0, list.Count);
            }
        }
    }
    return (levelsFilename);
}
/// <summary>Reads samples from the underlying source via a span over the requested buffer region.</summary>
public int Read(float[] buffer, int offset, int count)
{
    // Wrap the requested region of the array and forward to the span overload.
    var destination = new Span<float>(buffer, offset, count);
    return m_source.Read(destination);
}
//private Bitmap DrawWave(Pen pen, int w, int h) //{ // Color defaultColor = pen.Color; // long numSamples = mDrawSource.Length; // int mSamplesPerPixel = (int)(mDrawSource.Length / w); // int mDrawingStartOffset = 0; // int mPrevSamplesPerPixel = mSamplesPerPixel; // Bitmap mBitmap = null; // if (mBitmap == null || ((mBitmap.Width != w) | (mBitmap.Height != h))) // { // if (mBitmap != null) // mBitmap.Dispose(); // mBitmap = new Bitmap(w, h); // } // Graphics canvas = Graphics.FromImage(mBitmap); // int prevX = 0; // int prevMaxY = 0; // int prevMinY = 0; // float maxVal = 0; // float minVal = 0; // int i = 0; // // index is how far to offset into the data array // long index = 0; // int maxSampleToShow = (int)Math.Min((mSamplesPerPixel * w) + mDrawingStartOffset, numSamples); // int sampleCount = 0; // int offsetIndex = 0; // if (mSamplesPerPixel > mThresholdSample) // { // sampleCount = (int)(mSamplesPerPixel / mThresholdSample) * 2; // offsetIndex = (int)Math.Floor((decimal)(mDrawingStartOffset / mThresholdSample)) * 2; // } // float[] data = new float[mSamplesPerPixel]; // mDrawSource.Position = mDrawingStartOffset; // int x = 0; // while (index < maxSampleToShow) // { // maxVal = -1; // minVal = 1; // int samplesRead = 0; // if (mSamplesPerPixel > mThresholdSample) // { // int startIndex = offsetIndex + (i * sampleCount); // int endIndex = Math.Min(mOptimizedArray.Length - 1, startIndex + sampleCount - 1); // for (x = startIndex; x <= endIndex; x++) // { // maxVal = Math.Max(maxVal, mOptimizedArray[x]); // minVal = Math.Min(minVal, mOptimizedArray[x]); // } // } // else // { // samplesRead = mDrawSource.Read(data, 0, data.Length); // // finds the max & min peaks for this pixel // for (x = 0; x < samplesRead; x++) // { // maxVal = Math.Max(maxVal, data[x]); // minVal = Math.Min(minVal, data[x]); // } // } // //8-bit samples are stored as unsigned bytes, ranging from 0 to 255. 
// //16-bit samples are stored as 2's-complement signed integers, ranging from -32768 to 32767. // // scales based on height of window // int scaledMinVal = (int)(((minVal + 1) * h) / 2); // int scaledMaxVal = (int)(((maxVal + 1) * h) / 2); // // if the max/min are the same, then draw a line from the previous position, // // otherwise we will not see anything // if (prevX >= Math.Min(splitP1.X, splitP2.X) - 10 && prevX <= Math.Max(splitP1.X, splitP2.X) - 10) // { // pen.Color = defaultColor; // } // else // { // pen.Color = Color.Black; // } // if (scaledMinVal == scaledMaxVal) // { // if (prevMaxY != 0) // { // canvas.DrawLine(pen, prevX, prevMaxY, i, scaledMaxVal); // } // } // else // { // if (i > prevX) // { // if (prevMaxY < scaledMinVal) // { // canvas.DrawLine(pen, prevX, prevMaxY, i, scaledMinVal); // } // else // { // if (prevMinY > scaledMaxVal) // { // canvas.DrawLine(pen, prevX, prevMinY, i, scaledMaxVal); // } // } // } // canvas.DrawLine(pen, i, scaledMinVal, i, scaledMaxVal); // } // prevX = i; // prevMaxY = scaledMaxVal; // prevMinY = scaledMinVal; // i += 1; // index = (i * mSamplesPerPixel) + mDrawingStartOffset; // } // return mBitmap; //} //private string mRawFileName; //private void CreateOptimizedArray() //{ // mDrawSource = CodecFactory.Instance.GetCodec(filePath).ToSampleSource().ToMono(); // long offset = 0; // long numSamples = mDrawSource.Length; // int x = 0; // int y = 0; // //Nth item holds maxVal, N+1th item holds minVal so allocate an array of double size // mOptimizedArray = new float[((numSamples / mThresholdSample) + 1) * 2]; // float[] data = new float[mThresholdSample]; // int samplesRead = 1; // mDrawSource.Position = 0; // string rawFilePath = System.Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments) + @"\SoundFactory\"; // if (!Directory.Exists(rawFilePath)) Directory.CreateDirectory(rawFilePath); // mRawFileName = rawFilePath + Guid.NewGuid().ToString() + ".raw"; // FileStream rawFile = new 
// FileStream(mRawFileName, FileMode.Create, FileAccess.ReadWrite);
//    BinaryWriter bin = new BinaryWriter(rawFile);
//    while (offset < numSamples && samplesRead > 0)
//    {
//        samplesRead = mDrawSource.Read(data, 0, mThresholdSample);
//        if (samplesRead > 0) //for some files file length is wrong so samplesRead may become 0 even if we did not come to the end of the file
//        {
//            for (int i = 0; i < samplesRead; i++)
//            {
//                bin.Write(data[i]);
//            }
//            float maxVal = -1;
//            float minVal = 1;
//            // finds the max & min peaks for this pixel
//            for (x = 0; x < samplesRead; x++)
//            {
//                maxVal = Math.Max(maxVal, data[x]);
//                minVal = Math.Min(minVal, data[x]);
//            }
//            mOptimizedArray[y] = minVal;
//            mOptimizedArray[y + 1] = maxVal;
//            y += 2;
//            offset += samplesRead;
//            //mProgressStatus = (int)(((float)offset / numSamples) * 100);
//        }
//    }
//}

// Draws the waveform into a bitmap of size w x h: one column per pixel, each
// column being a vertical line from the block's min peak to its max peak.
// When a pixel covers more samples than mThresholdSample, peaks come from the
// precomputed mOptimizedArray; otherwise they are read from mDrawSource directly.
private Bitmap DrawWave(Pen pen, int w, int h)
{
    Color defaultColor = pen.Color;
    long numSamples = mDrawSource.Length;
    int mSamplesPerPixel = (int)(mDrawSource.Length / w);
    int mDrawingStartOffset = 0;
    int mPrevSamplesPerPixel = mSamplesPerPixel; // NOTE(review): assigned but never used.
    Bitmap mBitmap = null;
    // NOTE(review): mBitmap is always null here, so this condition is always
    // true — looks like a remnant of a cached-bitmap version.
    if (mBitmap == null || ((mBitmap.Width != w) | (mBitmap.Height != h)))
    {
        if (mBitmap != null)
        {
            mBitmap.Dispose();
        }
        mBitmap = new Bitmap(w, h);
    }
    Graphics canvas = Graphics.FromImage(mBitmap);
    int prevX = 0;
    int prevMaxY = 0;
    int prevMinY = 0;
    float maxVal = 0;
    float minVal = 0;
    int i = 0;
    // index is how far to offset into the data array
    long index = 0;
    int maxSampleToShow = (int)Math.Min((mSamplesPerPixel * w) + mDrawingStartOffset, numSamples);
    int sampleCount = 0;
    int offsetIndex = 0;
    // mOptimizedArray stores 2 entries (min, max) per mThresholdSample block.
    if (mSamplesPerPixel > mThresholdSample)
    {
        sampleCount = (int)(mSamplesPerPixel / mThresholdSample) * 2;
        offsetIndex = (int)Math.Floor((decimal)(mDrawingStartOffset / mThresholdSample)) * 2;
    }
    float[] data = new float[mSamplesPerPixel];
    mDrawSource.Position = mDrawingStartOffset;
    int x = 0;
    while (index < maxSampleToShow)
    {
        maxVal = -1;
        minVal = 1;
        int samplesRead = 0;
        if (mSamplesPerPixel > mThresholdSample)
        {
            // Take peaks from the precomputed optimized array.
            int startIndex = offsetIndex + (i * sampleCount);
            int endIndex = Math.Min(mOptimizedArray.Length - 1, startIndex + sampleCount - 1);
            for (x = startIndex; x <= endIndex; x++)
            {
                maxVal = Math.Max(maxVal, mOptimizedArray[x]);
                minVal = Math.Min(minVal, mOptimizedArray[x]);
            }
        }
        else
        {
            samplesRead = mDrawSource.Read(data, 0, data.Length);
            // finds the max & min peaks for this pixel
            for (x = 0; x < samplesRead; x++)
            {
                maxVal = Math.Max(maxVal, data[x]);
                minVal = Math.Min(minVal, data[x]);
            }
        }
        //8-bit samples are stored as unsigned bytes, ranging from 0 to 255.
        //16-bit samples are stored as 2's-complement signed integers, ranging from -32768 to 32767.
        // scales based on height of window: maps [-1, 1] to [0, h]
        int scaledMinVal = (int)(((minVal + 1) * h) / 2);
        int scaledMaxVal = (int)(((maxVal + 1) * h) / 2);
        // Columns inside the split-marker band keep the default pen color; the
        // rest are drawn black.
        if (prevX >= Math.Min(splitP1.X, splitP2.X) - 10 && prevX <= Math.Max(splitP1.X, splitP2.X) - 10)
        {
            pen.Color = defaultColor;
        }
        else
        {
            pen.Color = Color.Black;
        }
        // if the max/min are the same, then draw a line from the previous position,
        // otherwise we will not see anything
        if (scaledMinVal == scaledMaxVal)
        {
            if (prevMaxY != 0)
            {
                canvas.DrawLine(pen, prevX, prevMaxY, i, scaledMaxVal);
            }
        }
        else
        {
            if (i > prevX)
            {
                // Bridge gaps between columns so the waveform outline stays connected.
                if (prevMaxY < scaledMinVal)
                {
                    canvas.DrawLine(pen, prevX, prevMaxY, i, scaledMinVal);
                }
                else
                {
                    if (prevMinY > scaledMaxVal)
                    {
                        canvas.DrawLine(pen, prevX, prevMinY, i, scaledMaxVal);
                    }
                }
            }
            canvas.DrawLine(pen, i, scaledMinVal, i, scaledMaxVal);
        }
        prevX = i;
        prevMaxY = scaledMaxVal;
        prevMinY = scaledMinVal;
        i += 1;
        index = (i * mSamplesPerPixel) + mDrawingStartOffset;
    }
    return (mBitmap);
}
/// <summary>
/// Detects onsets in the audio by cutting it into padded slices and analysing the
/// slices in parallel buckets.
/// </summary>
/// <param name="audio">Source audio to analyse.</param>
/// <returns>Onsets ordered by time, thinned by the minimum time delta.</returns>
public List<Onset> Detect(ISampleSource audio)
{
    // Reset accumulated state from any previous run.
    _onsets.Clear();
    _completed = 0;
    _sliceCount = 0;
    _onsets = new List<float>();
    _amplitudes = new List<float>();
    var onsets = new List<Onset>();

    //init detection specific variables
    int sliceSampleSize = (int)Math.Ceiling(_options.SliceLength * audio.WaveFormat.SampleRate); //the size of each slice's sample
    int slicePaddingSize = (int)Math.Ceiling(_options.SlicePaddingLength * audio.WaveFormat.SampleRate);
    _sliceCount = (int)Math.Ceiling((float)audio.Length/audio.WaveFormat.Channels / sliceSampleSize); //the number of slices needed
    var samples = (int)audio.Length / audio.WaveFormat.Channels;

    //init parallel specific variables
    var pOptions = new ParallelOptions();
    if (_options.MaxDegreeOfParallelism != -1) pOptions.MaxDegreeOfParallelism = _options.MaxDegreeOfParallelism;
    ParallelLoopState loopState; // NOTE(review): declared but never used.

    // Cut the audio into slices; every slice except the first is left-padded by
    // slicePaddingSize samples, with Delay recording the slice's real start time.
    List<Wav> wavSlices = new List<Wav>();
    for (int i = 0; i < _sliceCount; i++)
    {
        int baseStart = i * sliceSampleSize;
        int adjustedStart = (baseStart - sliceSampleSize > 0) ? baseStart - slicePaddingSize : 0;
        int count = (sliceSampleSize + slicePaddingSize + baseStart > samples) ? samples - adjustedStart : sliceSampleSize + (baseStart - adjustedStart) + slicePaddingSize;
        float delay = (float)adjustedStart / audio.WaveFormat.SampleRate;
        float[] buffer = new float[count * audio.WaveFormat.Channels];
        audio.SetPosition(TimeConverter.SampleSourceTimeConverter.ToTimeSpan(audio.WaveFormat, adjustedStart * audio.WaveFormat.Channels));
        audio.Read(buffer, 0, count * audio.WaveFormat.Channels);
        // NOTE(review): Padding divides two ints (integer division), losing
        // fractional seconds — confirm that is intentional.
        wavSlices.Add(new Wav(buffer, audio.WaveFormat.SampleRate, count, audio.WaveFormat.Channels) { Delay = delay, Padding = ((delay > 0) ? slicePaddingSize : 0) / audio.WaveFormat.SampleRate });
    }

    // Process slices in buckets of 5, resetting the shared allocator between
    // buckets to bound memory growth.
    int bucketSize = 5;
    int bucketcount = (int)Math.Ceiling((double)wavSlices.Count / bucketSize);
    MemoryAllocator _allocator = new MemoryAllocator();
    for (int i = 0; i < bucketcount; i++)
    {
        _allocator.Reset();
        int count = bucketSize;
        if ((i + 1) * bucketSize > wavSlices.Count) count = wavSlices.Count - i * bucketSize;
        if (count < 0) continue;
        List<Wav> parallel = wavSlices.GetRange(i * bucketSize, count);
        var ploopResult = Parallel.ForEach(parallel, pOptions, (w, state) => GetOnsets(w, _allocator));
        if (!ploopResult.IsCompleted) throw new Exception();
    }

    // Pair each onset time with its amplitude, then sort chronologically.
    onsets = _onsets.Zip(_amplitudes, (onset, amplitude) => new Onset { OnsetTime = onset, OnsetAmplitude = amplitude }).ToList();
    onsets = onsets.OrderBy(f => f.OnsetTime).ToList();

    // Drop onsets closer than MinimumTimeDelta (ms) to the previous kept onset.
    float prev = 0;
    float combine = 0.03f; // NOTE(review): assigned but never used.
    var ret = new List<Onset>();
    for (int i = 0; i < onsets.Count; i++)
    {
        if (onsets[i].OnsetTime - prev < _options.MinimumTimeDelta / 1000.0f) continue;
        prev = onsets[i].OnsetTime;
        ret.Add(onsets[i]);
    }
    return ret;
}