/// <summary>
/// Transposes down the sample rate, causing the observed playback
/// 'rate' of the sound to increase.
/// </summary>
/// <param name="src">Pointer to the block of input samples.</param>
/// <param name="numSamples">Number of samples in <paramref name="src"/>.</param>
private void Downsample(ArrayPtr <TSampleType> src, int numSamples)
{
    // If the parameter 'uRate' value is larger than 'SCALE', first apply the
    // anti-alias filter to remove high frequencies (prevent them from folding
    // over the lower frequencies), then transpose.

    // Add the new samples to the end of the storeBuffer.
    _storeBuffer.PutSamples(src, numSamples);

    // Anti-alias filter the samples to prevent folding and output the filtered
    // data to tempBuffer. Note: because of the FIR filter length, the
    // filtering routine takes in 'filter_length' more samples than it outputs.
    Debug.Assert(_tempBuffer.IsEmpty);
    var sizeTemp = _storeBuffer.AvailableSamples;

    int count = _antiAliasFilter.Evaluate(_tempBuffer.PtrEnd(sizeTemp), _storeBuffer.PtrBegin(), sizeTemp, _channels);

    if (count == 0)
    {
        // Not enough buffered input yet for the filter to emit anything.
        return;
    }

    // Remove the filtered samples from 'storeBuffer'.
    _storeBuffer.ReceiveSamples(count);

    // Transpose the samples (+16 is to reserve some slack in the destination buffer).
    sizeTemp = (int)(numSamples / Rate + 16.0f);
    count = Transpose(_outputBuffer.PtrEnd(sizeTemp), _tempBuffer.PtrBegin(), count);
    _outputBuffer.PutSamples(count);
}
/// <summary>
/// Inputs a block of samples for analyzing: Envelopes the samples and
/// then updates the autocorrelation estimation. When whole song data
/// has been input in smaller blocks using this function, read the
/// resulting bpm with <see cref="GetBpm"/> function.
/// </summary>
/// <remarks>
/// Notice that data in <paramref name="samples"/> array can be
/// disrupted in processing.
/// </remarks>
/// <param name="samples">Pointer to input/working data buffer.</param>
/// <param name="numSamples">Number of samples in buffer.</param>
public void InputSamples(ArrayPtr <TSampleType> samples, int numSamples)
{
    var monoScratch = new TSampleType[DECIMATED_BLOCK_SAMPLES];

    // Feed the input through in chunks of at most INPUT_BLOCK_SAMPLES.
    int remaining = numSamples;
    while (remaining > 0)
    {
        int chunk = Math.Min(remaining, INPUT_BLOCK_SAMPLES);

        // Decimate; this also folds multi-channel input down to mono.
        int decCount = Decimate(monoScratch, samples, chunk);
        samples += chunk * Channels;
        remaining -= chunk;

        // Envelope the fresh samples and append them to the buffer.
        CalcEnvelope(monoScratch, decCount);
        Buffer.PutSamples(monoScratch, decCount);
    }

    // Once the buffer holds more than a window's worth of samples,
    // autocorrelate the oldest ones and drop them from the buffer.
    if (Buffer.AvailableSamples > WindowLen)
    {
        int processLength = Buffer.AvailableSamples - WindowLen;

        UpdateXCorr(processLength);
        Buffer.ReceiveSamples(processLength);
    }
}
/// <summary>
/// Inputs a block of samples for analyzing: Envelopes the samples and then
/// updates the auto-correlation estimation. When whole song data has been input
/// in smaller blocks using this function, read the resulting bpm with 'getBpm'
/// method.
/// </summary>
/// <param name="samples">Pointer to input/working data buffer.</param>
/// <param name="numSamples">Number of samples to insert.</param>
/// <remarks>
/// Notice that data in 'samples' array can be disrupted in processing.
/// </remarks>
public void InputSamples(ReadOnlySpan <float> samples, int numSamples)
{
    Span <float> decimated = stackalloc float[DECIMATED_BLOCK_SIZE];

    // Consume the input in chunks of at most INPUT_BLOCK_SIZE samples.
    int left = numSamples;
    while (left > 0)
    {
        int chunk = Math.Min(left, INPUT_BLOCK_SIZE);

        // Decimate; this also folds multi-channel input down to mono.
        int decCount = Decimate(in decimated, samples, chunk);
        samples = samples.Slice(chunk * _channels);
        left -= chunk;

        _buffer.PutSamples(decimated, decCount);
    }

    // While the buffer holds enough samples, advance the analysis in
    // fixed-size update sequences.
    int required = Math.Max(_windowLen + XCORR_UPDATE_SEQUENCE, 2 * XCORR_UPDATE_SEQUENCE);
    while (_buffer.AvailableSamples >= required)
    {
        // ... update auto-correlations...
        UpdateXCorr(XCORR_UPDATE_SEQUENCE);

        // ...update beat position calculation...
        UpdateBeatPos(XCORR_UPDATE_SEQUENCE / 2);

        // ... and remove the processed samples from the buffer.
        _buffer.ReceiveSamples(XCORR_UPDATE_SEQUENCE / OVERLAP_FACTOR);
    }
}
/// <summary>
/// Transposes all samples currently available in <paramref name="src"/>,
/// appending the output to <paramref name="dest"/>. Consumed samples are
/// removed from the source buffer.
/// </summary>
/// <param name="dest">Destination buffer; transposed samples are appended to its end.</param>
/// <param name="src">Source buffer; processed samples are removed from it.</param>
/// <returns>Number of output samples produced.</returns>
public virtual int Transpose(FifoSampleBuffer <TSampleType> dest, FifoSampleBuffer <TSampleType> src)
{
    int numSrcSamples = src.AvailableSamples;
    // +8 reserves a little slack in the destination buffer for rounding.
    int sizeDemand = (int)(numSrcSamples / rate) + 8;
    int numOutput;
    ArrayPtr <TSampleType> psrc = src.PtrBegin();
    ArrayPtr <TSampleType> pdest = dest.PtrEnd(sizeDemand);

    // Dispatch to a channel-count-specialized transposer; when
    // USE_MULTICH_ALWAYS is defined, the generic multi-channel path
    // handles every channel count.
#if !USE_MULTICH_ALWAYS
    if (channels == 1)
    {
        numOutput = TransposeMono(pdest, psrc, ref numSrcSamples);
    }
    else if (channels == 2)
    {
        numOutput = TransposeStereo(pdest, psrc, ref numSrcSamples);
    }
    else
#endif
    {
        Debug.Assert(channels > 0);
        numOutput = TransposeMulti(pdest, psrc, ref numSrcSamples);
    }

    // Commit the produced samples and drop the consumed input; the
    // TransposeXxx call updated 'numSrcSamples' (passed by ref) to the
    // number of source samples it actually used.
    dest.PutSamples(numOutput);
    src.ReceiveSamples(numSrcSamples);

    return(numOutput);
}
/// <summary>
/// Processes as many processing frames of the samples in <see cref="_inputBuffer"/>
/// as possible, storing the result into <see cref="_outputBuffer"/>.
/// </summary>
private void ProcessSamples()
{
    // Process samples as long as there are enough samples in '_inputBuffer'
    // to form a processing frame.
    while (_inputBuffer.AvailableSamples >= _sampleReq)
    {
        // If tempo differs from the normal ('SCALE'), scan for the best overlapping
        // position.
        int offset = SeekBestOverlapPosition(_inputBuffer.PtrBegin());

        // Mix the samples in the '_inputBuffer' at position of 'offset' with the
        // samples in 'midBuffer' using sliding overlapping
        // ... first partially overlap with the end of the previous sequence
        // (that's in 'midBuffer').
        Overlap(_outputBuffer.PtrEnd(_overlapLength), _inputBuffer.PtrBegin(), offset);
        _outputBuffer.PutSamples(_overlapLength);

        // ... then copy sequence samples from '_inputBuffer' to output:

        // length of sequence
        int temp = (_seekWindowLength - 2 * _overlapLength);

        // crosscheck that we don't have buffer overflow...
        if (_inputBuffer.AvailableSamples < (offset + temp + _overlapLength * 2))
        {
            // NOTE(review): this 'continue' re-checks the loop condition without
            // consuming any input, so if the branch were ever taken the loop
            // would spin (nothing in the condition can change). The comment says
            // it shouldn't really happen; confirm whether 'break' was intended.
            continue; // just in case, shouldn't really happen
        }

        _outputBuffer.PutSamples(_inputBuffer.PtrBegin() + _channels * (offset + _overlapLength), temp);

        // Copies the end of the current sequence from '_inputBuffer' to
        // 'midBuffer' for being mixed with the beginning of the next
        // processing sequence and so on.
        Debug.Assert((offset + temp + _overlapLength * 2) <= _inputBuffer.AvailableSamples);
        ArrayPtr <TSampleType> .CopyBytes(_midBuffer,
            _inputBuffer.PtrBegin() + _channels * (offset + temp + _overlapLength),
            _channels * SIZEOF_SAMPLETYPE * _overlapLength);

        // Remove the processed samples from the input buffer. Update
        // the difference between integer & nominal skip step to '_skipFract'
        // in order to prevent the error from accumulating over time.
        _skipFract += _nominalSkip; // real skip size
        var ovlSkip = (int)_skipFract;
        _skipFract -= ovlSkip; // maintain the fraction part, i.e. real vs. integer skip
        _inputBuffer.ReceiveSamples(ovlSkip);
    }
}
/// <summary>
/// Runs the FIR filter over all samples currently available in
/// <paramref name="src"/>, appending the filtered output to
/// <paramref name="dest"/>. Consumed samples are removed from the source.
/// </summary>
/// <param name="dest">Destination buffer; filtered samples are appended to its end.</param>
/// <param name="src">Source buffer; processed samples are removed from it.</param>
/// <returns>Number of samples produced (and removed from the source).</returns>
public int Evaluate(FifoSampleBuffer <TSampleType> dest, FifoSampleBuffer <TSampleType> src)
{
    // Both buffers must agree on the channel layout.
    int channelCount = src.GetChannels();
    Debug.Assert(channelCount == dest.GetChannels());

    int inputCount = src.AvailableSamples;
    ArrayPtr <TSampleType> input = src.PtrBegin();
    ArrayPtr <TSampleType> output = dest.PtrEnd(inputCount);

    int produced = _firFilter.Evaluate(output, input, inputCount, channelCount);

    // Drop the processed samples from the source, then commit the output.
    src.ReceiveSamples(produced);
    dest.PutSamples(produced);

    return(produced);
}