public void TestReadOverEnd() {
    // Verify that a read request reaching past the end of a stream returns only
    // the bytes that actually remain and leaves the position at the stream end.
    var buffer = new byte[32768];
    foreach (var stream in GetStreams()) {
        long length = stream.Length;
        int frameSize = stream.SampleBlockSize;
        Assert.AreEqual(0, stream.Position);

        // Seek to exactly one sample block before the end of the stream.
        long lastFramePosition = length - frameSize;
        stream.Position = lastFramePosition;
        Assert.AreEqual(lastFramePosition, stream.Position);

        // Request far more data than remains; only the final frame should arrive.
        int bytesRead = StreamUtil.ForceRead(stream, buffer, 0, buffer.Length);
        Assert.AreEqual(frameSize, bytesRead);
        Assert.AreEqual(length, stream.Position);
    }
}
/// <summary>
/// Fills the stream buffer: unprocessed samples at the end of the buffer are moved
/// to its beginning, then the remainder of the buffer is refilled from the stream.
/// </summary>
/// <returns>true if the buffer has been filled, false if the end of the stream has been reached</returns>
private bool FillBuffer() {
    // first, carry over unprocessed samples from the end of the stream buffer to its beginning
    // (frameOffset marks the first unprocessed byte; bytes up to streamBufferLevel are kept)
    streamBufferOffset = streamBufferLevel - frameOffset;
    if (streamBufferOffset > 0) {
        // source and target regions are in the same array; Buffer.BlockCopy supports overlapping copies
        Buffer.BlockCopy(streamBuffer, frameOffset, streamBuffer, 0, streamBufferOffset);
    }
    frameOffset = 0;
    // second, fill the stream input buffer - if no bytes returned we have reached the end of the stream
    streamBufferLevel = StreamUtil.ForceRead(stream, streamBuffer, streamBufferOffset, streamBuffer.Length - streamBufferOffset);
    if (streamBufferLevel == 0) {
        // NOTE(review): if bytes were carried over above (streamBufferOffset > 0) they are
        // abandoned on this path — confirm a trailing partial window is intentionally discarded
        Debug.WriteLine("stream windowing finished - end position {0}/{1}", stream.Position, stream.Length);
        return(false); // whole stream has been processed
    }
    // total valid bytes = freshly read bytes plus the carried-over remainder
    streamBufferLevel += streamBufferOffset;
    streamBufferOffset = 0;
    return(true); // stream buffer successfully filled
}
/// <summary>
/// Runs the alignment analysis: steps through the common analysis interval of all
/// tracks and, at each interval position, cross-compares a window of every pair of
/// overlapping streams, accumulating per-window and global similarity statistics.
/// Raises OnStarted, OnWindowAnalyzed per position, and OnFinished with the totals.
/// </summary>
public void Execute() {
    Debug.WriteLine("window length: {0}s, interval length: {1}s, sample rate: {2}", windowLength.TotalSeconds, intervalLength.TotalSeconds, sampleRate);
    IProgressReporter reporter = progressMonitor.BeginTask("Analyzing alignment...", true);

    // Prepare one stream per track, resampled to the common sample rate so that all
    // streams share the same audio properties afterwards.
    List<IAudioStream> streams = new List<IAudioStream>(audioTracks.Count);
    TimeSpan start = audioTracks.Start;
    TimeSpan end = audioTracks.End;
    foreach (AudioTrack audioTrack in audioTracks) {
        streams.Add(CrossCorrelation.PrepareStream(audioTrack.CreateAudioStream(), sampleRate));
    }

    // Byte offset of each track relative to the earliest track start.
    long[] streamOffsets = new long[audioTracks.Count];
    for (int i = 0; i < audioTracks.Count; i++) {
        streamOffsets[i] = TimeUtil.TimeSpanToBytes(audioTracks[i].Offset - start, streams[0].Properties);
    }

    int windowLengthInBytes = (int)TimeUtil.TimeSpanToBytes(windowLength, streams[0].Properties);
    long intervalLengthInBytes = TimeUtil.TimeSpanToBytes(intervalLength, streams[0].Properties);
    long analysisIntervalLength = TimeUtil.TimeSpanToBytes(end - start, streams[0].Properties);

    OnStarted();

    // Reusable window buffers for the two streams of each comparison pair.
    byte[] x = new byte[windowLengthInBytes];
    byte[] y = new byte[windowLengthInBytes];
    long positionX;
    long positionY;

    // Global statistics accumulated over all windows.
    double sumNegative = 0;
    double sumPositive = 0;
    int countNegative = 0;
    int countPositive = 0;
    double min = 0;
    double max = 0;

    for (long position = 0; position < analysisIntervalLength; position += intervalLengthInBytes) {
        // Per-window statistics, merged into the global totals below.
        double windowSumNegative = 0;
        double windowSumPositive = 0;
        int windowCountNegative = 0;
        int windowCountPositive = 0;
        double windowMin = 0;
        double windowMax = 0;

        Debug.WriteLine("Analyzing {0} @ {1} / {2}", intervalLengthInBytes, position, analysisIntervalLength);

        // at each position in the analysis interval, compare each stream with each other
        for (int i = 0; i < streams.Count; i++) {
            positionX = position - streamOffsets[i];
            if (positionX >= 0 && positionX < streams[i].Length) {
                streams[i].Position = positionX;
                // NOTE(review): a short read near a stream's end leaves stale bytes in x/y —
                // confirm that truncated trailing windows are acceptable for analyzeSection
                StreamUtil.ForceRead(streams[i], x, 0, windowLengthInBytes);
                for (int j = i + 1; j < streams.Count; j++) {
                    positionY = position - streamOffsets[j];
                    if (positionY >= 0 && positionY < streams[j].Length) {
                        streams[j].Position = positionY;
                        StreamUtil.ForceRead(streams[j], y, 0, windowLengthInBytes);
                        double val = analyzeSection(x, y);
                        if (val > 0) {
                            windowSumPositive += val;
                            windowCountPositive++;
                        }
                        else {
                            windowSumNegative += val;
                            windowCountNegative++;
                        }
                        if (windowMin > val) {
                            windowMin = val;
                        }
                        if (windowMax < val) {
                            windowMax = val;
                        }
                        Debug.WriteLine("{0,2}->{1,2}: {2}", i, j, val);
                    }
                }
            }
        }

        // Merge the window statistics into the global totals.
        sumPositive += windowSumPositive;
        countPositive += windowCountPositive;
        sumNegative += windowSumNegative;
        countNegative += windowCountNegative;
        if (min > windowMin) {
            min = windowMin;
        }
        if (max < windowMax) {
            max = windowMax;
        }

        reporter.ReportProgress((double)position / analysisIntervalLength * 100);
        OnWindowAnalyzed(start + TimeUtil.BytesToTimeSpan(position, streams[0].Properties),
            windowCountPositive, windowCountNegative, windowMin, windowMax, windowSumPositive, windowSumNegative);
    }

    reporter.Finish();

    // NOTE(review): the averages below evaluate to NaN/Infinity when the corresponding
    // count is zero — presumably acceptable for debug output; verify before surfacing
    // these values anywhere else.
    Debug.WriteLine("Finished. sum: {0}, sum+: {1}, sum-: {2}, sumAbs: {3}, avg: {4}, avg+: {5}, avg-: {6}, avgAbs: {7}, min: {8}, max: {9}, points: {10}",
        sumPositive + sumNegative, sumPositive, sumNegative, sumPositive + (sumNegative * -1),
        (sumPositive + sumNegative) / (countPositive + countNegative), sumPositive / countPositive,
        sumNegative / countNegative, (sumPositive + (sumNegative * -1)) / (countPositive + countNegative),
        min, max, countPositive + countNegative);

    double score = (sumPositive + (sumNegative * -1)) / (countPositive + countNegative);
    Debug.WriteLine("Score: {0} => {1}%", score, Math.Round(score * 100));

    OnFinished(countPositive, countNegative, min, max, sumPositive, sumNegative);

    streams.ForEach(s => s.Close());
}
/// <summary>
/// Reads <paramref name="sampleCount"/> samples from the current source stream position
/// and condenses them into <paramref name="peakCount"/> min/max peaks per channel.
/// When the downsampling factor exceeds the PeakStore threshold, precomputed peaks are
/// merged from the store instead of reading raw samples.
/// </summary>
/// <param name="peaks">per-channel target arrays; each is reinterpreted as an array of Peak structs</param>
/// <param name="sampleCount">number of source samples the requested peaks span</param>
/// <param name="peakCount">number of peaks to produce per channel</param>
/// <returns>the number of peaks written per channel (always <paramref name="peakCount"/>)</returns>
/// <exception cref="InvalidOperationException">if the stream read ends on a partial sample block</exception>
public int ReadPeaks(float[][] peaks, int sampleCount, int peakCount) {
    long streamPosition = sourceStream.Position;
    int samplesPerPeak = (int)Math.Ceiling((float)sampleCount / peakCount);
    unsafe {
        // if the samplesPerPeak count is beyond the PeakStore threshold, load the peaks from there
        if (samplesPerPeak > peakStore.SamplesPerPeak) {
            // GetData may return data at a coarser resolution; samplesPerPeak is updated accordingly
            byte[][] peakData = peakStore.GetData(samplesPerPeak, out samplesPerPeak);
            int positionOffset = (int)(streamPosition / SampleBlockSize / samplesPerPeak) * sizeof(Peak);
            int sourcePeakCount = (int)Math.Round((float)sampleCount / samplesPerPeak);
            // scale factor mapping a source peak index to a target peak index
            float sourceToTargetIndex = 1 / ((float)sourcePeakCount / peakCount);
            for (int channel = 0; channel < Properties.Channels; channel++) {
                fixed (byte* peakChannelDataB = &peakData[channel][positionOffset]) {
                    Peak* peakChannelDataP = (Peak*)peakChannelDataB;
                    fixed (float* peakChannelF = &peaks[channel][0]) {
                        Peak* peakChannelP = (Peak*)peakChannelF;
                        int peak = 0;
                        // start each target peak inverted (max < min) so the first Merge always takes effect
                        peakChannelP[0] = new Peak(float.MaxValue, float.MinValue);
                        for (int p = 0; p < sourcePeakCount; p++) {
                            if ((int)(p * sourceToTargetIndex) > peak) {
                                peak++;
                                peakChannelP[peak] = new Peak(float.MaxValue, float.MinValue);
                            }
                            peakChannelP[peak].Merge(peakChannelDataP[p]);
                        }
                    }
                }
            }
            return (peakCount);
        }

        // else load samples from the stream and generate peaks
        int requiredBytes = sampleCount * Properties.SampleBlockByteSize;
        int bufferOffset = 0;
        int bytesRead = 0;
        if (bufferPosition <= sourceStream.Position && sourceStream.Position + requiredBytes <= bufferPosition + bufferLength) {
            // the requested data can be read directly from the buffer, no need to read from the stream
            bufferOffset = (int)(sourceStream.Position - bufferPosition);
            bytesRead = requiredBytes;
        }
        else {
            // resize buffer and read data from stream
            ResizeBuffer(requiredBytes);
            bufferPosition = sourceStream.Position;
            bytesRead = StreamUtil.ForceRead(sourceStream, buffer, 0, requiredBytes);
            bufferLength = bytesRead;
        }
        if (bytesRead % Properties.SampleBlockByteSize != 0) {
            // a partial sample block indicates a corrupt or misaligned source stream;
            // was a bare 'throw new Exception()' - use a specific type with a message instead
            throw new InvalidOperationException("Stream read ended on a partial sample block (" + bytesRead + " bytes read, block size " + Properties.SampleBlockByteSize + ")");
        }
        int samplesRead = bytesRead / Properties.SampleBlockByteSize;
        // scale factor mapping a sample index to a target peak index
        float sampleIndexToPeakIndex = 1 / ((float)samplesRead / peakCount);
        fixed (byte* bufferB = &buffer[bufferOffset]) {
            float* bufferF = (float*)bufferB;
            for (int channel = 0; channel < Properties.Channels; channel++) {
                fixed (float* peakChannelF = &peaks[channel][0]) {
                    Peak* peakChannelP = (Peak*)peakChannelF;
                    // samples are interleaved; step by the channel count to stay on one channel
                    int index = channel;
                    int peakIndex = 0;
                    // start the first peak inverted (max < min) so the first Merge always takes effect
                    peakChannelP[peakIndex] = new Peak(float.MaxValue, float.MinValue);
                    for (int i = 0; i < samplesRead; i++) {
                        float sampleValue = bufferF[index];
                        index += Properties.Channels;
                        peakChannelP[peakIndex].Merge(sampleValue, sampleValue);
                        if ((int)(i * sampleIndexToPeakIndex) > peakIndex) {
                            peakChannelP[++peakIndex] = new Peak(sampleValue, sampleValue);
                        }
                    }
                }
            }
        }
    }
    return (peakCount);
}