public void SetPosition03()
{
    var audioProperties = new AudioProperties(2, 44100, 32, AudioFormat.IEEE);

    // One minute of silence as the warp source material
    var sourceLength = TimeUtil.TimeSpanToBytes(new TimeSpan(0, 1, 0), audioProperties);
    TimeWarpStream s = new TimeWarpStream(new NullStream(audioProperties, sourceLength));
    TimeSpan length = TimeUtil.BytesToTimeSpan(s.Length, s.Properties);

    // Two mappings: each half of the source gets compressed into a quarter,
    // halving the total warped length.
    s.Mappings.Add(new TimeWarp
    {
        From = new TimeSpan(length.Ticks / 2),
        To = new TimeSpan(length.Ticks / 4)
    });
    s.Mappings.Add(new TimeWarp
    {
        From = length,
        To = new TimeSpan(length.Ticks / 4 * 2)
    });

    byte[] buffer = new byte[5000];
    Assert.AreEqual(0, s.Position);

    // Read one chunk, then seek to a new position and drain the rest
    s.Read(buffer, 0, buffer.Length);
    s.Position = 44440;
    while (s.Read(buffer, 0, buffer.Length) > 0)
    {
        // drain until EOS
    }

    // After draining, the position must have arrived exactly at the warped length
    Assert.AreEqual(s.Length, s.Position);
}
public void SetPosition02()
{
    var audioProperties = new AudioProperties(2, 44100, 32, AudioFormat.IEEE);

    // One minute of silence as the warp source material
    TimeWarpStream s = new TimeWarpStream(
        new NullStream(audioProperties,
            TimeUtil.TimeSpanToBytes(new TimeSpan(0, 1, 0), audioProperties)));
    TimeSpan length = TimeUtil.BytesToTimeSpan(s.Length, s.Properties);

    // Single mapping: the full source duration is warped to half its length
    s.Mappings.Add(new TimeWarp
    {
        From = length,
        To = new TimeSpan(length.Ticks / 4 * 2)
    });

    byte[] buffer = new byte[5000];
    Assert.AreEqual(0, s.Position);

    // Seek into the stream before the first read; the position must stick
    s.Position = 11104;
    Assert.AreEqual(11104, s.Position);

    int bytesRead;
    long totalBytesRead = 0;
    while ((bytesRead = s.Read(buffer, 0, buffer.Length)) > 0)
    {
        totalBytesRead += bytesRead;
    }

    // The bytes read must exactly cover the distance from the seek target
    // to the end of the warped stream.
    Assert.AreEqual(totalBytesRead, s.Position - 11104);
    Assert.AreEqual(s.Length, s.Position);
}
/// <summary>
/// Playback timer tick: pushes the current playback time to the time label
/// and the seek slider, marshalled onto the UI thread via the dispatcher.
/// </summary>
private void timer_Elapsed(object sender, ElapsedEventArgs e) {
    if (wavePlayer != null) {
        lblCurrentPlaybackTime.Dispatcher.BeginInvoke(DispatcherPriority.Normal,
            new DispatcherOperationCallback(delegate {
                // Read the stream position ONCE so the label and the seeker show
                // the same time (the original read it twice; playback advances
                // between the reads, so the two controls could disagree).
                TimeSpan currentTime = TimeUtil.BytesToTimeSpan(playbackStream.Position, playbackStream.Properties);
                lblCurrentPlaybackTime.Content = currentTime;
                // Tag the slider so its ValueChanged handler can distinguish this
                // programmatic update from a user-initiated seek.
                playbackSeeker.Tag = SEEKER_PROGRAMMATIC_VALUECHANGED_TAG;
                playbackSeeker.Value = currentTime.TotalSeconds;
                return(null);
            }), null);
    }
}
static void Main(string[] args)
{
    var audioProperties = new AudioProperties(1, 44100, 32, AudioFormat.IEEE);

    // Set up Aurio's resampler and FFT implementations
    ResamplerFactory.Factory = new Soxr.ResamplerFactory();
    FFTFactory.Factory = new PFFFT.FFTFactory();

    // FIFO buffer holding up to 10 seconds of audio between producer and consumer
    var fifoStream = new BlockingFixedLengthFifoStream(
        audioProperties,
        audioProperties.SampleBlockByteSize * audioProperties.SampleRate * 10);

    var audioTrack = new AudioTrack(fifoStream, "realtime fifo stream");

    // Configure the fingerprinter: emit events every 20 generated subfingerprints
    // and keep the buffer at its minimum (one subfingerprint frame) so the
    // processing latency stays as low as possible.
    var fingerprintingProfile = FingerprintGenerator.GetProfiles()[0];
    // Don't generate flipped hashes for simplicities's sake (we're not working with the result anyway)
    fingerprintingProfile.FlipWeakestBits = 0;
    var fingerprinter = new FingerprintGenerator(
        fingerprintingProfile, audioTrack, 20, fingerprintingProfile.FrameSize);

    long subfingerprintsGenerated = 0;
    fingerprinter.SubFingerprintsGenerated += (sender, e) =>
    {
        subfingerprintsGenerated += e.SubFingerprints.Count;

        // Derive progress statistics from the generator and buffer state
        var ingressed = TimeUtil.BytesToTimeSpan(_dataGenerated, audioProperties);
        var buffered = TimeUtil.BytesToTimeSpan(
            fifoStream.WritePosition - fifoStream.Position, audioProperties);
        var processed = new TimeSpan((long)Math.Round(
            subfingerprintsGenerated * fingerprintingProfile.HashTimeScale * TimeUtil.SECS_TO_TICKS));

        Console.WriteLine("{0} ingressed, {1} buffered, {2} processed, {3} subfingerprints generated",
            ingressed, buffered, processed, subfingerprintsGenerated);
    };

    // Start the stream input (producer) ...
    StartSineWaveRealtimeGenerator(audioProperties, fifoStream);
    // ... and the output processing (consumer)
    fingerprinter.Generate();
}
public void SetPosition04()
{
    var audioProperties = new AudioProperties(2, 44100, 32, AudioFormat.IEEE);

    // One minute of silence as the warp source material
    TimeWarpStream s = new TimeWarpStream(
        new NullStream(audioProperties,
            TimeUtil.TimeSpanToBytes(new TimeSpan(0, 1, 0), audioProperties)));
    TimeSpan length = TimeUtil.BytesToTimeSpan(s.Length, s.Properties);

    // Two mappings: each half of the source gets compressed into a quarter
    s.Mappings.Add(new TimeWarp
    {
        From = new TimeSpan(length.Ticks / 2),
        To = new TimeSpan(length.Ticks / 4)
    });
    s.Mappings.Add(new TimeWarp
    {
        From = length,
        To = new TimeSpan(length.Ticks / 4 * 2)
    });

    byte[] buffer = new byte[5000];
    Assert.AreEqual(0, s.Position);

    // While draining the stream, re-assign the current position once (on the
    // 5th read); setting Position to its current value must be a no-op.
    bool positionSet = false;
    int readCount = 0;
    while (s.Read(buffer, 0, buffer.Length) > 0)
    {
        if (++readCount == 5)
        {
            positionSet = true;
            long posBefore = s.Position;
            s.Position = posBefore;
            Assert.AreEqual(posBefore, s.Position);
        }
    }

    Assert.IsTrue(positionSet); // if the position hasn't been set, the whole test case is pointless
    Assert.AreEqual(s.Length, s.Position);
}
/// <summary>
/// Returns a delegate that upgrades a project from the given format version to
/// the current one, or null when no conversion is supported.
/// </summary>
private static UpgradeDelegate ProjectFormatUpgrade(int format)
{
    // Version 2 changed the time warp representation from byte positions (long)
    // to time positions (TimeSpan, internally a long), so only a 1 -> 2
    // conversion is possible; every other combination is unsupported.
    if (FormatVersion != 2 || format != 1)
    {
        return null;
    }

    return project =>
    {
        foreach (AudioTrack track in project.AudioTracks)
        {
            foreach (TimeWarp tw in track.TimeWarps)
            {
                // The byte positions are stored in the Ticks property; convert
                // them to real ticks as time units.
                tw.From = TimeUtil.BytesToTimeSpan(tw.From.Ticks, track.SourceProperties);
                tw.To = TimeUtil.BytesToTimeSpan(tw.To.Ticks, track.SourceProperties);
            }
        }
        return project;
    };
}
/// <summary>
/// Builds the full playback pipeline for all tracks in the track list
/// (decode -> IEEE float -> time warp -> per-track volume -> mixer ->
/// master volume -> metering -> clipping), wires up the mute/solo/volume
/// event handlers, and starts playback.
/// </summary>
private void btnPlay_Click(object sender, RoutedEventArgs e) {
    // Tear down a previous player before rebuilding the pipeline
    if (wavePlayer != null) {
        wavePlayer.Dispose();
    }

    debugStreamController = new DebugStreamController();
    MixerStream mixer = new MixerStream(2, 44100);

    foreach (AudioTrack audioTrack in trackListBox.Items) {
        WaveFileReader reader = new WaveFileReader(audioTrack.FileInfo.FullName);
        IeeeStream channel = new IeeeStream(new DebugStream(new NAudioSourceStream(reader), debugStreamController));
        TimeWarpStream warp = new TimeWarpStream(new DebugStream(channel, debugStreamController));

        // necessary to control each track individually
        VolumeControlStream volumeControl = new VolumeControlStream(new DebugStream(warp, debugStreamController)) {
            Mute = audioTrack.Mute,
            Volume = audioTrack.Volume
        };

        // when the AudioTrack.Mute property changes, just set it accordingly on the audio stream
        audioTrack.MuteChanged += new EventHandler<ValueEventArgs<bool>>(
            delegate(object vsender, ValueEventArgs<bool> ve) {
                volumeControl.Mute = ve.Value;
            });

        // when the AudioTrack.Solo property changes, we have to react in different ways:
        audioTrack.SoloChanged += new EventHandler<ValueEventArgs<bool>>(
            delegate(object vsender, ValueEventArgs<bool> ve) {
                AudioTrack senderTrack = (AudioTrack)vsender;
                bool isOtherTrackSoloed = false;
                foreach (AudioTrack vaudioTrack in trackListBox.Items) {
                    if (vaudioTrack != senderTrack && vaudioTrack.Solo) {
                        isOtherTrackSoloed = true;
                        break;
                    }
                }
                /* if there's at least one other track that is soloed, we set the mute property of
                 * the current track to the opposite of the solo property:
                 * - if the track is soloed, we unmute it
                 * - if the track is unsoloed, we mute it
                 */
                if (isOtherTrackSoloed) {
                    senderTrack.Mute = !ve.Value;
                }
                /* if this is the only soloed track, we mute all other tracks
                 * if this track just got unsoloed, we unmute all other tracks
                 */
                else {
                    foreach (AudioTrack vaudioTrack in trackListBox.Items) {
                        if (vaudioTrack != senderTrack && !vaudioTrack.Solo) {
                            vaudioTrack.Mute = ve.Value;
                        }
                    }
                }
            });

        // when the AudioTrack.Volume property changes, just set it accordingly on the audio stream
        audioTrack.VolumeChanged += new EventHandler<ValueEventArgs<float>>(
            delegate(object vsender, ValueEventArgs<float> ve) {
                volumeControl.Volume = ve.Value;
            });

        // FIX: pass the debug controller like every other DebugStream in this
        // pipeline; the original omitted it here, leaving this wrapper
        // unregistered with the controller.
        mixer.Add(new DebugStream(volumeControl, debugStreamController));
    }

    // Master volume -> metering (reports levels via event) -> clipping guard
    VolumeControlStream volumeControlStream = new VolumeControlStream(new DebugStream(mixer, debugStreamController)) {
        Volume = (float)volumeSlider.Value
    };
    VolumeMeteringStream volumeMeteringStream = new VolumeMeteringStream(new DebugStream(volumeControlStream, debugStreamController), 5000);
    volumeMeteringStream.StreamVolume += new EventHandler<StreamVolumeEventArgs>(meteringStream_StreamVolume);
    VolumeClipStream volumeClipStream = new VolumeClipStream(new DebugStream(volumeMeteringStream, debugStreamController));

    playbackStream = volumeClipStream;

    wavePlayer = new WaveOut();
    wavePlayer.DesiredLatency = 250;
    wavePlayer.Init(new NAudioSinkStream(new DebugStream(playbackStream, debugStreamController)));

    // master volume setting
    volumeSlider.ValueChanged += new RoutedPropertyChangedEventHandler<double>(
        delegate(object vsender, RoutedPropertyChangedEventArgs<double> ve) {
            volumeControlStream.Volume = (float)ve.NewValue;
        });

    lblTotalPlaybackTime.Content = TimeUtil.BytesToTimeSpan(playbackStream.Length, playbackStream.Properties);
    playbackSeeker.Maximum = TimeUtil.BytesToTimeSpan(playbackStream.Length, playbackStream.Properties).TotalSeconds;

    wavePlayer.Play();
}
/// <summary>
/// Runs continuous frequency activation (CFA) analysis over the audio track,
/// classifies each analysis window as music / no music against the threshold,
/// optionally smooths the labels, prints the music ratio, and optionally
/// writes a per-window log file. Always returns 0.
/// </summary>
public float Run() {
    // Downmix to mono and resample to 11 kHz for the CFA quantifier
    IAudioStream audioStream = new ResamplingStream(
        new MonoStream(AudioStreamFactory.FromFileInfoIeee32(audioTrack.FileInfo)),
        ResamplingQuality.Medium, 11000);
    ContinuousFrequencyActivationQuantifier cfaq = new ContinuousFrequencyActivationQuantifier(audioStream);
    float[] cfaValue = new float[1];                   // single-frame output buffer for ReadFrame
    float[] cfaValues = new float[cfaq.WindowCount];   // raw CFA value per analysis window
    Label[] cfaLabels = new Label[cfaq.WindowCount];   // per-window label; entries below threshold keep the array default (presumably NO_MUSIC — TODO confirm enum default is NO_MUSIC)
    int count = 0;
    int musicCount = 0;

    // Classify each window: a CFA value above the threshold counts as music
    while (cfaq.HasNext()) {
        cfaq.ReadFrame(cfaValue);
        cfaValues[count] = cfaValue[0];
        if (cfaValue[0] > threshold) {
            musicCount++;
            cfaLabels[count] = Label.MUSIC;
        }
        // NOTE: count is incremented as a side effect inside this progress output
        Console.WriteLine("cfa {0,3}% {3} {1,5:0.00} {2}",
            (int)(Math.Round((float)count++ / cfaq.WindowCount * 100)),
            cfaValue[0],
            cfaValue[0] > threshold ? "MUSIC" : "",
            TimeUtil.BytesToTimeSpan(audioStream.Position, audioStream.Properties));
    }
    audioStream.Close();

    if (smoothing) {
        // 3.3 Smoothing
        /* majority filtering with sliding window ~5 secs
         * 1 frame = ~2,4 secs, at least 3 frames are needed for majority filtering -> 3 * ~2,4 secs = ~7,2 secs */
        // filter out single NO_MUSIC frames
        for (int i = 2; i < cfaLabels.Length; i++) {
            if (cfaLabels[i - 2] == Label.MUSIC && cfaLabels[i - 1] == Label.NO_MUSIC && cfaLabels[i] == Label.MUSIC) {
                cfaLabels[i - 1] = Label.MUSIC;
            }
        }
        // filter out single MUSIC frames
        for (int i = 2; i < cfaLabels.Length; i++) {
            if (cfaLabels[i - 2] == Label.NO_MUSIC && cfaLabels[i - 1] == Label.MUSIC && cfaLabels[i] == Label.NO_MUSIC) {
                cfaLabels[i - 1] = Label.NO_MUSIC;
            }
        }
        // swap ~5 secs NO_MUSIC segments to MUSIC
        // (the passes run in sequence over the same array, so later passes see
        // the results of earlier ones — the order is significant)
        for (int i = 3; i < cfaLabels.Length; i++) {
            if (cfaLabels[i - 3] == Label.MUSIC && cfaLabels[i - 2] == Label.NO_MUSIC && cfaLabels[i - 1] == Label.NO_MUSIC && cfaLabels[i] == Label.MUSIC) {
                cfaLabels[i - 1] = Label.MUSIC;
                cfaLabels[i - 2] = Label.MUSIC;
            }
        }
        // swap ~5 secs NMUSIC segments to NO_MUSIC
        for (int i = 3; i < cfaLabels.Length; i++) {
            if (cfaLabels[i - 3] == Label.NO_MUSIC && cfaLabels[i - 2] == Label.MUSIC && cfaLabels[i - 1] == Label.MUSIC && cfaLabels[i] == Label.NO_MUSIC) {
                cfaLabels[i - 1] = Label.NO_MUSIC;
                cfaLabels[i - 2] = Label.NO_MUSIC;
            }
        }
    }

    float musicRatio = (float)musicCount / count;
    float musicRatioSmoothed = -1f; // stays -1 when smoothing is disabled
    Console.WriteLine("'" + audioTrack.FileInfo.FullName + "' contains " + ((int)(Math.Round(musicRatio * 100))) + "% music");
    if (smoothing) {
        // Recount after the smoothing passes rewrote labels
        musicCount = cfaLabels.Count<Label>(l => l == Label.MUSIC);
        musicRatioSmoothed = (float)musicCount / count;
        Console.WriteLine("smoothed: " + ((int)(Math.Round(musicRatioSmoothed * 100))) + "% music");
    }

    if (writeLog) {
        // Log file layout: ratios line, threshold line, then one line per
        // window with the raw value, the unsmoothed label, and the final label
        FileInfo logFile = new FileInfo(audioTrack.FileInfo.FullName + ".music");
        StreamWriter writer = logFile.CreateText();
        writer.WriteLine(musicRatio + "; " + musicRatioSmoothed);
        writer.WriteLine(threshold);
        for (int i = 0; i < cfaValues.Length; i++) {
            writer.WriteLine("{0:0.00000}; {1}; \t{2}",
                cfaValues[i],
                cfaValues[i] > threshold ? Label.MUSIC : Label.NO_MUSIC,
                cfaLabels[i]);
        }
        writer.Flush();
        writer.Close();
    }
    return(0);
}
/// <summary>
/// Determines the track's Length from an audio stream. Uses the supplied
/// stream when given, otherwise opens a fresh one; in both cases the stream
/// is disposed once the length has been read.
/// </summary>
private void InitializeLength(IAudioStream audioStream = null) {
    var stream = audioStream ?? CreateAudioStream();
    using (stream) {
        Length = TimeUtil.BytesToTimeSpan(stream.Length, stream.Properties);
    }
}
/// <summary>
/// Slides an analysis window over the common time range of all tracks and
/// cross-correlates every stream pair at each interval position, accumulating
/// per-window and global statistics and reporting progress along the way.
/// </summary>
public void Execute() {
    Debug.WriteLine("window length: {0}s, interval length: {1}s, sample rate: {2}",
        windowLength.TotalSeconds, intervalLength.TotalSeconds, sampleRate);
    IProgressReporter reporter = progressMonitor.BeginTask("Analyzing alignment...", true);

    // Prepare one stream per track, converted for correlation at the common sample rate
    List<IAudioStream> streams = new List<IAudioStream>(audioTracks.Count);
    TimeSpan start = audioTracks.Start;
    TimeSpan end = audioTracks.End;
    foreach (AudioTrack audioTrack in audioTracks) {
        streams.Add(CrossCorrelation.PrepareStream(audioTrack.CreateAudioStream(), sampleRate));
    }

    // Byte offset of each stream relative to the global start time
    // (streams[0].Properties is used throughout — assumes all prepared streams
    // share the same format; TODO confirm PrepareStream guarantees this)
    long[] streamOffsets = new long[audioTracks.Count];
    for (int i = 0; i < audioTracks.Count; i++) {
        streamOffsets[i] = TimeUtil.TimeSpanToBytes(audioTracks[i].Offset - start, streams[0].Properties);
    }

    int windowLengthInBytes = (int)TimeUtil.TimeSpanToBytes(windowLength, streams[0].Properties);
    int windowLengthInSamples = windowLengthInBytes / streams[0].Properties.SampleBlockByteSize;
    long intervalLengthInBytes = TimeUtil.TimeSpanToBytes(intervalLength, streams[0].Properties);
    long analysisIntervalLength = TimeUtil.TimeSpanToBytes(end - start, streams[0].Properties);

    OnStarted();

    // Window buffers for the two streams of each compared pair
    byte[] x = new byte[windowLengthInBytes];
    byte[] y = new byte[windowLengthInBytes];
    long positionX;
    long positionY;

    // Global accumulators over all analysis windows
    double sumNegative = 0;
    double sumPositive = 0;
    int countNegative = 0;
    int countPositive = 0;
    double min = 0;
    double max = 0;

    for (long position = 0; position < analysisIntervalLength; position += intervalLengthInBytes) {
        // Per-window accumulators, folded into the global totals below
        double windowSumNegative = 0;
        double windowSumPositive = 0;
        int windowCountNegative = 0;
        int windowCountPositive = 0;
        double windowMin = 0;
        double windowMax = 0;

        Debug.WriteLine("Analyzing {0} @ {1} / {2}", intervalLengthInBytes, position, analysisIntervalLength);

        // at each position in the analysis interval, compare each stream with each other
        for (int i = 0; i < streams.Count; i++) {
            positionX = position - streamOffsets[i];
            // Only compare where the window position actually falls inside stream i
            if (positionX >= 0 && positionX < streams[i].Length) {
                streams[i].Position = positionX;
                StreamUtil.ForceRead(streams[i], x, 0, windowLengthInBytes);
                // j starts at i+1: each unordered pair is analyzed exactly once
                for (int j = i + 1; j < streams.Count; j++) {
                    positionY = position - streamOffsets[j];
                    if (positionY >= 0 && positionY < streams[j].Length) {
                        streams[j].Position = positionY;
                        StreamUtil.ForceRead(streams[j], y, 0, windowLengthInBytes);
                        double val = analyzeSection(x, y);
                        // Positive and negative correlation values are tracked separately
                        if (val > 0) {
                            windowSumPositive += val;
                            windowCountPositive++;
                        } else {
                            windowSumNegative += val;
                            windowCountNegative++;
                        }
                        if (windowMin > val) {
                            windowMin = val;
                        }
                        if (windowMax < val) {
                            windowMax = val;
                        }
                        Debug.WriteLine("{0,2}->{1,2}: {2}", i, j, val);
                    }
                }
            }
        }

        sumPositive += windowSumPositive;
        countPositive += windowCountPositive;
        sumNegative += windowSumNegative;
        countNegative += windowCountNegative;
        if (min > windowMin) {
            min = windowMin;
        }
        if (max < windowMax) {
            max = windowMax;
        }

        reporter.ReportProgress((double)position / analysisIntervalLength * 100);
        OnWindowAnalyzed(start + TimeUtil.BytesToTimeSpan(position, streams[0].Properties),
            windowCountPositive, windowCountNegative, windowMin, windowMax,
            windowSumPositive, windowSumNegative);
    }

    reporter.Finish();
    Debug.WriteLine("Finished. sum: {0}, sum+: {1}, sum-: {2}, sumAbs: {3}, avg: {4}, avg+: {5}, avg-: {6}, avgAbs: {7}, min: {8}, max: {9}, points: {10}",
        sumPositive + sumNegative, sumPositive, sumNegative,
        sumPositive + (sumNegative * -1),
        (sumPositive + sumNegative) / (countPositive + countNegative),
        sumPositive / countPositive, sumNegative / countNegative,
        (sumPositive + (sumNegative * -1)) / (countPositive + countNegative),
        min, max, countPositive + countNegative);

    // Score: mean of absolute correlation values over all analyzed pairs
    // (NOTE: yields NaN when no pair was analyzed — counts are both zero)
    double score = (sumPositive + (sumNegative * -1)) / (countPositive + countNegative);
    Debug.WriteLine("Score: {0} => {1}%", score, Math.Round(score * 100));

    OnFinished(countPositive, countNegative, min, max, sumPositive, sumNegative);

    streams.ForEach(s => s.Close());
}