public void TestAnnotatedSonogramWithPlots()
{
    // Make a decibel spectrogram from the test recording.
    var actualDecibelSpectrogram = new SpectrogramStandard(this.sonoConfig, this.recording.WavReader);

    // Normalisation bounds shared by all three intensity plots.
    double minDecibels = -100.0;
    double maxDecibels = -50;

    //double decibelThreshold = 12.5 dB above -100 dB;
    var normThreshold = 0.25;

    // Local helper: build a normalised band-intensity plot for one frequency band.
    // (The same three-step construction was previously repeated verbatim for each band.)
    Plot MakeBandIntensityPlot(string title, int minHz, int maxHz)
    {
        var decibelArray = SNR.CalculateFreqBandAvIntensity(actualDecibelSpectrogram.Data, minHz, maxHz, actualDecibelSpectrogram.NyquistFrequency);
        var normalisedIntensityArray = DataTools.NormaliseInZeroOne(decibelArray, minDecibels, maxDecibels);
        return new Plot(title, normalisedIntensityArray, normThreshold);
    }

    // Three adjacent 1 kHz bands, one plot each.
    var plots = new List<Plot>
    {
        MakeBandIntensityPlot("Intensity 2-3 kHz", 2000, 3000),
        MakeBandIntensityPlot("Intensity 3-4 kHz", 3000, 4000),
        MakeBandIntensityPlot("Intensity 4-5 kHz", 4000, 5000),
    };

    // Create three events, one per band, starting at 10, 25 and 40 seconds.
    var startOffset = TimeSpan.Zero;
    var events = new List<AcousticEvent>
    {
        new AcousticEvent(startOffset, 10.0, 10.0, 2000, 3000),
        new AcousticEvent(startOffset, 25.0, 10.0, 3000, 4000),
        new AcousticEvent(startOffset, 40.0, 10.0, 4000, 5000),
    };

    var image = SpectrogramTools.GetSonogramPlusCharts(actualDecibelSpectrogram, events, plots, null);

    // Save the image for visual confirmation, then check its expected dimensions.
    image.Save(Path.Combine(this.outputDirectory.FullName, this.recording.BaseName + ".png"));

    Assert.AreEqual(1621, image.Width);
    Assert.AreEqual(647, image.Height);
}
public void TestGetEventsAroundMaxima()
{
    //string abbreviatedSpeciesName = "Pteropus";
    string speciesName = "Pteropus species";
    int minHz = 800;
    int maxHz = 8000;
    var minTimeSpan = TimeSpan.FromSeconds(0.15);
    var maxTimeSpan = TimeSpan.FromSeconds(0.8);
    double decibelThreshold = 9.0;
    TimeSpan segmentStartOffset = TimeSpan.Zero;

    // Average decibel intensity in the target band, one value per frame.
    var bandDecibels = SNR.CalculateFreqBandAvIntensity(this.sonogram.Data, minHz, maxHz, this.sonogram.NyquistFrequency);

    // Prepare a plot of the band intensity, normalised so the dB threshold maps to 1/3.
    double intensityNormalisationMax = 3 * decibelThreshold;
    var eventThreshold = decibelThreshold / intensityNormalisationMax;
    var normalisedBandDecibels = DataTools.NormaliseInZeroOne(bandDecibels, 0, intensityNormalisationMax);
    var territoryPlot = new Plot(speciesName + " Territory", normalisedBandDecibels, eventThreshold);
    var plots = new List<Plot> { territoryPlot };

    //iii: CONVERT decibel SCORES TO ACOUSTIC EVENTS
    var acousticEvents = AcousticEvent.GetEventsAroundMaxima(
        bandDecibels,
        segmentStartOffset,
        minHz,
        maxHz,
        decibelThreshold,
        minTimeSpan,
        maxTimeSpan,
        this.sonogram.FramesPerSecond,
        this.sonogram.FBinWidth);

    // Ten events expected; spot-check the bounding rectangles of five of them.
    Assert.AreEqual(10, acousticEvents.Count);
    Assert.AreEqual(new Rectangle(19, 1751, 168, 27), acousticEvents[0].GetEventAsRectangle());
    Assert.AreEqual(new Rectangle(19, 1840, 168, 10), acousticEvents[2].GetEventAsRectangle());
    Assert.AreEqual(new Rectangle(19, 1961, 168, 31), acousticEvents[5].GetEventAsRectangle());
    Assert.AreEqual(new Rectangle(19, 2294, 168, 17), acousticEvents[7].GetEventAsRectangle());
    Assert.AreEqual(new Rectangle(19, 2504, 168, 7), acousticEvents[9].GetEventAsRectangle());

    //Assert.AreEqual(28.Seconds() + segmentOffset, stats.ResultStartSeconds.Seconds());
}
/// <summary>
/// Detects wing-beat events in one audio segment using an oscillation detector.
/// </summary>
/// <param name="audioRecording">the recording.</param>
/// <param name="configuration">the config file.</param>
/// <param name="profileName">name of call/event type to be found.</param>
/// <param name="segmentStartOffset">where one segment is located in the total recording.</param>
/// <returns>a list of events.</returns>
private static RecognizerResults WingBeats(AudioRecording audioRecording, Config configuration, string profileName, TimeSpan segmentStartOffset)
{
    ConfigFile.TryGetProfile(configuration, profileName, out var profile);

    // Common properties shared by all profiles.
    string speciesName = configuration[AnalysisKeys.SpeciesName] ?? "Pteropus species";
    string abbreviatedSpeciesName = configuration[AnalysisKeys.AbbreviatedSpeciesName] ?? "Pteropus";

    // The default values below worked well on a ten minute recording containing 14-16 calls.
    // Note: lowering the dB threshold requires increasing maxDurationSeconds.
    int minHz = profile.GetIntOrNull(AnalysisKeys.MinHz) ?? 100;
    int maxHz = profile.GetIntOrNull(AnalysisKeys.MaxHz) ?? 3000;
    double minDurationSeconds = profile.GetDoubleOrNull(AnalysisKeys.MinDuration) ?? 1.0;
    double maxDurationSeconds = profile.GetDoubleOrNull(AnalysisKeys.MaxDuration) ?? 10.0;
    double decibelThreshold = profile.GetDoubleOrNull("DecibelThreshold") ?? 6.0;
    double dctDuration = profile.GetDoubleOrNull("DctDuration") ?? 1.0;
    double dctThreshold = profile.GetDoubleOrNull("DctThreshold") ?? 0.5;
    double minOscFreq = profile.GetDoubleOrNull("MinOscilFreq") ?? 4.0;
    double maxOscFreq = profile.GetDoubleOrNull("MaxOscilFreq") ?? 6.0;
    double eventThreshold = profile.GetDoubleOrNull("EventThreshold") ?? 0.3;

    // This recognizer works on a spectrogram of the segment, not on raw samples.
    var sonogram = GetSonogram(configuration, audioRecording);
    var bandDecibels = SNR.CalculateFreqBandAvIntensity(sonogram.Data, minHz, maxHz, sonogram.NyquistFrequency);

    // Look for wing beats using the oscillation detector.
    // NOTE(review): a newer Oscillations2019.Execute overload (which also takes
    // decibelThreshold and a score-smoothing window) was previously trialled here
    // and remains disabled; see version history for the exact call.
    Oscillations2012.Execute(
        (SpectrogramStandard)sonogram,
        minHz,
        maxHz,
        dctDuration,
        (int)Math.Floor(minOscFreq),
        (int)Math.Floor(maxOscFreq),
        dctThreshold,
        eventThreshold,
        minDurationSeconds,
        maxDurationSeconds,
        out var scores,
        out var acousticEvents,
        out var hits,
        segmentStartOffset);

    // Prepare plots: normalised band intensity plus the oscillation score track.
    // Normalise so that decibelThreshold maps to 1/3 of the plot height.
    double intensityNormalisationMax = 3 * decibelThreshold;
    var normThreshold = decibelThreshold / intensityNormalisationMax;
    var normalisedBandDecibels = DataTools.NormaliseInZeroOne(bandDecibels, 0, intensityNormalisationMax);
    var plot1 = new Plot(speciesName + " Wing-beat band", normalisedBandDecibels, normThreshold);
    var plot2 = new Plot(speciesName + " Wing-beat Osc Score", scores, eventThreshold);
    var plots = new List<Plot> { plot1, plot2 };

    // Add information about the recording and sonogram properties to each event.
    acousticEvents.ForEach(ae =>
    {
        ae.FileName = audioRecording.BaseName;
        ae.SpeciesName = speciesName;
        ae.Name = abbreviatedSpeciesName + profileName;
        ae.Profile = profileName;
        ae.SegmentDurationSeconds = audioRecording.Duration.TotalSeconds;
        ae.SegmentStartSeconds = segmentStartOffset.TotalSeconds;
        ae.SetTimeAndFreqScales(sonogram.FrameStep, sonogram.FrameDuration, sonogram.FBinWidth);

        // NOTE(review): a commented-out debugging snippet that extracted and saved
        // a spectral profile image per wing-beat event previously lived here;
        // recover it from version history if needed.
    });

    return new RecognizerResults()
    {
        Events = acousticEvents,
        Hits = null,
        ScoreTrack = null,
        Plots = plots,
        Sonogram = sonogram,
    };
}
/// <summary>
/// Detects territorial-call events in one audio segment by finding local maxima
/// in the band-averaged decibel intensity.
/// </summary>
/// <param name="audioRecording">the recording.</param>
/// <param name="configuration">the config file.</param>
/// <param name="profileName">name of the call/event type.</param>
/// <param name="segmentStartOffset">where one segment is located in the total recording.</param>
/// <returns>a list of events.</returns>
private static RecognizerResults TerritorialCall(AudioRecording audioRecording, Config configuration, string profileName, TimeSpan segmentStartOffset)
{
    ConfigFile.TryGetProfile(configuration, profileName, out var profile);

    // Common properties shared by all profiles.
    string speciesName = configuration[AnalysisKeys.SpeciesName] ?? "Pteropus species";
    string abbreviatedSpeciesName = configuration[AnalysisKeys.AbbreviatedSpeciesName] ?? "Pteropus";

    // The default values below worked well on a ten minute recording containing 14-16 calls.
    // Note: lowering the dB threshold requires increasing maxDurationSeconds.
    int minHz = profile.GetIntOrNull(AnalysisKeys.MinHz) ?? 800;
    int maxHz = profile.GetIntOrNull(AnalysisKeys.MaxHz) ?? 8000;
    double minDurationSeconds = profile.GetDoubleOrNull(AnalysisKeys.MinDuration) ?? 0.15;
    double maxDurationSeconds = profile.GetDoubleOrNull(AnalysisKeys.MaxDuration) ?? 0.5;
    double decibelThreshold = profile.GetDoubleOrNull(AnalysisKeys.DecibelThreshold) ?? 9.0;
    var minTimeSpan = TimeSpan.FromSeconds(minDurationSeconds);
    var maxTimeSpan = TimeSpan.FromSeconds(maxDurationSeconds);

    // Convert the segment to a spectrogram and average the decibel intensity
    // over the target frequency band, one value per frame.
    var sonogram = GetSonogram(configuration, audioRecording);
    var bandDecibels = SNR.CalculateFreqBandAvIntensity(sonogram.Data, minHz, maxHz, sonogram.NyquistFrequency);

    // Prepare a plot of the band intensity, normalised so the dB threshold maps to 1/3.
    double intensityNormalisationMax = 3 * decibelThreshold;
    var eventThreshold = decibelThreshold / intensityNormalisationMax;
    var normalisedBandDecibels = DataTools.NormaliseInZeroOne(bandDecibels, 0, intensityNormalisationMax);
    var territoryPlot = new Plot(speciesName + " Territory", normalisedBandDecibels, eventThreshold);
    var plots = new List<Plot> { territoryPlot };

    //iii: CONVERT decibel SCORES TO ACOUSTIC EVENTS
    var acousticEvents = AcousticEvent.GetEventsAroundMaxima(
        bandDecibels,
        segmentStartOffset,
        minHz,
        maxHz,
        decibelThreshold,
        minTimeSpan,
        maxTimeSpan,
        sonogram.FramesPerSecond,
        sonogram.FBinWidth);

    //iV add additional info to the acoustic events
    acousticEvents.ForEach(ae =>
    {
        ae.FileName = audioRecording.BaseName;
        ae.SpeciesName = speciesName;
        ae.Name = abbreviatedSpeciesName + profileName;
        ae.Profile = profileName;
        ae.SegmentDurationSeconds = audioRecording.Duration.TotalSeconds;
        ae.SegmentStartSeconds = segmentStartOffset.TotalSeconds;
    });

    // Discard events whose spectral profile does not match the call type.
    acousticEvents = FilterEventsForSpectralProfile(acousticEvents, sonogram);

    return new RecognizerResults()
    {
        Events = acousticEvents,
        Hits = null,
        ScoreTrack = null,
        Plots = plots,
        Sonogram = sonogram,
    };
}
/// <summary>
/// Runs the oscillation detector over one frequency band of a spectrogram and
/// converts the resulting DCT scores into oscillation events.
/// NOTE: this method smooths <paramref name="sonogram"/>.Data in place as a side effect.
/// </summary>
/// <param name="sonogram">the spectrogram to analyse; its Data matrix is row-smoothed in place.</param>
/// <param name="minHz">bottom of the target frequency band.</param>
/// <param name="maxHz">top of the target frequency band.</param>
/// <param name="decibelThreshold">dB threshold passed to the oscillation detector.</param>
/// <param name="dctDuration">duration in seconds of the DCT window.</param>
/// <param name="minOscFreq">minimum oscillation rate of interest.</param>
/// <param name="maxOscFreq">maximum oscillation rate of interest.</param>
/// <param name="dctThreshold">threshold on the DCT coefficient amplitude.</param>
/// <param name="scoreThreshold">score threshold for converting scores to events.</param>
/// <param name="minDuration">minimum event duration in seconds.</param>
/// <param name="maxDuration">maximum event duration in seconds.</param>
/// <param name="smoothingWindow">moving-average window applied to the score array.</param>
/// <param name="dctScores">out: smoothed oscillation score per frame.</param>
/// <param name="events">out: the detected oscillation events.</param>
/// <param name="segmentStartOffset">where this segment is located in the total recording.</param>
public static void Execute(
    SpectrogramStandard sonogram,
    int minHz,
    int maxHz,
    double decibelThreshold,
    double dctDuration,
    int minOscFreq,
    int maxOscFreq,
    double dctThreshold,
    double scoreThreshold,
    double minDuration,
    double maxDuration,
    int smoothingWindow,
    out double[] dctScores,
    out List<OscillationEvent> events,
    TimeSpan segmentStartOffset)
{
    // Smooth the frames to make oscillations more regular.
    sonogram.Data = MatrixTools.SmoothRows(sonogram.Data, 5);

    // Extract decibel values, frame-averaged over the required frequency band.
    var bandDecibels = SNR.CalculateFreqBandAvIntensity(sonogram.Data, minHz, maxHz, sonogram.NyquistFrequency);

    // NOTE(review): a disabled step previously handled the case where the first
    // value is negative dB (i.e. noise removal not yet done) by applying
    // modal-noise removal here; recover it from version history if needed.

    // Detect oscillations in the band-intensity array.
    DetectOscillations(
        bandDecibels,
        sonogram.FramesPerSecond,
        decibelThreshold,
        dctDuration,
        minOscFreq,
        maxOscFreq,
        dctThreshold,
        out dctScores,
        out var oscFreq);

    // Smooth the scores - window=11 was the historical default; now user-settable.
    dctScores = DataTools.filterMovingAverage(dctScores, smoothingWindow);

    events = Oscillations2012.ConvertOscillationScores2Events(
        dctScores,
        oscFreq,
        minHz,
        maxHz,
        sonogram.FramesPerSecond,
        sonogram.FBinWidth,
        scoreThreshold,
        minDuration,
        maxDuration,
        sonogram.Configuration.SourceFName,
        segmentStartOffset);
}