/// <summary>
        /// Saves a debug spectrogram image (sonogram plus event/plot charts) for the given recognizer results.
        /// This method can be modified if you want to do something non-standard with the output spectrogram.
        /// </summary>
        /// <param name="results">Recognizer results supplying the sonogram, events and plots to render.</param>
        /// <param name="genericConfig">Recognizer configuration (not used by this implementation).</param>
        /// <param name="outputDirectory">Directory into which the image file is written.</param>
        /// <param name="baseName">Base file name; ".profile.png" is appended to it.</param>
        internal static void SaveDebugSpectrogram(RecognizerResults results, Config genericConfig, DirectoryInfo outputDirectory, string baseName)
        {
            var image = SpectrogramTools.GetSonogramPlusCharts(results.Sonogram, results.Events, results.Plots, null);

            image.Save(Path.Combine(outputDirectory.FullName, baseName + ".profile.png"));
        }
        /// <summary>
        /// Do your analysis. This method is called once per segment (typically one-minute segments).
        /// </summary>
        public override RecognizerResults Recognize(AudioRecording audioRecording, Config configuration, TimeSpan segmentStartOffset, Lazy<IndexCalculateResult[]> getSpectralIndexes, DirectoryInfo outputDirectory, int? imageWidth)
        {
            // NOTE: evaluating getSpectralIndexes.Value would calculate the high-resolution indices.
            // They are not much help for frog recognition but could be useful for HiRes spectrogram display,
            // so the Lazy<> is deliberately left untouched here.

            // All of the real work is delegated to Gruntwork2.
            return this.Gruntwork2(audioRecording, configuration, outputDirectory, segmentStartOffset);
        }
        // ===== Beispiel #3 (snippet separator) =====
        /*
         * /// <summary>
         * /// Summarize your results. This method is invoked exactly once per original file.
         * /// </summary>
         * public override void SummariseResults(
         *  AnalysisSettings settings,
         *  FileSegment inputFileSegment,
         *  EventBase[] events,
         *  SummaryIndexBase[] indices,
         *  SpectralIndexBase[] spectralIndices,
         *  AnalysisResult2[] results)
         * {
         *  // No operation - do nothing. Feel free to add your own logic.
         *  base.SummariseResults(settings, inputFileSegment, events, indices, spectralIndices, results);
         * }
         */

        /// <summary>
        /// Saves a debug spectrogram image (sonogram plus event/plot charts) and returns the file path.
        /// This method can be modified if you want to do something non-standard with the output spectrogram.
        /// </summary>
        /// <param name="results">Recognizer results supplying the sonogram, events and plots to render.</param>
        /// <param name="genericConfig">Recognizer configuration (not used by this implementation).</param>
        /// <param name="outputDirectory">Directory into which the image file is written.</param>
        /// <param name="baseName">Base file name; ".profile.png" is appended to it.</param>
        /// <returns>The full path of the saved image file.</returns>
        public static string SaveDebugSpectrogram(RecognizerResults results, Config genericConfig, DirectoryInfo outputDirectory, string baseName)
        {
            var image3 = SpectrogramTools.GetSonogramPlusCharts(results.Sonogram, results.NewEvents, results.Plots, null);

            var path = Path.Combine(outputDirectory.FullName, baseName + ".profile.png");

            image3.Save(path);

            return path;
        }
        /*
         * /// <summary>
         * /// Summarize your results. This method is invoked exactly once per original file.
         * /// </summary>
         * public override void SummariseResults(
         *  AnalysisSettings settings,
         *  FileSegment inputFileSegment,
         *  EventBase[] events,
         *  SummaryIndexBase[] indices,
         *  SpectralIndexBase[] spectralIndices,
         *  AnalysisResult2[] results)
         * {
         *  // No operation - do nothing. Feel free to add your own logic.
         *  base.SummariseResults(settings, inputFileSegment, events, indices, spectralIndices, results);
         * }
         */

        /// <summary>
        /// This method is called once per segment (typically one-minute segments).
        /// It runs the "Territorial" and "Wingbeats" profiles (when present in the configuration)
        /// and combines their events and plots into a single result set.
        /// </summary>
        /// <param name="audioRecording">one minute of audio recording.</param>
        /// <param name="genericConfig">config file that contains parameters used by all profiles.</param>
        /// <param name="segmentStartOffset">when recording starts.</param>
        /// <param name="getSpectralIndexes">lazily-calculated spectral indices (not used here).</param>
        /// <param name="outputDirectory">where the recognizer results can be found.</param>
        /// <param name="imageWidth">width of the output image (not used here).</param>
        /// <returns>recognizer results.</returns>
        public override RecognizerResults Recognize(AudioRecording audioRecording, Config genericConfig, TimeSpan segmentStartOffset, Lazy<IndexCalculateResult[]> getSpectralIndexes, DirectoryInfo outputDirectory, int? imageWidth)
        {
            if (ConfigFile.HasProfiles(genericConfig))
            {
                string[] profileNames = ConfigFile.GetProfileNames(genericConfig);

                // string.Join avoids the O(n^2) repeated string concatenation of the original loop.
                PteropusLog.Debug($"Found {profileNames.Length} config profile(s): " + string.Join(", ", profileNames));
            }
            else
            {
                PteropusLog.Warn("No configuration profiles found. Two profiles expected for the Flying Fox recogniser.");
            }

            var territorialResults = new RecognizerResults();

            if (ConfigFile.TryGetProfile(genericConfig, "Territorial", out _))
            {
                territorialResults = TerritorialCall(audioRecording, genericConfig, "Territorial", segmentStartOffset);
                PteropusLog.Debug("Territory event count = " + territorialResults.Events.Count);
            }
            else
            {
                PteropusLog.Warn("Could not access Territorial configuration parameters");
            }

            var wingbeatResults = new RecognizerResults();

            if (ConfigFile.TryGetProfile(genericConfig, "Wingbeats", out _))
            {
                wingbeatResults = WingBeats(audioRecording, genericConfig, "Wingbeats", segmentStartOffset);
                PteropusLog.Debug("Wingbeat event count = " + wingbeatResults.Events.Count);
            }
            else
            {
                PteropusLog.Warn("Could not access Wingbeats configuration parameters");
            }

            // Combine the results, i.e. add wing-beat events to the list of territorial call events.
            // NOTE: territorialResults and wingbeatResults are never null (initialised above).
            territorialResults.Events.AddRange(wingbeatResults.Events);
            territorialResults.Plots.AddRange(wingbeatResults.Plots);

            // UNCOMMENT following line if you want a special debug spectrogram, i.e. with special plots.
            //  NOTE: Standard spectrograms are produced by setting SaveSonogramImages: "True" or "WhenEventsDetected" in <Towsey.PteropusSpecies.yml> config file.
            //SaveDebugSpectrogram(territorialResults, genericConfig, outputDirectory, audioRecording.BaseName);

            return territorialResults;
        }
        // ===== Beispiel #5 (snippet separator) =====
        /// <summary>
        /// This method is called once per segment (typically one-minute segments).
        /// Delegates detection to the generic recognizer, then post-processes the chirp events
        /// (frequency-profile scoring and combining of overlapping/proximal events).
        /// </summary>
        /// <param name="audioRecording">one minute of audio recording.</param>
        /// <param name="config">config file that contains parameters used by all profiles.</param>
        /// <param name="segmentStartOffset">when recording starts.</param>
        /// <param name="getSpectralIndexes">lazily-calculated spectral indices (passed through to the generic recognizer).</param>
        /// <param name="outputDirectory">where the recognizer results can be found.</param>
        /// <param name="imageWidth">width of the output image.</param>
        /// <returns>recognizer results.</returns>
        public override RecognizerResults Recognize(
            AudioRecording audioRecording,
            Config config,
            TimeSpan segmentStartOffset,
            Lazy<IndexCalculateResult[]> getSpectralIndexes,
            DirectoryInfo outputDirectory,
            int? imageWidth)
        {
            // Class NinoxBoobookConfig is defined at the bottom of this file.
            var genericConfig = (NinoxBoobookConfig)config;
            var recognizer = new GenericRecognizer();

            RecognizerResults combinedResults = recognizer.Recognize(
                audioRecording,
                genericConfig,
                segmentStartOffset,
                getSpectralIndexes,
                outputDirectory,
                imageWidth);

            // DO POST-PROCESSING of EVENTS

            // Filter out the chirp events for possible combining; the remainder are not post-processed,
            // so discard them instead of binding an unused variable.
            var (chirpEvents, _) = combinedResults.NewEvents.FilterForEventType<ChirpEvent, EventCommon>();

            // Uncomment the next line when you want to obtain the event frequency profiles.
            // WriteFrequencyProfiles(chirpEvents);

            // Calculate a frequency profile score for each chirp event.
            foreach (var ev in chirpEvents)
            {
                SetFrequencyProfileScore((ChirpEvent)ev);
            }

            // Combine overlapping events. If the dB threshold is set low, may get lots of little events.
            var newEvents = CompositeEvent.CombineOverlappingEvents(chirpEvents.Cast<EventCommon>().ToList());

            if (genericConfig.CombinePossibleSyllableSequence)
            {
                // Convert events to spectral events for possible combining.
                var (spectralEvents, _) = combinedResults.NewEvents.FilterForEventType<SpectralEvent, EventCommon>();

                var startDiff = genericConfig.SyllableStartDifference;
                var hertzDiff = genericConfig.SyllableHertzGap;
                newEvents = CompositeEvent.CombineSimilarProximalEvents(spectralEvents, TimeSpan.FromSeconds(startDiff), (int)hertzDiff);
            }

            combinedResults.NewEvents = newEvents;

            // UNCOMMENT following line if you want a special debug spectrogram, i.e. with special plots.
            //  NOTE: Standard spectrograms are produced by setting SaveSonogramImages: "True" or "WhenEventsDetected" in the config file.
            //GenericRecognizer.SaveDebugSpectrogram(combinedResults, genericConfig, outputDirectory, audioRecording.BaseName);
            return combinedResults;
        }
        // ===== Beispiel #6 (snippet separator) =====
        /// <summary>
        /// Runs the canetoad analysis over one audio segment, then writes the events file,
        /// summary indices and (optionally) an annotated image according to the analysis save settings.
        /// </summary>
        /// <param name="analysisSettings">global analysis settings, including data/image save behaviour.</param>
        /// <param name="segmentSettings">per-segment settings: input audio file and output file locations.</param>
        /// <returns>the populated analysis results for this segment.</returns>
        public override AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
        {
            FileInfo audioFile = segmentSettings.SegmentAudioFile;

            // execute actual analysis
            dynamic configuration = analysisSettings.Configuration;
            var recording = new AudioRecording(audioFile.FullName);

            Log.Debug("Canetoad sample rate:" + recording.SampleRate);

            RecognizerResults results = Analysis(recording, configuration, segmentSettings.SegmentStartOffset, segmentSettings.SegmentOutputDirectory);

            var analysisResults = new AnalysisResult2(analysisSettings, segmentSettings, recording.Duration);

            BaseSonogram sonogram = results.Sonogram;
            double[,] hits = results.Hits;
            Plot scores = results.Plots.First();
            List<AcousticEvent> predictedEvents = results.Events;

            analysisResults.Events = predictedEvents.ToArray();

            // The same save flag guards both the events file and the summary indices, so write them
            // in a single block (the original checked AnalysisDataSaveBehavior twice in a row).
            // Statement order is preserved: events file first, then summary indices.
            if (analysisSettings.AnalysisDataSaveBehavior)
            {
                this.WriteEventsFile(segmentSettings.SegmentEventsFile, analysisResults.Events);
                analysisResults.EventsFile = segmentSettings.SegmentEventsFile;

                var unitTime = TimeSpan.FromMinutes(1.0);
                analysisResults.SummaryIndices = this.ConvertEventsToSummaryIndices(analysisResults.Events, unitTime, analysisResults.SegmentAudioDuration, 0);

                analysisResults.SummaryIndicesFile = segmentSettings.SegmentSummaryIndicesFile;
                this.WriteSummaryIndicesFile(segmentSettings.SegmentSummaryIndicesFile, analysisResults.SummaryIndices);
            }

            if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave(analysisResults.Events.Length))
            {
                string imagePath = segmentSettings.SegmentImageFile.FullName;
                const double EventThreshold = 0.1;
                Image image = DrawSonogram(sonogram, hits, scores, predictedEvents, EventThreshold);
                image.Save(imagePath, ImageFormat.Png);
                analysisResults.ImageFile = segmentSettings.SegmentImageFile;
            }

            return analysisResults;
        }
        /// <summary>
        /// Do your analysis. This method is called once per segment (typically one-minute segments).
        /// </summary>
        public override RecognizerResults Recognize(AudioRecording audioRecording, Config configuration, TimeSpan segmentStartOffset, Lazy<IndexCalculateResult[]> getSpectralIndexes, DirectoryInfo outputDirectory, int? imageWidth)
        {
            // All of the real work is delegated to Gruntwork.
            return this.Gruntwork(audioRecording, configuration, outputDirectory, segmentStartOffset);
        }
        // ===== Beispiel #8 (snippet separator) =====
        /// <summary>
        /// Runs every analysis profile declared in the configuration over the segment and
        /// accumulates their events and plots into a single result.
        /// Only the sonogram of the *last* profile processed is retained.
        /// </summary>
        /// <inheritdoc/>
        public override RecognizerResults Recognize(
            AudioRecording audioRecording,
            Config genericConfig,
            TimeSpan segmentStartOffset,
            Lazy<IndexCalculateResult[]> getSpectralIndexes,
            DirectoryInfo outputDirectory,
            int? imageWidth)
        {
            var configuration = (GenericRecognizerConfig)genericConfig;

            // BUG FIX: the original test `configuration.Profiles?.Count < 1` evaluates to false when
            // Profiles is null (lifted comparison), skipping the guard and throwing a
            // NullReferenceException at the Count access below. Check null explicitly.
            if (configuration.Profiles is null || configuration.Profiles.Count < 1)
            {
                throw new ConfigFileException(
                          "The generic recognizer needs at least one profile set. 0 were found.");
            }

            int count = configuration.Profiles.Count;
            var message = $"Found {count} analysis profile(s): " + configuration.Profiles.Keys.Join(", ");

            Log.Info(message);

            var allResults = new RecognizerResults()
            {
                Events = new List<AcousticEvent>(),
                NewEvents = new List<EventCommon>(),
                Hits = null,
                ScoreTrack = null,
                Plots = new List<Plot>(),
                Sonogram = null,
            };

            // Now process each of the profiles.
            foreach (var (profileName, profileConfig) in configuration.Profiles)
            {
                Log.Info("Processing profile: " + profileName);

                List<EventCommon> spectralEvents;
                var plots = new List<Plot>();
                SpectrogramStandard sonogram;

                Log.Debug($"Using the {profileName} algorithm... ");
                if (profileConfig is CommonParameters parameters)
                {
                    if (profileConfig is BlobParameters ||
                        profileConfig is OscillationParameters ||
                        profileConfig is OnebinTrackParameters ||
                        profileConfig is HarmonicParameters ||
                        profileConfig is ForwardTrackParameters ||
                        profileConfig is UpwardTrackParameters ||
                        profileConfig is OneframeTrackParameters)
                    {
                        sonogram = new SpectrogramStandard(ParametersToSonogramConfig(parameters), audioRecording.WavReader);

                        if (profileConfig is BlobParameters bp)
                        {
                            // Get the array of intensity values minus intensity in side/buffer bands,
                            // i.e. require silence in side-bands. Otherwise we might simply be getting
                            // part of a broader-band acoustic event.
                            var decibelArray = SNR.CalculateFreqBandAvIntensityMinusBufferIntensity(
                                sonogram.Data,
                                bp.MinHertz.Value,
                                bp.MaxHertz.Value,
                                bp.BottomHertzBuffer.Value,
                                bp.TopHertzBuffer.Value,
                                sonogram.NyquistFrequency);

                            // Prepare plot of the resultant blob decibel array.
                            var plot = PreparePlot(decibelArray, $"{profileName} (Blob:db Intensity)", bp.DecibelThreshold.Value);
                            plots.Add(plot);

                            // CONVERT blob decibel SCORES TO ACOUSTIC EVENTS.
                            // Note: This method does NOT do prior smoothing of the dB array.
                            var acEvents = AcousticEvent.GetEventsAroundMaxima(
                                decibelArray,
                                segmentStartOffset,
                                bp.MinHertz.Value,
                                bp.MaxHertz.Value,
                                bp.DecibelThreshold.Value,
                                TimeSpan.FromSeconds(bp.MinDuration.Value),
                                TimeSpan.FromSeconds(bp.MaxDuration.Value),
                                sonogram.FramesPerSecond,
                                sonogram.FBinWidth);
                            spectralEvents = acEvents.ConvertAcousticEventsToSpectralEvents();
                        }
                        else if (profileConfig is OnebinTrackParameters wp)
                        {
                            // Whistle (one-bin) tracks: intensity minus side/buffer bands.
                            double[] decibelArray;
                            (spectralEvents, decibelArray) = OnebinTrackAlgorithm.GetOnebinTracks(
                                sonogram,
                                wp,
                                segmentStartOffset);

                            var plot = PreparePlot(decibelArray, $"{profileName} (Whistle:dB Intensity)", wp.DecibelThreshold.Value);
                            plots.Add(plot);
                        }
                        else if (profileConfig is ForwardTrackParameters tp)
                        {
                            // Chirps (forward tracks).
                            double[] decibelArray;
                            (spectralEvents, decibelArray) = ForwardTrackAlgorithm.GetForwardTracks(
                                sonogram,
                                tp,
                                segmentStartOffset);

                            var plot = PreparePlot(decibelArray, $"{profileName} (Chirps:dB Intensity)", tp.DecibelThreshold.Value);
                            plots.Add(plot);
                        }
                        else if (profileConfig is OneframeTrackParameters cp)
                        {
                            // Clicks (one-frame tracks).
                            double[] decibelArray;
                            (spectralEvents, decibelArray) = OneframeTrackAlgorithm.GetOneFrameTracks(
                                sonogram,
                                cp,
                                segmentStartOffset);

                            var plot = PreparePlot(decibelArray, $"{profileName} (Clicks:dB Intensity)", cp.DecibelThreshold.Value);
                            plots.Add(plot);
                        }
                        else if (profileConfig is UpwardTrackParameters vtp)
                        {
                            // Vertical/upward tracks.
                            double[] decibelArray;
                            (spectralEvents, decibelArray) = UpwardTrackAlgorithm.GetUpwardTracks(
                                sonogram,
                                vtp,
                                segmentStartOffset);

                            var plot = PreparePlot(decibelArray, $"{profileName} (VerticalTrack:dB Intensity)", vtp.DecibelThreshold.Value);
                            plots.Add(plot);
                        }
                        else if (profileConfig is HarmonicParameters hp)
                        {
                            // Harmonic stacks detected via DCT.
                            double[] decibelMaxArray;
                            double[] harmonicIntensityScores;
                            (spectralEvents, decibelMaxArray, harmonicIntensityScores) = HarmonicParameters.GetComponentsWithHarmonics(
                                sonogram,
                                hp.MinHertz.Value,
                                hp.MaxHertz.Value,
                                sonogram.NyquistFrequency,
                                hp.DecibelThreshold.Value,
                                hp.DctThreshold.Value,
                                hp.MinDuration.Value,
                                hp.MaxDuration.Value,
                                hp.MinFormantGap.Value,
                                hp.MaxFormantGap.Value,
                                segmentStartOffset);

                            var plot = PreparePlot(harmonicIntensityScores, $"{profileName} (Harmonics:dct intensity)", hp.DctThreshold.Value);
                            plots.Add(plot);
                        }
                        else if (profileConfig is OscillationParameters op)
                        {
                            Oscillations2012.Execute(
                                sonogram,
                                op.MinHertz.Value,
                                op.MaxHertz.Value,
                                op.DctDuration,
                                op.MinOscillationFrequency,
                                op.MaxOscillationFrequency,
                                op.DctThreshold,
                                op.EventThreshold,
                                op.MinDuration.Value,
                                op.MaxDuration.Value,
                                out var scores,
                                out var oscillationEvents,
                                out var hits,
                                segmentStartOffset);

                            spectralEvents = new List<EventCommon>(oscillationEvents);

                            var plot = PreparePlot(scores, $"{profileName} (:OscillationScore)", op.EventThreshold);
                            plots.Add(plot);
                        }
                        else
                        {
                            // Unreachable given the type checks above; kept as a safety net.
                            throw new InvalidOperationException();
                        }
                    }
                    else
                    {
                        // CommonParameters subtype with no matching algorithm.
                        throw new InvalidOperationException();
                    }

                    // Add additional info to the acoustic events.
                    spectralEvents.ForEach(ae =>
                    {
                        ae.FileName = audioRecording.BaseName;
                        ae.Name = parameters.SpeciesName;
                        ae.Profile = profileName;
                    });
                }
                else if (profileConfig is Aed.AedConfiguration ac)
                {
                    var config = new SonogramConfig
                    {
                        NoiseReductionType = ac.NoiseReductionType,
                        NoiseReductionParameter = ac.NoiseReductionParameter,
                    };
                    sonogram = new SpectrogramStandard(config, audioRecording.WavReader);

                    // GET THIS TO RETURN BLOB EVENTS.
                    spectralEvents = Aed.CallAed(sonogram, ac, segmentStartOffset, audioRecording.Duration).ToList();
                }
                else
                {
                    // Profile config is neither CommonParameters nor an AED configuration.
                    throw new InvalidOperationException();
                }

                // Combine the results, i.e. add the events list of call events.
                allResults.NewEvents.AddRange(spectralEvents);
                allResults.Plots.AddRange(plots);

                // Effectively keeps only the *last* sonogram produced.
                allResults.Sonogram = sonogram;
                Log.Debug($"{profileName} event count = {spectralEvents.Count}");

                // DEBUG PURPOSES: UNCOMMENT NEXT LINE
                //SaveDebugSpectrogram(allResults, genericConfig, outputDirectory, "name");
            }

            return allResults;
        }
        // ===== Beispiel #9 (snippet separator) =====
        /// <summary>
        /// Unit test for the vertical-track detection algorithm.
        /// Builds an artificial spectrogram, runs VerticalTrackParameters.GetVerticalTracks twice
        /// with different frequency bands, and asserts the expected event counts and bounds.
        /// NOTE(review): writes debug images to a hard-coded "C:\temp" directory — this will fail on
        /// machines where that path is not writable; consider a test-output directory instead.
        /// </summary>
        public void TestVerticalTrackAlgorithm()
        {
            // Set up the recognizer parameters.
            var windowSize                   = 512;
            var windowStep                   = 512;
            var minHertz                     = 6000;
            var maxHertz                     = 11000;
            var minBandwidthHertz            = 100;
            var maxBandwidthHertz            = 5000;
            var decibelThreshold             = 2.0;
            var combineProximalSimilarEvents = true;

            //Set up the virtual recording.
            int    samplerate     = 22050;
            double signalDuration = 13.0; //seconds

            // set up the config for a virtual spectrogram.
            var sonoConfig = new SonogramConfig()
            {
                WindowSize              = windowSize,
                WindowStep              = windowStep,
                WindowOverlap           = 0.0, // this must be set
                WindowFunction          = WindowFunctions.HANNING.ToString(),
                NoiseReductionType      = NoiseReductionType.Standard,
                NoiseReductionParameter = 0.0,
                Duration   = TimeSpan.FromSeconds(signalDuration),
                SampleRate = samplerate,
            };

            var spectrogram = this.CreateArtificialSpectrogramToTestTracksAndHarmonics(sonoConfig);

            //var image1 = SpectrogramTools.GetSonogramPlusCharts(spectrogram, null, null, null);
            //results.Sonogram.GetImage().Save(this.outputDirectory + "\\debug.png");

            var segmentStartOffset = TimeSpan.Zero;
            var plots = new List <Plot>();

            double[]             dBArray;
            List <AcousticEvent> acousticEvents;

            // FIRST TEST: detect vertical tracks in the 6-11 kHz band.
            (acousticEvents, dBArray) = VerticalTrackParameters.GetVerticalTracks(
                spectrogram,
                minHertz,
                maxHertz,
                spectrogram.NyquistFrequency,
                decibelThreshold,
                minBandwidthHertz,
                maxBandwidthHertz,
                combineProximalSimilarEvents,
                segmentStartOffset);

            // draw a plot of max decibels in each frame
            double decibelNormalizationMax = 3 * decibelThreshold;
            var    dBThreshold             = decibelThreshold / decibelNormalizationMax;
            var    normalisedDecibelArray  = DataTools.NormaliseInZeroOne(dBArray, 0, decibelNormalizationMax);
            var    plot1 = new Plot("decibel max", normalisedDecibelArray, dBThreshold);

            plots.Add(plot1);

            var allResults = new RecognizerResults()
            {
                Events     = new List <AcousticEvent>(),
                Hits       = null,
                ScoreTrack = null,
                Plots      = new List <Plot>(),
                Sonogram   = null,
            };

            // combine the results i.e. add the events list of call events.
            allResults.Events.AddRange(acousticEvents);
            allResults.Plots.AddRange(plots);

            // effectively keeps only the *last* sonogram produced
            allResults.Sonogram = spectrogram;

            // DEBUG PURPOSES ONLY - COMMENT NEXT LINE
            var outputDirectory = new DirectoryInfo("C:\\temp");

            GenericRecognizer.SaveDebugSpectrogram(allResults, null, outputDirectory, "VerticalTracks1");

            // Expect two events with these time/frequency bounds.
            Assert.AreEqual(2, allResults.Events.Count);

            var @event = allResults.Events[0];

            Assert.AreEqual(10.0, @event.EventStartSeconds, 0.1);
            Assert.AreEqual(10.1, @event.EventEndSeconds, 0.1);
            Assert.AreEqual(6460, @event.LowFrequencyHertz);
            Assert.AreEqual(10724, @event.HighFrequencyHertz);

            @event = allResults.Events[1];
            Assert.AreEqual(11.0, @event.EventStartSeconds, 0.1);
            Assert.AreEqual(11.24, @event.EventEndSeconds, 0.1);
            Assert.AreEqual(6460, @event.LowFrequencyHertz);
            Assert.AreEqual(7278, @event.HighFrequencyHertz);

            // do a SECOND TEST of the vertical tracks
            // Same spectrogram, but a lower frequency band (0.5-6 kHz) and a wider minimum bandwidth.
            minHertz                  = 500;
            maxHertz                  = 6000;
            minBandwidthHertz         = 200;
            maxBandwidthHertz         = 5000;
            (acousticEvents, dBArray) = VerticalTrackParameters.GetVerticalTracks(
                spectrogram,
                minHertz,
                maxHertz,
                spectrogram.NyquistFrequency,
                decibelThreshold,
                minBandwidthHertz,
                maxBandwidthHertz,
                combineProximalSimilarEvents,
                segmentStartOffset);

            // draw a plot of max decibels in each frame
            normalisedDecibelArray = DataTools.NormaliseInZeroOne(dBArray, 0, decibelNormalizationMax);
            var plot2 = new Plot("decibel max", normalisedDecibelArray, dBThreshold);

            plots.Add(plot2);

            var allResults2 = new RecognizerResults()
            {
                Events     = new List <AcousticEvent>(),
                Hits       = null,
                ScoreTrack = null,
                Plots      = new List <Plot>(),
                Sonogram   = null,
            };

            // combine the results i.e. add the events list of call events.
            allResults2.Events.AddRange(acousticEvents);
            allResults2.Plots.AddRange(plots);

            // effectively keeps only the *last* sonogram produced
            allResults2.Sonogram = spectrogram;

            // DEBUG PURPOSES ONLY - COMMENT NEXT LINE
            GenericRecognizer.SaveDebugSpectrogram(allResults2, null, outputDirectory, "VerticalTracks2");

            // The lower band contains five vertical tracks.
            Assert.AreEqual(5, allResults2.Events.Count);
        }
        // ===== Beispiel #10 (snippet separator) =====
        public void TestHarmonicsAlgorithm()
        {
            // Exercises HarmonicParameters.GetComponentsWithHarmonics against an
            // artificial spectrogram and checks the four expected harmonic events.

            // Recognizer parameters for the harmonics detector.
            const int windowSize = 512;
            const int windowStep = 512;
            const int minHertz = 500;
            const int maxHertz = 5000;
            const double dctThreshold = 0.15;
            const int minFormantGap = 400;
            const int maxFormantGap = 1200;
            const double minDuration = 0.2;
            const double maxDuration = 1.1;
            const double decibelThreshold = 2.0;

            // Properties of the virtual recording.
            const int sampleRate = 22050;
            const double signalDuration = 13.0; // seconds

            // Config for the virtual spectrogram.
            var sonoConfig = new SonogramConfig()
            {
                WindowSize = windowSize,
                WindowStep = windowStep,
                WindowOverlap = 0.0, // this must be set
                WindowFunction = WindowFunctions.HANNING.ToString(),
                NoiseReductionType = NoiseReductionType.Standard,
                NoiseReductionParameter = 0.0,
                Duration = TimeSpan.FromSeconds(signalDuration),
                SampleRate = sampleRate,
            };

            var spectrogram = this.CreateArtificialSpectrogramToTestTracksAndHarmonics(sonoConfig);
            var segmentStartOffset = TimeSpan.Zero;

            // Detect the harmonic components; also returns the per-frame decibel
            // maxima and the dct intensity scores used for the debug plots.
            var (acousticEvents, dBArray, harmonicIntensityScores) = HarmonicParameters.GetComponentsWithHarmonics(
                spectrogram,
                minHertz,
                maxHertz,
                spectrogram.NyquistFrequency,
                decibelThreshold,
                dctThreshold,
                minDuration,
                maxDuration,
                minFormantGap,
                maxFormantGap,
                segmentStartOffset);

            // Plot of max decibels in each frame, normalised into [0, 1].
            double decibelNormalizationMax = 3 * decibelThreshold;
            var decibelPlot = new Plot(
                "decibel max",
                DataTools.NormaliseInZeroOne(dBArray, 0, decibelNormalizationMax),
                decibelThreshold / decibelNormalizationMax);

            // Plot of dct intensity, normalised into [0, 1].
            double intensityNormalizationMax = 3 * dctThreshold;
            var intensityPlot = new Plot(
                "dct intensity",
                DataTools.NormaliseInZeroOne(harmonicIntensityScores, 0, intensityNormalizationMax),
                dctThreshold / intensityNormalizationMax);

            var allResults = new RecognizerResults()
            {
                Events = new List<AcousticEvent>(),
                Hits = null,
                ScoreTrack = null,
                Plots = new List<Plot>(),
                Sonogram = null,
            };

            // Combine the results, i.e. add the list of detected call events and plots.
            allResults.Events.AddRange(acousticEvents);
            allResults.Plots.Add(decibelPlot);
            allResults.Plots.Add(intensityPlot);

            // Effectively keeps only the *last* sonogram produced.
            allResults.Sonogram = spectrogram;

            // DEBUG PURPOSES COMMENT NEXT LINE
            //var outputDirectory = new DirectoryInfo("C:\\temp");
            //GenericRecognizer.SaveDebugSpectrogram(allResults, null, outputDirectory, "name");

            Assert.AreEqual(4, allResults.Events.Count);

            var firstEvent = allResults.Events[0];
            Assert.AreEqual("NoName", firstEvent.SpeciesName);
            Assert.AreEqual("Harmonics", firstEvent.Name);
            Assert.AreEqual(3.0, firstEvent.EventStartSeconds, 0.1);
            Assert.AreEqual(4.0, firstEvent.EventEndSeconds, 0.1);
            Assert.AreEqual(500, firstEvent.LowFrequencyHertz);
            Assert.AreEqual(5000, firstEvent.HighFrequencyHertz);

            var secondEvent = allResults.Events[1];
            Assert.AreEqual(5.2, secondEvent.EventStartSeconds, 0.1);
            Assert.AreEqual(5.5, secondEvent.EventEndSeconds, 0.1);

            var thirdEvent = allResults.Events[2];
            Assert.AreEqual(7.0, thirdEvent.EventStartSeconds, 0.1);
            Assert.AreEqual(8.0, thirdEvent.EventEndSeconds, 0.1);

            var fourthEvent = allResults.Events[3];
            Assert.AreEqual(11.3, fourthEvent.EventStartSeconds, 0.1);
            Assert.AreEqual(11.6, fourthEvent.EventEndSeconds, 0.1);
        }
        [TestMethod]
        public void Test2UpwardsTrackAlgorithm()
        {
            // Second test of the upwards-track algorithm: wide band, no combining
            // of proximal similar events; expects ten separate events.

            // Recognizer parameters.
            var parameters = new UpwardTrackParameters()
            {
                MinHertz = 500,
                MaxHertz = 6000,
                MinBandwidthHertz = 200,
                MaxBandwidthHertz = 5000,
                DecibelThreshold = 2.0,
                CombineProximalSimilarEvents = false,
                SyllableStartDifference = TimeSpan.FromSeconds(0.2),
                SyllableHertzDifference = 300,
            };

            // Properties of the virtual recording.
            var segmentStartOffset = TimeSpan.Zero;
            const int sampleRate = 22050;
            const double signalDuration = 13.0; // seconds

            // Config for the virtual spectrogram.
            var sonoConfig = new SonogramConfig()
            {
                WindowSize = 512,
                WindowStep = 512,
                WindowOverlap = 0.0, // this must be set
                WindowFunction = WindowFunctions.HANNING.ToString(),
                NoiseReductionType = NoiseReductionType.Standard,
                NoiseReductionParameter = 0.0,
                Duration = TimeSpan.FromSeconds(signalDuration),
                SampleRate = sampleRate,
            };

            var spectrogram = this.CreateArtificialSpectrogramToTestTracksAndHarmonics(sonoConfig);

            // Do a SECOND TEST of the vertical tracks.
            var (spectralEvents, dBArray) = UpwardTrackAlgorithm.GetUpwardTracks(
                spectrogram,
                parameters,
                segmentStartOffset);

            // Plot of max decibels in each frame, normalised into [0, 1].
            double decibelNormalizationMax = 5 * parameters.DecibelThreshold.Value;
            var decibelPlot = new Plot(
                "decibel max",
                DataTools.NormaliseInZeroOne(dBArray, 0, decibelNormalizationMax),
                parameters.DecibelThreshold.Value / decibelNormalizationMax);

            var results = new RecognizerResults()
            {
                NewEvents = new List<EventCommon>(),
                Hits = null,
                ScoreTrack = null,
                Plots = new List<Plot>(),
                Sonogram = null,
            };

            // Combine the results, i.e. add the list of detected call events.
            results.NewEvents.AddRange(spectralEvents);
            results.Plots.Add(decibelPlot);
            results.Sonogram = spectrogram;

            // DEBUG PURPOSES ONLY - COMMENT NEXT LINE
            this.SaveTestOutput(
                outputDirectory => GenericRecognizer.SaveDebugSpectrogram(results, null, outputDirectory, "UpwardTracks2"));

            Assert.AreEqual(10, results.NewEvents.Count);
        }
        [TestMethod]
        public void Test1UpwardsTrackAlgorithm()
        {
            // First test of the upwards-track algorithm: high band with combining of
            // proximal similar events enabled; expects two combined events.

            // Recognizer parameters.
            var parameters = new UpwardTrackParameters()
            {
                MinHertz = 6000,
                MaxHertz = 11000,
                MinBandwidthHertz = 100,
                MaxBandwidthHertz = 5000,
                DecibelThreshold = 2.0,
                CombineProximalSimilarEvents = true,
                SyllableStartDifference = TimeSpan.FromSeconds(0.2),
                SyllableHertzDifference = 300,
            };

            // Properties of the virtual recording.
            const int sampleRate = 22050;
            const double signalDuration = 13.0; // seconds

            // Config for the virtual spectrogram.
            var sonoConfig = new SonogramConfig()
            {
                WindowSize = 512,
                WindowStep = 512,
                WindowOverlap = 0.0, // this must be set
                WindowFunction = WindowFunctions.HANNING.ToString(),
                NoiseReductionType = NoiseReductionType.Standard,
                NoiseReductionParameter = 0.0,
                Duration = TimeSpan.FromSeconds(signalDuration),
                SampleRate = sampleRate,
            };

            var spectrogram = this.CreateArtificialSpectrogramToTestTracksAndHarmonics(sonoConfig);

            //var image1 = SpectrogramTools.GetSonogramPlusCharts(spectrogram, null, null, null);
            //results.Sonogram.GetImage().Save(this.outputDirectory + "\\debug.png");

            var segmentStartOffset = TimeSpan.Zero;

            var (spectralEvents, dBArray) = UpwardTrackAlgorithm.GetUpwardTracks(
                spectrogram,
                parameters,
                segmentStartOffset);

            // Plot of max decibels in each frame, normalised into [0, 1].
            double decibelNormalizationMax = 5 * parameters.DecibelThreshold.Value;
            var decibelPlot = new Plot(
                "decibel max",
                DataTools.NormaliseInZeroOne(dBArray, 0, decibelNormalizationMax),
                parameters.DecibelThreshold.Value / decibelNormalizationMax);

            var results = new RecognizerResults()
            {
                NewEvents = new List<EventCommon>(),
                Hits = null,
                ScoreTrack = null,
                Plots = new List<Plot>(),
                Sonogram = null,
            };

            // Combine the results, i.e. add the list of detected call events.
            results.NewEvents.AddRange(spectralEvents);
            results.Plots.Add(decibelPlot);

            // Effectively keeps only the *last* sonogram produced.
            results.Sonogram = spectrogram;

            // DEBUG PURPOSES ONLY - COMMENT NEXT LINE
            this.SaveTestOutput(
                outputDirectory => GenericRecognizer.SaveDebugSpectrogram(results, null, outputDirectory, "UpwardsTrack1"));

            Assert.AreEqual(2, results.NewEvents.Count);

            var firstEvent = (SpectralEvent)results.NewEvents[0];
            Assert.AreEqual(10.0, firstEvent.EventStartSeconds, 0.1);
            Assert.AreEqual(10.1, firstEvent.EventEndSeconds, 0.1);
            Assert.AreEqual(6450, firstEvent.LowFrequencyHertz);
            Assert.AreEqual(10750, firstEvent.HighFrequencyHertz);

            var secondEvent = (SpectralEvent)results.NewEvents[1];
            Assert.AreEqual(11.0, secondEvent.EventStartSeconds, 0.1);
            Assert.AreEqual(11.24, secondEvent.EventEndSeconds, 0.1);
            Assert.AreEqual(6450, secondEvent.LowFrequencyHertz);
            Assert.AreEqual(7310, secondEvent.HighFrequencyHertz);
        }
        [TestMethod]
        public void TestOnebinTrackAlgorithm()
        {
            // Tests the one-bin (whistle) track algorithm against an artificial
            // spectrogram with a non-zero segment start offset of 60 seconds.

            // Recognizer parameters.
            var parameters = new OnebinTrackParameters()
            {
                MinHertz = 500,
                MaxHertz = 6000,
                MinDuration = 0.2,
                MaxDuration = 1.1,
                DecibelThreshold = 2.0,
                CombinePossibleSyllableSequence = false,
                //SyllableStartDifference = TimeSpan.FromSeconds(0.2),
                //SyllableHertzGap = 300,
            };

            // Properties of the virtual recording.
            const int sampleRate = 22050;
            const double signalDuration = 13.0; // seconds
            var segmentStartOffset = TimeSpan.FromSeconds(60.0);

            // Config for the virtual spectrogram.
            var sonoConfig = new SonogramConfig()
            {
                WindowSize = 512,
                WindowStep = 512,
                WindowOverlap = 0.0, // this must be set
                WindowFunction = WindowFunctions.HANNING.ToString(),
                NoiseReductionType = NoiseReductionType.Standard,
                NoiseReductionParameter = 0.0,
                Duration = TimeSpan.FromSeconds(signalDuration),
                SampleRate = sampleRate,
            };

            var spectrogram = this.CreateArtificialSpectrogramToTestTracksAndHarmonics(sonoConfig);

            //var image1 = SpectrogramTools.GetSonogramPlusCharts(spectrogram, null, null, null);
            //results.Sonogram.GetImage().Save(this.outputDirectory + "\\debug.png");

            var (spectralEvents, dBArray) = OnebinTrackAlgorithm.GetOnebinTracks(
                spectrogram,
                parameters,
                segmentStartOffset);

            // Plot of max decibels in each frame, normalised into [0, 1].
            double decibelNormalizationMax = 5 * parameters.DecibelThreshold.Value;
            var decibelPlot = new Plot(
                "decibel max",
                DataTools.NormaliseInZeroOne(dBArray, 0, decibelNormalizationMax),
                parameters.DecibelThreshold.Value / decibelNormalizationMax);

            var results = new RecognizerResults()
            {
                NewEvents = new List<EventCommon>(),
                Hits = null,
                ScoreTrack = null,
                Plots = new List<Plot>(),
                Sonogram = null,
            };

            // Combine the results, i.e. add the list of detected call events.
            results.NewEvents.AddRange(spectralEvents);
            results.Plots.Add(decibelPlot);
            results.Sonogram = spectrogram;

            // DEBUG PURPOSES
            this.SaveTestOutput(
                outputDirectory => GenericRecognizer.SaveDebugSpectrogram(results, null, outputDirectory, "WhistleTrack"));

            //NOTE: There are 16 whistles in the test spectrogram ...
            // but three of them are too weak to be detected at this threshold.
            Assert.AreEqual(13, results.NewEvents.Count);

            var firstEvent = (SpectralEvent)results.NewEvents[0];
            Assert.AreEqual(60 + 0.0, firstEvent.EventStartSeconds, 0.1);
            Assert.AreEqual(60 + 0.35, firstEvent.EventEndSeconds, 0.1);
            Assert.AreEqual(2150, firstEvent.LowFrequencyHertz);
            Assert.AreEqual(2193, firstEvent.HighFrequencyHertz);

            var fifthEvent = (SpectralEvent)results.NewEvents[4];
            Assert.AreEqual(60 + 5.0, fifthEvent.EventStartSeconds, 0.1);
            Assert.AreEqual(60 + 6.0, fifthEvent.EventEndSeconds, 0.1);
            Assert.AreEqual(989, fifthEvent.LowFrequencyHertz);
            Assert.AreEqual(1032, fifthEvent.HighFrequencyHertz);

            var twelfthEvent = (SpectralEvent)results.NewEvents[11];
            Assert.AreEqual(60 + 11.0, twelfthEvent.EventStartSeconds, 0.1);
            Assert.AreEqual(60 + 12.0, twelfthEvent.EventEndSeconds, 0.1);
            Assert.AreEqual(989, twelfthEvent.LowFrequencyHertz);
            Assert.AreEqual(1032, twelfthEvent.HighFrequencyHertz);
        }
        /// <summary>
        /// This method is called once per segment (typically one-minute segments).
        /// It delegates detection to the generic recognizer and then post-processes
        /// the events: optionally combines proximal syllables into composite events
        /// and filters the result on event duration.
        /// </summary>
        /// <param name="audioRecording">one minute of audio recording.</param>
        /// <param name="config">config file that contains parameters used by all profiles.</param>
        /// <param name="segmentStartOffset">when recording starts.</param>
        /// <param name="getSpectralIndexes">lazily-evaluated high-resolution spectral indices (not used here).</param>
        /// <param name="outputDirectory">where the recognizer results can be found.</param>
        /// <param name="imageWidth">width of the output image, if any.</param>
        /// <returns>recognizer results with the post-processed event list.</returns>
        public override RecognizerResults Recognize(
            AudioRecording audioRecording,
            Config config,
            TimeSpan segmentStartOffset,
            Lazy<IndexCalculateResult[]> getSpectralIndexes,
            DirectoryInfo outputDirectory,
            int? imageWidth)
        {
            // class BotaurusPoiciloptilusConfig is defined at the bottom of this file.
            var genericConfig = (BotaurusPoiciloptilusConfig)config;
            var recognizer = new GenericRecognizer();

            RecognizerResults combinedResults = recognizer.Recognize(
                audioRecording,
                genericConfig,
                segmentStartOffset,
                getSpectralIndexes,
                outputDirectory,
                imageWidth);

            // DO POST-PROCESSING of EVENTS
            var events = combinedResults.NewEvents;
            List<EventCommon> newEvents;

            // NOTE: If the dB threshold is set low, may get lots of little events.
            // When sequences are combined, the minimum acceptable duration is raised,
            // because a combined syllable sequence is longer than a lone syllable.
            double minimumEventDuration = 0.5; // seconds

            if (genericConfig.CombinePossibleSyllableSequence)
            {
                // Convert events to spectral events for combining of possible sequences.
                var spectralEvents = events.Cast<SpectralEvent>().ToList();
                var startDiff = TimeSpan.FromSeconds(genericConfig.SyllableStartDifference);
                var hertzDiff = (int)genericConfig.SyllableHertzGap;
                newEvents = CompositeEvent.CombineSimilarProximalEvents(spectralEvents, startDiff, hertzDiff);
                minimumEventDuration = 2.0;
            }
            else
            {
                newEvents = events;
            }

            // Filter the events for duration in seconds.
            // The 11-second upper bound is retained from the original filter.
            const double maximumEventDuration = 11.0;
            combinedResults.NewEvents = newEvents
                .Where(ev =>
                {
                    var eventDuration = ((SpectralEvent)ev).EventDurationSeconds;
                    return eventDuration > minimumEventDuration && eventDuration < maximumEventDuration;
                })
                .ToList();

            //UNCOMMENT following line if you want special debug spectrogram, i.e. with special plots.
            //  NOTE: Standard spectrograms are produced by setting SaveSonogramImages: "True" or "WhenEventsDetected" in UserName.SpeciesName.yml config file.
            //GenericRecognizer.SaveDebugSpectrogram(combinedResults, genericConfig, outputDirectory, audioRecording.BaseName);
            return combinedResults;
        }