Example #1
        /// <summary>
        /// This method finds spectral peak tracks and returns them enclosed in acoustic events.
        /// It averages dB (log) values directly, which is not strictly correct, but it is faster
        /// than converting to and from linear energy for every average.
        /// </summary>
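        /// <param name="sonogram">The decibel spectrogram to search.</param>
        /// <param name="minHz">The bottom of the search band in Hertz.</param>
        /// <param name="maxHz">The top of the search band in Hertz.</param>
        /// <param name="nyquist">The Nyquist frequency of the recording in Hertz.</param>
        /// <param name="decibelThreshold">Spectrogram values below this threshold are ignored.</param>
        /// <param name="minDuration">The minimum acceptable track duration in seconds.</param>
        /// <param name="maxDuration">The maximum acceptable track duration in seconds.</param>
        /// <param name="combinePossibleHarmonics">Whether to combine coincident stacked events.</param>
        /// <param name="segmentStartOffset">The start time of the current audio segment.</param>
        /// <returns>A list of acoustic events and an array of combined track intensity per frame.</returns>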
        public static (List<AcousticEvent> Events, double[] CombinedIntensity) GetSpectralPeakTracks(
            SpectrogramStandard sonogram,
            int minHz,
            int maxHz,
            int nyquist,
            double decibelThreshold,
            double minDuration,
            double maxDuration,
            bool combinePossibleHarmonics,
            TimeSpan segmentStartOffset)
        {
            var sonogramData = sonogram.Data;
            int frameCount   = sonogramData.GetLength(0);
            int binCount     = sonogramData.GetLength(1);

            double binWidth          = nyquist / (double)binCount;
            int    minBin            = (int)Math.Round(minHz / binWidth);
            int    maxBin            = (int)Math.Round(maxHz / binWidth);
            int    bandwidthBinCount = maxBin - minBin + 1;
            var    frameDuration     = sonogram.FrameDuration;
            var    frameStep         = sonogram.FrameStep;
            var    frameOverStep     = frameDuration - frameStep;
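
            // Illustrative example (values are hypothetical): with nyquist = 11025 Hz and binCount = 256,
            // binWidth ≈ 43.07 Hz, so minHz = 500 maps to bin 12 and maxHz = 2000 maps to bin 46,
            // giving bandwidthBinCount = 35.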

            // list of accumulated acoustic events
            var events = new List<AcousticEvent>();

            // Find all spectral peaks within the search band and store them in the peaks matrix.
            // The peaks matrix covers only the search band, so its columns are offset by minBin.
            var peaks = new double[frameCount, bandwidthBinCount];

            // clamp the search band so that the bins either side of a candidate peak always exist
            int lowBin  = Math.Max(minBin, 1);
            int highBin = Math.Min(maxBin, binCount - 2);

            for (int row = 0; row < frameCount; row++)
            {
                for (int col = lowBin; col <= highBin; col++)
                {
                    if (sonogramData[row, col] < decibelThreshold)
                    {
                        continue;
                    }

                    // a peak must exceed the dB value in the frequency bin on either side
                    bool isPeak = (sonogramData[row, col] > sonogramData[row, col - 1]) && (sonogramData[row, col] > sonogramData[row, col + 1]);
                    if (isPeak)
                    {
                        peaks[row, col - minBin] = sonogramData[row, col];
                    }
                }
            }

            // Look for track starts and initialise them as events.
            // Edge columns are excluded to avoid edge effects.
            // Each row is a time frame (a spectrum); each column is a frequency bin.
            var combinedIntensityArray = new double[frameCount];

            for (int row = 0; row < frameCount; row++)
            {
                for (int col = 3; col < bandwidthBinCount - 3; col++)
                {
                    // Visit each spectral peak in order. Each may be the start of a track.
                    if (peaks[row, col] < decibelThreshold)
                    {
                        continue;
                    }

                    // we have the beginning of a potential track
                    var track = GetTrack(peaks, row, col, decibelThreshold);

                    int    trackStartFrame = track.GetStartFrame();
                    int    trackEndFrame   = track.GetEndFrame();
                    double trackDuration   = ((trackEndFrame - trackStartFrame) * frameStep) + frameOverStep;
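                    // Illustrative example (values are hypothetical): a track spanning frames 100..120
                    // with frameStep = 0.01 s and frameDuration = 0.02 s has duration
                    // (20 * 0.01) + (0.02 - 0.01) = 0.21 seconds.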

                    // min and max bin IDs of the track, relative to minBin (the bottom of the search band)
                    int trackBottomBin = track.GetBottomFreqBin();
                    int trackTopBin    = track.GetTopFreqBin();

                    // If the track duration lies within the required bounds, create an acoustic event from it.
                    if (trackDuration >= minDuration && trackDuration <= maxDuration)
                    {
                        // add minBin to convert the track's band-relative bins back to absolute spectrogram bins
                        var oblong = new Oblong(trackStartFrame, minBin + trackBottomBin, trackEndFrame, minBin + trackTopBin);
                        var ae     = new AcousticEvent(segmentStartOffset, oblong, nyquist, binCount, frameDuration, frameStep, frameCount)
                        {
                            // attach the track to the event
                            TheTrack = track,
                        };
                        events.Add(ae);

                        // accumulate the track's amplitude into the combined intensity array,
                        // aligned to the frame where the track starts
                        var amplitudeTrack = track.GetAmplitudeOverTimeFrames();
                        for (int i = 0; i < amplitudeTrack.Length && trackStartFrame + i < frameCount; i++)
                        {
                            combinedIntensityArray[trackStartFrame + i] += amplitudeTrack[i];
                        }
                    }
                }
            }

            // Combine coincident events that are stacked one above the other.
            // In some cases this helps to merge related events such as stacked harmonics.
            var startDifference = TimeSpan.FromSeconds(0.2);
            var hertzGap        = 200;

            if (combinePossibleHarmonics)
            {
                events = AcousticEvent.CombinePotentialStackedTracks(events, startDifference, hertzGap);
            }

            return (events, combinedIntensityArray);
        }
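
The sketch below shows one way this method might be called. It is illustrative only: the enclosing class name SpectralPeakTrackDetector and all parameter values are assumptions, and it presumes a SpectrogramStandard named sonogram has already been built from a recording with the AudioAnalysisTools library.

        // Hypothetical usage sketch; the class name and parameter values are illustrative only.
        var (events, combinedIntensity) = SpectralPeakTrackDetector.GetSpectralPeakTracks(
            sonogram,
            minHz: 500,
            maxHz: 2000,
            nyquist: 11025,
            decibelThreshold: 6.0,
            minDuration: 0.1,
            maxDuration: 2.0,
            combinePossibleHarmonics: true,
            segmentStartOffset: TimeSpan.Zero);

        Console.WriteLine($"Detected {events.Count} spectral peak track events.");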