Example #1
0
        public void Event_should_be_published_using_instance_type()
        {
            // Publishes a CompositeEvent once both subscriptions are in place and
            // verifies the subscriber received it as both IEventA and IEventB.
            Scenario.Define(() => new Context { Id = Guid.NewGuid() })
            .WithEndpoint <Publisher>(behavior =>
                                      behavior.When(c => c.EventASubscribed && c.EventBSubscribed, (bus, testContext) =>
            {
                var compositeEvent = new CompositeEvent { ContextId = testContext.Id };
                bus.Publish(compositeEvent);
            }))
            .WithEndpoint <Subscriber>(behavior => behavior.Given((bus, testContext) =>
            {
                bus.Subscribe <IEventA>();
                bus.Subscribe <IEventB>();

                // Transports with native pub/sub raise no subscription messages,
                // so flag the subscriptions as established immediately.
                if (testContext.HasNativePubSubSupport)
                {
                    testContext.EventASubscribed = true;
                    testContext.EventBSubscribed = true;
                }
            }))
            .Done(c => c.GotEventA && c.GotEventB)
            .Repeat(r => r.For(Serializers.Xml))
            .Should(c =>
            {
                Assert.True(c.GotEventA);
                Assert.True(c.GotEventB);
            })
            .Run(TimeSpan.FromSeconds(20));
        }
        public void CompositeEventTest()
        {
            // Two events constructed from identical arguments must compare equal.
            var first  = new CompositeEvent(CompositeEventType.Listening, string.Empty, true);
            var second = new CompositeEvent(CompositeEventType.Listening, string.Empty, true);

            Assert.IsTrue(first.Equals(second));
        }
        public async Task Event_should_be_published_using_instance_type()
        {
            // Publishes a CompositeEvent after both subscriptions are confirmed and
            // checks that the subscriber observed both event interfaces.
            await Scenario.Define <Context>(c => { c.Id = Guid.NewGuid(); })
            .WithEndpoint <Publisher>(behavior =>
                                      behavior.When(c => c.EventASubscribed && c.EventBSubscribed, (session, testContext) =>
            {
                var compositeEvent = new CompositeEvent { ContextId = testContext.Id };
                return session.Publish(compositeEvent);
            }))
            .WithEndpoint <Subscriber>(behavior => behavior.When(async(session, testContext) =>
            {
                await session.Subscribe <IEventA>();
                await session.Subscribe <IEventB>();

                // Native pub/sub transports need no subscription round-trip,
                // so the subscriptions are effective right away.
                if (testContext.HasNativePubSubSupport)
                {
                    testContext.EventASubscribed = true;
                    testContext.EventBSubscribed = true;
                }
            }))
            .Done(c => c.GotEventA && c.GotEventB)
            .Repeat(r => r.For(Serializers.Xml))
            .Should(c =>
            {
                Assert.True(c.GotEventA);
                Assert.True(c.GotEventB);
            })
            .Run(TimeSpan.FromSeconds(20));
        }
        /// <summary>
        /// Blocks the calling thread until the given event is signalled.
        /// </summary>
        /// <param name="compositeEvent">The event to wait for; it must have been registered beforehand.</param>
        /// <exception cref="KeyNotFoundException">If no wait handle was registered for <paramref name="compositeEvent"/>.</exception>
        public void WaitForEvent(CompositeEvent compositeEvent)
        {
            // TryGetValue avoids the ContainsKey + indexer double lookup.
            if (!_eventWaitHandles.TryGetValue(compositeEvent, out var waitHandle))
            {
                throw new KeyNotFoundException(JsonConvert.SerializeObject(compositeEvent));
            }

            waitHandle.WaitOne();
        }
        /// <summary>
        /// Blocks the calling thread until the given event is signalled or the timeout elapses.
        /// </summary>
        /// <param name="compositeEvent">The event to wait for; it must have been registered beforehand.</param>
        /// <param name="timeout">Maximum time to wait.</param>
        /// <param name="exitContext">Whether to exit the synchronization domain before waiting.</param>
        /// <exception cref="KeyNotFoundException">If no wait handle was registered for <paramref name="compositeEvent"/>.</exception>
        public void WaitForEvent(CompositeEvent compositeEvent, TimeSpan timeout, bool exitContext)
        {
            // TryGetValue avoids the ContainsKey + indexer double lookup.
            if (!_eventWaitHandles.TryGetValue(compositeEvent, out var waitHandle))
            {
                throw new KeyNotFoundException(JsonConvert.SerializeObject(compositeEvent));
            }

            // NOTE(review): the WaitOne result (timed out vs. signalled) is discarded,
            // matching the original behavior — callers cannot distinguish a timeout.
            waitHandle.WaitOne(timeout, exitContext);
        }
Example #6
0
    /// <summary>
    /// Helper method to read the event field value from a <see cref="CompositeEvent{T}"/>.
    /// </summary>
    /// <typeparam name="TEvent">The type of the event handler.</typeparam>
    /// <param name="evt">The <see cref="CompositeEvent{T}"/>.</param>
    /// <returns>The value of the field holding event handlers.</returns>
    public static TEvent GetEventFieldValue <TEvent>(this CompositeEvent <TEvent> evt)
        where TEvent : class
    {
        // TODO: this should really be a method on the CompositeEvent interfaces, since the
        // same method cannot be written cleanly as an extension for the non-generic
        // CompositeEvent (without the generic parameter).
        return evt.InvokeFunctionWithRef <EventInfo, TEvent>(ReturnParam);
    }
Example #7
0
        /// <summary>
        /// Called once per segment of the recording (typically one-minute segments).
        /// </summary>
        /// <param name="audioRecording">one minute of audio recording.</param>
        /// <param name="config">config file that contains parameters used by all profiles.</param>
        /// <param name="segmentStartOffset">when recording starts.</param>
        /// <param name="getSpectralIndexes">lazily computed spectral index results for the segment.</param>
        /// <param name="outputDirectory">where the recognizer results can be found.</param>
        /// <param name="imageWidth">width of the debug image, if produced.</param>
        /// <returns>recognizer results.</returns>
        public override RecognizerResults Recognize(
            AudioRecording audioRecording,
            Config config,
            TimeSpan segmentStartOffset,
            Lazy <IndexCalculateResult[]> getSpectralIndexes,
            DirectoryInfo outputDirectory,
            int?imageWidth)
        {
            // NinoxBoobookConfig is declared at the bottom of this file.
            var boobookConfig      = (NinoxBoobookConfig)config;
            var genericRecognizer  = new GenericRecognizer();

            var results = genericRecognizer.Recognize(
                audioRecording,
                boobookConfig,
                segmentStartOffset,
                getSpectralIndexes,
                outputDirectory,
                imageWidth);

            // POST-PROCESSING of events.

            // Separate out the chirp events for possible combining.
            var(chirpEvents, others) = results.NewEvents.FilterForEventType <ChirpEvent, EventCommon>();

            // Uncomment the next line to obtain the event frequency profiles.
            // WriteFrequencyProfiles(chirpEvents);

            // Assign a frequency-profile score to every chirp event.
            foreach (var chirp in chirpEvents)
            {
                SetFrequencyProfileScore((ChirpEvent)chirp);
            }

            // Combine overlapping events. A low dB threshold may yield many small events.
            var postProcessedEvents = CompositeEvent.CombineOverlappingEvents(chirpEvents.Cast <EventCommon>().ToList());

            if (boobookConfig.CombinePossibleSyllableSequence)
            {
                // Convert events to spectral events for possible combining.
                var(spectralEvents, _) = results.NewEvents.FilterForEventType <SpectralEvent, EventCommon>();

                postProcessedEvents = CompositeEvent.CombineSimilarProximalEvents(
                    spectralEvents,
                    TimeSpan.FromSeconds(boobookConfig.SyllableStartDifference),
                    (int)boobookConfig.SyllableHertzGap);
            }

            results.NewEvents = postProcessedEvents;

            // Uncomment the following line for a special debug spectrogram with plots.
            // Standard spectrograms are produced by setting SaveSonogramImages: "True" or "WhenEventsDetected" in <Towsey.PteropusSpecies.yml> config file.
            //GenericRecognizer.SaveDebugSpectrogram(territorialResults, genericConfig, outputDirectory, audioRecording.BaseName);
            return results;
        }
Example #8
0
        public void TestEventMerging()
        {
            // Build three spectral events: the first two overlap in time, the third is separate.
            var segmentStart = TimeSpan.FromSeconds(10);

            var overlappingA = new SpectralEvent(segmentStartOffset: segmentStart, eventStartRecordingRelative: 11.0, eventEndRecordingRelative: 16.0, minFreq: 1000, maxFreq: 6000)
            {
                Name  = "Event1",
                Score = 1.0,
            };

            var overlappingB = new SpectralEvent(segmentStartOffset: segmentStart, eventStartRecordingRelative: 12.0, eventEndRecordingRelative: 15.0, minFreq: 1500, maxFreq: 8000)
            {
                Name  = "Event2",
                Score = 5.0,
            };

            var separate = new SpectralEvent(segmentStartOffset: segmentStart, eventStartRecordingRelative: 17.0, eventEndRecordingRelative: 19.0, minFreq: 1000, maxFreq: 8000)
            {
                Name  = "Event3",
                Score = 9.0,
            };

            var events = new List <SpectralEvent> { overlappingA, overlappingB, separate };

            // Combine overlapping acoustic events.
            var merged = CompositeEvent.CombineOverlappingEvents(events: events.Cast <EventCommon>().ToList());
            var result = merged.Cast <SpectralEvent>().ToList();

            // Expect two events: the first a composite of the two overlapping events.
            Assert.AreEqual(2, result.Count);
            Assert.AreEqual(typeof(CompositeEvent), result[0].GetType());

            Assert.AreEqual(2, ((CompositeEvent)result[0]).ComponentEvents.Count);

            // The composite spans the union of its components' time/frequency bounds.
            Assert.AreEqual(11.0, result[0].EventStartSeconds);
            Assert.AreEqual(16.0, result[0].EventEndSeconds);
            Assert.AreEqual(1000, result[0].LowFrequencyHertz);
            Assert.AreEqual(8000, result[0].HighFrequencyHertz);
            Assert.AreEqual(5.0, result[0].Score);

            // The non-overlapping event passes through unchanged.
            Assert.AreEqual(typeof(SpectralEvent), result[1].GetType());
            Assert.AreEqual(17.0, result[1].EventStartSeconds);
            Assert.AreEqual(19.0, result[1].EventEndSeconds);
            Assert.AreEqual(1000, result[1].LowFrequencyHertz);
            Assert.AreEqual(8000, result[1].HighFrequencyHertz);
            Assert.AreEqual(9.0, result[1].Score);
        }
Example #9
0
        /// <summary>
        /// Appends an event to the internal event list, if event recording is enabled.
        /// </summary>
        /// <param name="newEvent">The event to record; must not be null.</param>
        /// <exception cref="ArgumentNullException">If <paramref name="newEvent"/> is null.</exception>
        internal void AddEvent(CompositeEvent newEvent)
        {
            // Validate the argument before the _events early return, so a null event
            // is always rejected. The original checked _events first and silently
            // accepted null when event recording was disabled.
            if (newEvent == null)
            {
                throw new ArgumentNullException(nameof(newEvent));
            }

            // _events == null means event recording is disabled; drop silently.
            if (_events == null)
            {
                return;
            }

            _events.Add(newEvent);
        }
Example #10
0
#pragma warning restore 649

        #region GenericInvocator Members

        /// <summary>
        /// Dispatches an intercepted event-accessor invocation to the matching composite event:
        /// add-accessor calls register the handler, any other accessor removes it.
        /// </summary>
        public virtual Object Invoke(Object composite, System.Reflection.MethodInfo method, Object[] args)
        {
            var compositeEvent = this._state.Events[this._state.QualifiedNamesForMethods[method]];
            var handler = args[0];

            if (compositeEvent.ReflectionInfo.GetAddMethod().Equals(method))
            {
                compositeEvent.AddEventHandlerAsObject(handler);
            }
            else
            {
                compositeEvent.RemoveEventHandlerAsObject(handler);
            }

            // Event accessors have no return value.
            return null;
        }
        /// <summary>
        /// Opens the event stream for the given session and blocks until the server
        /// signals that it is listening.
        /// </summary>
        internal CompositeRootHttpServerTesterConnection(string prefix, string sessionToken)
        {
            if (string.IsNullOrEmpty(sessionToken))
            {
                throw new InvalidOperationException(Resources.MustHaveValidSessionToken);
            }

            var listeningEvent = new CompositeEvent(CompositeEventType.Listening, string.Empty, true);

            _eventWaitHandles = new Dictionary <CompositeEvent, EventWaitHandle>();

            Client = new WebClient();
            Client.OpenReadCompleted += WebClient_OpenReadCompleted;

            AddEventWaitHandle(listeningEvent);

            var eventUri = new Uri(string.Format(CultureInfo.InvariantCulture, prefix + "{0}/event", sessionToken));
            Client.OpenReadAsync(eventUri);

            // Block until the server raises the Listening event.
            _eventWaitHandles[listeningEvent].WaitOne();

            SessionToken = sessionToken;
        }
        public async Task Event_should_be_published_using_instance_type()
        {
            // Publish a CompositeEvent once the subscriber endpoint has flagged both
            // subscriptions, then assert that both events were observed.
            var testContext = await Scenario.Define <Context>(c => { c.Id = Guid.NewGuid(); })
                          .WithEndpoint <Publisher>(behavior =>
                                                    behavior.When(c => c.EventASubscribed && c.EventBSubscribed, (session, ctx) =>
            {
                var compositeEvent = new CompositeEvent { ContextId = ctx.Id };
                return session.Publish(compositeEvent);
            }))
                          .WithEndpoint <Subscriber>(behavior => behavior.When((session, ctx) =>
            {
                // Mark both subscriptions as established straight away.
                ctx.EventASubscribed = true;
                ctx.EventBSubscribed = true;
                return Task.FromResult(0);
            }))
                          .Done(c => c.GotEventA && c.GotEventB)
                          .Run(TimeSpan.FromSeconds(20));

            Assert.True(testContext.GotEventA);
            Assert.True(testContext.GotEventB);
        }
Example #13
0
 /// <summary>
 /// Convenience wrapper for <see cref="CompositeStateParticipant{T}.TryInvokeFunctionWithRef"/> which
 /// assumes the field type equals the event type.
 /// </summary>
 /// <typeparam name="TEvent">The type of the event.</typeparam>
 /// <param name="evt">The <see cref="CompositeEvent{T}"/>.</param>
 /// <param name="function">The delegate to invoke.</param>
 /// <returns>The result of <paramref name="function"/>.</returns>
 /// <exception cref="InvalidOperationException">If <see cref="CompositeStateParticipant{T}.TryInvokeFunctionWithRef"/> returns <c>false</c>, that is, when the field type does not match <typeparamref name="TEvent"/>.</exception>
 /// <seealso cref="CompositeStateParticipant{T}.TryInvokeFunctionWithRef"/>
 public static TEvent InvokeFunctionWithRefSameType <TEvent>(this CompositeEvent <TEvent> evt, FunctionWithRef <TEvent> function)
     where TEvent : class
 {
     return evt.InvokeFunctionWithRef(function);
 }
Example #14
0
 /// <summary>
 /// Convenience wrapper for <see cref="CompositeStateParticipant{T}.TryInvokeActionWithRef"/> which
 /// assumes the field type equals the event type.
 /// </summary>
 /// <typeparam name="TEvent">The type of the event.</typeparam>
 /// <param name="evt">The <see cref="CompositeEvent{T}"/>.</param>
 /// <param name="action">The delegate to invoke.</param>
 /// <exception cref="InvalidOperationException">If <see cref="CompositeStateParticipant{T}.TryInvokeFunctionWithRef"/> returns <c>false</c>, that is, when the field type does not match <typeparamref name="TEvent"/>.</exception>
 /// <seealso cref="CompositeStateParticipant{T}.TryInvokeActionWithRef"/>
 public static void InvokeActionWithRefSameType <TEvent>(this CompositeEvent <TEvent> evt, ActionWithRef <TEvent> action)
     where TEvent : class
 {
     evt.InvokeActionWithRef(action);
 }
Example #15
0
        /// <summary>
        /// This method returns forward (spectral peak) tracks enclosed in spectral events.
        /// It averages dB log values incorrectly but it is faster than doing many log conversions.
        /// </summary>
        /// <param name="sonogram">The spectrogram to be searched.</param>
        /// <param name="parameters">Configuration for the forward-track search (frequency band, duration bounds, dB threshold).</param>
        /// <param name="segmentStartOffset">The start time of the current recording segment under analysis.</param>
        /// <returns>A list of acoustic events containing forward tracks, plus the per-frame combined intensity array.</returns>
        public static (List <EventCommon> Events, double[] CombinedIntensity) GetForwardTracks(
            SpectrogramStandard sonogram,
            ForwardTrackParameters parameters,
            TimeSpan segmentStartOffset)
        {
            // Unpack spectrogram dimensions and search parameters.
            var    sonogramData     = sonogram.Data;
            int    frameCount       = sonogramData.GetLength(0);
            int    binCount         = sonogramData.GetLength(1);
            int    nyquist          = sonogram.NyquistFrequency;
            double binWidth         = nyquist / (double)binCount;
            int    minBin           = (int)Math.Round(parameters.MinHertz.Value / binWidth);
            int    maxBin           = (int)Math.Round(parameters.MaxHertz.Value / binWidth);
            double minDuration      = parameters.MinDuration.Value;
            double maxDuration      = parameters.MaxDuration.Value;
            double decibelThreshold = parameters.DecibelThreshold.Value;

            // Converts between frame/bin indices and seconds/hertz for this segment.
            var converter = new UnitConverters(
                segmentStartOffset: segmentStartOffset.TotalSeconds,
                sampleRate: sonogram.SampleRate,
                frameSize: sonogram.Configuration.WindowSize,
                frameOverlap: sonogram.Configuration.WindowOverlap);

            //Find all spectral peaks and place in peaks matrix
            var peaks = new double[frameCount, binCount];

            for (int row = 0; row < frameCount; row++)
            {
                // NOTE(review): the column loop runs (minBin + 1 .. maxBin - 2), presumably to
                // avoid bin-edge effects when comparing with col - 1 / col + 1 — confirm intended.
                for (int col = minBin + 1; col < maxBin - 1; col++)
                {
                    if (sonogramData[row, col] < decibelThreshold)
                    {
                        continue;
                    }

                    // if given matrix element is greater than in freq bin either side
                    bool isPeak = (sonogramData[row, col] > sonogramData[row, col - 1]) && (sonogramData[row, col] > sonogramData[row, col + 1]);
                    if (isPeak)
                    {
                        peaks[row, col] = sonogramData[row, col];
                    }
                }
            }

            var tracks = GetForwardTracks(peaks, minDuration, maxDuration, decibelThreshold, converter);

            // initialise tracks as events and get the combined intensity array.
            // list of accumulated acoustic events
            var events = new List <SpectralEvent>();
            var combinedIntensityArray = new double[frameCount];

            // The following lines are used only for debug purposes.
            //var options = new EventRenderingOptions(new UnitConverters(segmentStartOffset.TotalSeconds, sonogram.Duration.TotalSeconds, nyquist, frameCount, binCount));
            //var spectrogram = sonogram.GetImage(doHighlightSubband: false, add1KHzLines: true, doMelScale: false);

            // Initialise events with tracks.
            foreach (var track in tracks)
            {
                //Following line used only for debug purposes. Can save as image.
                //spectrogram.Mutate(x => track.Draw(x, options));

                // Normalise event scores against a ceiling of five times the dB threshold.
                var maxScore   = decibelThreshold * 5;
                var scoreRange = new Interval <double>(0, maxScore);
                var ae         = new ChirpEvent(track, scoreRange)
                {
                    SegmentStartSeconds    = segmentStartOffset.TotalSeconds,
                    SegmentDurationSeconds = frameCount * converter.SecondsPerFrameStep,
                    Name = "noName",
                };

                events.Add(ae);

                // fill the intensity array: keep the maximum amplitude seen in each frame
                // (contrast with GetUpwardTracks, which sums amplitudes).
                var startRow       = converter.FrameFromStartTime(track.StartTimeSeconds);
                var amplitudeTrack = track.GetAmplitudeOverTimeFrames();
                for (int i = 0; i < amplitudeTrack.Length; i++)
                {
                    combinedIntensityArray[startRow + i] = Math.Max(combinedIntensityArray[startRow + i], amplitudeTrack[i]);
                }
            }

            List <EventCommon> returnEvents = events.Cast <EventCommon>().ToList();

            // Combine coincident events that are stacked one above other.
            // This will help in some cases to combine related events.
            if (parameters.CombinePossibleHarmonics)
            {
                returnEvents = CompositeEvent.CombinePotentialStackedTracks(events, parameters.HarmonicsStartDifference, parameters.HarmonicsHertzGap);
            }

            // Combine events that are temporally close and in the same frequency band.
            // This will help in some cases to combine related events.
            if (parameters.CombinePossibleSyllableSequence)
            {
                var timeDiff = TimeSpan.FromSeconds(parameters.SyllableStartDifference);
                returnEvents = CompositeEvent.CombineSimilarProximalEvents(events, timeDiff, parameters.SyllableHertzGap);
            }

            return(returnEvents, combinedIntensityArray);
        }
 /// <summary>
 /// Registers a manual-reset wait handle (initially unsignalled) for the given event.
 /// Throws if a handle is already registered for an equal event.
 /// </summary>
 public void AddEventWaitHandle(CompositeEvent compositeEvent)
 {
     var waitHandle = new ManualResetEvent(initialState: false);
     _eventWaitHandles.Add(compositeEvent, waitHandle);
 }
Example #17
0
        /// <summary>
        /// This method is called once per segment (typically one-minute segments).
        /// </summary>
        /// <param name="audioRecording">one minute of audio recording.</param>
        /// <param name="config">config file that contains parameters used by all profiles.</param>
        /// <param name="segmentStartOffset">when recording starts.</param>
        /// <param name="getSpectralIndexes">lazily computed spectral index results for the segment.</param>
        /// <param name="outputDirectory">where the recognizer results can be found.</param>
        /// <param name="imageWidth">width of the debug image, if produced.</param>
        /// <returns>recognizer results.</returns>
        public override RecognizerResults Recognize(
            AudioRecording audioRecording,
            Config config,
            TimeSpan segmentStartOffset,
            Lazy <IndexCalculateResult[]> getSpectralIndexes,
            DirectoryInfo outputDirectory,
            int?imageWidth)
        {
            //class BotaurusPoiciloptilusConfig is defined at bottom of this file.
            var genericConfig = (BotaurusPoiciloptilusConfig)config;
            var recognizer    = new GenericRecognizer();

            RecognizerResults combinedResults = recognizer.Recognize(
                audioRecording,
                genericConfig,
                segmentStartOffset,
                getSpectralIndexes,
                outputDirectory,
                imageWidth);

            // DO POST-PROCESSING of EVENTS
            var events = combinedResults.NewEvents;

            List <EventCommon> newEvents;

            // Minimum duration (seconds) an event must exceed to be kept. Combined
            // syllable sequences must be longer than single events.
            var minimumEventDuration = 0.5;

            // NOTE: If the dB threshold is set low, may get lots of little events.
            if (genericConfig.CombinePossibleSyllableSequence)
            {
                // Convert events to spectral events for combining of possible sequences.
                var spectralEvents = events.Cast <SpectralEvent>().ToList();
                var startDiff      = genericConfig.SyllableStartDifference;
                var hertzDiff      = genericConfig.SyllableHertzGap;
                newEvents = CompositeEvent.CombineSimilarProximalEvents(spectralEvents, TimeSpan.FromSeconds(startDiff), (int)hertzDiff);

                // Folded in here: the original re-checked CombinePossibleSyllableSequence
                // a second time just to raise this threshold.
                minimumEventDuration = 2.0;
            }
            else
            {
                newEvents = events;
            }

            // Filter the events on duration: keep only events whose duration lies in
            // (minimumEventDuration, 11.0) seconds.
            var filteredEvents = newEvents
                                 .Where(ev =>
            {
                var eventDuration = ((SpectralEvent)ev).EventDurationSeconds;
                return eventDuration > minimumEventDuration && eventDuration < 11.0;
            })
                                 .ToList <EventCommon>();

            combinedResults.NewEvents = filteredEvents;

            //UNCOMMENT following line if you want special debug spectrogram, i.e. with special plots.
            //  NOTE: Standard spectrograms are produced by setting SaveSonogramImages: "True" or "WhenEventsDetected" in UserName.SpeciesName.yml config file.
            //GenericRecognizer.SaveDebugSpectrogram(territorialResults, genericConfig, outputDirectory, audioRecording.BaseName);
            return(combinedResults);
        }
Example #18
0
        /// <summary>
        /// EXPLANATION: A vertical track is a near click or rapidly frequency-modulated tone. A good example is the whip component of the whip-bird call.
        /// They would typically be only a few time-frames duration.
        /// This method averages dB log values incorrectly but it is faster than doing many log conversions and is accurate enough for the purpose.
        /// </summary>
        /// <param name="sonogram">The spectrogram to be searched.</param>
        /// <param name="parameters">parameters for the upwards track algorithm.</param>
        /// <param name="segmentStartOffset">The start time of the current recording segment under analysis.</param>
        /// <returns>A list of acoustic events containing upward tracks, plus the per-frame temporal intensity array.</returns>
        public static (List <EventCommon> Events, double[] CombinedIntensity) GetUpwardTracks(
            SpectrogramStandard sonogram,
            AnalysisPrograms.Recognizers.Base.UpwardTrackParameters parameters,
            TimeSpan segmentStartOffset)
        {
            // Unpack spectrogram dimensions and search parameters.
            var    sonogramData      = sonogram.Data;
            int    frameCount        = sonogramData.GetLength(0);
            int    binCount          = sonogramData.GetLength(1);
            // NOTE(review): frameStep appears unused in this method.
            var    frameStep         = sonogram.FrameStep;
            int    nyquist           = sonogram.NyquistFrequency;
            double binWidth          = nyquist / (double)binCount;
            int    minBin            = (int)Math.Round(parameters.MinHertz.Value / binWidth);
            int    maxBin            = (int)Math.Round(parameters.MaxHertz.Value / binWidth);
            var    minBandwidthHertz = parameters.MinBandwidthHertz.Value;
            var    maxBandwidthHertz = parameters.MaxBandwidthHertz.Value;
            var    decibelThreshold  = parameters.DecibelThreshold.Value;

            // Converts between frame/bin indices and seconds/hertz for this segment.
            var converter = new UnitConverters(
                segmentStartOffset: segmentStartOffset.TotalSeconds,
                sampleRate: sonogram.SampleRate,
                frameSize: sonogram.Configuration.WindowSize,
                frameOverlap: sonogram.Configuration.WindowOverlap);

            // Find all frame peaks and place in peaks matrix
            // avoid row edge effects (rows 0 and frameCount - 1 are skipped because
            // the peak test compares with row - 1 and row + 1).
            var peaks = new double[frameCount, binCount];

            for (int row = 1; row < frameCount - 1; row++)
            {
                for (int col = minBin; col < maxBin; col++)
                {
                    if (sonogramData[row, col] < decibelThreshold)
                    {
                        continue;
                    }

                    // if given matrix element is greater than in frame either side
                    bool isPeak = (sonogramData[row, col] > sonogramData[row - 1, col]) && (sonogramData[row, col] > sonogramData[row + 1, col]);
                    if (isPeak)
                    {
                        peaks[row, col] = sonogramData[row, col];
                    }
                }
            }

            //NOTE: the Peaks matrix is same size as the sonogram.
            var tracks = GetUpwardTracks(peaks, minBin, maxBin, minBandwidthHertz, maxBandwidthHertz, decibelThreshold, converter);

            // initialise tracks as events and get the combined intensity array.
            var events = new List <SpectralEvent>();
            var temporalIntensityArray = new double[frameCount];

            // Normalise event scores against a ceiling of five times the dB threshold.
            var scoreRange             = new Interval <double>(0.0, decibelThreshold * 5);

            foreach (var track in tracks)
            {
                var ae = new WhipEvent(track, scoreRange)
                {
                    SegmentStartSeconds    = segmentStartOffset.TotalSeconds,
                    SegmentDurationSeconds = frameCount * converter.SecondsPerFrameStep,
                    Name = "Whip",
                };

                events.Add(ae);

                // fill the intensity array: amplitudes are summed per frame here
                // (contrast with GetForwardTracks, which Max-combines them).
                //var startRow = (int)converter.TemporalScale.To(track.StartTimeSeconds);
                var startRow       = converter.FrameFromStartTime(track.StartTimeSeconds);
                var amplitudeTrack = track.GetAmplitudeOverTimeFrames();
                for (int i = 0; i < amplitudeTrack.Length; i++)
                {
                    temporalIntensityArray[startRow + i] += amplitudeTrack[i];
                }
            }

            List <EventCommon> returnEvents = events.Cast <EventCommon>().ToList();

            // combine proximal events that occupy similar frequency band
            if (parameters.CombineProximalSimilarEvents)
            {
                returnEvents = CompositeEvent.CombineSimilarProximalEvents(events, parameters.SyllableStartDifference, parameters.SyllableHertzDifference);
            }

            return(returnEvents, temporalIntensityArray);
        }
 /// <summary>
 /// Pure passthrough: delegates event writing entirely to the base implementation.
 /// NOTE(review): this override adds no behavior; it may exist only as an extension
 /// point placeholder — confirm before removing.
 /// </summary>
 protected override void OnWriteEvent(HttpListenerContext content, CompositeRoot compositeRoot, CompositeEvent compositeEvent, StreamWriter eventStreamWriter)
 {
     base.OnWriteEvent(content, compositeRoot, compositeEvent, eventStreamWriter);
 }