/// <summary>
/// Renders a sonogram image decorated with a time track, a segmentation track
/// and the supplied events overlaid as acoustic-event rectangles.
/// </summary>
/// <param name="sonogram">The sonogram to render.</param>
/// <param name="events">The events to draw on top of the sonogram.</param>
/// <returns>The composite sonogram image.</returns>
public static Image DrawSonogram(BaseSonogram sonogram, IEnumerable<EventCommon> events)
{
    var image = new Image_MultiTrack(sonogram.GetImage(false, true, doMelScale: false));
    image.AddTrack(ImageTrack.GetTimeTrack(sonogram.Duration, sonogram.FramesPerSecond));
    ////image.AddTrack(ImageTrack.GetWavEnvelopeTrack(sonogram, image.sonogramImage.Width));
    image.AddTrack(ImageTrack.GetSegmentationTrack(sonogram));

    //############################################################################################ TODO TODO
    // The drawing API still consumes the legacy AcousticEvent type,
    // so bridge each EventCommon through the converter before adding.
    var aeEvents = events
        .Select(be => EventConverters.ConvertSpectralEventToAcousticEvent((SpectralEvent)be))
        .ToList();

    image.AddEvents(
        aeEvents,
        sonogram.NyquistFrequency,
        sonogram.Configuration.FreqBinCount,
        sonogram.FramesPerSecond);

    return image.GetImage();
}
/// <summary>
/// This method draws a spectrogram with other useful information attached.
/// Accepts the legacy <see cref="AcousticEvent"/> list and bridges it to the
/// <see cref="EventCommon"/> overload, which does the actual drawing.
/// </summary>
/// <param name="sonogram">of BaseSonogram class.</param>
/// <param name="events">a list of acoustic events.</param>
/// <param name="plots">a list of plots relevant to the spectrogram scores.</param>
/// <param name="hits">not often used - can be null.</param>
public static Image<Rgb24> GetSonogramPlusCharts(
    BaseSonogram sonogram,
    List<AcousticEvent> events,
    List<Plot> plots,
    double[,] hits)
{
    // Convert the legacy event type, then delegate to the EventCommon overload.
    var newEvents = EventConverters.ConvertAcousticEventsToSpectralEvents(events);
    return GetSonogramPlusCharts(sonogram, newEvents, plots, hits);
}
/// <summary>
/// Detect using EPR (Event Pattern Recognition).
/// Runs AED over the recording first, converts the resulting events to
/// rectangles, then filters/scores them for the ground-parrot call pattern.
/// </summary>
/// <param name="wavFilePath">
/// The wav file path.
/// </param>
/// <param name="aedConfiguration">
/// Configuration passed through to the initial AED pass.
/// </param>
/// <param name="eprNormalisedMinScore">
/// The epr Normalised Min Score.
/// </param>
/// <param name="segmentStartOffset">
/// Start offset of this segment within the source recording.
/// </param>
/// <returns>
/// Tuple containing base Sonogram and list of acoustic events.
/// </returns>
public static Tuple<BaseSonogram, List<AcousticEvent>> Detect(FileInfo wavFilePath, Aed.AedConfiguration aedConfiguration, double eprNormalisedMinScore, TimeSpan segmentStartOffset)
{
    // First pass: generic acoustic event detection (events, recording, sonogram).
    Tuple<EventCommon[], AudioRecording, BaseSonogram> aed = Aed.Detect(wavFilePath, aedConfiguration, segmentStartOffset);
    var events = aed.Item1;

    // Convert the EventCommon results back to legacy AcousticEvents for the EPR API.
    var newEvents = new List<AcousticEvent>();
    foreach (var be in events)
    {
        newEvents.Add(EventConverters.ConvertSpectralEventToAcousticEvent((SpectralEvent)be));
    }

    // Project each event to a time/frequency rectangle for pattern matching.
    var aeEvents = newEvents.Select(ae => Util.fcornersToRect(ae.TimeStart, ae.TimeEnd, ae.HighFrequencyHertz, ae.LowFrequencyHertz)).ToList();

    Log.Debug("EPR start");
    var eprRects = EventPatternRecog.DetectGroundParrots(aeEvents, eprNormalisedMinScore);
    Log.Debug("EPR finished");

    var sonogram = aed.Item3;
    SonogramConfig config = sonogram.Configuration;
    double framesPerSec = 1 / config.GetFrameOffset(); // Surely this should go somewhere else
    double freqBinWidth = config.NyquistFreq / (double)config.FreqBinCount; // TODO this is common with AED

    // Re-wrap each matched rectangle (with its score, Item2) as an AcousticEvent.
    var eprEvents = new List<AcousticEvent>();
    foreach (var rectScore in eprRects)
    {
        var ae = new AcousticEvent(segmentStartOffset, rectScore.Item1.Left, rectScore.Item1.Right - rectScore.Item1.Left, rectScore.Item1.Bottom, rectScore.Item1.Top);

        // NOTE(review): SetTimeAndFreqScales is called twice with different overloads;
        // the second call may overwrite scales set by the first — verify intent.
        ae.SetTimeAndFreqScales(framesPerSec, freqBinWidth);
        ae.SetTimeAndFreqScales(sonogram.NyquistFrequency, sonogram.Configuration.WindowSize, 0);
        ae.SetScores(rectScore.Item2, 0, 1);
        ae.BorderColour = aedConfiguration.AedEventColor;
        ae.SegmentStartSeconds = segmentStartOffset.TotalSeconds;
        ae.SegmentDurationSeconds = aed.Item2.Duration.TotalSeconds;
        eprEvents.Add(ae);
    }

    return (Tuple.Create(sonogram, eprEvents));
}
/// <summary>
/// Handles event creation: optionally persists a venue draft, persists the
/// event draft, publishes an <c>EventCreated</c> message, and returns a
/// lightweight <see cref="Event"/> result.
/// </summary>
/// <param name="request">The create-event command carrying the event input.</param>
/// <param name="cancellationToken">Token used when publishing to the bus.</param>
/// <returns>The created event's id and name.</returns>
public async Task<Event> Handle(CreateEventCommand request, CancellationToken cancellationToken)
{
    var input = request.EventInput;

    // Only build a venue draft when a venue was actually supplied.
    var venueDraft = input.Venue is null
        ? null
        : Venue.CreateDraft(input.Venue.Name, input.Venue.Sections.ToContract());

    if (input.Venue != null)
    {
        await _venueRepository.AddOneAsync(venueDraft);
    }

    var eventDraft = Models.Event.CreateDraft(
        input.Name,
        input.Type,
        input.Category,
        input.Url,
        input.PrimaryOrganizerId,
        input.Summary,
        input.StartDate,
        input.EndDate,
        venueDraft?.Id ?? string.Empty,
        input.Tags,
        input.IsFree,
        EventConverters.ToContract(input.Address),
        input.Image.ToContract());

    await _eventRepository.AddOneAsync(eventDraft);

    // Announce creation; venue fields are null when no venue was supplied.
    await _bus.Publish<EventCreated>(
        new
        {
            EventId = eventDraft.Id,
            VenueId = venueDraft?.Id,
            Name = eventDraft.Name,
            VenueSections = venueDraft?.Sections.ToVenueSectionsCreated().ToList(),
        },
        cancellationToken);

    return new Event { Id = eventDraft.Id, Name = eventDraft.Name };
}
/// <summary>
/// Runs the recognizer over one audio segment: loads the recording, performs
/// recognition, normalizes event offsets, optionally summarizes high-resolution
/// indices, and writes data/image outputs according to the analysis settings.
/// </summary>
/// <typeparam name="T">Segment source type carried by the segment settings.</typeparam>
/// <param name="analysisSettings">Global analysis settings, including the recognizer configuration.</param>
/// <param name="segmentSettings">Per-segment settings: audio file, offsets, output paths.</param>
/// <returns>The populated analysis results for this segment.</returns>
public override AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    var recording = new AudioRecording(segmentSettings.SegmentAudioFile.FullName);

    // get indices configuration - extracted in BeforeAnalyze
    var acousticIndicesConfig = (RecognizerConfig)analysisSettings.Configuration;

    // get a lazily calculated indices function - if you never get the lazy value, the indices will never be calculated
    var lazyIndices = this.GetLazyIndices(recording, analysisSettings, segmentSettings, acousticIndicesConfig.HighResolutionIndices);

    // determine imageWidth for output images: one pixel per index-calculation interval
    int imageWidth = (int)Math.Floor(recording.Duration.TotalSeconds / acousticIndicesConfig.HighResolutionIndices.IndexCalculationDuration);

    // execute actual analysis
    RecognizerResults results = this.Recognize(recording, analysisSettings.Configuration, segmentSettings.SegmentStartOffset, lazyIndices, segmentSettings.SegmentOutputDirectory, imageWidth);

    var analysisResults = new AnalysisResult2(analysisSettings, segmentSettings, recording.Duration);
    BaseSonogram sonogram = results.Sonogram;
    double[,] hits = results.Hits;
    var predictedEvents = results.GetAllEvents();

    // double check all the events have the right offset in case it was missed
    foreach (var predictedEvent in predictedEvents)
    {
        predictedEvent.SegmentStartSeconds = segmentSettings.SegmentStartOffset.TotalSeconds;
        predictedEvent.SegmentDurationSeconds = recording.Duration.TotalSeconds;
    }

    analysisResults.Events = predictedEvents.ToArray();

    // compress high resolution indices - and save them.
    // IF they aren't used, empty values are returned.
    if (lazyIndices.IsValueCreated)
    {
        this.SummarizeHighResolutionIndices(analysisResults, lazyIndices.Value, acousticIndicesConfig.HighResolutionIndices);
    }

    // write intermediate output if necessary
    // NOTE(review): the same AnalysisDataSaveBehavior flag gates three separate
    // writes below — presumably intentional so each output is independent; verify.
    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        this.WriteEventsFile(segmentSettings.SegmentEventsFile, analysisResults.Events);
        analysisResults.EventsFile = segmentSettings.SegmentEventsFile;
    }

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        this.WriteSummaryIndicesFile(segmentSettings.SegmentSummaryIndicesFile, analysisResults.SummaryIndices);
    }

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        analysisResults.SpectraIndicesFiles = this.WriteSpectrumIndicesFiles(segmentSettings.SegmentSpectrumIndicesDirectory, segmentSettings.Segment.SourceMetadata.Identifier, analysisResults.SpectralIndices);
    }

    if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave(analysisResults.Events.Length))
    {
        string imagePath = segmentSettings.SegmentImageFile.FullName;
        const double EventThreshold = 0.1;
        var plots = results.Plots ?? new List<Plot>();

        //TODO Remove this when we remove AcousticEvent.
        // Drawing consumes EventCommon; legacy AcousticEvents are converted in place.
        var convertedEvents = predictedEvents.Select(ec => ec is AcousticEvent ae ? EventConverters.ConvertAcousticEventToSpectralEvent(ae) : (EventCommon)ec).ToList();

        Image image = this.DrawSonogram(sonogram, hits, plots, convertedEvents, EventThreshold);
        image.Save(imagePath);
        analysisResults.ImageFile = segmentSettings.SegmentImageFile;

        // draw a fancy high res index image
        // IF indices aren't used, no image is drawn.
        if (lazyIndices.IsValueCreated)
        {
            this.DrawLongDurationSpectrogram(segmentSettings.SegmentOutputDirectory, recording.BaseName, results.ScoreTrack, lazyIndices.Value, acousticIndicesConfig.HighResolutionIndices);
        }
    }

    return (analysisResults);
}
/// <summary>
/// Searches a frequency band of the spectrogram for harmonic (stacked-formant)
/// components using the cross-correlation/DCT method and converts the detections
/// into spectral events.
/// </summary>
/// <param name="spectrogram">The standard spectrogram to search.</param>
/// <param name="minHz">Bottom of the search band in Hertz.</param>
/// <param name="maxHz">Top of the search band in Hertz.</param>
/// <param name="nyquist">Nyquist frequency of the recording in Hertz.</param>
/// <param name="decibelThreshold">Minimum decibel value passed to the harmonic detector.</param>
/// <param name="dctThreshold">Minimum DCT (harmonic intensity) score for a frame to count.</param>
/// <param name="minDuration">Minimum event duration in seconds.</param>
/// <param name="maxDuration">Maximum event duration in seconds.</param>
/// <param name="minFormantGap">Minimum permitted formant gap in Hertz.</param>
/// <param name="maxFormantGap">Maximum permitted formant gap in Hertz.</param>
/// <param name="segmentStartOffset">Start offset of this segment within the source recording.</param>
/// <returns>The detected spectral events, the per-frame decibel array, and the harmonic intensity scores.</returns>
public static (List<EventCommon> SpectralEvents, double[] AmplitudeArray, double[] HarmonicIntensityScores) GetComponentsWithHarmonics(
    SpectrogramStandard spectrogram,
    int minHz,
    int maxHz,
    int nyquist,
    double decibelThreshold,
    double dctThreshold,
    double minDuration,
    double maxDuration,
    int minFormantGap,
    int maxFormantGap,
    TimeSpan segmentStartOffset)
{
    // Event threshold - Determines FP / FN trade-off for events.
    //double eventThreshold = 0.2;
    var sonogramData = spectrogram.Data;
    int frameCount = sonogramData.GetLength(0);
    int binCount = sonogramData.GetLength(1);
    double freqBinWidth = nyquist / (double)binCount;

    // FIX: clamp bin indices into [0, binCount - 1]. Previously maxHz == nyquist
    // rounded to maxBin == binCount, which indexes one column past the end of the
    // matrix in the Submatrix call below.
    int minBin = Math.Max(0, (int)Math.Round(minHz / freqBinWidth));
    int maxBin = Math.Min(binCount - 1, (int)Math.Round(maxHz / freqBinWidth));

    // extract the sub-band to be searched
    double[,] subMatrix = MatrixTools.Submatrix(sonogramData, 0, minBin, frameCount - 1, maxBin);

    //ii: DETECT HARMONICS
    // now look for harmonics in search band using the Xcorrelation-DCT method.
    var results = CrossCorrelation.DetectHarmonicsInSpectrogramData(subMatrix, decibelThreshold);

    // set up score arrays
    double[] dBArray = results.Item1;
    double[] harmonicIntensityScores = results.Item2; // an array of formant intensity
    int[] maxIndexArray = results.Item3;              // per-frame index of the dominant DCT coefficient

    int bandBinCount = maxBin - minBin + 1; // loop-invariant; hoisted out of the frame loop
    for (int r = 0; r < frameCount; r++)
    {
        if (harmonicIntensityScores[r] < dctThreshold)
        {
            continue;
        }

        // ignore locations with incorrect formant gap
        int maxId = maxIndexArray[r];
        double freqBinGap = 2 * bandBinCount / (double)maxId;
        double formantGap = freqBinGap * freqBinWidth;
        if (formantGap < minFormantGap || formantGap > maxFormantGap)
        {
            harmonicIntensityScores[r] = 0.0;
        }
    }

    // smooth the harmonicIntensityScores array to allow for brief gaps.
    harmonicIntensityScores = DataTools.filterMovingAverageOdd(harmonicIntensityScores, 3);

    // extract the events based on length and threshhold.
    // Note: This method does NOT do prior smoothing of the score array.
    var harmonicEvents = AcousticEvent.ConvertScoreArray2Events(
        harmonicIntensityScores,
        minHz,
        maxHz,
        spectrogram.FramesPerSecond,
        spectrogram.FBinWidth,
        dctThreshold,
        minDuration,
        maxDuration,
        segmentStartOffset);

    var spectralEvents = new List<EventCommon>();

    // add in temporary names to the events
    // These can be altered later.
    foreach (var he in harmonicEvents)
    {
        var se = EventConverters.ConvertAcousticEventToSpectralEvent(he);
        se.Name = "Harmonics";
        //se.ComponentName = "Harmonics";
        spectralEvents.Add(se);
    }

    return (spectralEvents, dBArray, harmonicIntensityScores);
}