/// <summary>
/// Generates spectrogram images for one audio segment. This analyzer produces no events;
/// the image is the point, with an optional CSV dump of the decibel spectrogram data.
/// </summary>
public AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    var segmentAudio = segmentSettings.SegmentAudioFile;
    var segmentRecording = new AudioRecording(segmentAudio.FullName);
    var recordingBaseName = segmentRecording.BaseName;
    var resultsDirectory = segmentSettings.SegmentOutputDirectory;
    bool shouldWriteCsv = analysisSettings.AnalysisDataSaveBehavior;

    var result = new AnalysisResult2(analysisSettings, segmentSettings, segmentRecording.Duration);

    // Generate the spectrogram images.
    // TODO: the following may need to be checked since the method signature changed in December 2019.
    var analyzerConfig = ConfigFile.Deserialize<AnalyzerConfig>(analysisSettings.ConfigFile);
    var spectrogramResult = Audio2Sonogram.GenerateSpectrogramImages(segmentAudio, analyzerConfig, recordingBaseName);

    // This analysis produces no results! But we still print images (that is the point).
    if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave(result.Events.Length))
    {
        Debug.Assert(segmentSettings.SegmentImageFile.Exists);
    }

    if (shouldWriteCsv)
    {
        // Dump the raw decibel spectrogram matrix alongside the images.
        var csvBaseName = Path.GetFileNameWithoutExtension(segmentSettings.SegmentAudioFile.Name);
        var csvFile = resultsDirectory.CombineFile(csvBaseName + ".Spectrogram.csv");
        Csv.WriteMatrixToCsv(csvFile, spectrogramResult.DecibelSpectrogram.Data, TwoDimensionalArray.None);
    }

    return result;
}
/// <summary>
/// Generates spectrogram images for one audio segment. This analyzer produces no events;
/// the composite image is the output.
/// </summary>
public AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    var segmentAudio = segmentSettings.SegmentAudioFile;
    var segmentRecording = new AudioRecording(segmentAudio.FullName);
    var recordingBaseName = segmentRecording.BaseName;
    var generatorConfig = (SpectrogramGeneratorConfig)analysisSettings.Configuration;

    var result = new AnalysisResult2(analysisSettings, segmentSettings, segmentRecording.Duration);

    var spectrogramResult = GenerateSpectrogramImages(segmentAudio, generatorConfig, recordingBaseName);

    // This analysis produces no results! But we still print images (that is the point).
    if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave(result.Events.Length))
    {
        ImageExtensions.Save(spectrogramResult.CompositeImage, segmentSettings.SegmentImageFile.FullName);
    }

    return result;
}
/// <summary>
/// Runs acoustic event detection (AED) over one audio segment, producing events
/// and an optional annotated sonogram image.
/// </summary>
public override AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    FileInfo audioFile = segmentSettings.SegmentAudioFile;
    var aedConfig = GetAedParametersFromConfigFileOrDefaults(analysisSettings.Configuration);
    var results = Detect(audioFile, aedConfig, segmentSettings.SegmentStartOffset);

    var analysisResults = new AnalysisResult2(analysisSettings, segmentSettings, results.Item2.Duration);
    analysisResults.AnalysisIdentifier = this.Identifier;
    analysisResults.Events = results.Item1;
    BaseSonogram sonogram = results.Item3;

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        // Persist the events and record where they were written.
        this.WriteEventsFile(segmentSettings.SegmentEventsFile, analysisResults.Events);
        analysisResults.EventsFile = segmentSettings.SegmentEventsFile;
    }

    // FIX: removed a second, duplicated "if (analysisSettings.AnalysisDataSaveBehavior)"
    // block whose body was empty ("// noop") — dead code.

    // save image of sonograms
    if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave(analysisResults.Events.Length))
    {
        Image image = DrawSonogram(sonogram, results.Item1);
        image.Save(segmentSettings.SegmentImageFile.FullName);
        analysisResults.ImageFile = segmentSettings.SegmentImageFile;
    }

    return analysisResults;
}
/// <summary>
/// A WRAPPER AROUND THE analyser.Analyze(analysisSettings) METHOD
/// To be called as an executable with command line arguments.
/// NOTE(review): this entry point is intentionally broken — it throws before running
/// any analysis; everything after the throw is unreachable and kept only for reference.
/// </summary>
public static void Execute(Arguments arguments)
{
    Contract.Requires(arguments != null);

    var (analysisSettings, segmentSettings) = arguments.ToAnalysisSettings();
    TimeSpan offsetStart = TimeSpan.FromSeconds(arguments.Start ?? 0);
    TimeSpan duration = TimeSpan.FromSeconds(arguments.Duration ?? 0);
    int resampleRate = ConfigDictionary.GetInt(AnalysisKeys.ResampleRate, analysisSettings.ConfigDict);

    // EXTRACT THE REQUIRED RECORDING SEGMENT
    // Delete any stale segment file left over from a previous run before re-extracting.
    FileInfo tempF = segmentSettings.SegmentAudioFile;
    if (tempF.Exists)
    {
        tempF.Delete();
    }

    if (duration == TimeSpan.Zero)
    {
        // Process entire file
        AudioFilePreparer.PrepareFile(arguments.Source, tempF, new AudioUtilityRequest { TargetSampleRate = resampleRate }, analysisSettings.AnalysisTempDirectoryFallback);
        ////var fiSegment = AudioFilePreparer.PrepareFile(diOutputDir, fiSourceFile, , Human2.RESAMPLE_RATE);
    }
    else
    {
        // Extract and resample only the requested [offsetStart, offsetStart + duration] window.
        AudioFilePreparer.PrepareFile(arguments.Source, tempF, new AudioUtilityRequest { TargetSampleRate = resampleRate, OffsetStart = offsetStart, OffsetEnd = offsetStart.Add(duration) }, analysisSettings.AnalysisTempDirectoryFallback);
        ////var fiSegmentOfSourceFile = AudioFilePreparer.PrepareFile(diOutputDir, new FileInfo(recordingPath), MediaTypes.MediaTypeWav, TimeSpan.FromMinutes(2), TimeSpan.FromMinutes(3), RESAMPLE_RATE);
    }

    //DO THE ANALYSIS
    // #############################################################################################################################################
    // BROKEN!
    // NOTE(review): everything below this throw is dead code retained as documentation of the old flow.
    throw new NotImplementedException("Broken in code updates");
    IAnalyser2 analyser = null; //new Rain_OBSOLETE();
    AnalysisResult2 result = analyser.Analyze<FileInfo>(analysisSettings, null /*broken */);

    /*DataTable dt = result.Data;
     * //#############################################################################################################################################
     *
     * // ADD IN ADDITIONAL INFO TO RESULTS TABLE
     * if (dt != null)
     * {
     *     int iter = 0; // dummy - iteration number would ordinarily be available at this point.
     *     int startMinute = (int)offsetStart.TotalMinutes;
     *     foreach (DataRow row in dt.Rows)
     *     {
     *         row[InitialiseIndexProperties.KEYRankOrder] = iter;
     *         row[InitialiseIndexProperties.KEYStartMinute] = startMinute;
     *         row[InitialiseIndexProperties.KEYSegmentDuration] = result.AudioDuration.TotalSeconds;
     *     }
     *
     *     CsvTools.DataTable2CSV(dt, segmentSettings.SegmentSummaryIndicesFile.FullName);
     *     //DataTableTools.WriteTable2Console(dt);
     * }*/
}
/// <summary>
/// This method calls IndexCalculateSixOnly.Analysis() to calculate six spectral indices
/// and then calls ContentSignatures.AnalyzeOneMinute() to obtain a content description derived from those indices and an array of functional templates.
/// </summary>
public override AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    // set the start time for the current recording segment. Default is zero.
    var elapsedTimeAtStartOfRecording = segmentSettings.SegmentStartOffset;
    var startMinuteId = (int)Math.Round(elapsedTimeAtStartOfRecording.TotalMinutes);
    var audioFile = segmentSettings.SegmentAudioFile;
    var recording = new AudioRecording(audioFile.FullName);

    // Calculate six spectral indices.
    var segmentResults = IndexCalculateSixOnly.Analysis(
        recording,
        segmentSettings.SegmentStartOffset,
        segmentSettings.Segment.SourceMetadata.SampleRate);

    // DO THE CONTENT DESCRIPTION FOR ONE MINUTE HERE
    // First get acoustic indices for one minute, convert to Dictionary and normalize the values.
    var indicesDictionary = segmentResults.AsArray().ToTwoDimensionalArray(SpectralIndexValuesForContentDescription.CachedSelectors);
    //var indicesDictionary = IndexCalculateSixOnly.ConvertIndicesToDictionary(segmentResults);

    // Normalize each index vector into [0, 1] using the bounds declared for that index name.
    foreach (string key in ContentSignatures.IndexNames)
    {
        var indexBounds = ContentSignatures.IndexValueBounds[key];
        var indexArray = indicesDictionary[key];
        var normalisedVector = DataTools.NormaliseInZeroOne(indexArray, indexBounds[0], indexBounds[1]);
        indicesDictionary[key] = normalisedVector;
    }

    // scan templates over one minute of indices to get content description
    var descriptionResultForOneMinute = ContentSignatures.AnalyzeOneMinute(
        this.functionalTemplates,
        this.templatesAsDictionary,
        indicesDictionary.ToDictionary(kvp => kvp.Key, kvp => kvp.Value.GetRow(0)), // this line converts dictionary of one-row matrices to dictionary of arrays.
        startMinuteId);

    // set up the analysis results to return
    var analysisResults = new AnalysisResult2(analysisSettings, segmentSettings, recording.Duration)
    {
        AnalysisIdentifier = this.Identifier,
        SpectralIndices = new SpectralIndexBase[]
        {
            // Transfer the spectral index results to AnalysisResults
            // TODO: consider not returning this value if it is not needed in summarize
            segmentResults,
        },
        MiscellaneousResults =
        {
            { nameof(DescriptionResult), descriptionResultForOneMinute },
        },
    };

    // Anchor the single spectral-index entry at the segment's start offset.
    analysisResults.SpectralIndices[0].ResultStartSeconds = segmentSettings.SegmentStartOffset.TotalSeconds;
    //spectralIndexBase.ResultStartSeconds >= result.SegmentStartOffset.TotalSeconds,
    return (analysisResults);
}
/// <summary>
/// A WRAPPER AROUND THE analyzer.Analyze(analysisSettings) METHOD.
/// To be called as an executable with command line arguments.
/// </summary>
/// <param name="arguments">
/// The command line arguments.
/// </param>
public static void Execute(Arguments arguments)
{
    Contract.Requires(arguments != null);

    var (analysisSettings, segmentSettings) = arguments.ToAnalysisSettings();
    var segmentStart = TimeSpan.FromSeconds(arguments.Start ?? 0);
    var segmentDuration = TimeSpan.FromSeconds(arguments.Duration ?? 0);

    // EXTRACT THE REQUIRED RECORDING SEGMENT
    // A zero duration means "process the entire file"; otherwise only the requested window is cut out.
    var segmentAudioFile = segmentSettings.SegmentAudioFile;
    var request = segmentDuration == TimeSpan.Zero
        ? new AudioUtilityRequest { TargetSampleRate = ResampleRate }
        : new AudioUtilityRequest
        {
            TargetSampleRate = ResampleRate,
            OffsetStart = segmentStart,
            OffsetEnd = segmentStart.Add(segmentDuration),
        };
    AudioFilePreparer.PrepareFile(
        arguments.Source,
        segmentAudioFile,
        request,
        analysisSettings.AnalysisTempDirectoryFallback);

    // DO THE ANALYSIS
    /* ############################################################################################################################################# */
    IAnalyser2 analyser = new LitoriaFallax_OBSOLETE();
    analyser.BeforeAnalyze(analysisSettings);
    AnalysisResult2 result = analyser.Analyze(analysisSettings, segmentSettings);
    /* ############################################################################################################################################# */

    // Report how many events were detected.
    if (result.Events.Length > 0)
    {
        LoggedConsole.WriteLine("{0} events found", result.Events.Length);
    }
    else
    {
        LoggedConsole.WriteLine("No events found");
    }
}
/// <summary>
/// Runs the male koala recognizer over one audio segment, producing events,
/// optional one-minute summary indices, and an optional annotated sonogram image.
/// </summary>
public override AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    FileInfo audioFile = segmentSettings.SegmentAudioFile;

    /* ###################################################################### */
    Dictionary<string, string> configuration = analysisSettings.Configuration.ToDictionary();
    KoalaMaleResults results = Analysis(audioFile, configuration, segmentSettings.SegmentStartOffset);
    /* ###################################################################### */

    BaseSonogram sonogram = results.Sonogram;
    double[,] hits = results.Hits;
    Plot scores = results.Plot;

    var analysisResults = new AnalysisResult2(analysisSettings, segmentSettings, results.RecordingtDuration)
    {
        AnalysisIdentifier = this.Identifier,
    };

    analysisResults.Events = results.Events.ToArray();

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        // Persist the events and record where they were written.
        this.WriteEventsFile(segmentSettings.SegmentEventsFile, analysisResults.Events);
        analysisResults.EventsFile = segmentSettings.SegmentEventsFile;
    }

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        // Roll the events up into one-minute summary index rows and persist them.
        TimeSpan unitTime = TimeSpan.FromMinutes(1.0);
        analysisResults.SummaryIndices = this.ConvertEventsToSummaryIndices(
            analysisResults.Events,
            unitTime,
            analysisResults.SegmentAudioDuration,
            0);

        this.WriteSummaryIndicesFile(segmentSettings.SegmentSummaryIndicesFile, analysisResults.SummaryIndices);

        // FIX: record the destination file on the results object, consistent with
        // the other analyzers in this project (the file was written but never recorded).
        analysisResults.SummaryIndicesFile = segmentSettings.SegmentSummaryIndicesFile;
    }

    if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave(analysisResults.Events.Length))
    {
        string imagePath = segmentSettings.SegmentImageFile.FullName;
        const double EventThreshold = 0.1;
        Image image = DrawSonogram(sonogram, hits, scores, results.Events, EventThreshold);
        image.Save(imagePath, ImageFormat.Png);
        analysisResults.ImageFile = segmentSettings.SegmentImageFile;
    }

    return analysisResults;
}
/// <summary>
/// Compress high resolution indices - intended to be used when summarizing results.
/// Summarize method not yet written.
/// </summary>
/// <param name="analysisResults">The results object for this segment; the compressed low-resolution spectra are written into it.</param>
/// <param name="indexResults">One high-resolution index result per index-calculation subsegment.</param>
/// <param name="highResolutionParsedConfiguration">Configuration supplying the low-resolution scale, index calculation duration, and index properties.</param>
private void SummarizeHighResolutionIndices(
    AnalysisResult2 analysisResults,
    IndexCalculateResult[] indexResults,
    AcousticIndices.AcousticIndicesConfig highResolutionParsedConfiguration)
{
    // NOW COMPRESS THE HI-RESOLUTION SPECTRAL INDICES TO LOW RES
    // Default target resolution is 60 seconds when "LowResolution" is absent from the config.
    double lowResolution = highResolutionParsedConfiguration.GetDoubleOrNull("LowResolution") ?? 60.0;
    TimeSpan imageScale = TimeSpan.FromSeconds(lowResolution);
    TimeSpan dataScale = highResolutionParsedConfiguration.IndexCalculationDuration.Seconds();
    var dictionaryOfSpectra = indexResults.Select(icr => icr.SpectralIndexValues).ToArray().ToTwoDimensionalArray(SpectralIndexValues.CachedSelectors, TwoDimensionalArray.Rotate90ClockWise);
    var spectralSelection = IndexMatrices.CompressIndexSpectrograms(dictionaryOfSpectra, imageScale, dataScale);

    // check that have not compressed matrices to zero length
    double[,] matrix = spectralSelection.First().Value;
    if (matrix.GetLength(0) == 0 || matrix.GetLength(1) == 0)
    {
        LoggedConsole.WriteErrorLine("WARNING: SPECTRAL INDEX MATRICES compressed to zero length!!!!!!!!!!!!!!!!!!!!!!!!");
    }

    // Place LOW RESOLUTION SPECTRAL INDICES INTO analysisResults before returning.
    //int windowLength = (int?)highResolutionConfig[AnalysisKeys.FrameLength] ?? IndexCalculate.DefaultWindowSize;
    var indexProperties = highResolutionParsedConfiguration.IndexProperties;
    SpectralIndexValues.CheckExistenceOfSpectralIndexValues(indexProperties);

    // Init a new spectral indices class and populate it with spectral indices
    var spectrums = SpectralIndexValues.ImportFromDictionary(spectralSelection);
    for (int i = 0; i < spectrums.Length; i++)
    {
        // Each low-resolution row starts i * lowResolution seconds into the segment.
        spectrums[i].ResultStartSeconds = (analysisResults.SegmentStartOffset + TimeSpan.FromSeconds(i * lowResolution)).TotalSeconds;
        spectrums[i].SegmentDurationSeconds = imageScale.TotalSeconds;
        spectrums[i].FileName = ((SegmentSettings<object>)analysisResults.SegmentSettings).Segment.SourceMetadata.Identifier;
    }

    // assign to the analysis result
    analysisResults.SpectralIndices = spectrums;

    // TODO TODO TODO
    // ALSO NEED TO COMPRESS THE analysisResults.SummaryIndices To LOW RESOLUTION
    //var summaryIndexValues = new SummaryIndexValues();
    //summaryIndexValues.BackgroundNoise = ETC;
    // ETC
    //var summaryiv = new SummaryIndexValues[1];
    //summaryiv[0] = summaryIndexValues;
    //analysisResults.SummaryIndices = summaryiv;
}
/// <summary>
/// Runs the canetoad recognizer over one audio segment, producing events,
/// optional one-minute summary indices, and an optional annotated sonogram image.
/// </summary>
public override AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    FileInfo audioFile = segmentSettings.SegmentAudioFile;

    // execute actual analysis
    dynamic configuration = analysisSettings.Configuration;
    var recording = new AudioRecording(audioFile.FullName);
    Log.Debug("Canetoad sample rate:" + recording.SampleRate);

    RecognizerResults results = Analysis(recording, configuration, segmentSettings.SegmentStartOffset, segmentSettings.SegmentOutputDirectory);

    var analysisResults = new AnalysisResult2(analysisSettings, segmentSettings, recording.Duration);
    BaseSonogram sonogram = results.Sonogram;
    double[,] hits = results.Hits;
    Plot scores = results.Plots.First();
    List<AcousticEvent> predictedEvents = results.Events;
    analysisResults.Events = predictedEvents.ToArray();

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        // Persist the events and record where they were written.
        this.WriteEventsFile(segmentSettings.SegmentEventsFile, analysisResults.Events);
        analysisResults.EventsFile = segmentSettings.SegmentEventsFile;
    }

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        // Roll the events up into one-minute summary index rows and persist them.
        var unitTime = TimeSpan.FromMinutes(1.0);
        analysisResults.SummaryIndices = this.ConvertEventsToSummaryIndices(analysisResults.Events, unitTime, analysisResults.SegmentAudioDuration, 0);
        analysisResults.SummaryIndicesFile = segmentSettings.SegmentSummaryIndicesFile;
        this.WriteSummaryIndicesFile(segmentSettings.SegmentSummaryIndicesFile, analysisResults.SummaryIndices);
    }

    if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave(analysisResults.Events.Length))
    {
        string imagePath = segmentSettings.SegmentImageFile.FullName;
        const double EventThreshold = 0.1;
        Image image = DrawSonogram(sonogram, hits, scores, predictedEvents, EventThreshold);
        image.Save(imagePath, ImageFormat.Png);
        analysisResults.ImageFile = segmentSettings.SegmentImageFile;
    }

    return (analysisResults);
}
/// <summary>
/// Generates four spectrogram images for one audio segment via Audio2Sonogram.
/// This analyzer produces no events; the images (and an optional spectrogram CSV) are the output.
/// </summary>
public AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    var audioFile = segmentSettings.SegmentAudioFile;
    var recording = new AudioRecording(audioFile.FullName);
    var outputDirectory = segmentSettings.SegmentOutputDirectory;
    var analysisResult = new AnalysisResult2(analysisSettings, segmentSettings, recording.Duration);
    Config configuration = ConfigFile.Deserialize(analysisSettings.ConfigFile);
    bool saveCsv = analysisSettings.AnalysisDataSaveBehavior;

    if (configuration.GetBool(AnalysisKeys.MakeSoxSonogram))
    {
        Log.Warn("SoX spectrogram generation config variable found (and set to true) but is ignored when running as an IAnalyzer");
    }

    // generate spectrogram
    var configurationDictionary = new Dictionary<string, string>(configuration.ToDictionary());
    configurationDictionary[ConfigKeys.Recording.Key_RecordingCallName] = audioFile.FullName;
    configurationDictionary[ConfigKeys.Recording.Key_RecordingFileName] = audioFile.Name;
    var soxImage = new FileInfo(Path.Combine(segmentSettings.SegmentOutputDirectory.FullName, audioFile.Name + ".SOX.png"));

    // NOTE(review): dataOnly is true exactly when an image SHOULD be saved — that looks
    // inverted relative to the name; confirm the semantics of GenerateFourSpectrogramImages.
    var spectrogramResult = Audio2Sonogram.GenerateFourSpectrogramImages(
        audioFile,
        soxImage,
        configurationDictionary,
        dataOnly: analysisSettings.AnalysisImageSaveBehavior.ShouldSave(analysisResult.Events.Length),
        makeSoxSonogram: false);

    // this analysis produces no results!
    // but we still print images (that is the point)
    if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave(analysisResult.Events.Length))
    {
        Debug.Assert(segmentSettings.SegmentImageFile.Exists);
    }

    if (saveCsv)
    {
        // Dump the raw decibel spectrogram matrix alongside the images.
        var basename = Path.GetFileNameWithoutExtension(segmentSettings.SegmentAudioFile.Name);
        var spectrogramCsvFile = outputDirectory.CombineFile(basename + ".Spectrogram.csv");
        Csv.WriteMatrixToCsv(spectrogramCsvFile, spectrogramResult.DecibelSpectrogram.Data, TwoDimensionalArray.None);
    }

    return (analysisResult);
}
/// <summary>
/// Runs EPR (event pattern recognition) over one audio segment. The nested AED
/// configuration is resolved from the "AedConfig" entry relative to this analyzer's
/// config file; detection then yields events, optional summary indices, and an
/// optional annotated sonogram image.
/// </summary>
public override AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    FileInfo segmentAudio = segmentSettings.SegmentAudioFile;

    // Resolve and parse the nested AED configuration.
    var eprNormalizedMinScore = GetEprParametersFromConfigFileOrDefaults(analysisSettings.Configuration);
    var aedConfigFile = ConfigFile.Resolve(
        analysisSettings.Configuration["AedConfig"],
        analysisSettings.ConfigFile.Directory);
    var aedConfig = Aed.GetAedParametersFromConfigFileOrDefaults(ConfigFile.Deserialize(aedConfigFile));

    Tuple<BaseSonogram, List<AcousticEvent>> detection = Detect(segmentAudio, aedConfig, eprNormalizedMinScore, segmentSettings.SegmentStartOffset);
    BaseSonogram sonogram = detection.Item1;
    List<AcousticEvent> acousticEvents = detection.Item2;

    var analysisResults = new AnalysisResult2(analysisSettings, segmentSettings, sonogram.Duration)
    {
        AnalysisIdentifier = this.Identifier,
        Events = acousticEvents.ToArray(),
    };

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        // Persist the events and record where they were written.
        this.WriteEventsFile(segmentSettings.SegmentEventsFile, analysisResults.Events);
        analysisResults.EventsFile = segmentSettings.SegmentEventsFile;
    }

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        // Roll the events up into one-minute summary index rows and persist them.
        TimeSpan unitTime = TimeSpan.FromMinutes(1.0);
        analysisResults.SummaryIndices = this.ConvertEventsToSummaryIndices(analysisResults.Events, unitTime, analysisResults.SegmentAudioDuration, 0);
        this.WriteSummaryIndicesFile(segmentSettings.SegmentSummaryIndicesFile, analysisResults.SummaryIndices);
    }

    // save image of sonograms
    if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave(analysisResults.Events.Length))
    {
        Image annotatedSonogram = Aed.DrawSonogram(sonogram, acousticEvents);
        annotatedSonogram.Save(segmentSettings.SegmentImageFile.FullName, ImageFormat.Png);
        analysisResults.ImageFile = segmentSettings.SegmentImageFile;
    }

    return analysisResults;
}
/// <summary>
/// Generates spectrogram images for one audio segment. This analyzer produces no events;
/// the composite image saved to the segment image file is the output.
/// </summary>
public AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    var audioFile = segmentSettings.SegmentAudioFile;
    var recording = new AudioRecording(audioFile.FullName);
    var sourceRecordingName = recording.BaseName;

    // TODO get the start and end-time offsets for accurate labeling of the time scale.
    //if (arguments.StartOffset.HasValue ^ arguments.EndOffset.HasValue)
    //{
    //    throw new InvalidStartOrEndException("If StartOffset or EndOffset is specified, then both must be specified");
    //}
    // set default offsets - only use defaults if not provided in arguments list
    // var offsetsProvided = arguments.StartOffset.HasValue && arguments.EndOffset.HasValue;
    //TimeSpan? startOffset;
    //TimeSpan? endOffset;
    //if (offsetsProvided)
    //{
    //    startOffset = TimeSpan.FromSeconds(arguments.StartOffset.Value);
    //    endOffset = TimeSpan.FromSeconds(arguments.EndOffset.Value);
    //}

    //var outputDirectory = segmentSettings.SegmentOutputDirectory;
    //bool saveCsv = analysisSettings.AnalysisDataSaveBehavior;
    var analysisResult = new AnalysisResult2(analysisSettings, segmentSettings, recording.Duration);
    var configInfo = ConfigFile.Deserialize<AnalyzerConfig>(analysisSettings.ConfigFile);
    var spectrogramResult = Audio2Sonogram.GenerateSpectrogramImages(audioFile, configInfo, sourceRecordingName);

    // this analysis produces no results! But we still print images (that is the point)
    // NOTE: the ShouldSave guard is currently disabled — the composite image is always saved.
    // if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave(analysisResult.Events.Length))
    // {
    // Debug.Assert(condition: segmentSettings.SegmentImageFile.Exists, "Warning: Image file must exist.");
    spectrogramResult.CompositeImage.Save(segmentSettings.SegmentImageFile.FullName, ImageFormat.Png);
    // }

    //if (saveCsv)
    //{
    //    var basename = Path.GetFileNameWithoutExtension(segmentSettings.SegmentAudioFile.Name);
    //    var spectrogramCsvFile = outputDirectory.CombineFile(basename + ".Spectrogram.csv");
    //    Csv.WriteMatrixToCsv(spectrogramCsvFile, spectrogramResult.DecibelSpectrogram.Data, TwoDimensionalArray.None);
    //}

    return (analysisResult);
}
/// <summary>
/// Runs the Limnodynastes convexiusculus recognizer over one audio segment, producing
/// events, optional one-minute summary indices, and an optional annotated sonogram image.
/// </summary>
public override AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    FileInfo audioFile = segmentSettings.SegmentAudioFile;

    // execute actual analysis
    Dictionary<string, string> configuration = analysisSettings.Configuration;
    LimnodynastesConvexResults results = Analysis(audioFile, configuration, analysisSettings, segmentSettings);

    var analysisResults = new AnalysisResult2(analysisSettings, segmentSettings, results.RecordingDuration);
    BaseSonogram sonogram = results.Sonogram;
    double[,] hits = results.Hits;
    Plot scores = results.Plot;
    List<AcousticEvent> predictedEvents = results.Events;
    analysisResults.Events = predictedEvents.ToArray();

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        // Persist the events and record where they were written.
        this.WriteEventsFile(segmentSettings.SegmentEventsFile, analysisResults.Events);
        analysisResults.EventsFile = segmentSettings.SegmentEventsFile;
    }

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        // Roll the events up into one-minute summary index rows and persist them.
        var unitTime = TimeSpan.FromMinutes(1.0);
        analysisResults.SummaryIndices = this.ConvertEventsToSummaryIndices(analysisResults.Events, unitTime, analysisResults.SegmentAudioDuration, 0);
        this.WriteSummaryIndicesFile(segmentSettings.SegmentSummaryIndicesFile, analysisResults.SummaryIndices);

        // FIX: record the destination file on the results object, consistent with
        // the other analyzers in this project (the file was written but never recorded).
        analysisResults.SummaryIndicesFile = segmentSettings.SegmentSummaryIndicesFile;
    }

    if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave(analysisResults.Events.Length))
    {
        string imagePath = segmentSettings.SegmentImageFile.FullName;
        const double EventThreshold = 0.1;
        Image image = DrawSonogram(sonogram, hits, scores, predictedEvents, EventThreshold);
        image.Save(imagePath, ImageFormat.Png);
        analysisResults.ImageFile = segmentSettings.SegmentImageFile;
    }

    return analysisResults;
}
/// <summary>
/// Runs acoustic event detection (AED) over one audio segment, producing events,
/// optional one-minute summary indices, and an optional annotated sonogram image.
/// </summary>
public override AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    FileInfo segmentAudio = segmentSettings.SegmentAudioFile;
    var aedConfig = GetAedParametersFromConfigFileOrDefaults(analysisSettings.Configuration);
    var detection = Detect(segmentAudio, aedConfig, segmentSettings.SegmentStartOffset);
    BaseSonogram sonogram = detection.Item3;

    var analysisResults = new AnalysisResult2(analysisSettings, segmentSettings, detection.Item2.Duration)
    {
        AnalysisIdentifier = this.Identifier,
        Events = detection.Item1,
    };

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        // Persist the events and record where they were written.
        this.WriteEventsFile(segmentSettings.SegmentEventsFile, analysisResults.Events);
        analysisResults.EventsFile = segmentSettings.SegmentEventsFile;
    }

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        // Roll the events up into one-minute summary index rows and persist them.
        TimeSpan unitTime = TimeSpan.FromMinutes(1.0);
        analysisResults.SummaryIndices = this.ConvertEventsToSummaryIndices(analysisResults.Events, unitTime, analysisResults.SegmentAudioDuration, 0);
        this.WriteSummaryIndicesFile(segmentSettings.SegmentSummaryIndicesFile, analysisResults.SummaryIndices);
        analysisResults.SummaryIndicesFile = segmentSettings.SegmentSummaryIndicesFile;
    }

    // save image of sonograms
    if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave(analysisResults.Events.Length))
    {
        Image annotatedSonogram = DrawSonogram(sonogram, detection.Item1);
        annotatedSonogram.Save(segmentSettings.SegmentImageFile.FullName, ImageFormat.Png);
        analysisResults.ImageFile = segmentSettings.SegmentImageFile;
    }

    return analysisResults;
}
/// <summary>
/// Calculates event statistics for every remote event imported with this audio segment.
/// Each item in the segment's data is an ImportedEvent whose temporal and spectral bounds
/// are analyzed against the downloaded audio; results carry metadata linking them back
/// to the source recording and audio event.
/// </summary>
public override AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    Contract.Requires(segmentSettings.SegmentStartOffset == segmentSettings.Segment.StartOffsetSeconds.Seconds());

    var recording = new AudioRecording(segmentSettings.SegmentAudioFile);
    var segment = (RemoteSegmentWithData)segmentSettings.Segment;

    // sometimes events will share the same audio block so we have to analyze each event
    // within this segment of audio
    IReadOnlyCollection<object> importedEvents = segment.Data;

    Log.Debug($"Calculating event statistics for {importedEvents.Count} items in {segmentSettings.SegmentAudioFile}");

    EventStatistics[] results = new EventStatistics[importedEvents.Count];
    int index = 0;
    foreach (var importedEventObject in importedEvents)
    {
        var importedEvent = (ImportedEvent)importedEventObject;

        // Bounds of this event in time (seconds) and frequency (hertz).
        var temporalRange = new Range<TimeSpan>(
            importedEvent.EventStartSeconds.Value.Seconds(),
            importedEvent.EventEndSeconds.Value.Seconds());
        var spectralRange = new Range<double>(
            importedEvent.LowFrequencyHertz.Value,
            importedEvent.HighFrequencyHertz.Value);

        Log.Debug(
            $"Calculating event statistics for {importedEvent.AudioEventId},{temporalRange}," +
            $"{spectralRange} in {segmentSettings.SegmentAudioFile}, Duration: {recording.Duration}");

        // Repeat sanity check here. Previous duration sanity check only checks the header of the audio file,
        // but that still allows for a fragmented audio file to have been downloaded, shorter than it should be
        var expectedDuration = segment.Offsets.Size().Seconds();
        var durationDelta = expectedDuration - recording.Duration;
        if (durationDelta > 1.0.Seconds())
        {
            Log.Warn(
                $"Media ({segmentSettings.SegmentAudioFile}) did not have expected duration."
                + $" Expected: {expectedDuration}, Actual: {recording.Duration}");
        }

        var configuration = (EventStatisticsConfiguration)analysisSettings.Configuration;

        var statistics = EventStatisticsCalculate.AnalyzeAudioEvent(
            recording,
            temporalRange,
            spectralRange,
            configuration,
            segmentSettings.SegmentStartOffset);

        if (statistics.Error)
        {
            // A failed calculation is logged but still included in the results array.
            Log.Warn(
                $"Event statistics failed for {importedEvent.AudioEventId},{temporalRange}," +
                $"{spectralRange} in {segmentSettings.SegmentAudioFile}, Duration: {recording.Duration}");
        }

        // lastly add some metadata to make the results useful
        statistics.Order = importedEvent.Order;
        statistics.AudioRecordingId = segment.Source.Id;
        statistics.AudioRecordingRecordedDate = segment.SourceMetadata.RecordedDate;
        statistics.AudioEventId = importedEvent.AudioEventId;

        results[index] = statistics;
        index++;
    }

    var result = new AnalysisResult2(analysisSettings, segmentSettings, recording.Duration);
    result.Events = results;

    return (result);
}
/// <summary>
/// This entrypoint should be used for testing short files (less than 2 minutes).
/// It resolves the config, prepares (resamples) the source audio, runs the chosen
/// event recognizer over the whole file as a single segment, summarizes the results,
/// and cleans up the temporary prepared file.
/// </summary>
public static void Execute(Arguments arguments)
{
    MainEntry.WarnIfDevleoperEntryUsed("EventRecognizer entry does not do any audio maniuplation.");
    Log.Info("Running event recognizer");

    var sourceAudio = arguments.Source;
    var configFile = arguments.Config.ToFileInfo();
    var outputDirectory = arguments.Output;

    if (configFile == null)
    {
        throw new FileNotFoundException("No config file argument provided");
    }
    else if (!configFile.Exists)
    {
        // Fall back to resolving the config by name from the standard config locations.
        Log.Warn($"Config file {configFile.FullName} not found... attempting to resolve config file");
        configFile = ConfigFile.Resolve(configFile.Name, Directory.GetCurrentDirectory().ToDirectoryInfo());
    }

    LoggedConsole.WriteLine("# Recording file: " + sourceAudio.FullName);
    LoggedConsole.WriteLine("# Configuration file: " + configFile);
    LoggedConsole.WriteLine("# Output folder: " + outputDirectory);

    // find an appropriate event IAnalyzer
    IAnalyser2 recognizer = AnalyseLongRecording.FindAndCheckAnalyser<IEventRecognizer>(
        arguments.AnalysisIdentifier,
        configFile.Name);

    Log.Info("Attempting to run recognizer: " + recognizer.Identifier);
    Log.Info("Reading configuration file");
    Config configuration = ConfigFile.Deserialize<RecognizerBase.RecognizerConfig>(configFile);

    // get default settings
    AnalysisSettings analysisSettings = recognizer.DefaultSettings;

    // convert arguments to analysis settings
    analysisSettings = arguments.ToAnalysisSettings(
        analysisSettings,
        outputIntermediate: true,
        resultSubDirectory: recognizer.Identifier,
        configuration: configuration);

    // Enable this if you want the Config file ResampleRate parameter to work.
    // Generally however the ResampleRate should remain at 22050Hz for all recognizers.
    //analysisSettings.AnalysisTargetSampleRate = (int) configuration[AnalysisKeys.ResampleRate];

    // get transform input audio file - if needed
    Log.Info("Querying source audio file");
    var audioUtilityRequest = new AudioUtilityRequest()
    {
        TargetSampleRate = analysisSettings.AnalysisTargetSampleRate,
    };
    var preparedFile = AudioFilePreparer.PrepareFile(
        outputDirectory,
        sourceAudio,
        MediaTypes.MediaTypeWav,
        audioUtilityRequest,
        outputDirectory);

    // Build segment settings describing the whole prepared file as one segment.
    var source = preparedFile.SourceInfo.ToSegment();
    var prepared = preparedFile.TargetInfo.ToSegment(FileSegment.FileDateBehavior.None);
    var segmentSettings = new SegmentSettings<FileInfo>(
        analysisSettings,
        source,
        (analysisSettings.AnalysisOutputDirectory, analysisSettings.AnalysisTempDirectory),
        prepared);

    if (preparedFile.TargetInfo.SampleRate.Value != analysisSettings.AnalysisTargetSampleRate)
    {
        Log.Warn("Input audio sample rate does not match target sample rate");
    }

    // Execute a pre analyzer hook
    recognizer.BeforeAnalyze(analysisSettings);

    // execute actual analysis - output data will be written
    Log.Info("Running recognizer: " + recognizer.Identifier);
    AnalysisResult2 results = recognizer.Analyze(analysisSettings, segmentSettings);

    // run summarize code - output data can be written
    Log.Info("Running recognizer summary: " + recognizer.Identifier);
    recognizer.SummariseResults(
        analysisSettings,
        source,
        results.Events,
        results.SummaryIndices,
        results.SpectralIndices,
        new[] { results });

    //Log.Info("Recognizer run, saving extra results");
    // TODO: Michael, output anything else as you wish.

    // Delete the prepared (resampled) copy unless it is the source file itself.
    Log.Debug("Clean up temporary files");
    if (source.Source.FullName != prepared.Source.FullName)
    {
        prepared.Source.Delete();
    }

    int eventCount = results?.Events?.Length ?? 0;
    Log.Info($"Number of detected events: {eventCount}");
    Log.Success(recognizer.Identifier + " recognizer has completed");
}
public static void CorrectEvent(AnalysisResult2 result, EventBase eventToBeFixed, int totalEventsSoFar, int totalEventsInResultSoFar) { // no corrections need to be made }
/// <summary>
/// Calculates summary and spectral acoustic indices for one audio segment, once per
/// configured frequency band. Results for all bands are interleaved into the output arrays
/// (slot = bandIndex + subsegmentIndex * bandCount). Optionally draws one labelled
/// amplitude spectrogram per band and stacks them vertically into a single image file.
/// </summary>
/// <param name="analysisSettings">Global analysis settings; carries the
/// <see cref="StandardizedFeatureExtractionConfig"/> in its Configuration property.</param>
/// <param name="segmentSettings">Per-segment file paths, offsets and durations.</param>
/// <returns>An <see cref="AnalysisResult2"/> populated with summary/spectral indices and, when enabled, the image file.</returns>
public override AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    var configuration = (StandardizedFeatureExtractionConfig)analysisSettings.Configuration;
    var audioFile = segmentSettings.SegmentAudioFile;
    var recording = new AudioRecording(audioFile.FullName);

    // Configurations non-specific for bands
    TimeSpan indexCalculationDuration = configuration.IndexCalculationDurationTimeSpan;

    // NOTE(review): bgNoiseNeighbourhood is read but never used in this method — confirm whether it is needed.
    TimeSpan bgNoiseNeighbourhood = configuration.BgNoiseBuffer;

    // Bands
    List<StandardizedFeatureExtractionConfig.BandsProperties> bandsList = configuration.Bands;

    // Check if there are identical bands
    CheckForIdenticalBands(bandsList);

    // Estimate total number of subsegments (per band), then scale by band count for the result arrays
    double segmentDurationSeconds = segmentSettings.AnalysisIdealSegmentDuration.TotalSeconds;
    double subsegmentDuration = indexCalculationDuration.TotalSeconds;
    int subsegmentCount = (int)Math.Round(segmentDurationSeconds / subsegmentDuration);
    int totalSubsegmentCount = subsegmentCount * bandsList.Count;

    // Store results of all subsegments
    var analysisResults = new AnalysisResult2(analysisSettings, segmentSettings, recording.Duration);
    analysisResults.AnalysisIdentifier = this.Identifier;

    var trackScores = new List<Plot>(totalSubsegmentCount);
    var tracks = new List<SpectralTrack>(totalSubsegmentCount);

    analysisResults.SummaryIndices = new SummaryIndexBase[totalSubsegmentCount];
    analysisResults.SpectralIndices = new SpectralIndexBase[totalSubsegmentCount];

    // Create list to store images, one for each band. They are later combined into one image.
    var list = new List<Image<Rgb24>>();
    string imagePath = segmentSettings.SegmentImageFile.FullName;
    int maxImageWidth = 0;

    int bandCount = 0;
    foreach (var band in bandsList)
    {
        Log.DebugFormat("Starting band {0}/{1}", bandCount + 1, bandsList.Count);

        // Calculate spectral indices

        // get a fresh copy of the ICC config (the shared config must not be mutated across bands)
        var config = (IndexCalculateConfig)((ICloneable)configuration).Clone();

        // Add values specific for band from custom configuration file to config
        config.MinBandWidth = band.Bandwidth.Min;
        config.MaxBandWidth = band.Bandwidth.Max;
        config.FrameLength = band.FftWindow;
        if (band.MelScale != 0)
        {
            // a non-zero MelScale value switches this band to the Mel frequency scale
            config.FrequencyScale = FreqScaleType.Mel;
            config.MelScale = band.MelScale;
        }
        else
        {
            config.FrequencyScale = FreqScaleType.Linear;
        }

        // Calculate indices for each subsegment and for each band
        IndexCalculateResult[] subsegmentResults = AcousticIndices.CalculateIndicesInSubsegments(
            recording,
            segmentSettings.SegmentStartOffset,
            segmentSettings.AnalysisIdealSegmentDuration,
            indexCalculationDuration,
            config.IndexProperties,
            segmentSettings.Segment.SourceMetadata.SampleRate,
            config);

        // start with a zero-row matrix; subsegment spectrogram rows are concatenated onto it below
        int columnsAmplitudeSpectrogram = subsegmentResults[0].AmplitudeSpectrogram.GetLength(1);
        double[,] amplitudeSpectrogramSegment = new double[0, columnsAmplitudeSpectrogram];

        for (int i = 0; i < subsegmentResults.Length; i++)
        {
            var indexCalculateResult = subsegmentResults[i];
            indexCalculateResult.SummaryIndexValues.FileName = segmentSettings.Segment.SourceMetadata.Identifier;
            indexCalculateResult.SpectralIndexValues.FileName = segmentSettings.Segment.SourceMetadata.Identifier;

            // interleave: results for band b of subsegment i land at slot b + i * bandCountTotal
            analysisResults.SummaryIndices[bandCount + (i * bandsList.Count)] = indexCalculateResult.SummaryIndexValues;
            analysisResults.SpectralIndices[bandCount + (i * bandsList.Count)] = indexCalculateResult.SpectralIndexValues;
            trackScores.AddRange(indexCalculateResult.TrackScores);

            if (indexCalculateResult.Tracks != null)
            {
                tracks.AddRange(indexCalculateResult.Tracks);
            }

            if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave())
            {
                // Add amplitude spectrograms of each subsegment together to get amplitude spectrogram of one segment
                double[,] amplitudeSpectrogramSubsegment = indexCalculateResult.AmplitudeSpectrogram;
                amplitudeSpectrogramSegment = MatrixTools.ConcatenateMatrixRows(
                    amplitudeSpectrogramSegment,
                    amplitudeSpectrogramSubsegment);
            }
        }

        if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave())
        {
            // Create image of amplitude spectrogram
            var image = ImageTools.DrawReversedMatrix(MatrixTools.MatrixRotate90Anticlockwise(amplitudeSpectrogramSegment));

            // Label information
            string minBandWidth = band.Bandwidth.Min.ToString();
            string maxBandWidth = band.Bandwidth.Max.ToString();
            string fftWindow = band.FftWindow.ToString();
            string mel;
            string melScale;
            if (band.MelScale != 0)
            {
                mel = "Mel";
                melScale = band.MelScale.ToString();
            }
            else
            {
                mel = "Standard";
                melScale = 0.ToString();
            }

            // Create label of the form "_min_max_fft_scaleName_melScale"
            string segmentSeparator = "_";
            string[] segments = { minBandWidth, maxBandWidth, fftWindow, mel, melScale };
            string labelText = segments.Aggregate(string.Empty, (aggregate, item) => aggregate + segmentSeparator + item);

            var stringFont = Drawing.Arial14;
            int width = 250;
            int height = image.Height;
            var label = new Image<Rgb24>(width, height);
            label.Mutate(g1 =>
            {
                g1.Clear(Color.Gray);
                g1.DrawText(labelText, stringFont, Color.Black, new PointF(4, 30));
                g1.DrawLine(new Pen(Color.Black, 1), 0, 0, width, 0); //draw upper boundary
                g1.DrawLine(new Pen(Color.Black, 1), 0, 1, width, 1); //draw upper boundary
            });

            var labelledImage = ImageTools.CombineImagesInLine(label, image);

            // Add labeled image to list
            list.Add(labelledImage);

            // Update maximal width of image (used when stacking images vertically at the end)
            if (image.Width > maxImageWidth)
            {
                maxImageWidth = image.Width;
            }
        }

        bandCount += 1;
        Log.InfoFormat("Completed band {0}/{1}", bandCount, bandsList.Count);
    }

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        this.WriteSummaryIndicesFile(segmentSettings.SegmentSummaryIndicesFile, analysisResults.SummaryIndices);
        analysisResults.SummaryIndicesFile = segmentSettings.SegmentSummaryIndicesFile;
        analysisResults.SpectraIndicesFiles =
            this.WriteSpectrumIndicesFiles(
                segmentSettings.SegmentSpectrumIndicesDirectory,
                Path.GetFileNameWithoutExtension(segmentSettings.SegmentAudioFile.Name),
                analysisResults.SpectralIndices);
    }

    if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave())
    {
        // stack the per-band images vertically into the single segment image
        var finalImage = ImageTools.CombineImagesVertically(list, maxImageWidth);
        finalImage.Save(imagePath);
        analysisResults.ImageFile = new FileInfo(imagePath);
        LoggedConsole.WriteLine("See {0} for spectrogram pictures", imagePath);
    }

    return (analysisResults);
}
public static void CorrectSummaryIndex(AnalysisResult2 result, SummaryIndexBase indexToBeFixed, int totalSummaryIndicesSoFar, int totalSumaryIndicesInJustThisResultSoFar) { indexToBeFixed.RankOrder = totalSummaryIndicesSoFar; }
/// <summary>
/// Runs the (legacy, DataTable-based) analysis over one audio segment: detects acoustic
/// events, writes them to a sorted DataTable, optionally saves events/indices CSV files
/// and a sonogram image, and returns the results.
/// </summary>
/// <param name="analysisSettings">Global analysis settings (config dictionary, save behaviors).</param>
/// <param name="segmentSettings">Per-segment file paths, offsets and source metadata.</param>
/// <returns>The populated <see cref="AnalysisResult2"/>, or null when the underlying Analysis() produced nothing.</returns>
public override AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    var fiAudioF = segmentSettings.SegmentAudioFile;

    // NOTE(review): diOutputDir is unused in this method — kept to preserve behavior of the property read.
    var diOutputDir = segmentSettings.SegmentOutputDirectory;

    //######################################################################
    var results = Analysis(fiAudioF, analysisSettings, segmentSettings.Segment.SourceMetadata.SampleRate, segmentSettings.SegmentStartOffset);
    //######################################################################

    if (results == null)
    {
        return null; //nothing to process (broken)
    }

    var sonogram = results.Item1;
    var hits = results.Item2;
    var scores = results.Item3;
    var predictedEvents = results.Item4;
    var recordingTimeSpan = results.Item5;

    var result = new AnalysisResult2(analysisSettings, segmentSettings, recordingTimeSpan);
    result.AnalysisIdentifier = this.Identifier;
    result.MiscellaneousResults["dataTable"] = null;

    DataTable dataTable = null;
    if (predictedEvents != null)
    {
        // NOTE(review): analysisName is only used by the disabled line below; the dictionary read is
        // kept so a missing key still fails in the same way as before.
        string analysisName = analysisSettings.ConfigDict[AnalysisKeys.AnalysisName];
        string fName = Path.GetFileNameWithoutExtension(fiAudioF.Name);
        foreach (AcousticEvent ev in predictedEvents)
        {
            ev.FileName = fName;
            //ev.Name = analysisName; //TEMPORARY DISABLE
            ev.SegmentDurationSeconds = recordingTimeSpan.TotalSeconds;
        }

        //write events to a data table to return.
        dataTable = WriteEvents2DataTable(predictedEvents);
        string sortString = AnalysisKeys.EventStartAbs + " ASC";
        dataTable = DataTableTools.SortTable(dataTable, sortString); //sort by start time before returning
    }

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        CsvTools.DataTable2CSV(dataTable, segmentSettings.SegmentEventsFile.FullName);
    }
    else
    {
        result.EventsFile = null;
    }

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        double scoreThreshold = 0.01;
        if (analysisSettings.ConfigDict.ContainsKey(AnalysisKeys.IntensityThreshold))
        {
            scoreThreshold = ConfigDictionary.GetDouble(AnalysisKeys.IntensityThreshold, analysisSettings.ConfigDict);
        }

        TimeSpan unitTime = TimeSpan.FromSeconds(60); //index for each time span of i minute
        var indicesDT = this.ConvertEvents2Indices(dataTable, unitTime, recordingTimeSpan, scoreThreshold);
        CsvTools.DataTable2CSV(indicesDT, segmentSettings.SegmentSummaryIndicesFile.FullName);
    }
    else
    {
        result.SummaryIndices = null;
    }

    //save image of sonograms
    // BUG FIX: predictedEvents may be null here (the branch above explicitly allows it),
    // so the original `predictedEvents.Count` could throw NullReferenceException.
    if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave(predictedEvents?.Count ?? 0))
    {
        string imagePath = segmentSettings.SegmentImageFile.FullName;
        Image image = DrawSonogram(sonogram, hits, scores, predictedEvents);
        image.Save(imagePath, ImageFormat.Png);
    }

    result.MiscellaneousResults["dataTable"] = dataTable;
    result.ImageFile = segmentSettings.SegmentImageFile;

    //result.DisplayItems = { { 0, "example" }, { 1, "example 2" }, }
    //result.OutputFiles = { { "exmaple file key", new FileInfo("Where's that file?") } }
    return result;
}
/// <summary>
/// A WRAPPER AROUND THE Analysis() METHOD.
/// To be called as an executable with command line arguments.
/// Prepares (and optionally trims) the recording, validates the frog-data CSV path from
/// the config dictionary, runs the obsolete Frogs analyzer, then deliberately throws
/// <see cref="NotImplementedException"/> ("Intentionally broken") before any result post-processing.
/// </summary>
/// <param name="arguments">Command line arguments (source, output, optional start/duration in seconds).</param>
public static void Execute(Arguments arguments)
{
    Contract.Requires(arguments != null);

    // a missing Start/Duration defaults to 0; a zero duration means "process the whole file"
    TimeSpan tsStart = TimeSpan.FromSeconds(arguments.Start ?? 0);
    TimeSpan tsDuration = TimeSpan.FromSeconds(arguments.Duration ?? 0);
    string outputDir = arguments.Output.FullName;

    // EXTRACT THE REQUIRED RECORDING SEGMENT
    FileInfo sourceF = arguments.Source;
    FileInfo tempF = TempFileHelper.NewTempFile(arguments.Output);
    if (tempF.Exists)
    {
        tempF.Delete();
    }

    // GET INFO ABOUT THE SOURCE and the TARGET files - esp need the sampling rate
    AudioUtilityModifiedInfo preparedFile;
    if (tsDuration == TimeSpan.Zero) // Process entire file
    {
        preparedFile = AudioFilePreparer.PrepareFile(sourceF, tempF, new AudioUtilityRequest { TargetSampleRate = ResampleRate }, arguments.Output);
    }
    else
    {
        preparedFile = AudioFilePreparer.PrepareFile(sourceF, tempF, new AudioUtilityRequest { TargetSampleRate = ResampleRate, OffsetStart = tsStart, OffsetEnd = tsStart.Add(tsDuration) }, arguments.Output);
    }

    var (analysisSettings, segmentSettings) = arguments.ToAnalysisSettings(
        sourceSegment: preparedFile.SourceInfo.ToSegment(),
        preparedSegment: preparedFile.TargetInfo.ToSegment());

    //get the data file to identify frog calls. Check it exists and then store full path in dictionary.
    string frogParametersPath = analysisSettings.ConfigDict[key_FROG_DATA];
    FileInfo fi_FrogData = new FileInfo(Path.Combine(outputDir, frogParametersPath));
    if (!fi_FrogData.Exists)
    {
        LoggedConsole.WriteLine("INVALID PATH: " + fi_FrogData.FullName);
        LoggedConsole.WriteLine("The config file must contain the name of a valid .csv file (containing frog call parameters) located in same directory as the .cfg file.");
        LoggedConsole.WriteLine("For example, use Key/Value pair: FROG_DATA_FILE=FrogDataAndCompilationFile.csv");
        throw new InvalidOperationException();
    }

    analysisSettings.ConfigDict[key_FROG_DATA] = fi_FrogData.FullName; // store full path in the dictionary.

    // DO THE ANALYSIS
    // #############################################################################################################################################
    IAnalyser2 analyser = new Frogs_OBSOLETE();
    AnalysisResult2 result = analyser.Analyze(analysisSettings, segmentSettings);

    // NOTE(review): everything after this throw (the commented block below) is intentionally dead.
    throw new NotImplementedException("Intentionally broken");

    /*
     * DataTable dt = result.Data;
     * if (dt == null) { throw new InvalidOperationException("Data table of results is null"); }
     * // #############################################################################################################################################
     *
     * // ADD IN ADDITIONAL INFO TO RESULTS TABLE
     * AddContext2Table(dt, tsStart, result.AudioDuration);
     * CsvTools.DataTable2CSV(dt, segmentSettings.SegmentEventsFile.FullName);
     * // DataTableTools.WriteTable(augmentedTable);
     */
}
/// <summary>
/// Computes channel-integrity indices for one stereo audio segment: similarity and
/// decibel-bias measures between the left and right channels, plus per-channel
/// zero-crossing fractions. Produces exactly one summary index and no events.
/// </summary>
/// <param name="analysisSettings">Global analysis settings (save behaviors).</param>
/// <param name="segmentSettings">Per-segment file paths, offsets and source metadata.</param>
/// <returns>An <see cref="AnalysisResult2"/> whose SummaryIndices contains the single <see cref="ChannelIntegrityIndices"/> result.</returns>
/// <exception cref="InvalidAudioChannelException">Thrown when the recording does not have exactly two channels.</exception>
/// <exception cref="NotImplementedException">Thrown when image saving is requested — image output is not supported by this analyzer.</exception>
public override AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    // boilerplate Analyzer
    var audioFile = segmentSettings.SegmentAudioFile;
    var sampleRate = segmentSettings.Segment.SourceMetadata.SampleRate;
    var recording = new AudioRecording(audioFile.FullName);
    var outputDirectory = segmentSettings.SegmentOutputDirectory;

    var analysisResults = new AnalysisResult2(analysisSettings, segmentSettings, recording.Duration);
    analysisResults.AnalysisIdentifier = this.Identifier;

    var result = new ChannelIntegrityIndices()
    {
        ResultStartSeconds = segmentSettings.SegmentStartOffset.TotalSeconds,
    };

    // do some sanity checks
    if (recording.WavReader.Channels != 2)
    {
        // FIX: added the missing closing parenthesis to the exception message.
        throw new InvalidAudioChannelException($"The channel integrity analyzer requires exactly two channels but {recording.WavReader.Channels} channels found in file ({audioFile.FullName})");
    }

    // actual analysis
    double[] channelLeft = recording.WavReader.GetChannel(0);
    double[] channelRight = recording.WavReader.GetChannel(1);
    double epsilon = recording.WavReader.Epsilon;

    ChannelIntegrity.SimilarityIndex(
        channelLeft,
        channelRight,
        epsilon,
        sampleRate,
        out var similarityIndex,
        out var decibelIndex,
        out var avDecibelBias,
        out var medianDecibelBias,
        out var lowDecibelBias,
        out var midDecibelBias,
        out var highDecibelBias);

    //double similarityIndex = ChannelIntegrity.SimilarityIndex(channelLeft, channelRight, epsilon, sampleRate.Value);
    result.ChannelSimilarity = similarityIndex;
    result.ChannelDiffDecibels = decibelIndex;
    result.AverageDecibelBias = avDecibelBias;
    result.MedianDecibelBias = medianDecibelBias;
    result.LowFreqDecibelBias = lowDecibelBias;
    result.MidFreqDecibelBias = midDecibelBias;
    result.HighFreqDecibelBias = highDecibelBias;

    ChannelIntegrity.ZeroCrossingIndex(channelLeft, channelRight, out var zeroCrossingFractionLeft, out var zeroCrossingFractionRight);
    result.ZeroCrossingFractionLeft = zeroCrossingFractionLeft;
    result.ZeroCrossingFractionRight = zeroCrossingFractionRight;

    // finish the analyzer
    analysisResults.Events = new EventBase[0];
    analysisResults.SummaryIndices = new SummaryIndexBase[] { result };
    analysisResults.SpectralIndices = new SpectralIndexBase[0];

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        this.WriteSummaryIndicesFile(segmentSettings.SegmentSummaryIndicesFile, analysisResults.SummaryIndices);
        analysisResults.SummaryIndicesFile = segmentSettings.SegmentSummaryIndicesFile;
    }

    if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave(analysisResults.Events.Length))
    {
        throw new NotImplementedException();
    }

    // FIX: removed the original unreachable branch
    // `if (false && analysisSettings.AnalysisDataSaveBehavior) { throw new NotImplementedException(); }`
    // — the `false &&` made it dead code; behavior is unchanged by its removal.

    return analysisResults;
}
/// <summary>
/// Runs an event recognizer over one audio segment. High-resolution indices are computed
/// lazily — only if the recognizer actually reads <c>lazyIndices.Value</c> do they get
/// calculated (and subsequently summarized/drawn). Optionally writes events, summary and
/// spectral index files and a sonogram image.
/// </summary>
/// <param name="analysisSettings">Global analysis settings; Configuration holds the <c>RecognizerConfig</c>.</param>
/// <param name="segmentSettings">Per-segment file paths, offsets and source metadata.</param>
/// <returns>An <see cref="AnalysisResult2"/> with the recognized events and any lazily produced indices.</returns>
public override AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    var recording = new AudioRecording(segmentSettings.SegmentAudioFile.FullName);

    // get indices configuration - extracted in BeforeAnalyze
    var acousticIndicesConfig = (RecognizerConfig)analysisSettings.Configuration;

    // get a lazily calculated indices function - if you never get the lazy value, the indices will never be calculated
    var lazyIndices = this.GetLazyIndices(
        recording,
        analysisSettings,
        segmentSettings,
        acousticIndicesConfig.HighResolutionIndices);

    // determine imageWidth for output images (one pixel per index-calculation interval)
    int imageWidth = (int)Math.Floor(
        recording.Duration.TotalSeconds / acousticIndicesConfig.HighResolutionIndices.IndexCalculationDuration);

    // execute actual analysis
    RecognizerResults results = this.Recognize(
        recording,
        analysisSettings.Configuration,
        segmentSettings.SegmentStartOffset,
        lazyIndices,
        segmentSettings.SegmentOutputDirectory,
        imageWidth);

    var analysisResults = new AnalysisResult2(analysisSettings, segmentSettings, recording.Duration);

    BaseSonogram sonogram = results.Sonogram;
    double[,] hits = results.Hits;
    var predictedEvents = results.Events;

    // double check all the events have the right offset in case it was missed
    foreach (var predictedEvent in predictedEvents)
    {
        predictedEvent.SegmentStartSeconds = segmentSettings.SegmentStartOffset.TotalSeconds;
    }

    analysisResults.Events = predictedEvents.ToArray();

    // compress high resolution indices - and save them.
    // IF they aren't used, empty values are returned.
    if (lazyIndices.IsValueCreated)
    {
        this.SummarizeHighResolutionIndices(
            analysisResults,
            lazyIndices.Value,
            acousticIndicesConfig.HighResolutionIndices);
    }

    // write intermediate output if necessary
    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        this.WriteEventsFile(segmentSettings.SegmentEventsFile, analysisResults.Events);
        analysisResults.EventsFile = segmentSettings.SegmentEventsFile;
    }

    // NOTE(review): unlike the events branch above, this branch does not assign
    // analysisResults.SummaryIndicesFile after writing — confirm whether that is intended.
    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        this.WriteSummaryIndicesFile(segmentSettings.SegmentSummaryIndicesFile, analysisResults.SummaryIndices);
    }

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        analysisResults.SpectraIndicesFiles =
            this.WriteSpectrumIndicesFiles(
                segmentSettings.SegmentSpectrumIndicesDirectory,
                segmentSettings.Segment.SourceMetadata.Identifier,
                analysisResults.SpectralIndices);
    }

    if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave(analysisResults.Events.Length))
    {
        string imagePath = segmentSettings.SegmentImageFile.FullName;
        const double EventThreshold = 0.1;
        var plots = results.Plots ?? new List<Plot>();

        Image image = this.DrawSonogram(sonogram, hits, plots, predictedEvents, EventThreshold);
        image.Save(imagePath);
        analysisResults.ImageFile = segmentSettings.SegmentImageFile;

        // draw a fancy high res index image
        // IF indices aren't used, no image is drawn.
        if (lazyIndices.IsValueCreated)
        {
            this.DrawLongDurationSpectrogram(
                segmentSettings.SegmentOutputDirectory,
                recording.BaseName,
                results.ScoreTrack,
                lazyIndices.Value,
                acousticIndicesConfig.HighResolutionIndices);
        }
    }

    return (analysisResults);
}
public static void CorrectSpectrumIndex(AnalysisResult2 result, SpectralIndexBase spectralIndexToBeFixed, int totalSpectrumIndicesSoFar, int totalSpectrumIndicesInResultSoFar) { // no corrections need to be made }
/// <summary>
/// Calculates acoustic indices for one audio segment, one set of summary/spectral indices
/// per index-calculation subsegment. Optionally writes the index CSV files, draws a
/// standard spectrogram with score tracks, and/or dumps the raw spectrogram matrix to CSV
/// (required for producing zoomed spectrograms finer than 0.2 s/pixel).
/// </summary>
/// <param name="analysisSettings">Global analysis settings; AnalysisAnalyzerSpecificConfiguration holds the <c>AcousticIndicesConfig</c>.</param>
/// <param name="segmentSettings">Per-segment file paths, offsets and source metadata.</param>
/// <returns>An <see cref="AnalysisResult2"/> populated with per-subsegment summary and spectral indices.</returns>
public AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    var acousticIndicesConfiguration = (AcousticIndicesConfig)analysisSettings.AnalysisAnalyzerSpecificConfiguration;
    var indexCalculationDuration = acousticIndicesConfiguration.IndexCalculationDuration.Seconds();

    var audioFile = segmentSettings.SegmentAudioFile;
    var recording = new AudioRecording(audioFile.FullName);
    var outputDirectory = segmentSettings.SegmentOutputDirectory;

    var analysisResults = new AnalysisResult2(analysisSettings, segmentSettings, recording.Duration);
    analysisResults.AnalysisIdentifier = this.Identifier;

    // calculate indices for each subsegment
    IndexCalculateResult[] subsegmentResults = CalculateIndicesInSubsegments(
        recording,
        segmentSettings.SegmentStartOffset,
        segmentSettings.AnalysisIdealSegmentDuration,
        indexCalculationDuration,
        acousticIndicesConfiguration.IndexProperties,
        segmentSettings.Segment.SourceMetadata.SampleRate,
        acousticIndicesConfiguration);

    var trackScores = new List<Plot>(subsegmentResults.Length);
    var tracks = new List<Track>(subsegmentResults.Length);

    analysisResults.SummaryIndices = new SummaryIndexBase[subsegmentResults.Length];
    analysisResults.SpectralIndices = new SpectralIndexBase[subsegmentResults.Length];

    // copy each subsegment's results into the output arrays and gather score/track overlays
    for (int i = 0; i < subsegmentResults.Length; i++)
    {
        var indexCalculateResult = subsegmentResults[i];
        indexCalculateResult.SummaryIndexValues.FileName = segmentSettings.Segment.SourceMetadata.Identifier;
        indexCalculateResult.SpectralIndexValues.FileName = segmentSettings.Segment.SourceMetadata.Identifier;

        analysisResults.SummaryIndices[i] = indexCalculateResult.SummaryIndexValues;
        analysisResults.SpectralIndices[i] = indexCalculateResult.SpectralIndexValues;
        trackScores.AddRange(indexCalculateResult.TrackScores);

        if (indexCalculateResult.Tracks != null)
        {
            tracks.AddRange(indexCalculateResult.Tracks);
        }
    }

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        this.WriteSummaryIndicesFile(segmentSettings.SegmentSummaryIndicesFile, analysisResults.SummaryIndices);
        analysisResults.SummaryIndicesFile = segmentSettings.SegmentSummaryIndicesFile;
    }

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        analysisResults.SpectraIndicesFiles =
            WriteSpectrumIndicesFilesCustom(
                segmentSettings.SegmentSpectrumIndicesDirectory,
                Path.GetFileNameWithoutExtension(segmentSettings.SegmentAudioFile.Name),
                analysisResults.SpectralIndices);
    }

    // write the segment spectrogram (typically of one minute duration) to CSV
    // this is required if you want to produced zoomed spectrograms at a resolution greater than 0.2 seconds/pixel
    bool saveSonogramData = analysisSettings.Configuration.GetBoolOrNull(AnalysisKeys.SaveSonogramData) ?? false;
    if (saveSonogramData || analysisSettings.AnalysisImageSaveBehavior.ShouldSave(analysisResults.Events.Length))
    {
        var sonoConfig = new SonogramConfig(); // default values config
        sonoConfig.SourceFName = recording.FilePath;
        sonoConfig.WindowSize = acousticIndicesConfiguration.FrameLength;
        sonoConfig.WindowStep = analysisSettings.Configuration.GetIntOrNull(AnalysisKeys.FrameStep) ?? sonoConfig.WindowSize; // default = no overlap
        sonoConfig.WindowOverlap = (sonoConfig.WindowSize - sonoConfig.WindowStep) / (double)sonoConfig.WindowSize;

        // Linear or Octave frequency scale?
        bool octaveScale = analysisSettings.Configuration.GetBoolOrNull(AnalysisKeys.KeyOctaveFreqScale) ?? false;
        if (octaveScale)
        {
            // octave scale forces step == window size, i.e. zero overlap
            sonoConfig.WindowStep = sonoConfig.WindowSize;
            sonoConfig.WindowOverlap = (sonoConfig.WindowSize - sonoConfig.WindowStep) / (double)sonoConfig.WindowSize;
        }

        ////sonoConfig.NoiseReductionType = NoiseReductionType.NONE; // the default
        ////sonoConfig.NoiseReductionType = NoiseReductionType.STANDARD;
        var sonogram = new SpectrogramStandard(sonoConfig, recording.WavReader);

        // remove the DC row of the spectrogram
        sonogram.Data = MatrixTools.Submatrix(sonogram.Data, 0, 1, sonogram.Data.GetLength(0) - 1, sonogram.Data.GetLength(1) - 1);

        if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave())
        {
            string imagePath = Path.Combine(outputDirectory.FullName, segmentSettings.SegmentImageFile.Name);

            // NOTE: hits (SPT in this case) is intentionally not supported
            var image = DrawSonogram(sonogram, null, trackScores, tracks);
            image.Save(imagePath);
            analysisResults.ImageFile = new FileInfo(imagePath);
        }

        if (saveSonogramData)
        {
            string csvPath = Path.Combine(outputDirectory.FullName, recording.BaseName + ".csv");
            Csv.WriteMatrixToCsv(csvPath.ToFileInfo(), sonogram.Data);
        }
    }

    return (analysisResults);
}
/// <summary>
/// A WRAPPER AROUND THE analyzer.Analyze(analysisSettings) METHOD.
/// To be called as an executable with command line arguments: prepares (and optionally
/// trims) the recording, runs the obsolete Canetoad analyzer and reports the event count.
/// </summary>
/// <param name="arguments">
/// The command line arguments (source, output, optional start/duration in seconds).
/// </param>
public static void Execute(Arguments arguments)
{
    Contract.Requires(arguments != null);

    // a missing Start/Duration defaults to 0; a zero duration means "process the whole file"
    TimeSpan start = TimeSpan.FromSeconds(arguments.Start ?? 0);
    TimeSpan duration = TimeSpan.FromSeconds(arguments.Duration ?? 0);

    // EXTRACT THE REQUIRED RECORDING SEGMENT
    // (request is now built exactly once; the original assigned it three times)
    AudioUtilityRequest audioUtilityRequest;
    if (duration == TimeSpan.Zero)
    {
        // Process entire file
        audioUtilityRequest = new AudioUtilityRequest { TargetSampleRate = RESAMPLE_RATE };
    }
    else
    {
        audioUtilityRequest = new AudioUtilityRequest
        {
            TargetSampleRate = RESAMPLE_RATE,
            OffsetStart = start,
            OffsetEnd = start.Add(duration),
        };
    }

    var preparedFile = AudioFilePreparer.PrepareFile(
        arguments.Output,
        arguments.Source,
        MediaTypes.MediaTypeWav,
        audioUtilityRequest,
        arguments.Output);

    var (analysisSettings, segmentSettings) = arguments.ToAnalysisSettings(
        sourceSegment: preparedFile.SourceInfo.ToSegment(),
        preparedSegment: preparedFile.TargetInfo.ToSegment());

    // DO THE ANALYSIS
    /* ############################################################################################################################################# */
    IAnalyser2 analyser = new CanetoadOld_OBSOLETE();
    analyser.BeforeAnalyze(analysisSettings);
    AnalysisResult2 result = analyser.Analyze(analysisSettings, segmentSettings);
    /* ############################################################################################################################################# */

    if (result.Events.Length > 0)
    {
        LoggedConsole.WriteLine("{0} events found", result.Events.Length);
        if (Log.IsDebugEnabled)
        {
            var firstEvent = (AcousticEvent)result.Events.First();

            // BUG FIX: duration was previously computed as TimeStart - TimeEnd,
            // which always logged a negative value; it is now TimeEnd - TimeStart.
            Log.Debug($"Event 0 profile: start={firstEvent.TimeStart}, duration={firstEvent.TimeEnd - firstEvent.TimeStart}");
        }
    }
    else
    {
        LoggedConsole.WriteLine("No events found");
    }
}