/// <summary>
/// Checks that the execution environment is valid: analyzer types can be loaded, audio tool
/// executables can be found, data annotations pass, and we are not running under Mono.
/// </summary>
/// <param name="arguments">The parsed command line arguments (currently unused by the checks).</param>
/// <returns><c>ExceptionLookup.Ok</c> when the environment is valid, otherwise an error exit code.</returns>
private int Execute(Arguments arguments)
{
    var errors = new List<string>();

    Log.Info("Checking required executables and libraries can be found and loaded");

    // this is an important call used in analyze long recordings.
    // This call effectively checks if we can load types and if files are present (I think)
    try
    {
        AnalysisCoordinator.GetAnalyzers<IAnalyser2>(typeof(MainEntry).Assembly);
    }
    catch (ReflectionTypeLoadException rtlex)
    {
        errors.Add(ExceptionLookup.FormatReflectionTypeLoadException(rtlex, true));
    }

    // master audio utility checks for available executables
    try
    {
        var utility = new MasterAudioUtility();
    }
    catch (Exception ex)
    {
        errors.Add(ex.Message);
    }

    if (MainEntry.CheckForDataAnnotations() is string message)
    {
        errors.Add(message);
    }

    // "Mono.Runtime" only resolves when the process is hosted by Mono, which is unsupported
    Type type = Type.GetType("Mono.Runtime");
    if (type != null)
    {
        // BUG FIX: removed the stray literal `$` that was printed before the app name,
        // and reworded "prefix the {name} prefix" to a sensible instruction.
        errors.Add($"We no longer use Mono with {Meta.Name}. DO NOT prefix the {Meta.Name} command with `mono`.");
    }

    // don't have much more to check at the current time
    if (errors.Count == 0)
    {
        Log.Success("Valid environment");
        return ExceptionLookup.Ok;
    }

    foreach (var error in errors)
    {
        Log.Error(error);
    }

    // not using exception lookup on purpose - its static constructor loads more types
    return ExceptionLookup.UnhandledExceptionErrorCode;
}
/// <summary>
/// Verifies that <c>GetNamedDirectory</c> nests additional path fragments beneath the
/// analyzer's named output directory.
/// </summary>
public void TestNamedDirectoryWorksWithSubFolders()
{
    var analyzer = new DummyAnalyzer(false);

    var settings = analyzer.DefaultSettings;
    settings.AnalysisOutputDirectory = this.TestOutputDirectory;

    var result = AnalysisCoordinator.GetNamedDirectory(settings.AnalysisOutputDirectory, analyzer, "a", "b", "c");

    // the dummy analyzer's identifier plus the three sub-folder fragments
    var expectedDirectory = this.TestOutputDirectory.Combine("Ecosounds.TempusSubstitutus/a/b/c");
    Assert.AreEqual(expectedDirectory.FullName, result.FullName);
}
/// <summary>
/// Builds a fixed-width text table listing every discoverable analyzer:
/// identifier, type, and (word-wrapped) description.
/// </summary>
/// <returns>A <see cref="StringBuilder"/> containing the rendered table.</returns>
private StringBuilder GetAnalyzersTable()
{
    var analyzers = AnalysisCoordinator
        .GetAnalyzers<IAnalyser2>(typeof(MainEntry).Assembly)
        .OrderBy(x => x.Identifier)
        .ToArray();

    const string identifier = "Identifier";

    // identifier column fits the longest identifier (or the header); type column is fixed width
    var identifierWidth = Math.Max(identifier.Length, analyzers.Max(a => a.Identifier.Length)) + 1;
    var typeWidth = 16 + 1;

    int bufferWidth;
    try
    {
        bufferWidth = Console.BufferWidth;
    }
    catch (Exception)
    {
        // Console.BufferWidth throws when there is no attached console (e.g. redirected output)
        bufferWidth = 80;
    }

    // round down to a multiple of 10, reserve room for the newline, never narrower than 80
    var consoleWidth = Math.Max((10 * (bufferWidth / 10)) - Environment.NewLine.Length, 80);
    var descriptionWidth = consoleWidth - identifierWidth - typeWidth;

    // negative alignment = left-justified columns
    string tableFormat = "{0, " + -identifierWidth + "}{1, " + -typeWidth + "}{2," + -descriptionWidth + "}";
    string header = string.Format(tableFormat, identifier, "Type", "Description");

    var table = new StringBuilder((analyzers.Length + 3) * consoleWidth);
    table.AppendLine(header);
    table.AppendLine(string.Empty.PadRight(header.Length, '-'));

    foreach (IAnalyser2 analyzer in analyzers)
    {
        var isEventRecognizer = analyzer is IEventRecognizer;

        var description = analyzer.Description;
        if (string.IsNullOrWhiteSpace(description))
        {
            description = "<No description>";
        }

        if (description.Length > descriptionWidth)
        {
            description = description.WordWrap(descriptionWidth, identifierWidth + typeWidth);
        }

        table.AppendLine(string.Format(tableFormat, analyzer.Identifier, isEventRecognizer ? "Event Recognizer" : "Type unknown", description));
    }

    return table;
}
/// <summary>
/// Runs the supplied segments through a freshly constructed, serial
/// <see cref="AnalysisCoordinator"/> using the dummy analyzer.
/// </summary>
/// <param name="segments">The segments to analyse; must not be null.</param>
/// <returns>The per-segment analysis results.</returns>
private AnalysisResult2[] TestAnalysisCoordinator(ISegment<FileInfo>[] segments)
{
    Contract.Requires(segments != null);

    var coordinator = new AnalysisCoordinator(
        new DummySourcePreparer(),
        saveIntermediateWavFiles: SaveBehavior.Always,
        uniqueDirectoryPerSegment: false,
        isParallel: false);

    IAnalyser2 analyzer = new DummyAnalyzer(false);

    var settings = analyzer.DefaultSettings;
    settings.AnalysisOutputDirectory = this.TestOutputDirectory;
    settings.AnalysisTempDirectory = this.TestOutputDirectory.Combine("Temp");

    return coordinator.Run(segments, analyzer, settings);
}
/// <summary>
/// Finds an analyzer either by an exact identifier (from a CLI option) or by the first two
/// dot-separated fragments of a partial identifier such as a config file name.
/// </summary>
/// <typeparam name="T">The analyzer contract to search for.</typeparam>
/// <param name="analysisIdentifier">An exact identifier; when blank the partial identifier is used instead.</param>
/// <param name="partialIdentifier">A fallback name, e.g. "Towsey.Acoustics.Zooming.yml".</param>
/// <returns>The matching analyzer instance.</returns>
/// <exception cref="ValidationException">Thrown when no analyzer matches the resolved search name.</exception>
public static T FindAndCheckAnalyzer<T>(string analysisIdentifier, string partialIdentifier)
    where T : class, IAnalyser2
{
    string searchName;
    if (analysisIdentifier.IsNotWhitespace())
    {
        searchName = analysisIdentifier;
        Log.Debug($"Searching for exact analysis identifier name {searchName} (from a CLI option)");
    }
    else
    {
        // split name (e.g. "Towsey.Acoustics.Zooming.yml") on periods
        var fragments = partialIdentifier.Split(new[] { "." }, StringSplitOptions.RemoveEmptyEntries);
        Contract.Requires<CommandLineArgumentException>(
            fragments.Length >= 2,
            $"We need at least two segments to search for an analyzer, supplied name `{partialIdentifier}` is insufficient.");

        // assume identifier (e.g. "Towsey.Acoustic") in first two segments
        searchName = fragments[0] + "." + fragments[1];
        Log.Debug($"Searching for partial analysis identifier name. `{searchName}` extracted from `{partialIdentifier}`");
    }

    var analyzers = AnalysisCoordinator.GetAnalyzers<T>(typeof(MainEntry).Assembly).ToList();
    T analyzer = analyzers.FirstOrDefault(a => a.Identifier == searchName);
    if (analyzer == null)
    {
        var error = $"We cannot determine what analysis you want to run. We tried to search for \"{searchName}\"";
        LoggedConsole.WriteErrorLine(error);

        // IDIOM FIX: string.Join over Aggregate string concatenation — same output,
        // without the O(n^2) intermediate string allocations.
        var knownAnalyzers = string.Join(string.Empty, analyzers.Select(a => $" {a.Identifier}\n"));
        LoggedConsole.WriteLine("Available analyzers are:\n" + knownAnalyzers);

        throw new ValidationException($"Cannot find an IAnalyser2 with the name `{searchName}`");
    }

    Log.Info($"Using analyzer {analyzer.Identifier}");
    return analyzer;
}
/// <summary>
/// Post-processes acoustic index results for a whole recording: serializes the index-generation
/// metadata to JSON, computes spectral index distribution statistics, optionally renders
/// false-color long-duration spectrograms, and (when tiling is enabled) tiles the rendered images.
/// </summary>
/// <param name="settings">The analysis settings; must carry an <c>AcousticIndicesConfig</c> as analyzer-specific configuration.</param>
/// <param name="inputFileSegment">The source recording segment; TargetFileDuration and TargetFileSampleRate must be populated.</param>
/// <param name="events">Unused here; part of the IAnalyser2 summarise contract.</param>
/// <param name="indices">Unused here; part of the IAnalyser2 summarise contract.</param>
/// <param name="spectralIndices">The spectral indices to aggregate into spectrogram matrices.</param>
/// <param name="results">Unused here; part of the IAnalyser2 summarise contract.</param>
public void SummariseResults(AnalysisSettings settings, FileSegment inputFileSegment, EventBase[] events, SummaryIndexBase[] indices, SpectralIndexBase[] spectralIndices, AnalysisResult2[] results)
{
    var acousticIndicesConfig = (AcousticIndicesConfig)settings.AnalysisAnalyzerSpecificConfiguration;

    var sourceAudio = inputFileSegment.Source;
    var resultsDirectory = AnalysisCoordinator.GetNamedDirectory(settings.AnalysisOutputDirectory, this);
    bool tileOutput = acousticIndicesConfig.TileOutput;

    var frameWidth = acousticIndicesConfig.FrameLength;

    // a resample rate from the config wins over the application default
    int sampleRate = AppConfigHelper.DefaultTargetSampleRate;
    sampleRate = acousticIndicesConfig.ResampleRate ?? sampleRate;

    // Gather settings for rendering false color spectrograms
    var ldSpectrogramConfig = acousticIndicesConfig.LdSpectrogramConfig;

    string basename = Path.GetFileNameWithoutExtension(sourceAudio.Name);

    // output to disk (so other analyzers can use the data,
    // only data - configuration settings that generated these indices
    // this data can then be used by post-process analyses
    /* NOTE: The value for FrameStep is used only when calculating a standard spectrogram
     * FrameStep is NOT used when calculating Summary and Spectral indices.
     */
    var indexConfigData = new IndexGenerationData()
    {
        RecordingExtension = inputFileSegment.Source.Extension,
        RecordingBasename = basename,
        RecordingStartDate = inputFileSegment.TargetFileStartDate,
        RecordingDuration = inputFileSegment.TargetFileDuration.Value,
        SampleRateOriginal = inputFileSegment.TargetFileSampleRate.Value,
        SampleRateResampled = sampleRate,
        FrameLength = frameWidth,
        FrameStep = settings.Configuration.GetIntOrNull(AnalysisKeys.FrameStep) ?? frameWidth,
        IndexCalculationDuration = acousticIndicesConfig.IndexCalculationDurationTimeSpan,
        BgNoiseNeighbourhood = acousticIndicesConfig.BgNoiseBuffer,
        AnalysisStartOffset = inputFileSegment.SegmentStartOffset ?? TimeSpan.Zero,
        MaximumSegmentDuration = settings.AnalysisMaxSegmentDuration,
        BackgroundFilterCoeff = SpectrogramConstants.BACKGROUND_FILTER_COEFF,
        LongDurationSpectrogramConfig = ldSpectrogramConfig,
    };
    var icdPath = FilenameHelpers.AnalysisResultPath(
        resultsDirectory,
        basename,
        IndexGenerationData.FileNameFragment,
        "json");
    Json.Serialise(icdPath.ToFileInfo(), indexConfigData);

    // gather spectra to form spectrograms. Assume same spectra in all analyzer results
    // this is the most efficient way to do this
    // gather up numbers and strings store in memory, write to disk one time
    // this method also AUTOMATICALLY SORTS because it uses array indexing
    var dictionaryOfSpectra = spectralIndices.ToTwoDimensionalArray(SpectralIndexValues.CachedSelectors, TwoDimensionalArray.Rotate90ClockWise);

    // Calculate the index distribution statistics and write to a json file. Also save as png image
    var indexDistributions = IndexDistributions.WriteSpectralIndexDistributionStatistics(dictionaryOfSpectra, resultsDirectory, basename);

    // HACK: do not render false color spectrograms unless IndexCalculationDuration = 60.0 (the normal resolution)
    if (acousticIndicesConfig.IndexCalculationDurationTimeSpan != 60.0.Seconds())
    {
        Log.Warn("False color spectrograms were not rendered");
    }
    else
    {
        FileInfo indicesPropertiesConfig = acousticIndicesConfig.IndexPropertiesConfig.ToFileInfo();

        // Actually draw false color / long duration spectrograms
        Tuple<Image<Rgb24>, string>[] images = LDSpectrogramRGB.DrawSpectrogramsFromSpectralIndices(
            inputDirectory: resultsDirectory,
            outputDirectory: resultsDirectory,
            ldSpectrogramConfig: ldSpectrogramConfig,
            indexPropertiesConfigPath: indicesPropertiesConfig,
            indexGenerationData: indexConfigData,
            basename: basename,
            analysisType: this.Identifier,
            indexSpectrograms: dictionaryOfSpectra,
            indexStatistics: indexDistributions,
            imageChrome: (!tileOutput).ToImageChrome());

        if (tileOutput)
        {
            // NOTE(review): assumes the draw call returns exactly one pair of images when tiling — confirm
            Debug.Assert(images.Length == 2);

            Log.Info("Tiling output at scale: " + acousticIndicesConfig.IndexCalculationDuration);

            foreach (var image in images)
            {
                TileOutput(resultsDirectory, Path.GetFileNameWithoutExtension(sourceAudio.Name), image.Item2 + ".Tile", inputFileSegment, image.Item1);
            }
        }
    }
}
/// <summary>
/// 2. Analyses long audio recording (mp3 or wav) as per passed config file. Outputs an events.csv file AND an
/// indices.csv file
/// Signed off: Michael Towsey 4th December 2012
/// </summary>
/// <param name="arguments">The parsed command line arguments; must not be null.</param>
public static void Execute(Arguments arguments)
{
    if (arguments == null)
    {
        throw new NoDeveloperMethodException();
    }

    LoggedConsole.WriteLine("# PROCESS LONG RECORDING");
    LoggedConsole.WriteLine("# DATE AND TIME: " + DateTime.Now);

    // 1. set up the necessary files
    var sourceAudio = arguments.Source;
    var configFile = arguments.Config.ToFileInfo();
    var outputDirectory = arguments.Output;
    var tempFilesDirectory = arguments.TempDir;

    // if a temp dir is not given, use output dir as temp dir
    if (tempFilesDirectory == null)
    {
        Log.Warn("No temporary directory provided, using output directory");
        tempFilesDirectory = outputDirectory;
    }

    // try to automatically find the config file
    if (configFile == null)
    {
        throw new FileNotFoundException("No config file argument provided");
    }
    else if (!configFile.Exists)
    {
        Log.Warn($"Config file {configFile.FullName} not found... attempting to resolve config file");

        // we use .ToString() here to get the original input string - Using fullname always produces an absolute path wrt to pwd... we don't want to prematurely make assumptions:
        // e.g. We require a missing absolute path to fail... that wouldn't work with .Name
        // e.g. We require a relative path to try and resolve, using .FullName would fail the first absolute check inside ResolveConfigFile
        configFile = ConfigFile.Resolve(configFile.ToString(), Directory.GetCurrentDirectory().ToDirectoryInfo());
    }

    // XOR: exactly one of the offsets being supplied is an error — they must come as a pair
    if (arguments.StartOffset.HasValue ^ arguments.EndOffset.HasValue)
    {
        throw new InvalidStartOrEndException("If StartOffset or EndOffset is specified, then both must be specified");
    }

    if (arguments.StartOffset.HasValue && arguments.EndOffset.HasValue && arguments.EndOffset.Value <= arguments.StartOffset.Value)
    {
        throw new InvalidStartOrEndException("Start offset must be less than end offset.");
    }

    LoggedConsole.WriteLine("# Recording file: " + sourceAudio.FullName);
    LoggedConsole.WriteLine("# Configuration file: " + configFile);
    LoggedConsole.WriteLine("# Output folder: " + outputDirectory);
    LoggedConsole.WriteLine("# Temp File Directory: " + tempFilesDirectory);

    // optionally copy logs / config to make results easier to understand
    // TODO: remove, see https://github.com/QutEcoacoustics/audio-analysis/issues/133
    if (arguments.WhenExitCopyConfig || arguments.WhenExitCopyLog)
    {
        AppDomain.CurrentDomain.ProcessExit += (sender, args) => { Cleanup(arguments, configFile); };
    }

    // 2. initialize the analyzer
    // we're changing the way resolving config files works. Ideally, we'd like to use statically typed config files
    // but we can't do that unless we know which type we have to load first! Currently analyzer to load is in
    // the config file so we can't know which analyzer we can use. Thus we will change to using the file name,
    // or an argument to resolve the analyzer to load.
    // Get analysis name:
    IAnalyser2 analyzer = FindAndCheckAnalyzer<IAnalyser2>(arguments.AnalysisIdentifier, configFile.Name);

    // 2. get the analysis config
    AnalyzerConfig configuration = analyzer.ParseConfig(configFile);

    SaveBehavior saveIntermediateWavFiles = configuration.SaveIntermediateWavFiles;
    bool saveIntermediateDataFiles = configuration.SaveIntermediateCsvFiles;
    SaveBehavior saveSonogramsImages = configuration.SaveSonogramImages;

    bool filenameDate = configuration.RequireDateInFilename;

    if (configuration[AnalysisKeys.AnalysisName].IsNotWhitespace())
    {
        Log.Warn("Your config file has `AnalysisName` set - this property is deprecated and ignored");
    }

    // AT 2018-02: changed logic so default index properties loaded if not provided
    FileInfo indicesPropertiesConfig = IndexProperties.Find(configuration, configFile);
    if (indicesPropertiesConfig == null || !indicesPropertiesConfig.Exists)
    {
        Log.Warn("IndexProperties config can not be found! Loading a default");
        indicesPropertiesConfig = ConfigFile.Default<Dictionary<string, IndexProperties>>();
    }

    LoggedConsole.WriteLine("# IndexProperties Cfg: " + indicesPropertiesConfig.FullName);

    // min score for an acceptable event
    Log.Info("Minimum event threshold has been set to " + configuration.EventThreshold);

    FileSegment.FileDateBehavior defaultBehavior = FileSegment.FileDateBehavior.Try;
    if (filenameDate)
    {
        if (!FileDateHelpers.FileNameContainsDateTime(sourceAudio.Name))
        {
            throw new InvalidFileDateException(
                "When RequireDateInFilename option is set, the filename of the source audio file must contain " +
                "a valid AND UNAMBIGUOUS date. Such a date was not able to be parsed.");
        }

        defaultBehavior = FileSegment.FileDateBehavior.Required;
    }

    // 3. initialize AnalysisCoordinator class that will do the analysis
    var analysisCoordinator = new AnalysisCoordinator(
        new LocalSourcePreparer(),
        saveIntermediateWavFiles,
        false,
        arguments.Parallel);

    // 4. get the segment of audio to be analysed
    // if tiling output, specify that FileSegment needs to be able to read the date
    var fileSegment = new FileSegment(sourceAudio, arguments.AlignToMinute, null, defaultBehavior);
    var bothOffsetsProvided = arguments.StartOffset.HasValue && arguments.EndOffset.HasValue;
    if (bothOffsetsProvided)
    {
        fileSegment.SegmentStartOffset = TimeSpan.FromSeconds(arguments.StartOffset.Value);
        fileSegment.SegmentEndOffset = TimeSpan.FromSeconds(arguments.EndOffset.Value);
    }
    else
    {
        Log.Debug("Neither start nor end segment offsets provided. Therefore both were ignored.");
    }

    // 6. initialize the analysis settings object
    var analysisSettings = analyzer.DefaultSettings;
    analysisSettings.ConfigFile = configFile;
    analysisSettings.Configuration = configuration;
    analysisSettings.AnalysisOutputDirectory = outputDirectory;
    analysisSettings.AnalysisTempDirectory = tempFilesDirectory;
    analysisSettings.AnalysisDataSaveBehavior = saveIntermediateDataFiles;
    analysisSettings.AnalysisImageSaveBehavior = saveSonogramsImages;
    analysisSettings.AnalysisChannelSelection = arguments.Channels;
    analysisSettings.AnalysisMixDownToMono = arguments.MixDownToMono;

    // fall back to the analyzer's default segment duration, then one minute, when the config omits it
    var segmentDuration = configuration.SegmentDuration?.Seconds();
    if (!segmentDuration.HasValue)
    {
        segmentDuration = analysisSettings.AnalysisMaxSegmentDuration ?? TimeSpan.FromMinutes(1);
        Log.Warn(
            $"Can't read `{nameof(AnalyzerConfig.SegmentDuration)}` from config file. " +
            $"Default value of {segmentDuration} used)");
    }

    analysisSettings.AnalysisMaxSegmentDuration = segmentDuration.Value;

    var segmentOverlap = configuration.SegmentOverlap?.Seconds();
    if (!segmentOverlap.HasValue)
    {
        segmentOverlap = analysisSettings.SegmentOverlapDuration;
        Log.Warn(
            $"Can't read `{nameof(AnalyzerConfig.SegmentOverlap)}` from config file. " +
            $"Default value of {segmentOverlap} used)");
    }

    analysisSettings.SegmentOverlapDuration = segmentOverlap.Value;

    // set target sample rate
    var resampleRate = configuration.ResampleRate;
    if (!resampleRate.HasValue)
    {
        resampleRate = analysisSettings.AnalysisTargetSampleRate ?? AppConfigHelper.DefaultTargetSampleRate;
        Log.Warn(
            $"Can't read {nameof(configuration.ResampleRate)} from config file. " +
            $"Default value of {resampleRate} used)");
    }

    analysisSettings.AnalysisTargetSampleRate = resampleRate;

    Log.Info(
        $"{nameof(configuration.SegmentDuration)}={segmentDuration}, " +
        $"{nameof(configuration.SegmentOverlap)}={segmentOverlap}, " +
        $"{nameof(configuration.ResampleRate)}={resampleRate}");

    // 7. ####################################### DO THE ANALYSIS ###################################
    LoggedConsole.WriteLine("START ANALYSIS ...");
    var analyserResults = analysisCoordinator.Run(fileSegment, analyzer, analysisSettings);

    // ##############################################################################################
    // 8. PROCESS THE RESULTS
    LoggedConsole.WriteLine(string.Empty);
    LoggedConsole.WriteLine("START PROCESSING RESULTS ...");
    if (analyserResults == null)
    {
        LoggedConsole.WriteErrorLine("###################################################\n");
        LoggedConsole.WriteErrorLine("The Analysis Run Coordinator has returned a null result.");
        LoggedConsole.WriteErrorLine("###################################################\n");
        throw new AnalysisOptionDevilException();
    }

    // Merge and correct main result types
    EventBase[] mergedEventResults = ResultsTools.MergeResults(analyserResults, ar => ar.Events, ResultsTools.CorrectEvent);
    SummaryIndexBase[] mergedIndicesResults = ResultsTools.MergeResults(analyserResults, ar => ar.SummaryIndices, ResultsTools.CorrectSummaryIndex);
    SpectralIndexBase[] mergedSpectralIndexResults = ResultsTools.MergeResults(analyserResults, ar => ar.SpectralIndices, ResultsTools.CorrectSpectrumIndex);

    // not an exceptional state, do not throw exception
    if (mergedEventResults != null && mergedEventResults.Length == 0)
    {
        LoggedConsole.WriteWarnLine("The analysis produced no EVENTS (mergedResults had zero count)");
    }

    if (mergedIndicesResults != null && mergedIndicesResults.Length == 0)
    {
        LoggedConsole.WriteWarnLine("The analysis produced no Summary INDICES (mergedResults had zero count)");
    }

    if (mergedSpectralIndexResults != null && mergedSpectralIndexResults.Length == 0)
    {
        LoggedConsole.WriteWarnLine("The analysis produced no Spectral INDICES (merged results had zero count)");
    }

    // 9. CREATE SUMMARY INDICES IF NECESSARY (FROM EVENTS)
#if DEBUG
    // get the duration of the original source audio file - need this to convert Events datatable to Indices Datatable
    var audioUtility = new MasterAudioUtility(tempFilesDirectory);
    var mimeType = MediaTypes.GetMediaType(sourceAudio.Extension);
    var sourceInfo = audioUtility.Info(sourceAudio);

    // updated by reference all the way down in LocalSourcePreparer
    Debug.Assert(fileSegment.TargetFileDuration == sourceInfo.Duration);
#endif
    var duration = fileSegment.TargetFileDuration.Value;

    ResultsTools.ConvertEventsToIndices(
        analyzer,
        mergedEventResults,
        ref mergedIndicesResults,
        duration,
        configuration.EventThreshold);
    int eventsCount = mergedEventResults?.Length ?? 0;
    int numberOfRowsOfIndices = mergedIndicesResults?.Length ?? 0;

    // 10. Allow analysers to post-process
    // TODO: remove results directory if possible
    var instanceOutputDirectory = AnalysisCoordinator.GetNamedDirectory(analysisSettings.AnalysisOutputDirectory, analyzer);

    // 11. IMPORTANT - this is where IAnalyser2's post processor gets called.
    // Produces all spectrograms and images of SPECTRAL INDICES.
    // Long duration spectrograms are drawn IFF analysis type is Towsey.Acoustic
    analyzer.SummariseResults(analysisSettings, fileSegment, mergedEventResults, mergedIndicesResults, mergedSpectralIndexResults, analyserResults);

    // 12. SAVE THE RESULTS
    string fileNameBase = Path.GetFileNameWithoutExtension(sourceAudio.Name);

    var eventsFile = ResultsTools.SaveEvents(analyzer, fileNameBase, instanceOutputDirectory, mergedEventResults);
    var indicesFile = ResultsTools.SaveSummaryIndices(analyzer, fileNameBase, instanceOutputDirectory, mergedIndicesResults);
    var spectraFile = ResultsTools.SaveSpectralIndices(analyzer, fileNameBase, instanceOutputDirectory, mergedSpectralIndexResults);

    // 13. THIS IS WHERE SUMMARY INDICES ARE PROCESSED
    // Convert summary indices to black and white tracks image
    if (mergedIndicesResults == null)
    {
        Log.Info("No summary indices produced");
    }
    else
    {
        if (indicesPropertiesConfig == null || !indicesPropertiesConfig.Exists)
        {
            throw new InvalidOperationException("Cannot process indices without an index configuration file, the file could not be found!");
        }

        // this arbitrary amount of data.
        if (mergedIndicesResults.Length > 5000)
        {
            Log.Warn("Summary Indices Image not able to be drawn - there are too many indices to render");
        }
        else
        {
            var basename = Path.GetFileNameWithoutExtension(fileNameBase);
            string imageTitle = $"SOURCE:{basename}, {Meta.OrganizationTag}; ";

            // Draw Tracks-Image of Summary indices
            // set time scale resolution for drawing of summary index tracks
            TimeSpan timeScale = TimeSpan.FromSeconds(0.1);
            Bitmap tracksImage = IndexDisplay.DrawImageOfSummaryIndices(
                IndexProperties.GetIndexProperties(indicesPropertiesConfig),
                indicesFile,
                imageTitle,
                timeScale,
                fileSegment.TargetFileStartDate);
            var imagePath = FilenameHelpers.AnalysisResultPath(instanceOutputDirectory, basename, "SummaryIndices", ImageFileExt);
            tracksImage.Save(imagePath);
        }
    }

    // 14. wrap up, write stats
    LoggedConsole.WriteLine("INDICES CSV file(s) = " + (indicesFile?.Name ?? "<<No indices result, no file!>>"));
    LoggedConsole.WriteLine("\tNumber of rows (i.e. minutes) in CSV file of indices = " + numberOfRowsOfIndices);
    LoggedConsole.WriteLine(string.Empty);

    if (eventsFile == null)
    {
        LoggedConsole.WriteLine("An Events CSV file was NOT returned.");
    }
    else
    {
        LoggedConsole.WriteLine("EVENTS CSV file(s) = " + eventsFile.Name);
        LoggedConsole.WriteLine("\tNumber of events = " + eventsCount);
    }

    Log.Success($"Analysis Complete.\nSource={sourceAudio.Name}\nOutput={instanceOutputDirectory.FullName}");
}
/// <summary>
/// Runs the event statistics analysis end-to-end: validates arguments, authenticates against the
/// remote acoustic workbench, reads imported events from CSV, resolves their remote metadata into
/// analysis segments, computes per-event statistics via the AnalysisCoordinator, and writes a
/// results CSV plus summary statistics.
/// </summary>
/// <param name="arguments">The parsed command line arguments for this analysis.</param>
/// <returns>An application exit code: <c>Ok</c>, or <c>NoData</c> when the input contains no events.</returns>
public static async Task<int> ExecuteAsync(Arguments arguments)
{
    Log.Info("Event statistics analysis begin");

    // validate arguments
    var input = arguments.Source;
    var config = arguments.Config.ToFileInfo();

    if (!input.Exists)
    {
        throw new FileNotFoundException("Cannot find source file", input.FullName);
    }

    // try to automatically find the config file
    if (config == null)
    {
        throw new FileNotFoundException("No config file argument provided");
    }
    else if (!config.Exists)
    {
        Log.Warn($"Config file {config.FullName} not found... attempting to resolve config file");

        // we use the original input string - Using FileInfo fullname always produces an
        // absolute path relative to pwd... we don't want to prematurely make assumptions:
        // e.g. We require a missing absolute path to fail... that wouldn't work with .Name
        // e.g. We require a relative path to try and resolve, using .FullName would fail the first absolute
        // check inside ResolveConfigFile
        config = ConfigFile.Resolve(arguments.Config, Directory.GetCurrentDirectory().ToDirectoryInfo());
    }

    // if a temp dir is not given, use output dir as temp dir
    if (arguments.TempDir == null)
    {
        Log.Warn("No temporary directory provided, using backup directory");
    }

    // Remote: create an instance of our API helpers
    IApi api = arguments.WorkbenchApi.IsNullOrEmpty() ? Api.Default : Api.Parse(arguments.WorkbenchApi);

    // log some helpful messages
    Log.Info("Events file: " + input);
    Log.Info("Configuration file: " + config);
    Log.Info("Output folder: " + arguments.Output);
    Log.Info("Temp File Directory: " + arguments.TempDir);
    Log.Info("Api: " + api);

    // Remote: Test we can log in to the workbench
    var auth = new AuthenticationService(api);
    Task<IAuthenticatedApi> task;
    if (arguments.AuthenticationToken.IsNotWhitespace())
    {
        Log.Debug("Using token for authentication");
        task = auth.CheckLogin(arguments.AuthenticationToken);
    }
    else
    {
        // interactive fallback: prompt for credentials when no token is supplied
        var username = LoggedConsole.Prompt("Enter your username or email for the acoustic workbench:");
        var password = LoggedConsole.Prompt("Enter your password for the acoustic workbench:", forPassword: true);
        task = auth.Login(username, password);
    }

    LoggedConsole.WriteWaitingLine(task, "Logging into workbench...");

    var authenticatedApi = await task.TimeoutAfter(Service.ClientTimeout).ConfigureAwait(false);

    Log.Info("Login success" + authenticatedApi);

    // read events/annotation file
    Log.Info("Now reading input data");

    // Read events from provided CSV file.
    // Also tag them with an order index to allow sorting in the same order as they were provided to us.
    var events = Csv
        .ReadFromCsv<ImportedEvent>(input, throwOnMissingField: false)
        .Select(
            (x, i) =>
            {
                x.Order = i;
                return x;
            })
        .ToArray();

    if (events.Length == 0)
    {
        Log.Warn("No events imported - source file empty. Exiting");
        return ExceptionLookup.NoData;
    }

    Log.Info($"Events read, {events.Length} read.");

    // need to validate the events
    var invalidEvents = events.Where(e => !e.IsValid()).ToArray();

    if (invalidEvents.Length > 0)
    {
        throw new InvalidOperationException(
            "Invalid event detected." + $" {invalidEvents.Length} events are not valid. The first invalid event is {invalidEvents[0]}");
    }

    // next gather meta data for all events
    // and transform list of events into list of segments
    // NOTE: to save on I/O sometimes if events share the same audio block, then multiple events will be
    // bundled into the same analysis segment.
    var resolver = new EventMetadataResolver(
        authenticatedApi,
        PaddingFunction,
        arguments.Parallel ? 25 : 1);
    var metadataTask = resolver.GetRemoteMetadata(events);

    // wait for 1 second per event - this should be an order of magnitude greater than what is needed
    // NOTE(review): assumes this TimeoutAfter overload interprets an int as seconds — confirm against its signature
    ISegment<AudioRecording>[] segments = await metadataTask.TimeoutAfter(events.Length);

    Log.Info($"Metadata collected, preparing to start analysis");

    // finally time to start preparing jobs
    ISourcePreparer preparer = new RemoteSourcePreparer(authenticatedApi, allowSegmentcutting: false);

    AnalysisCoordinator coordinator = new AnalysisCoordinator(
        preparer,
        SaveBehavior.Never,
        uniqueDirectoryPerSegment: false,
        isParallel: arguments.Parallel);

    // instantiate the Analysis
    EventStatisticsAnalysis analysis = new EventStatisticsAnalysis();

    // deserialize the config file
    var configuration = analysis.ParseConfig(config);

    AnalysisSettings settings = analysis.DefaultSettings;
    settings.AnalysisOutputDirectory = arguments.Output;
    settings.AnalysisTempDirectory = arguments.TempDir;
    settings.Configuration = configuration;

    var results = coordinator.Run(segments, analysis, settings);

    var allEvents = results.SelectMany(es => es.Events).ToArray();

    var eventsWithErrors = allEvents.Count(x => ((EventStatistics)x).Error);

    if (eventsWithErrors > 0)
    {
        Log.Warn($"Errors occurred when calculating statistics for {eventsWithErrors} events.");
    }

    Log.Trace("Sorting event statistics results");
    Array.Sort(allEvents);

    Log.Info("Executing summary");

    // TODO: implement if needed
    analysis.SummariseResults(settings, null, allEvents, null, null, results);

    Log.Debug("Summary complete");

    var instanceOutputDirectory = AnalysisCoordinator.GetNamedDirectory(settings.AnalysisOutputDirectory, analysis);

    var resultName = FilenameHelpers.AnalysisResultPath(
        instanceOutputDirectory,
        input,
        analysis.Identifier,
        "csv");

    // NOTE: we are only saving event files
    Log.Info($"Writing results to {resultName}");
    analysis.WriteEventsFile(resultName.ToFileInfo(), allEvents.AsEnumerable());
    Log.Debug("Writing events completed");

    var summaryStats = new
    {
        numberEvents = allEvents.Length,
        durationEvents = allEvents.Sum(x => ((EventStatistics)x).EventDurationSeconds),
        numberRecordings = allEvents.Select(x => ((EventStatistics)x).AudioRecordingId).Distinct().Count(),
        durationAudioProcessed = results.Sum(x => x.SegmentAudioDuration.TotalSeconds),
        remoteAudioDownloaded = (preparer as RemoteSourcePreparer)?.TotalBytesRecieved,
    };
    Log.Info("Summary statistics:\n" + Json.SerializeToString(summaryStats));

    Log.Success("Event statistics analysis complete!");

    return ExceptionLookup.Ok;
}
/// <summary>
/// Drives the coordinator through a pausable dummy analyzer and asserts the on-disk
/// file/directory state at five checkpoints: before analysis, after each of two segments,
/// after summarize, and after completion.
/// </summary>
/// <param name="wav">Whether intermediate wav files should be kept.</param>
/// <param name="unique">Whether each segment gets its own unique output directory.</param>
/// <param name="temp">The temp directory to use; null exercises the fallback temp path.</param>
/// <param name="states">Exactly five expected file-system states, one per checkpoint.</param>
private void TestAnalysisCoordinatorPaths(SaveBehavior wav, bool unique, DirectoryInfo temp, State[] states)
{
    Contract.Requires(states.Length == 5);

    var preparer = new DummySourcePreparer();

    AnalysisCoordinator coordinator = new AnalysisCoordinator(
        preparer,
        saveIntermediateWavFiles: wav,
        uniqueDirectoryPerSegment: unique,
        isParallel: false);

    // an empty non-existent file
    var source = TempFileHelper.NewTempFile(this.TestOutputDirectory).Touch();

    // 120 s of audio with a 60 s max segment duration (set below) yields exactly two segments
    FileSegment segment = new FileSegment(source, duration: 120.0.Seconds(), sampleRate: 123456);

    var dummyAnalyzer = new DummyAnalyzer(true);
    var settings = dummyAnalyzer.DefaultSettings;

    Trace.WriteLine("Class output directory:" + this.TestOutputDirectory.FullName);

    settings.AnalysisMaxSegmentDuration = 60.Seconds();
    settings.AnalysisOutputDirectory = this.AnalysisOutput;
    settings.AnalysisTempDirectory = temp;

    // set the (presumably private) fallback temp directory field so the test controls every path
    this.settingsAccessor.Fields["fallbackTempDirectory"]?.SetValue(settings, this.FallbackTemp.FullName);

    // run the analysis on a background task; the pausable analyzer blocks until pumped below
    var task = Task.Run(() =>
    {
        var results = coordinator.Run(segment, dummyAnalyzer, settings);

        // summarize is currently called manually
        dummyAnalyzer.SummariseResults(settings, segment, null, null, null, results);
    });

    // set up path strings
    string basename = Path.GetFileNameWithoutExtension(source.Name);
    var paths = new CoordinatorPathTestSet
    {
        output = this.AnalysisOutput.FullName,
        temp = (temp ?? this.TestTemp).FullName,
        tempNull = this.FallbackTemp.FullName,
        fragment = "Ecosounds.TempusSubstitutus",
        unique1 = basename + "_000000.00-000060.00",
        unique2 = basename + "_000060.00-000120.00",
        source = basename,
    };

    // wait for the analyzer to pause
    while (!dummyAnalyzer.IsPaused)
    {
        Thread.Sleep(0);
    }

    // manually pump the analysis

    // before analyze
    this.AssertFilesAreAsExpected(0, states[0], paths);
    dummyAnalyzer.Pump();

    // segment 1
    this.AssertFilesAreAsExpected(1, states[1], paths);
    dummyAnalyzer.Pump();

    // segment 2
    this.AssertFilesAreAsExpected(2, states[2], paths);
    dummyAnalyzer.Pump();

    // after summarize
    this.AssertFilesAreAsExpected(3, states[3], paths);

    // complete
    // TODO: remove this rubbish and stick in a IoC file system for testing!
    dummyAnalyzer.Pump(false);
    do
    {
        Thread.Sleep(0.5.Seconds());
        dummyAnalyzer.Pump(false);
    }
    while (!task.IsCompleted);

    task.Wait(1.0.Seconds());

    this.AssertFilesAreAsExpected(4, states[4], paths);
    Assert.IsTrue(task.IsCompleted, "task was not yet completed");
}
/// <summary>
/// Checks that the execution environment is valid: analyzer types can be loaded, audio tool
/// executables can be found, data annotations pass, and (when on Mono) the Mono version is new enough.
/// </summary>
/// <param name="arguments">The parsed command line arguments (currently unused by the checks).</param>
/// <returns><c>ExceptionLookup.Ok</c> when the environment is valid, otherwise an error exit code.</returns>
private int Execute(Arguments arguments)
{
    var errors = new List<string>();

    Log.Info("Checking required executables and libraries can be found and loaded");

    // this is an important call used in analyze long recordings.
    // This call effectively checks if we can load types and if files are present (I think)
    try
    {
        AnalysisCoordinator.GetAnalyzers<IAnalyser2>(typeof(MainEntry).Assembly);
    }
    catch (ReflectionTypeLoadException rtlex)
    {
        errors.Add(ExceptionLookup.FormatReflectionTypeLoadException(rtlex, true));
    }

    // master audio utility checks for available executables
    try
    {
        var utility = new MasterAudioUtility();
    }
    catch (Exception ex)
    {
        errors.Add(ex.Message);
    }

    if (MainEntry.CheckForDataAnnotations() is string message)
    {
        errors.Add(message);
    }

    if (AppConfigHelper.IsMono)
    {
        Type type = Type.GetType("Mono.Runtime");
        if (type != null)
        {
            // Mono exposes its version only through this internal static method
            MethodInfo displayName = type.GetMethod("GetDisplayName", BindingFlags.NonPublic | BindingFlags.Static);
            if (displayName?.Invoke(null, null) is string name)
            {
                // ROBUSTNESS FIX: guard on the regex match succeeding. Previously a display name
                // without a four-part version produced an empty string and `new Version("")`
                // threw an unhandled FormatException instead of recording an error.
                var versionMatch = Regex.Match(name, @".*(\d+\.\d+\.\d+\.\d+).*");
                if (versionMatch.Success)
                {
                    var version = versionMatch.Groups[1].Value;

                    // FIX: debug leftover Console.WriteLine replaced with the logging framework
                    Log.Debug($"Mono version parsed as {version}");

                    if (new Version(version) > new Version(5, 5))
                    {
                        Log.Success($"Your mono version {name} is greater than our required Mono version 5.5");
                    }
                    else
                    {
                        errors.Add($"Mono version is {name}, we require at least Mono 5.5");
                    }
                }
                else
                {
                    errors.Add($"Could not parse a version from Mono display name `{name}`");
                }
            }
            else
            {
                errors.Add("Could not get Mono display name");
            }
        }
    }

    // don't have much more to check at the current time
    if (errors.Count == 0)
    {
        Log.Success("Valid environment");
        return ExceptionLookup.Ok;
    }

    foreach (var error in errors)
    {
        Log.Error(error);
    }

    // not using exception lookup on purpose - its static constructor loads more types
    return ExceptionLookup.UnhandledExceptionErrorCode;
}
/// <summary>
/// Summarises a completed content-description analysis: writes the index generation
/// data (JSON), spectral index distribution statistics, LDFC spectrograms, a CSV of
/// acoustic signatures, and PNG plots of the content scores into the named results
/// directory for this analyzer.
/// </summary>
/// <param name="analysisSettings">Settings whose Configuration is expected to be a <c>CdConfig</c>.</param>
/// <param name="inputFileSegment">The source audio segment; its TargetFileDuration and TargetFileSampleRate must be non-null.</param>
/// <param name="events">Unused by this implementation.</param>
/// <param name="indices">Unused by this implementation.</param>
/// <param name="spectralIndices">Per-segment spectral indices, rotated into spectrogram matrices.</param>
/// <param name="results">Per-segment results; each must carry a <c>DescriptionResult</c> in MiscellaneousResults.</param>
/// <exception cref="NullReferenceException">Thrown when target duration or sample rate is null.</exception>
public override void SummariseResults(
    AnalysisSettings analysisSettings,
    FileSegment inputFileSegment,
    EventBase[] events,
    SummaryIndexBase[] indices,
    SpectralIndexBase[] spectralIndices,
    AnalysisResult2[] results)
{
    // below is example of how to access values in ContentDescription config file.
    //sampleRate = analysisSettings.Configuration.GetIntOrNull(AnalysisKeys.ResampleRate) ?? sampleRate;
    var cdConfiguration = (CdConfig)analysisSettings.Configuration;
    var ldSpectrogramConfig = cdConfiguration.LdSpectrogramConfig;

    //var cdConfigFile = analysisSettings.ConfigFile;
    //var configDirectory = cdConfigFile.DirectoryName ?? throw new ArgumentNullException(nameof(cdConfigFile), "Null value");
    var sourceAudio = inputFileSegment.Source;
    string basename = Path.GetFileNameWithoutExtension(sourceAudio.Name);

    // all outputs below are written under this analyzer's named directory
    var resultsDirectory = AnalysisCoordinator.GetNamedDirectory(analysisSettings.AnalysisOutputDirectory, this);

    // check for null values - this was recommended by ReSharper!
    if (inputFileSegment.TargetFileDuration == null || inputFileSegment.TargetFileSampleRate == null)
    {
        throw new NullReferenceException();
    }

    // output config data to disk so other analyzers can use the data,
    // Should contain data only - i.e. the configuration settings that generated these indices
    // this data can then be used by later analysis processes.
    var indexConfigData = new IndexGenerationData()
    {
        RecordingExtension = inputFileSegment.Source.Extension,
        RecordingBasename = basename,
        RecordingStartDate = inputFileSegment.TargetFileStartDate,
        RecordingDuration = inputFileSegment.TargetFileDuration.Value,
        SampleRateOriginal = inputFileSegment.TargetFileSampleRate.Value,
        SampleRateResampled = ContentSignatures.SampleRate,
        FrameLength = ContentSignatures.FrameSize,
        // NOTE(review): frame step is set equal to frame size, i.e. non-overlapping frames - confirm intended
        FrameStep = ContentSignatures.FrameSize,
        IndexCalculationDuration = TimeSpan.FromSeconds(ContentSignatures.IndexCalculationDurationInSeconds),
        BgNoiseNeighbourhood = TimeSpan.FromSeconds(5), // default value for content description
        AnalysisStartOffset = inputFileSegment.SegmentStartOffset ?? TimeSpan.Zero,
        MaximumSegmentDuration = analysisSettings.AnalysisMaxSegmentDuration,
        BackgroundFilterCoeff = SpectrogramConstants.BACKGROUND_FILTER_COEFF,
        LongDurationSpectrogramConfig = ldSpectrogramConfig,
    };
    var icdPath = FilenameHelpers.AnalysisResultPath(
        resultsDirectory,
        basename,
        IndexGenerationData.FileNameFragment,
        "json");
    Json.Serialise(icdPath.ToFileInfo(), indexConfigData);

    // gather spectra to form spectrograms. Assume same spectra in all analyzer results
    var dictionaryOfSpectra = spectralIndices.ToTwoDimensionalArray(SpectralIndexValuesForContentDescription.CachedSelectors, TwoDimensionalArray.Rotate90ClockWise);

    // Calculate the index distribution statistics and write to a json file. Also save as png image.
    // The method also returns the index distributions, but we have no use for them here.
    IndexDistributions.WriteSpectralIndexDistributionStatistics(dictionaryOfSpectra, resultsDirectory, basename);

    // Draw ldfc spectrograms and return path to 2maps image.
    string ldfcSpectrogramPath = DrawSpectrogramsFromSpectralIndices(
        ldSpectrogramConfig,
        outputDirectory: resultsDirectory,
        indexGenerationData: indexConfigData,
        basename: basename,
        indexSpectrograms: dictionaryOfSpectra);

    // Gather the content description results into an array of DescriptionResult and then convert to dictionary
    var allContentDescriptionResults = results.Select(x => (DescriptionResult)x.MiscellaneousResults[nameof(DescriptionResult)]);
    var contentDictionary = DataProcessing.ConvertResultsToDictionaryOfArrays(allContentDescriptionResults.ToList());

    // Write the results to a csv file
    var filePath = Path.Combine(resultsDirectory.FullName, "AcousticSignatures.csv");

    // TODO: fix this so it writes header and a column of content description values.
    //Csv.WriteToCsv(new FileInfo(filePath), contentDictionary);
    FileTools.WriteDictionaryAsCsvFile(contentDictionary, filePath);

    // prepare graphical plots of the acoustic signatures.
    var contentPlots = GetPlots(contentDictionary);
    var images = GraphsAndCharts.DrawPlotDistributions(contentPlots);
    var plotsImage = ImageTools.CombineImagesVertically(images);
    plotsImage.Save(Path.Combine(resultsDirectory.FullName, "DistributionsOfContentScores.png"));

    // Attach content description plots to LDFC spectrogram and write to file
    var ldfcSpectrogram = Image.Load<Rgb24>(ldfcSpectrogramPath);
    var image = ContentVisualization.DrawLdfcSpectrogramWithContentScoreTracks(ldfcSpectrogram, contentPlots);
    var path3 = Path.Combine(resultsDirectory.FullName, basename + ".ContentDescription.png");
    image.Save(path3);
}