private void Execute(Arguments arguments)
        {
            Log.Info("Checking required executables can be found");

            // the master audio utility checks for available executables
            var utility = new MasterAudioUtility();

            if (AppConfigHelper.IsMono)
            {
                Type type = Type.GetType("Mono.Runtime");
                if (type != null)
                {
                    MethodInfo displayName = type.GetMethod("GetDisplayName", BindingFlags.NonPublic | BindingFlags.Static);
                    if (displayName != null)
                    {
                        var name = displayName.Invoke(null, null);
                        Log.Info($"Mono version is {name}, we require at least Mono 5.5");
                    }
                    else
                    {
                        Log.Warn("Could not check Mono version");
                    }
                }
            }

            // don't have much more to check at the current time
            Log.Success("Valid environment");
        }
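The check above only logs the Mono display name. Below is a minimal sketch of turning that string into an actual comparison against the stated 5.5 minimum; the parsing regex and variable names are assumptions, not part of the original code:

        // name is the object returned by displayName.Invoke(null, null) above
        var match = System.Text.RegularExpressions.Regex.Match(name.ToString(), @"\d+\.\d+(\.\d+){0,2}");
        if (match.Success && Version.TryParse(match.Value, out var monoVersion))
        {
            if (monoVersion >= new Version(5, 5))
            {
                Log.Info($"Mono {monoVersion} satisfies the minimum of 5.5");
            }
            else
            {
                Log.Warn($"Mono {monoVersion} is older than the required 5.5");
            }
        }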
Example #2
        //########################################################################################################################################################################
        //########################################################################################################################################################################
        //##  STATIC METHODS BELOW ###############################################################################################################################################
        //########################################################################################################################################################################
        //########################################################################################################################################################################

        /// <summary>
        /// TODO - this is a long-winded way to get the file. Need to talk to Mark.
        /// </summary>
        public static AudioRecording GetAudioRecording(FileInfo sourceFile, int resampleRate, string opDir, string opFileName)
        {
            if (!sourceFile.Exists)
            {
                return(null);
            }

            string        opPath            = Path.Combine(opDir, opFileName); //path location/name of extracted recording segment
            IAudioUtility audioUtility      = new MasterAudioUtility();
            var           info              = audioUtility.Info(sourceFile);   // Get duration of the source file
            int           startMilliseconds = 0;
            int           endMilliseconds   = (int)info.Duration.Value.TotalMilliseconds;

            MasterAudioUtility.SegmentToWav(
                sourceFile,
                new FileInfo(opPath),
                new AudioUtilityRequest
            {
                TargetSampleRate = resampleRate,
                OffsetStart      = TimeSpan.FromMilliseconds(startMilliseconds),
                OffsetEnd        = TimeSpan.FromMilliseconds(endMilliseconds),
            });

            return(new AudioRecording(opPath));
        }
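A hypothetical usage sketch for the helper above; the paths, file names and sample rate are illustrative only:

        var source = new FileInfo(@"C:\data\recordings\site1_20120101.wav");
        AudioRecording recording = GetAudioRecording(source, 22050, @"C:\data\output", "site1_resampled.wav");
        if (recording == null)
        {
            // the source file did not exist
        }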
Example #3
        private static string GetNewName(FileInfo file, TimeSpan timezone)
        {
            var fileName     = file.Name;
            var fileLength   = file.Length;
            var lastModified = file.LastWriteTime;
            var mediaType    = MediaTypes.GetMediaType(file.Extension);

            var audioUtility = new MasterAudioUtility();
            var info         = audioUtility.Info(file);
            var duration     = info.Duration.HasValue ? info.Duration.Value : TimeSpan.Zero;

            var recordingStart = lastModified - duration;

            // some tweaking to get nice file names - round the minutes of last mod and duration
            // ticks are in 100-nanosecond intervals

            //var modifiedRecordingStart = lastModified.Round(TimeSpan.FromSeconds(15))
            //                             - duration.Round(TimeSpan.FromSeconds(15));

            //// DateTime rounded = new DateTime(((now.Ticks + 25000000) / 50000000) * 50000000);

            ////var roundedTotalSeconds = Math.Round(mediaFile.RecordingStart.TimeOfDay.TotalSeconds);
            ////var modifiedRecordingStart = mediaFile.RecordingStart.Date.AddSeconds(roundedTotalSeconds);

            var dateWithOffset = new DateTimeOffset(recordingStart, timezone);
            var dateTime       = dateWithOffset.ToUniversalTime().ToString(AppConfigHelper.StandardDateFormatUtc);
            var ext            = fileName.Substring(fileName.LastIndexOf('.') + 1).ToLowerInvariant();

            var prefix = fileName.Substring(0, fileName.LastIndexOf('.'));
            var result = string.Format("{0}_{1}.{2}", prefix, dateTime, ext);

            return(result);
        }
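A hypothetical usage sketch: GetNewName stamps the inferred recording start (last-modified time minus duration), converted to UTC, into the file name. The path and the UTC+10 offset below are illustrative only:

        var file = new FileInfo(@"C:\data\recordings\dawn_chorus.wav");
        // the timezone argument is the recorder's local UTC offset, here assumed to be +10:00
        string newName = GetNewName(file, TimeSpan.FromHours(10));
        // the exact result depends on AppConfigHelper.StandardDateFormatUtc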
        /// <summary>
        /// Prepares the source file: converts/resamples it into the output file as specified by the request.
        /// </summary>
        /// <param name="request">
        ///   The modification request (e.g. target sample rate and offsets).
        /// </param>
        public static AudioUtilityModifiedInfo PrepareFile(FileInfo sourceFile, FileInfo outputFile, AudioUtilityRequest request, DirectoryInfo temporaryFilesDirectory)
        {
            var    audioUtility    = new MasterAudioUtility(temporaryFilesDirectory);
            var    sourceMimeType  = MediaTypes.GetMediaType(sourceFile.Extension);
            var    outputMimeType  = MediaTypes.GetMediaType(outputFile.Extension);
            string outputDirectory = Path.GetDirectoryName(outputFile.FullName);

            if (!Directory.Exists(outputDirectory))
            {
                Directory.CreateDirectory(outputDirectory);
            }

            audioUtility.Modify(
                sourceFile,
                sourceMimeType,
                outputFile,
                outputMimeType,
                request);

            var result = new AudioUtilityModifiedInfo
            {
                TargetInfo = audioUtility.Info(outputFile),
                SourceInfo = audioUtility.Info(sourceFile),
            };

            return(result);
        }
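A hypothetical usage sketch for PrepareFile; the paths, sample rate and offsets are illustrative only (the request fields used here all appear elsewhere in these examples):

        var request = new AudioUtilityRequest
        {
            TargetSampleRate = 22050,
            OffsetStart = TimeSpan.Zero,
            OffsetEnd = TimeSpan.FromMinutes(1),
        };

        AudioUtilityModifiedInfo prepared = PrepareFile(
            new FileInfo(@"C:\data\recordings\site1.mp3"),
            new FileInfo(@"C:\data\output\site1_prepared.wav"),
            request,
            new DirectoryInfo(@"C:\temp"));

        // prepared.TargetInfo and prepared.SourceInfo describe the converted and original files
        Console.WriteLine(prepared.TargetInfo.Duration);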
        private int Execute(Arguments arguments)
        {
            var errors = new List <string>();

            Log.Info("Checking required executables and libraries can be found and loaded");

            // this is an important call used in analyzing long recordings.
            // This call effectively checks whether we can load types and whether files are present (I think)
            try
            {
                AnalysisCoordinator.GetAnalyzers <IAnalyser2>(typeof(MainEntry).Assembly);
            }
            catch (ReflectionTypeLoadException rtlex)
            {
                errors.Add(ExceptionLookup.FormatReflectionTypeLoadException(rtlex, true));
            }

            // master audio utility checks for available executables
            try
            {
                var utility = new MasterAudioUtility();
            }
            catch (Exception ex)
            {
                errors.Add(ex.Message);
            }

            if (MainEntry.CheckForDataAnnotations() is string message)
            {
                errors.Add(message);
            }


            Type type = Type.GetType("Mono.Runtime");

            if (type != null)
            {
                errors.Add($"We no longer use Mono with {Meta.Name}. DO NOT prefix the {Meta.Name} command with `mono`.");
            }


            // don't have much more to check at the current time
            if (errors.Count == 0)
            {
                Log.Success("Valid environment");

                return(ExceptionLookup.Ok);
            }
            else
            {
                foreach (var error in errors)
                {
                    Log.Error(error);
                }

                // not using exception lookup on purpose - its static constructor loads more types
                return(ExceptionLookup.UnhandledExceptionErrorCode);
            }
        }
Example #6
        public static AudioToSonogramResult AnalyseOneRecording(
            FileInfo sourceRecording,
            Dictionary <string, string> configDict,
            TimeSpan localEventStart,
            TimeSpan localEventEnd,
            int minHz,
            int maxHz,
            DirectoryInfo outDirectory)
        {
            // set a threshold for determining energy distribution in call
            // NOTE: value of this threshold depends on whether working with decibel, energy or amplitude values
            const double threshold = 9.0;

            int resampleRate = AppConfigHelper.DefaultTargetSampleRate;

            if (configDict.ContainsKey(AnalysisKeys.ResampleRate))
            {
                resampleRate = int.Parse(configDict[AnalysisKeys.ResampleRate]);
            }

            configDict[ConfigKeys.Recording.Key_RecordingCallName] = sourceRecording.FullName;
            configDict[ConfigKeys.Recording.Key_RecordingFileName] = sourceRecording.Name;

            // 1: GET RECORDING and make temporary copy
            // put temp audio FileSegment in same directory as the required output image.
            var tempAudioSegment = TempFileHelper.NewTempFile(outDirectory, "wav");

            // delete the temp audio file if it already exists.
            if (File.Exists(tempAudioSegment.FullName))
            {
                File.Delete(tempAudioSegment.FullName);
            }

            // This line creates a temporary version of the source file downsampled as per entry in the config file
            MasterAudioUtility.SegmentToWav(sourceRecording, tempAudioSegment, new AudioUtilityRequest()
            {
                TargetSampleRate = resampleRate
            });

            // 2: Generate sonogram image files
            AudioToSonogramResult result = GenerateSpectrogramImages(tempAudioSegment, configDict, outDirectory);

            // 3: GET the SNR statistics
            TimeSpan eventDuration = localEventEnd - localEventStart;

            result.SnrStatistics = SNR.Calculate_SNR_ShortRecording(tempAudioSegment, configDict, localEventStart, eventDuration, minHz, maxHz, threshold);

            // 4: Delete the temp file
            File.Delete(tempAudioSegment.FullName);

            return(result);
        }
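A hypothetical usage sketch: analyse a single call between 10.0 and 12.5 seconds in the 1-4 kHz band. The file path, band limits and resample rate are illustrative only:

        var configDict = new Dictionary<string, string>
        {
            [AnalysisKeys.ResampleRate] = "22050",
        };

        AudioToSonogramResult oneResult = AnalyseOneRecording(
            new FileInfo(@"C:\data\recordings\koala_bellow.wav"),
            configDict,
            TimeSpan.FromSeconds(10.0),
            TimeSpan.FromSeconds(12.5),
            1000,
            4000,
            new DirectoryInfo(@"C:\data\output"));

        // oneResult.SnrStatistics now holds the SNR figures for the selected band and interval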
Example #7
        public static FileInfo CreateTemporaryAudioFile(FileInfo sourceRecording, DirectoryInfo outDir, int resampleRate)
        {
            // put temp FileSegment in same directory as the required output image.
            var tempAudioSegment = new FileInfo(Path.Combine(outDir.FullName, "tempWavFile.wav"));

            // delete the temp audio file if it already exists.
            if (File.Exists(tempAudioSegment.FullName))
            {
                File.Delete(tempAudioSegment.FullName);
            }

            // This line creates a temporary version of the source file downsampled as per entry in the config file
            MasterAudioUtility.SegmentToWav(sourceRecording, tempAudioSegment, new AudioUtilityRequest()
            {
                TargetSampleRate = resampleRate
            });
            return(tempAudioSegment);
        }
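This helper always writes to the fixed name tempWavFile.wav, so two calls sharing the same outDir would overwrite each other. Example #6 above instead uses TempFileHelper.NewTempFile(outDirectory, "wav") to get a unique name; a sketch of the same substitution here (an assumption, not the original code):

        // unique temporary name in the output directory instead of the fixed tempWavFile.wav
        var tempAudioSegment = TempFileHelper.NewTempFile(outDir, "wav");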
Example #8
        public void MasterAudioUtilityCheckRequestValidFailsWhenWavpackIsMissing()
        {
            var utility = new MasterAudioUtility(
                (FfmpegAudioUtility)TestHelper.GetAudioUtilityFfmpeg(),
                null,
                (SoxAudioUtility)TestHelper.GetAudioUtilitySox(),
                (FfmpegRawPcmAudioUtility)TestHelper.GetAudioUtilityFfmpegRawPcm());

            var source = PathHelper.GetTestAudioFile("Raw_audio_id_cd6e8ba1-11b4-4724-9562-f6ec893110aa.wv");

            Assert.ThrowsException <AudioFormatNotSupportedException>(
                () => utility.Info(source),
                "Converting from WavPack is not supported because we cannot find a wvunpack binary.");

            Assert.ThrowsException <AudioFormatNotSupportedException>(
                () => utility.Modify(source, MediaTypes.MediaTypeWavpack, PathHelper.GetTempFile(MediaTypes.ExtWav), MediaTypes.MediaTypeWav, new AudioUtilityRequest()),
                "Converting from WavPack is not supported because we cannot find a wvunpack binary.");
        }
Example #9
        public void MasterAudioUtilityAllowsOptionalSupportForMp3splt()
        {
            // creation should normally fail but MasterAudioUtility was changed so that Mp3Splt was optional
            var utility = new MasterAudioUtility(
                (FfmpegAudioUtility)TestHelper.GetAudioUtilityFfmpeg(),
                null, //(Mp3SpltAudioUtility)TestHelper.GetAudioUtilityMp3Splt(),
                (WavPackAudioUtility)TestHelper.GetAudioUtilityWavunpack(),
                (SoxAudioUtility)TestHelper.GetAudioUtilitySox(),
                (FfmpegRawPcmAudioUtility)TestHelper.GetAudioUtilityFfmpegRawPcm());

            // but it throws an exception if we try to segment an mp3
            TestHelper.ExceptionMatches <NotSupportedException>(
                () => utility.Modify(
                    PathHelper.GetTestAudioFile("Currawongs_curlew_West_Knoll_Bees_20091102-183000.mp3"),
                    MediaTypes.MediaTypeMp3,
                    TempFileHelper.NewTempFile(),
                    MediaTypes.MediaTypeWav,
                    new AudioUtilityRequest()),
                "MP3 conversion not supported because mp3splt utility has not been configured");
        }
Example #10
        /// <summary>
        /// This method extracts a recording segment and saves it to disk at the location fiOutputSegment.
        /// </summary>
        public static void ExtractSegment(FileInfo fiSource, TimeSpan start, TimeSpan end, TimeSpan buffer, int sampleRate, FileInfo fiOutputSegment)
        {
            // EXTRACT RECORDING SEGMENT
            int startMilliseconds = (int)(start.TotalMilliseconds - buffer.TotalMilliseconds);
            int endMilliseconds   = (int)(end.TotalMilliseconds + buffer.TotalMilliseconds);

            if (startMilliseconds < 0)
            {
                startMilliseconds = 0;
            }

            ////if (endMilliseconds <= 0) endMilliseconds = (int)(segmentDuration * 60000) - 1;//no need to worry about end
            MasterAudioUtility.SegmentToWav(
                fiSource,
                fiOutputSegment,
                new AudioUtilityRequest
            {
                TargetSampleRate = sampleRate,
                OffsetStart      = TimeSpan.FromMilliseconds(startMilliseconds),
                OffsetEnd        = TimeSpan.FromMilliseconds(endMilliseconds),
                ////Channel = 2 // set channel number or mixdowntomono=true  BUT NOT BOTH!!!
                ////MixDownToMono  =true
            });
        }
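A hypothetical usage sketch: extract the 30-35 second region with a 2 second buffer either side, resampled to 22050 Hz. Paths and values are illustrative only:

        ExtractSegment(
            new FileInfo(@"C:\data\recordings\site1.wav"),
            TimeSpan.FromSeconds(30),
            TimeSpan.FromSeconds(35),
            TimeSpan.FromSeconds(2),
            22050,
            new FileInfo(@"C:\data\output\site1_30s-35s_segment.wav"));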
Example #11
        public static void Execute(Arguments args)
        {
            if (args == null)
            {
                throw new NoDeveloperMethodException();
            }

            IEnumerable <FileInfo> files = null;

            if (args.InputDirectory != null)
            {
                var shouldRecurse = args.Recurse ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly;
                files = args.InputDirectory.ToDirectoryInfo().EnumerateFiles("*.*", shouldRecurse);
            }
            else
            {
                // read the list of files from the input file, skipping blank lines and duplicates
                files = File.ReadLines(args.InputFile)
                        .Where(l => !string.IsNullOrWhiteSpace(l))
                        .Select(l => l.Trim(' ', '"'))
                        .Distinct()
                        .OrderBy(l => l)
                        .Select(l => new FileInfo(l));
            }

            var mau       = new MasterAudioUtility();
            var stopwatch = new Stopwatch();

            var headers = "\"" + string.Join("\", \"",
                                             "SourceFile",
                                             "SampleRate (hertz)",
                                             "BitsPerSecond",
                                             "BitsPerSample",
                                             "ChannelCount",
                                             "Duration (sec)",
                                             "MediaType",
                                             "FileSize (bytes)",
                                             "SHA256 Hash",
                                             "Identifier") + "\"";

            using (var fs = File.Open(args.OutputFile, FileMode.Create, FileAccess.Write, FileShare.Read))
                using (var sw = new StreamWriter(fs))
                {
                    sw.WriteLine(headers);

                    foreach (var file in files)
                    {
                        try
                        {
                            stopwatch.Restart();
                            var info = mau.Info(file);
                            stopwatch.Stop();

                            var infoTime = stopwatch.Elapsed;

                            stopwatch.Restart();
                            var hash = SHA256Hash(file);
                            stopwatch.Stop();

                            Console.WriteLine("info: {1} hash: {2} for {0}.", file.Name, infoTime, stopwatch.Elapsed);

                            var output = string.Format("{0}, {1}, {2}, {3}, {4}, {5}, {6}, {7}, {8}, {9}",
                                                       CsvSafeString(info.SourceFile != null ? info.SourceFile.ToString() : string.Empty),
                                                       CsvSafeString(info.SampleRate.HasValue ? info.SampleRate.Value.ToString() : string.Empty),
                                                       CsvSafeString(info.BitsPerSecond.HasValue ? info.BitsPerSecond.Value.ToString() : string.Empty),
                                                       CsvSafeString(info.BitsPerSample.HasValue ? info.BitsPerSample.Value.ToString() : string.Empty),
                                                       CsvSafeString(info.ChannelCount.HasValue ? info.ChannelCount.Value.ToString() : string.Empty),
                                                       CsvSafeString(info.Duration.HasValue ? info.Duration.Value.TotalSeconds.ToString() : string.Empty),
                                                       CsvSafeString(info.MediaType),
                                                       CsvSafeString(info.SourceFile.Length.ToString()),
                                                       CsvSafeString(hash),
                                                       GetIdentifierFromPath(info.SourceFile.FullName));

                            sw.WriteLine(output);

                            sw.Flush();
                            fs.Flush();
                        }
                        catch (Exception ex)
                        {
                            if (Log.IsWarnEnabled)
                            {
                                Log.Warn("Error processing " + file, ex);
                            }
                        }
                    }
                }
        }
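The SHA256Hash, CsvSafeString and GetIdentifierFromPath helpers called above are not shown in this excerpt. A minimal sketch of what a SHA256Hash helper could look like (the name matches the call above; the hex formatting is an assumption):

        private static string SHA256Hash(FileInfo file)
        {
            // hash the file contents and return an uppercase hex string without separators
            using (var sha = System.Security.Cryptography.SHA256.Create())
            using (var stream = file.OpenRead())
            {
                byte[] hash = sha.ComputeHash(stream);
                return BitConverter.ToString(hash).Replace("-", string.Empty);
            }
        }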
Example #12
        /// <summary>
        /// 2. Analyses a long audio recording (mp3 or wav) as per the passed config file. Outputs an events.csv file AND an
        /// indices.csv file.
        /// Signed off: Michael Towsey 4th December 2012
        /// </summary>
        public static void Execute(Arguments arguments)
        {
            if (arguments == null)
            {
                throw new NoDeveloperMethodException();
            }

            LoggedConsole.WriteLine("# PROCESS LONG RECORDING");
            LoggedConsole.WriteLine("# DATE AND TIME: " + DateTime.Now);

            // 1. set up the necessary files
            var sourceAudio        = arguments.Source;
            var configFile         = arguments.Config.ToFileInfo();
            var outputDirectory    = arguments.Output;
            var tempFilesDirectory = arguments.TempDir;

            // if a temp dir is not given, use output dir as temp dir
            if (tempFilesDirectory == null)
            {
                Log.Warn("No temporary directory provided, using output directory");
                tempFilesDirectory = outputDirectory;
            }

            // try to automatically find the config file
            if (configFile == null)
            {
                throw new FileNotFoundException("No config file argument provided");
            }
            else if (!configFile.Exists)
            {
                Log.Warn($"Config file {configFile.FullName} not found... attempting to resolve config file");

                // we use .ToString() here to get the original input string - using FullName always produces an absolute path with respect to the pwd... we don't want to prematurely make assumptions:
                // e.g. We require a missing absolute path to fail... that wouldn't work with .Name
                // e.g. We require a relative path to try and resolve, using .FullName would fail the first absolute check inside ResolveConfigFile
                configFile = ConfigFile.Resolve(configFile.ToString(), Directory.GetCurrentDirectory().ToDirectoryInfo());
            }

            if (arguments.StartOffset.HasValue ^ arguments.EndOffset.HasValue)
            {
                throw new InvalidStartOrEndException("If StartOffset or EndOffset is specified, then both must be specified");
            }

            if (arguments.StartOffset.HasValue && arguments.EndOffset.HasValue && arguments.EndOffset.Value <= arguments.StartOffset.Value)
            {
                throw new InvalidStartOrEndException("Start offset must be less than end offset.");
            }

            LoggedConsole.WriteLine("# Recording file:      " + sourceAudio.FullName);
            LoggedConsole.WriteLine("# Configuration file:  " + configFile);
            LoggedConsole.WriteLine("# Output folder:       " + outputDirectory);
            LoggedConsole.WriteLine("# Temp File Directory: " + tempFilesDirectory);

            // optionally copy logs / config to make results easier to understand
            // TODO: remove, see https://github.com/QutEcoacoustics/audio-analysis/issues/133
            if (arguments.WhenExitCopyConfig || arguments.WhenExitCopyLog)
            {
                AppDomain.CurrentDomain.ProcessExit += (sender, args) => { Cleanup(arguments, configFile); };
            }

            // 2. initialize the analyzer
            // we're changing the way resolving config files works. Ideally, we'd like to use statically typed config files
            // but we can't do that unless we know which type we have to load first! Currently the analyzer to load is specified in
            // the config file, so we can't know which analyzer to use. Thus we will change to using the file name,
            // or an argument to resolve the analyzer to load.
            // Get analysis name:
            IAnalyser2 analyzer = FindAndCheckAnalyzer <IAnalyser2>(arguments.AnalysisIdentifier, configFile.Name);

            // 2. get the analysis config
            AnalyzerConfig configuration = analyzer.ParseConfig(configFile);

            SaveBehavior saveIntermediateWavFiles  = configuration.SaveIntermediateWavFiles;
            bool         saveIntermediateDataFiles = configuration.SaveIntermediateCsvFiles;
            SaveBehavior saveSonogramsImages       = configuration.SaveSonogramImages;

            bool filenameDate = configuration.RequireDateInFilename;

            if (configuration[AnalysisKeys.AnalysisName].IsNotWhitespace())
            {
                Log.Warn("Your config file has `AnalysisName` set - this property is deprecated and ignored");
            }

            // AT 2018-02: changed logic so default index properties loaded if not provided
            FileInfo indicesPropertiesConfig = IndexProperties.Find(configuration, configFile);

            if (indicesPropertiesConfig == null || !indicesPropertiesConfig.Exists)
            {
                Log.Warn("IndexProperties config can not be found! Loading a default");
                indicesPropertiesConfig = ConfigFile.Default <Dictionary <string, IndexProperties> >();
            }

            LoggedConsole.WriteLine("# IndexProperties Cfg: " + indicesPropertiesConfig.FullName);

            // min score for an acceptable event
            Log.Info("Minimum event threshold has been set to " + configuration.EventThreshold);

            FileSegment.FileDateBehavior defaultBehavior = FileSegment.FileDateBehavior.Try;
            if (filenameDate)
            {
                if (!FileDateHelpers.FileNameContainsDateTime(sourceAudio.Name))
                {
                    throw new InvalidFileDateException(
                              "When RequireDateInFilename option is set, the filename of the source audio file must contain "
                              + "a valid AND UNAMBIGUOUS date. Such a date was not able to be parsed.");
                }

                defaultBehavior = FileSegment.FileDateBehavior.Required;
            }

            // 3. initialize the AnalysisCoordinator class that will do the analysis
            var analysisCoordinator = new AnalysisCoordinator(
                new LocalSourcePreparer(),
                saveIntermediateWavFiles,
                false,
                arguments.Parallel);

            // 4. get the segment of audio to be analysed
            // if tiling output, specify that FileSegment needs to be able to read the date
            var fileSegment         = new FileSegment(sourceAudio, arguments.AlignToMinute, null, defaultBehavior);
            var bothOffsetsProvided = arguments.StartOffset.HasValue && arguments.EndOffset.HasValue;

            if (bothOffsetsProvided)
            {
                fileSegment.SegmentStartOffset = TimeSpan.FromSeconds(arguments.StartOffset.Value);
                fileSegment.SegmentEndOffset   = TimeSpan.FromSeconds(arguments.EndOffset.Value);
            }
            else
            {
                Log.Debug("Neither start nor end segment offsets provided. Therefore both were ignored.");
            }

            // 6. initialize the analysis settings object
            var analysisSettings = analyzer.DefaultSettings;

            analysisSettings.ConfigFile                = configFile;
            analysisSettings.Configuration             = configuration;
            analysisSettings.AnalysisOutputDirectory   = outputDirectory;
            analysisSettings.AnalysisTempDirectory     = tempFilesDirectory;
            analysisSettings.AnalysisDataSaveBehavior  = saveIntermediateDataFiles;
            analysisSettings.AnalysisImageSaveBehavior = saveSonogramsImages;
            analysisSettings.AnalysisChannelSelection  = arguments.Channels;
            analysisSettings.AnalysisMixDownToMono     = arguments.MixDownToMono;

            var segmentDuration = configuration.SegmentDuration?.Seconds();

            if (!segmentDuration.HasValue)
            {
                segmentDuration = analysisSettings.AnalysisMaxSegmentDuration ?? TimeSpan.FromMinutes(1);
                Log.Warn(
                    $"Can't read `{nameof(AnalyzerConfig.SegmentDuration)}` from config file. "
                    + $"Default value of {segmentDuration} used.");
            }

            analysisSettings.AnalysisMaxSegmentDuration = segmentDuration.Value;

            var segmentOverlap = configuration.SegmentOverlap?.Seconds();

            if (!segmentOverlap.HasValue)
            {
                segmentOverlap = analysisSettings.SegmentOverlapDuration;
                Log.Warn(
                    $"Can't read `{nameof(AnalyzerConfig.SegmentOverlap)}` from config file. "
                    + $"Default value of {segmentOverlap} used.");
            }

            analysisSettings.SegmentOverlapDuration = segmentOverlap.Value;

            // set target sample rate
            var resampleRate = configuration.ResampleRate;

            if (!resampleRate.HasValue)
            {
                resampleRate = analysisSettings.AnalysisTargetSampleRate ?? AppConfigHelper.DefaultTargetSampleRate;
                Log.Warn(
                    $"Can't read {nameof(configuration.ResampleRate)} from config file. "
                    + $"Default value of {resampleRate} used.");
            }

            analysisSettings.AnalysisTargetSampleRate = resampleRate;

            Log.Info(
                $"{nameof(configuration.SegmentDuration)}={segmentDuration}, "
                + $"{nameof(configuration.SegmentOverlap)}={segmentOverlap}, "
                + $"{nameof(configuration.ResampleRate)}={resampleRate}");

            // 7. ####################################### DO THE ANALYSIS ###################################
            LoggedConsole.WriteLine("START ANALYSIS ...");
            var analyserResults = analysisCoordinator.Run(fileSegment, analyzer, analysisSettings);

            // ##############################################################################################
            // 8. PROCESS THE RESULTS
            LoggedConsole.WriteLine(string.Empty);
            LoggedConsole.WriteLine("START PROCESSING RESULTS ...");
            if (analyserResults == null)
            {
                LoggedConsole.WriteErrorLine("###################################################\n");
                LoggedConsole.WriteErrorLine("The Analysis Run Coordinator has returned a null result.");
                LoggedConsole.WriteErrorLine("###################################################\n");
                throw new AnalysisOptionDevilException();
            }

            // Merge and correct main result types
            EventBase[]         mergedEventResults         = ResultsTools.MergeResults(analyserResults, ar => ar.Events, ResultsTools.CorrectEvent);
            SummaryIndexBase[]  mergedIndicesResults       = ResultsTools.MergeResults(analyserResults, ar => ar.SummaryIndices, ResultsTools.CorrectSummaryIndex);
            SpectralIndexBase[] mergedSpectralIndexResults = ResultsTools.MergeResults(analyserResults, ar => ar.SpectralIndices, ResultsTools.CorrectSpectrumIndex);

            // not an exceptional state, do not throw exception
            if (mergedEventResults != null && mergedEventResults.Length == 0)
            {
                LoggedConsole.WriteWarnLine("The analysis produced no EVENTS (mergedResults had zero count)");
            }

            if (mergedIndicesResults != null && mergedIndicesResults.Length == 0)
            {
                LoggedConsole.WriteWarnLine("The analysis produced no Summary INDICES (mergedResults had zero count)");
            }

            if (mergedSpectralIndexResults != null && mergedSpectralIndexResults.Length == 0)
            {
                LoggedConsole.WriteWarnLine("The analysis produced no Spectral INDICES (merged results had zero count)");
            }

            // 9. CREATE SUMMARY INDICES IF NECESSARY (FROM EVENTS)
#if DEBUG
            // get the duration of the original source audio file - need this to convert Events datatable to Indices Datatable
            var audioUtility = new MasterAudioUtility(tempFilesDirectory);
            var mimeType     = MediaTypes.GetMediaType(sourceAudio.Extension);
            var sourceInfo   = audioUtility.Info(sourceAudio);

            // updated by reference all the way down in LocalSourcePreparer
            Debug.Assert(fileSegment.TargetFileDuration == sourceInfo.Duration);
#endif
            var duration = fileSegment.TargetFileDuration.Value;

            ResultsTools.ConvertEventsToIndices(
                analyzer,
                mergedEventResults,
                ref mergedIndicesResults,
                duration,
                configuration.EventThreshold);
            int eventsCount           = mergedEventResults?.Length ?? 0;
            int numberOfRowsOfIndices = mergedIndicesResults?.Length ?? 0;

            // 10. Allow analysers to post-process

            // TODO: remove results directory if possible
            var instanceOutputDirectory =
                AnalysisCoordinator.GetNamedDirectory(analysisSettings.AnalysisOutputDirectory, analyzer);

            // 11. IMPORTANT - this is where IAnalyser2's post processor gets called.
            // Produces all spectrograms and images of SPECTRAL INDICES.
            // Long duration spectrograms are drawn IFF analysis type is Towsey.Acoustic
            analyzer.SummariseResults(analysisSettings, fileSegment, mergedEventResults, mergedIndicesResults, mergedSpectralIndexResults, analyserResults);

            // 12. SAVE THE RESULTS
            string fileNameBase = Path.GetFileNameWithoutExtension(sourceAudio.Name);

            var eventsFile  = ResultsTools.SaveEvents(analyzer, fileNameBase, instanceOutputDirectory, mergedEventResults);
            var indicesFile = ResultsTools.SaveSummaryIndices(analyzer, fileNameBase, instanceOutputDirectory, mergedIndicesResults);
            var spectraFile = ResultsTools.SaveSpectralIndices(analyzer, fileNameBase, instanceOutputDirectory, mergedSpectralIndexResults);

            // 13. THIS IS WHERE SUMMARY INDICES ARE PROCESSED
            //     Convert summary indices to black and white tracks image
            if (mergedIndicesResults == null)
            {
                Log.Info("No summary indices produced");
            }
            else
            {
                if (indicesPropertiesConfig == null || !indicesPropertiesConfig.Exists)
                {
                    throw new InvalidOperationException("Cannot process indices without an index configuration file, the file could not be found!");
                }

                // this is an arbitrary limit on the amount of data we are prepared to render.
                if (mergedIndicesResults.Length > 5000)
                {
                    Log.Warn("Summary Indices Image not able to be drawn - there are too many indices to render");
                }
                else
                {
                    var    basename   = Path.GetFileNameWithoutExtension(fileNameBase);
                    string imageTitle = $"SOURCE:{basename},   {Meta.OrganizationTag};  ";

                    // Draw Tracks-Image of Summary indices
                    // set time scale resolution for drawing of summary index tracks
                    TimeSpan timeScale   = TimeSpan.FromSeconds(0.1);
                    Bitmap   tracksImage =
                        IndexDisplay.DrawImageOfSummaryIndices(
                            IndexProperties.GetIndexProperties(indicesPropertiesConfig),
                            indicesFile,
                            imageTitle,
                            timeScale,
                            fileSegment.TargetFileStartDate);
                    var imagePath = FilenameHelpers.AnalysisResultPath(instanceOutputDirectory, basename, "SummaryIndices", ImageFileExt);
                    tracksImage.Save(imagePath);
                }
            }

            // 14. wrap up, write stats
            LoggedConsole.WriteLine("INDICES CSV file(s) = " + (indicesFile?.Name ?? "<<No indices result, no file!>>"));
            LoggedConsole.WriteLine("\tNumber of rows (i.e. minutes) in CSV file of indices = " + numberOfRowsOfIndices);
            LoggedConsole.WriteLine(string.Empty);

            if (eventsFile == null)
            {
                LoggedConsole.WriteLine("An Events CSV file was NOT returned.");
            }
            else
            {
                LoggedConsole.WriteLine("EVENTS CSV file(s) = " + eventsFile.Name);
                LoggedConsole.WriteLine("\tNumber of events = " + eventsCount);
            }

            Log.Success($"Analysis Complete.\nSource={sourceAudio.Name}\nOutput={instanceOutputDirectory.FullName}");
        }
Example #13
        private int Execute(Arguments arguments)
        {
            var errors = new List <string>();

            Log.Info("Checking required executables and libraries can be found and loaded");

            // this is an important call used in analyzing long recordings.
            // This call effectively checks whether we can load types and whether files are present (I think)
            try
            {
                AnalysisCoordinator.GetAnalyzers <IAnalyser2>(typeof(MainEntry).Assembly);
            }
            catch (ReflectionTypeLoadException rtlex)
            {
                errors.Add(ExceptionLookup.FormatReflectionTypeLoadException(rtlex, true));
            }

            // master audio utility checks for available executables
            try
            {
                var utility = new MasterAudioUtility();
            }
            catch (Exception ex)
            {
                errors.Add(ex.Message);
            }

            if (MainEntry.CheckForDataAnnotations() is string message)
            {
                errors.Add(message);
            }

            if (AppConfigHelper.IsMono)
            {
                Type type = Type.GetType("Mono.Runtime");
                if (type != null)
                {
                    MethodInfo displayName = type.GetMethod("GetDisplayName", BindingFlags.NonPublic | BindingFlags.Static);

                    if (displayName?.Invoke(null, null) is string name)
                    {
                        var version = Regex.Match(name, @".*(\d+\.\d+\.\d+\.\d+).*").Groups[1].Value;
                        Console.WriteLine(version);
                        if (new Version(version) >= new Version(5, 5))
                        {
                            Log.Success($"Your Mono version {name} meets our minimum required Mono version 5.5");
                        }
                        else
                        {
                            errors.Add($"Mono version is {name}, we require at least Mono 5.5");
                        }
                    }
                    else
                    {
                        errors.Add("Could not get Mono display name");
                    }
                }
            }

            // don't have much more to check at the current time
            if (errors.Count == 0)
            {
                Log.Success("Valid environment");

                return(ExceptionLookup.Ok);
            }
            else
            {
                foreach (var error in errors)
                {
                    Log.Error(error);
                }

                // not using exception lookup on purpose - its static constructor loads more types
                return(ExceptionLookup.UnhandledExceptionErrorCode);
            }
        }
Example #14
        public static void Main(Arguments arguments)
        {
            // 1. set up the necessary files
            FileInfo      sourceRecording = arguments.Source;
            FileInfo      configFile      = arguments.Config.ToFileInfo();
            DirectoryInfo opDir           = arguments.Output;

            opDir.Create();

            if (arguments.StartOffset.HasValue ^ arguments.EndOffset.HasValue)
            {
                throw new InvalidStartOrEndException("If StartOffset or EndOffset is specified, then both must be specified");
            }

            var offsetsProvided = arguments.StartOffset.HasValue && arguments.EndOffset.HasValue;

            // set default offsets - only use defaults if not provided in the arguments list
            TimeSpan? startOffset = null;
            TimeSpan? endOffset   = null;

            if (offsetsProvided)
            {
                startOffset = TimeSpan.FromSeconds(arguments.StartOffset.Value);
                endOffset   = TimeSpan.FromSeconds(arguments.EndOffset.Value);
            }

            const string Title = "# MAKE A SONOGRAM FROM AUDIO RECORDING and do OscillationsGeneric activity.";
            string       date  = "# DATE AND TIME: " + DateTime.Now;

            LoggedConsole.WriteLine(Title);
            LoggedConsole.WriteLine(date);
            LoggedConsole.WriteLine("# Input  audio file: " + sourceRecording.Name);

            string sourceName = Path.GetFileNameWithoutExtension(sourceRecording.FullName);

            // 2. get the config dictionary
            Config configuration = ConfigFile.Deserialize(configFile);

            // the three lines below are examples of retrieving info from the Config object
            // string analysisIdentifier = configuration[AnalysisKeys.AnalysisName];
            // bool saveIntermediateWavFiles = (bool?)configuration[AnalysisKeys.SaveIntermediateWavFiles] ?? false;
            // scoreThreshold = (double?)configuration[AnalysisKeys.EventThreshold] ?? scoreThreshold;

            // The resample rate must be 2 x the desired Nyquist frequency. The default is that of the recording.
            var resampleRate = configuration.GetIntOrNull(AnalysisKeys.ResampleRate) ?? AppConfigHelper.DefaultTargetSampleRate;

            var configDict = new Dictionary <string, string>(configuration.ToDictionary());

            // #NOISE REDUCTION PARAMETERS
            //string noisereduce = configDict[ConfigKeys.Mfcc.Key_NoiseReductionType];
            configDict[AnalysisKeys.NoiseDoReduction]   = "false";
            configDict[AnalysisKeys.NoiseReductionType] = "NONE";

            configDict[AnalysisKeys.AddAxes] = configuration[AnalysisKeys.AddAxes] ?? "true";
            configDict[AnalysisKeys.AddSegmentationTrack] = configuration[AnalysisKeys.AddSegmentationTrack] ?? "true";

            configDict[ConfigKeys.Recording.Key_RecordingCallName] = sourceRecording.FullName;
            configDict[ConfigKeys.Recording.Key_RecordingFileName] = sourceRecording.Name;

            configDict[AnalysisKeys.AddTimeScale]         = configuration[AnalysisKeys.AddTimeScale] ?? "true";
            configDict[AnalysisKeys.AddAxes]              = configuration[AnalysisKeys.AddAxes] ?? "true";
            configDict[AnalysisKeys.AddSegmentationTrack] = configuration[AnalysisKeys.AddSegmentationTrack] ?? "true";

            // ####################################################################

            // print out the sonogram parameters
            LoggedConsole.WriteLine("\nPARAMETERS");
            foreach (KeyValuePair <string, string> kvp in configDict)
            {
                LoggedConsole.WriteLine("{0}  =  {1}", kvp.Key, kvp.Value);
            }

            LoggedConsole.WriteLine("Sample Length for detecting oscillations = {0}", SampleLength);

            // 3: GET RECORDING
            FileInfo tempAudioSegment = new FileInfo(Path.Combine(opDir.FullName, "tempWavFile.wav"));

            // delete the temp audio file if it already exists.
            if (File.Exists(tempAudioSegment.FullName))
            {
                File.Delete(tempAudioSegment.FullName);
            }

            // This line creates a temporary version of the source file downsampled as per entry in the config file
            MasterAudioUtility.SegmentToWav(sourceRecording, tempAudioSegment, new AudioUtilityRequest()
            {
                TargetSampleRate = resampleRate
            });

            // 1) get amplitude spectrogram
            AudioRecording recordingSegment = new AudioRecording(tempAudioSegment.FullName);
            SonogramConfig sonoConfig       = new SonogramConfig(configDict); // default values config
            BaseSonogram   sonogram         = new AmplitudeSonogram(sonoConfig, recordingSegment.WavReader);

            Console.WriteLine("FramesPerSecond = {0}", sonogram.FramesPerSecond);

            // remove the DC bin
            sonogram.Data = MatrixTools.Submatrix(sonogram.Data, 0, 1, sonogram.FrameCount - 1, sonogram.Configuration.FreqBinCount);

            // ###############################################################
            // DO LocalContrastNormalisation
            //int fieldSize = 9;
            //sonogram.Data = LocalContrastNormalisation.ComputeLCN(sonogram.Data, fieldSize);
            // LocalContrastNormalisation over frequency bins is better and faster.
            int    neighbourhood = 15;
            double contrastLevel = 0.5;

            sonogram.Data = NoiseRemoval_Briggs.NoiseReduction_byLCNDivision(sonogram.Data, neighbourhood, contrastLevel);

            // ###############################################################
            // lowering the sensitivity threshold increases the number of hits.
            if (configDict.ContainsKey(AnalysisKeys.OscilDetection2014SensitivityThreshold))
            {
                Oscillations2014.DefaultSensitivityThreshold = double.Parse(configDict[AnalysisKeys.OscilDetection2014SensitivityThreshold]);
            }

            if (configDict.ContainsKey(AnalysisKeys.OscilDetection2014SampleLength))
            {
                Oscillations2014.DefaultSampleLength = int.Parse(configDict[AnalysisKeys.OscilDetection2014SampleLength]);
            }

            var list1 = new List <Image>();

            //var result = Oscillations2014.GetFreqVsOscillationsDataAndImage(sonogram, 64, "Autocorr-FFT");
            //list1.Add(result.FreqOscillationImage);
            var result = Oscillations2014.GetFreqVsOscillationsDataAndImage(sonogram, "Autocorr-FFT");

            list1.Add(result.FreqOscillationImage);
            result = Oscillations2014.GetFreqVsOscillationsDataAndImage(sonogram, "Autocorr-SVD-FFT");
            list1.Add(result.FreqOscillationImage);
            result = Oscillations2014.GetFreqVsOscillationsDataAndImage(sonogram, "Autocorr-WPD");
            list1.Add(result.FreqOscillationImage);
            Image compositeOscImage1 = ImageTools.CombineImagesInLine(list1.ToArray());

            // ###############################################################

            // init the sonogram image stack
            var sonogramList = new List <Image>();
            var image        = sonogram.GetImageFullyAnnotated("AMPLITUDE SPECTROGRAM");

            sonogramList.Add(image);

            //string testPath = @"C:\SensorNetworks\Output\Sonograms\amplitudeSonogram.png";
            //image.Save(testPath, ImageFormat.Png);

            Image envelopeImage = ImageTrack.DrawWaveEnvelopeTrack(recordingSegment, image.Width);

            sonogramList.Add(envelopeImage);

            // 2) now draw the standard decibel spectrogram
            sonogram = new SpectrogramStandard(sonoConfig, recordingSegment.WavReader);

            // ###############################################################
            list1 = new List <Image>();

            //result = Oscillations2014.GetFreqVsOscillationsDataAndImage(sonogram, 64, "Autocorr-FFT");
            //list1.Add(result.FreqOscillationImage);
            result = Oscillations2014.GetFreqVsOscillationsDataAndImage(sonogram, "Autocorr-FFT");
            list1.Add(result.FreqOscillationImage);
            result = Oscillations2014.GetFreqVsOscillationsDataAndImage(sonogram, "Autocorr-SVD-FFT");
            list1.Add(result.FreqOscillationImage);
            result = Oscillations2014.GetFreqVsOscillationsDataAndImage(sonogram, "Autocorr-WPD");
            list1.Add(result.FreqOscillationImage);
            Image compositeOscImage2 = ImageTools.CombineImagesInLine(list1.ToArray());

            // ###############################################################
            //image = sonogram.GetImageFullyAnnotated("DECIBEL SPECTROGRAM");
            //list.Add(image);

            // combine the two composite oscillation images vertically
            list1 = new List <Image>();
            list1.Add(compositeOscImage1);
            list1.Add(compositeOscImage2);
            Image  compositeOscImage3 = ImageTools.CombineImagesVertically(list1.ToArray());
            string imagePath3         = Path.Combine(opDir.FullName, sourceName + "_freqOscilMatrix.png");

            compositeOscImage3.Save(imagePath3, ImageFormat.Png);

            Image segmentationImage = ImageTrack.DrawSegmentationTrack(
                sonogram,
                EndpointDetectionConfiguration.K1Threshold,
                EndpointDetectionConfiguration.K2Threshold,
                image.Width);

            sonogramList.Add(segmentationImage);

            // 3) now draw the noise reduced decibel spectrogram
            sonoConfig.NoiseReductionType      = NoiseReductionType.Standard;
            sonoConfig.NoiseReductionParameter = configuration.GetDoubleOrNull(AnalysisKeys.NoiseBgThreshold) ?? 3.0;

            sonogram = new SpectrogramStandard(sonoConfig, recordingSegment.WavReader);
            image    = sonogram.GetImageFullyAnnotated("NOISE-REDUCED DECIBEL  SPECTROGRAM");
            sonogramList.Add(image);

            // ###############################################################
            // deriving the oscillation graph from this noise-reduced spectrogram did not work well
            //Oscillations2014.SaveFreqVsOscillationsDataAndImage(sonogram, sampleLength, algorithmName, opDir);
            // ###############################################################

            Image  compositeSonogram = ImageTools.CombineImagesVertically(sonogramList);
            string imagePath2        = Path.Combine(opDir.FullName, sourceName + ".png");

            compositeSonogram.Save(imagePath2, ImageFormat.Png);

            LoggedConsole.WriteLine("\n##### FINISHED FILE ###################################################\n");
        }