Example #1
        protected AnalyzerConfigIndexProperties()
        {
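            // When this config finishes loading, find its associated index-properties file,
            // record the path, and deserialize it into a typed IndexPropertiesCollection.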
            void OnLoaded(IConfig config)
            {
                var indicesPropertiesConfig = Indices.IndexProperties.Find(this, this.ConfigPath);

                this.IndexPropertiesConfig = indicesPropertiesConfig.Path.ToOsPath();
                this.IndexProperties       = ConfigFile.Deserialize <IndexPropertiesCollection>(this.IndexPropertiesConfig);
            }

            this.Loaded += OnLoaded;
        }
Example #2
        public SpectrogramZoomingConfig()
        {
            this.Loaded += config =>
            {
                // search
                var indicesPropertiesConfig = Indices.IndexProperties.Find(this, this.ConfigPath);
                this.IndexPropertiesConfig = indicesPropertiesConfig.Path.ToOsPath();

                // load
                this.IndexProperties = ConfigFile.Deserialize <IndexPropertiesCollection>(this.IndexPropertiesConfig);
            };
        }
Example #3
        public void TestOfSummaryIndices()
        {
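            // Deserializes Towsey.Acoustic.yml into an IndexCalculateConfig, runs IndexCalculate.Analysis
            // on the BAC2 test recording, and checks each summary index against a known value (within AllowedDelta).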
            var sourceRecording       = PathHelper.ResolveAsset(@"Recordings\BAC2_20071008-085040.wav");
            var configFile            = PathHelper.ResolveConfigFile(@"Towsey.Acoustic.yml");
            var indexPropertiesConfig = PathHelper.ResolveConfigFile(@"IndexPropertiesConfig.yml");

            // var outputDir = this.outputDirectory;
            // Create temp directory to store output
            if (!this.outputDirectory.Exists)
            {
                this.outputDirectory.Create();
            }

            var indexCalculateConfig = ConfigFile.Deserialize <IndexCalculateConfig>(configFile);

            // CHANGE CONFIG PARAMETERS HERE IF REQUIRED
            //indexCalculateConfig.IndexCalculationDuration = TimeSpan.FromSeconds(20);
            //indexCalculateConfig.SetTypeOfFreqScale("Octave");

            var results = IndexCalculate.Analysis(
                new AudioRecording(sourceRecording),
                TimeSpan.Zero,
                indexCalculateConfig.IndexProperties,
                22050,
                TimeSpan.Zero,
                indexCalculateConfig,
                returnSonogramInfo: true);

            var summaryIndices = results.SummaryIndexValues;

            Assert.AreEqual(0.6793287, summaryIndices.AcousticComplexity, AllowedDelta);
            Assert.AreEqual(0.484520, summaryIndices.Activity, AllowedDelta);
            Assert.AreEqual(0.000000, summaryIndices.AvgEntropySpectrum, AllowedDelta);
            Assert.AreEqual(-30.946519, summaryIndices.AvgSignalAmplitude, AllowedDelta);
            Assert.AreEqual(11.533420, summaryIndices.AvgSnrOfActiveFrames, AllowedDelta);
            Assert.AreEqual(-39.740775, summaryIndices.BackgroundNoise, AllowedDelta);
            Assert.AreEqual(21, summaryIndices.ClusterCount);
            Assert.AreEqual(0.153191, summaryIndices.EntropyOfAverageSpectrum, AllowedDelta);
            Assert.AreEqual(0.301929, summaryIndices.EntropyOfCoVSpectrum, AllowedDelta);
            Assert.AreEqual(0.260999, summaryIndices.EntropyOfPeaksSpectrum, AllowedDelta);
            Assert.AreEqual(0.522080, summaryIndices.EntropyOfVarianceSpectrum, AllowedDelta);
            Assert.AreEqual(0.0, summaryIndices.EntropyPeaks, AllowedDelta);
            Assert.AreEqual(2.0, summaryIndices.EventsPerSecond, AllowedDelta);
            Assert.AreEqual(0.140306, summaryIndices.HighFreqCover, AllowedDelta);
            Assert.AreEqual(0.137873, summaryIndices.MidFreqCover, AllowedDelta);
            Assert.AreEqual(0.055341, summaryIndices.LowFreqCover, AllowedDelta);
            Assert.AreEqual(0.957433, summaryIndices.Ndsi, AllowedDelta);
            Assert.AreEqual(27.877206, summaryIndices.Snr, AllowedDelta);
            Assert.AreEqual(6.240310, summaryIndices.SptDensity, AllowedDelta);
            Assert.AreEqual(0, summaryIndices.ResultStartSeconds);
            Assert.AreEqual(0.162216, summaryIndices.TemporalEntropy, AllowedDelta);
            Assert.AreEqual(401, summaryIndices.ThreeGramCount, AllowedDelta);
        }
Example #4
        protected AnalyzerConfigIndexProperties()
        {
            void OnLoaded(IConfig config)
            {
                var indicesPropertiesConfig = Indices.IndexProperties.Find(this);

                this.IndexPropertiesConfig = indicesPropertiesConfig.FullName;
                this.IndexProperties       = ConfigFile.Deserialize <IndexPropertiesCollection>(this.IndexPropertiesConfig);
            }

            this.Loaded += OnLoaded;
        }
Example #5
        public void TestAudio2Sonogram()
        {
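            // Generates every configured spectrogram visualization for the curlew.wav test asset and checks
            // that the composite image height equals the sum of the individual image heights
            // (the "All" map presumably holds the per-image heights).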
            var testFile   = PathHelper.ResolveAsset("curlew.wav");
            var configFile = PathHelper.ResolveConfigFile("Towsey.SpectrogramGenerator.yml");
            var config     = ConfigFile.Deserialize <SpectrogramGeneratorConfig>(configFile);

            var result = GenerateSpectrogramImages(testFile, config, null);

            this.ActualImage = result.CompositeImage;

            // by default all visualizations are enabled
            Assert.That.ImageIsSize(Width, All.Sum(x => x.Value), result.CompositeImage);
        }
Example #6
        public void TestOfSpectralIndices_ICD20()
        {
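            // Same pipeline as the summary-index test, but with the index calculation duration set to
            // 20 seconds and a 40-second start offset; the BGN and CVR spectral index vectors are
            // compared against serialized expected values and an image of all spectral indices is saved.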
            //var indexPropertiesConfig = PathHelper.ResolveConfigFile(@"IndexPropertiesConfig.yml");
            var sourceRecording = PathHelper.ResolveAsset(@"Recordings\BAC2_20071008-085040.wav");
            var configFile      = PathHelper.ResolveConfigFile(@"Towsey.Acoustic.yml");

            // var outputDir = this.outputDirectory;
            // Create temp directory to store output
            if (!this.outputDirectory.Exists)
            {
                this.outputDirectory.Create();
            }

            var recording = new AudioRecording(sourceRecording);

            // CHANGE CONFIG PARAMETERS HERE IF REQUIRED
            var indexCalculateConfig = ConfigFile.Deserialize <IndexCalculateConfig>(configFile);

            indexCalculateConfig.IndexCalculationDurationTimeSpan = TimeSpan.FromSeconds(20);

            var results = IndexCalculate.Analysis(
                recording,
                TimeSpan.FromSeconds(40), // assume that this is the third of three 20 second subsegments
                indexCalculateConfig.IndexProperties,
                22050,
                TimeSpan.Zero,
                indexCalculateConfig,
                returnSonogramInfo: true);

            var spectralIndices = results.SpectralIndexValues;

            // TEST the SPECTRAL INDICES
            var resourcesDir         = PathHelper.ResolveAssetPath("Indices");
            var expectedSpectrumFile = new FileInfo(resourcesDir + "\\BGN_ICD20.bin");

            //Binary.Serialize(expectedSpectrumFile, spectralIndices.BGN);
            var expectedVector = Binary.Deserialize <double[]>(expectedSpectrumFile);

            CollectionAssert.That.AreEqual(expectedVector, spectralIndices.BGN, AllowedDelta);

            expectedSpectrumFile = new FileInfo(resourcesDir + "\\CVR_ICD20.bin");

            //Binary.Serialize(expectedSpectrumFile, spectralIndices.CVR);
            expectedVector = Binary.Deserialize <double[]>(expectedSpectrumFile);
            CollectionAssert.That.AreEqual(expectedVector, spectralIndices.CVR, AllowedDelta);

            var outputImagePath1 = Path.Combine(this.outputDirectory.FullName, "SpectralIndices_ICD20.png");
            var image            = SpectralIndexValues.CreateImageOfSpectralIndices(spectralIndices);

            image.Save(outputImagePath1);
        }
Example #7
        /// <inheritdoc />
        public override AnalyzerConfig ParseConfig(FileInfo file)
        {
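            // RunClassConstructor forces the static constructor of GenericRecognizerConfig to execute first,
            // presumably so that any type/profile registrations it performs are in place before deserialization.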
            RuntimeHelpers.RunClassConstructor(typeof(GenericRecognizerConfig).TypeHandle);
            var result = ConfigFile.Deserialize <GenericRecognizerConfig>(file);

            // validation of configs can be done here
            // sanity check the algorithm
            string algorithmName;

            foreach (var(profileName, profile) in result.Profiles)
            {
                if (profile is CommonParameters c)
                {
                    c.MinHertz.ConfigNotNull(nameof(c.MinHertz), file);
                    c.MaxHertz.ConfigNotNull(nameof(c.MaxHertz), file);
                }

                switch (profile)
                {
                case BlobParameters _:
                    algorithmName = "Blob";
                    break;

                case OscillationParameters _:
                    algorithmName = "Oscillation";
                    break;

                case WhistleParameters _:
                    algorithmName = "Whistle";
                    break;

                case HarmonicParameters _:
                    algorithmName = "Harmonics";
                    throw new NotImplementedException("The harmonic algorithm has not been implemented yet");

                case Aed.AedConfiguration _:
                    algorithmName = "AED";
                    break;

                default:
                    var allowedAlgorithms =
                        $"{nameof(BlobParameters)}, {nameof(OscillationParameters)}, {nameof(WhistleParameters)}, {nameof(HarmonicParameters)}, {nameof(Aed.AedConfiguration)}";
                    throw new ConfigFileException($"The algorithm type in profile {profileName} is not recognized. It must be one of {allowedAlgorithms}");
                }
            }

            return(result);
        }
Example #8
        private static Dictionary <string, string> GetConfiguration(FileInfo configFile)
        {
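            // Reads the file as an untyped Config and flattens it into a string dictionary with defaults
            // for the image-decoration keys; note that the later indexer assignments for AddAxes and
            // AddSegmentationTrack overwrite the boolean-derived values set just above them.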
            var configuration = ConfigFile.Deserialize(configFile);

            var configDict = new Dictionary <string, string>(configuration.ToDictionary())
            {
                [AnalysisKeys.AddAxes] = (configuration.GetBoolOrNull(AnalysisKeys.AddAxes) ?? true).ToString(),
                [AnalysisKeys.AddSegmentationTrack] = (configuration.GetBoolOrNull(AnalysisKeys.AddSegmentationTrack) ?? true).ToString(),
                [AnalysisKeys.AddTimeScale]         = configuration[AnalysisKeys.AddTimeScale] ?? "true",
                [AnalysisKeys.AddAxes] = configuration[AnalysisKeys.AddAxes] ?? "true",
                [AnalysisKeys.AddSegmentationTrack] = configuration[AnalysisKeys.AddSegmentationTrack] ?? "true",
            };

            return(configDict);
        }
Example #9
        public void SupportForDeserializing()
        {
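            // Typed deserialization: the YAML file becomes an AcousticIndicesConfig with strongly typed
            // members, and its IndexProperties are expected to be auto-loaded.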
            var file = ConfigFile.Resolve("Towsey.Acoustic.yml");

            // this mainly tests if the machinery works
            var configuration = ConfigFile.Deserialize <AcousticIndices.AcousticIndicesConfig>(file);

            // we don't care so much about the value
            Assert.IsTrue(configuration.IndexCalculationDuration > 0);
            Assert.IsNotNull(configuration.ConfigPath);
            Assert.That.FileExists(configuration.ConfigPath);

            // the type should autoload indexproperties
            Assert.IsNotNull(configuration.IndexProperties);
        }
Example #10
        public void SupportForUntypedDeserializing()
        {
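            // Untyped deserialization: the same file is read into a plain Config and values are fetched
            // by key; no derived config type is involved.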
            var file = ConfigFile.Resolve("Towsey.Acoustic.yml");

            // this mainly tests if the machinery works
            var configuration = ConfigFile.Deserialize(file);

            // we don't care so much about the value
            Assert.IsTrue(configuration.GetDouble("IndexCalculationDuration") > 0);
            Assert.IsNotNull(configuration.ConfigPath);
            Assert.That.FileExists(configuration.ConfigPath);

            // we should not be dealing with any sub-types
            Assert.IsInstanceOfType(configuration, typeof(Config));
        }
Example #11
        public void TestChromelessImage()
        {
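            // Fills six index spectrogram matrices (one per colour-map key) with each index's default value,
            // computes their distribution statistics, and draws the false-colour spectrograms without chrome,
            // expecting each output image to be a bare 60 x 256, entirely black image.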
            var indexPropertiesFile = ConfigFile.Default <IndexPropertiesCollection>();
            var indexProperties     = ConfigFile.Deserialize <IndexPropertiesCollection>(indexPropertiesFile);

            var indexSpectrograms = new Dictionary <string, double[, ]>(6);
            var indexStatistics   = new Dictionary <string, IndexDistributions.SpectralStats>();
            var keys = (LDSpectrogramRGB.DefaultColorMap1 + "-" + LDSpectrogramRGB.DefaultColorMap2).Split('-');

            foreach (var key in keys)
            {
                var matrix = new double[256, 60].Fill(indexProperties[key].DefaultValue);
                indexSpectrograms.Add(key, matrix);
                double[] array = DataTools.Matrix2Array(matrix);
                indexStatistics.Add(key, IndexDistributions.GetModeAndOneTailedStandardDeviation(array, 300, IndexDistributions.UpperPercentileDefault));
            }

            var images = LDSpectrogramRGB.DrawSpectrogramsFromSpectralIndices(
                inputDirectory: null,
                outputDirectory: this.outputDirectory,
                ldSpectrogramConfig: new LdSpectrogramConfig(),
                indexPropertiesConfigPath: indexPropertiesFile,
                indexGenerationData: new IndexGenerationData()
            {
                AnalysisStartOffset      = 0.Seconds(),
                FrameLength              = 512,
                IndexCalculationDuration = 60.0.Seconds(),
                RecordingBasename        = "RGB_TEST",
                RecordingDuration        = 60.0.Seconds(),
                SampleRateResampled      = 22050,
            },
                basename: "RGB_TEST",
                analysisType: AcousticIndices.AnalysisName,
                indexSpectrograms: indexSpectrograms,
                summaryIndices: Enumerable
                .Range(0, 60)
                .Select((x) => new SummaryIndexValues(60.0.Seconds(), indexProperties))
                .Cast <SummaryIndexBase>()
                .ToArray(),
                indexStatistics: indexStatistics,
                imageChrome: ImageChrome.Without);

            foreach (var(image, key) in images)
            {
                Assert.That.ImageIsSize(60, 256, image);
                Assert.That.ImageRegionIsColor(Rectangle.FromLTRB(0, 0, 60, 256), Color.Black, (Bitmap)image);
            }
        }
Example #12
        private static Dictionary <string, string> GetConfigDictionary(FileInfo configFile, bool writeParameters)
        {
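            // Like GetConfiguration above, but also fills in sonogram-related defaults and, when
            // writeParameters is true, echoes every key/value pair to the console.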
            Config configuration = ConfigFile.Deserialize(configFile);

            // var configDict = new Dictionary<string, string>((Dictionary<string, string>)configuration);
            var configDict = new Dictionary <string, string>(configuration.ToDictionary())
            {
                // below three lines are examples of retrieving info from Config config
                // string analysisIdentifier = configuration[AnalysisKeys.AnalysisName];
                // bool saveIntermediateWavFiles = (bool?)configuration[AnalysisKeys.SaveIntermediateWavFiles] ?? false;
                // scoreThreshold = (double?)configuration[AnalysisKeys.EventThreshold] ?? scoreThreshold;

                // Resample rate must be 2 X the desired Nyquist.
                // WARNING: Default used to be the SR of the recording. NOW DEFAULT = 22050.
                [AnalysisKeys.ResampleRate] = configuration[AnalysisKeys.ResampleRate] ?? "22050",

                [AnalysisKeys.AddAxes] = (configuration.GetBoolOrNull(AnalysisKeys.AddAxes) ?? true).ToString(),
                [AnalysisKeys.AddSegmentationTrack] = (configuration.GetBoolOrNull(AnalysisKeys.AddSegmentationTrack) ?? true).ToString(),
            };

            // # REDUCTION FACTORS for freq and time dimensions
            // #TimeReductionFactor: 1
            // #FreqReductionFactor: 1

            bool makeSoxSonogram = configuration.GetBoolOrNull(AnalysisKeys.MakeSoxSonogram) ?? false;

            configDict[AnalysisKeys.SonogramTitle]        = configuration[AnalysisKeys.SonogramTitle] ?? "Sonogram";
            configDict[AnalysisKeys.SonogramComment]      = configuration[AnalysisKeys.SonogramComment] ?? "Sonogram produced using SOX";
            configDict[AnalysisKeys.SonogramColored]      = configuration[AnalysisKeys.SonogramColored] ?? "false";
            configDict[AnalysisKeys.SonogramQuantisation] = configuration[AnalysisKeys.SonogramQuantisation] ?? "128";
            configDict[AnalysisKeys.AddTimeScale]         = configuration[AnalysisKeys.AddTimeScale] ?? "true";
            configDict[AnalysisKeys.AddAxes] = configuration[AnalysisKeys.AddAxes] ?? "true";
            configDict[AnalysisKeys.AddSegmentationTrack] = configuration[AnalysisKeys.AddSegmentationTrack] ?? "true";

            if (!writeParameters)
            {
                return(configDict);
            }

            // print out the sonogram parameters
            LoggedConsole.WriteLine("\nPARAMETERS");
            foreach (KeyValuePair <string, string> kvp in configDict)
            {
                LoggedConsole.WriteLine("{0}  =  {1}", kvp.Key, kvp.Value);
            }

            return(configDict);
        }
Example #13
        public static void Main(Arguments arguments)
        {
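            // Command-line entry point: deserializes an AnalyzerConfig, makes a resampled temporary copy of
            // the source recording, generates the composite spectrogram image and saves it as
            // "<name>.Spectrograms.png" in the output directory.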
            // 1. set up the necessary files
            var           sourceRecording = arguments.Source;
            var           configInfo      = ConfigFile.Deserialize <AnalyzerConfig>(arguments.Config.ToFileInfo());
            DirectoryInfo output          = arguments.Output;

            if (!output.Exists)
            {
                output.Create();
            }

            //if (arguments.StartOffset.HasValue ^ arguments.EndOffset.HasValue)
            //{
            //    throw new InvalidStartOrEndException("If StartOffset or EndOffset is specified, then both must be specified");
            //}
            // set default offsets - only use defaults if not provided in arguments list
            // var offsetsProvided = arguments.StartOffset.HasValue && arguments.EndOffset.HasValue;
            //TimeSpan? startOffset;
            //TimeSpan? endOffset;
            //if (offsetsProvided)
            //{
            //    startOffset = TimeSpan.FromSeconds(arguments.StartOffset.Value);
            //    endOffset = TimeSpan.FromSeconds(arguments.EndOffset.Value);
            //}

            const string title = "# MAKE MULTIPLE SONOGRAMS FROM AUDIO RECORDING";
            string       date  = "# DATE AND TIME: " + DateTime.Now;

            LoggedConsole.WriteLine(title);
            LoggedConsole.WriteLine(date);
            LoggedConsole.WriteLine("# Input  audio file: " + sourceRecording.Name);

            // 3: CREATE A TEMPORARY RECORDING
            int resampleRate     = configInfo.GetIntOrNull("ResampleRate") ?? 22050;
            var tempAudioSegment = AudioRecording.CreateTemporaryAudioFile(sourceRecording, output, resampleRate);

            // 4: GENERATE SPECTROGRAM images
            //string sourceName = sourceRecording.FullName;
            string sourceName = Path.GetFileNameWithoutExtension(sourceRecording.FullName);
            var    result     = GenerateSpectrogramImages(tempAudioSegment, configInfo, sourceName);

            // 5: Save the image
            var outputImageFile = new FileInfo(Path.Combine(output.FullName, sourceName + ".Spectrograms.png"));

            result.CompositeImage.Save(outputImageFile.FullName, ImageFormat.Png);
        }
Example #14
        public void TheDeserializeMethodsCachesConfigReads()
        {
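            // Verifies that Deserialize caches config reads: the first typed read logs two "typed" loads
            // (the config plus its index-properties file); later typed and untyped reads of the same file
            // add no further load messages, yet every call still returns a distinct object.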
            void AssertMessageCount(int typedCount, int untypedCount)
            {
                var messages = memoryAppender.GetEvents();

                Assert.AreEqual(typedCount, messages.Count(x => x.RenderedMessage.Contains(" typed ")));
                Assert.AreEqual(untypedCount, messages.Count(x => x.RenderedMessage.Contains(" untyped ")));
            }

            TestSetup.TestLogging.ModifyVerbosity(Level.All, quietConsole: true);

            // this should be a fresh read
            var configuration1 = ConfigFile.Deserialize <AcousticIndices.AcousticIndicesConfig>(knownConfigFile);

            // index properties should get loaded as well
            AssertMessageCount(2, 0);

            // but not this, it was already read as a "typed" variant
            var configuration2 = ConfigFile.Deserialize(knownConfigFile);

            AssertMessageCount(2, 0);

            // this should be pulled from the cache
            var configuration3 = ConfigFile.Deserialize <AcousticIndices.AcousticIndicesConfig>(knownConfigFile);

            AssertMessageCount(2, 0);

            // so should this
            var configuration4 = ConfigFile.Deserialize(knownConfigFile);

            AssertMessageCount(2, 0);

            // none of them should be the same object
            Assert.AreNotSame(configuration1, configuration2);
            Assert.AreNotSame(configuration1, configuration3);
            Assert.AreNotSame(configuration3, configuration4);
            Assert.AreNotSame(configuration2, configuration4);

            // they all should have values
            Assert.AreEqual(60.0, configuration1.IndexCalculationDuration);
            Assert.AreEqual(60.0, configuration2.GetDouble(nameof(AcousticIndices.AcousticIndicesConfig.IndexCalculationDuration)));
            Assert.AreEqual(60.0, configuration3.IndexCalculationDuration);
            Assert.AreEqual(60.0, configuration4.GetDouble(nameof(AcousticIndices.AcousticIndicesConfig.IndexCalculationDuration)));

            TestSetup.TestLogging.ModifyVerbosity(Level.Info, quietConsole: true);
        }
Example #15
        public AnalysisResult2 Analyze <T>(AnalysisSettings analysisSettings, SegmentSettings <T> segmentSettings)
        {
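            // A per-segment analyzer that produces no events: it reads the untyped config, generates the
            // four-spectrogram image for the segment, and optionally writes the decibel spectrogram as CSV.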
            var audioFile       = segmentSettings.SegmentAudioFile;
            var recording       = new AudioRecording(audioFile.FullName);
            var outputDirectory = segmentSettings.SegmentOutputDirectory;

            var    analysisResult = new AnalysisResult2(analysisSettings, segmentSettings, recording.Duration);
            Config configuration  = ConfigFile.Deserialize(analysisSettings.ConfigFile);

            bool saveCsv = analysisSettings.AnalysisDataSaveBehavior;

            if (configuration.GetBool(AnalysisKeys.MakeSoxSonogram))
            {
                Log.Warn("SoX spectrogram generation config variable found (and set to true) but is ignored when running as an IAnalyzer");
            }

            // generate spectrogram
            var configurationDictionary = new Dictionary <string, string>(configuration.ToDictionary());

            configurationDictionary[ConfigKeys.Recording.Key_RecordingCallName] = audioFile.FullName;
            configurationDictionary[ConfigKeys.Recording.Key_RecordingFileName] = audioFile.Name;
            var soxImage = new FileInfo(Path.Combine(segmentSettings.SegmentOutputDirectory.FullName, audioFile.Name + ".SOX.png"));

            var spectrogramResult = Audio2Sonogram.GenerateFourSpectrogramImages(
                audioFile,
                soxImage,
                configurationDictionary,
                dataOnly: analysisSettings.AnalysisImageSaveBehavior.ShouldSave(analysisResult.Events.Length),
                makeSoxSonogram: false);

            // this analysis produces no results!
            // but we still print images (that is the point)
            if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave(analysisResult.Events.Length))
            {
                Debug.Assert(segmentSettings.SegmentImageFile.Exists);
            }

            if (saveCsv)
            {
                var basename           = Path.GetFileNameWithoutExtension(segmentSettings.SegmentAudioFile.Name);
                var spectrogramCsvFile = outputDirectory.CombineFile(basename + ".Spectrogram.csv");
                Csv.WriteMatrixToCsv(spectrogramCsvFile, spectrogramResult.DecibelSpectrogram.Data, TwoDimensionalArray.None);
            }

            return(analysisResult);
        }
Example #16
        public void Deserialize_ReturnsValuesForAllProperties()
        {
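            // Note: this example appears to come from a different project, where ConfigFile is a
            // JSON-backed wallet/node configuration rather than the acoustic-analysis Config used elsewhere.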
            // Arrange
            Config.Load();
            string value = "Config.json";

            ConfigFile expected = new ConfigFile();

            // Act
            var actual = ConfigFile.Deserialize(value);

            // Assert
            Assert.NotEmpty(actual.DefaultWalletFileName);
            Assert.NotEmpty(actual.Network);
            Assert.NotEmpty(actual.ConnectionType);
            Assert.NotEmpty(actual.CanSpendUnconfirmed);
        }
Example #17
        public override AnalysisResult2 Analyze <T>(AnalysisSettings analysisSettings, SegmentSettings <T> segmentSettings)
        {
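            // Resolves a secondary AED config file named by the "AedConfig" key, derives AED and EPR
            // parameters, runs detection on the segment, and writes events, summary indices and a sonogram
            // image according to the configured save behaviours.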
            FileInfo audioFile = segmentSettings.SegmentAudioFile;

            var eprNormalizedMinScore = GetEprParametersFromConfigFileOrDefaults(analysisSettings.Configuration);

            var aedConfigFile = ConfigFile.Resolve(
                analysisSettings.Configuration["AedConfig"],
                analysisSettings.ConfigFile.Directory);

            var rawAedConfig = ConfigFile.Deserialize(aedConfigFile);
            var aedConfig    = Aed.GetAedParametersFromConfigFileOrDefaults(rawAedConfig);

            Tuple <BaseSonogram, List <AcousticEvent> > results = Detect(audioFile, aedConfig, eprNormalizedMinScore, segmentSettings.SegmentStartOffset);

            var analysisResults = new AnalysisResult2(analysisSettings, segmentSettings, results.Item1.Duration)
            {
                AnalysisIdentifier = this.Identifier,
                Events             = results.Item2.ToArray(),
            };
            BaseSonogram sonogram = results.Item1;

            if (analysisSettings.AnalysisDataSaveBehavior)
            {
                this.WriteEventsFile(segmentSettings.SegmentEventsFile, analysisResults.Events);
                analysisResults.EventsFile = segmentSettings.SegmentEventsFile;
            }

            if (analysisSettings.AnalysisDataSaveBehavior)
            {
                var unitTime = TimeSpan.FromMinutes(1.0);
                analysisResults.SummaryIndices = this.ConvertEventsToSummaryIndices(analysisResults.Events, unitTime, analysisResults.SegmentAudioDuration, 0);

                this.WriteSummaryIndicesFile(segmentSettings.SegmentSummaryIndicesFile, analysisResults.SummaryIndices);
            }

            // save image of sonograms
            if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave(analysisResults.Events.Length))
            {
                Image image = Aed.DrawSonogram(sonogram, results.Item2);
                image.Save(segmentSettings.SegmentImageFile.FullName, ImageFormat.Png);
                analysisResults.ImageFile = segmentSettings.SegmentImageFile;
            }

            return(analysisResults);
        }
Example #18
        public override AnalyzerConfig ParseConfig(FileInfo file)
        {
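            // Deserializes the recognizer config, validates that profile tags match algorithms, and then
            // requires exactly one profile using the ForwardTrack algorithm; anything else is a config error.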
            RuntimeHelpers.RunClassConstructor(typeof(NinoxBoobookConfig).TypeHandle);
            var config = ConfigFile.Deserialize <NinoxBoobookConfig>(file);

            // validation of configs can be done here
            GenericRecognizer.ValidateProfileTagsMatchAlgorithms(config.Profiles, file);

            // This check restricts the config so that only one generic algorithm is used.
            // CHANGE this to accept multiple generic algorithms as required.
            //if (result.Profiles.SingleOrDefault() is ForwardTrackParameters)
            if (config.Profiles?.Count == 1 && config.Profiles.First().Value is ForwardTrackParameters)
            {
                return(config);
            }

            throw new ConfigFileException("NinoxBoobook expects one and only one ForwardTrack algorithm.", file);
        }
Example #19
        public AnalyzerConfig ParseConfig(FileInfo file)
        {
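            // Deserializes the generator config and warns (without failing) when the settings would save
            // no images or produce no image types.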
            var config = ConfigFile.Deserialize <SpectrogramGeneratorConfig>(file);

            if (config.SaveSonogramImages != SaveBehavior.Always)
            {
                Log.Warn($"The spectrogram generator is not configured to save any images! Set `{nameof(SpectrogramGeneratorConfig.SaveSonogramImages)}` to `{SaveBehavior.Always}` to save the spectrograms!");
            }

            if (config.Images.IsNullOrEmpty())
            {
                Log.Warn($"The spectrogram generator is not configured to produce any image types!"
                         + $" Set `{nameof(SpectrogramGeneratorConfig.Images)}` to any of `{typeof(SpectrogramImageType).PrintEnumOptions()}`."
                         + " Choosing decibel spectrogram noise reduced only by default!");
            }

            return(config);
        }
Example #20
        private static Dictionary <string, string> GetConfigurationForConvCnn(FileInfo configFile)
        {
            Config configuration = ConfigFile.Deserialize(configFile);

            var configDict = new Dictionary <string, string>(configuration.ToDictionary())
            {
                [AnalysisKeys.AddAxes] = (configuration.GetBoolOrNull(AnalysisKeys.AddAxes) ?? true).ToString(),
                [AnalysisKeys.AddSegmentationTrack] = (configuration.GetBoolOrNull(AnalysisKeys.AddSegmentationTrack) ?? true).ToString(),
                [AnalysisKeys.AddTimeScale]         = configuration[AnalysisKeys.AddTimeScale] ?? "true",

                [AnalysisKeys.AddAxes] = configuration[AnalysisKeys.AddAxes] ?? "true",

                [AnalysisKeys.AddSegmentationTrack] = configuration[AnalysisKeys.AddSegmentationTrack] ?? "true",
            };

            ////bool makeSoxSonogram = (bool?)configuration[AnalysisKeys.MakeSoxSonogram] ?? false;
            return(configDict);
        }
Example #21
        public ZoomParameters(DirectoryInfo inputDirectory, FileInfo config, bool omitBasename)
        {
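            // Loads the zooming-spectrogram config, then the index generation data and index distributions
            // produced by a prior indexing run, and double-checks the original basename via VerifyOriginalBasename.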
            this.SpectrogramZoomingConfig = ConfigFile.Deserialize <SpectrogramZoomingConfig>(config);

            // results of search for index properties config
            Log.Debug("Using index properties file: " + this.SpectrogramZoomingConfig.IndexPropertiesConfig);

            // get the indexDistributions and the indexGenerationData AND the common.OriginalBasename
            var paths = CheckNeededFilesExist(inputDirectory);

            this.IndexGenerationData = Json.Deserialize <IndexGenerationData>(paths.indexGenerationDataFile);
            this.IndexDistributions  = Indices.IndexDistributions.Deserialize(paths.indexDistributionsFile);

            // double check file format matches what we expect
            this.VerifyOriginalBasename(paths);

            this.OmitBasename = omitBasename;
        }
Example #22
        public AnalysisResult2 Analyze <T>(AnalysisSettings analysisSettings, SegmentSettings <T> segmentSettings)
        {
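            // Another analyzer that emits no events: it deserializes an AnalyzerConfig for the segment and
            // saves the composite spectrogram image; the CSV output and offset handling remain commented out.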
            var audioFile           = segmentSettings.SegmentAudioFile;
            var recording           = new AudioRecording(audioFile.FullName);
            var sourceRecordingName = recording.BaseName;

            // TODO get the start and end-time offsets for accurate labeling of the time scale.
            //if (arguments.StartOffset.HasValue ^ arguments.EndOffset.HasValue)
            //{
            //    throw new InvalidStartOrEndException("If StartOffset or EndOffset is specified, then both must be specified");
            //}
            // set default offsets - only use defaults if not provided in arguments list
            // var offsetsProvided = arguments.StartOffset.HasValue && arguments.EndOffset.HasValue;
            //TimeSpan? startOffset;
            //TimeSpan? endOffset;
            //if (offsetsProvided)
            //{
            //    startOffset = TimeSpan.FromSeconds(arguments.StartOffset.Value);
            //    endOffset = TimeSpan.FromSeconds(arguments.EndOffset.Value);
            //}

            //var outputDirectory = segmentSettings.SegmentOutputDirectory;
            //bool saveCsv = analysisSettings.AnalysisDataSaveBehavior;

            var analysisResult    = new AnalysisResult2(analysisSettings, segmentSettings, recording.Duration);
            var configInfo        = ConfigFile.Deserialize <AnalyzerConfig>(analysisSettings.ConfigFile);
            var spectrogramResult = Audio2Sonogram.GenerateSpectrogramImages(audioFile, configInfo, sourceRecordingName);

            // this analysis produces no results! But we still print images (that is the point)
            // if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave(analysisResult.Events.Length))
            // {
            //     Debug.Assert(condition: segmentSettings.SegmentImageFile.Exists, "Warning: Image file must exist.");
            spectrogramResult.CompositeImage.Save(segmentSettings.SegmentImageFile.FullName, ImageFormat.Png);
            // }

            //if (saveCsv)
            //{
            //    var basename = Path.GetFileNameWithoutExtension(segmentSettings.SegmentAudioFile.Name);
            //    var spectrogramCsvFile = outputDirectory.CombineFile(basename + ".Spectrogram.csv");
            //    Csv.WriteMatrixToCsv(spectrogramCsvFile, spectrogramResult.DecibelSpectrogram.Data, TwoDimensionalArray.None);
            //}

            return(analysisResult);
        }
Example #23
        public static void Execute(Arguments arguments)
        {
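            // Standalone AED entry point: deserializes AedConfiguration from the YAML config, detects events
            // over the whole recording, and writes a sonogram image plus an events CSV to the output folder.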
            MainEntry.WarnIfDeveloperEntryUsed();

            TowseyLibrary.Log.Verbosity = 1;
            string date = "# DATE AND TIME: " + DateTime.Now;

            LoggedConsole.WriteLine("# Running acoustic event detection.");
            LoggedConsole.WriteLine(date);

            FileInfo      recordingFile     = arguments.Source;
            var           recordingBaseName = recordingFile.BaseName();
            DirectoryInfo outputDir         = arguments.Output.Combine(EcosoundsAedIdentifier);

            outputDir.Create();

            Log.Info("# Output folder =" + outputDir);
            Log.Info("# Recording file: " + recodingFile.Name);

            // READ PARAMETER VALUES FROM INI FILE
            AedConfiguration configuration = ConfigFile.Deserialize <AedConfiguration>(arguments.Config);
            var aedConfig = GetAedParametersFromConfigFileOrDefaults(configuration);
            var results   = Detect(recordingFile, aedConfig, TimeSpan.Zero);

            // print image
            // save image of sonograms
            var   outputImagePath = outputDir.CombineFile(recordingBaseName + ".Sonogram.png");
            Image image           = DrawSonogram(results.Item3, results.Item1);

            image.Save(outputImagePath.FullName, ImageFormat.Png);
            Log.Info("Image saved to: " + outputImagePath.FullName);

            // output csv
            var outputCsvPath = outputDir.CombineFile(recordingBaseName + ".Events.csv");

            WriteEventsFileStatic(outputCsvPath, results.Item1);
            Log.Info("CSV file saved to: " + outputCsvPath.FullName);

            TowseyLibrary.Log.WriteLine("Finished");
        }
Example #24
        public static void Execute(Arguments arguments)
        {
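            // EPR variant of the AED entry point: reads the untyped config, derives AED parameters, detects
            // and prints acoustic events sorted by start time, and saves an annotated sonogram alongside the config file.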
            MainEntry.WarnIfDeveloperEntryUsed();

            if (arguments == null)
            {
                throw new NoDeveloperMethodException();
            }

            // READ PARAMETER VALUES FROM INI FILE
            var config    = ConfigFile.Deserialize(arguments.Config);
            var aedConfig = Aed.GetAedParametersFromConfigFileOrDefaults(config);

            var input = arguments.Source;
            Tuple <BaseSonogram, List <AcousticEvent> > result = Detect(input, aedConfig, Default.eprNormalisedMinScore, TimeSpan.Zero);
            List <AcousticEvent> eprEvents = result.Item2;

            eprEvents.Sort((ae1, ae2) => ae1.TimeStart.CompareTo(ae2.TimeStart));

            LoggedConsole.WriteLine();
            foreach (AcousticEvent ae in eprEvents)
            {
                LoggedConsole.WriteLine(ae.TimeStart + "," + ae.EventDurationSeconds + "," + ae.LowFrequencyHertz + "," + ae.HighFrequencyHertz);
            }

            LoggedConsole.WriteLine();

            string       outputFolder = arguments.Config.ToFileInfo().DirectoryName;
            string       wavFilePath  = input.FullName;
            BaseSonogram sonogram     = result.Item1;
            string       imagePath    = Path.Combine(outputFolder, Path.GetFileNameWithoutExtension(wavFilePath) + ".png");
            var          image        = Aed.DrawSonogram(sonogram, eprEvents);

            image.Save(imagePath);

            //ProcessingTypes.SaveAeCsv(eprEvents, outputFolder, wavFilePath);

            Log.Info("Finished");
        }
Example #25
        public static Dictionary <string, string> GetConfigDictionary(FileInfo configFile)
        {
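            // Builds the oscillation-detection config dictionary: reads a handful of keys (with defaults)
            // from the config file, then sets the 2014 oscillation-detection frame size, sample length and
            // sensitivity threshold from the hard-coded constants below.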
            var configuration = ConfigFile.Deserialize(configFile);

            // var configDict = new Dictionary<string, string>((Dictionary<string, string>)configuration);
            var configDict = new Dictionary <string, string>
            {
                // below three lines are examples of retrieving info from Config config
                // string analysisIdentifier = configuration[AnalysisKeys.AnalysisName];
                // bool saveIntermediateWavFiles = (bool?)configuration[AnalysisKeys.SaveIntermediateWavFiles] ?? false;
                // scoreThreshold = (double?)configuration[AnalysisKeys.EventThreshold] ?? scoreThreshold;
                // ####################################################################

                [AnalysisKeys.ResampleRate]         = configuration[AnalysisKeys.ResampleRate] ?? "22050",
                [AnalysisKeys.AddAxes]              = (configuration.GetBoolOrNull(AnalysisKeys.AddAxes) ?? true).ToString(),
                [AnalysisKeys.AddSegmentationTrack] = (configuration.GetBoolOrNull(AnalysisKeys.AddSegmentationTrack) ?? true).ToString(),
                [AnalysisKeys.AddTimeScale]         = configuration[AnalysisKeys.AddTimeScale] ?? "true",
                [AnalysisKeys.AddAxes]              = configuration[AnalysisKeys.AddAxes] ?? "true",
            };

            // SET THE KEY PARAMETERS HERE FOR DETECTION OF OSCILLATION
            // a different frame size is often needed for oscillation detection
            const int oscilDetection2014FrameSize = 256;

            configDict[AnalysisKeys.OscilDetection2014FrameSize] = oscilDetection2014FrameSize.ToString();

            // Set the sample or patch length i.e. the number of frames used when looking for oscillations along freq bins
            // 64 is better where there are many birds and fast-changing activity
            // 128 is better where acoustic activity changes slowly
            //const int sampleLength = 64;
            const int sampleLength = 128;

            configDict[AnalysisKeys.OscilDetection2014SampleLength] = sampleLength.ToString();

            const double sensitivityThreshold = 0.3;

            configDict[AnalysisKeys.OscilDetection2014SensitivityThreshold] = sensitivityThreshold.ToString(CultureInfo.CurrentCulture);
            return(configDict);
        }
Example #26
 public AnalyzerConfig ParseConfig(FileInfo file)
 {
     return(ConfigFile.Deserialize <AcousticIndicesConfig>(file));
 }
Example #27
        public void Execute(Arguments arguments)
        {
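            // Unsupervised feature-learning driver: deserializes FeatureLearningSettings, clusters spectrogram
            // patches from the one-minute training recordings, writes centroid and cluster-size CSVs, draws a
            // centroid image per frequency band, and finally runs feature extraction over the training set.
            // Note the hard-coded local input/output paths.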
            LoggedConsole.WriteLine("feature learning process...");

            var inputDir     = @"D:\Mahnoosh\Liz\Least_Bittern\";
            var inputPath    = Path.Combine(inputDir, "TrainSet\\one_min_recordings");
            var trainSetPath = Path.Combine(inputDir, "TrainSet\\train_data");

            // var testSetPath = Path.Combine(inputDir, "TestSet");
            var configPath = @"D:\Mahnoosh\Liz\Least_Bittern\FeatureLearningConfig.yml";
            var resultDir  = Path.Combine(inputDir, "FeatureLearning");

            Directory.CreateDirectory(resultDir);

            // var outputMelImagePath = Path.Combine(resultDir, "MelScaleSpectrogram.png");
            // var outputNormMelImagePath = Path.Combine(resultDir, "NormalizedMelScaleSpectrogram.png");
            // var outputNoiseReducedMelImagePath = Path.Combine(resultDir, "NoiseReducedMelSpectrogram.png");
            // var outputReSpecImagePath = Path.Combine(resultDir, "ReconstructedSpectrogram.png");
            // var outputClusterImagePath = Path.Combine(resultDir, "Clusters.bmp");

            // +++++++++++++++++++++++++++++++++++++++++++++++++patch sampling from 1-min recordings

            var configFile = configPath.ToFileInfo();

            if (configFile == null)
            {
                throw new FileNotFoundException("No config file argument provided");
            }
            else if (!configFile.Exists)
            {
                throw new ArgumentException($"Config file {configFile.FullName} not found");
            }

            var configuration = ConfigFile.Deserialize <FeatureLearningSettings>(configFile);
            int patchWidth    =
                (configuration.MaxFreqBin - configuration.MinFreqBin + 1) / configuration.NumFreqBand;

            var clusteringOutputList = FeatureLearning.UnsupervisedFeatureLearning(configuration, inputPath);

            List <double[][]> allBandsCentroids = new List <double[][]>();

            for (int i = 0; i < clusteringOutputList.Count; i++)
            {
                var clusteringOutput = clusteringOutputList[i];

                // writing centroids to a csv file
                // note that Csv.WriteToCsv can't write data types like dictionary<int, double[]> (problems with arrays)
                // I converted the dictionary values to a matrix and used the Csv.WriteMatrixToCsv
                // there might be a better way to do this
                string pathToClusterCsvFile = Path.Combine(resultDir, "ClusterCentroids" + i.ToString() + ".csv");
                var    clusterCentroids     = clusteringOutput.ClusterIdCentroid.Values.ToArray();
                Csv.WriteMatrixToCsv(pathToClusterCsvFile.ToFileInfo(), clusterCentroids.ToMatrix());

                // sorting clusters based on size and output it to a csv file
                Dictionary <int, double> clusterIdSize = clusteringOutput.ClusterIdSize;
                int[] sortOrder = KmeansClustering.SortClustersBasedOnSize(clusterIdSize);

                // Write cluster ID and size to a CSV file
                string pathToClusterSizeCsvFile = Path.Combine(resultDir, "ClusterSize" + i.ToString() + ".csv");
                Csv.WriteToCsv(pathToClusterSizeCsvFile.ToFileInfo(), clusterIdSize);

                // Draw cluster image directly from clustering output
                List <KeyValuePair <int, double[]> > list = clusteringOutput.ClusterIdCentroid.ToList();
                double[][] centroids = new double[list.Count][];

                for (int j = 0; j < list.Count; j++)
                {
                    centroids[j] = list[j].Value;
                }

                allBandsCentroids.Add(centroids);

                List <double[, ]> allCentroids = new List <double[, ]>();
                for (int k = 0; k < centroids.Length; k++)
                {
                    // convert each centroid to a matrix in order of cluster ID
                    // double[,] cent = PatchSampling.ArrayToMatrixByColumn(centroids[i], patchWidth, patchHeight);
                    // OR: in order of cluster size
                    double[,] cent = MatrixTools.ArrayToMatrixByColumn(centroids[sortOrder[k]], patchWidth, configuration.PatchHeight);

                    // normalize each centroid
                    double[,] normCent = DataTools.normalise(cent);

                    // add a row of zero to each centroid
                    double[,] cent2 = PatchSampling.AddRow(normCent);

                    allCentroids.Add(cent2);
                }

                // concatenate all centroids
                double[,] mergedCentroidMatrix = PatchSampling.ListOf2DArrayToOne2DArray(allCentroids);

                // Draw clusters
                var clusterImage = ImageTools.DrawMatrixWithoutNormalisation(mergedCentroidMatrix);
                clusterImage.RotateFlip(RotateFlipType.Rotate270FlipNone);
                var outputClusteringImage = Path.Combine(resultDir, "ClustersWithGrid" + i.ToString() + ".bmp");
                clusterImage.Save(outputClusteringImage);
            }

            // extracting features
            FeatureExtraction.UnsupervisedFeatureExtraction(configuration, allBandsCentroids, trainSetPath, resultDir);
            LoggedConsole.WriteLine("Done...");
        }
Example #28
 public override AnalyzerConfig ParseConfig(FileInfo file)
 {
     return(ConfigFile.Deserialize <RecognizerConfig>(file));
 }
Example #29
        public void TestOfSpectralIndices_Octave()
        {
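            // Builds a synthetic 64 kHz recording containing five sine harmonics, takes a one-minute
            // subsegment, switches the config to an octave frequency scale (matching the frame length to the
            // scale's window size), and compares the BGN and CVR spectral index vectors to serialized expected values.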
            // create a two-minute artificial recording containing five harmonics.
            int    sampleRate = 64000;
            double duration   = 120; // signal duration in seconds

            int[] harmonics = { 500, 1000, 2000, 4000, 8000 };
            var   recording = DspFilters.GenerateTestRecording(sampleRate, duration, harmonics, WaveType.Sine);

            // cut out one minute from 30 - 90 seconds and incorporate into AudioRecording
            int startSample           = sampleRate * 30; // start 30 seconds into the recording
            int subsegmentSampleCount = sampleRate * 60; // get 60 seconds

            double[] subsamples          = DataTools.Subarray(recording.WavReader.Samples, startSample, subsegmentSampleCount);
            var      wr                  = new Acoustics.Tools.Wav.WavReader(subsamples, 1, 16, sampleRate);
            var      subsegmentRecording = new AudioRecording(wr);

            //var indexPropertiesConfig = PathHelper.ResolveConfigFile(@"IndexPropertiesConfig.yml");
            var configFile = PathHelper.ResolveConfigFile(@"Towsey.Acoustic.yml");

            // Create temp directory to store output
            if (!this.outputDirectory.Exists)
            {
                this.outputDirectory.Create();
            }

            // CHANGE CONFIG PARAMETERS HERE IF REQUIRED
            var indexCalculateConfig = ConfigFile.Deserialize <IndexCalculateConfig>(configFile);

            indexCalculateConfig.FrequencyScale = FreqScaleType.Octave;

            var freqScale = new FrequencyScale(indexCalculateConfig.FrequencyScale);

            indexCalculateConfig.FrameLength = freqScale.WindowSize;

            var results = IndexCalculate.Analysis(
                subsegmentRecording,
                TimeSpan.Zero,
                indexCalculateConfig.IndexProperties,
                sampleRate,
                TimeSpan.Zero,
                indexCalculateConfig,
                returnSonogramInfo: true);

            var spectralIndices = results.SpectralIndexValues;

            // draw the output image of all spectral indices
            var outputImagePath1 = Path.Combine(this.outputDirectory.FullName, "SpectralIndices_Octave.png");
            var image            = SpectralIndexValues.CreateImageOfSpectralIndices(spectralIndices);

            image.Save(outputImagePath1);

            // TEST the BGN SPECTRAL INDEX
            Assert.AreEqual(256, spectralIndices.BGN.Length);

            var resourcesDir         = PathHelper.ResolveAssetPath("Indices");
            var expectedSpectrumFile = new FileInfo(resourcesDir + "\\BGN_OctaveScale.bin");

            //Binary.Serialize(expectedSpectrumFile, spectralIndices.BGN);
            var expectedVector = Binary.Deserialize <double[]>(expectedSpectrumFile);

            CollectionAssert.That.AreEqual(expectedVector, spectralIndices.BGN, AllowedDelta);

            expectedSpectrumFile = new FileInfo(resourcesDir + "\\CVR_OctaveScale.bin");

            //Binary.Serialize(expectedSpectrumFile, spectralIndices.CVR);
            expectedVector = Binary.Deserialize <double[]>(expectedSpectrumFile);
            CollectionAssert.That.AreEqual(expectedVector, spectralIndices.CVR, AllowedDelta);
        }
Example #30
        public void TestOfSpectralIndices()
        {
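            // Regression test for all twelve spectral indices (ACI, BGN, CVR, ENT, EVN, OSC, PMN, RHZ, RNG,
            // RPS, RVT, SPT): each vector is compared against a serialized expected vector in the test assets,
            // and an image of all indices is saved at the end.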
            //var indexPropertiesConfig = PathHelper.ResolveConfigFile(@"IndexPropertiesConfig.yml");
            var sourceRecording = PathHelper.ResolveAsset(@"Recordings\BAC2_20071008-085040.wav");
            var configFile      = PathHelper.ResolveConfigFile(@"Towsey.Acoustic.yml");

            // var outputDir = this.outputDirectory;
            var resourcesDir = PathHelper.ResolveAssetPath("Indices");

            if (!this.outputDirectory.Exists)
            {
                this.outputDirectory.Create();
            }

            var indexCalculateConfig = ConfigFile.Deserialize <IndexCalculateConfig>(configFile);

            // CHANGE CONFIG PARAMETERS HERE IF REQUIRED
            //indexCalculateConfig.IndexCalculationDuration = TimeSpan.FromSeconds(20);
            //indexCalculateConfig.SetTypeOfFreqScale("Octave");

            var results = IndexCalculate.Analysis(
                new AudioRecording(sourceRecording),
                TimeSpan.Zero,
                indexCalculateConfig.IndexProperties,
                22050,
                TimeSpan.Zero,
                indexCalculateConfig,
                returnSonogramInfo: true);

            var spectralIndices = results.SpectralIndexValues;

            // TEST the SPECTRAL INDICES
            // To regenerate an expected vector, uncomment the corresponding Binary.Serialize line below,
            // run the test once to write the .bin file into the resources directory, then comment it out again.

            // 1:ACI
            var expectedSpectrumFile = new FileInfo(resourcesDir + "\\ACI.bin");

            //Binary.Serialize(expectedSpectrumFile, spectralIndices.ACI);
            var expectedVector = Binary.Deserialize <double[]>(expectedSpectrumFile);

            CollectionAssert.That.AreEqual(expectedVector, spectralIndices.ACI, AllowedDelta);

            // 2:BGN
            expectedSpectrumFile = new FileInfo(resourcesDir + "\\BGN.bin");

            // Binary.Serialize(expectedSpectrumFile, spectralIndices.BGN);
            expectedVector = Binary.Deserialize <double[]>(expectedSpectrumFile);
            CollectionAssert.That.AreEqual(expectedVector, spectralIndices.BGN, AllowedDelta);

            // 3:CVR
            expectedSpectrumFile = new FileInfo(resourcesDir + "\\CVR.bin");

            // Binary.Serialize(expectedSpectrumFile, spectralIndices.CVR);
            expectedVector = Binary.Deserialize <double[]>(expectedSpectrumFile);
            CollectionAssert.That.AreEqual(expectedVector, spectralIndices.CVR, AllowedDelta);

            // 4:ENT
            expectedSpectrumFile = new FileInfo(resourcesDir + "\\ENT.bin");

            // Binary.Serialize(expectedSpectrumFile, spectralIndices.ENT);
            expectedVector = Binary.Deserialize <double[]>(expectedSpectrumFile);
            CollectionAssert.That.AreEqual(expectedVector, spectralIndices.ENT, AllowedDelta);

            // 5:EVN
            expectedSpectrumFile = new FileInfo(resourcesDir + "\\EVN.bin");

            // Binary.Serialize(expectedSpectrumFile, spectralIndices.EVN);
            expectedVector = Binary.Deserialize <double[]>(expectedSpectrumFile);
            CollectionAssert.That.AreEqual(expectedVector, spectralIndices.EVN, AllowedDelta);

            // 6:OSC
            expectedSpectrumFile = new FileInfo(resourcesDir + "\\OSC.bin");

            //Binary.Serialize(expectedSpectrumFile, spectralIndices.OSC);
            expectedVector = Binary.Deserialize <double[]>(expectedSpectrumFile);
            CollectionAssert.That.AreEqual(expectedVector, spectralIndices.OSC, AllowedDelta);

            // 7:PMN
            expectedSpectrumFile = new FileInfo(resourcesDir + "\\PMN.bin");

            // Binary.Serialize(expectedSpectrumFile, spectralIndices.PMN);
            expectedVector = Binary.Deserialize <double[]>(expectedSpectrumFile);
            CollectionAssert.That.AreEqual(expectedVector, spectralIndices.PMN, AllowedDelta);

            // 8:RHZ
            expectedSpectrumFile = new FileInfo(resourcesDir + "\\RHZ.bin");

            // Binary.Serialize(expectedSpectrumFile, spectralIndices.RHZ);
            expectedVector = Binary.Deserialize <double[]>(expectedSpectrumFile);
            CollectionAssert.That.AreEqual(expectedVector, spectralIndices.RHZ, AllowedDelta);

            // 9:RNG
            expectedSpectrumFile = new FileInfo(resourcesDir + "\\RNG.bin");

            // Binary.Serialize(expectedSpectrumFile, spectralIndices.RNG);
            expectedVector = Binary.Deserialize <double[]>(expectedSpectrumFile);
            CollectionAssert.That.AreEqual(expectedVector, spectralIndices.RNG, AllowedDelta);

            // 10:RPS
            expectedSpectrumFile = new FileInfo(resourcesDir + "\\RPS.bin");

            // Binary.Serialize(expectedSpectrumFile, spectralIndices.RPS);
            expectedVector = Binary.Deserialize <double[]>(expectedSpectrumFile);
            CollectionAssert.That.AreEqual(expectedVector, spectralIndices.RPS, AllowedDelta);

            // 11:RVT
            expectedSpectrumFile = new FileInfo(resourcesDir + "\\RVT.bin");

            // Binary.Serialize(expectedSpectrumFile, spectralIndices.RVT);
            expectedVector = Binary.Deserialize <double[]>(expectedSpectrumFile);
            CollectionAssert.That.AreEqual(expectedVector, spectralIndices.RVT, AllowedDelta);

            // 12:SPT
            expectedSpectrumFile = new FileInfo(resourcesDir + "\\SPT.bin");

            // Binary.Serialize(expectedSpectrumFile, spectralIndices.SPT);
            expectedVector = Binary.Deserialize <double[]>(expectedSpectrumFile);
            CollectionAssert.That.AreEqual(expectedVector, spectralIndices.SPT, AllowedDelta);

            var outputImagePath = Path.Combine(this.outputDirectory.FullName, "SpectralIndices.png");
            var image           = SpectralIndexValues.CreateImageOfSpectralIndices(spectralIndices);

            image.Save(outputImagePath);
        }