/// <summary>
/// Draws a stack of false-colour index spectrograms at progressively finer time scales,
/// centred on <paramref name="focalTime"/>. Each scale is saved as its own PNG and the
/// whole stack is then combined vertically into a single summary image.
/// </summary>
/// <param name="inputDirectory">Currently unused; retained for interface compatibility.</param>
/// <param name="outputDirectory">Directory into which all output images are written.</param>
/// <param name="io">Input abstraction used to load the spectral index files.</param>
/// <param name="common">Carries the zooming config, index generation data and file stem.</param>
/// <param name="analysisTag">Tag identifying which analysis output files to load.</param>
/// <param name="focalTime">Time on which the zoom stack is centred; TimeSpan.Zero means "no focus".</param>
/// <param name="imageWidth">Width in pixels of every generated image.</param>
public static void DrawStackOfZoomedSpectrograms(
    DirectoryInfo inputDirectory,
    DirectoryInfo outputDirectory,
    AnalysisIoInputDirectory io,
    ZoomParameters common,
    string analysisTag,
    TimeSpan focalTime,
    int imageWidth)
{
    var zoomConfig = common.SpectrogramZoomingConfig;
    LdSpectrogramConfig ldsConfig = common.SpectrogramZoomingConfig.LdSpectrogramConfig;
    string fileStem = common.OriginalBasename;
    var indexGeneration = common.IndexGenerationData;

    // seconds of source audio represented by one column of index data
    TimeSpan dataScale = indexGeneration.IndexCalculationDuration;

    // ####################### DERIVE ZOOMED OUT SPECTROGRAMS FROM SPECTRAL INDICES
    var indexProperties = zoomConfig.IndexProperties;
    var (spectra, filteredIndexProperties) = ZoomCommon.LoadSpectra(io, analysisTag, fileStem, zoomConfig.LdSpectrogramConfig, indexProperties);

    // Set the default time-scales in seconds per pixel.
    // These were changed on 3rd April 2019 to better match those in the current zooming config file.
    double[] imageScales = { 60, 30, 15, 7.5, 3.2, 1.6, 0.8, 0.4, 0.2 };
    if (zoomConfig.SpectralIndexScale != null)
    {
        imageScales = zoomConfig.SpectralIndexScale;
    }

    // BUG FIX: the original created and immediately discarded a first Stopwatch
    // before restarting it here; a single instance suffices.
    Stopwatch sw = Stopwatch.StartNew();

    int scaleCount = imageScales.Length;
    var imageList = new List<Image>();
    for (int i = 0; i < scaleCount; i++)
    {
        var imageScale = TimeSpan.FromSeconds(imageScales[i]);
        var image = DrawIndexSpectrogramAtScale(ldsConfig, indexGeneration, filteredIndexProperties, focalTime, dataScale, imageScale, imageWidth, spectra, fileStem);
        if (image != null)
        {
            imageList.Add(image);
            string name = $"{fileStem}_FocalZoom_min{focalTime.TotalMinutes:f1}_scale{imageScales[i]}.png";
            image.Save(Path.Combine(outputDirectory.FullName, name));
        }
    }

    sw.Stop();
    LoggedConsole.WriteLine("Finished spectrograms derived from spectral indices. Elapsed time = " + sw.Elapsed.TotalSeconds + " seconds");

    // NOTE: a large block of deprecated, commented-out code that derived zoomed-in
    // spectrograms from standard spectral frames was removed here. Recover it from
    // source history if it is ever needed again.

    // combine the images into a stack
    Image combinedImage = ImageTools.CombineImagesVertically(imageList);
    string fileName = $"{fileStem}_FocalZOOM_min{focalTime.TotalMinutes:f1}.png";
    combinedImage.Save(Path.Combine(outputDirectory.FullName, fileName));
}
/// <summary>
/// Draws a single false-colour index spectrogram at the given time scale, centred on
/// <paramref name="focalTime"/>, chromes it (red focus line, title bar, axis scales)
/// and centres it on a fixed-width dark-gray canvas.
/// This method can add in absolute time if you want - currently commented out, see below.
/// </summary>
/// <param name="config">long-duration spectrogram configuration (axis tick intervals etc.).</param>
/// <param name="indexGenerationData">metadata about how the indices were generated.</param>
/// <param name="indexProperties">properties for the spectral indices to draw.</param>
/// <param name="focalTime">time on which the image is centred; TimeSpan.Zero means "no focus".</param>
/// <param name="dataScale">duration of audio represented by one column of index data.</param>
/// <param name="imageScale">duration of audio represented by one pixel of output image.</param>
/// <param name="imageWidth">width in pixels of the output canvas.</param>
/// <param name="spectra">spectral index matrices keyed by index name.</param>
/// <param name="basename">file stem used for labelling.</param>
/// <returns>the chromed image, or null when there is no data or the scale is too fine.</returns>
public static Image DrawIndexSpectrogramAtScale(
    LdSpectrogramConfig config,
    IndexGenerationData indexGenerationData,
    Dictionary<string, IndexProperties> indexProperties,
    TimeSpan focalTime,
    TimeSpan dataScale,
    TimeSpan imageScale,
    int imageWidth,
    Dictionary<string, double[,]> spectra,
    string basename)
{
    if (spectra == null)
    {
        LoggedConsole.WriteLine("WARNING: NO SPECTRAL DATA SUPPLIED");
        return null;
    }

    // check that scalingFactor >= 1.0 (cannot draw finer than the underlying data scale)
    double scalingFactor = Math.Round(imageScale.TotalMilliseconds / dataScale.TotalMilliseconds);
    if (scalingFactor < 1.0)
    {
        LoggedConsole.WriteLine("WARNING: Scaling Factor < 1.0");
        return null;
    }

    // NOTE(review): the filtered result is never used below - DrawIndexSpectrogramCommon
    // receives the unfiltered indexProperties. The call is retained in case it has side
    // effects; confirm whether dictIp was intended to be passed on.
    Dictionary<string, IndexProperties> dictIp = indexProperties;
    dictIp = InitialiseIndexProperties.FilterIndexPropertiesForSpectralOnly(dictIp);

    // calculate start time by combining DatetimeOffset with minute offset.
    // NOTE(review): sourceMinuteOffset is not used below; retained for the
    // commented-out absolute-time code path.
    TimeSpan sourceMinuteOffset = indexGenerationData.AnalysisStartOffset;
    if (indexGenerationData.RecordingStartDate.HasValue)
    {
        DateTimeOffset dto = (DateTimeOffset)indexGenerationData.RecordingStartDate;
        sourceMinuteOffset = dto.TimeOfDay + sourceMinuteOffset;
    }

    // calculate data duration from column count of an arbitrary matrix
    var matrix = spectra.First().Value;
    TimeSpan dataDuration = TimeSpan.FromSeconds(matrix.GetLength(1) * dataScale.TotalSeconds);

    // default = zero minute of day i.e. midnight.
    // BUG FIX: the original unconditionally read RecordingStartDate.Value, which throws
    // InvalidOperationException when no recording start date is available.
    TimeSpan recordingStartTime = TimeSpan.Zero;
    if (indexGenerationData.RecordingStartDate.HasValue)
    {
        recordingStartTime = indexGenerationData.RecordingStartDate.Value.TimeOfDay.Add(indexGenerationData.AnalysisStartOffset);
    }

    TimeSpan offsetTime = TimeSpan.Zero;
    TimeSpan imageDuration = TimeSpan.FromTicks(imageWidth * imageScale.Ticks);
    TimeSpan halfImageDuration = TimeSpan.FromTicks(imageWidth * imageScale.Ticks / 2);
    TimeSpan startTime = TimeSpan.Zero;
    if (focalTime != TimeSpan.Zero)
    {
        startTime = focalTime - halfImageDuration;
    }

    if (startTime < TimeSpan.Zero)
    {
        // focal time too close to the start of the data: remember how much of the
        // canvas must be left blank and clamp the window to zero.
        offsetTime = TimeSpan.Zero - startTime;
        startTime = TimeSpan.Zero;
    }

    TimeSpan endTime = imageDuration;
    if (focalTime != TimeSpan.Zero)
    {
        endTime = focalTime + halfImageDuration;
    }

    if (endTime > dataDuration)
    {
        endTime = dataDuration;
    }

    TimeSpan spectrogramDuration = endTime - startTime;

    // get the plain unchromed spectrogram
    Image ldfcSpectrogram = ZoomCommon.DrawIndexSpectrogramCommon(
        config,
        indexGenerationData,
        indexProperties,
        startTime,
        endTime,
        dataScale,
        imageScale,
        imageWidth,
        spectra,
        basename);

    if (ldfcSpectrogram == null)
    {
        LoggedConsole.WriteLine("WARNING: NO SPECTROGRAM AT SCALE " + imageScale);
        return null;
    }

    // now chrome spectrogram: draw red line at the focus time.
    // BUG FIX: Graphics and Pen are IDisposable; the original leaked both.
    if (focalTime != TimeSpan.Zero)
    {
        using (Graphics g2 = Graphics.FromImage(ldfcSpectrogram))
        using (Pen pen = new Pen(Color.Red))
        {
            TimeSpan focalOffset = focalTime - startTime;
            int x1 = (int)(focalOffset.Ticks / imageScale.Ticks);
            g2.DrawLine(pen, x1, 0, x1, ldfcSpectrogram.Height);
        }
    }

    // draw the title bar
    int nyquist = 22050 / 2; // default when resampled rate is unknown
    if (indexGenerationData.SampleRateResampled > 0)
    {
        nyquist = indexGenerationData.SampleRateResampled / 2;
    }

    int herzInterval = 1000;
    if (config != null)
    {
        herzInterval = config.YAxisTicInterval;
    }

    // NOTE(review): config is null-checked above but dereferenced unconditionally
    // below (config.XAxisTicInterval) - confirm whether config can actually be null here.
    string title = string.Format("SCALE={0}s/px. Duration={1} ", imageScale.TotalSeconds, spectrogramDuration);

    // NEXT LINE USED ONLY IF WANT ABSOLUTE TIME
    //startTime += recordingStartTime;
    Image titleBar = DrawTitleBarOfZoomSpectrogram(title, ldfcSpectrogram.Width);
    ldfcSpectrogram = FrameZoomSpectrogram(
        ldfcSpectrogram,
        titleBar,
        startTime,
        imageScale,
        config.XAxisTicInterval,
        nyquist,
        herzInterval);

    // create the base canvas image on which to centre the focal image
    Image image = new Bitmap(imageWidth, ldfcSpectrogram.Height);
    using (Graphics g1 = Graphics.FromImage(image))
    {
        g1.Clear(Color.DarkGray);
        int xOffset = (int)(offsetTime.Ticks / imageScale.Ticks);
        g1.DrawImage(ldfcSpectrogram, xOffset, 0);
    }

    return image;
}
/// <summary>
/// Draws a grey-scale spectrogram from standard spectral frames at a compressed time scale,
/// centred on <paramref name="focalTime"/>, then centres it on a fixed-width canvas.
/// This method can add in the absolute recording start time. However currently disabled.
/// </summary>
/// <param name="config">long-duration spectrogram configuration (axis tick intervals).</param>
/// <param name="indexGenerationData">metadata about how the indices were generated.</param>
/// <param name="startTimeOfData">time of the first frame in <paramref name="frameData"/>.</param>
/// <param name="compressionFactor">number of frames compressed into one output column.</param>
/// <param name="frameData">spectral frames, one array per frame.</param>
/// <param name="indexData">currently unused; retained for interface compatibility.</param>
/// <param name="focalTime">time on which the image is centred; TimeSpan.Zero means "no focus".</param>
/// <param name="frameScale">duration of one spectral frame.</param>
/// <param name="imageWidth">width in pixels of the output canvas.</param>
/// <returns>the framed spectrogram image, or null when no frame data is supplied.</returns>
public static Image DrawFrameSpectrogramAtScale(
    LdSpectrogramConfig config,
    IndexGenerationData indexGenerationData,
    TimeSpan startTimeOfData,
    int compressionFactor,
    List<double[]> frameData,
    double[,] indexData,
    TimeSpan focalTime,
    TimeSpan frameScale,
    int imageWidth)
{
    if (frameData == null || frameData.Count == 0)
    {
        LoggedConsole.WriteLine("WARNING: NO SPECTRAL SPECTROGRAM DATA SUPPLIED");
        return null;
    }

    // one output pixel represents compressionFactor frames
    TimeSpan imageScale = TimeSpan.FromTicks(frameScale.Ticks * compressionFactor);
    TimeSpan imageDuration = TimeSpan.FromTicks(imageWidth * imageScale.Ticks);
    TimeSpan halfImageDuration = TimeSpan.FromTicks(imageWidth * imageScale.Ticks / 2);
    TimeSpan startTime = focalTime - halfImageDuration;
    if (startTime < TimeSpan.Zero)
    {
        startTime = TimeSpan.Zero;
    }

    // NOTE(review): GetRange throws if the window extends past the available frames;
    // callers are assumed to have read enough data around the focal time - confirm.
    int startIndex = (int)((startTime.Ticks - startTimeOfData.Ticks) / frameScale.Ticks);
    int requiredFrameCount = imageWidth * compressionFactor;
    List<double[]> frameSelection = frameData.GetRange(startIndex, requiredFrameCount);
    double[,] spectralSelection = MatrixTools.ConvertList2Matrix(frameSelection);

    // compress the spectrogram to the correct scale
    if (compressionFactor > 1)
    {
        spectralSelection = TemporalMatrix.CompressFrameSpectrograms(spectralSelection, compressionFactor);
    }

    Image spectrogramImage = DrawStandardSpectrogramInFalseColour(spectralSelection);

    // pixel column of the focal time within the drawn window
    int x1 = (int)(halfImageDuration.Ticks / imageScale.Ticks);

    // draw focus time on image.
    // BUG FIX: Graphics and Pen are IDisposable; the original leaked both.
    if (focalTime != TimeSpan.Zero)
    {
        using (Graphics g2 = Graphics.FromImage(spectrogramImage))
        using (Pen pen = new Pen(Color.Red))
        {
            g2.DrawLine(pen, x1, 0, x1, spectrogramImage.Height);
        }
    }

    int nyquist = 22050 / 2; // default when resampled rate is unknown
    if (indexGenerationData.SampleRateResampled > 0)
    {
        nyquist = indexGenerationData.SampleRateResampled / 2;
    }

    int herzInterval = config.YAxisTicInterval;
    string title = $"ZOOM SCALE={imageScale.TotalMilliseconds}ms/pixel Image duration={imageDuration} ";
    Image titleBar = DrawTitleBarOfZoomSpectrogram(title, spectrogramImage.Width);

    // add the recording start time ONLY IF WANT ABSOLUTE TIME SCALE - obtained from info in file name
    // startTime += recordingStartTime;
    spectrogramImage = FrameZoomSpectrogram(spectrogramImage, titleBar, startTime, imageScale, config.XAxisTicInterval, nyquist, herzInterval);

    // NOTE: deprecated, commented-out code for drawing high-amplitude/clipping tracks
    // was removed here; recover from source history if needed.

    // create the base canvas and centre the spectrogram on it
    Image image = new Bitmap(imageWidth, spectrogramImage.Height);
    using (Graphics g1 = Graphics.FromImage(image))
    {
        g1.Clear(Color.DarkGray);
        int xOffset = (imageWidth / 2) - x1;
        g1.DrawImage(spectrogramImage, xOffset, 0);
    }

    return image;
}
/// <summary>
/// Renders one ribbon plot per color map discovered in the index generation data
/// found under the given input directories, and saves each plot as a PNG.
/// </summary>
/// <param name="arguments">command-line arguments for the ribbon plot.</param>
/// <returns>ExceptionLookup.Ok on success; throws on any failure.</returns>
public static async Task<int> Execute(RibbonPlot.Arguments arguments)
{
    if (arguments.InputDirectories.IsNullOrEmpty())
    {
        throw new CommandLineArgumentException(
            $"{nameof(arguments.InputDirectories)} is null or empty - please provide at least one source directory");
    }

    // BUG FIX: materialize the deferred query once - the original enumerated it twice
    // (Any(), then FormatList()), querying the file system twice.
    var doNotExist = arguments.InputDirectories.Where(x => !x.Exists).ToArray();
    if (doNotExist.Any())
    {
        throw new CommandLineArgumentException(
            $"The following directories given to {nameof(arguments.InputDirectories)} do not exist: " + doNotExist.FormatList());
    }

    if (arguments.OutputDirectory == null)
    {
        arguments.OutputDirectory = arguments.InputDirectories.First();
        Log.Warn(
            $"{nameof(arguments.OutputDirectory)} was not provided and was automatically set to source directory {arguments.OutputDirectory}");
    }

    if (arguments.Midnight == null || arguments.Midnight == TimeSpan.Zero)
    {
        // we need this to be width of day and not zero for rounding functions later on
        arguments.Midnight = RibbonPlotDomain;
        Log.Debug($"{nameof(arguments.Midnight)} was reset to {arguments.Midnight}");
    }

    if (arguments.Midnight < TimeSpan.Zero || arguments.Midnight > RibbonPlotDomain)
    {
        throw new InvalidStartOrEndException($"{nameof(arguments.Midnight)} cannot be less than `00:00` or greater than `{RibbonPlotDomain}`");
    }

    LoggedConsole.Write("Begin scanning directories");

    // BUG FIX: materialized - the original deferred enumerable re-scanned the
    // directories on every subsequent enumeration (IsNullOrEmpty + Select).
    var allIndexFiles = arguments.InputDirectories.SelectMany(IndexGenerationData.FindAll).ToArray();
    if (allIndexFiles.IsNullOrEmpty())
    {
        throw new MissingDataException($"Could not find `{IndexGenerationData.FileNameFragment}` files in:" + arguments.InputDirectories.FormatList());
    }

    Log.Debug("Checking files have dates");
    var indexGenerationDatas = allIndexFiles.Select(IndexGenerationData.Load);
    var datedIndices = FileDateHelpers.FilterObjectsForDates(
        indexGenerationDatas,
        x => x.Source,
        y => y.RecordingStartDate,
        arguments.TimeSpanOffsetHint);

    LoggedConsole.WriteLine($"{datedIndices.Count} index generation data files were loaded");
    if (datedIndices.Count == 0)
    {
        throw new MissingDataException("No index generation files had dates, cannot proceed");
    }

    // now find the ribbon plots for these images - there are typically two color maps per index generation
    var datesMappedToColorMaps = new Dictionary<string, Dictionary<DateTimeOffset, FileInfo>>(2);
    foreach (var (date, indexData) in datedIndices)
    {
        Add(indexData.LongDurationSpectrogramConfig.ColorMap1);
        Add(indexData.LongDurationSpectrogramConfig.ColorMap2);

        // local function: record the ribbon file (possibly null) for this date under the given color map
        void Add(string colorMap)
        {
            if (!datesMappedToColorMaps.ContainsKey(colorMap))
            {
                datesMappedToColorMaps.Add(colorMap, new Dictionary<DateTimeOffset, FileInfo>(datedIndices.Count));
            }

            // try to find the associated ribbon
            var searchPattern = "*" + colorMap + LdSpectrogramRibbons.SpectralRibbonTag + "*";
            if (Log.IsVerboseEnabled())
            {
                Log.Verbose($"Searching `{indexData.Source?.Directory}` with pattern `{searchPattern}`.");
            }

            var ribbonFile = indexData.Source?.Directory?.EnumerateFiles(searchPattern).FirstOrDefault();
            if (ribbonFile == null)
            {
                Log.Warn($"Did not find expected ribbon file for color map {colorMap} in directory `{indexData.Source?.Directory}`." +
                    "This can happen if the ribbon is missing or if more than one file matches the color map.");
            }

            datesMappedToColorMaps[colorMap].Add(date, ribbonFile);
        }
    }

    // get the min and max dates and other things
    var stats = new RibbonPlotStats(datedIndices, arguments.Midnight.Value);
    Log.Debug($"Files found between {stats.Min:O} and {stats.Max:O}, rendering between {stats.Start:O} and {stats.End:O}, in {stats.Buckets} buckets");

    bool success = false;
    foreach (var (colorMap, ribbons) in datesMappedToColorMaps)
    {
        Log.Info($"Rendering ribbon plot for color map {colorMap}");

        // IDIOM FIX: Any() instead of Count(...) == 0 - no need to count every element
        if (!ribbons.Any(x => x.Value.NotNull()))
        {
            Log.Error($"There are no ribbon files found for color map {colorMap} - skipping this color map");
            continue;
        }

        var image = CreateRibbonPlot(datedIndices, ribbons, stats);

        var midnight = arguments.Midnight == RibbonPlotDomain
            ? string.Empty
            : "Midnight=" + arguments.Midnight.Value.ToString("hhmm");
        var path = FilenameHelpers.AnalysisResultPath(
            arguments.OutputDirectory,
            arguments.OutputDirectory.Name,
            "RibbonPlot",
            "png",
            colorMap,
            midnight);
        using (var file = File.Create(path))
        {
            image.SaveAsPng(file);
        }

        image.Dispose();
        success = true;
    }

    if (success == false)
    {
        throw new MissingDataException("Could not find any ribbon files for any of the color maps. No ribbon plots were produced.");
    }

    LoggedConsole.WriteSuccessLine("Completed");
    return ExceptionLookup.Ok;
}
/// <summary>
/// This entrypoint should be used for testing short files (less than 2 minutes).
/// Resolves the config, prepares (resamples) the source audio, runs the named event
/// recognizer over it as a single segment, summarises the results, and cleans up
/// any temporary resampled file.
/// </summary>
public static void Execute(Arguments arguments)
{
    MainEntry.WarnIfDeveloperEntryUsed("EventRecognizer entry does not do any audio maniuplation.");
    Log.Info("Running event recognizer");

    var sourceAudio = arguments.Source;
    var configFile = arguments.Config.ToFileInfo();
    var outputDirectory = arguments.Output;

    if (configFile == null)
    {
        throw new FileNotFoundException("No config file argument provided");
    }
    else if (!configFile.Exists)
    {
        // fall back to the well-known config locations before giving up
        Log.Warn($"Config file {configFile.FullName} not found... attempting to resolve config file");
        configFile = ConfigFile.Resolve(configFile.Name, Directory.GetCurrentDirectory().ToDirectoryInfo());
    }

    LoggedConsole.WriteLine("# Recording file: " + sourceAudio.FullName);
    LoggedConsole.WriteLine("# Configuration file: " + configFile);
    LoggedConsole.WriteLine("# Output folder: " + outputDirectory);

    // find an appropriate event IAnalyzer matching the requested identifier
    IAnalyser2 recognizer = AnalyseLongRecording.FindAndCheckAnalyzer<IEventRecognizer>(
        arguments.AnalysisIdentifier,
        configFile.Name);

    Log.Info("Attempting to run recognizer: " + recognizer.Identifier);
    Log.Info("Reading configuration file");
    Config configuration = ConfigFile.Deserialize<RecognizerBase.RecognizerConfig>(configFile);

    // get default settings
    AnalysisSettings analysisSettings = recognizer.DefaultSettings;

    // convert arguments to analysis settings
    analysisSettings = arguments.ToAnalysisSettings(
        analysisSettings,
        outputIntermediate: true,
        resultSubDirectory: recognizer.Identifier,
        configuration: configuration);

    // Enable this if you want the Config file ResampleRate parameter to work.
    // Generally however the ResampleRate should remain at 22050Hz for all recognizers.
    //analysisSettings.AnalysisTargetSampleRate = (int) configuration[AnalysisKeys.ResampleRate];

    // get transform input audio file - if needed
    Log.Info("Querying source audio file");
    var audioUtilityRequest = new AudioUtilityRequest()
    {
        TargetSampleRate = analysisSettings.AnalysisTargetSampleRate,
    };

    // converts/resamples the source into a WAV segment the recognizer can consume
    var preparedFile = AudioFilePreparer.PrepareFile(
        outputDirectory,
        sourceAudio,
        MediaTypes.MediaTypeWav,
        audioUtilityRequest,
        outputDirectory);

    var source = preparedFile.SourceInfo.ToSegment();
    var prepared = preparedFile.TargetInfo.ToSegment(FileSegment.FileDateBehavior.None);
    var segmentSettings = new SegmentSettings<FileInfo>(
        analysisSettings,
        source,
        (analysisSettings.AnalysisOutputDirectory, analysisSettings.AnalysisTempDirectory),
        prepared);

    // NOTE(review): SampleRate.Value will throw if the prepared file's sample rate
    // is unknown - presumably PrepareFile always populates it; confirm.
    if (preparedFile.TargetInfo.SampleRate.Value != analysisSettings.AnalysisTargetSampleRate)
    {
        Log.Warn("Input audio sample rate does not match target sample rate");
    }

    // Execute a pre analyzer hook
    recognizer.BeforeAnalyze(analysisSettings);

    // execute actual analysis - output data will be written
    Log.Info("Running recognizer: " + recognizer.Identifier);
    AnalysisResult2 results = recognizer.Analyze(analysisSettings, segmentSettings);

    // run summarize code - output data can be written
    Log.Info("Running recognizer summary: " + recognizer.Identifier);
    recognizer.SummariseResults(
        analysisSettings,
        source,
        results.Events,
        results.SummaryIndices,
        results.SpectralIndices,
        new[] { results });

    //Log.Info("Recognizer run, saving extra results");
    // TODO: Michael, output anything else as you wish.

    // remove the resampled copy only when it is distinct from the original source
    Log.Debug("Clean up temporary files");
    if (source.Source.FullName != prepared.Source.FullName)
    {
        prepared.Source.Delete();
    }

    int eventCount = results?.Events?.Length ?? 0;
    Log.Info($"Number of detected events: {eventCount}");
    Log.Success(recognizer.Identifier + " recognizer has completed");
}
/// <summary>
/// Pre-processes short audio recordings for a convolutional DNN: reads one annotation
/// record from the query CSV, analyses the query and target recordings (SNR statistics
/// and spectrograms), and appends one CSV line of results per analysis to OUTPUT.csv.
/// </summary>
/// <param name="arguments">paths to the query/target wav and csv files, config and output directory.</param>
public static void Main(Arguments arguments)
{
    var output = arguments.Output;
    if (!output.Exists)
    {
        output.Create();
    }

    const string title = "# PRE-PROCESS SHORT AUDIO RECORDINGS FOR Convolutional DNN";
    string date = "# DATE AND TIME: " + DateTime.Now;
    LoggedConsole.WriteLine(title);
    LoggedConsole.WriteLine(date);
    LoggedConsole.WriteLine("# Input Query file: " + arguments.QueryWavFile);
    LoggedConsole.WriteLine("# Input target file: " + arguments.TargtWavFile);
    LoggedConsole.WriteLine("# Configure file: " + arguments.Config);
    LoggedConsole.WriteLine("# Output directory: " + output.Name);

    // 1. set up the necessary files
    FileInfo queryWavfile = arguments.QueryWavFile.ToFileInfo();
    FileInfo queryCsvfile = arguments.QueryCsvFile.ToFileInfo();
    FileInfo targtWavfile = arguments.TargtWavFile.ToFileInfo();
    FileInfo targtCsvfile = arguments.TargtCsvFile.ToFileInfo();
    FileInfo configFile = arguments.Config.ToFileInfo();
    DirectoryInfo opDir = output;

    // 2. get the config dictionary
    Config configuration = ConfigFile.Deserialize(configFile);
    var configDict = new Dictionary<string, string>(configuration.ToDictionary());

    // BUG FIX: the original set AddAxes and AddSegmentationTrack twice each;
    // every key is now assigned exactly once.
    configDict[AnalysisKeys.AddAxes] = configuration[AnalysisKeys.AddAxes] ?? "true";
    configDict[AnalysisKeys.AddSegmentationTrack] = configuration[AnalysisKeys.AddSegmentationTrack] ?? "true";
    configDict[AnalysisKeys.AddTimeScale] = configuration[AnalysisKeys.AddTimeScale] ?? "true";

    // print out the parameters
    LoggedConsole.WriteLine("\nPARAMETERS");
    foreach (KeyValuePair<string, string> kvp in configDict)
    {
        LoggedConsole.WriteLine("{0} = {1}", kvp.Key, kvp.Value);
    }

    // set up the output file with its CSV header
    string header = "File Name,MinFreq(Hz),MaxFreq(Hz),StartTime(s),EndTime(s),Duration(s),Threshold,Snr,FractionOfFramesGTThreshold,FractionOfFramesGTThirdSNR,path2Spectrograms";
    string opPath = Path.Combine(opDir.FullName, "OUTPUT.csv");
    using (StreamWriter writer = new StreamWriter(opPath))
    {
        writer.WriteLine(header);
    }

    // reads the entire file
    var data = FileTools.ReadTextFile(queryCsvfile.FullName);

    // read single record from csv file - line 0 is assumed to be the header
    var record = CsvDataRecord.ReadDataRecord(data[1]);

    if (!queryWavfile.Exists)
    {
        // BUG FIX: the original passed a concatenated string through string.Format,
        // which throws FormatException when the path contains '{' or '}'.
        string warning = "FILE DOES NOT EXIST >>>," + arguments.QueryWavFile;
        LoggedConsole.WriteWarnLine(warning);
        return;
    }

    // ####################################################################
    var result = AnalyseOneRecording(queryWavfile, configDict, record.EventStartSeconds, record.EventEndSeconds, record.LowFrequencyHertz, record.HighFrequencyHertz, opDir);

    // CONSTRUCT the output line for the csv file:
    // fileName,Threshold,Snr,FractionOfFramesGTThreshold,FractionOfFramesGTThirdSNR,path
    string line = string.Format(
        "{0},{1},{2},{3:f2},{4:f2},{5:f2},{6:f1},{7:f3},{8:f3},{9:f3},{10}",
        record.WavFileName,
        record.LowFrequencyHertz,
        record.HighFrequencyHertz,
        record.EventStartSeconds.TotalSeconds,
        record.EventEndSeconds.TotalSeconds,
        result.SnrStatistics.ExtractDuration.TotalSeconds,
        result.SnrStatistics.Threshold,
        result.SnrStatistics.Snr,
        result.SnrStatistics.FractionOfFramesExceedingThreshold,
        result.SnrStatistics.FractionOfFramesExceedingOneThirdSnr,
        result.SpectrogramFile.FullName);

    // It is helpful to write to the output file as we go, so as to keep a record of
    // where we are up to. This requires to open and close the output file at each iteration.
    using (StreamWriter writer = new StreamWriter(opPath, true))
    {
        writer.WriteLine(line);
    }

    // ####################################################################
    result = AnalyseOneRecording(targtWavfile, configDict, record.EventStartSeconds, record.EventEndSeconds, record.LowFrequencyHertz, record.HighFrequencyHertz, opDir);

    // NOTE(review): the line written below is the UNCHANGED query line - the target
    // recording's results now held in `result` are never written out. The code that
    // rebuilt `line` was commented out in the original; behavior preserved as-is
    // pending confirmation of the intended output.
    using (StreamWriter writer = new StreamWriter(opPath, true))
    {
        writer.WriteLine(line);
    }
} // end MAIN()
/// <summary>
/// Calculates an ROC score for the predictions and tags provided in the passed data table.
/// First orders the data by appropriate score as per the sort string, then sweeps the
/// ranked rows accumulating recall/precision/specificity, reporting the area under the
/// curve and the threshold giving maximum accuracy.
/// </summary>
/// <param name="dt">table with one row per prediction; must contain integer "TP" and "FP" columns and the normalised score column.</param>
/// <param name="totalPositiveCount">total number of positive (target) instances.</param>
/// <param name="totalNegativeCount">total number of negative instances.</param>
/// <param name="sortString">sort specification passed to DataTableTools.SortTable (sort by score, descending).</param>
public static void ROCCurve(DataTable dt, int totalPositiveCount, int totalNegativeCount, string sortString)
{
    dt = DataTableTools.SortTable(dt, sortString);

    double previousRecall = 0.0;
    int cumulativeTP = 0;
    int cumulativeFP = 0;
    double area = 0.0; // area under the ROC curve
    List<double> rocCurve = new List<double>();
    double maxAccuracy = 0.0;
    double precisionAtMax = 0.0;
    double specificityAtMax = 0.0;
    double recallAtMax = 0.0;
    double scoreAtMax = 0.0;
    int optimumCount = 0; // rank position of the max-accuracy row (retained from original; not reported)

    int count = 0;
    foreach (DataRow row in dt.Rows)
    {
        // a row is counted as a true positive OR a false positive (some rows may be neither)
        if ((int)row["TP"] == 1)
        {
            cumulativeTP++;
        }
        else if ((int)row["FP"] == 1)
        {
            cumulativeFP++;
        }

        double recall = cumulativeTP / (double)totalPositiveCount; // the true positive rate
        double specificity = cumulativeFP / (double)totalNegativeCount;

        // BUG FIX: guard against 0/0 (NaN) when the leading rows are neither TP nor FP.
        int predictedPositives = cumulativeTP + cumulativeFP;
        double precision = predictedPositives == 0 ? 0.0 : cumulativeTP / (double)predictedPositives;
        double accuracy = (recall + precision) / 2;
        if (accuracy > maxAccuracy)
        {
            optimumCount = count;
            maxAccuracy = accuracy;
            recallAtMax = recall;
            precisionAtMax = precision;
            specificityAtMax = specificity;
            scoreAtMax = (double)row[AnalysisKeys.EventNormscore];
        }

        count++;

        // accumulate area under the curve; specificity is used as the width increment here
        double delta = specificity * (recall - previousRecall);
        area += delta;
        if (delta > 0.0)
        {
            rocCurve.Add(delta);
        }

        previousRecall = recall;
    } // foreach row in table

    if (rocCurve.Count > 0)
    {
        DataTools.writeBarGraph(rocCurve.ToArray());
        LoggedConsole.WriteLine("Area under ROC curve = {0:f4}", area);
        LoggedConsole.WriteLine("Max accuracy={0:f3} for score threshold={1:f3}", maxAccuracy, scoreAtMax);
        LoggedConsole.WriteLine(" where recall={0:f3}, precision={1:f3}, specifcity={2:f3}", recallAtMax, precisionAtMax, specificityAtMax);
    }
}
/// <summary>
/// Compares two long-duration false-colour spectrograms (assumed to share the same colour map and
/// matrix dimensions) and renders a per-pixel significance map of their euclidean distance.
/// Each of the three colour-map indices is converted to z-scores (using the mode and one-tailed SD of
/// its own distribution), the 3-D euclidean distance between the two recordings is computed per pixel,
/// standardised over the whole image, and mapped to confidence-level colours. The sign (+/-) of the
/// colour is chosen by which recording has the larger mean normalised index value at that pixel.
/// </summary>
public static Image DrawDistanceSpectrogram(LDSpectrogramRGB cs1, LDSpectrogramRGB cs2)
{
    // the colour map string (e.g. "ACI-TEN-CVR") names the three indices to be compared
    string[] keys = cs1.ColorMap.Split('-');

    // --- convert each index matrix of recording 1 to z-scores; per-index stats are cached on cs1 ---
    string key = keys[0];
    double[,] m1Red = cs1.GetNormalisedSpectrogramMatrix(key);
    IndexDistributions.SpectralStats stats = IndexDistributions.GetModeAndOneTailedStandardDeviation(m1Red);
    cs1.IndexStats.Add(key, stats);
    m1Red = MatrixTools.Matrix2ZScores(m1Red, stats.Mode, stats.StandardDeviation);
    ////LoggedConsole.WriteLine("1.{0}: Min={1:f2} Max={2:f2} Mode={3:f2}+/-{4:f3} (SD=One-tailed)", key, dict["min"], dict["max"], dict["mode"], dict["sd"]);

    key = keys[1];
    double[,] m1Grn = cs1.GetNormalisedSpectrogramMatrix(key);
    stats = IndexDistributions.GetModeAndOneTailedStandardDeviation(m1Grn);
    cs1.IndexStats.Add(key, stats);
    m1Grn = MatrixTools.Matrix2ZScores(m1Grn, stats.Mode, stats.StandardDeviation);
    ////LoggedConsole.WriteLine("1.{0}: Min={1:f2} Max={2:f2} Mode={3:f2}+/-{4:f3} (SD=One-tailed)", key, dict["min"], dict["max"], dict["mode"], dict["sd"]);

    key = keys[2];
    double[,] m1Blu = cs1.GetNormalisedSpectrogramMatrix(key);
    stats = IndexDistributions.GetModeAndOneTailedStandardDeviation(m1Blu);
    cs1.IndexStats.Add(key, stats);
    m1Blu = MatrixTools.Matrix2ZScores(m1Blu, stats.Mode, stats.StandardDeviation);
    ////LoggedConsole.WriteLine("1.{0}: Min={1:f2} Max={2:f2} Mode={3:f2}+/-{4:f3} (SD=One-tailed)", key, dict["min"], dict["max"], dict["mode"], dict["sd"]);

    // --- same conversion for recording 2, cached on cs2 ---
    key = keys[0];
    double[,] m2Red = cs2.GetNormalisedSpectrogramMatrix(key);
    stats = IndexDistributions.GetModeAndOneTailedStandardDeviation(m2Red);
    cs2.IndexStats.Add(key, stats);
    m2Red = MatrixTools.Matrix2ZScores(m2Red, stats.Mode, stats.StandardDeviation);
    ////LoggedConsole.WriteLine("2.{0}: Min={1:f2} Max={2:f2} Mode={3:f2}+/-{4:f3} (SD=One-tailed)", key, dict["min"], dict["max"], dict["mode"], dict["sd"]);

    key = keys[1];
    double[,] m2Grn = cs2.GetNormalisedSpectrogramMatrix(key);
    stats = IndexDistributions.GetModeAndOneTailedStandardDeviation(m2Grn);
    cs2.IndexStats.Add(key, stats);
    m2Grn = MatrixTools.Matrix2ZScores(m2Grn, stats.Mode, stats.StandardDeviation);
    ////LoggedConsole.WriteLine("2.{0}: Min={1:f2} Max={2:f2} Mode={3:f2}+/-{4:f3} (SD=One-tailed)", key, dict["min"], dict["max"], dict["mode"], dict["sd"]);

    key = keys[2];
    double[,] m2Blu = cs2.GetNormalisedSpectrogramMatrix(key);
    stats = IndexDistributions.GetModeAndOneTailedStandardDeviation(m2Blu);
    cs2.IndexStats.Add(key, stats);
    m2Blu = MatrixTools.Matrix2ZScores(m2Blu, stats.Mode, stats.StandardDeviation);
    ////LoggedConsole.WriteLine("2.{0}: Min={1:f2} Max={2:f2} Mode={3:f2}+/-{4:f3} (SD=One-tailed)", key, dict["min"], dict["max"], dict["mode"], dict["sd"]);

    // report the distribution statistics of each index for both recordings
    // (NOTE(review): the avACI/avTEN/avCVR labels assume the default colour map - the actual indices come from cs1.ColorMap)
    var v1 = new double[3];
    double[] mode1 =
    {
        cs1.IndexStats[keys[0]].Mode,
        cs1.IndexStats[keys[1]].Mode,
        cs1.IndexStats[keys[2]].Mode,
    };
    double[] stDv1 =
    {
        cs1.IndexStats[keys[0]].StandardDeviation,
        cs1.IndexStats[keys[1]].StandardDeviation,
        cs1.IndexStats[keys[2]].StandardDeviation,
    };
    LoggedConsole.WriteLine(
        "1: avACI={0:f3}+/-{1:f3}; avTEN={2:f3}+/-{3:f3}; avCVR={4:f3}+/-{5:f3}",
        mode1[0], stDv1[0], mode1[1], stDv1[1], mode1[2], stDv1[2]);

    var v2 = new double[3];
    double[] mode2 =
    {
        cs2.IndexStats[keys[0]].Mode,
        cs2.IndexStats[keys[1]].Mode,
        cs2.IndexStats[keys[2]].Mode,
    };
    double[] stDv2 =
    {
        cs2.IndexStats[keys[0]].StandardDeviation,
        cs2.IndexStats[keys[1]].StandardDeviation,
        cs2.IndexStats[keys[2]].StandardDeviation,
    };
    LoggedConsole.WriteLine(
        "2: avACI={0:f3}+/-{1:f3}; avTEN={2:f3}+/-{3:f3}; avCVR={4:f3}+/-{5:f3}",
        mode2[0], stDv2[0], mode2[1], stDv2[1], mode2[2], stDv2[2]);

    // assume all matricies are normalised and of the same dimensions
    int rows = m1Red.GetLength(0); // number of rows
    int cols = m1Red.GetLength(1); // number

    // d12: euclidean distance between the two recordings per pixel;
    // d11/d22: mean of the three normalised index values for each recording (used to pick the sign of the colour)
    var d12Matrix = new double[rows, cols];
    var d11Matrix = new double[rows, cols];
    var d22Matrix = new double[rows, cols];

    for (int row = 0; row < rows; row++)
    {
        for (int col = 0; col < cols; col++)
        {
            v1[0] = m1Red[row, col];
            v1[1] = m1Grn[row, col];
            v1[2] = m1Blu[row, col];

            v2[0] = m2Red[row, col];
            v2[1] = m2Grn[row, col];
            v2[2] = m2Blu[row, col];

            d12Matrix[row, col] = DataTools.EuclidianDistance(v1, v2);
            d11Matrix[row, col] = (v1[0] + v1[1] + v1[2]) / 3; // get average of the normalised values
            d22Matrix[row, col] = (v2[0] + v2[1] + v2[2]) / 3;

            // following lines are for debugging purposes
            // if ((row == 150) && (col == 1100))
            // {
            //     LoggedConsole.WriteLine("V1={0:f3}, {1:f3}, {2:f3}", v1[0], v1[1], v1[2]);
            //     LoggedConsole.WriteLine("V2={0:f3}, {1:f3}, {2:f3}", v2[0], v2[1], v2[2]);
            //     LoggedConsole.WriteLine("EDist12={0:f4}; ED11={1:f4}; ED22={2:f4}", d12Matrix[row, col], d11Matrix[row, col], d22Matrix[row, col]);
            // }
        }
    } // rows

    // standardise the distance matrix to z-scores over the whole image
    double[] array = DataTools.Matrix2Array(d12Matrix);
    double avDist, sdDist;
    NormalDist.AverageAndSD(array, out avDist, out sdDist);
    for (int row = 0; row < rows; row++)
    {
        for (int col = 0; col < cols; col++)
        {
            d12Matrix[row, col] = (d12Matrix[row, col] - avDist) / sdDist;
        }
    }

    // int MaxRGBValue = 255;
    // int v;
    double zScore;
    Dictionary<string, Color> colourChart = GetDifferenceColourChart();
    Color colour;

    // map each standardised distance to a confidence colour; thresholds 1.65/2.33/3.08 are the
    // one-tailed normal critical values for 95%, 99% and 99.9% confidence respectively
    var bmp = new Bitmap(cols, rows, PixelFormat.Format24bppRgb);
    for (int row = 0; row < rows; row++)
    {
        for (int col = 0; col < cols; col++)
        {
            zScore = d12Matrix[row, col];

            // sign of the difference: which recording has the larger mean index value here?
            if (d11Matrix[row, col] >= d22Matrix[row, col])
            {
                if (zScore > 3.08)
                {
                    colour = colourChart["+99.9%"];
                } // 99.9% conf
                else
                {
                    if (zScore > 2.33)
                    {
                        colour = colourChart["+99.0%"];
                    } // 99.0% conf
                    else
                    {
                        if (zScore > 1.65)
                        {
                            colour = colourChart["+95.0%"];
                        } // 95% conf
                        else
                        {
                            if (zScore < 0.0)
                            {
                                colour = colourChart["NoValue"];
                            }
                            else
                            {
                                // v = Convert.ToInt32(zScore * MaxRGBValue);
                                // colour = Color.FromArgb(v, 0, v);
                                colour = colourChart["+NotSig"];
                            }
                        }
                    }
                } // if() else

                bmp.SetPixel(col, row, colour);
            }
            else
            {
                if (zScore > 3.08)
                {
                    colour = colourChart["-99.9%"];
                } // 99.9% conf
                else
                {
                    if (zScore > 2.33)
                    {
                        colour = colourChart["-99.0%"];
                    } // 99.0% conf
                    else
                    {
                        if (zScore > 1.65)
                        {
                            colour = colourChart["-95.0%"];
                        } // 95% conf
                        else
                        {
                            if (zScore < 0.0)
                            {
                                colour = colourChart["NoValue"];
                            }
                            else
                            {
                                // v = Convert.ToInt32(zScore * MaxRGBValue);
                                // if()
                                // colour = Color.FromArgb(0, v, v);
                                colour = colourChart["-NotSig"];
                            }
                        }
                    }
                } // if() else

                bmp.SetPixel(col, row, colour);
            }
        }
    } // all rows

    return(bmp);
}
/// <summary>
/// This method compares the acoustic indices derived from two different long duration recordings of the same length.
/// It takes as input any number of csv files of acoustic indices in spectrogram columns.
/// Typically there will be at least three indices csv files for each of the original recordings to be compared.
/// The method produces four spectrogram image files:
/// 1) A negative false-colour spectrogram derived from the indices of recording 1.
/// 2) A negative false-colour spectrogram derived from the indices of recording 2.
/// 3) A spectrogram of euclidean distances between the two input files.
/// 4) The above three spectrograms combined in one image.
/// </summary>
/// <param name="inputDirectory">Directory containing the index csv files for both recordings.</param>
/// <param name="inputFileName1">Base file of recording 1's index csv files.</param>
/// <param name="inputFileName2">Base file of recording 2's index csv files.</param>
/// <param name="outputDirectory">Directory into which the four images are written.</param>
public static void DrawDistanceSpectrogram(
    DirectoryInfo inputDirectory,
    FileInfo inputFileName1,
    FileInfo inputFileName2,
    DirectoryInfo outputDirectory)
{
    // PARAMETERS
    // build and render the false-colour spectrogram for recording 1
    string outputFileName1 = inputFileName1.Name;
    var cs1 = new LDSpectrogramRGB(minuteOffset, xScale, sampleRate, frameWidth, colorMap);
    cs1.ColorMode = colorMap;
    cs1.BackgroundFilter = backgroundFilterCoeff;
    string[] keys = colorMap.Split('-');
    cs1.ReadCsvFiles(inputDirectory, inputFileName1.Name, keys);

    // ColourSpectrogram.BlurSpectrogram(cs1);
    // cs1.DrawGreyScaleSpectrograms(opdir, opFileName1);
    cs1.DrawNegativeFalseColourSpectrogram(outputDirectory, outputFileName1);
    string imagePath = Path.Combine(outputDirectory.FullName, outputFileName1 + ".COLNEG.png");
    Image spg1Image = ImageTools.ReadImage2Bitmap(imagePath);
    if (spg1Image == null)
    {
        LoggedConsole.WriteLine("SPECTROGRAM IMAGE DOES NOT EXIST: {0}", imagePath);
        return;
    }

    int nyquist = cs1.SampleRate / 2;
    int herzInterval = 1000;
    string title = string.Format(
        "FALSE COLOUR SPECTROGRAM: {0}. (scale:hours x kHz) (colour: R-G-B={1})",
        inputFileName1,
        cs1.ColorMode);
    Image titleBar = LDSpectrogramRGB.DrawTitleBarOfFalseColourSpectrogram(title, spg1Image.Width);
    spg1Image = LDSpectrogramRGB.FrameLDSpectrogram(
        spg1Image,
        titleBar,
        cs1,
        nyquist,
        herzInterval);

    // build and render the false-colour spectrogram for recording 2
    string outputFileName2 = inputFileName2.Name;
    var cs2 = new LDSpectrogramRGB(minuteOffset, xScale, sampleRate, frameWidth, colorMap);
    cs2.ColorMode = colorMap;
    cs2.BackgroundFilter = backgroundFilterCoeff;
    cs2.ReadCsvFiles(inputDirectory, inputFileName2.Name, keys);

    // cs2.DrawGreyScaleSpectrograms(opdir, opFileName2);
    cs2.DrawNegativeFalseColourSpectrogram(outputDirectory, outputFileName2);
    imagePath = Path.Combine(outputDirectory.FullName, outputFileName2 + ".COLNEG.png");
    Image spg2Image = ImageTools.ReadImage2Bitmap(imagePath);
    if (spg2Image == null)
    {
        LoggedConsole.WriteLine("SPECTROGRAM IMAGE DOES NOT EXIST: {0}", imagePath);
        return;
    }

    title = string.Format(
        "FALSE COLOUR SPECTROGRAM: {0}. (scale:hours x kHz) (colour: R-G-B={1})",
        inputFileName2,
        cs2.ColorMode);
    titleBar = LDSpectrogramRGB.DrawTitleBarOfFalseColourSpectrogram(title, spg2Image.Width);

    // BUG FIX: recording 2's image was previously framed with cs1's metadata (copy-paste error); use cs2.
    spg2Image = LDSpectrogramRGB.FrameLDSpectrogram(
        spg2Image,
        titleBar,
        cs2,
        nyquist,
        herzInterval);

    // render the euclidean-distance spectrogram comparing the two recordings
    // NOTE(review): concatenating the FileInfo itself (not .Name) reproduces the original behavior,
    // i.e. the output name embeds the input path string as given - confirm this is intended.
    string outputFileName4 = inputFileName1 + ".EuclidianDistance.png";
    Image deltaSp = DrawDistanceSpectrogram(cs1, cs2);
    Color[] colorArray = LDSpectrogramRGB.ColourChart2Array(GetDifferenceColourChart());
    titleBar = DrawTitleBarOfEuclidianDistanceSpectrogram(
        inputFileName1.Name,
        inputFileName2.Name,
        colorArray,
        deltaSp.Width,
        SpectrogramConstants.HEIGHT_OF_TITLE_BAR);
    deltaSp = LDSpectrogramRGB.FrameLDSpectrogram(deltaSp, titleBar, cs2, nyquist, herzInterval);
    deltaSp.Save(Path.Combine(outputDirectory.FullName, outputFileName4));

    // stack the two recordings and the distance image into one combined image
    string outputFileName5 = inputFileName1 + ".2SpectrogramsAndDistance.png";
    var images = new Image[3];
    images[0] = spg1Image;
    images[1] = spg2Image;
    images[2] = deltaSp;
    Image combinedImage = ImageTools.CombineImagesVertically(images);
    combinedImage.Save(Path.Combine(outputDirectory.FullName, outputFileName5));
}
/// <summary>
/// Entry point for the zooming-spectrogram commands. Depending on <c>arguments.ZoomAction</c> it either
/// draws a focused multi-resolution pyramid of spectrograms around a given minute, or renders the full
/// set of zooming tiles for a recording.
/// </summary>
/// <param name="arguments">Parsed command-line arguments; must not be null.</param>
public static void Execute(Arguments arguments)
{
    if (arguments == null)
    {
        throw new NoDeveloperMethodException();
    }

    string description;
    switch (arguments.ZoomAction)
    {
        case Arguments.ZoomActionType.Focused:
            description = "# DRAW STACK OF FOCUSED MULTI-SCALE LONG DURATION SPECTROGRAMS DERIVED FROM SPECTRAL INDICES.";
            break;
        case Arguments.ZoomActionType.Tile:
            description = "# DRAW ZOOMING SPECTROGRAMS DERIVED FROM SPECTRAL INDICES OBTAINED FROM AN AUDIO RECORDING";
            break;
        default:
            // IMPROVED: include the parameter name and offending value in the exception for easier diagnosis
            throw new ArgumentOutOfRangeException(nameof(arguments.ZoomAction), arguments.ZoomAction, null);
    }

    LoggedConsole.WriteLine(description);
    LoggedConsole.WriteLine("# Spectrogram Zooming config : " + arguments.SpectrogramZoomingConfig);
    LoggedConsole.WriteLine("# Input Directory : " + arguments.SourceDirectory);
    LoggedConsole.WriteLine("# Output Directory : " + arguments.Output);

    var common = new ZoomParameters(
        arguments.SourceDirectory.ToDirectoryEntry(),
        arguments.SpectrogramZoomingConfig.ToFileEntry(),
        !string.IsNullOrEmpty(arguments.OutputFormat));

    LoggedConsole.WriteLine("# File name of recording : " + common.OriginalBasename);

    // create file systems for reading input and writing output
    //arguments.SourceDirectory.ToDirectoryInfo(),
    //arguments.Output.ToDirectoryInfo(),
    var io = FileSystemProvider.GetInputOutputFileSystems(
            arguments.SourceDirectory,
            FileSystemProvider.MakePath(arguments.Output, common.OriginalBasename, arguments.OutputFormat, "Tiles"))
        .EnsureInputIsDirectory();

    switch (arguments.ZoomAction)
    {
        case Arguments.ZoomActionType.Focused:
            // draw a focused multi-resolution pyramid of images
            TimeSpan focalTime;
            if (arguments.FocusMinute.HasValue)
            {
                focalTime = TimeSpan.FromMinutes(arguments.FocusMinute.Value);
            }
            else
            {
                throw new ArgumentException("FocusMinute is null, cannot proceed");
            }

            ZoomFocusedSpectrograms.DrawStackOfZoomedSpectrograms(
                arguments.SourceDirectory.ToDirectoryInfo(),
                arguments.Output.ToDirectoryInfo(),
                io,
                common,
                AcousticIndices.TowseyAcoustic,
                focalTime,
                imageWidth: 1500);
            break;
        case Arguments.ZoomActionType.Tile:
            // Create the super tiles for a full set of recordings
            ZoomTiledSpectrograms.DrawTiles(
                io,
                common,
                AcousticIndices.TowseyAcoustic);
            break;
        default:
            Log.Warn("Other ZoomAction results in standard LD Spectrogram to be drawn");

            // draw standard false color spectrograms - useful to check what spectrograms of the individual
            // indices are like.
            // FIX: removed the unreachable `break;` that followed this throw (compiler warning CS0162).
            throw new NotImplementedException();

            /*LDSpectrogramRGB.DrawSpectrogramsFromSpectralIndices(
             * arguments.SourceDirectory,
             * arguments.Output,
             * arguments.SpectrogramConfigPath,
             * arguments.IndexPropertiesConfig);*/
    }
}
/// <summary>
/// Converts a test audio file to <paramref name="outputMimeType"/> and asserts that the converted file's
/// duration matches <paramref name="expectedDuration"/> within <paramref name="maxVariance"/>, and that
/// the output stream uses the codec expected for the target media type.
/// </summary>
/// <param name="filename">Name of the source file in the test-audio directory.</param>
/// <param name="mimetype">Media type of the source file.</param>
/// <param name="outputMimeType">Media type to convert to.</param>
/// <param name="expectedDuration">Expected duration of the converted file.</param>
/// <param name="maxVariance">Maximum tolerated difference between expected and actual duration.</param>
/// <param name="customRequest">Optional conversion request; a default (empty) request is used when null.</param>
private static void ConvertsCorrectly(
    string filename,
    string mimetype,
    string outputMimeType,
    TimeSpan expectedDuration,
    TimeSpan maxVariance,
    AudioUtilityRequest customRequest = null)
{
    foreach (var util in new[] { TestHelper.GetAudioUtility() })
    {
        var dir = PathHelper.GetTempDir();
        var output = dir.CombineFile(
            Path.GetFileNameWithoutExtension(filename) + "_converted." + MediaTypes.GetExtension(outputMimeType));
        var audioUtilRequest = customRequest ?? new AudioUtilityRequest();
        var input = PathHelper.GetTestAudioFile(filename);
        util.Modify(input, mimetype, output, outputMimeType, audioUtilRequest);

        // PERF FIX: util.Info(output) was previously called three times on the same unchanged file
        // (each call probes the file with an external tool); a single call is sufficient.
        var info = util.Info(output);
        var infoOutput = GetDurationInfo(info);

        var compareResult = "Expected duration " + expectedDuration
            + " actual duration " + info.Duration
            + " expected max variation " + maxVariance
            + " actual variation "
            + expectedDuration.Subtract(info.Duration.HasValue ? info.Duration.Value : TimeSpan.Zero).Duration();

        using (ConsoleRedirector cr = new ConsoleRedirector())
        {
            LoggedConsole.WriteLine(compareResult);
        }

        var message = $"{compareResult}.{Environment.NewLine}Info output: {infoOutput}";

        Assert.IsTrue(
            TestHelper.CompareTimeSpans(expectedDuration, info.Duration.Value, maxVariance), message);

        PathHelper.DeleteTempDir(dir);

        if (info?.RawData != null && info.RawData.ContainsKey("STREAM codec_long_name"))
        {
            var codec = info.RawData["STREAM codec_long_name"];

            if (outputMimeType == MediaTypes.MediaTypeWav)
            {
                Assert.IsTrue(codec == MediaTypes.CodecWavPcm16BitLe);
            }
            else if (outputMimeType == MediaTypes.MediaTypeOggAudio)
            {
                Assert.IsTrue(codec == MediaTypes.CodecVorbis);
            }
            else if (outputMimeType == MediaTypes.MediaTypeMp3)
            {
                Assert.IsTrue(codec == MediaTypes.CodecMp3);
            }
            else if (outputMimeType == MediaTypes.MediaTypeWebMAudio)
            {
                Assert.IsTrue(codec == MediaTypes.CodecVorbis);
            }
            else
            {
                // NOTE(review): comparing a codec long-name against ExtUnknown looks suspicious,
                // but is preserved from the original - confirm the intended sentinel value.
                Assert.IsTrue(codec == MediaTypes.ExtUnknown);
            }
        }
    }
}
/// <summary>
/// 2. Analyses long audio recording (mp3 or wav) as per passed config file. Outputs an events.csv file AND an
/// indices.csv file
/// Signed off: Michael Towsey 4th December 2012
/// Pipeline: resolve config and offsets; construct the analyzer and AnalysisCoordinator; run the analysis
/// over the file segment; merge per-segment results; save events/summary/spectral indices; render the
/// summary-index tracks image; log a final summary.
/// </summary>
/// <param name="arguments">Parsed command-line arguments; must not be null.</param>
public static void Execute(Arguments arguments)
{
    if (arguments == null)
    {
        throw new NoDeveloperMethodException();
    }

    LoggedConsole.WriteLine("# PROCESS LONG RECORDING");
    LoggedConsole.WriteLine("# DATE AND TIME: " + DateTime.Now);

    // 1. set up the necessary files
    var sourceAudio = arguments.Source;
    var configFile = arguments.Config.ToFileInfo();
    var outputDirectory = arguments.Output;
    var tempFilesDirectory = arguments.TempDir;

    // if a temp dir is not given, use output dir as temp dir
    if (tempFilesDirectory == null)
    {
        Log.Warn("No temporary directory provided, using output directory");
        tempFilesDirectory = outputDirectory;
    }

    // try an automatically find the config file
    if (configFile == null)
    {
        throw new FileNotFoundException("No config file argument provided");
    }
    else if (!configFile.Exists)
    {
        Log.Warn($"Config file {configFile.FullName} not found... attempting to resolve config file");

        // we use .ToString() here to get the original input string.
        // Using fullname always produces an absolute path relative to pwd... we don't want to prematurely make assumptions:
        // e.g. We require a missing absolute path to fail... that wouldn't work with .Name
        // e.g. We require a relative path to try and resolve, using .FullName would fail the first absolute check inside ResolveConfigFile
        configFile = ConfigFile.Resolve(configFile.ToString(), Directory.GetCurrentDirectory().ToDirectoryInfo());
    }

    // offsets must be provided as a pair (XOR detects exactly one being set)
    if (arguments.StartOffset.HasValue ^ arguments.EndOffset.HasValue)
    {
        throw new InvalidStartOrEndException("If StartOffset or EndOffset is specified, then both must be specified");
    }

    if (arguments.StartOffset.HasValue && arguments.EndOffset.HasValue && arguments.EndOffset.Value <= arguments.StartOffset.Value)
    {
        throw new InvalidStartOrEndException("Start offset must be less than end offset.");
    }

    LoggedConsole.WriteLine("# Recording file: " + sourceAudio.FullName);
    LoggedConsole.WriteLine("# Configuration file: " + configFile);
    LoggedConsole.WriteLine("# Output folder: " + outputDirectory);
    LoggedConsole.WriteLine("# Temp File Directory: " + tempFilesDirectory);

    // optionally copy logs / config to make results easier to understand
    // TODO: remove, see https://github.com/QutEcoacoustics/audio-analysis/issues/133
    if (arguments.WhenExitCopyConfig || arguments.WhenExitCopyLog)
    {
        AppDomain.CurrentDomain.ProcessExit += (sender, args) => { Cleanup(arguments, configFile); };
    }

    // 2. initialize the analyzer
    // we're changing the way resolving config files works. Ideally, we'd like to use statically typed config files
    // but we can't do that unless we know which type we have to load first! Currently analyzer to load is in
    // the config file so we can't know which analyzer we can use. Thus we will change to using the file name,
    // or an argument to resolve the analyzer to load.
    // Get analysis name:
    IAnalyser2 analyzer = FindAndCheckAnalyzer<IAnalyser2>(arguments.AnalysisIdentifier, configFile.Name);

    // 2. get the analysis config
    AnalyzerConfig configuration = analyzer.ParseConfig(configFile);

    SaveBehavior saveIntermediateWavFiles = configuration.SaveIntermediateWavFiles;
    bool saveIntermediateDataFiles = configuration.SaveIntermediateCsvFiles;
    SaveBehavior saveSonogramsImages = configuration.SaveSonogramImages;

    bool filenameDate = configuration.RequireDateInFilename;

    if (configuration[AnalysisKeys.AnalysisName].IsNotWhitespace())
    {
        Log.Warn("Your config file has `AnalysisName` set - this property is deprecated and ignored");
    }

    // AT 2018-02: changed logic so default index properties loaded if not provided
    FileInfo indicesPropertiesConfig = IndexProperties.Find(configuration, configFile);
    if (indicesPropertiesConfig == null || !indicesPropertiesConfig.Exists)
    {
        Log.Warn("IndexProperties config can not be found! Loading a default");
        indicesPropertiesConfig = ConfigFile.Default<Dictionary<string, IndexProperties>>();
    }

    LoggedConsole.WriteLine("# IndexProperties Cfg: " + indicesPropertiesConfig.FullName);

    // min score for an acceptable event
    Log.Info("Minimum event threshold has been set to " + configuration.EventThreshold);

    FileSegment.FileDateBehavior defaultBehavior = FileSegment.FileDateBehavior.Try;
    if (filenameDate)
    {
        if (!FileDateHelpers.FileNameContainsDateTime(sourceAudio.Name))
        {
            throw new InvalidFileDateException(
                "When RequireDateInFilename option is set, the filename of the source audio file must contain "
                + "a valid AND UNAMBIGUOUS date. Such a date was not able to be parsed.");
        }

        defaultBehavior = FileSegment.FileDateBehavior.Required;
    }

    // 3. initilize AnalysisCoordinator class that will do the analysis
    var analysisCoordinator = new AnalysisCoordinator(
        new LocalSourcePreparer(),
        saveIntermediateWavFiles,
        false, // NOTE(review): meaning of this positional flag is not visible here - confirm against the AnalysisCoordinator constructor
        arguments.Parallel);

    // 4. get the segment of audio to be analysed
    // if tiling output, specify that FileSegment needs to be able to read the date
    var fileSegment = new FileSegment(sourceAudio, arguments.AlignToMinute, null, defaultBehavior);
    var bothOffsetsProvided = arguments.StartOffset.HasValue && arguments.EndOffset.HasValue;
    if (bothOffsetsProvided)
    {
        fileSegment.SegmentStartOffset = TimeSpan.FromSeconds(arguments.StartOffset.Value);
        fileSegment.SegmentEndOffset = TimeSpan.FromSeconds(arguments.EndOffset.Value);
    }
    else
    {
        Log.Debug("Neither start nor end segment offsets provided. Therefore both were ignored.");
    }

    // 6. initialize the analysis settings object
    var analysisSettings = analyzer.DefaultSettings;
    analysisSettings.ConfigFile = configFile;
    analysisSettings.Configuration = configuration;
    analysisSettings.AnalysisOutputDirectory = outputDirectory;
    analysisSettings.AnalysisTempDirectory = tempFilesDirectory;
    analysisSettings.AnalysisDataSaveBehavior = saveIntermediateDataFiles;
    analysisSettings.AnalysisImageSaveBehavior = saveSonogramsImages;
    analysisSettings.AnalysisChannelSelection = arguments.Channels;
    analysisSettings.AnalysisMixDownToMono = arguments.MixDownToMono;

    // segment duration: config value, falling back to the analyzer default, then to 1 minute
    var segmentDuration = configuration.SegmentDuration?.Seconds();
    if (!segmentDuration.HasValue)
    {
        segmentDuration = analysisSettings.AnalysisMaxSegmentDuration ?? TimeSpan.FromMinutes(1);
        Log.Warn(
            $"Can't read `{nameof(AnalyzerConfig.SegmentDuration)}` from config file. "
            + $"Default value of {segmentDuration} used)");
    }

    analysisSettings.AnalysisMaxSegmentDuration = segmentDuration.Value;

    // segment overlap: config value, falling back to the analyzer default
    var segmentOverlap = configuration.SegmentOverlap?.Seconds();
    if (!segmentOverlap.HasValue)
    {
        segmentOverlap = analysisSettings.SegmentOverlapDuration;
        Log.Warn(
            $"Can't read `{nameof(AnalyzerConfig.SegmentOverlap)}` from config file. "
            + $"Default value of {segmentOverlap} used)");
    }

    analysisSettings.SegmentOverlapDuration = segmentOverlap.Value;

    // set target sample rate
    var resampleRate = configuration.ResampleRate;
    if (!resampleRate.HasValue)
    {
        resampleRate = analysisSettings.AnalysisTargetSampleRate ?? AppConfigHelper.DefaultTargetSampleRate;
        Log.Warn(
            $"Can't read {nameof(configuration.ResampleRate)} from config file. "
            + $"Default value of {resampleRate} used)");
    }

    analysisSettings.AnalysisTargetSampleRate = resampleRate;

    Log.Info(
        $"{nameof(configuration.SegmentDuration)}={segmentDuration}, "
        + $"{nameof(configuration.SegmentOverlap)}={segmentOverlap}, "
        + $"{nameof(configuration.ResampleRate)}={resampleRate}");

    // 7. ####################################### DO THE ANALYSIS ###################################
    LoggedConsole.WriteLine("START ANALYSIS ...");
    var analyserResults = analysisCoordinator.Run(fileSegment, analyzer, analysisSettings);

    // ##############################################################################################
    // 8. PROCESS THE RESULTS
    LoggedConsole.WriteLine(string.Empty);
    LoggedConsole.WriteLine("START PROCESSING RESULTS ...");
    if (analyserResults == null)
    {
        LoggedConsole.WriteErrorLine("###################################################\n");
        LoggedConsole.WriteErrorLine("The Analysis Run Coordinator has returned a null result.");
        LoggedConsole.WriteErrorLine("###################################################\n");
        throw new AnalysisOptionDevilException();
    }

    // Merge and correct main result types
    EventBase[] mergedEventResults = ResultsTools.MergeResults(analyserResults, ar => ar.Events, ResultsTools.CorrectEvent);
    SummaryIndexBase[] mergedIndicesResults = ResultsTools.MergeResults(analyserResults, ar => ar.SummaryIndices, ResultsTools.CorrectSummaryIndex);
    SpectralIndexBase[] mergedSpectralIndexResults = ResultsTools.MergeResults(analyserResults, ar => ar.SpectralIndices, ResultsTools.CorrectSpectrumIndex);

    // not an exceptional state, do not throw exception
    if (mergedEventResults != null && mergedEventResults.Length == 0)
    {
        LoggedConsole.WriteWarnLine("The analysis produced no EVENTS (mergedResults had zero count)");
    }

    if (mergedIndicesResults != null && mergedIndicesResults.Length == 0)
    {
        LoggedConsole.WriteWarnLine("The analysis produced no Summary INDICES (mergedResults had zero count)");
    }

    if (mergedSpectralIndexResults != null && mergedSpectralIndexResults.Length == 0)
    {
        LoggedConsole.WriteWarnLine("The analysis produced no Spectral INDICES (merged results had zero count)");
    }

    // 9. CREATE SUMMARY INDICES IF NECESSARY (FROM EVENTS)
#if DEBUG
    // get the duration of the original source audio file - need this to convert Events datatable to Indices Datatable
    var audioUtility = new MasterAudioUtility(tempFilesDirectory);
    var mimeType = MediaTypes.GetMediaType(sourceAudio.Extension);
    var sourceInfo = audioUtility.Info(sourceAudio);

    // updated by reference all the way down in LocalSourcePreparer
    Debug.Assert(fileSegment.TargetFileDuration == sourceInfo.Duration);
#endif

    var duration = fileSegment.TargetFileDuration.Value;

    ResultsTools.ConvertEventsToIndices(
        analyzer,
        mergedEventResults,
        ref mergedIndicesResults,
        duration,
        configuration.EventThreshold);
    int eventsCount = mergedEventResults?.Length ?? 0;
    int numberOfRowsOfIndices = mergedIndicesResults?.Length ?? 0;

    // 10. Allow analysers to post-process
    // TODO: remove results directory if possible
    var instanceOutputDirectory = AnalysisCoordinator.GetNamedDirectory(analysisSettings.AnalysisOutputDirectory, analyzer);

    // 11. IMPORTANT - this is where IAnalyser2's post processor gets called.
    // Produces all spectrograms and images of SPECTRAL INDICES.
    // Long duration spectrograms are drawn IFF analysis type is Towsey.Acoustic
    analyzer.SummariseResults(analysisSettings, fileSegment, mergedEventResults, mergedIndicesResults, mergedSpectralIndexResults, analyserResults);

    // 12. SAVE THE RESULTS
    string fileNameBase = Path.GetFileNameWithoutExtension(sourceAudio.Name);

    var eventsFile = ResultsTools.SaveEvents(analyzer, fileNameBase, instanceOutputDirectory, mergedEventResults);
    var indicesFile = ResultsTools.SaveSummaryIndices(analyzer, fileNameBase, instanceOutputDirectory, mergedIndicesResults);
    var spectraFile = ResultsTools.SaveSpectralIndices(analyzer, fileNameBase, instanceOutputDirectory, mergedSpectralIndexResults);

    // 13. THIS IS WHERE SUMMARY INDICES ARE PROCESSED
    // Convert summary indices to black and white tracks image
    if (mergedIndicesResults == null)
    {
        Log.Info("No summary indices produced");
    }
    else
    {
        if (indicesPropertiesConfig == null || !indicesPropertiesConfig.Exists)
        {
            throw new InvalidOperationException("Cannot process indices without an index configuration file, the file could not be found!");
        }

        // this arbitrary amount of data.
        if (mergedIndicesResults.Length > 5000)
        {
            Log.Warn("Summary Indices Image not able to be drawn - there are too many indices to render");
        }
        else
        {
            var basename = Path.GetFileNameWithoutExtension(fileNameBase);
            string imageTitle = $"SOURCE:{basename}, {Meta.OrganizationTag}; ";

            // Draw Tracks-Image of Summary indices
            // set time scale resolution for drawing of summary index tracks
            TimeSpan timeScale = TimeSpan.FromSeconds(0.1);
            Image<Rgb24> tracksImage = IndexDisplay.DrawImageOfSummaryIndices(
                IndexProperties.GetIndexProperties(indicesPropertiesConfig),
                indicesFile,
                imageTitle,
                timeScale,
                fileSegment.TargetFileStartDate);
            var imagePath = FilenameHelpers.AnalysisResultPath(instanceOutputDirectory, basename, "SummaryIndices", ImageFileExt);
            tracksImage.Save(imagePath);
        }
    }

    // 14. wrap up, write stats
    LoggedConsole.WriteLine("INDICES CSV file(s) = " + (indicesFile?.Name ?? "<<No indices result, no file!>>"));
    LoggedConsole.WriteLine("\tNumber of rows (i.e. minutes) in CSV file of indices = " + numberOfRowsOfIndices);
    LoggedConsole.WriteLine(string.Empty);

    if (eventsFile == null)
    {
        LoggedConsole.WriteLine("An Events CSV file was NOT returned.");
    }
    else
    {
        LoggedConsole.WriteLine("EVENTS CSV file(s) = " + eventsFile.Name);
        LoggedConsole.WriteLine("\tNumber of events = " + eventsCount);
    }

    Log.Success($"Analysis Complete.\nSource={sourceAudio.Name}\nOutput={instanceOutputDirectory.FullName}");
}
/// <summary>
/// THE KEY ANALYSIS METHOD
/// Detects Lewin's Rail calls by cross-correlating the average energy of an upper and a lower frequency
/// band of a standard spectrogram, scoring windows whose dominant cross-correlation period falls inside
/// the configured period range, and converting the resulting score track into acoustic events.
/// </summary>
/// <param name="recording">The audio segment to analyse; returns null when this is null.</param>
/// <param name="sonoConfig">Spectrogram configuration (window size etc.) used to build the sonogram.</param>
/// <param name="lrConfig">Lewins Rail parameters: band limits, event/intensity thresholds, duration and period ranges.</param>
/// <param name="returnDebugImage">When true, a composite debug image of the score plots is also returned.</param>
/// <param name="segmentStartOffset">Offset of this segment within the source recording; added to event times.</param>
/// <returns>Tuple of (sonogram, hits matrix, intensity score track, predicted events, debug image or null).</returns>
private static Tuple<BaseSonogram, double[,], double[], List<AcousticEvent>, Image> Analysis(
    AudioRecording recording,
    SonogramConfig sonoConfig,
    LewinsRailConfig lrConfig,
    bool returnDebugImage,
    TimeSpan segmentStartOffset)
{
    if (recording == null)
    {
        LoggedConsole.WriteLine("AudioRecording == null. Analysis not possible.");
        return(null);
    }

    int sr = recording.SampleRate;

    int upperBandMinHz = lrConfig.UpperBandMinHz;
    int upperBandMaxHz = lrConfig.UpperBandMaxHz;
    int lowerBandMinHz = lrConfig.LowerBandMinHz;
    int lowerBandMaxHz = lrConfig.LowerBandMaxHz;

    //double decibelThreshold = lrConfig.DecibelThreshold; //dB
    //int windowSize = lrConfig.WindowSize;
    double eventThreshold = lrConfig.EventThreshold; //in 0-1
    double minDuration = lrConfig.MinDuration; // seconds
    double maxDuration = lrConfig.MaxDuration; // seconds
    double minPeriod = lrConfig.MinPeriod; // seconds
    double maxPeriod = lrConfig.MaxPeriod; // seconds

    //double freqBinWidth = sr / (double)windowSize;
    double freqBinWidth = sr / (double)sonoConfig.WindowSize;

    //i: MAKE SONOGRAM
    // NOTE(review): framesPerSecond is set equal to freqBinWidth - this equality only holds when the
    // frame step equals the window size (no overlap); confirm sonoConfig guarantees that here.
    double framesPerSecond = freqBinWidth;

    //the Xcorrelation-FFT technique requires number of bins to scan to be power of 2.
    //assuming sr=17640 and window=1024, then 64 bins span 1100 Hz above the min Hz level. i.e. 500 to 1600
    //assuming sr=17640 and window=1024, then 128 bins span 2200 Hz above the min Hz level. i.e. 500 to 2700
    int upperBandMinBin = (int)Math.Round(upperBandMinHz / freqBinWidth) + 1;
    int upperBandMaxBin = (int)Math.Round(upperBandMaxHz / freqBinWidth) + 1;
    int lowerBandMinBin = (int)Math.Round(lowerBandMinHz / freqBinWidth) + 1;
    int lowerBandMaxBin = (int)Math.Round(lowerBandMaxHz / freqBinWidth) + 1;

    BaseSonogram sonogram = new SpectrogramStandard(sonoConfig, recording.WavReader);
    int rowCount = sonogram.Data.GetLength(0);
    int colCount = sonogram.Data.GetLength(1);

    //ALTERNATIVE IS TO USE THE AMPLITUDE SPECTRUM
    //var results2 = DSP_Frames.ExtractEnvelopeAndFFTs(recording.GetWavReader().Samples, sr, frameSize, windowOverlap);
    //double[,] matrix = results2.Item3; //amplitude spectrogram. Note that column zero is the DC or average energy value and can be ignored.
    //double[] avAbsolute = results2.Item1; //average absolute value over the minute recording
    ////double[] envelope = results2.Item2;
    //double windowPower = results2.Item4;

    // average energy per frame within each of the two frequency bands
    double[] lowerArray = MatrixTools.GetRowAveragesOfSubmatrix(sonogram.Data, 0, lowerBandMinBin, rowCount - 1, lowerBandMaxBin);
    double[] upperArray = MatrixTools.GetRowAveragesOfSubmatrix(sonogram.Data, 0, upperBandMinBin, rowCount - 1, upperBandMaxBin);

    int step = (int)Math.Round(framesPerSecond); //take one second steps
    int stepCount = rowCount / step;
    int sampleLength = 64; //64 frames = 3.7 seconds. Suitable for Lewins Rail.
    double[] intensity = new double[rowCount];
    double[] periodicity = new double[rowCount];

    //######################################################################
    //ii: DO THE ANALYSIS AND RECOVER SCORES
    for (int i = 0; i < stepCount; i++)
    {
        int start = step * i;
        double[] lowerSubarray = DataTools.Subarray(lowerArray, start, sampleLength);
        double[] upperSubarray = DataTools.Subarray(upperArray, start, sampleLength);

        // stop when the remaining data is shorter than one full sample window
        if (lowerSubarray.Length != sampleLength || upperSubarray.Length != sampleLength)
        {
            break;
        }

        var spectrum = AutoAndCrossCorrelation.CrossCorr(lowerSubarray, upperSubarray);
        int zeroCount = 3;
        for (int s = 0; s < zeroCount; s++)
        {
            spectrum[s] = 0.0; //in real data these bins are dominant and hide other frequency content
        }

        spectrum = DataTools.NormaliseArea(spectrum);
        int maxId = DataTools.GetMaxIndex(spectrum);
        double period = 2 * sampleLength / (double)maxId / framesPerSecond; //convert maxID to period in seconds

        // ignore windows whose dominant periodicity is outside the configured call period range
        if (period < minPeriod || period > maxPeriod)
        {
            continue;
        }

        // lay down score for sample length
        for (int j = 0; j < sampleLength; j++)
        {
            // keep the maximum score seen at each frame across overlapping windows
            if (intensity[start + j] < spectrum[maxId])
            {
                intensity[start + j] = spectrum[maxId];
            }

            periodicity[start + j] = period;
        }
    }

    //######################################################################
    //iii: CONVERT SCORES TO ACOUSTIC EVENTS
    intensity = DataTools.filterMovingAverage(intensity, 5);
    var predictedEvents = AcousticEvent.ConvertScoreArray2Events(
        intensity,
        lowerBandMinHz,
        upperBandMaxHz,
        sonogram.FramesPerSecond,
        freqBinWidth,
        eventThreshold,
        minDuration,
        maxDuration,
        segmentStartOffset);
    CropEvents(predictedEvents, upperArray, segmentStartOffset);
    var hits = new double[rowCount, colCount];

    //######################################################################

    var scorePlot = new Plot("L.pect", intensity, lrConfig.IntensityThreshold);
    Image debugImage = null;
    if (returnDebugImage)
    {
        // display a variety of debug score arrays
        double[] normalisedScores;
        double normalisedThreshold;
        DataTools.Normalise(intensity, lrConfig.DecibelThreshold, out normalisedScores, out normalisedThreshold);
        var intensityPlot = new Plot("Intensity", normalisedScores, normalisedThreshold);
        DataTools.Normalise(periodicity, 10, out normalisedScores, out normalisedThreshold);
        var periodicityPlot = new Plot("Periodicity", normalisedScores, normalisedThreshold);
        var debugPlots = new List<Plot> { scorePlot, intensityPlot, periodicityPlot };
        debugImage = DrawDebugImage(sonogram, predictedEvents, debugPlots, hits);
    }

    return(Tuple.Create(sonogram, hits, intensity, predictedEvents, debugImage));
} //Analysis()
// Exercises EventStatisticsCalculate.AnalyzeAudioEvent on a synthetic signal and
// verifies the returned acoustic statistics against known expected values.
// The signal is three concatenated sections: 28 s of a single 500 Hz sine tone,
// then 4 s of a five-harmonic chord, then another 28 s of the 500 Hz tone.
// The analyzed event is the 4 s chord section.
public void TestCalculateEventStatistics()
{
    const int sampleRate = 22050;
    const double outerDuration = 28;
    int[] toneOnly = { 500 };
    int[] chord = { 500, 1000, 2000, 4000, 8000 };

    var head = DspFilters.GenerateTestSignal(sampleRate, outerDuration, toneOnly, WaveType.Sine);
    var middle = DspFilters.GenerateTestSignal(sampleRate, 4, chord, WaveType.Sine);
    var tail = DspFilters.GenerateTestSignal(sampleRate, outerDuration, toneOnly, WaveType.Sine);
    var samples = DataTools.ConcatenateVectors(head, middle, tail);
    var recording = new AudioRecording(new WavReader(samples, 1, 16, sampleRate));

    // this value is fake, but we set it to ensure output values are calculated correctly w.r.t. segment start
    var segmentOffset = 547.123.Seconds();

    // event bounds: the chord section, expressed relative to the fake segment start
    var start = TimeSpan.FromSeconds(28) + segmentOffset;
    var end = TimeSpan.FromSeconds(32) + segmentOffset;
    var lowFreq = 1500.0;
    var topFreq = 8500.0;

    var statsConfig = new EventStatisticsConfiguration
    {
        FrameSize = 512,
        FrameStep = 512,
    };

    var stats = EventStatisticsCalculate.AnalyzeAudioEvent(
        recording,
        (start, end).AsInterval(),
        (lowFreq, topFreq).AsInterval(),
        statsConfig,
        segmentOffset);

    LoggedConsole.WriteLine($"Stats: Temporal entropy = {stats.TemporalEnergyDistribution:f4}");
    LoggedConsole.WriteLine($"Stats: Spectral entropy = {stats.SpectralEnergyDistribution:f4}");
    LoggedConsole.WriteLine($"Stats: Spectral centroid= {stats.SpectralCentroid}");
    LoggedConsole.WriteLine($"Stats: DominantFrequency= {stats.DominantFrequency}");

    // energy distribution and spectral shape of the chord section
    Assert.AreEqual(0.0, stats.TemporalEnergyDistribution, 1E-4);
    Assert.AreEqual(0.6062, stats.SpectralEnergyDistribution, 1E-4);
    Assert.AreEqual(6687, stats.SpectralCentroid);
    Assert.AreEqual(8003, stats.DominantFrequency);

    // the event's frequency band is echoed back unchanged
    Assert.AreEqual(1500, stats.LowFrequencyHertz);
    Assert.AreEqual(8500, stats.HighFrequencyHertz);

    // start/end times must be reported relative to the recording, i.e. include the segment offset
    Assert.AreEqual(28.Seconds() + segmentOffset, stats.EventStartSeconds.Seconds());
    Assert.AreEqual(32.Seconds() + segmentOffset, stats.EventEndSeconds.Seconds());
    Assert.AreEqual(28.Seconds() + segmentOffset, stats.ResultStartSeconds.Seconds());

    /*
     * // Assume linear scale.
     * int nyquist = sampleRate / 2;
     * var freqScale = new FrequencyScale(nyquist: nyquist, frameSize: statsConfig.FrameSize, hertzLinearGridInterval: 1000);
     *
     * var sonoConfig = new SonogramConfig
     * {
     *     WindowSize = statsConfig.FrameSize,
     *     WindowStep = statsConfig.FrameSize,
     *     WindowOverlap = 0.0,
     *     SourceFName = "SineSignal3",
     *     NoiseReductionType = NoiseReductionType.Standard,
     *     NoiseReductionParameter = 0.12,
     * };
     * var sonogram = new AmplitudeSonogram(sonoConfig, recording.WavReader);
     * var image = sonogram.GetImage();
     * string title = $"Spectrogram of Harmonics: SR={sampleRate} Window={freqScale.WindowSize}";
     * image = sonogram.GetImageFullyAnnotated(image, title, freqScale.GridLineLocations);
     * string path = ;
     * image.Save(path);
     *
     * // get spectrum from row 1300
     * var normalisedIndex = DataTools.normalise(MatrixTools.GetRow(sonogram.Data, 1300));
     * var image2 = GraphsAndCharts.DrawGraph("SPECTRUM", normalisedIndex, 100);
     * string path2 = ;
     * image2.Save(path2);
     */
}
/// <summary>
/// Draws a single long-duration "EASY" false-colour image from consecutive days of
/// pre-concatenated 24-hour summary-index CSV files. Each day becomes a horizontal
/// strip of pixels (one pixel column per minute), coloured by three selected summary
/// indices mapped to R, G and B. Month/year grid lines, sunrise/sunset tracks,
/// time scales and a title bar are added, and the composite is saved as a PNG.
/// </summary>
/// <param name="arguments">Command-line arguments: input directories, output directory,
/// date range, file filter, index-properties config, site name, and sunrise data file.</param>
public static void Execute(Arguments arguments)
{
    var inputDirs = arguments.InputDataDirectories.Select(FileInfoExtensions.ToDirectoryInfo);
    var output = arguments.OutputDirectory.ToDirectoryInfo();

    // Echo the run configuration to the console.
    string date = "# DATE AND TIME: " + DateTime.Now;
    LoggedConsole.WriteLine("\n# DRAW an EASY IMAGE from consecutive days of SUMMARY INDICES in CSV files.");
    LoggedConsole.WriteLine("# IT IS ASSUMED THAT THE CSV files are already concatenated into 24 hour files.");
    LoggedConsole.WriteLine(date);
    LoggedConsole.WriteLine("# Summary Index.csv files are in directories:");
    foreach (DirectoryInfo dir in inputDirs)
    {
        LoggedConsole.WriteLine(" {0}", dir.FullName);
    }

    LoggedConsole.WriteLine("# Output directory: " + output);
    if (arguments.StartDate == null)
    {
        LoggedConsole.WriteLine("# Start date = NULL (No argument provided). Will revise start date ....");
    }
    else
    {
        LoggedConsole.WriteLine("# Start date = " + arguments.StartDate.ToString());
    }

    if (arguments.EndDate == null)
    {
        LoggedConsole.WriteLine("# End date = NULL (No argument provided). Will revise end date ....");
    }
    else
    {
        LoggedConsole.WriteLine("# End date = " + arguments.EndDate.ToString());
    }

    LoggedConsole.WriteLine("# FILE FILTER = " + arguments.FileFilter);
    LoggedConsole.WriteLine();

    // PATTERN SEARCH FOR SUMMARY INDEX FILES.
    //string pattern = "*__Towsey.Acoustic.Indices.csv";
    FileInfo[] csvFiles = IndexMatrices.GetFilesInDirectories(inputDirs.ToArray(), arguments.FileFilter);

    //LoggedConsole.WriteLine("# Subdirectories Count = " + subDirectories.Length);
    LoggedConsole.WriteLine("# SummaryIndexFiles.csv Count = " + csvFiles.Length);

    if (csvFiles.Length == 0)
    {
        LoggedConsole.WriteErrorLine("\n\nWARNING from method DrawEasyImage.Execute():");
        LoggedConsole.WriteErrorLine(" No SUMMARY index files were found.");
        LoggedConsole.WriteErrorLine(" RETURNING EMPTY HANDED!");
        return;
    }

    // Sort the files by date and return as a dictionary: sortedDictionaryOfDatesAndFiles<DateTimeOffset, FileInfo>
    //var sortedDictionaryOfDatesAndFiles = LDSpectrogramStitching.FilterFilesForDates(csvFiles, arguments.TimeSpanOffsetHint);

    // calculate new start date if passed value = null.
    // NOTE(review): despite the comment above, null dates are not actually revised here;
    // the casts below will throw InvalidOperationException if StartDate or EndDate is null — TODO confirm intended.
    DateTimeOffset? startDate = arguments.StartDate;
    DateTimeOffset? endDate = arguments.EndDate;

    TimeSpan totalTimespan = (DateTimeOffset)endDate - (DateTimeOffset)startDate;
    int dayCount = totalTimespan.Days + 1; // assume last day has full 24 hours of recording available.

    LoggedConsole.WriteLine("\n# Start date = " + startDate.ToString());
    LoggedConsole.WriteLine("# End date = " + endDate.ToString());
    LoggedConsole.WriteLine(string.Format("# Elapsed time = {0:f1} hours", dayCount * 24));
    LoggedConsole.WriteLine("# Day count = " + dayCount + " (inclusive of start and end days)");
    LoggedConsole.WriteLine("# Time Zone = " + arguments.TimeSpanOffsetHint.ToString());

    // create top level output directory if it does not exist.
    DirectoryInfo opDir = output;
    if (!opDir.Exists)
    {
        opDir.Create();
    }

    // SET UP DEFAULT SITE LOCATION INFO -- DISCUSS IWTH ANTHONY
    // The following location data is used only to draw the sunrise/sunset tracks on images.
    double? latitude = null;
    double? longitude = null;
    var siteDescription = new SiteDescription();
    siteDescription.SiteName = arguments.FileStemName;
    siteDescription.Latitude = latitude;
    siteDescription.Longitude = longitude;

    // the following required if drawing the index images
    FileInfo indexPropertiesConfig = null;

    // require IndexGenerationData and indexPropertiesConfig for drawing
    //indexGenerationData = IndexGenerationData.GetIndexGenerationData(csvFiles[0].Directory);
    indexPropertiesConfig = arguments.IndexPropertiesConfig.ToFileInfo();
    Dictionary<string, IndexProperties> listOfIndexProperties = IndexProperties.GetIndexProperties(indexPropertiesConfig);

    // Read the first CSV only to recover the column (index) names.
    Tuple<List<string>, List<double[]>> tuple = CsvTools.ReadCSVFile(csvFiles[0].FullName);
    var names = tuple.Item1;

    // Column indices of the three summary indices mapped to R, G, B.
    // default EASY indices
    int redID = 3; // backgroundNoise
    int grnID = 5; // avSNROfActiveframes
    int bluID = 7; // events per second
    string rep = @"bgn-avsnr-evn";

    // NOTE(review): the two constant-condition blocks below are manual research toggles;
    // only the if (true) branch runs, so the final mapping is LF/MF/HF.
    // ACI Ht Hpeaks EASY indices
    if (false)
    {
        redID = 11; // ACI
        grnID = 12; // Ht

        //bluID = 13; // HavgSp
        //bluID = 14; // Hvariance
        //bluID = 15; // Hpeaks
        bluID = 16; // Hcov

        //bluID = 7; // SPT
        rep = @"aci-ht-hcov";

        //rep = @"aci-ht-spt";
    }

    // LF, MF, HF
    if (true)
    {
        redID = 10; // LF
        grnID = 9; // MF
        bluID = 8; // HF
        rep = @"lf-mf-hf";
    }

    IndexProperties redIndexProps = listOfIndexProperties[names[redID]];
    IndexProperties grnIndexProps = listOfIndexProperties[names[grnID]];
    IndexProperties bluIndexProps = listOfIndexProperties[names[bluID]];

    // Image layout: each day is dayPixelHeight rows tall; one column per minute of the day.
    int dayPixelHeight = 4;
    int rowCount = (dayPixelHeight * dayCount) + 35; // +30 for grid lines
    int colCount = 1440;
    var bitmap = new Image<Rgb24>(colCount, rowCount);
    var colour = Color.Yellow; // NOTE(review): appears unused below — TODO confirm before removing.
    int currentRow = 0;
    var oneDay = TimeSpan.FromHours(24);
    int graphWidth = colCount;
    int trackHeight = 20;
    var stringFont = Drawing.Arial8;
    string[] monthNames = { "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec" };

    // for drawing the y-axis scale
    int scaleWidth = trackHeight + 7;
    var yAxisScale = new Image<Rgb24>(scaleWidth, rowCount + (2 * trackHeight));

    // NOTE(review): the entire per-day read/draw loop runs inside this Mutate callback.
    // It mutates the outer bitmap and currentRow via closure; only g.DrawText targets yAxisScale's context.
    yAxisScale.Mutate(g =>
    {
        g.Clear(Color.Black);

        // loop over days
        for (int d = 0; d < dayCount; d++)
        {
            var thisday = ((DateTimeOffset)startDate).AddDays(d);

            // On the first of each month draw a two-row grey grid line on both images
            // and label the month (plus year in January).
            if (thisday.Day == 1)
            {
                int nextRow = currentRow + 1;
                for (int c = 0; c < colCount; c++)
                {
                    bitmap[c, currentRow] = Color.Gray;
                    bitmap[c, nextRow] = Color.Gray;
                }

                for (int c = 0; c < scaleWidth; c++)
                {
                    yAxisScale[c, currentRow + trackHeight] = Color.Gray;
                    yAxisScale[c, nextRow + trackHeight] = Color.Gray;
                }

                string month = monthNames[thisday.Month - 1];
                if (thisday.Month == 1) // January
                {
                    g.DrawText(thisday.Year.ToString(), stringFont, Color.White, new PointF(0, nextRow + trackHeight + 1)); //draw time
                    g.DrawText(month, stringFont, Color.White, new PointF(1, nextRow + trackHeight + 11)); //draw time
                }
                else
                {
                    g.DrawText(month, stringFont, Color.White, new PointF(1, nextRow + trackHeight + 1)); //draw time
                }

                currentRow += 2;
            }

            // get the exact date and time
            LoggedConsole.WriteLine($"READING DAY {d + 1} of {dayCount}: {thisday}");

            // CREATE DAY LEVEL OUTPUT DIRECTORY for this day
            // NOTE(review): dateString is computed but not used below — TODO confirm.
            string dateString = $"{thisday.Year}{thisday.Month:D2}{thisday.Day:D2}";
            tuple = CsvTools.ReadCSVFile(csvFiles[d].FullName);
            var arrays = tuple.Item2;
            var redArray = arrays[redID];
            var grnArray = arrays[grnID];
            var bluArray = arrays[bluID];

            // NormaliseMatrixValues the indices
            redArray = DataTools.NormaliseInZeroOne(redArray, redIndexProps.NormMin, redIndexProps.NormMax);
            grnArray = DataTools.NormaliseInZeroOne(grnArray, grnIndexProps.NormMin, grnIndexProps.NormMax);
            bluArray = DataTools.NormaliseInZeroOne(bluArray, bluIndexProps.NormMin, bluIndexProps.NormMax);

            // Paint the day strip: square each normalised value (gamma-like emphasis),
            // scale to 0..255 and clamp before writing the pixel.
            for (int c = 0; c < colCount; c++)
            {
                for (int r = 0; r < dayPixelHeight; r++)
                {
                    //transformedValue = Math.Sqrt(redArray[c]);
                    var transformedValue = redArray[c] * redArray[c];
                    int redVal = (int)Math.Round(transformedValue * 255);
                    if (redVal < 0)
                    {
                        redVal = 0;
                    }
                    else if (redVal > 255)
                    {
                        redVal = 255;
                    }

                    //transformedValue = Math.Sqrt(grnArray[c]);
                    transformedValue = grnArray[c] * grnArray[c]; // square the value
                    int grnVal = (int)Math.Round(transformedValue * 255);
                    if (grnVal < 0)
                    {
                        grnVal = 0;
                    }
                    else if (grnVal > 255)
                    {
                        grnVal = 255;
                    }

                    //transformedValue = Math.Sqrt(bluArray[c]);
                    transformedValue = bluArray[c] * bluArray[c]; // square the value
                    int bluVal = (int)Math.Round(transformedValue * 255);
                    if (bluVal < 0)
                    {
                        bluVal = 0;
                    }
                    else if (bluVal > 255)
                    {
                        bluVal = 255;
                    }

                    bitmap[c, currentRow + r] = Color.FromRgb((byte)redVal, (byte)grnVal, (byte)bluVal);
                }
            } // over all columns

            currentRow += dayPixelHeight;

            // weekly grey divider line
            if (thisday.Day % 7 == 0)
            {
                for (int c = 0; c < colCount; c++)
                {
                    bitmap[c, currentRow] = Color.Gray;
                }

                currentRow++;
            }
        } // over days
    });

    // draw on civil dawn and dusk lines
    int startdayOfYear = ((DateTimeOffset)startDate).DayOfYear;
    int endDayOfYear = ((DateTimeOffset)endDate).DayOfYear;
    SunAndMoon.AddSunRiseSetLinesToImage(bitmap, arguments.BrisbaneSunriseDatafile.ToFileInfo(), startdayOfYear, endDayOfYear, dayPixelHeight);

    // add the time scales (one above and one below the day strips)
    Image<Rgb24> timeBmp1 = ImageTrack.DrawTimeRelativeTrack(oneDay, graphWidth, trackHeight);
    var imageList = new[] { timeBmp1, bitmap, timeBmp1 };
    Image<Rgb24> compositeBmp1 = (Image<Rgb24>)ImageTools.CombineImagesVertically(imageList);

    imageList = new[] { yAxisScale, compositeBmp1 };
    Image<Rgb24> compositeBmp2 = (Image<Rgb24>)ImageTools.CombineImagesInLine(imageList);

    // indices used for image
    string indicesDescription = $"{redIndexProps.Name}|{grnIndexProps.Name}|{bluIndexProps.Name}";
    string startString = $"{startDate.Value.Year}/{startDate.Value.Month}/{startDate.Value.Day}";
    string endString = $"{endDate.Value.Year}/{endDate.Value.Month}/{endDate.Value.Day}";
    string title = $"EASY: {arguments.FileStemName} From {startString} to {endString} Indices: {indicesDescription}";
    Image<Rgb24> titleBar = ImageTrack.DrawTitleTrack(compositeBmp2.Width, trackHeight, title);
    imageList = new[] { titleBar, compositeBmp2 };
    compositeBmp2 = (Image<Rgb24>)ImageTools.CombineImagesVertically(imageList);

    // Save the final composite image.
    var outputFileName = Path.Combine(opDir.FullName, arguments.FileStemName + "." + rep + ".EASY.png");
    compositeBmp2.Save(outputFileName);
} // Execute()
/// <summary>
/// Compares predicted kiwi-call events against a ground-truth annotation file and
/// tallies true positives, false positives and false negatives. A prediction counts
/// as a TP when a ground-truth event of the same sex starts within [-10 s, +20 s] of
/// the prediction's absolute start time; each matched ground-truth row is consumed
/// (its start time is set to NaN) so it cannot match twice. Unmatched ground-truth
/// rows are appended as FN rows. Recall/specificity are logged and an ROC curve is
/// computed using the normalised event score as threshold.
/// </summary>
/// <param name="fiPredictions">CSV of predicted events (project analysis output format).</param>
/// <param name="fiGroundTruth">CSV of manual annotations in "Andrew's" Raven-style column format.</param>
/// <returns>A DataTable with one row per prediction plus one row per false negative,
/// carrying scores, sexes, quality and TP/FP/FN flags.</returns>
public static DataTable CalculateRecallPrecision(FileInfo fiPredictions, FileInfo fiGroundTruth)
{
    // Column names shared between the ground-truth table and the output table.
    string header_trueSex = "truSex";
    string header_predictedSex = "preSex";
    string header_Harmonics = "Harmonics";
    string header_Quality = "Quality";

    // Output schema: scores first, then sex/quality metadata, then the TP/FP/FN flags.
    string[] ROC_HEADERS =
    {
        AnalysisKeys.EventStartAbs, //typeof(double)
        AnalysisKeys.EventStartMin,
        AnalysisKeys.EventStartSec,
        AnalysisKeys.EventIntensity,
        LSKiwiHelper.key_GRID_SCORE,
        LSKiwiHelper.key_DELTA_SCORE,
        LSKiwiHelper.key_CHIRP_SCORE,
        LSKiwiHelper.key_PEAKS_SNR_SCORE,
        LSKiwiHelper.key_BANDWIDTH_SCORE,
        AnalysisKeys.EventScore,
        AnalysisKeys.EventNormscore,
        header_predictedSex,
        header_Harmonics,
        header_trueSex,
        header_Quality,
        "TP", "FP", "FN",
    };

    //string[] ROC_HEADERS = { "startSec", "min", "secOffset", "intensity", "gridScore", "deltaScore", "chirpScore", "PeaksSnrScore" "bwScore", "comboScore", "normScore", "preSex", "Harmonics", "truSex", "Quality", "TP", "FP", "FN"};

    // Column types matching ROC_HEADERS one-for-one.
    Type[] ROC_COL_TYPES =
    {
        typeof(double), typeof(double), typeof(double), typeof(double), typeof(double), typeof(double), typeof(double),
        typeof(double), typeof(double), typeof(double), typeof(double), typeof(string), typeof(int), typeof(string),
        typeof(int), typeof(int), typeof(int), typeof(int),
    };

    //ANDREW'S HEADERS: Selection, View, Channel, Begin Time (s), End Time (s), Low Freq (Hz), High Freq (Hz), Begin File, Species, Sex, Harmonics, Quality
    Type[] ANDREWS_TYPES =
    {
        typeof(string), typeof(string), typeof(int), typeof(double), typeof(double), typeof(double), typeof(double),
        typeof(string), typeof(string), typeof(string), typeof(int), typeof(int),
    };

    bool isFirstRowHeader = true;
    var dtGroundTruth = CsvTools.ReadCSVToTable(fiGroundTruth.FullName, isFirstRowHeader, ANDREWS_TYPES);
    var dtPredictions = CsvTools.ReadCSVToTable(fiPredictions.FullName, isFirstRowHeader);

    // Merge predictions that abut in time before scoring.
    dtPredictions = LSKiwiHelper.MergeAdjacentPredictions(dtPredictions);

    //var weights = LSKiwiHelper.GetFeatureWeights(); //to try different weightings.

    //string colName = "Species";
    //string value = "LSK";
    //DataTableTools.DeleteRows(dtADResults, colName, value); //delete rows where Species name is not "LSK"

    var dtOutput = DataTableTools.CreateTable(ROC_HEADERS, ROC_COL_TYPES);
    int TP = 0;
    int FP = 0;
    int FN = 0;

    // Pass 1: score every prediction as TP or FP against the ground truth.
    foreach (DataRow myRow in dtPredictions.Rows)
    {
        double myStartSecAbs = (double)myRow[AnalysisKeys.EventStartAbs];
        double startMin = (double)myRow[AnalysisKeys.EventStartMin];
        double startSecOffset = (double)myRow[AnalysisKeys.EventStartSec];
        double intensityScore = (double)myRow[AnalysisKeys.EventIntensity];
        string name = (string)myRow[AnalysisKeys.EventName];

        //double snrScore = (double)myRow[LSKiwiHelper.key_PEAKS_SNR_SCORE];
        //double sdPeakScore = (double)myRow[LSKiwiHelper.key_PEAKS_STD_SCORE]; //standard deviation of peak snr's
        //double periodicityScore = (double)myRow[LSKiwiHelper.key_DELTA_SCORE];
        double gridScore = (double)myRow[LSKiwiHelper.key_GRID_SCORE];
        double deltScore = (double)myRow[LSKiwiHelper.key_DELTA_SCORE];
        double chrpScore = (double)myRow[LSKiwiHelper.key_CHIRP_SCORE];
        double peakSnrScore = (double)myRow[LSKiwiHelper.key_PEAKS_SNR_SCORE]; //average peak
        double bandWidthScore = (double)myRow[LSKiwiHelper.key_BANDWIDTH_SCORE];

        //double comboScore = (double)myRow[LSKiwiHelper.key_COMBO_SCORE];
        double eventScore = (double)myRow[AnalysisKeys.EventScore];
        double normScore = (double)myRow[AnalysisKeys.EventNormscore];

        // Predicted sex is encoded as a "(m)"/"(f)" suffix on the event name.
        string predictedSex;
        if (name.EndsWith("(m)"))
        {
            predictedSex = "M";
        }
        else if (name.EndsWith("(f)"))
        {
            predictedSex = "F";
        }
        else
        {
            predictedSex = "???";
        }

        //List<string[]> excludeRules = LSKiwiHelper.GetExcludeRules();
        //if (FilterEvent(myRow, excludeRules) == null) continue;

        DataRow opRow = dtOutput.NewRow();
        opRow[AnalysisKeys.EventStartAbs] = myStartSecAbs;
        opRow[AnalysisKeys.EventStartMin] = startMin;
        opRow[AnalysisKeys.EventStartSec] = startSecOffset;
        opRow[AnalysisKeys.EventIntensity] = intensityScore;
        opRow[LSKiwiHelper.key_GRID_SCORE] = gridScore;
        opRow[LSKiwiHelper.key_DELTA_SCORE] = deltScore;
        opRow[LSKiwiHelper.key_CHIRP_SCORE] = chrpScore;
        opRow[LSKiwiHelper.key_PEAKS_SNR_SCORE] = peakSnrScore;
        opRow[LSKiwiHelper.key_BANDWIDTH_SCORE] = bandWidthScore;

        //opRow[LSKiwiHelper.key_COMBO_SCORE] = comboScore;
        opRow[AnalysisKeys.EventScore] = eventScore;
        opRow[AnalysisKeys.EventNormscore] = normScore;
        opRow[header_Quality] = 0; //fill in with blanks
        opRow[header_predictedSex] = predictedSex;
        opRow[header_trueSex] = "???";
        opRow["TP"] = 0;
        opRow["FP"] = 0;
        opRow["FN"] = 0;

        // Search ground truth for a matching event: close in time AND same sex.
        bool isTP = false;
        foreach (DataRow trueEvent in dtGroundTruth.Rows)
        {
            double trueStart = (double)trueEvent["Begin Time (s)"];
            string trueSex = (string)trueEvent["Sex"];
            if (trueStart >= myStartSecAbs - 10 && trueStart <= myStartSecAbs + 20 && predictedSex == trueSex) //myStart is close to trueStart AND same sex THERFORE TRUE POSTIIVE
            {
                isTP = true;

                // NOTE(review): destructive marking — the ground-truth table is mutated so
                // a matched annotation cannot be claimed by another prediction.
                trueEvent["Begin Time (s)"] = double.NaN; //mark so that will not use again
                opRow[header_Quality] = trueEvent[header_Quality];
                opRow[header_trueSex] = trueEvent["Sex"];
                opRow[header_Harmonics] = trueEvent[header_Harmonics];
                break;
            }
        } //foreach - AD loop

        if (isTP)
        {
            opRow["TP"] = 1;
            TP++;
        }
        else //FALSE POSITIVE
        {
            opRow["FP"] = 1;
            FP++;
        }

        dtOutput.Rows.Add(opRow);
    } //foreach - MY loop

    //now add in the false negatives
    // Pass 2: any ground-truth row not consumed above (start time still a number) is a FN.
    foreach (DataRow trueEvent in dtGroundTruth.Rows)
    {
        double trueStart = (double)trueEvent["Begin Time (s)"];
        if (!double.IsNaN(trueStart))
        {
            DataRow row = dtOutput.NewRow();
            row[AnalysisKeys.EventStartAbs] = trueStart;
            row[AnalysisKeys.EventStartMin] = (int)(trueStart / 60);
            row[AnalysisKeys.EventStartSec] = trueStart % 60;

            //row[Keys.EVENT_INTENSITY] = 0.0;
            //row[LSKiwiHelper.key_PEAKS_SNR_SCORE] = 0.0;
            //row[LSKiwiHelper.key_PEAKS_STD_SCORE] = 0.0;
            //row[LSKiwiHelper.key_DELTA_SCORE] = 0.0;
            //row[LSKiwiHelper.key_BANDWIDTH_SCORE] = 0.0;
            //row[Keys.EVENT_NORMSCORE] = 0.0;
            //row[LSKiwiHelper.key_NEW_COMBO_SCORE] = 0.0;
            row[header_predictedSex] = "???";
            row["Harmonics"] = trueEvent["Harmonics"];
            row["Quality"] = trueEvent["Quality"];
            row[header_trueSex] = trueEvent["Sex"];
            row["TP"] = 0;
            row["FP"] = 0;
            row["FN"] = 1;
            dtOutput.Rows.Add(row);
            FN++;
        }
    }

    // NOTE(review): these divisions yield NaN/Infinity when TP+FN or TP+FP is zero
    // (empty inputs) — TODO confirm whether that case can occur in practice.
    double recall = TP / (double)(TP + FN);
    double specificity = TP / (double)(TP + FP);
    LoggedConsole.WriteLine("TP={0}, FP={1}, FN={2}", TP, FP, FN);
    LoggedConsole.WriteLine("RECALL={0:f3}, SPECIFICITY={1:f3}", recall, specificity);

    //use normalised score as the threshold to determine area under ROC curve
    int totalPositiveCount = dtGroundTruth.Rows.Count;
    int totalNegativeCount = FP;
    string sortString = AnalysisKeys.EventNormscore + " desc";
    ROCCurve(dtOutput, totalPositiveCount, totalNegativeCount, sortString); //write ROC area above curve

    return (dtOutput);
} //CalculateRecallPrecision()
/// <summary>
/// Research utility with hard-coded paths (Frommolt monitoring data): for each day in a
/// fixed date range, finds the per-channel spectrogram images matching that day, combines
/// the four channels into one day image, inserts a grey "Day missing" placeholder for days
/// with no files, concatenates all day images side by side, stamps a label, and saves the
/// result to the image output directory.
/// </summary>
public static void ConcatenateDays()
{
    // Hard-coded input/output locations for this data set.
    DirectoryInfo parentDir = new DirectoryInfo(@"C:\SensorNetworks\Output\Frommolt");
    DirectoryInfo dataDir = new DirectoryInfo(parentDir + @"\AnalysisOutput\mono");
    var imageDirectory = new DirectoryInfo(parentDir + @"\ConcatImageOutput");

    //string indexPropertiesConfig = @"C:\Work\GitHub\audio-analysis\AudioAnalysis\AnalysisConfigFiles\IndexPropertiesConfigHiRes.yml";

    // Fixed date range of the recording campaign.
    DateTimeOffset? startDate = new DateTimeOffset(2012, 03, 29, 0, 0, 0, TimeSpan.Zero);
    DateTimeOffset? endDate = new DateTimeOffset(2012, 06, 20, 0, 0, 0, TimeSpan.Zero);
    var timeSpanOffsetHint = new TimeSpan(01, 0, 0);

    //string fileSuffix = @"2Maps.png";
    //string fileSuffix = @"ACI-ENT-EVN.png";
    // WARNING: POW was removed in December 2018
    string fileSuffix = @"BGN-POW-EVN.png";

    TimeSpan totalTimespan = (DateTimeOffset)endDate - (DateTimeOffset)startDate;
    int dayCount = totalTimespan.Days + 1; // assume last day has full 24 hours of recording available.

    bool verbose = true;
    if (verbose)
    {
        LoggedConsole.WriteLine("\n# Start date = " + startDate.ToString());
        LoggedConsole.WriteLine("# End date = " + endDate.ToString());
        LoggedConsole.WriteLine($"# Elapsed time = {dayCount * 24:f1} hours");
        LoggedConsole.WriteLine("# Day count = " + dayCount + " (inclusive of start and end days)");
        LoggedConsole.WriteLine("# Time Zone = " + timeSpanOffsetHint.ToString());
    }

    // Locate the per-recording output directories by wildcard pattern.
    //string dirMatch = "Monitoring_Rosin_2012*T*+0200_.merged.wav.channel_0.wav";
    string stem = "Monitoring_Rosin_2012????T??0000+0200_.merged.wav.channel_";
    string dirMatch = stem + "?.wav";
    DirectoryInfo[] subDirectories = dataDir.GetDirectories(dirMatch, SearchOption.AllDirectories);

    string format = "yyyyMMdd";
    string startDay = ((DateTimeOffset)startDate).ToString(format);

    //string fileMatch = stem + "?__" + fileSuffix;
    //FileInfo[] files = IndexMatrices.GetFilesInDirectories(subDirectories, fileMatch);

    // Sort the files by date and return as a dictionary: sortedDictionaryOfDatesAndFiles<DateTimeOffset, FileInfo>
    //var sortedDictionaryOfDatesAndFiles = FileDateHelpers.FilterFilesForDates(files, timeSpanOffsetHint);

    //following needed if a day is missing.
    // defaultDayHeight is updated to the most recent real day image so placeholders match.
    int defaultDayWidth = 20;
    int defaultDayHeight = 300;
    var brush = Color.White;
    Font stringFont = Drawing.Tahoma12;
    var list = new List<Image<Rgb24>>();

    // loop over days
    for (int d = 0; d < dayCount; d++)
    {
        Console.WriteLine($"Day {d} of {dayCount} days");
        var thisday = ((DateTimeOffset)startDate).AddDays(d);
        string date = thisday.ToString(format);
        stem = "Monitoring_Rosin_" + date + "T??0000+0200_.merged.wav.channel_";
        string fileMatch = stem + "?__" + fileSuffix;
        FileInfo[] files = IndexMatrices.GetFilesInDirectories(subDirectories, fileMatch);

        // No files for this day: insert a grey placeholder strip instead.
        if (files.Length == 0)
        {
            Image<Rgb24> gapImage = new Image<Rgb24>(defaultDayWidth, defaultDayHeight);
            gapImage.Mutate(g5 =>
            {
                g5.Clear(Color.Gray);
                g5.DrawText("Day", stringFont, brush, new PointF(2, 5));
                g5.DrawText("missing", stringFont, brush, new PointF(2, 35));
            });
            list.Add(gapImage);
            continue;
        }

        // Sort the files by date and return as a dictionary: sortedDictionaryOfDatesAndFiles<DateTimeOffset, FileInfo>
        //var sortedDictionaryOfDatesAndFiles = FileDateHelpers.FilterFilesForDates(files, timeSpanOffsetHint);
        var image = ConcatenateFourChannelImages(files, imageDirectory, fileSuffix, date);
        defaultDayHeight = image.Height;
        list.Add(image);
    }

    // Stitch all day images horizontally and stamp the index-suffix label at top-left.
    var combinedImage = ImageTools.CombineImagesInLine(list);
    Image<Rgb24> labelImage1 = new Image<Rgb24>(combinedImage.Width, 24);
    labelImage1.Mutate(g1 =>
    {
        g1.Clear(Color.Black);
        g1.DrawText(fileSuffix, stringFont, brush, new PointF(2, 2));
    });

    //labelImage1.Save(Path.Combine(imageDirectory.FullName, suffix1));
    combinedImage.Mutate(g => { g.DrawImage(labelImage1, 0, 0); });

    string fileName = string.Format(startDay + "." + fileSuffix);
    combinedImage.Save(Path.Combine(imageDirectory.FullName, fileName));
}
/// <summary>
/// Initializes a new instance of the <see cref="BaseSonogram"/> class.
/// BASE CONSTRUCTOR
/// This constructor contains all steps required to prepare the amplitude spectrogram.
/// The third boolean parameter is simply a place-filler to ensure a different Constructor signature.
/// from the principle Constructor which follows.
/// </summary>
/// <param name="config">config file to use.</param>
/// <param name="wav">wav.</param>
/// <param name="dummy">filler boolean. Calculate in method.</param>
/// <remarks>
/// NOTE(review): if the signal is shorter than the minimum duration this constructor
/// returns early, leaving the instance only partially initialized — callers should
/// presumably check the state before use; TODO confirm intended contract.
/// </remarks>
public BaseSonogram(SonogramConfig config, WavReader wav, bool dummy)
    : this(config)
{
    // As of 28 March 2017 drop capability to get sub-band of spectrogram because was not being used.
    // can be recovered later if desired.
    //bool doExtractSubband = this.SubBandMinHz > 0 || this.SubBandMaxHz < this.NyquistFrequency;

    this.Duration = wav.Time;
    double minDuration = 0.2;
    if (this.Duration.TotalSeconds < minDuration)
    {
        LoggedConsole.WriteLine("Signal must at least {0} seconds long to produce a sonogram!", minDuration);
        return;
    }

    //set config params to the current recording
    this.SampleRate = wav.SampleRate;
    this.Configuration.Duration = wav.Time;
    this.Configuration.SampleRate = wav.SampleRate; //also set the Nyquist
    this.MaxAmplitude = wav.CalculateMaximumAmplitude();

    var recording = new AudioRecording(wav);

    // Compute frame envelope and FFTs for the whole signal in one pass.
    var fftData = DSP_Frames.ExtractEnvelopeAndFfts(
        recording,
        config.WindowSize,
        config.WindowOverlap,
        this.Configuration.WindowFunction);

    // now recover required data
    //epsilon is a signal dependent minimum amplitude value to prevent possible subsequent log of zero value.
    this.Configuration.epsilon = fftData.Epsilon;
    this.Configuration.WindowPower = fftData.WindowPower;
    this.FrameCount = fftData.FrameCount;
    this.DecibelsPerFrame = fftData.FrameDecibels;

    //init normalised signal energy array but do nothing with it. This has to be done from outside
    this.DecibelsNormalised = new double[this.FrameCount];
    this.Data = fftData.AmplitudeSpectrogram;

    // ENERGY PER FRAME and NORMALISED dB PER FRAME AND SNR
    // currently DoSnr = true by default
    if (config.DoSnr)
    {
        // If the FractionOfHighEnergyFrames PRIOR to noise removal exceeds SNR.FractionalBoundForMode,
        // then Lamel's noise removal algorithm may not work well.
        if (fftData.FractionOfHighEnergyFrames > SNR.FractionalBoundForMode)
        {
            Log.WriteIfVerbose("\nWARNING ##############");
            Log.WriteIfVerbose(
                "\t############### BaseSonogram(): This is a high energy recording. Percent of high energy frames = {0:f0} > {1:f0}%",
                fftData.FractionOfHighEnergyFrames * 100,
                SNR.FractionalBoundForMode * 100);
            Log.WriteIfVerbose("\t############### Noise reduction algorithm may not work well in this instance!\n");
        }

        //AUDIO SEGMENTATION/END POINT DETECTION - based on Lamel et al
        // Setting segmentation/endpoint detection parameters is broken as of September 2014.
        // The next line is a hack replacement
        EndpointDetectionConfiguration.SetDefaultSegmentationConfig();
        this.SigState = EndpointDetectionConfiguration.DetermineVocalisationEndpoints(this.DecibelsPerFrame, this.FrameStep);
    }

    /* AS OF 30 MARCH 2017, NO LONGER IMPLEMENT SUB-BAND THINGS, because not being used for years.
     * // EXTRACT REQUIRED FREQUENCY BAND
     * if (doExtractSubband)
     * {
     *     this.Data = SpectrogramTools.ExtractFreqSubband(
     *         this.Data,
     *         this.subBandMinHz,
     *         this.subBandMaxHz,
     *         this.Configuration.DoMelScale,
     *         this.Configuration.FreqBinCount,
     *         this.FBinWidth);
     *     this.CalculateSubbandSNR(this.Data);
     * }
     */
}