/// <summary>
/// Emits one message at every log level to visually verify logger configuration.
/// Requires that the logging system has already been initialised.
/// </summary>
public static void TestLogging()
{
    // Fail fast if logging has not been configured yet.
    Contract.Requires<InvalidOperationException>(
        configured,
        "The logger system must be initialised before the logging can be tested");

    var logger = LogManager.GetLogger(nameof(Logging));

    // Exercise every level on a named logger, highest severity first.
    logger.Prompt("Log test PROMPT");
    logger.Fatal("Log test FATAL");
    logger.Error("Log test ERROR");
    logger.Warn("Log test WARN");
    logger.Success("Log test SUCCESS");
    logger.Info("Log test INFO");
    logger.Debug("Log test DEBUG");
    logger.Trace("Log test TRACE");
    logger.Verbose("Log test VERBOSE");

    // Exercise the console wrapper and its underlying log side-by-side.
    LoggedConsole.WriteFatalLine("Clean wrapper FATAL", new Exception("I'm a fake"));
    LoggedConsole.Log.Fatal("Clean log FATAL", new Exception("I'm a fake"));
    LoggedConsole.WriteErrorLine("Clean wrapper ERROR");
    LoggedConsole.Log.Error("Clean log ERROR");
    LoggedConsole.WriteWarnLine("Clean wrapper WARN");
    LoggedConsole.Log.Warn("Clean log WARN");
    LoggedConsole.WriteSuccessLine("Clean wrapper SUCCESS");
    LoggedConsole.Log.Success("Clean log SUCCESS");
    LoggedConsole.WriteLine("Clean wrapper INFO");
    LoggedConsole.Log.Info("Clean log INFO");
}
/// <summary>
/// Joins summary indices csv files together.
/// This method merges ALL the passed files of acoustic indices.
/// It is assumed you are concatenating a sequence of consecutive short recordings.
/// </summary>
/// <param name="summaryIndexFiles">the csv files to merge, in chronological order.</param>
/// <param name="opDir">output directory for the file-name listing.</param>
/// <param name="indexGenerationData">supplies the index calculation duration used for the time check.</param>
/// <param name="outputFileBaseName">base name for the JSON listing of source files.</param>
/// <returns>the merged list of summary index rows, or null if nothing was read.</returns>
public static List<SummaryIndexValues> ConcatenateAllSummaryIndexFiles(
    FileInfo[] summaryIndexFiles,
    DirectoryInfo opDir,
    IndexGenerationData indexGenerationData,
    string outputFileBaseName)
{
    var resolution = indexGenerationData.IndexCalculationDuration;
    var summaryIndices = IndexMatrices.ConcatenateSummaryIndexFilesWithTimeCheck(summaryIndexFiles, resolution);

    if (summaryIndices.Count == 0)
    {
        LoggedConsole.WriteErrorLine("WARNING: LDSpectrogramStitching.ConcatenateAllSummaryIndexFiles(): Empty List of SUMMARY indices returned!");
        return null;
    }

    // check length of data and make adjustments if required.
    // NOTHING done with this info at the moment. Could be used to truncate data to 24 hours.
    //int totalRowMinutes = (int)Math.Round(summaryIndices.Count() * resolution.TotalMinutes);

    // record the names of the concatenated data files in a JSON file alongside the output.
    var sourceFileNames = summaryIndices.Select(x => x.FileName).ToArray();
    var listingPath = FilenameHelpers.AnalysisResultPath(opDir, outputFileBaseName, "FileNames", "json");
    Json.Serialise(new FileInfo(listingPath), sourceFileNames);

    return summaryIndices;
}
/// <summary>
/// Initializes a new instance of the <see cref="CubeHelix"/> class,
/// selecting a colour palette by name.
/// </summary>
/// <param name="mode">one of the <see cref="ColorCubeHelix"/> palette names.</param>
public CubeHelix(string mode)
{
    if (mode.Equals(ColorCubeHelix.Default))
    {
        // FIX: removed two unused, never-assigned Hsl locals that dead-ended here.
        // The default cube-helix palette construction remains disabled, as in the
        // original commented-out code below.
        //Hsl colorARgb = new Hsl(300, 0.5, 0.0);
        //Hsl colorBRgb = new Hsl(-240, 0.5, 1.0);
        //CubeHelix(colorARgb, colorBRgb);
        //SetDefaultCubeHelix();
        //string path = @"C:\SensorNetworks\Output\FalseColourSpectrograms\SpectrogramZoom\ZoomImages\testImage.png";
        //TestImage(path);
    }
    else if (mode.Equals(ColorCubeHelix.Grayscale))
    {
        this.ColourPallette = ImageTools.GrayScale();
    }
    else if (mode.Equals(ColorCubeHelix.RedScale))
    {
        this.SetRedScalePallette();
    }
    else if (mode.Equals(ColorCubeHelix.CyanScale))
    {
        this.SetCyanScalePallette();
    }
    else
    {
        LoggedConsole.WriteErrorLine("WARNING: {0} is UNKNOWN COLOUR PALLETTE!", mode);
    }
}
/// <summary>
/// Tests the linear frequency scale using explicit nyquist/frame-size/grid parameters,
/// then compares the resulting grid-line locations and image dimensions to expected values.
/// </summary>
public void LinearFrequencyScale()
{
    var recordingPath = PathHelper.ResolveAsset("Recordings", "BAC2_20071008-085040.wav");
    var opFileStem = "BAC2_20071008";
    var outputDir = this.outputDirectory;
    var outputImagePath = Path.Combine(outputDir.FullName, "LinearScaleSonogram.png");
    var recording = new AudioRecording(recordingPath);

    // specified linear scale
    int nyquist = 11025;
    int frameSize = 1024;
    int hertzInterval = 1000;
    var freqScale = new FrequencyScale(nyquist, frameSize, hertzInterval);
    var fst = freqScale.ScaleType;

    var sonoConfig = new SonogramConfig
    {
        WindowSize = freqScale.FinalBinCount * 2,
        WindowOverlap = 0.2,
        SourceFName = recording.BaseName,
        NoiseReductionType = NoiseReductionType.None,
        NoiseReductionParameter = 0.0,
    };

    var sonogram = new SpectrogramStandard(sonoConfig, recording.WavReader);

    // DO NOISE REDUCTION
    var dataMatrix = SNR.NoiseReduce_Standard(sonogram.Data);
    sonogram.Data = dataMatrix;
    sonogram.Configuration.WindowSize = freqScale.WindowSize;

    var image = sonogram.GetImageFullyAnnotated(sonogram.GetImage(), "SPECTROGRAM: " + fst.ToString(), freqScale.GridLineLocations);
    image.Save(outputImagePath, ImageFormat.Png);

    // DO FILE EQUALITY TEST
    var stemOfExpectedFile = opFileStem + "_LinearScaleGridLineLocations.EXPECTED.json";
    var stemOfActualFile = opFileStem + "_LinearScaleGridLineLocations.ACTUAL.json";

    // Check that freqScale.GridLineLocations are correct
    var expectedFile1 = PathHelper.ResolveAsset("FrequencyScale\\" + stemOfExpectedFile);
    if (!expectedFile1.Exists)
    {
        // FIX: warning text was previously broken by a stray newline inside the literal;
        // normalized to the single-line message used by the other scale tests.
        LoggedConsole.WriteErrorLine("An EXPECTED results file does not exist. Test will fail!");
        LoggedConsole.WriteErrorLine(
            $"If ACTUAL results file is correct, move it to dir `{PathHelper.TestResources}` and change its suffix to <.EXPECTED.json>");
    }

    var resultFile1 = new FileInfo(Path.Combine(outputDir.FullName, stemOfActualFile));
    Json.Serialise(resultFile1, freqScale.GridLineLocations);
    FileEqualityHelpers.TextFileEqual(expectedFile1, resultFile1);

    // Check that image dimensions are correct
    Assert.AreEqual(566, image.Height);
    Assert.AreEqual(1621, image.Width);
}
/// <summary>
/// Initializes a new instance of the <see cref="FrequencyScale"/> class.
/// CONSTRUCTOR: assigns default parameters for the requested scale type.
/// Linear and Mel scales get hard-coded defaults; any other type is assumed
/// to be an octave scale and is delegated to <see cref="OctaveFreqScale.GetOctaveScale"/>.
/// </summary>
/// <param name="fst">the frequency scale type to construct.</param>
public FrequencyScale(FreqScaleType fst)
{
    this.ScaleType = fst;
    if (fst == FreqScaleType.Linear)
    {
        // FIX: corrected "CONSTUCTOR" typo in the warning below.
        LoggedConsole.WriteErrorLine("WARNING: Assigning DEFAULT parameters for Linear FREQUENCY SCALE.");
        LoggedConsole.WriteErrorLine("         Call other CONSTRUCTOR to control linear scale.");
        this.Nyquist = 11025;
        this.WindowSize = 512;
        this.FinalBinCount = 256;
        this.HertzGridInterval = 1000;
        this.LinearBound = this.Nyquist;
        this.BinBounds = this.GetLinearBinBounds();

        // FIX: use FinalBinCount (assigned 256 above) rather than a magic 256 literal.
        this.GridLineLocations = GetLinearGridLineLocations(this.Nyquist, this.HertzGridInterval, this.FinalBinCount);
    }
    else if (fst == FreqScaleType.Mel)
    {
        LoggedConsole.WriteErrorLine("WARNING: Assigning DEFAULT parameters for MEL FREQUENCY SCALE.");
        this.Nyquist = 11025;
        this.WindowSize = 512;
        this.FinalBinCount = 128;
        this.HertzGridInterval = 1000;
        this.LinearBound = this.Nyquist;
        this.GridLineLocations = GetMelGridLineLocations(this.HertzGridInterval, this.Nyquist, this.FinalBinCount);
    }
    else
    {
        // assume octave scale is only other option
        OctaveFreqScale.GetOctaveScale(this);
    }
}
/// <summary>
/// Overlays a matrix of scores on an image, typically a spectrogram image.
/// It is assumed that the spectrogram image is grey scale.
/// NOTE: The score matrix must consist of integers from 0 to 255.
/// NOTE: The image and the score matrix must have the same number of rows and columns.
/// In case of a spectrogram, it is assumed that the rows are frequency bins and the columns are individual spectra.
/// </summary>
/// <param name="bmp">the spectrogram image.</param>
/// <param name="hits">the matrix of scores or hits.</param>
/// <returns>The new image with overlay of scores as red transparency.</returns>
public static Image<Rgb24> OverlayScoresAsRedTransparency(Image<Rgb24> bmp, int[,] hits)
{
    var result = (Image<Rgb24>)bmp.Clone();
    var rowCount = hits.GetLength(0);
    var colCount = hits.GetLength(1);

    // Dimensions must agree; otherwise return the original image untouched.
    if (rowCount != bmp.Height || colCount != bmp.Width)
    {
        LoggedConsole.WriteErrorLine("ERROR: Image and hits matrix do not have the same dimensions.");
        return bmp;
    }

    for (var row = 0; row < rowCount; row++)
    {
        for (var col = 0; col < colCount; col++)
        {
            var score = hits[row, col];
            if (score <= 0)
            {
                continue; // no hit at this cell
            }

            if (score > 255)
            {
                // note: clamp is written back, mutating the caller's matrix (as before)
                score = 255;
                hits[row, col] = score;
            }

            // Replace the red channel with the score; keep green/blue from the source pixel.
            var sourcePixel = bmp[col, row];
            result[col, row] = Color.FromRgb((byte)score, sourcePixel.G, sourcePixel.B);
        }
    }

    return result;
}
/// <summary>
/// Returns the requested column of data from a CSV file and also returns the column header.
/// Rows that are too short for the requested column are recorded as 0.0 with a warning;
/// unparsable cells also become 0.0.
/// </summary>
/// <param name="fileName">path of the CSV file to read.</param>
/// <param name="colNumber">zero-based index of the column to extract.</param>
/// <param name="header">receives the header text of the requested column.</param>
/// <returns>one double per data row (header row excluded).</returns>
public static double[] ReadColumnOfCsvFile(string fileName, int colNumber, out string header)
{
    List<string> lines = FileTools.ReadTextFile(fileName);
    string[] words = lines[0].Split(',');
    header = words[colNumber];

    // -1 because ignore header
    double[] array = new double[lines.Count - 1];

    // read csv data into arrays. Ignore first line = header.
    for (int i = 1; i < lines.Count; i++)
    {
        words = lines[i].Split(',');
        if (words.Length <= colNumber)
        {
            array[i - 1] = 0.0;

            // FIX: message previously lacked the space before "of" ("line 5of CSV file").
            LoggedConsole.WriteErrorLine($"WARNING: Error while reading line {i} of CSV file.");
        }
        else
        {
            array[i - 1] = double.TryParse(words[colNumber], out var value) ? value : 0.0;
        }
    }

    return array;
}
/// <summary>
/// Tests the default linear frequency scale (parameterless defaults), then compares
/// the resulting grid-line locations and image dimensions to expected values.
/// </summary>
public void LinearFrequencyScaleDefault()
{
    // relative path because post-Build command transfers files to ...\Work\GitHub\...\bin\Debug subfolder.
    var recordingPath = @"Recordings\BAC2_20071008-085040.wav";
    var opFileStem = "BAC2_20071008";
    var outputDir = this.outputDirectory;
    var outputImagePath = Path.Combine(outputDir.FullName, "DefaultLinearScaleSonogram.png");
    var recording = new AudioRecording(recordingPath);

    // default linear scale
    var fst = FreqScaleType.Linear;
    var freqScale = new FrequencyScale(fst);

    var sonoConfig = new SonogramConfig
    {
        WindowSize = freqScale.FinalBinCount * 2,
        WindowOverlap = 0.2,
        SourceFName = recording.BaseName,
        NoiseReductionType = NoiseReductionType.None,
        NoiseReductionParameter = 0.0,
    };

    var sonogram = new SpectrogramStandard(sonoConfig, recording.WavReader);
    sonogram.Configuration.WindowSize = freqScale.WindowSize;

    // DO NOISE REDUCTION
    var dataMatrix = SNR.NoiseReduce_Standard(sonogram.Data);
    sonogram.Data = dataMatrix;

    var image = sonogram.GetImageFullyAnnotated(sonogram.GetImage(), "SPECTROGRAM: " + fst.ToString(), freqScale.GridLineLocations);
    image.Save(outputImagePath, ImageFormat.Png);

    // DO UNIT TESTING
    var stemOfExpectedFile = opFileStem + "_DefaultLinearScaleGridLineLocations.EXPECTED.json";
    var stemOfActualFile = opFileStem + "_DefaultLinearScaleGridLineLocations.ACTUAL.json";

    // Check that freqScale.GridLineLocations are correct
    var expectedFile1 = new FileInfo("FrequencyScale\\" + stemOfExpectedFile);
    if (!expectedFile1.Exists)
    {
        // FIX: warning text was previously broken by a stray newline inside the literal;
        // normalized to the single-line message used by the other scale tests.
        LoggedConsole.WriteErrorLine("An EXPECTED results file does not exist. Test will fail!");
        LoggedConsole.WriteErrorLine("If ACTUAL results file is correct, move it to dir <...\\TestResources\\FrequencyScale> and change its suffix to <.EXPECTED.json>");
    }

    var resultFile1 = new FileInfo(Path.Combine(outputDir.FullName, stemOfActualFile));
    Json.Serialise(resultFile1, freqScale.GridLineLocations);
    FileEqualityHelpers.TextFileEqual(expectedFile1, resultFile1);

    // Check that image dimensions are correct
    Assert.AreEqual(310, image.Height);
    Assert.AreEqual(3247, image.Width);
}
/// <summary>
/// Generates a synthetic cosine signal containing five harmonics, builds an amplitude
/// sonogram, and checks that spectral peaks appear in the expected frequency bins.
/// Writes an annotated spectrogram image to a hard-coded local path.
/// </summary>
public static void TestMethod_GenerateSignal1()
{
    const int sampleRate = 22050;
    const double duration = 20; // signal duration in seconds
    const int windowSize = 512;
    int[] harmonics = { 500, 1000, 2000, 4000, 8000 };
    var freqScale = new FrequencyScale(sampleRate / 2, windowSize, 1000);
    string path = @"C:\SensorNetworks\Output\Sonograms\UnitTestSonograms\SineSignal1.png";

    var recording = GenerateTestRecording(sampleRate, duration, harmonics, WaveType.Cosine);

    var sonoConfig = new SonogramConfig
    {
        WindowSize = freqScale.WindowSize,
        WindowOverlap = 0.0,
        SourceFName = "Signal1",
        NoiseReductionType = NoiseReductionType.Standard,
        NoiseReductionParameter = 0.12,
    };

    var sonogram = new AmplitudeSonogram(sonoConfig, recording.WavReader);

    // pick a row, any row
    var spectrum = MatrixTools.GetRow(sonogram.Data, 40);
    spectrum = DataTools.normalise(spectrum);
    var peaks = DataTools.GetPeaks(spectrum, 0.5);

    // report where peaks were detected (skip the two edge bins at either end)
    for (int bin = 2; bin < peaks.Length - 2; bin++)
    {
        if (!peaks[bin])
        {
            continue;
        }

        LoggedConsole.WriteLine($"bin ={freqScale.BinBounds[bin, 0]}, Herz={freqScale.BinBounds[bin, 1]}-{freqScale.BinBounds[bin + 1, 1]} ");
    }

    // the five harmonics should land in these specific bins
    bool allPeaksCorrect = peaks[11] && peaks[22] && peaks[45] && peaks[92] && peaks[185];
    if (allPeaksCorrect)
    {
        LoggedConsole.WriteSuccessLine("Spectral Peaks found at correct places");
    }
    else
    {
        LoggedConsole.WriteErrorLine("Spectral Peaks found at INCORRECT places");
    }

    foreach (int h in harmonics)
    {
        LoggedConsole.WriteLine($"Harmonic {h}Herz should be in bin {freqScale.GetBinIdForHerzValue(h)}");
    }

    // spectrogram without framing, annotation etc
    var image = sonogram.GetImage();
    string title = $"Spectrogram of Harmonics: {DataTools.Array2String(harmonics)} SR={sampleRate} Window={windowSize}";
    image = sonogram.GetImageFullyAnnotated(image, title, freqScale.GridLineLocations);
    image.Save(path);
}
/// <summary>
/// Saves a spectrogram image to the given directory.
/// Viewing the image is no longer supported: if the file was written successfully,
/// a notice is emitted instead of launching a viewer.
/// </summary>
/// <param name="image">the spectrogram image to save.</param>
/// <param name="opDir">output directory.</param>
/// <param name="fName">output file name.</param>
/// <param name="imageViewer">retained for signature compatibility; no longer used.</param>
public static void SaveAndViewSpectrogramImage(Image image, string opDir, string fName, string imageViewer)
{
    var imagePath = Path.Combine(opDir, fName);
    image.Save(imagePath);

    if (new FileInfo(imagePath).Exists)
    {
        LoggedConsole.WriteErrorLine("Showing image is no longer supported");
    }
}
/// <summary>
/// Compress high resolution indices - intended to be used when summarizing results.
/// Summarize method not yet written.
/// Compresses the per-segment spectral index matrices down to a low temporal
/// resolution (default 60 s) and attaches the result to <paramref name="analysisResults"/>.
/// </summary>
/// <param name="analysisResults">receives the compressed spectral indices.</param>
/// <param name="indexResults">the high-resolution per-segment index results.</param>
/// <param name="highResolutionParsedConfiguration">configuration supplying LowResolution and the index calculation duration.</param>
private void SummarizeHighResolutionIndices(
    AnalysisResult2 analysisResults,
    IndexCalculateResult[] indexResults,
    AcousticIndices.AcousticIndicesConfig highResolutionParsedConfiguration)
{
    // NOW COMPRESS THE HI-RESOLUTION SPECTRAL INDICES TO LOW RES
    double lowResolution = highResolutionParsedConfiguration.GetDoubleOrNull("LowResolution") ?? 60.0;
    TimeSpan imageScale = TimeSpan.FromSeconds(lowResolution);
    TimeSpan dataScale = highResolutionParsedConfiguration.IndexCalculationDuration.Seconds();

    var spectraByKey = indexResults
        .Select(icr => icr.SpectralIndexValues)
        .ToArray()
        .ToTwoDimensionalArray(SpectralIndexValues.CachedSelectors, TwoDimensionalArray.Rotate90ClockWise);

    var compressedSpectra = IndexMatrices.CompressIndexSpectrograms(spectraByKey, imageScale, dataScale);

    // check that have not compressed matrices to zero length
    double[,] firstMatrix = compressedSpectra.First().Value;
    if (firstMatrix.GetLength(0) == 0 || firstMatrix.GetLength(1) == 0)
    {
        LoggedConsole.WriteErrorLine("WARNING: SPECTRAL INDEX MATRICES compressed to zero length!!!!!!!!!!!!!!!!!!!!!!!!");
    }

    // Place LOW RESOLUTION SPECTRAL INDICES INTO analysisResults before returning.
    //int windowLength = (int?)highResolutionConfig[AnalysisKeys.FrameLength] ?? IndexCalculate.DefaultWindowSize;
    var indexProperties = highResolutionParsedConfiguration.IndexProperties;
    SpectralIndexValues.CheckExistenceOfSpectralIndexValues(indexProperties);

    // Init a new spectral indices class and populate it with spectral indices
    var lowResSpectra = SpectralIndexValues.ImportFromDictionary(compressedSpectra);
    for (int i = 0; i < lowResSpectra.Length; i++)
    {
        lowResSpectra[i].ResultStartSeconds = (analysisResults.SegmentStartOffset + TimeSpan.FromSeconds(i * lowResolution)).TotalSeconds;
        lowResSpectra[i].SegmentDurationSeconds = imageScale.TotalSeconds;
        lowResSpectra[i].FileName = ((SegmentSettings<object>)analysisResults.SegmentSettings).Segment.SourceMetadata.Identifier;
    }

    // assign to the analysis result
    analysisResults.SpectralIndices = lowResSpectra;

    // TODO TODO TODO
    // ALSO NEED TO COMPRESS THE analysisResults.SummaryIndices To LOW RESOLUTION
    //var summaryIndexValues = new SummaryIndexValues();
    //summaryIndexValues.BackgroundNoise = ETC;
    // ETC
    //var summaryiv = new SummaryIndexValues[1];
    //summaryiv[0] = summaryIndexValues;
    //analysisResults.SummaryIndices = summaryiv;
}
// ##############################################################################################################
// ######################### METHODS FOR STITCHING TNC - EDDIE GAME's DATA
// ######################### CONCATENATE EVERYTHING

/// <summary>
/// RECENT METHOD TO CONCATENATE Spectral INDEX.CSV FILES - August 2015. Revised Septermber 2016
/// Was written to deal with EDDIE GAME PNG data where the files to be concatenated are all in one top level directory.
/// This method merges all files of spectral indices in the passed directories.
/// The total length of the concatenated files can exceed 24 hours - limited by memory!
/// </summary>
/// <param name="directories">top-level directories containing the spectral index csv files.</param>
/// <param name="keys">the spectral index keys to read and concatenate.</param>
/// <param name="indexGenerationData">used for the time check during concatenation.</param>
/// <returns>one matrix per spectral index key, or null if nothing was found.</returns>
public static Dictionary<string, double[,]> ConcatenateAllSpectralIndexFiles(DirectoryInfo[] directories, string[] keys, IndexGenerationData indexGenerationData)
{
    string analysisType = "Towsey.Acoustic";
    var dictionaryOfSpectralIndices = IndexMatrices.GetSpectralIndexFilesAndConcatenate(directories, analysisType, keys, indexGenerationData, true);

    if (dictionaryOfSpectralIndices.Count == 0)
    {
        // FIX: warning previously named a non-existent method "ConcatenateSpectralIndexFiles";
        // nameof keeps the message in sync with this method's actual name.
        LoggedConsole.WriteErrorLine($"WARNING from method LDSpectrogramStitching.{nameof(ConcatenateAllSpectralIndexFiles)}() !!!");
        LoggedConsole.WriteErrorLine("        An empty dictionary of spectral indices was returned !!! ");
        return null;
    }

    // now add in derived indices i.e. POW, NCDI etc
    // dictionaryOfSpectralIndices = IndexMatrices.AddDerivedIndices(dictionaryOfSpectralIndices);
    return dictionaryOfSpectralIndices;
}
/// <summary>
/// Initializes a new instance of the <see cref="TemporalMatrix"/> class.
/// CONSTRUCTOR.
/// </summary>
/// <param name="temporalDirection">must be either "rows" or "columns".</param>
/// <param name="m">the data matrix.</param>
/// <param name="dataScale">time duration represented by one row/column.</param>
/// <exception cref="ArgumentException">when <paramref name="temporalDirection"/> is neither "rows" nor "columns".</exception>
public TemporalMatrix(string temporalDirection, double[,] m, TimeSpan dataScale)
{
    if (temporalDirection.Equals("rows") || temporalDirection.Equals("columns"))
    {
        this.TemporalDirection = temporalDirection;
    }
    else
    {
        LoggedConsole.WriteErrorLine("temporalDirection can have only one of two values: <rows> or <columns>. ");

        // FIX: was `throw new Exception()` with no message, preceded by a pointless
        // null assignment on an instance that never escapes the constructor.
        // ArgumentException is more specific and is still caught by catch (Exception).
        throw new ArgumentException(
            "temporalDirection can have only one of two values: <rows> or <columns>.",
            nameof(temporalDirection));
    }

    this.DataScale = dataScale;
    this.Matrix = m;
}
/// <summary>
/// Initializes a new instance of the <see cref="ColorCubeHelix"/> class,
/// selecting a colour palette by name.
/// </summary>
/// <param name="mode">one of the palette names (Default, Grayscale).</param>
public ColorCubeHelix(string mode)
{
    if (mode.Equals(Default))
    {
        // FIX: removed two unused HslColor locals (c1, c2) that were constructed and
        // immediately discarded. The default palette is intentionally not built here.
        // Previously: new HslColor(300, 0.5, 0.0) and new HslColor(-240, 0.5, 1.0).
    }
    else if (mode.Equals(Grayscale))
    {
        this.colorPalette = ImageTools.GrayScale();
    }
    else
    {
        LoggedConsole.WriteErrorLine("WARNING: {0} is UNKNOWN COLOUR PALLETTE!", mode);
    }
}
/// <summary>
/// Builds a frequency histogram of the data over <paramref name="binCount"/> equal-width bins.
/// NaN values are counted and reported but excluded from the bins.
/// </summary>
/// <param name="data">the samples to histogram.</param>
/// <param name="binCount">number of bins.</param>
/// <param name="binWidth">receives the width of each bin (0.0 when all values are equal).</param>
/// <param name="min">receives the minimum of the data.</param>
/// <param name="max">receives the maximum of the data.</param>
/// <returns>the bin counts.</returns>
public static int[] Histo(double[] data, int binCount, out double binWidth, out double min, out double max)
{
    DataTools.MinMax(data, out min, out max);
    double range = max - min;

    // init freq bin array
    int[] bins = new int[binCount];

    // degenerate case: all values identical - everything lands in bin zero.
    if (range == 0.0)
    {
        binWidth = 0.0;
        bins[0] = data.Length;
        return bins;
    }

    int nanCount = 0;
    binWidth = range / binCount;
    for (int i = 0; i < data.Length; i++)
    {
        double value = data[i];
        if (double.IsNaN(value))
        {
            // FIX: NaN values previously fell through with id == 0 and were
            // silently counted into the first bin; now they are skipped.
            nanCount++;
            continue;
        }

        int id = (int)((value - min) / binWidth);
        if (id >= binCount)
        {
            // the maximum value maps exactly onto binCount; clamp into the last bin.
            id = binCount - 1;
        }

        bins[id]++;
    }

    if (nanCount > 0)
    {
        string msg = $"#### WARNING from Histogram.Histo(): {nanCount}/{data.Length} values were NaN";
        LoggedConsole.WriteErrorLine(msg);
    }

    return bins;
}
/// <summary>
/// Returns the inner product of a sample window and a wavelet of the same length.
/// </summary>
/// <param name="sampleArray">the signal window.</param>
/// <param name="wavelet">the wavelet; must be the same length as the window.</param>
/// <returns>the dot product, or 0.0 (with an error message) when lengths differ.</returns>
public static double GetWaveletCoefficients(double[] sampleArray, double[] wavelet)
{
    int length = sampleArray.Length;
    if (length != wavelet.Length)
    {
        // FIX: corrected "Lenght" typo in the error message.
        LoggedConsole.WriteErrorLine("Length of sample array != length of wavelet array: {0} != {1}", length, wavelet.Length);
        return 0.0;
    }

    double sum = 0.0;
    for (int i = 0; i < length; i++)
    {
        sum += sampleArray[i] * wavelet[i];
    }

    return sum;
}
} // end ReadTextFile()

/// <summary>
/// Returns all CSV data rows whose value in the column headed <paramref name="key"/>
/// equals <paramref name="value"/>.
/// </summary>
/// <param name="fName">path of the CSV file.</param>
/// <param name="key">header name of the column to filter on.</param>
/// <param name="value">the integer value to match.</param>
/// <returns>the matching raw lines; null when the file is empty or the key is absent.</returns>
public static List<string> ReadSelectedLinesOfCsvFile(string fName, string key, int value)
{
    var lines = new List<string>();
    using (TextReader reader = new StreamReader(fName))
    {
        // read header line
        string line = reader.ReadLine();

        // FIX: an empty file previously caused a NullReferenceException on Split.
        if (line == null)
        {
            LoggedConsole.WriteErrorLine("THE FILE <" + fName + "> IS EMPTY");
            return null;
        }

        // determine which CSV column contains the key
        string[] array = line.Split(',');
        int columnID = Array.IndexOf(array, key);

        // the key was not found
        if (columnID == -1)
        {
            LoggedConsole.WriteErrorLine("THE KEY <" + key + "> WAS NOT FOUND IN FILE <" + fName + ">");
            return null;
        }

        // read one line at a time; keep rows whose key column matches the value.
        while ((line = reader.ReadLine()) != null)
        {
            array = line.Split(',');
            if (int.Parse(array[columnID]) == value)
            {
                lines.Add(line);
            }
        } //end while
    } //end using

    return lines;
} // end ReadSelectedLinesOfCsvFile()
/// <summary>
/// Locates an analyzer either by its exact identifier or by the first two
/// dot-separated segments of a partial identifier (e.g. a config file name).
/// Throws when no matching analyzer is registered.
/// </summary>
/// <typeparam name="T">the analyzer contract to search for.</typeparam>
/// <param name="analysisIdentifier">exact identifier from the CLI, or whitespace to fall back to the partial name.</param>
/// <param name="partialIdentifier">a dotted name such as "Towsey.Acoustics.Zooming.yml".</param>
/// <returns>the matching analyzer instance.</returns>
public static T FindAndCheckAnalyzer<T>(string analysisIdentifier, string partialIdentifier)
    where T : class, IAnalyser2
{
    string searchName;
    if (analysisIdentifier.IsNotWhitespace())
    {
        // an explicit identifier was supplied on the command line
        searchName = analysisIdentifier;
        Log.Debug($"Searching for exact analysis identifier name {searchName} (from a CLI option)");
    }
    else
    {
        // split name (e.g. "Towsey.Acoustics.Zooming.yml") on periods
        var nameParts = partialIdentifier.Split(new[] { "." }, StringSplitOptions.RemoveEmptyEntries);
        Contract.Requires<CommandLineArgumentException>(
            nameParts.Length >= 2,
            $"We need at least two segments to search for an analyzer, supplied name `{partialIdentifier}` is insufficient.");

        // assume identifier (e.g. "Towsey.Acoustic") in first two segments
        searchName = nameParts[0] + "." + nameParts[1];
        Log.Debug($"Searching for partial analysis identifier name. `{searchName}` extracted from `{partialIdentifier}`");
    }

    var candidates = AnalysisCoordinator.GetAnalyzers<T>(typeof(MainEntry).Assembly).ToList();
    var analyzer = candidates.FirstOrDefault(a => a.Identifier == searchName);

    if (analyzer is null)
    {
        var error = $"We cannot determine what analysis you want to run. We tried to search for \"{searchName}\"";
        LoggedConsole.WriteErrorLine(error);

        // list every known identifier so the user can correct the name
        var knownAnalyzers = string.Empty;
        foreach (var candidate in candidates)
        {
            knownAnalyzers += $" {candidate.Identifier}\n";
        }

        LoggedConsole.WriteLine("Available analyzers are:\n" + knownAnalyzers);

        throw new ValidationException($"Cannot find an IAnalyser2 with the name `{searchName}`");
    }

    Log.Info($"Using analyzer {analyzer.Identifier}");
    return analyzer;
}
/// <summary>
/// Cuts one audio segment from the source file and reports the elapsed time.
/// </summary>
/// <param name="sourcePreparer">the preparer that performs the cut.</param>
/// <param name="fileSegment">the segment to cut.</param>
/// <param name="settings">analysis settings (media type, sample rate, temp dir).</param>
/// <param name="arguments">CLI arguments supplying the output directory.</param>
/// <param name="itemNumber">1-based index of this segment, for progress messages.</param>
/// <param name="itemCount">total number of segments, for progress messages.</param>
/// <param name="mixDownToMono">whether to mix the segment down to mono.</param>
/// <returns>seconds taken to cut the segment, or NaN when the cut failed with an IOException.</returns>
private static async Task<double> CreateSegment(
    ISourcePreparer sourcePreparer,
    ISegment<FileInfo> fileSegment,
    AnalysisSettings settings,
    Arguments arguments,
    int itemNumber,
    int itemCount,
    bool mixDownToMono)
{
    var timer = Stopwatch.StartNew();

    FileSegment preparedFile;
    try
    {
        preparedFile = await sourcePreparer.PrepareFile(
            arguments.OutputDir.ToDirectoryInfo(),
            fileSegment,
            settings.SegmentMediaType,
            settings.AnalysisTargetSampleRate,
            settings.AnalysisTempDirectory,
            null,
            mixDownToMono);
    }
    catch (IOException ioex)
    {
        // FIX: message previously lacked a space after the colon ("...:Access denied").
        LoggedConsole.WriteErrorLine($"Failed to cut segment {itemNumber} of {itemCount}: {ioex.Message}");
        return double.NaN;
    }

    LoggedConsole.WriteLine(
        "Created segment {0} of {1}: {2}",
        itemNumber,
        itemCount,
        preparedFile.SourceMetadata.Identifier);

    return timer.Elapsed.TotalSeconds;
}
/// <summary>
/// Extracts one FFT window of the signal starting at the given time (assumed to be
/// the start of a grunt), computes a normalised power spectrum, scores its sub-band
/// (0 to 1 kHz), and returns a combined wave/FFT image.
/// </summary>
/// <param name="signal">the raw audio samples.</param>
/// <param name="sr">sample rate of the signal.</param>
/// <param name="startTimeInSeconds">start of the window of interest.</param>
/// <param name="windowWidth">FFT window width in samples.</param>
/// <returns>the rendered image, or null when the start time lies beyond the signal.</returns>
public static Image AnalyseLocation(double[] signal, int sr, double startTimeInSeconds, int windowWidth)
{
    int binCount = windowWidth / 2;

    //assume location points to start of grunt
    int startSample = (int)Math.Round(startTimeInSeconds * sr);
    if (startSample >= signal.Length)
    {
        LoggedConsole.WriteErrorLine("WARNING: Location is beyond end of signal.");
        return null;
    }

    // NOTE(review): startSample + windowWidth may exceed signal.Length here;
    // presumably DataTools.Subarray tolerates a short tail - confirm.
    int nyquist = sr / 2;
    FFT.WindowFunc wf = FFT.Hamming;
    var fft = new FFT(windowWidth, wf);

    int maxHz = 1000; // max frequency to display in fft image
    double hzPerBin = nyquist / (double)binCount;
    int requiredBinCount = (int)Math.Round(maxHz / hzPerBin);

    double[] windowSamples = DataTools.Subarray(signal, startSample, windowWidth);
    var spectrum = fft.Invoke(windowSamples);

    // convert to power, smooth, and normalise
    spectrum = DataTools.SquareValues(spectrum);
    spectrum = DataTools.filterMovingAverageOdd(spectrum, 3);
    spectrum = DataTools.normalise(spectrum);

    // ignore DC in bin zero.
    var subBandSpectrum = DataTools.Subarray(spectrum, 1, requiredBinCount);

    var startTime = TimeSpan.FromSeconds(startTimeInSeconds);
    double[] scoreArray = CalculateScores(subBandSpectrum, windowWidth);
    Image result = GraphsAndCharts.DrawWaveAndFft(windowSamples, sr, startTime, spectrum, maxHz * 2, scoreArray);
    return result;
}
/// <summary>
/// Emits one message at every log level, on a typed logger and via both the
/// LoggedConsole wrapper and its underlying log, to visually verify configuration.
/// </summary>
public void TestLogging()
{
    var logger = LogManager.GetLogger(typeof(Logging));

    // Exercise every level, highest severity first.
    logger.Prompt("Log test PROMPT");
    logger.Fatal("Log test FATAL");
    logger.Error("Log test ERROR");
    logger.Warn("Log test WARN");
    logger.Success("Log test SUCCESS");
    logger.Info("Log test INFO");
    logger.Debug("Log test DEBUG");
    logger.Trace("Log test TRACE");
    logger.Verbose("Log test VERBOSE");

    // Wrapper and underlying log side-by-side, per level.
    LoggedConsole.WriteFatalLine("Clean wrapper FATAL", new Exception("I'm a fake"));
    LoggedConsole.Log.Fatal("Clean log FATAL", new Exception("I'm a fake"));
    LoggedConsole.WriteErrorLine("Clean wrapper ERROR");
    LoggedConsole.Log.Error("Clean log ERROR");
    LoggedConsole.WriteWarnLine("Clean wrapper WARN");
    LoggedConsole.Log.Warn("Clean log WARN");
    LoggedConsole.WriteSuccessLine("Clean wrapper SUCCESS");
    LoggedConsole.Log.Success("Clean log SUCCESS");
    LoggedConsole.WriteLine("Clean wrapper INFO");
    LoggedConsole.Log.Info("Clean log INFO");
}
/// <summary>
/// Prints usage/help text in one of several styles: the full help, help for a
/// single named command, a listing of available commands, or just a hint.
/// An optional message is printed as an error first.
/// </summary>
/// <param name="message">optional error message to print before the usage text.</param>
/// <param name="usageStyle">which style of usage output to produce.</param>
/// <param name="commandName">the command to show help for (Single style only).</param>
internal static void PrintUsage(string message, Usages usageStyle, string commandName = null)
{
    //Contract.Requires(usageStyle != Usages.Single || commandName != null);
    var root = CommandLineApplication.Root();

    if (!string.IsNullOrWhiteSpace(message))
    {
        LoggedConsole.WriteErrorLine(message);
    }

    switch (usageStyle)
    {
        case Usages.All:
            // print entire usage
            root.ShowHelp();
            break;

        case Usages.Single:
            CommandLineApplication command;
            if (commandName == root.Name)
            {
                command = root;
            }
            else
            {
                command = root.Commands.FirstOrDefault(x =>
                    x.Name.Equals(commandName, StringComparison.InvariantCultureIgnoreCase));

                // sometimes this is called from AppDomainUnhandledException, in which case throwing another exception
                // just gets squashed!
                if (command == null)
                {
                    var commandNotFoundMessage = $"Could not find a command with name that matches `{commandName}`.";
                    Log.Fatal(commandNotFoundMessage);

                    throw new CommandParsingException(CommandLineApplication, commandNotFoundMessage);
                }
            }

            command.ShowHelp();
            break;

        case Usages.ListAvailable:
            var commands = root.Commands;
            using (var sb = new StringWriter())
            {
                ((CustomHelpTextGenerator)CommandLineApplication.HelpTextGenerator).FormatCommands(sb, commands);
                LoggedConsole.WriteLine(sb.ToString());
            }

            break;

        case Usages.NoAction:
            CommandLineApplication.ShowHint();
            break;

        default:
            throw new InvalidOperationException();
    }
}
/// <summary>
/// Tests octave frequency scale type Linear125Octaves7Tones28Nyquist32000 on marine
/// recording data, then compares bin bounds, grid-line locations and image dimensions
/// to expected values.
/// </summary>
public void OctaveFrequencyScale2()
{
    var recordingPath = PathHelper.ResolveAsset(@"Recordings\MarineJasco_AMAR119-00000139.00000139.Chan_1-24bps.1375012796.2013-07-28-11-59-56-16bit-60sec.wav");
    var opFileStem = "JascoMarineGBR1";
    var outputDir = this.outputDirectory;
    var outputImagePath = Path.Combine(this.outputDirectory.FullName, "Octave2ScaleSonogram.png");

    var recording = new AudioRecording(recordingPath);
    var fst = FreqScaleType.Linear125Octaves7Tones28Nyquist32000;
    var freqScale = new FrequencyScale(fst);

    var sonoConfig = new SonogramConfig
    {
        WindowSize = freqScale.WindowSize,
        WindowOverlap = 0.2,
        SourceFName = recording.BaseName,
        NoiseReductionType = NoiseReductionType.None,
        NoiseReductionParameter = 0.0,
    };

    var sonogram = new AmplitudeSonogram(sonoConfig, recording.WavReader);
    sonogram.Data = OctaveFreqScale.ConvertAmplitudeSpectrogramToDecibelOctaveScale(sonogram.Data, freqScale);

    // DO NOISE REDUCTION
    var dataMatrix = SNR.NoiseReduce_Standard(sonogram.Data);
    sonogram.Data = dataMatrix;
    sonogram.Configuration.WindowSize = freqScale.WindowSize;

    var image = sonogram.GetImageFullyAnnotated(sonogram.GetImage(), "SPECTROGRAM: " + fst.ToString(), freqScale.GridLineLocations);
    image.Save(outputImagePath, ImageFormat.Png);

    // DO FILE EQUALITY TESTS
    // Check that freqScale.OctaveBinBounds are correct
    var stemOfExpectedFile = opFileStem + "_Octave2ScaleBinBounds.EXPECTED.json";
    var stemOfActualFile = opFileStem + "_Octave2ScaleBinBounds.ACTUAL.json";
    var expectedFile1 = PathHelper.ResolveAsset("FrequencyScale\\" + stemOfExpectedFile);
    if (!expectedFile1.Exists)
    {
        // FIX: warning text was previously broken by a stray newline inside the literal;
        // normalized to the single-line message used in the second check below.
        LoggedConsole.WriteErrorLine("An EXPECTED results file does not exist. Test will fail!");
        LoggedConsole.WriteErrorLine(
            $"If ACTUAL results file is correct, move it to dir `{PathHelper.TestResources}` and change its suffix to <.EXPECTED.json>");
    }

    var resultFile1 = new FileInfo(Path.Combine(outputDir.FullName, stemOfActualFile));
    Json.Serialise(resultFile1, freqScale.BinBounds);
    FileEqualityHelpers.TextFileEqual(expectedFile1, resultFile1);

    // Check that freqScale.GridLineLocations are correct
    stemOfExpectedFile = opFileStem + "_Octave2ScaleGridLineLocations.EXPECTED.json";
    stemOfActualFile = opFileStem + "_Octave2ScaleGridLineLocations.ACTUAL.json";
    var expectedFile2 = PathHelper.ResolveAsset("FrequencyScale\\" + stemOfExpectedFile);
    if (!expectedFile2.Exists)
    {
        LoggedConsole.WriteErrorLine("An EXPECTED results file does not exist. Test will fail!");
        LoggedConsole.WriteErrorLine(
            $"If ACTUAL results file is correct, move it to dir `{PathHelper.TestResources}` and change its suffix to <.EXPECTED.json>");
    }

    var resultFile2 = new FileInfo(Path.Combine(outputDir.FullName, stemOfActualFile));
    Json.Serialise(resultFile2, freqScale.GridLineLocations);
    FileEqualityHelpers.TextFileEqual(expectedFile2, resultFile2);

    // Check that image dimensions are correct
    Assert.AreEqual(201, image.Width);
    Assert.AreEqual(310, image.Height);
}
/// <summary>
/// Tests the default octave frequency scale (Linear125Octaves6Tones30Nyquist11025),
/// then compares bin bounds, grid-line locations and image dimensions to expected values.
/// </summary>
public void OctaveFrequencyScale1()
{
    var recordingPath = PathHelper.ResolveAsset("Recordings", "BAC2_20071008-085040.wav");
    var opFileStem = "BAC2_20071008";
    var outputDir = this.outputDirectory;
    var outputImagePath = Path.Combine(outputDir.FullName, "Octave1ScaleSonogram.png");
    var recording = new AudioRecording(recordingPath);

    // default octave scale
    var fst = FreqScaleType.Linear125Octaves6Tones30Nyquist11025;
    var freqScale = new FrequencyScale(fst);

    var sonoConfig = new SonogramConfig
    {
        WindowSize = freqScale.WindowSize,
        WindowOverlap = 0.75,
        SourceFName = recording.BaseName,
        NoiseReductionType = NoiseReductionType.None,
        NoiseReductionParameter = 0.0,
    };

    // Generate amplitude sonogram and then conver to octave scale
    var sonogram = new AmplitudeSonogram(sonoConfig, recording.WavReader);

    // THIS IS THE CRITICAL LINE. COULD DO WITH SEPARATE UNIT TEST
    sonogram.Data = OctaveFreqScale.ConvertAmplitudeSpectrogramToDecibelOctaveScale(sonogram.Data, freqScale);

    // DO NOISE REDUCTION
    var dataMatrix = SNR.NoiseReduce_Standard(sonogram.Data);
    sonogram.Data = dataMatrix;
    sonogram.Configuration.WindowSize = freqScale.WindowSize;

    var image = sonogram.GetImageFullyAnnotated(sonogram.GetImage(), "SPECTROGRAM: " + fst.ToString(), freqScale.GridLineLocations);
    image.Save(outputImagePath, ImageFormat.Png);

    // DO FILE EQUALITY TESTS
    // Check that freqScale.OctaveBinBounds are correct
    var stemOfExpectedFile = opFileStem + "_Octave1ScaleBinBounds.EXPECTED.json";
    var stemOfActualFile = opFileStem + "_Octave1ScaleBinBounds.ACTUAL.json";
    var expectedFile1 = PathHelper.ResolveAsset("FrequencyScale\\" + stemOfExpectedFile);
    if (!expectedFile1.Exists)
    {
        // FIX: warning text was previously broken by a stray newline inside the literal;
        // normalized to the single-line message used in the second check below.
        LoggedConsole.WriteErrorLine("An EXPECTED results file does not exist. Test will fail!");
        LoggedConsole.WriteErrorLine(
            $"If ACTUAL results file is correct, move it to dir `{PathHelper.TestResources}` and change its suffix to <.EXPECTED.json>");
    }

    var resultFile1 = new FileInfo(Path.Combine(outputDir.FullName, stemOfActualFile));
    Json.Serialise(resultFile1, freqScale.BinBounds);
    FileEqualityHelpers.TextFileEqual(expectedFile1, resultFile1);

    // Check that freqScale.GridLineLocations are correct
    stemOfExpectedFile = opFileStem + "_Octave1ScaleGridLineLocations.EXPECTED.json";
    stemOfActualFile = opFileStem + "_Octave1ScaleGridLineLocations.ACTUAL.json";
    var expectedFile2 = PathHelper.ResolveAsset("FrequencyScale\\" + stemOfExpectedFile);
    if (!expectedFile2.Exists)
    {
        LoggedConsole.WriteErrorLine("An EXPECTED results file does not exist. Test will fail!");
        LoggedConsole.WriteErrorLine(
            $"If ACTUAL results file is correct, move it to dir `{PathHelper.TestResources}` and change its suffix to <.EXPECTED.json>");
    }

    var resultFile2 = new FileInfo(Path.Combine(outputDir.FullName, stemOfActualFile));
    Json.Serialise(resultFile2, freqScale.GridLineLocations);
    FileEqualityHelpers.TextFileEqual(expectedFile2, resultFile2);

    // Check that image dimensions are correct
    Assert.AreEqual(645, image.Width);
    Assert.AreEqual(310, image.Height);
}
/// <summary>
/// Returns the grid-line table for an octave frequency scale.
/// Each row of the returned matrix describes one horizontal gridline:
/// column 0 holds the pixel row of the line, column 1 holds its Hertz value.
/// Scales without tabulated data return a zero-filled matrix and log an error;
/// an unrecognised scale returns null.
/// </summary>
public static int[,] GetGridLineLocations(FreqScaleType ost, int[,] octaveBinBounds)
{
    int[,] gridLineLocations = null;

    switch (ost)
    {
        case FreqScaleType.Linear62Octaves7Tones31Nyquist11025:
            // no tabulated data yet - return an empty 8-row table.
            gridLineLocations = new int[8, 2];
            LoggedConsole.WriteErrorLine("This Octave Scale does not currently have grid data provided.");
            break;

        case FreqScaleType.Linear125Octaves6Tones30Nyquist11025:
            // { pixel row, Hertz }
            gridLineLocations = new[,]
            {
                { 46, 125 },
                { 79, 250 },
                { 111, 500 },
                { 143, 1000 },
                { 175, 2000 },
                { 207, 4000 },
                { 239, 8000 },
            };
            break;

        case FreqScaleType.Octaves24Nyquist32000:
            // no tabulated data yet - return an empty 8-row table.
            gridLineLocations = new int[8, 2];
            LoggedConsole.WriteErrorLine("This Octave Scale does not currently have grid data provided.");
            break;

        case FreqScaleType.Linear125Octaves7Tones28Nyquist32000:
            // { pixel row, Hertz }
            gridLineLocations = new[,]
            {
                { 34, 125 },
                { 62, 250 },
                { 89, 500 },
                { 117, 1000 },
                { 145, 2000 },
                { 173, 4000 },
                { 201, 8000 },
                { 229, 16000 },
                { 256, 32000 },
            };
            break;

        default:
            LoggedConsole.WriteErrorLine("Not a valid Octave Scale.");
            break;
    }

    return gridLineLocations;
}
/// <summary>
/// this method is used only to visualize the clusters and which frames they hit.
/// Create a new spectrogram of same size as the passed spectrogram.
/// Later on it is superimposed on a detailed spectrogram.
/// Cell value 0 = frame not assigned to any cluster; cell value k+1 = frame assigned to cluster k.
/// </summary>
/// <param name="spectrogram">spectrogram used to derive spectral richness indices. Orientation is row=frame</param>
/// <param name="lowerBinBound">bottom N freq bins are excluded because likely to contain traffic and wind noise.
/// NOTE(review): currently unused - the loop that used it is commented out below; kept for interface compatibility.</param>
/// <param name="clusterInfo">information about accumulated clusters</param>
/// <param name="data">training data</param>
public static int[,] AssembleClusterSpectrogram(double[,] spectrogram, int lowerBinBound, ClusterInfo clusterInfo, TrainingDataInfo data)
{
    // the weight vector for each cluster - a list of double-arrays
    var clusterWts = clusterInfo.PrunedClusterWts;

    // an array indicating which cluster each frame belongs to. Zero = no cluster
    int[] clusterHits = clusterInfo.ClusterHits2;
    bool[] activeFrames = clusterInfo.SelectedFrames;
    int frameCount = spectrogram.GetLength(0);
    int freqBinCount = spectrogram.GetLength(1);

    //reassemble spectrogram to visualise the clusters
    var clusterSpectrogram = new int[frameCount, freqBinCount];
    for (int row = 0; row < frameCount; row++)
    {
        // only frames selected during clustering are painted; inactive frames stay 0.
        if (activeFrames[row])
        {
            int clusterId = clusterHits[row];
            if (clusterId == 0)
            {
                // cluster zero does not exist. Place filler
                continue;
            }

            double[] wtVector = clusterWts[clusterId];
            if (wtVector == null)
            {
                // This should not happen but ...
                LoggedConsole.WriteErrorLine($"WARNING: Cluster {clusterId} = null");
                continue;
            }

            // expand the (truncated) cluster weight vector back to full spectrum length
            double[] fullLengthSpectrum = RestoreFullLengthSpectrum(wtVector, freqBinCount, data.LowBinBound);

            //for (int j = lowerBinBound; j < freqBinCount; j++)
            for (int j = 0; j < freqBinCount; j++)
            {
                //if (spectrogram[row, j] > data.intensityThreshold)
                if (fullLengthSpectrum[j] > 0.0)
                {
                    clusterSpectrogram[row, j] = clusterId + 1; //+1 so do not have zero index for a cluster
                }
                else
                {
                    clusterSpectrogram[row, j] = 0; //correct for case where set hit count < 0 for pruned wts.
                }
            }
        }
    }

    //add in the weights to first part of spectrogram
    //int space = 10;
    //int col = space;
    //for (int i = 0; i < clusterWts.Count; i++)
    //{
    //    if (clusterWts[i] == null) continue;
    //    for (int c = 0; c < space; c++)
    //    {
    //        col++;
    //        //for (int j = 0; j < clusterSpectrogram.GetLength(1); j++) clusterSpectrogram[col, j] = clusterWts.Count+3;
    //        for (int j = 0; j < clusterWts[i].Length; j++)
    //        {
    //            if (clusterWts[i][j] > 0.0) clusterSpectrogram[col, excludeBins + j - 1] = i + 1;
    //        }
    //    }
    //    //col += 2;
    //}

    return(clusterSpectrogram);
}
public static void DrawFrequencyLinesOnImage(Bitmap bmp, int[,] gridLineLocations, bool includeLabels) { int minimumSpectrogramWidth = 10; if (bmp.Width < minimumSpectrogramWidth) { // there is no point drawing grid lines on a very narrow image. return; } // attempt to determine background colour of spectrogram i.e. dark false-colour or light. // get the average brightness in a neighbourhood of m x n pixels. int pixelCount = 0; float brightness = 0.0F; for (int m = 5; m < minimumSpectrogramWidth; m++) { for (int n = 5; n < minimumSpectrogramWidth; n++) { var bgnColour = bmp.GetPixel(m, n); brightness += bgnColour.GetBrightness(); pixelCount++; } } brightness /= pixelCount; var txtColour = Brushes.White; if (brightness > 0.5) { txtColour = Brushes.Black; } int width = bmp.Width; int height = bmp.Height; int bandCount = gridLineLocations.GetLength(0); var g = Graphics.FromImage(bmp); // draw the grid line for each frequency band for (int b = 0; b < bandCount; b++) { int y = height - gridLineLocations[b, 0]; if (y < 0) { LoggedConsole.WriteErrorLine(" WarningException: Negative image index for gridline!"); continue; } for (int x = 1; x < width - 3; x++) { bmp.SetPixel(x, y, Color.White); x += 3; bmp.SetPixel(x, y, Color.Black); x += 2; } } if (!includeLabels || bmp.Width < 30) { // there is no point placing Hertz label on a narrow image. It obscures too much spectrogram. return; } // draw Hertz label on each band for (int b = 0; b < bandCount; b++) { int y = height - gridLineLocations[b, 0]; if (y > 1) { g.DrawString($"{gridLineLocations[b, 1]}", new Font("Thachoma", 8), txtColour, 1, y); } } } //end AddHzGridLines()
/// <summary>
/// Draws the "EASY" long-duration false-colour image from consecutive days of summary-index
/// CSV files (assumed already concatenated into one 24-hour file per day). Three chosen
/// summary indices are mapped to the red, green and blue channels; one image row-band of
/// <c>dayPixelHeight</c> pixels is drawn per day, with month/year gridlines, a y-axis scale,
/// sunrise/sunset tracks, time scales and a title bar. The composite image is saved as a PNG
/// in the output directory.
/// </summary>
/// <param name="arguments">command-line arguments: input/output directories, date range, file filter, index-properties config, etc.</param>
public static void Execute(Arguments arguments)
{
    var inputDirs = arguments.InputDataDirectories.Select(FileInfoExtensions.ToDirectoryInfo);
    var output = arguments.OutputDirectory.ToDirectoryInfo();
    string date = "# DATE AND TIME: " + DateTime.Now;
    LoggedConsole.WriteLine("\n# DRAW an EASY IMAGE from consecutive days of SUMMARY INDICES in CSV files.");
    LoggedConsole.WriteLine("# IT IS ASSUMED THAT THE CSV files are already concatenated into 24 hour files.");
    LoggedConsole.WriteLine(date);
    LoggedConsole.WriteLine("# Summary Index.csv files are in directories:");
    foreach (DirectoryInfo dir in inputDirs)
    {
        LoggedConsole.WriteLine(" {0}", dir.FullName);
    }

    LoggedConsole.WriteLine("# Output directory: " + output);
    if (arguments.StartDate == null)
    {
        LoggedConsole.WriteLine("# Start date = NULL (No argument provided). Will revise start date ....");
    }
    else
    {
        LoggedConsole.WriteLine("# Start date = " + arguments.StartDate.ToString());
    }

    if (arguments.EndDate == null)
    {
        LoggedConsole.WriteLine("# End date = NULL (No argument provided). Will revise end date ....");
    }
    else
    {
        LoggedConsole.WriteLine("# End date = " + arguments.EndDate.ToString());
    }

    LoggedConsole.WriteLine("# FILE FILTER = " + arguments.FileFilter);
    LoggedConsole.WriteLine();

    // PATTERN SEARCH FOR SUMMARY INDEX FILES.
    //string pattern = "*__Towsey.Acoustic.Indices.csv";
    FileInfo[] csvFiles = IndexMatrices.GetFilesInDirectories(inputDirs.ToArray(), arguments.FileFilter);

    //LoggedConsole.WriteLine("# Subdirectories Count = " + subDirectories.Length);
    LoggedConsole.WriteLine("# SummaryIndexFiles.csv Count = " + csvFiles.Length);

    if (csvFiles.Length == 0)
    {
        LoggedConsole.WriteErrorLine("\n\nWARNING from method DrawEasyImage.Execute():");
        LoggedConsole.WriteErrorLine("        No SUMMARY index files were found.");
        LoggedConsole.WriteErrorLine("        RETURNING EMPTY HANDED!");
        return;
    }

    // Sort the files by date and return as a dictionary: sortedDictionaryOfDatesAndFiles<DateTimeOffset, FileInfo>
    //var sortedDictionaryOfDatesAndFiles = LDSpectrogramStitching.FilterFilesForDates(csvFiles, arguments.TimeSpanOffsetHint);

    // calculate new start date if passed value = null.
    // NOTE(review): despite the comment above, no null-date revision is implemented - the
    // casts below throw InvalidOperationException if StartDate or EndDate is null.
    // TODO confirm callers always supply both dates.
    DateTimeOffset? startDate = arguments.StartDate;
    DateTimeOffset? endDate = arguments.EndDate;
    TimeSpan totalTimespan = (DateTimeOffset)endDate - (DateTimeOffset)startDate;
    int dayCount = totalTimespan.Days + 1; // assume last day has full 24 hours of recording available.
    LoggedConsole.WriteLine("\n# Start date = " + startDate.ToString());
    LoggedConsole.WriteLine("# End date = " + endDate.ToString());
    LoggedConsole.WriteLine(string.Format("# Elapsed time = {0:f1} hours", dayCount * 24));
    LoggedConsole.WriteLine("# Day count = " + dayCount + " (inclusive of start and end days)");
    LoggedConsole.WriteLine("# Time Zone = " + arguments.TimeSpanOffsetHint.ToString());

    // create top level output directory if it does not exist.
    DirectoryInfo opDir = output;
    if (!opDir.Exists)
    {
        opDir.Create();
    }

    // SET UP DEFAULT SITE LOCATION INFO -- DISCUSS IWTH ANTHONY
    // The following location data is used only to draw the sunrise/sunset tracks on images.
    double? latitude = null;
    double? longitude = null;
    var siteDescription = new SiteDescription();
    siteDescription.SiteName = arguments.FileStemName;
    siteDescription.Latitude = latitude;
    siteDescription.Longitude = longitude;

    // the following required if drawing the index images
    FileInfo indexPropertiesConfig = null;

    // require IndexGenerationData and indexPropertiesConfig for drawing
    //indexGenerationData = IndexGenerationData.GetIndexGenerationData(csvFiles[0].Directory);
    indexPropertiesConfig = arguments.IndexPropertiesConfig.ToFileInfo();
    Dictionary<string, IndexProperties> listOfIndexProperties = IndexProperties.GetIndexProperties(indexPropertiesConfig);
    Tuple<List<string>, List<double[]>> tuple = CsvTools.ReadCSVFile(csvFiles[0].FullName);
    var names = tuple.Item1;

    // default EASY indices: column IDs into the summary-index CSV.
    int redID = 3; // backgroundNoise
    int grnID = 5; // avSNROfActiveframes
    int bluID = 7; // events per second
    string rep = @"bgn-avsnr-evn";

    // ACI Ht Hpeaks EASY indices
    // NOTE(review): the if(false)/if(true) pairs below are hard-coded debug switches for
    // selecting which three indices to render; only the lf-mf-hf branch is active.
    if (false)
    {
        redID = 11; // ACI
        grnID = 12; // Ht

        //bluID = 13; // HavgSp
        //bluID = 14; // Hvariance
        //bluID = 15; // Hpeaks
        bluID = 16; // Hcov

        //bluID = 7; // SPT
        rep = @"aci-ht-hcov";

        //rep = @"aci-ht-spt";
    }

    // LF, MF, HF
    if (true)
    {
        redID = 10; // LF
        grnID = 9;  // MF
        bluID = 8;  // HF
        rep = @"lf-mf-hf";
    }

    IndexProperties redIndexProps = listOfIndexProperties[names[redID]];
    IndexProperties grnIndexProps = listOfIndexProperties[names[grnID]];
    IndexProperties bluIndexProps = listOfIndexProperties[names[bluID]];

    int dayPixelHeight = 4;
    int rowCount = (dayPixelHeight * dayCount) + 35; // +30 for grid lines
    int colCount = 1440;
    var bitmap = new Image<Rgb24>(colCount, rowCount);
    var colour = Color.Yellow;
    int currentRow = 0;
    var oneDay = TimeSpan.FromHours(24);
    int graphWidth = colCount;
    int trackHeight = 20;
    var stringFont = Drawing.Arial8;
    string[] monthNames = { "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec" };

    // for drawing the y-axis scale
    int scaleWidth = trackHeight + 7;
    var yAxisScale = new Image<Rgb24>(scaleWidth, rowCount + (2 * trackHeight));
    yAxisScale.Mutate(g =>
    {
        g.Clear(Color.Black);

        // loop over days
        for (int d = 0; d < dayCount; d++)
        {
            var thisday = ((DateTimeOffset)startDate).AddDays(d);

            // on the first day of each month, draw a double gray gridline and a month (and,
            // in January, year) label on the y-axis scale.
            if (thisday.Day == 1)
            {
                int nextRow = currentRow + 1;
                for (int c = 0; c < colCount; c++)
                {
                    bitmap[c, currentRow] = Color.Gray;
                    bitmap[c, nextRow] = Color.Gray;
                }

                for (int c = 0; c < scaleWidth; c++)
                {
                    yAxisScale[c, currentRow + trackHeight] = Color.Gray;
                    yAxisScale[c, nextRow + trackHeight] = Color.Gray;
                }

                string month = monthNames[thisday.Month - 1];
                if (thisday.Month == 1) // January
                {
                    g.DrawText(thisday.Year.ToString(), stringFont, Color.White, new PointF(0, nextRow + trackHeight + 1)); //draw time
                    g.DrawText(month, stringFont, Color.White, new PointF(1, nextRow + trackHeight + 11)); //draw time
                }
                else
                {
                    g.DrawText(month, stringFont, Color.White, new PointF(1, nextRow + trackHeight + 1)); //draw time
                }

                currentRow += 2;
            }

            // get the exact date and time
            LoggedConsole.WriteLine($"READING DAY {d + 1} of {dayCount}:   {thisday.ToString()}");

            // CREATE DAY LEVEL OUTPUT DIRECTORY for this day
            string dateString = $"{thisday.Year}{thisday.Month:D2}{thisday.Day:D2}";
            tuple = CsvTools.ReadCSVFile(csvFiles[d].FullName);
            var arrays = tuple.Item2;

            var redArray = arrays[redID];
            var grnArray = arrays[grnID];
            var bluArray = arrays[bluID];

            // NormaliseMatrixValues the indices
            redArray = DataTools.NormaliseInZeroOne(redArray, redIndexProps.NormMin, redIndexProps.NormMax);
            grnArray = DataTools.NormaliseInZeroOne(grnArray, grnIndexProps.NormMin, grnIndexProps.NormMax);
            bluArray = DataTools.NormaliseInZeroOne(bluArray, bluIndexProps.NormMin, bluIndexProps.NormMax);

            // paint one dayPixelHeight-tall band: each column c is one minute of the day;
            // each normalised index value is squared (gamma-like emphasis), scaled to 0..255
            // and clamped before being written to the corresponding colour channel.
            for (int c = 0; c < colCount; c++)
            {
                for (int r = 0; r < dayPixelHeight; r++)
                {
                    //transformedValue = Math.Sqrt(redArray[c]);
                    var transformedValue = redArray[c] * redArray[c];
                    int redVal = (int)Math.Round(transformedValue * 255);
                    if (redVal < 0)
                    {
                        redVal = 0;
                    }
                    else if (redVal > 255)
                    {
                        redVal = 255;
                    }

                    //transformedValue = Math.Sqrt(grnArray[c]);
                    transformedValue = grnArray[c] * grnArray[c]; // square the value
                    int grnVal = (int)Math.Round(transformedValue * 255);
                    if (grnVal < 0)
                    {
                        grnVal = 0;
                    }
                    else if (grnVal > 255)
                    {
                        grnVal = 255;
                    }

                    //transformedValue = Math.Sqrt(bluArray[c]);
                    transformedValue = bluArray[c] * bluArray[c]; // square the value
                    int bluVal = (int)Math.Round(transformedValue * 255);
                    if (bluVal < 0)
                    {
                        bluVal = 0;
                    }
                    else if (bluVal > 255)
                    {
                        bluVal = 255;
                    }

                    bitmap[c, currentRow + r] = Color.FromRgb((byte)redVal, (byte)grnVal, (byte)bluVal);
                }
            } // over all columns

            currentRow += dayPixelHeight;

            // a single gray separator line at the end of each week.
            if (thisday.Day % 7 == 0)
            {
                for (int c = 0; c < colCount; c++)
                {
                    bitmap[c, currentRow] = Color.Gray;
                }

                currentRow++;
            }
        } // over days
    });

    // draw on civil dawn and dusk lines
    int startdayOfYear = ((DateTimeOffset)startDate).DayOfYear;
    int endDayOfYear = ((DateTimeOffset)endDate).DayOfYear;
    SunAndMoon.AddSunRiseSetLinesToImage(bitmap, arguments.BrisbaneSunriseDatafile.ToFileInfo(), startdayOfYear, endDayOfYear, dayPixelHeight);

    // add the time scales
    Image<Rgb24> timeBmp1 = ImageTrack.DrawTimeRelativeTrack(oneDay, graphWidth, trackHeight);
    var imageList = new[] { timeBmp1, bitmap, timeBmp1 };
    Image<Rgb24> compositeBmp1 = (Image<Rgb24>)ImageTools.CombineImagesVertically(imageList);
    imageList = new[] { yAxisScale, compositeBmp1 };
    Image<Rgb24> compositeBmp2 = (Image<Rgb24>)ImageTools.CombineImagesInLine(imageList);

    // indices used for image
    string indicesDescription = $"{redIndexProps.Name}|{grnIndexProps.Name}|{bluIndexProps.Name}";
    string startString = $"{startDate.Value.Year}/{startDate.Value.Month}/{startDate.Value.Day}";
    string endString = $"{endDate.Value.Year}/{endDate.Value.Month}/{endDate.Value.Day}";
    string title = $"EASY: {arguments.FileStemName}    From {startString} to {endString}                          Indices: {indicesDescription}";
    Image<Rgb24> titleBar = ImageTrack.DrawTitleTrack(compositeBmp2.Width, trackHeight, title);
    imageList = new[] { titleBar, compositeBmp2 };
    compositeBmp2 = (Image<Rgb24>)ImageTools.CombineImagesVertically(imageList);
    var outputFileName = Path.Combine(opDir.FullName, arguments.FileStemName + "." + rep + ".EASY.png");
    compositeBmp2.Save(outputFileName);
} // Execute()
public static void DrawFrequencyLinesOnImage(Image <Rgb24> bmp, int[,] gridLineLocations, bool includeLabels) { int minimumSpectrogramWidth = 10; if (bmp.Width < minimumSpectrogramWidth) { // there is no point drawing grid lines on a very narrow image. return; } // attempt to determine background colour of spectrogram i.e. dark false-colour or light. // get the average brightness in a neighbourhood of m x n pixels. int pixelCount = 0; float brightness = 0.0F; for (int m = 5; m < minimumSpectrogramWidth; m++) { for (int n = 5; n < minimumSpectrogramWidth; n++) { var bgnColour = bmp[m, n]; brightness += bgnColour.GetBrightness(); pixelCount++; } } brightness /= pixelCount; var txtColour = Color.White; if (brightness > 0.5) { txtColour = Color.Black; } int width = bmp.Width; int height = bmp.Height; int bandCount = gridLineLocations.GetLength(0); // draw the grid line for each frequency band for (int b = 0; b < bandCount; b++) { int y = height - gridLineLocations[b, 0]; if (y < 0) { LoggedConsole.WriteErrorLine(" WarningException: Negative image index for gridline!"); continue; } for (int x = 1; x < width - 3; x++) { bmp[x, y] = Color.White; x += 3; bmp[x, y] = Color.Black; x += 2; } } if (!includeLabels || bmp.Width < 30) { // there is no point placing Hertz label on a narrow image. It obscures too much spectrogram. return; } bmp.Mutate(g => { // draw Hertz label on each band for (int b = 0; b < bandCount; b++) { int y = height - gridLineLocations[b, 0]; int hertzValue = gridLineLocations[b, 1]; if (y > 1) { g.DrawText($"{hertzValue}", Drawing.Tahoma8, txtColour, new PointF(1, y)); } } }); } //end AddHzGridLines()
/// <summary>
/// 2. Analyses long audio recording (mp3 or wav) as per passed config file. Outputs an events.csv file AND an
/// indices.csv file.
/// Coordinates the full pipeline: argument/config validation, analyzer resolution, segmented
/// analysis via <c>AnalysisCoordinator</c>, merging of per-segment results, summarisation,
/// and saving of events / summary-index / spectral-index CSVs plus a summary-index image.
/// Signed off: Michael Towsey 4th December 2012.
/// </summary>
/// <param name="arguments">parsed command-line arguments; must not be null.</param>
public static void Execute(Arguments arguments)
{
    if (arguments == null)
    {
        throw new NoDeveloperMethodException();
    }

    LoggedConsole.WriteLine("# PROCESS LONG RECORDING");
    LoggedConsole.WriteLine("# DATE AND TIME: " + DateTime.Now);

    // 1. set up the necessary files
    var sourceAudio = arguments.Source;
    var configFile = arguments.Config.ToFileInfo();
    var outputDirectory = arguments.Output;
    var tempFilesDirectory = arguments.TempDir;

    // if a temp dir is not given, use output dir as temp dir
    if (tempFilesDirectory == null)
    {
        Log.Warn("No temporary directory provided, using output directory");
        tempFilesDirectory = outputDirectory;
    }

    // try an automatically find the config file
    if (configFile == null)
    {
        throw new FileNotFoundException("No config file argument provided");
    }
    else if (!configFile.Exists)
    {
        Log.Warn($"Config file {configFile.FullName} not found... attempting to resolve config file");

        // we use .ToString() here to get the original input string - Using fullname always produces an absolute path wrt to pwd... we don't want to prematurely make asusmptions:
        // e.g. We require a missing absolute path to fail... that wouldn't work with .Name
        // e.g. We require a relative path to try and resolve, using .FullName would fail the first absolute check inside ResolveConfigFile
        configFile = ConfigFile.Resolve(configFile.ToString(), Directory.GetCurrentDirectory().ToDirectoryInfo());
    }

    // XOR check: offsets are all-or-nothing.
    if (arguments.StartOffset.HasValue ^ arguments.EndOffset.HasValue)
    {
        throw new InvalidStartOrEndException("If StartOffset or EndOffset is specified, then both must be specified");
    }

    if (arguments.StartOffset.HasValue && arguments.EndOffset.HasValue && arguments.EndOffset.Value <= arguments.StartOffset.Value)
    {
        throw new InvalidStartOrEndException("Start offset must be less than end offset.");
    }

    LoggedConsole.WriteLine("# Recording file:      " + sourceAudio.FullName);
    LoggedConsole.WriteLine("# Configuration file:  " + configFile);
    LoggedConsole.WriteLine("# Output folder:       " + outputDirectory);
    LoggedConsole.WriteLine("# Temp File Directory: " + tempFilesDirectory);

    // optionally copy logs / config to make results easier to understand
    // TODO: remove, see https://github.com/QutEcoacoustics/audio-analysis/issues/133
    if (arguments.WhenExitCopyConfig || arguments.WhenExitCopyLog)
    {
        AppDomain.CurrentDomain.ProcessExit += (sender, args) => { Cleanup(arguments, configFile); };
    }

    // 2. initialize the analyzer
    // we're changing the way resolving config files works. Ideally, we'd like to use statically typed config files
    // but we can't do that unless we know which type we have to load first! Currently analyzer to load is in
    // the config file so we can't know which analyzer we can use. Thus we will change to using the file name,
    // or an argument to resolve the analyzer to load.
    // Get analysis name:
    IAnalyser2 analyzer = FindAndCheckAnalyzer<IAnalyser2>(arguments.AnalysisIdentifier, configFile.Name);

    // 2. get the analysis config
    AnalyzerConfig configuration = analyzer.ParseConfig(configFile);

    SaveBehavior saveIntermediateWavFiles = configuration.SaveIntermediateWavFiles;
    bool saveIntermediateDataFiles = configuration.SaveIntermediateCsvFiles;
    SaveBehavior saveSonogramsImages = configuration.SaveSonogramImages;

    bool filenameDate = configuration.RequireDateInFilename;

    if (configuration[AnalysisKeys.AnalysisName].IsNotWhitespace())
    {
        Log.Warn("Your config file has `AnalysisName` set - this property is deprecated and ignored");
    }

    // AT 2018-02: changed logic so default index properties loaded if not provided
    FileInfo indicesPropertiesConfig = IndexProperties.Find(configuration, configFile);
    if (indicesPropertiesConfig == null || !indicesPropertiesConfig.Exists)
    {
        Log.Warn("IndexProperties config can not be found! Loading a default");
        indicesPropertiesConfig = ConfigFile.Default<Dictionary<string, IndexProperties>>();
    }

    LoggedConsole.WriteLine("# IndexProperties Cfg: " + indicesPropertiesConfig.FullName);

    // min score for an acceptable event
    Log.Info("Minimum event threshold has been set to " + configuration.EventThreshold);

    FileSegment.FileDateBehavior defaultBehavior = FileSegment.FileDateBehavior.Try;
    if (filenameDate)
    {
        if (!FileDateHelpers.FileNameContainsDateTime(sourceAudio.Name))
        {
            throw new InvalidFileDateException(
                "When RequireDateInFilename option is set, the filename of the source audio file must contain " +
                "a valid AND UNAMBIGUOUS date. Such a date was not able to be parsed.");
        }

        defaultBehavior = FileSegment.FileDateBehavior.Required;
    }

    // 3. initilize AnalysisCoordinator class that will do the analysis
    var analysisCoordinator = new AnalysisCoordinator(
        new LocalSourcePreparer(),
        saveIntermediateWavFiles,
        false,
        arguments.Parallel);

    // 4. get the segment of audio to be analysed
    // if tiling output, specify that FileSegment needs to be able to read the date
    var fileSegment = new FileSegment(sourceAudio, arguments.AlignToMinute, null, defaultBehavior);
    var bothOffsetsProvided = arguments.StartOffset.HasValue && arguments.EndOffset.HasValue;
    if (bothOffsetsProvided)
    {
        fileSegment.SegmentStartOffset = TimeSpan.FromSeconds(arguments.StartOffset.Value);
        fileSegment.SegmentEndOffset = TimeSpan.FromSeconds(arguments.EndOffset.Value);
    }
    else
    {
        Log.Debug("Neither start nor end segment offsets provided. Therefore both were ignored.");
    }

    // 6. initialize the analysis settings object
    var analysisSettings = analyzer.DefaultSettings;
    analysisSettings.ConfigFile = configFile;
    analysisSettings.Configuration = configuration;
    analysisSettings.AnalysisOutputDirectory = outputDirectory;
    analysisSettings.AnalysisTempDirectory = tempFilesDirectory;
    analysisSettings.AnalysisDataSaveBehavior = saveIntermediateDataFiles;
    analysisSettings.AnalysisImageSaveBehavior = saveSonogramsImages;
    analysisSettings.AnalysisChannelSelection = arguments.Channels;
    analysisSettings.AnalysisMixDownToMono = arguments.MixDownToMono;

    // segment duration: config value, else analyzer default, else 1 minute.
    var segmentDuration = configuration.SegmentDuration?.Seconds();
    if (!segmentDuration.HasValue)
    {
        segmentDuration = analysisSettings.AnalysisMaxSegmentDuration ?? TimeSpan.FromMinutes(1);
        Log.Warn(
            $"Can't read `{nameof(AnalyzerConfig.SegmentDuration)}` from config file. " +
            $"Default value of {segmentDuration} used)");
    }

    analysisSettings.AnalysisMaxSegmentDuration = segmentDuration.Value;

    var segmentOverlap = configuration.SegmentOverlap?.Seconds();
    if (!segmentOverlap.HasValue)
    {
        segmentOverlap = analysisSettings.SegmentOverlapDuration;
        Log.Warn(
            $"Can't read `{nameof(AnalyzerConfig.SegmentOverlap)}` from config file. " +
            $"Default value of {segmentOverlap} used)");
    }

    analysisSettings.SegmentOverlapDuration = segmentOverlap.Value;

    // set target sample rate
    var resampleRate = configuration.ResampleRate;
    if (!resampleRate.HasValue)
    {
        resampleRate = analysisSettings.AnalysisTargetSampleRate ?? AppConfigHelper.DefaultTargetSampleRate;
        Log.Warn(
            $"Can't read {nameof(configuration.ResampleRate)} from config file. " +
            $"Default value of {resampleRate} used)");
    }

    analysisSettings.AnalysisTargetSampleRate = resampleRate;

    Log.Info(
        $"{nameof(configuration.SegmentDuration)}={segmentDuration}, " +
        $"{nameof(configuration.SegmentOverlap)}={segmentOverlap}, " +
        $"{nameof(configuration.ResampleRate)}={resampleRate}");

    // 7. ####################################### DO THE ANALYSIS ###################################
    LoggedConsole.WriteLine("START ANALYSIS ...");
    var analyserResults = analysisCoordinator.Run(fileSegment, analyzer, analysisSettings);

    // ##############################################################################################
    // 8. PROCESS THE RESULTS
    LoggedConsole.WriteLine(string.Empty);
    LoggedConsole.WriteLine("START PROCESSING RESULTS ...");
    if (analyserResults == null)
    {
        LoggedConsole.WriteErrorLine("###################################################\n");
        LoggedConsole.WriteErrorLine("The Analysis Run Coordinator has returned a null result.");
        LoggedConsole.WriteErrorLine("###################################################\n");
        throw new AnalysisOptionDevilException();
    }

    // Merge and correct main result types
    EventBase[] mergedEventResults = ResultsTools.MergeResults(analyserResults, ar => ar.Events, ResultsTools.CorrectEvent);
    SummaryIndexBase[] mergedIndicesResults = ResultsTools.MergeResults(analyserResults, ar => ar.SummaryIndices, ResultsTools.CorrectSummaryIndex);
    SpectralIndexBase[] mergedSpectralIndexResults = ResultsTools.MergeResults(analyserResults, ar => ar.SpectralIndices, ResultsTools.CorrectSpectrumIndex);

    // not an exceptional state, do not throw exception
    if (mergedEventResults != null && mergedEventResults.Length == 0)
    {
        LoggedConsole.WriteWarnLine("The analysis produced no EVENTS (mergedResults had zero count)");
    }

    if (mergedIndicesResults != null && mergedIndicesResults.Length == 0)
    {
        LoggedConsole.WriteWarnLine("The analysis produced no Summary INDICES (mergedResults had zero count)");
    }

    if (mergedSpectralIndexResults != null && mergedSpectralIndexResults.Length == 0)
    {
        LoggedConsole.WriteWarnLine("The analysis produced no Spectral INDICES (merged results had zero count)");
    }

    // 9. CREATE SUMMARY INDICES IF NECESSARY (FROM EVENTS)
#if DEBUG
    // get the duration of the original source audio file - need this to convert Events datatable to Indices Datatable
    var audioUtility = new MasterAudioUtility(tempFilesDirectory);
    var mimeType = MediaTypes.GetMediaType(sourceAudio.Extension);
    var sourceInfo = audioUtility.Info(sourceAudio);

    // updated by reference all the way down in LocalSourcePreparer
    Debug.Assert(fileSegment.TargetFileDuration == sourceInfo.Duration);
#endif
    var duration = fileSegment.TargetFileDuration.Value;

    ResultsTools.ConvertEventsToIndices(
        analyzer,
        mergedEventResults,
        ref mergedIndicesResults,
        duration,
        configuration.EventThreshold);
    int eventsCount = mergedEventResults?.Length ?? 0;
    int numberOfRowsOfIndices = mergedIndicesResults?.Length ?? 0;

    // 10. Allow analysers to post-process
    // TODO: remove results directory if possible
    var instanceOutputDirectory = AnalysisCoordinator.GetNamedDirectory(analysisSettings.AnalysisOutputDirectory, analyzer);

    // 11. IMPORTANT - this is where IAnalyser2's post processor gets called.
    // Produces all spectrograms and images of SPECTRAL INDICES.
    // Long duration spectrograms are drawn IFF analysis type is Towsey.Acoustic
    analyzer.SummariseResults(analysisSettings, fileSegment, mergedEventResults, mergedIndicesResults, mergedSpectralIndexResults, analyserResults);

    // 12. SAVE THE RESULTS
    string fileNameBase = Path.GetFileNameWithoutExtension(sourceAudio.Name);

    var eventsFile = ResultsTools.SaveEvents(analyzer, fileNameBase, instanceOutputDirectory, mergedEventResults);
    var indicesFile = ResultsTools.SaveSummaryIndices(analyzer, fileNameBase, instanceOutputDirectory, mergedIndicesResults);
    var spectraFile = ResultsTools.SaveSpectralIndices(analyzer, fileNameBase, instanceOutputDirectory, mergedSpectralIndexResults);

    // 13. THIS IS WHERE SUMMARY INDICES ARE PROCESSED
    //     Convert summary indices to black and white tracks image
    if (mergedIndicesResults == null)
    {
        Log.Info("No summary indices produced");
    }
    else
    {
        if (indicesPropertiesConfig == null || !indicesPropertiesConfig.Exists)
        {
            throw new InvalidOperationException("Cannot process indices without an index configuration file, the file could not be found!");
        }

        // this arbitrary amount of data.
        if (mergedIndicesResults.Length > 5000)
        {
            Log.Warn("Summary Indices Image not able to be drawn - there are too many indices to render");
        }
        else
        {
            var basename = Path.GetFileNameWithoutExtension(fileNameBase);
            string imageTitle = $"SOURCE:{basename},   {Meta.OrganizationTag};  ";

            // Draw Tracks-Image of Summary indices
            // set time scale resolution for drawing of summary index tracks
            TimeSpan timeScale = TimeSpan.FromSeconds(0.1);
            Bitmap tracksImage = IndexDisplay.DrawImageOfSummaryIndices(
                IndexProperties.GetIndexProperties(indicesPropertiesConfig),
                indicesFile,
                imageTitle,
                timeScale,
                fileSegment.TargetFileStartDate);
            var imagePath = FilenameHelpers.AnalysisResultPath(instanceOutputDirectory, basename, "SummaryIndices", ImageFileExt);
            tracksImage.Save(imagePath);
        }
    }

    // 14. wrap up, write stats
    LoggedConsole.WriteLine("INDICES CSV file(s) = " + (indicesFile?.Name ?? "<<No indices result, no file!>>"));
    LoggedConsole.WriteLine("\tNumber of rows (i.e. minutes) in CSV file of indices = " + numberOfRowsOfIndices);
    LoggedConsole.WriteLine(string.Empty);

    if (eventsFile == null)
    {
        LoggedConsole.WriteLine("An Events CSV file was NOT returned.");
    }
    else
    {
        LoggedConsole.WriteLine("EVENTS CSV file(s) = " + eventsFile.Name);
        LoggedConsole.WriteLine("\tNumber of events = " + eventsCount);
    }

    Log.Success($"Analysis Complete.\nSource={sourceAudio.Name}\nOutput={instanceOutputDirectory.FullName}");
}