public WorkingDirectory([NotNull] Logger logger, [NotNull] RunningConfig config) : base(logger, Stage.Preparation, nameof(WorkingDirectory))
{
    string baseDir = Path.Combine(config.Directories.BaseProcessingDirectory, "unittests");
    string callingMethod = GetCallingMethodAndClass();
    Info("Base dir: " + baseDir);
    Info("Calling Method: " + callingMethod);
    Dir = Path.Combine(baseDir, FilenameHelpers.CleanFileName(callingMethod));
    Info("Used Directory: " + Dir);
    DirDi = new DirectoryInfo(Dir);
    if (DirDi.Exists) {
        try {
            DirDi.Delete(true);
        }
#pragma warning disable CA1031 // Do not catch general exception types
        catch (Exception ex) {
#pragma warning restore CA1031 // Do not catch general exception types
            logger.ErrorM(ex.Message, Stage.Preparation, nameof(WorkingDirectory));
        }

        Thread.Sleep(250);
    }

    DirDi.Create();
    Thread.Sleep(250);
}
public static string GetResultArchiveDirectory([NotNull] ScenarioSliceParameters slice, DateTime startingTime, [NotNull] RunningConfig config, RelativeDirectory relativeDir, [CanBeNull] string chapter)
{
    string date = FilenameHelpers.CleanFileName(startingTime.ToString("yyyy-MM-dd"));
    string scenario = slice.GetFileName();
    if (slice.SmartGridEnabled) {
        scenario += "smart";
    }

    string resultArchiveDirectory = Path.Combine(config.Directories.ResultStorageDirectory, date, scenario, relativeDir.ToString());
    if (chapter != null) {
        resultArchiveDirectory = Path.Combine(config.Directories.ResultStorageDirectory, "Abschlussbericht", date, "Kapitel " + chapter, scenario);
    }

    if (!Directory.Exists(resultArchiveDirectory)) {
        Directory.CreateDirectory(resultArchiveDirectory);
        Thread.Sleep(250);
    }

    return resultArchiveDirectory;
}
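// A minimal usage sketch for the method above, assuming it is called from the same class and that a
// slice and RunningConfig are already available from the surrounding pipeline code; the chapter value
// "4.4" is only a placeholder taken from another example further down this page.
private static void ArchiveDirectoryUsageSketch([NotNull] ScenarioSliceParameters slice, [NotNull] RunningConfig config)
{
    // default layout: <ResultStorageDirectory>/<date>/<scenario>/<relativeDir>
    string archiveDir = GetResultArchiveDirectory(slice, DateTime.Now, config, RelativeDirectory.Report, null);

    // report layout: <ResultStorageDirectory>/Abschlussbericht/<date>/Kapitel 4.4/<scenario>
    string chapterDir = GetResultArchiveDirectory(slice, DateTime.Now, config, RelativeDirectory.Report, "4.4");

    Console.WriteLine(archiveDir + Environment.NewLine + chapterDir);
}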
public string MakeAndRegisterFullFilename([NotNull] string filename, [NotNull] ScenarioSliceParameters slice, bool replaceSpaces = true)
{
    FilesCreated++;
    return FilenameHelpers.MakeAndRegisterFullFilenameStatic(filename, MyStage, SequenceNumber, Name, slice, Services.RunningConfig, replaceSpaces);
}
public void ClearTargetDirectory([NotNull] ScenarioSliceParameters slice)
{
    var fullpath = FilenameHelpers.GetTargetDirectory(MyStage, SequenceNumber, Name, slice, Services.RunningConfig);
    if (Directory.Exists(fullpath)) {
        try {
            DirectoryInfo di = new DirectoryInfo(fullpath);
            var files = di.GetFiles("*.*", SearchOption.AllDirectories);
            foreach (var fileInfo in files) {
                fileInfo.Delete();
            }

            di.Delete(true);
        }
#pragma warning disable CA1031 // Do not catch general exception types
        catch (Exception e) {
#pragma warning restore CA1031 // Do not catch general exception types
            SLogger.Error(e.Message);
        }
    }

    if (!Directory.Exists(fullpath)) {
        Directory.CreateDirectory(fullpath);
        Thread.Sleep(250);
    }
}
/// <summary>
/// Joins summary indices csv files together.
/// This method merges ALL the passed files of acoustic indices.
/// It is assumed you are concatenating a sequence of consecutive short recordings.
/// </summary>
public static List<SummaryIndexValues> ConcatenateAllSummaryIndexFiles(
    FileInfo[] summaryIndexFiles,
    DirectoryInfo opDir,
    IndexGenerationData indexGenerationData,
    string outputFileBaseName)
{
    var indexResolution = indexGenerationData.IndexCalculationDuration;
    var summaryIndices = IndexMatrices.ConcatenateSummaryIndexFilesWithTimeCheck(summaryIndexFiles, indexResolution);
    if (summaryIndices.Count == 0) {
        LoggedConsole.WriteErrorLine("WARNING: LDSpectrogramStitching.ConcatenateAllSummaryIndexFiles(): Empty List of SUMMARY indices returned!");
        return null;
    }

    // check length of data and make adjustments if required.
    // NOTHING done with this info at the moment. Could be used to truncate data to 24 hours.
    //int totalRowMinutes = (int)Math.Round(summaryIndices.Count() * indexResolution.TotalMinutes);

    // write out the list of data file names to JSON file.
    var arrayOfFileNames = summaryIndices.Select(x => x.FileName).ToArray();
    var path = FilenameHelpers.AnalysisResultPath(opDir, outputFileBaseName, "FileNames", "json");
    Json.Serialise(new FileInfo(path), arrayOfFileNames);

    return summaryIndices;
}
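// A minimal sketch of how the concatenation helper above might be driven, assuming the summary index
// csv files already exist in indexDir; "StitchedSite" is a placeholder output base name.
public static void ConcatenateSummaryIndicesSketch(DirectoryInfo indexDir, IndexGenerationData indexGenerationData)
{
    FileInfo[] summaryIndexFiles = indexDir.GetFiles("*.csv");
    var summaryIndices = LDSpectrogramStitching.ConcatenateAllSummaryIndexFiles(summaryIndexFiles, indexDir, indexGenerationData, "StitchedSite");
    if (summaryIndices == null) {
        // nothing usable was returned by the time check
        return;
    }

    Console.WriteLine("Concatenated " + summaryIndices.Count + " summary index rows.");
}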
private void WriteDebugImage(
    AudioRecording recording,
    DirectoryInfo outputDirectory,
    BaseSonogram sonogram,
    List<AcousticEvent> acousticEvents,
    List<Plot> plots,
    double[,] hits)
{
    // DEBUG IMAGE this recogniser only. MUST set false for deployment.
    bool displayDebugImage = MainEntry.InDEBUG;
    if (displayDebugImage) {
        Image debugImage1 = SpectrogramTools.GetSonogramPlusCharts(sonogram, acousticEvents, plots, hits);
        var debugPath1 = outputDirectory.Combine(FilenameHelpers.AnalysisResultName(Path.GetFileNameWithoutExtension(recording.BaseName), this.Identifier, "png", "DebugSpectrogram1"));
        debugImage1.Save(debugPath1.FullName);

        // save new image with longer frame
        var sonoConfig2 = new SonogramConfig
        {
            SourceFName = recording.BaseName,
            WindowSize = 1024,
            WindowOverlap = 0,
            //NoiseReductionType = NoiseReductionType.NONE,
            NoiseReductionType = NoiseReductionType.Standard,
            NoiseReductionParameter = 0.1,
        };
        BaseSonogram sonogram2 = new SpectrogramStandard(sonoConfig2, recording.WavReader);
        var debugPath2 = outputDirectory.Combine(FilenameHelpers.AnalysisResultName(Path.GetFileNameWithoutExtension(recording.BaseName), this.Identifier, "png", "DebugSpectrogram2"));
        Image debugImage2 = SpectrogramTools.GetSonogramPlusCharts(sonogram2, acousticEvents, plots, null);
        debugImage2.Save(debugPath2.FullName);
    }
}
protected override void RunActualProcess(ScenarioSliceParameters slice)
{
    string srcPath = FilenameHelpers.GetTargetDirectory(Stage.ProfileGeneration, 500, nameof(E_ApplySmartGridToGeneratedProfiles), slice, Services.RunningConfig);
    string lz4ProfilePath = Path.Combine(srcPath, "addedProfile.lz4");
    byte[] arr = File.ReadAllBytes(lz4ProfilePath);
    var addedProfileraw = LZ4MessagePackSerializer.Deserialize<Profile>(arr);
    var pos = addedProfileraw.GetOnlyPositive("pos");
    var neg = addedProfileraw.GetOnlyNegative("neg");
    var addedProfile = pos.Add(neg, "added");
    var maxDailyGen = addedProfile.MakeDailyAverages().Values.Max() * 24 * 4;
    double storageSize = maxDailyGen * 2;
    MakeXlsForCurrentProfile(slice);
    MakeExampleWsForSmartgrid(slice, addedProfile, storageSize, 1);
    MakeExampleWsForSmartgrid(slice, addedProfile, storageSize, 0.5);
    MakeStorageSizeSheet(slice, maxDailyGen, addedProfile);
    MakeAbregelungWorksheet(slice, maxDailyGen, addedProfile);
}
private void MakeResultExcel([NotNull][ItemNotNull] List<CSVFile> csvs, [NotNull] ScenarioSliceParameters slice)
{
    string suffix = csvs[0].GenerationOrLoad.ToString();
    using (var p = new ExcelPackage()) {
        var ws = p.Workbook.Worksheets.Add("MySheet");
        var tgt = FilenameHelpers.GetTargetDirectory(MyStage, SequenceNumber, nameof(C01_CSVProfileVisualizer), slice, Services.RunningConfig);
        var row = 1;
        var col = 1;
        ws.Cells[row, col++].Value = "TrafoKreis";
        ws.Cells[row, col++].Value = "ISN ID";
        ws.Cells[row, col++].Value = "CSV Energy";
        ws.Cells[row, col].Value = "DB Energy";
        row++;
        foreach (var csvFile in csvs) {
            foreach (var csvLine in csvFile.Lines) {
                col = 1;
                ws.Cells[row, col++].Value = csvFile.TsName;
                ws.Cells[row, col++].Value = csvLine.HausanschlussID;
                var csvEnergy = csvLine.CalculateTotalEnergy();
                ws.Cells[row, col].Value = csvEnergy;
                row++; // advance to the next data row so lines are not overwritten
            }
        }

        p.SaveAs(new FileInfo(Path.Combine(tgt, "ComparisonCSVvsDB" + suffix + ".xlsx")));
    }
}
private void CopyCSVFiles([NotNull] ScenarioSliceParameters parameters, int sequence, [NotNull] string exporteRName, [NotNull] string subPath)
{
    var path = FilenameHelpers.GetTargetDirectory(Stage.ProfileExport, sequence, exporteRName, parameters);
    var di = new DirectoryInfo(Path.Combine(path, "Export"));
    if (!di.Exists) {
        throw new FlaException("Directory " + di.FullName + " does not exist");
    }

    var csvfiles = di.GetFiles("*.csv");
    if (csvfiles.Length == 0) {
        throw new FlaException("No exported files found");
    }

    string dstpath = @"v:\tstcopy\v01";
    dstpath = Path.Combine(dstpath, parameters.DstScenario.ToString(), parameters.DstYear.ToString(), subPath);
    if (Directory.Exists(dstpath)) {
        Directory.Delete(dstpath, true);
        Thread.Sleep(500);
    }

    Directory.CreateDirectory(dstpath);
    Thread.Sleep(500);
    foreach (var fileInfo in csvfiles) {
        string dstfullName = Path.Combine(dstpath, fileInfo.Name);
        Info("Copying " + dstfullName);
        fileInfo.CopyTo(dstfullName);
    }
}
protected override void RunActualProcess(ScenarioSliceParameters slice)
{
    var fn = Path.Combine(FilenameHelpers.GetTargetDirectory(MyStage, SequenceNumber, Name, slice, Services.RunningConfig), "Export");
    HouseProcessor hp = new HouseProcessor(Services, fn, MyStage);
    hp.ProcessAllHouses(slice, MakeAndRegisterFullFilename, HouseProcessor.ProcessingMode.Preparing, DevelopmentStatus);
}
private void WriteSumLineToCsv([NotNull] Prosumer p, [NotNull] string trafokreis, GenerationOrLoad generationOrLoad)
{
    string tkFileName = FilenameHelpers.CleanFileName(trafokreis);
    var csvFileNameGeneration = Path.Combine(_processingResultPathForProfiles, tkFileName + "." + generationOrLoad + ".csv");
    // using ensures the appended line is flushed and the file handle released even if WriteLine throws
    using (StreamWriter sw = new StreamWriter(csvFileNameGeneration, true)) {
        sw.WriteLine(p.GetCSVLine());
    }
}
/// <summary>
/// This is a cut-down version of the method of the same name in LDSpectrogramRGB.cs.
/// </summary>
/// <param name="ldSpectrogramConfig">config for ldfc spectrogram.</param>
/// <param name="outputDirectory">outputDirectory.</param>
/// <param name="indexGenerationData">indexGenerationData.</param>
/// <param name="basename">stem name of the original recording.</param>
/// <param name="indexSpectrograms">Optional spectra to pass in. If specified the spectra will not be loaded from disk.</param>
private static string DrawSpectrogramsFromSpectralIndices(
    LdSpectrogramConfig ldSpectrogramConfig,
    DirectoryInfo outputDirectory,
    IndexGenerationData indexGenerationData,
    string basename,
    Dictionary<string, double[,]> indexSpectrograms = null)
{
    string colorMap1 = ldSpectrogramConfig.ColorMap1; // SpectrogramConstants.RGBMap_ACI_ENT_EVN;
    string colorMap2 = ldSpectrogramConfig.ColorMap2; // SpectrogramConstants.RGBMap_BGN_PMN_OSC;
    double blueEnhanceParameter = ldSpectrogramConfig.BlueEnhanceParameter.Value;

    var cs1 = new LDSpectrogramRGB(ldSpectrogramConfig, indexGenerationData, colorMap1);
    string fileStem = basename;
    cs1.FileName = fileStem;

    // calculate start time by combining DatetimeOffset with minute offset.
    cs1.StartOffset = indexGenerationData.AnalysisStartOffset;
    if (indexGenerationData.RecordingStartDate.HasValue) {
        DateTimeOffset dto = (DateTimeOffset)indexGenerationData.RecordingStartDate;
        cs1.RecordingStartDate = dto;
        if (dto != null) {
            cs1.StartOffset = dto.TimeOfDay + cs1.StartOffset;
        }
    }

    var indexProperties = IndexCalculateSixOnly.GetIndexProperties();
    cs1.SetSpectralIndexProperties(indexProperties);

    // Load the Index Spectrograms into a Dictionary
    cs1.LoadSpectrogramDictionary(indexSpectrograms);
    if (cs1.GetCountOfSpectrogramMatrices() == 0) {
        Log.Error("No spectrogram matrices in the dictionary. Spectrogram files do not exist?");
        throw new InvalidOperationException("Cannot find spectrogram matrix files");
    }

    // draw all available gray scale index spectrograms.
    var keys = indexProperties.Keys.ToArray();
    cs1.DrawGreyScaleSpectrograms(outputDirectory, fileStem, keys);

    // create two false-color spectrogram images
    var image1NoChrome = cs1.DrawFalseColorSpectrogramChromeless(cs1.ColorMode, colorMap1, blueEnhanceParameter);
    var image2NoChrome = cs1.DrawFalseColorSpectrogramChromeless(cs1.ColorMode, colorMap2, blueEnhanceParameter);
    var spacer = new Image<Rgb24>(image1NoChrome.Width, 10);
    var imageList = new[] { image1NoChrome, spacer, image2NoChrome, spacer };
    Image image3 = ImageTools.CombineImagesVertically(imageList);
    var outputPath = FilenameHelpers.AnalysisResultPath(outputDirectory, fileStem, "2Maps", "png");
    image3.Save(outputPath);
    return outputPath;
}
public static string MakePath(string directory, string baseName, string format, string tag)
{
    if (string.IsNullOrEmpty(format)) {
        return directory;
    }

    Contract.Requires(AllFormats.Contains(format));
    return Path.Combine(directory, FilenameHelpers.AnalysisResultName(baseName, tag, format));
}
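// A small sketch of MakePath, assuming it is called from the same class, that "csv" is one of the
// entries in AllFormats, and that the directory, base-name and tag values are placeholders; the exact
// file name produced is decided by FilenameHelpers.AnalysisResultName.
public static void MakePathSketch()
{
    string csvPath = MakePath(@"C:\output", "recording01", "csv", "Towsey.Acoustic");
    string unchanged = MakePath(@"C:\output", "recording01", null, "Towsey.Acoustic"); // empty format: the directory is returned as-is
    Console.WriteLine(csvPath + Environment.NewLine + unchanged);
}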
protected override void RunActualProcess([NotNull] ScenarioSliceParameters slice)
{
    var fnX = MakeAndRegisterFullFilename(FilenameHelpers.CleanFileName("ExportForAllProviderTypesXlsDumper") + ".xlsx", slice);
    ProcessOneSumTypeWithXls(slice, fnX, SumType.ByProvider);
    var fn = MakeAndRegisterFullFilename(FilenameHelpers.CleanFileName("ExportForAllProviderTypes") + ".xlsx", slice);
    ProcessOneSumType(slice, fn, SumType.ByProvider);
    var fn2 = MakeAndRegisterFullFilename(FilenameHelpers.CleanFileName("ExportForAllProfileSources") + ".xlsx", slice);
    ProcessOneSumType(slice, fn2, SumType.ByProfileSource);
}
/// <summary>
/// Writes a list of erroneous segment properties to file.
/// </summary>
/// <param name="errors">list of erroneous segments.</param>
/// <param name="outputDirectory">directory in which json file to be written.</param>
/// <param name="fileStem">name of json file.</param>
public static void WriteErrorsToFile(List<GapsAndJoins> errors, DirectoryInfo outputDirectory, string fileStem)
{
    // write info to file
    if (errors.Count == 0) {
        return;
    }

    string path = FilenameHelpers.AnalysisResultPath(outputDirectory, fileStem, ErroneousIndexSegmentsFilenameFragment, "json");

    // ReSharper disable once RedundantTypeArgumentsOfMethod
    Json.Serialise<List<GapsAndJoins>>(new FileInfo(path), errors);
}
protected override void RunActualProcess([NotNull] ScenarioSliceParameters slice)
{
    if (!Services.RunningConfig.MakeExcelPerTrafokreis) {
        return;
    }

    var dbArchive = Services.SqlConnectionPreparer.GetDatabaseConnection(Stage.ProfileGeneration, slice, DatabaseCode.SummedLoadForAnalysis);
    var saHouses = SaveableEntry<ArchiveEntry>.GetSaveableEntry(dbArchive, SaveableEntryTableType.SummedHouseProfiles, Services.Logger);
    string currentTrafokreis = "";
    ExcelWorksheet ws;
    int columnIdx = 1;
    ExcelPackage p = new ExcelPackage();
    ws = p.Workbook.Worksheets.Add("sheet1");
    foreach (var house in saHouses.ReadEntireTableDBAsEnumerable("Trafokreis")) {
        if (currentTrafokreis != house.Trafokreis && !string.IsNullOrWhiteSpace(currentTrafokreis)) {
            var fn = MakeAndRegisterFullFilename(FilenameHelpers.CleanFileName(currentTrafokreis) + ".xlsx", slice);
            // ReSharper disable once PossibleNullReferenceException
            p.SaveAs(new FileInfo(fn));
            SaveToArchiveDirectory(fn, RelativeDirectory.Trafokreise, slice);
            p.Dispose();
            p = new ExcelPackage();
            ws = p.Workbook.Worksheets.Add(currentTrafokreis);
            columnIdx = 2;
        }

        currentTrafokreis = house.Trafokreis;
        // ReSharper disable once PossibleNullReferenceException
        ws.Cells[1, columnIdx].Value = house.Name;
        int rowIdx = 2;
        for (int i = 0; i < house.Profile.Values.Count; i++) {
            ws.Cells[rowIdx, columnIdx].Value = house.Profile.Values[i];
            rowIdx++;
        }

        columnIdx++;
    }

    var fn2 = MakeAndRegisterFullFilename(FilenameHelpers.CleanFileName(currentTrafokreis) + ".xlsx", slice);
    // ReSharper disable once PossibleNullReferenceException
    p.SaveAs(new FileInfo(fn2));
    SaveToArchiveDirectory(fn2, RelativeDirectory.Trafokreise, slice);
    p.Dispose();
}
public override string GetFileBaseName(SortedSet<Layer> calculatedLayers, Layer selectedLayer, Point tileOffsets)
{
    // discard Y coordinate
    var xOffset = (TimeSpan)this.GetTileIndexes(calculatedLayers, selectedLayer, tileOffsets);
    var tileDate = this.baseDateUtc.Add(xOffset);
    var formattedDateTime = tileDate.ToString(AppConfigHelper.Iso8601FileCompatibleDateFormatUtcWithFractionalSeconds);
    var zoomIndex = (double)this.GetZoomIndex(calculatedLayers, selectedLayer);
    var basename = FilenameHelpers.AnalysisResultName(
        this.prefix,
        this.tag,
        null,
        formattedDateTime,
        zoomIndex.ToString(CultureInfo.InvariantCulture));
    return basename;
}
private static FileInfo SaveResults<T>(DirectoryInfo outputDirectory, string resultFilenameBase, string analysisTag, Action<FileInfo, IEnumerable<T>> serialiseFunc, IEnumerable<T> results)
{
    if (results == null) {
        Log.Debug("No results returned... file not written:" + resultFilenameBase + ReportFileExt);
        return null;
    }

    var reportFileInfo = FilenameHelpers.AnalysisResultPath(outputDirectory, resultFilenameBase, analysisTag, ReportFileExt).ToFileInfo();
    serialiseFunc(reportFileInfo, results);
    return reportFileInfo;
}
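// A sketch of calling the generic SaveResults<T> helper above from the same class, assuming the
// Json.Serialise method used elsewhere on this page is acceptable as the serialiser via a lambda;
// the base name "recording01", the tag "Towsey.Acoustic" and the event array are placeholders.
private static void SaveResultsSketch(DirectoryInfo outputDirectory, EventBase[] events)
{
    FileInfo written = SaveResults<EventBase>(
        outputDirectory,
        "recording01",
        "Towsey.Acoustic",
        (file, data) => Json.Serialise(file, data),
        events);
    if (written == null) {
        Console.WriteLine("nothing was written");
    }
}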
public void WriteDebugImage(string recordingFileName, DirectoryInfo outputDirectory, BaseSonogram sonogram, List<AcousticEvent> events, List<Plot> scores, double[,] hits)
{
    // DEBUG IMAGE this recognizer only. MUST set false for deployment.
    bool displayDebugImage = MainEntry.InDEBUG;
    if (displayDebugImage) {
        bool doHighlightSubband = false;
        bool add1kHzLines = true;
        Image_MultiTrack image = new Image_MultiTrack(sonogram.GetImage(doHighlightSubband, add1kHzLines, doMelScale: false));
        image.AddTrack(ImageTrack.GetTimeTrack(sonogram.Duration, sonogram.FramesPerSecond));
        if (scores != null) {
            foreach (Plot plot in scores) {
                // assumes data normalised in 0,1
                image.AddTrack(ImageTrack.GetNamedScoreTrack(plot.data, 0.0, 1.0, plot.threshold, plot.title));
            }
        }

        if (hits != null) {
            image.OverlayRainbowTransparency(hits);
        }

        if (events.Count > 0) {
            // set colour for the events
            foreach (AcousticEvent ev in events) {
                ev.BorderColour = AcousticEvent.DefaultBorderColor;
                ev.ScoreColour = AcousticEvent.DefaultScoreColor;
            }

            image.AddEvents(events, sonogram.NyquistFrequency, sonogram.Configuration.FreqBinCount, sonogram.FramesPerSecond);
        }

        var debugImage = image.GetImage();
        var debugPath = outputDirectory.Combine(FilenameHelpers.AnalysisResultName(Path.GetFileNameWithoutExtension(recordingFileName), this.Identifier, "png", "DebugSpectrogram"));
        debugImage.Save(debugPath.FullName);
    }
}
public static List<FileInfo> WriteSpectrumIndicesFilesCustom(DirectoryInfo destination, string fileNameBase, IEnumerable<SpectralIndexBase> results)
{
    var selectors = results.First().GetSelectors();
    var spectralIndexFiles = new List<FileInfo>(selectors.Count);
    foreach (var kvp in selectors) {
        // write spectrogram to disk as CSV file
        var filename = FilenameHelpers.AnalysisResultPath(destination, fileNameBase, TowseyAcoustic + "." + kvp.Key, "csv").ToFileInfo();
        spectralIndexFiles.Add(filename);
        Csv.WriteMatrixToCsv(filename, results, kvp.Value);
    }

    return spectralIndexFiles;
}
public override List<FileInfo> WriteSpectrumIndicesFiles(DirectoryInfo destination, string fileNameBase, IEnumerable<SpectralIndexBase> results)
{
    // Group results based on configuration
    Dictionary<IndexCalculateConfig, List<SpectralIndexBase>> dict = GroupResultsOnConfiguration(results);
    var spectralIndexFiles = new List<FileInfo>();

    // For each group and for each selector, create a csv file
    foreach (var configGroup in dict) {
        var groupResults = configGroup.Value;
        var selectors = groupResults.First().GetSelectors();

        // Get the values of the band configuration; since they all have the same configuration, just get the first item
        var config = (SpectralIndexValues)groupResults.First();
        string minBandWidth = config.Configuration.MinBandWidth.ToString();
        string maxBandWidth = config.Configuration.MaxBandWidth.ToString();
        string mel;
        string melScale;
        if (config.Configuration.MelScale != 0) {
            mel = "Mel";
            melScale = config.Configuration.MelScale.ToString();
        } else {
            mel = "Standard";
            melScale = 0.ToString();
        }

        string fftWindow = config.Configuration.FrameLength.ToString();
        foreach (var kvp in selectors) {
            // write spectrogram to disk as CSV file
            var filename = FilenameHelpers.AnalysisResultPath(destination, fileNameBase, this.Identifier + "." + kvp.Key, "csv", minBandWidth, maxBandWidth, mel, melScale, "FftWindow", fftWindow).ToFileInfo();
            spectralIndexFiles.Add(filename);
            Csv.WriteMatrixToCsv(filename, groupResults, kvp.Value);
        }
    }

    return spectralIndexFiles;
}
public static void DrawSummaryIndexFiles(
    Dictionary<string, double[]> dictionaryOfCsvColumns,
    IndexGenerationData indexGenerationData,
    FileInfo indexPropertiesConfigFileInfo,
    DirectoryInfo opDir,
    SiteDescription siteDescription,
    FileInfo sunriseDatafile = null,
    List<GapsAndJoins> erroneousSegments = null, // info if have fatal errors i.e. no signal
    bool verbose = false)
{
    var dto = (DateTimeOffset)indexGenerationData.RecordingStartDate;
    string dateString = $"{dto.Year}{dto.Month:D2}{dto.Day:D2}";
    string opFileStem = $"{siteDescription.SiteName}_{dateString}";

    // Calculate the index distribution statistics and write to a json file. Also save as png image
    var indexDistributions = IndexDistributions.WriteSummaryIndexDistributionStatistics(dictionaryOfCsvColumns, opDir, opFileStem);

    var start = ((DateTimeOffset)indexGenerationData.RecordingStartDate).TimeOfDay;
    string startTime = $"{start.Hours:d2}{start.Minutes:d2}h";
    if (start.Hours == 0 && start.Minutes == 0) {
        startTime = "midnight";
    }

    string titletext = $"SOURCE: \"{opFileStem}\". Starts at {startTime} {Meta.OrganizationTag}";
    Bitmap tracksImage = IndexDisplay.DrawImageOfSummaryIndices(
        IndexProperties.GetIndexProperties(indexPropertiesConfigFileInfo),
        dictionaryOfCsvColumns,
        titletext,
        indexGenerationData.IndexCalculationDuration,
        indexGenerationData.RecordingStartDate,
        sunriseDatafile,
        erroneousSegments,
        verbose);
    var imagePath = FilenameHelpers.AnalysisResultPath(opDir, opFileStem, SummaryIndicesStr, ImgFileExt);
    tracksImage.Save(imagePath);
}
public override List<FileInfo> WriteSpectrumIndicesFiles(DirectoryInfo destination, string fileNameBase, IEnumerable<SpectralIndexBase> results)
{
    if (!results.Any()) {
        return null;
    }

    var selectors = results.First().GetSelectors();
    var spectralIndexFiles = new List<FileInfo>(selectors.Count);
    foreach (var kvp in selectors) {
        // write spectrum to disk as CSV file
        var filename = FilenameHelpers.AnalysisResultPath(destination, fileNameBase, this.Identifier + "." + kvp.Key, "csv").ToFileInfo();
        spectralIndexFiles.Add(filename);
        Csv.WriteMatrixToCsv(filename, results, kvp.Value);
    }

    return spectralIndexFiles;
}
protected override void RunActualProcess(ScenarioSliceParameters slice)
{
    {
        var smartSlice = slice.CopyThisSlice();
        smartSlice.SmartGridEnabled = true;
        var path = FilenameHelpers.GetTargetDirectory(Stage.ProfileGeneration, E_ApplySmartGridToGeneratedProfiles.MySequenceNumber, nameof(E_ApplySmartGridToGeneratedProfiles), smartSlice, Services.RunningConfig);
        var di = new DirectoryInfo(path);
        if (di.Exists) {
            // not every scenario has a smart version
            var csvs1 = ReadCSVFiles(GenerationOrLoad.Load, di);
            ProcessAllCharts(csvs1, smartSlice);
            var csvs2 = ReadCSVFiles(GenerationOrLoad.Generation, di);
            ProcessAllCharts(csvs2, smartSlice);
        }
    }

    {
        var path = FilenameHelpers.GetTargetDirectory(Stage.ProfileGeneration, B_LoadProfileGenerator.MySequenceNumber, nameof(B_LoadProfileGenerator), slice, Services.RunningConfig);
        var di = new DirectoryInfo(Path.Combine(path, "Export"));
        if (!di.Exists) {
            throw new FlaException("Directory " + di.FullName + " does not exist");
        }

        var csvs1 = ReadCSVFiles(GenerationOrLoad.Load, di);
        ProcessAllCharts(csvs1, slice);
        var csvs2 = ReadCSVFiles(GenerationOrLoad.Generation, di);
        ProcessAllCharts(csvs2, slice);
    }
}
protected override void RunActualProcess([NotNull][ItemNotNull] List<ScenarioSliceParameters> scenarioSliceList)
{
    ScenarioSliceParameters slice = scenarioSliceList.Last();

    // Create a MigraDoc document
    var pdfDstPath = FilenameHelpers.GetTargetDirectory(Stage.PDF, SequenceNumber, Name, slice);
    var document = CreateDocument();
    ProcessScenario(document, slice.DstScenario);
    var renderer = new PdfDocumentRenderer(true)
    {
        Document = document
    };
    renderer.RenderDocument();

    // Save the document...
    if (!Directory.Exists(pdfDstPath)) {
        Directory.CreateDirectory(pdfDstPath);
    }

    var filename = "Report." + slice.DstScenario + ".pdf";
    var dstFullName = Path.Combine(pdfDstPath, filename);
    if (File.Exists(dstFullName)) {
        File.Delete(dstFullName);
    }

    renderer.PdfDocument.Save(dstFullName);
    GC.WaitForPendingFinalizers();
    GC.Collect();

    // ...and start a viewer.
    // ReSharper disable once ConditionIsAlwaysTrueOrFalse
    if (Startpdf) {
        Process.Start(dstFullName);
    }
}
protected override void RunActualProcess([NotNull] ScenarioSliceParameters slice)
{
    var fn = MakeAndRegisterFullFilename(FilenameHelpers.CleanFileName("SummedLoadProfileExport.xlsx"), slice);
    var dbArchive = Services.SqlConnectionPreparer.GetDatabaseConnection(Stage.ProfileGeneration, slice, DatabaseCode.SummedLoadForAnalysis);
    var saHouses = SaveableEntry<ArchiveEntry>.GetSaveableEntry(dbArchive, SaveableEntryTableType.SummedLoadsForAnalysis, Services.Logger);
    var dbRaw = Services.SqlConnectionPreparer.GetDatabaseConnection(Stage.Raw, Constants.PresentSlice);
    var bkwArr = dbRaw.Fetch<BkwProfile>();
    var bkwjSonProfile = bkwArr[0];
    var entries = saHouses.LoadAllOrMatching();
    var providerentries = entries.Where(x => x.Key.SumType == SumType.ByProvider).ToList();
    List<Profile> profiles = new List<Profile>();
    foreach (var providerentry in providerentries) {
        providerentry.Profile.Name = (providerentry.Key.ProviderType ?? throw new FlaException()) + " " + providerentry.Key.GenerationOrLoad;
        if (providerentry.Key.GenerationOrLoad == GenerationOrLoad.Load) {
            profiles.Add(providerentry.Profile);
        } else {
            profiles.Add(providerentry.Profile.MultiplyWith(-1, providerentry.Profile.Name));
        }
    }

    profiles = MergeProfiles(profiles);
    var bkwProfile = new Profile(bkwjSonProfile.Profile);
    bkwProfile.Name = "Messung 2017 [kW]";
    profiles.Add(bkwProfile);
    XlsxDumper.DumpProfilesToExcel(fn, slice.DstYear, 15, new ProfileWorksheetContent("Profiles", "Last [kW]", bkwProfile.Name, profiles));
    SaveToArchiveDirectory(fn, RelativeDirectory.Report, slice);
    SaveToPublicationDirectory(fn, slice, "4.4");
    SaveToPublicationDirectory(fn, slice, "5");
    Info("saved " + fn);
}
public override List<FileInfo> WriteSpectrumIndicesFiles(DirectoryInfo destination, string fileNameBase, IEnumerable<SpectralIndexBase> results)
{
    // get selectors and remove the unwanted ones because these indices were never calculated.
    var spectralIndexBases = results.ToList();
    var selectors = spectralIndexBases.First().GetSelectors();

    // TODO: REMOVE unused index filter with new Spectral Indices child class
    foreach (var indexName in ContentSignatures.UnusedIndexNames) {
        selectors.Remove(indexName);
    }

    var spectralIndexFiles = new List<FileInfo>(selectors.Count);
    foreach (var kvp in selectors) {
        // write spectrogram to disk as CSV file
        var filename = FilenameHelpers.AnalysisResultPath(destination, fileNameBase, TowseyContentDescription + "." + kvp.Key, "csv").ToFileInfo();
        spectralIndexFiles.Add(filename);
        Csv.WriteMatrixToCsv(filename, spectralIndexBases, kvp.Value);
    }

    return spectralIndexFiles;
}
public void SummariseResults(AnalysisSettings settings, FileSegment inputFileSegment, EventBase[] events, SummaryIndexBase[] indices, SpectralIndexBase[] spectralIndices, AnalysisResult2[] results)
{
    var acousticIndicesConfig = (AcousticIndicesConfig)settings.AnalysisAnalyzerSpecificConfiguration;
    var sourceAudio = inputFileSegment.Source;
    var resultsDirectory = AnalysisCoordinator.GetNamedDirectory(settings.AnalysisOutputDirectory, this);
    bool tileOutput = acousticIndicesConfig.TileOutput;
    var frameWidth = acousticIndicesConfig.FrameLength;
    int sampleRate = AppConfigHelper.DefaultTargetSampleRate;
    sampleRate = acousticIndicesConfig.ResampleRate ?? sampleRate;

    // Gather settings for rendering false color spectrograms
    var ldSpectrogramConfig = acousticIndicesConfig.LdSpectrogramConfig;
    string basename = Path.GetFileNameWithoutExtension(sourceAudio.Name);

    // output to disk (so other analyzers can use the data),
    // only data - configuration settings that generated these indices
    // this data can then be used by post-process analyses
    /* NOTE: The value for FrameStep is used only when calculating a standard spectrogram
     * FrameStep is NOT used when calculating Summary and Spectral indices.
     */
    var indexConfigData = new IndexGenerationData()
    {
        RecordingExtension = inputFileSegment.Source.Extension,
        RecordingBasename = basename,
        RecordingStartDate = inputFileSegment.TargetFileStartDate,
        RecordingDuration = inputFileSegment.TargetFileDuration.Value,
        SampleRateOriginal = inputFileSegment.TargetFileSampleRate.Value,
        SampleRateResampled = sampleRate,
        FrameLength = frameWidth,
        FrameStep = settings.Configuration.GetIntOrNull(AnalysisKeys.FrameStep) ?? frameWidth,
        IndexCalculationDuration = acousticIndicesConfig.IndexCalculationDurationTimeSpan,
        BgNoiseNeighbourhood = acousticIndicesConfig.BgNoiseBuffer,
        AnalysisStartOffset = inputFileSegment.SegmentStartOffset ?? TimeSpan.Zero,
        MaximumSegmentDuration = settings.AnalysisMaxSegmentDuration,
        BackgroundFilterCoeff = SpectrogramConstants.BACKGROUND_FILTER_COEFF,
        LongDurationSpectrogramConfig = ldSpectrogramConfig,
    };
    var icdPath = FilenameHelpers.AnalysisResultPath(
        resultsDirectory,
        basename,
        IndexGenerationData.FileNameFragment,
        "json");
    Json.Serialise(icdPath.ToFileInfo(), indexConfigData);

    // gather spectra to form spectrograms. Assume same spectra in all analyzer results
    // this is the most efficient way to do this
    // gather up numbers and strings store in memory, write to disk one time
    // this method also AUTOMATICALLY SORTS because it uses array indexing
    var dictionaryOfSpectra = spectralIndices.ToTwoDimensionalArray(SpectralIndexValues.CachedSelectors, TwoDimensionalArray.Rotate90ClockWise);

    // Calculate the index distribution statistics and write to a json file. Also save as png image
    var indexDistributions = IndexDistributions.WriteSpectralIndexDistributionStatistics(dictionaryOfSpectra, resultsDirectory, basename);

    // HACK: do not render false color spectrograms unless IndexCalculationDuration = 60.0 (the normal resolution)
    if (acousticIndicesConfig.IndexCalculationDurationTimeSpan != 60.0.Seconds()) {
        Log.Warn("False color spectrograms were not rendered");
    } else {
        FileInfo indicesPropertiesConfig = acousticIndicesConfig.IndexPropertiesConfig.ToFileInfo();

        // Actually draw false color / long duration spectrograms
        Tuple<Image<Rgb24>, string>[] images = LDSpectrogramRGB.DrawSpectrogramsFromSpectralIndices(
            inputDirectory: resultsDirectory,
            outputDirectory: resultsDirectory,
            ldSpectrogramConfig: ldSpectrogramConfig,
            indexPropertiesConfigPath: indicesPropertiesConfig,
            indexGenerationData: indexConfigData,
            basename: basename,
            analysisType: this.Identifier,
            indexSpectrograms: dictionaryOfSpectra,
            indexStatistics: indexDistributions,
            imageChrome: (!tileOutput).ToImageChrome());

        if (tileOutput) {
            Debug.Assert(images.Length == 2);
            Log.Info("Tiling output at scale: " + acousticIndicesConfig.IndexCalculationDuration);
            foreach (var image in images) {
                TileOutput(resultsDirectory, Path.GetFileNameWithoutExtension(sourceAudio.Name), image.Item2 + ".Tile", inputFileSegment, image.Item1);
            }
        }
    }
}
/// <summary>
/// Do your analysis. This method is called once per segment (typically one-minute segments).
/// </summary>
/// <param name="recording"></param>
/// <param name="configuration"></param>
/// <param name="segmentStartOffset"></param>
/// <param name="getSpectralIndexes"></param>
/// <param name="outputDirectory"></param>
/// <param name="imageWidth"></param>
/// <returns></returns>
public override RecognizerResults Recognize(AudioRecording recording, Config configuration, TimeSpan segmentStartOffset, Lazy<IndexCalculateResult[]> getSpectralIndexes, DirectoryInfo outputDirectory, int? imageWidth)
{
    var recognizerConfig = new LitoriaNasutaConfig();
    recognizerConfig.ReadConfigFile(configuration);

    // BETTER TO SET THESE. IGNORE USER!
    // this default framesize seems to work
    const int frameSize = 1024;
    const double windowOverlap = 0.0;

    // i: MAKE SONOGRAM
    var sonoConfig = new SonogramConfig
    {
        SourceFName = recording.BaseName,
        WindowSize = frameSize,
        WindowOverlap = windowOverlap,
        // use the default HAMMING window
        //WindowFunction = WindowFunctions.HANNING.ToString(),
        //WindowFunction = WindowFunctions.NONE.ToString(),
        // if do not use noise reduction can get a more sensitive recogniser.
        //NoiseReductionType = NoiseReductionType.None
        NoiseReductionType = NoiseReductionType.Standard,
        NoiseReductionParameter = 0.0,
    };

    TimeSpan recordingDuration = recording.WavReader.Time;
    int sr = recording.SampleRate;
    double freqBinWidth = sr / (double)sonoConfig.WindowSize;
    int minBin = (int)Math.Round(recognizerConfig.MinHz / freqBinWidth) + 1;
    int maxBin = (int)Math.Round(recognizerConfig.MaxHz / freqBinWidth) + 1;
    var decibelThreshold = 3.0;

    BaseSonogram sonogram = new SpectrogramStandard(sonoConfig, recording.WavReader);

    // ######################################################################
    // ii: DO THE ANALYSIS AND RECOVER SCORES OR WHATEVER
    int rowCount = sonogram.Data.GetLength(0);
    double[] amplitudeArray = MatrixTools.GetRowAveragesOfSubmatrix(sonogram.Data, 0, minBin, rowCount - 1, maxBin);
    //double[] topBand = MatrixTools.GetRowAveragesOfSubmatrix(sonogram.Data, 0, maxBin + 3, (rowCount - 1), maxBin + 9);
    //double[] botBand = MatrixTools.GetRowAveragesOfSubmatrix(sonogram.Data, 0, minBin - 3, (rowCount - 1), minBin - 9);

    var acousticEvents = AcousticEvent.ConvertScoreArray2Events(
        amplitudeArray,
        recognizerConfig.MinHz,
        recognizerConfig.MaxHz,
        sonogram.FramesPerSecond,
        freqBinWidth,
        decibelThreshold,
        recognizerConfig.MinDuration,
        recognizerConfig.MaxDuration,
        segmentStartOffset);
    double[,] hits = null;

    var prunedEvents = new List<AcousticEvent>();
    acousticEvents.ForEach(ae =>
    {
        ae.SpeciesName = recognizerConfig.SpeciesName;
        ae.SegmentDurationSeconds = recordingDuration.TotalSeconds;
        ae.SegmentStartSeconds = segmentStartOffset.TotalSeconds;
        ae.Name = recognizerConfig.AbbreviatedSpeciesName;
    });

    var thresholdedPlot = new double[amplitudeArray.Length];
    for (int x = 0; x < amplitudeArray.Length; x++) {
        if (amplitudeArray[x] > decibelThreshold) {
            thresholdedPlot[x] = amplitudeArray[x];
        }
    }

    var maxDb = amplitudeArray.MaxOrDefault();
    double[] normalisedScores;
    double normalisedThreshold;
    DataTools.Normalise(thresholdedPlot, decibelThreshold, out normalisedScores, out normalisedThreshold);
    var text = string.Format($"{this.DisplayName} (Fullscale={maxDb:f1}dB)");
    var plot = new Plot(text, normalisedScores, normalisedThreshold);

    if (true) {
        // display a variety of debug score arrays
        DataTools.Normalise(amplitudeArray, decibelThreshold, out normalisedScores, out normalisedThreshold);
        var amplPlot = new Plot("Band amplitude", normalisedScores, normalisedThreshold);
        var debugPlots = new List<Plot> { plot, amplPlot };

        // NOTE: This DrawDebugImage() method can be over-written in this class.
        var debugImage = DrawDebugImage(sonogram, acousticEvents, debugPlots, hits);
        var debugPath = FilenameHelpers.AnalysisResultPath(outputDirectory, recording.BaseName, this.SpeciesName, "png", "DebugSpectrogram");
        debugImage.Save(debugPath);
    }

    return new RecognizerResults()
    {
        Sonogram = sonogram,
        Hits = hits,
        Plots = plot.AsList(),
        Events = acousticEvents,
    };
}
internal RecognizerResults Gruntwork(AudioRecording audioRecording, Config configuration, DirectoryInfo outputDirectory, TimeSpan segmentStartOffset)
{
    double noiseReductionParameter = configuration.GetDoubleOrNull(AnalysisKeys.NoiseBgThreshold) ?? 0.1;

    // make a spectrogram
    var config = new SonogramConfig
    {
        WindowSize = 256,
        NoiseReductionType = NoiseReductionType.Standard,
        NoiseReductionParameter = noiseReductionParameter,
    };
    config.WindowOverlap = 0.0;

    // now construct the standard decibel spectrogram WITH noise removal, and look for LimConvex
    // get frame parameters for the analysis
    var sonogram = (BaseSonogram)new SpectrogramStandard(config, audioRecording.WavReader);

    // remove the DC column
    var spg = MatrixTools.Submatrix(sonogram.Data, 0, 1, sonogram.Data.GetLength(0) - 1, sonogram.Data.GetLength(1) - 1);
    int sampleRate = audioRecording.SampleRate;
    int rowCount = spg.GetLength(0);
    int colCount = spg.GetLength(1);

    int frameSize = config.WindowSize;
    int frameStep = frameSize; // this default = zero overlap
    double frameStepInSeconds = frameStep / (double)sampleRate;
    double framesPerSec = 1 / frameStepInSeconds;

    // reading in variables from the config file
    string speciesName = configuration[AnalysisKeys.SpeciesName] ?? "<no species>";
    string abbreviatedSpeciesName = configuration[AnalysisKeys.AbbreviatedSpeciesName] ?? "<no.sp>";
    int minHz = configuration.GetInt(AnalysisKeys.MinHz);
    int maxHz = configuration.GetInt(AnalysisKeys.MaxHz);

    // ## THREE THRESHOLDS ---- only one of these is given to user.
    // minimum dB to register a dominant freq peak. After noise removal
    double peakThresholdDb = 3.0;

    // The threshold dB amplitude in the dominant freq bin required to yield an event
    double eventThresholdDb = 6;

    // minimum score for an acceptable event - that is when processing the score array.
    double similarityThreshold = configuration.GetDoubleOrNull(AnalysisKeys.EventThreshold) ?? 0.2;

    // IMPORTANT: The following frame durations assume a sampling rate = 22050 and window size of 256.
    int minFrameWidth = 7;
    int maxFrameWidth = 14;
    double minDuration = (minFrameWidth - 1) * frameStepInSeconds;
    double maxDuration = maxFrameWidth * frameStepInSeconds;

    // Calculate Max Amplitude
    int binMin = (int)Math.Round(minHz / sonogram.FBinWidth);
    int binMax = (int)Math.Round(maxHz / sonogram.FBinWidth);

    int[] dominantBins = new int[rowCount]; // predefinition of events max frequency
    double[] scores = new double[rowCount]; // predefinition of score array
    double[,] hits = new double[rowCount, colCount];

    // loop through all spectra/rows of the spectrogram - NB: spg is rotated to vertical.
    // mark the hits in hitMatrix
    for (int s = 0; s < rowCount; s++) {
        double[] spectrum = MatrixTools.GetRow(spg, s);
        double maxAmplitude = double.MinValue;
        int maxId = 0;

        // loop through bandwidth of L.onvex call and look for dominant frequency
        for (int binID = 5; binID < binMax; binID++) {
            if (spectrum[binID] > maxAmplitude) {
                maxAmplitude = spectrum[binID];
                maxId = binID;
            }
        }

        if (maxId < binMin) {
            continue;
        }

        // peak should exceed threshold amplitude
        if (spectrum[maxId] < peakThresholdDb) {
            continue;
        }

        scores[s] = maxAmplitude;
        dominantBins[s] = maxId;

        // Console.WriteLine("Col {0}, Bin {1} ", c, freqBinID);
    } // loop through all spectra

    // Find average amplitude
    double[] amplitudeArray = MatrixTools.GetRowAveragesOfSubmatrix(sonogram.Data, 0, binMin, rowCount - 1, binMax);

    // High pass Filter
    var highPassFilteredSignal = DspFilters.SubtractBaseline(amplitudeArray, 7);

    // We now have a list of potential hits for C. tinnula. This needs to be filtered.
    var startEnds = new List<Point>();
    Plot.FindStartsAndEndsOfScoreEvents(highPassFilteredSignal, eventThresholdDb, minFrameWidth, maxFrameWidth, out var prunedScores, out startEnds);

    // loop through the score array and find beginning and end of potential events
    var potentialEvents = new List<AcousticEvent>();
    foreach (Point point in startEnds) {
        // get average of the dominant bin
        int binSum = 0;
        int binCount = 0;
        int eventWidth = point.Y - point.X + 1;
        for (int s = point.X; s <= point.Y; s++) {
            if (dominantBins[s] >= binMin) {
                binSum += dominantBins[s];
                binCount++;
            }
        }

        // find average dominant bin for the event
        int avDominantBin = (int)Math.Round(binSum / (double)binCount);
        int avDominantFreq = (int)(Math.Round(binSum / (double)binCount) * sonogram.FBinWidth);

        // Get score for the event.
        // Use a simple template for the honk and calculate cosine similarity to the template.
        // Template has three dominant frequencies.
        // minimum number of bins covering frequency bandwidth of C. tinnula call
        int callBinWidth = 14;
        var templates = GetCtinnulaTemplates(callBinWidth);
        var eventMatrix = MatrixTools.Submatrix(spg, point.X, avDominantBin - callBinWidth + 2, point.Y, avDominantBin + 1);
        double eventScore = GetEventScore(eventMatrix, templates);

        // put hits into hits matrix
        // put cosine score into the score array
        for (int s = point.X; s <= point.Y; s++) {
            hits[s, avDominantBin] = 10;
            prunedScores[s] = eventScore;
        }

        if (eventScore < similarityThreshold) {
            continue;
        }

        int topBinForEvent = avDominantBin + 2;
        int bottomBinForEvent = topBinForEvent - callBinWidth;
        double startTime = point.X * frameStepInSeconds;
        double durationTime = eventWidth * frameStepInSeconds;
        var newEvent = new AcousticEvent(segmentStartOffset, startTime, durationTime, minHz, maxHz);
        newEvent.DominantFreq = avDominantFreq;
        newEvent.Score = eventScore;
        newEvent.SetTimeAndFreqScales(framesPerSec, sonogram.FBinWidth);
        newEvent.Name = string.Empty; // remove name because it hides spectral content of the event.
        potentialEvents.Add(newEvent);
    }

    // display the original score array
    scores = DataTools.normalise(scores);
    var debugPlot = new Plot(this.DisplayName, scores, similarityThreshold);

    // DEBUG IMAGE this recognizer only. MUST set false for deployment.
    bool displayDebugImage = MainEntry.InDEBUG;
    if (displayDebugImage) {
        // display a variety of debug score arrays
        DataTools.Normalise(amplitudeArray, eventThresholdDb, out var normalisedScores, out var normalisedThreshold);
        var ampltdPlot = new Plot("Average amplitude", normalisedScores, normalisedThreshold);
        DataTools.Normalise(highPassFilteredSignal, eventThresholdDb, out normalisedScores, out normalisedThreshold);
        var demeanedPlot = new Plot("Hi Pass", normalisedScores, normalisedThreshold);

        /*
         * DataTools.Normalise(scores, eventThresholdDb, out normalisedScores, out normalisedThreshold);
         * var ampltdPlot = new Plot("amplitude", normalisedScores, normalisedThreshold);
         *
         * DataTools.Normalise(lowPassFilteredSignal, decibelThreshold, out normalisedScores, out normalisedThreshold);
         * var lowPassPlot = new Plot("Low Pass", normalisedScores, normalisedThreshold);
         */
        var debugPlots = new List<Plot> { ampltdPlot, demeanedPlot };
        Image debugImage = DisplayDebugImage(sonogram, potentialEvents, debugPlots, null);
        var debugPath = outputDirectory.Combine(FilenameHelpers.AnalysisResultName(Path.GetFileNameWithoutExtension(audioRecording.BaseName), this.Identifier, "png", "DebugSpectrogram"));
        debugImage.Save(debugPath.FullName);
    }

    // display the cosine similarity scores
    var plot = new Plot(this.DisplayName, prunedScores, similarityThreshold);
    var plots = new List<Plot> { plot };

    // add names into the returned events
    foreach (AcousticEvent ae in potentialEvents) {
        ae.Name = "speciesName"; // abbreviatedSpeciesName;
    }

    return new RecognizerResults()
    {
        Events = potentialEvents,
        Hits = hits,
        Plots = plots,
        Sonogram = sonogram,
    };
}