public void TestValidDateFormatsWithOffsetHint()
        {
            foreach (var example in this.validFormatsWithOffsetHint)
            {
                Trace.WriteLine($"Testing format: {example.Key}");
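                // the offsetHint is assumed to act only as a fallback: it supplies the UTC offset (+06:30 here)
                // for file names whose embedded date does not include one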
                Assert.IsTrue(
                    FileDateHelpers.FileNameContainsDateTime(
                        example.Key,
                        out var parsedDate,
                        offsetHint: new TimeSpan(6, 30, 0)),
                    $"Testing format: {example}");

                Assert.AreEqual(example.Value, parsedDate);
            }
        }

        public void TestFileOrderingFunction()
        {
            var fileInfos = this.unorderedFiles.Select(s => s.ToFileInfo());

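            // FilterFilesForDates is expected to pair each file with the date parsed from its name,
            // dropping files without a parseable date and returning the pairs ordered by date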
            var sortedFiles =
                FileDateHelpers.FilterFilesForDates(fileInfos, offsetHint: TimeSpan.FromHours(11)).ToArray();

            // all dates are valid
            Assert.AreEqual(this.orderedDates.Length, sortedFiles.Length);

            for (var i = 0; i < sortedFiles.Length; i++)
            {
                var datePair = sortedFiles[i];
                Debug.WriteLine($"Date parsed: {datePair.Key.ToString("O")}, {datePair.Key.UtcDateTime.ToString("O")}, for file: {datePair.Value}");

                //Assert.AreEqual(this.orderedDates[i], datePair.Key);
            }
        }
Example #3
        /// <summary>
        /// Determine new file names and rename if not a dry run.
        /// </summary>
        /// <param name="files">Array of files.</param>
        /// <param name="isDryRun">Dry run or not.</param>
        /// <param name="timezone">Timezone string to use.</param>
        /// <returns>Array of file names in same order.</returns>
        private static string[] StartParallel(FileInfo[] files, bool isDryRun, TimeSpan timezone)
        {
            var count   = files.Length;
            var results = new string[count];

            Parallel.ForEach(
                files,
                new ParallelOptions()
            {
                MaxDegreeOfParallelism = Environment.ProcessorCount
            },
                (item, state, index) =>
            {
                var item1  = item;
                var index1 = index;

                var fileName = item1.Name;

                var isDateTimeInFileName = FileDateHelpers.FileNameContainsDateTime(fileName);

                if (isDateTimeInFileName)
                {
                    results[index1] = item1.FullName;
                }
                else
                {
                    results[index1] = Path.Combine(item1.DirectoryName, GetNewName(item1, timezone));

                    if (!isDryRun)
                    {
                        File.Move(item1.FullName, results[index1]);
                    }
                }
            });

            return results;
        }
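
        // A minimal, hypothetical call-site sketch for StartParallel above; the directory, search pattern
        // and timezone offset are assumptions, not taken from the original source.
        private static void StartParallelUsageSketch()
        {
            var files = new DirectoryInfo(Directory.GetCurrentDirectory()).GetFiles("*.wav");

            // dry run: compute the prospective file names without renaming anything on disk
            var newNames = StartParallel(files, isDryRun: true, timezone: TimeSpan.FromHours(10));

            foreach (var newName in newNames)
            {
                Console.WriteLine(newName);
            }
        }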
        private DateTimeOffset? ParseDate(DateTimeOffset? suppliedDate = null)
        {
            if (this.dateBehavior == FileDateBehavior.None)
            {
                return null;
            }

            this.triedToParseDate = true;
            DateTimeOffset? date = suppliedDate;

            if (date == null)
            {
                bool fileDateFound = FileDateHelpers.FileNameContainsDateTime(this.Source.Name, out var parsedDate);

                if (fileDateFound)
                {
                    date = parsedDate;
                    Log.Info("Parsed file start date as " + parsedDate.ToString("O"));
                }

                // Historical note: This method previously supported inferring the date of the recording from the file's
                // last modified timestamp. That approach ultimately proved unreliable and inefficient, so support
                // for this edge case was removed in mid-2017.
            }

            if (this.dateBehavior == FileDateBehavior.Required)
            {
                if (!date.HasValue)
                {
                    throw new InvalidFileDateException(
                              "A file date is required but one has not been successfully parsed");
                }
            }

            return date;
        }
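
        // A minimal sketch (hypothetical helper, not part of the original source) of the same
        // parse-or-require pattern ParseDate applies when the date behavior is Required:
        private static DateTimeOffset RequireDateFromFileName(string fileName)
        {
            if (FileDateHelpers.FileNameContainsDateTime(fileName, out var parsedDate))
            {
                return parsedDate;
            }

            throw new InvalidFileDateException(
                "A file date is required but one has not been successfully parsed");
        }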
Example #5
        /// <summary>
        /// 2. Analyses a long audio recording (mp3 or wav) as per the passed config file. Outputs an events.csv file AND an
        /// indices.csv file.
        /// Signed off: Michael Towsey 4th December 2012
        /// </summary>
        public static void Execute(Arguments arguments)
        {
            if (arguments == null)
            {
                throw new NoDeveloperMethodException();
            }

            LoggedConsole.WriteLine("# PROCESS LONG RECORDING");
            LoggedConsole.WriteLine("# DATE AND TIME: " + DateTime.Now);

            // 1. set up the necessary files
            var sourceAudio        = arguments.Source;
            var configFile         = arguments.Config.ToFileInfo();
            var outputDirectory    = arguments.Output;
            var tempFilesDirectory = arguments.TempDir;

            // if a temp dir is not given, use output dir as temp dir
            if (tempFilesDirectory == null)
            {
                Log.Warn("No temporary directory provided, using output directory");
                tempFilesDirectory = outputDirectory;
            }

            // try to automatically find the config file
            if (configFile == null)
            {
                throw new FileNotFoundException("No config file argument provided");
            }
            else if (!configFile.Exists)
            {
                Log.Warn($"Config file {configFile.FullName} not found... attempting to resolve config file");

                // we use .ToString() here to get the original input string - using FullName always produces an absolute path relative to the pwd... we don't want to prematurely make assumptions:
                // e.g. we require a missing absolute path to fail... that wouldn't work with .Name
                // e.g. we require a relative path to try and resolve; using .FullName would fail the first absolute check inside ResolveConfigFile
                configFile = ConfigFile.Resolve(configFile.ToString(), Directory.GetCurrentDirectory().ToDirectoryInfo());
            }

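            // ^ is exclusive-or: supplying only one of StartOffset/EndOffset is invalid - they must be
            // provided together or not at all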
            if (arguments.StartOffset.HasValue ^ arguments.EndOffset.HasValue)
            {
                throw new InvalidStartOrEndException("If StartOffset or EndOffset is specified, then both must be specified");
            }

            if (arguments.StartOffset.HasValue && arguments.EndOffset.HasValue && arguments.EndOffset.Value <= arguments.StartOffset.Value)
            {
                throw new InvalidStartOrEndException("Start offset must be less than end offset.");
            }

            LoggedConsole.WriteLine("# Recording file:      " + sourceAudio.FullName);
            LoggedConsole.WriteLine("# Configuration file:  " + configFile);
            LoggedConsole.WriteLine("# Output folder:       " + outputDirectory);
            LoggedConsole.WriteLine("# Temp File Directory: " + tempFilesDirectory);

            // optionally copy logs / config to make results easier to understand
            // TODO: remove, see https://github.com/QutEcoacoustics/audio-analysis/issues/133
            if (arguments.WhenExitCopyConfig || arguments.WhenExitCopyLog)
            {
                AppDomain.CurrentDomain.ProcessExit += (sender, args) => { Cleanup(arguments, configFile); };
            }

            // 2. initialize the analyzer
            // we're changing the way resolving config files works. Ideally, we'd like to use statically typed config files
            // but we can't do that unless we know which type we have to load first! Currently analyzer to load is in
            // the config file so we can't know which analyzer we can use. Thus we will change to using the file name,
            // or an argument to resolve the analyzer to load.
            // Get analysis name:
            IAnalyser2 analyzer = FindAndCheckAnalyzer<IAnalyser2>(arguments.AnalysisIdentifier, configFile.Name);

            // 2. get the analysis config
            AnalyzerConfig configuration = analyzer.ParseConfig(configFile);

            SaveBehavior saveIntermediateWavFiles  = configuration.SaveIntermediateWavFiles;
            bool         saveIntermediateDataFiles = configuration.SaveIntermediateCsvFiles;
            SaveBehavior saveSonogramsImages       = configuration.SaveSonogramImages;

            bool filenameDate = configuration.RequireDateInFilename;

            if (configuration[AnalysisKeys.AnalysisName].IsNotWhitespace())
            {
                Log.Warn("Your config file has `AnalysisName` set - this property is deprecated and ignored");
            }

            // AT 2018-02: changed logic so default index properties loaded if not provided
            FileInfo indicesPropertiesConfig = IndexProperties.Find(configuration, configFile);

            if (indicesPropertiesConfig == null || !indicesPropertiesConfig.Exists)
            {
                Log.Warn("IndexProperties config can not be found! Loading a default");
                indicesPropertiesConfig = ConfigFile.Default <Dictionary <string, IndexProperties> >();
            }

            LoggedConsole.WriteLine("# IndexProperties Cfg: " + indicesPropertiesConfig.FullName);

            // min score for an acceptable event
            Log.Info("Minimum event threshold has been set to " + configuration.EventThreshold);

            FileSegment.FileDateBehavior defaultBehavior = FileSegment.FileDateBehavior.Try;
            if (filenameDate)
            {
                if (!FileDateHelpers.FileNameContainsDateTime(sourceAudio.Name))
                {
                    throw new InvalidFileDateException(
                              "When RequireDateInFilename option is set, the filename of the source audio file must contain "
                              + "a valid AND UNAMBIGUOUS date. Such a date was not able to be parsed.");
                }

                defaultBehavior = FileSegment.FileDateBehavior.Required;
            }

            // 3. initialize the AnalysisCoordinator class that will do the analysis
            var analysisCoordinator = new AnalysisCoordinator(
                new LocalSourcePreparer(),
                saveIntermediateWavFiles,
                false,
                arguments.Parallel);

            // 4. get the segment of audio to be analysed
            // if tiling output, specify that FileSegment needs to be able to read the date
            var fileSegment         = new FileSegment(sourceAudio, arguments.AlignToMinute, null, defaultBehavior);
            var bothOffsetsProvided = arguments.StartOffset.HasValue && arguments.EndOffset.HasValue;

            if (bothOffsetsProvided)
            {
                fileSegment.SegmentStartOffset = TimeSpan.FromSeconds(arguments.StartOffset.Value);
                fileSegment.SegmentEndOffset   = TimeSpan.FromSeconds(arguments.EndOffset.Value);
            }
            else
            {
                Log.Debug("Neither start nor end segment offsets provided. Therefore both were ignored.");
            }

            // 6. initialize the analysis settings object
            var analysisSettings = analyzer.DefaultSettings;

            analysisSettings.ConfigFile                = configFile;
            analysisSettings.Configuration             = configuration;
            analysisSettings.AnalysisOutputDirectory   = outputDirectory;
            analysisSettings.AnalysisTempDirectory     = tempFilesDirectory;
            analysisSettings.AnalysisDataSaveBehavior  = saveIntermediateDataFiles;
            analysisSettings.AnalysisImageSaveBehavior = saveSonogramsImages;
            analysisSettings.AnalysisChannelSelection  = arguments.Channels;
            analysisSettings.AnalysisMixDownToMono     = arguments.MixDownToMono;

            var segmentDuration = configuration.SegmentDuration?.Seconds();

            if (!segmentDuration.HasValue)
            {
                segmentDuration = analysisSettings.AnalysisMaxSegmentDuration ?? TimeSpan.FromMinutes(1);
                Log.Warn(
                    $"Can't read `{nameof(AnalyzerConfig.SegmentDuration)}` from config file. "
                    + $"Default value of {segmentDuration} used)");
            }

            analysisSettings.AnalysisMaxSegmentDuration = segmentDuration.Value;

            var segmentOverlap = configuration.SegmentOverlap?.Seconds();

            if (!segmentOverlap.HasValue)
            {
                segmentOverlap = analysisSettings.SegmentOverlapDuration;
                Log.Warn(
                    $"Can't read `{nameof(AnalyzerConfig.SegmentOverlap)}` from config file. "
                    + $"Default value of {segmentOverlap} used)");
            }

            analysisSettings.SegmentOverlapDuration = segmentOverlap.Value;

            // set target sample rate
            var resampleRate = configuration.ResampleRate;

            if (!resampleRate.HasValue)
            {
                resampleRate = analysisSettings.AnalysisTargetSampleRate ?? AppConfigHelper.DefaultTargetSampleRate;
                Log.Warn(
                    $"Can't read {nameof(configuration.ResampleRate)} from config file. "
                    + $"Default value of {resampleRate} used)");
            }

            analysisSettings.AnalysisTargetSampleRate = resampleRate;

            Log.Info(
                $"{nameof(configuration.SegmentDuration)}={segmentDuration}, "
                + $"{nameof(configuration.SegmentOverlap)}={segmentOverlap}, "
                + $"{nameof(configuration.ResampleRate)}={resampleRate}");

            // 7. ####################################### DO THE ANALYSIS ###################################
            LoggedConsole.WriteLine("START ANALYSIS ...");
            var analyserResults = analysisCoordinator.Run(fileSegment, analyzer, analysisSettings);

            // ##############################################################################################
            // 8. PROCESS THE RESULTS
            LoggedConsole.WriteLine(string.Empty);
            LoggedConsole.WriteLine("START PROCESSING RESULTS ...");
            if (analyserResults == null)
            {
                LoggedConsole.WriteErrorLine("###################################################\n");
                LoggedConsole.WriteErrorLine("The Analysis Run Coordinator has returned a null result.");
                LoggedConsole.WriteErrorLine("###################################################\n");
                throw new AnalysisOptionDevilException();
            }

            // Merge and correct main result types
            EventBase[]         mergedEventResults         = ResultsTools.MergeResults(analyserResults, ar => ar.Events, ResultsTools.CorrectEvent);
            SummaryIndexBase[]  mergedIndicesResults       = ResultsTools.MergeResults(analyserResults, ar => ar.SummaryIndices, ResultsTools.CorrectSummaryIndex);
            SpectralIndexBase[] mergedSpectralIndexResults = ResultsTools.MergeResults(analyserResults, ar => ar.SpectralIndices, ResultsTools.CorrectSpectrumIndex);

            // not an exceptional state, do not throw exception
            if (mergedEventResults != null && mergedEventResults.Length == 0)
            {
                LoggedConsole.WriteWarnLine("The analysis produced no EVENTS (mergedResults had zero count)");
            }

            if (mergedIndicesResults != null && mergedIndicesResults.Length == 0)
            {
                LoggedConsole.WriteWarnLine("The analysis produced no Summary INDICES (mergedResults had zero count)");
            }

            if (mergedSpectralIndexResults != null && mergedSpectralIndexResults.Length == 0)
            {
                LoggedConsole.WriteWarnLine("The analysis produced no Spectral INDICES (merged results had zero count)");
            }

            // 9. CREATE SUMMARY INDICES IF NECESSARY (FROM EVENTS)
#if DEBUG
            // get the duration of the original source audio file - need this to convert Events datatable to Indices Datatable
            var audioUtility = new MasterAudioUtility(tempFilesDirectory);
            var mimeType     = MediaTypes.GetMediaType(sourceAudio.Extension);
            var sourceInfo   = audioUtility.Info(sourceAudio);

            // updated by reference all the way down in LocalSourcePreparer
            Debug.Assert(fileSegment.TargetFileDuration == sourceInfo.Duration);
#endif
            var duration = fileSegment.TargetFileDuration.Value;

            ResultsTools.ConvertEventsToIndices(
                analyzer,
                mergedEventResults,
                ref mergedIndicesResults,
                duration,
                configuration.EventThreshold);
            int eventsCount           = mergedEventResults?.Length ?? 0;
            int numberOfRowsOfIndices = mergedIndicesResults?.Length ?? 0;

            // 10. Allow analysers to post-process

            // TODO: remove results directory if possible
            var instanceOutputDirectory =
                AnalysisCoordinator.GetNamedDirectory(analysisSettings.AnalysisOutputDirectory, analyzer);

            // 11. IMPORTANT - this is where IAnalyser2's post processor gets called.
            // Produces all spectrograms and images of SPECTRAL INDICES.
            // Long duration spectrograms are drawn IFF analysis type is Towsey.Acoustic
            analyzer.SummariseResults(analysisSettings, fileSegment, mergedEventResults, mergedIndicesResults, mergedSpectralIndexResults, analyserResults);

            // 12. SAVE THE RESULTS
            string fileNameBase = Path.GetFileNameWithoutExtension(sourceAudio.Name);

            var eventsFile  = ResultsTools.SaveEvents(analyzer, fileNameBase, instanceOutputDirectory, mergedEventResults);
            var indicesFile = ResultsTools.SaveSummaryIndices(analyzer, fileNameBase, instanceOutputDirectory, mergedIndicesResults);
            var spectraFile = ResultsTools.SaveSpectralIndices(analyzer, fileNameBase, instanceOutputDirectory, mergedSpectralIndexResults);

            // 13. THIS IS WHERE SUMMARY INDICES ARE PROCESSED
            //     Convert summary indices to black and white tracks image
            if (mergedIndicesResults == null)
            {
                Log.Info("No summary indices produced");
            }
            else
            {
                if (indicesPropertiesConfig == null || !indicesPropertiesConfig.Exists)
                {
                    throw new InvalidOperationException("Cannot process indices without an index configuration file, the file could not be found!");
                }

                // 5000 rows is an arbitrary limit on the amount of data we are prepared to draw.
                if (mergedIndicesResults.Length > 5000)
                {
                    Log.Warn("Summary Indices Image not able to be drawn - there are too many indices to render");
                }
                else
                {
                    var    basename   = Path.GetFileNameWithoutExtension(fileNameBase);
                    string imageTitle = $"SOURCE:{basename},   {Meta.OrganizationTag};  ";

                    // Draw Tracks-Image of Summary indices
                    // set time scale resolution for drawing of summary index tracks
                    TimeSpan timeScale   = TimeSpan.FromSeconds(0.1);
                    Bitmap   tracksImage =
                        IndexDisplay.DrawImageOfSummaryIndices(
                            IndexProperties.GetIndexProperties(indicesPropertiesConfig),
                            indicesFile,
                            imageTitle,
                            timeScale,
                            fileSegment.TargetFileStartDate);
                    var imagePath = FilenameHelpers.AnalysisResultPath(instanceOutputDirectory, basename, "SummaryIndices", ImageFileExt);
                    tracksImage.Save(imagePath);
                }
            }

            // 14. wrap up, write stats
            LoggedConsole.WriteLine("INDICES CSV file(s) = " + (indicesFile?.Name ?? "<<No indices result, no file!>>"));
            LoggedConsole.WriteLine("\tNumber of rows (i.e. minutes) in CSV file of indices = " + numberOfRowsOfIndices);
            LoggedConsole.WriteLine(string.Empty);

            if (eventsFile == null)
            {
                LoggedConsole.WriteLine("An Events CSV file was NOT returned.");
            }
            else
            {
                LoggedConsole.WriteLine("EVENTS CSV file(s) = " + eventsFile.Name);
                LoggedConsole.WriteLine("\tNumber of events = " + eventsCount);
            }

            Log.Success($"Analysis Complete.\nSource={sourceAudio.Name}\nOutput={instanceOutputDirectory.FullName}");
        }
        public static async Task <int> Execute(RibbonPlot.Arguments arguments)
        {
            if (arguments.InputDirectories.IsNullOrEmpty())
            {
                throw new CommandLineArgumentException(
                          $"{nameof(arguments.InputDirectories)} is null or empty - please provide at least one source directory");
            }

            var doNotExist = arguments.InputDirectories.Where(x => !x.Exists);

            if (doNotExist.Any())
            {
                throw new CommandLineArgumentException(
                          $"The following directories given to {nameof(arguments.InputDirectories)} do not exist: " + doNotExist.FormatList());
            }

            if (arguments.OutputDirectory == null)
            {
                arguments.OutputDirectory = arguments.InputDirectories.First();
                Log.Warn(
                    $"{nameof(arguments.OutputDirectory)} was not provided and was automatically set to source directory {arguments.OutputDirectory}");
            }

            if (arguments.Midnight == null || arguments.Midnight == TimeSpan.Zero)
            {
                // we need this to be the width of a day, not zero, for the rounding functions later on
                arguments.Midnight = RibbonPlotDomain;
                Log.Debug($"{nameof(arguments.Midnight)} was reset to {arguments.Midnight}");
            }

            if (arguments.Midnight < TimeSpan.Zero || arguments.Midnight > RibbonPlotDomain)
            {
                throw new InvalidStartOrEndException($"{nameof(arguments.Midnight)} cannot be less than `00:00` or greater than `{RibbonPlotDomain}`");
            }

            LoggedConsole.Write("Begin scanning directories");

            var allIndexFiles = arguments.InputDirectories.SelectMany(IndexGenerationData.FindAll);

            if (allIndexFiles.IsNullOrEmpty())
            {
                throw new MissingDataException($"Could not find `{IndexGenerationData.FileNameFragment}` files in:" + arguments.InputDirectories.FormatList());
            }

            Log.Debug("Checking files have dates");
            var indexGenerationDatas = allIndexFiles.Select(IndexGenerationData.Load);
            var datedIndices         = FileDateHelpers.FilterObjectsForDates(
                indexGenerationDatas,
                x => x.Source,
                y => y.RecordingStartDate,
                arguments.TimeSpanOffsetHint);

            LoggedConsole.WriteLine($"{datedIndices.Count} index generation data files were loaded");
            if (datedIndices.Count == 0)
            {
                throw new MissingDataException("No index generation files had dates, cannot proceed");
            }

            // now find the ribbon plots for these images - there are typically two color maps per index generation
            var datesMappedToColorMaps = new Dictionary<string, Dictionary<DateTimeOffset, FileInfo>>(2);

            foreach (var (date, indexData) in datedIndices)
            {
                Add(indexData.LongDurationSpectrogramConfig.ColorMap1);
                Add(indexData.LongDurationSpectrogramConfig.ColorMap2);

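                // local function: records, for the given color map, the ribbon image (if any) found next
                // to this date's index generation data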
                void Add(string colorMap)
                {
                    if (!datesMappedToColorMaps.ContainsKey(colorMap))
                    {
                        datesMappedToColorMaps.Add(colorMap, new Dictionary<DateTimeOffset, FileInfo>(datedIndices.Count));
                    }

                    // try to find the associated ribbon
                    var searchPattern = "*" + colorMap + LdSpectrogramRibbons.SpectralRibbonTag + "*";

                    if (Log.IsVerboseEnabled())
                    {
                        Log.Verbose($"Searching `{indexData.Source?.Directory}` with pattern `{searchPattern}`.");
                    }

                    var ribbonFile = indexData.Source?.Directory?.EnumerateFiles(searchPattern).FirstOrDefault();

                    if (ribbonFile == null)
                    {
                        Log.Warn($"Did not find expected ribbon file for color map {colorMap} in directory `{indexData.Source?.Directory}`."
                                 + "This can happen if the ribbon is missing or if more than one file matches the color map.");
                    }

                    datesMappedToColorMaps[colorMap].Add(date, ribbonFile);
                }
            }

            // get the min and max dates and other things
            var stats = new RibbonPlotStats(datedIndices, arguments.Midnight.Value);

            Log.Debug($"Files found between {stats.Min:O} and {stats.Max:O}, rendering between {stats.Start:O} and {stats.End:O}, in {stats.Buckets} buckets");

            bool success = false;

            foreach (var (colorMap, ribbons) in datesMappedToColorMaps)
            {
                Log.Info($"Rendering ribbon plot for color map {colorMap}");
                if (ribbons.Count(x => x.Value.NotNull()) == 0)
                {
                    Log.Error($"There are no ribbon files found for color map {colorMap} - skipping this color map");
                    continue;
                }

                var image = CreateRibbonPlot(datedIndices, ribbons, stats);

                var midnight = arguments.Midnight == RibbonPlotDomain
                    ? string.Empty
                    : "Midnight=" + arguments.Midnight.Value.ToString("hhmm");
                var path = FilenameHelpers.AnalysisResultPath(
                    arguments.OutputDirectory,
                    arguments.OutputDirectory.Name,
                    "RibbonPlot",
                    "png",
                    colorMap,
                    midnight);

                using (var file = File.Create(path))
                {
                    image.SaveAsPng(file);
                }

                image.Dispose();

                success = true;
            }

            if (success == false)
            {
                throw new MissingDataException("Could not find any ribbon files for any of the color maps. No ribbon plots were produced.");
            }

            LoggedConsole.WriteSuccessLine("Completed");
            return ExceptionLookup.Ok;
        }
        /// <summary>
        /// All the passed files will be concatenated. Filtering needs to be done somewhere else.
        /// </summary>
        /// <param name="files">array of file names.</param>
        /// <param name="indexCalcDuration">used to match rows of indices to elapsed time in file names.</param>
        public static List <SummaryIndexValues> ConcatenateSummaryIndexFilesWithTimeCheck(FileInfo[] files, TimeSpan indexCalcDuration)
        {
            TimeSpan? offsetHint = new TimeSpan(10, 0, 0);

            DateTimeOffset[] dtoArray = new DateTimeOffset[files.Length];
            var summaryIndices        = new List<SummaryIndexValues>();

            // accumulate the start times for each of the files
            for (int f = 0; f < files.Length; f++)
            {
                if (!files[f].Exists)
                {
                    LoggedConsole.WriteWarnLine($"WARNING: Concatenation Time Check: MISSING FILE: {files[f].FullName}");
                    continue;
                }

                if (!FileDateHelpers.FileNameContainsDateTime(files[f].Name, out var date, offsetHint))
                {
                    LoggedConsole.WriteWarnLine($"WARNING: Concatenation Time Check: INVALID DateTime in File Name {files[f].Name}");
                }

                dtoArray[f] = date;
            }

            // we use the fileName field to distinguish unique input source files
            // this Set allows us to check they are unique and render joins
            var sourceFileNames = new HashSet<string>();

            // now loop through the files again to extract the indices
            for (int i = 0; i < files.Length; i++)
            {
                if (!files[i].Exists)
                {
                    continue;
                }

                var rowsOfCsvFile = Csv.ReadFromCsv<SummaryIndexValues>(files[i], throwOnMissingField: false);

                // check all rows have fileName set
                var thisSourceFileNames = new HashSet<string>();
                foreach (var summaryIndexValues in rowsOfCsvFile)
                {
                    if (summaryIndexValues.FileName.IsNullOrEmpty())
                    {
                        throw new InvalidOperationException($"A supplied summary index file did not have the `{nameof(SummaryIndexValues.FileName)}` field populated. File: {files[i].FullName}");
                    }

                    thisSourceFileNames.Add(summaryIndexValues.FileName);
                }

                // check all found filenames are unique
                foreach (var sourceFileName in thisSourceFileNames)
                {
                    if (sourceFileNames.Contains(sourceFileName))
                    {
                        throw new InvalidOperationException(
                                  $"The summary index files already read previously contained the filename {sourceFileName} - duplicates are not allowed. File: {files[i].FullName}");
                    }

                    sourceFileNames.Add(sourceFileName);
                }

                summaryIndices.AddRange(rowsOfCsvFile);

                // track the row counts
                int partialRowCount = rowsOfCsvFile.Count();

                // calculate elapsed time from the rows
                int accumulatedRowMinutes = (int)Math.Round(partialRowCount * indexCalcDuration.TotalMinutes);

                // calculate the partial elapsed minutes as indexed by file names.
                var elapsedMinutesInFileNames = 0;
                if (i < files.Length - 1)
                {
                    TimeSpan elapsedTimeAccordingToFileNames = dtoArray[i + 1] - dtoArray[i];
                    elapsedMinutesInFileNames = (int)Math.Round(elapsedTimeAccordingToFileNames.TotalMinutes);
                }
                else
                {
                    elapsedMinutesInFileNames = accumulatedRowMinutes; // a hack for the last file
                }

                // Check for Mismatch error in concatenation.
                if (accumulatedRowMinutes != elapsedMinutesInFileNames)
                {
                    string str1 = $"Concatenation: Elapsed Time Mismatch ERROR in csvFile {i + 1}/{files.Length}: {accumulatedRowMinutes} accumulatedRowMinutes != {elapsedMinutesInFileNames} elapsedMinutesInFileNames";
                    LoggedConsole.WriteWarnLine(str1);

                    //dictionary = RepairDictionaryOfArrays(dictionary, rowCounts[i], partialMinutes);
                    int scalingFactor = (int)Math.Round(60.0 / indexCalcDuration.TotalSeconds);
                    int minutesToAdd  = elapsedMinutesInFileNames - accumulatedRowMinutes;
                    int rowsToAdd     = minutesToAdd * scalingFactor;

                    // add in the missing summary index rows
                    for (int j = 0; j < rowsToAdd; j++)
                    {
                        var vector = new SummaryIndexValues {
                            FileName = MissingRowString
                        };
                        summaryIndices.Add(vector);
                    }
                }
            }

            // Can prune the list of summary indices as required.
            //int expectedRowCount = (int)Math.Round(numberOfMinutesInDay / indexCalcDuration.TotalMinutes);
            //if (totalRowCount != expectedRowCount)
            //{
            //    if (IndexMatrices.Verbose)
            //        LoggedConsole.WriteLine("WARNING: INCONSISTENT ELAPSED TIME CHECK from IndexMatrices.GetSummaryIndexFilesAndConcatenateWithTimeCheck() ");
            //    string str = String.Format("   Final Data Row Count = {0}     Estimated Cumulative Duration = {1} minutes", totalRowCount, expectedRowCount);
            //    if (IndexMatrices.Verbose)
            //        LoggedConsole.WriteLine(str);
            //    dictionary = RepairDictionaryOfArrays(dictionary, totalRowCount, expectedRowCount);
            //}

            return summaryIndices;
        }
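
        // A minimal arithmetic sketch (hypothetical helper and values, not part of the original source) of
        // the mismatch repair above: with indexCalcDuration = 60 seconds there is one index row per minute,
        // so a 10 minute gap between file start dates that is covered by only 8 rows of indices needs
        // 2 placeholder rows tagged with MissingRowString.
        private static int CountMissingRows(TimeSpan indexCalcDuration, int accumulatedRowMinutes, int elapsedMinutesInFileNames)
        {
            // rows per minute of recording, e.g. 60 second rows => 1, 30 second rows => 2
            int scalingFactor = (int)Math.Round(60.0 / indexCalcDuration.TotalSeconds);

            return (elapsedMinutesInFileNames - accumulatedRowMinutes) * scalingFactor;
        }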