/// <summary>
/// Aggregates one day of raw data for <paramref name="dataset"/> into per-period /
/// per-method ".nex" files inside <paramref name="targetDirectoryPath"/>.
/// </summary>
/// <typeparam name="T">Unmanaged element type of the raw dataset records.</typeparam>
/// <param name="targetDirectoryPath">Directory that receives the aggregation files.</param>
/// <param name="dataReader">Reader used to stream the raw data in chunks.</param>
/// <param name="dataset">The dataset to aggregate; its parent must be a <c>ChannelInfo</c>.</param>
/// <param name="aggregations">Period / method combinations to compute.</param>
/// <param name="date">Start of the (single) day to aggregate.</param>
/// <param name="force">When true, existing target files are recomputed and overwritten.</param>
/// <param name="cancellationToken">Token to cancel the read / write pipeline.</param>
private void OrchestrateAggregation<T>(string targetDirectoryPath, DataReaderExtensionBase dataReader, DatasetInfo dataset, List<Aggregation> aggregations, DateTime date, bool force, CancellationToken cancellationToken) where T : unmanaged
{
    // Validate the source sample rate up front: the constructor throws when the
    // dataset's rate is not a non-zero integer Hz. The instance itself is unused,
    // hence the discard.
    _ = new SampleRateContainer(dataset.Id, ensureNonZeroIntegerHz: true);

    // prepare variables
    var units = new List<AggregationUnit>();
    var channel = (ChannelInfo)dataset.Parent;

    // prepare one buffer per (aggregation, period, method) combination
    foreach (var aggregation in aggregations)
    {
        foreach (var period in aggregation.Periods)
        {
#warning Ensure that period is a sensible value

            foreach (var entry in aggregation.Methods)
            {
                var method = entry.Key;
                var arguments = entry.Value;

                // translate method enum to the file-name identifier
                var methodIdentifier = method switch
                {
                    AggregationMethod.Mean => "mean",
                    AggregationMethod.MeanPolar => "mean_polar",
                    AggregationMethod.Min => "min",
                    AggregationMethod.Max => "max",
                    AggregationMethod.Std => "std",
                    AggregationMethod.Rms => "rms",
                    AggregationMethod.MinBitwise => "min_bitwise",
                    AggregationMethod.MaxBitwise => "max_bitwise",
                    AggregationMethod.SampleAndHold => "sample_and_hold",
                    AggregationMethod.Sum => "sum",
                    _ => throw new Exception($"The aggregation method '{method}' is unknown.")
                };

                var targetFileName = $"{channel.Id}_{period}_s_{methodIdentifier}.nex";
                var targetFilePath = Path.Combine(targetDirectoryPath, targetFileName);

                // BUG FIX: previously this tested File.Exists(targetFileName), i.e. a
                // path relative to the current working directory, so the skip logic for
                // already-computed files never triggered. The full target path must be
                // checked.
                if (force || !File.Exists(targetFilePath))
                {
                    // one aggregated value per 'period' seconds over a full day
                    var buffer = new double[86400 / period];

                    var unit = new AggregationUnit()
                    {
                        Aggregation = aggregation,
                        Period = period,
                        Method = method,
                        Argument = arguments,
                        Buffer = buffer,
                        TargetFilePath = targetFilePath
                    };

                    units.Add(unit);
                }
                // else: target file already exists and force == false -> skip this
                // period / method combination
            }
        }
    }

    if (!units.Any())
        return;

    // process data
    var fundamentalPeriod = TimeSpan.FromMinutes(10); // required to ensure that the aggregation functions get data with a multiple length of 10 minutes
    var endDate = date.AddDays(1);
    var blockSizeLimit = _aggregationChunkSizeMb * 1000 * 1000;

    // read raw data chunk-wise and fold each chunk into the unit buffers
    foreach (var progressRecord in dataReader.Read(dataset, date, endDate, blockSizeLimit, fundamentalPeriod, cancellationToken))
    {
        var dataRecord = progressRecord.DatasetToRecordMap.First().Value;

        // aggregate data
        var partialBuffersMap = this.ApplyAggregationFunction(dataset, (T[])dataRecord.Dataset, dataRecord.Status, units);

        foreach (var entry in partialBuffersMap)
        {
            // append the chunk's aggregated values to the unit's day buffer
            var partialBuffer = entry.Value;
            var unit = entry.Key;

            Array.Copy(partialBuffer, 0, unit.Buffer, unit.BufferPosition, partialBuffer.Length);
            unit.BufferPosition += partialBuffer.Length;
        }
    }

    // write data to file; the file access manager serializes concurrent access,
    // so registration must always be paired with unregistration (finally)
    foreach (var unit in units)
    {
        try
        {
            _fileAccessManager.Register(unit.TargetFilePath, cancellationToken);

            if (File.Exists(unit.TargetFilePath))
                File.Delete(unit.TargetFilePath);

            // create data file
            AggregationFile.Create<double>(unit.TargetFilePath, unit.Buffer);
        }
        finally
        {
            _fileAccessManager.Unregister(unit.TargetFilePath);
        }
    }
}
/// <summary>
/// Creates a temporary on-disk test database with two projects and three
/// 100 Hz "mean" aggregation files (two consecutive days for project 1,
/// one day for project 2), plus two intentionally empty month folders.
/// </summary>
/// <returns>The root path of the generated database (under the system temp folder).</returns>
private string InitializeDatabase()
{
    // create a unique root so parallel test runs cannot collide
    var root = Path.Combine(Path.GetTempPath(), $"Nexus.Tests.{Guid.NewGuid()}");
    Directory.CreateDirectory(root);

    var project1 = "/A/B/C";
    var project2 = "/A2/B/C";

    // each project gets an empty "2020-06" month folder to exercise
    // empty-folder handling in the code under test
    var projectFolder1 = Path.Combine(root, "DATA", WebUtility.UrlEncode(project1));
    var dataFolderPathEmpty1 = Path.Combine(projectFolder1, "2020-06");
    Directory.CreateDirectory(dataFolderPathEmpty1);

    var projectFolder2 = Path.Combine(root, "DATA", WebUtility.UrlEncode(project2));
    var dataFolderPathEmpty2 = Path.Combine(projectFolder2, "2020-06");
    Directory.CreateDirectory(dataFolderPathEmpty2);

    // sample-count offsets for a 100 Hz day: 86400 s * 100 Hz samples per day,
    // 3600 s * 100 Hz samples per hour
    var dayOffset = 86400 * 100;
    var hourOffset = 360000;
    var halfHourOffset = hourOffset / 2;

    var id1 = Guid.NewGuid();
    var id2 = Guid.NewGuid();

    // day 1: NaN-filled buffer with a few known marker values at the start and end
    var dateTime1 = new DateTime(2020, 07, 08);
    var folderPath1 = Path.Combine(projectFolder1, dateTime1.ToString("yyyy-MM"), dateTime1.ToString("dd"));
    var filePath1 = Path.Combine(folderPath1, $"{id1}_100_Hz_mean.nex");
    Directory.CreateDirectory(folderPath1);

    var buffer1 = new double[86400 * 100];
    buffer1.AsSpan().Fill(double.NaN);
    buffer1[0] = 99.27636;
    buffer1[2] = 99.27626;
    buffer1[dayOffset - 1] = 2323e-3;

    AggregationFile.Create<double>(filePath1, buffer1);

    // day 2: marker values at the start, one hour before the end, half an hour
    // before the end, and at the very end of the day
    var dateTime2 = new DateTime(2020, 07, 09);
    var folderPath2 = Path.Combine(projectFolder1, dateTime2.ToString("yyyy-MM"), dateTime2.ToString("dd"));
    var filePath2 = Path.Combine(folderPath2, $"{id1}_100_Hz_mean.nex");
    Directory.CreateDirectory(folderPath2);

    var buffer2 = new double[86400 * 100];
    buffer2.AsSpan().Fill(double.NaN);
    buffer2[0] = 98.27636;
    buffer2[2] = 97.27626;
    buffer2[dayOffset - hourOffset - 1] = 2323e-6;
    buffer2[dayOffset - halfHourOffset + 0] = 90.27636;
    buffer2[dayOffset - halfHourOffset + 2] = 90.27626;
    buffer2[dayOffset - 1] = 2323e-9;

    AggregationFile.Create<double>(filePath2, buffer2);

    // second project: reuses day-1 data under a different id
    var folderPath3 = Path.Combine(projectFolder2, dateTime1.ToString("yyyy-MM"), dateTime1.ToString("dd"));
    var filePath3 = Path.Combine(folderPath3, $"{id2}_100_Hz_mean.nex");
    Directory.CreateDirectory(folderPath3);

    AggregationFile.Create<double>(filePath3, buffer1);

    // BUG FIX: the return statement was commented out ("// return(root);"),
    // which is invalid for a string-returning method.
    return root;
}