public Task<DataAvailabilityStatistics> GetDataAvailabilityStatistics(string campaignName, DateTime dateTimeBegin, DateTime dateTimeEnd)
{
    long fileId = -1;

    ulong lengthPerDay;
    ulong offset;
    ulong start;
    ulong stride;
    ulong block;
    ulong count;

    double totalDays;

    int[] data;
    int[] aggregatedData;

    DateTime epochStart;
    DateTime epochEnd;

    DataAvailabilityGranularity granularity;

    return Task.Run(() =>
    {
        this.CheckState();

        // epoch & hyperslab (validate the requested range before opening the file,
        // so an invalid request cannot leak the HDF file handle)
        epochStart = new DateTime(2000, 01, 01);
        epochEnd = new DateTime(2030, 01, 01);

        if (!(epochStart <= dateTimeBegin && dateTimeBegin <= dateTimeEnd && dateTimeEnd <= epochEnd))
        {
            throw new Exception("requirement >> epochStart <= dateTimeBegin && dateTimeBegin <= dateTimeEnd && dateTimeEnd <= epochEnd << is not matched");
        }

        lengthPerDay = OneDasUtilities.GetSamplesPerDayFromString("is_chunk_completed_set");

        start = (ulong)Math.Floor((dateTimeBegin - epochStart).TotalDays * lengthPerDay);
        stride = 1;
        block = (ulong)Math.Ceiling((dateTimeEnd - dateTimeBegin).TotalDays * lengthPerDay);
        count = 1;

        // open file
        fileId = H5F.open(_options.VdsFilePath, H5F.ACC_RDONLY);

        try
        {
            // get data
            totalDays = (dateTimeEnd - dateTimeBegin).TotalDays;
            data = IOHelper.ReadDataset<byte>(fileId, $"{ campaignName }/is_chunk_completed_set", start, stride, block, count).Select(value => (int)value).ToArray();

            if (totalDays <= 7)
            {
                // up to one week: return the raw chunk flags unaggregated
                granularity = DataAvailabilityGranularity.ChunkLevel;
                aggregatedData = data;
            }
            else if (totalDays <= 365)
            {
                // up to one year: aggregate to one availability percentage per day;
                // 'offset' shifts the hyperslab (which starts at dateTimeBegin, possibly
                // mid-day) onto midnight-aligned day boundaries
                granularity = DataAvailabilityGranularity.DayLevel;
                offset = (ulong)dateTimeBegin.TimeOfDay.TotalMinutes;
                aggregatedData = new int[(int)Math.Ceiling(totalDays)];

                Parallel.For(0, (int)Math.Ceiling(totalDays), day =>
                {
                    ulong startIndex; // inclusive
                    ulong endIndex;   // exclusive

                    startIndex = (ulong)day * lengthPerDay;
                    endIndex = startIndex + lengthPerDay;

                    if ((int)startIndex - (int)offset < 0)
                    {
                        startIndex = 0;
                    }
                    else
                    {
                        startIndex = startIndex - offset;
                    }

                    if (endIndex - offset >= (ulong)data.Length)
                    {
                        endIndex = (ulong)data.Length;
                    }
                    else
                    {
                        endIndex = endIndex - offset;
                    }

                    aggregatedData[day] = (int)((double)data.Skip((int)startIndex).Take((int)(endIndex - startIndex)).Sum() / (endIndex - startIndex) * 100);
                });
            }
            else
            {
                // more than one year: aggregate to one availability percentage per month;
                // 'offset' is the distance from the first of the month to dateTimeBegin
                int totalMonths;
                DateTime totalDateTimeBegin;

                totalMonths = (dateTimeEnd.Month - dateTimeBegin.Month) + 1 + 12 * (dateTimeEnd.Year - dateTimeBegin.Year);
                totalDateTimeBegin = new DateTime(dateTimeBegin.Year, dateTimeBegin.Month, 1);

                granularity = DataAvailabilityGranularity.MonthLevel;
                offset = (ulong)(dateTimeBegin - totalDateTimeBegin).TotalMinutes;
                aggregatedData = new int[totalMonths];

                Parallel.For(0, totalMonths, month =>
                {
                    ulong startIndex; // inclusive
                    ulong endIndex;   // exclusive

                    DateTime currentDateTimeBegin;
                    DateTime currentDateTimeEnd;

                    currentDateTimeBegin = totalDateTimeBegin.AddMonths(month);
                    currentDateTimeEnd = currentDateTimeBegin.AddMonths(1);

                    if ((currentDateTimeBegin - totalDateTimeBegin).TotalMinutes - offset < 0)
                    {
                        startIndex = 0;
                    }
                    else
                    {
                        startIndex = (ulong)(currentDateTimeBegin - totalDateTimeBegin).TotalMinutes - offset;
                    }

                    if ((currentDateTimeEnd - totalDateTimeBegin).TotalMinutes - offset >= data.Length)
                    {
                        endIndex = (ulong)data.Length;
                    }
                    else
                    {
                        endIndex = (ulong)(currentDateTimeEnd - totalDateTimeBegin).TotalMinutes - offset;
                    }

                    aggregatedData[month] = (int)((double)data.Skip((int)startIndex).Take((int)(endIndex - startIndex)).Sum() / (endIndex - startIndex) * 100);
                });
            }
        }
        finally
        {
            // clean up
            if (H5I.is_valid(fileId) > 0) { H5F.close(fileId); }
        }

        return new DataAvailabilityStatistics(granularity, aggregatedData);
    });
}
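// A minimal usage sketch (placeholders only: "/MY_CAMPAIGN" and the 'hdfExplorer'
// instance are assumptions, and the property names are inferred from the
// DataAvailabilityStatistics constructor arguments, not confirmed by this file).
// Assuming "is_chunk_completed_set" holds one completion flag per minute (1440 per
// day), a 31-day request falls into the DayLevel branch above, where each day's value
// is sum(flags) / count(flags) * 100:
//
//     DataAvailabilityStatistics statistics = await hdfExplorer.GetDataAvailabilityStatistics(
//         "/MY_CAMPAIGN",
//         new DateTime(2019, 01, 01),
//         new DateTime(2019, 02, 01));
//
//     // statistics.Granularity    -> DataAvailabilityGranularity.DayLevel
//     // statistics.AggregatedData -> 31 values, each in the range 0..100 (percent)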
public bool WriteZipFileCampaignEntry(ZipArchive zipArchive, FileGranularity fileGranularity, FileFormat fileFormat, ZipSettings zipSettings)
{
    IList<VariableDescription> variableDescriptionSet;
    IList<CustomMetadataEntry> customMetadataEntrySet;

    ZipArchiveEntry zipArchiveEntry;
    DataWriterExtensionLogicBase dataWriter;
    DataWriterExtensionSettingsBase settings;
    DataWriterContext dataWriterContext;

    string directoryPath;
    string[] campaignName_splitted;
    string[] filePathSet;

    int currentFile;
    int fileCount;

    // build variable descriptions
    variableDescriptionSet = new List<VariableDescription>();

    zipSettings.CampaignInfo.Value.ToList().ForEach(variableInfo =>
    {
        variableInfo.Value.ForEach(datasetName =>
        {
            long groupId = -1;
            long typeId = -1;
            long datasetId = -1;

            string displayName;
            string groupName;
            string unit;

            ulong samplesPerDay;

            OneDasDataType oneDasDataType;

            hdf_transfer_function_t[] hdf_transfer_function_t_set;
            List<TransferFunction> transferFunctionSet;

            try
            {
                groupId = H5G.open(zipSettings.SourceFileId, $"{ zipSettings.CampaignInfo.Key }/{ variableInfo.Key }");
                datasetId = H5D.open(groupId, datasetName);
                typeId = H5D.get_type(datasetId);

                displayName = IOHelper.ReadAttribute<string>(groupId, "name_set").Last();
                groupName = IOHelper.ReadAttribute<string>(groupId, "group_set").Last();
                unit = IOHelper.ReadAttribute<string>(groupId, "unit_set").LastOrDefault();
                hdf_transfer_function_t_set = IOHelper.ReadAttribute<hdf_transfer_function_t>(groupId, "transfer_function_set");
                transferFunctionSet = hdf_transfer_function_t_set.Select(tf => new TransferFunction(DateTime.ParseExact(tf.date_time, "yyyy-MM-ddTHH-mm-ssZ", CultureInfo.InvariantCulture), tf.type, tf.option, tf.argument)).ToList();

                oneDasDataType = OneDasUtilities.GetOneDasDataTypeFromType(TypeConversionHelper.GetTypeFromHdfTypeId(typeId));
                samplesPerDay = OneDasUtilities.GetSamplesPerDayFromString(datasetName);

                variableDescriptionSet.Add(new VariableDescription(new Guid(variableInfo.Key), displayName, datasetName, groupName, oneDasDataType, samplesPerDay, unit, transferFunctionSet, typeof(ISimpleDataStorage)));
            }
            finally
            {
                if (H5I.is_valid(datasetId) > 0) { H5D.close(datasetId); }
                if (H5I.is_valid(groupId) > 0) { H5G.close(groupId); }
                if (H5I.is_valid(typeId) > 0) { H5T.close(typeId); }
            }
        });
    });

    dataWriter = null;

    // REMOVE ONCE IMPLEMENTED PROPERLY
    switch (fileFormat)
    {
        case FileFormat.CSV:
            //settings = new CsvSettings() { FileGranularity = fileGranularity };
            //dataWriter = new CsvWriter((CsvSettings)settings, new LoggerFactory());
            break;

        case FileFormat.GAM:
            //settings = new GamSettings() { FileGranularity = fileGranularity };
            //dataWriter = new GamWriter((GamSettings)settings, new LoggerFactory());
            break;

        case FileFormat.MAT73:
            //settings = new Mat73Settings() { FileGranularity = fileGranularity };
            //dataWriter = new Mat73Writer((Mat73Settings)settings, new LoggerFactory());
            break;

        default:
            throw new NotImplementedException();
    }

    // guard: fail fast with a clear exception instead of a NullReferenceException
    // while the concrete writers above are commented out
    if (dataWriter == null)
    {
        throw new NotImplementedException($"No data writer is implemented for the file format '{ fileFormat }'.");
    }

    // create temp directory
    directoryPath = Path.Combine(Path.GetTempPath(), "OneDas.Hdf.Explorer", Guid.NewGuid().ToString());
    Directory.CreateDirectory(directoryPath);

    // create custom meta data
    customMetadataEntrySet = new List<CustomMetadataEntry>();
    //customMetadataEntrySet.Add(new CustomMetadataEntry("system_name", "HDF Explorer", CustomMetadataEntryLevel.File));

    // initialize data writer
    campaignName_splitted = zipSettings.CampaignInfo.Key.Split('/');
    dataWriterContext = new DataWriterContext("HDF Explorer", directoryPath, new OneDasCampaignDescription(Guid.Empty, 0, campaignName_splitted[1], campaignName_splitted[2], campaignName_splitted[3]), customMetadataEntrySet);
    dataWriter.Configure(dataWriterContext, variableDescriptionSet);

    // create temp files
    try
    {
        if (!this.CreateFiles(dataWriter, zipSettings))
        {
            this.CleanUp(directoryPath);
            return false;
        }
    }
    finally
    {
        dataWriter.Dispose();
    }

    // write zip archive entries
    filePathSet = Directory.GetFiles(directoryPath, "*", SearchOption.AllDirectories);
    currentFile = 0;
    fileCount = filePathSet.Length;

    foreach (string filePath in filePathSet)
    {
        zipArchiveEntry = zipArchive.CreateEntry(Path.GetFileName(filePath), CompressionLevel.Optimal);

        this.OnProgressUpdated(new ProgressUpdatedEventArgs(currentFile / (double)fileCount * 100, $"Writing file { currentFile + 1 } / { fileCount } to ZIP archive ..."));

        using (FileStream fileStream = File.Open(filePath, FileMode.Open, FileAccess.Read))
        {
            using (Stream zipArchiveEntryStream = zipArchiveEntry.Open())
            {
                fileStream.CopyTo(zipArchiveEntryStream);
            }
        }

        currentFile++;
    }

    this.CleanUp(directoryPath);

    return true;
}
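// A minimal usage sketch for the export path (assumptions: 'hdfExplorer', 'zipFilePath',
// a prepared 'zipSettings', and the FileGranularity.Day enum member are placeholders,
// not confirmed call-site details of this repository). Note that all concrete writers
// above are commented out, so every format currently ends in a NotImplementedException
// until they are restored:
//
//     using (FileStream zipFileStream = File.Open(zipFilePath, FileMode.Create, FileAccess.ReadWrite))
//     using (ZipArchive zipArchive = new ZipArchive(zipFileStream, ZipArchiveMode.Create))
//     {
//         bool success = hdfExplorer.WriteZipFileCampaignEntry(zipArchive, FileGranularity.Day, FileFormat.CSV, zipSettings);
//     }
//
// Because entries are created with Path.GetFileName(filePath), the temporary directory
// structure is flattened inside the archive; files with identical names from different
// subdirectories would collide.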