/// <summary>
/// Constructs a Log instance.
/// </summary>
/// <param name="pathname">Path relative to the App_Log directory of the web site / application root, e.g. "System" maps to ~/App_Log/System/ (application root\App_Log\System\).</param>
/// <param name="granularity">Granularity used to split log files; one of year, month, day, hour.</param>
/// <param name="writeTimeInfo">Whether a timestamp is prepended to each log entry.</param>
/// <exception cref="ArgumentException">Thrown when <paramref name="pathname"/> is null or empty.</exception>
public Log(string pathname, FileGranularity granularity = FileGranularity.day, bool writeTimeInfo = true)
{
    if (string.IsNullOrEmpty(pathname))
    {
        // ArgumentException (instead of the former bare Exception) pinpoints the offending parameter;
        // still catchable by callers that catch Exception.
        throw new ArgumentException("没有初始化 Log 类的 PathName 变量", nameof(pathname));
    }

    // Path.Combine avoids duplicated/missing separators between the base directory and the relative path.
    this.pathName = Path.Combine(System.AppDomain.CurrentDomain.BaseDirectory, "App_Log", pathname);
    this.granularity = granularity;
    this.writeTimeInfo = writeTimeInfo;

    if (!Directory.Exists(this.pathName))
    {
        try
        {
            Directory.CreateDirectory(this.pathName);
        }
        catch
        {
            // Deliberate best-effort: creation may fail (e.g. permissions); a later write
            // attempt will surface the underlying I/O error to the caller.
        }
    }
}
/// <summary>
/// Exports the requested time range of all selected datasets into a ZIP file on disk and
/// returns the relative download path. Runs as a cancellable background task bound to the
/// current connection's token.
/// </summary>
/// <param name="dateTimeBegin">Inclusive start of the requested range; must lie within the 2000-01-01 .. 2030-01-01 epoch.</param>
/// <param name="dateTimeEnd">End of the requested range; must not precede <paramref name="dateTimeBegin"/>.</param>
/// <param name="sampleRateDescription">Textual sample rate (converted via ToSampleRate()); also embedded in the ZIP file name.</param>
/// <param name="sampleRateDescription">Textual sample rate description.</param>
/// <param name="fileFormat">Output format handed to the data writer.</param>
/// <param name="fileGranularity">Granularity used to split the exported files.</param>
/// <param name="campaignInfoSet">Map of campaign -> variable -> dataset names to export.</param>
/// <returns>A task producing "download/&lt;zip name&gt;", or an empty string when there is nothing to export or the export was aborted.</returns>
public Task<string> GetData(DateTime dateTimeBegin, DateTime dateTimeEnd, string sampleRateDescription, FileFormat fileFormat, FileGranularity fileGranularity, Dictionary<string, Dictionary<string, List<string>>> campaignInfoSet)
{
    long fileId = -1;
    long datasetId = -1;

    ulong start;
    ulong stride;
    ulong block;
    ulong count;
    ulong segmentLength;
    ulong segmentSize;
    ulong bytesPerRow;

    double sampleRate;

    DateTime epochStart;
    DateTime epochEnd;

    string zipFilePath;

    // task
    return Task.Run(() =>
    {
        this.CheckState();

        if (!campaignInfoSet.Any())
        {
            return string.Empty;
        }

        // zip file (unique name: timestamp + sample rate + GUID)
        zipFilePath = Path.Combine(_options.SupportDirectoryPath, "EXPORT", $"OneDAS_{ dateTimeBegin.ToString("yyyy-MM-ddTHH-mm") }_{ sampleRateDescription }_{ Guid.NewGuid().ToString() }.zip");

        // sampleRate
        sampleRate = sampleRateDescription.ToSampleRate();

        // epoch & hyperslab
        epochStart = new DateTime(2000, 01, 01);
        epochEnd = new DateTime(2030, 01, 01);

        if (!(epochStart <= dateTimeBegin && dateTimeBegin <= dateTimeEnd && dateTimeEnd <= epochEnd))
        {
            // FIX: message previously said "dateTimeBegin <= epochEnd" although the condition checks "dateTimeEnd <= epochEnd".
            throw new Exception("requirement >> epochStart <= dateTimeBegin && dateTimeBegin <= dateTimeEnd && dateTimeEnd <= epochEnd << is not matched");
        }

        // hyperslab selection: row offset/extent derived from the time range at the given sample rate
        start = (ulong)(Math.Floor((dateTimeBegin - epochStart).TotalSeconds * sampleRate));
        stride = 1;
        block = (ulong)(Math.Ceiling((dateTimeEnd - dateTimeBegin).TotalSeconds * sampleRate));
        count = 1;

        try
        {
            // open file
            fileId = H5F.open(_options.VdsFilePath, H5F.ACC_RDONLY);

            // byte count: sum of the element sizes of all requested datasets ("one row" = one sample of each)
            bytesPerRow = 0;

            foreach (var campaignInfo in campaignInfoSet)
            {
                foreach (var variableInfo in campaignInfo.Value)
                {
                    foreach (string datasetInfo in variableInfo.Value)
                    {
                        try
                        {
                            datasetId = H5D.open(fileId, $"{ campaignInfo.Key }/{ variableInfo.Key }/{ datasetInfo }");
                            bytesPerRow += (ulong)OneDasUtilities.SizeOf(TypeConversionHelper.GetTypeFromHdfTypeId(H5D.get_type(datasetId)));
                        }
                        finally
                        {
                            // always release the HDF handle, even when open/get_type throws
                            if (H5I.is_valid(datasetId) > 0) { H5D.close(datasetId); }
                        }
                    }
                }
            }

            this.GetClient().SendByteCount(bytesPerRow * block);

            // largest multiple of bytesPerRow that fits into 50 MiB (integer division truncates on purpose)
            segmentSize = (50 * 1024 * 1024) / bytesPerRow * bytesPerRow;
            segmentLength = segmentSize / bytesPerRow;

            // ensure that dataset length is multiple of 1 minute
            if ((segmentLength / sampleRate) % 60 != 0)
            {
                segmentLength = (ulong)((ulong)(segmentLength / sampleRate / 60) * 60 * sampleRate);
            }

            // start
            _stateManager.SetState(this.Context.ConnectionId, HdfExplorerState.Loading);

            using (ZipArchive zipArchive = ZipFile.Open(zipFilePath, ZipArchiveMode.Create))
            {
                foreach (var campaignInfo in campaignInfoSet)
                {
                    HdfDataLoader hdfDataLoader;

                    hdfDataLoader = new HdfDataLoader(_stateManager.GetToken(this.Context.ConnectionId));
                    hdfDataLoader.ProgressUpdated += this.OnProgressUpdated;

                    // abort the whole export when one campaign fails / is cancelled
                    if (!hdfDataLoader.WriteZipFileCampaignEntry(zipArchive, fileGranularity, fileFormat, new ZipSettings(dateTimeBegin, campaignInfo, fileId, sampleRate, start, stride, block, count, segmentLength)))
                    {
                        return string.Empty;
                    }
                }
            }
        }
        catch (Exception ex)
        {
            this.WriteLogEntry(ex.Message, true);
            throw;
        }
        finally
        {
            _stateManager.SetState(this.Context.ConnectionId, HdfExplorerState.Idle);

            if (H5I.is_valid(fileId) > 0) { H5F.close(fileId); }
        }

        this.WriteLogEntry($"{ this.Context.GetHttpContext().Connection.RemoteIpAddress } requested data: { dateTimeBegin.ToString("yyyy-MM-dd HH:mm:ss") } to { dateTimeEnd.ToString("yyyy-MM-dd HH:mm:ss") }", false);

        return $"download/{ Path.GetFileName(zipFilePath) }";
    }, _stateManager.GetToken(this.Context.ConnectionId));
}
/// <summary>
/// Exports one campaign into the given ZIP archive: reads variable metadata from the source
/// HDF5 file, writes temporary files via the selected data writer and compresses them into
/// the archive. The temp directory is removed on both success and abort.
/// </summary>
/// <param name="zipArchive">Archive that receives the generated files.</param>
/// <param name="fileGranularity">Granularity used to split the exported files.</param>
/// <param name="fileFormat">Requested output format; currently no writer is wired up, see the switch below.</param>
/// <param name="zipSettings">Source file handle, campaign info and hyperslab parameters.</param>
/// <returns>true on success; false when file creation was aborted.</returns>
/// <exception cref="NotImplementedException">Thrown while no data writer implementation is enabled.</exception>
public bool WriteZipFileCampaignEntry(ZipArchive zipArchive, FileGranularity fileGranularity, FileFormat fileFormat, ZipSettings zipSettings)
{
    IList<VariableDescription> variableDescriptionSet;
    IList<CustomMetadataEntry> customMetadataEntrySet;
    ZipArchiveEntry zipArchiveEntry;
    DataWriterExtensionLogicBase dataWriter;
    DataWriterExtensionSettingsBase settings;
    DataWriterContext dataWriterContext;

    string directoryPath;
    string[] campaignName_splitted;
    string[] filePathSet;
    int currentFile;
    int fileCount;

    // build variable descriptions
    variableDescriptionSet = new List<VariableDescription>();

    zipSettings.CampaignInfo.Value.ToList().ForEach(variableInfo =>
    {
        variableInfo.Value.ForEach(datasetName =>
        {
            long groupId = -1;
            long typeId = -1;
            long datasetId = -1;

            string displayName;
            string groupName;
            string unit;

            ulong samplesPerDay;

            OneDasDataType oneDasDataType;

            hdf_transfer_function_t[] hdf_transfer_function_t_set;
            List<TransferFunction> transferFunctionSet;

            try
            {
                groupId = H5G.open(zipSettings.SourceFileId, $"{ zipSettings.CampaignInfo.Key }/{ variableInfo.Key }");
                datasetId = H5D.open(groupId, datasetName);
                typeId = H5D.get_type(datasetId);

                // the *_set attributes are value histories; the last entry is the current value
                displayName = IOHelper.ReadAttribute<string>(groupId, "name_set").Last();
                groupName = IOHelper.ReadAttribute<string>(groupId, "group_set").Last();
                unit = IOHelper.ReadAttribute<string>(groupId, "unit_set").LastOrDefault();
                hdf_transfer_function_t_set = IOHelper.ReadAttribute<hdf_transfer_function_t>(groupId, "transfer_function_set");
                transferFunctionSet = hdf_transfer_function_t_set.Select(tf => new TransferFunction(DateTime.ParseExact(tf.date_time, "yyyy-MM-ddTHH-mm-ssZ", CultureInfo.InvariantCulture), tf.type, tf.option, tf.argument)).ToList();

                oneDasDataType = OneDasUtilities.GetOneDasDataTypeFromType(TypeConversionHelper.GetTypeFromHdfTypeId(typeId));
                samplesPerDay = OneDasUtilities.GetSamplesPerDayFromString(datasetName);

                variableDescriptionSet.Add(new VariableDescription(new Guid(variableInfo.Key), displayName, datasetName, groupName, oneDasDataType, samplesPerDay, unit, transferFunctionSet, typeof(ISimpleDataStorage)));
            }
            finally
            {
                // release HDF handles in all cases, including partial failure above
                if (H5I.is_valid(datasetId) > 0) { H5D.close(datasetId); }
                if (H5I.is_valid(groupId) > 0) { H5G.close(groupId); }
                if (H5I.is_valid(typeId) > 0) { H5T.close(typeId); }
            }
        });
    });

    dataWriter = null;

    // REMOVE ONCE IMPLEMENTED PROPERLY
    switch (fileFormat)
    {
        case FileFormat.CSV:
            //settings = new CsvSettings() { FileGranularity = fileGranularity };
            //dataWriter = new CsvWriter((CsvSettings)settings, new LoggerFactory());
            break;

        case FileFormat.GAM:
            //settings = new GamSettings() { FileGranularity = fileGranularity };
            //dataWriter = new GamWriter((GamSettings)settings, new LoggerFactory());
            break;

        case FileFormat.MAT73:
            //settings = new Mat73Settings() { FileGranularity = fileGranularity };
            //dataWriter = new Mat73Writer((Mat73Settings)settings, new LoggerFactory());
            break;

        default:
            throw new NotImplementedException();
    }

    // FIX: every writer instantiation above is commented out, so dataWriter stayed null and
    // dataWriter.Configure(...) below crashed with a NullReferenceException. Fail fast with a
    // meaningful exception (consistent with the switch default) until a writer is wired up.
    if (dataWriter == null)
    {
        throw new NotImplementedException($"The file format '{ fileFormat }' is not supported yet.");
    }

    // create temp directory
    directoryPath = Path.Combine(Path.GetTempPath(), "OneDas.Hdf.Explorer", Guid.NewGuid().ToString());
    Directory.CreateDirectory(directoryPath);

    // create custom meta data
    customMetadataEntrySet = new List<CustomMetadataEntry>();
    //customMetadataEntrySet.Add(new CustomMetadataEntry("system_name", "HDF Explorer", CustomMetadataEntryLevel.File));

    // initialize data writer
    // NOTE(review): assumes the campaign key has the form "/<segment1>/<segment2>/<segment3>"
    // (indices 1..3 after Split) — confirm against the HDF campaign naming scheme.
    campaignName_splitted = zipSettings.CampaignInfo.Key.Split('/');
    dataWriterContext = new DataWriterContext("HDF Explorer", directoryPath, new OneDasCampaignDescription(Guid.Empty, 0, campaignName_splitted[1], campaignName_splitted[2], campaignName_splitted[3]), customMetadataEntrySet);
    dataWriter.Configure(dataWriterContext, variableDescriptionSet);

    // create temp files
    try
    {
        if (!this.CreateFiles(dataWriter, zipSettings))
        {
            this.CleanUp(directoryPath);
            return false;
        }
    }
    finally
    {
        dataWriter.Dispose();
    }

    // write zip archive entries
    filePathSet = Directory.GetFiles(directoryPath, "*", SearchOption.AllDirectories);
    currentFile = 0;
    fileCount = filePathSet.Count();

    foreach (string filePath in filePathSet)
    {
        // NOTE(review): only the file name is stored, so files found in sub-directories are
        // flattened and identical names would collide — confirm the writers emit flat output.
        zipArchiveEntry = zipArchive.CreateEntry(Path.GetFileName(filePath), CompressionLevel.Optimal);

        this.OnProgressUpdated(new ProgressUpdatedEventArgs(currentFile / (double)fileCount * 100, $"Writing file { currentFile + 1 } / { fileCount } to ZIP archive ..."));

        using (FileStream fileStream = File.Open(filePath, FileMode.Open, FileAccess.Read))
        {
            using (Stream zipArchiveEntryStream = zipArchiveEntry.Open())
            {
                fileStream.CopyTo(zipArchiveEntryStream);
            }
        }

        currentFile++;
    }

    this.CleanUp(directoryPath);

    return true;
}