/// <summary>
/// Serializes a <c>TypeSig</c> to its metadata blob form.
/// </summary>
/// <param name="helper">Receives error/warning callbacks during writing.</param>
/// <param name="typeSig">Type signature to serialize.</param>
/// <param name="context">Supplies the shared output stream and data writer.</param>
/// <returns>The raw signature bytes.</returns>
internal static byte[] Write(ISignatureWriterHelper helper, TypeSig typeSig, DataWriterContext context) {
	using (var sigWriter = new SignatureWriter(helper, context)) {
		sigWriter.Write(typeSig);
		return sigWriter.GetResult();
	}
}
/// <summary>
/// Creates a writer for declarative-security blobs.
/// </summary>
/// <param name="module">Owner module of the security attributes.</param>
/// <param name="helper">Receives error callbacks.</param>
/// <param name="optimizeCustomAttributeSerializedTypeNames">Enables compact serialized type names.</param>
/// <param name="context">Supplies the shared output stream and data writer.</param>
DeclSecurityWriter(ModuleDef module, IWriterError helper, bool optimizeCustomAttributeSerializedTypeNames, DataWriterContext context) {
	this.module = module;
	this.helper = helper;
	this.optimizeCustomAttributeSerializedTypeNames = optimizeCustomAttributeSerializedTypeNames;
	this.context = context;
}
/// <summary>
/// Creates a signature writer over the context's shared stream.
/// The stream is truncated and rewound so each write starts from a clean buffer.
/// </summary>
/// <param name="helper">Receives error callbacks.</param>
/// <param name="context">Supplies the shared output stream and data writer.</param>
SignatureWriter(ISignatureWriterHelper helper, DataWriterContext context) {
	this.helper = helper;
	outStream = context.OutStream;
	writer = context.Writer;
	recursionCounter = new RecursionCounter();
	// The stream is owned by the context and reused across writers.
	disposeStream = false;
	// Discard any previous contents of the reused stream.
	outStream.SetLength(0);
	outStream.Position = 0;
}
/// <summary>
/// Creates a custom-attribute blob writer over the context's shared stream.
/// The stream is truncated and rewound so each write starts from a clean buffer.
/// </summary>
/// <param name="helper">Receives error callbacks.</param>
/// <param name="context">Supplies the shared output stream and data writer.</param>
CustomAttributeWriter(ICustomAttributeWriterHelper helper, DataWriterContext context) {
	this.helper = helper;
	outStream = context.OutStream;
	writer = context.Writer;
	recursionCounter = new RecursionCounter();
	// No generic context until a constructor argument requires one.
	genericArguments = null;
	// The stream is owned by the context and reused across writers.
	disposeStream = false;
	// Discard any previous contents of the reused stream.
	outStream.SetLength(0);
	outStream.Position = 0;
}
// Integration test: configures a FamosWriter with three channels and streams nine
// one-minute periods of data into a temp directory.
// NOTE(review): the "Assert" section below is empty — the test currently only
// verifies that writing completes without throwing; it never checks that a .dat
// file was actually produced. TODO: add an assertion on the output directory.
// NOTE(review): the temp directory created here is never deleted.
public void FamosWriterCreatesDatFile() { // Arrange
    // Resolve the writer through DI so it gets its configured dependencies.
    var services = new ServiceCollection();
    ConfigureServices(services);
    var provider = services.BuildServiceProvider();
    var dataWriter = provider.GetRequiredService <FamosWriter>();
    // Unique temp output directory per run.
    var projectGuid = Guid.NewGuid();
    var dataDirectoryPath = Path.Combine(Path.GetTempPath(), projectGuid.ToString());
    Directory.CreateDirectory(dataDirectoryPath);
    var projectDescription = new NexusProjectDescription(projectGuid, 1, "a", "b", "c");
    var customMetadataEntrySet = new List <CustomMetadataEntry>();
    var dataWriterContext = new DataWriterContext("Nexus", dataDirectoryPath, projectDescription, customMetadataEntrySet);
    // Two fast channels (8 640 000 samples/day) and one slow channel (86 400 samples/day).
    var channelDescriptionSet = new List <ChannelDescription>() {
        this.CreateChannelDescription("Var1", "Group1", NexusDataType.FLOAT64, new SampleRateContainer(8640000), "Unit1"),
        this.CreateChannelDescription("Var2", "Group2", NexusDataType.FLOAT64, new SampleRateContainer(8640000), "Unit2"),
        this.CreateChannelDescription("Var3", "Group1", NexusDataType.FLOAT64, new SampleRateContainer(86400), "Unit2"),
    };
    var currentDate = new DateTime(2019, 1, 1, 15, 0, 0);
    var period = TimeSpan.FromMinutes(1);
    // Act
    dataWriter.Configure(dataWriterContext, channelDescriptionSet);
    // Write nine consecutive one-minute chunks.
    for (int i = 0; i < 9; i++) {
        var buffers = channelDescriptionSet.Select(current => {
            // Samples per one-minute period (1440 minutes per day).
            var length = (int)current.SampleRate.SamplesPerDay / 1440;
            var offset = length * i;
            // value * 0 zeroes the range, so every sample in chunk i is the constant i + 1.
            var data = Enumerable.Range(offset, length).Select(value => value * 0 + (double)i + 1).ToArray();
            return(BufferUtilities.CreateSimpleBuffer(data));
        }).ToList();
        dataWriter.Write(currentDate, period, buffers.Cast <IBuffer>().ToList());
        currentDate += period;
    }
    // Dispose flushes and closes the output files.
    dataWriter.Dispose();
    // Assert
}
/// <summary>
/// Serializes a list of security attributes to a declarative-security blob.
/// </summary>
/// <param name="module">Owner module of the security attributes.</param>
/// <param name="secAttrs">Attributes to serialize.</param>
/// <param name="helper">Receives error callbacks.</param>
/// <param name="optimizeCustomAttributeSerializedTypeNames">Enables compact serialized type names.</param>
/// <param name="context">Supplies the shared output stream and data writer.</param>
/// <returns>The raw blob bytes.</returns>
internal static byte[] Write(ModuleDef module, IList<SecurityAttribute> secAttrs, IWriterError helper, bool optimizeCustomAttributeSerializedTypeNames, DataWriterContext context) {
	var declSecurityWriter = new DeclSecurityWriter(module, helper, optimizeCustomAttributeSerializedTypeNames, context);
	return declSecurityWriter.Write(secAttrs);
}
/// <summary>
/// Serializes a list of named arguments to blob format.
/// </summary>
/// <param name="helper">Receives error callbacks.</param>
/// <param name="namedArgs">Named arguments to serialize.</param>
/// <param name="context">Supplies the shared output stream and data writer.</param>
/// <returns>The raw blob bytes.</returns>
internal static byte[] Write(ICustomAttributeWriterHelper helper, IList<CANamedArgument> namedArgs, DataWriterContext context) {
	using (var caWriter = new CustomAttributeWriter(helper, context)) {
		caWriter.Write(namedArgs);
		return caWriter.GetResult();
	}
}
/// <summary>
/// Serializes a custom attribute to blob format.
/// </summary>
/// <param name="helper">Receives error callbacks.</param>
/// <param name="ca">Custom attribute to serialize.</param>
/// <param name="context">Supplies the shared output stream and data writer.</param>
/// <returns>The raw blob bytes.</returns>
internal static byte[] Write(ICustomAttributeWriterHelper helper, CustomAttribute ca, DataWriterContext context) {
	using (var caWriter = new CustomAttributeWriter(helper, context)) {
		caWriter.Write(ca);
		return caWriter.GetResult();
	}
}
/// <summary>
/// Creates a writer for portable-PDB custom debug info blobs.
/// The shared stream is truncated and rewound so each write starts from a clean buffer.
/// </summary>
/// <param name="helper">Receives error callbacks.</param>
/// <param name="methodContext">Serializer state for the current method.</param>
/// <param name="systemMetadata">Metadata used to resolve tokens.</param>
/// <param name="context">Supplies the shared output stream and data writer.</param>
PortablePdbCustomDebugInfoWriter(IPortablePdbCustomDebugInfoWriterHelper helper, SerializerMethodContext methodContext, Metadata systemMetadata, DataWriterContext context) {
	this.helper = helper;
	this.methodContext = methodContext;
	this.systemMetadata = systemMetadata;
	writer = context.Writer;
	outStream = context.OutStream;
	// Discard any previous contents of the reused stream.
	outStream.SetLength(0);
	outStream.Position = 0;
}
/// <summary>
/// Serializes a custom debug info record to its portable-PDB blob form.
/// </summary>
/// <param name="helper">Receives error callbacks.</param>
/// <param name="methodContext">Serializer state for the current method.</param>
/// <param name="systemMetadata">Metadata used to resolve tokens.</param>
/// <param name="cdi">Debug info to serialize.</param>
/// <param name="context">Supplies the shared output stream and data writer.</param>
/// <returns>The raw blob bytes.</returns>
public static byte[] Write(IPortablePdbCustomDebugInfoWriterHelper helper, SerializerMethodContext methodContext, Metadata systemMetadata, PdbCustomDebugInfo cdi, DataWriterContext context) {
	// NOTE(review): unlike the Signature/CustomAttribute writers, this writer is not
	// wrapped in a using block — presumably it does not own the shared stream; confirm.
	var cdiWriter = new PortablePdbCustomDebugInfoWriter(helper, methodContext, systemMetadata, context);
	return cdiWriter.Write(cdi);
}
/// <summary>
/// Serializes a calling-convention signature (method/field/property/local sig) to blob form.
/// </summary>
/// <param name="helper">Receives error callbacks.</param>
/// <param name="sig">Signature to serialize.</param>
/// <param name="context">Supplies the shared output stream and data writer.</param>
/// <returns>The raw signature bytes.</returns>
internal static byte[] Write(ISignatureWriterHelper helper, CallingConventionSig sig, DataWriterContext context) {
	using (var sigWriter = new SignatureWriter(helper, context)) {
		sigWriter.Write(sig);
		return sigWriter.GetResult();
	}
}
/// <summary>
/// Exports one campaign from the HDF source file into temporary data files and streams
/// them into <paramref name="zipArchive"/>.
/// </summary>
/// <param name="zipArchive">Target archive; one entry is created per generated file.</param>
/// <param name="fileGranularity">Requested period covered by each generated file.</param>
/// <param name="fileFormat">Requested output format.</param>
/// <param name="zipSettings">Source file handle, campaign info and variable map.</param>
/// <returns><c>true</c> on success; <c>false</c> when file creation was aborted.</returns>
/// <exception cref="NotImplementedException">No data writer exists for <paramref name="fileFormat"/> yet.</exception>
public bool WriteZipFileCampaignEntry(ZipArchive zipArchive, FileGranularity fileGranularity, FileFormat fileFormat, ZipSettings zipSettings)
{
    IList<VariableDescription> variableDescriptionSet;
    IList<CustomMetadataEntry> customMetadataEntrySet;
    ZipArchiveEntry zipArchiveEntry;
    DataWriterExtensionLogicBase dataWriter;
    DataWriterExtensionSettingsBase settings; // referenced only by the commented-out writer setup below
    DataWriterContext dataWriterContext;
    string directoryPath;
    string[] campaignName_splitted;
    string[] filePathSet;
    int currentFile;
    int fileCount;

    // build variable descriptions
    variableDescriptionSet = new List<VariableDescription>();

    zipSettings.CampaignInfo.Value.ToList().ForEach(variableInfo =>
    {
        variableInfo.Value.ForEach(datasetName =>
        {
            // HDF5 object handles; -1 marks "never opened" for the finally block.
            long groupId = -1;
            long typeId = -1;
            long datasetId = -1;

            string displayName;
            string groupName;
            string unit;

            ulong samplesPerDay;
            OneDasDataType oneDasDataType;
            hdf_transfer_function_t[] hdf_transfer_function_t_set;
            List<TransferFunction> transferFunctionSet;

            try
            {
                groupId = H5G.open(zipSettings.SourceFileId, $"{ zipSettings.CampaignInfo.Key }/{ variableInfo.Key }");
                datasetId = H5D.open(groupId, datasetName);
                typeId = H5D.get_type(datasetId);

                // "*_set" attributes are histories; the last entry is the current value.
                displayName = IOHelper.ReadAttribute<string>(groupId, "name_set").Last();
                groupName = IOHelper.ReadAttribute<string>(groupId, "group_set").Last();
                unit = IOHelper.ReadAttribute<string>(groupId, "unit_set").LastOrDefault();
                hdf_transfer_function_t_set = IOHelper.ReadAttribute<hdf_transfer_function_t>(groupId, "transfer_function_set");
                transferFunctionSet = hdf_transfer_function_t_set.Select(tf => new TransferFunction(DateTime.ParseExact(tf.date_time, "yyyy-MM-ddTHH-mm-ssZ", CultureInfo.InvariantCulture), tf.type, tf.option, tf.argument)).ToList();

                oneDasDataType = OneDasUtilities.GetOneDasDataTypeFromType(TypeConversionHelper.GetTypeFromHdfTypeId(typeId));
                samplesPerDay = OneDasUtilities.GetSamplesPerDayFromString(datasetName);

                variableDescriptionSet.Add(new VariableDescription(new Guid(variableInfo.Key), displayName, datasetName, groupName, oneDasDataType, samplesPerDay, unit, transferFunctionSet, typeof(ISimpleDataStorage)));
            }
            finally
            {
                // Release every handle that was successfully opened.
                if (H5I.is_valid(datasetId) > 0) { H5D.close(datasetId); }
                if (H5I.is_valid(groupId) > 0) { H5G.close(groupId); }
                if (H5I.is_valid(typeId) > 0) { H5T.close(typeId); }
            }
        });
    });

    dataWriter = null; // TODO: remove once the writers below are implemented properly

    switch (fileFormat)
    {
        case FileFormat.CSV:
            //settings = new CsvSettings() { FileGranularity = fileGranularity };
            //dataWriter = new CsvWriter((CsvSettings)settings, new LoggerFactory());
            break;

        case FileFormat.GAM:
            //settings = new GamSettings() { FileGranularity = fileGranularity };
            //dataWriter = new GamWriter((GamSettings)settings, new LoggerFactory());
            break;

        case FileFormat.MAT73:
            //settings = new Mat73Settings() { FileGranularity = fileGranularity };
            //dataWriter = new Mat73Writer((Mat73Settings)settings, new LoggerFactory());
            break;

        default:
            throw new NotImplementedException();
    }

    // FIX: every writer above is still commented out, so dataWriter is guaranteed to be
    // null here; fail fast instead of hitting a NullReferenceException in Configure below.
    if (dataWriter == null)
        throw new NotImplementedException();

    // create temp directory
    directoryPath = Path.Combine(Path.GetTempPath(), "OneDas.Hdf.Explorer", Guid.NewGuid().ToString());
    Directory.CreateDirectory(directoryPath);

    // create custom meta data
    customMetadataEntrySet = new List<CustomMetadataEntry>();
    //customMetadataEntrySet.Add(new CustomMetadataEntry("system_name", "HDF Explorer", CustomMetadataEntryLevel.File));

    // initialize data writer
    // Campaign key has the form "/a/b/c"; index 0 is the empty segment before the first '/'.
    campaignName_splitted = zipSettings.CampaignInfo.Key.Split('/');
    dataWriterContext = new DataWriterContext("HDF Explorer", directoryPath, new OneDasCampaignDescription(Guid.Empty, 0, campaignName_splitted[1], campaignName_splitted[2], campaignName_splitted[3]), customMetadataEntrySet);
    dataWriter.Configure(dataWriterContext, variableDescriptionSet);

    // create temp files
    try
    {
        if (!this.CreateFiles(dataWriter, zipSettings))
        {
            this.CleanUp(directoryPath);
            return false;
        }
    }
    finally
    {
        dataWriter.Dispose();
    }

    // write zip archive entries
    filePathSet = Directory.GetFiles(directoryPath, "*", SearchOption.AllDirectories);
    currentFile = 0;
    fileCount = filePathSet.Length; // was Count(); Length avoids the LINQ call on an array

    foreach (string filePath in filePathSet)
    {
        // NOTE(review): entries use the bare file name, so files from different
        // subdirectories with equal names would collide in the archive — confirm layout.
        zipArchiveEntry = zipArchive.CreateEntry(Path.GetFileName(filePath), CompressionLevel.Optimal);
        this.OnProgressUpdated(new ProgressUpdatedEventArgs(currentFile / (double)fileCount * 100, $"Writing file { currentFile + 1 } / { fileCount } to ZIP archive ..."));

        using (FileStream fileStream = File.Open(filePath, FileMode.Open, FileAccess.Read))
        {
            using (Stream zipArchiveEntryStream = zipArchiveEntry.Open())
            {
                fileStream.CopyTo(zipArchiveEntryStream);
            }
        }

        currentFile++;
    }

    this.CleanUp(directoryPath);

    return true;
}
/// <summary>
/// Creates a writer for declarative-security blobs.
/// </summary>
/// <param name="module">Owner module of the security attributes.</param>
/// <param name="helper">Receives error callbacks.</param>
/// <param name="context">Supplies the shared output stream and data writer.</param>
DeclSecurityWriter(ModuleDef module, IWriterError helper, DataWriterContext context) {
	this.context = context;
	this.module = module;
	this.helper = helper;
}
/// <summary>
/// Serializes a list of security attributes to a declarative-security blob.
/// </summary>
/// <param name="module">Owner module of the security attributes.</param>
/// <param name="secAttrs">Attributes to serialize.</param>
/// <param name="helper">Receives error callbacks.</param>
/// <param name="context">Supplies the shared output stream and data writer.</param>
/// <returns>The raw blob bytes.</returns>
internal static byte[] Write(ModuleDef module, IList<SecurityAttribute> secAttrs, IWriterError helper, DataWriterContext context) {
	var declSecurityWriter = new DeclSecurityWriter(module, helper, context);
	return declSecurityWriter.Write(secAttrs);
}
/// <summary>
/// Builds the appropriate data writer for the requested file format and exports the
/// sparse project's channels into <paramref name="directoryPath"/>.
/// </summary>
/// <param name="user">User on whose behalf the export runs.</param>
/// <param name="exportParameters">Time range, granularity and format options.</param>
/// <param name="sparseProject">Project subset to export.</param>
/// <param name="directoryPath">Target directory for the generated files.</param>
/// <param name="cancellationToken">Cancels the export.</param>
/// <exception cref="NotImplementedException">The requested file format has no writer.</exception>
private void CreateFiles(ClaimsPrincipal user, ExportParameters exportParameters, SparseProjectInfo sparseProject, string directoryPath, CancellationToken cancellationToken)
{
    var channelDescriptionSet = sparseProject.ToChannelDescriptions();
    var singleFile = exportParameters.FileGranularity == FileGranularity.SingleFile;

    // A single file spans the whole export range; otherwise the granularity value is the
    // file period in seconds.
    TimeSpan filePeriod;

    if (singleFile)
    {
        filePeriod = exportParameters.End - exportParameters.Begin;
    }
    else
    {
        filePeriod = TimeSpan.FromSeconds((int)exportParameters.FileGranularity);
    }

    DataWriterExtensionSettingsBase settings;
    DataWriterExtensionLogicBase dataWriter;

    switch (exportParameters.FileFormat)
    {
        case FileFormat.CSV:

            settings = new CsvSettings()
            {
                FilePeriod = filePeriod,
                SingleFile = singleFile,
                RowIndexFormat = exportParameters.CsvRowIndexFormat,
                SignificantFigures = exportParameters.CsvSignificantFigures,
            };

            dataWriter = new CsvWriter((CsvSettings)settings, NullLogger.Instance);

            break;

        case FileFormat.FAMOS:

            settings = new FamosSettings()
            {
                FilePeriod = filePeriod,
                SingleFile = singleFile,
            };

            dataWriter = new FamosWriter((FamosSettings)settings, NullLogger.Instance);

            break;

        case FileFormat.MAT73:

            settings = new Mat73Settings()
            {
                FilePeriod = filePeriod,
                SingleFile = singleFile,
            };

            dataWriter = new Mat73Writer((Mat73Settings)settings, NullLogger.Instance);

            break;

        default:
            throw new NotImplementedException();
    }

    // create custom meta data
    var customMetadataEntrySet = new List<CustomMetadataEntry>();
    //customMetadataEntrySet.Add(new CustomMetadataEntry("system_name", "Nexus Explorer", CustomMetadataEntryLevel.File));

    if (!string.IsNullOrWhiteSpace(sparseProject.License.FileMessage))
    {
        customMetadataEntrySet.Add(new CustomMetadataEntry("license", sparseProject.License.FileMessage, CustomMetadataEntryLevel.Project));
    }

    // initialize data writer
    // Project id has the form "/a/b/c"; index 0 is the empty segment before the first '/'.
    var projectName_splitted = sparseProject.Id.Split('/');
    var dataWriterContext = new DataWriterContext("Nexus Explorer", directoryPath, new NexusProjectDescription(Guid.Empty, 0, projectName_splitted[1], projectName_splitted[2], projectName_splitted[3]), customMetadataEntrySet);

    dataWriter.Configure(dataWriterContext, channelDescriptionSet);

    try
    {
        // create temp files
        this.CreateFiles(user, dataWriter, exportParameters, sparseProject, cancellationToken);
    }
    finally
    {
        // FIX: Dispose was previously also called inside the try block, disposing the
        // writer twice on the success path; the finally alone covers both paths.
        dataWriter.Dispose();
    }
}