public static double[, ,] ReadFieldData3D(string fileName) {
    // Reads /FieldData/FD/f0 as a float cube and returns it with the axes
    // reversed (k, j, i) widened to double.
    H5FileId fileId = H5F.open(fileName, H5F.OpenMode.ACC_RDONLY);
    H5DataSetId fDataSetId = H5D.open(fileId, "/FieldData/FD/f0");
    H5DataTypeId fDataTypeId = H5D.getType(fDataSetId);

    // Verify the stored element type; close the temporary comparison type
    // afterwards (the original leaked the handle returned by H5T.copy).
    H5DataTypeId nativeFloatId = H5T.copy(H5T.H5Type.NATIVE_FLOAT);
    if (!H5T.equal(fDataTypeId, nativeFloatId)) {
        Console.WriteLine("Error: Invalid dataset type, expected {0}", H5T.H5Type.NATIVE_FLOAT);
    }
    H5T.close(nativeFloatId);

    // Hold the dataspace in a local so it can be released (original leaked it).
    H5DataSpaceId spaceId = H5D.getSpace(fDataSetId);
    long[] dims = H5S.getSimpleExtentDims(spaceId).ToArray();
    H5S.close(spaceId);
    if (dims.Length != 3) {
        Console.WriteLine("Error: Invalid field data dimensions");
    }

    float[, ,] data = new float[dims[0], dims[1], dims[2]];
    H5D.read(fDataSetId, fDataTypeId, new H5Array <float>(data));

    // Release all HDF5 handles (the original leaked type, dataset and file).
    H5T.close(fDataTypeId);
    H5D.close(fDataSetId);
    H5F.close(fileId);

    // Reorder: fieldValues[k, j, i] = data[i, j, k].
    double[, ,] fieldValues = new double[dims[2], dims[1], dims[0]];
    for (int i = 0; i < dims[0]; i++) {
        for (int j = 0; j < dims[1]; j++) {
            for (int k = 0; k < dims[2]; k++) {
                fieldValues[k, j, i] = data[i, j, k];
            }
        }
    }
    return (fieldValues);
}
private static Hdf5Container_LidarDaimler ReadContainer(string sFilePath_inp) {
    // Loads every per-point channel of a lidar scan container from one file.
    long file_id = H5F.open(sFilePath_inp, H5F.ACC_RDWR);

    // Probe the "distance" dataset only to learn the grid size (rows x cols).
    long probeDatasetId = H5D.open(file_id, "distance");
    long probeDataspaceId = H5D.get_space(probeDatasetId);
    ulong[] dims = new ulong[2];
    H5S.get_simple_extent_dims(probeDataspaceId, dims, null);
    int rows = Convert.ToInt32(dims[0]);
    int cols = Convert.ToInt32(dims[1]);

    // Release the probe handles — the original leaked both and carried an
    // unused 'status' local.
    H5S.close(probeDataspaceId);
    H5D.close(probeDatasetId);

    // NOTE(review): the dataset/group ids opened inline below are assumed to
    // be closed inside the Hdf5IO helpers — confirm.
    Hdf5Container_LidarDaimler outContainer = new Hdf5Container_LidarDaimler(rows, cols) {
        _distances = Hdf5IO.GetFloatDataset(H5D.open(file_id, "distance"), rows, cols),
        _intensity = Hdf5IO.GetFloatDataset(H5D.open(file_id, "intensity"), rows, cols),
        _labelProbabilities = Hdf5IO.GetFloatDataset(H5D.open(file_id, "labelProbabilities"), rows, cols),
        _labelWorkingSet = Hdf5IO.GetLabelWorkingSet(H5G.open(file_id, "labelWorkingSet")),
        _labels = Hdf5IO.GetUintDataset(H5D.open(file_id, "labels"), rows, cols),
        _pointValid = Hdf5IO.GetIntDataset(H5D.open(file_id, "pointValid"), rows, cols),
        _sensorX = Hdf5IO.GetFloatDataset(H5D.open(file_id, "sensorX"), rows, cols),
        _sensorY = Hdf5IO.GetFloatDataset(H5D.open(file_id, "sensorY"), rows, cols),
        _sensorZ = Hdf5IO.GetFloatDataset(H5D.open(file_id, "sensorZ"), rows, cols),
        _vehicleX = Hdf5IO.GetFloatDataset(H5D.open(file_id, "vehicleX"), rows, cols),
        _vehicleY = Hdf5IO.GetFloatDataset(H5D.open(file_id, "vehicleY"), rows, cols),
        _vehicleZ = Hdf5IO.GetFloatDataset(H5D.open(file_id, "vehicleZ"), rows, cols)
    };

    H5F.close(file_id);
    return (outContainer);
}
public static void UpdateCampaignInfoSet() {
    // Refreshes Program.CampaignInfoSet from the VDS file and rebuilds the
    // campaign-name -> description map from the VDS meta file, all under _lock.
    long vdsFileId = -1;
    long vdsMetaFileId = -1;
    long groupId = -1;

    lock (_lock) {
        try {
            if (File.Exists(_options.VdsFilePath)) {
                vdsFileId = H5F.open(_options.VdsFilePath, H5F.ACC_RDONLY);
                vdsMetaFileId = H5F.open(_options.VdsMetaFilePath, H5F.ACC_RDONLY);
                Program.CampaignInfoSet = GeneralHelper.GetCampaignInfoSet(vdsFileId, false);
            } else {
                // No VDS file: publish an empty set. vdsMetaFileId stays -1,
                // but the description lambda below is then never invoked.
                Program.CampaignInfoSet = new List <CampaignInfo>();
            }

            Program.CampaignDescriptionSet = Program.CampaignInfoSet.ToDictionary(campaignInfo => campaignInfo.Name, campaignInfo => {
                if (IOHelper.CheckLinkExists(vdsMetaFileId, campaignInfo.Name)) {
                    try {
                        groupId = H5G.open(vdsMetaFileId, campaignInfo.Name);

                        if (H5A.exists(groupId, "description") > 0) {
                            return (IOHelper.ReadAttribute <string>(groupId, "description").First());
                        }
                    } finally {
                        // Release the group handle if (and only if) it opened.
                        if (H5I.is_valid(groupId) > 0) {
                            H5G.close(groupId);
                        }
                    }
                }

                return ("no description available");
            });
        } finally {
            // Close both files regardless of how the refresh went;
            // H5I.is_valid guards ids that were never (successfully) opened.
            if (H5I.is_valid(vdsFileId) > 0) {
                H5F.close(vdsFileId);
            }

            if (H5I.is_valid(vdsMetaFileId) > 0) {
                H5F.close(vdsMetaFileId);
            }
        }
    }
}
public Hdf5Operator(string filename) {
    // Opens the file read-only and eagerly caches its file attributes and
    // dataset names for later lookups.
    _fname = filename;
    _h5FileId = H5F.open(filename, H5F.ACC_RDONLY);
    GetAllFileAttributes();
    GetAllDatasetNames();
}
public static double[,] ReadFieldData2D(string file, string dataSet) {
    // Reads a 2-D double dataset and returns its transpose.
    H5FileId fileId = H5F.open(file, H5F.OpenMode.ACC_RDONLY);
    H5DataSetId fDataSetId = H5D.open(fileId, dataSet);
    H5DataTypeId fDataTypeId = H5D.getType(fDataSetId);

    // Hold the dataspace in a local so it can be closed (original leaked it).
    H5DataSpaceId spaceId = H5D.getSpace(fDataSetId);
    long[] dims = H5S.getSimpleExtentDims(spaceId).ToArray();
    H5S.close(spaceId);

    double[,] data = new double[dims[0], dims[1]];
    H5D.read(fDataSetId, fDataTypeId, new H5Array <double>(data));

    // Transpose: fieldValues[i, j] = data[j, i] (redundant cast removed).
    double[,] fieldValues = new double[dims[1], dims[0]];
    for (int i = 0; i < dims[1]; i++) {
        for (int j = 0; j < dims[0]; j++) {
            fieldValues[i, j] = data[j, i];
        }
    }

    H5T.close(fDataTypeId);
    H5D.close(fDataSetId);
    H5F.close(fileId);
    return (fieldValues);
}
public static double ReadAttribute(string file, string dataSetOrGroup, string attribute) {
    // Reads the first element of a float or double attribute attached to a
    // dataset or group. Returns NaN on HDF5 errors or unsupported types.
    double attr = Double.NaN;

    try {
        H5FileId fileId = H5F.open(file, H5F.OpenMode.ACC_RDONLY);
        H5ObjectInfo objectInfo = H5O.getInfoByName(fileId, dataSetOrGroup);
        H5GroupId groupId = null;
        H5DataSetId dataSetId = null;
        H5AttributeId attrId;

        // The attribute may live on either a group or a dataset.
        if (objectInfo.objectType == H5ObjectType.GROUP) {
            groupId = H5G.open(fileId, dataSetOrGroup);
            attrId = H5A.open(groupId, attribute);
        } else {
            dataSetId = H5D.open(fileId, dataSetOrGroup);
            attrId = H5A.open(dataSetId, attribute);
        }

        H5DataTypeId attrTypeId = H5A.getType(attrId);
        double[] dAttrs = new double[] { };

        // Temporary comparison types, closed below (the original leaked the
        // handles returned by H5T.copy).
        H5DataTypeId floatTypeId = H5T.copy(H5T.H5Type.NATIVE_FLOAT);
        H5DataTypeId doubleTypeId = H5T.copy(H5T.H5Type.NATIVE_DOUBLE);

        if (H5T.equal(attrTypeId, floatTypeId)) {
            float[] fAttrs = new float[H5S.getSimpleExtentNPoints(H5A.getSpace(attrId))];
            H5A.read(attrId, attrTypeId, new H5Array <float>(fAttrs));
            dAttrs = (from f in fAttrs select (double)f).ToArray();
        } else if (H5T.equal(attrTypeId, doubleTypeId)) {
            dAttrs = new double[H5S.getSimpleExtentNPoints(H5A.getSpace(attrId))];
            H5A.read(attrId, attrTypeId, new H5Array <double>(dAttrs));
        }

        H5T.close(floatTypeId);
        H5T.close(doubleTypeId);
        H5T.close(attrTypeId);
        H5A.close(attrId);

        if (groupId != null) {
            H5G.close(groupId);
        }

        if (dataSetId != null) {
            H5D.close(dataSetId);
        }

        H5F.close(fileId);

        // BUGFIX: the original indexed dAttrs[0] unconditionally, so any
        // attribute that is neither float nor double threw an uncaught
        // IndexOutOfRangeException. Fall back to NaN instead.
        if (dAttrs.Length > 0) {
            attr = dAttrs[0];
        }
    } catch (HDFException e) {
        Console.WriteLine("Error: Unhandled HDF5 exception");
        Console.WriteLine(e.Message);
    }

    return (attr);
}
/// <summary>
/// Opens an existing file and loads up the group and dataset headers in the object.
/// </summary>
/// <param name="_filename">Path to an existing HDF5 file.</param>
public Hdf5File(string _filename) {
    // Guard clauses first: missing file, then failed open.
    if (!File.Exists(_filename)) {
        throw new FileNotFoundException();
    }

    Id = H5F.open(_filename, H5F.ACC_RDWR).ToId();
    if (Id.Value <= 0) {
        throw new Hdf5UnknownException();
    }

    // Open succeeded: wire up the navigation helpers and populate children.
    FileId = Id;
    Path = new Hdf5Path(".");
    Groups = new Hdf5Groups(this);
    Datasets = new Hdf5Datasets(this);
    Attributes = new Hdf5Attributes(this);
    AttributeHelper.LoadAttributes(Attributes);
    GroupHelper.PopulateChildrenObjects(Id, this);
}
// Load weights from hdf5 file. Weights must be saved as a vector per layer.
// Reads the named 1-D double dataset and narrows it to float.
public static float[] loadH5(string path, string dsname) {
    // Get file and dataset ids.
    var h5fid = H5F.open(path, H5F.OpenMode.ACC_RDONLY);
    var h5did = H5D.open(h5fid, dsname);

    // Dataset size.
    var h5space = H5D.getSpace(h5did);
    var S = H5S.getSimpleExtentDims(h5space).ToArray();

    // Read the dataset as doubles.
    double[] data = new double[S[0]];
    var h5dtype = H5D.getType(h5did);
    H5D.read(h5did, h5dtype, new H5Array <double>(data));

    // Release all handles (the original leaked type, space, dataset and file).
    H5T.close(h5dtype);
    H5S.close(h5space);
    H5D.close(h5did);
    H5F.close(h5fid);

    // Narrow to float in parallel.
    float[] newarray = new float[data.Length];
    Parallel.For(0, data.Length, (k) => { newarray[k] = (float)data[k]; });
    return (newarray);
}
/// <summary>
/// Opens a Hdf-5 file
/// </summary>
/// <param name="filename">Path of the file to open.</param>
/// <param name="readOnly">True opens read-only, false read-write.</param>
/// <returns>The HDF5 file id returned by H5F.open.</returns>
public static long OpenFile(string filename, bool readOnly = false) {
    // Pick the access flag, then delegate straight to the native open.
    uint access;
    if (readOnly) {
        access = H5F.ACC_RDONLY;
    } else {
        access = H5F.ACC_RDWR;
    }
    return H5F.open(filename, access);
}
} // make_table

// Verifies H5TB.getTableInfo/getFieldInfo report the expected field/record
// counts and field names for the test table.
static void test_getting_info() {
    try {
        Console.Write("Testing getting table/field information");

        // Open the file to check on the table.
        H5FileId fileId = H5F.open(FILE_NAME, H5F.OpenMode.ACC_RDWR);
        string[] field_names = { "c", "i", "l" };

        // Get the table info.
        TableInfo table = H5TB.getTableInfo(fileId, TABLE_NAME);

        if (table.nFields != N_FIELDS) {
            // BUGFIX: report the value actually read — the original printed
            // an unused local that was always 0.
            Console.WriteLine("\ntest_getting_info: incorrect number of fields: read {0} - should be {1}", table.nFields, N_FIELDS);
            nerrors++;
        }

        if (table.nRecords != N_RECORDS) {
            // BUGFIX: message said "fields" for the record check and printed
            // an unused always-zero local.
            Console.WriteLine("\ntest_getting_info: incorrect number of records: read {0} - should be {1}", table.nRecords, N_RECORDS);
            nerrors++;
        }

        // Get field info and compare each field name.
        TableFieldInfo tablefield = H5TB.getFieldInfo(fileId, TABLE_NAME);
        int ii;
        for (ii = 0; ii < N_FIELDS; ii++) {
            if (tablefield.fieldName[ii] != field_names[ii]) {
                // BUGFIX: the original reused placeholder {0} for two values
                // and swapped the read/expected arguments.
                Console.WriteLine("\ntest_getting_info: field #{0} has incorrect name: read {1} - should be {2}", ii, tablefield.fieldName[ii], field_names[ii]);
                nerrors++;
            }
        }

        H5F.close(fileId);
        Console.WriteLine("\t\t\tPASSED");
    } catch (HDFException anyHDF5E) {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    } catch (System.Exception sysE) {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_getting_info
private static H5FileId GetFileId() {
    // Opens the configured HDF5 output file, creating it if absent.
    // Resolve the path once instead of three separate GetOutputPath calls.
    string path = Global.GetOutputPath(Global.Configuration.HDF5Path);

    return File.Exists(path)
        ? H5F.open(path, H5F.OpenMode.ACC_RDWR)
        : H5F.create(path, H5F.CreateMode.ACC_EXCL);
}
public static T GetMetadata <T>(string filename, string path) {
    // Reads a single scalar value at 'path' from the given HDF5 file.
    H5FileId zFile = null;

    if (!File.Exists(filename)) {
        throw new Exception("File not found.");
    }

    try {
        zFile = H5F.open(filename, H5F.OpenMode.ACC_RDONLY);
        return ((T)HDFExtensions.ReadScalar <T>(zFile, path));
    } catch (Exception ex) {
        // BUGFIX: keep the original failure as the inner exception — the
        // original discarded it, hiding the real cause of the error.
        throw new Exception("Path not found", ex);
    } finally {
        // The file is closed whether the read succeeds or fails.
        if (zFile != null) {
            H5F.close(zFile);
        }
    }
}
public static double[][] ReadMesh(string fileName) {
    // Loads the /Mesh/x, /Mesh/y and /Mesh/z axis datasets, converting the
    // float values from metres to millimetres.
    string[] meshNames = { "x", "y", "z" };
    double[][] meshes = new double[3][];
    H5FileId fileId = H5F.open(fileName, H5F.OpenMode.ACC_RDONLY);

    for (int axis = 0; axis < meshNames.Length; axis++) {
        H5DataSetId dsId = H5D.open(fileId, "/Mesh/" + meshNames[axis]);
        H5DataTypeId dtId = H5D.getType(dsId);

        // The on-disk type is expected to be native float; anything else is
        // reported but still read as float below.
        if (!H5T.equal(dtId, H5T.copy(H5T.H5Type.NATIVE_FLOAT))) {
            Console.WriteLine("Error: Invalid dataset type, expected {0}", H5T.H5Type.NATIVE_FLOAT);
        }

        // Element count = total storage bytes / bytes per element.
        float[] raw = new float[H5D.getStorageSize(dsId) / H5T.getSize(dtId)];
        H5D.read(dsId, dtId, new H5Array <float>(raw));
        meshes[axis] = raw.Select(x => (double)x * 1000.0).ToArray(); // m -> mm

        H5D.close(dsId);
        H5T.close(dtId);
    }

    H5F.close(fileId);
    return (meshes);
}
public Dictionary <string, string> TryReadDataTable(string datasetName) {
    // Reads the first record of a 1-D COMPOUND dataset and returns a map of
    // member-name -> formatted value. Returns null when there are no
    // sub-datasets, the type is not compound, or the rank is not 1.
    Dictionary <string, string> result = null;
    var subDsDic = ds.GetSubDatasets();

    if (subDsDic.Count > 0) {
        H5ID h5FileId = H5F.open(fileName, H5F.ACC_RDONLY);
        H5ID datasetId = H5D.open(h5FileId, datasetName);
        H5ID typeId = H5D.get_type(datasetId);
        H5ID spaceId = H5D.get_space(datasetId);

        if (H5T.get_class(typeId) == H5T.class_t.COMPOUND) {
            int numCount = H5T.get_nmembers(typeId);
            var size = H5T.get_size(typeId);

            // Buffer sized for exactly one compound record, pinned so the
            // native read can write into it.
            byte[] buffer = new byte[size.ToInt32()];
            GCHandle hnd = GCHandle.Alloc(buffer, GCHandleType.Pinned);
            int ndims = H5S.get_simple_extent_ndims(spaceId);

            if (ndims == 1) {
                result = new Dictionary <string, string>();
                H5D.read(datasetId, typeId, H5S.ALL, H5S.ALL, H5P.DEFAULT, hnd.AddrOfPinnedObject());

                // Decode each compound member from the raw buffer at its
                // declared member offset.
                for (uint i = 0; i < numCount; i++) {
                    string name = Marshal.PtrToStringAnsi(H5T.get_member_name(typeId, i));
                    int offset = H5T.get_member_offset(typeId, i).ToInt32();
                    H5ID subTypeId = H5T.get_member_type(typeId, i);
                    H5T.class_t typeClass = H5T.get_member_class(typeId, i);
                    string value = ReadBuffer(buffer, offset, typeClass, subTypeId);
                    result.Add(name, value);
                    H5T.close(subTypeId);
                }
            }

            // NOTE(review): hnd.Free() is not inside a finally block — an
            // exception in H5D.read/ReadBuffer would leak the pinned handle.
            hnd.Free();
        }

        // Release ids that were successfully opened (0 means not opened).
        if (spaceId != 0) {
            H5S.close(spaceId);
        }

        if (typeId != 0) {
            H5T.close(typeId);
        }

        if (datasetId != 0) {
            H5D.close(datasetId);
        }

        if (h5FileId != 0) {
            H5F.close(h5FileId);
        }
    }

    return (result);
}
/// <summary>
/// Opens a Hdf-5 file
/// </summary>
/// <param name="filename">Path of the file to open.</param>
/// <param name="readOnly">True opens read-only, false read-write.</param>
/// <param name="overwrite">Currently has no effect (see note below).</param>
/// <returns>The HDF5 file id returned by H5F.open.</returns>
public static hid_t OpenFile(string filename, bool readOnly = false, bool overwrite = false) {
    // NOTE(review): the 'overwrite' parameter is accepted but never used —
    // confirm whether truncate-on-open (H5F.ACC_TRUNC via create) was intended.
    hid_t fileId;
    uint access = (readOnly) ? H5F.ACC_RDONLY : H5F.ACC_RDWR;
    fileId = H5F.open(filename, access);
    return (fileId);
}
// Checks that opening a missing file throws H5FopenException, then opens an
// existing file and exercises group/dataset creation against it.
static void test_file_open() {
    try {
        // Output message about test being performed.
        Console.Write("Testing file opening I/O");

        // First ensure the file does not exist
        File.Delete(FILE2);

        // Try opening a non-existent file. This should fail.
        try {
            H5FileId non_exist_file = H5F.open(FILE2, H5F.OpenMode.ACC_RDWR);

            // should fail, but didn't, print out the error message.
            Console.WriteLine("\ntest_file_open: Attempting to open a non-existent file.");
            nerrors++;
        } catch (H5FopenException) { } // does nothing, it should fail

        // Open the file.
        H5FileId fileId = H5F.open(FILE1, H5F.OpenMode.ACC_RDWR);

        // Create dataspace for the dataset in the file.
        hssize_t[] dims = { 20 };
        H5DataSpaceId dspace = H5S.create_simple(RANK, dims);

        // Create a group.
        H5GroupId groupId = H5G.create(fileId, GROUP_NAME);

        // Create a dataset using file as location.
        H5DataSetId dset1Id = H5D.create(fileId, DSET1_NAME, H5T.H5Type.NATIVE_INT, dspace);

        // Create a dataset using group as location.
        H5DataSetId dset2Id = H5D.create(groupId, DSET2_NAME, H5T.H5Type.NATIVE_SHORT, dspace);

        // Close objects and files.
        H5D.close(dset1Id);
        H5D.close(dset2Id);
        H5S.close(dspace);
        H5G.close(groupId);
        H5F.close(fileId);
        Console.WriteLine("\t\t\t\tPASSED");
    } catch (HDFException anyHDF5E) {
        // Any HDF5 failure outside the expected-open-failure above counts
        // as a test error.
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    } catch (System.Exception sysE) {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_file_open
/// <summary>
/// WARNING: ADVANCED USE ONLY!! Loads a 2D generic dataset from an H5 file.
/// The generic loaders only loads data in non-Unity friendly types, such as bytes, uints, longs etc...
/// You'll have to know the correct cast to retrieve usable data.
///
/// Created With help from https://github.com/LiorBanai/HDF5-CSharp/blob/master/HDF5-CSharp/Hdf5Dataset.cs
/// </summary>
/// <param name="filePath">Path to the HDF5 file.</param>
/// <param name="datasetName">Name of the 2-D dataset to read.</param>
/// <typeparam name="T">Element type the raw bytes are reinterpreted as.</typeparam>
/// <returns>The dataset contents as a 2-D array.</returns>
/// <exception cref="FileNotFoundException"></exception>
static T[,] Load2DDataset <T>(string filePath, string datasetName) {
    if (!File.Exists(filePath)) {
        throw new FileNotFoundException($"Loading dataset {datasetName} from file that doesn't exist {filePath}");
    }

    long fileId = H5F.open(filePath, H5F.ACC_RDONLY);
    T[,] resultArray = new T[2, 2];

    try {
        ulong[] start = { 0, 0 };
        ulong[] count = { 0, 0 };
        long datasetId = H5D.open(fileId, datasetName);
        var datatype = H5D.get_type(datasetId);
        var spaceId = H5D.get_space(datasetId);

        int rank = H5S.get_simple_extent_ndims(spaceId);
        ulong[] maxDims = new ulong[rank];
        ulong[] dims = new ulong[rank];
        H5S.get_simple_extent_dims(spaceId, dims, maxDims);
        count[0] = dims[0];
        count[1] = dims[1];

        // Define file hyperslab (the whole dataset).
        long status = H5S.select_hyperslab(spaceId, H5S.seloper_t.SET, start, null, count, null);

        // Define the memory dataspace sized to match.
        resultArray = new T[dims[0], dims[1]];
        var memId = H5S.create_simple(rank, dims, null);

        // Define memory hyperslab.
        status = H5S.select_hyperslab(memId, H5S.seloper_t.SET, start, null, count, null);

        // Read from the file hyperslab into the pinned managed array.
        GCHandle handle = GCHandle.Alloc(resultArray, GCHandleType.Pinned);
        try {
            H5D.read(datasetId, datatype, memId, spaceId, H5P.DEFAULT, handle.AddrOfPinnedObject());
        } finally {
            handle.Free();
            // BUGFIX: the original called H5S.close(status) — 'status' is a
            // herr_t return code, not a dataspace id — and closed the
            // datatype with H5D.close instead of H5T.close.
            H5S.close(memId);
            H5S.close(spaceId);
            H5T.close(datatype);
            H5D.close(datasetId);
        }
    } finally {
        H5F.close(fileId);
    }

    return (resultArray);
}
public bool FileExistsApi(string source, string filename) {
    // Returns whether the link 'filename' exists inside the HDF5 file
    // 'source'.
    var fileId = H5F.open(source, H5F.OpenMode.ACC_RDONLY);

    try {
        return (H5L.Exists(fileId, filename));
    } finally {
        // Close the file even if the existence check throws — the original
        // leaked the file id on an exception.
        H5F.close(fileId);
    }
}
protected H5FileId open(H5F.OpenMode mode) {
    // Opens the OMX file at 'filepath' and validates it: IsValid is the
    // conjunction of the three checks below (&= is non-short-circuiting, so
    // all three loaders always run).
    this.fileId = H5F.open(filepath, mode);
    this.IsValid = this.getOMXFileAttributes();
    this.IsValid &= this.getOMXMatrixTables();
    this.IsValid &= this.getOMXIndexMaps();
    return (fileId);
}
private void Menu_2() {
    // Opens VDS.h5 read-only (bailing out if it is missing) and VDS_META.h5
    // read-write — creating the meta file when needed — then launches the
    // interactive VdsMetaNavigator at the root level.
    long vdsFileId = -1;
    long vdsMetaFileId = -1;
    long fcPropertyId = -1;

    string vdsFilePath;
    string vdsMetaFilePath;

    List <CampaignInfo> campaignInfoSet;
    IList <HdfElementBase> currentList;

    //
    vdsFilePath = Path.Combine(Program.BaseDirectoryPath, "VDS.h5");
    vdsMetaFilePath = Path.Combine(Program.BaseDirectoryPath, "VDS_META.h5");

    try {
        if (File.Exists(vdsFilePath)) {
            vdsFileId = H5F.open(vdsFilePath, H5F.ACC_RDONLY);
        } else {
            // Nothing to browse without the main VDS file.
            return;
        }

        if (File.Exists(vdsMetaFilePath)) {
            vdsMetaFileId = H5F.open(vdsMetaFilePath, H5F.ACC_RDWR);
        }

        if (vdsMetaFileId == -1) {
            // Meta file missing: create it with ALL_PERSIST file-space
            // handling.
            // NOTE(review): fcPropertyId is never released — confirm whether
            // an H5P.close call is needed here.
            fcPropertyId = H5P.create(H5P.FILE_CREATE);
            H5P.set_file_space(fcPropertyId, H5F.file_space_type_t.ALL_PERSIST);
            vdsMetaFileId = H5F.create(vdsMetaFilePath, H5F.ACC_TRUNC, fcPropertyId);
        }

        campaignInfoSet = GeneralHelper.GetCampaignInfoSet(vdsFileId, true);
        currentList = campaignInfoSet.Cast <HdfElementBase>().ToList();

        new VdsMetaNavigator(vdsFileId, vdsMetaFileId, "/", currentList);
    } finally {
        // Close whichever files were successfully opened.
        if (H5I.is_valid(vdsFileId) > 0) {
            H5F.close(vdsFileId);
        }

        if (H5I.is_valid(vdsMetaFileId) > 0) {
            H5F.close(vdsMetaFileId);
        }
    }
}
private void Menu_2(Dictionary <string, string> settings) // edit variable name
{
    // Writes (or appends to) the "name_set" attribute of a variable group,
    // either in a single file or in every file matched under a directory.
    long fileId;
    long groupId;

    string variableGroupPath;
    string attributeName;
    string[] attributeContentSet;

    List <string> filePathSet;

    //
    settings = this.PromptEditVariableNameData(settings);

    // Either enumerate matching files under a directory or take one file.
    if (settings.ContainsKey("DirectoryPath")) {
        filePathSet = Directory.GetFiles(settings["DirectoryPath"], settings["SearchPattern"], settings["IncludeSubDirectories"] == "Yes" ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly).ToList();
    } else {
        filePathSet = new List <string>() { settings["FilePath"] };
    }

    variableGroupPath = settings["VariableGroupPath"];
    attributeName = "name_set";
    attributeContentSet = new string[] { settings["VariableName"] };

    foreach (string filePath in filePathSet) {
        // NOTE(review): the third argument to H5F.open is the literal 0
        // rather than H5P.DEFAULT — confirm this access property list is
        // intended.
        fileId = H5F.open(filePath, H5F.ACC_RDWR, 0);

        if (IOHelper.CheckLinkExists(fileId, variableGroupPath)) {
            groupId = H5G.open(fileId, variableGroupPath);

            if (settings["AppendMode"] == "Append") {
                // Append into an unlimited-extent attribute.
                IOHelper.PrepareAttribute(groupId, attributeName, attributeContentSet, new ulong[] { H5S.UNLIMITED }, true);
            } else {
                // Replace the attribute content outright.
                IOHelper.WriteAttribute(groupId, attributeName, attributeContentSet);
            }

            // clean up
            H5G.close(groupId);
        }

        // clean up
        H5F.close(fileId);
    }
}
public NeuralNetworkH5Loader(string h5File) {
    // Opens the weight file read-only, then the root group, then its
    // "model_weights" subgroup; the three ids are kept for later reads.
    if (h5File is null) {
        throw new ArgumentNullException(nameof(h5File));
    }

    H5FileId = H5F.open(h5File, H5F.OpenMode.ACC_RDONLY);
    H5GroupId = H5G.open(H5FileId, "/");
    H5GroupIdModelWeights = H5G.open(H5GroupId, "model_weights");
}
private mData[] ReadData() {
    // Reads 'count' records of the dataset 'dataSetName' from 'filename'
    // (both fields of the enclosing type) back into managed mData structs.
    Console.WriteLine("Reading H5 file {0}...", filename);
    H5FileId fileId = H5F.open(filename, H5F.OpenMode.ACC_RDONLY);
    H5DataSetId dataSetId = H5D.open(fileId, dataSetName);
    mData[] readDataBack = new mData[count];

    // NOTE(review): reads with STD_REF_OBJ as the memory type — confirm this
    // matches the type the dataset was written with.
    H5D.read(dataSetId, new H5DataTypeId(H5T.H5Type.STD_REF_OBJ), new H5Array <mData>(readDataBack));

    H5D.close(dataSetId);
    H5F.close(fileId);
    return (readDataBack);
}
{ // methods devoted to file I/O
    /// <summary>
    /// Collects dataset/attribute information for a SNIRF file by delegating
    /// to ScanInfo starting at the root group.
    /// </summary>
    /// <param name="filename">Path of the SNIRF (HDF5) file.</param>
    /// <returns>The accumulated list of HDF5info entries.</returns>
    public static List <HDF5info> SNIRFinfo(string filename)
    {
        List <HDF5info> fields = new List <HDF5info>();
        hid_t fileId = H5F.open(filename, H5F.ACC_RDONLY);
        hid_t gId = H5G.open(fileId, "/");
        string fullname = "";
        fields = ScanInfo(gId, fields, fullname);

        // Release the handles — the original leaked both the root group and
        // the file id.
        H5G.close(gId);
        H5F.close(fileId);
        return (fields);
    }
/// <summary>
/// Rewrites a dataset's values with destriped data.
/// </summary>
/// <typeparam name="T">Element type of the dataset.</typeparam>
/// <param name="dataSetName">Name of the dataset.</param>
/// <param name="dataTypeId">HDF5 type id of the dataset elements.</param>
/// <param name="values">Destriped replacement data.</param>
/// <param name="BrandNo">Band index (0-based) at which replacement starts.</param>
private void ReWriteDataSet <T>(string dataSetName, H5DataTypeId dataTypeId, T[] values, int BrandNo)
{
    H5FileId _h5FileId = null;
    H5DataSetId dataSetId = null;
    H5DataSpaceId spaceid = null;

    try {
        _h5FileId = H5F.open(fileName, H5F.OpenMode.ACC_RDWR);
        dataSetId = H5D.open(_h5FileId, dataSetName);
        spaceid = H5D.getSpace(dataSetId);
        long[] dims = H5S.getSimpleExtentDims(spaceid); // e.g. [3, 1800, 2048]
        int rank = H5S.getSimpleExtentNDims(spaceid);   // e.g. 3
        H5S.close(spaceid);
        spaceid = null;

        // Total element count for rank 0..3.
        int size = 0;
        if (rank == 0) {
            size = 1;
        } else if (rank == 1) {
            size = Convert.ToInt32(dims[0]);
        } else if (rank == 2) {
            size = Convert.ToInt32(dims[0] * dims[1]);
        } else if (rank == 3) {
            size = Convert.ToInt32(dims[0] * dims[1] * dims[2]);
        }

        // Read the existing data, overlay the corrected values starting at
        // BrandNo, then write the whole buffer back.
        T[] v = new T[size];
        H5D.read <T>(dataSetId, dataTypeId, new H5Array <T>(v));

        for (int i = BrandNo; i < values.Length; i++) {
            v[i] = values[i];
        }

        H5D.write <T>(dataSetId, dataTypeId, new H5Array <T>(v));
    } finally {
        // BUGFIX: the original caught Exception and rethrew
        // 'new Exception(e.Message)', destroying the stack trace; it also
        // closed possibly-null ids in finally, which would mask the real
        // error with a NullReferenceException when open failed.
        if (dataSetId != null) {
            H5D.close(dataSetId);
        }

        if (_h5FileId != null) {
            H5F.close(_h5FileId);
        }
    }
}
public void H5FcloseTest1() {
    // Round-trip close test: create a temp file, close it, reopen it
    // read-only, close again; every H5F call must return a non-negative
    // id/status.
    string fname = Path.GetTempFileName();
    hid_t file = H5F.create(fname, H5F.ACC_TRUNC);
    Assert.IsTrue(file >= 0);
    Assert.IsTrue(H5F.close(file) >= 0);
    file = H5F.open(fname, H5F.ACC_RDONLY);
    Assert.IsTrue(file >= 0);
    Assert.IsTrue(H5F.close(file) >= 0);
    // Remove the temp file so the test leaves no residue.
    File.Delete(fname);
}
/// <summary>
/// The constructor.
/// </summary>
/// <param name="cuda">Specifies the CudaDnn connection to Cuda.</param>
/// <param name="log">Specifies the Log for output.</param>
/// <param name="strFile">Specifies the HDF5 file to load.</param>
public HDF5(CudaDnn <T> cuda, Log log, string strFile)
{
    m_strFile = strFile;
    m_cuda = cuda;
    m_log = log;

    // Open read-only; a null file id is treated as an open failure and
    // reported through the log's FAIL path.
    m_file = H5F.open(strFile, H5F.OpenMode.ACC_RDONLY);

    if (m_file == null) {
        m_log.FAIL("Failed opening HDF5 file: '" + strFile + "'!");
    }
}
static void ReadFile(string filePath) {
    // Dumps /group/dataset (when it is 2-D) and several of its attributes
    // (a double plus three string variants) to the console.
    var file = H5F.open(filePath, H5F.ACC_RDONLY);
    var dataSet = H5D.open(file, "/group/dataset");
    var dataSpace = H5D.get_space(dataSet);
    var rank = H5S.get_simple_extent_ndims(dataSpace);

    if (rank == 2) {
        var dims = new ulong[2];
        H5S.get_simple_extent_dims(dataSpace, dims, null);
        var data = new int[dims[0], dims[1]];
        H5D.read(dataSet, H5T.NATIVE_INT, H5S.ALL, H5S.ALL, H5P.DEFAULT, new PinnedObject(data));

        // Print the matrix, 3 characters per cell.
        for (int i = 0; i < data.GetLength(0); ++i) {
            for (int j = 0; j < data.GetLength(1); ++j) {
                Write($"{data[i,j],3}");
            }
            WriteLine();
        }
    }

    H5S.close(dataSpace);

    var doubleAttribute = H5A.open(dataSet, "double");
#if false
    // Disabled variant: pinning the local double boxes the value, so 'pi'
    // itself is never updated by the read.
    double pi = 0.0;
    var handle = GCHandle.Alloc(pi, GCHandleType.Pinned);
    H5A.read(doubleAttribute, H5T.NATIVE_DOUBLE, handle.AddrOfPinnedObject());
    handle.Free();
    WriteLine($"PI = {pi}");
#else
    // Read into a one-element array instead.
    var values = new double[1];
    H5A.read(doubleAttribute, H5T.NATIVE_DOUBLE, new PinnedObject(values));
    WriteLine($"PI = {values[0]}");
#endif
    H5A.close(doubleAttribute);

    WriteLine($"string: {ReadStringAttribute(dataSet, "string")}");
    WriteLine($"string-ascii: {ReadStringAttribute(dataSet, "string-ascii")}");
    WriteLine($"string-vlen: {ReadStringAttribute(dataSet, "string-vlen")}");

    H5D.close(dataSet);
    H5F.close(file);
}
public HDF5File(string _file, bool _readonly) {
    // Remember the path, pick the access mode, open exactly once.
    m_FileName = _file;
    var mode = _readonly ? H5F.OpenMode.ACC_RDONLY : H5F.OpenMode.ACC_RDWR;
    m_FileId = H5F.open(_file, mode);
}
public HDF5Helper(String path, bool createFlag) {
    // Either create a fresh file (truncating) or open an existing one
    // read-only, then cache attribute and dataset names.
    _fileId = createFlag
        ? H5F.create(path, H5F.CreateMode.ACC_TRUNC)
        : H5F.open(path, H5F.OpenMode.ACC_RDONLY);

    getFileAttributeNames();
    getDatasetNames();
}