/// <summary>
/// Opens the HDF5 file read-only and reads the scalar value stored at <paramref name="path"/>.
/// </summary>
/// <typeparam name="T">Expected CLR type of the scalar.</typeparam>
/// <param name="filename">Path of the HDF5 file on disc.</param>
/// <param name="path">Path of the scalar inside the file.</param>
/// <returns>The scalar value cast to <typeparamref name="T"/>.</returns>
/// <exception cref="Exception">The file does not exist, or the path cannot be read.</exception>
public static T GetMetadata <T>(string filename, string path)
{
    H5FileId zFile = null;
    if (!File.Exists(filename))
    {
        throw new Exception("File not found.");
    }
    try
    {
        zFile = H5F.open(filename, H5F.OpenMode.ACC_RDONLY);
        return (T)HDFExtensions.ReadScalar<T>(zFile, path);
    }
    catch (Exception ex)
    {
        // Keep the underlying HDF5 error as the inner exception instead of
        // discarding it; callers that catch Exception are unaffected.
        throw new Exception("Path not found", ex);
    }
    finally
    {
        // Always release the file handle, even when the read fails.
        if (zFile != null)
        {
            H5F.close(zFile);
        }
    }
}
/// <summary>
/// Converts the datasets and attributes described by <paramref name="hdf4FileAttrs"/>
/// into a new HDF5 file at <paramref name="f5name"/>.
/// </summary>
/// <param name="hdf4FileAttrs">Source HDF4 attribute/dataset description.</param>
/// <param name="f5name">Target HDF5 file path (truncated if it exists).</param>
/// <param name="messageAction">Progress callback passed through to the per-dataset conversion.</param>
private static void ConvertHdf4To5(Hdf4FileAttrs hdf4FileAttrs, string f5name, Action <string, int, int> messageAction)
{
    H5FileId fileId = null;
    try
    {
        // Create a new file using H5F_ACC_TRUNC access,
        // default file creation properties, and default file
        // access properties.
        fileId = H5F.create(f5name, H5F.CreateMode.ACC_TRUNC);
        int nxf5 = hdf4FileAttrs.Hdf4FileAttr.XDim;
        int nyf5 = hdf4FileAttrs.Hdf4FileAttr.YDim;
        int rank = 2;
        // Convert every science dataset (and its attributes).
        int sdscount = hdf4FileAttrs.Hdf4FileAttr.DataFields.Count;
        for (int k = 0; k < sdscount; k++)
        {
            ConvertHdf4To5BySds(hdf4FileAttrs, messageAction, k, nyf5, nxf5, rank, fileId);
        }
        // Copy the file-level attributes.
        HDFAttributeDef[] attributeDef5s = hdf4FileAttrs.GetHDFAttributeDefs();
        foreach (HDFAttributeDef attributeDef5 in attributeDef5s)
        {
            WriteHdfAttributes.WriteHdfAttribute(fileId, attributeDef5);
        }
    }
    catch (Exception ex)
    {
        throw new Exception("拼接Hdf4时出错,具体信息:" + ex.Message, ex);
    }
    finally
    {
        // Close the file even when a dataset conversion throws; previously the
        // handle leaked on any failure path.
        if (fileId != null)
        {
            H5F.close(fileId);
        }
    }
}
/// <summary>
/// Opens <paramref name="filename"/> read-only and caches the file's
/// attributes and dataset names.
/// </summary>
/// <param name="filename">Path of the HDF5 file to open.</param>
public Hdf5Operator(string filename)
{
    _fname = filename;
    _h5FileId = H5F.open(filename, HDF5DotNet.H5F.OpenMode.ACC_RDONLY);
    GetAllFileAttributes();
    GetAllDatasetNames();
}
/// <summary>
/// Reads the first element of a float or double attribute attached to a
/// dataset or group and returns it as a double.
/// </summary>
/// <param name="file">HDF5 file path.</param>
/// <param name="dataSetOrGroup">Path of the dataset or group owning the attribute.</param>
/// <param name="attribute">Attribute name.</param>
/// <returns>The first attribute element, or NaN when the attribute is missing
/// or not of a supported floating-point type.</returns>
public static double ReadAttribute(string file, string dataSetOrGroup, string attribute)
{
    double attr = Double.NaN;
    try
    {
        H5FileId fileId = H5F.open(file, H5F.OpenMode.ACC_RDONLY);
        H5ObjectInfo objectInfo = H5O.getInfoByName(fileId, dataSetOrGroup);
        H5GroupId groupId = null;
        H5DataSetId dataSetId = null;
        H5AttributeId attrId;
        // The attribute may live on either a group or a dataset.
        if (objectInfo.objectType == H5ObjectType.GROUP)
        {
            groupId = H5G.open(fileId, dataSetOrGroup);
            attrId = H5A.open(groupId, attribute);
        }
        else
        {
            dataSetId = H5D.open(fileId, dataSetOrGroup);
            attrId = H5A.open(dataSetId, attribute);
        }
        H5DataTypeId attrTypeId = H5A.getType(attrId);
        double[] dAttrs = new double[] { };
        if (H5T.equal(attrTypeId, H5T.copy(H5T.H5Type.NATIVE_FLOAT)))
        {
            float[] fAttrs = new float[H5S.getSimpleExtentNPoints(H5A.getSpace(attrId))];
            H5A.read(attrId, attrTypeId, new H5Array <float>(fAttrs));
            dAttrs = (from f in fAttrs select(double) f).ToArray();
        }
        else if (H5T.equal(attrTypeId, H5T.copy(H5T.H5Type.NATIVE_DOUBLE)))
        {
            dAttrs = new double[H5S.getSimpleExtentNPoints(H5A.getSpace(attrId))];
            H5A.read(attrId, attrTypeId, new H5Array <double>(dAttrs));
        }
        H5T.close(attrTypeId);
        H5A.close(attrId);
        if (groupId != null) { H5G.close(groupId); }
        if (dataSetId != null) { H5D.close(dataSetId); }
        H5F.close(fileId);
        // Guard against unsupported attribute types: the original indexed an
        // empty array here and threw IndexOutOfRangeException instead of
        // returning NaN.
        if (dAttrs.Length > 0)
        {
            attr = dAttrs[0];
        }
    }
    catch (HDFException e)
    {
        Console.WriteLine("Error: Unhandled HDF5 exception");
        Console.WriteLine(e.Message);
    }
    return attr;
}
/// <summary>
/// Reads a 2-D double dataset and returns it transposed:
/// file layout [d0, d1] comes back as [d1, d0].
/// </summary>
/// <param name="file">HDF5 file path.</param>
/// <param name="dataSet">Dataset path inside the file.</param>
public static double[,] ReadFieldData2D(string file, string dataSet)
{
    H5FileId fileId = H5F.open(file, H5F.OpenMode.ACC_RDONLY);
    H5DataSetId dsId = H5D.open(fileId, dataSet);
    H5DataTypeId typeId = H5D.getType(dsId);
    long[] dims = H5S.getSimpleExtentDims(H5D.getSpace(dsId)).ToArray();

    double[,] raw = new double[dims[0], dims[1]];
    H5D.read(dsId, typeId, new H5Array <double>(raw));

    // Swap the axes so the first index runs over the second file dimension.
    double[,] transposed = new double[dims[1], dims[0]];
    for (int col = 0; col < dims[1]; col++)
    {
        for (int row = 0; row < dims[0]; row++)
        {
            transposed[col, row] = raw[row, col];
        }
    }

    H5T.close(typeId);
    H5D.close(dsId);
    H5F.close(fileId);
    return transposed;
}
/// <summary>
/// Reads the /Mesh/x, /Mesh/y and /Mesh/z float datasets and returns them as
/// three double arrays converted from m to mm.
/// </summary>
/// <param name="fileName">HDF5 file path.</param>
public static double[][] ReadMesh(string fileName)
{
    string[] axisNames = { "x", "y", "z" };
    double[][] result = new double[3][];
    H5FileId fileId = H5F.open(fileName, H5F.OpenMode.ACC_RDONLY);
    for (int axis = 0; axis < axisNames.Length; axis++)
    {
        H5DataSetId dsId = H5D.open(fileId, "/Mesh/" + axisNames[axis]);
        H5DataTypeId dtId = H5D.getType(dsId);
        if (!H5T.equal(dtId, H5T.copy(H5T.H5Type.NATIVE_FLOAT)))
        {
            Console.WriteLine("Error: Invalid dataset type, expected {0}", H5T.H5Type.NATIVE_FLOAT);
        }
        // Element count = total storage bytes / bytes per element.
        float[] buffer = new float[H5D.getStorageSize(dsId) / H5T.getSize(dtId)];
        H5D.read(dsId, dtId, new H5Array <float>(buffer));
        result[axis] = buffer.Select(x => (double)x * 1000.0).ToArray(); // m -> mm
        H5D.close(dsId);
        H5T.close(dtId);
    }
    H5F.close(fileId);
    return result;
}
/// <summary>
/// Reads the 3-D float dataset /FieldData/FD/f0 and returns it as a double
/// array with the axis order reversed ([d0,d1,d2] -> [d2,d1,d0]).
/// </summary>
/// <param name="fileName">HDF5 file path.</param>
public static double[, ,] ReadFieldData3D(string fileName)
{
    H5FileId fileId = H5F.open(fileName, H5F.OpenMode.ACC_RDONLY);
    H5DataSetId fDataSetId = H5D.open(fileId, "/FieldData/FD/f0");
    H5DataTypeId fDataTypeId = H5D.getType(fDataSetId);
    if (!H5T.equal(fDataTypeId, H5T.copy(H5T.H5Type.NATIVE_FLOAT)))
    {
        Console.WriteLine("Error: Invalid dataset type, expected {0}", H5T.H5Type.NATIVE_FLOAT);
    }
    long[] dims = H5S.getSimpleExtentDims(H5D.getSpace(fDataSetId)).ToArray();
    if (dims.Length != 3)
    {
        Console.WriteLine("Error: Invalid field data dimensions");
    }
    float[, ,] data = new float[dims[0], dims[1], dims[2]];
    H5D.read(fDataSetId, fDataTypeId, new H5Array <float>(data));

    // Reorder: reverse the axis order while converting to double.
    double[, ,] fieldValues = new double[dims[2], dims[1], dims[0]];
    for (int i = 0; i < dims[0]; i++)
    {
        for (int j = 0; j < dims[1]; j++)
        {
            for (int k = 0; k < dims[2]; k++)
            {
                fieldValues[k, j, i] = data[i, j, k];
            }
        }
    }

    // Release native handles before returning; the original leaked all three
    // on every call (the sibling readers in this file do close them).
    H5T.close(fDataTypeId);
    H5D.close(fDataSetId);
    H5F.close(fileId);
    return fieldValues;
}
} // make_table

// Verifies H5TB.getTableInfo/getFieldInfo against the known table layout.
static void test_getting_info()
{
    try
    {
        Console.Write("Testing getting table/field information");

        // Open the file to check on the table.
        H5FileId fileId = H5F.open(FILE_NAME, H5F.OpenMode.ACC_RDWR);
        string[] field_names = { "c", "i", "l" };

        // Get the table info.
        TableInfo table = H5TB.getTableInfo(fileId, TABLE_NAME);
        if (table.nFields != N_FIELDS)
        {
            // Report the value actually read; the original printed a stale
            // local that was always 0.
            Console.WriteLine("\ntest_getting_info: incorrect number of fields: read {0} - should be {1}", table.nFields, N_FIELDS);
            nerrors++;
        }
        if (table.nRecords != N_RECORDS)
        {
            // Message now says "records" (it previously said "fields") and
            // prints the value actually read.
            Console.WriteLine("\ntest_getting_info: incorrect number of records: read {0} - should be {1}", table.nRecords, N_RECORDS);
            nerrors++;
        }

        // Get field info. (sizes/offsets are currently unused placeholders.)
        int [] sizes = new int[N_FIELDS];
        int [] offsets = new int[N_FIELDS];
        TableFieldInfo tablefield = H5TB.getFieldInfo(fileId, TABLE_NAME);
        int ii;
        for (ii = 0; ii < N_FIELDS; ii++)
        {
            if (tablefield.fieldName[ii] != field_names[ii])
            {
                // Fixed placeholders: the original reused {0} and {1} so the
                // "read"/"should be" values were printed wrongly.
                Console.WriteLine("\ntest_getting_info: field #{0} has incorrect name: read {1} - should be {2}", ii, tablefield.fieldName[ii], field_names[ii]);
                nerrors++;
            }
        }
        H5F.close(fileId);
        Console.WriteLine("\t\t\tPASSED");
    }
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_getting_info
/// <summary>
/// Writes every attribute defined in <paramref name="hDFDef"/> to the file.
/// </summary>
/// <param name="fileId">Open HDF5 file handle to attach the attributes to.</param>
/// <param name="hDFDef">Definition object whose attribute collection is written.</param>
public static void SetHdfFileAttributeValue(H5FileId fileId, HDFDef hDFDef)
{
    foreach (HDFAttributeDef attrDef in hDFDef.AttCollection.Attributes)
    {
        WriteHdfAttributes.WriteHdfAttribute(fileId, attrDef);
    }
}
/// <summary>
/// Release all resources used.
/// </summary>
public void Dispose()
{
    if (m_file == null)
    {
        return; // already disposed (or never opened)
    }
    H5F.close(m_file);
    m_file = null;
}
/// <summary>
/// Releases the underlying HDF5 file handle.
/// </summary>
public void Dispose()
{
    if (_fileId == null)
    {
        return; // already disposed (or never opened)
    }
    H5F.close(_fileId);
    _fileId = null;
}
// Verifies basic file-open behavior: opening a missing file must fail, and an
// existing file opened read-write can be used as a location for new objects.
static void test_file_open()
{
    try
    {
        // Output message about test being performed.
        Console.Write("Testing file opening I/O");

        // First ensure the file does not exist
        File.Delete(FILE2);

        // Try opening a non-existent file. This should fail.
        try
        {
            H5FileId non_exist_file = H5F.open(FILE2, H5F.OpenMode.ACC_RDWR);

            // should fail, but didn't, print out the error message.
            Console.WriteLine("\ntest_file_open: Attempting to open a non-existent file.");
            nerrors++;
        }
        catch (H5FopenException) { } // does nothing, it should fail

        // Open the file.
        H5FileId fileId = H5F.open(FILE1, H5F.OpenMode.ACC_RDWR);

        // Create dataspace for the dataset in the file.
        hssize_t[] dims = { 20 };
        H5DataSpaceId dspace = H5S.create_simple(RANK, dims);

        // Create a group.
        H5GroupId groupId = H5G.create(fileId, GROUP_NAME);

        // Create a dataset using file as location.
        H5DataSetId dset1Id = H5D.create(fileId, DSET1_NAME, H5T.H5Type.NATIVE_INT, dspace);

        // Create a dataset using group as location.
        H5DataSetId dset2Id = H5D.create(groupId, DSET2_NAME, H5T.H5Type.NATIVE_SHORT, dspace);

        // Close objects and files.
        H5D.close(dset1Id);
        H5D.close(dset2Id);
        H5S.close(dspace);
        H5G.close(groupId);
        H5F.close(fileId);
        Console.WriteLine("\t\t\t\tPASSED");
    }
    catch (HDFException anyHDF5E)
    {
        // Errors raised by the HDF5 library itself count as test failures.
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_file_open
/// <summary>
/// Opens the given HDF5 model file read-only, along with its root group and
/// the "model_weights" group.
/// </summary>
/// <param name="h5File">Path of the HDF5 file; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="h5File"/> is null.</exception>
public NeuralNetworkH5Loader(string h5File)
{
    if (h5File is null)
    {
        throw new ArgumentNullException(nameof(h5File));
    }

    H5FileId = H5F.open(h5File, H5F.OpenMode.ACC_RDONLY);
    H5GroupId = H5G.open(H5FileId, "/");
    H5GroupIdModelWeights = H5G.open(H5GroupId, "model_weights");
}
/// <summary>
/// The constructor: opens the HDF5 file read-only.
/// </summary>
/// <param name="cuda">Specifies the CudaDnn connection to Cuda.</param>
/// <param name="log">Specifies the Log for output.</param>
/// <param name="strFile">Specifies the HDF5 file to load.</param>
public HDF5(CudaDnn <T> cuda, Log log, string strFile)
{
    m_strFile = strFile;
    m_cuda = cuda;
    m_log = log;

    m_file = H5F.open(strFile, H5F.OpenMode.ACC_RDONLY);
    if (m_file == null)
    {
        // Report through the project's logging facility rather than throwing.
        m_log.FAIL("Failed opening HDF5 file: '" + strFile + "'!");
    }
}
/// <summary>
/// Reads the whole dataset back from the H5 file as an mData array of
/// <c>count</c> elements.
/// </summary>
private mData[] ReadData()
{
    Console.WriteLine("Reading H5 file {0}...", filename);

    H5FileId fileId = H5F.open(filename, H5F.OpenMode.ACC_RDONLY);
    H5DataSetId dataSetId = H5D.open(fileId, dataSetName);

    mData[] buffer = new mData[count];
    H5D.read(dataSetId, new H5DataTypeId(H5T.H5Type.STD_REF_OBJ), new H5Array <mData>(buffer));

    H5D.close(dataSetId);
    H5F.close(fileId);
    return buffer;
}
/// <summary>
/// Rewrites part of a dataset's values (destriped data): reads the dataset,
/// overwrites elements from index <paramref name="BrandNo"/> onward with the
/// supplied values, and writes it back.
/// </summary>
/// <typeparam name="T">Element type of the dataset.</typeparam>
/// <param name="dataSetName">Name of the dataset.</param>
/// <param name="dataTypeId">HDF5 type id of the dataset.</param>
/// <param name="values">Destriped replacement data.</param>
/// <param name="BrandNo">Starting flat index (0-based) of the band to overwrite.</param>
private void ReWriteDataSet <T>(string dataSetName, H5DataTypeId dataTypeId, T[] values, int BrandNo)
{
    H5FileId _h5FileId = null;
    H5DataSetId dataSetId = null;
    H5DataSpaceId spaceid = null;
    try
    {
        _h5FileId = H5F.open(fileName, H5F.OpenMode.ACC_RDWR);
        // Open the dataset that contains the requested band.
        dataSetId = H5D.open(_h5FileId, dataSetName);
        spaceid = H5D.getSpace(dataSetId);
        long[] dims = H5S.getSimpleExtentDims(spaceid); // e.g. [3, 1800, 2048]
        int rank = H5S.getSimpleExtentNDims(spaceid);   // e.g. 3
        H5S.close(spaceid);
        spaceid = null; // closed; prevent a double close in finally

        // Total element count for the flat buffer.
        int size = 0;
        if (rank == 0) { size = 1; }
        else if (rank == 1) { size = Convert.ToInt32(dims[0]); }
        else if (rank == 2) { size = Convert.ToInt32(dims[0] * dims[1]); }
        else if (rank == 3) { size = Convert.ToInt32(dims[0] * dims[1] * dims[2]); }

        T[] v = new T[size];
        // Read the original data from the dataset.
        H5D.read <T>(dataSetId, dataTypeId, new H5Array <T>(v));
        // Overwrite the corrected band.
        for (int i = BrandNo; i < values.Length; i++)
        {
            v[i] = values[i];
        }
        H5D.write <T>(dataSetId, dataTypeId, new H5Array <T>(v));
    }
    catch (Exception e)
    {
        // Keep the original exception as inner exception instead of discarding it.
        throw new Exception(e.Message, e);
    }
    finally
    {
        // Null-guard every close: the original passed null handles to
        // H5D.close/H5F.close when H5F.open itself failed, and leaked the
        // dataspace when an exception occurred before H5S.close.
        if (spaceid != null) { H5S.close(spaceid); }
        if (dataSetId != null) { H5D.close(dataSetId); }
        if (_h5FileId != null) { H5F.close(_h5FileId); }
    }
}
// Verifies that an attribute can be created with an explicit
// ATTRIBUTE_CREATE property list.
static void test_attr_plist()
{
    try
    {
        Console.Write("Testing attribute property lists");

        // NOTE(review): dims is never used below — looks like leftover scaffolding.
        hssize_t[] dims = { 256, 512 };
        const string PLST_FILE_NAME = ("tattr_plist.h5");
        hssize_t[] dims1 = { SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3 };
        hssize_t[] dims2 = { ATTR1_DIM };

        // Create file.
        H5FileId fileId = H5F.create(PLST_FILE_NAME, H5F.CreateMode.ACC_TRUNC);

        // Create dataspace for dataset.
        H5DataSpaceId space1_Id = H5S.create_simple(SPACE1_RANK, dims1);

        // Create a dataset.
        H5DataSetId dsetId = H5D.create(fileId, DSET1_NAME, H5T.H5Type.NATIVE_UCHAR, space1_Id);

        // Create dataspace for attribute.
        H5DataSpaceId space2_Id = H5S.create_simple(ATTR1_RANK, dims2);

        // Create default property list for attribute.
        H5PropertyListId plist = H5P.create(H5P.PropertyListClass.ATTRIBUTE_CREATE);

        // Create an attribute for the dataset using the property list.
        H5AttributeId attrId = H5A.create(dsetId, ATTR1_NAME, new H5DataTypeId(H5T.H5Type.NATIVE_INT), space2_Id, plist);

        // Close all objects.
        H5S.close(space1_Id);
        H5S.close(space2_Id);
        H5P.close(plist);
        H5A.close(attrId);
        H5D.close(dsetId);
        H5F.close(fileId);

        Console.WriteLine("\t\t\tPASSED");
    }
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_attr_plist
/// <summary>
/// Opens an existing HDF5 file read-only or read-write.
/// </summary>
/// <param name="_file">Path of the file to open.</param>
/// <param name="_readonly">true to open read-only, false for read-write.</param>
public HDF5File(string _file, bool _readonly)
{
    m_FileName = _file;
    m_FileId = _readonly
        ? H5F.open(_file, H5F.OpenMode.ACC_RDONLY)
        : H5F.open(_file, H5F.OpenMode.ACC_RDWR);
}
/// <summary>
/// Opens the object at <paramref name="path"/> as either a dataset or a group,
/// depending on what the file says it is.
/// </summary>
/// <exception cref="ArgumentException">The object is neither a dataset nor a group.</exception>
public static H5ObjectWithAttributes open(H5FileId id, string path)
{
    H5ObjectInfo oinfo = H5O.getInfoByName(id, path);
    if (oinfo.objectType == H5ObjectType.DATASET)
    {
        return H5D.open(id, path);
    }
    if (oinfo.objectType == H5ObjectType.GROUP)
    {
        return H5G.open(id, path);
    }
    throw new ArgumentException();
}
/// <summary>
/// Creating a file with ACC_TRUNC and immediately closing it must still leave
/// the file on disc.
/// </summary>
public void SimpleOpenClose()
{
    // The enumeration type H5F.CreateMode provides only the legal creation
    // modes; omitted H5Fcreate parameters take their defaults.
    H5FileId fileId = H5F.create(TEST_FILE, H5F.CreateMode.ACC_TRUNC);
    H5F.close(fileId);

    Assert.IsTrue(System.IO.File.Exists(TEST_FILE));
}
/// <summary>
/// Opens an existing file read-only or creates/truncates a new one, then
/// caches the file's attribute and dataset names.
/// </summary>
/// <param name="path">File path.</param>
/// <param name="createFlag">true to create (truncate), false to open read-only.</param>
public HDF5Helper(String path, bool createFlag)
{
    _fileId = createFlag
        ? H5F.create(path, H5F.CreateMode.ACC_TRUNC)
        : H5F.open(path, H5F.OpenMode.ACC_RDONLY);
    getFileAttributeNames();
    getDatasetNames();
}
/// <summary>
/// Opens the group at <paramref name="path"/>, creating it first when the
/// link does not exist yet.
/// </summary>
private static H5GroupId CreateGroupIfNoneExists(H5FileId fileId, string path)
{
    return H5L.Exists(fileId, path)
        ? H5G.open(fileId, path)
        : H5G.create(fileId, path);
}
} // test_copy

// Entry point: runs all datatype API tests against a fresh file and reports
// the accumulated error count.
static void Main(string[] args)
{
    Console.WriteLine();
    Console.WriteLine("TEST: HDF5DotNet Datatype API");
    Console.WriteLine();
    try
    {
        // Suppress error printing from the C library.
        H5E.suppressPrinting();

        // Create the file
        H5FileId fileId = H5F.create("test_types.h5", H5F.CreateMode.ACC_TRUNC);

        // Invokes individual datatype tests
        test_classes();
        test_integer_dtype();
        test_float_dtype();
        test_compound_dtype(fileId);
        test_enum_dtype(fileId);
        test_vlen_dtype(fileId);
        test_copy();

        // Close the file
        H5F.close(fileId);
    }
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }

    // Report results.
    Console.WriteLine();
    if (nerrors > 0)
    {
        // Fixed format string: the third argument ("occurred!") was previously
        // passed but never printed because the format only had {0}.
        Console.WriteLine("Test(s) failed: {0} {1}", nerrors, "occurred!");
    }
    else
    {
        Console.WriteLine("---- All datatype tests passed.");
    }
    Console.WriteLine();
}
/// <summary>
/// Opens the given HDF file read-only.
/// </summary>
/// <param name="hdfFile">File path.</param>
/// <returns>false when the returned handle id is 0, true otherwise.</returns>
public bool Open(string hdfFile)
{
    m_fileId = H5F.open(hdfFile, H5F.OpenMode.ACC_RDONLY);
    //DumpAttri(hdfFile);
    return m_fileId.Id != 0;
}
// TODO: add create with both dimensions defined in shape.
/// <summary>
/// Create OMX file with no matrix tables
/// </summary>
/// <param name="zones">Number of zones - matrix tables will all be square</param>
/// <param name="overwrite">true to automatically overwrite file on disc, false will prompt user to overwrite</param>
public void CreateFileOMX(int zones, bool overwrite)
{
    overwriteCheck(overwrite);

    // Create and open are two separate handles: close the create handle once
    // the file exists, then re-open the file read-write for use.
    H5FileId createdFile = H5F.create(filepath, H5F.CreateMode.ACC_EXCL);
    H5F.close(createdFile);
    fileId = H5F.open(filepath, H5F.OpenMode.ACC_RDWR);

    this.Shape = new long[] { zones, zones };
    H5DataTypeId[] matDataTypes = null;
    this.IsValid = setOMXFileContents(matDataTypes);
}
// Creates a new EMD-style HDF5 file with the standard group layout
// (/user, /microscope, /sample, /comments), writes fixed metadata attributes,
// writes the 3-D data cube, and closes everything.
// NOTE(review): numpos/height/width are unused here — the cube's own lengths
// are used by WriteDataCube; confirm whether the parameters can be dropped.
// NOTE(review): the returned fileId has already been passed to H5F.close
// below — callers should not use it as a live handle; verify call sites.
public static H5FileId InitializeHDF(int numpos, int height, int width, UInt16[,,] datacube, string fullpath)
{
    // write in HDF5 (.h5) format
    // generate standard groups
    H5FileId fileId = H5F.create(fullpath, H5F.CreateMode.ACC_TRUNC);
    //H5FileId fileId = H5F.create("D:/2017/Pixelated Camera/CameraSoftware/FileFormat/Test/test5.emd",
    //H5F.CreateMode.ACC_TRUNC);
    //NumberAttributeGenerator(fileId, "voltage", Convert.ToSingle(300));
    //H5GroupId dataGroup = H5G.create(fileId, "/data"); //dash is required for root group
    H5GroupId userGroup = H5G.create(fileId, "/user");
    H5GroupId micGroup = H5G.create(fileId, "/microscope");
    H5GroupId sampleGroup = H5G.create(fileId, "/sample");
    H5GroupId commentGroup = H5G.create(fileId, "/comments");

    // generate attributes for user group, all attributes are string
    StringAttributeGenerator(userGroup, "user", "Chenyu Zhang");
    StringAttributeGenerator(userGroup, "email", "*****@*****.**");
    StringAttributeGenerator(userGroup, "institution", "UW-Madison");
    StringAttributeGenerator(userGroup, "department", "Materials Science and Engineering");

    // generate attributes for microscope group
    StringAttributeGenerator(micGroup, "voltage units", "kV");
    NumberAttributeGenerator(micGroup, "voltage", Convert.ToSingle(300));
    StringAttributeGenerator(micGroup, "wavelength units", "nm");
    NumberAttributeGenerator(micGroup, "wavelength", Convert.ToSingle(0.00197));

    // generate attributes for sample group
    StringAttributeGenerator(sampleGroup, "material", "STO");
    StringAttributeGenerator(sampleGroup, "preparation", "Mechanical polishing and ion milling");
    StringAttributeGenerator(sampleGroup, "Zone Axis", "[1][0][0]");

    // Write 3D data cube to the file
    WriteDataCube(fileId, datacube);

    // close groups and file
    H5G.close(userGroup);
    H5G.close(micGroup);
    H5G.close(sampleGroup);
    H5G.close(commentGroup);
    //H5G.close(dataGroup);
    H5F.close(fileId);
    return(fileId);
}
static void Main(string[] args) { Console.WriteLine(); Console.WriteLine("TEST: HDF5DotNet Dataset API"); Console.WriteLine(); try { const string FILE_NAME = ("Dataset.h5"); // Suppress error printing from the C library. H5E.suppressPrinting(); // Create a new file using H5F_ACC_TRUNC access, // default file creation properties, and default file // access properties. H5FileId fileId = H5F.create(FILE_NAME, H5F.CreateMode.ACC_TRUNC); test_create(fileId); // test creating dataset test_onedim_array(fileId); // test writing one-dimensional array test_twodims_array(); // test writing multiple-dimensional arrays test_fivedims_array(fileId); // Close the file. H5F.close(fileId); } catch (HDFException anyHDF5E) { Console.WriteLine(anyHDF5E.Message); nerrors++; } catch (System.Exception sysE) { Console.WriteLine(sysE.TargetSite); Console.WriteLine(sysE.Message); nerrors++; } Console.WriteLine(); if (nerrors > 0) { Console.WriteLine("Test(s) failed: ", nerrors, "occurred!"); } else { Console.WriteLine("---- All dataset tests passed."); } Console.WriteLine(); }
// Creates the H5 file and writes `count` mData records (a descending-ramp
// style fixture) into a 1-D dataset of type STD_REF_OBJ.
private void WriteData()
{
    Console.WriteLine("Creating H5 file {0}...", filename);

    // Rank is the number of dimensions of the data array.
    const int RANK = 1;

    // Create an HDF5 file.
    // The enumeration type H5F.CreateMode provides only the legal
    // creation modes. Missing H5Fcreate parameters are provided
    // with default values.
    H5FileId fileId = H5F.create(filename, H5F.CreateMode.ACC_TRUNC);

    // Prepare to create a data space for writing a 1-dimensional
    // signed integer array.
    long[] dims = new long[RANK];
    dims[0] = count;

    // Put descending ramp data in an array so that we can
    // write it to the file.
    mData[] dset_data = new mData[count];
    for (int i = 0; i < count; i++)
    {
        dset_data[i] = new mData(i + 80, i + 40, i + 1);
    }

    // Create a data space to accommodate our 1-dimensional array.
    // The resulting H5DataSpaceId will be used to create the
    // data set.
    H5DataSpaceId spaceId = H5S.create_simple(RANK, dims);

    // Create a copy of a standard data type. We will use the
    // resulting H5DataTypeId to create the data set. We could
    // have used the HST.H5Type data directly in the call to
    // H5D.create, but this demonstrates the use of H5T.copy
    // and the use of a H5DataTypeId in H5D.create.
    H5DataTypeId typeId = H5T.copy(H5T.H5Type.STD_REF_OBJ);

    // Find the size of the type
    int typeSize = H5T.getSize(typeId);

    // Create the data set.
    H5DataSetId dataSetId = H5D.create(fileId, dataSetName, typeId, spaceId);

    // Write the integer data to the data set.
    H5D.write(dataSetId, new H5DataTypeId(H5T.H5Type.STD_REF_OBJ), new H5Array <mData>(dset_data));

    H5D.close(dataSetId);
    H5F.close(fileId);
    Console.WriteLine("H5 file {0} created successfully!", filename);
}
/// <summary>
/// Reads a whole 2-D dataset into a <typeparamref name="T"/>[,] array sized
/// from the dataset's own extents.
/// </summary>
/// <param name="fileId">Open HDF5 file handle.</param>
/// <param name="dataSetName">Dataset path inside the file.</param>
public static T[,] Read2DArray <T>(this H5FileId fileId, string dataSetName)
{
    var dataset = H5D.open(fileId, dataSetName);
    var space = H5D.getSpace(dataset);
    var dims = H5S.getSimpleExtentDims(space);
    var dataType = H5D.getType(dataset);
    if (typeof(T) == typeof(string))
    {
        // this will also need a string hack...
    }
    T[,] dataArray = new T[dims[0], dims[1]];
    var wrapArray = new H5Array <T>(dataArray);
    H5D.read(dataset, dataType, wrapArray);

    // Release the native handles; the original leaked the dataset, dataspace
    // and datatype handles on every call.
    H5T.close(dataType);
    H5S.close(space);
    H5D.close(dataset);
    return dataArray;
}
/// <summary>
/// Writes the 3-D data cube into /data/DEsoftware/data (with the required
/// emd_group_type attribute) plus a 1-D "dim1" axis dataset.
/// </summary>
/// <param name="fileId">Open HDF5 file handle.</param>
/// <param name="datacube">Cube to write; dimensions are taken from the array itself.</param>
public static void WriteDataCube(H5FileId fileId, UInt16[,,] datacube)
{
    H5GroupId dataGroup = H5G.create(fileId, "/data");
    H5GroupId dataSubGroup = H5G.create(dataGroup, "DEsoftware");

    long[] dims = new long[3] { datacube.GetLength(0), datacube.GetLength(1), datacube.GetLength(2) };
    H5DataSpaceId spaceId = H5S.create_simple(3, dims);
    H5DataTypeId typeId = H5T.copy(H5T.H5Type.NATIVE_USHORT);
    H5DataSetId dataSetId = H5D.create(dataSubGroup, "data", typeId, spaceId);

    // Create attribute emd_group_type for dataSubGroup, which is required to have value 1.
    int par = 1;
    int[] AttArray = new int[1] { par };
    dims[0] = AttArray.Length; // reuse dims[0] as the 1-D attribute extent
    H5AttributeId attributeId = H5A.create(dataSubGroup, "emd_group_type",
                                           H5T.copy(H5T.H5Type.NATIVE_UCHAR),
                                           H5S.create_simple(1, dims));
    H5A.write(attributeId, H5T.copy(H5T.H5Type.NATIVE_INT), new H5Array <int>(AttArray));
    H5A.close(attributeId);

    // write datacube to "data", which contains whole 3D datacube
    H5D.write <ushort>(dataSetId, typeId, new H5Array <ushort>(datacube));

    // Close the cube's handles before reusing the variables below; the
    // original overwrote them without closing, leaking the native handles.
    H5S.close(spaceId);
    H5T.close(typeId);
    H5D.close(dataSetId);

    // create a 1-D dataset describing the first dimension
    long[] dim1 = new long[1] { datacube.GetLength(0) };
    double[] dimarray = new double[datacube.GetLength(0)];
    // Rank must be 1 here: dim1 has a single extent (the original passed
    // rank 3 with a 1-element dims array).
    spaceId = H5S.create_simple(1, dim1);
    typeId = H5T.copy(H5T.H5Type.NATIVE_DOUBLE);
    dataSetId = H5D.create(dataSubGroup, "dim1", typeId, spaceId);
    H5D.write <double>(dataSetId, typeId, new H5Array <double>(dimarray));

    H5S.close(spaceId);
    H5T.close(typeId);
    H5D.close(dataSetId);
    H5G.close(dataGroup);
    H5G.close(dataSubGroup);
}
} // test_chunked_dset

// Entry point: runs all property-list API tests against a fresh file and
// reports the accumulated error count.
static void Main(string[] args)
{
    Console.WriteLine();
    Console.WriteLine("TEST: HDF5DotNet Property List API");
    Console.WriteLine();
    try
    {
        const string FILE_NAME = ("Dataset.h5");

        // Suppress error printing from the C library.
        H5E.suppressPrinting();

        // Create a new file using H5F_ACC_TRUNC access,
        // default file creation properties, and default file
        // access properties.
        H5FileId fileId = H5F.create(FILE_NAME, H5F.CreateMode.ACC_TRUNC);

        test_buffer();
        test_chunked_dset();

        // Close the file.
        H5F.close(fileId);
    }
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }

    Console.WriteLine();
    if (nerrors > 0)
    {
        // Fixed format string: the original had no placeholders, so the error
        // count was never printed.
        Console.WriteLine("Test(s) failed: {0} {1}", nerrors, "occurred!");
    }
    else
    {
        Console.WriteLine("---- All property list tests passed.");
    }
    Console.WriteLine();
}
} // test_compound_dtype

// Verifies H5T.copy for both predefined types and a dataset-derived type,
// checking size, class and sign of the copies.
static void test_copy(H5FileId fileId)
{
    try
    {
        Console.Write("Testing copying datatypes");

        // Make a copy of a predefined type.
        H5DataTypeId inttype = H5T.copy(H5T.H5Type.NATIVE_INT);
        uint intsize = H5T.getSize(inttype);

        // Make a copy of that type.
        H5DataTypeId tcopy1 = H5T.copy(inttype);
        uint tcopy1_size = H5T.getSize(tcopy1);

        // The sizes of the copies should be the same.
        if (intsize != tcopy1_size)
        {
            Console.WriteLine("test_copy: copy types incorrectly");
            nerrors++;
        }
        // Close second type
        H5T.close(tcopy1);

        /*
         * Test copy a datatype from a dataset.
         */
        // Create a dataset with a simple dataspace.
        hsize_t[] dims = { DIM0, DIM1 };
        H5DataSpaceId dspace = H5S.create_simple(2, dims);
        H5DataSetId dset = H5D.create(fileId, "test_types", inttype, dspace);

        // Obtain the datatype from the dataset.
        H5DataTypeId dstype = H5T.copy(dset);

        // Check this datatype's class, size, and sign.
        H5T.H5TClass tclass = H5T.getClass(dstype);
        if (tclass != H5T.H5TClass.INTEGER)
        {
            Console.WriteLine("test_copy: copy of dataset's datatype has incorrect class");
            nerrors++;
        }
        uint tsize = H5T.getSize(dstype);
        if (tsize != intsize)
        {
            Console.WriteLine("test_copy: copy of dataset's datatype has incorrect size");
            nerrors++;
        }
        H5T.Sign tsign = H5T.getSign(dstype);
        if (tsign != H5T.Sign.TWOS_COMPLEMENT)
        {
            Console.WriteLine("test_copy: copy of dataset's datatype has incorrect sign, {0}", tsign);
            nerrors++;
        }

        // Close objects.
        H5T.close(inttype);
        H5S.close(dspace);
        H5T.close(dstype);
        H5D.close(dset);

        Console.WriteLine("\t\tPASSED");
    }
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_copy
/// <summary>
/// Creates an engine with no file open and Mohid attribute handling enabled.
/// </summary>
public HDFEngine()
{
    file_id = null;
    last_exception = null;
    UseMohidAttributes = true;
}
/// <summary>
/// Opens an HDF5 file. On failure the exception is stored in
/// <c>last_exception</c> and false is returned instead of throwing.
/// </summary>
/// <param name="file">File to open.</param>
/// <param name="mode">Open mode; defaults to read-only.</param>
/// <returns>true when the handle id is non-negative.</returns>
public bool OpenHDF(FileName file, H5F.OpenMode mode = H5F.OpenMode.ACC_RDONLY)
{
    try
    {
        file_id = H5F.open(file.FullPath, mode);
        last_exception = null;
        return file_id.Id >= 0;
    }
    catch (Exception ex)
    {
        last_exception = ex;
        return false;
    }
}
/// <summary>
/// Creates an HDF5 file. On failure the exception is stored in
/// <c>last_exception</c> and false is returned instead of throwing.
/// </summary>
/// <param name="file">File to create.</param>
/// <param name="mode">Create mode; defaults to truncate.</param>
/// <returns>true when the handle id is non-negative.</returns>
public bool CreateHDF(FileName file, H5F.CreateMode mode = H5F.CreateMode.ACC_TRUNC)
{
    try
    {
        file_id = H5F.create(file.FullPath, mode);
        last_exception = null;
        return file_id.Id >= 0;
    }
    catch (Exception ex)
    {
        last_exception = ex;
        return false;
    }
}
// Writes a 2-D array of compound records (members "c", "i", "l") to a
// dataset, reopens it, and verifies member names, classes, indices and sizes
// via the native type.
static void test_compound_dtype(H5FileId fileId)
{
    uint i, j, n;
    try
    {
        Console.Write("Testing compound datatypes");

        // Allocate space for the points & check arrays
        // NOTE(review): `check` is never read back here — looks like leftover
        // scaffolding from a read-verify step.
        s1[,] points = new s1[DIM0,DIM1];
        s1[,] check = new s1[DIM0,DIM1];

        // Initialize the dataset
        for (i = n = 0; i < DIM0; i++)
        {
            for (j = 0; j < DIM1; j++)
            {
                points[i,j].c = 't';
                points[i,j].i = n++;
                points[i,j].l = (i*10+j*100)*n;
            }
        }

        // Create the data space
        hsize_t[] dims = {DIM0,DIM1};
        H5DataSpaceId spaceId = H5S.create_simple(2, dims);

        // Create compound datatype for disk storage
        H5DataTypeId typeId = H5T.create(H5T.CreateClass.COMPOUND, 16);

        // Insert members (byte offsets 0, 1, 5 within the 16-byte record)
        H5T.insert(typeId, "c", 0, H5T.H5Type.STD_U8LE);
        H5T.insert(typeId, "i", 1, H5T.H5Type.STD_U32LE);
        H5T.insert(typeId, "l", 5, H5T.H5Type.STD_I64BE);

        // Create the dataset
        H5DataSetId dsetId = H5D.create(fileId, DSET_COMPOUND_NAME, typeId, spaceId);

        // Write the dataset
        H5D.write(dsetId, typeId, new H5Array<s1>(points));

        // Close dataset and dataspace
        H5D.close(dsetId);
        H5S.close(spaceId);
        H5T.close(typeId);

        // Open dataset again to check various functions.
        dsetId = H5D.open(fileId, DSET_COMPOUND_NAME);

        // Get its type and native type.
        H5DataTypeId dset_typeId = H5D.getType(dsetId);
        H5DataTypeId native_type = H5T.getNativeType(dset_typeId, H5T.Direction.DEFAULT);

        // Check name against this list
        string[] memb_names = { "c", "i", "l" };

        H5DataTypeId mtypeId;             // member type
        H5T.H5TClass memb_cls1, memb_cls2; // member classes retrieved different ways
        string memb_name;                 // member name
        int memb_idx;                     // member index

        // Get the number of members in the type.
        int nmembers = H5T.getNMembers(native_type);

        // For each member, check its name, class, index, and size.
        for (i = 0; i < nmembers; i++)
        {
            // Get the type of the ith member.
            mtypeId = H5T.getMemberType(native_type, i);

            // Get the name of the ith member.
            memb_name = H5T.getMemberName(native_type, i);
            if (memb_name != memb_names[i])
            {
                Console.WriteLine("test_compound_dtypes: incorrect member name, {0}, for member no {1}", memb_name, i);
                nerrors++;
            }

            // Get the class of the ith member and then verify the class.
            memb_cls1 = H5T.getMemberClass(native_type, i);
            if (memb_cls1 != H5T.H5TClass.INTEGER)
            {
                Console.WriteLine("test_compound_dtypes: incorrect class, {0}, for member no {1}", memb_cls1, i);
                nerrors++;
            }

            // Get the class via type id
            memb_cls2 = H5T.getClass(mtypeId);
            if (memb_cls1 != memb_cls2)
            {
                Console.WriteLine("test_compound_dtypes: H5T.getMemberClass and H5T.getClass return different classes for the same type.");
                nerrors++;
            }

            // Get member's index back from its name and verify it.
            memb_idx = H5T.getMemberIndex(dset_typeId, memb_name);
            if (memb_idx != i)
            {
                Console.WriteLine("test_compound_dtypes: H5T.getMemberName and/or H5T.getMemberIndex returned false values.");
                nerrors++;
            }

            // Get size of the member's type and verify it.
            uint tsize = H5T.getSize(mtypeId);
            switch (i)
            {
                case 0:
                    //Console.WriteLine("tsize = {0}, STD_U8LE = {1}", tsize, H5T.getSize(H5T.H5Type.STD_U8LE));
                    if (tsize != H5T.getSize(H5T.H5Type.STD_U8LE))
                    {
                        Console.WriteLine("test_compound_dtypes: First member has incorrect size");
                        nerrors++;
                    }
                    break;
                case 1:
                    if (tsize != H5T.getSize(H5T.H5Type.STD_U32LE))
                    {
                        Console.WriteLine("test_compound_dtypes: Second member has incorrect size");
                        nerrors++;
                    }
                    break;
                case 2:
                    if (tsize != H5T.getSize(H5T.H5Type.STD_I64BE))
                    {
                        Console.WriteLine("test_compound_dtypes: Third member has incorrect size");
                        nerrors++;
                    }
                    break;
                default:
                    Console.WriteLine("test_compound_dtypes: Only 3 members.");
                    break;
            } // end switch

            // Close current member type.
            H5T.close(mtypeId);
        } // end for

        // Close objects.
        H5T.close(dset_typeId);
        H5T.close(native_type);
        H5D.close(dsetId);

        Console.WriteLine("\t\tPASSED");
    } // end of try block
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_compound_dtype
// Commits a named enum type ("Color Type"), writes a short-backed enum
// dataset through a memory-side enum type, then reopens the dataset and reads
// the data back via the dataset's own type.
static void test_enum(H5FileId fileId)
{
    //void* tmp;
    short i, j;
    string[] mname = { "RED", "GREEN", "BLUE", "YELLOW", "PINK", "PURPLE", "ORANGE", "WHITE" };
    short[,] spoints2 = new short[DIM0, DIM1];
    short[,] scheck2 = new short[DIM0, DIM1];
    try
    {
        Console.Write("Testing enumeration datatypes");

        // Create the data space */
        hsize_t[] dims = { DIM0, DIM1 };
        H5DataSpaceId dspace = H5S.create_simple(2, dims);

        // Construct enum type based on native type
        H5DataTypeId etype = H5T.enumCreate(H5T.H5Type.NATIVE_INT);

        // Insert members to type (each name's value is its loop index).
        for (i = 0; i < 8; i++)
        {
            H5T.enumInsert(etype, mname[i], ref i);
        }

        // Assign a name to the enum type, close it, and open it by name.
        H5T.commit(fileId, "Color Type", etype);
        H5T.close(etype);
        H5DataTypeId color_type = H5T.open(fileId, "Color Type");

        // Check its class
        H5T.H5TClass tcls = H5T.getClass(color_type);
        if (tcls != H5T.H5TClass.ENUM)
            Console.WriteLine("test_enum: class of color_type = {0} is incorrect, should be ENUM", tcls);

        // Create the dataset
        H5DataSetId dataset = H5D.create(fileId, DSET_ENUM_NAME, color_type, dspace);

        // Construct enum type based on native type in memory.
        H5DataTypeId etype_m = H5T.enumCreate(H5T.H5Type.NATIVE_SHORT);

        // Insert members to type.
        for (i = 0; i < 8; i++)
        {
            H5T.enumInsert(etype_m, mname[i], ref i);
        }

        // Initialize the dataset and buffer.
        for (i = 0; i < DIM0; i++)
        {
            for (j = 0; j < DIM1; j++)
            {
                spoints2[i, j] = i;
                scheck2[i, j] = 0;
            }
        }

        // Write the data to the dataset.
        H5D.write(dataset, etype_m, new H5Array<short>(spoints2));

        // Close objects.
        H5D.close(dataset);
        H5T.close(color_type);
        H5S.close(dspace);
        H5T.close(etype_m);

        // Open dataset again to check the type.
        dataset = H5D.open(fileId, DSET_ENUM_NAME);

        // Get dataset's datatype.
        H5DataTypeId dstype = H5D.getType(dataset);

        // Get the datatype's class and check that it is of class ENUM.
        H5T.H5TClass tclass = H5T.getClass(dstype);
        if (tclass != H5T.H5TClass.ENUM)
        {
            Console.WriteLine("Type should be an enum class");
            nerrors++;
        }

        // Read data back.
        H5D.read(dataset, dstype, new H5Array<short>(scheck2));

        // Close objects.
        H5D.close(dataset);
        H5T.close(dstype);

        Console.WriteLine("\t\tPASSED");
    } // end of try block
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // end of test_enum
/// <summary>
/// Constructs a new EpochHDF5Persistor with an HDF5 file at the given path.
/// Opens the file and its committed datatypes if it already exists; otherwise
/// creates the file and commits the STRING40, KEY40VAR40, MEASUREMENT and
/// EXTDEV_MEASUREMENT compound types.
/// </summary>
/// <param name="filename">Desired HDF5 path</param>
/// <param name="assocFilePrefix">Prefix for auxiliary (e.g. image) file associated with this HDF5 file</param>
/// <param name="guidGenerator">Function for generating new UUIDs (e.g. Guid.NewGuid)</param>
/// <param name="compression">Automatic numeric data compression level (0 = none, 9 = maximum)</param>
public EpochHDF5Persistor(string filename, string assocFilePrefix, Func<Guid> guidGenerator, uint compression = 9)
    : base(guidGenerator)
{
    // FIX: ArgumentNullException is the specific type for a null argument; it
    // derives from ArgumentException, so existing catch blocks still match.
    if (filename == null)
        throw new ArgumentNullException(nameof(filename), "File name must not be null");
    if (compression > 9)
        throw new ArgumentException("Compression must be 0-9", nameof(compression));

    if (assocFilePrefix == null)
        assocFilePrefix = "";

    this.AssociatedFilePrefix = assocFilePrefix;

    NumericDataCompression = compression;

    EpochGroupsIDs = new Stack<EpochGroupIDs>();

    var fInfo = new FileInfo(filename);
    string prefixedFilePath = fInfo.DirectoryName + Path.DirectorySeparatorChar + this.AssociatedFilePrefix + fInfo.Name;

    var currentFile = new FileInfo(prefixedFilePath);
    if (currentFile.Exists)
    {
        // Existing file: open read/write and look up the committed types by name.
        fileId = H5F.open(prefixedFilePath, H5F.OpenMode.ACC_RDWR);

        string_t = H5T.open(fileId, "STRING40");
        keyval_t = H5T.open(fileId, "KEY40VAR40");
        measurement_t = H5T.open(fileId, "MEASUREMENT");
        extdevmeasurement_t = H5T.open(fileId, "EXTDEV_MEASUREMENT");

        //TODO Check persistence version
    }
    else
    {
        fileId = H5F.create(prefixedFilePath, H5F.CreateMode.ACC_EXCL);
        WriteAttribute(fileId, "version", Version);

        // Create our standard String type (string of length FIXED_STRING_LENGTH characters).
        // FIX: use FIXED_STRING_LENGTH instead of the magic number 40 — the
        // "value" member below is inserted at offset FIXED_STRING_LENGTH, so
        // the two must agree (the committed type names document it as 40).
        string_t = H5T.copy(H5T.H5Type.C_S1);
        H5T.setSize(string_t, FIXED_STRING_LENGTH);
        H5T.commit(fileId, "STRING40", string_t);

        // Create our key/value compound type (two fixed-length strings).
        // FIX: total size derived from the member layout instead of a hard-coded 80.
        keyval_t = H5T.create(H5T.CreateClass.COMPOUND, 2 * FIXED_STRING_LENGTH);
        H5T.insert(keyval_t, "key", 0, string_t);
        H5T.insert(keyval_t, "value", FIXED_STRING_LENGTH, string_t);
        H5T.commit(fileId, "KEY40VAR40", keyval_t);

        // Create the Measurement compound type: a double "quantity" followed by
        // a fixed-length "unit" string.
        // FIX: size computed from the members (sizeof(double) + string size)
        // instead of the hard-coded 48 the original asked reviewers to confirm.
        measurement_t = H5T.create(H5T.CreateClass.COMPOUND,
                                   H5T.getSize(H5T.H5Type.NATIVE_DOUBLE) + H5T.getSize(string_t));
        H5T.insert(measurement_t, "quantity", 0, H5T.H5Type.NATIVE_DOUBLE);
        H5T.insert(measurement_t, "unit", H5T.getSize(H5T.H5Type.NATIVE_DOUBLE), string_t);
        H5T.commit(fileId, "MEASUREMENT", measurement_t);

        // Create the ExtDev/Measurement compound type.
        extdevmeasurement_t = H5T.create(H5T.CreateClass.COMPOUND,
                                         H5T.getSize(string_t) + 2 * H5T.getSize(measurement_t));
        H5T.insert(extdevmeasurement_t, "externalDevice", 0, string_t);
        H5T.insert(extdevmeasurement_t, "measurement", H5T.getSize(string_t), measurement_t);
        H5T.commit(fileId, "EXTDEV_MEASUREMENT", extdevmeasurement_t);
    }

    Interlocked.Increment(ref _openHdf5FileCount);
}
static void test_create(H5FileId fileId) { try { Console.Write("Testing create, open, and close datasets"); const string DSET_DEFAULT_NAME = ("default"); const int RANK = 2; // one-dimension // Create the data space. hsize_t[] dims = { 256, 512 }; H5DataSpaceId space = H5S.create_simple(RANK, dims); // Create a small data space for compact dataset. hsize_t[] small_dims = { 16, 8 }; H5DataSpaceId small_space = H5S.create_simple(RANK, small_dims); // Create a dataset using the default dataset creation properties. H5DataSetId dataset = H5D.create(fileId, DSET_DEFAULT_NAME, H5T.H5Type.NATIVE_DOUBLE, space); // Close the dataset. H5D.close(dataset); // Try creating a dataset that already exists. This should fail since a // dataset can only be created once. try { dataset = H5D.create(fileId, DSET_DEFAULT_NAME, H5T.H5Type.NATIVE_DOUBLE, space); // should fail, but didn't, print an error message. Console.WriteLine("\ntest_create: Attempting to create an existing dataset."); nerrors++; } catch (HDFException) {} // does nothing, it should fail // Open the dataset we created above and then close it. This is how // existing datasets are accessed. dataset = H5D.open(fileId, DSET_DEFAULT_NAME); H5D.close(dataset); // Try opening a non-existent dataset. This should fail since new datasets // cannot be created with this function. try { dataset = H5D.open(fileId, "does_not_exist"); // should fail, but didn't, print an error message. Console.WriteLine("\ntest_create: Opened a non-existent dataset."); nerrors++; } catch (HDFException) {} // does nothing, it should fail Console.WriteLine("\tPASSED"); } // end try block catch (HDFException anyHDF5E) { Console.WriteLine(anyHDF5E.Message); nerrors++; } catch (System.Exception sysE) { Console.WriteLine(sysE.TargetSite); Console.WriteLine(sysE.Message); nerrors++; } } // test_create
void OpenFile() { _file_id = H5F.open(_filename, _mode); }
} // test_twodims_array

/// <summary>
/// Tests writing and reading a five-dimensional integer array dataset: fills a
/// 1x2x2x4x4 buffer, writes it, closes and reopens the dataset, reads it back,
/// and verifies every element. Increments <c>nerrors</c> on mismatch or exception.
/// </summary>
/// <param name="fileId">Open HDF5 file to create the dataset in.</param>
static void test_fivedims_array(H5FileId fileId)
{
    try
    {
        Console.Write("Testing write/read five-dimensional array");
        const string DSET_NAME = ("Five-dim IntArray");
        const int DIM1 = 1; // data set dimensions
        const int DIM2 = 2;
        const int DIM3 = 2;
        const int DIM4 = 4;
        const int DIM5 = 4;
        const int RANK = 5; // five-dimension

        // Data and output buffer initialization: each element is the sum of
        // its indices, giving every position a predictable, verifiable value.
        int i, j, k, m, n;
        int[, , , ,] data = new int[DIM1, DIM2, DIM3, DIM4, DIM5];
        for (i = 0; i < DIM1; i++)
            for (j = 0; j < DIM2; j++)
                for (k = 0; k < DIM3; k++)
                    for (m = 0; m < DIM4; m++)
                        for (n = 0; n < DIM5; n++)
                        {
                            data[i, j, k, m, n] = i + j + k + m + n;
                        }

        // Describe the size of the array and create the data space for fixed
        // size dataset.
        ulong[] dims = { DIM1, DIM2, DIM3, DIM4, DIM5 };
        H5DataSpaceId dspaceId = H5S.create_simple(RANK, dims);

        // Define datatype for the data in the file.
        H5DataTypeId dtypeId = H5T.copy(H5T.H5Type.NATIVE_INT);

        // Create the data set DSET_NAME.
        H5DataSetId dsetId = H5D.create(fileId, DSET_NAME, dtypeId, dspaceId);

        // Write the five-dimensional data set array.
        H5D.write(dsetId, dtypeId, new H5Array<int>(data));

        // Zero the read-back buffer so stale values cannot mask a failed read.
        int[, , , ,] outdata = new int[DIM1, DIM2, DIM3, DIM4, DIM5];
        for (i = 0; i < DIM1; i++)
            for (j = 0; j < DIM2; j++)
                for (k = 0; k < DIM3; k++)
                    for (m = 0; m < DIM4; m++)
                        for (n = 0; n < DIM5; n++)
                        {
                            outdata[i, j, k, m, n] = 0;
                        }

        // Close and re-open the dataset so the read goes through the file,
        // not just cached state.
        H5D.close(dsetId);
        dsetId = H5D.open(fileId, DSET_NAME);

        // Read back data.
        H5D.read(dsetId, dtypeId, new H5Array<int>(outdata));

        // Compare against input buffer to verify.
        for (i = 0; i < DIM1; i++)
            for (j = 0; j < DIM2; j++)
                for (k = 0; k < DIM3; k++)
                    for (m = 0; m < DIM4; m++)
                        for (n = 0; n < DIM5; n++)
                        {
                            int out_value = outdata[i, j, k, m, n];
                            int in_value = data[i, j, k, m, n];
                            if (out_value != in_value)
                            {
                                Console.WriteLine("\ntest_fivedim_array: read value differs from input: read {0} - input {1}",
                                                  out_value, in_value);
                                nerrors++;
                            }
                        }

        // Close all objects and file.
        H5D.close(dsetId);
        H5T.close(dtypeId);
        H5S.close(dspaceId);
        Console.WriteLine("\tPASSED");
    }
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
}
} // test_create static void test_onedim_array(H5FileId fileId) { try { Console.Write("Testing write/read one-dimensional array"); const string DSET_NAME = ("One-dim IntArray"); const int NX = 5; // data set dimension const int RANK = 1; // one-dimension // Data and output buffer initialization. int i; int[] data = new int[NX]; for (i = 0; i < NX; i++) data[i] = i; // Describe the size of the array and create the data space for fixed // size dataset. ulong[] dims = { NX }; H5DataSpaceId dspaceId = H5S.create_simple(RANK, dims); // Define datatype for the data in the file. // We will store little endian INT numbers. H5DataTypeId dtypeId = H5T.copy(H5T.H5Type.STD_I32LE); // Create the data set DATASETNAME. H5DataSetId dsetId = H5D.create(fileId, DSET_NAME, dtypeId, dspaceId); // Write the one-dimensional data set array H5D.write(dsetId, new H5DataTypeId(H5T.H5Type.STD_I32LE), new H5Array<int>(data)); int[] outdata = new int[NX]; for (i = 0; i < NX; i++) outdata[i] = 0; // Read data back. H5D.read(dsetId, new H5DataTypeId(H5T.H5Type.STD_I32LE), new H5Array<int>(outdata)); // Compare against input buffer to verify. for (i = 0; i < NX; i++) { if (outdata[i] != data[i]) { Console.WriteLine("\ntest_onedim_array: read value differs from input: read {0} - input {1}", outdata[i], data[i]); nerrors++; } } // Close all objects and file. H5D.close(dsetId); H5T.close(dtypeId); H5S.close(dspaceId); Console.WriteLine("\tPASSED"); } catch (HDFException anyHDF5E) { Console.WriteLine(anyHDF5E.Message); nerrors++; } catch (System.Exception sysE) { Console.WriteLine(sysE.TargetSite); Console.WriteLine(sysE.Message); nerrors++; } } // test_onedim_array
public AttributeValue GetAttribute(string attribXml, string attributeName, H5FileId fileId) { H5AttributeId attributeId = H5A.openName(fileId, attributeName); //根据属性名称得到属性Id H5DataTypeId attributeType = H5A.getType(attributeId); //得到属性数据类型 H5T.H5TClass typeClass = H5T.getClass(attributeType); H5DataSpaceId spaceId = H5A.getSpace(attributeId); int rank = H5S.getSimpleExtentNDims(spaceId); AttributeValue atrributeData = new AttributeValue(); string[] stringDatatmp = new string[1]; stringDatatmp[0] = "NULL"; XmlReaderSettings settings = new XmlReaderSettings(); settings.DtdProcessing = DtdProcessing.Ignore; settings.ValidationType = ValidationType.None; settings.ValidationEventHandler += settings_ValidationEventHandler; settings.CheckCharacters = false; StringReader xmlSr = ChangeXmlInGBCode(attribXml); XmlReader reader = XmlReader.Create(xmlSr); XmlDocument xml = new XmlDocument(); xml.Load(reader); XmlNodeList node = xml.GetElementsByTagName("hdf5:Attribute"); foreach (XmlNode child in node) { if (child.OuterXml.Contains(attributeName)) stringDatatmp[0] = child.InnerText; } stringDatatmp[0] = stringDatatmp[0].Replace("\r\n", ""); stringDatatmp[0] = stringDatatmp[0].Trim(); atrributeData.dataValue = stringDatatmp; switch (typeClass) { case H5T.H5TClass.FLOAT: atrributeData.valueType = DataValueType.FLOAT; break; case H5T.H5TClass.INTEGER: atrributeData.valueType = DataValueType.INT; break; case H5T.H5TClass.STRING: atrributeData.valueType = DataValueType.STRING; break; default: break; } atrributeData.rank = rank; H5T.close(attributeType); H5S.close(spaceId); H5A.close(attributeId); return atrributeData; }
/// <summary>
/// Reads an attribute's value directly from the HDF5 file, dispatching on the
/// attribute's type class (FLOAT/INTEGER/STRING) and rank. Returns an
/// AttributeValue whose dataValue holds a float/int array for numeric
/// attributes, or a string array for string attributes.
/// NOTE(review): for STRING attributes of rank 1 and 2 the returned arrays are
/// allocated but never filled (the read calls are commented out below) — the
/// caller receives arrays of nulls. Numeric attributes of rank 0 also leave
/// dataValue null. Confirm whether callers depend on this.
/// </summary>
/// <param name="fileId">Open HDF5 file containing the attribute.</param>
/// <param name="attributeName">Name of the attribute to read.</param>
public AttributeValue GetAttribute(H5FileId fileId, string attributeName)
{
    H5AttributeId attributeId = H5A.openName(fileId, attributeName);  // attribute handle, looked up by name
    H5DataTypeId attributeType = H5A.getType(attributeId);            // attribute's datatype
    int size = H5T.getSize(attributeType); // NOTE(review): computed but unused
    H5T.H5TClass typeClass = H5T.getClass(attributeType);
    H5DataSpaceId spaceId = H5A.getSpace(attributeId);
    long[] dims = H5S.getSimpleExtentDims(spaceId);
    int rank = H5S.getSimpleExtentNDims(spaceId);
    H5T.H5Type h5type;
    Type dataType = null; // NOTE(review): never assigned a real value or read
    AttributeValue atrributeData = new AttributeValue();
    atrributeData.dataValue = null;
    atrributeData.valueType = DataValueType.EMPTY;
    atrributeData.rank = rank;
    switch (typeClass)
    {
        case H5T.H5TClass.FLOAT:
            h5type = H5T.H5Type.NATIVE_FLOAT;
            if (rank == 1)
            {
                float[] floatDatatmp = new float[dims[0]];
                H5A.read(attributeId, new H5DataTypeId(h5type), new H5Array<float>(floatDatatmp));
                atrributeData.dataValue = floatDatatmp;
            }
            else if (rank == 2)
            {
                float[,] floatDatatmp = new float[dims[0], dims[1]];
                H5A.read(attributeId, new H5DataTypeId(h5type), new H5Array<float>(floatDatatmp));
                atrributeData.dataValue = floatDatatmp;
            }
            // Rank 0 (scalar) floats fall through with dataValue == null.
            atrributeData.valueType = DataValueType.FLOAT;
            break;
        case H5T.H5TClass.INTEGER:
            h5type = H5T.H5Type.NATIVE_INT;
            // int[,] intDatatmp = null;
            if (rank == 1)
            {
                int[] intDatatmp = new int[dims[0]];
                H5A.read(attributeId, new H5DataTypeId(h5type), new H5Array<int>(intDatatmp));
                atrributeData.dataValue = intDatatmp;
            }
            else if (rank == 2)
            {
                int[,] intDatatmp = new int[dims[0], dims[1]];
                H5A.read(attributeId, new H5DataTypeId(h5type), new H5Array<int>(intDatatmp));
                atrributeData.dataValue = intDatatmp;
            }
            // Rank 0 (scalar) ints fall through with dataValue == null.
            atrributeData.valueType = DataValueType.INT;
            break;
        case H5T.H5TClass.STRING:
            h5type = H5T.H5Type.C_S1;
            if (rank == 0)
            {
                // Scalar string: read raw bytes with the attribute's own type,
                // then decode and strip NUL padding.
                // NOTE(review): fixed 255-byte buffer — a longer fixed-length
                // string attribute would not fit; confirm against data files.
                string[] stringDatatmp = new string[1];
                byte[] bytedata = new byte[255];
                H5A.read(attributeId, attributeType, new H5Array<byte>(bytedata));
                //H5A.read(attributeId, new H5DataTypeId(h5type), new H5Array<string>(stringDatatmp));
                stringDatatmp[0] = Encoding.Default.GetString(bytedata).Trim('\0');
                atrributeData.dataValue = stringDatatmp;
            }
            else if (rank == 1)
            {
                // NOTE(review): reading rank-1 string attributes is
                // unimplemented — several attempts (variable-size memtype,
                // byte-buffer decode) were left commented out here; the array
                // is returned unfilled (all nulls).
                string[] stringDatatmp = new string[dims[0]];
                // string stringDatatmp = "";
                // byte[] bytedata = new byte[255];
                // byte[,] bytedata = new byte[2,255];
                // H5DataTypeId memtype = H5T.copy(H5T.H5Type.C_S1);
                //H5T.setVariableSize(memtype);
                //H5T.setSize(attributeType, size);
                // H5A.read(attributeId, memtype, new H5Array<string>(stringDatatmp));
                // H5A.read(attributeId, new H5DataTypeId(h5type), new H5Array<string>(stringDatatmp));
                // stringDatatmp[0] = Encoding.Default.GetString(bytedata).Trim('\0');
                //string test = Encoding.Default.GetString(bytedata).Trim('\0');
                // atrributeData.dataValue = stringDatatmp;
                // VariableLengthString[] value = new VariableLengthString[1];
                atrributeData.dataValue = stringDatatmp;
            }
            else if (rank == 2)
            {
                // NOTE(review): same as rank 1 — the 2-D string array is
                // allocated but never read into.
                string[,] stringDatatmp = new string[dims[0], dims[1]];
                //H5DataTypeId memtype = H5T.copy(H5T.H5Type.C_S1);
                //H5T.setVariableSize(memtype);
                //H5A.read(attributeId, memtype, new H5Array<string>(stringDatatmp));
                atrributeData.dataValue = stringDatatmp;
            }
            atrributeData.valueType = DataValueType.STRING;
            break;
        default:
            // Unknown type class: leave dataValue null / valueType EMPTY.
            h5type = H5T.H5Type.C_S1;
            break;
    }
    H5T.close(attributeType);
    H5S.close(spaceId);
    H5A.close(attributeId);
    return atrributeData;
}