/// <summary>
/// Rewrites a dataset's values with destriped data.
/// </summary>
/// <typeparam name="T">Element type of the dataset.</typeparam>
/// <param name="dataSetName">Name of the dataset to rewrite.</param>
/// <param name="dataTypeId">HDF5 type id of the dataset elements.</param>
/// <param name="values">Destriped data to write back.</param>
/// <param name="BrandNo">Band index within the dataset, starting at 0.</param>
private void ReWriteDataSet<T>(string dataSetName, H5DataTypeId dataTypeId, T[] values, int BrandNo)
{
    H5FileId _h5FileId = null;
    H5DataSetId dataSetId = null;
    H5DataSpaceId spaceid = null;
    try
    {
        _h5FileId = H5F.open(fileName, H5F.OpenMode.ACC_RDWR);

        // Open the dataset that contains the requested band.
        dataSetId = H5D.open(_h5FileId, dataSetName);
        spaceid = H5D.getSpace(dataSetId);
        long[] dims = H5S.getSimpleExtentDims(spaceid); // extents, e.g. [3,1800,2048]
        int rank = H5S.getSimpleExtentNDims(spaceid);   // number of dimensions, e.g. 3
        H5S.close(spaceid);
        spaceid = null; // prevent a double close in finally

        // Total element count for the supported ranks (0..3).
        int size = 0;
        if (rank == 0)
        {
            size = 1;
        }
        else if (rank == 1)
        {
            size = Convert.ToInt32(dims[0]);
        }
        else if (rank == 2)
        {
            size = Convert.ToInt32(dims[0] * dims[1]);
        }
        else if (rank == 3)
        {
            size = Convert.ToInt32(dims[0] * dims[1] * dims[2]);
        }

        // Read the existing raw data, overlay the corrected values, write back.
        T[] v = new T[size];
        H5D.read<T>(dataSetId, dataTypeId, new H5Array<T>(v));

        // NOTE(review): the copy starts at flat element index BrandNo rather than at
        // a band offset (BrandNo * bandSize) — confirm against callers that this is
        // the intended overlay for rank-3 data.
        for (int i = BrandNo; i < values.Length; i++)
        {
            v[i] = values[i];
        }

        H5D.write<T>(dataSetId, dataTypeId, new H5Array<T>(v));
    }
    // FIX: the original `catch (Exception e) { throw new Exception(e.Message); }`
    // discarded the exception type and stack trace; let the exception propagate.
    finally
    {
        // Null-guard so cleanup of a failed open does not mask the real error.
        if (spaceid != null)
            H5S.close(spaceid);
        if (dataSetId != null)
            H5D.close(dataSetId);
        if (_h5FileId != null)
            H5F.close(_h5FileId);
    }
}
/// <summary>
/// Opens an HDF5 file read-only, prints the rank and dimensions of
/// "/group/dataset", dumps its "int" and "string" attributes, and — when the
/// data is rank 2 — prints the full integer matrix.
/// </summary>
/// <param name="filePath">Path of the HDF5 file to read.</param>
private static void ReadFile(string filePath)
{
    var fileId = H5F.open(filePath, H5F.OpenMode.ACC_RDONLY);
    var datasetId = H5D.open(fileId, "/group/dataset");

    // Dataspace: report rank and per-dimension extents.
    var spaceId = H5D.getSpace(datasetId);
    var rank = H5S.getSimpleExtentNDims(spaceId);
    WriteLine("Rank: {0}", rank);
    var dims = H5S.getSimpleExtentDims(spaceId);
    Write("Dims:");
    for (int i = 0; i < dims.Length; i++)
    {
        Write(" {0}", dims[i]);
    }
    WriteLine();
    H5S.close(spaceId);

    // Scalar integer attribute.
    var intValues = new int[1];
    var intAttr = H5A.openName(datasetId, "int");
    H5A.read(intAttr, H5A.getType(intAttr), new H5Array<int>(intValues));
    WriteLine("int: {0}", intValues[0]);
    H5A.close(intAttr);

    // Fixed-length string attribute: read the raw bytes and decode as ASCII.
    var strAttr = H5A.openName(datasetId, "string");
    var strType = H5A.getType(strAttr);
    var strSize = H5T.getSize(strType);
    WriteLine("string length: {0}", strSize);
    var raw = new byte[strSize];
    H5A.read(strAttr, strType, new H5Array<byte>(raw));
    WriteLine("string: {0}", Encoding.ASCII.GetString(raw));
    H5T.close(strType);
    H5A.close(strAttr);

    // For 2-D integer data, print the matrix row by row.
    if (rank == 2)
    {
        var matrix = new int[dims[0], dims[1]];
        H5D.read(datasetId, H5D.getType(datasetId), new H5Array<int>(matrix));
        for (int r = 0; r < matrix.GetLength(0); ++r)
        {
            for (int c = 0; c < matrix.GetLength(1); ++c)
            {
                Write(" {0}", matrix[r, c]);
            }
            WriteLine();
        }
    }

    H5D.close(datasetId);
    H5F.close(fileId);
}
/// <summary>
/// Opens a dataset, validates its rank and element class (float/int), and
/// either reshapes <paramref name="blob"/> to the dataset's shape or requires
/// the shapes to match.
/// </summary>
/// <param name="blob">Blob to validate or reshape against the dataset.</param>
/// <param name="strDatasetName">Name of the dataset to open.</param>
/// <param name="bReshape">When true, reshape the blob to the dataset shape; otherwise the shapes must match.</param>
/// <param name="nMinDim">Minimum accepted number of dimensions.</param>
/// <param name="nMaxDim">Maximum accepted number of dimensions.</param>
/// <param name="id">Optional group to open from; the member file is used when null.</param>
/// <param name="bAllowSingleItems">When true, a rank-1 single-item dataset may stand in for a mismatched shape.</param>
/// <returns>The open dataset id and the single-item size (0 when not applicable).</returns>
private Tuple<H5DataSetId, int> load_nd_datasetEx(Blob<T> blob, string strDatasetName, bool bReshape, int nMinDim = 1, int nMaxDim = int.MaxValue, H5GroupId id = null, bool bAllowSingleItems = false)
{
    H5DataSetId ds = null;
    int nSingleItemSize = 0;

    try
    {
        ds = (id != null) ? H5D.open(id, strDatasetName) : H5D.open(m_file, strDatasetName);

        if (ds == null)
            m_log.FAIL("Failed to find the dataset '" + strDatasetName + "'!");

        // Verify that the number of dimensions is in the accepted range.
        H5DataSpaceId dsSpace = H5D.getSpace(ds);
        if (dsSpace == null)
            m_log.FAIL("Failed to get the dataset space!");

        int nDims = H5S.getSimpleExtentNDims(dsSpace);
        m_log.CHECK_GE(nDims, nMinDim, "The dataset dim is out of range!");
        m_log.CHECK_LE(nDims, nMaxDim, "The dataset dim is out of range!");

        long[] rgDims = H5S.getSimpleExtentDims(dsSpace);
        H5S.close(dsSpace); // FIX: the dataspace handle was previously leaked.

        // Verify that the data format is what we expect: float or double.
        H5DataTypeId dsType = H5D.getType(ds);
        if (dsType == null)
            m_log.FAIL("Failed to get the dataset type!");

        H5T.H5TClass dataClass = H5T.getClass(dsType);
        H5T.close(dsType); // FIX: the datatype handle was previously leaked.

        switch (dataClass)
        {
            case H5T.H5TClass.FLOAT:
                m_log.WriteLine("Datatype class: H5T_FLOAT");
                break;

            case H5T.H5TClass.INTEGER:
                m_log.WriteLine("Datatype class: H5T_INTEGER");
                break;

            default:
                m_log.FAIL("Unsupported datatype class: " + dataClass.ToString());
                break;
        }

        List<int> rgBlobDims = new List<int>();
        for (int i = 0; i < nDims; i++)
        {
            rgBlobDims.Add((int)rgDims[i]);
        }

        if (bReshape)
        {
            blob.Reshape(rgBlobDims);
        }
        else if (!Utility.Compare<int>(rgBlobDims, blob.shape()))
        {
            if (!bAllowSingleItems || (rgBlobDims.Count == 1 && rgBlobDims[0] != 1))
            {
                string strSrcShape = Utility.ToString<int>(rgBlobDims);
                m_log.FAIL("Cannot load blob from hdf5; shape mismatch. Source shape = " + strSrcShape + ", target shape = " + blob.shape_string);
            }

            if (rgBlobDims.Count == 1)
                nSingleItemSize = rgBlobDims[0];
        }
    }
    catch (Exception)
    {
        if (ds != null)
        {
            H5D.close(ds);
            ds = null;
        }

        // FIX: rethrow with `throw;` to preserve the original stack trace
        // (was `throw excpt;`, which resets it).
        throw;
    }

    return new Tuple<H5DataSetId, int>(ds, nSingleItemSize);
}
/// <summary>
/// Reads the rank, band/row/column extents and element-type classification of
/// a dataset inside a group.
/// </summary>
/// <param name="fileId">Open HDF5 file id.</param>
/// <param name="datasetName">Name of the dataset within the group.</param>
/// <param name="groupName">Name of the group containing the dataset.</param>
/// <returns>A populated <see cref="DatasetInfo"/>; unset fields default to 1.</returns>
public DatasetInfo GetDatasetInfo(H5FileId fileId, string datasetName, string groupName)
{
    DatasetInfo datasetInfo = new DatasetInfo();
    datasetInfo.band = 1;
    datasetInfo.col = 1;
    datasetInfo.rank = 1;
    datasetInfo.row = 1;

    H5GroupId groupId = H5G.open(fileId, groupName);
    H5DataSetId dataSetId = H5D.open(groupId, datasetName);
    H5DataSpaceId spaceid = H5D.getSpace(dataSetId);
    long[] dims = H5S.getSimpleExtentDims(spaceid);       // extents, e.g. [3,1800,2048]
    datasetInfo.rank = H5S.getSimpleExtentNDims(spaceid); // number of dimensions, e.g. 3

    // Map the extents onto band/row/col depending on dimensionality.
    int dimCount = dims.Length;
    if (dimCount == 2)
    {
        datasetInfo.col = Convert.ToInt32(dims[1]); // width
        datasetInfo.row = Convert.ToInt32(dims[0]); // height
    }
    else if (dimCount == 3)
    {
        datasetInfo.band = Convert.ToInt32(dims[0]); // band count
        datasetInfo.col = Convert.ToInt32(dims[2]);  // width
        datasetInfo.row = Convert.ToInt32(dims[1]);  // height
    }
    else if (dimCount == 1)
    {
        datasetInfo.row = Convert.ToInt32(dims[0]); // height
    }

    // Classify the element type. Switch on the class enum directly instead of
    // round-tripping through its string name.
    H5DataTypeId typeId = H5D.getType(dataSetId);
    H5T.H5TClass dataClass = H5T.getClass(typeId);
    switch (dataClass)
    {
        case H5T.H5TClass.FLOAT:
            datasetInfo.type = DataValueType.FLOAT;
            break;

        case H5T.H5TClass.INTEGER:
            datasetInfo.type = DataValueType.INT;
            break;

        case H5T.H5TClass.COMPOUND:
            datasetInfo.type = DataValueType.COMPOUND;
            // FIX: reuse typeId — the original opened a second type handle
            // (tid0) via H5D.getType and never closed it, leaking the handle.
            datasetInfo.col = H5T.getNMembers(typeId);
            break;

        default:
            datasetInfo.type = DataValueType.EMPTY;
            break;
    }

    H5T.close(typeId);
    H5S.close(spaceid);
    H5D.close(dataSetId);
    H5G.close(groupId);
    return datasetInfo;
}
/// <summary>
/// Reads one band of a legacy dataset, selecting the managed element type from
/// the dataset's HDF5 class, size and signedness.
/// </summary>
/// <param name="dataSetName">Name of the dataset to read.</param>
/// <param name="bandIndex">Index of the band to extract.</param>
/// <param name="bandWidth">Receives the band width in elements.</param>
/// <param name="bandHeight">Receives the band height in elements.</param>
/// <param name="dataType">Receives the detected element type.</param>
/// <param name="retObject">Receives the band data array (null on unsupported types).</param>
private void ReadOldDataSetData(string dataSetName, int bandIndex, out int bandWidth, out int bandHeight, out enumDataType dataType, out object retObject)
{
    bandHeight = bandWidth = 0;
    dataType = enumDataType.UInt16;
    retObject = null;

    H5FileId _h5FileId = null;
    H5DataSpaceId spaceid = null;
    H5DataSetId dataSetId = null;
    H5DataTypeId typeId = null;
    H5DataTypeId newTypeId = null;
    try
    {
        _h5FileId = H5F.open(fileName, H5F.OpenMode.ACC_RDONLY);

        // Open the dataset containing the requested band.
        dataSetId = H5D.open(_h5FileId, dataSetName);
        spaceid = H5D.getSpace(dataSetId);
        long[] dims = H5S.getSimpleExtentDims(spaceid); // extents, e.g. [3,1800,2048]
        int rank = H5S.getSimpleExtentNDims(spaceid);   // number of dimensions, e.g. 3

        int size = 0;
        if (rank == 1)
        {
            bandHeight = bandWidth = 1;
            size = bandWidth * bandHeight * rank;
        }
        else if (rank == 2)
        {
            // NOTE(review): dims[0] is treated as width and dims[1] as height here,
            // the opposite of the usual HDF5 row-major [rows, cols] convention —
            // confirm against the files this reader targets.
            bandWidth = Convert.ToInt32(dims[0]);
            bandHeight = Convert.ToInt32(dims[1]);
            size = bandWidth * bandHeight;
        }
        else if (rank == 3)
        {
            // Sort the extents ascending; the smallest is assumed to be the band
            // count and the two largest the image dimensions.
            List<long> r = dims.ToList<long>();
            r.Sort();
            long[] temp = r.ToArray();
            bandWidth = Convert.ToInt32(temp[1]);
            bandHeight = Convert.ToInt32(temp[2]);
            size = bandWidth * bandHeight * Convert.ToInt32(temp[0]);
        }

        int outSize = bandWidth * bandHeight;

        // Determine the element class, byte size and signedness.
        typeId = H5D.getType(dataSetId);
        H5T.H5TClass typeClass = H5T.getClass(typeId);
        int dataSize = H5T.getSize(typeId);

        switch (typeClass)
        {
            case H5T.H5TClass.INTEGER:
                H5T.Sign sign = H5T.getSign(typeId);
                switch (dataSize)
                {
                    case 1:
                        // NOTE(review): NATIVE_B8 is a bitfield type; NATIVE_UCHAR
                        // may be the intended byte type — preserved as-is.
                        newTypeId = H5T.copy(H5T.H5Type.NATIVE_B8);
                        retObject = ReadArray<byte>(size, dataSetId, newTypeId, bandIndex, outSize);
                        dataType = enumDataType.Byte;
                        break;

                    case 2:
                        switch (sign)
                        {
                            case H5T.Sign.TWOS_COMPLEMENT:
                                newTypeId = H5T.copy(H5T.H5Type.NATIVE_SHORT);
                                retObject = ReadArray<Int16>(size, dataSetId, newTypeId, bandIndex, outSize);
                                dataType = enumDataType.Int16;
                                break;

                            case H5T.Sign.UNSIGNED:
                                newTypeId = H5T.copy(H5T.H5Type.NATIVE_USHORT);
                                retObject = ReadArray<UInt16>(size, dataSetId, newTypeId, bandIndex, outSize);
                                dataType = enumDataType.UInt16;
                                break;
                        }
                        break;

                    case 4:
                        switch (sign)
                        {
                            case H5T.Sign.TWOS_COMPLEMENT:
                                newTypeId = H5T.copy(H5T.H5Type.NATIVE_INT);
                                retObject = ReadArray<Int32>(size, dataSetId, newTypeId, bandIndex, outSize);
                                dataType = enumDataType.Int32;
                                break;

                            case H5T.Sign.UNSIGNED:
                                newTypeId = H5T.copy(H5T.H5Type.NATIVE_UINT);
                                retObject = ReadArray<UInt32>(size, dataSetId, newTypeId, bandIndex, outSize);
                                dataType = enumDataType.UInt32;
                                break;
                        }
                        break;

                    case 8:
                        switch (sign)
                        {
                            case H5T.Sign.TWOS_COMPLEMENT:
                                newTypeId = H5T.copy(H5T.H5Type.NATIVE_LONG);
                                retObject = ReadArray<Int64>(size, dataSetId, newTypeId, bandIndex, outSize);
                                dataType = enumDataType.Int64;
                                break;

                            case H5T.Sign.UNSIGNED:
                                newTypeId = H5T.copy(H5T.H5Type.NATIVE_ULONG);
                                retObject = ReadArray<UInt64>(size, dataSetId, newTypeId, bandIndex, outSize);
                                dataType = enumDataType.UInt64;
                                break;
                        }
                        break;
                }
                break;

            case H5T.H5TClass.FLOAT:
                switch (dataSize)
                {
                    case 4:
                        newTypeId = H5T.copy(H5T.H5Type.NATIVE_FLOAT);
                        retObject = ReadArray<float>(size, dataSetId, newTypeId, bandIndex, outSize);
                        dataType = enumDataType.Float;
                        break;

                    case 8:
                        newTypeId = H5T.copy(H5T.H5Type.NATIVE_DOUBLE);
                        retObject = ReadArray<double>(size, dataSetId, newTypeId, bandIndex, outSize);
                        dataType = enumDataType.Double;
                        break;
                }
                break;
        }
    }
    finally
    {
        // FIX: typeId and newTypeId were previously never closed (handle leaks);
        // all closes are now null-guarded so cleanup of a failed open cannot
        // throw and mask the original error.
        if (newTypeId != null)
            H5T.close(newTypeId);
        if (typeId != null)
            H5T.close(typeId);
        if (spaceid != null)
            H5S.close(spaceid);
        if (dataSetId != null)
            H5D.close(dataSetId);
        if (_h5FileId != null)
            H5F.close(_h5FileId);
    }
}
/// <summary>
/// Exercises basic dataspace manipulation: creating simple dataspaces,
/// verifying rank/dims/maxdims, and confirming that datasets cannot be
/// created, written or read through a dataspace with no extent.
/// Increments <c>nerrors</c> on each failed check.
/// </summary>
static void test_h5s_basic()
{
    try
    {
        int rank; // Logical rank of dataspace
        hssize_t[] dims1 = { SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3 };
        hssize_t[] dims2 = { SPACE2_DIM1, SPACE2_DIM2, SPACE2_DIM3, SPACE2_DIM4 };
        hssize_t[] max2 = { SPACE2_MAX1, SPACE2_MAX2, SPACE2_MAX3, SPACE2_MAX4 };
        hssize_t[] tmax = new hssize_t[4];

        // Output message about test being performed.
        Console.Write("Testing dataspace manipulation");

        // Create a simple dataspace and check its rank.
        H5DataSpaceId sid1 = H5S.create_simple(SPACE1_RANK, dims1);
        rank = H5S.getSimpleExtentNDims(sid1);
        if (rank != SPACE1_RANK)
        {
            Console.WriteLine("\ntest_h5s_basic: Incorrect rank {0}, should be SPACE1_RANK({1})", rank, SPACE1_RANK);
            nerrors++;
        }

        // Check its dims. (The pointless pre-allocation of tdims1 was removed;
        // getSimpleExtentDims returns a fresh array.)
        hssize_t[] tdims1 = H5S.getSimpleExtentDims(sid1);
        int i;
        for (i = 0; i < rank; i++)
        {
            if (tdims1[i] != dims1[i])
            {
                Console.WriteLine("\ntest_h5s_basic: read tdims1[{0}] = {1} differs from dims1[{0}] = {2}", i, tdims1[i], dims1[i]);
                nerrors++;
            }
        }

        // Create another simple dataspace and check its rank, dims, and maxdims.
        H5DataSpaceId sid2 = H5S.create_simple(SPACE2_RANK, dims2, max2);
        rank = H5S.getSimpleExtentNDims(sid2);
        if (rank != SPACE2_RANK)
        {
            // BUG FIX: this message previously reported SPACE1_RANK — a
            // copy-paste error from the sid1 check above.
            Console.WriteLine("\ntest_h5s_basic: Incorrect rank {0}, should be SPACE2_RANK({1})", rank, SPACE2_RANK);
            nerrors++;
        }
        hssize_t[] tdims2 = H5S.getSimpleExtentDims(sid2);
        tmax = H5S.getSimpleExtentMaxDims(sid2);
        for (i = 0; i < rank; i++)
        {
            if (tdims2[i] != dims2[i])
            {
                Console.WriteLine("\ntest_h5s_basic: read tdims2[{0}] = {1} differs from dims2[{0}] = {2}", i, tdims2[i], dims2[i]);
                nerrors++;
            }
        }
        for (i = 0; i < rank; i++)
        {
            if (tmax[i] != max2[i])
            {
                Console.WriteLine("\ntest_h5s_basic: read tmax[{0}] = {1} differs from max2[{0}] = {2}", i, tmax[i], max2[i]);
                nerrors++;
            }
        }

        // Close all dataspaces.
        H5S.close(sid1);
        H5S.close(sid2);

        /*
         * Try writing simple dataspaces without setting their extents.
         */
        // Create the file
        H5FileId fid1 = H5F.create(BASICFILE, H5F.CreateMode.ACC_TRUNC);

        // Create dataspaces for testing.
        dims1[0] = SPACE1_DIM1;
        sid1 = H5S.create(H5S.H5SClass.SIMPLE);
        sid2 = H5S.create_simple(1, dims1, dims1);

        // This dataset's space has no extent; it should not be created
        try
        {
            H5DataSetId dset1 = H5D.create(fid1, BASICDATASET, H5T.H5Type.NATIVE_INT, sid1);

            // should fail, but didn't, print an error message.
            Console.WriteLine("\ntest_h5s_basic: Attempting to create a dataset whose space has no extent.");
            nerrors++;
        }
        catch (H5DcreateException) { } // does nothing, it should fail

        // Create dataset with good dataspace.
        H5DataSetId dataset = H5D.create(fid1, BASICDATASET2, H5T.H5Type.NATIVE_INT, sid2);

        // Try some writes with the bad dataspace (sid1)
        try
        {
            hssize_t nelems = 10; // Number of dataspace elements
            H5D.writeScalar(dataset, new H5DataTypeId(H5T.H5Type.NATIVE_INT), sid1, sid2, new H5PropertyListId(H5P.Template.DEFAULT), ref nelems);

            // should fail, but didn't, print an error message.
            Console.WriteLine("\ntest_h5s_basic: Attempting to write to a dataset with space that has no extent.");
            nerrors++;
        }
        catch (H5DwriteException) { } // does nothing, it should fail

        // Make sure that dataspace reads using the bad dataspace fail
        try
        {
            hssize_t n = 10; // Number of dataspace elements
            H5D.readScalar(dataset, new H5DataTypeId(H5T.H5Type.NATIVE_INT), sid1, sid2, new H5PropertyListId(H5P.Template.DEFAULT), ref n);

            // should fail, but didn't, print an error message.
            Console.WriteLine("\ntest_h5s_basic: Attempting to read a dataset with space that has no extent.");
            nerrors++;
        }
        catch (H5DreadException) { } // does nothing, it should fail

        // Close objects and file.
        H5D.close(dataset);
        H5S.close(sid1);
        H5S.close(sid2);
        H5F.close(fid1);
        Console.WriteLine("\t\t\t\tPASSED");
    } // end of try
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_h5s_basic
/// <summary>
/// Writes a scalar uint dataset, reopens the file, and verifies the stored
/// dataspace rank and the value read back. Increments <c>nerrors</c> on each
/// failed check.
/// </summary>
static void test_h5s_scalar()
{
    try
    {
        hssize_t[] tdims = new hssize_t[3];

        // Output message about test being performed.
        Console.Write("Testing dataspace during writing");

        // Create the file.
        H5FileId fid1 = H5F.create(DATAFILE, H5F.CreateMode.ACC_TRUNC);

        // Create scalar dataspace (null dims => no extent array).
        H5DataSpaceId sid1 = H5S.create_simple(SPACE3_RANK, null);

        // Get the logical rank of dataspace and verify it.
        int rank = H5S.getSimpleExtentNDims(sid1);
        if (rank != SPACE3_RANK)
        {
            Console.WriteLine("\ntest_h5s_scalar: incorrect rank {0}, should be SPACE3_RANK({1})", rank, SPACE3_RANK);
            nerrors++;
        }

        // Create and write the dataset.
        uint space3_data = 65;
        H5DataSetId dataset = H5D.create(fid1, "Dataset1", H5T.H5Type.NATIVE_UINT, sid1);
        H5D.writeScalar(dataset, new H5DataTypeId(H5T.H5Type.NATIVE_UINT), ref space3_data);

        // Close objects and file.
        H5D.close(dataset);
        H5S.close(sid1);
        H5F.close(fid1);

        /* Open the file and verify the dataspace. */

        // Open the file.
        fid1 = H5F.open(DATAFILE, H5F.OpenMode.ACC_RDWR);

        // Open the dataset written above.
        dataset = H5D.open(fid1, "Dataset1");

        // Get dataset's dataspace.
        sid1 = H5D.getSpace(dataset);
        rank = H5S.getSimpleExtentNDims(sid1);
        if (rank != SPACE3_RANK)
        {
            Console.WriteLine("\ntest_h5s_scalar: incorrect rank {0}", rank);
        }
        tdims = H5S.getSimpleExtentDims(sid1);
        //Console.WriteLine("tdims[0] = {0}, tdims[1] = {1}", tdims[0], tdims[1]);
        if (rank != 0)
        {
            Console.WriteLine("\ntest_h5s_scalar: incorrect rank {0}", rank);
        }

        // Read the dataset back and compare against the value written.
        uint rdata = 0;
        H5D.readScalar(dataset, new H5DataTypeId(H5T.H5Type.NATIVE_UINT), ref rdata);
        if (rdata != space3_data)
        {
            Console.WriteLine("\ntest_h5s_scalar: incorrect data {0}, should be {1}", rdata, space3_data);
        }

        // Close objects.
        H5D.close(dataset);
        H5S.close(sid1);
        H5F.close(fid1);
        Console.WriteLine("\t\t\tPASSED");
    } // end of try
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_h5s_scalar_write
} // test_attr_compound_write

/// <summary>
/// Reads back the compound-typed attribute written by
/// <c>test_attr_compound_write</c> and verifies its dataspace, member
/// names/classes/offsets/sizes, and the attribute data itself.
/// Increments <c>nerrors</c> on each failed check.
/// </summary>
static void test_attr_compound_read()
{
    try
    {
        Console.Write("Testing read attribute with compound datatype");

        // Open file.
        H5FileId fileId = H5F.open(COMP_FNAME, H5F.OpenMode.ACC_RDWR);

        // Open the dataset.
        H5DataSetId dsetId = H5D.open(fileId, DSET1_NAME);

        // Verify the correct number of attributes for this dataset.
        H5ObjectInfo oinfo = H5O.getInfo(dsetId);
        if (oinfo.nAttributes != 1)
        {
            Console.WriteLine("\ntest_attr_basic_read: incorrect number of attributes: read {0} - should be {1}", oinfo.nAttributes, 1);
            nerrors++;
        }

        // Open first attribute for the dataset.
        H5AttributeId attrId = H5A.openByIndex(dsetId, ".", H5IndexType.CRT_ORDER, H5IterationOrder.INCREASING, 0);

        // Verify dataspace.
        H5DataSpaceId spaceId = H5A.getSpace(attrId);
        int rank = H5S.getSimpleExtentNDims(spaceId);
        if (rank != ATTR4_RANK)
        {
            Console.WriteLine("\ntest_attr_compound_read: incorrect rank = {0} - should be {1}", rank, ATTR4_RANK);
            nerrors++;
        }
        long[] dims = H5S.getSimpleExtentDims(spaceId);
        if (dims[0] != ATTR4_DIM1)
        {
            Console.WriteLine("\ntest_attr_compound_read: incorrect dim[0] = {0} - should be {1}", dims[0], ATTR4_DIM1);
            nerrors++;
        }
        if (dims[1] != ATTR4_DIM2)
        {
            Console.WriteLine("\ntest_attr_compound_read: incorrect dim[1] = {0} - should be {1}", dims[1], ATTR4_DIM2);
            nerrors++;
        }

        // Close dataspace.
        H5S.close(spaceId);

        // Verify datatype of the attribute: must be a compound with 3 members.
        H5DataTypeId typeId = H5A.getType(attrId);
        H5T.H5TClass t_class = H5T.getClass(typeId);
        if (t_class != H5T.H5TClass.COMPOUND)
        {
            Console.WriteLine("test_compound_dtypes: H5T.getMemberClass and H5T.getClass return different classes for the same type.");
            nerrors++;
        }
        int nfields = H5T.getNMembers(typeId);
        if (nfields != 3)
        {
            Console.WriteLine("test_compound_dtypes: H5T.getMemberClass and H5T.getClass return different classes for the same type.");
            nerrors++;
        }

        // Check name against this list
        string[] memb_names = { ATTR4_FIELDNAME1, ATTR4_FIELDNAME2, ATTR4_FIELDNAME3 };
        int[] memb_offsets = { 0, 1, 5 }; // list of member offsets
        H5DataTypeId mtypeId;             // member type
        H5T.H5TClass memb_cls1;           // member classes retrieved different ways
        string memb_name;                 // member name
        int memb_idx;                     // member index
        int memb_offset, idx;             // member offset, loop index

        // how to handle int versus uint for memb_idx and idx???
        // For each member, check its name, class, index, and size.
        for (idx = 0; idx < nfields; idx++)
        {
            // Get the type of the ith member to test other functions later.
            mtypeId = H5T.getMemberType(typeId, idx);

            // Get the name of the ith member.
            memb_name = H5T.getMemberName(typeId, idx);
            if (memb_name != memb_names[idx])
            {
                Console.WriteLine("test_compound_dtypes: incorrect member name, {0}, for member no {1}", memb_name, idx);
                nerrors++;
            }

            // Get the class of the ith member and then verify the class.
            memb_cls1 = H5T.getMemberClass(typeId, idx);
            if (memb_cls1 != H5T.H5TClass.INTEGER)
            {
                Console.WriteLine("test_compound_dtypes: incorrect class, {0}, for member no {1}", memb_cls1, idx);
                nerrors++;
            }

            // Get member's index back using its name and verify it.
            memb_idx = H5T.getMemberIndex(typeId, memb_name);
            if (memb_idx != idx)
            {
                Console.WriteLine("test_attr_compound_read: H5T.getMemberName and/or H5T.getMemberIndex returned false values.");
                nerrors++;
            }

            // Get member's offset and verify it.
            memb_offset = H5T.getMemberOffset(typeId, idx);
            if (memb_offset != memb_offsets[idx])
            {
                Console.WriteLine("test_attr_compound_read: H5T.getMemberOffset returned incorrect value - {0}, should be {1}", memb_offset, memb_offsets[idx]);
                nerrors++;
            }

            // Get member's size and verify it against the expected per-field type.
            int tsize = H5T.getSize(mtypeId);
            switch (idx)
            {
                case 0:
                    if (tsize != H5T.getSize(H5T.H5Type.STD_U8LE))
                    {
                        Console.WriteLine("test_attr_compound_read: H5T.getSize returned incorrect value.");
                        nerrors++;
                    }
                    break;

                case 1:
                    if (tsize != H5T.getSize(H5T.H5Type.NATIVE_INT))
                    {
                        Console.WriteLine("test_attr_compound_read: H5T.getSize returned incorrect value.");
                        nerrors++;
                    }
                    break;

                case 2:
                    if (tsize != H5T.getSize(H5T.H5Type.STD_I64BE))
                    {
                        Console.WriteLine("test_attr_compound_read: H5T.getSize returned incorrect value.");
                        nerrors++;
                    }
                    break;

                default:
                    Console.WriteLine("test_attr_compound_read: We should only have 3 members.");
                    nerrors++;
                    break;
            } // end switch

            // Close current member type.
            H5T.close(mtypeId);
        } // end for

        // Prepare the check array to verify read data. It should be the same as the attr_data4 array
        // in the previous test function test_attr_compound_write.
        attr4_struct[,] check = new attr4_struct[ATTR4_DIM1, ATTR4_DIM2];

        // Initialize the dataset
        int ii, jj, nn;
        for (ii = nn = 0; ii < ATTR4_DIM1; ii++)
        {
            for (jj = 0; jj < ATTR4_DIM2; jj++)
            {
                check[ii, jj].c = 't';
                check[ii, jj].i = nn++;
                check[ii, jj].l = (ii * 10 + jj * 100) * nn;
            }
        }

        // Read attribute information.
        attr4_struct[,] read_data4 = new attr4_struct[ATTR4_DIM1, ATTR4_DIM2];
        H5A.read(attrId, typeId, new H5Array<attr4_struct>(read_data4));

        // Verify values read in.
        for (ii = 0; ii < ATTR4_DIM1; ii++)
        {
            for (jj = 0; jj < ATTR4_DIM2; jj++)
            {
                if ((check[ii, jj].c != read_data4[ii, jj].c) || (check[ii, jj].i != read_data4[ii, jj].i) || (check[ii, jj].l != read_data4[ii, jj].l))
                {
                    Console.WriteLine("test_attr_compound_read: Incorrect read data: {0}, should be {1}", read_data4[ii, jj], check[ii, jj]);
                    nerrors++;
                }
            }
        }

        // Close resources.
        H5T.close(typeId);
        H5A.close(attrId);
        H5D.close(dsetId);
        H5F.close(fileId);
        Console.WriteLine("\t\tPASSED");
    }
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_attr_compound_read
/// <summary>
/// Reads an attribute from the file root and wraps its value — FLOAT, INTEGER
/// or STRING class — in an <see cref="AttributeValue"/>.
/// </summary>
/// <param name="fileId">Open HDF5 file id.</param>
/// <param name="attributeName">Name of the attribute to read.</param>
/// <returns>The attribute's value, rank and value-type classification; the
/// value stays null for unsupported classes or ranks.</returns>
public AttributeValue GetAttribute(H5FileId fileId, string attributeName)
{
    H5AttributeId attributeId = H5A.openName(fileId, attributeName); // attribute handle by name
    H5DataTypeId attributeType = H5A.getType(attributeId);           // attribute's data type
    int size = H5T.getSize(attributeType);
    H5T.H5TClass typeClass = H5T.getClass(attributeType);
    H5DataSpaceId spaceId = H5A.getSpace(attributeId);
    long[] dims = H5S.getSimpleExtentDims(spaceId);
    int rank = H5S.getSimpleExtentNDims(spaceId);

    H5T.H5Type h5type;
    AttributeValue atrributeData = new AttributeValue();
    atrributeData.dataValue = null;
    atrributeData.valueType = DataValueType.EMPTY;
    atrributeData.rank = rank;

    switch (typeClass)
    {
        case H5T.H5TClass.FLOAT:
            h5type = H5T.H5Type.NATIVE_FLOAT;
            if (rank == 1)
            {
                float[] floatData = new float[dims[0]];
                H5A.read(attributeId, new H5DataTypeId(h5type), new H5Array<float>(floatData));
                atrributeData.dataValue = floatData;
            }
            else if (rank == 2)
            {
                float[,] floatData = new float[dims[0], dims[1]];
                H5A.read(attributeId, new H5DataTypeId(h5type), new H5Array<float>(floatData));
                atrributeData.dataValue = floatData;
            }
            atrributeData.valueType = DataValueType.FLOAT;
            break;

        case H5T.H5TClass.INTEGER:
            h5type = H5T.H5Type.NATIVE_INT;
            if (rank == 1)
            {
                int[] intData = new int[dims[0]];
                H5A.read(attributeId, new H5DataTypeId(h5type), new H5Array<int>(intData));
                atrributeData.dataValue = intData;
            }
            else if (rank == 2)
            {
                int[,] intData = new int[dims[0], dims[1]];
                H5A.read(attributeId, new H5DataTypeId(h5type), new H5Array<int>(intData));
                atrributeData.dataValue = intData;
            }
            atrributeData.valueType = DataValueType.INT;
            break;

        case H5T.H5TClass.STRING:
            if (rank == 0)
            {
                // FIX: read exactly `size` bytes (the attribute type's actual size)
                // instead of a hard-coded 255-byte buffer, which truncated longer
                // strings and over-allocated for shorter ones.
                string[] stringData = new string[1];
                byte[] raw = new byte[size];
                H5A.read(attributeId, attributeType, new H5Array<byte>(raw));
                stringData[0] = Encoding.Default.GetString(raw).Trim('\0');
                atrributeData.dataValue = stringData;
            }
            else if (rank == 1)
            {
                // NOTE(review): as in the original code, rank-1/2 string attributes
                // are never actually read — the caller receives an array of nulls.
                // Confirm whether variable-length string reading is intentionally
                // unimplemented here.
                string[] stringData = new string[dims[0]];
                atrributeData.dataValue = stringData;
            }
            else if (rank == 2)
            {
                string[,] stringData = new string[dims[0], dims[1]];
                atrributeData.dataValue = stringData;
            }
            atrributeData.valueType = DataValueType.STRING;
            break;

        default:
            break;
    }

    H5T.close(attributeType);
    H5S.close(spaceId);
    H5A.close(attributeId);
    return atrributeData;
}
/// <summary>
/// Reads an attribute's value out of the file's XML dump (hdf5:Attribute
/// elements) rather than via the HDF5 read API, using the attribute's HDF5
/// type only to classify the value.
/// </summary>
/// <param name="attribXml">XML dump of the file's attributes.</param>
/// <param name="attributeName">Name of the attribute to locate.</param>
/// <param name="fileId">Open HDF5 file id (used for type/rank lookup).</param>
/// <returns>The attribute's text value ("NULL" when not found), rank and
/// value-type classification.</returns>
public AttributeValue GetAttribute(string attribXml, string attributeName, H5FileId fileId)
{
    H5AttributeId attributeId = H5A.openName(fileId, attributeName); // attribute handle by name
    H5DataTypeId attributeType = H5A.getType(attributeId);           // attribute's data type
    H5T.H5TClass typeClass = H5T.getClass(attributeType);
    H5DataSpaceId spaceId = H5A.getSpace(attributeId);
    int rank = H5S.getSimpleExtentNDims(spaceId);

    AttributeValue atrributeData = new AttributeValue();
    string[] stringDatatmp = new string[1];
    stringDatatmp[0] = "NULL";

    XmlReaderSettings settings = new XmlReaderSettings();
    settings.DtdProcessing = DtdProcessing.Ignore;
    settings.ValidationType = ValidationType.None;
    settings.ValidationEventHandler += settings_ValidationEventHandler;
    settings.CheckCharacters = false;

    XmlDocument xml = new XmlDocument();
    // BUG FIX: `settings` was built but never passed to XmlReader.Create, so the
    // DTD/validation relaxations above were silently ignored. The readers are
    // now also disposed deterministically via using blocks.
    using (StringReader xmlSr = ChangeXmlInGBCode(attribXml))
    using (XmlReader reader = XmlReader.Create(xmlSr, settings))
    {
        xml.Load(reader);
    }

    // Take the text of the (last) hdf5:Attribute element mentioning the name.
    XmlNodeList node = xml.GetElementsByTagName("hdf5:Attribute");
    foreach (XmlNode child in node)
    {
        if (child.OuterXml.Contains(attributeName))
        {
            stringDatatmp[0] = child.InnerText;
        }
    }
    stringDatatmp[0] = stringDatatmp[0].Replace("\r\n", "");
    stringDatatmp[0] = stringDatatmp[0].Trim();
    atrributeData.dataValue = stringDatatmp;

    // Classify the value by the attribute's HDF5 type class.
    switch (typeClass)
    {
        case H5T.H5TClass.FLOAT:
            atrributeData.valueType = DataValueType.FLOAT;
            break;

        case H5T.H5TClass.INTEGER:
            atrributeData.valueType = DataValueType.INT;
            break;

        case H5T.H5TClass.STRING:
            atrributeData.valueType = DataValueType.STRING;
            break;

        default:
            break;
    }
    atrributeData.rank = rank;

    H5T.close(attributeType);
    H5S.close(spaceId);
    H5A.close(attributeId);
    return atrributeData;
}