/// <summary>
/// Recursively collects the full path names of every dataset reachable from
/// <paramref name="groupName"/> into the <c>_datasetNames</c> list.
/// </summary>
/// <param name="groupName">
/// Group path to scan, relative to the file root; must end with "/"
/// (the root group itself is "/"), so dataset names can be appended directly.
/// </param>
private void GetGroupDatasetNames(string groupName)
{
    H5GroupId h5GroupId = H5G.open(_h5FileId, groupName);
    try
    {
        long dscount = H5G.getNumObjects(h5GroupId);
        for (int i = 0; i < dscount; i++)
        {
            string objname = H5G.getObjectNameByIndex(h5GroupId, (ulong)i);
            ObjectInfo objInfo = H5G.getObjectInfo(h5GroupId, objname, false);
            switch (objInfo.objectType)
            {
                case H5GType.DATASET:
                    // Fixed: removed a redundant re-check of objInfo.objectType
                    // that was always true inside this case.
                    // The root group is already "/", so no prefix is needed there.
                    if (groupName == "/")
                    {
                        _datasetNames.Add(objname);
                    }
                    else
                    {
                        _datasetNames.Add(groupName + objname);
                    }
                    break;
                case H5GType.GROUP:
                    // Recurse with a trailing "/" so the next level can append
                    // dataset names without further path handling.
                    if (groupName == "/")
                    {
                        GetGroupDatasetNames(objname + "/");
                    }
                    else
                    {
                        GetGroupDatasetNames(groupName + objname + "/");
                    }
                    break;
                case H5GType.LINK:
                case H5GType.TYPE:
                default:
                    // Links and named datatypes are not datasets; skip them.
                    break;
            }
        }
    }
    finally
    {
        // Always release the group handle, even if enumeration throws.
        if (h5GroupId != null)
        {
            H5G.close(h5GroupId);
        }
    }
}
/// <summary>
/// Populates <paramref name="hdf5Obj"/> with the contents of the group
/// identified by <paramref name="id"/>: child groups are recursed into and
/// appended to <c>SubGroups</c>; datasets are described via
/// <c>createHD5DataObject</c> and appended to <c>Datasets</c>.
/// </summary>
/// <param name="id">Open group handle; stored in <c>hdf5Obj.GroupID</c>.</param>
/// <param name="name">Path of this group ("/" for the root group).</param>
/// <param name="hdf5Obj">Tree node to fill in.</param>
private void createHD5GroupObject(H5GroupId id, String name, ref HDF5GroupObject hdf5Obj)
{
    hdf5Obj.GroupID = id;
    hdf5Obj.GroupName = name;
    long num = H5G.getNumObjects(hdf5Obj.GroupID);
    // the items under the group
    for (ulong i = 0; i < (ulong)num; i++)
    {
        String objName = H5G.getObjectNameByIndex(hdf5Obj.GroupID, i);
        // get the link info.
        //linfo = H5L.getInfo(groupID, objName);
        ObjectInfo objInfo = H5G.getObjectInfo(hdf5Obj.GroupID, objName, true);
        if (objInfo.objectType == H5GType.GROUP)
        {
            // NOTE(review): for non-root parents this yields a doubled leading
            // slash ("//a/b") because `name` already starts with "/" — HDF5
            // appears to tolerate this, but confirm against the library docs.
            String pathName = "/" + name + "/" + objName;
            if (name.CompareTo("/") == 0)
            {
                pathName = "/" + objName;
            }
            H5GroupId groupID = null;
            try
            {
                groupID = H5G.open(hdf5Obj.GroupID, pathName);
                HDF5GroupObject subObj = new HDF5GroupObject();
                subObj.ShortName = objName;
                // Ownership of groupID transfers to subObj.GroupID inside the
                // recursive call, so it is deliberately NOT closed on success;
                // it is only closed here when recursion fails.
                createHD5GroupObject(groupID, pathName, ref subObj);
                hdf5Obj.SubGroups.Add(subObj);
            }
            catch (Exception e)
            {
                Console.WriteLine(e.Message);
                if (groupID != null)
                {
                    H5G.close(groupID);
                }
            }
        }
        else if (objInfo.objectType == H5GType.DATASET)
        {
            // NOTE(review): unlike the GROUP branch there is no special case
            // for name == "/", so root-level datasets get a "//name" path —
            // verify this is intended.
            String pathName = "/" + name + "/" + objName;
            HD5DataSetObject dataObject = new HD5DataSetObject();
            dataObject.ShortName = objName;
            createHD5DataObject(hdf5Obj.GroupID, pathName, ref dataObject);
            hdf5Obj.Datasets.Add(dataObject);
        }
    }
}
/// <summary>
/// Reads the bias vector of layer <paramref name="name"/> from the model
/// weights group. When the bias dataset does not exist, an all-zero vector
/// of length <paramref name="units"/> is returned instead.
/// </summary>
/// <param name="name">Layer name; must not be null.</param>
/// <param name="units">Number of bias values expected.</param>
/// <returns>The stored biases, or zeros if the dataset is absent.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="name"/> is null.</exception>
public float[] GetBiases(string name, int units)
{
    if (name is null)
    {
        throw new ArgumentNullException(nameof(name));
    }

    string biasPath = $"{name}/{name}/bias:0";

    // Probe for the dataset; getObjectInfo throws when the path is missing,
    // which we treat as "no stored biases" rather than an error.
    bool datasetExists;
    try
    {
        datasetExists = H5G.getObjectInfo(H5GroupIdModelWeights, biasPath, true) != null;
    }
    catch
    {
        datasetExists = false;
    }

    if (!datasetExists)
    {
        return new float[units];
    }

    NeuralNetworkAPI.GetDataSet(H5GroupIdModelWeights, biasPath, out float[] biases, units);
    return biases;
}
/// <summary>
/// Returns the names of the immediate child groups of
/// <paramref name="_groupId"/>. Datasets, links and named datatypes under
/// the group are skipped.
/// </summary>
/// <param name="_groupId">Open handle of the group to enumerate.</param>
/// <returns>List of child group names (possibly empty, never null).</returns>
public static List<string> GetChildGroupNames(H5GroupId _groupId)
{
    List<string> names = new List<string>();
    // Fixed: removed a stray empty statement (";;") after this call.
    ulong count = (ulong)H5G.getNumObjects(_groupId);
    for (ulong i = 0; i < count; i++)
    {
        string name = H5G.getObjectNameByIndex(_groupId, i);
        ObjectInfo info = H5G.getObjectInfo(_groupId, name, false);
        if (info.objectType == H5GType.GROUP)
        {
            names.Add(name);
        }
    }
    return names;
}
/// <summary>
/// Basic group test: creates a file containing a group with three datasets,
/// verifies the object info of one dataset, then checks the object counts of
/// the root group and of "/fromRoot". Failures are written to the console and
/// counted in <c>nerrors</c>.
/// </summary>
static void test_group_basics()
{
    try
    {
        Console.Write("Testing group basics");

        // Create the file.
        H5FileId fileId = H5F.create(FILE_NAME, H5F.CreateMode.ACC_TRUNC);

        // Create a group.
        H5GroupId groupId = H5G.create(fileId, "/fromRoot");

        // Create a dataspace for common use.
        hssize_t[] dims = { 1000, 20 };
        H5DataSpaceId dspace = H5S.create_simple(RANK, dims);

        // Create a dataset using file as location with absolute path name.
        H5DataSetId dset1Id = H5D.create(fileId, "/fromRoot/intArray", H5T.H5Type.NATIVE_INT, dspace);

        // Create a dataset using group as location with absolute path name.
        H5DataSetId dset2Id = H5D.create(groupId, "/fromRoot/shortArray", H5T.H5Type.NATIVE_SHORT, dspace);

        // Create a dataset using group as location with relative path name.
        H5DataSetId dset3Id = H5D.create(groupId, "notfromRoot", H5T.H5Type.NATIVE_UCHAR, dspace);

        ObjectInfo info = H5G.getObjectInfo(fileId, "/fromRoot/intArray", true);
        if (info.nHardLinks != 1)
        {
            // Fixed: message previously labeled the actual value as "should be",
            // and the failure was not counted in nerrors.
            Console.WriteLine("\ntest_group_basics: incorrect number of hardlinks = {0} for /fromRoot/intArray, expected 1", info.nHardLinks);
            nerrors++;
        }
        if (info.objectType != H5GType.DATASET)
        {
            Console.WriteLine("\ntest_group_basics: Object should be a dataset");
            nerrors++;  // fixed: this failure was previously not counted
        }

        // Close objects and files.
        H5D.close(dset1Id);
        H5D.close(dset2Id);
        H5D.close(dset3Id);
        H5S.close(dspace);
        H5G.close(groupId);

        // Check various number of objects.
        H5GroupId rootId = H5G.open(fileId, "/");
        hssize_t num_objs = H5G.getNumObjects(rootId);
        if (num_objs != 1)
        {
            Console.WriteLine("\ntest_group_basics: incorrect num_objs = {0} for root group\n", num_objs);
            nerrors++;
        }

        groupId = H5G.open(fileId, "fromRoot");
        num_objs = H5G.getNumObjects(groupId);
        if (num_objs != 3)
        {
            Console.WriteLine("\ntest_group_basics: incorrect num_objs = {0} for group \"fromRoot\"\n", num_objs);
            nerrors++;
        }

        H5G.close(rootId);
        H5G.close(groupId);
        H5F.close(fileId);

        if (nerrors == 0)
        {
            Console.WriteLine("\t\t\t\t\tPASSED");
        }
    }
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_group_basics
/// <summary>
/// End-to-end demonstration of the HDF5DotNet API: creates a file with a
/// group hierarchy, writes and reads back a 1-D integer dataset, iterates
/// the group via a callback, and reopens the file read-only.
/// </summary>
public static void runTest()
{
    try
    {
        // We will write and read an int array of this length.
        const int DATA_ARRAY_LENGTH = 12;

        // Rank is the number of dimensions of the data array.
        const int RANK = 1;

        // Create an HDF5 file.
        // The enumeration type H5F.CreateMode provides only the legal
        // creation modes.  Missing H5Fcreate parameters are provided
        // with default values.
        H5FileId fileId = H5F.create("myCSharp.h5", H5F.CreateMode.ACC_TRUNC);

        // Create a HDF5 group.
        H5GroupId groupId = H5G.create(fileId, "/cSharpGroup");
        H5GroupId subGroup = H5G.create(groupId, "mySubGroup");

        // Demonstrate getObjectInfo
        ObjectInfo info = H5G.getObjectInfo(fileId, "/cSharpGroup", true);
        Console.WriteLine("cSharpGroup header size is {0}", info.headerSize);
        Console.WriteLine("cSharpGroup nlinks is {0}", info.nHardLinks);
        Console.WriteLine("cSharpGroup fileno is {0} {1}", info.fileNumber[0], info.fileNumber[1]);
        Console.WriteLine("cSharpGroup objno is {0} {1}", info.objectNumber[0], info.objectNumber[1]);
        Console.WriteLine("cSharpGroup type is {0}", info.objectType);

        H5G.close(subGroup);

        // Prepare to create a data space for writing a 1-dimensional
        // signed integer array.
        long[] dims = new long[RANK];
        dims[0] = DATA_ARRAY_LENGTH;

        // Put descending ramp data in an array so that we can
        // write it to the file.
        int[] dset_data = new int[DATA_ARRAY_LENGTH];
        for (int i = 0; i < DATA_ARRAY_LENGTH; i++)
        {
            dset_data[i] = DATA_ARRAY_LENGTH - i;
        }

        // Create a data space to accommodate our 1-dimensional array.
        // The resulting H5DataSpaceId will be used to create the data set.
        H5DataSpaceId spaceId = H5S.create_simple(RANK, dims);

        // Create a copy of a standard data type.  We will use the resulting
        // H5DataTypeId to create the data set.  We could have used the
        // H5T.H5Type value directly in the call to H5D.create, but this
        // demonstrates the use of H5T.copy and of a H5DataTypeId in H5D.create.
        H5DataTypeId typeId = H5T.copy(H5T.H5Type.NATIVE_INT);

        // Find the size of the type.
        int typeSize = H5T.getSize(typeId);
        Console.WriteLine("typeSize is {0}", typeSize);

        // Set the order to big endian.
        H5T.setOrder(typeId, H5T.Order.BE);
        // Set the order to little endian.
        H5T.setOrder(typeId, H5T.Order.LE);

        // Create the data set.
        H5DataSetId dataSetId = H5D.create(fileId, "/csharpExample", typeId, spaceId);

        // Write the integer data to the data set.
        H5D.write(dataSetId, new H5DataTypeId(H5T.H5Type.NATIVE_INT), new H5Array<int>(dset_data));

        // If we were writing a single value it might look like this.
        //    int singleValue = 100;
        //    H5D.writeScalar(dataSetId, new H5DataTypeId(H5T.H5Type.NATIVE_INT),
        //                    ref singleValue);

        // Create an integer array to receive the read data.
        int[] readDataBack = new int[DATA_ARRAY_LENGTH];

        // Read the integer data back from the data set.
        H5D.read(dataSetId, new H5DataTypeId(H5T.H5Type.NATIVE_INT), new H5Array<int>(readDataBack));

        // Echo the data.
        for (int i = 0; i < DATA_ARRAY_LENGTH; i++)
        {
            Console.WriteLine(readDataBack[i]);
        }

        // Close all the open resources.
        H5D.close(dataSetId);

        // Reopen and close the data sets to show that we can.
        dataSetId = H5D.open(fileId, "/csharpExample");
        H5D.close(dataSetId);
        dataSetId = H5D.open(groupId, "/csharpExample");
        H5D.close(dataSetId);
        H5S.close(spaceId);
        H5T.close(typeId);
        H5G.close(groupId);

        // Iterate over the members of "/cSharpGroup", passing the extra
        // data value 9 to the callback and starting at index 0.
        H5GIterateCallback myDelegate;
        myDelegate = myFunction;
        int x = 9;
        int start = 0;
        int index = H5G.iterate(fileId, "/cSharpGroup", myDelegate, x, ref start);

        // Reopen the group id to show that we can.
        groupId = H5G.open(fileId, "/cSharpGroup");
        H5G.close(groupId);
        H5F.close(fileId);

        // Reopen and reclose the file.
        H5FileId openId = H5F.open("myCSharp.h5", H5F.OpenMode.ACC_RDONLY);
        H5F.close(openId);
    }
    // Fixed: this comment was garbled in the original — part of its text
    // ("Because each call ...") had leaked outside the comment delimiters.
    // This catches all the HDF exception classes.  Because each call
    // generates a unique exception, different exceptions can be handled
    // separately.  For example, to catch open errors we could have used
    // catch (H5FopenException openException).
    catch (HDFException e)
    {
        Console.WriteLine(e.Message);
    }
    Console.WriteLine("Processing complete!");
    Console.ReadLine();
}