/// <summary>
/// Reads the Cartesian mesh lines ("x", "y", "z") from an HDF5 result file
/// and converts them from meters to millimeters.
/// </summary>
/// <param name="fileName">Path of the HDF5 file to open read-only.</param>
/// <returns>Three mesh-line arrays, ordered x, y, z, in mm.</returns>
public static double[][] ReadMesh(string fileName)
{
    double[][] meshes = new double[3][];
    string[] meshNames = { "x", "y", "z" };

    H5FileId fileId = H5F.open(fileName, H5F.OpenMode.ACC_RDONLY);
    // Reference type used only for the sanity check below; created once and
    // closed at the end (the original copied it on every iteration and leaked
    // each copy).
    H5DataTypeId floatTypeId = H5T.copy(H5T.H5Type.NATIVE_FLOAT);
    for (int i = 0; i < meshNames.Length; i++)
    {
        H5DataSetId dsId = H5D.open(fileId, "/Mesh/" + meshNames[i]);
        H5DataTypeId dtId = H5D.getType(dsId);
        if (!H5T.equal(dtId, floatTypeId))
        {
            // Non-fatal: report and read anyway, matching previous behavior.
            Console.WriteLine("Error: Invalid dataset type, expected {0}", H5T.H5Type.NATIVE_FLOAT);
        }
        // Element count = total storage bytes / bytes per element.
        float[] mesh = new float[H5D.getStorageSize(dsId) / H5T.getSize(dtId)];
        H5D.read(dsId, dtId, new H5Array<float>(mesh));
        meshes[i] = mesh.Select(x => (double)x * 1000.0).ToArray(); // m -> mm
        H5D.close(dsId);
        H5T.close(dtId);
    }
    H5T.close(floatTypeId);
    H5F.close(fileId);
    return(meshes);
}
/// <summary>
/// Reads a string-valued HDF5 attribute, handling both variable-length and
/// fixed-length string types.
/// </summary>
/// <param name="_attributeId">Open attribute to read; not closed here — the
/// caller owns it.</param>
/// <returns>The attribute value as a string.</returns>
public static string AttributeAsString(H5AttributeId _attributeId)
{
    H5DataTypeId dataTypeId = H5A.getType(_attributeId);
    try
    {
        if (H5T.isVariableString(dataTypeId))
        {
            // Variable length string attribute.
            // NOTE: This section only works if the array length is 1.
            VariableLengthString[] value = new VariableLengthString[1];
            H5A.read<VariableLengthString>(_attributeId, dataTypeId, new H5Array<VariableLengthString>(value));
            return(value[0].ToString());
        }
        else
        {
            // Fixed length string attribute.
            // Make length smaller so the null termination character is not read.
            int length = (int)H5T.getSize(dataTypeId) - 1;
            byte[] valueBytes = new byte[length];
            H5A.read<byte>(_attributeId, dataTypeId, new H5Array<byte>(valueBytes));
            string value = System.Text.ASCIIEncoding.ASCII.GetString(valueBytes);
            return(value);
        }
    }
    finally
    {
        // BUGFIX: the original never closed this type handle (leak).
        H5T.close(dataTypeId);
    }
}
/// <summary>
/// Opens the named attribute on an HDF5 object and returns its value as a
/// string, handling variable-length and fixed-length string types.
/// </summary>
/// <param name="objectWithAttributes">Object (file/group/dataset) that owns the attribute.</param>
/// <param name="name">Attribute name.</param>
/// <returns>The attribute value; fixed-length values may include trailing NUL padding.</returns>
/// <exception cref="ArgumentNullException">Either argument is null.</exception>
public static string GetAttributeValue(H5ObjectWithAttributes objectWithAttributes, string name)
{
    if (objectWithAttributes is null)
    {
        throw new ArgumentNullException(nameof(objectWithAttributes));
    }
    if (name is null)
    {
        throw new ArgumentNullException(nameof(name));
    }

    H5AttributeId h5AttributeId = H5A.open(objectWithAttributes, name);
    H5DataTypeId h5DataTypeId = H5A.getType(h5AttributeId);
    // BUGFIX: cleanup moved to finally so the handles are released even when
    // a read throws (the original leaked both on any exception).
    try
    {
        if (H5T.isVariableString(h5DataTypeId))
        {
            // Variable-length string: read a single element.
            VariableLengthString[] variableLengthStrings = new VariableLengthString[1];
            H5A.read(h5AttributeId, h5DataTypeId, new H5Array<VariableLengthString>(variableLengthStrings));
            return(variableLengthStrings[0].ToString());
        }

        // Fixed-length string: read the raw bytes of the declared type size.
        byte[] bytes = new byte[H5T.getSize(h5DataTypeId)];
        H5A.read(h5AttributeId, h5DataTypeId, new H5Array<byte>(bytes));
        return(Encoding.ASCII.GetString(bytes));
    }
    finally
    {
        H5T.close(h5DataTypeId);
        H5A.close(h5AttributeId);
    }
}
/// <summary>
/// Reads an attribute's raw value into an array of <typeparamref name="T"/>.
/// </summary>
/// <remarks>
/// NOTE(review): the array is sized from H5T.getSize, which is the element
/// size in BYTES, not the element count — callers may rely on the resulting
/// (possibly over-sized) array, so the sizing is preserved. TODO: confirm
/// against the attribute's dataspace extent.
/// </remarks>
protected T[] getAttribute<T>(H5AttributeId aid)
{
    H5DataTypeId sv = H5A.getType(aid);
    try
    {
        int size = H5T.getSize(sv);
        var attValue = new T[size];
        H5A.read<T>(aid, sv, new H5Array<T>(attValue));
        return(attValue);
    }
    finally
    {
        // BUGFIX: the original never closed this type handle (leak).
        H5T.close(sv);
    }
}
// Reads the dataset's raw bytes into _row_data as a [rowCount, rowSize]
// byte matrix, where rowSize is the dataset type's size in bytes.
void LoadRowData()
{
    var dtype = H5D.getType(Id);
    try
    {
        var size = H5T.getSize(dtype);
        _row_data = new byte[FindNumberOfRows(), size];
        H5D.read(Id, dtype, new H5Array<byte>(_row_data)); //TODO: Does this work with more than one row? Dunno. Probs not.
    }
    finally
    {
        // BUGFIX: the original never closed this type handle (leak).
        H5T.close(dtype);
    }
}
// Opens the dataset at pathName under the given group, inspects its type
// (class, byte size, and sign for integers) to resolve a matching .NET Type,
// reads the data, and populates dataObject. Exceptions are reported to the
// console and swallowed (best-effort); HDF5 handles are released in finally.
// NOTE(review): dataObject.DatasetID keeps the dataset handle that the
// finally block closes — confirm no caller uses DatasetID after this returns.
private void createHD5DataObject(H5GroupId h5GroupId, string pathName, ref HD5DataSetObject dataObject)
{
    H5DataSetId datasetid = null;
    H5DataSpaceId spaceid = null;
    H5DataTypeId dataTypeid = null;
    try
    {
        dataObject.GroupId = h5GroupId;
        datasetid = H5D.open(h5GroupId, pathName);
        dataObject.DatasetID = datasetid;
        dataObject.DatasetName = pathName;
        spaceid = H5D.getSpace(datasetid);
        var dims = H5S.getSimpleExtentDims(spaceid);
        dataTypeid = H5D.getType(datasetid);
        dataObject.Dim = dims.Length;
        HDF5DotNet.H5T.H5TClass classType = H5T.getClass(dataTypeid);
        int size = H5T.getSize(dataTypeid);
        // Sign is only meaningful for integer types; default to signed.
        H5T.Sign sign = H5T.Sign.TWOS_COMPLEMENT;
        if (classType == H5T.H5TClass.INTEGER)
        {
            sign = H5T.getSign(dataTypeid);
        }
        //var rank = H5S.getSimpleExtentNDims(space);
        //var statu = H5S.getSimpleExtentDims(space);
        // NOTE(review): bString is computed but never used — looks like
        // variable-length strings are not specially handled here; confirm.
        Boolean bString = H5T.isVariableString(dataTypeid);
        //String name = H5T.getMemberName(dataType, 0);
        // var type2 = H5T.getNativeType(dataType, H5T.Direction.DEFAULT);
        Type type = getTypeof(classType, size, sign);
        dataObject.DataType = type;
        dataObject.Data = readData(dataObject);
    }
    catch (Exception e)
    {
        // Best-effort: log and continue with a partially populated dataObject.
        Console.WriteLine(e.Message);
    }
    finally{
        if (datasetid != null)
        {
            H5D.close(datasetid);
        }
        if (spaceid != null)
        {
            H5S.close(spaceid);
        }
        if (dataTypeid != null)
        {
            H5T.close(dataTypeid);
        }
    }
}
/// <summary>
/// Writes a ramp of mData records to a freshly created HDF5 file.
/// </summary>
private void WriteData()
{
    Console.WriteLine("Creating H5 file {0}...", filename);

    // Rank is the number of dimensions of the data array.
    const int RANK = 1;

    // Create an HDF5 file, truncating any existing file of the same name.
    H5FileId fileId = H5F.create(filename, H5F.CreateMode.ACC_TRUNC);

    // Dataspace dimensions: one dimension of 'count' elements.
    long[] dims = new long[RANK];
    dims[0] = count;

    // Fill the buffer we are about to write.
    mData[] dset_data = new mData[count];
    for (int i = 0; i < count; i++)
    {
        dset_data[i] = new mData(i + 80, i + 40, i + 1);
    }

    // Create a data space to accommodate our 1-dimensional array.
    H5DataSpaceId spaceId = H5S.create_simple(RANK, dims);

    // Create a copy of a standard data type for the dataset.
    // NOTE(review): STD_REF_OBJ is an object-reference type, not a compound
    // type matching mData — confirm this writes the intended bytes.
    H5DataTypeId typeId = H5T.copy(H5T.H5Type.STD_REF_OBJ);

    // Find the size of the type.
    int typeSize = H5T.getSize(typeId);

    // Create the data set and write the records.
    H5DataSetId dataSetId = H5D.create(fileId, dataSetName, typeId, spaceId);
    H5D.write(dataSetId, new H5DataTypeId(H5T.H5Type.STD_REF_OBJ), new H5Array<mData>(dset_data));

    // BUGFIX: close the dataspace and the copied datatype as well (the
    // original only closed the dataset and file, leaking the other handles).
    H5D.close(dataSetId);
    H5S.close(spaceId);
    H5T.close(typeId);
    H5F.close(fileId);

    Console.WriteLine("H5 file {0} created successfully!", filename);
}
} // end of test_classes

// Verifies the offset and size of a named (committed) integer datatype stored
// in the pre-generated file gen_types.h5; increments nerrors on any mismatch.
static void test_integer_dtype()
{
    const string GEN_FILE_NAME = "gen_types.h5";
    const string INT_TYPE_NAME = "new integer type 1";
    try
    {
        Console.Write("Testing getting some integer information");
        H5FileId genfileId = H5F.open(GEN_FILE_NAME, H5F.OpenMode.ACC_RDONLY);

        // Open the datatype to check.
        H5DataTypeId dtypeId = H5T.open(genfileId, INT_TYPE_NAME);

        // The committed type is expected to have bit offset 0 and size 3 bytes.
        int offset = H5T.getOffset(dtypeId);
        if (offset != 0)
        {
            Console.WriteLine("Incorrect offset: {0}, should be {1}", offset, 0);
            nerrors++;
        }
        int size = H5T.getSize(dtypeId);
        if (size != 3)
        {
            Console.WriteLine("Incorrect size: {0}, should be {1}", size, 3);
            nerrors++;
        }

        // Close datatype and file.
        H5T.close(dtypeId);
        H5F.close(genfileId);
        Console.WriteLine("\t\tPASSED");
    } // end of try block
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_integer_dtype
/// <summary>
/// Writes the given list of mData records to a freshly created HDF5 file.
/// </summary>
/// <param name="data">Records to write; 'count' elements are expected.</param>
private void WriteData(List<mData> data)
{
    Console.WriteLine("Creating H5 file {0}...", filename);

    const int RANK = 1;
    long[] dims = new long[RANK];
    dims[0] = count;

    H5FileId fileId = H5F.create(filename, H5F.CreateMode.ACC_TRUNC);
    H5DataSpaceId spaceId = H5S.create_simple(RANK, dims);

    // NOTE(review): STD_REF_OBJ is an object-reference type, not a compound
    // type matching mData — confirm this writes the intended bytes.
    H5DataTypeId typeId = H5T.copy(H5T.H5Type.STD_REF_OBJ);
    int typeSize = H5T.getSize(typeId);

    H5DataSetId dataSetId = H5D.create(fileId, dataSetName, typeId, spaceId);
    H5D.write(dataSetId, new H5DataTypeId(H5T.H5Type.STD_REF_OBJ), new H5Array<mData>(data.ToArray()));

    // BUGFIX: close the dataspace and the copied datatype as well (the
    // original leaked both).
    H5D.close(dataSetId);
    H5S.close(spaceId);
    H5T.close(typeId);
    H5F.close(fileId);

    Console.WriteLine("H5 file {0} created successfully!", filename);
}
/// <summary>
/// Reads a 1-D dataset into a T[] array. Fixed-length string datasets are
/// read as raw ASCII bytes and split into equal-length substrings.
/// </summary>
/// <returns>The dataset contents, or null on any failure.</returns>
public static T[] Read1DArray<T>(H5FileId fileId, string dataSetName)
{
    H5DataSetId dataset = null;
    H5DataSpaceId space = null;
    H5DataTypeId dataType = null;
    long[] dims;
    try
    {
        dataset = H5D.open(fileId, dataSetName);
        space = H5D.getSpace(dataset);
        dims = H5S.getSimpleExtentDims(space);
        dataType = H5D.getType(dataset);
        if (typeof(T) == typeof(string))
        {
            // Fixed-length strings: read the raw bytes and slice them into
            // stringLength-sized chunks.
            int stringLength = H5T.getSize(dataType);
            byte[] buffer = new byte[(int)(dims[0]) * stringLength];
            H5D.read(dataset, dataType, new H5Array<byte>(buffer));
            string stuff = System.Text.ASCIIEncoding.ASCII.GetString(buffer);
            return(stuff.SplitInParts(stringLength).Select(ss => (T)(object)ss).ToArray());
        }
        T[] dataArray = new T[dims[0]];
        var wrapArray = new H5Array<T>(dataArray);
        H5D.read(dataset, dataType, wrapArray);
        return(dataArray);
    }
    catch
    {
        // Deliberate best-effort contract: callers receive null on failure.
        return(null);
    }
    finally
    {
        // BUGFIX: also close the datatype handle (originally leaked).
        if (dataType != null)
        {
            H5T.close(dataType);
        }
        if (space != null)
        {
            H5S.close(space);
        }
        if (dataset != null)
        {
            H5D.close(dataset);
        }
    }
}
/// <summary>
/// Reads a scalar dataset value. String datasets are read via a byte buffer
/// and split into fixed-length parts; other types use H5D.readScalar.
/// </summary>
/// <returns>The scalar value, or default(T) on any failure.</returns>
public static T ReadScalar<T>(H5FileId fileId, string datasetName)
{
    H5DataSetId dataset = null;
    H5DataSpaceId space = null;
    H5DataTypeId dataType = null;
    T data = default(T);
    try
    {
        dataset = H5D.open(fileId, datasetName);
        space = H5D.getSpace(dataset);
        dataType = H5D.getType(dataset);
        H5D.readScalar<T>(dataset, dataType, ref data);
        if (typeof(T) == typeof(string))
        {
            int stringLength = H5T.getSize(dataType);
            // NOTE(review): buffer is twice the declared string length —
            // presumably to tolerate padding; confirm against the writer.
            byte[] buffer = new byte[2 * stringLength];
            H5D.read(dataset, dataType, new H5Array<byte>(buffer));
            string stuff = System.Text.ASCIIEncoding.ASCII.GetString(buffer);
            // BUGFIX: the original cast the whole IEnumerable<T> to T, which
            // always threw InvalidCastException (swallowed by the catch, so
            // string scalars always came back as default(T)). Return the
            // first chunk instead.
            return(stuff.SplitInParts(stringLength).Select(ss => (T)(object)ss).First());
        }
        return(data);
    }
    catch
    {
        return(default(T));
    }
    finally
    {
        // BUGFIX: also close the datatype handle (originally leaked).
        if (dataType != null)
        {
            H5T.close(dataType);
        }
        if (space != null)
        {
            H5S.close(space);
        }
        if (dataset != null)
        {
            H5D.close(dataset);
        }
    }
}
/// <summary>
/// Extension variant of Read1DArray: reads a 1-D dataset into a T[] array.
/// Unlike the static overload, exceptions propagate to the caller.
/// </summary>
public static T[] Read1DArray<T>(this H5FileId fileId, string dataSetName)
{
    H5DataSetId dataset = null;
    H5DataSpaceId space = null;
    H5DataTypeId dataType = null;
    try
    {
        dataset = H5D.open(fileId, dataSetName);
        space = H5D.getSpace(dataset);
        var dims = H5S.getSimpleExtentDims(space);
        dataType = H5D.getType(dataset);
        if (typeof(T) == typeof(string))
        {
            // Fixed-length strings: read the raw bytes and slice them into
            // stringLength-sized chunks.
            int stringLength = H5T.getSize(dataType);
            byte[] buffer = new byte[dims[0] * stringLength];
            H5D.read(dataset, dataType, new H5Array<byte>(buffer));
            string stuff = System.Text.ASCIIEncoding.ASCII.GetString(buffer);
            return(stuff.SplitInParts(stringLength).Select(ss => (T)(object)ss).ToArray());
        }
        T[] dataArray = new T[dims[0]];
        var wrapArray = new H5Array<T>(dataArray);
        H5D.read(dataset, dataType, wrapArray);
        return(dataArray);
    }
    finally
    {
        // BUGFIX: the original closed nothing, leaking all three handles.
        if (dataType != null)
        {
            H5T.close(dataType);
        }
        if (space != null)
        {
            H5S.close(space);
        }
        if (dataset != null)
        {
            H5D.close(dataset);
        }
    }
}
// Reads the FIELD_<i>_NAME attribute to discover the i-th table column's
// name, then inspects the compound dataset type for that member's size and
// class, and records a matching attribute wrapper in _attributes[i].
void FindAttribute(int i)
{
    var attr_field = "FIELD_" + i + "_NAME";
    var attr = H5A.open(Id, attr_field);
    var dtype = H5A.getType(attr);
    var size = H5T.getSize(dtype);
    var mtype = H5T.create(H5T.CreateClass.STRING, size);
    var buffer = new byte[size];
    H5A.read(attr, mtype, new H5Array<byte>(buffer));

    // BUGFIX: open the dataset type once (the original called H5D.getType(Id)
    // twice and leaked both handles, along with every other handle here).
    var dset_type = H5D.getType(Id);
    var attr_datatype = H5T.getMemberType(dset_type, i);
    var attr_size = H5T.getSize(attr_datatype);
    var attr_class = H5T.getMemberClass(dset_type, i).ToString();
    var attr_name = Encoding.GetString(buffer).Replace('\0', ' ').Trim();

    // Release all HDF5 handles before dispatching on the class name.
    H5T.close(attr_datatype);
    H5T.close(dset_type);
    H5T.close(mtype);
    H5T.close(dtype);
    H5A.close(attr);

    switch (attr_class)
    {
        case "STRING":
            _attributes[i] = new StringAttribute(attr_name, attr_size);
            break;
        case "INTEGER":
            _attributes[i] = new IntegerAttribute(attr_name, attr_size);
            break;
        case "FLOAT":
            _attributes[i] = new FloatingPointAttribute(attr_name, attr_size);
            break;
        default:
            throw new ArgumentException("Unknown attribute type " + attr_class, "attr_type");
    }
}
private void ReadHDF5Mesh() { double[][] meshes = new double[3][]; string[] meshNames = { "phi", "r", "theta" }; H5FileId fileId = H5F.open(m_resultFile, H5F.OpenMode.ACC_RDONLY); if (HDF5.ReadAttribute(m_resultFile, "/Mesh", "MeshType") != 2) { Console.WriteLine("Error: Invalid NF2FF mesh type in <{0}>", m_resultFile); return; } for (int i = 0; i < meshNames.Length; i++) { H5DataSetId dsId = H5D.open(fileId, "/Mesh/" + meshNames[i]); H5DataTypeId dtId = H5D.getType(dsId); if (!H5T.equal(dtId, H5T.copy(H5T.H5Type.NATIVE_FLOAT))) { Console.WriteLine("Error: Invalid dataset type, expected {0}", H5T.H5Type.NATIVE_FLOAT); } float[] mesh = new float[H5D.getStorageSize(dsId) / H5T.getSize(dtId)]; H5D.read(dsId, dtId, new H5Array <float>(mesh)); meshes[i] = mesh.Select(x => (double)x).ToArray(); H5D.close(dsId); H5T.close(dtId); } H5F.close(fileId); m_theta = meshes[2]; m_phi = meshes[0]; }
// Converts a JSON file (a list of flat string dictionaries) into an HDF5 file
// laid out column-wise: one dataset per key, "date" columns written as
// fixed-length (10-char) strings and all other columns as float arrays.
// Returns the path of the temporary output directory, or "Error" on failure.
public string Get(string path, bool outfolder)
{
    logger.Log(LogLevel.Info, "Entered AVISTEDHDF5Converter GET()");
    try
    {
        string content = File.ReadAllText(path);
        List<Dictionary<string, string>> data = JsonConvert.DeserializeObject<List<Dictionary<string, string>>>(content);
        string result = "false";
        // Build a unique temp-folder name from time, PID, and a random part.
        string randomlyGeneratedFolderNamePart = Path.GetFileNameWithoutExtension(Path.GetRandomFileName());
        string timeRelatedFolderNamePart = DateTime.Now.Year.ToString() + DateTime.Now.Month.ToString() + DateTime.Now.Day.ToString() + DateTime.Now.Hour.ToString() + DateTime.Now.Minute.ToString() + DateTime.Now.Second.ToString() + DateTime.Now.Millisecond.ToString();
        string processRelatedFolderNamePart = System.Diagnostics.Process.GetCurrentProcess().Id.ToString();
        string copypath = "";
        if (outfolder)
        {
            copypath = ConfigurationManager.AppSettings["Save_Downloads"].ToString();
        }
        else
        {
            copypath = ConfigurationManager.AppSettings["Converters"].ToString();
        }
        string temporaryDirectoryName = Path.Combine(copypath, timeRelatedFolderNamePart + processRelatedFolderNamePart + randomlyGeneratedFolderNamePart);
        Directory.CreateDirectory(temporaryDirectoryName);
        logger.Log(LogLevel.Info, "Created Directory");
        string uri = Path.Combine(temporaryDirectoryName, "result" + ".h5");
        H5FileId fileId = H5F.create(uri, H5F.CreateMode.ACC_TRUNC);
        string[] results = new string[data.Count + 1];
        int i = 0, j = 0;
        Dictionary<string, string> resultdict = new Dictionary<string, string>();
        // Column names come from the first record; all records are assumed to
        // share the same key order (TODO confirm).
        Dictionary<string, string> tempdict = data.First();
        string[] names = tempdict.Keys.ToArray();
        string[] values = new string[names.Length];
        // Transpose the row-oriented records into comma-joined column strings.
        foreach (Dictionary<string, string> dict in data)
        {
            var value = dict.Values.ToArray();
            if (j == 0)
            {
                for (int k = 0; k < values.Length; k++)
                {
                    values[k] = value[k];
                }
                j = 1;
            }
            else
            {
                for (int k = 0; k < values.Length; k++)
                {
                    values[k] += "," + value[k];
                }
            }
        }
        int index = 0;
        foreach (string s in names)
        {
            if (s.Equals("date"))
            {
                string[] strings = values[index++].Split(',');
                byte[] bytes = Encoding.UTF8.GetBytes(String.Concat(strings));
                char[,] myChars = new char[strings.Length, 10];
                // NOTE(review): myChars is filled but never written — the
                // unpadded UTF-8 bytes are written with a 10-byte string type
                // below, which only lines up if every date is exactly 10
                // characters. Confirm the input format.
                myChars = StringToChar(myChars, strings);
                // Prepare to create a data space for writing a 1-dimensional
                // fixed-length string array.
                long[] dims = new long[1];
                dims[0] = strings.Length;
                H5DataSpaceId spaceId = H5S.create_simple(1, dims);
                H5DataTypeId typeId = H5T.copy(H5T.H5Type.C_S1);
                // Find the size of the type (10 bytes per date string).
                int typeSize = H5T.getSize(typeId) * 10;
                H5T.setSize(typeId, 10);
                string name = "/" + s;
                // Create the data set.
                H5DataSetId dataSetId = H5D.create(fileId, s, typeId, spaceId);
                H5D.write(dataSetId, typeId, new H5Array<byte>(bytes));
                H5D.close(dataSetId);
                H5S.close(spaceId);
                // NOTE(review): typeId is never closed in this branch (leak) —
                // the float branch below closes its type.
                logger.Log(LogLevel.Info, "Created parameter {0}", s);
            }
            else
            {
                string[] strings = values[index++].Split(',');
                float[] vl = new float[strings.Length];
                int l = 0;
                foreach (string d in strings)
                {
                    vl[l++] = float.Parse(d);
                }
                // Prepare to create a data space for writing a 1-dimensional
                // float array.
                long[] dims = new long[1];
                dims[0] = strings.Length;
                H5DataSpaceId spaceId = H5S.create_simple(1, dims);
                H5DataTypeId typeId1 = H5T.copy(H5T.H5Type.NATIVE_FLOAT);
                // Find the size of the type.
                int typeSize = H5T.getSize(typeId1);
                // Set the order to big endian
                H5T.setOrder(typeId1, H5T.Order.BE);
                // Set the order to little endian
                // NOTE(review): this immediately overrides the BE setting
                // above — presumably only LE is intended; confirm.
                H5T.setOrder(typeId1, H5T.Order.LE);
                string name = "/" + s;
                // Create the data set.
                H5DataSetId dataSetId = H5D.create(fileId, s, typeId1, spaceId);
                H5D.write(dataSetId, new H5DataTypeId(H5T.H5Type.NATIVE_FLOAT), new H5Array<float>(vl));
                // dscopy.AddVariable<float>(s, vl);
                H5D.close(dataSetId);
                H5S.close(spaceId);
                H5T.close(typeId1);
                logger.Log(LogLevel.Info, "Created parameter {0}", s);
            }
        }
        H5F.close(fileId);
        string SourceFolderPath = temporaryDirectoryName;
        return(SourceFolderPath);
    }
    catch (Exception ex)
    {
        logger.Error("AVISTEDHDF5Converter:Failed with exception {0}", ex.Message);
    }
    return("Error");
}
/// <summary>
/// Creates a new dataset from an HDF5 data file.
/// </summary>
/// <param name="blob">The input blob is reshaped to the dataset item shape.</param>
/// <param name="strDatasetName">Specifies the new dataset name.</param>
/// <param name="bReshape">Specifies whether to reshape the 'blob' parameter.</param>
/// <param name="nMinDim">Specifies the minimum dimension.</param>
/// <param name="nMaxDim">Specifies the maximum dimension.</param>
/// <param name="id">Optional, group ID to use instead of internal file (default = null).</param>
/// <param name="bAllowSingleItems">When true single item values are allowed and used to copy across entire blob.</param>
public void load_nd_dataset(Blob<T> blob, string strDatasetName, bool bReshape = false, int nMinDim = 1, int nMaxDim = int.MaxValue, H5GroupId id = null, bool bAllowSingleItems = false)
{
    H5DataSetId ds = null;
    H5DataTypeId dsType = null;
    int nSingleItemSize = 0;

    try
    {
        Tuple<H5DataSetId, int> ds1 = load_nd_datasetEx(blob, strDatasetName, bReshape, nMinDim, nMaxDim, id, bAllowSingleItems);
        ds = ds1.Item1;
        nSingleItemSize = ds1.Item2;

        dsType = H5D.getType(ds);
        int nSize = H5T.getSize(dsType);

        // Dispatch on the stored element size: 8 -> double, 4 -> float,
        // 1 -> byte. NOTE(review): a 4-byte int dataset would be read as
        // float here — confirm inputs are always floating point or byte.
        if (nSize == sizeof(double))
        {
            double[] rgBuffer = new double[blob.count()];
            H5Array<double> rgData = new H5Array<double>(rgBuffer);
            H5D.read<double>(ds, dsType, rgData);

            if (!bAllowSingleItems || nSingleItemSize == 0)
            {
                blob.mutable_cpu_data = Utility.ConvertVec<T>(rgBuffer);
            }
            else
            {
                // Single-item dataset: broadcast the value across the blob.
                blob.SetData(rgBuffer[0]);
            }
        }
        else if (nSize == sizeof(float))
        {
            float[] rgBuffer = new float[blob.count()];
            H5Array<float> rgData = new H5Array<float>(rgBuffer);
            H5D.read<float>(ds, dsType, rgData);

            if (!bAllowSingleItems || nSingleItemSize == 0)
            {
                blob.mutable_cpu_data = Utility.ConvertVec<T>(rgBuffer);
            }
            else
            {
                blob.SetData(rgBuffer[0]);
            }
        }
        else if (nSize == sizeof(byte))
        {
            byte[] rgBuffer = new byte[blob.count()];
            H5Array<byte> rgData = new H5Array<byte>(rgBuffer);
            H5D.read<byte>(ds, dsType, rgData);

            float[] rgf = rgBuffer.Select(p1 => (float)p1).ToArray();
            blob.mutable_cpu_data = Utility.ConvertVec<T>(rgf);
        }
        else
        {
            m_log.FAIL("The dataset size of '" + nSize.ToString() + "' is not supported!");
        }
    }
    catch (Exception excpt)
    {
        m_log.FAIL(excpt.Message);
    }
    finally
    {
        // BUGFIX: the dataset type handle was never closed (leak).
        if (dsType != null)
        {
            H5T.close(dsType);
        }
        if (ds != null)
        {
            H5D.close(ds);
        }
    }
}
/// <summary>
/// Constructs a new EpochHDF5Persistor with an HDF5 file at the given path.
/// </summary>
/// <param name="filename">Desired HDF5 path</param>
/// <param name="assocFilePrefix">Prefix for auxiliary (e.g. image) file associated with this HDF5 file</param>
/// <param name="guidGenerator">Function for generating new UUIDs (e.g. Guid.NewGuid)</param>
/// <param name="compression">Automatically numeric data compression (0 = none, 9 = maximum)</param>
public EpochHDF5Persistor(string filename, string assocFilePrefix, Func<Guid> guidGenerator, uint compression = 9)
    : base(guidGenerator)
{
    if (filename == null)
    {
        throw new ArgumentException("File name must not be null", "filename");
    }
    if (compression > 9)
    {
        throw new ArgumentException("Compression must be 0-9", "compression");
    }
    if (assocFilePrefix == null)
    {
        assocFilePrefix = "";
    }

    this.AssociatedFilePrefix = assocFilePrefix;
    NumericDataCompression = compression;
    EpochGroupsIDs = new Stack<EpochGroupIDs>();

    var fInfo = new FileInfo(filename);
    string prefixedFilePath = fInfo.DirectoryName + Path.DirectorySeparatorChar + this.AssociatedFilePrefix + fInfo.Name;
    var currentFile = new FileInfo(prefixedFilePath);
    if (currentFile.Exists)
    {
        // Re-open an existing file and its committed types.
        fileId = H5F.open(prefixedFilePath, H5F.OpenMode.ACC_RDWR);
        string_t = H5T.open(fileId, "STRING40");
        keyval_t = H5T.open(fileId, "KEY40VAR40");
        measurement_t = H5T.open(fileId, "MEASUREMENT");
        extdevmeasurement_t = H5T.open(fileId, "EXTDEV_MEASUREMENT");
        //TODO Check persistence version
    }
    else
    {
        fileId = H5F.create(prefixedFilePath, H5F.CreateMode.ACC_EXCL);
        WriteAttribute(fileId, "version", Version);

        // Create our standard String type (string of length
        // FIXED_STRING_LENGTH characters).
        // CONSISTENCY FIX: use FIXED_STRING_LENGTH instead of the magic
        // literal 40, matching the member offsets below.
        string_t = H5T.copy(H5T.H5Type.C_S1);
        H5T.setSize(string_t, FIXED_STRING_LENGTH);
        H5T.commit(fileId, "STRING40", string_t);

        // Create our key/value compound type (two fixed-length strings),
        // sized to hold both members (was the magic literal 80).
        keyval_t = H5T.create(H5T.CreateClass.COMPOUND, 2 * FIXED_STRING_LENGTH);
        H5T.insert(keyval_t, "key", 0, string_t);
        H5T.insert(keyval_t, "value", FIXED_STRING_LENGTH, string_t);
        H5T.commit(fileId, "KEY40VAR40", keyval_t);

        // Create the Measurement compound type.
        measurement_t = H5T.create(H5T.CreateClass.COMPOUND, 48); // confirm 48 is enough/too much/whatever
        H5T.insert(measurement_t, "quantity", 0, H5T.H5Type.NATIVE_DOUBLE);
        H5T.insert(measurement_t, "unit", H5T.getSize(H5T.H5Type.NATIVE_DOUBLE), string_t);
        H5T.commit(fileId, "MEASUREMENT", measurement_t);

        // Create the ExtDev/Measurement compound type.
        extdevmeasurement_t = H5T.create(H5T.CreateClass.COMPOUND,
                                         H5T.getSize(string_t) + 2 * H5T.getSize(measurement_t));
        H5T.insert(extdevmeasurement_t, "externalDevice", 0, string_t);
        H5T.insert(extdevmeasurement_t, "measurement", H5T.getSize(string_t), measurement_t);
        H5T.commit(fileId, "EXTDEV_MEASUREMENT", extdevmeasurement_t);
    }

    Interlocked.Increment(ref _openHdf5FileCount);
}
// Reads a file-level attribute into an AttributeValue wrapper, dispatching on
// the attribute's type class (FLOAT/INTEGER/STRING) and rank (0-2).
// NOTE(review): the rank-1 and rank-2 STRING branches allocate empty arrays
// but never read the data — they appear unimplemented; confirm before use.
public AttributeValue GetAttribute(H5FileId fileId, string attributeName)
{
    H5AttributeId attributeId = H5A.openName(fileId, attributeName); // open the attribute by name
    H5DataTypeId attributeType = H5A.getType(attributeId); // the attribute's datatype
    int size = H5T.getSize(attributeType);
    H5T.H5TClass typeClass = H5T.getClass(attributeType);
    H5DataSpaceId spaceId = H5A.getSpace(attributeId);
    long[] dims = H5S.getSimpleExtentDims(spaceId);
    int rank = H5S.getSimpleExtentNDims(spaceId);
    H5T.H5Type h5type;
    Type dataType = null;
    AttributeValue atrributeData = new AttributeValue();
    atrributeData.dataValue = null;
    atrributeData.valueType = DataValueType.EMPTY;
    atrributeData.rank = rank;
    switch (typeClass)
    {
        case H5T.H5TClass.FLOAT:
            h5type = H5T.H5Type.NATIVE_FLOAT;
            if (rank == 1)
            {
                float[] floatDatatmp = new float[dims[0]];
                H5A.read(attributeId, new H5DataTypeId(h5type), new H5Array<float>(floatDatatmp));
                atrributeData.dataValue = floatDatatmp;
            }
            else if (rank == 2)
            {
                float[,] floatDatatmp = new float[dims[0], dims[1]];
                H5A.read(attributeId, new H5DataTypeId(h5type), new H5Array<float>(floatDatatmp));
                atrributeData.dataValue = floatDatatmp;
            }
            atrributeData.valueType = DataValueType.FLOAT;
            break;
        case H5T.H5TClass.INTEGER:
            h5type = H5T.H5Type.NATIVE_INT;
            // int[,] intDatatmp = null;
            if (rank == 1)
            {
                int[] intDatatmp = new int[dims[0]];
                H5A.read(attributeId, new H5DataTypeId(h5type), new H5Array<int>(intDatatmp));
                atrributeData.dataValue = intDatatmp;
            }
            else if (rank == 2)
            {
                int[,] intDatatmp = new int[dims[0], dims[1]];
                H5A.read(attributeId, new H5DataTypeId(h5type), new H5Array<int>(intDatatmp));
                atrributeData.dataValue = intDatatmp;
            }
            atrributeData.valueType = DataValueType.INT;
            break;
        case H5T.H5TClass.STRING:
            h5type = H5T.H5Type.C_S1;
            if (rank == 0)
            {
                // Scalar string: read raw bytes into a fixed 255-byte buffer
                // and strip NUL padding.
                // NOTE(review): strings longer than 255 bytes would be
                // truncated (or fail) — confirm the attribute size bound.
                string[] stringDatatmp = new string[1];
                byte[] bytedata = new byte[255];
                H5A.read(attributeId, attributeType, new H5Array<byte>(bytedata));
                //H5A.read(attributeId, new H5DataTypeId(h5type), new H5Array<string>(stringDatatmp));
                stringDatatmp[0] = Encoding.Default.GetString(bytedata).Trim('\0');
                atrributeData.dataValue = stringDatatmp;
            }
            else if (rank == 1)
            {
                // TODO(review): unimplemented — returns an array of nulls.
                // Earlier attempts (variable-size memtype, byte-buffer reads)
                // were left commented out in the original and removed here.
                string[] stringDatatmp = new string[dims[0]];
                atrributeData.dataValue = stringDatatmp;
            }
            else if (rank == 2)
            {
                // TODO(review): unimplemented — returns a matrix of nulls.
                string[,] stringDatatmp = new string[dims[0], dims[1]];
                atrributeData.dataValue = stringDatatmp;
            }
            atrributeData.valueType = DataValueType.STRING;
            break;
        default:
            h5type = H5T.H5Type.C_S1;
            break;
    }
    H5T.close(attributeType);
    H5S.close(spaceId);
    H5A.close(attributeId);
    return(atrributeData);
}
// Tests compound datatype creation and writing, then exercises the member
// introspection APIs (name/class/index/offset/size) on the reopened dataset.
// Increments nerrors on every mismatch.
static void test_compound_dtype(H5FileId fileId)
{
    uint i, j, n;
    try
    {
        Console.Write("Testing compound datatypes");

        // Allocate space for the points & check arrays
        s1[,] points = new s1[DIM0, DIM1];
        s1[,] check = new s1[DIM0, DIM1];

        // Initialize the dataset
        for (i = n = 0; i < DIM0; i++)
        {
            for (j = 0; j < DIM1; j++)
            {
                points[i, j].c = 't';
                points[i, j].i = n++;
                points[i, j].l = (i * 10 + j * 100) * n;
            }
        }

        // Create the data space
        hssize_t[] dims = { DIM0, DIM1 };
        H5DataSpaceId spaceId = H5S.create_simple(2, dims);

        // Create compound datatype for disk storage
        H5DataTypeId typeId = H5T.create(H5T.CreateClass.COMPOUND, 16);

        // Insert members at offsets 0, 1, and 5 (packed layout).
        H5T.insert(typeId, "c", 0, H5T.H5Type.STD_U8LE);
        H5T.insert(typeId, "i", 1, H5T.H5Type.STD_U32LE);
        H5T.insert(typeId, "l", 5, H5T.H5Type.STD_I64BE);

        // Create the dataset
        H5DataSetId dsetId = H5D.create(fileId, DSET_COMPOUND_NAME, typeId, spaceId);

        // Write the dataset
        H5D.write(dsetId, typeId, new H5Array<s1>(points));

        // Close dataset and dataspace
        H5D.close(dsetId);
        H5S.close(spaceId);
        H5T.close(typeId);

        // Open dataset again to check various functions.
        dsetId = H5D.open(fileId, DSET_COMPOUND_NAME);

        // Get its type and native type.
        H5DataTypeId dset_typeId = H5D.getType(dsetId);
        H5DataTypeId native_type = H5T.getNativeType(dset_typeId, H5T.Direction.DEFAULT);

        // Check name against this list
        string[] memb_names = { "c", "i", "l" };
        int[] memb_offsets = { 0, 1, 5 };

        H5DataTypeId mtypeId;               // member type
        H5T.H5TClass memb_cls1, memb_cls2;  // member classes retrieved different ways
        string memb_name;                   // member name
        int memb_idx;                       // member index

        // Get the number of members in the type.
        int nmembers = H5T.getNMembers(native_type);

        // For each member, check its name, class, index, and size.
        for (int ii = 0; ii < nmembers; ii++)
        {
            // Get the type of the ith member.
            mtypeId = H5T.getMemberType(native_type, ii);

            // Get the name of the ith member.
            memb_name = H5T.getMemberName(native_type, ii);
            if (memb_name != memb_names[ii])
            {
                // BUGFIX: the original printed the stale outer loop variable
                // 'i' (left at DIM0 by the init loops) instead of the member
                // index 'ii'.
                Console.WriteLine("test_compound_dtypes: incorrect member name, {0}, for member no {1}", memb_name, ii);
                nerrors++;
            }

            // Get the class of the ith member and then verify the class.
            memb_cls1 = H5T.getMemberClass(native_type, ii);
            if (memb_cls1 != H5T.H5TClass.INTEGER)
            {
                Console.WriteLine("test_compound_dtypes: incorrect class, {0}, for member no {1}", memb_cls1, ii);
                nerrors++;
            }

            // Get the class via type id
            memb_cls2 = H5T.getClass(mtypeId);
            if (memb_cls1 != memb_cls2)
            {
                Console.WriteLine("test_compound_dtypes: H5T.getMemberClass and H5T.getClass return different classes for the same type.");
                nerrors++;
            }

            // Get member's index back from its name and verify it.
            memb_idx = H5T.getMemberIndex(dset_typeId, memb_name);
            if (memb_idx != ii)
            {
                Console.WriteLine("test_compound_dtypes: H5T.getMemberName and/or H5T.getMemberIndex returned false values.");
                nerrors++;
            }

            // Get member's offset and verify it.
            int memb_offset = H5T.getMemberOffset(dset_typeId, ii);
            if (memb_offset != memb_offsets[ii])
            {
                Console.WriteLine("test_compound_dtypes: Incorrect offset value {0}, should be {1}.", memb_offset, memb_offsets[ii]);
                nerrors++;
            }

            // Get size of the member's type and verify it.
            int tsize = H5T.getSize(mtypeId);
            switch (ii)
            {
                case 0:
                    //Console.WriteLine("tsize = {0}, STD_U8LE = {1}", tsize, H5T.getSize(H5T.H5Type.STD_U8LE));
                    if (tsize != H5T.getSize(H5T.H5Type.STD_U8LE))
                    {
                        Console.WriteLine("test_compound_dtypes: First member has incorrect size");
                        nerrors++;
                    }
                    break;
                case 1:
                    if (tsize != H5T.getSize(H5T.H5Type.STD_U32LE))
                    {
                        Console.WriteLine("test_compound_dtypes: Second member has incorrect size");
                        nerrors++;
                    }
                    break;
                case 2:
                    if (tsize != H5T.getSize(H5T.H5Type.STD_I64BE))
                    {
                        Console.WriteLine("test_compound_dtypes: Third member has incorrect size");
                        nerrors++;
                    }
                    break;
                default:
                    Console.WriteLine("test_compound_dtypes: Only 3 members.");
                    break;
            } // end switch

            // Close current member type.
            H5T.close(mtypeId);
        } // end for

        // Close objects.
        H5T.close(dset_typeId);
        H5T.close(native_type);
        H5D.close(dsetId);
        Console.WriteLine("\t\t\t\tPASSED");
    } // end of try block
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_compound_dtype
// Collects descriptive metadata (type, dims, storage size) plus every HDF5
// attribute of the named dataset into a name->value dictionary.
// Returns null when the dataset name is unknown or cannot be opened.
public Dictionary<string, string> GetAttributes(string datasetName)
{
    if (string.IsNullOrEmpty(datasetName) || !_datasetNames.Contains(datasetName))
    {
        return(null);
    }
    H5DataSetId datasetId = null;
    H5GroupId groupId = null;
    H5DataTypeId typeId = null;
    H5DataSpaceId spaceId = null;
    //H5PropertyListId psId = null;
    try
    {
        // Dataset names may be group-qualified ("group/name"); open the
        // enclosing group first in that case.
        int groupIndex = datasetName.LastIndexOf('/');
        if (groupIndex == -1)
        {
            datasetId = H5D.open(_h5FileId, datasetName);
        }
        else
        {
            string groupName = datasetName.Substring(0, groupIndex + 1);
            string dsName = datasetName.Substring(groupIndex + 1);
            groupId = H5G.open(_h5FileId, groupName);
            datasetId = H5D.open(groupId, dsName);
        }
        if (datasetId == null)
        {
            return(null);
        }
        Dictionary<string, string> attValues = new Dictionary<string, string>();
        typeId = H5D.getType(datasetId);
        H5T.H5TClass type = H5T.getClass(typeId);
        int tSize = H5T.getSize(typeId);
        spaceId = H5D.getSpace(datasetId);
        long[] dims = H5S.getSimpleExtentDims(spaceId);
        long storageSize = H5D.getStorageSize(datasetId);
        // Synthetic entries describing the dataset itself.
        attValues.Add("DataSetName", datasetName);
        attValues.Add("DataType", type.ToString());
        attValues.Add("DataTypeSize", tSize.ToString() + "Byte");
        attValues.Add("Dims", String.Join("*", dims));
        attValues.Add("StorageSize", storageSize.ToString() + "Byte");
        // Real HDF5 attributes attached to the dataset.
        // NOTE(review): the "/" + datasetName lookup path assumes a
        // root-level name — confirm it is right for group-qualified names.
        int attrCount = H5A.getNumberOfAttributes(datasetId);
        for (int i = 0; i < attrCount; i++)
        {
            string attName = H5A.getNameByIndex(datasetId, "/" + datasetName, H5IndexType.NAME, H5IterationOrder.NATIVE, (ulong)i);
            attValues.Add(attName, ReadAttributeValue(datasetId, attName));
        }
        return(attValues);
    }
    finally
    {
        if (spaceId != null)
        {
            H5S.close(spaceId);
        }
        if (typeId != null)
        {
            H5T.close(typeId);
        }
        if (datasetId != null)
        {
            H5D.close(datasetId);
        }
        if (groupId != null)
        {
            H5G.close(groupId);
        }
    }
}
private object GetAttributeValue(H5ObjectWithAttributes obj, string attributeName)
{
    // Reads the named attribute of an HDF5 object and returns its value as a
    // CLR object: an ASCII string for STRING attributes, otherwise an array
    // whose element type matches the attribute's width and (for integers)
    // sign. Returns null when the attribute cannot be opened or its datatype
    // class is not handled.
    H5AttributeId attId = H5A.open(obj, attributeName);
    if (attId == null)
    {
        return (null);
    }
    H5DataTypeId typeId = null;
    H5DataTypeId dtId = null;
    H5DataSpaceId spaceId = null;
    H5DataTypeId oldTypeId = null;
    object retObject = null;
    try
    {
        typeId = H5A.getType(attId);
        H5AttributeInfo attInfo = H5A.getInfo(attId);
        dtId = H5A.getType(attId);
        spaceId = H5A.getSpace(attId);
        int dataSize = H5T.getSize(dtId);

        // BUG FIX: keep the on-file datatype id in oldTypeId so it can be
        // queried for sign and closed in the finally block. Previously the
        // assignment was commented out, so H5T.getSign(oldTypeId) below was
        // called with a null id and the file type handle leaked.
        oldTypeId = typeId;
        typeId = H5T.getNativeType(typeId, H5T.Direction.DEFAULT);
        H5T.H5TClass typeClass = H5T.getClass(typeId);

        // Scalar attributes report an empty dims array; the product of no
        // dimensions is 1, which is exactly the element count we want.
        long[] dims = H5S.getSimpleExtentDims(spaceId);
        long dimSize = 1;
        foreach (long dim in dims)
        {
            dimSize *= dim;
        }

        switch (typeClass)
        {
            case H5T.H5TClass.STRING:
                // Fixed-length strings: read the raw bytes and decode ASCII.
                long size = attInfo.dataSize;
                byte[] chars = ReadArray <byte>(size, attId, typeId);
                retObject = Encoding.ASCII.GetString(chars);
                break;
            case H5T.H5TClass.INTEGER:
                H5T.Sign sign = H5T.getSign(oldTypeId);
                switch (dataSize)
                {
                    case 1:
                        retObject = ReadArray <byte>(dimSize, attId, typeId);
                        break;
                    case 2:
                        retObject = (sign == H5T.Sign.UNSIGNED)
                            ? (object)ReadArray <UInt16>(dimSize, attId, typeId)
                            : ReadArray <Int16>(dimSize, attId, typeId);
                        break;
                    case 4:
                        retObject = (sign == H5T.Sign.UNSIGNED)
                            ? (object)ReadArray <UInt32>(dimSize, attId, typeId)
                            : ReadArray <Int32>(dimSize, attId, typeId);
                        break;
                    case 8:
                        retObject = (sign == H5T.Sign.UNSIGNED)
                            ? (object)ReadArray <UInt64>(dimSize, attId, typeId)
                            : ReadArray <Int64>(dimSize, attId, typeId);
                        break;
                }
                break;
            case H5T.H5TClass.FLOAT:
                switch (dataSize)
                {
                    case 4:
                        retObject = ReadArray <float>(dimSize, attId, typeId);
                        break;
                    case 8:
                        retObject = ReadArray <double>(dimSize, attId, typeId);
                        break;
                }
                break;
        }
        return (retObject);
    }
    finally
    {
        // Close every handle that was successfully opened, in reverse order.
        if (spaceId != null)
        {
            H5S.close(spaceId);
        }
        if (attId != null)
        {
            H5A.close(attId);
        }
        if (oldTypeId != null)
        {
            H5T.close(oldTypeId);
        }
        if (typeId != null)
        {
            H5T.close(typeId);
        }
        if (dtId != null)
        {
            H5T.close(dtId);
        }
    }
}
private void ReadOldDataSetData(string dataSetName, int bandIndex, out int bandWidth, out int bandHeight, out enumDataType dataType, out object retObject)
{
    // Reads raw band data from the legacy-format dataset named dataSetName.
    // Outputs the band width/height, the element data type, and the data
    // array for the requested band index.
    bandHeight = bandWidth = 0;
    dataType = enumDataType.UInt16;
    retObject = null;
    H5FileId _h5FileId = null;
    H5DataSpaceId spaceid = null;
    H5DataSetId dataSetId = null;
    // BUG FIX: typeId/newTypeId were previously local to the try body and
    // never closed, leaking two datatype handles per call; they are now
    // declared here and released in the finally block.
    H5DataTypeId typeId = null;
    H5DataTypeId newTypeId = null;
    try
    {
        _h5FileId = H5F.open(fileName, H5F.OpenMode.ACC_RDONLY);
        // Locate the dataset that contains the requested band.
        dataSetId = H5D.open(_h5FileId, dataSetName);
        spaceid = H5D.getSpace(dataSetId);
        long[] dims = H5S.getSimpleExtentDims(spaceid); // extent, e.g. [3, 1800, 2048]
        int rank = H5S.getSimpleExtentNDims(spaceid);   // number of dimensions, e.g. 3
        int size = 0;
        if (rank == 1)
        {
            bandHeight = bandWidth = 1;
            size = bandWidth * bandHeight * rank;
        }
        else if (rank == 2)
        {
            bandWidth = Convert.ToInt32(dims[0]);
            bandHeight = Convert.ToInt32(dims[1]);
            size = bandWidth * bandHeight;
        }
        else if (rank == 3)
        {
            // Assume the smallest dimension is the band count and the two
            // larger ones are width/height — TODO confirm against the
            // legacy file layout.
            List <long> r = dims.ToList <long>();
            r.Sort();
            long[] temp = r.ToArray();
            bandWidth = Convert.ToInt32(temp[1]);
            bandHeight = Convert.ToInt32(temp[2]);
            size = bandWidth * bandHeight * Convert.ToInt32(temp[0]);
        }
        int outSize = bandWidth * bandHeight;
        typeId = H5D.getType(dataSetId);
        H5T.H5TClass typeClass = H5T.getClass(typeId); // element class of the dataset
        int dataSize = H5T.getSize(typeId);
        // Dispatch on (class, size, sign) to read with the matching native type.
        switch (typeClass)
        {
            case H5T.H5TClass.INTEGER:
                H5T.Sign sign = H5T.getSign(typeId);
                switch (dataSize)
                {
                    case 1:
                        newTypeId = H5T.copy(H5T.H5Type.NATIVE_B8);
                        retObject = ReadArray <byte>(size, dataSetId, newTypeId, bandIndex, outSize);
                        dataType = enumDataType.Byte;
                        break;
                    case 2:
                        if (sign == H5T.Sign.UNSIGNED)
                        {
                            newTypeId = H5T.copy(H5T.H5Type.NATIVE_USHORT);
                            retObject = ReadArray <UInt16>(size, dataSetId, newTypeId, bandIndex, outSize);
                            dataType = enumDataType.UInt16;
                        }
                        else
                        {
                            newTypeId = H5T.copy(H5T.H5Type.NATIVE_SHORT);
                            retObject = ReadArray <Int16>(size, dataSetId, newTypeId, bandIndex, outSize);
                            dataType = enumDataType.Int16;
                        }
                        break;
                    case 4:
                        if (sign == H5T.Sign.UNSIGNED)
                        {
                            newTypeId = H5T.copy(H5T.H5Type.NATIVE_UINT);
                            retObject = ReadArray <UInt32>(size, dataSetId, newTypeId, bandIndex, outSize);
                            dataType = enumDataType.UInt32;
                        }
                        else
                        {
                            newTypeId = H5T.copy(H5T.H5Type.NATIVE_INT);
                            retObject = ReadArray <Int32>(size, dataSetId, newTypeId, bandIndex, outSize);
                            dataType = enumDataType.Int32;
                        }
                        break;
                    case 8:
                        if (sign == H5T.Sign.UNSIGNED)
                        {
                            newTypeId = H5T.copy(H5T.H5Type.NATIVE_ULONG);
                            retObject = ReadArray <UInt64>(size, dataSetId, newTypeId, bandIndex, outSize);
                            dataType = enumDataType.UInt64;
                        }
                        else
                        {
                            newTypeId = H5T.copy(H5T.H5Type.NATIVE_LONG);
                            retObject = ReadArray <Int64>(size, dataSetId, newTypeId, bandIndex, outSize);
                            dataType = enumDataType.Int64;
                        }
                        break;
                }
                break;
            case H5T.H5TClass.FLOAT:
                switch (dataSize)
                {
                    case 4:
                        newTypeId = H5T.copy(H5T.H5Type.NATIVE_FLOAT);
                        retObject = ReadArray <float>(size, dataSetId, newTypeId, bandIndex, outSize);
                        dataType = enumDataType.Float;
                        break;
                    case 8:
                        newTypeId = H5T.copy(H5T.H5Type.NATIVE_DOUBLE);
                        retObject = ReadArray <double>(size, dataSetId, newTypeId, bandIndex, outSize);
                        dataType = enumDataType.Double;
                        break;
                }
                break;
        }
    }
    finally
    {
        // BUG FIX: the previous finally closed handles unconditionally, so a
        // failure in H5F.open raised a secondary error on the null ids.
        if (newTypeId != null)
        {
            H5T.close(newTypeId);
        }
        if (typeId != null)
        {
            H5T.close(typeId);
        }
        if (spaceid != null)
        {
            H5S.close(spaceid);
        }
        if (dataSetId != null)
        {
            H5D.close(dataSetId);
        }
        if (_h5FileId != null)
        {
            H5F.close(_h5FileId);
        }
    }
}
public static void runTest()
{
    // End-to-end smoke test of the HDF5DotNet wrapper: creates a file with a
    // group hierarchy, writes and reads back a 1-D int dataset, and iterates
    // the group with a managed callback. All failures surface as HDFException.
    try
    {
        // We will write and read an int array of this length.
        const int DATA_ARRAY_LENGTH = 12;

        // Rank is the number of dimensions of the data array.
        const int RANK = 1;

        // Create an HDF5 file.
        // The enumeration type H5F.CreateMode provides only the legal
        // creation modes.  Missing H5Fcreate parameters are provided
        // with default values.
        H5FileId fileId = H5F.create("myCSharp.h5", H5F.CreateMode.ACC_TRUNC);

        // Create a HDF5 group.
        H5GroupId groupId = H5G.create(fileId, "/cSharpGroup");
        H5GroupId subGroup = H5G.create(groupId, "mySubGroup");

        // Demonstrate getObjectInfo
        ObjectInfo info = H5G.getObjectInfo(fileId, "/cSharpGroup", true);
        Console.WriteLine("cSharpGroup header size is {0}", info.headerSize);
        Console.WriteLine("cSharpGroup nlinks is {0}", info.nHardLinks);
        Console.WriteLine("cSharpGroup fileno is {0} {1}", info.fileNumber[0], info.fileNumber[1]);
        Console.WriteLine("cSharpGroup objno is {0} {1}", info.objectNumber[0], info.objectNumber[1]);
        Console.WriteLine("cSharpGroup type is {0}", info.objectType);

        H5G.close(subGroup);

        // Prepare to create a data space for writing a 1-dimensional
        // signed integer array.
        long[] dims = new long[RANK];
        dims[0] = DATA_ARRAY_LENGTH;

        // Put descending ramp data in an array so that we can
        // write it to the file.
        int[] dset_data = new int[DATA_ARRAY_LENGTH];
        for (int i = 0; i < DATA_ARRAY_LENGTH; i++)
        {
            dset_data[i] = DATA_ARRAY_LENGTH - i;
        }

        // Create a data space to accommodate our 1-dimensional array.
        // The resulting H5DataSpaceId will be used to create the
        // data set.
        H5DataSpaceId spaceId = H5S.create_simple(RANK, dims);

        // Create a copy of a standard data type.  We will use the
        // resulting H5DataTypeId to create the data set.  We could
        // have used the HST.H5Type data directly in the call to
        // H5D.create, but this demonstrates the use of H5T.copy
        // and the use of a H5DataTypeId in H5D.create.
        H5DataTypeId typeId = H5T.copy(H5T.H5Type.NATIVE_INT);

        // Find the size of the type
        int typeSize = H5T.getSize(typeId);
        Console.WriteLine("typeSize is {0}", typeSize);

        // Set the order to big endian
        H5T.setOrder(typeId, H5T.Order.BE);

        // Set the order to little endian
        H5T.setOrder(typeId, H5T.Order.LE);

        // Create the data set.
        H5DataSetId dataSetId = H5D.create(fileId, "/csharpExample", typeId, spaceId);

        // Write the integer data to the data set.
        H5D.write(dataSetId, new H5DataTypeId(H5T.H5Type.NATIVE_INT), new H5Array <int>(dset_data));

        // If we were writing a single value it might look like this.
        //  int singleValue = 100;
        //  H5D.writeScalar(dataSetId, new H5DataTypeId(H5T.H5Type.NATIVE_INT),
        //     ref singleValue);

        // Create an integer array to receive the read data.
        int[] readDataBack = new int[DATA_ARRAY_LENGTH];

        // Read the integer data back from the data set
        H5D.read(dataSetId, new H5DataTypeId(H5T.H5Type.NATIVE_INT), new H5Array <int>(readDataBack));

        // Echo the data
        for (int i = 0; i < DATA_ARRAY_LENGTH; i++)
        {
            Console.WriteLine(readDataBack[i]);
        }

        // Close all the open resources.
        H5D.close(dataSetId);

        // Reopen and close the data sets to show that we can.
        // NOTE(review): the second open passes an absolute path through the
        // group id — presumably intentional to show path resolution; confirm.
        dataSetId = H5D.open(fileId, "/csharpExample");
        H5D.close(dataSetId);
        dataSetId = H5D.open(groupId, "/csharpExample");
        H5D.close(dataSetId);
        H5S.close(spaceId);
        H5T.close(typeId);
        H5G.close(groupId);

        //int x = 10;
        //H5T.enumInsert<int>(typeId, "myString", ref x);
        //H5G.close(groupId);

        // Iterate over the members of "/cSharpGroup" with a managed callback;
        // x is opaque user data passed through to myFunction.
        H5GIterateCallback myDelegate;
        myDelegate = myFunction;
        int x = 9;
        int start = 0;
        int index = H5G.iterate(fileId, "/cSharpGroup", myDelegate, x, ref start);

        // Reopen the group id to show that we can.
        groupId = H5G.open(fileId, "/cSharpGroup");
        H5G.close(groupId);
        H5F.close(fileId);

        // Reopen and reclose the file.
        H5FileId openId = H5F.open("myCSharp.h5", H5F.OpenMode.ACC_RDONLY);
        H5F.close(openId);
    }
    // This catches all the HDF exception classes.  Because each call
    // generates unique exception, different exception can be handled
    // separately.  For example, to catch open errors we could have used
    // catch (H5FopenException openException).
    catch (HDFException e)
    {
        Console.WriteLine(e.Message);
    }
    Console.WriteLine("Processing complete!");
    Console.ReadLine();
}
} // test_integer_dtype

// Verifies floating-point datatype queries (bit fields, precision, offset,
// norm, size) against the custom float type "new float type 1" pre-generated
// into gen_types.h5 by a C program. Increments nerrors on each mismatch.
static void test_float_dtype()
{
    try
    {
        Console.Write("Testing getting some floating-point information");

        const string GEN_FILE_NAME = "gen_types.h5";
        const string FLOAT_TYPE_NAME = "new float type 1";

        // Open file pre-generated by a C program.
        H5FileId genfileId = H5F.open(GEN_FILE_NAME, H5F.OpenMode.ACC_RDONLY);

        // Open the datatype to check.
        H5DataTypeId dtypeId = H5T.open(genfileId, FLOAT_TYPE_NAME);

        // Get and check fields.
        H5FloatingBitFields fields = H5T.getFields(dtypeId);
        if (fields.signBitPosition != 44)
        {
            Console.WriteLine("Incorrect sign bit position: {0}, should be {1}", fields.signBitPosition, 44);
            nerrors++;
        }
        if (fields.exponentBitPosition != 34)
        {
            Console.WriteLine("Incorrect exponential bit position: {0}, should be {1}", fields.exponentBitPosition, 34);
            nerrors++;
        }
        if (fields.nExponentBits != 10)
        {
            Console.WriteLine("Incorrect size of exponent: {0}, should be {1}", fields.nExponentBits, 10);
            nerrors++;
        }
        if (fields.mantissaBitPosition != 3)
        {
            Console.WriteLine("Incorrect mantissa bit-position: {0}, should be {1}", fields.mantissaBitPosition, 3);
            nerrors++;
        }
        if (fields.nMantissaBits != 31)
        {
            // BUG FIX: this diagnostic previously printed 44 as the expected
            // value although the check compares against 31.
            Console.WriteLine("Incorrect size of mantissa: {0}, should be {1}", fields.nMantissaBits, 31);
            nerrors++;
        }

        // Check precision.
        int precision = H5T.getPrecision(dtypeId);
        if (precision != 42)
        {
            Console.WriteLine("Incorrect precision: {0}, should be {1}", precision, 42);
            nerrors++;
        }

        // Check offset.
        int offset = H5T.getOffset(dtypeId);
        if (offset != 3)
        {
            Console.WriteLine("Incorrect offset: {0}, should be {1}", offset, 3);
            nerrors++;
        }

        // Check norm.
        H5T.Norm norm = H5T.getNorm(dtypeId);
        if (norm != H5T.Norm.IMPLIED) // need to be determined, not really 3
        {
            Console.WriteLine("Incorrect norm: {0}, should be {1}", norm, H5T.Norm.IMPLIED);
            nerrors++;
        }

        // Check size.
        int size = H5T.getSize(dtypeId);
        if (size != 7)
        {
            Console.WriteLine("Incorrect size: {0}, should be {1}", size, 7);
            nerrors++;
        }

        // Close datatype and file.
        H5T.close(dtypeId);
        H5F.close(genfileId);
        Console.WriteLine("\t\tPASSED");
    } // end of try block
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // end of test_float_dtype
static void Main(string[] args)
{
    // Extended HDF5DotNet demo: dataset I/O, object/link information, link and
    // attribute iteration, variable-length (vlen) data with default and custom
    // memory managers, attribute create/read/delete, and installation of a
    // managed error handler. All HDF5 failures surface as HDFException.
    try
    {
        // We will write and read an int array of this length.
        const int DATA_ARRAY_LENGTH = 12;

        // Rank is the number of dimensions of the data array.
        const int RANK = 1;

        // Create an HDF5 file.
        // The enumeration type H5F.CreateMode provides only the legal
        // creation modes.  Missing H5Fcreate parameters are provided
        // with default values.
        H5FileId fileId = H5F.create("myCSharp.h5", H5F.CreateMode.ACC_TRUNC);

        // Create a HDF5 group.
        H5GroupId groupId = H5G.create(fileId, "/cSharpGroup");
        H5GroupId subGroup = H5G.create(groupId, "mySubGroup");

        // Close the subgroup.
        H5G.close(subGroup);

        // Prepare to create a data space for writing a 1-dimensional
        // signed integer array.
        long[] dims = new long[RANK];
        dims[0] = DATA_ARRAY_LENGTH;

        // Put descending ramp data in an array so that we can
        // write it to the file.
        int[] dset_data = new int[DATA_ARRAY_LENGTH];
        for (int i = 0; i < DATA_ARRAY_LENGTH; i++)
        {
            dset_data[i] = DATA_ARRAY_LENGTH - i;
        }

        // Create a data space to accommodate our 1-dimensional array.
        // The resulting H5DataSpaceId will be used to create the
        // data set.
        H5DataSpaceId spaceId = H5S.create_simple(RANK, dims);

        // Create a copy of a standard data type.  We will use the
        // resulting H5DataTypeId to create the data set.  We could
        // have used the HST.H5Type data directly in the call to
        // H5D.create, but this demonstrates the use of H5T.copy
        // and the use of a H5DataTypeId in H5D.create.
        H5DataTypeId typeId = H5T.copy(H5T.H5Type.NATIVE_INT);

        // Find the size of the type
        int typeSize = H5T.getSize(typeId);
        Console.WriteLine("typeSize is {0}", typeSize);

        // Set the order to big endian
        H5T.setOrder(typeId, H5T.Order.BE);

        // Set the order to little endian
        H5T.setOrder(typeId, H5T.Order.LE);

        // Create the data set.
        H5DataSetId dataSetId = H5D.create(fileId, "/csharpExample", typeId, spaceId);

        // Write the integer data to the data set.
        H5D.write(dataSetId, new H5DataTypeId(H5T.H5Type.NATIVE_INT), new H5Array <int>(dset_data));

        // If we were writing a single value it might look like this.
        //  int singleValue = 100;
        //  H5D.writeScalar(dataSetId,
        //     new H5DataTypeId(H5T.H5Type.NATIVE_INT),
        //     ref singleValue);

        // Create an integer array to receive the read data.
        int[] readDataBack = new int[DATA_ARRAY_LENGTH];

        // Read the integer data back from the data set
        H5D.read(dataSetId, new H5DataTypeId(H5T.H5Type.NATIVE_INT), new H5Array <int>(readDataBack));

        // Echo the data
        for (int i = 0; i < DATA_ARRAY_LENGTH; i++)
        {
            Console.WriteLine(readDataBack[i]);
        }

        // Close all the open resources.
        H5D.close(dataSetId);

        // Reopen and close the data sets to show that we can.
        dataSetId = H5D.open(fileId, "/csharpExample");
        H5D.close(dataSetId);
        dataSetId = H5D.open(groupId, "/csharpExample");
        H5D.close(dataSetId);
        H5T.close(typeId);
        H5G.close(groupId);

        // Get H5O info
        H5ObjectInfo objectInfo = H5O.getInfoByName(fileId, "/csharpExample");
        Console.WriteLine("header.space.message is {0}", objectInfo.header.space.message);
        Console.WriteLine("fileNumber is {0}", objectInfo.fileNumber);
        Console.WriteLine("address is {0}", objectInfo.address);
        Console.WriteLine("type is {0}", objectInfo.objectType.ToString());
        Console.WriteLine("reference count is {0}", objectInfo.referenceCount);
        Console.WriteLine("modification time is {0}", objectInfo.modificationTime);
        Console.WriteLine("birth time is {0}", objectInfo.birthTime);
        Console.WriteLine("access time is {0}", objectInfo.accessTime);
        Console.WriteLine("change time is {0}", objectInfo.changeTime);
        Console.WriteLine("number of attributes is {0}", objectInfo.nAttributes);
        Console.WriteLine("header version is {0}", objectInfo.header.version);
        Console.WriteLine("header nMessages is {0}", objectInfo.header.nMessages);
        Console.WriteLine("header nChunks is {0}", objectInfo.header.nChunks);
        Console.WriteLine("header flags is {0}", objectInfo.header.flags);

        // Demonstrate H5L.getInfo
        H5LinkInfo linkInfo = H5L.getInfo(fileId, "/cSharpGroup");
        Console.WriteLine("address: {0:x}, charSet: {1}, creationOrder: {2}", linkInfo.address, linkInfo.charSet, linkInfo.creationOrder);
        Console.WriteLine("linkType: {0}, softLinkSizeOrUD: {1}", linkInfo.linkType, linkInfo.softLinkSizeOrUD);

        // Reopen the group id to show that we can.
        groupId = H5G.open(fileId, "/cSharpGroup");

        // Use H5L.iterate to visit links
        H5LIterateCallback myDelegate;
        myDelegate = MyH5LFunction;
        ulong linkNumber = 0;
        H5IterationResult result = H5L.iterate(groupId, H5IndexType.NAME, H5IterationOrder.INCREASING, ref linkNumber, myDelegate, 0);

        // Create some attributes
        H5DataTypeId attributeType = H5T.copy(H5T.H5Type.NATIVE_INT);
        long[] attributeDims = new long[1];
        const int RAMP_LENGTH = 5;
        attributeDims[0] = RAMP_LENGTH;
        int[] ascendingRamp = new int[RAMP_LENGTH] { 1, 2, 3, 4, 5 };
        int[] descendingRamp = new int[RAMP_LENGTH] { 5, 4, 3, 2, 1 };
        int[] randomData = new int[RAMP_LENGTH] { 3, 123, 27, 6, 1 };
        int[] readBackRamp = new int[RAMP_LENGTH];

        // Call set buffer using H5Memory
        // Allocate memory from "C" runtime heap (not garbage collected)
        H5Memory typeConversionBuffer = new H5Memory(new IntPtr(DATA_ARRAY_LENGTH));
        H5Memory backgroundBuffer = new H5Memory(new IntPtr(DATA_ARRAY_LENGTH));

        // Set the property list type conversion and background buffers.
        H5PropertyListId myPropertyListId = H5P.create(H5P.PropertyListClass.DATASET_XFER);
        H5P.setBuffer(myPropertyListId, typeConversionBuffer, backgroundBuffer);

        // Test use of vlen

        // Create a vlen data type
        H5DataTypeId tid1 = H5T.vlenCreate(H5T.H5Type.NATIVE_UINT);
        H5DataSetId vDataSetId = H5D.create(fileId, "/vlenTest", tid1, spaceId);

        // Create a jagged array of integers.
        hvl_t[] vlArray = new hvl_t[DATA_ARRAY_LENGTH];

        // HDF5 variable length data types require the use of void
        // pointers.  C# requires that sections of code that deal
        // directly with pointer be marked as unsafe.
        unsafe
        {
            for (int i = 0; i < DATA_ARRAY_LENGTH; i++)
            {
                IntPtr ptr = new IntPtr((i + 1) * sizeof(int));

                // Allocate memory that is not garbage collected.
                vlArray[i].p = H5CrtHeap.Allocate(new IntPtr((i + 1) * sizeof(int))).ToPointer();

                // Fill the array with integers = the row number
                int *intPointer = (int *)vlArray[i].p;
                for (int j = 0; j < i + 1; j++)
                {
                    intPointer[j] = (int)i;
                }
                // hvl_t.len is pointer-sized: ulong on 64-bit, uint on 32-bit.
                if (IntPtr.Size == 8)
                {
                    vlArray[i].len = (ulong)i + 1;
                }
                else
                {
                    vlArray[i].len = (uint)i + 1;
                }
            }

            // Write the variable length data
            H5D.write(vDataSetId, tid1, new H5Array <hvl_t>(vlArray));

            // Create an array to read back the array.
            hvl_t[] vlReadBackArray = new hvl_t[DATA_ARRAY_LENGTH];

            // Read the array back
            H5D.read(vDataSetId, tid1, new H5Array <hvl_t>(vlReadBackArray));

            // Write the data to the console
            for (int i = 0; i < DATA_ARRAY_LENGTH; i++)
            {
                int *iPointer = (int *)vlReadBackArray[i].p;
                for (int j = 0; j < i + 1; j++)
                {
                    Console.WriteLine(iPointer[j]);
                }
            }

            // Reclaim the memory that read allocated
            H5D.vlenReclaim(tid1, spaceId, new H5PropertyListId(H5P.Template.DEFAULT), new H5Array <hvl_t>(vlReadBackArray));

            // Now read it back again using our own memory manager
            //H5AllocateCallback allocDelegate = new H5AllocCallback(userAlloc);
            H5FreeCallback freeDelegate = new H5FreeCallback(userFree);
            H5PropertyListId memManagerPlist = H5P.create(H5P.PropertyListClass.DATASET_XFER);
            unsafe
            {
                H5P.setVlenMemManager(memManagerPlist, userAlloc, IntPtr.Zero, freeDelegate, IntPtr.Zero);
            }

            // Read the array back
            H5D.read(vDataSetId, tid1, new H5DataSpaceId(H5S.H5SType.ALL), new H5DataSpaceId(H5S.H5SType.ALL), memManagerPlist, new H5Array <hvl_t>(vlReadBackArray));

            // Write the data to the console
            for (int i = 0; i < DATA_ARRAY_LENGTH; i++)
            {
                int *iPointer = (int *)vlReadBackArray[i].p;
                for (int j = 0; j < i + 1; j++)
                {
                    Console.WriteLine(iPointer[j]);
                }
            }

            // Reclaim the memory that read allocated using our free routines
            H5D.vlenReclaim(tid1, spaceId, memManagerPlist, new H5Array <hvl_t>(vlReadBackArray));
        }

        H5S.close(spaceId);
        // NOTE(review): tid1 and vDataSetId are not closed anywhere in this
        // demo — possible handle leak; confirm whether intentional.
        H5DataSpaceId attributeSpace = H5S.create_simple(1, attributeDims);
        H5AttributeId attributeId = H5A.create(groupId, "ascendingRamp", attributeType, attributeSpace);
        int offset = H5T.getOffset(attributeType);
        Console.WriteLine("Offset is {0}", offset);
        H5DataTypeId float32BE = H5T.copy(H5T.H5Type.IEEE_F32BE);
        H5T.Norm norm = H5T.getNorm(float32BE);
        Console.WriteLine("Norm is {0}", norm);
        int precision = H5T.getPrecision(float32BE);
        Console.WriteLine("Precision is {0}", precision);
        H5FloatingBitFields bitFields = H5T.getFields(float32BE);
        Console.WriteLine("getFields: sign bit position: {0}", bitFields.signBitPosition);
        Console.WriteLine("getFields: exponent bit position: {0}", bitFields.exponentBitPosition);
        Console.WriteLine("getFields: number of exponent bits: {0}", bitFields.nExponentBits);
        Console.WriteLine("getFields: mantissa bit position: {0} ", bitFields.mantissaBitPosition);
        Console.WriteLine("getFields: number of mantissa bits: {0}", bitFields.nMantissaBits);
        Console.Write("{0}", bitFields);

        // Write to an attribute
        H5A.write <int>(attributeId, attributeType, new H5Array <int>(ascendingRamp));

        // Read from an attribute
        H5A.read <int>(attributeId, attributeType, new H5Array <int>(readBackRamp));

        // Echo results
        Console.WriteLine("ramp elements are: ");
        foreach (int rampElement in readBackRamp)
        {
            Console.WriteLine(" {0}", rampElement);
        }
        H5A.close(attributeId);

        // Create and write two more attributes.
        attributeId = H5A.createByName(groupId, ".", "descendingRamp", attributeType, attributeSpace);
        H5A.write <int>(attributeId, attributeType, new H5Array <int>(descendingRamp));
        H5A.close(attributeId);
        attributeId = H5A.createByName(groupId, ".", "randomData", attributeType, attributeSpace);
        H5A.write <int>(attributeId, attributeType, new H5Array <int>(randomData));

        // Read back the attribute data
        H5A.read <int>(attributeId, attributeType, new H5Array <int>(readBackRamp));
        Console.WriteLine("ramp elements are: ");
        foreach (int rampElement in readBackRamp)
        {
            Console.WriteLine(" {0}", rampElement);
        }
        H5A.close(attributeId);

        // Iterate through the attributes.
        long position = 0;
        H5AIterateCallback attributeDelegate;
        attributeDelegate = MyH5AFunction;
        H5ObjectInfo groupInfo = H5O.getInfo(groupId);
        Console.WriteLine("fileNumber: {0}, total space: {1}, referceCount: {2}, modification time: {3}", groupInfo.fileNumber, groupInfo.header.space.total, groupInfo.referenceCount, groupInfo.modificationTime);

        // While iterating, collect the names of all the attributes.
        ArrayList attributeNames = new ArrayList();
        H5A.iterate(groupId, H5IndexType.CRT_ORDER, H5IterationOrder.INCREASING, ref position, attributeDelegate, (object)attributeNames);

        // Write out the names of the attributes
        foreach (string attributeName in attributeNames)
        {
            Console.WriteLine("attribute name is {0}", attributeName);
        }

        // Demonstrate H5A.openName
        attributeId = H5A.openName(groupId, "descendingRamp");
        Console.WriteLine("got {0} by name", H5A.getName(attributeId));
        H5A.close(attributeId);

        // Demonstrate H5A.getNameByIndex
        string secondAttribute = H5A.getNameByIndex(groupId, ".", H5IndexType.CRT_ORDER, H5IterationOrder.INCREASING, 1);
        Console.WriteLine("second attribute is named {0}", secondAttribute);

        // Demonstrate H5G.getInfo
        H5GInfo gInfo = H5G.getInfo(groupId);
        Console.WriteLine("link storage: {0}, max creation order: {1}, nLinks: {2}", gInfo.linkStorageType, gInfo.maxCreationOrder, gInfo.nLinks);

        // Demonstrate H5A.getSpace
        //attributeId = H5A.openByName(groupId, ".", "descendingRamp");
        attributeId = H5A.open(groupId, "descendingRamp");
        H5DataSpaceId rampSpaceId = H5A.getSpace(attributeId);
        H5S.close(rampSpaceId);

        // Demonstrate H5A.getType
        H5DataTypeId rampTypeId = H5A.getType(attributeId);
        Console.WriteLine("size of ramp data type is {0} bytes.", H5T.getSize(rampTypeId));
        H5T.close(rampTypeId);

        // Demonstrate H5A.getInfo
        H5AttributeInfo rampInfo = H5A.getInfo(attributeId);
        Console.WriteLine("characterset: {0}, creationOrder: {1}, creationOrderValid: {2}, dataSize: {3}", rampInfo.characterSet, rampInfo.creationOrder, rampInfo.creationOrderValid, rampInfo.dataSize);

        // Demonstrate H5A.Delete
        // NOTE(review): attributeId is not closed before the delete —
        // possible handle leak; confirm.
        H5A.Delete(groupId, "descendingRamp");
        //H5A.DeleteByName(groupId, ".", "descendingRamp");

        // Iterate through the attributes to show that the deletion
        // was successful.
        position = 0;
        ArrayList namesAfterDeletion = new ArrayList();
        H5A.iterate(groupId, H5IndexType.CRT_ORDER, H5IterationOrder.DECREASING, ref position, attributeDelegate, (object)namesAfterDeletion);
        H5G.close(groupId);
        H5F.close(fileId);

        // Reopen and reclose the file.
        H5FileId openId = H5F.open("myCSharp.h5", H5F.OpenMode.ACC_RDONLY);
        H5F.close(openId);

        // Set the function to be called on error.
        unsafe
        {
            H5AutoCallback myErrorDelegate = new H5AutoCallback(myErrorFunction);
            H5E.setAuto(0, myErrorDelegate, IntPtr.Zero);
        }

        // Uncomment the next line if you want to generate an error to
        // test H5E.setAuto
        // H5G.open(openId, "noGroup");
    }
    // This catches all the HDF exception classes.  Because each call
    // generates a unique exception, different exception can be handled
    // separately.  For example, to catch open errors we could have used
    // catch (H5FopenException openException).
    catch (HDFException e)
    {
        Console.WriteLine(e.Message);
    }
    Console.WriteLine("Processing complete!");
    Console.ReadLine();
}
} // test_attr_compound_write

// Reads back the compound attribute written by test_attr_compound_write and
// verifies its dataspace, member layout (names, classes, indexes, offsets,
// sizes), and data values. Increments nerrors on each mismatch.
static void test_attr_compound_read()
{
    try
    {
        Console.Write("Testing read attribute with compound datatype");

        // Open file.
        H5FileId fileId = H5F.open(COMP_FNAME, H5F.OpenMode.ACC_RDWR);

        // Open the dataset.
        H5DataSetId dsetId = H5D.open(fileId, DSET1_NAME);

        // Verify the correct number of attributes for this dataset.
        H5ObjectInfo oinfo = H5O.getInfo(dsetId);
        if (oinfo.nAttributes != 1)
        {
            Console.WriteLine("\ntest_attr_basic_read: incorrect number of attributes: read {0} - should be {1}", oinfo.nAttributes, 1);
            nerrors++;
        }

        // Open first attribute for the dataset.
        H5AttributeId attrId = H5A.openByIndex(dsetId, ".", H5IndexType.CRT_ORDER, H5IterationOrder.INCREASING, 0);

        // Verify dataspace.
        H5DataSpaceId spaceId = H5A.getSpace(attrId);
        int rank = H5S.getSimpleExtentNDims(spaceId);
        if (rank != ATTR4_RANK)
        {
            Console.WriteLine("\ntest_attr_compound_read: incorrect rank = {0} - should be {1}", rank, ATTR4_RANK);
            nerrors++;
        }
        long[] dims = H5S.getSimpleExtentDims(spaceId);
        if (dims[0] != ATTR4_DIM1)
        {
            Console.WriteLine("\ntest_attr_compound_read: incorrect dim[0] = {0} - should be {1}", dims[0], ATTR4_DIM1);
            nerrors++;
        }
        if (dims[1] != ATTR4_DIM2)
        {
            Console.WriteLine("\ntest_attr_compound_read: incorrect dim[1] = {0} - should be {1}", dims[1], ATTR4_DIM2);
            nerrors++;
        }

        // Close dataspace.
        H5S.close(spaceId);

        // Verify datatype of the attribute.
        H5DataTypeId typeId = H5A.getType(attrId);
        H5T.H5TClass t_class = H5T.getClass(typeId);
        if (t_class != H5T.H5TClass.COMPOUND)
        {
            // BUG FIX: this diagnostic was copy-pasted from test_compound_dtypes
            // and described a member-class mismatch; report the real condition.
            Console.WriteLine("test_attr_compound_read: incorrect datatype class, {0} - should be {1}", t_class, H5T.H5TClass.COMPOUND);
            nerrors++;
        }
        int nfields = H5T.getNMembers(typeId);
        if (nfields != 3)
        {
            // BUG FIX: same copy-paste problem — report the member count.
            Console.WriteLine("test_attr_compound_read: incorrect number of members: read {0} - should be {1}", nfields, 3);
            nerrors++;
        }

        // Check name against this list
        string[] memb_names = { ATTR4_FIELDNAME1, ATTR4_FIELDNAME2, ATTR4_FIELDNAME3 };
        int[] memb_offsets = { 0, 1, 5 }; // list of member offsets

        H5DataTypeId mtypeId;    // member type
        H5T.H5TClass memb_cls1;  // member classes retrieved different ways
        string memb_name;        // member name
        int memb_idx;            // member index
        int memb_offset, idx;    // member offset, loop index

        // For each member, check its name, class, index, and size.
        for (idx = 0; idx < nfields; idx++)
        {
            // Get the type of the ith member to test other functions later.
            mtypeId = H5T.getMemberType(typeId, idx);

            // Get the name of the ith member.
            memb_name = H5T.getMemberName(typeId, idx);
            if (memb_name != memb_names[idx])
            {
                Console.WriteLine("test_attr_compound_read: incorrect member name, {0}, for member no {1}", memb_name, idx);
                nerrors++;
            }

            // Get the class of the ith member and then verify the class.
            memb_cls1 = H5T.getMemberClass(typeId, idx);
            if (memb_cls1 != H5T.H5TClass.INTEGER)
            {
                Console.WriteLine("test_attr_compound_read: incorrect class, {0}, for member no {1}", memb_cls1, idx);
                nerrors++;
            }

            // Get member's index back using its name and verify it.
            memb_idx = H5T.getMemberIndex(typeId, memb_name);
            if (memb_idx != idx)
            {
                Console.WriteLine("test_attr_compound_read: H5T.getMemberName and/or H5T.getMemberIndex returned false values.");
                nerrors++;
            }

            // Get member's offset and verify it.
            memb_offset = H5T.getMemberOffset(typeId, idx);
            if (memb_offset != memb_offsets[idx])
            {
                Console.WriteLine("test_attr_compound_read: H5T.getMemberOffset returned incorrect value - {0}, should be {1}", memb_offset, memb_offsets[idx]);
                nerrors++;
            }

            // Get member's size and verify it against the expected field type.
            int tsize = H5T.getSize(mtypeId);
            switch (idx)
            {
                case 0:
                    if (tsize != H5T.getSize(H5T.H5Type.STD_U8LE))
                    {
                        Console.WriteLine("test_attr_compound_read: H5T.getSize returned incorrect value.");
                        nerrors++;
                    }
                    break;
                case 1:
                    if (tsize != H5T.getSize(H5T.H5Type.NATIVE_INT))
                    {
                        Console.WriteLine("test_attr_compound_read: H5T.getSize returned incorrect value.");
                        nerrors++;
                    }
                    break;
                case 2:
                    if (tsize != H5T.getSize(H5T.H5Type.STD_I64BE))
                    {
                        Console.WriteLine("test_attr_compound_read: H5T.getSize returned incorrect value.");
                        nerrors++;
                    }
                    break;
                default:
                    Console.WriteLine("test_attr_compound_read: We should only have 3 members.");
                    nerrors++;
                    break;
            } // end switch

            // Close current member type.
            H5T.close(mtypeId);
        } // end for

        // Prepare the check array to verify read data. It should be the same as the attr_data4 array
        // in the previous test function test_attr_compound_write.
        attr4_struct[,] check = new attr4_struct[ATTR4_DIM1, ATTR4_DIM2];

        // Initialize the dataset
        int ii, jj, nn;
        for (ii = nn = 0; ii < ATTR4_DIM1; ii++)
        {
            for (jj = 0; jj < ATTR4_DIM2; jj++)
            {
                check[ii, jj].c = 't';
                check[ii, jj].i = nn++;
                check[ii, jj].l = (ii * 10 + jj * 100) * nn;
            }
        }

        // Read attribute information.
        attr4_struct[,] read_data4 = new attr4_struct[ATTR4_DIM1, ATTR4_DIM2];
        H5A.read(attrId, typeId, new H5Array <attr4_struct>(read_data4));

        // Verify values read in.
        for (ii = 0; ii < ATTR4_DIM1; ii++)
        {
            for (jj = 0; jj < ATTR4_DIM2; jj++)
            {
                if ((check[ii, jj].c != read_data4[ii, jj].c) || (check[ii, jj].i != read_data4[ii, jj].i) || (check[ii, jj].l != read_data4[ii, jj].l))
                {
                    Console.WriteLine("test_attr_compound_read: Incorrect read data: {0}, should be {1}", read_data4[ii, jj], check[ii, jj]);
                    nerrors++;
                }
            }
        }

        // Close resources.
        H5T.close(typeId);
        H5A.close(attrId);
        H5D.close(dsetId);
        H5F.close(fileId);
        Console.WriteLine("\t\tPASSED");
    }
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_attr_compound_read
/// <summary>
/// Reads the value of the attribute with the given name from the given object
/// and returns it rendered as a string. HDF5 exceptions are not handled here
/// and propagate to the caller; open handles are released even when one is thrown.
/// </summary>
/// <param name="obj">File, group or dataset that carries the attribute.</param>
/// <param name="attributeName">Name of the attribute to read.</param>
/// <returns>The attribute value as a string (via arrayToString), or null if the attribute cannot be opened.</returns>
private String getAttributeValue(H5ObjectWithAttributes obj, String attributeName)
{
    H5AttributeId attId = H5A.open(obj, attributeName);
    if (attId == null)
    {
        return (null);
    }

    H5DataTypeId typeId = null;       // type as stored in the file
    H5DataTypeId nativeTypeId = null; // platform-native equivalent, used for reading
    H5DataSpaceId spaceId = null;
    object attributeVal = null;
    try
    {
        H5AttributeInfo attInfo = H5A.getInfo(attId);
        typeId = H5A.getType(attId);
        spaceId = H5A.getSpace(attId);
        int dataSize = H5T.getSize(typeId);

        // FIX: the original overwrote typeId with the native type, leaking the
        // file-type handle (and opened a redundant second handle via a duplicate
        // H5A.getType call). Keep both handles so each can be closed below.
        nativeTypeId = H5T.getNativeType(typeId, H5T.Direction.DEFAULT);
        H5T.H5TClass typeClass = H5T.getClass(nativeTypeId);

        long[] dims = H5S.getSimpleExtentDims(spaceId);
        if (dims.Length == 0)
        {
            // Scalar dataspace: treat it as a single element.
            dims = new long[1];
            dims[0] = 1;
        }

        switch (typeClass)
        {
        case H5T.H5TClass.STRING:
            // Read the raw bytes of the string attribute and decode as ASCII.
            long size = attInfo.dataSize;
            byte[] chars = readAttribute <byte>(size, attId, nativeTypeId);
            attributeVal = Encoding.ASCII.GetString(chars);
            break;

        case H5T.H5TClass.INTEGER:
            // Pick the CLR integer type matching the element size and sign.
            H5T.Sign sign = H5T.getSign(nativeTypeId);
            switch (dataSize)
            {
            case 1:
                attributeVal = readAttribute <byte>(dims[0], attId, nativeTypeId);
                break;
            case 2:
                switch (sign)
                {
                case H5T.Sign.TWOS_COMPLEMENT:
                    attributeVal = readAttribute <Int16>(dims[0], attId, nativeTypeId);
                    break;
                case H5T.Sign.UNSIGNED:
                    attributeVal = readAttribute <UInt16>(dims[0], attId, nativeTypeId);
                    break;
                }
                break;
            case 4:
                switch (sign)
                {
                case H5T.Sign.TWOS_COMPLEMENT:
                    attributeVal = readAttribute <Int32>(dims[0], attId, nativeTypeId);
                    break;
                case H5T.Sign.UNSIGNED:
                    attributeVal = readAttribute <UInt32>(dims[0], attId, nativeTypeId);
                    break;
                }
                break;
            case 8:
                switch (sign)
                {
                case H5T.Sign.TWOS_COMPLEMENT:
                    attributeVal = readAttribute <Int64>(dims[0], attId, nativeTypeId);
                    break;
                case H5T.Sign.UNSIGNED:
                    attributeVal = readAttribute <UInt64>(dims[0], attId, nativeTypeId);
                    break;
                }
                break;
            }
            break;

        case H5T.H5TClass.FLOAT:
            switch (dataSize)
            {
            case 4:
                attributeVal = readAttribute <float>(dims[0], attId, nativeTypeId);
                break;
            case 8:
                attributeVal = readAttribute <double>(dims[0], attId, nativeTypeId);
                break;
            }
            break;
        }
    }
    finally
    {
        // FIX: the original closed nothing when an exception escaped the reads.
        if (spaceId != null)
        {
            H5S.close(spaceId);
        }
        if (nativeTypeId != null)
        {
            H5T.close(nativeTypeId);
        }
        if (typeId != null)
        {
            H5T.close(typeId);
        }
        H5A.close(attId);
    }
    return (arrayToString(attributeVal));
}
} // test_vlen_dtype

/// <summary>
/// Exercises H5T.copy: duplicating a predefined type, duplicating the duplicate,
/// and copying a datatype straight from a dataset; any mismatch bumps nerrors.
/// </summary>
static void test_copy()
{
    try
    {
        Console.Write("Testing copying datatypes");

        // Duplicate a predefined type, then duplicate the duplicate.
        H5DataTypeId nativeIntCopy = H5T.copy(H5T.H5Type.NATIVE_INT);
        int nativeIntSize = H5T.getSize(nativeIntCopy);
        H5DataTypeId secondCopy = H5T.copy(nativeIntCopy);
        int secondCopySize = H5T.getSize(secondCopy);

        // Both generations of the copy must report the same size.
        if (nativeIntSize != secondCopySize)
        {
            Console.WriteLine("test_copy: copy types incorrectly");
            nerrors++;
        }

        // Done with the second-generation copy.
        H5T.close(secondCopy);

        /*
         * Test copy a datatype from a dataset.
         */
        // Create a fresh file holding a 2-D integer dataset.
        H5FileId fileId = H5F.create("sometypes.h5", H5F.CreateMode.ACC_TRUNC);
        hssize_t[] extents = { DIM0, DIM1 };
        H5DataSpaceId spaceId = H5S.create_simple(2, extents);
        H5DataSetId dsetId = H5D.create(fileId, "test_types", nativeIntCopy, spaceId);

        // Obtain the datatype by copying it from the dataset itself.
        H5DataTypeId fromDataset = H5T.copy(dsetId);

        // The copied type must be an integer of the expected size and sign.
        H5T.H5TClass copiedClass = H5T.getClass(fromDataset);
        if (copiedClass != H5T.H5TClass.INTEGER)
        {
            Console.WriteLine("test_copy: copy of dataset's datatype has incorrect class");
            nerrors++;
        }
        int copiedSize = H5T.getSize(fromDataset);
        if (copiedSize != nativeIntSize)
        {
            Console.WriteLine("test_copy: copy of dataset's datatype has incorrect size");
            nerrors++;
        }
        H5T.Sign copiedSign = H5T.getSign(fromDataset);
        if (copiedSign != H5T.Sign.TWOS_COMPLEMENT)
        {
            Console.WriteLine("test_copy: copy of dataset's datatype has incorrect sign, {0}", copiedSign);
            nerrors++;
        }

        // Release everything that is still open.
        H5T.close(nativeIntCopy);
        H5S.close(spaceId);
        H5T.close(fromDataset);
        H5D.close(dsetId);
        H5F.close(fileId);

        Console.WriteLine("\t\t\t\tPASSED");
    }
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_copy
/// <summary>
/// Opens an HDF5 file read-only, dumps the rank and dimensions of
/// "/group/dataset", prints its "int" and "string" attributes, and — when the
/// dataset is rank 2 — prints the integer data itself. All handles are closed.
/// </summary>
/// <param name="filePath">Path of the HDF5 file to read.</param>
private static void ReadFile(string filePath)
{
    var file = H5F.open(filePath, H5F.OpenMode.ACC_RDONLY);
    var dataSet = H5D.open(file, "/group/dataset");

    var fileSpace = H5D.getSpace(dataSet);
    var rank = H5S.getSimpleExtentNDims(fileSpace);
    WriteLine("Rank: {0}", rank);
    var dims = H5S.getSimpleExtentDims(fileSpace);
    Write("Dims:");
    foreach (var d in dims)
    {
        Write(" {0}", d);
    }
    WriteLine();
    H5S.close(fileSpace);

    // Scalar integer attribute.
    var ints = new int[1];
    var intAttribute = H5A.openName(dataSet, "int");
    // FIX: the original passed H5A.getType(...) inline and leaked the handle.
    var intType = H5A.getType(intAttribute);
    H5A.read(intAttribute, intType, new H5Array <int>(ints));
    WriteLine("int: {0}", ints[0]);
    H5T.close(intType);
    H5A.close(intAttribute);

    // Fixed-length string attribute, decoded as ASCII.
    var stringAttribute = H5A.openName(dataSet, "string");
    var stringType = H5A.getType(stringAttribute);
    var stringSize = H5T.getSize(stringType);
    WriteLine("string length: {0}", stringSize);
    var buffer = new byte[stringSize];
    H5A.read(stringAttribute, stringType, new H5Array <byte>(buffer));
    WriteLine("string: {0}", Encoding.ASCII.GetString(buffer));
    H5T.close(stringType);
    H5A.close(stringAttribute);

    if (rank == 2)
    {
        // Read and print the full 2-D integer dataset.
        var data = new int[dims[0], dims[1]];
        // FIX: the original passed H5D.getType(...) inline and leaked the handle.
        var dataType = H5D.getType(dataSet);
        H5D.read(dataSet, dataType, new H5Array <int>(data));
        H5T.close(dataType);
        for (int i = 0; i < data.GetLength(0); ++i)
        {
            for (int j = 0; j < data.GetLength(1); ++j)
            {
                Write(" {0}", data[i, j]);
            }
            WriteLine();
        }
    }

    H5D.close(dataSet);
    H5F.close(file);
}
/// <summary>
/// Loads the dataset identified by Id into rows: each first-axis index becomes
/// a Row, and (for rank-2 data) each second-axis index becomes a column keyed
/// "0", "1", ... Elements are read as raw bytes and decoded by a parser chosen
/// from the dataset's HDF5 type class.
/// </summary>
/// <exception cref="ArgumentException">
/// Thrown when the dataset has more than 2 dimensions or an unsupported type class.
/// </exception>
protected override void LoadData()
{
    // Determine type and size of data.
    var dtype = H5D.getType(Id);
    try
    {
        var size = H5T.getSize(dtype);

        // FIX: the original never closed the dataspace (or the datatype below),
        // leaking handles — including on the ArgumentException paths.
        var space = H5D.getSpace(Id);
        long[] dims;
        try
        {
            dims = H5S.getSimpleExtentDims(space);
        }
        finally
        {
            H5S.close(space);
        }

        var num_dimensions = dims.Length;
        if (num_dimensions != 1 && num_dimensions != 2)
        {
            throw new ArgumentException("Scalar dataset has more than 2 dimensions! Cannot be handled.", "datasetname");
        }

        var member_type = H5T.getClass(dtype).ToString();
        var member_size = size;

        // Setup parser matching the element type class.
        Attribute parser = null;
        switch (member_type)
        {
        case "STRING":
            parser = new StringAttribute("data", member_size);
            break;
        case "INTEGER":
            parser = new IntegerAttribute("data", member_size);
            break;
        case "FLOAT":
            parser = new FloatingPointAttribute("data", member_size);
            break;
        default:
            throw new ArgumentException("Unsupported member type " + member_type, "member_type");
        }

        if (num_dimensions == 1)
        {
            // One value per row: read raw bytes and parse each element.
            var _row_data = new byte[dims[0], size];
            H5D.read(Id, dtype, new H5Array <byte>(_row_data));

            // Parse rows.
            for (int i = 0; i < dims[0]; i++)
            {
                var dat = new Dictionary <string, object>();
                dat["0"] = parser.Parse(_row_data.Field(i));
                AddRow(new Row(dat));
            }
        }
        else if (num_dimensions == 2)
        {
            // dims[1] values per row, one column per second-axis index.
            var _row_data = new byte[dims[0], dims[1], size];
            H5D.read(Id, dtype, new H5Array <byte>(_row_data));

            // Parse rows.
            for (int i = 0; i < dims[0]; i++)
            {
                var dat = new Dictionary <string, object>();
                for (int j = 0; j < dims[1]; j++)
                {
                    dat["" + j] = parser.Parse(_row_data.Field(i, j));
                }
                AddRow(new Row(dat));
            }
        }
    }
    finally
    {
        H5T.close(dtype);
    }
}