public static IEnumerable<OffsetInfo> GetCompoundInfo(Type type, bool ieee = false)
{
    //Type t = typeof(T);
    var strtype = H5T.copy(H5T.C_S1);
    int strsize = (int)H5T.get_size(strtype);
    int curSize = 0;
    List<OffsetInfo> offsets = new List<OffsetInfo>();
    foreach (var x in type.GetFields())
    {
        OffsetInfo oi = new OffsetInfo()
        {
            name = x.Name,
            type = x.FieldType,
            datatype = ieee ? GetDatatypeIEEE(x.FieldType) : GetDatatype(x.FieldType),
            size = x.FieldType == typeof(string) ? stringLength(x) : Marshal.SizeOf(x.FieldType),
            offset = 0 + curSize
        };
        if (oi.datatype == H5T.C_S1)
        {
            strtype = H5T.copy(H5T.C_S1);
            H5T.set_size(strtype, new IntPtr(oi.size));
            oi.datatype = strtype;
        }
        if (oi.datatype == H5T.STD_I64BE)
        {
            oi.size = oi.size * 2;
        }
        curSize = curSize + oi.size;
        offsets.Add(oi);
    }

    H5T.close(strtype);
    return offsets;
}
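A possible follow-up (not part of the original helper): the OffsetInfo list returned by GetCompoundInfo can be turned into an HDF5 compound type with H5T.create/H5T.insert. This is a minimal sketch that assumes the total compound size is the sum of the member sizes computed above; the caller owns the returned type id.

static long CreateCompoundType(Type type, bool ieee = false)
{
    var infos = GetCompoundInfo(type, ieee).ToList();
    int totalSize = infos.Sum(oi => oi.size);

    long compoundId = H5T.create(H5T.class_t.COMPOUND, new IntPtr(totalSize));
    foreach (var oi in infos)
    {
        // register each member at the offset computed by GetCompoundInfo
        H5T.insert(compoundId, oi.name, new IntPtr(oi.offset), oi.datatype);
    }
    return compoundId; // close with H5T.close when done
}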
public static int WriteDatasetFromArray<T>(hid_t groupId, string name, Array dset, string datasetName = null) //where T : struct
{
    int rank = dset.Rank;
    ulong[] dims = Enumerable.Range(0, rank).Select(i => (ulong)dset.GetLength(i)).ToArray();

    ulong[] maxDims = null;
    var spaceId = H5S.create_simple(rank, dims, maxDims);
    var datatype = GetDatatype(typeof(T));
    var typeId = H5T.copy(datatype);
    if (datatype == H5T.C_S1)
    {
        // size the copied type, not the predefined H5T.C_S1 constant
        H5T.set_size(typeId, new IntPtr(2));
    }
    var datasetId = H5D.create(groupId, name, typeId, spaceId);

    GCHandle hnd = GCHandle.Alloc(dset, GCHandleType.Pinned);
    var result = H5D.write(datasetId, typeId, H5S.ALL, H5S.ALL, H5P.DEFAULT,
        hnd.AddrOfPinnedObject());
    hnd.Free();

    H5D.close(datasetId);
    H5S.close(spaceId);
    H5T.close(typeId);
    return result;
}
public static unsafe void AddDataWithSharedDataType(long fileId, ContainerType container)
{
    long typeId = H5T.copy(H5T.C_S1);

    H5T.set_size(typeId, H5T.VARIABLE);
    H5T.set_cset(typeId, H5T.cset_t.UTF8);
    H5T.commit(fileId, "string_t", typeId);

    var data = new string[] { "001", "11", "22", "33", "44", "55", "66", "77", "  ", "AA", "ZZ", "!!" };
    var dataChar = data
        .SelectMany(value => Encoding.ASCII.GetBytes(value + '\0'))
        .ToArray();

    fixed (byte* dataVarPtr = dataChar)
    {
        var basePtr = new IntPtr(dataVarPtr);

        var addresses = new IntPtr[]
        {
            IntPtr.Add(basePtr, 0), IntPtr.Add(basePtr, 4), IntPtr.Add(basePtr, 7), IntPtr.Add(basePtr, 10),
            IntPtr.Add(basePtr, 13), IntPtr.Add(basePtr, 16), IntPtr.Add(basePtr, 19), IntPtr.Add(basePtr, 22),
            IntPtr.Add(basePtr, 25), IntPtr.Add(basePtr, 28), IntPtr.Add(basePtr, 31), IntPtr.Add(basePtr, 34)
        };

        fixed (void* dataVarAddressesPtr = addresses)
        {
            TestUtils.Add(container, fileId, "shared_data_type", "shared_data_type", typeId, dataVarAddressesPtr, length: 12);
        }
    }

    if (H5I.is_valid(typeId) > 0)
    {
        H5T.close(typeId);
    }
}
public static double[,,] ReadFieldData3D(string fileName)
{
    H5FileId fileId = H5F.open(fileName, H5F.OpenMode.ACC_RDONLY);
    H5DataSetId fDataSetId = H5D.open(fileId, "/FieldData/FD/f0");
    H5DataTypeId fDataTypeId = H5D.getType(fDataSetId);

    if (!H5T.equal(fDataTypeId, H5T.copy(H5T.H5Type.NATIVE_FLOAT)))
    {
        Console.WriteLine("Error: Invalid dataset type, expected {0}", H5T.H5Type.NATIVE_FLOAT);
    }

    long[] dims = H5S.getSimpleExtentDims(H5D.getSpace(fDataSetId)).ToArray();
    if (dims.Length != 3)
    {
        Console.WriteLine("Error: Invalid field data dimensions");
    }

    float[,,] data = new float[dims[0], dims[1], dims[2]];
    H5D.read(fDataSetId, fDataTypeId, new H5Array<float>(data));

    // Reorder
    double[,,] fieldValues = new double[dims[2], dims[1], dims[0]];
    for (int i = 0; i < dims[0]; i++)
    {
        for (int j = 0; j < dims[1]; j++)
        {
            for (int k = 0; k < dims[2]; k++)
            {
                fieldValues[k, j, i] = data[i, j, k];
            }
        }
    }

    return fieldValues;
}
public static double[][] ReadMesh(string fileName)
{
    double[][] meshes = new double[3][];
    string[] meshNames = { "x", "y", "z" };

    H5FileId fileId = H5F.open(fileName, H5F.OpenMode.ACC_RDONLY);

    for (int i = 0; i < meshNames.Length; i++)
    {
        H5DataSetId dsId = H5D.open(fileId, "/Mesh/" + meshNames[i]);
        H5DataTypeId dtId = H5D.getType(dsId);

        if (!H5T.equal(dtId, H5T.copy(H5T.H5Type.NATIVE_FLOAT)))
        {
            Console.WriteLine("Error: Invalid dataset type, expected {0}", H5T.H5Type.NATIVE_FLOAT);
        }

        float[] mesh = new float[H5D.getStorageSize(dsId) / H5T.getSize(dtId)];
        H5D.read(dsId, dtId, new H5Array<float>(mesh));
        meshes[i] = mesh.Select(x => (double)x * 1000.0).ToArray(); // m -> mm

        H5D.close(dsId);
        H5T.close(dtId);
    }

    H5F.close(fileId);
    return meshes;
}
public static int WritePrimitiveAttribute<T>(hid_t groupId, string name, Array attributes, string datasetName = null) //where T : struct
{
    var tmpId = groupId;
    if (!string.IsNullOrWhiteSpace(datasetName))
    {
        var datasetId = H5D.open(groupId, datasetName);
        if (datasetId > 0)
        {
            groupId = datasetId;
        }
    }

    int rank = attributes.Rank;
    ulong[] dims = Enumerable.Range(0, rank).Select(i => (ulong)attributes.GetLength(i)).ToArray();
    ulong[] maxDims = null;
    var spaceId = H5S.create_simple(rank, dims, maxDims);
    var datatype = GetDatatype(typeof(T));
    var typeId = H5T.copy(datatype);
    var attributeId = H5A.create(groupId, name, datatype, spaceId);

    GCHandle hnd = GCHandle.Alloc(attributes, GCHandleType.Pinned);
    var result = H5A.write(attributeId, datatype, hnd.AddrOfPinnedObject());
    hnd.Free();

    H5A.close(attributeId);
    H5S.close(spaceId);
    H5T.close(typeId);

    if (tmpId != groupId)
    {
        H5D.close(groupId);
    }
    return result;
}
public static double ReadAttribute(string file, string dataSetOrGroup, string attribute)
{
    double attr = Double.NaN;
    try
    {
        H5FileId fileId = H5F.open(file, H5F.OpenMode.ACC_RDONLY);
        H5ObjectInfo objectInfo = H5O.getInfoByName(fileId, dataSetOrGroup);
        H5GroupId groupId = null;
        H5DataSetId dataSetId = null;
        H5AttributeId attrId;

        if (objectInfo.objectType == H5ObjectType.GROUP)
        {
            groupId = H5G.open(fileId, dataSetOrGroup);
            attrId = H5A.open(groupId, attribute);
        }
        else
        {
            dataSetId = H5D.open(fileId, dataSetOrGroup);
            attrId = H5A.open(dataSetId, attribute);
        }

        H5DataTypeId attrTypeId = H5A.getType(attrId);
        double[] dAttrs = new double[] { };

        if (H5T.equal(attrTypeId, H5T.copy(H5T.H5Type.NATIVE_FLOAT)))
        {
            float[] fAttrs = new float[H5S.getSimpleExtentNPoints(H5A.getSpace(attrId))];
            H5A.read(attrId, attrTypeId, new H5Array<float>(fAttrs));
            dAttrs = (from f in fAttrs select (double)f).ToArray();
        }
        else if (H5T.equal(attrTypeId, H5T.copy(H5T.H5Type.NATIVE_DOUBLE)))
        {
            dAttrs = new double[H5S.getSimpleExtentNPoints(H5A.getSpace(attrId))];
            H5A.read(attrId, attrTypeId, new H5Array<double>(dAttrs));
        }

        H5T.close(attrTypeId);
        H5A.close(attrId);
        if (groupId != null)
        {
            H5G.close(groupId);
        }
        if (dataSetId != null)
        {
            H5D.close(dataSetId);
        }
        H5F.close(fileId);

        return (double)dAttrs[0];
    }
    catch (HDFException e)
    {
        Console.WriteLine("Error: Unhandled HDF5 exception");
        Console.WriteLine(e.Message);
    }

    return attr;
}
public void H5DreadTest1()
{
    byte[] rdata = new byte[512];

    hid_t mem_type = H5T.copy(H5T.C_S1);
    Assert.IsTrue(H5T.set_size(mem_type, new IntPtr(2)) >= 0);

    GCHandle hnd = GCHandle.Alloc(rdata, GCHandleType.Pinned);
    Assert.IsTrue(H5D.read(m_v0_ascii_dset, mem_type, H5S.ALL, H5S.ALL, H5P.DEFAULT,
        hnd.AddrOfPinnedObject()) >= 0);

    for (int i = 0; i < 256; ++i)
    {
        // H5T.FORTRAN_S1 is space (= ASCII dec. 32) padded
        if (i != 32)
        {
            Assert.IsTrue(rdata[2 * i] == (byte)i);
        }
        else
        {
            Assert.IsTrue(rdata[64] == (byte)0);
        }
        Assert.IsTrue(rdata[2 * i + 1] == (byte)0);
    }

    hnd.Free();

    Assert.IsTrue(H5T.close(mem_type) >= 0);
}
public static void Write1DArray<T>(Hdf5Dataset _dataset, T[] _array)
{
    if (_dataset.Dataspace.NumberOfDimensions != 1)
    {
        throw new Hdf5ArrayDimensionsMismatchException();
    }

    if ((ulong)_array.Length != _dataset.Dataspace.DimensionProperties[0].CurrentSize)
    {
        throw new Hdf5ArraySizeMismatchException();
    }

    var datasetId = H5O.open(_dataset.FileId.Value, _dataset.Path.FullPath).ToId();

    GCHandle arrayHandle = GCHandle.Alloc(_array, GCHandleType.Pinned);

    var typeId = H5T.copy(_dataset.DataType.NativeType.Value).ToId();

    int result = H5D.write(
        datasetId.Value,
        typeId.Value,
        H5S.ALL,
        H5S.ALL,
        H5P.DEFAULT,
        arrayHandle.AddrOfPinnedObject());

    arrayHandle.Free();

    H5T.close(typeId.Value);
    H5O.close(datasetId.Value);

    FileHelper.FlushToFile(_dataset.FileId);
}
private bool setOMXFileAttributes()
{
    // write OMX attributes
    H5DataSpaceId dspace;
    H5DataTypeId dtype;
    H5AttributeId attr;

    // OMX version
    dspace = H5S.create(H5S.H5SClass.SCALAR);
    dtype = H5T.copy(H5T.H5Type.C_S1); // string datatype
    H5T.setSize(dtype, dllVersion[0].Length);
    attr = H5A.create(fileId, omxVersionName, dtype, dspace);
    ASCIIEncoding ascii = new ASCIIEncoding();
    H5A.write(attr, dtype, new H5Array<System.Byte>(ascii.GetBytes(dllVersion[0])));
    H5A.close(attr);

    // OMX shape - only 2D tables
    dspace = H5S.create_simple(1, new long[] { 2 });
    dtype = H5T.copy(H5T.H5Type.NATIVE_INT);
    attr = H5A.create(fileId, omxShapeAttr, dtype, dspace);
    int[] shape = new int[2];
    shape[0] = (int)Shape[0];
    shape[1] = (int)Shape[1];
    H5A.write<int>(attr, dtype, new H5Array<int>(shape));
    H5S.close(dspace);
    H5A.close(attr);

    return true;
}
// test that we can create an empty file and add matrices
public static void CreateMatrixTest(string file)
{
    int zones = 3;
    double[,] testblock;
    string[] matrixNames = { "mat1", "mat2" };

    OmxWriteStream ws = OmxFile.Create(file, zones, true);
    // NOTE: cannot create data type until after file stream is created
    H5DataTypeId matrixDataTypes = H5T.copy(H5T.H5Type.NATIVE_DOUBLE);

    for (int i = 0; i < matrixNames.Length; i++)
    {
        ws.AddMatrix(matrixNames[i], matrixDataTypes);
    }
    ws.Close();

    OmxReadStream rs = OmxFile.OpenReadOnly(file);
    for (int i = 0; i < matrixNames.Length; i++)
    {
        testblock = rs.GetMatrixBlock<double>(matrixNames[i], 0, 0, 1, 1);
    }
    Console.WriteLine("mat shape is {0},{1}", rs.Shape[0], rs.Shape[1]);
    Console.WriteLine("mat names are {0},{1}", rs.MatrixNames[0], rs.MatrixNames[1]);
    Console.WriteLine("mat data type: {0},{1}",
        H5T.getClass(rs.GetMatrixDataType(matrixNames[0])),
        H5T.getClass(rs.GetMatrixDataType(matrixNames[1])));
    rs.Close();
}
public void H5TcloseTest1()
{
    hid_t dtype = H5T.copy(H5T.IEEE_F64LE);
    Assert.IsTrue(dtype >= 0);
    Assert.IsTrue(H5T.close(dtype) >= 0);
}
/// <summary>
/// Write a dataset attribute
/// </summary>
public void WriteDatasetAttribute(string datasetName, string attrName, string value)
{
    H5DataSetId datasetId = H5D.open(_fileId, datasetName);
    H5DataTypeId typeId = H5T.copy(H5T.H5Type.C_S1);
    H5DataSpaceId spaceId = H5S.create(H5S.H5SClass.SCALAR);
    H5T.setSize(typeId, value.Length);

    H5AttributeId attrId = H5A.create(datasetId, attrName, typeId, spaceId);
    if (value != "")
    {
        H5Array<byte> buffer = new H5Array<byte>(Encoding.Default.GetBytes(value));
        H5A.write(attrId, typeId, buffer);
    }

    if (typeId != null)
    {
        H5T.close(typeId);
    }
    if (spaceId != null)
    {
        H5S.close(spaceId);
    }
    if (attrId != null)
    {
        H5A.close(attrId);
    }
    if (datasetId != null)
    {
        H5D.close(datasetId);
    }
}
public static long OpenHDFDataType(Type clrType, long maxSize = 1)
{
    if (clrType == typeof(double))
    {
        return H5T.copy(H5T.NATIVE_DOUBLE);
    }
    if (clrType == typeof(float))
    {
        return H5T.copy(H5T.NATIVE_FLOAT);
    }
    if (clrType == typeof(int))
    {
        return H5T.copy(H5T.NATIVE_INT32);
    }
    if (clrType == typeof(long))
    {
        return H5T.copy(H5T.NATIVE_INT64);
    }
    if (clrType == typeof(string))
    {
        var type = H5T.copy(H5T.C_S1);
        var ptr = new IntPtr(maxSize); //Leak?
        var status = H5T.set_size(type, ptr);
        Debug.Assert(status >= 0);
        return type;
    }

    throw new NotImplementedException();
}
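A short usage sketch for OpenHDFDataType (the "comment" attribute name and the already-open object handle are hypothetical); the key point is that the returned id is a copy that the caller must eventually close with H5T.close.

static void WriteCommentAttribute(long objectId, string text)
{
    byte[] bytes = Encoding.ASCII.GetBytes(text);
    long typeId = OpenHDFDataType(typeof(string), bytes.Length); // sized copy of H5T.C_S1
    long spaceId = H5S.create(H5S.class_t.SCALAR);
    long attrId = H5A.create(objectId, "comment", typeId, spaceId);

    GCHandle hnd = GCHandle.Alloc(bytes, GCHandleType.Pinned);
    H5A.write(attrId, typeId, hnd.AddrOfPinnedObject());
    hnd.Free();

    H5A.close(attrId);
    H5S.close(spaceId);
    H5T.close(typeId); // the caller owns the copied type handle
}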
public static int WriteDataset<T>(int groupId, string name, T[,] dset) where T : struct
{
    ulong[] dims = new ulong[] { (ulong)dset.GetLength(0), (ulong)dset.GetLength(1) };

    ulong[] maxDims = null;
    var spaceId = H5S.create_simple(2, dims, maxDims);
    var datatype = GetDatatype(typeof(T));
    var typeId = H5T.copy(datatype);
    if (datatype == H5T.C_S1)
    {
        // size the copied type, not the predefined H5T.C_S1 constant
        H5T.set_size(typeId, new IntPtr(2));
        //var wdata = Encoding.ASCII.GetBytes((char[,]) dset);
    }
    name = ToHdf5Name(name);

    var datasetId = H5D.create(groupId, name, typeId, spaceId);
    GCHandle hnd = GCHandle.Alloc(dset, GCHandleType.Pinned);
    var result = H5D.write(datasetId, typeId, H5S.ALL, H5S.ALL, H5P.DEFAULT,
        hnd.AddrOfPinnedObject());
    hnd.Free();

    H5D.close(datasetId);
    H5S.close(spaceId);
    H5T.close(typeId);
    return result;
}
private static double[,] ReadDataArray(hid_t fileLoc, string name, bool transpose = false)
{
    hid_t dset = H5D.open(fileLoc, name);
    hid_t fspace = H5D.get_space(dset);
    hid_t count = H5S.get_simple_extent_ndims(fspace);
    hid_t type = H5D.get_type(dset);

    hsize_t[] dims = new hsize_t[count];
    hsize_t[] maxdims = new hsize_t[count];
    H5S.get_simple_extent_dims(fspace, dims, maxdims);
    H5S.close(fspace);

    byte[] rdata = new byte[dims[0] * dims[1] * 8];
    hid_t mem_type = H5T.copy(H5T.NATIVE_DOUBLE);
    H5T.set_size(mem_type, new IntPtr(8));

    GCHandle hnd = GCHandle.Alloc(rdata, GCHandleType.Pinned);
    H5D.read(dset, mem_type, H5S.ALL, H5S.ALL, H5P.DEFAULT, hnd.AddrOfPinnedObject());
    hnd.Free();
    H5T.close(mem_type);

    if (transpose)
    {
        double[,] val = new double[dims[1], dims[0]];
        int cnt = 0;
        for (int i = 0; i < (int)dims[0]; i++)
        {
            for (int j = 0; j < (int)dims[1]; j++)
            {
                val[j, i] = BitConverter.ToDouble(rdata, cnt * 8);
                cnt++;
            }
        }
        return val;
    }
    else
    {
        double[,] val = new double[dims[0], dims[1]];
        int cnt = 0;
        for (int i = 0; i < (int)dims[0]; i++)
        {
            for (int j = 0; j < (int)dims[1]; j++)
            {
                val[i, j] = BitConverter.ToDouble(rdata, cnt * 8);
                cnt++;
            }
        }
        return val;
    }
}
private static void WriteFile(string filePath)
{
    var file = H5F.create(filePath, H5F.CreateMode.ACC_TRUNC);

    var group = H5G.create(file, "/group");
    H5G.close(group);

    const int RANK = 2;
    const int DIM0 = 3;
    const int DIM1 = 4;
    var dims = new long[RANK] { DIM0, DIM1 };
    var dataSpace = H5S.create_simple(RANK, dims);
    var dataSet = H5D.create(file, "/group/dataset", H5T.H5Type.NATIVE_INT, dataSpace);
    H5S.close(dataSpace);

    var data = new int[DIM0, DIM1]
    {
        { 1, 2, 3, 4 },
        { 5, 6, 7, 8 },
        { 9, 10, 11, 12 }
    };
    H5D.write(dataSet, new H5DataTypeId(H5T.H5Type.NATIVE_INT), new H5Array<int>(data));

    var dataType = new H5DataTypeId(H5T.H5Type.NATIVE_INT);
    dataSpace = H5S.create(H5S.H5SClass.SCALAR);
    var integerAttribute = H5A.create(dataSet, "int", dataType, dataSpace);
    H5A.write(integerAttribute, dataType, new H5Array<int>(new int[1] { 42 }));
    H5A.close(integerAttribute);
    H5S.close(dataSpace);
    //H5T.close(dataType); // Read-only.

    var str = "Hello, world!";
    var strBytes = Encoding.ASCII.GetBytes(str);
    // There is a H5T.get_cset, but there does not seem to be a way of setting the character encoding, i.e. set_cset.
    dataType = H5T.copy(H5T.H5Type.C_S1);
    H5T.setSize(dataType, strBytes.Length);
    dataSpace = H5S.create(H5S.H5SClass.SCALAR);
    var stringAttribute = H5A.create(dataSet, "string", dataType, dataSpace);
    H5A.write(stringAttribute, dataType, new H5Array<byte>(strBytes));
    H5A.close(stringAttribute);
    H5S.close(dataSpace);
    H5T.close(dataType);

    H5D.close(dataSet);
    H5F.close(file);
}
/// <summary>
/// Constructor to create a chunked dataset object
/// </summary>
/// <param name="name"></param>
/// <param name="groupId"></param>
public ChunkedDataset(string name, long groupId)
{
    Datasetname = name;
    GroupId = groupId;
    datatype = Hdf5.GetDatatype(typeof(T));
    typeId = H5T.copy(datatype);
    chunkDims = null;
}
/// <summary>
/// Constructor to create a chunked dataset object
/// </summary>
/// <param name="name"></param>
/// <param name="groupId"></param>
/// <param name="chunkSize"></param>
public ChunkedDataset(string name, long groupId, ulong[] chunkSize)
{
    Datasetname = name;
    GroupId = groupId;
    _datatype = Hdf5.GetDatatype(typeof(T));
    _typeId = H5T.copy(_datatype);
    _chunkDims = chunkSize;
}
public static Hdf5Dataset CreateDataset(
    Hdf5Identifier _fileId,
    Hdf5Path _parentPath,
    string _name,
    Hdf5DataTypes _datatype,
    int _numberOfDimensions,
    List<Hdf5DimensionProperty> _properties)
{
    Hdf5Path path = _parentPath.Append(_name);

    UInt64[] dimensionSize = new UInt64[_numberOfDimensions];
    UInt64[] maxSize = null; // new UInt64[_numberOfDimensions];

    int i = 0;
    foreach (var property in _properties)
    {
        dimensionSize[i] = property.CurrentSize;

        //if (property.MaximumSize == UInt64.MaxValue)
        //{
        //    maxSize[i] = H5S.UNLIMITED;
        //}
        //else
        //{
        //    maxSize[i] = property.MaximumSize;
        //}

        i++;
    }

    Hdf5Identifier dataspaceId = H5S.create_simple(_numberOfDimensions, dimensionSize, maxSize).ToId();

    //TODO handle string datasets
    Hdf5Identifier typeId = H5T.copy(TypeHelper.GetNativeType(_datatype).Value).ToId();
    var status = H5T.set_order(typeId.Value, H5T.order_t.LE);

    Hdf5Identifier datasetId = H5D.create(_fileId.Value, path.FullPath, typeId.Value, dataspaceId.Value).ToId();

    Hdf5Dataset dataset = null;

    if (datasetId.Value > 0)
    {
        dataset = new Hdf5Dataset(_fileId, datasetId, path.FullPath)
        {
            DataType = TypeHelper.GetDataTypeFromDataset(datasetId),
            Dataspace = DataspaceHelper.GetDataspace(datasetId)
        };

        H5D.close(datasetId.Value);
    }

    H5T.close(typeId.Value);

    FileHelper.FlushToFile(_fileId);

    return dataset;
}
public static unsafe void AddString(long fileId, ContainerType container)
{
    long res;

    var dims = new ulong[] { 2, 2, 3 }; /* "extendible contiguous non-external dataset not allowed" */

    // fixed length string attribute (ASCII)
    var typeIdFixed = H5T.copy(H5T.C_S1);
    res = H5T.set_size(typeIdFixed, new IntPtr(2));
    res = H5T.set_cset(typeIdFixed, H5T.cset_t.ASCII);

    var dataFixed = new string[] { "00", "11", "22", "33", "44", "55", "66", "77", "  ", "AA", "ZZ", "!!" };
    var dataFixedChar = dataFixed
        .SelectMany(value => Encoding.ASCII.GetBytes(value))
        .ToArray();

    TestUtils.Add(container, fileId, "string", "fixed", typeIdFixed, dataFixedChar.AsSpan(), dims);

    res = H5T.close(typeIdFixed);

    // variable length string attribute (ASCII)
    var typeIdVar = H5T.copy(H5T.C_S1);
    res = H5T.set_size(typeIdVar, H5T.VARIABLE);
    res = H5T.set_cset(typeIdVar, H5T.cset_t.ASCII);

    var dataVar = new string[] { "00", "11", "22", "33", "44", "55", "66", "77", "  ", "AA", "ZZ", "!!" };
    var dataVarIntPtr = dataVar.Select(x => Marshal.StringToCoTaskMemUTF8(x)).ToArray();

    TestUtils.Add(container, fileId, "string", "variable", typeIdVar, dataVarIntPtr.AsSpan(), dims);

    foreach (var ptr in dataVarIntPtr)
    {
        Marshal.FreeCoTaskMem(ptr);
    }

    res = H5T.close(typeIdVar);

    // variable length string attribute (UTF8)
    var typeIdVarUTF8 = H5T.copy(H5T.C_S1);
    res = H5T.set_size(typeIdVarUTF8, H5T.VARIABLE);
    res = H5T.set_cset(typeIdVarUTF8, H5T.cset_t.UTF8);

    var dataVarUTF8 = new string[] { "00", "11", "22", "33", "44", "55", "66", "77", "  ", "ÄÄ", "的的", "!!" };
    var dataVarUTF8IntPtr = dataVarUTF8.Select(x => Marshal.StringToCoTaskMemUTF8(x)).ToArray();

    TestUtils.Add(container, fileId, "string", "variableUTF8", typeIdVarUTF8, dataVarUTF8IntPtr.AsSpan(), dims);

    foreach (var ptr in dataVarUTF8IntPtr)
    {
        Marshal.FreeCoTaskMem(ptr);
    }

    res = H5T.close(typeIdVarUTF8);
}
public static int AttributeAsInt32(H5AttributeId _attributeId)
{
    H5DataTypeId attributeType = H5T.copy(H5T.H5Type.NATIVE_INT);
    int[] value = new int[1];

    H5A.read<int>(_attributeId, attributeType, new H5Array<int>(value));

    return value[0];
}
public static Double AttributeAsDouble(H5AttributeId _attributeId)
{
    H5DataTypeId attributeType = H5T.copy(H5T.H5Type.NATIVE_DOUBLE);
    double[] value = new double[1];

    H5A.read<double>(_attributeId, attributeType, new H5Array<double>(value));

    return value[0];
}
/// <summary>
/// Constructor to create a chunked dataset object
/// </summary>
/// <param name="name"></param>
/// <param name="groupId"></param>
/// <param name="chunckSize"></param>
public ChunkedDataset(string name, hid_t groupId, ulong[] chunckSize)
{
    //Datasetname = Hdf5.ToHdf5Name(name);
    Datasetname = name;
    GroupId = groupId;
    datatype = Hdf5.GetDatatype(typeof(T));
    typeId = H5T.copy(datatype);
    chunkDims = chunckSize;
}
private static byte[] EncodeStringData(string str, out H5DataTypeId dtype)
{
    byte[] strdata = System.Text.Encoding.UTF8.GetBytes(str);

    dtype = H5T.copy(H5T.H5Type.C_S1);
    H5T.setSize(dtype, strdata.Length);

    return strdata;
}
// Generate floating-point number attributes
// GroupName: target group for the new attribute
// AttName: attribute name
// AttContent: content for the attribute, has to be a single floating-point number; here a 32-bit float is used, consider using a 64-bit double if necessary
public static void NumberAttributeGenerator(H5GroupId GroupName, string AttName, float AttContent)
{
    float[] AttArray = new float[1] { AttContent };
    long[] dims = new long[1];
    dims[0] = AttArray.Length;
    H5AttributeId attributeId = H5A.create(GroupName, AttName, H5T.copy(H5T.H5Type.NATIVE_FLOAT), H5S.create_simple(1, dims));
    H5A.write(attributeId, H5T.copy(H5T.H5Type.NATIVE_FLOAT), new H5Array<float>(AttArray));
    H5A.close(attributeId);
}
public void H5TcommitTest1()
{
    hid_t dtype = H5T.copy(H5T.IEEE_F64LE);
    Assert.IsTrue(dtype >= 0);
    Assert.IsTrue(H5T.commit(m_v0_test_file, "foo", dtype) >= 0);
    // can't commit twice
    Assert.IsFalse(H5T.commit(m_v0_test_file, "bar", dtype) >= 0);
    // can't commit to different files
    Assert.IsFalse(H5T.commit(m_v2_test_file, "bar", dtype) >= 0);
    Assert.IsTrue(H5T.close(dtype) >= 0);
}
// Generate double attributes
// GroupName: target group for the new attribute
// AttName: attribute name
// AttContent: content for the attribute, has to be a single floating-point number; here a 64-bit double is used
public static void DoubleAttributeGenerator(H5GroupId GroupName, string AttName, double AttContent)
{
    double[] AttArray = new double[1] { AttContent };
    long[] dims = new long[1];
    dims[0] = AttArray.Length;
    H5AttributeId attributeId = H5A.create(GroupName, AttName, H5T.copy(H5T.H5Type.NATIVE_DOUBLE), H5S.create_simple(1, dims));
    // the memory type must match the double[] buffer, so write with NATIVE_DOUBLE as well
    H5A.write(attributeId, H5T.copy(H5T.H5Type.NATIVE_DOUBLE), new H5Array<double>(AttArray));
    H5A.close(attributeId);
}
static void Main2222(string[] args)
{
    var h5 = H5F.create(@"D:\test.h5", H5F.ACC_TRUNC);

    var typeId = H5T.create(H5T.class_t.COMPOUND, new IntPtr(40));
    var strtype = H5T.copy(H5T.C_S1);
    H5T.set_size(strtype, new IntPtr(16));

    H5T.insert(typeId, "Name", new IntPtr(0), strtype);
    H5T.insert(typeId, "x_pos", new IntPtr(16), H5T.NATIVE_INT32);
    H5T.insert(typeId, "y_pos", new IntPtr(20), H5T.NATIVE_INT32);
    H5T.insert(typeId, "Mass", new IntPtr(24), H5T.NATIVE_FLOAT);
    H5T.insert(typeId, "Temperature", new IntPtr(32), H5T.NATIVE_DOUBLE);

    ulong[] dims = new ulong[] { 10000 };
    ulong[] chunk_size = new ulong[] { 1000 };

    var spaceid = H5S.create_simple(dims.Length, dims, null);

    var dcpl = H5P.create(H5P.DATASET_CREATE);
    H5P.set_layout(dcpl, H5D.layout_t.COMPACT);
    H5P.set_deflate(dcpl, 6);
    H5P.set_chunk(dcpl, chunk_size.Length, chunk_size);

    var datasetid = H5D.create(h5, "Table1", typeId, spaceid, H5P.DEFAULT, dcpl);

    ComType ct = new ComType()
    {
        Name = "aabb",
        x_pos = 2,
        y_pos = 1,
        Mass = 1.24F,
        Temperature = 45.7,
    };

    IntPtr p = Marshal.AllocHGlobal(40 * (int)dims[0]);
    Marshal.StructureToPtr(ct, p, false);

    H5D.write(datasetid, typeId, spaceid, H5S.ALL, H5P.DEFAULT, p);

    H5F.close(h5);
}
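A sketch of reading the compound dataset above back. It assumes ComType is a sequential-layout struct whose 40-byte marshaled size matches the committed compound type, and it only unpacks the first record.

static ComType ReadFirstRecord()
{
    var h5 = H5F.open(@"D:\test.h5", H5F.ACC_RDONLY);
    var dset = H5D.open(h5, "Table1");
    var fileType = H5D.get_type(dset); // assumed to share the 40-byte layout of ComType

    IntPtr buf = Marshal.AllocHGlobal(40 * 10000); // room for all 10000 records
    H5D.read(dset, fileType, H5S.ALL, H5S.ALL, H5P.DEFAULT, buf);
    ComType first = Marshal.PtrToStructure<ComType>(buf);

    Marshal.FreeHGlobal(buf);
    H5T.close(fileType);
    H5D.close(dset);
    H5F.close(h5);
    return first;
}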
private static void WriteAttribute(H5ObjectWithAttributes target, string name, long value)
{
    H5DataTypeId dtype = H5T.copy(H5T.H5Type.NATIVE_LLONG);
    H5DataSpaceId spaceId = H5S.create(H5S.H5SClass.SCALAR);
    H5AttributeId attributeId = H5A.create(target, name, dtype, spaceId);

    H5A.write(attributeId, dtype, new H5Array<long>(new[] { value }));

    H5A.close(attributeId);
    H5T.close(dtype);
    H5S.close(spaceId);
}
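For symmetry, a hypothetical read counterpart following the H5A.read pattern of the AttributeAsInt32/AttributeAsDouble examples above; it assumes H5A.open accepts the same H5ObjectWithAttributes target.

private static long ReadLongAttribute(H5ObjectWithAttributes target, string name)
{
    H5AttributeId attributeId = H5A.open(target, name);
    H5DataTypeId dtype = H5T.copy(H5T.H5Type.NATIVE_LLONG);
    long[] value = new long[1];

    H5A.read<long>(attributeId, dtype, new H5Array<long>(value));

    H5T.close(dtype);
    H5A.close(attributeId);
    return value[0];
}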