static void Main2222(string[] args)
{
    // Writes a 10000-record compound ("table") dataset to D:\test.h5.
    // Record layout (40 bytes):
    //   Name        char[16] @ 0
    //   x_pos       int32    @ 16
    //   y_pos       int32    @ 20
    //   Mass        float    @ 24   (4 bytes padding so Temperature is 8-aligned)
    //   Temperature double   @ 32
    const int RecordSize = 40;

    var h5 = H5F.create(@"D:\test.h5", H5F.ACC_TRUNC);

    var typeId = H5T.create(H5T.class_t.COMPOUND, new IntPtr(RecordSize));
    var strtype = H5T.copy(H5T.C_S1);
    H5T.set_size(strtype, new IntPtr(16));
    H5T.insert(typeId, "Name", new IntPtr(0), strtype);
    H5T.insert(typeId, "x_pos", new IntPtr(16), H5T.NATIVE_INT32);
    H5T.insert(typeId, "y_pos", new IntPtr(20), H5T.NATIVE_INT32);
    H5T.insert(typeId, "Mass", new IntPtr(24), H5T.NATIVE_FLOAT);
    H5T.insert(typeId, "Temperature", new IntPtr(32), H5T.NATIVE_DOUBLE);

    ulong[] dims = new ulong[] { 10000 };
    ulong[] chunk_size = new ulong[] { 1000 };
    var spaceid = H5S.create_simple(dims.Length, dims, null);

    // BUG FIX: the original requested H5D.layout_t.COMPACT and then called
    // set_chunk. Deflate compression requires a CHUNKED layout (which
    // set_chunk establishes), so the contradictory COMPACT request is removed.
    var dcpl = H5P.create(H5P.DATASET_CREATE);
    H5P.set_deflate(dcpl, 6);
    H5P.set_chunk(dcpl, chunk_size.Length, chunk_size);

    var datasetid = H5D.create(h5, "Table1", typeId, spaceid, H5P.DEFAULT, dcpl);

    ComType ct = new ComType()
    {
        Name = "aabb",
        x_pos = 2,
        y_pos = 1,
        Mass = 1.24F,
        Temperature = 45.7,
    };

    // BUG FIX: the dataset holds dims[0] records but the original marshaled
    // only the first one, so the remaining 9999 records were written from
    // uninitialized heap memory. Fill every slot; free the buffer when done.
    IntPtr p = Marshal.AllocHGlobal(RecordSize * (int)dims[0]);
    try
    {
        for (int i = 0; i < (int)dims[0]; i++)
        {
            Marshal.StructureToPtr(ct, IntPtr.Add(p, RecordSize * i), false);
        }
        H5D.write(datasetid, typeId, spaceid, H5S.ALL, H5P.DEFAULT, p);
    }
    finally
    {
        Marshal.FreeHGlobal(p); // BUG FIX: buffer previously leaked
    }

    // BUG FIX: close every handle that was opened, not just the file.
    H5D.close(datasetid);
    H5P.close(dcpl);
    H5S.close(spaceid);
    H5T.close(strtype);
    H5T.close(typeId);
    H5F.close(h5);
}
/// <summary>
/// Builds an HDF5 COMPOUND datatype whose members mirror the marshaled layout
/// of <paramref name="t"/>, using Hdf5.GetCompoundInfo for member names and
/// member datatypes.
/// </summary>
/// <param name="t">A marshalable struct type mapped by the Hdf5 helper.</param>
/// <returns>The new compound datatype id; the caller must close it.</returns>
private static long create_type(Type t)
{
    // Removed: unused float_size/int_size locals and dead commented-out code.
    var size = Marshal.SizeOf(t);
    var typeId = H5T.create(H5T.class_t.COMPOUND, new IntPtr(size));
    var compoundInfo = Hdf5.GetCompoundInfo(t);
    foreach (var cmp in compoundInfo)
    {
        // Offsets come from Marshal.OffsetOf so they match the struct layout.
        H5T.insert(typeId, cmp.name, Marshal.OffsetOf(t, cmp.name), cmp.datatype);
    }
    return typeId;
}
/// <summary>
/// Creates the in-memory COMPOUND datatype for <paramref name="type"/> and
/// returns its total size. The size is derived from the last member reported
/// by Hdf5.GetCompoundInfo (its offset plus its size), because the standard
/// file types may differ in size from the corresponding native types.
/// </summary>
/// <param name="type">The struct type to describe.</param>
/// <param name="useIEEE">Whether GetCompoundInfo should report IEEE (file) types.</param>
/// <param name="id">Receives the id of the newly created compound datatype.</param>
/// <returns>The compound's size in bytes.</returns>
private static int calcCompoundSize(Type type, bool useIEEE, ref hid_t id)
{
    var members = Hdf5.GetCompoundInfo(type, useIEEE);
    var lastMember = members.Last();
    var totalSize = lastMember.offset + lastMember.size;

    // Create the compound datatype for memory and register every member.
    id = H5T.create(H5T.class_t.COMPOUND, new IntPtr(totalSize));
    foreach (var member in members)
    {
        H5T.insert(id, member.name, new IntPtr(member.offset), member.datatype);
    }
    return totalSize;
}
/// <summary>
/// Builds an HDF5 COMPOUND datatype mirroring the marshaled layout of
/// <paramref name="t"/>. Member names and datatypes come from
/// Hdf5.GetCompoundInfo; member offsets from Marshal.OffsetOf.
/// </summary>
/// <param name="t">A marshalable struct type mapped by the Hdf5 helper.</param>
/// <returns>The new compound datatype id; the caller must close it.</returns>
private static long CreateType(Type t)
{
    // Removed: unused float_size/int_size locals and the dead commented-out
    // GetDatatype experiment noted as not readable by hdfview.
    var size = Marshal.SizeOf(t);
    var typeId = H5T.create(H5T.class_t.COMPOUND, new IntPtr(size));
    var compoundInfo = Hdf5.GetCompoundInfo(t);
    foreach (var cmp in compoundInfo)
    {
        H5T.insert(typeId, cmp.name, Marshal.OffsetOf(t, cmp.name), cmp.datatype);
    }
    return typeId;
}
/// <summary>
/// Maps a .NET type (or, for arrays, the element type) to an HDF5 datatype id.
/// Fixed-size primitives map to the shared native H5T ids. Strings/IntPtr map
/// to a variable-length UTF-8 type committed to the file as "string_t";
/// non-primitive, non-enum value types map to a COMPOUND type committed under
/// the struct's name. Committed types are reopened on subsequent calls.
/// </summary>
public static long GetHdfTypeIdFromType(long fileId, Type type)
{
    var elementType = type.IsArray ? type.GetElementType() : type;

    // Fixed-size primitives: direct one-to-one mapping (bool stored as u8).
    if (elementType == typeof(bool) || elementType == typeof(Byte))
    {
        return H5T.NATIVE_UINT8;
    }
    if (elementType == typeof(SByte))
    {
        return H5T.NATIVE_INT8;
    }
    if (elementType == typeof(UInt16))
    {
        return H5T.NATIVE_UINT16;
    }
    if (elementType == typeof(Int16))
    {
        return H5T.NATIVE_INT16;
    }
    if (elementType == typeof(UInt32))
    {
        return H5T.NATIVE_UINT32;
    }
    if (elementType == typeof(Int32))
    {
        return H5T.NATIVE_INT32;
    }
    if (elementType == typeof(UInt64))
    {
        return H5T.NATIVE_UINT64;
    }
    if (elementType == typeof(Int64))
    {
        return H5T.NATIVE_INT64;
    }
    if (elementType == typeof(Single))
    {
        return H5T.NATIVE_FLOAT;
    }
    if (elementType == typeof(Double))
    {
        return H5T.NATIVE_DOUBLE;
    }

    if (elementType == typeof(string) || elementType == typeof(IntPtr))
    {
        // Reuse the committed string type when the file already contains it.
        if (H5I.is_valid(fileId) > 0 && H5L.exists(fileId, "string_t") > 0)
        {
            return H5T.open(fileId, "string_t");
        }

        long stringTypeId = H5T.copy(H5T.C_S1);
        H5T.set_size(stringTypeId, H5T.VARIABLE);
        H5T.set_cset(stringTypeId, H5T.cset_t.UTF8);
        if (fileId > -1 && H5T.commit(fileId, "string_t", stringTypeId) < 0)
        {
            throw new Exception(ErrorMessage.TypeConversionHelper_CouldNotCommitDataType);
        }
        return stringTypeId;
    }

    if (elementType.IsValueType && !elementType.IsPrimitive && !elementType.IsEnum)
    {
        // Reuse the committed compound type when the file already contains it.
        if (H5I.is_valid(fileId) > 0 && H5L.exists(fileId, elementType.Name) > 0)
        {
            return H5T.open(fileId, elementType.Name);
        }

        long compoundTypeId = H5T.create(H5T.class_t.COMPOUND, new IntPtr(Marshal.SizeOf(elementType)));
        foreach (FieldInfo fieldInfo in elementType.GetFields())
        {
            long fieldTypeId = TypeConversionHelper.GetHdfTypeIdFromType(fileId, fieldInfo.FieldType);
            H5T.insert(compoundTypeId, fieldInfo.Name, Marshal.OffsetOf(elementType, fieldInfo.Name), fieldTypeId);
            // The compound keeps its own copy of the member type, so release
            // any closable field type id right away.
            if (H5I.is_valid(fieldTypeId) > 0)
            {
                H5T.close(fieldTypeId);
            }
        }
        if (fileId > -1 && H5T.commit(fileId, elementType.Name, compoundTypeId) < 0)
        {
            throw new Exception(ErrorMessage.TypeConversionHelper_CouldNotCommitDataType);
        }
        return compoundTypeId;
    }

    throw new NotSupportedException();
}
/// <summary>
/// Maps a .NET type (or, for arrays, the element type) to an HDF5 datatype id.
/// Primitives map to the shared native ids; enums become H5T enum types;
/// strings become variable-length UTF-8; other non-primitive value types
/// become COMPOUND types. Enum/string/compound ids are newly created and must
/// be closed by the caller.
/// </summary>
private static long GetHdfTypeIdFromType(Type type)
{
    var elementType = type.IsArray ? type.GetElementType() : type;

    if (elementType == typeof(bool)) { return H5T.NATIVE_UINT8; }
    else if (elementType == typeof(byte)) { return H5T.NATIVE_UINT8; }
    else if (elementType == typeof(sbyte)) { return H5T.NATIVE_INT8; }
    else if (elementType == typeof(ushort)) { return H5T.NATIVE_UINT16; }
    else if (elementType == typeof(short)) { return H5T.NATIVE_INT16; }
    else if (elementType == typeof(uint)) { return H5T.NATIVE_UINT32; }
    else if (elementType == typeof(int)) { return H5T.NATIVE_INT32; }
    else if (elementType == typeof(ulong)) { return H5T.NATIVE_UINT64; }
    else if (elementType == typeof(long)) { return H5T.NATIVE_INT64; }
    else if (elementType == typeof(float)) { return H5T.NATIVE_FLOAT; }
    else if (elementType == typeof(double)) { return H5T.NATIVE_DOUBLE; }
    // decimal has no HDF5 counterpart; see https://en.wikipedia.org/wiki/Long_double
    else if (elementType.IsEnum)
    {
        var baseTypeId = TestUtils.GetHdfTypeIdFromType(Enum.GetUnderlyingType(elementType));
        var typeId = H5T.enum_create(baseTypeId);

        // BUG FIX: enumerate elementType (not type) so arrays of enums work,
        // and free the pinned GCHandle that the original leaked per member.
        foreach (var value in Enum.GetValues(elementType))
        {
            var value_converted = Convert.ToInt64(value);
            var name = Enum.GetName(elementType, value_converted);
            // NOTE(review): passing a pinned Int64 for enums with smaller
            // underlying types relies on little-endian layout — confirm.
            var handle = GCHandle.Alloc(value_converted, GCHandleType.Pinned);
            try
            {
                H5T.enum_insert(typeId, name, handle.AddrOfPinnedObject());
            }
            finally
            {
                handle.Free();
            }
        }
        return typeId;
    }
    else if (elementType == typeof(string) || elementType == typeof(IntPtr))
    {
        // Variable-length UTF-8 string; caller owns the returned id.
        var typeId = H5T.copy(H5T.C_S1);
        H5T.set_size(typeId, H5T.VARIABLE);
        H5T.set_cset(typeId, H5T.cset_t.UTF8);
        return typeId;
    }
    else if (elementType.IsValueType && !elementType.IsPrimitive)
    {
        // Compound mirroring the struct's marshaled layout; a field may
        // override its HDF5 member name via [H5Name].
        var typeId = H5T.create(H5T.class_t.COMPOUND, new IntPtr(Marshal.SizeOf(elementType)));
        foreach (var fieldInfo in elementType.GetFields())
        {
            var fieldType = TestUtils.GetHdfTypeIdFromType(fieldInfo.FieldType);
            var attribute = fieldInfo.GetCustomAttribute<H5NameAttribute>(true);
            var hdfFieldName = attribute is not null ? attribute.Name : fieldInfo.Name;
            H5T.insert(typeId, hdfFieldName, Marshal.OffsetOf(elementType, fieldInfo.Name), fieldType);
            // The compound keeps a copy of the member type; close ours.
            if (H5I.is_valid(fieldType) > 0)
            {
                H5T.close(fieldType);
            }
        }
        return typeId;
    }
    else
    {
        throw new NotSupportedException();
    }
}
/// <summary>
/// Creates a small compound-typed "table" dataset used by later tests.
/// On-disk record layout (16 bytes): c u8-LE @0, i u32-LE @1, l i64-BE @5.
/// Failures are reported on the console and counted in nerrors.
/// </summary>
static void make_table()
{
    try
    {
        Console.Write("Making a table for testing");

        long[] offsets = { 0, 1, 5 };
        long[] count = { N_RECORDS };
        string[] field_names = { "c", "i", "l" };

        // Build the records to be written.
        s1[] records = new s1[DIM0];
        uint serial = 0;
        for (uint idx = 0; idx < DIM0; idx++)
        {
            records[idx].c = 't';
            records[idx].i = serial++;
            records[idx].l = (idx * 10) * serial;
        }

        // File and a rank-1 dataspace of N_RECORDS elements.
        H5FileId fileId = H5F.create(FILE_NAME, H5F.CreateMode.ACC_TRUNC);
        hssize_t[] dims = { N_RECORDS };
        H5DataSpaceId spaceId = H5S.create_simple(1, dims);

        // 16-byte compound record type for the in-memory data.
        H5DataTypeId typeId = H5T.create(H5T.CreateClass.COMPOUND, 16);
        H5T.insert(typeId, field_names[0], 0, H5T.H5Type.STD_U8LE);
        H5T.insert(typeId, field_names[1], 1, H5T.H5Type.STD_U32LE);
        H5T.insert(typeId, field_names[2], 5, H5T.H5Type.STD_I64BE);

        H5DataSetId dsetId = H5D.create(fileId, TABLE_NAME, typeId, spaceId);

        // Select a hyperslab covering the records, then write them.
        H5S.selectHyperslab(spaceId, H5S.SelectOperator.SET, offsets, count);
        H5D.write(dsetId, typeId, spaceId, spaceId,
                  new H5PropertyListId(H5P.Template.DEFAULT),
                  new H5Array<s1>(records));

        // Release everything that was opened.
        H5D.close(dsetId);
        H5S.close(spaceId);
        H5T.close(typeId);
        H5F.close(fileId);

        Console.WriteLine("\t\t\t\tPASSED");
    }
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // make_table
/// <summary>
/// Constructs a new EpochHDF5Persistor with an HDF5 file at the given path.
/// Opens the file (and its committed datatypes) if it already exists,
/// otherwise creates it and commits the standard datatypes.
/// </summary>
/// <param name="filename">Desired HDF5 path</param>
/// <param name="assocFilePrefix">Prefix for auxiliary (e.g. image) file associated with this HDF5 file</param>
/// <param name="guidGenerator">Function for generating new UUIDs (e.g. Guid.NewGuid)</param>
/// <param name="compression">Automatically numeric data compression (0 = none, 9 = maximum)</param>
/// <exception cref="ArgumentNullException">filename is null</exception>
/// <exception cref="ArgumentException">compression is outside 0-9</exception>
public EpochHDF5Persistor(string filename, string assocFilePrefix, Func<Guid> guidGenerator, uint compression = 9)
    : base(guidGenerator)
{
    if (filename == null)
    {
        // ArgumentNullException derives from ArgumentException, so existing
        // catch (ArgumentException) handlers continue to work.
        throw new ArgumentNullException("filename", "File name must not be null");
    }
    if (compression > 9)
    {
        throw new ArgumentException("Compression must be 0-9", "compression");
    }
    if (assocFilePrefix == null)
    {
        assocFilePrefix = "";
    }

    this.AssociatedFilePrefix = assocFilePrefix;
    NumericDataCompression = compression;
    EpochGroupsIDs = new Stack<EpochGroupIDs>();

    var fInfo = new FileInfo(filename);
    string prefixedFilePath = fInfo.DirectoryName + Path.DirectorySeparatorChar + this.AssociatedFilePrefix + fInfo.Name;

    var currentFile = new FileInfo(prefixedFilePath);
    if (currentFile.Exists)
    {
        // Re-open the file and the datatypes committed on first creation.
        fileId = H5F.open(prefixedFilePath, H5F.OpenMode.ACC_RDWR);
        string_t = H5T.open(fileId, "STRING40");
        keyval_t = H5T.open(fileId, "KEY40VAR40");
        measurement_t = H5T.open(fileId, "MEASUREMENT");
        extdevmeasurement_t = H5T.open(fileId, "EXTDEV_MEASUREMENT");
        //TODO Check persistence version
    }
    else
    {
        fileId = H5F.create(prefixedFilePath, H5F.CreateMode.ACC_EXCL);
        WriteAttribute(fileId, "version", Version);

        // Standard fixed-length string type.
        // CONSISTENCY FIX: use FIXED_STRING_LENGTH instead of the magic 40 so
        // the size stays in sync with the "value" offset used below.
        string_t = H5T.copy(H5T.H5Type.C_S1);
        H5T.setSize(string_t, FIXED_STRING_LENGTH);
        H5T.commit(fileId, "STRING40", string_t);

        // Key/value compound: two fixed-length strings back-to-back
        // (was the magic 80 = 2 * 40).
        keyval_t = H5T.create(H5T.CreateClass.COMPOUND, 2 * FIXED_STRING_LENGTH);
        H5T.insert(keyval_t, "key", 0, string_t);
        H5T.insert(keyval_t, "value", FIXED_STRING_LENGTH, string_t);
        H5T.commit(fileId, "KEY40VAR40", keyval_t);

        // Measurement compound: double quantity followed by its unit string
        // (was the magic 48; computed here from the member sizes it contains).
        measurement_t = H5T.create(H5T.CreateClass.COMPOUND,
            H5T.getSize(H5T.H5Type.NATIVE_DOUBLE) + H5T.getSize(string_t));
        H5T.insert(measurement_t, "quantity", 0, H5T.H5Type.NATIVE_DOUBLE);
        H5T.insert(measurement_t, "unit", H5T.getSize(H5T.H5Type.NATIVE_DOUBLE), string_t);
        H5T.commit(fileId, "MEASUREMENT", measurement_t);

        // ExtDev/Measurement compound: device-name string + nested measurement.
        // NOTE(review): the declared size reserves room for 2 measurements but
        // only one is inserted — preserved as-is, it defines the on-disk format.
        extdevmeasurement_t = H5T.create(H5T.CreateClass.COMPOUND,
            H5T.getSize(string_t) + 2 * H5T.getSize(measurement_t));
        H5T.insert(extdevmeasurement_t, "externalDevice", 0, string_t);
        H5T.insert(extdevmeasurement_t, "measurement", H5T.getSize(string_t), measurement_t);
        H5T.commit(fileId, "EXTDEV_MEASUREMENT", extdevmeasurement_t);
    }

    Interlocked.Increment(ref _openHdf5FileCount);
}
/// <summary>
/// Reads a dataset from an open HDF5 file into <paramref name="datasetOut"/>.
/// FLOAT and INT datasets are read whole; COMPOUND datasets are read one
/// member at a time (only INTEGER members are supported for now) into the
/// last axis of the output array.
/// </summary>
/// <param name="fileId">An open HDF5 file id.</param>
/// <param name="datasetName">Name of the dataset, e.g. "EV_Emissive".</param>
/// <param name="groupName">Group containing the dataset.</param>
/// <param name="datasetOut">Destination array; filled in place.</param>
/// <param name="type">How to interpret the dataset's values.</param>
public void GetDataset<T>(H5FileId fileId, string datasetName, string groupName, T[, ,] datasetOut, DataValueType type)
{
    H5GroupId groupId = H5G.open(fileId, groupName);
    H5DataSetId dataSetId = H5D.open(groupId, datasetName);
    try
    {
        switch (type)
        {
            case DataValueType.FLOAT:
                H5DataTypeId tidfloat = new H5DataTypeId(H5T.H5Type.NATIVE_FLOAT);
                H5D.read(dataSetId, tidfloat, new H5Array<T>(datasetOut));
                break;

            case DataValueType.INT:
                H5DataTypeId tidint = new H5DataTypeId(H5T.H5Type.NATIVE_INT);
                H5D.read(dataSetId, tidint, new H5Array<T>(datasetOut));
                break;

            case DataValueType.COMPOUND:
                H5DataTypeId tid0 = H5D.getType(dataSetId);
                int nMember = H5T.getNMembers(tid0);
                H5DataSpaceId spaceid = H5D.getSpace(dataSetId);
                // Dataset extent, e.g. [3, 1800, 2048]; total element count
                // is the product of all dimensions.
                long[] dims = H5S.getSimpleExtentDims(spaceid);
                int length = 1;
                for (int i = 0; i < dims.Length; i++)
                {
                    length *= (int)dims[i];
                }
                for (int i = 0; i < nMember; i++)
                {
                    string memberName = H5T.getMemberName(tid0, i);
                    H5DataTypeId memberTypeId = H5T.getMemberType(tid0, i);
                    H5T.H5TClass dataClass = H5T.getClass(memberTypeId);
                    string typeName = dataClass.ToString();
                    if (typeName == "INTEGER") // only integer members supported for now
                    {
                        // Describe a one-member compound so HDF5 extracts just
                        // this field for every element.
                        H5DataTypeId tidtmp = H5T.create(H5T.CreateClass.COMPOUND, sizeof(int));
                        H5T.insert(tidtmp, memberName, 0, H5T.H5Type.NATIVE_INT);
                        int[] dataTmp = new int[length];
                        H5D.read(dataSetId, tidtmp, new H5Array<int>(dataTmp));
                        // BUG FIX: use typeof(T) instead of
                        // datasetOut[0, j, i].GetType(), which throws
                        // NullReferenceException when T is a reference type.
                        for (int j = 0; j < length; j++)
                        {
                            datasetOut[0, j, i] = (T)Convert.ChangeType(dataTmp[j], typeof(T));
                        }
                        H5T.close(tidtmp); // BUG FIX: temporary type was leaked
                    }
                    H5T.close(memberTypeId); // BUG FIX: member type was leaked
                }
                H5T.close(tid0); // BUG FIX: dataset type was leaked
                H5S.close(spaceid);
                break;

            default:
                break;
        }
    }
    finally
    {
        // Close in reverse order of opening, even if a read throws.
        H5D.close(dataSetId);
        H5G.close(groupId);
    }
}
/// <summary>
/// Builds the HDF5 COMPOUND datatype matching the marshaled layout of
/// PbTickStruct5. Member offsets come from Marshal.OffsetOf; the fixed-length
/// Price and Size arrays are exposed as the flat members BidPrice5..AskPrice5
/// and BidSize5..AskSize5.
/// </summary>
/// <returns>The compound datatype id; the caller is responsible for closing it.</returns>
private long create_type()
{
    var t = typeof(PbTickStruct5);
    var size = Marshal.SizeOf(t);
    var float_size = Marshal.SizeOf(typeof(float));
    var int_size = Marshal.SizeOf(typeof(int));
    var typeId = H5T.create(H5T.class_t.COMPOUND, new IntPtr(size));

    // Timestamp fields.
    H5T.insert(typeId, "TradingDay", Marshal.OffsetOf(t, "TradingDay"), H5T.NATIVE_INT32);
    H5T.insert(typeId, "ActionDay", Marshal.OffsetOf(t, "ActionDay"), H5T.NATIVE_INT32);
    H5T.insert(typeId, "UpdateTime", Marshal.OffsetOf(t, "UpdateTime"), H5T.NATIVE_INT32);
    H5T.insert(typeId, "UpdateMillisec", Marshal.OffsetOf(t, "UpdateMillisec"), H5T.NATIVE_INT32);

    // Fixed-length string members.
    var Symbol_type = H5T.copy(H5T.C_S1);
    H5T.set_size(Symbol_type, new IntPtr(64));
    var Exchange_type = H5T.copy(H5T.C_S1);
    H5T.set_size(Exchange_type, new IntPtr(9));
    H5T.insert(typeId, "Symbol", Marshal.OffsetOf(t, "Symbol"), Symbol_type);
    H5T.insert(typeId, "Exchange", Marshal.OffsetOf(t, "Exchange"), Exchange_type);
    // BUG FIX: H5T.insert copies the member datatype into the compound, so
    // these temporary string type ids must be closed or their handles leak.
    H5T.close(Symbol_type);
    H5T.close(Exchange_type);

    // Price/volume statistics.
    H5T.insert(typeId, "LastPrice", Marshal.OffsetOf(t, "LastPrice"), H5T.NATIVE_FLOAT);
    H5T.insert(typeId, "Volume", Marshal.OffsetOf(t, "Volume"), H5T.NATIVE_FLOAT);
    H5T.insert(typeId, "Turnover", Marshal.OffsetOf(t, "Turnover"), H5T.NATIVE_FLOAT);
    H5T.insert(typeId, "OpenInterest", Marshal.OffsetOf(t, "OpenInterest"), H5T.NATIVE_FLOAT);
    H5T.insert(typeId, "AveragePrice", Marshal.OffsetOf(t, "AveragePrice"), H5T.NATIVE_FLOAT);
    H5T.insert(typeId, "OpenPrice", Marshal.OffsetOf(t, "OpenPrice"), H5T.NATIVE_FLOAT);
    H5T.insert(typeId, "HighestPrice", Marshal.OffsetOf(t, "HighestPrice"), H5T.NATIVE_FLOAT);
    H5T.insert(typeId, "LowestPrice", Marshal.OffsetOf(t, "LowestPrice"), H5T.NATIVE_FLOAT);
    H5T.insert(typeId, "ClosePrice", Marshal.OffsetOf(t, "ClosePrice"), H5T.NATIVE_FLOAT);
    H5T.insert(typeId, "SettlementPrice", Marshal.OffsetOf(t, "SettlementPrice"), H5T.NATIVE_FLOAT);
    H5T.insert(typeId, "UpperLimitPrice", Marshal.OffsetOf(t, "UpperLimitPrice"), H5T.NATIVE_FLOAT);
    H5T.insert(typeId, "LowerLimitPrice", Marshal.OffsetOf(t, "LowerLimitPrice"), H5T.NATIVE_FLOAT);
    H5T.insert(typeId, "PreClosePrice", Marshal.OffsetOf(t, "PreClosePrice"), H5T.NATIVE_FLOAT);
    H5T.insert(typeId, "PreSettlementPrice", Marshal.OffsetOf(t, "PreSettlementPrice"), H5T.NATIVE_FLOAT);
    H5T.insert(typeId, "PreOpenInterest", Marshal.OffsetOf(t, "PreOpenInterest"), H5T.NATIVE_FLOAT);

    // Depth-of-book prices: the Price array holds Bid5..Bid1 then Ask1..Ask5.
    var price_intptr = Marshal.OffsetOf(t, "Price");
    H5T.insert(typeId, "BidPrice5", price_intptr + float_size * 0, H5T.NATIVE_FLOAT);
    H5T.insert(typeId, "BidPrice4", price_intptr + float_size * 1, H5T.NATIVE_FLOAT);
    H5T.insert(typeId, "BidPrice3", price_intptr + float_size * 2, H5T.NATIVE_FLOAT);
    H5T.insert(typeId, "BidPrice2", price_intptr + float_size * 3, H5T.NATIVE_FLOAT);
    H5T.insert(typeId, "BidPrice1", price_intptr + float_size * 4, H5T.NATIVE_FLOAT);
    H5T.insert(typeId, "AskPrice1", price_intptr + float_size * 5, H5T.NATIVE_FLOAT);
    H5T.insert(typeId, "AskPrice2", price_intptr + float_size * 6, H5T.NATIVE_FLOAT);
    H5T.insert(typeId, "AskPrice3", price_intptr + float_size * 7, H5T.NATIVE_FLOAT);
    H5T.insert(typeId, "AskPrice4", price_intptr + float_size * 8, H5T.NATIVE_FLOAT);
    H5T.insert(typeId, "AskPrice5", price_intptr + float_size * 9, H5T.NATIVE_FLOAT);

    // Depth-of-book sizes: the Size array mirrors the Price array ordering.
    var size_intptr = Marshal.OffsetOf(t, "Size");
    H5T.insert(typeId, "BidSize5", size_intptr + int_size * 0, H5T.NATIVE_INT32);
    H5T.insert(typeId, "BidSize4", size_intptr + int_size * 1, H5T.NATIVE_INT32);
    H5T.insert(typeId, "BidSize3", size_intptr + int_size * 2, H5T.NATIVE_INT32);
    H5T.insert(typeId, "BidSize2", size_intptr + int_size * 3, H5T.NATIVE_INT32);
    H5T.insert(typeId, "BidSize1", size_intptr + int_size * 4, H5T.NATIVE_INT32);
    H5T.insert(typeId, "AskSize1", size_intptr + int_size * 5, H5T.NATIVE_INT32);
    H5T.insert(typeId, "AskSize2", size_intptr + int_size * 6, H5T.NATIVE_INT32);
    H5T.insert(typeId, "AskSize3", size_intptr + int_size * 7, H5T.NATIVE_INT32);
    H5T.insert(typeId, "AskSize4", size_intptr + int_size * 8, H5T.NATIVE_INT32);
    H5T.insert(typeId, "AskSize5", size_intptr + int_size * 9, H5T.NATIVE_INT32);

    return typeId;
}
/// <summary>
/// Exercises H5T.insert: builds a compound of a 16-byte fixed-length string
/// ("key") and a variable-length string ("value"), writes three records,
/// reads them back, and verifies both members of every record.
/// </summary>
public void H5TinsertTest1()
{
    // a fixed-length string type
    hid_t fls = H5T.create(H5T.class_t.STRING, new IntPtr(16));
    Assert.IsTrue(fls >= 0);
    Assert.IsTrue(H5T.is_variable_str(fls) == 0);

    // a variable-length string type
    hid_t vls = H5T.create(H5T.class_t.STRING, H5T.VARIABLE);
    Assert.IsTrue(vls >= 0);
    Assert.IsTrue(H5T.is_variable_str(vls) > 0);

    // a key-value compound: 16 key bytes at offset 0, then one
    // pointer-sized vlen slot for the value
    IntPtr size = new IntPtr(16 + IntPtr.Size);
    hid_t kvt = H5T.create(H5T.class_t.COMPOUND, size);
    Assert.IsTrue(H5T.insert(kvt, "key", IntPtr.Zero, fls) >= 0);
    Assert.IsTrue(H5T.insert(kvt, "value", new IntPtr(16), vls) >= 0);
    // the compound keeps copies of the member types, so close the originals
    Assert.IsTrue(H5T.close(vls) >= 0);
    Assert.IsTrue(H5T.close(fls) >= 0);

    // create a key-value dataset (3 elements)
    hid_t fsp = H5S.create_simple(1, new hsize_t[] { 3 }, null);
    Assert.IsTrue(fsp >= 0);
    hid_t dset = H5D.create(m_v2_class_file, "KeyVal", kvt, fsp);
    Assert.IsTrue(dset >= 0);
    Assert.IsTrue(H5S.close(fsp) >= 0);

    // write a 3 elements
    string[] keys = new string[]
    { "Key0123456789ABC", "Key0123456789DEF", "Key0123456789GHI" };
    IntPtr[] values = new IntPtr[3];
    values[0] = Marshal.StringToHGlobalAnsi("I am a managed String!");
    values[1] = Marshal.StringToHGlobalAnsi("I am also a managed String!");
    values[2] = Marshal.StringToHGlobalAnsi("I am another managed String!");

    // serialize each record as 16 key bytes followed by the native-size
    // pointer to the unmanaged value string
    MemoryStream ms = new MemoryStream();
    BinaryWriter writer = new BinaryWriter(ms);
    for (int i = 0; i < 3; ++i)
    {
        writer.Write(Encoding.ASCII.GetBytes(keys[i]));
        if (IntPtr.Size == 8)
        {
            writer.Write(values[i].ToInt64());
        }
        else
        {
            writer.Write(values[i].ToInt32());
        }
    }
    byte[] wdata = ms.ToArray();

    // pin the buffer while the unmanaged write reads it
    GCHandle hnd = GCHandle.Alloc(wdata, GCHandleType.Pinned);
    Assert.IsTrue(H5D.write(dset, kvt, H5S.ALL, H5S.ALL, H5P.DEFAULT,
        hnd.AddrOfPinnedObject()) >= 0);
    hnd.Free();

    // now read it back
    byte[] rdata = new byte[3 * size.ToInt32()];
    hnd = GCHandle.Alloc(rdata, GCHandleType.Pinned);
    Assert.IsTrue(H5D.read(dset, kvt, H5S.ALL, H5S.ALL, H5P.DEFAULT,
        hnd.AddrOfPinnedObject()) >= 0);
    hnd.Free();

    // check it out
    MemoryStream ms1 = new MemoryStream(rdata);
    BinaryReader reader = new BinaryReader(ms1);
    for (int i = 0; i < 3; ++i)
    {
        string k = Encoding.ASCII.GetString(reader.ReadBytes(16));
        Assert.IsTrue(k == keys[i]);
        IntPtr ptr = IntPtr.Zero;
        if (IntPtr.Size == 8)
        {
            ptr = new IntPtr(reader.ReadInt64());
        }
        else
        {
            ptr = new IntPtr(reader.ReadInt32());
        }
        string v = Marshal.PtrToStringAnsi(ptr);
        Assert.IsTrue(v == Marshal.PtrToStringAnsi(values[i]));
        // NOTE(review): ptr points at memory the HDF5 library allocated while
        // reading the vlen string; freeing it with Marshal.FreeHGlobal assumes
        // the library allocates from the same heap the CLR frees to — confirm;
        // H5.free_memory / vlen reclaim would be the safer way to release it.
        Marshal.FreeHGlobal(ptr);
        Marshal.FreeHGlobal(values[i]);
    }
    Assert.IsTrue(H5D.close(dset) >= 0);
    Assert.IsTrue(H5T.close(kvt) >= 0);
}
/// <summary>
/// Writes a 2-D dataset of compound records (c u8-LE @0, i u32-LE @1,
/// l i64-BE @5; 16 bytes total), reopens it, and verifies each member's
/// name, class, index, offset, and size. Errors are reported on the console
/// and counted in nerrors.
/// </summary>
static void test_compound_dtype(H5FileId fileId)
{
    uint i, j, n;
    try
    {
        Console.Write("Testing compound datatypes");

        // Allocate and initialize the data to write.
        // (Removed: unused 'check' array that was never read or written.)
        s1[,] points = new s1[DIM0, DIM1];
        for (i = n = 0; i < DIM0; i++)
        {
            for (j = 0; j < DIM1; j++)
            {
                points[i, j].c = 't';
                points[i, j].i = n++;
                points[i, j].l = (i * 10 + j * 100) * n;
            }
        }

        // Create the data space.
        hssize_t[] dims = { DIM0, DIM1 };
        H5DataSpaceId spaceId = H5S.create_simple(2, dims);

        // Create compound datatype for disk storage and insert the members.
        H5DataTypeId typeId = H5T.create(H5T.CreateClass.COMPOUND, 16);
        H5T.insert(typeId, "c", 0, H5T.H5Type.STD_U8LE);
        H5T.insert(typeId, "i", 1, H5T.H5Type.STD_U32LE);
        H5T.insert(typeId, "l", 5, H5T.H5Type.STD_I64BE);

        // Create and write the dataset, then release the write-side handles.
        H5DataSetId dsetId = H5D.create(fileId, DSET_COMPOUND_NAME, typeId, spaceId);
        H5D.write(dsetId, typeId, new H5Array<s1>(points));
        H5D.close(dsetId);
        H5S.close(spaceId);
        H5T.close(typeId);

        // Open dataset again to check various functions.
        dsetId = H5D.open(fileId, DSET_COMPOUND_NAME);
        H5DataTypeId dset_typeId = H5D.getType(dsetId);
        H5DataTypeId native_type = H5T.getNativeType(dset_typeId, H5T.Direction.DEFAULT);

        // Expected member layout to check against.
        string[] memb_names = { "c", "i", "l" };
        int[] memb_offsets = { 0, 1, 5 };

        H5DataTypeId mtypeId;              // member type
        H5T.H5TClass memb_cls1, memb_cls2; // member classes retrieved different ways
        string memb_name;                  // member name
        int memb_idx;                      // member index

        // Get the number of members in the type.
        int nmembers = H5T.getNMembers(native_type);

        // For each member, check its name, class, index, offset, and size.
        for (int ii = 0; ii < nmembers; ii++)
        {
            // Get the type of the ith member.
            mtypeId = H5T.getMemberType(native_type, ii);

            // Get the name of the ith member and verify it.
            // BUG FIX: the message previously printed the stale outer loop
            // variable 'i' instead of the member index 'ii'.
            memb_name = H5T.getMemberName(native_type, ii);
            if (memb_name != memb_names[ii])
            {
                Console.WriteLine("test_compound_dtypes: incorrect member name, {0}, for member no {1}", memb_name, ii);
                nerrors++;
            }

            // Get the class of the ith member and then verify the class.
            memb_cls1 = H5T.getMemberClass(native_type, ii);
            if (memb_cls1 != H5T.H5TClass.INTEGER)
            {
                Console.WriteLine("test_compound_dtypes: incorrect class, {0}, for member no {1}", memb_cls1, ii);
                nerrors++;
            }

            // Get the class via type id and cross-check the two paths.
            memb_cls2 = H5T.getClass(mtypeId);
            if (memb_cls1 != memb_cls2)
            {
                Console.WriteLine("test_compound_dtypes: H5T.getMemberClass and H5T.getClass return different classes for the same type.");
                nerrors++;
            }

            // Get member's index back from its name and verify it.
            memb_idx = H5T.getMemberIndex(dset_typeId, memb_name);
            if (memb_idx != ii)
            {
                Console.WriteLine("test_compound_dtypes: H5T.getMemberName and/or H5T.getMemberIndex returned false values.");
                nerrors++;
            }

            // Get member's offset and verify it.
            int memb_offset = H5T.getMemberOffset(dset_typeId, ii);
            if (memb_offset != memb_offsets[ii])
            {
                Console.WriteLine("test_compound_dtypes: Incorrect offset value {0}, should be {1}.", memb_offset, memb_offsets[ii]);
                nerrors++;
            }

            // Get size of the member's type and verify it.
            int tsize = H5T.getSize(mtypeId);
            switch (ii)
            {
                case 0:
                    if (tsize != H5T.getSize(H5T.H5Type.STD_U8LE))
                    {
                        Console.WriteLine("test_compound_dtypes: First member has incorrect size");
                        nerrors++;
                    }
                    break;
                case 1:
                    if (tsize != H5T.getSize(H5T.H5Type.STD_U32LE))
                    {
                        Console.WriteLine("test_compound_dtypes: Second member has incorrect size");
                        nerrors++;
                    }
                    break;
                case 2:
                    if (tsize != H5T.getSize(H5T.H5Type.STD_I64BE))
                    {
                        Console.WriteLine("test_compound_dtypes: Third member has incorrect size");
                        nerrors++;
                    }
                    break;
                default:
                    Console.WriteLine("test_compound_dtypes: Only 3 members.");
                    break;
            } // end switch

            // Close current member type.
            H5T.close(mtypeId);
        } // end for

        // Close objects.
        H5T.close(dset_typeId);
        H5T.close(native_type);
        H5D.close(dsetId);
        Console.WriteLine("\t\t\t\tPASSED");
    } // end of try block
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_compound_dtype
} // test_attr_plist

// Creates a dataset, attaches a compound-typed attribute (c u8 @0, i int @1,
// l i64-BE @5), verifies that creating the same attribute twice fails, then
// writes the attribute data and closes everything. Failures are reported on
// the console and counted in nerrors.
static void test_attr_compound_write()
{
    try
    {
        Console.Write("Testing write attributes with compound datatype");

        const int NX = 256; // data set dimension
        const int NY = 512;

        // Create a file.
        H5FileId fileId = H5F.create(COMP_FNAME, H5F.CreateMode.ACC_TRUNC);

        // Create dataspace for dataset.
        hssize_t[] dims = { NX, NY };
        H5DataSpaceId spaceId = H5S.create_simple(SPACE1_RANK, dims);

        // Create a dataset.
        H5DataSetId dsetId = H5D.create(fileId, DSET1_NAME, H5T.H5Type.NATIVE_UCHAR, spaceId);

        // Close dataset's dataspace
        H5S.close(spaceId);

        // this number 16 needs to be verified.
        // NOTE(review): 16 bytes covers the members inserted below
        // (1 @0, 4 @1, 8 @5 = 13 bytes + 3 tail bytes); confirm it matches
        // the marshaled size of attr4_struct.
        // Create the attribute datatype.
        H5DataTypeId typeId = H5T.create(H5T.CreateClass.COMPOUND, 16);
        //tid1 = H5Tcreate(H5T_COMPOUND, sizeof(struct attr4_struct));

        // Packed member offsets within the 16-byte compound.
        int attr4_field1_off = 0;
        int attr4_field2_off = 1;
        int attr4_field3_off = 5;
        H5T.insert(typeId, "c", attr4_field1_off, H5T.H5Type.STD_U8LE);
        H5T.insert(typeId, "i", attr4_field2_off, H5T.H5Type.NATIVE_INT);
        H5T.insert(typeId, "l", attr4_field3_off, H5T.H5Type.STD_I64BE);

        // Create dataspace for first attribute.
        hssize_t[] dims2 = { ATTR4_DIM1, ATTR4_DIM2 };
        spaceId = H5S.create_simple(ATTR4_RANK, dims2);

        // Create complex attribute for the dataset.
        H5AttributeId attrId = H5A.create(dsetId, ATTR4_NAME, typeId, spaceId);

        // Try to create the same attribute again (should fail.)
        try
        {
            attrId = H5A.create(dsetId, ATTR4_NAME, typeId, spaceId);
            // should fail, but didn't, print an error message.
            Console.WriteLine("\ntest_attr_compound_write: Attempting to create an existing attribute.");
            nerrors++;
        }
        catch (HDFException) { } // does nothing, it should fail

        // Allocate space for the points & check arrays
        attr4_struct[,] attr_data4 = new attr4_struct[ATTR4_DIM1, ATTR4_DIM2];

        // Initialize the dataset
        int ii, jj, nn;
        for (ii = nn = 0; ii < ATTR4_DIM1; ii++)
        {
            for (jj = 0; jj < ATTR4_DIM2; jj++)
            {
                attr_data4[ii, jj].c = 't';
                attr_data4[ii, jj].i = nn++;
                attr_data4[ii, jj].l = (ii * 10 + jj * 100) * nn;
            }
        }

        // Write complex attribute data.
        H5A.write(attrId, typeId, new H5Array<attr4_struct>(attr_data4));

        // Close all objects and file.
        H5A.close(attrId);
        H5S.close(spaceId);
        H5T.close(typeId);
        H5D.close(dsetId);
        H5F.close(fileId);

        Console.WriteLine("\t\tPASSED");
    }
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_attr_compound_write