public void H5Dvlen_get_buf_sizeTest1()
{
    // Write a variable-length (VLEN) dataset of int sequences with lengths
    // 1..10 into two test files, then verify that H5D.vlen_get_buf_size
    // reports exactly the number of bytes needed to read all elements back.
    hid_t vlen = H5T.vlen_create(H5T.NATIVE_INT);
    Assert.IsTrue(vlen >= 0);

    hsize_t[] dims = { 10 };
    hid_t space = H5S.create_simple(1, dims, null);
    Assert.IsTrue(space >= 0);

    hid_t dset = H5D.create(m_v0_test_file, "vlen", vlen, space);
    Assert.IsTrue(dset >= 0);  // BUG FIX: originally re-asserted `space >= 0`
    hid_t dset1 = H5D.create(m_v2_test_file, "vlen", vlen, space);
    Assert.IsTrue(dset1 >= 0); // BUG FIX: originally re-asserted `space >= 0`

    // Build the hvl_t descriptors: element i points at a pinned int[i + 1].
    H5T.hvl_t[] wdata = new H5T.hvl_t[dims[0]];
    GCHandle[] whndl = new GCHandle[wdata.Length];
    int[][] jagged = new int[dims[0]][];
    for (int i = 0; i < wdata.Length; ++i)
    {
        jagged[i] = new int[i + 1];
        whndl[i] = GCHandle.Alloc(jagged[i], GCHandleType.Pinned);
        wdata[i].len = new IntPtr(i + 1);
        wdata[i].p = whndl[i].AddrOfPinnedObject();
    }

    GCHandle wdata_hndl = GCHandle.Alloc(wdata, GCHandleType.Pinned);
    Assert.IsTrue(H5D.write(dset, vlen, H5S.ALL, H5S.ALL, H5P.DEFAULT,
        wdata_hndl.AddrOfPinnedObject()) >= 0);
    Assert.IsTrue(H5D.write(dset1, vlen, H5S.ALL, H5S.ALL, H5P.DEFAULT,
        wdata_hndl.AddrOfPinnedObject()) >= 0);
    wdata_hndl.Free();

    for (int i = 0; i < wdata.Length; ++i)
    {
        whndl[i].Free();
    }

    hsize_t size = 0;
    Assert.IsTrue(H5S.select_all(space) >= 0);
    Assert.IsTrue(H5D.vlen_get_buf_size(dset, vlen, space, ref size) >= 0);
    Assert.IsTrue(size == 220); // (1 + 2 + ... + 10) x sizeof(int)
    Assert.IsTrue(H5D.vlen_get_buf_size(dset1, vlen, space, ref size) >= 0);
    Assert.IsTrue(size == 220); // (1 + 2 + ... + 10) x sizeof(int)

    Assert.IsTrue(H5D.close(dset1) >= 0);
    Assert.IsTrue(H5D.close(dset) >= 0);
    Assert.IsTrue(H5T.close(vlen) >= 0);
    Assert.IsTrue(H5S.close(space) >= 0);
}
/// <summary>
/// Creates a (possibly chunked) dataset named <paramref name="dataSetName"/>
/// under <paramref name="hid"/>. A chunked layout is selected when any entry
/// of <paramref name="maxDims"/> is H5S.UNLIMITED, in which case
/// <paramref name="chunk"/> must supply a positive chunk size per dimension.
/// NOTE(review): <paramref name="fillValue"/> is accepted but never applied
/// by the visible code — confirm intent.
/// </summary>
public static hid_t CreateDataSet(hid_t hid, string dataSetName, hid_t typeId, ulong[] dims, ulong[] maxDims = null, ulong[] chunk = null, object fillValue = null)
{
    if (string.IsNullOrEmpty(dataSetName))
    {
        throw new ArgumentException("dataSetName");
    }
    if (dims == null)
    {
        throw new ArgumentNullException("dims");
    }
    if (maxDims != null && maxDims.Length != dims.Length)
    {
        throw new ArgumentException("dims and maxDims should have the identical length.");
    }

    // Chunking is required as soon as any dimension is extendible.
    var chunked = false;
    if (maxDims != null)
    {
        for (int i = 0; i < dims.Length; ++i)
        {
            if (maxDims[i] != H5S.UNLIMITED && maxDims[i] < dims[i])
            {
                throw new ArgumentException("maxDims[i] < dims[i]");
            }
            if (!chunked)
            {
                chunked = maxDims[i] == H5S.UNLIMITED;
            }
        }
    }

    if (chunked)
    {
        if (chunk == null)
        {
            throw new ArgumentNullException("chunk");
        }
        if (chunk.Length != dims.Length)
        {
            throw new ArgumentException("chunk");
        }
        if (chunk.Any(size => size <= 0))
        {
            throw new ArgumentException("All dimensions should have a positive length.");
        }
    }

    if (DataSetExists(hid, dataSetName))
    {
        throw new HDF5Exception("Data set \"{0}\" already exists.", dataSetName);
    }

    var space = H5S.create_simple(dims.Length, dims, maxDims);
    if (space < 0)
    {
        throw new HDF5Exception("H5S.create_simple failed.");
    }

    if (!chunked)
    {
        // Contiguous dataset with default creation properties.
        var dataSet = H5D.create(hid, dataSetName, typeId, space);
        H5S.close(space);
        if (dataSet < 0)
        {
            throw new HDF5Exception("Failed to create data set \"{0}\".", dataSetName);
        }
        return(dataSet);
    }
    else
    {
        var createPropertyList = H5P.create(H5P.DATASET_CREATE);
        if (createPropertyList < 0)
        {
            H5S.close(space);
            throw new HDF5Exception("Failed to create property list for chunked data set \"{0}\".", dataSetName);
        }
        if (H5P.set_chunk(createPropertyList, chunk.Length, chunk) < 0)
        {
            // BUG FIX: the property list used to leak on this failure path.
            H5P.close(createPropertyList);
            H5S.close(space);
            throw new HDF5Exception("H5P.set_chunk failed.");
        }
        var dataSet = H5D.create(hid, dataSetName, typeId, space, H5P.DEFAULT, createPropertyList, H5P.DEFAULT);
        H5S.close(space);
        H5P.close(createPropertyList);
        if (dataSet < 0)
        {
            throw new HDF5Exception("Failed to create chunked data set \"{0}\".", dataSetName);
        }
        return(dataSet);
    }
}
private static void createVDS()
{
    // Build three source files (A, B, C), each holding a 1-D dataset of six
    // 32-bit ints, plus a fourth file containing a 4x6 virtual dataset (VDS)
    // whose first three rows map onto A, B and C respectively.

    // create files
    m_a_class_file = Utilities.H5TempFile(ref m_a_class_file_name, H5F.libver_t.LATEST, true);
    Assert.IsTrue(m_a_class_file >= 0);
    m_b_class_file = Utilities.H5TempFile(ref m_b_class_file_name, H5F.libver_t.LATEST, true);
    Assert.IsTrue(m_b_class_file >= 0);
    m_c_class_file = Utilities.H5TempFile(ref m_c_class_file_name, H5F.libver_t.LATEST, true);
    Assert.IsTrue(m_c_class_file >= 0);
    m_vds_class_file = Utilities.H5TempFile(ref m_vds_class_file_name);
    Assert.IsTrue(m_vds_class_file >= 0);

    //
    // create target datasets
    //
    hid_t dcpl = H5P.create(H5P.DATASET_CREATE);
    Assert.IsTrue(dcpl >= 0);

    // BUG FIX: the original pinned a boxed copy of `fill_value` while it was
    // still 1 and then reassigned the local (2, 3, -1) — reassigning a local
    // never updates the pinned box, so the DCPL fill value stayed 1 instead
    // of the intended -1 for the VDS. Pin the value only after it holds the
    // VDS fill value. (The intermediate per-source fill assignments were dead
    // anyway: A, B and C are created with the default DCPL below.)
    int fill_value = -1;
    GCHandle hnd = GCHandle.Alloc(fill_value, GCHandleType.Pinned);
    Assert.IsTrue(H5P.set_fill_value(dcpl, H5T.NATIVE_INT, hnd.AddrOfPinnedObject()) >= 0);

    hsize_t[] dims = { 6 };
    hid_t src_dsp = H5S.create_simple(1, dims, null);

    // A
    hid_t a = H5D.create(m_a_class_file, "A", H5T.STD_I32LE, src_dsp);
    Assert.IsTrue(a >= 0);
    Assert.IsTrue(H5D.close(a) >= 0);
    // B
    hid_t b = H5D.create(m_b_class_file, "B", H5T.STD_I32LE, src_dsp);
    Assert.IsTrue(b >= 0);
    Assert.IsTrue(H5D.close(b) >= 0);
    // C
    hid_t c = H5D.create(m_c_class_file, "C", H5T.STD_I32LE, src_dsp);
    Assert.IsTrue(c >= 0);
    Assert.IsTrue(H5D.close(c) >= 0);

    //
    // create the VDS: map row i of the 4x6 virtual space onto source i
    //
    hsize_t[] vds_dims = { 4, 6 };
    hid_t vds_dsp = H5S.create_simple(2, vds_dims, null);

    hsize_t[] start = { 0, 0 };
    hsize_t[] count = { 1, 1 };
    hsize_t[] block = { 1, 6 };

    start[0] = 0;
    Assert.IsTrue(H5S.select_hyperslab(vds_dsp, H5S.seloper_t.SET, start, null, count, block) >= 0);
    Assert.IsTrue(H5P.set_virtual(dcpl, vds_dsp, m_a_class_file_name, "A", src_dsp) >= 0);
    start[0] = 1;
    Assert.IsTrue(H5S.select_hyperslab(vds_dsp, H5S.seloper_t.SET, start, null, count, block) >= 0);
    Assert.IsTrue(H5P.set_virtual(dcpl, vds_dsp, m_b_class_file_name, "B", src_dsp) >= 0);
    start[0] = 2;
    Assert.IsTrue(H5S.select_hyperslab(vds_dsp, H5S.seloper_t.SET, start, null, count, block) >= 0);
    Assert.IsTrue(H5P.set_virtual(dcpl, vds_dsp, m_c_class_file_name, "C", src_dsp) >= 0);

    hid_t vds = H5D.create(m_vds_class_file, "VDS", H5T.STD_I32LE, vds_dsp, H5P.DEFAULT, dcpl, H5P.DEFAULT);
    Assert.IsTrue(vds >= 0);
    Assert.IsTrue(H5D.close(vds) >= 0);

    Assert.IsTrue(H5S.close(vds_dsp) >= 0);
    Assert.IsTrue(H5S.close(src_dsp) >= 0);
    Assert.IsTrue(H5P.close(dcpl) >= 0);
    hnd.Free();

    // close the satellite files
    Assert.IsTrue(H5F.close(m_a_class_file) >= 0);
    Assert.IsTrue(H5F.close(m_b_class_file) >= 0);
    Assert.IsTrue(H5F.close(m_c_class_file) >= 0);
}
/// <summary>
/// Creates a single-valued attribute on the given object and writes
/// <paramref name="_value"/> into it. Currently does not support arrays.
/// </summary>
/// <typeparam name="T">CLR type of the attribute value.</typeparam>
/// <param name="_objectId">Identifier of the object to attach the attribute to.</param>
/// <param name="_title">Attribute name.</param>
/// <param name="_value">Value to store.</param>
/// <returns>The created attribute wrapper, or null when creation failed.</returns>
public static Hdf5Attribute CreateAttribute <T>(Hdf5Identifier _objectId, string _title, T _value)
{
    Hdf5Identifier spaceId;
    Hdf5Identifier attrId;
    Hdf5Identifier typeId;

    Type valueType = _value.GetType();
    var typeEnum = TypeHelper.GetDataTypesEnum(valueType);

    if (typeEnum != Hdf5DataTypes.String)
    {
        // Numeric path: copy the matching native type, force little-endian
        // ordering, and write through the typed helper.
        var nativeType = TypeHelper.GetNativeType(typeEnum);
        typeId = H5T.copy(nativeType.Value).ToId();
        var dataTypeObject = TypeHelper.GetDataTypeByType(typeId);
        H5T.set_order(typeId.Value, H5T.order_t.LE);

        ulong[] dimensions = new ulong[1] { 1 };
        spaceId = H5S.create_simple(1, dimensions, null).ToId();
        attrId = H5A.create(_objectId.Value, _title, typeId.Value, spaceId.Value).ToId();

        if (attrId.Value > 0)
        {
            WriteValue(dataTypeObject, attrId, _value);
        }
    }
    else
    {
        // String path: scalar dataspace, fixed-size C string sized to fit
        // the text plus its terminating NUL.
        string text = Convert.ToString(_value);
        spaceId = H5S.create(H5S.class_t.SCALAR).ToId();
        typeId = H5T.copy(H5T.C_S1).ToId();
        H5T.set_size(typeId.Value, new IntPtr(text.Length + 1));
        attrId = H5A.create(_objectId.Value, _title, typeId.Value, spaceId.Value).ToId();

        IntPtr unmanagedText = Marshal.StringToHGlobalAnsi(text);
        H5A.write(attrId.Value, typeId.Value, unmanagedText);
        Marshal.FreeHGlobal(unmanagedText);
    }

    H5S.close(spaceId.Value);
    H5T.close(typeId.Value);
    H5A.close(attrId.Value);

    return attrId.Value > 0
        ? new Hdf5Attribute { Value = _value, Name = _title, Id = attrId }
        : null;
}
} // test_create

// Round-trips a one-dimensional int array through a little-endian dataset
// and verifies the data read back matches what was written.
static void test_onedim_array(H5FileId fileId)
{
    try
    {
        Console.Write("Testing write/read one-dimensional array");

        const string DSET_NAME = ("One-dim IntArray");
        const int NX = 5;   // number of elements
        const int RANK = 1; // dimensionality

        // Fill the write buffer with 0..NX-1.
        int[] wbuf = new int[NX];
        for (int idx = 0; idx < NX; idx++)
        {
            wbuf[idx] = idx;
        }

        // Fixed-size, one-dimensional dataspace.
        long[] dims = { NX };
        H5DataSpaceId dspaceId = H5S.create_simple(RANK, dims);

        // File datatype: little-endian 32-bit signed integers.
        H5DataTypeId dtypeId = H5T.copy(H5T.H5Type.STD_I32LE);

        // Create the dataset and write the buffer.
        H5DataSetId dsetId = H5D.create(fileId, DSET_NAME, dtypeId, dspaceId);
        H5D.write(dsetId, new H5DataTypeId(H5T.H5Type.STD_I32LE), new H5Array <int>(wbuf));

        // Read the data back into a zeroed buffer.
        int[] rbuf = new int[NX];
        for (int idx = 0; idx < NX; idx++)
        {
            rbuf[idx] = 0;
        }
        H5D.read(dsetId, new H5DataTypeId(H5T.H5Type.STD_I32LE), new H5Array <int>(rbuf));

        // Verify the round-trip element by element.
        for (int idx = 0; idx < NX; idx++)
        {
            if (rbuf[idx] != wbuf[idx])
            {
                Console.WriteLine("\ntest_onedim_array: read value differs from input: read {0} - input {1}", rbuf[idx], wbuf[idx]);
                nerrors++;
            }
        }

        // Release all handles.
        H5D.close(dsetId);
        H5T.close(dtypeId);
        H5S.close(dspaceId);
        Console.WriteLine("\t\tPASSED");
    }
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_onedim_array
public void SimpleDataReadWrite()
{
    // Round-trip test: write a 12-element descending int ramp plus a
    // scalar value, close everything, then re-open the file read-only and
    // verify both datasets.
    const int RANK = 1;
    const int SIZE = 12;

    // Create the file; ACC_TRUNC overwrites any existing file.
    H5FileId fileId = H5F.create(TEST_FILE, H5F.CreateMode.ACC_TRUNC);

    // Create an (empty) group in the file.
    H5GroupId groupId = H5G.create(fileId, "/simple");

    // Descending ramp: SIZE, SIZE-1, ..., 1.
    int[] dset_data = new int[SIZE];
    for (int i = 0; i < SIZE; i++)
    {
        dset_data[i] = SIZE - i;
    }

    // One 1-D dataspace is reused for both datasets below.
    long[] dims = new long[RANK];
    dims[0] = SIZE;
    H5DataSpaceId spaceId = H5S.create_simple(RANK, dims);

    // Array dataset.
    H5DataSetId dataSetId = H5D.create(fileId, "/arrayIntExample", H5T.H5Type.NATIVE_INT, spaceId);
    H5D.write(dataSetId, new H5DataTypeId(H5T.H5Type.NATIVE_INT), new H5Array <int>(dset_data));

    // Scalar-style write (demonstrates writeScalar on a second dataset).
    H5DataSetId scalarId = H5D.create(fileId, "/scalarIntExample", H5T.H5Type.NATIVE_INT, spaceId);
    int singleValue = 100;
    H5D.writeScalar(scalarId, new H5DataTypeId(H5T.H5Type.NATIVE_INT), ref singleValue);

    // Close everything down.
    H5D.close(dataSetId);
    H5D.close(scalarId);
    H5S.close(spaceId);
    H5G.close(groupId);
    H5F.close(fileId);

    Assert.IsTrue(System.IO.File.Exists(TEST_FILE));

    // Re-open read-only and verify file, group and storage size.
    fileId = H5F.open(TEST_FILE, H5F.OpenMode.ACC_RDONLY);
    Assert.IsTrue(fileId.Id > 0);
    groupId = H5G.open(fileId, "/simple");
    Assert.IsTrue(groupId.Id > 0);
    Assert.AreEqual(0, H5G.getNumObjects(groupId));

    dataSetId = H5D.open(fileId, "/arrayIntExample");
    Assert.IsTrue(dataSetId.Id > 0);
    long datasetsize = H5D.getStorageSize(dataSetId);
    Assert.AreEqual(SIZE * sizeof(int), datasetsize);

    // The array must read back as the same descending ramp.
    int[] readDataBack = new int[SIZE];
    H5D.read(dataSetId, new H5DataTypeId(H5T.H5Type.NATIVE_INT), new H5Array <int>(readDataBack));
    for (int i = 0; i < SIZE; i++)
    {
        Assert.AreEqual(SIZE - i, readDataBack[i]);
    }

    // And the scalar must read back as 100.
    scalarId = H5D.open(fileId, "/scalarIntExample");
    Assert.IsTrue(scalarId.Id > 0);
    H5D.readScalar <int>(scalarId, new H5DataTypeId(H5T.H5Type.NATIVE_INT), ref singleValue);
    Assert.AreEqual(100, singleValue);

    H5D.close(dataSetId);
    H5D.close(scalarId);
    H5G.close(groupId);
    H5F.close(fileId);
}
private void ConvertHdf4To5(string f4name, string f5name, Action <string> messageAction)
{
    // Converts scientific datasets (SDS) of an HDF4 file into datasets of a
    // newly created HDF5 file. Currently only the FIRST dataset is converted
    // (see the loop bound below) and only rank-2 datasets are handled;
    // samples are read as 16-bit values and widened to int.
    var hdf = new H4File(null, null, null, new long[] { 0, 0 });
    hdf.Load(f4name);
    H4SDS[] sds = hdf.Datasets;

    // Create a new file using H5F_ACC_TRUNC access, default file creation
    // properties, and default file access properties.
    H5FileId fileId = H5F.create(f5name, H5F.CreateMode.ACC_TRUNC);

    // Convert the scientific datasets and their attributes.
    //for (int k = 0; k < hdf.Num_Datasets; k++)
    for (int k = 0; k < 1; k++)
    {
        H4SDS sd = hdf.Datasets[k];
        HDFAttribute[] attrs = sd.SDAttributes;
        string sdName = sd.Name;
        int rank = sd.Rank;
        if (messageAction != null)
        {
            messageAction(string.Format("正在转换数据集 {0}", sdName));
        }
        if (rank == 2)
        {
            int nx = sd.Dimsizes[0];
            int ny = sd.Dimsizes[1];
            int buffersize = nx * ny;
            int typesize = HDFDataType.GetSize(sd.Datatype);

            // Read the raw HDF4 data into unmanaged memory, then copy it
            // out as 16-bit samples.
            // NOTE(review): assumes the source datatype is 16-bit — confirm.
            IntPtr ptr = Marshal.AllocHGlobal(buffersize * typesize);
            sd.Read(new int[] { 0, 0 }, null, sd.Dimsizes, ptr);
            short[] buffer = new short[buffersize];
            Marshal.Copy(ptr, buffer, 0, buffersize);
            Marshal.FreeHGlobal(ptr);

            // Widen to a 2-D int array for writing.
            int[,] data = new int[nx, ny];
            for (int i = 0; i < nx; i++)
            {
                for (int j = 0; j < ny; j++)
                {
                    int index = i * ny + j;
                    data[i, j] = buffer[index];
                }
            }

            // Describe the array and create the dataspace/datatype/dataset.
            long[] dims = new long[rank];
            dims[0] = Convert.ToInt64(nx);
            dims[1] = Convert.ToInt64(ny);
            H5DataSpaceId dspaceId = H5S.create_simple(rank, dims);
            H5DataTypeId dtypeId = H5T.copy(H5T.H5Type.NATIVE_INT);
            H5DataSetId dsetId = H5D.create(fileId, sdName, dtypeId, dspaceId);

            H5D.write(dsetId, new H5DataTypeId(H5T.H5Type.NATIVE_INT), new H5Array <int>(data));

            // BUG FIX: close the per-dataset handles here (the datatype and
            // dataspace used to leak) ...
            H5D.close(dsetId);
            H5T.close(dtypeId);
            H5S.close(dspaceId);
        }
    }
    // ... and close the FILE only once, after the loop — the original closed
    // it inside the loop, which would break conversion of any later dataset.
    H5F.close(fileId);
}
// Exercises dataset create/open/close, including the two mandatory failure
// cases: creating a dataset twice and opening a non-existent dataset.
static void test_create(H5FileId fileId)
{
    try
    {
        Console.Write("Testing create, open, and close datasets");

        const string DSET_DEFAULT_NAME = ("default");
        const int RANK = 2; // both dataspaces below are two-dimensional

        // A regular dataspace and a small one for a compact dataset.
        hssize_t[] dims = { 256, 512 };
        H5DataSpaceId space = H5S.create_simple(RANK, dims);
        hssize_t[] small_dims = { 16, 8 };
        H5DataSpaceId small_space = H5S.create_simple(RANK, small_dims);

        // Create a dataset with default creation properties, then close it.
        H5DataSetId dataset = H5D.create(fileId, DSET_DEFAULT_NAME, H5T.H5Type.NATIVE_INT, small_space);
        H5D.close(dataset);

        // Re-creating the same dataset must fail: a dataset can only be
        // created once.
        try
        {
            dataset = H5D.create(fileId, DSET_DEFAULT_NAME, H5T.H5Type.NATIVE_DOUBLE, space);
            Console.WriteLine("\ntest_create: Attempting to create an existing dataset.");
            nerrors++;
        }
        catch (HDFException) { } // expected failure

        // Opening the existing dataset must succeed.
        dataset = H5D.open(fileId, DSET_DEFAULT_NAME);
        H5D.close(dataset);

        // Opening a non-existent dataset must fail: H5D.open never creates.
        try
        {
            dataset = H5D.open(fileId, "does_not_exist");
            Console.WriteLine("\ntest_create: Opened a non-existent dataset.");
            nerrors++;
        }
        catch (HDFException) { } // expected failure

        Console.WriteLine("\t\tPASSED");
    }
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_create
[InlineData("relativecd", "", "")] // file sits next to current directory
public void CanFollowExternalLink(string externalFilePath, string environment, string prefix)
{
    // Arrange: materialize the external target path for this test case and
    // optionally expose its directory via HDF5_EXT_PREFIX or a link-access
    // prefix.
    if (externalFilePath == "absolute")
    {
        externalFilePath = Path.GetTempFileName();
    }

    var filePath = TestUtils.PrepareTestFile(H5F.libver_t.LATEST, fileId => TestUtils.AddExternalFileLink(fileId, externalFilePath));

    if (externalFilePath == "relative")
    {
        externalFilePath = Path.Combine(Path.GetTempPath(), externalFilePath);
    }
    else if (externalFilePath == "relativecd")
    {
        externalFilePath = Path.Combine(Environment.CurrentDirectory, externalFilePath);
    }

    if (environment == "single")
    {
        environment = Path.GetDirectoryName(externalFilePath);
        Environment.SetEnvironmentVariable("HDF5_EXT_PREFIX", environment);
    }
    else if (environment == "multiple")
    {
        // Why did HDF Group choose a colon as prefix separator? This test must fail.
        environment = $"::C:\\temp:{Path.GetDirectoryName(externalFilePath)}";
        Environment.SetEnvironmentVariable("HDF5_EXT_PREFIX", environment);
    }

    if (prefix == "yes")
    {
        prefix = Path.GetDirectoryName(externalFilePath);
    }

    // Build the external file containing /external/group/<dataset>.
    var externalFileId = H5F.create(externalFilePath, H5F.ACC_TRUNC);
    var externalGroupId1 = H5G.create(externalFileId, "external");
    var externalGroupId2 = H5G.create(externalGroupId1, "group");
    var spaceId = H5S.create_simple(1, new ulong[] { 1 }, new ulong[] { 1 });
    var datasetId = H5D.create(externalGroupId2, "Hello from external file =)", H5T.NATIVE_UINT, spaceId);

    long res;
    res = H5S.close(spaceId);
    res = H5D.close(datasetId);
    res = H5G.close(externalGroupId2);
    res = H5G.close(externalGroupId1);
    res = H5F.close(externalFileId);

    // Act: resolving the dataset through the external link must succeed.
    using var root = H5File.OpenReadCore(filePath, deleteOnClose: true);
    var linkAccess = string.IsNullOrWhiteSpace(prefix)
        ? new H5LinkAccess()
        : new H5LinkAccess() { ExternalLinkPrefix = prefix };
    var dataset = root.Dataset("/links/external_link/Hello from external file =)", linkAccess);
}
static void Main(string[] args)
{
    try
    {
        // Length of the int array we write and read back.
        const int DATA_ARRAY_LENGTH = 12;
        // Rank (number of dimensions) of the data array.
        const int RANK = 1;

        // Create the HDF5 file; ACC_TRUNC overwrites an existing file.
        H5FileId fileId = H5F.create("myCSharp.h5", H5F.CreateMode.ACC_TRUNC);

        // Create a group and a nested sub-group.
        H5GroupId groupId = H5G.create(fileId, "/cSharpGroup", 0);
        H5GroupId subGroup = H5G.create(groupId, "mySubGroup", 0);

        // Demonstrate getObjectInfo.
        ObjectInfo info = H5G.getObjectInfo(fileId, "/cSharpGroup", true);
        Console.WriteLine("cSharpGroup header size is {0}", info.headerSize);
        Console.WriteLine("cSharpGroup nlinks is {0}", info.nHardLinks);
        Console.WriteLine("cSharpGroup fileno is {0} {1}", info.fileNumber[0], info.fileNumber[1]);
        Console.WriteLine("cSharpGroup objno is {0} {1}", info.objectNumber[0], info.objectNumber[1]);
        Console.WriteLine("cSharpGroup type is {0}", info.objectType);
        H5G.close(subGroup);

        // Dataspace dimensions for a 1-D signed-int array.
        ulong[] dims = new ulong[RANK];
        dims[0] = DATA_ARRAY_LENGTH;

        // Descending ramp so the round-trip is easy to eyeball.
        int[] dset_data = new int[DATA_ARRAY_LENGTH];
        for (int i = 0; i < DATA_ARRAY_LENGTH; i++)
        {
            dset_data[i] = DATA_ARRAY_LENGTH - i;
        }

        H5DataSpaceId spaceId = H5S.create_simple(RANK, dims);

        // Copy a standard datatype. We could have passed the H5T.H5Type
        // value straight to H5D.create, but this demonstrates H5T.copy and
        // using an H5DataTypeId in H5D.create.
        H5DataTypeId typeId = H5T.copy(H5T.H5Type.NATIVE_INT);

        uint typeSize = H5T.getSize(typeId);
        Console.WriteLine("typeSize is {0}", typeSize);

        // Flip the byte order to big endian and back to demonstrate setOrder.
        H5T.setOrder(typeId, H5T.Order.BE);
        H5T.setOrder(typeId, H5T.Order.LE);

        // Create the dataset and write the ramp.
        H5DataSetId dataSetId = H5D.create(fileId, "/csharpExample", typeId, spaceId);
        H5D.write(dataSetId, new H5DataTypeId(H5T.H5Type.NATIVE_INT), new H5Array<int>(dset_data));

        // Read the data back and echo it.
        int[] readDataBack = new int[DATA_ARRAY_LENGTH];
        H5D.read(dataSetId, new H5DataTypeId(H5T.H5Type.NATIVE_INT), new H5Array<int>(readDataBack));
        for (int i = 0; i < DATA_ARRAY_LENGTH; i++)
        {
            Console.WriteLine(readDataBack[i]);
        }

        H5D.close(dataSetId);

        // Reopen and close the data set (via file and via group) to show
        // that we can.
        dataSetId = H5D.open(fileId, "/csharpExample");
        H5D.close(dataSetId);
        dataSetId = H5D.open(groupId, "/csharpExample");
        H5D.close(dataSetId);

        H5S.close(spaceId);
        H5T.close(typeId);
        H5G.close(groupId);

        // Iterate over the group's members with a callback.
        H5GIterateDelegate myDelegate;
        myDelegate = myFunction;
        int x = 9;
        int index = H5G.iterate(fileId, "/cSharpGroup", myDelegate, x, 0);

        // Reopen the group id to show that we can.
        groupId = H5G.open(fileId, "/cSharpGroup");
        H5G.close(groupId);
        H5F.close(fileId);

        // Reopen and reclose the file.
        H5FileId openId = H5F.open("myCSharp.h5", H5F.OpenMode.ACC_RDONLY);
        H5F.close(openId);
    }
    // This catches all the HDF exception classes. Because each call
    // generates a unique exception, different exceptions can be handled
    // separately. For example, to catch open errors we could have used
    // catch (H5FopenException openException).
    catch (HDFException e)
    {
        Console.WriteLine(e.Message);
    }
    Console.WriteLine("Processing complete!");
    Console.ReadLine();
}
public static void WriteHdfAttribute(H5ObjectWithAttributes fileOrdatasetId, HDFAttributeDef hDFAttributeDef)
{
    // Writes one attribute onto a file or dataset. The in-file datatype is
    // chosen from the CLR TypeCode of the attribute definition; integer
    // types are stored big-endian (double little-endian per the code below)
    // and strings are stored as a scalar C-style string.
    H5DataSpaceId dataSpaceId = H5S.create_simple(1, new long[] { (long)hDFAttributeDef.Size });
    H5DataTypeId dataTypeId = null;
    H5AttributeId attributeId = null;
    try
    {
        switch (Type.GetTypeCode(hDFAttributeDef.Type))
        {
            case TypeCode.Byte:
                dataTypeId = H5T.copy(H5T.H5Type.STD_U8BE);
                attributeId = H5A.create(fileOrdatasetId, hDFAttributeDef.Name, dataTypeId, dataSpaceId);
                WriteTAttribute <byte> .WriteAttribute(hDFAttributeDef, dataTypeId, attributeId);
                break;
            case TypeCode.Char:
                dataTypeId = H5T.copy(H5T.H5Type.STD_U8BE);
                attributeId = H5A.create(fileOrdatasetId, hDFAttributeDef.Name, dataTypeId, dataSpaceId);
                WriteTAttribute <char> .WriteAttribute(hDFAttributeDef, dataTypeId, attributeId);
                break;
            case TypeCode.Double:
                // Doubles are deliberately little-endian (unlike the rest).
                //dataTypeId = H5T.copy(H5T.H5Type.IEEE_F64BE);
                dataTypeId = H5T.copy(H5T.H5Type.IEEE_F64LE);
                attributeId = H5A.create(fileOrdatasetId, hDFAttributeDef.Name, dataTypeId, dataSpaceId);
                WriteTAttribute <double> .WriteAttribute(hDFAttributeDef, dataTypeId, attributeId);
                break;
            case TypeCode.Int16:
                dataTypeId = H5T.copy(H5T.H5Type.STD_I16BE);
                attributeId = H5A.create(fileOrdatasetId, hDFAttributeDef.Name, dataTypeId, dataSpaceId);
                WriteTAttribute <Int16> .WriteAttribute(hDFAttributeDef, dataTypeId, attributeId);
                break;
            case TypeCode.Int32:
                dataTypeId = H5T.copy(H5T.H5Type.STD_I32BE);
                attributeId = H5A.create(fileOrdatasetId, hDFAttributeDef.Name, dataTypeId, dataSpaceId);
                WriteTAttribute <Int32> .WriteAttribute(hDFAttributeDef, dataTypeId, attributeId);
                break;
            case TypeCode.Int64:
                dataTypeId = H5T.copy(H5T.H5Type.STD_I64BE);
                attributeId = H5A.create(fileOrdatasetId, hDFAttributeDef.Name, dataTypeId, dataSpaceId);
                WriteTAttribute <Int64> .WriteAttribute(hDFAttributeDef, dataTypeId, attributeId);
                break;
            case TypeCode.Object:
                dataTypeId = H5T.copy(H5T.H5Type.STD_REF_OBJ);
                attributeId = H5A.create(fileOrdatasetId, hDFAttributeDef.Name, dataTypeId, dataSpaceId);
                WriteTAttribute <object> .WriteAttribute(hDFAttributeDef, dataTypeId, attributeId);
                break;
            case TypeCode.Single:
                dataTypeId = H5T.copy(H5T.H5Type.IEEE_F32BE);
                attributeId = H5A.create(fileOrdatasetId, hDFAttributeDef.Name, dataTypeId, dataSpaceId);
                WriteTAttribute <Single> .WriteAttribute(hDFAttributeDef, dataTypeId, attributeId);
                break;
            case TypeCode.String:
                dataTypeId = H5T.copy(H5T.H5Type.C_S1);
                // BUG FIX: close the simple dataspace before replacing it
                // with the scalar one — the original leaked it here.
                H5S.close(dataSpaceId);
                dataSpaceId = H5S.create(H5S.H5SClass.SCALAR);
                attributeId = WriteStringAttribute(fileOrdatasetId, hDFAttributeDef, dataSpaceId, dataTypeId);
                break;
            case TypeCode.UInt16:
                dataTypeId = H5T.copy(H5T.H5Type.STD_U16BE);
                attributeId = H5A.create(fileOrdatasetId, hDFAttributeDef.Name, dataTypeId, dataSpaceId);
                WriteTAttribute <UInt16> .WriteAttribute(hDFAttributeDef, dataTypeId, attributeId);
                break;
            case TypeCode.UInt32:
                dataTypeId = H5T.copy(H5T.H5Type.STD_U32BE);
                attributeId = H5A.create(fileOrdatasetId, hDFAttributeDef.Name, dataTypeId, dataSpaceId);
                WriteTAttribute <UInt32> .WriteAttribute(hDFAttributeDef, dataTypeId, attributeId);
                break;
            case TypeCode.UInt64:
                dataTypeId = H5T.copy(H5T.H5Type.STD_U64BE);
                attributeId = H5A.create(fileOrdatasetId, hDFAttributeDef.Name, dataTypeId, dataSpaceId);
                WriteTAttribute <UInt64> .WriteAttribute(hDFAttributeDef, dataTypeId, attributeId);
                break;
        }
    }
    catch (Exception)
    {
        // Best effort: attribute write failures are deliberately swallowed
        // (preserves the original behavior); handles are released below.
    }
    finally
    {
        // BUG FIX: release ALL handles — the original closed only the
        // attribute, leaking the datatype and dataspace on every call.
        if (attributeId != null)
        {
            H5A.close(attributeId);
        }
        if (dataTypeId != null)
        {
            H5T.close(dataTypeId);
        }
        if (dataSpaceId != null)
        {
            H5S.close(dataSpaceId);
        }
    }
}
public void H5Rget_regionTest1()
{
    // Create a dataset whose link name is a UTF-8 path, make a dataset-region
    // reference to a hyperslab selection on it, and verify that both the name
    // and the region can be recovered from the reference.
    byte[] encoded = Encoding.UTF8.GetBytes(String.Join("/", m_utf8strings));
    // make room for the trailing \0
    byte[] linkName = new byte[encoded.Length + 1];
    Array.Copy(encoded, linkName, encoded.Length);

    hsize_t[] dims = new hsize_t[] { 10, 20 };
    hid_t dspace = H5S.create_simple(2, dims, null);
    Assert.IsTrue(dspace >= 0);
    hid_t dsetId = H5D.create(m_v0_test_file, linkName, H5T.STD_I32LE, dspace, m_lcpl_utf8);
    Assert.IsTrue(dsetId >= 0);

    // Select a single 2x4 block at offset (5, 10).
    hsize_t[] start = { 5, 10 };
    hsize_t[] count = { 1, 1 };
    hsize_t[] block = { 2, 4 };
    Assert.IsTrue(
        H5S.select_hyperslab(dspace, H5S.seloper_t.SET, start, null, count, block) >= 0);

    // Create the dataset-region reference into a pinned buffer.
    byte[] refBuf = new byte[H5R.DSET_REG_REF_BUF_SIZE];
    GCHandle pin = GCHandle.Alloc(refBuf, GCHandleType.Pinned);
    Assert.IsTrue(
        H5R.create(pin.AddrOfPinnedObject(), m_v0_test_file, linkName, H5R.type_t.DATASET_REGION, dspace) >= 0);

    // First get_name call sizes the name, the second retrieves it.
    ssize_t nameSize = H5R.get_name(m_v0_test_file, H5R.type_t.DATASET_REGION, pin.AddrOfPinnedObject(), (byte[])null, IntPtr.Zero);
    Assert.IsTrue(nameSize.ToInt32() == linkName.Length);
    byte[] nameBuf = new byte[nameSize.ToInt32() + 1];
    nameSize = H5R.get_name(m_v0_test_file, H5R.type_t.DATASET_REGION, pin.AddrOfPinnedObject(), nameBuf, new IntPtr(nameBuf.Length));
    Assert.IsTrue(nameSize.ToInt32() == linkName.Length);

    // The retrieved name carries a leading "/" that `linkName` does not,
    // hence the off-by-one comparison.
    for (int i = 0; i < linkName.Length; ++i)
    {
        Assert.IsTrue(linkName[i] == nameBuf[i + 1]);
    }

    // Recover the referenced selection and compare it to the original.
    hid_t regionSpace = H5R.get_region(dsetId, H5R.type_t.DATASET_REGION, pin.AddrOfPinnedObject());
    Assert.IsTrue(regionSpace >= 0);
    pin.Free();
    Assert.IsTrue(H5S.extent_equal(dspace, regionSpace) > 0);
    Assert.IsTrue(H5S.get_select_hyper_nblocks(dspace) == H5S.get_select_hyper_nblocks(regionSpace));

    Assert.IsTrue(H5S.close(regionSpace) >= 0);
    Assert.IsTrue(H5D.close(dsetId) >= 0);
    Assert.IsTrue(H5S.close(dspace) >= 0);
}
public string Get(string path, bool outfolder)
{
    // Reads a JSON list of row dictionaries (column -> value) from `path`,
    // transposes it into per-column series, and writes each column as an
    // HDF5 dataset (dates as fixed-size strings, everything else as floats)
    // into result.h5 inside a freshly created unique directory.
    // Returns the directory path, or "Error" on failure.
    logger.Log(LogLevel.Info, "Entered AVISTEDHDF5Converter GET()");
    try
    {
        string content = File.ReadAllText(path);
        List <Dictionary <string, string> > data = JsonConvert.DeserializeObject <List <Dictionary <string, string> > >(content);

        // Unique output directory: timestamp + process id + random part.
        // BUG FIX: capture DateTime.Now once — the original queried it per
        // component, which could straddle a second/millisecond boundary.
        DateTime now = DateTime.Now;
        string randomlyGeneratedFolderNamePart = Path.GetFileNameWithoutExtension(Path.GetRandomFileName());
        string timeRelatedFolderNamePart = now.Year.ToString() + now.Month.ToString() + now.Day.ToString() + now.Hour.ToString() + now.Minute.ToString() + now.Second.ToString() + now.Millisecond.ToString();
        string processRelatedFolderNamePart = System.Diagnostics.Process.GetCurrentProcess().Id.ToString();

        string copypath;
        if (outfolder)
        {
            copypath = ConfigurationManager.AppSettings["Save_Downloads"].ToString();
        }
        else
        {
            copypath = ConfigurationManager.AppSettings["Converters"].ToString();
        }
        string temporaryDirectoryName = Path.Combine(copypath, timeRelatedFolderNamePart + processRelatedFolderNamePart + randomlyGeneratedFolderNamePart);
        Directory.CreateDirectory(temporaryDirectoryName);
        logger.Log(LogLevel.Info, "Created Directory");

        string uri = Path.Combine(temporaryDirectoryName, "result" + ".h5");
        H5FileId fileId = H5F.create(uri, H5F.CreateMode.ACC_TRUNC);

        // Transpose the rows into one comma-joined value string per column.
        Dictionary <string, string> tempdict = data.First();
        string[] names = tempdict.Keys.ToArray();
        string[] values = new string[names.Length];
        int j = 0;
        foreach (Dictionary <string, string> dict in data)
        {
            var value = dict.Values.ToArray();
            if (j == 0)
            {
                for (int k = 0; k < values.Length; k++)
                {
                    values[k] = value[k];
                }
                j = 1;
            }
            else
            {
                for (int k = 0; k < values.Length; k++)
                {
                    values[k] += "," + value[k];
                }
            }
        }

        int index = 0;
        foreach (string s in names)
        {
            if (s.Equals("date"))
            {
                // Dates are written as fixed-size (10 byte) C strings.
                // NOTE(review): assumes every date string is exactly 10
                // characters (e.g. "yyyy-MM-dd") — confirm the input format.
                string[] strings = values[index++].Split(',');
                byte[] bytes = Encoding.UTF8.GetBytes(String.Concat(strings));

                long[] dims = new long[1];
                dims[0] = strings.Length;
                H5DataSpaceId spaceId = H5S.create_simple(1, dims);
                H5DataTypeId typeId = H5T.copy(H5T.H5Type.C_S1);
                H5T.setSize(typeId, 10);

                H5DataSetId dataSetId = H5D.create(fileId, s, typeId, spaceId);
                H5D.write(dataSetId, typeId, new H5Array <byte>(bytes));
                H5D.close(dataSetId);
                H5S.close(spaceId);
                // BUG FIX: the string datatype used to leak here.
                H5T.close(typeId);
                logger.Log(LogLevel.Info, "Created parameter {0}", s);
            }
            else
            {
                // Numeric columns are parsed as floats.
                // NOTE(review): float.Parse uses the current culture — a
                // comma decimal separator would clash with the comma join
                // above; confirm the expected locale of the input data.
                string[] strings = values[index++].Split(',');
                float[] vl = new float[strings.Length];
                int l = 0;
                foreach (string d in strings)
                {
                    vl[l++] = float.Parse(d);
                }

                long[] dims = new long[1];
                dims[0] = strings.Length;
                H5DataSpaceId spaceId = H5S.create_simple(1, dims);
                H5DataTypeId typeId1 = H5T.copy(H5T.H5Type.NATIVE_FLOAT);
                // Final byte order is little-endian (the original set BE then
                // immediately LE; only the last call takes effect).
                H5T.setOrder(typeId1, H5T.Order.LE);

                H5DataSetId dataSetId = H5D.create(fileId, s, typeId1, spaceId);
                H5D.write(dataSetId, new H5DataTypeId(H5T.H5Type.NATIVE_FLOAT), new H5Array <float>(vl));
                H5D.close(dataSetId);
                H5S.close(spaceId);
                H5T.close(typeId1);
                logger.Log(LogLevel.Info, "Created parameter {0}", s);
            }
        }
        H5F.close(fileId);
        return(temporaryDirectoryName);
    }
    catch (Exception ex)
    {
        logger.Error("AVISTEDHDF5Converter:Failed with exception {0}", ex.Message);
    }
    return("Error");
}
} // test_vlen_dtype

// Verifies H5T.copy: copying a predefined type, copying a copy, and copying
// a datatype directly from a dataset (class/size/sign must all match).
static void test_copy()
{
    try
    {
        Console.Write("Testing copying datatypes");

        // Copy a predefined type, then copy the copy; sizes must match.
        H5DataTypeId intType = H5T.copy(H5T.H5Type.NATIVE_INT);
        int intSize = H5T.getSize(intType);

        H5DataTypeId secondCopy = H5T.copy(intType);
        if (intSize != H5T.getSize(secondCopy))
        {
            Console.WriteLine("test_copy: copy types incorrectly");
            nerrors++;
        }
        H5T.close(secondCopy);

        /*
         * Test copying a datatype from a dataset.
         */
        H5FileId fileId = H5F.create("sometypes.h5", H5F.CreateMode.ACC_TRUNC);
        hssize_t[] dims = { DIM0, DIM1 };
        H5DataSpaceId dspace = H5S.create_simple(2, dims);
        H5DataSetId dset = H5D.create(fileId, "test_types", intType, dspace);

        // Obtain the dataset's datatype and check class, size and sign.
        H5DataTypeId dsType = H5T.copy(dset);
        if (H5T.getClass(dsType) != H5T.H5TClass.INTEGER)
        {
            Console.WriteLine("test_copy: copy of dataset's datatype has incorrect class");
            nerrors++;
        }
        if (H5T.getSize(dsType) != intSize)
        {
            Console.WriteLine("test_copy: copy of dataset's datatype has incorrect size");
            nerrors++;
        }
        H5T.Sign sign = H5T.getSign(dsType);
        if (sign != H5T.Sign.TWOS_COMPLEMENT)
        {
            Console.WriteLine("test_copy: copy of dataset's datatype has incorrect sign, {0}", sign);
            nerrors++;
        }

        // Release everything.
        H5T.close(intType);
        H5S.close(dspace);
        H5T.close(dsType);
        H5D.close(dset);
        H5F.close(fileId);
        Console.WriteLine("\t\t\t\tPASSED");
    }
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_copy
/// <summary>
/// Reads the content of an HDF5 attribute or dataset into a managed T[].
/// Handles primitives (raw byte blit), bool (stored as one byte per element),
/// variable-length strings (ANSI pointers freed via H5.free_memory), and
/// non-primitive blittable value types (per-element PtrToStructure).
/// </summary>
/// <param name="dataPortId">id of the attribute or dataset to read from.</param>
/// <param name="dataContainerType">selects the H5A or H5D read path.</param>
/// <param name="dataspaceId">optional selection dataspace; -1 means "use the
/// container's own space". A supplied id is copied, so the caller keeps
/// ownership of (and must close) its own id.</param>
public static T[] Read<T>(long dataPortId, DataContainerType dataContainerType, long dataspaceId = -1)
{
    long dataspaceId_file = -1;
    long dataspaceId_memory = -1;
    long typeId = -1;
    long elementCount;

    int elementTypeSize;
    int byteLength;

    IntPtr bufferPtr;
    Type elementType;
    T[] returnValue;

    elementTypeSize = 0;
    byteLength = 0;
    bufferPtr = IntPtr.Zero;
    elementType = typeof(T);
    returnValue = null;

    try
    {
        // Copy a caller-supplied dataspace so closing it in 'finally'
        // does not invalidate the caller's id.
        if (dataspaceId > -1)
        {
            dataspaceId = H5S.copy(dataspaceId);
        }

        switch (dataContainerType)
        {
            case DataContainerType.Attribute:

                if (dataspaceId == -1)
                {
                    dataspaceId = H5A.get_space(dataPortId);
                }

                break;

            case DataContainerType.Dataset:

                if (dataspaceId == -1)
                {
                    dataspaceId = H5D.get_space(dataPortId);
                }

                // Datasets need an explicit file-side selection for H5D.read.
                dataspaceId_file = dataspaceId;

                break;

            default:
                throw new NotSupportedException();
        }

        // Per-element size of the unmanaged representation:
        // strings arrive as char pointers, bool as a single byte.
        if (elementType == typeof(string))
        {
            elementTypeSize = Marshal.SizeOf<IntPtr>();
        }
        else if (elementType == typeof(bool))
        {
            elementTypeSize = Marshal.SizeOf<byte>();
        }
        else
        {
            elementTypeSize = Marshal.SizeOf(elementType);
        }

        elementCount = H5S.get_select_npoints(dataspaceId);
        byteLength = (int)elementCount * elementTypeSize;
        bufferPtr = Marshal.AllocHGlobal(byteLength);
        typeId = TypeConversionHelper.GetHdfTypeIdFromType(elementType);

        switch (dataContainerType)
        {
            case DataContainerType.Attribute:

                if (H5A.read(dataPortId, typeId, bufferPtr) < 0)
                {
                    throw new Exception(ErrorMessage.IOHelper_CouldNotReadAttribute);
                }

                break;

            case DataContainerType.Dataset:

                // The memory space is a flat 1-D array matching the selection size.
                dataspaceId_memory = H5S.create_simple(1, new ulong[] { (ulong)elementCount }, new ulong[] { (ulong)elementCount });

                if (H5D.read(dataPortId, typeId, dataspaceId_memory, dataspaceId_file, H5P.DEFAULT, bufferPtr) < 0)
                {
                    throw new Exception(ErrorMessage.IOHelper_CouldNotReadDataset);
                }

                break;

            default:
                throw new NotSupportedException();
        }

        if (elementType.IsPrimitive)
        {
            // Primitives: blit the raw bytes straight into a pinned T[].
            T[] genericSet;
            GCHandle gcHandle;
            byte[] byteSet;

            genericSet = new T[(int)elementCount];
            gcHandle = GCHandle.Alloc(genericSet, GCHandleType.Pinned);
            byteSet = new byte[byteLength];

            Marshal.Copy(bufferPtr, byteSet, 0, byteLength);
            Marshal.Copy(byteSet, 0, gcHandle.AddrOfPinnedObject(), byteLength);
            returnValue = genericSet;

            gcHandle.Free();
        }
        else if (elementType == typeof(string))
        {
            // Variable-length strings: each element is a pointer allocated by
            // the HDF5 library; free it with H5.free_memory after conversion.
            IntPtr[] intPtrSet;

            intPtrSet = new IntPtr[(int)elementCount];
            Marshal.Copy(bufferPtr, intPtrSet, 0, (int)elementCount);

            returnValue = intPtrSet.Select(x =>
            {
                string result = Marshal.PtrToStringAnsi(x);
                H5.free_memory(x);
                return(result);
            }).Cast<T>().ToArray();
        }
        else if (elementType.IsValueType && !elementType.IsPrimitive && !elementType.IsEnum)
        {
            // Non-primitive structs: marshal element by element at a running offset.
            T[] structSet;
            int offset;

            structSet = new T[(int)elementCount];
            offset = 0;

            Enumerable.Range(0, (int)elementCount).ToList().ForEach(x =>
            {
                structSet[x] = Marshal.PtrToStructure<T>(IntPtr.Add(bufferPtr, offset));
                offset += elementTypeSize;
            });

            returnValue = structSet;
        }
        else
        {
            throw new NotSupportedException();
        }
    }
    finally
    {
        // FreeHGlobal(IntPtr.Zero) is a no-op, so this is safe even when
        // allocation never happened.
        Marshal.FreeHGlobal(bufferPtr);

        if (H5I.is_valid(typeId) > 0) { H5T.close(typeId); }
        if (H5I.is_valid(dataspaceId_memory) > 0) { H5S.close(dataspaceId_memory); }
        if (H5I.is_valid(dataspaceId) > 0) { H5S.close(dataspaceId); }
    }

    return(returnValue);
}
} // test_onedim_array

// Writes a 5x2 int dataset to a new file, reads it back from a fresh
// read-only open, and verifies every element round-trips.
// Failures increment the global 'nerrors'.
static void test_twodims_array()
{
    try
    {
        Console.Write("Testing write/read two-dimensional array");

        const string FILE_NAME = ("SDStwodim.h5");
        const string DSET_NAME = ("Two-dim IntArray");
        const int NX = 5;   // data set dimension
        const int NY = 2;
        const int RANK = 2; // two-dimension

        // Data and input buffer initialization.
        int i, j;
        int[,] data = new int[NX, NY];
        for (i = 0; i < NX; i++)
        {
            for (j = 0; j < NY; j++)
            {
                data[i, j] = i + j;
            }
        }

        // Create a new file using H5F_ACC_TRUNC access,
        // default file creation properties, and default file
        // access properties.
        H5FileId fileId = H5F.create(FILE_NAME, H5F.CreateMode.ACC_TRUNC);

        // Describe the size of the array and create the data space for fixed
        // size dataset.
        long[] dims = new long[RANK];
        dims[0] = NX;
        dims[1] = NY;
        H5DataSpaceId dspaceId = H5S.create_simple(RANK, dims);

        // Define datatype for the data in the file.
        H5DataTypeId dtypeId = H5T.copy(H5T.H5Type.NATIVE_INT);

        // Create the data set DATASETNAME.
        H5DataSetId dsetId = H5D.create(fileId, DSET_NAME, dtypeId, dspaceId);

        // Write the two-dimensional data set array.
        H5D.write(dsetId, new H5DataTypeId(H5T.H5Type.NATIVE_INT), new H5Array<int>(data));

        // Close dataset and file.
        H5D.close(dsetId);
        H5F.close(fileId);

        // Open the file again in read only mode.
        fileId = H5F.open(FILE_NAME, H5F.OpenMode.ACC_RDONLY);

        // Open the dataset using its name.
        dsetId = H5D.open(fileId, DSET_NAME);

        // Zero the read-back buffer so stale values cannot mask a failed read.
        int[,] outdata = new int[NX, NY];
        for (i = 0; i < NX; i++)
        {
            for (j = 0; j < NY; j++)
            {
                outdata[i, j] = 0;
            }
        }

        // Read data back.
        H5D.read(dsetId, new H5DataTypeId(H5T.H5Type.NATIVE_INT), new H5Array<int>(outdata));

        // Compare against input buffer to verify.
        for (i = 0; i < NX; i++)
        {
            for (j = 0; j < NY; j++)
            {
                if (outdata[i, j] != data[i, j])
                {
                    Console.WriteLine("\ntest_twodim_array: read value differs from input: read {0} - input {1}", outdata[i, j], data[i, j]);
                    nerrors++;
                }
            }
        }

        // Close all objects and file.
        H5D.close(dsetId);
        H5T.close(dtypeId);
        H5S.close(dspaceId);
        H5F.close(fileId);

        Console.WriteLine("\t\tPASSED");
    }
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_twodims_array
} // test_attr_plist

// Creates a dataset, attaches a 2-D attribute whose elements are a packed
// compound type (u8 'c' at offset 0, int 'i' at offset 1, i64 'l' at offset 5),
// verifies that creating the same attribute twice fails, then writes the
// attribute data. Failures increment the global 'nerrors'.
static void test_attr_compound_write()
{
    try
    {
        Console.Write("Testing write attributes with compound datatype");

        const int NX = 256; // data set dimension
        const int NY = 512;

        // Create a file.
        H5FileId fileId = H5F.create(COMP_FNAME, H5F.CreateMode.ACC_TRUNC);

        // Create dataspace for dataset.
        hssize_t[] dims = { NX, NY };
        H5DataSpaceId spaceId = H5S.create_simple(SPACE1_RANK, dims);

        // Create a dataset.
        H5DataSetId dsetId = H5D.create(fileId, DSET1_NAME, H5T.H5Type.NATIVE_UCHAR, spaceId);

        // Close dataset's dataspace
        H5S.close(spaceId);

        // this number 16 needs to be verified.
        // (16 = 1 + 4 + 8 member bytes plus padding; mirrors the C test's
        // sizeof(struct attr4_struct) — TODO confirm against attr4_struct.)
        // Create the attribute datatype.
        H5DataTypeId typeId = H5T.create(H5T.CreateClass.COMPOUND, 16);
        //tid1 = H5Tcreate(H5T_COMPOUND, sizeof(struct attr4_struct));

        // Member offsets inside the compound.
        int attr4_field1_off = 0;
        int attr4_field2_off = 1;
        int attr4_field3_off = 5;
        H5T.insert(typeId, "c", attr4_field1_off, H5T.H5Type.STD_U8LE);
        H5T.insert(typeId, "i", attr4_field2_off, H5T.H5Type.NATIVE_INT);
        H5T.insert(typeId, "l", attr4_field3_off, H5T.H5Type.STD_I64BE);

        // Create dataspace for first attribute (spaceId is reused here;
        // the dataset's space was already closed above).
        hssize_t[] dims2 = { ATTR4_DIM1, ATTR4_DIM2 };
        spaceId = H5S.create_simple(ATTR4_RANK, dims2);

        // Create complex attribute for the dataset.
        H5AttributeId attrId = H5A.create(dsetId, ATTR4_NAME, typeId, spaceId);

        // Try to create the same attribute again (should fail.)
        try
        {
            attrId = H5A.create(dsetId, ATTR4_NAME, typeId, spaceId);
            // should fail, but didn't, print an error message.
            Console.WriteLine("\ntest_attr_compound_write: Attempting to create an existing attribute.");
            nerrors++;
        }
        catch (HDFException) { } // does nothing, it should fail

        // Allocate space for the points & check arrays
        attr4_struct[,] attr_data4 = new attr4_struct[ATTR4_DIM1, ATTR4_DIM2];

        // Initialize the dataset
        int ii, jj, nn;
        for (ii = nn = 0; ii < ATTR4_DIM1; ii++)
        {
            for (jj = 0; jj < ATTR4_DIM2; jj++)
            {
                attr_data4[ii, jj].c = 't';
                attr_data4[ii, jj].i = nn++;
                attr_data4[ii, jj].l = (ii * 10 + jj * 100) * nn;
            }
        }

        // Write complex attribute data.
        H5A.write(attrId, typeId, new H5Array<attr4_struct>(attr_data4));

        // Close all objects and file.
        H5A.close(attrId);
        H5S.close(spaceId);
        H5T.close(typeId);
        H5D.close(dsetId);
        H5F.close(fileId);

        Console.WriteLine("\t\tPASSED");
    }
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_attr_compound_write
} // test_twodims_array

// Writes a 1x2x2x4x4 int dataset into the caller-owned 'fileId', closes and
// re-opens the dataset, reads it back and verifies every element.
// Failures increment the global 'nerrors'.
static void test_fivedims_array(H5FileId fileId)
{
    try
    {
        Console.Write("Testing write/read five-dimensional array");

        const string DSET_NAME = ("Five-dim IntArray");
        const int DIM1 = 1; // data set dimension
        const int DIM2 = 2;
        const int DIM3 = 2;
        const int DIM4 = 4;
        const int DIM5 = 4;
        const int RANK = 5; // five-dimension

        // Data and output buffer initialization.
        int i, j, k, m, n;
        int[, , , ,] data = new int[DIM1, DIM2, DIM3, DIM4, DIM5];
        for (i = 0; i < DIM1; i++)
        {
            for (j = 0; j < DIM2; j++)
            {
                for (k = 0; k < DIM3; k++)
                {
                    for (m = 0; m < DIM4; m++)
                    {
                        for (n = 0; n < DIM5; n++)
                        {
                            data[i, j, k, m, n] = i + j + k + m + n;
                        }
                    }
                }
            }
        }

        // Describe the size of the array and create the data space for fixed
        // size dataset.
        long[] dims = { DIM1, DIM2, DIM3, DIM4, DIM5 };
        H5DataSpaceId dspaceId = H5S.create_simple(RANK, dims);

        // Define datatype for the data in the file.
        H5DataTypeId dtypeId = H5T.copy(H5T.H5Type.NATIVE_INT);

        // Create the data set DSET_NAME.
        H5DataSetId dsetId = H5D.create(fileId, DSET_NAME, dtypeId, dspaceId);

        // Write the five-dimensional data set array.
        H5D.write(dsetId, dtypeId, new H5Array<int>(data));

        // Zero the read-back buffer so stale values cannot mask a failed read.
        int[, , , ,] outdata = new int[DIM1, DIM2, DIM3, DIM4, DIM5];
        for (i = 0; i < DIM1; i++)
        {
            for (j = 0; j < DIM2; j++)
            {
                for (k = 0; k < DIM3; k++)
                {
                    for (m = 0; m < DIM4; m++)
                    {
                        for (n = 0; n < DIM5; n++)
                        {
                            outdata[i, j, k, m, n] = 0;
                        }
                    }
                }
            }
        }

        // Close and re-open the dataset.
        H5D.close(dsetId);
        dsetId = H5D.open(fileId, DSET_NAME);

        // Read back data.
        H5D.read(dsetId, dtypeId, new H5Array<int>(outdata));

        // Compare against input buffer to verify.
        for (i = 0; i < DIM1; i++)
        {
            for (j = 0; j < DIM2; j++)
            {
                for (k = 0; k < DIM3; k++)
                {
                    for (m = 0; m < DIM4; m++)
                    {
                        for (n = 0; n < DIM5; n++)
                        {
                            int out_value = outdata[i, j, k, m, n];
                            int in_value = data[i, j, k, m, n];
                            if (out_value != in_value)
                            {
                                Console.WriteLine("\ntest_fivedim_array: read value differs from input: read {0} - input {1}", out_value, in_value);
                                nerrors++;
                            }
                        }
                    }
                }
            }
        }

        // Close all objects and file. (The file id itself belongs to the caller.)
        H5D.close(dsetId);
        H5T.close(dtypeId);
        H5S.close(dspaceId);

        Console.WriteLine("\t\tPASSED");
    }
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
}
// Exercises basic scalar attribute writing: attributes on the root group,
// on a subgroup, and on a dataset; duplicate-create failure; write followed
// by immediate read-back; and name lookup via openIndex.
// Fix: the original leaked 'typeId' and 'attspaceId' (never closed); both
// are now released in the final cleanup section.
static void test_attr_basic_write()
{
    try
    {
        Console.Write("Testing Basic Scalar Attribute Writing Functions");

        // Create a new file using H5F_ACC_TRUNC access,
        // default file creation properties, and default file
        // access properties.
        H5FileId fileId = H5F.create(FILE_NAME, H5F.CreateMode.ACC_TRUNC);

        // Copy datatype for use.
        H5DataTypeId typeId = H5T.copy(H5T.H5Type.NATIVE_INT);

        // Open the root group.
        H5GroupId groupId = H5G.open(fileId, "/");

        // Create dataspace for attribute.
        hssize_t[] gdims = { GATTR_DIM1, GATTR_DIM2 };
        H5DataSpaceId gspace_Id = H5S.create_simple(GATTR_RANK, gdims);

        // Create an attribute for the group.
        H5AttributeId attrId = H5A.create(groupId, RGATTR_NAME, typeId, gspace_Id);
        H5A.close(attrId);
        H5G.close(groupId);

        // Create a group in this file.
        groupId = H5G.create(fileId, GROUP1_NAME);

        // Create an attribute for group /Group1.
        attrId = H5A.create(groupId, GATTR_NAME, typeId, gspace_Id);
        H5A.write(attrId, typeId, new H5Array<int>(gattr_data));

        // Create the dataspace.
        hssize_t[] dims1 = { SPACE1_DIM1, SPACE1_DIM2 };
        H5DataSpaceId space1_Id = H5S.create_simple(SPACE1_RANK, dims1);

        // Create a dataset using default properties.
        H5DataSetId dsetId = H5D.create(fileId, DSET1_NAME, H5T.H5Type.NATIVE_UCHAR, space1_Id);

        // Close objects and file.
        H5A.close(attrId);
        H5D.close(dsetId);
        H5G.close(groupId);
        H5F.close(fileId);

        // Open the file again.
        fileId = H5F.open(FILE_NAME, H5F.OpenMode.ACC_RDWR);

        // Open the root group.
        groupId = H5G.open(fileId, "/");

        // Open attribute again.
        attrId = H5A.open(groupId, RGATTR_NAME);

        // Close attribute and root group.
        H5A.close(attrId);
        H5G.close(groupId);

        // Open dataset.
        dsetId = H5D.open(fileId, DSET1_NAME);

        // Create the dataspace for dataset's attribute.
        hssize_t[] attdims = { ATTR1_DIM };
        H5DataSpaceId attspaceId = H5S.create_simple(ATTR1_RANK, attdims);

        // Create an attribute for the dataset.
        attrId = H5A.create(dsetId, D1ATTR1_NAME, typeId, attspaceId);

        // Try to create the same attribute again (should fail.)
        try
        {
            H5AttributeId attr_twice = H5A.create(dsetId, D1ATTR1_NAME, typeId, attspaceId);
            // should fail, but didn't, print an error message.
            Console.WriteLine("\ntest_attr_basic_write: Attempting to create an existing attribute.");
            nerrors++;
        }
        catch (HDFException) { } // does nothing, it should fail

        // Write attribute information.
        int[] attr_data1 = new int[] { 512, -234, 98123 }; /* Test data for 1st attribute */
        H5A.write(attrId, typeId, new H5Array<int>(attr_data1));

        // Create another attribute for the dataset.
        H5AttributeId attr2Id = H5A.create(dsetId, D1ATTR2_NAME, typeId, attspaceId);

        // Write attribute information.
        int[] attr_data2 = new int[] { 256, 11945, -22107 };
        H5A.write(attr2Id, typeId, new H5Array<int>(attr_data2));

        // Read attribute information immediately, without closing attribute.
        int[] read_data1 = new int[3];
        H5A.read(attrId, typeId, new H5Array<int>(read_data1));

        // Verify values read in.
        int ii;
        for (ii = 0; ii < ATTR1_DIM; ii++)
        {
            if (attr_data1[ii] != read_data1[ii])
            {
                Console.WriteLine("\ntest_attr_basic_write: check1: read value differs from input: read {0} - input {1}", read_data1[ii], attr_data1[ii]);
                nerrors++;
            }
        }

        // Close attributes.
        H5A.close(attrId);
        H5A.close(attr2Id);

        // Open attribute again and verify its name.
        attrId = H5A.openIndex(dsetId, 0);
        string attr_name = H5A.getName(attrId);
        if (attr_name != D1ATTR1_NAME)
        {
            Console.WriteLine("\ntest_attr_basic_write: attribute name incorrect: is {0} - should be {1}", attr_name, D1ATTR1_NAME);
            nerrors++;
        }

        // Close attribute.
        H5A.close(attrId);

        // Open the second attribute again and verify its name.
        attr2Id = H5A.openIndex(dsetId, 1);
        attr_name = H5A.getName(attr2Id);
        if (attr_name != D1ATTR2_NAME)
        {
            Console.WriteLine("\ntest_attr_basic_write: attribute name incorrect: is {0} - should be {1}", attr_name, D1ATTR2_NAME);
            nerrors++;
        }

        // Close all objects.
        H5A.close(attr2Id);
        H5S.close(attspaceId); // was leaked in the original
        H5S.close(space1_Id);
        H5S.close(gspace_Id);
        H5T.close(typeId);     // was leaked in the original
        H5D.close(dsetId);
        H5F.close(fileId);

        Console.WriteLine("\tPASSED");
    } // end try block
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_attr_basic_write
/// <summary>
/// Creates a variable-length string attribute with the given values, or
/// overwrites the values of an existing attribute of that name.
/// </summary>
/// <param name="hid">object (file/group/dataset) the attribute hangs off.</param>
/// <param name="key">attribute name.</param>
/// <param name="values">the string values to store; enumerated exactly once.</param>
/// <param name="utf8">true to tag the type as UTF-8, false for ASCII.</param>
/// <returns>true on success, false if any HDF5 call fails.</returns>
private static bool CreateOrOverwriteVariableStringAttribute(hid_t hid, string key, IEnumerable<string> values, bool utf8)
{
    // Materialize once: the original enumerated 'values' repeatedly
    // (several Count() calls plus a foreach), which breaks for one-shot
    // sequences and re-runs deferred LINQ pipelines.
    var valueList = values.ToList();

    // Pins every string and writes the resulting pointer array to 'attribute'.
    // (Shared by the create and overwrite branches, which previously
    // duplicated this loop verbatim.)
    void WriteValues(hid_t attribute, hid_t type)
    {
        var pinnedObjects = new PinnedObject[valueList.Count];
        var data = new IntPtr[valueList.Count];
        int count = 0;
        foreach (string str in valueList)
        {
            var bytes = str.ToBytes(utf8);
            pinnedObjects[count] = new PinnedObject(bytes);
            data[count] = pinnedObjects[count];
            count += 1;
        }
        H5A.write(attribute, type, new PinnedObject(data));
    }

    if (H5A.exists(hid, key) == 0)
    {
        // Attribute doesn't exist.
#if true
        var type = H5T.create(H5T.class_t.STRING, H5T.VARIABLE);
        if (type < 0)
        {
            return(false);
        }
#else
        var type = H5T.copy(H5T.C_S1);
        if (type < 0)
        {
            return(false);
        }
        H5T.set_size(type, H5T.VARIABLE);
#endif
        if (utf8)
        {
            H5T.set_cset(type, H5T.cset_t.UTF8);
        }
        H5T.set_strpad(type, H5T.str_t.NULLTERM);

        // A single value gets a scalar space, otherwise a 1-D simple space.
        var space = valueList.Count == 1
            ? H5S.create(H5S.class_t.SCALAR)
            : H5S.create_simple(1, new ulong[1] { (ulong)valueList.Count }, null);
        if (space < 0)
        {
            H5T.close(type);
            return(false);
        }

        var attribute = H5A.create(hid, key, type, space);
        if (attribute < 0)
        {
            H5S.close(space);
            H5T.close(type);
            return(false);
        }
        H5S.close(space);

        WriteValues(attribute, type);
        H5T.close(type);
        H5A.close(attribute);
    }
    else
    {
        // Attribute exists: reuse its stored type for the overwrite.
        var attribute = H5A.open(hid, key);
        if (attribute < 0)
        {
            return(false);
        }
        var type = H5A.get_type(attribute);
        if (type < 0)
        {
            H5A.close(attribute);
            return(false);
        }

        WriteValues(attribute, type);
        H5T.close(type);
        H5A.close(attribute);
    }
    return(true);
}
// Writes a 10-element variable-length (VLEN of int) dataset to both a v0-
// and a v2-superblock test file, reads each back, verifies element lengths,
// and reclaims the library-allocated VLEN buffers with H5D.vlen_reclaim.
// Fix: after creating 'dset' and 'dset1' the original re-asserted
// 'space >= 0' (copy-paste), so a failed H5D.create went undetected; the
// asserts now check the newly created dataset ids.
public void H5Dvlen_reclaimTest1()
{
    // write a VLEN dataset
    hid_t vlen = H5T.vlen_create(H5T.NATIVE_INT);
    Assert.IsTrue(vlen >= 0);

    hsize_t[] dims = { 10 };
    hid_t space = H5S.create_simple(1, dims, null);
    Assert.IsTrue(space >= 0);

    hid_t dset = H5D.create(m_v0_test_file, "vlen", vlen, space);
    Assert.IsTrue(dset >= 0);
    hid_t dset1 = H5D.create(m_v2_test_file, "vlen", vlen, space);
    Assert.IsTrue(dset1 >= 0);

    // Build jagged input: element i holds (i+1) ints; each row is pinned so
    // its address stays valid for the duration of the writes.
    H5T.hvl_t[] wdata = new H5T.hvl_t[dims[0]];
    GCHandle[] whndl = new GCHandle[wdata.Length];
    int[][] jagged = new int[wdata.Length][];
    for (int i = 0; i < wdata.Length; ++i)
    {
        jagged[i] = new int[i + 1];
        whndl[i] = GCHandle.Alloc(jagged[i], GCHandleType.Pinned);
        wdata[i].len = new IntPtr(i + 1);
        wdata[i].p = whndl[i].AddrOfPinnedObject();
    }

    GCHandle wdata_hndl = GCHandle.Alloc(wdata, GCHandleType.Pinned);
    Assert.IsTrue(H5D.write(dset, vlen, H5S.ALL, H5S.ALL, H5P.DEFAULT,
                            wdata_hndl.AddrOfPinnedObject()) >= 0);
    Assert.IsTrue(H5D.write(dset1, vlen, H5S.ALL, H5S.ALL, H5P.DEFAULT,
                            wdata_hndl.AddrOfPinnedObject()) >= 0);
    wdata_hndl.Free();
    for (int i = 0; i < wdata.Length; ++i)
    {
        whndl[i].Free();
    }

    // read it back
    H5T.hvl_t[] rdata = new H5T.hvl_t[dims[0]];
    GCHandle rdata_hndl = GCHandle.Alloc(rdata, GCHandleType.Pinned);
    Assert.IsTrue(H5D.read(dset, vlen, H5S.ALL, H5S.ALL, H5P.DEFAULT,
                           rdata_hndl.AddrOfPinnedObject()) >= 0);
    for (int i = 0; i < rdata.Length; ++i)
    {
        Assert.IsTrue(rdata[i].len.ToInt32() == i + 1);
    }

    // Reclaim the library-allocated VLEN buffers before reusing rdata
    // for the second read.
    Assert.IsTrue(H5D.vlen_reclaim(vlen, space, H5P.DEFAULT,
                                   rdata_hndl.AddrOfPinnedObject()) >= 0);

    Assert.IsTrue(H5D.read(dset1, vlen, H5S.ALL, H5S.ALL, H5P.DEFAULT,
                           rdata_hndl.AddrOfPinnedObject()) >= 0);
    for (int i = 0; i < rdata.Length; ++i)
    {
        Assert.IsTrue(rdata[i].len.ToInt32() == i + 1);
    }

    // reclaim the space
    Assert.IsTrue(H5D.vlen_reclaim(vlen, space, H5P.DEFAULT,
                                   rdata_hndl.AddrOfPinnedObject()) >= 0);
    rdata_hndl.Free();

    Assert.IsTrue(H5D.close(dset1) >= 0);
    Assert.IsTrue(H5D.close(dset) >= 0);
    Assert.IsTrue(H5T.close(vlen) >= 0);
    Assert.IsTrue(H5S.close(space) >= 0);
}
// Builds a compound type of a 16-byte fixed string "key" plus a
// variable-length string "value" (stored as a native pointer), writes three
// manually serialized elements, reads them back, and verifies both members.
public void H5TinsertTest1()
{
    // a fixed-length string type
    hid_t fls = H5T.create(H5T.class_t.STRING, new IntPtr(16));
    Assert.IsTrue(fls >= 0);
    Assert.IsTrue(H5T.is_variable_str(fls) == 0);

    // a variable-length string type
    hid_t vls = H5T.create(H5T.class_t.STRING, H5T.VARIABLE);
    Assert.IsTrue(vls >= 0);
    Assert.IsTrue(H5T.is_variable_str(vls) > 0);

    // a key-value compound: 16 key bytes followed by a pointer-sized value,
    // so its layout matches the bytes serialized below
    IntPtr size = new IntPtr(16 + IntPtr.Size);
    hid_t kvt = H5T.create(H5T.class_t.COMPOUND, size);
    Assert.IsTrue(H5T.insert(kvt, "key", IntPtr.Zero, fls) >= 0);
    Assert.IsTrue(H5T.insert(kvt, "value", new IntPtr(16), vls) >= 0);
    Assert.IsTrue(H5T.close(vls) >= 0);
    Assert.IsTrue(H5T.close(fls) >= 0);

    // create a key-value dataset (3 elements)
    hid_t fsp = H5S.create_simple(1, new hsize_t[] { 3 }, null);
    Assert.IsTrue(fsp >= 0);
    hid_t dset = H5D.create(m_v2_class_file, "KeyVal", kvt, fsp);
    Assert.IsTrue(dset >= 0);
    Assert.IsTrue(H5S.close(fsp) >= 0);

    // write a 3 elements; keys are exactly 16 ASCII chars to fill the
    // fixed-length member, values live in unmanaged memory
    string[] keys = new string[] {
        "Key0123456789ABC", "Key0123456789DEF", "Key0123456789GHI"
    };
    IntPtr[] values = new IntPtr[3];
    values[0] = Marshal.StringToHGlobalAnsi("I am a managed String!");
    values[1] = Marshal.StringToHGlobalAnsi("I am also a managed String!");
    values[2] = Marshal.StringToHGlobalAnsi("I am another managed String!");

    // serialize each element as 16 key bytes + a native-width pointer,
    // mirroring the compound layout declared above
    MemoryStream ms = new MemoryStream();
    BinaryWriter writer = new BinaryWriter(ms);
    for (int i = 0; i < 3; ++i)
    {
        writer.Write(Encoding.ASCII.GetBytes(keys[i]));
        if (IntPtr.Size == 8)
        {
            writer.Write(values[i].ToInt64());
        }
        else
        {
            writer.Write(values[i].ToInt32());
        }
    }
    byte[] wdata = ms.ToArray();

    GCHandle hnd = GCHandle.Alloc(wdata, GCHandleType.Pinned);
    Assert.IsTrue(H5D.write(dset, kvt, H5S.ALL, H5S.ALL, H5P.DEFAULT,
                            hnd.AddrOfPinnedObject()) >= 0);
    hnd.Free();

    // now read it back
    byte[] rdata = new byte[3 * size.ToInt32()];
    hnd = GCHandle.Alloc(rdata, GCHandleType.Pinned);
    Assert.IsTrue(H5D.read(dset, kvt, H5S.ALL, H5S.ALL, H5P.DEFAULT,
                           hnd.AddrOfPinnedObject()) >= 0);
    hnd.Free();

    // check it out
    MemoryStream ms1 = new MemoryStream(rdata);
    BinaryReader reader = new BinaryReader(ms1);
    for (int i = 0; i < 3; ++i)
    {
        string k = Encoding.ASCII.GetString(reader.ReadBytes(16));
        Assert.IsTrue(k == keys[i]);
        IntPtr ptr = IntPtr.Zero;
        if (IntPtr.Size == 8)
        {
            ptr = new IntPtr(reader.ReadInt64());
        }
        else
        {
            ptr = new IntPtr(reader.ReadInt32());
        }
        string v = Marshal.PtrToStringAnsi(ptr);
        Assert.IsTrue(v == Marshal.PtrToStringAnsi(values[i]));
        // NOTE(review): 'ptr' comes back from the HDF5 library's read of the
        // variable-length member; freeing it with Marshal.FreeHGlobal assumes
        // the allocators match — confirm whether H5.free_memory is required.
        Marshal.FreeHGlobal(ptr);
        Marshal.FreeHGlobal(values[i]);
    }

    Assert.IsTrue(H5D.close(dset) >= 0);
    Assert.IsTrue(H5T.close(kvt) >= 0);
}
// Builds a short-based enum type ("Color Type"), commits it to the file,
// writes a DIM0 x DIM1 dataset of enum values, then re-opens the dataset and
// verifies member names/values of its datatype. Failures increment 'nerrors'.
static void test_enum_dtype(H5FileId fileId)
{
    short i, j;
    string[] mname = { "RED", "GREEN", "BLUE", "YELLOW", "PINK", "PURPLE", "ORANGE", "WHITE" };
    short[,] spoints2 = new short[DIM0, DIM1];
    short[,] scheck2 = new short[DIM0, DIM1];
    try
    {
        Console.Write("Testing enumeration datatypes");

        // Create the data space
        hssize_t[] dims = { DIM0, DIM1 };
        H5DataSpaceId dspace = H5S.create_simple(2, dims);

        // Construct enum type based on native type
        H5DataTypeId etype = H5T.enumCreate(H5T.H5Type.NATIVE_SHORT);

        // Insert members to type. The loop counter doubles as the member
        // value (passed by ref), so RED=0, GREEN=1, ..., WHITE=7.
        for (i = 0; i < 8; i++)
        {
            H5T.enumInsert(etype, mname[i], ref i);
        }

        // Assign a name to the enum type, close it, and open it by name.
        H5T.commit(fileId, "Color Type", etype);
        H5T.close(etype);
        H5DataTypeId color_type = H5T.open(fileId, "Color Type");

        // Check its class
        // (NOTE(review): unlike the later checks this one does not bump
        // 'nerrors' — confirm whether that is intentional.)
        H5T.H5TClass tcls = H5T.getClass(color_type);
        if (tcls != H5T.H5TClass.ENUM)
        {
            Console.WriteLine("test_enum: class of color_type = {0} is incorrect, should be ENUM", tcls);
        }

        // Create the dataset
        H5DataSetId dsetId = H5D.create(fileId, DSET_ENUM_NAME, color_type, dspace);

        // Construct enum type based on native type in memory.
        H5DataTypeId etype_m = H5T.enumCreate(H5T.H5Type.NATIVE_SHORT);

        // Insert members to type.
        for (i = 0; i < 8; i++)
        {
            H5T.enumInsert(etype_m, mname[i], ref i);
        }

        // Initialize the dataset and buffer.
        for (i = 0; i < DIM0; i++)
        {
            for (j = 0; j < DIM1; j++)
            {
                spoints2[i, j] = i;
                scheck2[i, j] = 0;
            }
        }

        // Write the data to the dataset.
        H5D.write(dsetId, etype_m, new H5Array<short>(spoints2));

        // Close objects.
        H5D.close(dsetId);
        H5T.close(color_type);
        H5S.close(dspace);
        H5T.close(etype_m);

        // Open dataset again to check the type.
        dsetId = H5D.open(fileId, DSET_ENUM_NAME);

        // Get dataset's datatype.
        H5DataTypeId dstype = H5D.getType(dsetId);

        // Get the datatype's class and check that it is of class ENUM.
        H5T.H5TClass tclass = H5T.getClass(dstype);
        if (tclass != H5T.H5TClass.ENUM)
        {
            Console.WriteLine("Type should be an enum class");
            nerrors++;
        }

        // Check name of an enum value.
        int memb_num = 2;
        string memb_name = H5T.enumNameOf(dstype, ref memb_num);
        if (memb_name != "BLUE")
        {
            Console.WriteLine("Member name of value 2 should be BLUE");
            nerrors++;
        }

        // Check value of an enum name.
        int memb_value = 0;
        H5T.enumValueOf(dstype, memb_name, out memb_value);
        if (memb_value != 2)
        {
            Console.WriteLine("Member value of BLUE should be 2");
            nerrors++;
        }

        // Check member's value by member number.
        H5T.getMemberValue(dstype, 4, out memb_value);

        // Read data back. (NOTE: scheck2 is filled here but never compared
        // against spoints2 in this test.)
        H5D.read(dsetId, dstype, new H5Array<short>(scheck2));

        // Close objects.
        H5D.close(dsetId);
        H5T.close(dstype);

        Console.WriteLine("\t\t\t\tPASSED");
    } // end of try block
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // end of test_enum_dtype
/// <summary>
/// Appends a dataset to a hdf5 file. If called the first time a dataset is
/// created (chunked along the first dimension so it can grow); subsequent
/// calls extend the first dimension and write the new rows at the end.
/// </summary>
/// <typeparam name="T">Generic parameter only primitive types are allowed</typeparam>
/// <param name="groupId">id of the group. Can also be a file Id</param>
/// <param name="name">name of the dataset</param>
/// <param name="dset">The dataset</param>
/// <param name="chunkX">chunk length along the first (growable) dimension</param>
/// <returns>status of the write method</returns>
public static hid_t AppendDataset<T>(hid_t groupId, string name, Array dset, ulong chunkX = 200) where T : struct
{
    var rank = dset.Rank;
    ulong[] dimsExtend = Enumerable.Range(0, rank).Select(i =>
    {
        return (ulong)dset.GetLength(i);
    }).ToArray();
    ulong[] maxDimsExtend = null;
    ulong[] dimsChunk = new ulong[] { chunkX }.Concat(dimsExtend.Skip(1)).ToArray();
    ulong[] zeros = Enumerable.Range(0, rank).Select(z => (ulong)0).ToArray();
    hid_t status, spaceId, datasetId;

    // name = ToHdf5Name(name);
    var datatype = GetDatatype(typeof(T));
    var datasetExists = H5L.exists(groupId, name) > 0;

    if (!datasetExists)
    {
        /* Create a new dataset within the file using chunk
         * creation properties. */
        spaceId = H5S.create_simple(dset.Rank, dimsExtend, maxDimsExtend);
        var propId = H5P.create(H5P.DATASET_CREATE);
        status = H5P.set_chunk(propId, rank, dimsChunk);
        datasetId = H5D.create(groupId, name, datatype, spaceId,
                               H5P.DEFAULT, propId, H5P.DEFAULT);

        /* Write data to dataset */
        GCHandle hnd = GCHandle.Alloc(dset, GCHandleType.Pinned);
        status = H5D.write(datasetId, datatype, H5S.ALL, H5S.ALL, H5P.DEFAULT,
                           hnd.AddrOfPinnedObject());
        hnd.Free();
        H5P.close(propId);
    }
    else
    {
        datasetId = H5D.open(groupId, name);
        spaceId = H5D.get_space(datasetId);
        var rank_old = H5S.get_simple_extent_ndims(spaceId);
        ulong[] maxDims = new ulong[rank_old];
        ulong[] dims = new ulong[rank_old];
        // get_simple_extent_dims returns a status code, not an id. The
        // original stored it as 'memId1' and later called H5S.close on it,
        // which tried to close a bogus identifier.
        status = H5S.get_simple_extent_dims(spaceId, dims, maxDims);
        // (Removed: the original created a DATASET_ACCESS property list and
        // called H5P.get_chunk(propId, 0, null) on it — wrong plist class,
        // unused result, and the plist id was leaked.)

        /* Extend the dataset along the first dimension. */
        var size = new ulong[] { dims[0] + dimsExtend[0] }.Concat(dims.Skip(1)).ToArray();
        status = H5D.set_extent(datasetId, size);

        /* Select a hyperslab in extended portion of dataset */
        var filespaceId = H5D.get_space(datasetId);
        var offset = new ulong[] { dims[0] }.Concat(zeros.Skip(1)).ToArray();
        status = H5S.select_hyperslab(filespaceId, H5S.seloper_t.SET,
                                      offset, null, dimsExtend, null);

        /* Define memory space */
        var memId2 = H5S.create_simple(rank, dimsExtend, null);

        /* Write the data to the extended portion of the dataset.
         * Use 'filespaceId', which carries the hyperslab selection, as the
         * file space — the original passed the pre-extension 'spaceId',
         * which targets the wrong region of the file. */
        GCHandle hnd = GCHandle.Alloc(dset, GCHandleType.Pinned);
        status = H5D.write(datasetId, datatype, memId2, filespaceId,
                           H5P.DEFAULT, hnd.AddrOfPinnedObject());
        hnd.Free();

        H5S.close(memId2);
        // was H5D.close(filespaceId): a dataspace must be closed with H5S.
        H5S.close(filespaceId);
    }
    H5D.close(datasetId);
    H5S.close(spaceId);
    return status;
}
// Writes a DIM0 x DIM1 dataset of a packed 3-member compound (u8 'c' @0,
// u32 'i' @1, i64 'l' @5), then re-opens it and verifies each member's name,
// class, index, offset, and size. Failures increment the global 'nerrors'.
// Fix: the member-name error message printed the stale outer counter 'i'
// (left at DIM0 by the init loop) instead of the member index 'ii'.
static void test_compound_dtype(H5FileId fileId)
{
    uint i, j, n;
    try
    {
        Console.Write("Testing compound datatypes");

        // Allocate space for the points array
        s1[,] points = new s1[DIM0, DIM1];

        // Initialize the dataset
        for (i = n = 0; i < DIM0; i++)
        {
            for (j = 0; j < DIM1; j++)
            {
                points[i, j].c = 't';
                points[i, j].i = n++;
                points[i, j].l = (i * 10 + j * 100) * n;
            }
        }

        // Create the data space
        hssize_t[] dims = { DIM0, DIM1 };
        H5DataSpaceId spaceId = H5S.create_simple(2, dims);

        // Create compound datatype for disk storage
        H5DataTypeId typeId = H5T.create(H5T.CreateClass.COMPOUND, 16);

        // Insert members
        H5T.insert(typeId, "c", 0, H5T.H5Type.STD_U8LE);
        H5T.insert(typeId, "i", 1, H5T.H5Type.STD_U32LE);
        H5T.insert(typeId, "l", 5, H5T.H5Type.STD_I64BE);

        // Create the dataset
        H5DataSetId dsetId = H5D.create(fileId, DSET_COMPOUND_NAME, typeId, spaceId);

        // Write the dataset
        H5D.write(dsetId, typeId, new H5Array<s1>(points));

        // Close dataset and dataspace
        H5D.close(dsetId);
        H5S.close(spaceId);
        H5T.close(typeId);

        // Open dataset again to check various functions.
        dsetId = H5D.open(fileId, DSET_COMPOUND_NAME);

        // Get its type and native type.
        H5DataTypeId dset_typeId = H5D.getType(dsetId);
        H5DataTypeId native_type = H5T.getNativeType(dset_typeId, H5T.Direction.DEFAULT);

        // Check name against this list
        string[] memb_names = { "c", "i", "l" };
        int[] memb_offsets = { 0, 1, 5 };

        H5DataTypeId mtypeId;              // member type
        H5T.H5TClass memb_cls1, memb_cls2; // member classes retrieved different ways
        string memb_name;                  // member name
        int memb_idx;                      // member index

        // Get the number of members in the type.
        int nmembers = H5T.getNMembers(native_type);

        // For each member, check its name, class, index, and size.
        for (int ii = 0; ii < nmembers; ii++)
        {
            // Get the type of the ith member.
            mtypeId = H5T.getMemberType(native_type, ii);

            // Get the name of the ith member.
            memb_name = H5T.getMemberName(native_type, ii);
            if (memb_name != memb_names[ii])
            {
                // fixed: was printing the stale outer counter 'i'
                Console.WriteLine("test_compound_dtypes: incorrect member name, {0}, for member no {1}", memb_name, ii);
                nerrors++;
            }

            // Get the class of the ith member and then verify the class.
            memb_cls1 = H5T.getMemberClass(native_type, ii);
            if (memb_cls1 != H5T.H5TClass.INTEGER)
            {
                Console.WriteLine("test_compound_dtypes: incorrect class, {0}, for member no {1}", memb_cls1, ii);
                nerrors++;
            }

            // Get the class via type id
            memb_cls2 = H5T.getClass(mtypeId);
            if (memb_cls1 != memb_cls2)
            {
                Console.WriteLine("test_compound_dtypes: H5T.getMemberClass and H5T.getClass return different classes for the same type.");
                nerrors++;
            }

            // Get member's index back from its name and verify it.
            memb_idx = H5T.getMemberIndex(dset_typeId, memb_name);
            if (memb_idx != ii)
            {
                Console.WriteLine("test_compound_dtypes: H5T.getMemberName and/or H5T.getMemberIndex returned false values.");
                nerrors++;
            }

            // Get member's offset and verify it.
            int memb_offset = H5T.getMemberOffset(dset_typeId, ii);
            if (memb_offset != memb_offsets[ii])
            {
                Console.WriteLine("test_compound_dtypes: Incorrect offset value {0}, should be {1}.", memb_offset, memb_offsets[ii]);
                nerrors++;
            }

            // Get size of the member's type and verify it.
            int tsize = H5T.getSize(mtypeId);
            switch (ii)
            {
                case 0:
                    if (tsize != H5T.getSize(H5T.H5Type.STD_U8LE))
                    {
                        Console.WriteLine("test_compound_dtypes: First member has incorrect size");
                        nerrors++;
                    }
                    break;
                case 1:
                    if (tsize != H5T.getSize(H5T.H5Type.STD_U32LE))
                    {
                        Console.WriteLine("test_compound_dtypes: Second member has incorrect size");
                        nerrors++;
                    }
                    break;
                case 2:
                    if (tsize != H5T.getSize(H5T.H5Type.STD_I64BE))
                    {
                        Console.WriteLine("test_compound_dtypes: Third member has incorrect size");
                        nerrors++;
                    }
                    break;
                default:
                    Console.WriteLine("test_compound_dtypes: Only 3 members.");
                    break;
            } // end switch

            // Close current member type.
            H5T.close(mtypeId);
        } // end for

        // Close objects.
        H5T.close(dset_typeId);
        H5T.close(native_type);
        H5D.close(dsetId);

        Console.WriteLine("\t\t\t\tPASSED");
    } // end of try block
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_compound_dtype
/// <summary>
/// Writes a piece of data to the open hdf file based on the specified path.
///
/// i.e. the following method calls will result in a scalar, vector and matrix being written to the group "examples".
/// <code>
/// CreateDataset("/examples/scalar", 5.2);
/// CreateDataset("/examples/vector", new double[]{1, 2, 3});
/// CreateDataset("/examples/matrix", new double[,]{{1, 2}, {3, 4}});
/// </code>
/// </summary>
/// <param name="path">Path to the variable, separated with / </param>
/// <param name="data">data to be written.</param>
/// <exception cref="NotImplementedException">Thrown when the runtime type of <paramref name="data"/> is not supported.</exception>
public void CreateDataset(string path, object data)
{
    // types to support:
    // scalar, vector, matrix, list; double, int, long, string
    DataType dataType = DataType.GetDataType(data);
    if (!IsSupported(dataType.info))
    {
        throw new NotImplementedException($"Data type {dataType.info} is not supported by this library");
    }

    // The last path segment is the dataset name; the full path resolves the group.
    string[] groups = path.Split(PATH_SEP);
    string name = groups[groups.Length - 1];
    long groupId = PathToGroupId(path);

    /* Define memory space */
    ulong[] shape = dataType.GetShape();
    if (dataType.IsScalar() && dataType.IsText())
    {
        // wrap single string into an iterable object
        data = new string[] { (string)data };
    }
    long dspace = H5S.create_simple(shape.Length, shape, null);

    // lookup the HD5 type code.
    long ctype = TypeMap[Type.GetTypeCode(dataType.info)];
    long dtype = H5T.copy(ctype); // no dataset-creation properties needed
    long dset = H5D.create(groupId, name, dtype, dspace);
    var filespaceId = H5D.get_space(dset);

    /* Pin the payload so HDF5 can read it from unmanaged code. */
    GCHandle hnd;
    GCHandle[] handles = null; // non-null only on the text path; freed after the write
    if (dataType.IsNumeric())
    {
        hnd = GCHandle.Alloc(data, GCHandleType.Pinned);
    }
    else
    {
        // Pin every string buffer and hand HDF5 an array of their addresses.
        // BUGFIX: the buffers must remain pinned until H5D.write has consumed
        // them; the previous code released the handles here, leaving the
        // address array pointing at memory the GC was free to move.
        handles = ToMem((IList)data);
        IntPtr[] wdata = HandleToAddress(handles);
        hnd = GCHandle.Alloc(wdata, GCHandleType.Pinned);
    }

    H5D.write(dset, dtype, dspace, filespaceId, H5P.DEFAULT,
              hnd.AddrOfPinnedObject());

    // cleanup mem and handles -- only after the write has consumed the buffers
    hnd.Free();
    if (handles != null)
    {
        ReleaseHandles(handles);
    }
    CloseGroup(groupId);
    H5D.close(dset);
    H5S.close(filespaceId); // BUGFIX: this dataspace handle was previously leaked
    H5S.close(dspace);
    H5T.close(dtype);
}
} // test_compound_dtype

// Exercises variable-length (VL) datatypes: creates a VL-of-uint dataset,
// writes real and "nil" VL data, then reopens the dataset and reads it back
// through a transfer property list that installs custom CRT-heap
// allocate/free callbacks for the VL buffers.
// Errors are reported to the console and counted in the module-level
// `nerrors` counter rather than thrown.
static void test_vlen_dtype(H5FileId fileId)
{
    try
    {
        Console.Write("Testing variable-length datatypes");

        // Create a VL datatype of int.
        H5DataTypeId vltId = H5T.vlenCreate(H5T.H5Type.NATIVE_UINT);

        // Make certain that the correct classes can be detected
        H5T.H5TClass tcls = H5T.getClass(vltId);
        if (tcls != H5T.H5TClass.VLEN)
        {
            Console.WriteLine("Test class should have been H5T_VLEN");
            nerrors++;
        }

        // Create a dataset with a simple dataspace.
        hssize_t[] dims = { DIM1 };
        H5DataSpaceId spaceId = H5S.create_simple(1, dims);
        H5DataSetId dsetId = H5D.create(fileId, "Vlen Dataset", vltId, spaceId);

        // Change to the custom memory allocation routines for reading VL data.
        // (The callbacks themselves are only attached further below, right
        // before the final read.)
        H5PropertyListId xferpId = H5P.create(H5P.PropertyListClass.DATASET_XFER);

        // Writing
        unsafe
        {
            hvl_t[] wdata = new hvl_t[DIM1];  /* Information to write */
            hvl_t[] wdata2 = new hvl_t[DIM1]; /* Information to write */
            hvl_t[] rdata = new hvl_t[DIM1];  /* Information read in */

            /* Allocate and initialize VL data to write.
             * Element ii gets an unmanaged buffer of (ii+1) uints with its
             * first uint set to ii*10; wdata2 holds all-"nil" (null, len 0)
             * sequences. */
            for (uint ii = 0; ii < DIM1; ii++)
            {
                IntPtr iPtr = new IntPtr((ii + 1) * sizeof(uint));
                wdata[ii].p = H5CrtHeap.Allocate(iPtr).ToPointer();
                wdata[ii].len = ii + 1;
                ((uint *)wdata[ii].p)[0] = ii * 10;
                wdata2[ii].p = (void *)0;
                wdata2[ii].len = 0;
            } /* end for */
            // NOTE(review): the wdata[ii].p buffers allocated above are never
            // explicitly freed in this test -- presumably acceptable for a
            // short-lived test process, but worth confirming.

            H5D.write(dsetId, vltId, new H5Array <hvl_t>(wdata));

            // Read from dataset before writing data.
            H5D.read(dsetId, vltId, new H5Array <hvl_t>(rdata));

            // Write "nil" data to disk.
            H5D.write(dsetId, vltId, new H5Array <hvl_t>(wdata2));

            // Read from dataset with "nil" data.
            H5D.read(dsetId, vltId, new H5Array <hvl_t>(rdata));
            // Check data read in.

            // Write data to dataset.
            H5D.write(dsetId, vltId, new H5Array <hvl_t>(wdata));

            // Close resources.
            H5D.close(dsetId);
            H5T.close(vltId);
            H5S.close(spaceId);

            // Open the dataset.
            dsetId = H5D.open(fileId, "Vlen Dataset");

            // Get dataspace and datatype for the dataset.
            spaceId = H5D.getSpace(dsetId);
            vltId = H5D.getType(dsetId);

            // Install custom CRT-heap alloc/free callbacks so that HDF5
            // allocates the VL read buffers on the same heap we reclaim from.
            H5AllocCallback allocCallback = new H5AllocCallback(Program.crtHeapAllocate);
            H5FreeCallback freeCallback = new H5FreeCallback(Program.crtHeapFree);
            H5P.setVlenMemManager(xferpId, allocCallback, IntPtr.Zero,
                                  freeCallback, IntPtr.Zero);

            // Read dataset from disk.
            H5D.read(dsetId, vltId,
                     new H5DataSpaceId(H5S.H5SType.ALL),
                     new H5DataSpaceId(H5S.H5SType.ALL),
                     xferpId, new H5Array <hvl_t>(rdata));

            // Reclaim the read VL data.
            H5D.vlenReclaim(vltId, spaceId, xferpId, new H5Array <hvl_t>(rdata));
        } // end of unsafe

        // Close resources.
        H5D.close(dsetId);
        H5T.close(vltId);
        H5S.close(spaceId);
        H5P.close(xferpId);

        Console.WriteLine("\t\t\tPASSED");
    }
    catch (HDFException anyHDF5E)
    {
        // HDF5-specific failure: report and count, keep the test run going.
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_vlen_dtype
// Verifies scalar-dataspace handling across a write/close/reopen/read cycle:
// the rank reported by the library must match SPACE3_RANK at creation time,
// and a scalar uint written to "Dataset1" must read back unchanged.
// Failures are logged to the console; `nerrors` tracks the failure count.
static void test_h5s_scalar()
{
    try
    {
        hssize_t[] extentDims = new hssize_t[3];

        // Output message about test being performed.
        Console.Write("Testing dataspace during writing");

        // Fresh file for this test.
        H5FileId fileId = H5F.create(DATAFILE, H5F.CreateMode.ACC_TRUNC);

        // Scalar dataspace (no dimension array supplied).
        H5DataSpaceId spaceId = H5S.create_simple(SPACE3_RANK, null);

        // The logical rank must agree with SPACE3_RANK.
        int rank = H5S.getSimpleExtentNDims(spaceId);
        if (rank != SPACE3_RANK)
        {
            Console.WriteLine("\ntest_h5s_scalar: incorrect rank {0}, should be SPACE3_RANK({1})", rank, SPACE3_RANK);
            nerrors++;
        }

        // Create the dataset and write a single scalar value.
        uint valueWritten = 65;
        H5DataSetId dsetId = H5D.create(fileId, "Dataset1", H5T.H5Type.NATIVE_UINT, spaceId);
        H5D.writeScalar(dsetId, new H5DataTypeId(H5T.H5Type.NATIVE_UINT), ref valueWritten);

        // Flush everything to disk by closing dataset, dataspace and file.
        H5D.close(dsetId);
        H5S.close(spaceId);
        H5F.close(fileId);

        /* Open the file and verify the dataspace. */
        fileId = H5F.open(DATAFILE, H5F.OpenMode.ACC_RDWR);
        dsetId = H5D.open(fileId, "Dataset1");

        // Re-query the dataspace from the reopened dataset.
        spaceId = H5D.getSpace(dsetId);
        rank = H5S.getSimpleExtentNDims(spaceId);
        if (rank != SPACE3_RANK)
        {
            Console.WriteLine("\ntest_h5s_scalar: incorrect rank {0}", rank);
        }
        extentDims = H5S.getSimpleExtentDims(spaceId);
        if (rank != 0)
        {
            Console.WriteLine("\ntest_h5s_scalar: incorrect rank {0}", rank);
        }

        // Read the scalar back and compare against what was written.
        uint valueRead = 0;
        H5D.readScalar(dsetId, new H5DataTypeId(H5T.H5Type.NATIVE_UINT), ref valueRead);
        if (valueRead != valueWritten)
        {
            Console.WriteLine("\ntest_h5s_scalar: incorrect data {0}, should be {1}", valueRead, valueWritten);
        }

        // Release all remaining handles.
        H5D.close(dsetId);
        H5S.close(spaceId);
        H5F.close(fileId);

        Console.WriteLine("\t\t\tPASSED");
    } // end of try
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_h5s_scalar_write
// One-time test-class setup: creates two temp HDF5 files (one forced to the
// EARLIEST library version, one current), two shared dataspaces (NULL and
// SCALAR), and in each file an "ASCII" fixed-length string dataset and a
// "UTF-8" variable-length string dataset, pre-populated with test data.
// All created handles are stored in class-level fields for use by the tests.
public static void ClassInit(TestContext testContext)
{
    // create test files which persists across file tests
    m_v0_class_file = Utilities.H5TempFile(ref m_v0_class_file_name,
                                           H5F.libver_t.EARLIEST);
    Assert.IsTrue(m_v0_class_file >= 0);
    m_v2_class_file = Utilities.H5TempFile(ref m_v2_class_file_name);
    Assert.IsTrue(m_v2_class_file >= 0);

    m_space_null = H5S.create(H5S.class_t.NULL);
    Assert.IsTrue(m_space_null >= 0);
    m_space_scalar = H5S.create(H5S.class_t.SCALAR);
    Assert.IsTrue(m_space_scalar >= 0);

    // create two datasets of the extended ASCII character set
    // store as H5T.FORTRAN_S1 -> space padding
    hsize_t[] dims = { 256 };
    hid_t space = H5S.create_simple(1, dims, null);
    m_v0_ascii_dset = H5D.create(m_v0_class_file, "ASCII", H5T.FORTRAN_S1, space);
    m_v2_ascii_dset = H5D.create(m_v2_class_file, "ASCII", H5T.FORTRAN_S1, space);
    Assert.IsTrue(H5S.close(space) >= 0);

    // we write from C and must provide null-terminated strings
    // wdata holds 256 two-byte cells: character i at wdata[2*i], the
    // implicit 0 at wdata[2*i + 1] acts as the C string terminator.
    byte[] wdata = new byte[512];
    for (int i = 0; i < 256; ++i)
    {
        wdata[2 * i] = (byte)i;
    }

    // In-memory type: 2-byte C strings to match the layout above.
    hid_t mem_type = H5T.copy(H5T.C_S1);
    Assert.IsTrue(H5T.set_size(mem_type, new IntPtr(2)) >= 0);

    // Pin the buffer once and write it to both files before unpinning.
    GCHandle hnd = GCHandle.Alloc(wdata, GCHandleType.Pinned);
    Assert.IsTrue(H5D.write(m_v0_ascii_dset, mem_type, H5S.ALL, H5S.ALL,
                            H5P.DEFAULT, hnd.AddrOfPinnedObject()) >= 0);
    Assert.IsTrue(H5D.write(m_v2_ascii_dset, mem_type, H5S.ALL, H5S.ALL,
                            H5P.DEFAULT, hnd.AddrOfPinnedObject()) >= 0);
    hnd.Free();
    Assert.IsTrue(H5T.close(mem_type) >= 0);

    // create UTF-8 encoded test datasets
    // (variable-length, space-padded, UTF-8 character set)
    hid_t dtype = H5T.create(H5T.class_t.STRING, H5T.VARIABLE);
    Assert.IsTrue(H5T.set_cset(dtype, H5T.cset_t.UTF8) >= 0);
    Assert.IsTrue(H5T.set_strpad(dtype, H5T.str_t.SPACEPAD) >= 0);

    hid_t dspace = H5S.create_simple(1,
        new hsize_t[] { (hsize_t)m_utf8strings.Count }, null);

    m_v0_utf8_dset = H5D.create(m_v0_class_file, "UTF-8", dtype, dspace);
    Assert.IsTrue(m_v0_utf8_dset >= 0);
    m_v2_utf8_dset = H5D.create(m_v2_class_file, "UTF-8", dtype, dspace);
    Assert.IsTrue(m_v2_utf8_dset >= 0);

    // Variable-length strings are written as an array of pointers: pin each
    // UTF-8 encoded byte array and collect its address.
    // NOTE(review): the encoded bytes are not explicitly NUL-terminated here
    // -- presumably the VL string machinery handles termination; confirm.
    GCHandle[] hnds = new GCHandle[m_utf8strings.Count];
    IntPtr[] wdata1 = new IntPtr[m_utf8strings.Count];
    for (int i = 0; i < m_utf8strings.Count; ++i)
    {
        hnds[i] = GCHandle.Alloc(
            Encoding.UTF8.GetBytes((string)m_utf8strings[i]),
            GCHandleType.Pinned);
        wdata1[i] = hnds[i].AddrOfPinnedObject();
    }

    // Pin the pointer array itself, write to both files, then release all
    // handles -- the individual string pins must outlive both writes.
    hnd = GCHandle.Alloc(wdata1, GCHandleType.Pinned);
    Assert.IsTrue(H5D.write(m_v0_utf8_dset, dtype, H5S.ALL, H5S.ALL,
                            H5P.DEFAULT, hnd.AddrOfPinnedObject()) >= 0);
    Assert.IsTrue(H5D.write(m_v2_utf8_dset, dtype, H5S.ALL, H5S.ALL,
                            H5P.DEFAULT, hnd.AddrOfPinnedObject()) >= 0);
    hnd.Free();
    for (int i = 0; i < m_utf8strings.Count; ++i)
    {
        hnds[i].Free();
    }

    Assert.IsTrue(H5S.close(dspace) >= 0);
    Assert.IsTrue(H5T.close(dtype) >= 0);
}