public void H5Fget_create_plistTest1()
{
    // A valid file creation property list must be obtainable from both
    // the v0- and v2-superblock class files, and must close cleanly.
    foreach (hid_t file in new[] { m_v0_class_file, m_v2_class_file })
    {
        hid_t plist = H5F.get_create_plist(file);
        Assert.IsTrue(plist >= 0);
        Assert.IsTrue(H5P.close(plist) >= 0);
    }
}
public void H5Fget_access_plistTest1()
{
    // A valid file access property list must be obtainable from both
    // the v0- and v2-superblock class files, and must close cleanly.
    foreach (hid_t file in new[] { m_v0_class_file, m_v2_class_file })
    {
        hid_t plist = H5F.get_access_plist(file);
        Assert.IsTrue(plist >= 0);
        Assert.IsTrue(H5P.close(plist) >= 0);
    }
}
public static unsafe void AddCompactDataset(long fileId)
{
    // Build a dataset creation property list requesting the COMPACT
    // storage layout, then write the small test data set with it.
    var creationPlist = H5P.create(H5P.DATASET_CREATE);
    long status = H5P.set_layout(creationPlist, H5D.layout_t.COMPACT);

    TestUtils.Add(ContainerType.Dataset, fileId, "compact", "compact",
                  H5T.NATIVE_INT32, TestData.SmallData.AsSpan(), cpl: creationPlist);

    status = H5P.close(creationPlist);
}
public void H5Pset_userblockTest3()
{
    hid_t fcpl = H5P.create(H5P.FILE_CREATE);
    Assert.IsTrue(fcpl >= 0);

    // A userblock size must be 0 or a power of two >= 512;
    // 111 and 513 are therefore expected to be rejected.
    Assert.IsTrue(H5P.set_userblock(fcpl, 0) >= 0);
    Assert.IsFalse(H5P.set_userblock(fcpl, 111) >= 0);
    Assert.IsTrue(H5P.set_userblock(fcpl, 512) >= 0);
    Assert.IsFalse(H5P.set_userblock(fcpl, 513) >= 0);

    Assert.IsTrue(H5P.close(fcpl) >= 0);
}
public static void ClassCleanup()
{
    // Release the shared link-creation property lists.
    Assert.IsTrue(H5P.close(m_lcpl) >= 0);
    Assert.IsTrue(H5P.close(m_lcpl_utf8) >= 0);

    // close the global test files
    Assert.IsTrue(H5F.close(m_v0_class_file) >= 0);
    Assert.IsTrue(H5F.close(m_v2_class_file) >= 0);

    // Remove the backing files from disk.
    File.Delete(m_v0_class_file_name);
    File.Delete(m_v2_class_file_name);
}
public void H5Dget_chunk_infoTest1()
{
    // 10x10 extendible dataspace, 4x4 chunks => 3x3 = 9 chunks.
    hsize_t[] dims = { 10, 10 };
    hsize_t[] max_dims = { H5S.UNLIMITED, H5S.UNLIMITED };
    hid_t space = H5S.create_simple(2, dims, max_dims);

    hid_t dcpl = H5P.create(H5P.DATASET_CREATE);
    Assert.IsTrue(dcpl >= 0);
    hsize_t[] chunk = { 4, 4 };
    Assert.IsTrue(H5P.set_chunk(dcpl, 2, chunk) >= 0);
    // Early allocation + fill at allocation so all chunks exist on disk
    // immediately, which get_chunk_info below relies on.
    Assert.IsTrue(H5P.set_alloc_time(dcpl, H5D.alloc_time_t.EARLY) >= 0);
    Assert.IsTrue(H5P.set_fill_time(dcpl, H5D.fill_time_t.ALLOC) >= 0);

    hid_t dset = H5D.create(m_v0_test_file, "Early Bird1", H5T.IEEE_F32BE,
        space, H5P.DEFAULT, dcpl);
    Assert.IsTrue(dset >= 0);

    // This should work but doesn't:
    // Assert.IsTrue(H5D.get_num_chunks(dset, H5S.ALL, ref nchunks) >= 0);

    hid_t fspace = H5D.get_space(dset);
    Assert.IsTrue(fspace >= 0);
    Assert.IsTrue(H5S.select_all(fspace) >= 0);

    // Query chunk #8 (the last one); offset is overwritten by the call.
    hsize_t index = 8, size = 0;
    hsize_t[] offset = { 4711, 4712 };
    uint32_t filter_mask = 0;
    haddr_t addr = 0;
    Assert.IsTrue(H5D.get_chunk_info(dset, fspace, index, offset,
        ref filter_mask, ref addr, ref size) >= 0);
    Assert.IsTrue(offset[0] > 0);
    Assert.IsTrue(filter_mask == 0 && size > 0 && addr > 0);
    Assert.IsTrue(H5D.close(dset) >= 0);

    // BUG FIX: the first fspace handle was previously leaked when the
    // variable was reassigned for the v2 file below.
    Assert.IsTrue(H5S.close(fspace) >= 0);

    // Repeat the same check against the v2-superblock test file.
    dset = H5D.create(m_v2_test_file, "Early Bird1", H5T.IEEE_F32BE,
        space, H5P.DEFAULT, dcpl);
    Assert.IsTrue(dset >= 0);

    // This should work but doesn't:
    // Assert.IsTrue(H5D.get_num_chunks(dset, H5S.ALL, ref nchunks) >= 0);

    fspace = H5D.get_space(dset);
    Assert.IsTrue(fspace >= 0);
    Assert.IsTrue(H5S.select_all(fspace) >= 0);

    Assert.IsTrue(H5D.get_chunk_info(dset, fspace, index, offset,
        ref filter_mask, ref addr, ref size) >= 0);
    Assert.IsTrue(offset[0] > 0);
    Assert.IsTrue(filter_mask == 0 && size > 0 && addr > 0);
    Assert.IsTrue(H5D.close(dset) >= 0);

    // BUG FIX: release the remaining handles (previously leaked).
    Assert.IsTrue(H5S.close(fspace) >= 0);
    Assert.IsTrue(H5P.close(dcpl) >= 0);
    Assert.IsTrue(H5S.close(space) >= 0);
}
public static void ClassCleanup()
{
#if HDF5_VER1_10
    // Release the shared link-creation property lists.
    Assert.IsTrue(H5P.close(m_lcpl) >= 0);
    Assert.IsTrue(H5P.close(m_lcpl_utf8) >= 0);

    // close the global test files
    Assert.IsTrue(H5F.close(m_v3_class_file) >= 0);
    File.Delete(m_v3_class_file_name);
#endif
}
public static void ClassInit(TestContext testContext)
{
    // Ask for the latest file format so features under test are available.
    hid_t accessPlist = H5P.create(H5P.FILE_ACCESS);
    Assert.IsTrue(accessPlist >= 0);
    Assert.IsTrue(H5P.set_libver_bounds(accessPlist, H5F.libver_t.LATEST) >= 0);

    // Create (truncate) the shared test file with that access plist.
    m_shared_file_id = H5F.create(m_shared_file_name, H5F.ACC_TRUNC,
        H5P.DEFAULT, accessPlist);

    Assert.IsTrue(H5P.close(accessPlist) >= 0);
}
public static void ClassCleanup()
{
    // Release the shared attribute-creation property list.
    Assert.IsTrue(H5P.close(m_acpl) >= 0);

    // close the global test files
    Assert.IsTrue(H5F.close(m_v0_class_file) >= 0);
    Assert.IsTrue(H5F.close(m_v2_class_file) >= 0);

    // Release the shared dataspaces.
    Assert.IsTrue(H5S.close(m_space_null) >= 0);
    Assert.IsTrue(H5S.close(m_space_scalar) >= 0);

    // Remove the backing files from disk.
    File.Delete(m_v0_class_file_name);
    File.Delete(m_v2_class_file_name);
}
public HDF5DataSet CreateDataset(string name, ulong[] shape, Type dType, long maxSize = 1, bool[] unlimited = null, ulong[] chunkShape = null, bool compress = false)
{
    HDF5DataSet result = null;
    With((id) =>
    {
        int rank = shape.Length;

        // Default: no dimension is unlimited.
        if (unlimited == null)
        {
            unlimited = new bool[rank];
        }

        // Unlimited dimensions grow without bound; fixed ones are capped at 'shape'.
        ulong[] maxShape = new ulong[rank];
        for (int d = 0; d < rank; d++)
        {
            maxShape[d] = unlimited[d] ? H5S.UNLIMITED : shape[d];
        }

        var spaceId = H5S.create_simple(rank, shape, maxShape);
        long typeId = HDF5DataSet.OpenHDFDataType(dType, maxSize);

        // Compression requires the CHUNKED layout; without compression the
        // default (0 == H5P_DEFAULT) creation property list is used.
        long dcplId = 0L;
        if (compress)
        {
            if (chunkShape == null)
            {
                chunkShape = shape;
            }
            dcplId = H5P.create(H5P.DATASET_CREATE);
            H5P.set_layout(dcplId, H5D.layout_t.CHUNKED);
            H5P.set_deflate(dcplId, 9);
            H5P.set_chunk(dcplId, shape.Length, chunkShape);
        }

        var newId = H5D.create(id, name, typeId, spaceId, 0L, dcplId, 0L);

        // Release intermediate handles before checking the outcome.
        if (dcplId > 0)
        {
            H5P.close(dcplId);
        }
        H5T.close(typeId);
        H5S.close(spaceId);

        if (newId <= 0)
        {
            throw new H5SSException("Couldn't create DataSet");
        }

        // write!
        H5D.close(newId);
        result = new HDF5DataSet(name, this);
    });
    return (result);
}
static void Main2222(string[] args)
{
    var h5 = H5F.create(@"D:\test.h5", H5F.ACC_TRUNC);

    // Compound type matching the 40-byte ComType layout.
    var typeId = H5T.create(H5T.class_t.COMPOUND, new IntPtr(40));
    var strtype = H5T.copy(H5T.C_S1);
    H5T.set_size(strtype, new IntPtr(16));
    H5T.insert(typeId, "Name", new IntPtr(0), strtype);
    H5T.insert(typeId, "x_pos", new IntPtr(16), H5T.NATIVE_INT32);
    H5T.insert(typeId, "y_pos", new IntPtr(20), H5T.NATIVE_INT32);
    H5T.insert(typeId, "Mass", new IntPtr(24), H5T.NATIVE_FLOAT);
    H5T.insert(typeId, "Temperature", new IntPtr(32), H5T.NATIVE_DOUBLE);

    ulong[] dims = new ulong[] { 10000 };
    ulong[] chunk_size = new ulong[] { 1000 };
    var spaceid = H5S.create_simple(dims.Length, dims, null);

    var dcpl = H5P.create(H5P.DATASET_CREATE);
    // BUG FIX: the original requested the COMPACT layout, which is
    // incompatible with chunking/compression (H5P.set_chunk switches the
    // layout to CHUNKED anyway) — request CHUNKED explicitly instead.
    H5P.set_layout(dcpl, H5D.layout_t.CHUNKED);
    H5P.set_deflate(dcpl, 6);
    H5P.set_chunk(dcpl, chunk_size.Length, chunk_size);

    var datasetid = H5D.create(h5, "Table1", typeId, spaceid, H5P.DEFAULT, dcpl);

    ComType ct = new ComType() { Name = "aabb", x_pos = 2, y_pos = 1, Mass = 1.24F, Temperature = 45.7, };

    // NOTE(review): only the first of the 10000 elements is initialized;
    // the remainder of the buffer is written as-is — confirm this is intended.
    IntPtr p = Marshal.AllocHGlobal(40 * (int)dims[0]);
    try
    {
        Marshal.StructureToPtr(ct, p, false);
        H5D.write(datasetid, typeId, spaceid, H5S.ALL, H5P.DEFAULT, p);
    }
    finally
    {
        // BUG FIX: the unmanaged buffer was previously leaked.
        Marshal.FreeHGlobal(p);
    }

    // BUG FIX: release all HDF5 handles (only the file was closed before).
    H5D.close(datasetid);
    H5P.close(dcpl);
    H5S.close(spaceid);
    H5T.close(strtype);
    H5T.close(typeId);
    H5F.close(h5);
}
static void test_attr_plist()
{
    try
    {
        Console.Write("Testing attribute property lists");

        hssize_t[] dims = { 256, 512 };

        const string PLST_FILE_NAME = ("tattr_plist.h5");
        hssize_t[] dims1 = { SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3 };
        hssize_t[] dims2 = { ATTR1_DIM };

        // Create file.
        H5FileId fileId = H5F.create(PLST_FILE_NAME, H5F.CreateMode.ACC_TRUNC);

        // Create dataspace for dataset.
        H5DataSpaceId dsetSpaceId = H5S.create_simple(SPACE1_RANK, dims1);

        // Create a dataset.
        H5DataSetId dsetId = H5D.create(fileId, DSET1_NAME,
            H5T.H5Type.NATIVE_UCHAR, dsetSpaceId);

        // Create dataspace for attribute.
        H5DataSpaceId attrSpaceId = H5S.create_simple(ATTR1_RANK, dims2);

        // Create default property list for attribute.
        H5PropertyListId plistId = H5P.create(H5P.PropertyListClass.ATTRIBUTE_CREATE);

        // Create an attribute for the dataset using the property list.
        H5AttributeId attrId = H5A.create(dsetId, ATTR1_NAME,
            new H5DataTypeId(H5T.H5Type.NATIVE_INT), attrSpaceId, plistId);

        // Close all objects.
        H5S.close(dsetSpaceId);
        H5S.close(attrSpaceId);
        H5P.close(plistId);
        H5A.close(attrId);
        H5D.close(dsetId);
        H5F.close(fileId);

        Console.WriteLine("\t\t\tPASSED");
    }
    catch (HDFException anyHDF5E)
    {
        // Library-level failure: report and count it.
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        // Any other failure: report origin and message, and count it.
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_attr_plist
public static (long DatasetId, bool IsNew) OpenOrCreateDataset(long locationId, string datasetPath, long datasetTypeId, ulong chunkLength, ulong chunkCount, IntPtr fillValue = default)
{
    // Delegate to the generic open-or-create helper; the factory below is
    // only invoked when the dataset does not exist yet.
    return (IOHelper.OpenOrCreateDataset(locationId, datasetPath, datasetTypeId, () =>
    {
        long dcplId = -1;
        long lcplId = -1;
        long spaceId = -1;
        long datasetId = -1;

        try
        {
            // Chunked layout with shuffle + deflate(7); optional fill value.
            dcplId = H5P.create(H5P.DATASET_CREATE);

            if (fillValue != IntPtr.Zero)
            {
                H5P.set_fill_value(dcplId, datasetTypeId, fillValue);
            }

            H5P.set_shuffle(dcplId);
            H5P.set_deflate(dcplId, 7);
            H5P.set_chunk(dcplId, 1, new ulong[] { chunkLength });

            // Create any missing intermediate groups along datasetPath.
            lcplId = H5P.create(H5P.LINK_CREATE);
            H5P.set_create_intermediate_group(lcplId, 1);

            // 1-D dataspace covering all chunks.
            spaceId = H5S.create_simple(1, new ulong[] { chunkLength * chunkCount }, null);
            datasetId = H5D.create(locationId, datasetPath, datasetTypeId, spaceId, lcplId, dcplId);

            if (H5I.is_valid(datasetId) <= 0)
            {
                throw new Exception($"{ ErrorMessage.IOHelper_CouldNotOpenOrCreateDataset } Dataset: '{ datasetPath }'.");
            }
        }
        finally
        {
            // Release intermediate handles; the dataset id is returned to the caller.
            if (H5I.is_valid(dcplId) > 0) { H5P.close(dcplId); }
            if (H5I.is_valid(lcplId) > 0) { H5P.close(lcplId); }
            if (H5I.is_valid(spaceId) > 0) { H5S.close(spaceId); }
        }

        return datasetId;
    }));
}
public static unsafe void AddChunkedDatasetForHyperslab(long fileId)
{
    // 25x25x4 dataset stored in 7x20x3 chunks (deliberately not evenly
    // dividing the extent, to exercise partial-chunk hyperslab reads).
    var creationPlist = H5P.create(H5P.DATASET_CREATE);
    var dims = new ulong[] { 25, 25, 4 };
    var chunkDims = new ulong[] { 7, 20, 3 };
    long status = H5P.set_chunk(creationPlist, 3, chunkDims);

    TestUtils.Add(ContainerType.Dataset, fileId, "chunked", "hyperslab",
                  H5T.NATIVE_INT32, TestData.MediumData.AsSpan(), dims, cpl: creationPlist);

    status = H5P.close(creationPlist);
}
public void H5Fset_metadata_read_attemptsTestSWMR1()
{
    // Setting a custom SWMR metadata-read retry count on a fresh
    // file access property list must succeed.
    hid_t accessPlist = H5P.create(H5P.FILE_ACCESS);
    Assert.IsTrue(accessPlist >= 0);

    uint attempts = 12;
    Assert.IsTrue(H5P.set_metadata_read_attempts(accessPlist, attempts) >= 0);

    Assert.IsTrue(H5P.close(accessPlist) >= 0);
}
public void H5Pset_object_flush_cbTestSWMR1()
{
    // Registering an object-flush callback on a fresh file access
    // property list must succeed.
    hid_t accessPlist = H5P.create(H5P.FILE_ACCESS);
    Assert.IsTrue(accessPlist >= 0);

    H5F.flush_cb_t callback = flush_func;
    Assert.IsTrue(H5P.set_object_flush_cb(accessPlist, callback, IntPtr.Zero) >= 0);

    Assert.IsTrue(H5P.close(accessPlist) >= 0);
}
public void Writer(List<PbTickView> list, string dataset_name)
{
    var t = typeof(PbTickStruct5);
    var size = Marshal.SizeOf(t);
    var typeId = create_type();

    // The chunk must be smaller than the number of elements, or dataset
    // creation fails; scale it to the data's order of magnitude, capped
    // at 1000 (e.g. ~100 rows -> chunk of 100).
    var log10 = (int)Math.Log10(list.Count);
    ulong pow = (ulong)Math.Pow(10, log10);
    ulong c_s = Math.Min(1000, pow);
    ulong[] chunk_size = new ulong[] { c_s };

    ulong[] dims = new ulong[] { (ulong)list.Count };

    long dcpl = 0;
    if (list.Count == 0 || log10 == 0)
    {
        // Too few rows to chunk: keep the default (contiguous) layout.
    }
    else
    {
        dcpl = create_property(chunk_size);
    }

    var spaceid = H5S.create_simple(dims.Length, dims, null);
    var datasetid = H5D.create(h5, dataset_name, typeId, spaceid, H5P.DEFAULT, dcpl);

    // Marshal every element into one contiguous unmanaged buffer.
    IntPtr p = Marshal.AllocHGlobal(size * (int)dims[0]);
    int i = 0;
    foreach (var b in list)
    {
        var s = DataConvert.toStruct(b);
        // BUG FIX: the original used p.ToInt32() + offset, which throws
        // OverflowException in 64-bit processes; IntPtr.Add performs
        // pointer arithmetic safely on any platform.
        Marshal.StructureToPtr(s, IntPtr.Add(p, size * i), false);
        ++i;
    }

    H5D.write(datasetid, typeId, spaceid, H5S.ALL, H5P.DEFAULT, p);

    H5D.close(datasetid);
    H5S.close(spaceid);
    H5T.close(typeId);
    // BUG FIX: only close the property list when one was actually created;
    // closing handle 0 (H5P_DEFAULT) is an error.
    if (dcpl > 0)
    {
        H5P.close(dcpl);
    }
    Marshal.FreeHGlobal(p);
}
/// <summary>
/// Reads an n-dimensional dataset.
/// </summary>
/// <typeparam name="T">Generic parameter strings or primitive type</typeparam>
/// <param name="groupId">id of the group. Can also be a file Id</param>
/// <param name="name">name of the dataset</param>
/// <returns>The n-dimensional dataset</returns>
public static Array ReadDatasetToArray<T>(hid_t groupId, string name) //where T : struct
{
    var datatype = GetDatatype(typeof(T));

    var datasetId = H5D.open(groupId, name);
    var spaceId = H5D.get_space(datasetId);
    int rank = H5S.get_simple_extent_ndims(spaceId);
    long count = H5S.get_simple_extent_npoints(spaceId);
    Array dset;
    Type type = typeof(T);
    if (rank >= 0 && count >= 0)
    {
        int rankChunk;
        ulong[] maxDims = new ulong[rank];
        ulong[] dims = new ulong[rank];
        ulong[] chunkDims = new ulong[rank];
        // NOTE(review): get_simple_extent_dims returns a status/rank, not a
        // dataspace id; memId is overwritten with a real id further below.
        hid_t memId = H5S.get_simple_extent_dims(spaceId, dims, maxDims);
        long[] lengths = dims.Select(d => Convert.ToInt64(d)).ToArray();
        dset = Array.CreateInstance(type, lengths);
        var typeId = H5D.get_type(datasetId);
        var mem_type = H5T.copy(datatype);
        if (datatype == H5T.C_S1)
        {
            // NOTE(review): this resizes the shared H5T.C_S1 type id itself,
            // not the mem_type copy — confirm whether resizing the copy (and
            // reading with it) was intended instead.
            H5T.set_size(datatype, new IntPtr(2));
        }

        var propId = H5D.get_create_plist(datasetId);

        if (H5D.layout_t.CHUNKED == H5P.get_layout(propId))
        {
            rankChunk = H5P.get_chunk(propId, rank, chunkDims);
        }

        memId = H5S.create_simple(rank, dims, maxDims);
        GCHandle hnd = GCHandle.Alloc(dset, GCHandleType.Pinned);
        H5D.read(datasetId, datatype, memId, spaceId,
            H5P.DEFAULT, hnd.AddrOfPinnedObject());
        hnd.Free();

        // BUG FIX: release the ids acquired above (previously leaked).
        H5S.close(memId);
        H5P.close(propId);
        H5T.close(mem_type);
        H5T.close(typeId);
    }
    else
    {
        // Degenerate dataspace: return an empty array of the element type.
        dset = Array.CreateInstance(type, new long[1] { 0 });
    }
    H5D.close(datasetId);
    H5S.close(spaceId);
    return (dset);
}
public void H5Dget_access_plistTest1()
{
    // Create a dataset on the null dataspace, then verify its access
    // property list can be retrieved and closed.
    hid_t dset = H5D.create(m_v0_test_file, "dset", H5T.IEEE_F64BE, m_space_null);
    Assert.IsTrue(dset >= 0);

    hid_t accessPlist = H5D.get_access_plist(dset);
    Assert.IsTrue(accessPlist >= 0);
    Assert.IsTrue(H5P.close(accessPlist) >= 0);

    Assert.IsTrue(H5D.close(dset) >= 0);
}
public void H5DOappendTestSWMR1()
{
    // 6x0 dataset, extendible along the second dimension, chunked 2x5.
    hsize_t[] dims = { 6, 0 };
    hsize_t[] maxdims = { 6, H5S.UNLIMITED };
    hsize_t[] chunk_dims = { 2, 5 };
    int[] cbuf = new int [6];

    hid_t dsp = H5S.create_simple(2, dims, maxdims);
    Assert.IsTrue(dsp >= 0);

    hid_t dcpl = H5P.create(H5P.DATASET_CREATE);
    Assert.IsTrue(dcpl >= 0);
    Assert.IsTrue(H5P.set_chunk(dcpl, 2, chunk_dims) >= 0);

    // Register an append-flush callback that fires at every appended
    // column (boundary {0,1}), passing 99 as user data to DOappend_func.
    hsize_t[] boundary = { 0, 1 };
    hid_t dapl = H5P.create(H5P.DATASET_ACCESS);
    Assert.IsTrue(dapl >= 0);
    H5D.append_cb_t cb = DOappend_func;
    Assert.IsTrue(
        H5P.set_append_flush(dapl, 2, boundary, cb, new IntPtr(99)) >= 0);

    hid_t dst = H5D.create(m_v3_test_file_swmr, "dset", H5T.NATIVE_INT,
        dsp, H5P.DEFAULT, dcpl, dapl);
    Assert.IsTrue(dst >= 0);

    // Append three 6-element columns; values are distinct negatives so
    // each append is distinguishable (-1..-6, -7..-12, -13..-18).
    GCHandle hnd = GCHandle.Alloc(cbuf, GCHandleType.Pinned);
    for (int i = 0; i < 3; ++i)
    {
        for (int j = 0; j < 6; ++j)
        {
            cbuf[j] = ((i * 6) + (j + 1)) * -1;
        }
        // Append one element's worth of extent along axis 1.
        Assert.IsTrue(
            H5DO.append(dst, H5P.DEFAULT, 1, new IntPtr(1),
                H5T.NATIVE_INT, hnd.AddrOfPinnedObject()) >= 0);
    }
    hnd.Free();

    Assert.IsTrue(H5D.close(dst) >= 0);
    Assert.IsTrue(H5P.close(dapl) >= 0);
    Assert.IsTrue(H5P.close(dcpl) >= 0);
    Assert.IsTrue(H5S.close(dsp) >= 0);
}
public void AppendOrCreateDataset(Array dataset)
{
    if (_chunkDims == null)
    {
        // First call: validate the array, then create the chunked dataset.
        if (dataset.Rank < 1)
        {
            string msg = "Empty array was passed. Ignoring.";
            Hdf5Utils.LogError?.Invoke(msg);
            return;
        }

        for (int dimension = 1; dimension <= dataset.Rank; dimension++)
        {
            var size = dataset.GetUpperBound(dimension - 1) + 1;
            if (size == 0)
            {
                string msg = $"Empty array was passed for dimension {dimension}. Ignoring.";
                Hdf5Utils.LogError?.Invoke(msg);
                return;
            }
        }

        // BUG FIX: the chunk dimensions were hard-coded to dimensions 0 and 1,
        // which throws IndexOutOfRangeException for rank-1 arrays even though
        // rank >= 1 is accepted above. Build one chunk entry per dimension.
        _chunkDims = new ulong[dataset.Rank];
        for (int d = 0; d < dataset.Rank; d++)
        {
            _chunkDims[d] = Convert.ToUInt64(dataset.GetLongLength(d));
        }

        Rank = dataset.Rank;
        _currentDims = GetDims(dataset);

        /* Create the data space with unlimited dimensions. */
        _spaceId = H5S.create_simple(Rank, _currentDims, _maxDims);

        /* Modify dataset creation properties, i.e. enable chunking */
        _propId = H5P.create(H5P.DATASET_CREATE);
        _status = H5P.set_chunk(_propId, Rank, _chunkDims);

        /* Create a new dataset within the file using chunk creation properties. */
        _datasetId = H5D.create(GroupId, Hdf5Utils.NormalizedName(Datasetname),
            _datatype, _spaceId, H5P.DEFAULT, _propId);

        /* Write data to dataset */
        GCHandle hnd = GCHandle.Alloc(dataset, GCHandleType.Pinned);
        _status = H5D.write(_datasetId, _datatype, H5S.ALL, H5S.ALL,
            H5P.DEFAULT, hnd.AddrOfPinnedObject());
        hnd.Free();

        H5S.close(_spaceId);
        _spaceId = -1;
    }
    else
    {
        // Dataset already exists: extend it with the new data.
        AppendDataset(dataset);
    }
}
public void H5Fget_metadata_read_attemptsTestSWMR2()
{
    // The access plist of an open (non-SWMR) file must report a positive
    // metadata-read retry count.
    hid_t accessPlist = H5F.get_access_plist(m_v3_test_file_no_swmr);
    Assert.IsTrue(accessPlist >= 0);

    uint attempts = 0;
    Assert.IsTrue(H5P.get_metadata_read_attempts(accessPlist, ref attempts) >= 0);
    Assert.IsTrue(attempts > 0);

    Assert.IsTrue(H5P.close(accessPlist) >= 0);
}
public static unsafe void AddFilteredDataset_Fletcher(long fileId)
{
    // (length x 4) chunked dataset with the Fletcher-32 checksum filter.
    var rows = (ulong)TestData.MediumData.Length / 4;
    var dims = new ulong[] { rows, 4 };

    var creationPlist = H5P.create(H5P.DATASET_CREATE);
    long status = H5P.set_chunk(creationPlist, 2, new ulong[] { 1000, 4 });
    status = H5P.set_fletcher32(creationPlist);

    TestUtils.Add(ContainerType.Dataset, fileId, "filtered", $"fletcher",
                  H5T.NATIVE_INT32, TestData.MediumData.AsSpan(), dims, cpl: creationPlist);

    status = H5P.close(creationPlist);
}
public static unsafe void AddFilteredDataset_ZLib(long fileId)
{
    // (length x 4) chunked dataset compressed with DEFLATE at level 5.
    var rows = (ulong)TestData.MediumData.Length / 4;
    var dims = new ulong[] { rows, 4 };

    var creationPlist = H5P.create(H5P.DATASET_CREATE);
    long status = H5P.set_chunk(creationPlist, 2, new ulong[] { 1000, 4 });
    status = H5P.set_filter(creationPlist, H5Z.filter_t.DEFLATE, 0,
                            new IntPtr(1), new uint[] { 5 } /* compression level */);

    TestUtils.Add(ContainerType.Dataset, fileId, "filtered", $"deflate",
                  H5T.NATIVE_INT32, TestData.MediumData.AsSpan(), dims, cpl: creationPlist);

    status = H5P.close(creationPlist);
}
public static unsafe void AddChunkedDataset_Implicit(long fileId)
{
    // Chunked dataset with EARLY allocation, which yields the IMPLICIT
    // chunk index (all chunks allocated up front, no filters).
    var rows = (ulong)TestData.MediumData.Length / 4;
    var dims = new ulong[] { rows, 4 };

    var creationPlist = H5P.create(H5P.DATASET_CREATE);
    long status = H5P.set_chunk(creationPlist, 2, new ulong[] { 1000, 3 });
    status = H5P.set_alloc_time(creationPlist, H5D.alloc_time_t.EARLY);

    TestUtils.Add(ContainerType.Dataset, fileId, "chunked", "chunked_implicit",
                  H5T.NATIVE_INT32, TestData.MediumData.AsSpan(), dims, cpl: creationPlist);

    status = H5P.close(creationPlist);
}
public void H5Dget_num_chunksTest1()
{
    // 10x10 extendible dataspace, 4x4 chunks => ceil(10/4)^2 = 9 chunks.
    hsize_t[] dims = { 10, 10 };
    hsize_t[] max_dims = { H5S.UNLIMITED, H5S.UNLIMITED };
    hid_t space = H5S.create_simple(2, dims, max_dims);

    hid_t dcpl = H5P.create(H5P.DATASET_CREATE);
    Assert.IsTrue(dcpl >= 0);
    hsize_t[] chunk = { 4, 4 };
    Assert.IsTrue(H5P.set_chunk(dcpl, 2, chunk) >= 0);
    // Early allocation + fill at allocation so all chunks exist on disk
    // immediately, which get_num_chunks below relies on.
    Assert.IsTrue(H5P.set_alloc_time(dcpl, H5D.alloc_time_t.EARLY) >= 0);
    Assert.IsTrue(H5P.set_fill_time(dcpl, H5D.fill_time_t.ALLOC) >= 0);

    hid_t dset = H5D.create(m_v0_test_file, "Early Bird", H5T.IEEE_F32BE,
        space, H5P.DEFAULT, dcpl);
    Assert.IsTrue(dset >= 0);

    // This should work but doesn't:
    // Assert.IsTrue(H5D.get_num_chunks(dset, H5S.ALL, ref nchunks) >= 0);

    hid_t fspace = H5D.get_space(dset);
    Assert.IsTrue(fspace >= 0);
    Assert.IsTrue(H5S.select_all(fspace) >= 0);

    hsize_t nchunks = 0;
    Assert.IsTrue(H5D.get_num_chunks(dset, fspace, ref nchunks) >= 0);
    Assert.IsTrue(nchunks == 9);
    Assert.IsTrue(H5D.close(dset) >= 0);

    // BUG FIX: the first fspace handle was previously leaked when the
    // variable was reassigned for the v2 file below.
    Assert.IsTrue(H5S.close(fspace) >= 0);

    // Repeat the same check against the v2-superblock test file.
    dset = H5D.create(m_v2_test_file, "Early Bird", H5T.IEEE_F32BE,
        space, H5P.DEFAULT, dcpl);
    Assert.IsTrue(dset >= 0);

    // This should work but doesn't:
    // Assert.IsTrue(H5D.get_num_chunks(dset, H5S.ALL, ref nchunks) >= 0);

    fspace = H5D.get_space(dset);
    Assert.IsTrue(fspace >= 0);
    Assert.IsTrue(H5S.select_all(fspace) >= 0);

    nchunks = 0;
    Assert.IsTrue(H5D.get_num_chunks(dset, fspace, ref nchunks) >= 0);
    Assert.IsTrue(nchunks == 9);
    Assert.IsTrue(H5D.close(dset) >= 0);

    // BUG FIX: release the remaining handles (previously leaked).
    Assert.IsTrue(H5S.close(fspace) >= 0);
    Assert.IsTrue(H5P.close(dcpl) >= 0);
    Assert.IsTrue(H5S.close(space) >= 0);
}
/// <summary>
/// Create a new hdf5 `H5DataSet` at `loc_id`.
/// </summary>
/// <remarks>
/// `maxdims` may be `null` in which case it is set to `dims`.
/// </remarks>
internal static H5DataSet Create(hid_t loc_id, string key, int rank, long[] dims, long[] maxdims, Type primitive_type)
{
    hid_t dcpl;  // the 'dataset creation property list' controls chunking..
    if (maxdims == null || dims.SequenceEqual(maxdims))
    {
        dcpl = H5P.DEFAULT;
    }
    else if (HasH5Pcreate)
    {
        // ..which is needed for later resizing:
        var chunk = new ulong[rank];
        // the chunk is of size 1 in each 'unlimited' dimension and of size 'maxdims'
        // for all other dimensions (just like the 'SPECdata/Intensities' dataset):
        for (int i = 0; i < rank; i++)
        {
            if (maxdims[i] == H5Space.Unlimited)
            {
                chunk[i] = 1UL;
            }
            else if (maxdims[i] > 0)
            {
                checked { chunk[i] = (ulong)maxdims[i]; }
            }
            else
            {
                throw new ArgumentException($"invalid value in parameter 'maxdims'");
            }
        }
        dcpl = H5P.create(H5P.DATASET_CREATE);
        H5P.set_chunk(dcpl, rank, chunk);
    }
    else
    {
        maxdims = dims;
        dcpl = H5P.DEFAULT;
    }

    hid_t id;
    using (H5Space space = H5Space.Create(rank, dims, maxdims))
    using (H5Type dtype = H5Type.Create(primitive_type))
    {
        id = H5D.create(loc_id, key, dtype.ID, space.ID, H5P.DEFAULT, dcpl, H5P.DEFAULT);
    }

    // BUG FIX: a property list created above was previously leaked;
    // release it before checking the creation result.
    if (dcpl != H5P.DEFAULT)
    {
        H5P.close(dcpl);
    }

    if (id < 0)
    {
        throw new H5LibraryException($"H5D.create() returned ({id})");
    }

    return (FromID(id));
}
/// <summary>
/// Dispose function as suggested in the stackoverflow discussion below
/// See: http://stackoverflow.com/questions/538060/proper-use-of-the-idisposable-interface/538238#538238
/// </summary>
/// <param name="itIsSafeToAlsoFreeManagedObjects"></param>
protected virtual void Dispose(bool itIsSafeToAlsoFreeManagedObjects)
{
    // Nothing to release when the dataset was never created/opened.
    if (!DatasetExists)
    {
        Hdf5Utils.LogInfo?.Invoke("Data set does not exist.");
        return;
    }

    // Release the unmanaged HDF5 handles held by this instance.
    H5D.close(datasetId);
    H5P.close(propId);
    H5S.close(spaceId);

    if (itIsSafeToAlsoFreeManagedObjects)
    {
        // No managed resources to free at this time.
    }
}
public void H5Pset_append_flushTestSWMR1()
{
    // Registering an append-flush callback (boundary {1,1}) on a fresh
    // dataset access property list must succeed.
    hid_t accessPlist = H5P.create(H5P.DATASET_ACCESS);
    Assert.IsTrue(accessPlist >= 0);

    hsize_t[] boundary = { 1, 1 };
    H5D.append_cb_t callback = append_func;
    Assert.IsTrue(H5P.set_append_flush(accessPlist, 2, boundary, callback, IntPtr.Zero) >= 0);

    Assert.IsTrue(H5P.close(accessPlist) >= 0);
}