// Creates a chunked, append-able dataset on the first call (when _chunkDims is
// still null) and writes the initial data; on subsequent calls delegates to
// AppendDataset to extend the existing dataset.
// NOTE(review): chunk dimensions are taken from the first two dimensions of the
// incoming array, so this path assumes rank-2 data — GetLongLength(1) throws
// for a 1-D array. Confirm callers only pass 2-D arrays.
public void AppendOrCreateDataset(Array dataset)
{
    if (_chunkDims == null)
    {
        // First call: derive the chunk shape from the incoming data.
        _chunkDims = new[]
        {
            Convert.ToUInt64(dataset.GetLongLength(0)),
            Convert.ToUInt64(dataset.GetLongLength(1))
        };
        Rank = dataset.Rank;
        _currentDims = GetDims(dataset);

        /* Create the data space with unlimited dimensions. */
        _spaceId = H5S.create_simple(Rank, _currentDims, _maxDims);

        /* Modify dataset creation properties, i.e. enable chunking */
        _propId = H5P.create(H5P.DATASET_CREATE);
        _status = H5P.set_chunk(_propId, Rank, _chunkDims);

        /* Create a new dataset within the file using chunk creation properties. */
        _datasetId = H5D.create(GroupId, Hdf5Utils.NormalizedName(Datasetname),
            _datatype, _spaceId, H5P.DEFAULT, _propId);

        /* Write data to dataset */
        GCHandle hnd = GCHandle.Alloc(dataset, GCHandleType.Pinned);
        _status = H5D.write(_datasetId, _datatype, H5S.ALL, H5S.ALL, H5P.DEFAULT,
            hnd.AddrOfPinnedObject());
        hnd.Free();

        H5S.close(_spaceId);
        _spaceId = -1;
        // NOTE(review): _propId and _datasetId stay open — presumably released
        // when this writer is disposed; verify to avoid handle leaks.
    }
    else
    {
        AppendDataset(dataset);
    }
}
public void H5DflushTestSWMR2()
{
    // 6x6 dataset, growable along dimension 1, written as 2x5 chunks.
    hsize_t[] shape = { 6, 6 };
    hsize_t[] maxShape = { 6, H5S.UNLIMITED };
    hsize_t[] chunkShape = { 2, 5 };
    int[] buffer = new int[36];

    hid_t spaceId = H5S.create_simple(2, shape, maxShape);
    Assert.IsTrue(spaceId >= 0);

    hid_t plistId = H5P.create(H5P.DATASET_CREATE);
    Assert.IsTrue(plistId >= 0);
    Assert.IsTrue(H5P.set_chunk(plistId, 2, chunkShape) >= 0);

    hid_t datasetId = H5D.create(m_v3_test_file_swmr, "dset",
        H5T.NATIVE_INT, spaceId, H5P.DEFAULT, plistId);
    Assert.IsTrue(datasetId >= 0);

    // Write the whole buffer, then flush under SWMR.
    GCHandle pinned = GCHandle.Alloc(buffer, GCHandleType.Pinned);
    Assert.IsTrue(H5D.write(datasetId, H5T.NATIVE_INT, H5S.ALL, H5S.ALL,
        H5P.DEFAULT, pinned.AddrOfPinnedObject()) >= 0);
    pinned.Free();

    Assert.IsTrue(H5D.flush(datasetId) >= 0);

    Assert.IsTrue(H5D.close(datasetId) >= 0);
    Assert.IsTrue(H5P.close(plistId) >= 0);
    Assert.IsTrue(H5S.close(spaceId) >= 0);
}
// Matrix Specific Methods
// TODO:
// 1. add handling for matrix title
// 2. add specification of NA values
// 3. other attributes: pa-format flag, year int, source string

/// <summary>
/// Returns a single row of the named matrix.
/// </summary>
/// <typeparam name="T">Element type of the matrix.</typeparam>
/// <param name="matName">Name of the matrix table to read from.</param>
/// <param name="rowIndex">Zero-based row to select.</param>
/// <returns>The row's values; an all-default array if the table is unknown.</returns>
public T[] GetMatrixRow<T>(string matName, int rowIndex)
{
    var rowData = new T[Shape[1]];

    H5DataSetId matId;
    // Single dictionary lookup (the original did ContainsKey + TryGetValue).
    if (tables.TryGetValue(matName, out matId))
    {
        H5DataTypeId matDataId = H5D.getType(matId);
        H5DataSpaceId spaceId = H5S.create_simple(2, Shape);
        var h5matrix = new H5Array<T>(rowData);

        // Select a 1 x Shape[1] hyperslab: the requested row.
        long[] start = { rowIndex, 0 };
        long[] count = { 1, Shape[1] };
        H5S.selectHyperslab(spaceId, H5S.SelectOperator.SET, start, count);
        H5DataSpaceId readSpaceId = H5S.create_simple(2, count);

        // Keep the transfer property list in a local so it can be closed;
        // the original leaked one per call.
        H5PropertyListId xferId = H5P.create(H5P.PropertyListClass.DATASET_XFER);
        H5D.read(matId, matDataId, readSpaceId, spaceId, xferId, h5matrix);

        // Release everything opened here (the original leaked the plist
        // and the datatype id).
        H5P.close(xferId);
        H5T.close(matDataId);
        H5S.close(spaceId);
        H5S.close(readSpaceId);
    }
    else
    {
        Console.WriteLine("table {0} not found in matrix file", matName);
    }
    return rowData;
}
// Creates the initial chunked dataset for this writer and writes the first
// block of data. Must be called exactly once, before any append; the guards
// reject a missing group/file and an already-existing dataset.
public void FirstDataset(Array dataset)
{
    if (FalseGroupId)
    {
        throw new Exception("cannot call FirstDataset because group or file couldn't be created");
    }
    if (DatasetExists)
    {
        throw new Exception("cannot call FirstDataset because dataset already exists");
    }

    Rank = dataset.Rank;
    currentDims = GetDims(dataset);

    /* Create the data space with unlimited dimensions. */
    spaceId = H5S.create_simple(Rank, currentDims, maxDims);

    /* Modify dataset creation properties, i.e. enable chunking */
    propId = H5P.create(H5P.DATASET_CREATE);
    status = H5P.set_chunk(propId, Rank, chunkDims);

    /* Create a new dataset within the file using chunk creation properties. */
    datasetId = H5D.create(GroupId, Datasetname, datatype, spaceId,
        H5P.DEFAULT, propId, H5P.DEFAULT);

    /* Write data to dataset */
    GCHandle hnd = GCHandle.Alloc(dataset, GCHandleType.Pinned);
    status = H5D.write(datasetId, datatype, H5S.ALL, H5S.ALL, H5P.DEFAULT,
        hnd.AddrOfPinnedObject());
    hnd.Free();

    // Only the dataspace is released here; datasetId and propId stay open —
    // presumably needed for later appends and closed at teardown. TODO confirm.
    H5S.close(spaceId);
}
public void H5Pset_mdc_image_configTest1()
{
    hid_t faplId = H5P.create(H5P.FILE_ACCESS);
    Assert.IsTrue(faplId >= 0);
    Assert.IsTrue(H5P.set_libver_bounds(faplId, H5F.libver_t.LATEST) >= 0);

    var config = new H5AC.cache_image_config_t
    {
        version = H5AC.CURR_CACHE_IMAGE_CONFIG_VERSION,
        entry_ageout = H5AC.CACHE_IMAGE__ENTRY_AGEOUT__NONE
    };

    // Round-trip the config struct through unmanaged memory.
    IntPtr buffer = Marshal.AllocHGlobal(Marshal.SizeOf(config));
    Marshal.StructureToPtr(config, buffer, false);

    var roundTripped = (H5AC.cache_image_config_t)Marshal.PtrToStructure(
        buffer, typeof(H5AC.cache_image_config_t));

    //Assert.IsTrue(H5P.set_mdc_image_config(fapl, config_ptr) >= 0);
    Assert.IsTrue(H5P.close(faplId) >= 0);
    Marshal.FreeHGlobal(buffer);
}
/// <summary>
/// Create a temporary HDF5 file with SWMR access and return
/// its name (via <paramref name="fileName"/>) and a file handle.
/// </summary>
/// <exception cref="ApplicationException">Any HDF5 call fails.</exception>
public static hid_t H5TempFileSWMR(ref string fileName)
{
    hid_t fapl = H5P.create(H5P.FILE_ACCESS);
    if (fapl < 0)
    {
        throw new ApplicationException("H5P.create failed.");
    }
    try
    {
        // SWMR requires the latest file format.
        if (H5P.set_libver_bounds(fapl, H5F.libver_t.LATEST) < 0)
        {
            throw new ApplicationException("H5P.set_libver_bounds failed.");
        }
        if (H5P.set_fclose_degree(fapl, H5F.close_degree_t.STRONG) < 0)
        {
            throw new ApplicationException("H5P.set_fclose_degree failed.");
        }
        fileName = Path.GetTempFileName();
        hid_t file = H5F.create(fileName, H5F.ACC_TRUNC | H5F.ACC_SWMR_WRITE,
            H5P.DEFAULT, fapl);
        if (file < 0)
        {
            throw new ApplicationException("H5F.create failed.");
        }
        if (H5P.close(fapl) < 0)
        {
            throw new ApplicationException("H5P.close failed.");
        }
        return file;
    }
    catch
    {
        // Don't leak the access property list on failure paths
        // (the original threw without closing it). Closing an
        // already-closed id just returns a negative status.
        H5P.close(fapl);
        throw;
    }
}
public void H5Dcreate_anonTest2()
{
    // Fully unlimited 3-D dataspace with 64^3 chunks and max deflate.
    hsize_t[] shape = { 10, 10, 10 };
    hsize_t[] maxShape = { H5S.UNLIMITED, H5S.UNLIMITED, H5S.UNLIMITED };
    hid_t spaceId = H5S.create_simple(3, shape, maxShape);

    hid_t plistId = H5P.create(H5P.DATASET_CREATE);
    Assert.IsTrue(plistId >= 0);
    hsize_t[] chunkShape = { 64, 64, 64 };
    Assert.IsTrue(H5P.set_chunk(plistId, 3, chunkShape) >= 0);
    Assert.IsTrue(H5P.set_deflate(plistId, 9) >= 0);

    // Create anonymous datasets in both file-format versions.
    hid_t datasetId = H5D.create_anon(m_v0_test_file, H5T.IEEE_F32BE, spaceId, plistId);
    Assert.IsTrue(datasetId >= 0);
    Assert.IsTrue(H5D.close(datasetId) >= 0);

    datasetId = H5D.create_anon(m_v2_test_file, H5T.IEEE_F32BE, spaceId, plistId);
    Assert.IsTrue(datasetId >= 0);
    Assert.IsTrue(H5D.close(datasetId) >= 0);

    Assert.IsTrue(H5P.close(plistId) >= 0);
    Assert.IsTrue(H5S.close(spaceId) >= 0);
}
public void H5Dget_storage_sizeTest1()
{
    hsize_t[] shape = { 1024, 2048 };
    hid_t spaceId = H5S.create_simple(2, shape, null);

    hid_t plistId = H5P.create(H5P.DATASET_CREATE);
    Assert.IsTrue(plistId >= 0);
    // Early allocation so storage exists without writing any data.
    Assert.IsTrue(H5P.set_alloc_time(plistId, H5D.alloc_time_t.EARLY) >= 0);

    // 1024 * 2048 elements * 2 bytes (16-bit ints) = 4194304 bytes.
    hid_t datasetId = H5D.create(m_v0_test_file, "dset", H5T.STD_I16LE,
        spaceId, H5P.DEFAULT, plistId);
    Assert.IsTrue(datasetId >= 0);
    Assert.IsTrue(H5D.get_storage_size(datasetId) == 4194304);
    Assert.IsTrue(H5D.close(datasetId) >= 0);

    datasetId = H5D.create(m_v2_test_file, "dset", H5T.STD_I16LE,
        spaceId, H5P.DEFAULT, plistId);
    Assert.IsTrue(datasetId >= 0);
    Assert.IsTrue(H5D.get_storage_size(datasetId) == 4194304);
    Assert.IsTrue(H5D.close(datasetId) >= 0);

    Assert.IsTrue(H5P.close(plistId) >= 0);
    Assert.IsTrue(H5S.close(spaceId) >= 0);
}
public void H5Pset_append_flushTestSWMR2()
{
    hid_t daplId = H5P.create(H5P.DATASET_ACCESS);
    Assert.IsTrue(daplId >= 0);

    hsize_t[] boundary = { 1, 1 };
    H5D.append_cb_t callback = append_func;
    Assert.IsTrue(
        H5P.set_append_flush(daplId, 2, boundary, callback, IntPtr.Zero) >= 0);

    // Read the settings back; the third boundary slot must stay untouched.
    hsize_t[] readBoundary = { 0, 0, 0 };
    H5D.append_cb_t readCallback = null;
    IntPtr readUserData = new IntPtr(4711);
    Assert.IsTrue(
        H5P.get_append_flush(daplId, 2, readBoundary, ref readCallback,
            ref readUserData) >= 0);

    Assert.IsTrue(readBoundary[0] == 1);
    Assert.IsTrue(readBoundary[1] == 1);
    Assert.IsTrue(readBoundary[2] == 0);
    Assert.IsTrue(readCallback == callback);
    Assert.IsTrue(readUserData == IntPtr.Zero);

    Assert.IsTrue(H5P.close(daplId) >= 0);
}
// Writes the given measurements as a 1-D compound dataset named `name`
// under `parent`, chunked as a single chunk and deflate-compressed.
private void Write(H5GroupId parent, string name, IEnumerable<IMeasurement> measurements)
{
    // Materialize once: the original called Count() three times and then
    // enumerated a fourth time, re-running any deferred query each time.
    MeasurementT[] ms = measurements.Select(m => Convert(m)).ToArray();
    long[] dims = { ms.LongLength };

    H5DataSpaceId spaceId = H5S.create_simple(1, dims);

    // Set compression options for dataset
    H5PropertyListId dataSetPropertyList = H5P.create(H5P.PropertyListClass.DATASET_CREATE);
    H5P.setDeflate(dataSetPropertyList, NumericDataCompression);
    H5P.setChunk(dataSetPropertyList, dims);

    H5DataSetId dataSetId = H5D.create(parent,
        name,
        measurement_t,
        spaceId,
        new H5PropertyListId(H5P.Template.DEFAULT),
        dataSetPropertyList,
        new H5PropertyListId(H5P.Template.DEFAULT));

    H5D.write<MeasurementT>(dataSetId, measurement_t, new H5Array<MeasurementT>(ms));

    // Close the creation property list too — the original leaked it.
    H5P.close(dataSetPropertyList);
    H5D.close(dataSetId);
    H5S.close(spaceId);
}
// Adds 1000 attributes to the "mass_attributes" group; attribute #450 gets a
// UTF-8 encoded (CJK) name via a custom attribute-creation property list.
public static unsafe void AddMass(long fileId, ContainerType container)
{
    long res;
    var typeId = TestUtils.GetHdfTypeIdFromType(typeof(TestStructL1));

    for (int i = 0; i < 1000; i++)
    {
        var dims = new ulong[] { 2, 2, 3 };

        if (i == 450)
        {
            // Property list enabling a UTF-8 attribute name.
            var acpl_id = H5P.create(H5P.ATTRIBUTE_CREATE);
            res = H5P.set_char_encoding(acpl_id, H5T.cset_t.UTF8);
            var name = "字形碼 / 字形码, Zìxíngmǎ";
            TestUtils.Add(container, fileId, "mass_attributes", name, typeId, TestData.NonNullableStructData.AsSpan(), dims, cpl: acpl_id);
            // Release the property list — the original leaked this handle.
            res = H5P.close(acpl_id);
        }
        else
        {
            var name = $"mass_{i.ToString("D4")}";
            TestUtils.Add(container, fileId, "mass_attributes", name, typeId, TestData.NonNullableStructData.AsSpan(), dims);
        }
    }

    res = H5T.close(typeId);
}
/// <summary>
/// Writes the entire matrix table.
/// </summary>
/// <typeparam name="T">Element type of the matrix.</typeparam>
/// <param name="matName">Name of the matrix table to overwrite.</param>
/// <param name="data">Full matrix data; must match the table's Shape.</param>
public void SetMatrix<T>(string matName, T[,] data)
{
    H5DataSetId matId;
    // Single dictionary lookup (the original did ContainsKey + TryGetValue).
    if (tables.TryGetValue(matName, out matId))
    {
        H5DataTypeId matDataId = H5D.getType(matId);
        H5DataSpaceId spaceId = H5S.create_simple(2, Shape);

        // Select the whole extent of the dataset.
        long[] start = { 0, 0 };
        long[] count = { Shape[0], Shape[1] };
        var h5matrix = new H5Array<T>(data);
        H5S.selectHyperslab(spaceId, H5S.SelectOperator.SET, start, count);
        H5DataSpaceId writeSpaceId = H5S.create_simple(2, count);

        // Keep the transfer property list in a local so it can be closed;
        // the original leaked one per call.
        H5PropertyListId xferId = H5P.create(H5P.PropertyListClass.DATASET_XFER);
        H5D.write(matId, matDataId, writeSpaceId, spaceId, xferId, h5matrix);

        // Release everything opened here (the original leaked the plist
        // and the datatype id).
        H5P.close(xferId);
        H5T.close(matDataId);
        H5S.close(spaceId);
        H5S.close(writeSpaceId);
    }
    else
    {
        Console.WriteLine("table {0} not found in matrix file", matName);
    }
    return;
}
public void H5Pset_object_flush_cbTestSWMR2()
{
    hid_t faplId = H5P.create(H5P.FILE_ACCESS);
    Assert.IsTrue(faplId >= 0);

    H5F.flush_cb_t callback = flush_func;
    Assert.IsTrue(H5P.set_object_flush_cb(faplId, callback, IntPtr.Zero) >= 0);

    // Read back and verify both the callback and the user-data pointer.
    H5F.flush_cb_t readCallback = null;
    IntPtr readUserData = new IntPtr(4711);
    Assert.IsTrue(H5P.get_object_flush_cb(faplId, ref readCallback, ref readUserData) >= 0);
    Assert.IsTrue(readCallback == callback);
    Assert.IsTrue(readUserData == IntPtr.Zero);

    Assert.IsTrue(H5P.close(faplId) >= 0);
}
/// <summary>
/// Create a temporary HDF5 file IN MEMORY and return its name (via
/// <paramref name="fileName"/>) and a file handle.
/// </summary>
/// <param name="version">File-format bounds to apply.</param>
/// <param name="backing_store">Persist the core image to disk on close.</param>
/// <exception cref="ApplicationException">Any HDF5 call fails.</exception>
public static hid_t H5TempFile(ref string fileName,
    H5F.libver_t version = H5F.libver_t.LATEST,
    bool backing_store = false)
{
    hid_t fapl = H5P.create(H5P.FILE_ACCESS);
    if (fapl < 0)
    {
        throw new ApplicationException("H5P.create failed.");
    }
    try
    {
        if (H5P.set_libver_bounds(fapl, version) < 0)
        {
            throw new ApplicationException("H5P.set_libver_bounds failed.");
        }
        // use the core VFD, 64K increments, no backing store
        if (H5P.set_fapl_core(fapl, new IntPtr(65536),
            (uint)(backing_store ? 1 : 0)) < 0)
        {
            throw new ApplicationException("H5P.set_fapl_core failed.");
        }
        fileName = Path.GetTempFileName();
        hid_t file = H5F.create(fileName, H5F.ACC_TRUNC, H5P.DEFAULT, fapl);
        if (file < 0)
        {
            throw new ApplicationException("H5F.create failed.");
        }
        if (H5P.close(fapl) < 0)
        {
            throw new ApplicationException("H5P.close failed.");
        }
        return file;
    }
    catch
    {
        // Don't leak the access property list on failure paths
        // (the original threw without closing it).
        H5P.close(fapl);
        throw;
    }
}
public void H5DOappendTestSWMR2()
{
    // Start empty, growable 1-D dataset; append three values.
    hsize_t[] shape = { 0 };
    hsize_t[] maxShape = { H5S.UNLIMITED };
    hsize_t[] chunkShape = { 10 };
    uint[] buffer = { 123, 456, 789 };

    hid_t spaceId = H5S.create_simple(1, shape, maxShape);
    Assert.IsTrue(spaceId >= 0);

    hid_t plistId = H5P.create(H5P.DATASET_CREATE);
    Assert.IsTrue(plistId >= 0);
    Assert.IsTrue(H5P.set_chunk(plistId, 1, chunkShape) >= 0);

    hid_t datasetId = H5D.create(m_v3_test_file_no_swmr, "dset1",
        H5T.NATIVE_UINT, spaceId, H5P.DEFAULT, plistId, H5P.DEFAULT);
    Assert.IsTrue(datasetId >= 0);

    // Append three elements along dimension 0.
    GCHandle pinned = GCHandle.Alloc(buffer, GCHandleType.Pinned);
    Assert.IsTrue(
        H5DO.append(datasetId, H5P.DEFAULT, 0, new IntPtr(3),
            H5T.NATIVE_UINT, pinned.AddrOfPinnedObject()) >= 0);
    pinned.Free();

    Assert.IsTrue(H5D.close(datasetId) >= 0);
    Assert.IsTrue(H5P.close(plistId) >= 0);
    Assert.IsTrue(H5S.close(spaceId) >= 0);
}
// Creates a contiguous dataset whose raw storage lives in three external
// files (a: three separately-registered segments, b: later truncated below
// its declared size, c: the remainder), then writes TestData.MediumData
// through it. Used to exercise external-storage read paths.
public static unsafe void AddExternalDataset(long fileId, string datasetName, string absolutePrefix, H5DatasetAccess datasetAccess)
{
    long res;

    var bytesoftype = 4; // elements are written as NATIVE_INT32
    var dcpl_id = H5P.create(H5P.DATASET_CREATE);
    var dapl_id = H5P.create(H5P.DATASET_ACCESS);
    res = H5P.set_layout(dcpl_id, H5D.layout_t.CONTIGUOUS);

    // a (more than one chunk in file)
    // Three 10-element segments registered at byte offsets 120, 80 and 0 —
    // deliberately out of file order.
    var pathA = H5Utils.ConstructExternalFilePath(Path.Combine(absolutePrefix, $"{datasetName}_a.raw"), datasetAccess);

    if (File.Exists(pathA))
    {
        File.Delete(pathA);
    }

    res = H5P.set_external(dcpl_id, pathA, new IntPtr(120), (ulong)(10 * bytesoftype));
    res = H5P.set_external(dcpl_id, pathA, new IntPtr(80), (ulong)(10 * bytesoftype));
    res = H5P.set_external(dcpl_id, pathA, new IntPtr(0), (ulong)(10 * bytesoftype));

    // b (file size smaller than set size)
    var pathB = H5Utils.ConstructExternalFilePath(Path.Combine(absolutePrefix, $"{datasetName}_b.raw"), datasetAccess);

    if (File.Exists(pathB))
    {
        File.Delete(pathB);
    }

    res = H5P.set_external(dcpl_id, pathB, new IntPtr(0), (ulong)(10 * bytesoftype));

    // c (normal file)
    // Takes whatever MediumData has left after the 40 elements above.
    var pathC = H5Utils.ConstructExternalFilePath(Path.Combine(absolutePrefix, $"{datasetName}_c.raw"), datasetAccess);

    if (File.Exists(pathC))
    {
        File.Delete(pathC);
    }

    res = H5P.set_external(dcpl_id, pathC, new IntPtr(0), (ulong)((TestData.MediumData.Length - 40) * bytesoftype));

    // write data
    if (datasetAccess.ExternalFilePrefix is not null)
    {
        H5P.set_efile_prefix(dapl_id, datasetAccess.ExternalFilePrefix);
    }

    TestUtils.Add(ContainerType.Dataset, fileId, "external", datasetName, H5T.NATIVE_INT32, TestData.MediumData.AsSpan(), apl: dapl_id, cpl: dcpl_id);

    // truncate file b
    // Done AFTER the write so file b ends up shorter than its declared
    // 10-element extent.
    using (var fileStream2 = File.OpenWrite(pathB))
    {
        fileStream2.SetLength(10);
    };

    res = H5P.close(dapl_id);
    res = H5P.close(dcpl_id);
}
public void H5Pset_evict_on_closeTest2()
{
    // Evict-on-close is a file *access* property; setting it on a
    // file-creation list must fail.
    hid_t fcplId = H5P.create(H5P.FILE_CREATE);
    Assert.IsTrue(fcplId >= 0);
    Assert.IsFalse(H5P.set_evict_on_close(fcplId, 1) >= 0);
    Assert.IsTrue(H5P.close(fcplId) >= 0);
}
public void H5Pset_evict_on_closeTest1()
{
    // Setting evict-on-close on a file-access list must succeed.
    hid_t faplId = H5P.create(H5P.FILE_ACCESS);
    Assert.IsTrue(faplId >= 0);
    Assert.IsTrue(H5P.set_evict_on_close(faplId, 1) >= 0);
    Assert.IsTrue(H5P.close(faplId) >= 0);
}
public void H5Pset_userblockTest1()
{
    // 1024 is a valid user-block size (power of two >= 512).
    hid_t fcplId = H5P.create(H5P.FILE_CREATE);
    Assert.IsTrue(fcplId >= 0);
    Assert.IsTrue(H5P.set_userblock(fcplId, 1024) >= 0);
    Assert.IsTrue(H5P.close(fcplId) >= 0);
}
// Builds a dataset-creation property list: chunked layout with the given
// chunk shape and mid-level gzip compression (level 6). Caller closes it.
private static long create_property(ulong[] chunk_size)
{
    var plistId = H5P.create(H5P.DATASET_CREATE);
    H5P.set_layout(plistId, H5D.layout_t.CHUNKED);
    H5P.set_chunk(plistId, chunk_size.Length, chunk_size);
    H5P.set_deflate(plistId, 6);
    return plistId;
}
// Opens the VDS file read-only (bailing out if it does not exist), opens or
// creates the VDS_META companion file, and launches the meta navigator over
// the campaign tree. All HDF5 handles are released in the finally block.
private void Menu_2()
{
    long vdsFileId = -1;
    long vdsMetaFileId = -1;
    long fcPropertyId = -1;

    string vdsFilePath;
    string vdsMetaFilePath;

    List<CampaignInfo> campaignInfoSet;
    IList<HdfElementBase> currentList;

    //
    vdsFilePath = Path.Combine(Program.BaseDirectoryPath, "VDS.h5");
    vdsMetaFilePath = Path.Combine(Program.BaseDirectoryPath, "VDS_META.h5");

    try
    {
        if (File.Exists(vdsFilePath))
        {
            vdsFileId = H5F.open(vdsFilePath, H5F.ACC_RDONLY);
        }
        else
        {
            // Nothing to browse without the VDS file.
            return;
        }

        if (File.Exists(vdsMetaFilePath))
        {
            vdsMetaFileId = H5F.open(vdsMetaFilePath, H5F.ACC_RDWR);
        }

        if (vdsMetaFileId == -1)
        {
            // Create the meta file with persistent free-space tracking.
            fcPropertyId = H5P.create(H5P.FILE_CREATE);
            H5P.set_file_space(fcPropertyId, H5F.file_space_type_t.ALL_PERSIST);
            vdsMetaFileId = H5F.create(vdsMetaFilePath, H5F.ACC_TRUNC, fcPropertyId);
        }

        campaignInfoSet = GeneralHelper.GetCampaignInfoSet(vdsFileId, true);
        currentList = campaignInfoSet.Cast<HdfElementBase>().ToList();

        new VdsMetaNavigator(vdsFileId, vdsMetaFileId, "/", currentList);
    }
    finally
    {
        // Close the creation property list too — the original leaked it.
        if (H5I.is_valid(fcPropertyId) > 0) { H5P.close(fcPropertyId); }
        if (H5I.is_valid(vdsFileId) > 0) { H5F.close(vdsFileId); }
        if (H5I.is_valid(vdsMetaFileId) > 0) { H5F.close(vdsMetaFileId); }
    }
}
// Adds a dataset using the compact layout (raw data stored in the object
// header rather than in separate storage).
public static unsafe void AddCompactDataset(long fileId)
{
    long res;

    var creationPlist = H5P.create(H5P.DATASET_CREATE);
    res = H5P.set_layout(creationPlist, H5D.layout_t.COMPACT);

    TestUtils.Add(ContainerType.Dataset, fileId, "compact", "compact", H5T.NATIVE_INT32, TestData.SmallData.AsSpan(), cpl: creationPlist);

    res = H5P.close(creationPlist);
}
public void H5Pset_userblockTest3()
{
    // Valid user-block sizes are 0 or a power of two >= 512;
    // 111 and 513 must be rejected.
    hid_t fcplId = H5P.create(H5P.FILE_CREATE);
    Assert.IsTrue(fcplId >= 0);
    Assert.IsTrue(H5P.set_userblock(fcplId, 0) >= 0);
    Assert.IsFalse(H5P.set_userblock(fcplId, 111) >= 0);
    Assert.IsTrue(H5P.set_userblock(fcplId, 512) >= 0);
    Assert.IsFalse(H5P.set_userblock(fcplId, 513) >= 0);
    Assert.IsTrue(H5P.close(fcplId) >= 0);
}
public void H5Dget_chunk_infoTest1()
{
    hsize_t[] dims = { 10, 10 };
    hsize_t[] max_dims = { H5S.UNLIMITED, H5S.UNLIMITED };
    hid_t space = H5S.create_simple(2, dims, max_dims);

    hid_t dcpl = H5P.create(H5P.DATASET_CREATE);
    Assert.IsTrue(dcpl >= 0);
    hsize_t[] chunk = { 4, 4 };
    Assert.IsTrue(H5P.set_chunk(dcpl, 2, chunk) >= 0);
    // Allocate and fill chunks eagerly so chunk addresses exist on disk.
    Assert.IsTrue(H5P.set_alloc_time(dcpl, H5D.alloc_time_t.EARLY) >= 0);
    Assert.IsTrue(H5P.set_fill_time(dcpl, H5D.fill_time_t.ALLOC) >= 0);

    hid_t dset = H5D.create(m_v0_test_file, "Early Bird1", H5T.IEEE_F32BE,
        space, H5P.DEFAULT, dcpl);
    Assert.IsTrue(dset >= 0);

    // This should work but doesn't:
    // Assert.IsTrue(H5D.get_num_chunks(dset, H5S.ALL, ref nchunks) >= 0);

    hid_t fspace = H5D.get_space(dset);
    Assert.IsTrue(fspace >= 0);
    Assert.IsTrue(H5S.select_all(fspace) >= 0);

    hsize_t index = 8, size = 0;
    hsize_t[] offset = { 4711, 4712 };
    uint32_t filter_mask = 0;
    haddr_t addr = 0;
    Assert.IsTrue(H5D.get_chunk_info(dset, fspace, index, offset,
        ref filter_mask, ref addr, ref size) >= 0);
    Assert.IsTrue(offset[0] > 0);
    Assert.IsTrue(filter_mask == 0 && size > 0 && addr > 0);
    // Close the file dataspace — the original leaked this handle.
    Assert.IsTrue(H5S.close(fspace) >= 0);
    Assert.IsTrue(H5D.close(dset) >= 0);

    dset = H5D.create(m_v2_test_file, "Early Bird1", H5T.IEEE_F32BE,
        space, H5P.DEFAULT, dcpl);
    Assert.IsTrue(dset >= 0);

    // This should work but doesn't:
    // Assert.IsTrue(H5D.get_num_chunks(dset, H5S.ALL, ref nchunks) >= 0);

    fspace = H5D.get_space(dset);
    Assert.IsTrue(fspace >= 0);
    Assert.IsTrue(H5S.select_all(fspace) >= 0);

    Assert.IsTrue(H5D.get_chunk_info(dset, fspace, index, offset,
        ref filter_mask, ref addr, ref size) >= 0);
    Assert.IsTrue(offset[0] > 0);
    Assert.IsTrue(filter_mask == 0 && size > 0 && addr > 0);
    // Close the second file dataspace — also leaked in the original.
    Assert.IsTrue(H5S.close(fspace) >= 0);
    Assert.IsTrue(H5D.close(dset) >= 0);

    // Release the shared creation plist and dataspace (leaked before).
    Assert.IsTrue(H5P.close(dcpl) >= 0);
    Assert.IsTrue(H5S.close(space) >= 0);
}
public static void ClassInit(TestContext testContext)
{
    // Create the shared test file with the latest file-format bounds.
    hid_t faplId = H5P.create(H5P.FILE_ACCESS);
    Assert.IsTrue(faplId >= 0);
    Assert.IsTrue(H5P.set_libver_bounds(faplId, H5F.libver_t.LATEST) >= 0);
    m_shared_file_id = H5F.create(m_shared_file_name, H5F.ACC_TRUNC,
        H5P.DEFAULT, faplId);
    Assert.IsTrue(H5P.close(faplId) >= 0);
}
// Opens the dataset at datasetPath, or creates it as a shuffled,
// gzip(7)-compressed 1-D dataset of chunkLength * chunkCount elements,
// chunked by chunkLength. Intermediate groups are created as needed.
public static (long DatasetId, bool IsNew) OpenOrCreateDataset(long locationId, string datasetPath, long datasetTypeId, ulong chunkLength, ulong chunkCount, IntPtr fillValue = default)
{
    return IOHelper.OpenOrCreateDataset(locationId, datasetPath, datasetTypeId, () =>
    {
        long createPlistId = -1;
        long linkPlistId = -1;
        long spaceId = -1;
        long newDatasetId = -1;

        try
        {
            // Creation properties: optional fill value, shuffle filter,
            // deflate level 7, chunked by chunkLength.
            createPlistId = H5P.create(H5P.DATASET_CREATE);

            if (fillValue != IntPtr.Zero)
            {
                H5P.set_fill_value(createPlistId, datasetTypeId, fillValue);
            }

            H5P.set_shuffle(createPlistId);
            H5P.set_deflate(createPlistId, 7);
            H5P.set_chunk(createPlistId, 1, new ulong[] { chunkLength });

            // Allow missing intermediate groups in datasetPath.
            linkPlistId = H5P.create(H5P.LINK_CREATE);
            H5P.set_create_intermediate_group(linkPlistId, 1);

            spaceId = H5S.create_simple(1, new ulong[] { chunkLength * chunkCount }, null);
            newDatasetId = H5D.create(locationId, datasetPath, datasetTypeId, spaceId, linkPlistId, createPlistId);

            if (H5I.is_valid(newDatasetId) <= 0)
            {
                throw new Exception($"{ ErrorMessage.IOHelper_CouldNotOpenOrCreateDataset } Dataset: '{ datasetPath }'.");
            }
        }
        finally
        {
            // Release the helper handles; the dataset id is returned open.
            if (H5I.is_valid(createPlistId) > 0) { H5P.close(createPlistId); }
            if (H5I.is_valid(linkPlistId) > 0) { H5P.close(linkPlistId); }
            if (H5I.is_valid(spaceId) > 0) { H5S.close(spaceId); }
        }

        return newDatasetId;
    });
}
/// <summary>
/// Creates a dataset named <paramref name="name"/> in this container, closes
/// it immediately, and returns a wrapper that re-opens it on demand.
/// </summary>
/// <param name="name">Dataset name within this group/file.</param>
/// <param name="shape">Extent of each dimension.</param>
/// <param name="dType">.NET element type, mapped via OpenHDFDataType.</param>
/// <param name="maxSize">Size hint passed to OpenHDFDataType (e.g. string length).</param>
/// <param name="unlimited">Per-dimension growable flags; null means fixed-size.</param>
/// <param name="chunkShape">Chunk shape when compressing; defaults to the full shape.</param>
/// <param name="compress">Enable chunked layout with gzip level 9.</param>
public HDF5DataSet CreateDataset(string name, ulong[] shape, Type dType, long maxSize = 1, bool[] unlimited = null, ulong[] chunkShape = null, bool compress = false)
{
    HDF5DataSet result = null;
    With((id) =>
    {
        int nDims = shape.Length;
        if (unlimited == null)
        {
            // Default: no growable dimensions.
            unlimited = Enumerable.Range(0, nDims).Select(d => false).ToArray();
        }
        // Growable dimensions get an unlimited maximum extent.
        ulong[] maxShape = Enumerable.Range(0, nDims).Select(d => unlimited[d] ? H5S.UNLIMITED : shape[d]).ToArray();
        var dataspaceID = H5S.create_simple(nDims, shape, maxShape);
        long dataTypeID = HDF5DataSet.OpenHDFDataType(dType, maxSize);
        long creationPropertyList = 0L;
        if (compress)
        {
            if (chunkShape == null)
            {
                chunkShape = shape;
            }
            creationPropertyList = H5P.create(H5P.DATASET_CREATE);
            H5P.set_layout(creationPropertyList, H5D.layout_t.CHUNKED);
            H5P.set_deflate(creationPropertyList, 9);
            H5P.set_chunk(creationPropertyList, shape.Length, chunkShape);
        }
        // NOTE(review): 0L is passed for the link-creation and access plists
        // (and for creation when not compressing) — presumably relying on
        // 0 == H5P.DEFAULT; confirm against the binding in use.
        var newID = H5D.create(id, name, dataTypeID, dataspaceID, 0L, creationPropertyList, 0L);
        if (creationPropertyList > 0)
        {
            H5P.close(creationPropertyList);
        }
        H5T.close(dataTypeID);
        H5S.close(dataspaceID);
        if (newID <= 0)
        {
            throw new H5SSException("Couldn't create DataSet");
        }
        // write!
        H5D.close(newID);
        result = new HDF5DataSet(name, this);
    });
    return (result);
}
// Demo: writes a 10000-record compound dataset ("Table1") to D:\test.h5,
// chunked (1000 records per chunk) with gzip level 6.
static void Main2222(string[] args)
{
    var h5 = H5F.create(@"D:\test.h5", H5F.ACC_TRUNC);

    // Compound type, 40 bytes per record:
    // 16-byte fixed string + int32 + int32 + float (+4 pad) + double.
    var typeId = H5T.create(H5T.class_t.COMPOUND, new IntPtr(40));
    var strtype = H5T.copy(H5T.C_S1);
    H5T.set_size(strtype, new IntPtr(16));

    H5T.insert(typeId, "Name", new IntPtr(0), strtype);
    H5T.insert(typeId, "x_pos", new IntPtr(16), H5T.NATIVE_INT32);
    H5T.insert(typeId, "y_pos", new IntPtr(20), H5T.NATIVE_INT32);
    H5T.insert(typeId, "Mass", new IntPtr(24), H5T.NATIVE_FLOAT);
    H5T.insert(typeId, "Temperature", new IntPtr(32), H5T.NATIVE_DOUBLE);

    ulong[] dims = new ulong[] { 10000 };
    ulong[] chunk_size = new ulong[] { 1000 };

    var spaceid = H5S.create_simple(dims.Length, dims, null);

    var dcpl = H5P.create(H5P.DATASET_CREATE);
    // The original also called set_layout(COMPACT) here, which contradicts
    // (and is overridden by) set_chunk below — chunked layout is required
    // for deflate, so the compact setting has been removed.
    H5P.set_deflate(dcpl, 6);
    H5P.set_chunk(dcpl, chunk_size.Length, chunk_size);

    var datasetid = H5D.create(h5, "Table1", typeId, spaceid, H5P.DEFAULT, dcpl);

    ComType ct = new ComType()
    {
        Name = "aabb",
        x_pos = 2,
        y_pos = 1,
        Mass = 1.24F,
        Temperature = 45.7,
    };

    const int recordSize = 40;
    IntPtr p = Marshal.AllocHGlobal(recordSize * (int)dims[0]);
    try
    {
        // Fill every record: the original marshalled only the first struct
        // and wrote 9999 records of uninitialized heap memory to the file.
        for (int i = 0; i < (int)dims[0]; i++)
        {
            Marshal.StructureToPtr(ct, IntPtr.Add(p, i * recordSize), false);
        }
        H5D.write(datasetid, typeId, spaceid, H5S.ALL, H5P.DEFAULT, p);
    }
    finally
    {
        // The original leaked this buffer.
        Marshal.FreeHGlobal(p);
    }

    // Release all HDF5 handles (the original closed only the file).
    H5D.close(datasetid);
    H5P.close(dcpl);
    H5S.close(spaceid);
    H5T.close(strtype);
    H5T.close(typeId);
    H5F.close(h5);
}
// Verifies that an attribute can be created with an explicit (default)
// attribute-creation property list. Any failure increments the module-level
// error counter instead of aborting the run.
static void test_attr_plist()
{
    try
    {
        Console.Write("Testing attribute property lists");

        // (The original also declared an unused dims = { 256, 512 } array.)
        const string PLST_FILE_NAME = ("tattr_plist.h5");
        hssize_t[] dims1 = { SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3 };
        hssize_t[] dims2 = { ATTR1_DIM };

        // Create file.
        H5FileId fileId = H5F.create(PLST_FILE_NAME, H5F.CreateMode.ACC_TRUNC);

        // Create dataspace for dataset.
        H5DataSpaceId space1_Id = H5S.create_simple(SPACE1_RANK, dims1);

        // Create a dataset.
        H5DataSetId dsetId = H5D.create(fileId, DSET1_NAME, H5T.H5Type.NATIVE_UCHAR, space1_Id);

        // Create dataspace for attribute.
        H5DataSpaceId space2_Id = H5S.create_simple(ATTR1_RANK, dims2);

        // Create default property list for attribute.
        H5PropertyListId plist = H5P.create(H5P.PropertyListClass.ATTRIBUTE_CREATE);

        // Create an attribute for the dataset using the property list.
        H5AttributeId attrId = H5A.create(dsetId, ATTR1_NAME, new H5DataTypeId(H5T.H5Type.NATIVE_INT), space2_Id, plist);

        // Close all objects.
        H5S.close(space1_Id);
        H5S.close(space2_Id);
        H5P.close(plist);
        H5A.close(attrId);
        H5D.close(dsetId);
        H5F.close(fileId);

        Console.WriteLine("\t\t\tPASSED");
    }
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_attr_plist
// Adds a 25 x 25 x 4 chunked dataset whose 7 x 20 x 3 chunks deliberately do
// not align with the dataset extents, for hyperslab-selection tests.
public static unsafe void AddChunkedDatasetForHyperslab(long fileId)
{
    long res;

    var creationPlist = H5P.create(H5P.DATASET_CREATE);
    var datasetDims = new ulong[] { 25, 25, 4 };
    var chunkDims = new ulong[] { 7, 20, 3 };
    res = H5P.set_chunk(creationPlist, 3, chunkDims);

    TestUtils.Add(ContainerType.Dataset, fileId, "chunked", "hyperslab", H5T.NATIVE_INT32, TestData.MediumData.AsSpan(), datasetDims, cpl: creationPlist);

    res = H5P.close(creationPlist);
}