/// <summary>
/// Change the dimensions of the dataset to <paramref name="new_dims"/>.
/// </summary>
/// <param name="new_dims">Target extent, one entry per dataspace dimension.</param>
/// <exception cref="NotImplementedException">The loaded hdf5 library is too old to resize.</exception>
/// <exception cref="RankException">The number of dimensions given does not match the dataspace rank.</exception>
/// <exception cref="IndexOutOfRangeException">A dimension is negative or exceeds its (non-unlimited) maximum.</exception>
/// <exception cref="H5LibraryException">The native H5D.set_extent call failed.</exception>
public void Resize(params long[] new_dims)
{
    if (!HasH5Pcreate)
    {
        throw new NotImplementedException($"cannot resize using hdf5 v{H5Library.LibVersion}");
    }

    using (H5Space space = GetSpace())
    {
        if (new_dims.Length != space.Rank)
        {
            throw new RankException($"{new_dims.Length} != {space.Rank}");
        }

        long[] maxdims = space.MaxDims;
        ulong[] extent = new ulong[space.Rank];

        for (int dim = 0; dim < new_dims.Length; dim++)
        {
            // Reject negative sizes and sizes beyond a finite maximum
            // (dimensions declared unlimited may grow without bound).
            if (new_dims[dim] < 0 || (new_dims[dim] > maxdims[dim] && maxdims[dim] != H5Space.Unlimited))
            {
                throw new IndexOutOfRangeException($"{new_dims[dim]} > {maxdims[dim]}");
            }
            extent[dim] = (ulong)new_dims[dim];
        }

        int status = H5D.set_extent(ID, extent);
        if (status < 0)
        {
            throw new H5LibraryException($"H5D.set_extent() returned ({status})");
        }
    }
}
/// <summary>
/// Verifies that H5D.set_extent fails (returns a negative status) when
/// handed a null dimension array, for datasets in both test files.
/// </summary>
public void H5Dset_extentTest2()
{
    hsize_t[] initialDims = { 10, 10, 10 };
    hsize_t[] maxDims = { H5S.UNLIMITED, H5S.UNLIMITED, H5S.UNLIMITED };
    hid_t spaceId = H5S.create_simple(3, initialDims, maxDims);

    // Chunking is required for a dataset with unlimited dimensions.
    hid_t plistId = H5P.create(H5P.DATASET_CREATE);
    Assert.IsTrue(plistId >= 0);
    hsize_t[] chunkDims = { 64, 64, 64 };
    Assert.IsTrue(H5P.set_chunk(plistId, 3, chunkDims) >= 0);

    // v0 superblock file: null extent must be rejected.
    hid_t datasetId = H5D.create_anon(m_v0_test_file, H5T.IEEE_F32BE, spaceId, plistId);
    Assert.IsTrue(datasetId >= 0);
    Assert.IsFalse(H5D.set_extent(datasetId, (ulong[])null) >= 0);
    Assert.IsTrue(H5D.close(datasetId) >= 0);

    // v2 superblock file: same expectation.
    datasetId = H5D.create_anon(m_v2_test_file, H5T.IEEE_F32BE, spaceId, plistId);
    Assert.IsTrue(datasetId >= 0);
    Assert.IsFalse(H5D.set_extent(datasetId, (ulong[])null) >= 0);
    Assert.IsTrue(H5D.close(datasetId) >= 0);

    Assert.IsTrue(H5P.close(plistId) >= 0);
    Assert.IsTrue(H5S.close(spaceId) >= 0);
}
/// <summary>
/// Grows the dataset by the length of the pending byte buffer and
/// appends that buffer at the old end of the dataset.
/// </summary>
/// <exception cref="HDF5Exception">The native extent change failed.</exception>
public void HDF5Append()
{
    var grownDims = new hsize_t[] { dataSetRows_ + (hsize_t)bytes_.Length };
    var status = H5D.set_extent(dataSet_, grownDims);
    if (status < 0)
    {
        throw new HDF5Exception("H5D.set_extent failed.");
    }
    AppendData(dataSet_, bytes_, ref dataSetRows_);
}
/// <summary>
/// Appends <paramref name="dataset"/> to the existing dataset by extending
/// its first dimension and writing the new rows into the extended region.
/// The dataset must have been created first (constructor / FirstDataset).
/// </summary>
/// <param name="dataset">Data to append; its rank must match the existing dataset.</param>
/// <exception cref="Hdf5Exception">No dataset exists yet to append to.</exception>
public void AppendDataset(Array dataset)
{
    if (!Hdf5Utils.GetRealName(GroupId, Datasetname, string.Empty).valid)
    {
        string msg = "call constructor or FirstDataset first before appending.";
        Hdf5Utils.LogError?.Invoke(msg);
        throw new Hdf5Exception(msg);
    }

    _oldDims = _currentDims;
    _currentDims = GetDims(dataset);
    int rank = dataset.Rank;
    ulong[] zeros = Enumerable.Range(0, rank).Select(z => (ulong)0).ToArray();

    /* Extend the dataset along the first dimension only; the trailing
       dimensions keep their previous sizes. */
    var size = new[] { _oldDims[0] + _currentDims[0] }.Concat(_oldDims.Skip(1)).ToArray();
    _status = H5D.set_extent(_datasetId, size);

    /* Offset of the newly added region: start of the old end, zero elsewhere. */
    ulong[] offset = new[] { _oldDims[0] }.Concat(zeros.Skip(1)).ToArray();

    /* Select a hyperslab in extended portion of dataset.
       try/finally guarantees the dataspace handles are closed and the pinned
       GCHandle is released even if a native call throws. */
    var filespaceId = H5D.get_space(_datasetId);
    try
    {
        _status = H5S.select_hyperslab(filespaceId, H5S.seloper_t.SET, offset, null, _currentDims, null);

        /* Define memory space matching the appended block. */
        var memId = H5S.create_simple(Rank, _currentDims, null);
        try
        {
            /* Write the data to the extended portion of dataset. */
            GCHandle hnd = GCHandle.Alloc(dataset, GCHandleType.Pinned);
            try
            {
                _status = H5D.write(_datasetId, _datatype, memId, filespaceId, H5P.DEFAULT, hnd.AddrOfPinnedObject());
            }
            finally
            {
                hnd.Free();
            }
        }
        finally
        {
            H5S.close(memId);
        }

        _currentDims = size;
    }
    finally
    {
        // filespaceId is a dataspace and must be closed with H5S.close.
        H5S.close(filespaceId);
    }
}
/// <summary>
/// Appends <paramref name="dataset"/> to the existing dataset by extending
/// its first dimension and writing the new rows into the extended region.
/// The dataset must have been created first (constructor / FirstDataset).
/// </summary>
/// <param name="dataset">Data to append; its rank must match the existing dataset.</param>
/// <exception cref="Exception">No dataset exists yet to append to.</exception>
public void AppendDataset(Array dataset)
{
    if (!DatasetExists)
    {
        throw new Exception("call constructor or FirstDataset first before appending.");
    }

    oldDims = currentDims;
    currentDims = getDims(dataset);
    int rank = dataset.Rank;
    ulong[] zeros = Enumerable.Range(0, rank).Select(z => (ulong)0).ToArray();

    /* Extend the dataset along the first dimension only; the trailing
       dimensions keep their previous sizes. */
    var size = new ulong[] { oldDims[0] + currentDims[0] }.Concat(oldDims.Skip(1)).ToArray();
    status = H5D.set_extent(datasetId, size);

    /* Offset of the newly added region: start at the old end, zero elsewhere. */
    ulong[] offset = new ulong[] { oldDims[0] }.Concat(zeros.Skip(1)).ToArray();

    /* Select a hyperslab in extended portion of dataset.
       try/finally guarantees the dataspace handles are closed and the pinned
       GCHandle is released even if a native call throws. */
    var filespaceId = H5D.get_space(datasetId);
    try
    {
        status = H5S.select_hyperslab(filespaceId, H5S.seloper_t.SET, offset, null, currentDims, null);

        /* Define memory space matching the appended block. */
        var memId = H5S.create_simple(Rank, currentDims, null);
        try
        {
            /* Write the data to the extended portion of dataset. */
            GCHandle hnd = GCHandle.Alloc(dataset, GCHandleType.Pinned);
            try
            {
                status = H5D.write(datasetId, datatype, memId, filespaceId, H5P.DEFAULT, hnd.AddrOfPinnedObject());
            }
            finally
            {
                hnd.Free();
            }
        }
        finally
        {
            H5S.close(memId);
        }

        currentDims = size;
    }
    finally
    {
        //filespaceId must be closed by H5S.close() not H5D
        H5S.close(filespaceId);
    }
}
/// <summary>
/// Creates a chunked, first-dimension-unlimited integer dataset, resizes it
/// to <paramref name="extent"/>, and writes a 2x4 block at the origin.
/// Failures are reported via WriteLine rather than thrown.
/// </summary>
/// <param name="file">Open HDF5 file id.</param>
/// <param name="dataSetName">Name for the new dataset.</param>
/// <param name="dims">Initial dimensions of the dataset.</param>
/// <param name="extent">New dimensions to apply via H5D.set_extent.</param>
static void TestExtent(hid_t file, string dataSetName, ulong[] dims, ulong[] extent)
{
    var dataSet = CreateDataSet(file, dataSetName, H5T.NATIVE_INT, dims,
        new ulong[] { H5S.UNLIMITED, 4 }, new ulong[] { 4, 4 });

    // Check the resize result instead of silently discarding it
    // (H5D.write below is already checked the same way).
    if (H5D.set_extent(dataSet, extent) < 0)
    {
        WriteLine("H5D.set_extent failed.");
    }

    var fileSpace = H5D.get_space(dataSet);
    H5S.select_hyperslab(fileSpace, H5S.seloper_t.SET, new ulong[] { 0, 0 }, null, new ulong[] { 2, 4 }, null);
    var memSpace = H5S.create_simple(2, new ulong[] { 2, 4 }, null);

    if (H5D.write(dataSet, H5T.NATIVE_INT, memSpace, fileSpace, H5P.DEFAULT,
        new PinnedObject(Enumerable.Range(0, 8).ToArray())) < 0)
    {
        WriteLine("H5D.write failed.");
    }

    // Close the dataspace handles as well as the dataset — the original
    // leaked fileSpace and memSpace.
    H5S.close(memSpace);
    H5S.close(fileSpace);
    H5D.close(dataSet);
}
/// <summary>
/// Creates an HDF5 file named after the current timestamp and, until the
/// isRunning_ flag is cleared, appends random-length batches of consecutive
/// integers to an unlimited 1-D dataset, flushing when flushFile_ is set.
/// </summary>
private static void WriteToFile()
{
    var h5Path = Path.Combine(IOHelper.GetAppPath(), nowString_.Replace(':', '-') + ".h5");
    var fileId = H5F.create(h5Path, H5F.ACC_TRUNC);
    if (fileId < 0)
    {
        logger_.Error("Failed to create HDF5 file \"{0}\"", h5Path);
        return;
    }

    hid_t dataSetId;
    try
    {
        // Start empty, unlimited along the only dimension, chunked by 256.
        dataSetId = CreateDataSet(fileId, "data", H5T.NATIVE_INT,
            new ulong[] { 0 }, new ulong[] { H5S.UNLIMITED }, new ulong[] { 256 });
    }
    catch (Exception ex)
    {
        H5F.close(fileId);
        logger_.Error(ex, "Failed to create data set.");
        return;
    }

    FlushFile(fileId);

    var rng = new Random();
    int written = 0;
    while (Thread.VolatileRead(ref isRunning_) > 0)
    {
        var batch = Enumerable.Range(written, rng.Next(1, 100)).ToArray();

        // Grow the dataset to make room for this batch.
        var extendedDims = new ulong[] { (ulong)(written + batch.Length) };
        if (H5D.set_extent(dataSetId, extendedDims) < 0)
        {
            logger_.Error("Failed to extend data set.");
            break;
        }

        // Append the batch; AppendData advances the running row count.
        AppendData(dataSetId, batch, ref written);
        logger_.Info("Write {0}...{1} to file successfully!", batch.Min(), batch.Max());

        // Flush on demand, then clear the request flag.
        if (Thread.VolatileRead(ref flushFile_) > 0)
        {
            FlushFile(fileId);
            Thread.VolatileWrite(ref flushFile_, 0);
            logger_.Info("Flush file.");
        }

        Thread.Sleep(1);
    }

    H5D.close(dataSetId);
    H5F.close(fileId);
    logger_.Info("Done!");
}
/// <summary>
/// Appends a dataset to a hdf5 file. If called the first time a dataset is created.
/// </summary>
/// <typeparam name="T">Generic parameter only primitive types are allowed</typeparam>
/// <param name="groupId">id of the group. Can also be a file Id</param>
/// <param name="name">name of the dataset</param>
/// <param name="dset">The dataset</param>
/// <param name="chunkX">Chunk size along the first dimension when the dataset is first created</param>
/// <returns>status of the write method</returns>
public static hid_t AppendDataset<T>(hid_t groupId, string name, Array dset, ulong chunkX = 200) where T : struct
{
    var rank = dset.Rank;
    ulong[] dimsExtend = Enumerable.Range(0, rank).Select(i => (ulong)dset.GetLength(i)).ToArray();
    ulong[] maxDimsExtend = null;
    ulong[] dimsChunk = new ulong[] { chunkX }.Concat(dimsExtend.Skip(1)).ToArray();
    ulong[] zeros = Enumerable.Range(0, rank).Select(z => (ulong)0).ToArray();
    hid_t status, spaceId, datasetId;

    // name = ToHdf5Name(name);
    var datatype = GetDatatype(typeof(T));
    var typeId = H5T.copy(datatype);
    var datasetExists = H5L.exists(groupId, name) > 0;

    if (!datasetExists)
    {
        /* Create a new chunked dataset (chunking is required so the
           dataset can be extended later) and write the initial data. */
        spaceId = H5S.create_simple(dset.Rank, dimsExtend, maxDimsExtend);
        var propId = H5P.create(H5P.DATASET_CREATE);
        status = H5P.set_chunk(propId, rank, dimsChunk);
        datasetId = H5D.create(groupId, name, datatype, spaceId, H5P.DEFAULT, propId, H5P.DEFAULT);

        /* Write data to dataset; free the pin even if the write throws. */
        GCHandle hnd = GCHandle.Alloc(dset, GCHandleType.Pinned);
        try
        {
            status = H5D.write(datasetId, datatype, H5S.ALL, H5S.ALL, H5P.DEFAULT, hnd.AddrOfPinnedObject());
        }
        finally
        {
            hnd.Free();
        }
        H5P.close(propId);
    }
    else
    {
        datasetId = H5D.open(groupId, name);
        spaceId = H5D.get_space(datasetId);
        var rankOld = H5S.get_simple_extent_ndims(spaceId);
        ulong[] maxDims = new ulong[rankOld];
        ulong[] dims = new ulong[rankOld];
        // Return value is a status code, not a handle — nothing to close here.
        H5S.get_simple_extent_dims(spaceId, dims, maxDims);

        /* Extend the dataset along the first dimension only. */
        var size = new ulong[] { dims[0] + dimsExtend[0] }.Concat(dims.Skip(1)).ToArray();
        status = H5D.set_extent(datasetId, size);

        /* Select a hyperslab covering the newly extended portion. The
           post-extension dataspace (filespaceId) must be used for the
           write — the pre-extension spaceId carries no selection. */
        var filespaceId = H5D.get_space(datasetId);
        var offset = new ulong[] { dims[0] }.Concat(zeros.Skip(1)).ToArray();
        status = H5S.select_hyperslab(filespaceId, H5S.seloper_t.SET, offset, null, dimsExtend, null);

        /* Define memory space matching the appended block. */
        var memId2 = H5S.create_simple(rank, dimsExtend, null);

        /* Write the data to the extended portion of dataset. */
        GCHandle hnd = GCHandle.Alloc(dset, GCHandleType.Pinned);
        try
        {
            status = H5D.write(datasetId, datatype, memId2, filespaceId, H5P.DEFAULT, hnd.AddrOfPinnedObject());
        }
        finally
        {
            hnd.Free();
        }

        H5S.close(memId2);
        // filespaceId is a dataspace: close with H5S.close, not H5D.close.
        H5S.close(filespaceId);
    }

    H5D.close(datasetId);
    H5S.close(spaceId);
    // Release the copied datatype handle (previously leaked).
    H5T.close(typeId);
    return status;
}