public void H5DflushTestSWMR2()
{
    // Create a 6x6 chunked dataset (second dimension unlimited) in the
    // SWMR test file, write a zero-filled buffer, flush, and clean up.
    hsize_t[] dims = { 6, 6 };
    hsize_t[] maxdims = { 6, H5S.UNLIMITED };
    hsize_t[] chunk_dims = { 2, 5 };
    int[] cbuf = new int[36];

    hid_t spaceId = H5S.create_simple(2, dims, maxdims);
    Assert.IsTrue(spaceId >= 0);

    hid_t createPlist = H5P.create(H5P.DATASET_CREATE);
    Assert.IsTrue(createPlist >= 0);
    Assert.IsTrue(H5P.set_chunk(createPlist, 2, chunk_dims) >= 0);

    hid_t datasetId = H5D.create(m_v3_test_file_swmr, "dset", H5T.NATIVE_INT,
        spaceId, H5P.DEFAULT, createPlist);
    Assert.IsTrue(datasetId >= 0);

    GCHandle pinned = GCHandle.Alloc(cbuf, GCHandleType.Pinned);
    Assert.IsTrue(H5D.write(datasetId, H5T.NATIVE_INT, H5S.ALL, H5S.ALL,
        H5P.DEFAULT, pinned.AddrOfPinnedObject()) >= 0);
    pinned.Free();

    // Flush must succeed on a SWMR-enabled file.
    Assert.IsTrue(H5D.flush(datasetId) >= 0);

    Assert.IsTrue(H5D.close(datasetId) >= 0);
    Assert.IsTrue(H5P.close(createPlist) >= 0);
    Assert.IsTrue(H5S.close(spaceId) >= 0);
}
public void H5Dget_storage_sizeTest1()
{
    // 1024 x 2048 elements x 2 bytes (STD_I16LE) = 4 MiB. With EARLY
    // allocation the full storage must be reserved at creation time,
    // in both the v0 and v2 file-format test files.
    hsize_t[] dims = { 1024, 2048 };
    hid_t space = H5S.create_simple(2, dims, null);

    hid_t dcpl = H5P.create(H5P.DATASET_CREATE);
    Assert.IsTrue(dcpl >= 0);
    Assert.IsTrue(H5P.set_alloc_time(dcpl, H5D.alloc_time_t.EARLY) >= 0);

    foreach (hid_t file in new[] { m_v0_test_file, m_v2_test_file })
    {
        hid_t dset = H5D.create(file, "dset", H5T.STD_I16LE, space,
            H5P.DEFAULT, dcpl);
        Assert.IsTrue(dset >= 0);
        Assert.IsTrue(H5D.get_storage_size(dset) == 4194304);
        Assert.IsTrue(H5D.close(dset) >= 0);
    }

    Assert.IsTrue(H5P.close(dcpl) >= 0);
    Assert.IsTrue(H5S.close(space) >= 0);
}
public void AppendOrCreateDataset(Array dataset)
{
    if (_chunkDims != null)
    {
        // Dataset was already created on a previous call: just append.
        AppendDataset(dataset);
        return;
    }

    // First call: derive the chunk shape from the incoming 2-D array.
    _chunkDims = new[]
    {
        Convert.ToUInt64(dataset.GetLongLength(0)),
        Convert.ToUInt64(dataset.GetLongLength(1))
    };

    Rank = dataset.Rank;
    _currentDims = GetDims(dataset);

    // Data space with unlimited maximum dimensions so the dataset can grow.
    _spaceId = H5S.create_simple(Rank, _currentDims, _maxDims);

    // Chunking is required for extendible datasets.
    _propId = H5P.create(H5P.DATASET_CREATE);
    _status = H5P.set_chunk(_propId, Rank, _chunkDims);

    _datasetId = H5D.create(GroupId, Hdf5Utils.NormalizedName(Datasetname),
        _datatype, _spaceId, H5P.DEFAULT, _propId);

    // Write the initial block of data.
    GCHandle pinned = GCHandle.Alloc(dataset, GCHandleType.Pinned);
    _status = H5D.write(_datasetId, _datatype, H5S.ALL, H5S.ALL, H5P.DEFAULT,
        pinned.AddrOfPinnedObject());
    pinned.Free();

    // Only the dataspace is released here; _datasetId and _propId stay open,
    // presumably for later AppendDataset calls — TODO confirm they are
    // closed elsewhere (e.g. in a Dispose/Close method).
    H5S.close(_spaceId);
    _spaceId = -1;
}
/// <summary>
/// Writes a 2-D array of structs as a new HDF5 dataset under
/// <paramref name="groupId"/> and returns the H5D.write status.
/// </summary>
public static int WriteDataset <T>(int groupId, string name, T[,] dset) where T : struct
{
    ulong[] dims = new ulong[]
    {
        (ulong)dset.GetLength(0),
        (ulong)dset.GetLength(1)
    };
    ulong[] maxDims = null; // max size == current size
    var spaceId = H5S.create_simple(2, dims, maxDims);

    var datatype = GetDatatype(typeof(T));
    var typeId = H5T.copy(datatype);
    if (datatype == H5T.C_S1)
    {
        // BUG FIX: size the private copy, not the predefined type. The
        // original called H5T.set_size(datatype, ...), mutating the globally
        // shared H5T.C_S1, while the copy was created and never used.
        H5T.set_size(typeId, new IntPtr(2));
    }

    name = ToHdf5Name(name);
    var datasetId = H5D.create(groupId, name, typeId, spaceId);

    GCHandle hnd = GCHandle.Alloc(dset, GCHandleType.Pinned);
    var result = H5D.write(datasetId, typeId, H5S.ALL, H5S.ALL, H5P.DEFAULT,
        hnd.AddrOfPinnedObject());
    hnd.Free();

    H5D.close(datasetId);
    H5S.close(spaceId);
    H5T.close(typeId);
    return(result);
}
public void H5DcreateTest2()
{
    // Unlimited 3-D dataspace plus a chunked, deflate-compressed dataset
    // creation property list; create "A/B/C" (with intermediate groups)
    // in both file-format test files.
    hsize_t[] dims = { 10, 10, 10 };
    hsize_t[] max_dims = { H5S.UNLIMITED, H5S.UNLIMITED, H5S.UNLIMITED };
    hid_t space = H5S.create_simple(3, dims, max_dims);

    hid_t lcpl = H5P.create(H5P.LINK_CREATE);
    Assert.IsTrue(H5P.set_create_intermediate_group(lcpl, 1) >= 0);

    hid_t dcpl = H5P.create(H5P.DATASET_CREATE);
    Assert.IsTrue(dcpl >= 0);
    hsize_t[] chunk = { 64, 64, 64 };
    Assert.IsTrue(H5P.set_chunk(dcpl, 3, chunk) >= 0);
    Assert.IsTrue(H5P.set_deflate(dcpl, 9) >= 0);

    foreach (hid_t file in new[] { m_v0_test_file, m_v2_test_file })
    {
        hid_t dset = H5D.create(file, "A/B/C", H5T.IEEE_F32BE, space, lcpl, dcpl);
        Assert.IsTrue(dset >= 0);
        Assert.IsTrue(H5D.close(dset) >= 0);
    }

    Assert.IsTrue(H5P.close(dcpl) >= 0);
    Assert.IsTrue(H5P.close(lcpl) >= 0);
    Assert.IsTrue(H5S.close(space) >= 0);
}
public void H5Dget_typeTest1()
{
    hsize_t[] dims = { 1024, 2048 };
    // BUG FIX: the original passed rank 3 with a 2-element dims array,
    // which reads past the end of the array; the dataspace is 2-D.
    hid_t space = H5S.create_simple(2, dims, null);

    // Verify that H5D.get_type returns the declared type in both files.
    hid_t dset = H5D.create(m_v0_test_file, "dset", H5T.STD_I16LE, space);
    Assert.IsTrue(dset >= 0);
    hid_t type = H5D.get_type(dset);
    Assert.IsTrue(type >= 0);
    Assert.IsTrue(H5T.equal(type, H5T.STD_I16LE) > 0);
    Assert.IsTrue(H5T.close(type) >= 0);
    Assert.IsTrue(H5D.close(dset) >= 0);

    dset = H5D.create(m_v2_test_file, "dset", H5T.STD_I16LE, space);
    Assert.IsTrue(dset >= 0);
    type = H5D.get_type(dset);
    Assert.IsTrue(type >= 0);
    Assert.IsTrue(H5T.equal(type, H5T.STD_I16LE) > 0);
    Assert.IsTrue(H5T.close(type) >= 0);
    Assert.IsTrue(H5D.close(dset) >= 0);

    Assert.IsTrue(H5S.close(space) >= 0);
}
// information: https://www.hdfgroup.org/ftp/HDF5/examples/examples-by-api/hdf5-examples/1_8/C/H5T/h5ex_t_cmpd.c
//or: https://www.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html#t=HDF5_Users_Guide%2FDatatypes%2FHDF5_Datatypes.htm%3Frhtocid%3Dtoc6.5%23TOC_6_8_Complex_Combinationsbc-22
/// <summary>
/// Serializes a sequence of compound records into a contiguous byte buffer
/// and writes it as a new 1-D dataset. Returns the H5D.write status.
/// </summary>
public static int WriteCompounds <T>(hid_t groupId, string name, IEnumerable <T> list) //where T : struct
{
    Type type = typeof(T);
    var size = Marshal.SizeOf(type);

    // Count once: the original called list.Count() a second time below,
    // enumerating the sequence twice.
    var cnt = list.Count();

    var typeId = create_type(type);

    // Chunk size: largest power of ten <= cnt, capped at 1000.
    var log10 = (int)Math.Log10(cnt);
    ulong pow = (ulong)Math.Pow(10, log10);
    ulong c_s = Math.Min(1000, pow);
    ulong[] chunk_size = new ulong[] { c_s };
    ulong[] dims = new ulong[] { (ulong)cnt };

    // For empty/tiny inputs keep the default (contiguous) layout;
    // dcpl == 0 is H5P.DEFAULT.
    long dcpl = 0;
    if (cnt != 0 && log10 != 0)
    {
        dcpl = create_property(chunk_size);
    }

    // Create dataspace. Setting maximum size to NULL sets the maximum
    // size to be the current size.
    var spaceId = H5S.create_simple(dims.Length, dims, null);

    // Create the dataset and write the compound data to it.
    var datasetId = H5D.create(groupId, name, typeId, spaceId, H5P.DEFAULT, dcpl);

    // Serialize all records into one contiguous managed buffer.
    var ms = new MemoryStream();
    BinaryWriter writer = new BinaryWriter(ms);
    foreach (var strct in list)
    {
        writer.Write(getBytes(strct));
    }
    var bytes = ms.ToArray();

    GCHandle hnd = GCHandle.Alloc(bytes, GCHandleType.Pinned);
    var statusId = H5D.write(datasetId, typeId, spaceId, H5S.ALL, H5P.DEFAULT,
        hnd.AddrOfPinnedObject());
    hnd.Free();

    // Close and release resources. (The original also allocated an unmanaged
    // buffer via Marshal.AllocHGlobal that was never used; removed.)
    H5D.close(datasetId);
    H5S.close(spaceId);
    H5T.close(typeId);
    H5P.close(dcpl);

    return(statusId);
}
/// <summary>
/// Writes a sequence of measurements as a 1-D compressed dataset named
/// <paramref name="name"/> under <paramref name="parent"/>.
/// </summary>
private void Write(H5GroupId parent, string name, IEnumerable <IMeasurement> measurements)
{
    // Materialize the count once: the original enumerated `measurements`
    // three separate times via Count().
    int count = measurements.Count();

    H5DataSpaceId spaceId = H5S.create_simple(1, new long[1] { (long)count });

    // Set compression options for dataset.
    H5PropertyListId dataSetPropertyList = H5P.create(H5P.PropertyListClass.DATASET_CREATE);
    H5P.setDeflate(dataSetPropertyList, NumericDataCompression);
    H5P.setChunk(dataSetPropertyList, new long[] { (long)count });

    H5DataSetId dataSetId = H5D.create(parent, name, measurement_t, spaceId,
        new H5PropertyListId(H5P.Template.DEFAULT), dataSetPropertyList,
        new H5PropertyListId(H5P.Template.DEFAULT));

    MeasurementT[] ms = new MeasurementT[count];
    int ilmCount = 0;
    foreach (IMeasurement m in measurements)
    {
        MeasurementT mt = Convert(m);
        ms[ilmCount++] = mt;
    }

    H5D.write <MeasurementT>(dataSetId, measurement_t, new H5Array <MeasurementT>(ms));

    H5D.close(dataSetId);
    H5S.close(spaceId);
}
public void FirstDataset(Array dataset)
{
    // Guard clauses: the group/file must exist and the dataset must not.
    if (FalseGroupId)
    {
        throw new Exception("cannot call FirstDataset because group or file couldn't be created");
    }
    if (DatasetExists)
    {
        throw new Exception("cannot call FirstDataset because dataset already exists");
    }

    Rank = dataset.Rank;
    currentDims = GetDims(dataset);

    // Data space with unlimited dimensions so the dataset can be extended.
    spaceId = H5S.create_simple(Rank, currentDims, maxDims);

    // Chunking is required for extendible datasets.
    propId = H5P.create(H5P.DATASET_CREATE);
    status = H5P.set_chunk(propId, Rank, chunkDims);

    datasetId = H5D.create(GroupId, Datasetname, datatype, spaceId,
        H5P.DEFAULT, propId, H5P.DEFAULT);

    // Write the initial data.
    GCHandle pinned = GCHandle.Alloc(dataset, GCHandleType.Pinned);
    status = H5D.write(datasetId, datatype, H5S.ALL, H5S.ALL, H5P.DEFAULT,
        pinned.AddrOfPinnedObject());
    pinned.Free();

    // Only the dataspace is closed; datasetId/propId stay open, presumably
    // for subsequent appends — TODO confirm they are released elsewhere.
    H5S.close(spaceId);
}
/// <summary>
/// Creates (or, depending on settings, replaces) a dataset or attribute and
/// returns its id, or -1 when creation is refused/fails and ThrowOnError is off.
/// </summary>
private static long GetId(long parentId, string name, long dataType, long spaceId, Hdf5ElementType type)
{
    string normalizedName = NormalizedName(name);
    bool exists = ItemExists(parentId, normalizedName, type);
    if (exists)
    {
        LogMessage($"{normalizedName} already exists", Hdf5LogLevel.Debug);
        if (!Hdf5.Settings.OverrideExistingData)
        {
            if (Hdf5.Settings.ThrowOnError)
            {
                throw new Hdf5Exception($"{normalizedName} already exists");
            }
            return(-1);
        }
    }

    var datasetId = -1L;
    switch (type)
    {
    case Hdf5ElementType.Unknown:
        break;

    case Hdf5ElementType.Group:
    case Hdf5ElementType.Dataset:
        if (exists)
        {
            // Overriding: unlink the old object before re-creating it.
            H5L.delete(parentId, normalizedName);
        }
        datasetId = H5D.create(parentId, normalizedName, dataType, spaceId);
        break;

    case Hdf5ElementType.Attribute:
        if (exists)
        {
            H5A.delete(parentId, normalizedName);
        }
        datasetId = H5A.create(parentId, normalizedName, dataType, spaceId);
        break;

    default:
        throw new ArgumentOutOfRangeException(nameof(type), type, null);
    }

    if (datasetId == -1L)
    {
        string error = $"Unable to create dataset for {normalizedName}";
        // BUG FIX: the original logged "... already exists" here (copy-paste
        // from the branch above) instead of the actual creation error.
        LogMessage(error, Hdf5LogLevel.Error);
        if (Hdf5.Settings.ThrowOnError)
        {
            throw new Hdf5Exception(error);
        }
    }
    return(datasetId);
}
public static unsafe void AddSomeLinks(long fileId)
{
    // Populate the file with a small group tree and three tiny datasets:
    //   /simple, /simple/sub, /D, /simple/D1, /simple/sub/D1.1
    long res;

    var groupId = H5G.create(fileId, "simple");
    var groupId_sub = H5G.create(groupId, "sub");

    // Root-level dataset "D" with a single byte of data.
    var dataspaceId1 = H5S.create_simple(1, new ulong[] { 1 }, new ulong[] { 1 });
    var datasetId1 = H5D.create(fileId, "D", H5T.NATIVE_INT8, dataspaceId1);
    var data1 = new byte[] { 1 };
    fixed(void *ptr = data1)
    {
        res = H5D.write(datasetId1, H5T.NATIVE_INT8, dataspaceId1, dataspaceId1,
            0, new IntPtr(ptr));
    }
    res = H5D.close(datasetId1);
    res = H5S.close(dataspaceId1);

    // Empty dataset "D1" inside /simple.
    var dataspaceId2 = H5S.create_simple(1, new ulong[] { 1 }, new ulong[] { 1 });
    var datasetId2 = H5D.create(groupId, "D1", H5T.NATIVE_INT8, dataspaceId2);
    res = H5D.close(datasetId2);
    res = H5S.close(dataspaceId2);

    // Empty dataset "D1.1" inside /simple/sub.
    var dataspaceId3 = H5S.create_simple(1, new ulong[] { 1 }, new ulong[] { 1 });
    var datasetId3 = H5D.create(groupId_sub, "D1.1", H5T.NATIVE_INT8, dataspaceId3);
    res = H5D.close(datasetId3);
    res = H5S.close(dataspaceId3);

    res = H5G.close(groupId);
    res = H5G.close(groupId_sub);
}
public void H5DwriteTest1()
{
    // Write a UTF-8 encoded string into a scalar dataset in both the v0
    // and v2 format test files using a custom string datatype.
    string utf8string = "Γαζέες καὶ μυρτιὲς δὲν θὰ βρῶ πιὰ στὸ χρυσαφὶ ξέφωτο";
    byte[] wdata = Encoding.UTF8.GetBytes(utf8string);

    hid_t dtype = H5T.create(H5T.class_t.STRING, new IntPtr(wdata.Length));
    Assert.IsTrue(H5T.set_cset(dtype, H5T.cset_t.UTF8) >= 0);
    Assert.IsTrue(H5T.set_strpad(dtype, H5T.str_t.SPACEPAD) >= 0);

    hid_t dset_v0 = H5D.create(m_v0_test_file, "dset", dtype, m_space_scalar);
    Assert.IsTrue(dset_v0 >= 0);
    hid_t dset_v2 = H5D.create(m_v2_test_file, "dset", dtype, m_space_scalar);
    Assert.IsTrue(dset_v2 >= 0);

    // One pinned buffer serves both writes.
    GCHandle pinned = GCHandle.Alloc(wdata, GCHandleType.Pinned);
    Assert.IsTrue(H5D.write(dset_v0, dtype, H5S.ALL, H5S.ALL, H5P.DEFAULT,
        pinned.AddrOfPinnedObject()) >= 0);
    Assert.IsTrue(H5D.write(dset_v2, dtype, H5S.ALL, H5S.ALL, H5P.DEFAULT,
        pinned.AddrOfPinnedObject()) >= 0);
    pinned.Free();

    Assert.IsTrue(H5T.close(dtype) >= 0);
    Assert.IsTrue(H5D.close(dset_v2) >= 0);
    Assert.IsTrue(H5D.close(dset_v0) >= 0);
}
public void H5Dget_spaceTest1()
{
    hsize_t[] dims = { 1024, 2048 };
    // BUG FIX: the original passed rank 3 with a 2-element dims array,
    // which reads past the end of the array; the dataspace is 2-D.
    hid_t space = H5S.create_simple(2, dims, null);

    // Verify H5D.get_space returns an extent-equal copy in both files.
    hid_t dset = H5D.create(m_v0_test_file, "dset", H5T.STD_I16LE, space);
    Assert.IsTrue(dset >= 0);
    hid_t space1 = H5D.get_space(dset);
    Assert.IsTrue(space1 >= 0);
    Assert.IsTrue(H5S.extent_equal(space, space1) > 0);
    Assert.IsTrue(H5S.close(space1) >= 0);
    Assert.IsTrue(H5D.close(dset) >= 0);

    dset = H5D.create(m_v2_test_file, "dset", H5T.STD_I16LE, space);
    Assert.IsTrue(dset >= 0);
    space1 = H5D.get_space(dset);
    Assert.IsTrue(space1 >= 0);
    Assert.IsTrue(H5S.extent_equal(space, space1) > 0);
    Assert.IsTrue(H5S.close(space1) >= 0);
    Assert.IsTrue(H5D.close(dset) >= 0);

    Assert.IsTrue(H5S.close(space) >= 0);
}
public void H5DOappendTestSWMR2()
{
    // Append three values to an initially empty, unlimited 1-D chunked
    // dataset in the non-SWMR v3 test file via H5DO.append.
    hsize_t[] dims = { 0 };
    hsize_t[] maxdims = { H5S.UNLIMITED };
    hsize_t[] chunk_dims = { 10 };
    uint[] cbuf = { 123, 456, 789 };

    hid_t spaceId = H5S.create_simple(1, dims, maxdims);
    Assert.IsTrue(spaceId >= 0);

    hid_t createPlist = H5P.create(H5P.DATASET_CREATE);
    Assert.IsTrue(createPlist >= 0);
    Assert.IsTrue(H5P.set_chunk(createPlist, 1, chunk_dims) >= 0);

    hid_t datasetId = H5D.create(m_v3_test_file_no_swmr, "dset1",
        H5T.NATIVE_UINT, spaceId, H5P.DEFAULT, createPlist, H5P.DEFAULT);
    Assert.IsTrue(datasetId >= 0);

    GCHandle pinned = GCHandle.Alloc(cbuf, GCHandleType.Pinned);
    Assert.IsTrue(H5DO.append(datasetId, H5P.DEFAULT, 0, new IntPtr(3),
        H5T.NATIVE_UINT, pinned.AddrOfPinnedObject()) >= 0);
    pinned.Free();

    Assert.IsTrue(H5D.close(datasetId) >= 0);
    Assert.IsTrue(H5P.close(createPlist) >= 0);
    Assert.IsTrue(H5S.close(spaceId) >= 0);
}
/// <summary>
/// Writes an n-dimensional array as a new HDF5 dataset under
/// <paramref name="groupId"/> and returns the H5D.write status.
/// </summary>
public static int WriteDatasetFromArray <T>(hid_t groupId, string name, Array dset, string datasetName = null) //where T : struct
{
    int rank = dset.Rank;
    ulong[] dims = Enumerable.Range(0, rank)
                   .Select(i => (ulong)dset.GetLength(i))
                   .ToArray();
    ulong[] maxDims = null; // max size == current size
    var spaceId = H5S.create_simple(rank, dims, maxDims);

    var datatype = GetDatatype(typeof(T));
    var typeId = H5T.copy(datatype);
    if (datatype == H5T.C_S1)
    {
        // BUG FIX: size the private copy, not the predefined type. The
        // original called H5T.set_size(datatype, ...), mutating the globally
        // shared H5T.C_S1, while the copy was created and never used.
        H5T.set_size(typeId, new IntPtr(2));
    }

    var datasetId = H5D.create(groupId, name, typeId, spaceId);

    GCHandle hnd = GCHandle.Alloc(dset, GCHandleType.Pinned);
    var result = H5D.write(datasetId, typeId, H5S.ALL, H5S.ALL, H5P.DEFAULT,
        hnd.AddrOfPinnedObject());
    hnd.Free();

    H5D.close(datasetId);
    H5S.close(spaceId);
    H5T.close(typeId);
    return(result);
}
private static void WriteFile(string filePath)
{
    // Create a file containing /group, a 3x4 integer dataset at
    // /group/dataset, and two scalar attributes (int + ASCII string)
    // attached to that dataset.
    var file = H5F.create(filePath, H5F.CreateMode.ACC_TRUNC);

    var group = H5G.create(file, "/group");
    H5G.close(group);

    const int RANK = 2;
    const int DIM0 = 3;
    const int DIM1 = 4;

    var dims = new long[RANK] { DIM0, DIM1 };
    var dataSpace = H5S.create_simple(RANK, dims);
    var dataSet = H5D.create(file, "/group/dataset", H5T.H5Type.NATIVE_INT, dataSpace);
    H5S.close(dataSpace);

    var data = new int[DIM0, DIM1]
    {
        { 1, 2, 3, 4 },
        { 5, 6, 7, 8 },
        { 9, 10, 11, 12 }
    };
    H5D.write(dataSet, new H5DataTypeId(H5T.H5Type.NATIVE_INT), new H5Array <int>(data));

    // Scalar integer attribute.
    var dataType = new H5DataTypeId(H5T.H5Type.NATIVE_INT);
    dataSpace = H5S.create(H5S.H5SClass.SCALAR);
    var integerAttribute = H5A.create(dataSet, "int", dataType, dataSpace);
    H5A.write(integerAttribute, dataType, new H5Array <int>(new int[1] { 42 }));
    H5A.close(integerAttribute);
    H5S.close(dataSpace);
    // dataType wraps a predefined (read-only) type — no close needed/allowed.

    // Scalar string attribute. There is a H5T.get_cset, but there does not
    // seem to be a way of setting the character encoding, i.e. set_cset.
    var str = "Hello, world!";
    var strBytes = Encoding.ASCII.GetBytes(str);
    dataType = H5T.copy(H5T.H5Type.C_S1);
    H5T.setSize(dataType, strBytes.Length);
    dataSpace = H5S.create(H5S.H5SClass.SCALAR);
    var stringAttribute = H5A.create(dataSet, "string", dataType, dataSpace);
    H5A.write(stringAttribute, dataType, new H5Array <byte>(strBytes));
    H5A.close(stringAttribute);
    H5S.close(dataSpace);
    H5T.close(dataType);

    H5D.close(dataSet);
    H5F.close(file);
}
public void H5RcreateTest4()
{
    // Build a UTF-8 link name from the test strings, create a dataset under
    // it, make a region reference to a hyperslab, and verify that the name
    // and region recovered from the reference match what was stored.
    byte[] path = Encoding.UTF8.GetBytes(String.Join("/", m_utf8strings));
    // make room for the trailling \0
    byte[] name = new byte[path.Length + 1];
    Array.Copy(path, name, path.Length);

    hsize_t[] dims = new hsize_t[] { 10, 20 };
    hid_t space = H5S.create_simple(2, dims, null);
    Assert.IsTrue(space >= 0);

    hid_t dset = H5D.create(m_v2_test_file, name, H5T.STD_I32LE, space, m_lcpl_utf8);
    Assert.IsTrue(dset >= 0);

    // Select a 2x4 block at (5, 10).
    hsize_t[] start = { 5, 10 };
    hsize_t[] count = { 1, 1 };
    hsize_t[] block = { 2, 4 };
    Assert.IsTrue(H5S.select_hyperslab(space, H5S.seloper_t.SET,
        start, null, count, block) >= 0);

    byte[] refer = new byte[H5R.DSET_REG_REF_BUF_SIZE];
    GCHandle pinned = GCHandle.Alloc(refer, GCHandleType.Pinned);
    Assert.IsTrue(H5R.create(pinned.AddrOfPinnedObject(), m_v2_test_file, name,
        H5R.type_t.DATASET_REGION, space) >= 0);

    // First call with a null buffer returns the name length only.
    ssize_t size = H5R.get_name(m_v2_test_file, H5R.type_t.DATASET_REGION,
        pinned.AddrOfPinnedObject(), (byte[])null, IntPtr.Zero);
    Assert.IsTrue(size.ToInt32() == name.Length);

    byte[] buf = new byte[size.ToInt32() + 1];
    size = H5R.get_name(m_v2_test_file, H5R.type_t.DATASET_REGION,
        pinned.AddrOfPinnedObject(), buf, new IntPtr(buf.Length));
    Assert.IsTrue(size.ToInt32() == name.Length);

    // we need to account for the leading "/", which was not included in path
    for (int i = 0; i < name.Length; ++i)
    {
        Assert.IsTrue(name[i] == buf[i + 1]);
    }

    hid_t sel = H5R.get_region(dset, H5R.type_t.DATASET_REGION,
        pinned.AddrOfPinnedObject());
    Assert.IsTrue(sel >= 0);
    pinned.Free();

    Assert.IsTrue(H5S.extent_equal(space, sel) > 0);
    Assert.IsTrue(H5S.get_select_hyper_nblocks(space)
        == H5S.get_select_hyper_nblocks(sel));

    Assert.IsTrue(H5S.close(sel) >= 0);
    Assert.IsTrue(H5D.close(dset) >= 0);
    Assert.IsTrue(H5S.close(space) >= 0);
}
static void test_file_open()
{
    try
    {
        // Output message about test being performed.
        Console.Write("Testing file opening I/O");

        // First ensure the file does not exist.
        File.Delete(FILE2);

        // Opening a non-existent file must fail.
        try
        {
            H5FileId non_exist_file = H5F.open(FILE2, H5F.OpenMode.ACC_RDWR);
            // Reached only if the open unexpectedly succeeded.
            Console.WriteLine("\ntest_file_open: Attempting to open a non-existent file.");
            nerrors++;
        }
        catch (H5FopenException)
        {
            // Expected: the open failed.
        }

        // Open an existing file and create a dataset at file level and
        // another inside a group.
        H5FileId fileId = H5F.open(FILE1, H5F.OpenMode.ACC_RDWR);

        hssize_t[] dims = { 20 };
        H5DataSpaceId dspace = H5S.create_simple(RANK, dims);

        H5GroupId groupId = H5G.create(fileId, GROUP_NAME);

        // Dataset with the file as location.
        H5DataSetId dset1Id = H5D.create(fileId, DSET1_NAME,
            H5T.H5Type.NATIVE_INT, dspace);
        // Dataset with the group as location.
        H5DataSetId dset2Id = H5D.create(groupId, DSET2_NAME,
            H5T.H5Type.NATIVE_SHORT, dspace);

        // Close objects and files.
        H5D.close(dset1Id);
        H5D.close(dset2Id);
        H5S.close(dspace);
        H5G.close(groupId);
        H5F.close(fileId);

        Console.WriteLine("\t\t\t\tPASSED");
    }
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_file_open
/// <summary>
/// Creates a dataset at <c>_parentPath/_name</c> with the given element type
/// and dimension sizes, and returns a descriptor for it (or null on failure).
/// </summary>
public static Hdf5Dataset CreateDataset(
    Hdf5Identifier _fileId,
    Hdf5Path _parentPath,
    string _name,
    Hdf5DataTypes _datatype,
    int _numberOfDimensions,
    List <Hdf5DimensionProperty> _properties)
{
    Hdf5Path path = _parentPath.Append(_name);

    // Fixed-size dataspace: maximum size is left null (== current size).
    // NOTE(review): MaximumSize/H5S.UNLIMITED handling appears to have been
    // intentionally disabled here — confirm before re-enabling.
    UInt64[] dimensionSize = new UInt64[_numberOfDimensions];
    UInt64[] maxSize = null;
    int i = 0;
    foreach (var property in _properties)
    {
        dimensionSize[i] = property.CurrentSize;
        i++;
    }

    Hdf5Identifier dataspaceId =
        H5S.create_simple(_numberOfDimensions, dimensionSize, maxSize).ToId();

    //TODO handle string datasets
    Hdf5Identifier typeId =
        H5T.copy(TypeHelper.GetNativeType(_datatype).Value).ToId();
    var status = H5T.set_order(typeId.Value, H5T.order_t.LE);

    Hdf5Identifier datasetId =
        H5D.create(_fileId.Value, path.FullPath, typeId.Value, dataspaceId.Value).ToId();

    Hdf5Dataset dataset = null;
    if (datasetId.Value > 0)
    {
        dataset = new Hdf5Dataset(_fileId, datasetId, path.FullPath)
        {
            DataType = TypeHelper.GetDataTypeFromDataset(datasetId),
            Dataspace = DataspaceHelper.GetDataspace(datasetId)
        };
        // The native handle is closed here; the descriptor presumably
        // reopens by path when needed — TODO confirm.
        H5D.close(datasetId.Value);
    }

    H5T.close(typeId.Value);
    FileHelper.FlushToFile(_fileId);

    return(dataset);
}
public void H5Dget_chunk_infoTest1()
{
    // Query chunk info (index 8) of a 10x10 dataset with 4x4 chunks that is
    // fully allocated at creation (EARLY alloc + fill at ALLOC), in both
    // file-format test files.
    hsize_t[] dims = { 10, 10 };
    hsize_t[] max_dims = { H5S.UNLIMITED, H5S.UNLIMITED };
    hid_t space = H5S.create_simple(2, dims, max_dims);

    hid_t dcpl = H5P.create(H5P.DATASET_CREATE);
    Assert.IsTrue(dcpl >= 0);
    hsize_t[] chunk = { 4, 4 };
    Assert.IsTrue(H5P.set_chunk(dcpl, 2, chunk) >= 0);
    Assert.IsTrue(H5P.set_alloc_time(dcpl, H5D.alloc_time_t.EARLY) >= 0);
    Assert.IsTrue(H5P.set_fill_time(dcpl, H5D.fill_time_t.ALLOC) >= 0);

    hid_t dset = H5D.create(m_v0_test_file, "Early Bird1", H5T.IEEE_F32BE,
        space, H5P.DEFAULT, dcpl);
    Assert.IsTrue(dset >= 0);

    // This should work but doesn't:
    // Assert.IsTrue(H5D.get_num_chunks(dset, H5S.ALL, ref nchunks) >= 0);
    hid_t fspace = H5D.get_space(dset);
    Assert.IsTrue(fspace >= 0);
    Assert.IsTrue(H5S.select_all(fspace) >= 0);

    hsize_t index = 8, size = 0;
    hsize_t[] offset = { 4711, 4712 };
    uint32_t filter_mask = 0;
    haddr_t addr = 0;
    Assert.IsTrue(H5D.get_chunk_info(dset, fspace, index, offset,
        ref filter_mask, ref addr, ref size) >= 0);
    Assert.IsTrue(offset[0] > 0);
    Assert.IsTrue(filter_mask == 0 && size > 0 && addr > 0);

    // FIX: the original leaked fspace (twice), dcpl, and space.
    Assert.IsTrue(H5S.close(fspace) >= 0);
    Assert.IsTrue(H5D.close(dset) >= 0);

    dset = H5D.create(m_v2_test_file, "Early Bird1", H5T.IEEE_F32BE,
        space, H5P.DEFAULT, dcpl);
    Assert.IsTrue(dset >= 0);

    // This should work but doesn't:
    // Assert.IsTrue(H5D.get_num_chunks(dset, H5S.ALL, ref nchunks) >= 0);
    fspace = H5D.get_space(dset);
    Assert.IsTrue(fspace >= 0);
    Assert.IsTrue(H5S.select_all(fspace) >= 0);

    Assert.IsTrue(H5D.get_chunk_info(dset, fspace, index, offset,
        ref filter_mask, ref addr, ref size) >= 0);
    Assert.IsTrue(offset[0] > 0);
    Assert.IsTrue(filter_mask == 0 && size > 0 && addr > 0);

    Assert.IsTrue(H5S.close(fspace) >= 0);
    Assert.IsTrue(H5D.close(dset) >= 0);
    Assert.IsTrue(H5P.close(dcpl) >= 0);
    Assert.IsTrue(H5S.close(space) >= 0);
}
public void H5RdereferenceTest4()
{
    // Create a dataset under a UTF-8 link name, take a region reference to
    // it, dereference the reference, and verify the object address matches.
    byte[] path = Encoding.UTF8.GetBytes(String.Join("/", m_utf8strings));
    // make room for the trailling \0
    byte[] name = new byte[path.Length + 1];
    Array.Copy(path, name, path.Length);

    hsize_t[] dims = new hsize_t[] { 10, 20 };
    hid_t space = H5S.create_simple(2, dims, null);
    Assert.IsTrue(space >= 0);

    hid_t dset = H5D.create(m_v2_test_file, name, H5T.STD_I32LE, space, m_lcpl_utf8);

    // Remember the object's address, then close it.
    H5O.info_t info = new H5O.info_t();
    Assert.IsTrue(H5O.get_info(dset, ref info) >= 0);
    haddr_t address = info.addr;
    Assert.IsTrue(H5D.close(dset) >= 0);
    Assert.IsTrue(dset >= 0);

    // Select a 2x4 block at (5, 10) for the region reference.
    hsize_t[] start = { 5, 10 };
    hsize_t[] count = { 1, 1 };
    hsize_t[] block = { 2, 4 };
    Assert.IsTrue(H5S.select_hyperslab(space, H5S.seloper_t.SET,
        start, null, count, block) >= 0);

    byte[] refer = new byte[H5R.DSET_REG_REF_BUF_SIZE];
    GCHandle pinned = GCHandle.Alloc(refer, GCHandleType.Pinned);
    Assert.IsTrue(H5R.create(pinned.AddrOfPinnedObject(), m_v2_test_file, name,
        H5R.type_t.DATASET_REGION, space) >= 0);

#if HDF5_VER1_10
    dset = H5R.dereference(m_v2_test_file, H5P.DEFAULT,
        H5R.type_t.DATASET_REGION, pinned.AddrOfPinnedObject());
#else
    dset = H5R.dereference(m_v2_test_file,
        H5R.type_t.DATASET_REGION, pinned.AddrOfPinnedObject());
#endif
    Assert.IsTrue(dset >= 0);
    pinned.Free();

    // The dereferenced object must be the original dataset.
    Assert.IsTrue(H5O.get_info(dset, ref info) >= 0);
    Assert.IsTrue(address == info.addr);

    Assert.IsTrue(H5D.close(dset) >= 0);
    Assert.IsTrue(H5S.close(space) >= 0);
}
/// <summary>
/// Opens the dataset at <paramref name="datasetPath"/>, or creates it as a
/// 1-D shuffled/deflated dataset of chunkLength*chunkCount elements.
/// </summary>
public static (long DatasetId, bool IsNew) OpenOrCreateDataset(long locationId, string datasetPath, long datasetTypeId, ulong chunkLength, ulong chunkCount, IntPtr fillValue = default)
{
    // The factory below runs only when the dataset does not exist yet.
    return(IOHelper.OpenOrCreateDataset(locationId, datasetPath, datasetTypeId, () =>
    {
        long dcPropertyId = -1;
        long lcPropertyId = -1;
        long dataspaceId = -1;
        long datasetId = -1;

        try
        {
            // Creation properties: optional fill value, shuffle + deflate
            // filters, and 1-D chunking.
            dcPropertyId = H5P.create(H5P.DATASET_CREATE);

            if (fillValue != IntPtr.Zero)
            {
                H5P.set_fill_value(dcPropertyId, datasetTypeId, fillValue);
            }

            H5P.set_shuffle(dcPropertyId);
            H5P.set_deflate(dcPropertyId, 7);
            H5P.set_chunk(dcPropertyId, 1, new ulong[] { chunkLength });

            // Link creation: allow intermediate groups in datasetPath.
            lcPropertyId = H5P.create(H5P.LINK_CREATE);
            H5P.set_create_intermediate_group(lcPropertyId, 1);

            dataspaceId = H5S.create_simple(1, new ulong[] { chunkLength *chunkCount }, null);
            datasetId = H5D.create(locationId, datasetPath, datasetTypeId,
                dataspaceId, lcPropertyId, dcPropertyId);

            if (H5I.is_valid(datasetId) <= 0)
            {
                throw new Exception($"{ ErrorMessage.IOHelper_CouldNotOpenOrCreateDataset } Dataset: '{ datasetPath }'.");
            }
        }
        finally
        {
            // Release intermediate handles; the dataset id itself is returned.
            if (H5I.is_valid(dcPropertyId) > 0) { H5P.close(dcPropertyId); }
            if (H5I.is_valid(lcPropertyId) > 0) { H5P.close(lcPropertyId); }
            if (H5I.is_valid(dataspaceId) > 0) { H5S.close(dataspaceId); }
        }

        return datasetId;
    }));
}
/// <summary>
/// Creates a dataset with the given shape (optionally unlimited per
/// dimension, optionally chunked+deflated) and returns a wrapper for it.
/// </summary>
public HDF5DataSet CreateDataset(string name, ulong[] shape, Type dType, long maxSize = 1, bool[] unlimited = null, ulong[] chunkShape = null, bool compress = false)
{
    HDF5DataSet result = null;
    With((id) =>
    {
        int nDims = shape.Length;

        // Default: no dimension is unlimited.
        if (unlimited == null)
        {
            unlimited = Enumerable.Range(0, nDims).Select(d => false).ToArray();
        }
        ulong[] maxShape = Enumerable.Range(0, nDims)
                           .Select(d => unlimited[d] ? H5S.UNLIMITED : shape[d])
                           .ToArray();

        var dataspaceID = H5S.create_simple(nDims, shape, maxShape);
        long dataTypeID = HDF5DataSet.OpenHDFDataType(dType, maxSize);

        // Compression requires a chunked layout; chunk defaults to the
        // full shape when not specified.
        long creationPropertyList = 0L;
        if (compress)
        {
            if (chunkShape == null)
            {
                chunkShape = shape;
            }
            creationPropertyList = H5P.create(H5P.DATASET_CREATE);
            H5P.set_layout(creationPropertyList, H5D.layout_t.CHUNKED);
            H5P.set_deflate(creationPropertyList, 9);
            H5P.set_chunk(creationPropertyList, shape.Length, chunkShape);
        }

        var newID = H5D.create(id, name, dataTypeID, dataspaceID, 0L,
            creationPropertyList, 0L);

        if (creationPropertyList > 0)
        {
            H5P.close(creationPropertyList);
        }
        H5T.close(dataTypeID);
        H5S.close(dataspaceID);

        if (newID <= 0)
        {
            throw new H5SSException("Couldn't create DataSet");
        }

        // write!
        H5D.close(newID);
        result = new HDF5DataSet(name, this);
    });
    return(result);
}
static void Main2222(string[] args)
{
    // Demo: write a 10000-row table of a 40-byte compound type to D:\test.h5.
    var h5 = H5F.create(@"D:\test.h5", H5F.ACC_TRUNC);

    // Compound type: 16-byte string + 2 ints + float (+4 pad) + double = 40.
    var typeId = H5T.create(H5T.class_t.COMPOUND, new IntPtr(40));
    var strtype = H5T.copy(H5T.C_S1);
    H5T.set_size(strtype, new IntPtr(16));
    H5T.insert(typeId, "Name", new IntPtr(0), strtype);
    H5T.insert(typeId, "x_pos", new IntPtr(16), H5T.NATIVE_INT32);
    H5T.insert(typeId, "y_pos", new IntPtr(20), H5T.NATIVE_INT32);
    H5T.insert(typeId, "Mass", new IntPtr(24), H5T.NATIVE_FLOAT);
    H5T.insert(typeId, "Temperature", new IntPtr(32), H5T.NATIVE_DOUBLE);

    ulong[] dims = new ulong[] { 10000 };
    ulong[] chunk_size = new ulong[] { 1000 };
    var spaceid = H5S.create_simple(dims.Length, dims, null);

    var dcpl = H5P.create(H5P.DATASET_CREATE);
    // BUG FIX: the original also requested a COMPACT layout, which conflicts
    // with chunking; set_chunk switches the layout to CHUNKED, as deflate
    // compression requires.
    H5P.set_deflate(dcpl, 6);
    H5P.set_chunk(dcpl, chunk_size.Length, chunk_size);

    var datasetid = H5D.create(h5, "Table1", typeId, spaceid, H5P.DEFAULT, dcpl);

    ComType ct = new ComType()
    {
        Name = "aabb",
        x_pos = 2,
        y_pos = 1,
        Mass = 1.24F,
        Temperature = 45.7,
    };

    const int recordSize = 40;
    IntPtr p = Marshal.AllocHGlobal(recordSize * (int)dims[0]);
    try
    {
        // BUG FIX: marshal every record. The original marshaled only the
        // first struct and wrote 9,999 records of uninitialized heap memory.
        for (int i = 0; i < (int)dims[0]; i++)
        {
            Marshal.StructureToPtr(ct, IntPtr.Add(p, recordSize * i), false);
        }
        H5D.write(datasetid, typeId, spaceid, H5S.ALL, H5P.DEFAULT, p);
    }
    finally
    {
        // BUG FIX: the original leaked the unmanaged buffer.
        Marshal.FreeHGlobal(p);
    }

    // BUG FIX: the original leaked every handle except the file.
    H5D.close(datasetid);
    H5P.close(dcpl);
    H5S.close(spaceid);
    H5T.close(strtype);
    H5T.close(typeId);
    H5F.close(h5);
}
static void test_attr_plist()
{
    try
    {
        Console.Write("Testing attribute property lists");

        const string PLST_FILE_NAME = ("tattr_plist.h5");
        // (Removed an unused local `dims` = {256, 512} from the original.)
        hssize_t[] dims1 = { SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3 };
        hssize_t[] dims2 = { ATTR1_DIM };

        // Create file.
        H5FileId fileId = H5F.create(PLST_FILE_NAME, H5F.CreateMode.ACC_TRUNC);

        // Create dataspace for dataset.
        H5DataSpaceId space1_Id = H5S.create_simple(SPACE1_RANK, dims1);

        // Create a dataset.
        H5DataSetId dsetId = H5D.create(fileId, DSET1_NAME,
            H5T.H5Type.NATIVE_UCHAR, space1_Id);

        // Create dataspace for attribute.
        H5DataSpaceId space2_Id = H5S.create_simple(ATTR1_RANK, dims2);

        // Create default property list for attribute.
        H5PropertyListId plist = H5P.create(H5P.PropertyListClass.ATTRIBUTE_CREATE);

        // Create an attribute for the dataset using the property list.
        H5AttributeId attrId = H5A.create(dsetId, ATTR1_NAME,
            new H5DataTypeId(H5T.H5Type.NATIVE_INT), space2_Id, plist);

        // Close all objects.
        H5S.close(space1_Id);
        H5S.close(space2_Id);
        H5P.close(plist);
        H5A.close(attrId);
        H5D.close(dsetId);
        H5F.close(fileId);

        Console.WriteLine("\t\t\tPASSED");
    }
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_attr_plist
/// <summary>
/// Create mapping index - assumes that matrices are square and uses first
/// dimension as the map size
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="tazEquiv">array the size of the matrix table dimension</param>
/// <param name="mapDataType">Data type of array</param>
/// <param name="mapName">Name of index map</param>
public void CreateMapping <T>(T[] tazEquiv, H5DataTypeId mapDataType, string mapName)
{
    // 1-D dataspace sized to the first matrix dimension.
    long[] oneDShape = { Shape[0] };
    H5DataSpaceId mapSpaceId = H5S.create_simple(1, oneDShape);

    H5DataSetId newMappingID = H5D.create(luGroup, mapName, mapDataType, mapSpaceId);
    H5D.write(newMappingID, mapDataType, new H5Array <T>(tazEquiv));

    // Register the new map; the dataset id is kept open in indexMaps.
    IndexMapNames.Add(mapName);
    NumIndexMap++;
    this.indexMaps.Add(mapName, newMappingID);
}
/// <summary>
/// Marshals the tick list into an unmanaged buffer and writes it as a new
/// compound dataset named <paramref name="dataset_name"/>.
/// </summary>
public void Writer(List <PbTickView> list, string dataset_name)
{
    var t = typeof(PbTickStruct5);
    var size = Marshal.SizeOf(t);
    var typeId = create_type();

    // chunk一定得小于dim数据量,多了会出错
    // (chunk must be smaller than the dataset extent)
    // Chunk size: largest power of ten <= count, capped at 1000.
    var log10 = (int)Math.Log10(list.Count);
    ulong pow = (ulong)Math.Pow(10, log10);
    ulong c_s = Math.Min(1000, pow);
    ulong[] chunk_size = new ulong[] { c_s };

    ulong[] dims = new ulong[] { (ulong)list.Count };

    // Empty/tiny lists keep the default layout; dcpl == 0 is H5P.DEFAULT.
    long dcpl = 0;
    if (list.Count != 0 && log10 != 0)
    {
        dcpl = create_property(chunk_size);
    }

    var spaceid = H5S.create_simple(dims.Length, dims, null);
    var datasetid = H5D.create(h5, dataset_name, typeId, spaceid, H5P.DEFAULT, dcpl);

    IntPtr p = Marshal.AllocHGlobal(size * (int)dims[0]);
    int i = 0;
    foreach (var b in list)
    {
        var s = DataConvert.toStruct(b);
        // BUG FIX: IntPtr.Add is 64-bit safe; the original used
        // p.ToInt32() + size * i, which truncates/overflows the pointer
        // in 64-bit processes.
        Marshal.StructureToPtr(s, IntPtr.Add(p, size * i), false);
        ++i;
    }

    H5D.write(datasetid, typeId, spaceid, H5S.ALL, H5P.DEFAULT, p);

    H5D.close(datasetid);
    H5S.close(spaceid);
    H5T.close(typeId);
    H5P.close(dcpl);
    Marshal.FreeHGlobal(p);
}
/// <summary>
/// Creates a dataset or attribute named <paramref name="elementName"/> inside
/// <paramref name="groupName"/> (creating the group if needed) and writes
/// <paramref name="dataPtr"/> into it when non-null.
/// </summary>
public static unsafe void Add(ContainerType container, long fileId, string groupName, string elementName, long typeId, void *dataPtr, long spaceId, long cpl = 0, long apl = 0)
{
    long res;

    long groupId;
    if (H5L.exists(fileId, groupName) > 0)
    {
        groupId = H5G.open(fileId, groupName);
    }
    else
    {
        groupId = H5G.create(fileId, groupName);
    }

    long id;
    if (container == ContainerType.Dataset)
    {
        id = H5D.create(groupId, Encoding.UTF8.GetBytes(elementName), typeId,
            spaceId, dcpl_id: cpl, dapl_id: apl);
        if (id == -1)
        {
            throw new Exception("Could not create dataset.");
        }
        // BUG FIX: compare the pointer itself; the original cast it to int,
        // truncating the upper 32 bits in 64-bit processes.
        if (dataPtr != null)
        {
            res = H5D.write(id, typeId, spaceId, H5S.ALL, 0, new IntPtr(dataPtr));
        }
        res = H5D.close(id);
    }
    else
    {
        id = H5A.create(groupId, Encoding.UTF8.GetBytes(elementName), typeId,
            spaceId, acpl_id: cpl);
        if (id == -1)
        {
            throw new Exception("Could not create attribute.");
        }
        if (dataPtr != null)
        {
            res = H5A.write(id, typeId, new IntPtr(dataPtr));
        }
        res = H5A.close(id);
    }

    res = H5G.close(groupId);
}
/// <summary>
/// Writes a single measurement as a one-element dataset under the group.
/// </summary>
private void Write(H5GroupId parent, string name, IMeasurement m)
{
    // One-element, 1-D dataspace for the scalar write.
    H5DataSpaceId spaceId = H5S.create_simple(1, new long[1] { (long)1 });
    H5DataSetId dataSetId = H5D.create(parent, name, measurement_t, spaceId);

    MeasurementT mt = Convert(m);
    H5D.writeScalar <MeasurementT>(dataSetId, measurement_t, ref mt);

    H5D.close(dataSetId);
    H5S.close(spaceId);
}
/// <summary>
/// Writes a single double as a one-element NATIVE_DOUBLE dataset.
/// </summary>
private void Write(H5GroupId parent, string name, double d)
{
    // One-element, 1-D dataspace for the scalar write.
    H5DataSpaceId spaceId = H5S.create_simple(1, new long[1] { 1 });
    H5DataSetId dataSetId = H5D.create(parent, name, H5T.H5Type.NATIVE_DOUBLE, spaceId);

    double data = d;
    H5D.writeScalar <double>(dataSetId,
        new H5DataTypeId(H5T.H5Type.NATIVE_DOUBLE), ref data);

    H5D.close(dataSetId);
    H5S.close(spaceId);
}