/// <summary>
/// On first call, creates a chunked, unlimited-dimension dataset sized from
/// <paramref name="dataset"/> and writes it; on later calls appends via
/// <see cref="AppendDataset"/>. Chunk dimensions come from the first two
/// array dimensions.
/// </summary>
/// <param name="dataset">Array to write; must be at least 2-D with no empty dimension.</param>
public void AppendOrCreateDataset(Array dataset)
{
    if (_chunkDims == null)
    {
        // BUG FIX: _chunkDims reads dimensions 0 and 1, so a 1-D array used to
        // throw IndexOutOfRangeException here; reject it (and empty dimensions)
        // up front instead of crashing.
        if (dataset.Rank < 2)
        {
            Hdf5Utils.LogError?.Invoke("Array with rank < 2 was passed. Ignoring.");
            return;
        }
        for (int dimension = 0; dimension < dataset.Rank; dimension++)
        {
            if (dataset.GetLongLength(dimension) == 0)
            {
                Hdf5Utils.LogError?.Invoke($"Empty array was passed for dimension {dimension}. Ignoring.");
                return;
            }
        }

        _chunkDims = new[]
        {
            Convert.ToUInt64(dataset.GetLongLength(0)),
            Convert.ToUInt64(dataset.GetLongLength(1))
        };
        Rank = dataset.Rank;
        _currentDims = GetDims(dataset);

        /* Create the data space with unlimited dimensions. */
        _spaceId = H5S.create_simple(Rank, _currentDims, _maxDims);

        /* Modify dataset creation properties, i.e. enable chunking */
        _propId = H5P.create(H5P.DATASET_CREATE);
        _status = H5P.set_chunk(_propId, Rank, _chunkDims);

        /* Create a new dataset within the file using chunk creation properties. */
        _datasetId = H5D.create(GroupId, Hdf5Utils.NormalizedName(Datasetname),
                                _datatype, _spaceId, H5P.DEFAULT, _propId);

        /* Write data to dataset */
        GCHandle hnd = GCHandle.Alloc(dataset, GCHandleType.Pinned);
        _status = H5D.write(_datasetId, _datatype, H5S.ALL, H5S.ALL, H5P.DEFAULT,
                            hnd.AddrOfPinnedObject());
        hnd.Free();

        // The dataspace is no longer needed once the initial write completes.
        H5S.close(_spaceId);
        _spaceId = -1;
    }
    else
    {
        AppendDataset(dataset);
    }
}
// SWMR flush test: creates a chunked 6x6 int dataset (second dimension
// extendible) in the SWMR-enabled test file, writes a zeroed buffer, and
// verifies H5D.flush succeeds before tearing the handles down.
public void H5DflushTestSWMR2()
{
    hsize_t[] dims = { 6, 6 };
    hsize_t[] maxdims = { 6, H5S.UNLIMITED };
    // Chunking is mandatory for an extendible dataset.
    hsize_t[] chunk_dims = { 2, 5 };
    int[] cbuf = new int[36];

    hid_t dsp = H5S.create_simple(2, dims, maxdims);
    Assert.IsTrue(dsp >= 0);

    hid_t dcpl = H5P.create(H5P.DATASET_CREATE);
    Assert.IsTrue(dcpl >= 0);
    Assert.IsTrue(H5P.set_chunk(dcpl, 2, chunk_dims) >= 0);

    hid_t dst = H5D.create(m_v3_test_file_swmr, "dset", H5T.NATIVE_INT, dsp,
                           H5P.DEFAULT, dcpl);
    Assert.IsTrue(dst >= 0);

    // Pin the managed buffer so the native library can read it directly.
    GCHandle hnd = GCHandle.Alloc(cbuf, GCHandleType.Pinned);
    Assert.IsTrue(H5D.write(dst, H5T.NATIVE_INT, H5S.ALL, H5S.ALL,
                            H5P.DEFAULT, hnd.AddrOfPinnedObject()) >= 0);
    hnd.Free();

    // The call under test: flushing a dataset opened for SWMR writing.
    Assert.IsTrue(H5D.flush(dst) >= 0);

    Assert.IsTrue(H5D.close(dst) >= 0);
    Assert.IsTrue(H5P.close(dcpl) >= 0);
    Assert.IsTrue(H5S.close(dsp) >= 0);
}
/// <summary>
/// Appends <paramref name="data"/> to a 1-D dataset starting at row
/// <paramref name="rows"/> and advances <paramref name="rows"/> by the count
/// written. The dataset is assumed to already have room for the new rows.
/// </summary>
/// <exception cref="Exception">Any HDF5 call returns a negative status.</exception>
private static void AppendData<T>(hid_t dataSet, T[] data, ref int rows)
{
    var fileSpace = H5D.get_space(dataSet);
    if (fileSpace < 0)
    {
        throw new Exception("Failed to get data space of data set.");
    }

    // BUG FIX: the original leaked fileSpace/memSpace whenever a later call
    // failed and threw; close them in finally blocks.
    try
    {
        var offset = new ulong[] { (ulong)rows };
        var count = new ulong[] { (ulong)data.Length };
        if (H5S.select_hyperslab(fileSpace, H5S.seloper_t.SET, offset, null, count, null) < 0)
        {
            throw new Exception("H5S.select_hyperslab failed.");
        }

        var memSpace = H5S.create_simple(1, count, null);
        if (memSpace < 0)
        {
            throw new Exception("H5S.create_simple failed.");
        }

        try
        {
            if (H5D.write(dataSet, NumericTypeToHDF5Type<T>(), memSpace, fileSpace,
                          H5P.DEFAULT, new PinnedObject(data)) < 0)
            {
                throw new Exception("H5D.write failed.");
            }
        }
        finally
        {
            H5S.close(memSpace);
        }
    }
    finally
    {
        H5S.close(fileSpace);
    }

    rows += data.Length;
}
/// <summary>
/// Writes the measurements as a 1-D compound dataset named <paramref name="name"/>
/// under <paramref name="parent"/>, chunked and deflate-compressed.
/// </summary>
private void Write(H5GroupId parent, string name, IEnumerable<IMeasurement> measurements)
{
    // Materialize once: the original called Count() three times and then
    // iterated, re-enumerating the sequence four times.
    IMeasurement[] items = measurements.ToArray();

    H5DataSpaceId spaceId = H5S.create_simple(1, new long[1] { items.LongLength });

    // Set compression options for dataset
    H5PropertyListId dataSetPropertyList = H5P.create(H5P.PropertyListClass.DATASET_CREATE);
    H5P.setDeflate(dataSetPropertyList, NumericDataCompression);
    H5P.setChunk(dataSetPropertyList, new long[] { items.LongLength });

    H5DataSetId dataSetId = H5D.create(parent, name, measurement_t, spaceId,
                                       new H5PropertyListId(H5P.Template.DEFAULT),
                                       dataSetPropertyList,
                                       new H5PropertyListId(H5P.Template.DEFAULT));

    // Convert each measurement to its on-disk struct representation.
    MeasurementT[] ms = new MeasurementT[items.Length];
    int ilmCount = 0;
    foreach (IMeasurement m in items)
    {
        ms[ilmCount++] = Convert(m);
    }

    H5D.write<MeasurementT>(dataSetId, measurement_t, new H5Array<MeasurementT>(ms));

    H5D.close(dataSetId);
    H5S.close(spaceId);
    // BUG FIX: the creation property list was leaked in the original.
    H5P.close(dataSetPropertyList);
}
// Test/fixture helper: populates a file with a small object hierarchy so link
// traversal can be exercised — group "simple" with subgroup "sub", a root
// dataset "D" (written with a single byte), and empty datasets "D1" and "D1.1"
// in the group and subgroup respectively. All ids except the returned-nothing
// locals are closed before returning; H5D/H5S/H5G return codes are captured in
// 'res' but deliberately not checked (best-effort fixture setup).
public static unsafe void AddSomeLinks(long fileId)
{
    long res;

    var groupId = H5G.create(fileId, "simple");
    var groupId_sub = H5G.create(groupId, "sub");

    // datasets
    var dataspaceId1 = H5S.create_simple(1, new ulong[] { 1 }, new ulong[] { 1 });
    var datasetId1 = H5D.create(fileId, "D", H5T.NATIVE_INT8, dataspaceId1);
    var data1 = new byte[] { 1 };

    // Pin the managed byte via 'fixed' for the duration of the native write.
    fixed (void* ptr = data1)
    {
        res = H5D.write(datasetId1, H5T.NATIVE_INT8, dataspaceId1, dataspaceId1,
                        0, new IntPtr(ptr));
    }

    res = H5D.close(datasetId1);
    res = H5S.close(dataspaceId1);

    var dataspaceId2 = H5S.create_simple(1, new ulong[] { 1 }, new ulong[] { 1 });
    var datasetId2 = H5D.create(groupId, "D1", H5T.NATIVE_INT8, dataspaceId2);
    res = H5D.close(datasetId2);
    res = H5S.close(dataspaceId2);

    var dataspaceId3 = H5S.create_simple(1, new ulong[] { 1 }, new ulong[] { 1 });
    var datasetId3 = H5D.create(groupId_sub, "D1.1", H5T.NATIVE_INT8, dataspaceId3);
    res = H5D.close(datasetId3);
    res = H5S.close(dataspaceId3);

    res = H5G.close(groupId);
    res = H5G.close(groupId_sub);
}
/// <summary>
/// Writes <paramref name="data"/> into this dataset. When <paramref name="location"/>
/// is null the whole dataset is written; otherwise a hyperslab of data's shape,
/// anchored at <paramref name="location"/>, is selected and written.
/// </summary>
/// <param name="data">Array to write; its shape must fit the (selected part of the) dataset.</param>
/// <param name="location">Optional per-dimension start offsets for a partial write.</param>
public void Put(Array data, ulong[] location = null)
{
    ulong[] shape = data.Shape();
    WithDataSpace((h5Ref, dsRef) =>
    {
        long memDataSpace = H5S.ALL;
        if (location != null)
        {
            // Replace whatever selection the dataspace carries with our hyperslab.
            int selection = H5S.select_none(dsRef);
            if (selection < 0)
            {
                throw new H5SSException("Couldn't clear dataspace selection");
            }
            // Unit stride and unit block: select exactly 'shape' elements at 'location'.
            ulong[] stride = Ones(shape.Length);
            selection = H5S.select_hyperslab(dsRef, H5S.seloper_t.SET,
                                             location, stride, stride, shape);
            if (selection < 0)
            {
                throw new H5SSException("Couldn't select hyperslab");
            }
            // Memory-side dataspace matching the shape of the incoming array.
            memDataSpace = H5S.create_simple(shape.Length, shape, shape);
        }
        IntPtr iPtr;
        // NOTE(review): effectiveSize is computed but unused — presumably a
        // leftover from a planned string-conversion path (see commented block).
        var effectiveSize = data.Length * ElementSize;
        //if (DataType == HDF5DataType.String)
        //{
        //    // Convert to byte array...
        //}
        //else
        //{
        //}
        var dtype = H5D.get_type(h5Ref);
        // Return?
        // Marshal the managed array into a native buffer in the dataset's type.
        iPtr = CreateNativeArray(data, dtype);
        // copy to unmanaged array?
        var success = H5D.write(h5Ref, dtype, memDataSpace, dsRef, H5P.DEFAULT, iPtr);
        H5T.close(dtype);
        if (location != null)
        {
            H5S.close(memDataSpace);
        }
        Marshal.FreeHGlobal(iPtr);
        if (success < 0)
        {
            throw new H5SSException(string.Format("Couldn't write to dataset: {0}", this.Path));
        }
    });
}
/// <summary>
/// Writes a 2-D array of unmanaged elements as a new dataset named
/// <paramref name="name"/> (after ToHdf5Name normalization) under
/// <paramref name="groupId"/>.
/// </summary>
/// <returns>The status code from H5D.write.</returns>
public static int WriteDataset<T>(int groupId, string name, T[,] dset) where T : struct
{
    ulong[] dims = new ulong[] { (ulong)dset.GetLength(0), (ulong)dset.GetLength(1) };
    ulong[] maxDims = null;
    var spaceId = H5S.create_simple(2, dims, maxDims);
    var datatype = GetDatatype(typeof(T));
    var typeId = H5T.copy(datatype);
    if (datatype == H5T.C_S1)
    {
        // BUG FIX: the original resized 'datatype' — the shared predefined
        // C_S1 handle — mutating it process-wide; resize the private copy.
        H5T.set_size(typeId, new IntPtr(2));
        //var wdata = Encoding.ASCII.GetBytes((char[,]) dset);
    }
    name = ToHdf5Name(name);
    // Use the (possibly resized) copy for both dataset creation and the write,
    // instead of creating a copy and never using it.
    var datasetId = H5D.create(groupId, name, typeId, spaceId);
    GCHandle hnd = GCHandle.Alloc(dset, GCHandleType.Pinned);
    var result = H5D.write(datasetId, typeId, H5S.ALL, H5S.ALL, H5P.DEFAULT,
                           hnd.AddrOfPinnedObject());
    hnd.Free();
    H5D.close(datasetId);
    H5S.close(spaceId);
    H5T.close(typeId);
    return result;
}
/// <summary>
/// Writes an arbitrary-rank array as a new dataset named <paramref name="name"/>
/// under <paramref name="groupId"/>, using the HDF5 type mapped from T.
/// </summary>
/// <returns>The status code from H5D.write.</returns>
public static int WriteDatasetFromArray<T>(hid_t groupId, string name, Array dset, string datasetName = null) //where T : struct
{
    int rank = dset.Rank;
    ulong[] dims = Enumerable.Range(0, rank).Select(i => (ulong)dset.GetLength(i)).ToArray();
    ulong[] maxDims = null;
    var spaceId = H5S.create_simple(rank, dims, maxDims);
    var datatype = GetDatatype(typeof(T));
    var typeId = H5T.copy(datatype);
    if (datatype == H5T.C_S1)
    {
        // BUG FIX: the original resized the shared predefined C_S1 handle
        // instead of its private copy, mutating the type process-wide.
        H5T.set_size(typeId, new IntPtr(2));
    }
    // Use the (possibly resized) copy consistently; the original created the
    // copy but then created/wrote with the shared handle.
    var datasetId = H5D.create(groupId, name, typeId, spaceId);
    GCHandle hnd = GCHandle.Alloc(dset, GCHandleType.Pinned);
    var result = H5D.write(datasetId, typeId, H5S.ALL, H5S.ALL, H5P.DEFAULT,
                           hnd.AddrOfPinnedObject());
    hnd.Free();
    H5D.close(datasetId);
    H5S.close(spaceId);
    H5T.close(typeId);
    return result;
}
// information: https://www.hdfgroup.org/ftp/HDF5/examples/examples-by-api/hdf5-examples/1_8/C/H5T/h5ex_t_cmpd.c
//or: https://www.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html#t=HDF5_Users_Guide%2FDatatypes%2FHDF5_Datatypes.htm%3Frhtocid%3Dtoc6.5%23TOC_6_8_Complex_Combinationsbc-22
/// <summary>
/// Writes the items of <paramref name="list"/> as a 1-D compound-type dataset,
/// chunked when the collection has 10 or more items.
/// </summary>
/// <returns>The status code from H5D.write.</returns>
public static int WriteCompounds<T>(hid_t groupId, string name, IEnumerable<T> list) //where T : struct
{
    Type type = typeof(T);
    var cnt = list.Count();
    var typeId = create_type(type);

    // Guard: (int)Math.Log10(0) is int.MinValue; treat the empty case as 0.
    var log10 = cnt > 0 ? (int)Math.Log10(cnt) : 0;
    ulong pow = (ulong)Math.Pow(10, log10);
    ulong c_s = Math.Min(1000, pow);
    ulong[] chunk_size = new ulong[] { c_s };
    ulong[] dims = new ulong[] { (ulong)cnt };

    // Chunking only pays off past ~10 items; otherwise use the default layout.
    long dcpl = 0;
    if (cnt != 0 && log10 != 0)
    {
        dcpl = create_property(chunk_size);
    }

    // Create dataspace. Setting maximum size to NULL sets the maximum
    // size to be the current size.
    var spaceId = H5S.create_simple(dims.Length, dims, null);

    // Create the dataset and write the compound data to it.
    var datasetId = H5D.create(groupId, name, typeId, spaceId, H5P.DEFAULT, dcpl);

    // Serialize all structs into one contiguous buffer. The original also
    // allocated an unmanaged buffer via Marshal.AllocHGlobal that was never
    // used, and never disposed the stream/writer.
    byte[] bytes;
    using (var ms = new MemoryStream())
    using (var writer = new BinaryWriter(ms))
    {
        foreach (var strct in list)
        {
            writer.Write(getBytes(strct));
        }
        writer.Flush();
        bytes = ms.ToArray();
    }

    GCHandle hnd = GCHandle.Alloc(bytes, GCHandleType.Pinned);
    var statusId = H5D.write(datasetId, typeId, spaceId, H5S.ALL, H5P.DEFAULT,
                             hnd.AddrOfPinnedObject());
    hnd.Free();

    /*
     * Close and release resources.
     */
    H5D.close(datasetId);
    H5S.close(spaceId);
    H5T.close(typeId);
    // BUG FIX: the original called H5P.close(dcpl) even when dcpl was 0
    // (H5P.DEFAULT), which is an error; only close a list we created.
    if (dcpl != 0)
    {
        H5P.close(dcpl);
    }
    return statusId;
}
/// <summary>
/// Overwrites the full contents of an existing 1-D dataset with
/// <paramref name="_array"/>, then flushes the file.
/// </summary>
/// <exception cref="Hdf5ArrayDimensionsMismatchException">Dataset is not 1-D.</exception>
/// <exception cref="Hdf5ArraySizeMismatchException">Array length differs from the dataset extent.</exception>
public static void Write1DArray<T>(Hdf5Dataset _dataset, T[] _array)
{
    if (_dataset.Dataspace.NumberOfDimensions != 1)
    {
        throw new Hdf5ArrayDimensionsMismatchException();
    }
    if ((ulong)_array.Length != _dataset.Dataspace.DimensionProperties[0].CurrentSize)
    {
        throw new Hdf5ArraySizeMismatchException();
    }
    // Open the dataset by path and pin the managed buffer for the native write.
    var datasetId = H5O.open(_dataset.FileId.Value, _dataset.Path.FullPath).ToId();
    GCHandle arrayHandle = GCHandle.Alloc(_array, GCHandleType.Pinned);
    var typeId = H5T.copy(_dataset.DataType.NativeType.Value).ToId();
    int result = H5D.write(
        datasetId.Value,
        typeId.Value,
        H5S.ALL,
        H5S.ALL,
        H5P.DEFAULT,
        arrayHandle.AddrOfPinnedObject());
    arrayHandle.Free();
    H5T.close(typeId.Value);
    H5O.close(datasetId.Value);
    // Persist the write to disk immediately.
    FileHelper.FlushToFile(_dataset.FileId);
}
/// <summary>
/// Writes the entire matrix table
/// </summary>
/// <typeparam name="T">Element type of the matrix</typeparam>
/// <param name="matName">Name of the matrix table to overwrite</param>
/// <param name="data">Full matrix data, shaped [Shape[0], Shape[1]]</param>
public void SetMatrix<T>(string matName, T[,] data)
{
    // Single dictionary lookup; the original did ContainsKey + TryGetValue.
    H5DataSetId matId;
    if (!tables.TryGetValue(matName, out matId))
    {
        Console.WriteLine("table {0} not found in matrix file", matName);
        return;
    }

    H5DataTypeId matDataId = H5D.getType(matId);
    H5DataSpaceId spaceId = H5S.create_simple(2, Shape);
    long[] start = { 0, 0 };
    long[] count = { Shape[0], Shape[1] };
    var h5matrix = new H5Array<T>(data);
    H5S.selectHyperslab(spaceId, H5S.SelectOperator.SET, start, count);
    H5DataSpaceId readSpaceId = H5S.create_simple(2, count);

    // BUG FIX: the original created the transfer property list inline and
    // leaked it (and the datatype handle) on every call.
    H5PropertyListId xferPlist = H5P.create(H5P.PropertyListClass.DATASET_XFER);
    H5D.write(matId, matDataId, readSpaceId, spaceId, xferPlist, h5matrix);
    H5P.close(xferPlist);
    H5T.close(matDataId);
    H5S.close(spaceId);
    H5S.close(readSpaceId);
    // matId stays open: it is owned by the 'tables' cache.
}
/// <summary>
/// Creates the chunked, unlimited-dimension dataset for this writer and writes
/// the initial data. Must be called exactly once, before any append.
/// </summary>
/// <param name="dataset">Initial data; its rank and dimensions size the dataset.</param>
/// <exception cref="Exception">Group/file was not created, or the dataset already exists.</exception>
public void FirstDataset(Array dataset)
{
    if (FalseGroupId)
    {
        throw new Exception("cannot call FirstDataset because group or file couldn't be created");
    }
    if (DatasetExists)
    {
        throw new Exception("cannot call FirstDataset because dataset already exists");
    }

    Rank = dataset.Rank;
    currentDims = GetDims(dataset);

    /* Create the data space with unlimited dimensions. */
    spaceId = H5S.create_simple(Rank, currentDims, maxDims);

    /* Modify dataset creation properties, i.e. enable chunking */
    propId = H5P.create(H5P.DATASET_CREATE);
    status = H5P.set_chunk(propId, Rank, chunkDims);

    /* Create a new dataset within the file using chunk creation properties. */
    datasetId = H5D.create(GroupId, Datasetname, datatype, spaceId,
                           H5P.DEFAULT, propId, H5P.DEFAULT);

    /* Write data to dataset */
    GCHandle hnd = GCHandle.Alloc(dataset, GCHandleType.Pinned);
    status = H5D.write(datasetId, datatype, H5S.ALL, H5S.ALL, H5P.DEFAULT,
                       hnd.AddrOfPinnedObject());
    hnd.Free();

    // Only the dataspace is released here; datasetId/propId remain open as
    // instance state — NOTE(review): presumably for subsequent appends; confirm
    // they are closed elsewhere (e.g. on dispose).
    H5S.close(spaceId);
}
// Writes a fixed-length UTF-8 string scalar dataset into both a v0- and a
// v2-superblock test file, exercising H5D.write with a custom string type.
public void H5DwriteTest1()
{
    string utf8string = "Γαζέες καὶ μυρτιὲς δὲν θὰ βρῶ πιὰ στὸ χρυσαφὶ ξέφωτο";
    byte[] wdata = Encoding.UTF8.GetBytes(utf8string);

    // Fixed-size string type sized to the encoded byte length, UTF-8, space padded.
    hid_t dtype = H5T.create(H5T.class_t.STRING, new IntPtr(wdata.Length));
    Assert.IsTrue(H5T.set_cset(dtype, H5T.cset_t.UTF8) >= 0);
    Assert.IsTrue(H5T.set_strpad(dtype, H5T.str_t.SPACEPAD) >= 0);

    hid_t dset_v0 = H5D.create(m_v0_test_file, "dset", dtype, m_space_scalar);
    Assert.IsTrue(dset_v0 >= 0);
    hid_t dset_v2 = H5D.create(m_v2_test_file, "dset", dtype, m_space_scalar);
    Assert.IsTrue(dset_v2 >= 0);

    // One pinned buffer is reused for both writes.
    GCHandle hnd = GCHandle.Alloc(wdata, GCHandleType.Pinned);
    Assert.IsTrue(H5D.write(dset_v0, dtype, H5S.ALL, H5S.ALL, H5P.DEFAULT,
                            hnd.AddrOfPinnedObject()) >= 0);
    Assert.IsTrue(H5D.write(dset_v2, dtype, H5S.ALL, H5S.ALL, H5P.DEFAULT,
                            hnd.AddrOfPinnedObject()) >= 0);
    hnd.Free();

    Assert.IsTrue(H5T.close(dtype) >= 0);
    Assert.IsTrue(H5D.close(dset_v2) >= 0);
    Assert.IsTrue(H5D.close(dset_v0) >= 0);
}
// Demo writer: creates a file containing /group/dataset (3x4 NATIVE_INT),
// attaches a scalar int attribute "int" (value 42) and a scalar fixed-length
// ASCII string attribute "string" ("Hello, world!") to the dataset.
private static void WriteFile(string filePath)
{
    var file = H5F.create(filePath, H5F.CreateMode.ACC_TRUNC);

    var group = H5G.create(file, "/group");
    H5G.close(group);

    const int RANK = 2;
    const int DIM0 = 3;
    const int DIM1 = 4;
    var dims = new long[RANK] { DIM0, DIM1 };
    var dataSpace = H5S.create_simple(RANK, dims);
    var dataSet = H5D.create(file, "/group/dataset", H5T.H5Type.NATIVE_INT, dataSpace);
    H5S.close(dataSpace);

    var data = new int[DIM0, DIM1]
    {
        { 1, 2, 3, 4 },
        { 5, 6, 7, 8 },
        { 9, 10, 11, 12 }
    };
    H5D.write(dataSet, new H5DataTypeId(H5T.H5Type.NATIVE_INT), new H5Array<int>(data));

    // Scalar integer attribute on the dataset.
    var dataType = new H5DataTypeId(H5T.H5Type.NATIVE_INT);
    dataSpace = H5S.create(H5S.H5SClass.SCALAR);
    var integerAttribute = H5A.create(dataSet, "int", dataType, dataSpace);
    H5A.write(integerAttribute, dataType, new H5Array<int>(new int[1] { 42 }));
    H5A.close(integerAttribute);
    H5S.close(dataSpace);
    //H5T.close(dataType); // Read-only.

    // Scalar fixed-length ASCII string attribute.
    var str = "Hello, world!";
    var strBytes = Encoding.ASCII.GetBytes(str);
    // There is a H5T.get_cset, but there does not seem to be a way of setting the character encoding, i.e. set_cset.
    dataType = H5T.copy(H5T.H5Type.C_S1);
    H5T.setSize(dataType, strBytes.Length);
    dataSpace = H5S.create(H5S.H5SClass.SCALAR);
    var stringAttribute = H5A.create(dataSet, "string", dataType, dataSpace);
    H5A.write(stringAttribute, dataType, new H5Array<byte>(strBytes));
    H5A.close(stringAttribute);
    H5S.close(dataSpace);
    H5T.close(dataType);

    H5D.close(dataSet);
    H5F.close(file);
}
/// <summary>
/// Overwrites part of an existing dataset with corrected (destriped) values:
/// flat elements [BrandNo .. values.Length-1] are replaced, the rest keep their
/// original content.
/// </summary>
/// <typeparam name="T">Element type of the dataset</typeparam>
/// <param name="dataSetName">Name of the dataset to rewrite</param>
/// <param name="dataTypeId">HDF5 type id of the dataset elements</param>
/// <param name="values">Corrected data (flat layout matching the dataset)</param>
/// <param name="BrandNo">First flat index (0-based) to overwrite</param>
private void ReWriteDataSet<T>(string dataSetName, H5DataTypeId dataTypeId, T[] values, int BrandNo)
{
    H5FileId _h5FileId = null;
    H5DataSetId dataSetId = null;
    H5DataSpaceId spaceid = null;
    try
    {
        _h5FileId = H5F.open(fileName, H5F.OpenMode.ACC_RDWR);
        dataSetId = H5D.open(_h5FileId, dataSetName);
        spaceid = H5D.getSpace(dataSetId);
        long[] dims = H5S.getSimpleExtentDims(spaceid); // e.g. [3, 1800, 2048]
        int rank = H5S.getSimpleExtentNDims(spaceid);   // e.g. 3
        H5S.close(spaceid);
        spaceid = null;

        // Total element count = product of all dimensions. The original only
        // handled rank 0..3 (and silently produced size 0 for higher ranks).
        int size = 1;
        for (int d = 0; d < rank; d++)
        {
            size = Convert.ToInt32(size * dims[d]); // checked conversion guards overflow
        }

        // Read the existing data, splice in the corrected values, write back.
        T[] v = new T[size];
        H5D.read<T>(dataSetId, dataTypeId, new H5Array<T>(v));
        for (int i = BrandNo; i < values.Length; i++)
        {
            v[i] = values[i];
        }
        H5D.write<T>(dataSetId, dataTypeId, new H5Array<T>(v));
    }
    // BUG FIX: the original caught Exception and rethrew `new Exception(e.Message)`,
    // destroying the exception type and stack trace. Let exceptions propagate.
    finally
    {
        // BUG FIX: guard against nulls — if H5F.open/H5D.open threw, the original
        // called close on a null id and masked the real error.
        if (dataSetId != null)
        {
            H5D.close(dataSetId);
        }
        if (_h5FileId != null)
        {
            H5F.close(_h5FileId);
        }
    }
}
/// <summary>
/// Demo: writes a 10000-record compound-type dataset "Table1" (chunked,
/// deflate-compressed) to D:\test.h5.
/// </summary>
static void Main2222(string[] args)
{
    var h5 = H5F.create(@"D:\test.h5", H5F.ACC_TRUNC);

    // 40-byte compound layout: 16-byte string, two int32s, float, double at offset 32.
    var typeId = H5T.create(H5T.class_t.COMPOUND, new IntPtr(40));
    var strtype = H5T.copy(H5T.C_S1);
    H5T.set_size(strtype, new IntPtr(16));
    H5T.insert(typeId, "Name", new IntPtr(0), strtype);
    H5T.insert(typeId, "x_pos", new IntPtr(16), H5T.NATIVE_INT32);
    H5T.insert(typeId, "y_pos", new IntPtr(20), H5T.NATIVE_INT32);
    H5T.insert(typeId, "Mass", new IntPtr(24), H5T.NATIVE_FLOAT);
    H5T.insert(typeId, "Temperature", new IntPtr(32), H5T.NATIVE_DOUBLE);

    ulong[] dims = new ulong[] { 10000 };
    ulong[] chunk_size = new ulong[] { 1000 };
    var spaceid = H5S.create_simple(dims.Length, dims, null);

    var dcpl = H5P.create(H5P.DATASET_CREATE);
    // BUG FIX: the original called H5P.set_layout(dcpl, COMPACT) first, which
    // conflicts with chunking — deflate requires CHUNKED layout, which
    // set_chunk establishes.
    H5P.set_deflate(dcpl, 6);
    H5P.set_chunk(dcpl, chunk_size.Length, chunk_size);

    var datasetid = H5D.create(h5, "Table1", typeId, spaceid, H5P.DEFAULT, dcpl);

    ComType ct = new ComType()
    {
        Name = "aabb",
        x_pos = 2,
        y_pos = 1,
        Mass = 1.24F,
        Temperature = 45.7,
    };

    const int recordSize = 40;
    int bufferSize = recordSize * (int)dims[0];
    IntPtr p = Marshal.AllocHGlobal(bufferSize);
    try
    {
        // BUG FIX: only the first record is populated; the remaining 9999
        // previously contained uninitialized heap memory. Zero the buffer so
        // the file contents are deterministic.
        Marshal.Copy(new byte[bufferSize], 0, p, bufferSize);
        Marshal.StructureToPtr(ct, p, false);
        H5D.write(datasetid, typeId, spaceid, H5S.ALL, H5P.DEFAULT, p);
    }
    finally
    {
        // BUG FIX: the unmanaged buffer was never freed in the original.
        Marshal.FreeHGlobal(p);
    }

    // BUG FIX: the original leaked every handle except the file.
    H5D.close(datasetid);
    H5P.close(dcpl);
    H5S.close(spaceid);
    H5T.close(strtype);
    H5T.close(typeId);
    H5F.close(h5);
}
/// <summary>
/// Create mapping index - assumes that matrices are square and uses first dimension as the map size
/// </summary>
/// <typeparam name="T">Element type of the mapping array</typeparam>
/// <param name="tazEquiv">array the size of the matrix table dimension</param>
/// <param name="mapDataType">Data type of array</param>
/// <param name="mapName">Name of index map</param>
public void CreateMapping<T>(T[] tazEquiv, H5DataTypeId mapDataType, string mapName)
{
    long[] oneDShape = { Shape[0] };
    H5DataSpaceId mapSpaceId = H5S.create_simple(1, oneDShape);
    H5DataSetId newMappingID = H5D.create(luGroup, mapName, mapDataType, mapSpaceId);
    H5D.write(newMappingID, mapDataType, new H5Array<T>(tazEquiv));
    // BUG FIX: the dataspace handle was leaked in the original; it is no longer
    // needed once the dataset exists. The dataset id stays open on purpose —
    // it is cached in indexMaps below.
    H5S.close(mapSpaceId);
    IndexMapNames.Add(mapName);
    NumIndexMap++;
    this.indexMaps.Add(mapName, newMappingID);
}
/// <summary>
/// Creates a dataset or attribute named <paramref name="elementName"/> in group
/// <paramref name="groupName"/> (creating the group if absent) and, when
/// <paramref name="dataPtr"/> is non-null, writes the pointed-to data.
/// </summary>
/// <exception cref="Exception">The dataset/attribute could not be created.</exception>
public static unsafe void Add(ContainerType container, long fileId, string groupName, string elementName, long typeId, void* dataPtr, long spaceId, long cpl = 0, long apl = 0)
{
    long res;

    long groupId;
    if (H5L.exists(fileId, groupName) > 0)
    {
        groupId = H5G.open(fileId, groupName);
    }
    else
    {
        groupId = H5G.create(fileId, groupName);
    }

    long id;
    if (container == ContainerType.Dataset)
    {
        id = H5D.create(groupId, Encoding.UTF8.GetBytes(elementName), typeId, spaceId,
                        dcpl_id: cpl, dapl_id: apl);
        if (id == -1)
        {
            throw new Exception("Could not create dataset.");
        }
        // BUG FIX: the original tested `(int)dataPtr != 0`, which truncates the
        // pointer to 32 bits and can wrongly skip the write in 64-bit processes.
        if (dataPtr != null)
        {
            res = H5D.write(id, typeId, spaceId, H5S.ALL, 0, new IntPtr(dataPtr));
        }
        res = H5D.close(id);
    }
    else
    {
        id = H5A.create(groupId, Encoding.UTF8.GetBytes(elementName), typeId, spaceId,
                        acpl_id: cpl);
        if (id == -1)
        {
            throw new Exception("Could not create attribute.");
        }
        // Same 64-bit-safe null check as above.
        if (dataPtr != null)
        {
            res = H5A.write(id, typeId, new IntPtr(dataPtr));
        }
        res = H5A.close(id);
    }

    res = H5G.close(groupId);
}
/// <summary>
/// Writes the tick list as a compound-type dataset named
/// <paramref name="dataset_name"/>, chunked when the list has 10+ items.
/// </summary>
public void Writer(List<PbTickView> list, string dataset_name)
{
    var t = typeof(PbTickStruct5);
    var size = Marshal.SizeOf(t);
    var typeId = create_type();

    // Chunk size must not exceed the dataset extent or creation fails.
    // Guard: (int)Math.Log10(0) is int.MinValue; treat empty as 0.
    var log10 = list.Count > 0 ? (int)Math.Log10(list.Count) : 0;
    ulong pow = (ulong)Math.Pow(10, log10);
    ulong c_s = Math.Min(1000, pow);
    ulong[] chunk_size = new ulong[] { c_s };
    ulong[] dims = new ulong[] { (ulong)list.Count };

    long dcpl = 0;
    if (list.Count != 0 && log10 != 0)
    {
        dcpl = create_property(chunk_size);
    }

    var spaceid = H5S.create_simple(dims.Length, dims, null);
    var datasetid = H5D.create(h5, dataset_name, typeId, spaceid, H5P.DEFAULT, dcpl);

    IntPtr p = Marshal.AllocHGlobal(size * (int)dims[0]);
    try
    {
        int i = 0;
        foreach (var b in list)
        {
            var s = DataConvert.toStruct(b);
            // BUG FIX: the original used p.ToInt32() + offset, which throws/
            // overflows for 64-bit pointers; IntPtr.Add is pointer-size safe.
            Marshal.StructureToPtr(s, IntPtr.Add(p, size * i), false);
            ++i;
        }
        H5D.write(datasetid, typeId, spaceid, H5S.ALL, H5P.DEFAULT, p);
    }
    finally
    {
        // BUG FIX: the buffer leaked if marshaling or the write threw.
        Marshal.FreeHGlobal(p);
    }

    H5D.close(datasetid);
    H5S.close(spaceid);
    H5T.close(typeId);
    // BUG FIX: avoid H5P.close(0) on the DEFAULT property list.
    if (dcpl != 0)
    {
        H5P.close(dcpl);
    }
}
/// <summary>
/// Gets or sets the fixed-length string element at (<paramref name="m"/>,
/// <paramref name="n"/>). On read, trailing NULs are trimmed; on write, the
/// value must fit in the dataset's string size minus the NUL terminator.
/// </summary>
/// <exception cref="H5LibraryException">The underlying H5D.read/H5D.write fails.</exception>
/// <exception cref="IndexOutOfRangeException">The assigned string is too long.</exception>
public string this[long m, long n]
{
    get
    {
        using (H5Space file_space = SelectPoint(m, n))
        using (H5Space mem_space = H5Space.Create(1, new long[] { 1 }))
        using (H5Type dtype = GetDType())
        {
            long slen = dtype.Size;
            byte[] buf = new byte[slen];
            GCHandle pinnedArray = GCHandle.Alloc(buf, GCHandleType.Pinned);
            int status;
            try
            {
                status = H5D.read(ID, dtype.ID, mem_space.ID, file_space.ID,
                                  H5P.DEFAULT, pinnedArray.AddrOfPinnedObject());
            }
            finally
            {
                // BUG FIX: the handle was leaked if the native call threw.
                pinnedArray.Free();
            }
            if (status < 0)
            {
                throw new H5LibraryException($"H5D.read() returned ({status})");
            }
            string decoded = string1d.Enc.GetString(buf);
            return decoded.TrimEnd('\0');
        }
    }
    set
    {
        using (H5Space file_space = SelectPoint(m, n))
        using (H5Space mem_space = H5Space.Create(1, new long[] { 1 }))
        using (H5Type dtype = GetDType())
        {
            long slen = dtype.Size;
            if (value.Length > slen - 1)
            {
                throw new IndexOutOfRangeException($"string longer than ({slen})");
            }
            byte[] buf = new byte[slen];
            // NOTE(review): copies value.Length bytes — assumes a single-byte
            // encoding in string1d.Enc; multi-byte encodings would be truncated.
            Array.Copy(string1d.Enc.GetBytes(value), buf, value.Length);
            GCHandle pinnedArray = GCHandle.Alloc(buf, GCHandleType.Pinned);
            int status;
            try
            {
                status = H5D.write(ID, dtype.ID, mem_space.ID, file_space.ID,
                                   H5P.DEFAULT, pinnedArray.AddrOfPinnedObject());
            }
            finally
            {
                pinnedArray.Free();
            }
            if (status < 0)
            {
                // BUG FIX: the original setter message said "H5D.read()".
                throw new H5LibraryException($"H5D.write() returned ({status})");
            }
        }
    }
}
/// <summary>
/// Writes a sequence of strings as a 1-D variable-length ASCII string dataset.
/// </summary>
/// <returns>
/// (H5D.write status, dataset id) on success; (-1, -1) if the dataset could not
/// be obtained. Note the returned dataset id has been closed by this method.
/// </returns>
public static (int success, long CreatedgroupId) WriteStrings(long groupId, string name, IEnumerable<string> strs)
{
    // create UTF-8 encoded test datasets
    long datatype = H5T.create(H5T.class_t.STRING, H5T.VARIABLE);
    H5T.set_cset(datatype, H5T.cset_t.ASCII);
    H5T.set_strpad(datatype, H5T.str_t.NULLTERM);

    int strSz = strs.Count();
    long spaceId = H5S.create_simple(1, new[] { (ulong)strSz }, null);

    string normalizedName = Hdf5Utils.NormalizedName(name);
    var datasetId = Hdf5Utils.GetDatasetId(groupId, normalizedName, datatype, spaceId);
    if (datasetId == -1L)
    {
        return (-1, -1L);
    }

    // Variable-length strings are written as an array of pointers: pin each
    // UTF-8 byte array and collect its address.
    GCHandle[] hnds = new GCHandle[strSz];
    IntPtr[] wdata = new IntPtr[strSz];
    int cntr = 0;
    foreach (string str in strs)
    {
        hnds[cntr] = GCHandle.Alloc(
            Encoding.UTF8.GetBytes(str),
            GCHandleType.Pinned);
        wdata[cntr] = hnds[cntr].AddrOfPinnedObject();
        cntr++;
    }

    // Pin the pointer array itself and hand it to the native write.
    var hnd = GCHandle.Alloc(wdata, GCHandleType.Pinned);
    var result = H5D.write(datasetId, datatype, H5S.ALL, H5S.ALL,
                           H5P.DEFAULT, hnd.AddrOfPinnedObject());
    hnd.Free();
    for (int i = 0; i < strSz; ++i)
    {
        hnds[i].Free();
    }

    H5D.close(datasetId);
    H5S.close(spaceId);
    H5T.close(datatype);
    return (result, datasetId);
}
/// <summary>
/// On first call, creates a chunked, unlimited-dimension dataset sized from
/// <paramref name="dataset"/> and writes it; later calls append via
/// <see cref="AppendDataset"/>. Arrays with any empty dimension are logged
/// and ignored.
/// </summary>
public void AppendOrCreateDataset(Array dataset)
{
    if (_chunkDims == null)
    {
        // NOTE(review): Array.Rank is always >= 1, so this guard can never
        // fire; and _chunkDims below reads GetLongLength(1), which still
        // throws for a rank-1 array — the guard should probably be Rank < 2.
        if (dataset.Rank < 1)
        {
            string msg = "Empty array was passed. Ignoring.";
            Hdf5Utils.LogError?.Invoke(msg);
            return;
        }

        // Reject arrays with any zero-length dimension.
        for (int dimension = 1; dimension <= dataset.Rank; dimension++)
        {
            var size = dataset.GetUpperBound(dimension - 1) + 1;
            if (size == 0)
            {
                string msg = $"Empty array was passed for dimension {dimension}. Ignoring.";
                Hdf5Utils.LogError?.Invoke(msg);
                return;
            }
        }

        // Chunk shape is taken from the first two array dimensions.
        _chunkDims = new[]
        {
            Convert.ToUInt64(dataset.GetLongLength(0)),
            Convert.ToUInt64(dataset.GetLongLength(1))
        };
        Rank = dataset.Rank;
        _currentDims = GetDims(dataset);

        /* Create the data space with unlimited dimensions. */
        _spaceId = H5S.create_simple(Rank, _currentDims, _maxDims);

        /* Modify dataset creation properties, i.e. enable chunking */
        _propId = H5P.create(H5P.DATASET_CREATE);
        _status = H5P.set_chunk(_propId, Rank, _chunkDims);

        /* Create a new dataset within the file using chunk creation properties. */
        _datasetId = H5D.create(GroupId, Hdf5Utils.NormalizedName(Datasetname),
                                _datatype, _spaceId, H5P.DEFAULT, _propId);

        /* Write data to dataset */
        GCHandle hnd = GCHandle.Alloc(dataset, GCHandleType.Pinned);
        _status = H5D.write(_datasetId, _datatype, H5S.ALL, H5S.ALL, H5P.DEFAULT,
                            hnd.AddrOfPinnedObject());
        hnd.Free();
        H5S.close(_spaceId);
        _spaceId = -1;
    }
    else
    {
        AppendDataset(dataset);
    }
}
/// <summary>
/// Writes an arbitrary-rank array as a new dataset, interactively offering
/// chunked/deflate compression for non-string types via the console.
/// NOTE(review): console I/O inside a write routine is unusual for a library —
/// consider lifting the prompt to the caller.
/// </summary>
/// <returns>The status code from H5D.write.</returns>
public static int WriteDatasetFromArray<T>(hid_t groupId, string name, Array dset, string datasetName = null) //where T : struct
{
    int rank = dset.Rank;
    ulong[] dims = Enumerable.Range(0, rank).Select(i => (ulong)dset.GetLength(i)).ToArray();
    ulong[] maxDims = null;
    var spaceId = H5S.create_simple(rank, dims, maxDims);
    var datatype = GetDatatype(typeof(T));
    var typeId = H5T.copy(datatype);
    if (datatype == H5T.C_S1)
    {
        // BUG FIX: the original resized the shared predefined C_S1 handle
        // instead of the private copy, mutating the type process-wide.
        H5T.set_size(typeId, new IntPtr(2));
    }

    long dcpl = H5P.DEFAULT;
    if (datatype != H5T.C_S1)
    {
        Console.WriteLine("Do you want to compress the file? (Y/N)");
        string response = Console.ReadLine();
        if (response == "Y" || response == "y")
        {
            // Auto-chunk at the largest power of ten not exceeding the length.
            var cnt = dset.Length;
            var log10 = (int)Math.Log10(cnt);
            ulong pow = (ulong)Math.Pow(10, log10);
            ulong[] chunk_size = new ulong[] { pow };
            Console.WriteLine("Auto-Chunking with chunk-size of {0} samples.", pow);
            dcpl = create_property(chunk_size);
        }
        else
        {
            Console.WriteLine("Creating an Uncompressed File...");
        }
    }

    // Use the (possibly resized) copy consistently for creation and write.
    var datasetId = H5D.create(groupId, name, typeId, spaceId, H5P.DEFAULT, dcpl);
    GCHandle hnd = GCHandle.Alloc(dset, GCHandleType.Pinned);
    var result = H5D.write(datasetId, typeId, H5S.ALL, H5S.ALL, H5P.DEFAULT,
                           hnd.AddrOfPinnedObject());
    hnd.Free();
    H5D.close(datasetId);
    H5S.close(spaceId);
    H5T.close(typeId);
    return result;
}
/// <summary>
/// Writes <paramref name="data"/> as a single fixed-length FORTRAN-string
/// dataset named <paramref name="name"/> under <paramref name="parentLoc"/>.
/// </summary>
/// <returns>
/// NOTE(review): returns <c>dset</c> AFTER H5D.close(dset) has been called, so
/// the returned id is no longer valid for HDF5 calls — callers must not use it
/// as an open handle. Confirm whether callers only treat it as a status/token.
/// </returns>
private static hid_t AddDataString(hid_t parentLoc, string name, string data)
{
    byte[][] wdata = new byte[1][];
    wdata[0] = ASCIIEncoding.ASCII.GetBytes(data);
    // n includes one byte for the null terminator used by the memory type.
    int n = wdata[0].Length + 1;

    /*
     * Create file and memory datatypes. For this example we will save
     * the strings as FORTRAN strings, therefore they do not need space
     * for the null terminator in the file.
     */
    hsize_t[] dims = new hsize_t[1];
    dims[0] = (hsize_t)1;
    hid_t filetype = H5T.copy(H5T.FORTRAN_S1);
    herr_t status = H5T.set_size(filetype, new IntPtr(n - 1));
    hid_t memtype = H5T.copy(H5T.C_S1);
    status = H5T.set_size(memtype, new IntPtr(n));

    /*
     * Create dataspace. Setting maximum size to NULL sets the maximum
     * size to be the current size.
     */
    hid_t space = H5S.create_simple(1, dims, null);

    /*
     * Create the dataset and write the string data to it.
     */
    hid_t dset = H5D.create(parentLoc, name, filetype, space,
                            H5P.DEFAULT, H5P.DEFAULT, H5P.DEFAULT);
    GCHandle hnd = GCHandle.Alloc(wdata[0], GCHandleType.Pinned);
    // herr_t flag= H5D.write(dataSetId, type, H5S.ALL, H5S.ALL, H5P.DEFAULT, hnd.AddrOfPinnedObject());
    status = H5D.write(dset, memtype, H5S.ALL, H5S.ALL, H5P.DEFAULT,
                       hnd.AddrOfPinnedObject());
    // NOTE(review): the pinned handle 'hnd' is never freed here — leak.

    /*
     * Close and release resources.
     */
    status = H5D.close(dset);
    status = H5S.close(space);
    status = H5T.close(filetype);
    status = H5T.close(memtype);
    return dset;
}
// Demo writer: creates an H5 file containing a 1-D dataset of 'count' mData
// records with a descending-ramp fill pattern.
private void WriteData()
{
    Console.WriteLine("Creating H5 file {0}...", filename);
    // Rank is the number of dimensions of the data array.
    const int RANK = 1;

    // Create an HDF5 file.
    // The enumeration type H5F.CreateMode provides only the legal
    // creation modes. Missing H5Fcreate parameters are provided
    // with default values.
    H5FileId fileId = H5F.create(filename, H5F.CreateMode.ACC_TRUNC);

    // Prepare to create a data space for writing a 1-dimensional
    // signed integer array.
    long[] dims = new long[RANK];
    dims[0] = count;

    // Put descending ramp data in an array so that we can
    // write it to the file.
    mData[] dset_data = new mData[count];
    for (int i = 0; i < count; i++)
    {
        dset_data[i] = new mData(i + 80, i + 40, i + 1);
    }

    // Create a data space to accommodate our 1-dimensional array.
    // The resulting H5DataSpaceId will be used to create the
    // data set.
    H5DataSpaceId spaceId = H5S.create_simple(RANK, dims);

    // Create a copy of a standard data type. We will use the
    // resulting H5DataTypeId to create the data set. We could
    // have used the HST.H5Type data directly in the call to
    // H5D.create, but this demonstrates the use of H5T.copy
    // and the use of a H5DataTypeId in H5D.create.
    // NOTE(review): STD_REF_OBJ is an object-reference type — writing mData
    // (three ints) through it looks like a type mismatch; confirm intent.
    H5DataTypeId typeId = H5T.copy(H5T.H5Type.STD_REF_OBJ);

    // Find the size of the type
    // NOTE(review): typeSize is unused, and typeId/spaceId are never closed.
    int typeSize = H5T.getSize(typeId);

    // Create the data set.
    H5DataSetId dataSetId = H5D.create(fileId, dataSetName, typeId, spaceId);

    // Write the integer data to the data set.
    H5D.write(dataSetId, new H5DataTypeId(H5T.H5Type.STD_REF_OBJ),
              new H5Array<mData>(dset_data));

    H5D.close(dataSetId);
    H5F.close(fileId);
    Console.WriteLine("H5 file {0} created successfully!", filename);
}
/// <summary>
/// Writes an ASCII string as a rank-1 FORTRAN_S1 (space-padded) dataset of one
/// character per element; in memory each element is a 2-byte null-terminated slot.
/// </summary>
/// <returns>The status code from H5D.write.</returns>
public static int WriteAsciiString(hid_t groupId, string name, string str)
{
    // BUG FIX: the original created a NULL and a SCALAR dataspace here that
    // were never used and never closed (two leaked handles per call).

    // store as H5T.FORTRAN_S1 -> space padding
    int strLength = str.Length;
    ulong[] dims = { (ulong)strLength };

    /* Create the dataset. */
    //name = ToHdf5Name(name);
    var spaceId = H5S.create_simple(1, dims, null);
    var datasetId = H5D.create(groupId, name, H5T.FORTRAN_S1, spaceId);
    H5S.close(spaceId);

    // we write from C and must provide null-terminated strings:
    // each character occupies 2 bytes, char then implicit NUL.
    byte[] wdata = new byte[strLength * 2];
    for (int i = 0; i < strLength; ++i)
    {
        wdata[2 * i] = Convert.ToByte(str[i]);
    }

    var memId = H5T.copy(H5T.C_S1);
    H5T.set_size(memId, new IntPtr(2));
    GCHandle hnd = GCHandle.Alloc(wdata, GCHandleType.Pinned);
    int result = H5D.write(datasetId, memId, H5S.ALL, H5S.ALL, H5P.DEFAULT,
                           hnd.AddrOfPinnedObject());
    hnd.Free();
    H5T.close(memId);
    H5D.close(datasetId);
    return result;
}
/// <summary>
/// Writes a sequence of strings as a 1-D variable-length UTF-8 string dataset
/// named <paramref name="name"/> under <paramref name="groupId"/>.
/// </summary>
/// <returns>The status code from H5D.write.</returns>
public static int WriteStrings(hid_t groupId, string name, IEnumerable<string> strs, string datasetName = null)
{
    // Variable-length UTF-8 string type, space padded.
    hid_t stringType = H5T.create(H5T.class_t.STRING, H5T.VARIABLE);
    H5T.set_cset(stringType, H5T.cset_t.UTF8);
    H5T.set_strpad(stringType, H5T.str_t.SPACEPAD);

    int total = strs.Count();
    hid_t spaceId = H5S.create_simple(1, new ulong[] { (ulong)total }, null);
    var datasetId = H5D.create(groupId, name, stringType, spaceId);

    // Variable-length strings are passed to the library as an array of
    // pointers: pin each encoded string and record its address.
    var pins = new GCHandle[total];
    var pointers = new IntPtr[total];
    int idx = 0;
    foreach (string s in strs)
    {
        pins[idx] = GCHandle.Alloc(Encoding.UTF8.GetBytes(s), GCHandleType.Pinned);
        pointers[idx] = pins[idx].AddrOfPinnedObject();
        idx++;
    }

    // Pin the pointer array itself for the duration of the native call.
    var pointerPin = GCHandle.Alloc(pointers, GCHandleType.Pinned);
    int result = H5D.write(datasetId, stringType, H5S.ALL, H5S.ALL,
                           H5P.DEFAULT, pointerPin.AddrOfPinnedObject());
    pointerPin.Free();
    for (int i = 0; i < total; ++i)
    {
        pins[i].Free();
    }

    H5D.close(datasetId);
    H5S.close(spaceId);
    H5T.close(stringType);
    return result;
}
/// <summary>
/// Writes a 3-D ushort data cube to /data/DEsoftware/data, tags the group with
/// the required emd_group_type=1 attribute, and writes a double array for the
/// first dimension to /data/DEsoftware/dim1.
/// </summary>
public static void WriteDataCube(H5FileId fileId, UInt16[,,] datacube)
{
    H5GroupId dataGroup = H5G.create(fileId, "/data");
    H5GroupId dataSubGroup = H5G.create(dataGroup, "DEsoftware");

    long[] dims = new long[3]
    {
        datacube.GetLength(0),
        datacube.GetLength(1),
        datacube.GetLength(2)
    };
    H5DataSpaceId spaceId = H5S.create_simple(3, dims);
    H5DataTypeId typeId = H5T.copy(H5T.H5Type.NATIVE_USHORT);
    H5DataSetId dataSetId = H5D.create(dataSubGroup, "data", typeId, spaceId);

    // create attribute emd_group_type for dataSubGroup, which is required to have value 1.
    // BUG FIX: the original created the attribute type/space ids inline and
    // leaked them; keep them in locals and close them.
    int[] AttArray = new int[1] { 1 };
    long[] attdims = new long[1] { AttArray.Length };
    H5DataSpaceId attSpaceId = H5S.create_simple(1, attdims);
    H5DataTypeId attTypeId = H5T.copy(H5T.H5Type.NATIVE_UCHAR);
    H5AttributeId attributeId = H5A.create(dataSubGroup, "emd_group_type", attTypeId, attSpaceId);
    H5DataTypeId attWriteTypeId = H5T.copy(H5T.H5Type.NATIVE_INT);
    H5A.write(attributeId, attWriteTypeId, new H5Array<int>(AttArray));
    H5A.close(attributeId);
    H5T.close(attWriteTypeId);
    H5T.close(attTypeId);
    H5S.close(attSpaceId);

    // write datacube to "data", which contains whole 3D datacube
    H5D.write<ushort>(dataSetId, typeId, new H5Array<ushort>(datacube));

    // BUG FIX: the original reassigned spaceId/typeId/dataSetId for the dim1
    // dataset without closing the first set of handles (leak).
    H5D.close(dataSetId);
    H5T.close(typeId);
    H5S.close(spaceId);

    // Dimension-1 scale array.
    long[] dim1 = new long[1] { datacube.GetLength(0) };
    double[] dimarray = new double[datacube.GetLength(0)];
    // BUG FIX: the original passed rank 3 with a length-1 dims array
    // (H5S.create_simple(3, dim1)); this dataspace is rank 1.
    spaceId = H5S.create_simple(1, dim1);
    typeId = H5T.copy(H5T.H5Type.NATIVE_DOUBLE);
    dataSetId = H5D.create(dataSubGroup, "dim1", typeId, spaceId);
    H5D.write<double>(dataSetId, typeId, new H5Array<double>(dimarray));

    H5S.close(spaceId);
    H5T.close(typeId);
    H5D.close(dataSetId);
    H5G.close(dataSubGroup);
    H5G.close(dataGroup);
}
// Test-class fixture (HDF5 1.10 only): creates a SWMR test file shared across
// the tests, link-creation property lists (one ASCII, one UTF-8), and a sample
// chunked 6x6 int dataset "int6x6" flushed to disk.
public static void ClassInit(TestContext testContext)
{
#if HDF5_VER1_10
    // create test files which persists across file tests
    m_v3_class_file = Utilities.H5TempFileSWMR(ref m_v3_class_file_name);
    Assert.IsTrue(m_v3_class_file >= 0);

    // Link-creation property list that auto-creates intermediate groups.
    m_lcpl = H5P.create(H5P.LINK_CREATE);
    Assert.IsTrue(H5P.set_create_intermediate_group(m_lcpl, 1) >= 0);

    // Same, but with UTF-8 link-name encoding.
    m_lcpl_utf8 = H5P.copy(m_lcpl);
    Assert.IsTrue(
        H5P.set_char_encoding(m_lcpl_utf8, H5T.cset_t.UTF8) >= 0);

    // create a sample dataset
    hsize_t[] dims = { 6, 6 };
    hsize_t[] maxdims = { 6, H5S.UNLIMITED };
    hsize_t[] chunk_dims = { 2, 5 };
    int[] cbuf = new int[36];

    hid_t dsp = H5S.create_simple(2, dims, maxdims);
    Assert.IsTrue(dsp >= 0);

    hid_t dcpl = H5P.create(H5P.DATASET_CREATE);
    Assert.IsTrue(dcpl >= 0);
    // Chunking is required because the second dimension is extendible.
    Assert.IsTrue(H5P.set_chunk(dcpl, 2, chunk_dims) >= 0);

    hid_t dst = H5D.create(m_v3_class_file, "int6x6", H5T.NATIVE_INT, dsp,
                           H5P.DEFAULT, dcpl);
    Assert.IsTrue(dst >= 0);

    GCHandle hnd = GCHandle.Alloc(cbuf, GCHandleType.Pinned);
    Assert.IsTrue(H5D.write(dst, H5T.NATIVE_INT, H5S.ALL, H5S.ALL,
                            H5P.DEFAULT, hnd.AddrOfPinnedObject()) >= 0);
    hnd.Free();

    Assert.IsTrue(H5D.flush(dst) >= 0);
    Assert.IsTrue(H5D.close(dst) >= 0);
    Assert.IsTrue(H5P.close(dcpl) >= 0);
    Assert.IsTrue(H5S.close(dsp) >= 0);
#endif
}
// Writes three variable-length UTF-8 strings as a 1-D dataset into the
// v0-superblock test file, exercising H5D.write with an array of pointers.
public void H5DwriteTest2()
{
    ArrayList utf8strings = new ArrayList()
    {
        "Ελληνικά", "日本語", "العربية"
    };

    // Variable-length UTF-8 string type, space padded.
    hid_t dtype = H5T.create(H5T.class_t.STRING, H5T.VARIABLE);
    Assert.IsTrue(H5T.set_cset(dtype, H5T.cset_t.UTF8) >= 0);
    Assert.IsTrue(H5T.set_strpad(dtype, H5T.str_t.SPACEPAD) >= 0);

    hid_t dspace = H5S.create_simple(1,
        new hsize_t[] { (hsize_t)utf8strings.Count }, null);

    hid_t dset = H5D.create(m_v0_test_file, "dset", dtype, dspace);
    Assert.IsTrue(dset >= 0);

    // Variable-length strings go to the library as an array of pointers:
    // pin each encoded string and collect its address.
    GCHandle[] hnds = new GCHandle[utf8strings.Count];
    IntPtr[] wdata = new IntPtr[utf8strings.Count];
    for (int i = 0; i < utf8strings.Count; ++i)
    {
        hnds[i] = GCHandle.Alloc(
            Encoding.UTF8.GetBytes((string)utf8strings[i]),
            GCHandleType.Pinned);
        wdata[i] = hnds[i].AddrOfPinnedObject();
    }

    // Pin the pointer array itself for the native call.
    GCHandle hnd = GCHandle.Alloc(wdata, GCHandleType.Pinned);
    Assert.IsTrue(H5D.write(dset, dtype, H5S.ALL, H5S.ALL,
                            H5P.DEFAULT, hnd.AddrOfPinnedObject()) >= 0);
    hnd.Free();
    for (int i = 0; i < utf8strings.Count; ++i)
    {
        hnds[i].Free();
    }

    Assert.IsTrue(H5D.close(dset) >= 0);
    Assert.IsTrue(H5S.close(dspace) >= 0);
    Assert.IsTrue(H5T.close(dtype) >= 0);
}