public void H5SdecodeTest1() { hsize_t[] dims = { 1, 2, 3 }; hid_t space = H5S.create_simple(dims.Length, dims, dims); Assert.IsTrue(space > 0); size_t nalloc = new IntPtr(); Assert.IsTrue(H5S.encode(space, null, ref nalloc) >= 0); byte[] buf = new byte [nalloc.ToInt32()]; Assert.IsTrue(H5S.encode(space, buf, ref nalloc) >= 0); Assert.IsTrue(H5S.close(space) >= 0); space = H5S.decode(buf); Assert.IsTrue(space >= 0); Assert.IsTrue(H5S.get_simple_extent_ndims(space) == dims.Length); hsize_t[] tdims = new hsize_t[dims.Length]; Assert.IsTrue( H5S.get_simple_extent_dims(space, tdims, null) == dims.Length); for (int i = 0; i < dims.Length; ++i) { Assert.IsTrue(tdims[i] == dims[i]); } Assert.IsTrue(H5S.close(space) >= 0); }
/// <summary>
/// Opens the dataspace of a dataset and reads its rank and current dimension sizes.
/// </summary>
/// <returns>An <see cref="Hdf5Dataspace"/> describing the dataset's extent.</returns>
public static Hdf5Dataspace GetDataspace(Hdf5Identifier _datasetId)
{
    var dataspaceId = H5D.get_space(_datasetId.Value).ToId();

    int rank = H5S.get_simple_extent_ndims(dataspaceId.Value);

    ulong[] dims = new ulong[rank];
    ulong[] maxDims = new ulong[rank];
    H5S.get_simple_extent_dims(dataspaceId.Value, dims, maxDims);

    Hdf5Dataspace dataspace = new Hdf5Dataspace
    {
        Id = dataspaceId,
        NumberOfDimensions = rank
    };

    for (int i = 0; i < dims.Length; i++)
    {
        Hdf5DimensionProperty property = new Hdf5DimensionProperty
        {
            CurrentSize = dims[i],
            //MaximumSize = maxDims[i]
        };
        dataspace.DimensionProperties.Add(property);
    }

    H5S.close(dataspaceId.Value);

    return dataspace;
}
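A possible call site for the helper above, sketched under the assumption that `datasetId` is an Hdf5Identifier for an already-opened dataset; the variable names are placeholders, not from the original:

// Print the rank and the current size of each dimension.
Hdf5Dataspace space = GetDataspace(datasetId);
Console.WriteLine($"rank = {space.NumberOfDimensions}");
foreach (Hdf5DimensionProperty dim in space.DimensionProperties)
{
    Console.WriteLine($"current size = {dim.CurrentSize}");
}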
public Dictionary <string, string> TryReadDataTable(string datasetName) { Dictionary <string, string> result = null; var subDsDic = ds.GetSubDatasets(); if (subDsDic.Count > 0) { H5ID h5FileId = H5F.open(fileName, H5F.ACC_RDONLY); H5ID datasetId = H5D.open(h5FileId, datasetName); H5ID typeId = H5D.get_type(datasetId); H5ID spaceId = H5D.get_space(datasetId); if (H5T.get_class(typeId) == H5T.class_t.COMPOUND) { int numCount = H5T.get_nmembers(typeId); var size = H5T.get_size(typeId); byte[] buffer = new byte[size.ToInt32()]; GCHandle hnd = GCHandle.Alloc(buffer, GCHandleType.Pinned); int ndims = H5S.get_simple_extent_ndims(spaceId); if (ndims == 1) { result = new Dictionary <string, string>(); H5D.read(datasetId, typeId, H5S.ALL, H5S.ALL, H5P.DEFAULT, hnd.AddrOfPinnedObject()); for (uint i = 0; i < numCount; i++) { string name = Marshal.PtrToStringAnsi(H5T.get_member_name(typeId, i)); int offset = H5T.get_member_offset(typeId, i).ToInt32(); H5ID subTypeId = H5T.get_member_type(typeId, i); H5T.class_t typeClass = H5T.get_member_class(typeId, i); string value = ReadBuffer(buffer, offset, typeClass, subTypeId); result.Add(name, value); H5T.close(subTypeId); } } hnd.Free(); } if (spaceId != 0) { H5S.close(spaceId); } if (typeId != 0) { H5T.close(typeId); } if (datasetId != 0) { H5D.close(datasetId); } if (h5FileId != 0) { H5F.close(h5FileId); } } return(result); }
private static double[,] ReadDataArray(hid_t fileLoc, string name, bool transpose = false) { hid_t dset = H5D.open(fileLoc, name); hid_t fspace = H5D.get_space(dset); hid_t count = H5S.get_simple_extent_ndims(fspace); hid_t type = H5D.get_type(dset); hsize_t[] dims = new hsize_t[count]; hsize_t[] maxdims = new hsize_t[count]; H5S.get_simple_extent_dims(fspace, dims, maxdims); H5S.close(fspace); byte[] rdata = new byte[dims[0] * dims[1] * 8]; hid_t mem_type = H5T.copy(H5T.NATIVE_DOUBLE); H5T.set_size(mem_type, new IntPtr(8)); GCHandle hnd = GCHandle.Alloc(rdata, GCHandleType.Pinned); H5D.read(dset, mem_type, H5S.ALL, H5S.ALL, H5P.DEFAULT, hnd.AddrOfPinnedObject()); hnd.Free(); H5T.close(mem_type); if (transpose) { double[,] val = new double[dims[1], dims[0]]; int cnt = 0; for (int i = 0; i < (int)dims[0]; i++) { for (int j = 0; j < (int)dims[1]; j++) { val[j, i] = BitConverter.ToDouble(rdata, cnt * 8); cnt++; } } return(val); } else { double[,] val = new double[dims[0], dims[1]]; int cnt = 0; for (int i = 0; i < (int)dims[0]; i++) { for (int j = 0; j < (int)dims[1]; j++) { val[i, j] = BitConverter.ToDouble(rdata, cnt * 8); cnt++; } } return(val); } }
public void H5Sget_simple_extent_ndimsTest3() { hid_t space = H5S.create(H5S.class_t.SCALAR); Assert.IsTrue(space >= 0); Assert.IsTrue(H5S.get_simple_extent_ndims(space) == 0); Assert.IsTrue(H5S.close(space) >= 0); }
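For comparison, a null dataspace should also report a rank of 0. A sketch of such a test, written in the same style as the ones shown here (not part of the original suite):

public void H5Sget_simple_extent_ndimsNullSpaceSketch()
{
    // Assumption: a NULL dataspace, like a SCALAR one, has no dimensions.
    hid_t space = H5S.create(H5S.class_t.NULL);
    Assert.IsTrue(space >= 0);
    Assert.IsTrue(H5S.get_simple_extent_ndims(space) == 0);
    Assert.IsTrue(H5S.close(space) >= 0);
}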
public void H5Sget_simple_extent_ndimsTest1() { hsize_t[] dims = { 1, 2, 3 }; hid_t space = H5S.create_simple(dims.Length, dims, dims); Assert.IsTrue(space >= 0); Assert.IsTrue(H5S.get_simple_extent_ndims(space) == 3); Assert.IsTrue(H5S.close(space) >= 0); }
/// <summary>
/// WARNING: ADVANCED USE ONLY!! Loads a 2D generic dataset from an H5 file.
/// The generic loaders only load data in non-Unity-friendly types, such as bytes, uints, longs etc...
/// You'll have to know the correct cast to retrieve usable data.
///
/// Created with help from https://github.com/LiorBanai/HDF5-CSharp/blob/master/HDF5-CSharp/Hdf5Dataset.cs
/// </summary>
/// <param name="filePath"></param>
/// <param name="datasetName"></param>
/// <typeparam name="T"></typeparam>
/// <returns></returns>
/// <exception cref="FileNotFoundException"></exception>
static T[,] Load2DDataset<T>(string filePath, string datasetName)
{
    if (!File.Exists(filePath))
    {
        throw new FileNotFoundException($"Loading dataset {datasetName} from file that doesn't exist {filePath}");
    }
    long fileId = H5F.open(filePath, H5F.ACC_RDONLY);
    T[,] resultArray = new T[2, 2];
    try
    {
        ulong[] start = { 0, 0 };
        ulong[] count = { 0, 0 };
        long datasetId = H5D.open(fileId, datasetName);
        var datatype = H5D.get_type(datasetId);
        var spaceId = H5D.get_space(datasetId);
        int rank = H5S.get_simple_extent_ndims(spaceId);
        ulong[] maxDims = new ulong[rank];
        ulong[] dims = new ulong[rank];
        H5S.get_simple_extent_dims(spaceId, dims, maxDims);
        count[0] = dims[0];
        count[1] = dims[1];

        // Define file hyperslab.
        long status = H5S.select_hyperslab(spaceId, H5S.seloper_t.SET, start, null, count, null);

        // Define the memory dataspace.
        resultArray = new T[dims[0], dims[1]];
        var memId = H5S.create_simple(rank, dims, null);

        // Define memory hyperslab.
        status = H5S.select_hyperslab(memId, H5S.seloper_t.SET, start, null, count, null);

        // Read data from hyperslab in the file into the hyperslab in
        // memory and display.
        GCHandle handle = GCHandle.Alloc(resultArray, GCHandleType.Pinned);
        try
        {
            H5D.read(datasetId, datatype, memId, spaceId, H5P.DEFAULT, handle.AddrOfPinnedObject());
        }
        finally
        {
            handle.Free();
            H5S.close(memId);
            H5S.close(spaceId);
            H5T.close(datatype);   // a datatype id is closed with H5T, not H5D
            H5D.close(datasetId);
        }
    }
    finally
    {
        H5F.close(fileId);
    }
    return resultArray;
}
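A hedged usage sketch for the loader above; the file path, dataset path, and the ushort element type are illustrative assumptions, not taken from the original:

// Hypothetical call site: load a 2-D dataset stored as 16-bit unsigned integers
// and cast one element to a Unity-friendly float.
ushort[,] raw = Load2DDataset<ushort>("Assets/Data/sample.h5", "/image/frame0");
float firstPixel = raw[0, 0];
Debug.Log($"dims = {raw.GetLength(0)} x {raw.GetLength(1)}, first = {firstPixel}");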
public static int GetDataSpaceRank(hid_t space) { var rank = H5S.get_simple_extent_ndims(space); if (rank < 0) { throw new HDF5Exception("Failed to get rank of data set."); } return(rank); }
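A minimal usage sketch for the helper above, assuming `datasetId` is a placeholder for an already-opened dataset:

// Query the rank of a dataset's dataspace and always release the space id.
hid_t spaceId = H5D.get_space(datasetId);
try
{
    int rank = GetDataSpaceRank(spaceId);
    Console.WriteLine($"dataset rank = {rank}");
}
finally
{
    H5S.close(spaceId);
}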
static void ReadFile(string filePath) { var file = H5F.open(filePath, H5F.ACC_RDONLY); var dataSet = H5D.open(file, "/group/dataset"); var dataSpace = H5D.get_space(dataSet); var rank = H5S.get_simple_extent_ndims(dataSpace); if (rank == 2) { var dims = new ulong[2]; H5S.get_simple_extent_dims(dataSpace, dims, null); var data = new int[dims[0], dims[1]]; H5D.read(dataSet, H5T.NATIVE_INT, H5S.ALL, H5S.ALL, H5P.DEFAULT, new PinnedObject(data)); for (int i = 0; i < data.GetLength(0); ++i) { for (int j = 0; j < data.GetLength(1); ++j) { Write($"{data[i,j],3}"); } WriteLine(); } } H5S.close(dataSpace); var doubleAttribute = H5A.open(dataSet, "double"); #if false double pi = 0.0; var handle = GCHandle.Alloc(pi, GCHandleType.Pinned); H5A.read(doubleAttribute, H5T.NATIVE_DOUBLE, handle.AddrOfPinnedObject()); handle.Free(); WriteLine($"PI = {pi}"); #else var values = new double[1]; H5A.read(doubleAttribute, H5T.NATIVE_DOUBLE, new PinnedObject(values)); WriteLine($"PI = {values[0]}"); #endif H5A.close(doubleAttribute); WriteLine($"string: {ReadStringAttribute(dataSet, "string")}"); WriteLine($"string-ascii: {ReadStringAttribute(dataSet, "string-ascii")}"); WriteLine($"string-vlen: {ReadStringAttribute(dataSet, "string-vlen")}"); H5D.close(dataSet); H5F.close(file); }
/// <summary>
/// Reads an n-dimensional dataset.
/// </summary>
/// <typeparam name="T">Generic parameter: string or a primitive type</typeparam>
/// <param name="groupId">id of the group. Can also be a file Id</param>
/// <param name="name">name of the dataset</param>
/// <returns>The n-dimensional dataset</returns>
public static Array ReadDatasetToArray<T>(hid_t groupId, string name) //where T : struct
{
    var datatype = GetDatatype(typeof(T));

    var datasetId = H5D.open(groupId, name);
    var spaceId = H5D.get_space(datasetId);
    int rank = H5S.get_simple_extent_ndims(spaceId);
    long count = H5S.get_simple_extent_npoints(spaceId);
    Array dset;
    Type type = typeof(T);
    if (rank >= 0 && count >= 0)
    {
        int rankChunk;
        ulong[] maxDims = new ulong[rank];
        ulong[] dims = new ulong[rank];
        ulong[] chunkDims = new ulong[rank];
        hid_t memId = H5S.get_simple_extent_dims(spaceId, dims, maxDims);
        long[] lengths = dims.Select(d => Convert.ToInt64(d)).ToArray();
        dset = Array.CreateInstance(type, lengths);
        var typeId = H5D.get_type(datasetId);
        var mem_type = H5T.copy(datatype);
        if (datatype == H5T.C_S1)
        {
            H5T.set_size(datatype, new IntPtr(2));
        }
        var propId = H5D.get_create_plist(datasetId);
        if (H5D.layout_t.CHUNKED == H5P.get_layout(propId))
        {
            rankChunk = H5P.get_chunk(propId, rank, chunkDims);
        }
        memId = H5S.create_simple(rank, dims, maxDims);
        GCHandle hnd = GCHandle.Alloc(dset, GCHandleType.Pinned);
        H5D.read(datasetId, datatype, memId, spaceId, H5P.DEFAULT, hnd.AddrOfPinnedObject());
        hnd.Free();
    }
    else
    {
        dset = Array.CreateInstance(type, new long[1] { 0 });
    }
    H5D.close(datasetId);
    H5S.close(spaceId);
    return dset;
}
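A usage sketch for the reader above; the group id, dataset name, and double element type are assumptions for illustration:

// Read an n-dimensional double dataset and inspect its shape.
Array data = ReadDatasetToArray<double>(groupId, "measurements");
Console.WriteLine($"rank = {data.Rank}");
for (int d = 0; d < data.Rank; d++)
{
    Console.WriteLine($"dim {d}: {data.GetLength(d)}");
}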
static int[] GetDatasetDimensions(long spaceID) { int numberOfDimensions = H5S.get_simple_extent_ndims(spaceID); int[] dimensions = new int[0]; if (numberOfDimensions >= 0) { ulong[] dims = new ulong[numberOfDimensions]; H5S.get_simple_extent_dims(spaceID, dims, MaxDimensions); dimensions = ConvertDimensionsToIntegers(dims); } return(dimensions); }
public static IEnumerable <T> ReadCompounds <T>(hid_t groupId, string name) where T : struct { Type type = typeof(T); hid_t typeId = 0; // open dataset var datasetId = H5D.open(groupId, name); typeId = CreateType(type); var compoundSize = Marshal.SizeOf(type); /* * Get dataspace and allocate memory for read buffer. */ var spaceId = H5D.get_space(datasetId); int rank = H5S.get_simple_extent_ndims(spaceId); ulong[] dims = new ulong[rank]; var ndims = H5S.get_simple_extent_dims(spaceId, dims, null); int rows = Convert.ToInt32(dims[0]); byte[] bytes = new byte[rows * compoundSize]; // Read the data. GCHandle hnd = GCHandle.Alloc(bytes, GCHandleType.Pinned); IntPtr hndAddr = hnd.AddrOfPinnedObject(); H5D.read(datasetId, typeId, spaceId, H5S.ALL, H5P.DEFAULT, hndAddr); int counter = 0; IEnumerable <T> strcts = Enumerable.Range(1, rows).Select(i => { byte[] select = new byte[compoundSize]; Array.Copy(bytes, counter, select, 0, compoundSize); T s = fromBytes <T>(select); counter = counter + compoundSize; return(s); }); /* * Close and release resources. */ H5D.vlen_reclaim(typeId, spaceId, H5P.DEFAULT, hndAddr); hnd.Free(); H5D.close(datasetId); H5S.close(spaceId); H5T.close(typeId); return(strcts); }
public static T[,] ReadDataset <T>(int groupId, string name, ulong beginIndex, ulong endIndex) where T : struct { ulong[] start = { 0, 0 }, stride = null, count = { 0, 0 }, block = null, offsetOut = new ulong[] { 0, 0 }; var datatype = GetDatatype(typeof(T)); name = ToHdf5Name(name); var datasetId = H5D.open(groupId, name); var spaceId = H5D.get_space(datasetId); int rank = H5S.get_simple_extent_ndims(spaceId); ulong[] maxDims = new ulong[rank]; ulong[] dims = new ulong[rank]; ulong[] chunkDims = new ulong[rank]; var memId_n = H5S.get_simple_extent_dims(spaceId, dims, maxDims); start[0] = beginIndex; start[1] = 0; count[0] = endIndex - beginIndex; count[1] = dims[1]; var status = H5S.select_hyperslab(spaceId, H5S.seloper_t.SET, start, stride, count, block); // Define the memory dataspace. T[,] dset = new T[count[0], count[1]]; var memId = H5S.create_simple(rank, count, null); // Define memory hyperslab. status = H5S.select_hyperslab(memId, H5S.seloper_t.SET, offsetOut, null, count, null); /* * Read data from hyperslab in the file into the hyperslab in * memory and display. */ GCHandle hnd = GCHandle.Alloc(dset, GCHandleType.Pinned); H5D.read(datasetId, datatype, memId, spaceId, H5P.DEFAULT, hnd.AddrOfPinnedObject()); hnd.Free(); H5D.close(datasetId); H5S.close(spaceId); H5S.close(memId); return(dset); }
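A sketch of how the row-range overload above might be called, assuming (as the hyperslab count suggests) that endIndex is exclusive; the group id, dataset name, and indices are placeholders:

// Read rows [100, 200) of a 2-D float dataset.
float[,] block = ReadDataset<float>(groupId, "samples", 100, 200);
Console.WriteLine($"read {block.GetLength(0)} rows x {block.GetLength(1)} columns");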
public float[] TryReadFactor(AbstractWarpDataset ds, string datasetName) { string dsPath = (ds.hdfOperator as Hdf5Operator).GetDatasetNames.Where(t => t.Contains(datasetName)) .FirstOrDefault(); float[] factor = null; int h5FileId = H5F.open(ds.fileName, H5F.ACC_RDONLY); int datasetId = H5D.open(h5FileId, dsPath); int typeId = H5D.get_type(datasetId); int spaceId = H5D.get_space(datasetId); if (H5T.get_class(typeId) == H5T.class_t.FLOAT) { var size = H5T.get_size(typeId); int rank = H5S.get_simple_extent_ndims(spaceId); ulong[] dims = new ulong[rank]; int err = H5S.get_simple_extent_dims(spaceId, dims, null); factor = new float[dims[0]]; GCHandle hnd = GCHandle.Alloc(factor, GCHandleType.Pinned); H5D.read(datasetId, typeId, H5S.ALL, H5S.ALL, H5P.DEFAULT, hnd.AddrOfPinnedObject()); hnd.Free(); } if (spaceId != 0) { H5S.close(spaceId); } if (typeId != 0) { H5T.close(typeId); } if (datasetId != 0) { H5D.close(datasetId); } if (h5FileId != 0) { H5F.close(h5FileId); } return(factor); }
private static string ReadDataCharArray(hid_t fileLoc, string name)
{
    hid_t dset = H5D.open(fileLoc, name);
    hid_t fspace = H5D.get_space(dset);
    hid_t count = H5S.get_simple_extent_ndims(fspace);
    hid_t type = H5D.get_type(dset);

    hsize_t[] dims = new hsize_t[count];
    hsize_t[] maxdims = new hsize_t[count];
    H5S.get_simple_extent_dims(fspace, dims, maxdims);
    H5S.close(fspace);

    byte[] rdata = new byte[dims[0]];
    hid_t mem_type = H5T.copy(type);
    H5T.set_size(mem_type, new IntPtr(1));
    GCHandle hnd = GCHandle.Alloc(rdata, GCHandleType.Pinned);
    H5D.read(dset, mem_type, H5S.ALL, H5S.ALL, H5P.DEFAULT, hnd.AddrOfPinnedObject());
    hnd.Free();
    H5T.close(mem_type);

    // Each element was read as a single byte, so convert one byte per character.
    char[] val = new char[dims[0]];
    for (int i = 0; i < (int)dims[0]; i++)
    {
        val[i] = (char)rdata[i];
    }
    return new string(val);
}
public static IEnumerable ReadStrings(string filename, string dataset) { var f = H5F.open(filename, H5F.ACC_RDONLY); if (f < 0) { throw new Exception("Could not open file: " + filename); } var dset = H5D.open(f, Encoding.ASCII.GetBytes(dataset), H5P.DEFAULT); if (dset < 0) { throw new Exception("Could not open dataset: " + dataset); } var filetype = H5D.get_type(dset); var sdim = H5T.get_size(filetype) + 1; var space = H5D.get_space(dset); var ndims = H5S.get_simple_extent_ndims(space); ulong[] dims = new ulong[ndims]; H5S.get_simple_extent_dims(space, dims, null); var memtype = H5T.copy(H5T.C_S1); var status = H5T.set_size(memtype, sdim); int len = (int)(dims[0] * (ulong)sdim * SIZEOF_CHAR); byte[] buffer = new byte[len]; IntPtr ptr = Marshal.AllocHGlobal(len); status = H5D.read(dset, memtype, H5S.ALL, H5S.ALL, H5P.DEFAULT, ptr); Marshal.Copy(ptr, buffer, 0, len); Marshal.FreeHGlobal(ptr); string s = Encoding.ASCII.GetString(buffer); return(s.Split(new char[] { '\0' }, StringSplitOptions.RemoveEmptyEntries)); }
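A usage sketch for the fixed-length string reader above; the file and dataset names are placeholders:

// Print every fixed-length string stored in the dataset.
foreach (string s in ReadStrings("example.h5", "names"))
{
    Console.WriteLine(s);
}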
public static T[,] ReadDataset<T>(int groupId, string name) where T : struct
{
    var datatype = GetDatatype(typeof(T));

    name = ToHdf5Name(name);

    var datasetId = H5D.open(groupId, name);
    var spaceId = H5D.get_space(datasetId);
    int rank = H5S.get_simple_extent_ndims(spaceId);
    long count = H5S.get_simple_extent_npoints(spaceId);
    int rankChunk;
    ulong[] maxDims = new ulong[rank];
    ulong[] dims = new ulong[rank];
    ulong[] chunkDims = new ulong[rank];
    var memId = H5S.get_simple_extent_dims(spaceId, dims, maxDims);
    T[,] dset = new T[dims[0], dims[1]];
    var typeId = H5D.get_type(datasetId);
    var mem_type = H5T.copy(datatype);
    if (datatype == H5T.C_S1)
    {
        H5T.set_size(datatype, new IntPtr(2));
    }
    var propId = H5D.get_create_plist(datasetId);
    if (H5D.layout_t.CHUNKED == H5P.get_layout(propId))
    {
        rankChunk = H5P.get_chunk(propId, rank, chunkDims);
    }
    memId = H5S.create_simple(rank, dims, maxDims);
    GCHandle hnd = GCHandle.Alloc(dset, GCHandleType.Pinned);
    H5D.read(datasetId, datatype, memId, spaceId, H5P.DEFAULT, hnd.AddrOfPinnedObject());
    hnd.Free();
    H5T.close(typeId);   // a datatype id is closed with H5T, not H5D
    H5D.close(datasetId);
    H5S.close(spaceId);
    return dset;
}
private static double[] ReadDataVector(hid_t fileLoc, string name) { hid_t dset = H5D.open(fileLoc, name); hid_t fspace = H5D.get_space(dset); hid_t count = H5S.get_simple_extent_ndims(fspace); hid_t type = H5D.get_type(dset); hsize_t[] dims = new hsize_t[count]; hsize_t[] maxdims = new hsize_t[count]; H5S.get_simple_extent_dims(fspace, dims, maxdims); H5S.close(fspace); byte[] rdata = new byte[dims[0] * 8]; hid_t mem_type = H5T.copy(H5T.NATIVE_DOUBLE); H5T.set_size(mem_type, new IntPtr(8)); GCHandle hnd = GCHandle.Alloc(rdata, GCHandleType.Pinned); H5D.read(dset, mem_type, H5S.ALL, H5S.ALL, H5P.DEFAULT, hnd.AddrOfPinnedObject()); hnd.Free(); H5T.close(mem_type); double[] val = new double[dims[0]]; for (int i = 0; i < (int)dims[0]; i++) { val[i] = BitConverter.ToDouble(rdata, i * 8); } return(val); }
public static string ReadAsciiString(hid_t groupId, string name) { var datatype = H5T.FORTRAN_S1; //name = ToHdf5Name(name); var datasetId = H5D.open(groupId, name); var spaceId = H5D.get_space(datasetId); int rank = H5S.get_simple_extent_ndims(spaceId); ulong[] maxDims = new ulong[rank]; ulong[] dims = new ulong[rank]; ulong[] chunkDims = new ulong[rank]; var memId_n = H5S.get_simple_extent_dims(spaceId, dims, null); // we write from C and must provide null-terminated strings byte[] wdata = new byte[dims[0] * 2]; var memId = H5T.copy(H5T.C_S1); H5T.set_size(memId, new IntPtr(2)); //H5T.set_strpad(memId, H5T.str_t.NULLTERM); GCHandle hnd = GCHandle.Alloc(wdata, GCHandleType.Pinned); int resultId = H5D.read(datasetId, memId, H5S.ALL, H5S.ALL, H5P.DEFAULT, hnd.AddrOfPinnedObject()); hnd.Free(); wdata = wdata.Where((b, i) => i % 2 == 0). Select(b => (b == 0)?(byte)32:b).ToArray(); string result = Encoding.ASCII.GetString(wdata); H5T.close(memId); H5D.close(datasetId); return(result); }
public static Array ReadPrimitiveAttributes<T>(hid_t groupId, string name) //where T : struct
{
    Type type = typeof(T);
    var datatype = GetDatatype(type);

    var attributeId = H5A.open(groupId, name);
    var spaceId = H5A.get_space(attributeId);
    int rank = H5S.get_simple_extent_ndims(spaceId);
    ulong[] maxDims = new ulong[rank];
    ulong[] dims = new ulong[rank];
    hid_t memId = H5S.get_simple_extent_dims(spaceId, dims, maxDims);
    long[] lengths = dims.Select(d => Convert.ToInt64(d)).ToArray();
    Array attributes = Array.CreateInstance(type, lengths);

    var typeId = H5A.get_type(attributeId);
    var mem_type = H5T.copy(datatype);
    if (datatype == H5T.C_S1)
    {
        H5T.set_size(datatype, new IntPtr(2));
    }
    var propId = H5A.get_create_plist(attributeId);
    memId = H5S.create_simple(rank, dims, maxDims);
    GCHandle hnd = GCHandle.Alloc(attributes, GCHandleType.Pinned);
    H5A.read(attributeId, datatype, hnd.AddrOfPinnedObject());
    hnd.Free();
    H5T.close(typeId);   // the id returned by H5A.get_type is a datatype and is closed with H5T
    H5A.close(attributeId);
    H5S.close(spaceId);
    return attributes;
}
private List <float[]> ReadDataSetToSingle(AbstractWarpDataset srcbandpro, int[] bands) { List <float[]> datas = new List <float[]>(); var prjBands = PrjBandTable.GetPrjBands(srcbandpro); H5ID h5FileId = H5F.open(srcbandpro.fileName, H5F.ACC_RDONLY); foreach (int index in bands) { //Single[] data = new Single[srcSize.Width * srcSize.Height]; var bandIndex = prjBands[index - 1].DataSetIndex; string dsName = "CALChannel" + bandIndex.ToString("00"); H5ID datasetId = H5D.open(h5FileId, dsName); if (datasetId <= 0) { throw new ArgumentNullException(string.Format("FY4辐射定标,未找到名称为{0}的数据.", "CALChannel" + index.ToString("00"))); } H5ID typeId = H5D.get_type(datasetId); H5ID spaceId = H5D.get_space(datasetId); if (H5T.get_class(typeId) == H5T.class_t.FLOAT) { int rank = H5S.get_simple_extent_ndims(spaceId); ulong[] dims = new ulong[rank]; ulong[] maxDims = new ulong[rank]; H5S.get_simple_extent_dims(spaceId, dims, maxDims); float[] buffer = new float[dims[0]]; GCHandle hnd = GCHandle.Alloc(buffer, GCHandleType.Pinned); H5D.read(datasetId, typeId, H5S.ALL, H5S.ALL, H5P.DEFAULT, hnd.AddrOfPinnedObject()); hnd.Free(); if (buffer.Any(t => t > Math.Pow(10, 10) || t < -Math.Pow(10, 10))) { for (int i = 0; i < buffer.Length; i++) { var t = BitConverter.GetBytes(buffer[i]); Array.Reverse(t); buffer[i] = BitConverter.ToSingle(t, 0); } } datas.Add(buffer); } if (spaceId != 0) { H5S.close(spaceId); } if (typeId != 0) { H5T.close(typeId); } if (datasetId != 0) { H5D.close(datasetId); } } if (h5FileId != 0) { H5F.close(h5FileId); } return(datas); }
private static List <HDF5info> ScanInfo(hid_t gId, List <HDF5info> fields, string fullname) { IntPtr MAX_NAME = new IntPtr(1024); System.Text.StringBuilder group_name = new System.Text.StringBuilder(); System.Text.StringBuilder member_name = new System.Text.StringBuilder(); IntPtr len = H5I.get_name(gId, group_name, MAX_NAME); hsize_t nobj = new hsize_t(); H5G.get_num_objs(gId, ref nobj); for (int i = 0; i < (int)nobj; i++) { member_name = new System.Text.StringBuilder(); member_name.Capacity = 1024; IntPtr len2 = H5G.get_objname_by_idx(gId, (ulong)i, member_name, MAX_NAME); int objtype = H5G.get_objtype_by_idx(gId, (ulong)i); if (objtype == 0) //group { hid_t gId2 = H5G.open(gId, member_name.ToString()); fields = ScanInfo(gId2, fields, string.Format("{0}/{1}", fullname, member_name)); } else if (objtype == 1) //Object is a dataset. { HDF5info hDF5Info = new HDF5info(); hid_t dset = H5D.open(gId, member_name.ToString()); hid_t fspace = H5D.get_space(dset); hid_t count = H5S.get_simple_extent_ndims(fspace); hid_t type = H5D.get_type(dset); hDF5Info.HDFclass = getH5Tstring(type); hDF5Info.field = string.Format("{0}/{1}", fullname, member_name); if (H5T.get_class(type) == H5T.class_t.STRING) { hDF5Info.description = nirs.io.ReadDataString(gId, string.Format("{0}", member_name)); } else if (H5T.get_class(type) == H5T.class_t.FLOAT | H5T.get_class(type) == H5T.class_t.INTEGER) { hsize_t[] dims = new hsize_t[count]; hsize_t[] maxdims = new hsize_t[count]; H5S.get_simple_extent_dims(fspace, dims, maxdims); if (dims.Length == 1 & dims[0] == 1) { var val = nirs.io.ReadDataValue(gId, string.Format("{0}", member_name)); hDF5Info.description = string.Format("{0}", val); } else if (dims.Length == 1 & dims[0] > 1) { hDF5Info.description = string.Format("Vector <{0} x 1>", dims[0]); } else { hDF5Info.description = string.Format("Array <{0} x {1}>", dims[0], dims[1]); if (hDF5Info.field.Contains("dataTimeSeries") & dims[0] > dims[1]) { hDF5Info.description += " TRANSPOSE WARNING "; } if (hDF5Info.field.Contains("Pos") & dims[1] != 3) { hDF5Info.description += " TRANSPOSE WARNING "; } if (hDF5Info.field.Contains("stim") & hDF5Info.field.Contains("data") & dims[1] != 3) { hDF5Info.description += " TRANSPOSE WARNING "; } } } else { hDF5Info.description = ""; } fields.Add(hDF5Info); } } H5G.close(gId); return(fields); }
static void Main_Read(string[] args) { int DATA_ARRAY_LENGTH = 5; //var h5 = H5F.open(@"E:\HDF5\HDF5DotNet-src\examples\CSharpExample\CSharpExample1\table.h5", H5F.ACC_RDONLY); //var h5 = H5F.open(@"D:\test.h5", H5F.ACC_RDONLY); //var h5 = H5F.open(@"E:\HDF5\Hdf5DotnetTools-master\ToolTest\bin\Debug\table.h5", H5F.ACC_RDONLY); var h5 = H5F.open(@"E:\HDF5\test_gzip.h5", H5F.ACC_RDONLY); var dataset = H5D.open(h5, "trans_detail/20160929"); var spaceid = H5D.get_space(dataset); var npoints = H5S.get_simple_extent_npoints(spaceid); //var dims = H5S.get_simple_extent_dims(spaceid); int rank = H5S.get_simple_extent_ndims(spaceid); // 是不是不能用自己的type var dtype = H5D.get_type(dataset); var dtcls = H5T.get_class(dtype); var size = H5T.get_size(dtype); var sz = Marshal.SizeOf(typeof(ComType)); var dtype_n = H5T.get_nmembers(dtype); for (uint i = 0; i < dtype_n; ++i) { var x = H5T.get_member_name(dtype, i); var x4 = Marshal.PtrToStringAnsi(x); var y = H5T.get_member_type(dtype, i); var z = H5T.get_class(y); var x1 = H5T.get_member_offset(dtype, i); var x3 = H5T.get_size(y); Console.WriteLine(x4); Console.WriteLine(z); Console.WriteLine(x1); //var x2 = Marshal.OffsetOf(typeof(ComType), x4).ToInt32(); //Console.WriteLine(x2); Console.WriteLine(x3); } int ss1 = Marshal.SizeOf(typeof(ComType)); IntPtr p = Marshal.AllocHGlobal(ss1 * 11); H5D.read(dataset, dtype, H5S.ALL, H5S.ALL, H5P.DEFAULT, p); var s = Marshal.PtrToStructure(p, typeof(ComType)); Console.WriteLine(s); var s2 = Marshal.PtrToStructure(p + ss1, typeof(ComType)); Console.WriteLine(s2); var s3 = Marshal.PtrToStructure(p + ss1 * 4, typeof(ComType)); Console.WriteLine(s3); var s4 = Marshal.PtrToStructure(p + ss1 * 5, typeof(ComType)); Console.WriteLine(s4); var s6 = Marshal.PtrToStructure(p + ss1 * 10, typeof(ComType)); Console.WriteLine(s6); }
static void ReadFile(string filePath) { var file = H5F.open(filePath, H5F.ACC_RDONLY); IterateObjects(file); var group = H5G.open(file, "group"); IterateObjects(group); H5G.close(group); var dataSet = H5D.open(file, "/group/dataset"); IterateObjects(dataSet); var dataSpace = H5D.get_space(dataSet); var rank = H5S.get_simple_extent_ndims(dataSpace); if (rank == 2) { var dims = new ulong[2]; H5S.get_simple_extent_dims(dataSpace, dims, null); var data = new int[dims[0], dims[1]]; H5D.read(dataSet, H5T.NATIVE_INT, H5S.ALL, H5S.ALL, H5P.DEFAULT, new PinnedObject(data)); for (int i = 0; i < data.GetLength(0); ++i) { for (int j = 0; j < data.GetLength(1); ++j) { Write($"{data[i,j],3}"); } WriteLine(); } } H5S.close(dataSpace); var doubleAttribute = H5A.open(dataSet, "double"); #if true //double pi = 0.0; // Won't work object pi = 0.0; H5A.read(doubleAttribute, H5T.NATIVE_DOUBLE, new PinnedObject(pi)); WriteLine($"PI = {pi}"); #else var values = new double[1]; H5A.read(doubleAttribute, H5T.NATIVE_DOUBLE, new PinnedObject(values)); WriteLine($"PI = {values[0]}"); #endif H5A.close(doubleAttribute); WriteLine($"string: {ReadStringAttribute(dataSet, "string")}"); WriteLine($"string-ascii: {ReadStringAttribute(dataSet, "string-ascii")}"); WriteLine($"string-vlen: {ReadStringAttribute(dataSet, "string-vlen")}"); WriteLine($"boolean-8-bit-enum: {ReadEnumAttribute<Boolean>(dataSet, "boolean-8-bit-enum")}"); H5D.close(dataSet); H5F.close(file); }
public void H5Sget_simple_extent_ndimsTest4() { Assert.IsFalse( H5S.get_simple_extent_ndims(Utilities.RandomInvalidHandle()) >= 0); }
/// <summary>
/// Appends a dataset to an HDF5 file. If called for the first time, the dataset is created.
/// </summary>
/// <typeparam name="T">Generic parameter; only primitive types are allowed</typeparam>
/// <param name="groupId">id of the group. Can also be a file Id</param>
/// <param name="name">name of the dataset</param>
/// <param name="dset">The dataset</param>
/// <returns>status of the write method</returns>
public static hid_t AppendDataset<T>(hid_t groupId, string name, Array dset, ulong chunkX = 200) where T : struct
{
    var rank = dset.Rank;
    ulong[] dimsExtend = Enumerable.Range(0, rank).Select(i =>
    {
        return (ulong)dset.GetLength(i);
    }).ToArray();
    ulong[] maxDimsExtend = null;
    ulong[] dimsChunk = new ulong[] { chunkX }.Concat(dimsExtend.Skip(1)).ToArray();
    ulong[] zeros = Enumerable.Range(0, rank).Select(z => (ulong)0).ToArray();
    hid_t status, spaceId, datasetId;

    // name = ToHdf5Name(name);
    var datatype = GetDatatype(typeof(T));
    var typeId = H5T.copy(datatype);
    var datasetExists = H5L.exists(groupId, name) > 0;

    /* Create a new dataset within the file using chunk
     * creation properties. */
    if (!datasetExists)
    {
        spaceId = H5S.create_simple(dset.Rank, dimsExtend, maxDimsExtend);
        var propId = H5P.create(H5P.DATASET_CREATE);
        status = H5P.set_chunk(propId, rank, dimsChunk);
        datasetId = H5D.create(groupId, name, datatype, spaceId, H5P.DEFAULT, propId, H5P.DEFAULT);

        /* Write data to dataset */
        GCHandle hnd = GCHandle.Alloc(dset, GCHandleType.Pinned);
        status = H5D.write(datasetId, datatype, H5S.ALL, H5S.ALL, H5P.DEFAULT, hnd.AddrOfPinnedObject());
        hnd.Free();
        H5P.close(propId);
    }
    else
    {
        datasetId = H5D.open(groupId, name);
        spaceId = H5D.get_space(datasetId);
        var rank_old = H5S.get_simple_extent_ndims(spaceId);
        ulong[] maxDims = new ulong[rank_old];
        ulong[] dims = new ulong[rank_old];
        var memId1 = H5S.get_simple_extent_dims(spaceId, dims, maxDims);

        /* Query the existing chunk layout from the dataset creation property list. */
        var propId = H5D.get_create_plist(datasetId);
        ulong[] oldChunk = new ulong[rank_old];
        status = H5P.get_chunk(propId, rank_old, oldChunk);
        H5P.close(propId);

        /* Extend the dataset. */
        var size = new ulong[] { dims[0] + dimsExtend[0] }.Concat(dims.Skip(1)).ToArray();
        status = H5D.set_extent(datasetId, size);

        /* Select a hyperslab in extended portion of dataset */
        var filespaceId = H5D.get_space(datasetId);
        var offset = new ulong[] { dims[0] }.Concat(zeros.Skip(1)).ToArray();
        status = H5S.select_hyperslab(filespaceId, H5S.seloper_t.SET, offset, null, dimsExtend, null);

        /* Define memory space */
        var memId2 = H5S.create_simple(rank, dimsExtend, null);

        /* Write the data to the extended portion of the dataset, using the
         * file space that carries the hyperslab selection. */
        GCHandle hnd = GCHandle.Alloc(dset, GCHandleType.Pinned);
        status = H5D.write(datasetId, datatype, memId2, filespaceId, H5P.DEFAULT, hnd.AddrOfPinnedObject());
        hnd.Free();
        H5S.close(memId2);
        H5S.close(filespaceId);
    }
    H5D.close(datasetId);
    H5S.close(spaceId);
    return status;
}
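A sketch of how the append helper above might be used to grow a dataset batch by batch; the group id, dataset name, and the ProduceNextBatch data source are hypothetical:

// Append 2-D double batches to the same dataset; rows accumulate along dimension 0,
// and every batch must have the same number of columns.
for (int batch = 0; batch < 10; batch++)
{
    double[,] rows = ProduceNextBatch(batch);   // hypothetical data source
    AppendDataset<double>(groupId, "/timeseries", rows, chunkX: 200);
}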
private object GetAttributeValue(int _h5FileId, string attributeName) { H5AttributeId attId = H5A.open(_h5FileId, attributeName); if (attId == 0) { return(null); } H5DataTypeId typeId = 0; H5DataTypeId dtId = 0; H5A.info_t attInfo = new H5A.info_t(); H5DataSpaceId spaceId = 0; H5DataTypeId oldTypeId = 0; object retObject = null; try { typeId = H5A.get_type(attId); H5A.get_info(attId, ref attInfo); dtId = H5A.get_type(attId); spaceId = H5A.get_space(attId); IntPtr dataSize = H5T.get_size(dtId); // oldTypeId = typeId; typeId = H5T.get_native_type(typeId, H5T.direction_t.DEFAULT); H5T.class_t typeClass = H5T.get_class(typeId); int ndims = H5S.get_simple_extent_ndims(spaceId); ulong[] dims = new ulong[ndims]; H5S.get_simple_extent_dims(spaceId, dims, null); ulong dimSize = 1; if (dims.Length == 0) { dimSize = 1; } else { foreach (ulong dim in dims) { dimSize *= dim; } } switch (typeClass) { case H5T.class_t.NO_CLASS: break; case H5T.class_t.INTEGER: // H5T.Sign.TWOS_COMPLEMENT; H5T.sign_t sign = H5T.get_sign(oldTypeId); switch (dataSize.ToInt32()) { case 1: retObject = ReadArray <byte>(dimSize, attId, typeId); break; case 2: switch (sign) { case H5T.sign_t.SGN_2: retObject = ReadArray <Int16>(dimSize, attId, typeId); break; case H5T.sign_t.NONE: retObject = ReadArray <UInt16>(dimSize, attId, typeId); break; } break; case 4: switch (sign) { case H5T.sign_t.SGN_2: retObject = ReadArray <Int32>(dimSize, attId, typeId); break; case H5T.sign_t.NONE: retObject = ReadArray <UInt32>(dimSize, attId, typeId); break; } break; case 8: switch (sign) { case H5T.sign_t.SGN_2: retObject = ReadArray <Int64>(dimSize, attId, typeId); break; case H5T.sign_t.NONE: retObject = ReadArray <UInt64>(dimSize, attId, typeId); break; } break; } break; case H5T.class_t.FLOAT: switch (dataSize.ToInt32()) { case 4: retObject = ReadArray <float>(dimSize, attId, typeId); break; case 8: retObject = ReadArray <double>(dimSize, attId, typeId); break; } break; case H5T.class_t.STRING: ulong size = attInfo.data_size; byte[] chars = ReadArray <byte>(size, attId, typeId); retObject = Encoding.ASCII.GetString(chars); break; default: break; } return(retObject); } finally { if (spaceId != 0) { H5S.close(spaceId); } if (attId != 0) { H5A.close(attId); } if (oldTypeId != 0) { H5T.close(oldTypeId); } if (typeId != 0) { H5T.close(typeId); } if (dtId != 0) { H5T.close(dtId); } } }
public Dictionary <string, string> GetDatasetAttributes(string originalDatasetName) { H5DataSetId datasetId = 0; H5GroupId groupId = 0; H5DataTypeId typeId = 0; H5DataSpaceId spaceId = 0; try { if (_h5FileId < 0) { return(null); } string datasetName = GetDatasetFullNames(originalDatasetName, _h5FileId); if (string.IsNullOrEmpty(datasetName)) { return(null); } int groupIndex = datasetName.LastIndexOf('/'); if (groupIndex == -1) { datasetId = H5D.open(_h5FileId, datasetName); } else { string groupName = datasetName.Substring(0, groupIndex + 1); string dsName = datasetName.Substring(groupIndex + 1); groupId = H5G.open(_h5FileId, groupName); datasetId = H5D.open(groupId, dsName); } if (datasetId == 0) { return(null); } Dictionary <string, string> attValues = new Dictionary <string, string>(); typeId = H5D.get_type(datasetId); H5T.class_t type = H5T.get_class(typeId); IntPtr tSize = H5T.get_size(typeId); spaceId = H5D.get_space(datasetId); int length = H5S.get_simple_extent_ndims(spaceId); ulong[] dims = new ulong[length]; H5S.get_simple_extent_dims(spaceId, dims, null); ulong storageSize = H5D.get_storage_size(datasetId); attValues.Add("DataSetName", datasetName); attValues.Add("DataType", type.ToString()); attValues.Add("DataTypeSize", tSize.ToString() + "Byte"); attValues.Add("Dims", String.Join("*", dims)); attValues.Add("StorageSize", storageSize.ToString() + "Byte"); //所有Attributes的键 ArrayList arrayList = new ArrayList(); GCHandle handle = GCHandle.Alloc(arrayList); ulong n = 0; // the callback is defined in H5ATest.cs H5A.operator_t cb = (int location_id, IntPtr attr_name, ref H5A.info_t ainfo, IntPtr op_data) => { GCHandle hnd = (GCHandle)op_data; ArrayList al = (hnd.Target as ArrayList); int len = 0; while (Marshal.ReadByte(attr_name, len) != 0) { ++len; } byte[] buf = new byte[len]; Marshal.Copy(attr_name, buf, 0, len); al.Add(Encoding.UTF8.GetString(buf)); return(0); }; H5A.iterate(datasetId, H5.index_t.NAME, H5.iter_order_t.NATIVE, ref n, cb, (IntPtr)handle); handle.Free(); foreach (string attName in arrayList) { attValues.Add(attName, ReadAttributeValue(datasetId, attName)); } return(attValues); } finally { if (spaceId != 0) { H5S.close(spaceId); } if (typeId != 0) { H5T.close(typeId); } if (datasetId != 0) { H5D.close(datasetId); } if (groupId != 0) { H5G.close(groupId); } } }