/// <summary>
/// Opens the dataspace of the given dataset and copies its rank and per-dimension
/// current sizes into a new <see cref="Hdf5Dataspace"/> descriptor.
/// </summary>
/// <param name="_datasetId">Identifier of an open HDF5 dataset.</param>
/// <returns>
/// A populated dataspace descriptor.
/// NOTE(review): the underlying dataspace id is closed before returning, so the
/// descriptor's <c>Id</c> refers to a released handle — confirm callers never pass
/// it back into the HDF5 API.
/// </returns>
public static Hdf5Dataspace GetDataspace(Hdf5Identifier _datasetId)
{
    var dataspaceId = H5D.get_space(_datasetId.Value).ToId();

    int rank = H5S.get_simple_extent_ndims(dataspaceId.Value);

    ulong[] dims = new ulong[rank];
    ulong[] maxDims = new ulong[rank];
    H5S.get_simple_extent_dims(dataspaceId.Value, dims, maxDims);

    Hdf5Dataspace dataspace = new Hdf5Dataspace
    {
        Id = dataspaceId,
        NumberOfDimensions = rank
    };

    for (int i = 0; i < dims.Length; i++)
    {
        Hdf5DimensionProperty property = new Hdf5DimensionProperty
        {
            CurrentSize = dims[i],
            //MaximumSize = maxDims[i]
        };

        dataspace.DimensionProperties.Add(property);
    }

    H5S.close(dataspaceId.Value);

    return (dataspace);
}
/// <summary>
/// Loads a Daimler lidar HDF5 container: probes the "distance" dataset for the
/// 2-D extent shared by all channels, then reads every channel dataset.
/// </summary>
/// <param name="sFilePath_inp">Path of the HDF5 file to read.</param>
/// <returns>A fully populated container.</returns>
private static Hdf5Container_LidarDaimler ReadContainer(string sFilePath_inp)
{
    int status = 0;
    long file_id = H5F.open(sFilePath_inp, H5F.ACC_RDWR);

    // Probe one dataset for the row/column extent shared by all channels.
    long testDataset_id = H5D.open(file_id, "distance");
    long testDataspace_id = H5D.get_space(testDataset_id);

    ulong[] dims = new ulong[2];
    status = H5S.get_simple_extent_dims(testDataspace_id, dims, null);

    int rows = Convert.ToInt32(dims[0]);
    int cols = Convert.ToInt32(dims[1]);

    // Fix: release the probe handles (previously leaked for the file's lifetime).
    H5S.close(testDataspace_id);
    H5D.close(testDataset_id);

    Hdf5Container_LidarDaimler outContainer = new Hdf5Container_LidarDaimler(rows, cols)
    {
        _distances = Hdf5IO.GetFloatDataset(H5D.open(file_id, "distance"), rows, cols),
        _intensity = Hdf5IO.GetFloatDataset(H5D.open(file_id, "intensity"), rows, cols),
        _labelProbabilities = Hdf5IO.GetFloatDataset(H5D.open(file_id, "labelProbabilities"), rows, cols),
        _labelWorkingSet = Hdf5IO.GetLabelWorkingSet(H5G.open(file_id, "labelWorkingSet")),
        _labels = Hdf5IO.GetUintDataset(H5D.open(file_id, "labels"), rows, cols),
        _pointValid = Hdf5IO.GetIntDataset(H5D.open(file_id, "pointValid"), rows, cols),
        _sensorX = Hdf5IO.GetFloatDataset(H5D.open(file_id, "sensorX"), rows, cols),
        _sensorY = Hdf5IO.GetFloatDataset(H5D.open(file_id, "sensorY"), rows, cols),
        _sensorZ = Hdf5IO.GetFloatDataset(H5D.open(file_id, "sensorZ"), rows, cols),
        _vehicleX = Hdf5IO.GetFloatDataset(H5D.open(file_id, "vehicleX"), rows, cols),
        _vehicleY = Hdf5IO.GetFloatDataset(H5D.open(file_id, "vehicleY"), rows, cols),
        _vehicleZ = Hdf5IO.GetFloatDataset(H5D.open(file_id, "vehicleZ"), rows, cols)
    };

    status = H5F.close(file_id);

    return (outContainer);
}
// Round-trips a simple dataspace through H5S.encode / H5S.decode and verifies
// rank and extents survive the trip.
public void H5SdecodeTest1()
{
    hsize_t[] extent = { 1, 2, 3 };

    hid_t spaceId = H5S.create_simple(extent.Length, extent, extent);
    Assert.IsTrue(spaceId > 0);

    // First encode call with a null buffer only reports the required size.
    size_t requiredSize = new IntPtr();
    Assert.IsTrue(H5S.encode(spaceId, null, ref requiredSize) >= 0);

    byte[] encoded = new byte[requiredSize.ToInt32()];
    Assert.IsTrue(H5S.encode(spaceId, encoded, ref requiredSize) >= 0);
    Assert.IsTrue(H5S.close(spaceId) >= 0);

    // Decode must restore the rank and every extent.
    spaceId = H5S.decode(encoded);
    Assert.IsTrue(spaceId >= 0);
    Assert.IsTrue(H5S.get_simple_extent_ndims(spaceId) == extent.Length);

    hsize_t[] decodedExtent = new hsize_t[extent.Length];
    Assert.IsTrue(
        H5S.get_simple_extent_dims(spaceId, decodedExtent, null) == extent.Length);
    for (int i = 0; i < extent.Length; ++i)
    {
        Assert.IsTrue(decodedExtent[i] == extent[i]);
    }

    Assert.IsTrue(H5S.close(spaceId) >= 0);
}
/// <summary>
/// Reads a 2-D double dataset into a double[,], optionally transposed.
/// Fix: the dataset and file-datatype handles are now closed (previously leaked),
/// and the duplicated copy loops are merged.
/// </summary>
/// <param name="fileLoc">File or group id containing the dataset.</param>
/// <param name="name">Dataset name.</param>
/// <param name="transpose">When true, the result is the transpose of the stored layout.</param>
/// <returns>The dataset contents.</returns>
private static double[,] ReadDataArray(hid_t fileLoc, string name, bool transpose = false)
{
    hid_t dset = H5D.open(fileLoc, name);
    hid_t fspace = H5D.get_space(dset);
    hid_t count = H5S.get_simple_extent_ndims(fspace);
    hid_t type = H5D.get_type(dset);

    hsize_t[] dims = new hsize_t[count];
    hsize_t[] maxdims = new hsize_t[count];
    H5S.get_simple_extent_dims(fspace, dims, maxdims);
    H5S.close(fspace);

    // NOTE(review): assumes rank >= 2; dims[1] would throw for a 1-D dataset.
    byte[] rdata = new byte[dims[0] * dims[1] * 8];

    hid_t mem_type = H5T.copy(H5T.NATIVE_DOUBLE);
    H5T.set_size(mem_type, new IntPtr(8));

    GCHandle hnd = GCHandle.Alloc(rdata, GCHandleType.Pinned);
    H5D.read(dset, mem_type, H5S.ALL, H5S.ALL, H5P.DEFAULT,
             hnd.AddrOfPinnedObject());
    hnd.Free();
    H5T.close(mem_type);

    // Fix: these two handles were leaked in the original.
    H5T.close(type);
    H5D.close(dset);

    int rows = (int)dims[0];
    int cols = (int)dims[1];
    double[,] val = transpose ? new double[cols, rows] : new double[rows, cols];
    int cnt = 0;
    for (int i = 0; i < rows; i++)
    {
        for (int j = 0; j < cols; j++)
        {
            double d = BitConverter.ToDouble(rdata, cnt * 8);
            if (transpose)
            {
                val[j, i] = d;
            }
            else
            {
                val[i, j] = d;
            }
            cnt++;
        }
    }
    return (val);
}
/// <summary>
/// WARNING: ADVANCED USE ONLY!! Loads a 2D generic dataset from an H5 file.
/// The generic loaders only loads data in non-Unity friendly types, such as bytes, uints, longs etc...
/// You'll have to know the correct cast to retrieve usable data.
///
/// Created With help from https://github.com/LiorBanai/HDF5-CSharp/blob/master/HDF5-CSharp/Hdf5Dataset.cs
/// </summary>
/// <param name="filePath"></param>
/// <param name="datasetName"></param>
/// <typeparam name="T"></typeparam>
/// <returns></returns>
/// <exception cref="FileNotFoundException"></exception>
static T[,] Load2DDataset<T>(string filePath, string datasetName)
{
    if (!File.Exists(filePath))
    {
        throw new FileNotFoundException($"Loading dataset {datasetName} from file that doesn't exist {filePath}");
    }
    long fileId = H5F.open(filePath, H5F.ACC_RDONLY);
    T[,] resultArray = new T[2, 2];
    try
    {
        ulong[] start = { 0, 0 };
        ulong[] count = { 0, 0 };

        long datasetId = H5D.open(fileId, datasetName);
        var datatype = H5D.get_type(datasetId);
        var spaceId = H5D.get_space(datasetId);
        int rank = H5S.get_simple_extent_ndims(spaceId);
        ulong[] maxDims = new ulong[rank];
        ulong[] dims = new ulong[rank];
        H5S.get_simple_extent_dims(spaceId, dims, maxDims);
        count[0] = dims[0];
        count[1] = dims[1];

        // Define file hyperslab.
        long status = H5S.select_hyperslab(spaceId, H5S.seloper_t.SET, start, null, count, null);

        // Define the memory dataspace.
        resultArray = new T[dims[0], dims[1]];
        var memId = H5S.create_simple(rank, dims, null);

        // Define memory hyperslab.
        status = H5S.select_hyperslab(memId, H5S.seloper_t.SET, start, null, count, null);

        // Read data from hyperslab in the file into the hyperslab in memory.
        GCHandle handle = GCHandle.Alloc(resultArray, GCHandleType.Pinned);
        try
        {
            H5D.read(datasetId, datatype, memId, spaceId, H5P.DEFAULT, handle.AddrOfPinnedObject());
        }
        finally
        {
            handle.Free();
            // Fix: the original called H5S.close(status) — status is an error code,
            // not a dataspace id — and closed the datatype with H5D.close.
            H5S.close(memId);
            H5S.close(spaceId);
            H5T.close(datatype);
            H5D.close(datasetId);
        }
    }
    finally
    {
        H5F.close(fileId);
    }
    return (resultArray);
}
// Reads /group/dataset (a 2-D int matrix) plus several attributes from an HDF5
// file and echoes them to the console.
static void ReadFile(string filePath)
{
    var file = H5F.open(filePath, H5F.ACC_RDONLY);
    var dataSet = H5D.open(file, "/group/dataset");
    var dataSpace = H5D.get_space(dataSet);
    var rank = H5S.get_simple_extent_ndims(dataSpace);
    if (rank == 2)
    {
        var dims = new ulong[2];
        H5S.get_simple_extent_dims(dataSpace, dims, null);
        var data = new int[dims[0], dims[1]];
        // PinnedObject presumably pins `data` for the native call — TODO confirm
        // it unpins on dispose.
        H5D.read(dataSet, H5T.NATIVE_INT, H5S.ALL, H5S.ALL, H5P.DEFAULT,
                 new PinnedObject(data));
        for (int i = 0; i < data.GetLength(0); ++i)
        {
            for (int j = 0; j < data.GetLength(1); ++j)
            {
                Write($"{data[i,j],3}");
            }
            WriteLine();
        }
    }
    H5S.close(dataSpace);

    var doubleAttribute = H5A.open(dataSet, "double");
#if false
    // Disabled: pinning a boxed copy of the local double would not write the
    // value back into `pi`.
    double pi = 0.0;
    var handle = GCHandle.Alloc(pi, GCHandleType.Pinned);
    H5A.read(doubleAttribute, H5T.NATIVE_DOUBLE, handle.AddrOfPinnedObject());
    handle.Free();
    WriteLine($"PI = {pi}");
#else
    var values = new double[1];
    H5A.read(doubleAttribute, H5T.NATIVE_DOUBLE, new PinnedObject(values));
    WriteLine($"PI = {values[0]}");
#endif
    H5A.close(doubleAttribute);

    WriteLine($"string: {ReadStringAttribute(dataSet, "string")}");
    WriteLine($"string-ascii: {ReadStringAttribute(dataSet, "string-ascii")}");
    WriteLine($"string-vlen: {ReadStringAttribute(dataSet, "string-vlen")}");

    H5D.close(dataSet);
    H5F.close(file);
}
/// <summary>
/// Allocates and fills <paramref name="dims"/> and <paramref name="maxDims"/> with
/// the current and maximum extents of the given dataspace.
/// </summary>
/// <exception cref="HDF5Exception">The extent query fails (both outputs are nulled).</exception>
public static void GetDataSpaceDimensions(hid_t space, ref ulong[] dims, ref ulong[] maxDims)
{
    var rank = GetDataSpaceRank(space);
    dims = new ulong[rank];
    maxDims = new ulong[rank];

    var queryResult = H5S.get_simple_extent_dims(space, dims, maxDims);
    if (queryResult >= 0)
    {
        return;
    }

    dims = maxDims = null;
    throw new HDF5Exception("Failed to get dimensions of data set.");
}
/// <summary>
/// Reads an n-dimensional dataset.
/// Fix: the datatype, copied memory type, property list and memory dataspace are
/// now closed (all previously leaked), and the misuse of a dims-query return value
/// as a dataspace id is removed.
/// </summary>
/// <typeparam name="T">Generic parameter strings or primitive type</typeparam>
/// <param name="groupId">id of the group. Can also be a file Id</param>
/// <param name="name">name of the dataset</param>
/// <returns>The n-dimensional dataset</returns>
public static Array ReadDatasetToArray<T>(hid_t groupId, string name) //where T : struct
{
    var datatype = GetDatatype(typeof(T));

    var datasetId = H5D.open(groupId, name);
    var spaceId = H5D.get_space(datasetId);
    int rank = H5S.get_simple_extent_ndims(spaceId);
    long count = H5S.get_simple_extent_npoints(spaceId);
    Array dset;
    Type type = typeof(T);
    if (rank >= 0 && count >= 0)
    {
        int rankChunk;
        ulong[] maxDims = new ulong[rank];
        ulong[] dims = new ulong[rank];
        ulong[] chunkDims = new ulong[rank];
        H5S.get_simple_extent_dims(spaceId, dims, maxDims);
        long[] lengths = dims.Select(d => Convert.ToInt64(d)).ToArray();
        dset = Array.CreateInstance(type, lengths);

        var typeId = H5D.get_type(datasetId);
        var mem_type = H5T.copy(datatype);
        if (datatype == H5T.C_S1)
        {
            H5T.set_size(datatype, new IntPtr(2));
        }

        var propId = H5D.get_create_plist(datasetId);
        if (H5D.layout_t.CHUNKED == H5P.get_layout(propId))
        {
            rankChunk = H5P.get_chunk(propId, rank, chunkDims);
        }

        var memId = H5S.create_simple(rank, dims, maxDims);
        GCHandle hnd = GCHandle.Alloc(dset, GCHandleType.Pinned);
        H5D.read(datasetId, datatype, memId, spaceId, H5P.DEFAULT,
                 hnd.AddrOfPinnedObject());
        hnd.Free();

        // Fix: release handles that previously leaked.
        H5S.close(memId);
        H5P.close(propId);
        H5T.close(mem_type);
        H5T.close(typeId);
    }
    else
    {
        dset = Array.CreateInstance(type, new long[1] { 0 });
    }
    H5D.close(datasetId);
    H5S.close(spaceId);
    return (dset);
}
// Returns the extents of a dataspace as ints; an empty array when the rank
// query fails.
static int[] GetDatasetDimensions(long spaceID)
{
    int rank = H5S.get_simple_extent_ndims(spaceID);
    if (rank < 0)
    {
        return new int[0];
    }

    ulong[] extents = new ulong[rank];
    H5S.get_simple_extent_dims(spaceID, extents, MaxDimensions);
    return ConvertDimensionsToIntegers(extents);
}
/// <summary>
/// Reads a 1-D compound dataset and materialises each record as a struct of type
/// <typeparamref name="T"/> by copying its raw bytes.
/// </summary>
/// <typeparam name="T">Struct whose layout matches the file's compound type.</typeparam>
/// <param name="groupId">Group (or file) id containing the dataset.</param>
/// <param name="name">Dataset name.</param>
/// <returns>Sequence of decoded records (built lazily — see note below).</returns>
public static IEnumerable <T> ReadCompounds <T>(hid_t groupId, string name) where T : struct
{
    Type type = typeof(T);
    hid_t typeId = 0;
    // open dataset
    var datasetId = H5D.open(groupId, name);
    typeId = CreateType(type);
    var compoundSize = Marshal.SizeOf(type);
    /*
     * Get dataspace and allocate memory for read buffer.
     */
    var spaceId = H5D.get_space(datasetId);
    int rank = H5S.get_simple_extent_ndims(spaceId);
    ulong[] dims = new ulong[rank];
    var ndims = H5S.get_simple_extent_dims(spaceId, dims, null);
    int rows = Convert.ToInt32(dims[0]);
    byte[] bytes = new byte[rows * compoundSize];
    // Read the data.
    GCHandle hnd = GCHandle.Alloc(bytes, GCHandleType.Pinned);
    IntPtr hndAddr = hnd.AddrOfPinnedObject();
    H5D.read(datasetId, typeId, spaceId, H5S.ALL, H5P.DEFAULT, hndAddr);
    int counter = 0;
    // NOTE(review): this Select captures `counter` and is deferred — enumerating
    // the returned sequence more than once advances `counter` past the buffer.
    // Confirm callers enumerate exactly once (or materialise with ToArray first).
    IEnumerable <T> strcts = Enumerable.Range(1, rows).Select(i =>
    {
        byte[] select = new byte[compoundSize];
        Array.Copy(bytes, counter, select, 0, compoundSize);
        T s = fromBytes <T>(select);
        counter = counter + compoundSize;
        return (s);
    });
    /*
     * Close and release resources.
     */
    H5D.vlen_reclaim(typeId, spaceId, H5P.DEFAULT, hndAddr);
    hnd.Free();
    H5D.close(datasetId);
    H5S.close(spaceId);
    H5T.close(typeId);
    return (strcts);
}
/// <summary>
/// Prepares a value set for writing to an attribute: when the attribute's dataspace
/// is unlimited, the content already stored in the file is merged with
/// <paramref name="valueSet"/>, avoiding duplication of an overlapping run.
/// </summary>
/// <param name="attributeId">Attribute whose dataspace is inspected.</param>
/// <param name="valueSet">Values to write; replaced by the merged set when needed.</param>
/// <param name="isReference">Selects the merge direction — append file content
/// (true) vs. prepend it (false). TODO confirm intended semantics with callers.</param>
/// <returns>The maximum-dimension set of the attribute's dataspace (length 1).</returns>
public static ulong[] PrepareAttributeValueSet <T>(long attributeId, ref T[] valueSet, bool isReference)
{
    long dataspaceId = -1;
    ulong[] dimensionSet;
    ulong[] dimensionLimitSet;

    dimensionSet = new ulong[] { 0 };
    dimensionLimitSet = new ulong[] { 0 };

    try
    {
        dataspaceId = H5A.get_space(attributeId);
        // dims argument is null: only the maximum dimensions are queried here.
        H5S.get_simple_extent_dims(dataspaceId, null, dimensionLimitSet);

        // merge data
        if (dimensionLimitSet[0] == H5S.UNLIMITED)
        {
            T[] valueSet_File = IOHelper.Read <T>(attributeId, DataContainerType.Attribute);

            if (isReference)
            {
                // Append the file content unless it already forms the tail of valueSet.
                if (valueSet_File.Count() == 0 ||
                    !Enumerable.SequenceEqual(valueSet_File, valueSet.Skip(Math.Max(0, valueSet.Count() - valueSet_File.Count()))))
                {
                    valueSet = valueSet.Concat(valueSet_File).ToArray();
                }
            }
            else
            {
                // Prepend the file content unless valueSet already forms its tail.
                if (valueSet.Count() == 0 ||
                    !Enumerable.SequenceEqual(valueSet, valueSet_File.Skip(Math.Max(0, valueSet_File.Count() - valueSet.Count()))))
                {
                    valueSet = valueSet_File.Concat(valueSet).ToArray();
                }
            }
        }
    }
    finally
    {
        if (H5I.is_valid(dataspaceId) > 0)
        {
            H5S.close(dataspaceId);
        }
    }

    return (dimensionLimitSet);
}
/// <summary>
/// Reads the contiguous row range [beginIndex, endIndex) of a 2-D dataset,
/// all columns, into a freshly allocated array.
/// </summary>
/// <typeparam name="T">Primitive struct matching the stored element type.</typeparam>
/// <param name="groupId">Group (or file) id containing the dataset.</param>
/// <param name="name">Dataset name (converted via ToHdf5Name).</param>
/// <param name="beginIndex">First row to read (inclusive).</param>
/// <param name="endIndex">Row after the last one to read (exclusive).</param>
/// <returns>Array shaped [endIndex - beginIndex, columns].</returns>
public static T[,] ReadDataset <T>(int groupId, string name, ulong beginIndex, ulong endIndex) where T : struct
{
    var elementType = GetDatatype(typeof(T));
    name = ToHdf5Name(name);

    var datasetId = H5D.open(groupId, name);
    var spaceId = H5D.get_space(datasetId);

    int rank = H5S.get_simple_extent_ndims(spaceId);
    ulong[] maxExtents = new ulong[rank];
    ulong[] extents = new ulong[rank];
    var extentResult = H5S.get_simple_extent_dims(spaceId, extents, maxExtents);

    // File-side hyperslab: rows [beginIndex, endIndex), every column.
    ulong[] fileOffset = { beginIndex, 0 };
    ulong[] blockCount = { endIndex - beginIndex, extents[1] };
    var status = H5S.select_hyperslab(spaceId, H5S.seloper_t.SET,
                                      fileOffset, null, blockCount, null);

    // Memory-side dataspace of the same shape, selected from the origin.
    T[,] result = new T[blockCount[0], blockCount[1]];
    var memId = H5S.create_simple(rank, blockCount, null);
    ulong[] memOffset = { 0, 0 };
    status = H5S.select_hyperslab(memId, H5S.seloper_t.SET,
                                  memOffset, null, blockCount, null);

    // Copy the file hyperslab into the pinned managed array.
    GCHandle pin = GCHandle.Alloc(result, GCHandleType.Pinned);
    H5D.read(datasetId, elementType, memId, spaceId, H5P.DEFAULT,
             pin.AddrOfPinnedObject());
    pin.Free();

    H5D.close(datasetId);
    H5S.close(spaceId);
    H5S.close(memId);
    return result;
}
/// <summary>
/// Reads a 1-D float calibration dataset whose name contains
/// <paramref name="datasetName"/>; returns null when no such dataset exists or
/// its type class is not FLOAT.
/// </summary>
public float[] TryReadFactor(AbstractWarpDataset ds, string datasetName)
{
    string dsPath = (ds.hdfOperator as Hdf5Operator).GetDatasetNames.Where(t => t.Contains(datasetName))
        .FirstOrDefault();
    float[] factor = null;
    int h5FileId = H5F.open(ds.fileName, H5F.ACC_RDONLY);
    int datasetId = H5D.open(h5FileId, dsPath);
    int typeId = H5D.get_type(datasetId);
    int spaceId = H5D.get_space(datasetId);
    if (H5T.get_class(typeId) == H5T.class_t.FLOAT)
    {
        var size = H5T.get_size(typeId);
        int rank = H5S.get_simple_extent_ndims(spaceId);
        ulong[] dims = new ulong[rank];
        int err = H5S.get_simple_extent_dims(spaceId, dims, null);
        // First extent only: the dataset is assumed 1-D — TODO confirm.
        factor = new float[dims[0]];
        GCHandle hnd = GCHandle.Alloc(factor, GCHandleType.Pinned);
        H5D.read(datasetId, typeId, H5S.ALL, H5S.ALL, H5P.DEFAULT,
                 hnd.AddrOfPinnedObject());
        hnd.Free();
    }
    // NOTE(review): failed H5 opens return negative ids, so these `!= 0` guards
    // still call close on failure handles — harmless but worth confirming.
    if (spaceId != 0)
    {
        H5S.close(spaceId);
    }
    if (typeId != 0)
    {
        H5T.close(typeId);
    }
    if (datasetId != 0)
    {
        H5D.close(datasetId);
    }
    if (h5FileId != 0)
    {
        H5F.close(h5FileId);
    }
    return (factor);
}
/// <summary>
/// Reads a dataset of fixed-length strings and returns the entries split on NUL.
/// Fix: all HDF5 handles (file, dataset, both datatypes, dataspace) are now
/// released — previously every one of them leaked, including the file handle on
/// the "dataset not found" error path.
/// </summary>
/// <param name="filename">HDF5 file path.</param>
/// <param name="dataset">Dataset name.</param>
/// <returns>The decoded strings.</returns>
/// <exception cref="Exception">File or dataset cannot be opened.</exception>
public static IEnumerable ReadStrings(string filename, string dataset)
{
    var f = H5F.open(filename, H5F.ACC_RDONLY);
    if (f < 0)
    {
        throw new Exception("Could not open file: " + filename);
    }
    var dset = H5D.open(f, Encoding.ASCII.GetBytes(dataset), H5P.DEFAULT);
    if (dset < 0)
    {
        H5F.close(f); // fix: don't leak the file handle on this error path
        throw new Exception("Could not open dataset: " + dataset);
    }
    var filetype = H5D.get_type(dset);
    var sdim = H5T.get_size(filetype) + 1; // +1 for a NUL terminator per entry
    var space = H5D.get_space(dset);
    var ndims = H5S.get_simple_extent_ndims(space);
    ulong[] dims = new ulong[ndims];
    H5S.get_simple_extent_dims(space, dims, null);

    var memtype = H5T.copy(H5T.C_S1);
    var status = H5T.set_size(memtype, sdim);

    int len = (int)(dims[0] * (ulong)sdim * SIZEOF_CHAR);
    byte[] buffer = new byte[len];
    IntPtr ptr = Marshal.AllocHGlobal(len);
    try
    {
        status = H5D.read(dset, memtype, H5S.ALL, H5S.ALL, H5P.DEFAULT, ptr);
        Marshal.Copy(ptr, buffer, 0, len);
    }
    finally
    {
        Marshal.FreeHGlobal(ptr);
        // fix: release all HDF5 handles (previously leaked)
        H5T.close(memtype);
        H5S.close(space);
        H5T.close(filetype);
        H5D.close(dset);
        H5F.close(f);
    }

    string s = Encoding.ASCII.GetString(buffer);
    return (s.Split(new char[] { '\0' }, StringSplitOptions.RemoveEmptyEntries));
}
/// <summary>
/// Reads an entire 2-D dataset of primitives into a T[,].
/// Fix: the datatype handle was being closed with H5D.close (wrong API — it is a
/// datatype, so H5T.close); the copied memory type, memory dataspace and property
/// list were leaked.
/// </summary>
/// <typeparam name="T">Primitive struct matching the stored element type.</typeparam>
/// <param name="groupId">Group (or file) id containing the dataset.</param>
/// <param name="name">Dataset name (converted via ToHdf5Name).</param>
/// <returns>The dataset contents, shaped [dims[0], dims[1]].</returns>
public static T[,] ReadDataset <T>(int groupId, string name) where T : struct
{
    var datatype = GetDatatype(typeof(T));

    name = ToHdf5Name(name);
    var datasetId = H5D.open(groupId, name);
    var spaceId = H5D.get_space(datasetId);
    int rank = H5S.get_simple_extent_ndims(spaceId);
    long count = H5S.get_simple_extent_npoints(spaceId);

    int rankChunk;
    ulong[] maxDims = new ulong[rank];
    ulong[] dims = new ulong[rank];
    ulong[] chunkDims = new ulong[rank];
    H5S.get_simple_extent_dims(spaceId, dims, maxDims);

    T[,] dset = new T[dims[0], dims[1]];
    var typeId = H5D.get_type(datasetId);
    var mem_type = H5T.copy(datatype);
    if (datatype == H5T.C_S1)
    {
        H5T.set_size(datatype, new IntPtr(2));
    }

    var propId = H5D.get_create_plist(datasetId);
    if (H5D.layout_t.CHUNKED == H5P.get_layout(propId))
    {
        rankChunk = H5P.get_chunk(propId, rank, chunkDims);
    }

    var memId = H5S.create_simple(rank, dims, maxDims);
    GCHandle hnd = GCHandle.Alloc(dset, GCHandleType.Pinned);
    H5D.read(datasetId, datatype, memId, spaceId, H5P.DEFAULT,
             hnd.AddrOfPinnedObject());
    hnd.Free();

    // Fix: close every handle with the matching API.
    H5S.close(memId);
    H5P.close(propId);
    H5T.close(mem_type);
    H5T.close(typeId);
    H5D.close(datasetId);
    H5S.close(spaceId);
    return (dset);
}
/// <summary>
/// Reads a 1-byte-per-element dataset and decodes it to a string.
/// Fix: the original returned <c>val.ToString()</c> on a char[], which yields the
/// literal "System.Char[]" — it now builds the string from the characters. The
/// dataset and file-datatype handles are also closed (previously leaked).
/// </summary>
/// <param name="fileLoc">File or group id containing the dataset.</param>
/// <param name="name">Dataset name.</param>
/// <returns>The decoded string.</returns>
private static string ReadDataCharArray(hid_t fileLoc, string name)
{
    hid_t dset = H5D.open(fileLoc, name);
    hid_t fspace = H5D.get_space(dset);
    hid_t count = H5S.get_simple_extent_ndims(fspace);
    hid_t type = H5D.get_type(dset);

    hsize_t[] dims = new hsize_t[count];
    hsize_t[] maxdims = new hsize_t[count];
    H5S.get_simple_extent_dims(fspace, dims, maxdims);
    H5S.close(fspace);

    byte[] rdata = new byte[dims[0]];
    hid_t mem_type = H5T.copy(type);
    H5T.set_size(mem_type, new IntPtr(1));

    GCHandle hnd = GCHandle.Alloc(rdata, GCHandleType.Pinned);
    H5D.read(dset, mem_type, H5S.ALL, H5S.ALL, H5P.DEFAULT,
             hnd.AddrOfPinnedObject());
    hnd.Free();
    H5T.close(mem_type);

    // Fix: these two handles were leaked in the original.
    H5T.close(type);
    H5D.close(dset);

    char[] val = new char[dims[0]];
    for (int i = 0; i < (int)dims[0]; i += 2)
    {
        // NOTE(review): ToChar consumes two bytes and the loop steps by two,
        // leaving odd indices as '\0' — confirm the stored encoding is UTF-16.
        val[i] = BitConverter.ToChar(rdata, i);
    }
    return new string(val);
}
// Verifies get_simple_extent_dims with every combination of null output arrays:
// rank is always returned, and each non-null array receives the matching extents.
public void H5Sget_simple_extent_dimsTest2()
{
    hsize_t[] extent = { 1, 2, 3 };
    hsize_t[] maxExtent = { H5S.UNLIMITED, H5S.UNLIMITED, H5S.UNLIMITED };
    hsize_t[] queried = new hsize_t[3];

    hid_t spaceId = H5S.create_simple(extent.Length, extent, maxExtent);

    // Rank is reported even when both output arrays are omitted.
    Assert.IsTrue(
        H5S.get_simple_extent_dims(spaceId, (ulong[])null, null) == 3);

    // Current dimensions only.
    Assert.IsTrue(
        H5S.get_simple_extent_dims(spaceId, queried, null) == 3);
    Assert.IsTrue(queried[0] == 1);

    // Maximum dimensions only.
    Assert.IsTrue(
        H5S.get_simple_extent_dims(spaceId, null, queried) == 3);
    Assert.IsTrue(queried[0] == H5S.UNLIMITED);

    Assert.IsTrue(spaceId > 0);
    Assert.IsTrue(H5S.close(spaceId) >= 0);
}
// Records a SourceFileInfo entry (path, first-dimension length, timestamp) for
// the given dataset; always releases the dataspace it opens.
private void UpdateDatasetInfo(FileContext fileContext, long datasetId)
{
    long spaceId = -1;
    ulong[] currentDims = new ulong[1];
    ulong[] maximumDims = new ulong[1];

    try
    {
        spaceId = H5D.get_space(datasetId);
        H5S.get_simple_extent_dims(spaceId, currentDims, maximumDims);

        _sourceFileInfoSet.Add(new SourceFileInfo(fileContext.FilePath, currentDims.First(), fileContext.DateTime));
    }
    finally
    {
        if (H5I.is_valid(spaceId) > 0)
        {
            H5S.close(spaceId);
        }
    }
}
/// <summary>
/// Reads a dataset of 2-byte fixed-length C strings and decodes it as ASCII,
/// keeping only the first byte of each element and mapping NULs to spaces.
/// Fix: the dataspace handle is now closed (previously leaked) and the unused
/// locals (datatype, maxDims, chunkDims, memId_n) are removed.
/// </summary>
/// <param name="groupId">Group (or file) id containing the dataset.</param>
/// <param name="name">Dataset name.</param>
/// <returns>The decoded string.</returns>
public static string ReadAsciiString(hid_t groupId, string name)
{
    //name = ToHdf5Name(name);

    var datasetId = H5D.open(groupId, name);
    var spaceId = H5D.get_space(datasetId);
    int rank = H5S.get_simple_extent_ndims(spaceId);
    ulong[] dims = new ulong[rank];
    H5S.get_simple_extent_dims(spaceId, dims, null);

    // we write from C and must provide null-terminated strings
    byte[] wdata = new byte[dims[0] * 2];

    var memId = H5T.copy(H5T.C_S1);
    H5T.set_size(memId, new IntPtr(2));
    //H5T.set_strpad(memId, H5T.str_t.NULLTERM);

    GCHandle hnd = GCHandle.Alloc(wdata, GCHandleType.Pinned);
    int resultId = H5D.read(datasetId, memId, H5S.ALL, H5S.ALL,
                            H5P.DEFAULT, hnd.AddrOfPinnedObject());
    hnd.Free();

    // Keep every even byte; replace embedded NULs with spaces.
    wdata = wdata.Where((b, i) => i % 2 == 0).
        Select(b => (b == 0)?(byte)32:b).ToArray();
    string result = Encoding.ASCII.GetString(wdata);

    H5T.close(memId);
    H5S.close(spaceId); // fix: was leaked
    H5D.close(datasetId);
    return (result);
}
/// <summary>
/// Reads a 1-D double dataset into a double[].
/// Fix: the dataset and file-datatype handles are now closed (previously leaked).
/// </summary>
/// <param name="fileLoc">File or group id containing the dataset.</param>
/// <param name="name">Dataset name.</param>
/// <returns>The dataset contents.</returns>
private static double[] ReadDataVector(hid_t fileLoc, string name)
{
    hid_t dset = H5D.open(fileLoc, name);
    hid_t fspace = H5D.get_space(dset);
    hid_t count = H5S.get_simple_extent_ndims(fspace);
    hid_t type = H5D.get_type(dset);

    hsize_t[] dims = new hsize_t[count];
    hsize_t[] maxdims = new hsize_t[count];
    H5S.get_simple_extent_dims(fspace, dims, maxdims);
    H5S.close(fspace);

    byte[] rdata = new byte[dims[0] * 8];
    hid_t mem_type = H5T.copy(H5T.NATIVE_DOUBLE);
    H5T.set_size(mem_type, new IntPtr(8));

    GCHandle hnd = GCHandle.Alloc(rdata, GCHandleType.Pinned);
    H5D.read(dset, mem_type, H5S.ALL, H5S.ALL, H5P.DEFAULT,
             hnd.AddrOfPinnedObject());
    hnd.Free();
    H5T.close(mem_type);

    // Fix: these two handles were leaked in the original.
    H5T.close(type);
    H5D.close(dset);

    double[] val = new double[dims[0]];
    for (int i = 0; i < (int)dims[0]; i++)
    {
        val[i] = BitConverter.ToDouble(rdata, i * 8);
    }
    return (val);
}
/// <summary>
/// Reads an n-dimensional primitive attribute into a CLR Array.
/// Fix: the attribute's datatype handle was closed with H5A.close (wrong API —
/// it is a datatype, so H5T.close); the copied memory type, property list and
/// created memory dataspace were leaked, and the dims-query return value was
/// stored in a hid_t as if it were a dataspace id.
/// </summary>
/// <typeparam name="T">Primitive element type.</typeparam>
/// <param name="groupId">Object id owning the attribute.</param>
/// <param name="name">Attribute name.</param>
/// <returns>The attribute contents with the attribute's dimensions.</returns>
public static Array ReadPrimitiveAttributes <T>(hid_t groupId, string name) //where T : struct
{
    Type type = typeof(T);
    var datatype = GetDatatype(type);

    var attributeId = H5A.open(groupId, name);
    var spaceId = H5A.get_space(attributeId);
    int rank = H5S.get_simple_extent_ndims(spaceId);
    ulong[] maxDims = new ulong[rank];
    ulong[] dims = new ulong[rank];
    H5S.get_simple_extent_dims(spaceId, dims, maxDims);
    long[] lengths = dims.Select(d => Convert.ToInt64(d)).ToArray();
    Array attributes = Array.CreateInstance(type, lengths);

    var typeId = H5A.get_type(attributeId);
    var mem_type = H5T.copy(datatype);
    if (datatype == H5T.C_S1)
    {
        H5T.set_size(datatype, new IntPtr(2));
    }
    var propId = H5A.get_create_plist(attributeId);
    var memId = H5S.create_simple(rank, dims, maxDims);

    GCHandle hnd = GCHandle.Alloc(attributes, GCHandleType.Pinned);
    H5A.read(attributeId, datatype, hnd.AddrOfPinnedObject());
    hnd.Free();

    // Fix: close every handle with the matching API.
    H5S.close(memId);
    H5P.close(propId);
    H5T.close(mem_type);
    H5T.close(typeId);
    H5A.close(attributeId);
    H5S.close(spaceId);
    return (attributes);
}
/// <summary>
/// Recursively walks an HDF5 group, appending one HDF5info record (path, type
/// class, short description) per group member; datasets get a value preview or a
/// shape description. Closes <paramref name="gId"/> before returning.
/// </summary>
/// <param name="gId">Open group id; closed by this method.</param>
/// <param name="fields">Accumulator list, also returned for chaining.</param>
/// <param name="fullname">Slash-separated path of <paramref name="gId"/>.</param>
private static List <HDF5info> ScanInfo(hid_t gId, List <HDF5info> fields, string fullname)
{
    IntPtr MAX_NAME = new IntPtr(1024);
    System.Text.StringBuilder group_name = new System.Text.StringBuilder();
    System.Text.StringBuilder member_name = new System.Text.StringBuilder();
    IntPtr len = H5I.get_name(gId, group_name, MAX_NAME);
    hsize_t nobj = new hsize_t();
    H5G.get_num_objs(gId, ref nobj);
    for (int i = 0; i < (int)nobj; i++)
    {
        member_name = new System.Text.StringBuilder();
        member_name.Capacity = 1024;
        IntPtr len2 = H5G.get_objname_by_idx(gId, (ulong)i, member_name, MAX_NAME);
        int objtype = H5G.get_objtype_by_idx(gId, (ulong)i);
        if (objtype == 0) //group
        {
            // Recurse into the child group.
            hid_t gId2 = H5G.open(gId, member_name.ToString());
            fields = ScanInfo(gId2, fields, string.Format("{0}/{1}", fullname, member_name));
        }
        else if (objtype == 1) //Object is a dataset.
        {
            HDF5info hDF5Info = new HDF5info();
            hid_t dset = H5D.open(gId, member_name.ToString());
            hid_t fspace = H5D.get_space(dset);
            hid_t count = H5S.get_simple_extent_ndims(fspace);
            hid_t type = H5D.get_type(dset);
            hDF5Info.HDFclass = getH5Tstring(type);
            hDF5Info.field = string.Format("{0}/{1}", fullname, member_name);
            if (H5T.get_class(type) == H5T.class_t.STRING)
            {
                hDF5Info.description = nirs.io.ReadDataString(gId, string.Format("{0}", member_name));
            }
            else if (H5T.get_class(type) == H5T.class_t.FLOAT |
                     H5T.get_class(type) == H5T.class_t.INTEGER)
            {
                hsize_t[] dims = new hsize_t[count];
                hsize_t[] maxdims = new hsize_t[count];
                H5S.get_simple_extent_dims(fspace, dims, maxdims);
                if (dims.Length == 1 & dims[0] == 1)
                {
                    // Scalar: show the value itself.
                    var val = nirs.io.ReadDataValue(gId, string.Format("{0}", member_name));
                    hDF5Info.description = string.Format("{0}", val);
                }
                else if (dims.Length == 1 & dims[0] > 1)
                {
                    hDF5Info.description = string.Format("Vector <{0} x 1>", dims[0]);
                }
                else
                {
                    hDF5Info.description = string.Format("Array <{0} x {1}>", dims[0], dims[1]);
                    // Heuristic orientation checks for known SNIRF fields.
                    // NOTE(review): this string literal appears garbled/split in the
                    // source (raw line break inside the quotes) — confirm original text.
                    if (hDF5Info.field.Contains("dataTimeSeries") & dims[0] > dims[1])
                    {
                        hDF5Info.description += " TRANSPOSE WARNING 
";
                    }
                    if (hDF5Info.field.Contains("Pos") & dims[1] != 3)
                    {
                        hDF5Info.description += " TRANSPOSE WARNING ";
                    }
                    if (hDF5Info.field.Contains("stim") & hDF5Info.field.Contains("data") & dims[1] != 3)
                    {
                        hDF5Info.description += " TRANSPOSE WARNING ";
                    }
                }
            }
            else
            {
                hDF5Info.description = "";
            }
            fields.Add(hDF5Info);
        }
    }
    H5G.close(gId);
    return (fields);
}
// Reads the sample file: iterates root/group/dataset objects, prints the 2-D int
// matrix at /group/dataset, then reads several attributes (double, strings, enum).
static void ReadFile(string filePath)
{
    var file = H5F.open(filePath, H5F.ACC_RDONLY);
    IterateObjects(file);

    var group = H5G.open(file, "group");
    IterateObjects(group);
    H5G.close(group);

    var dataSet = H5D.open(file, "/group/dataset");
    IterateObjects(dataSet);
    var dataSpace = H5D.get_space(dataSet);
    var rank = H5S.get_simple_extent_ndims(dataSpace);
    if (rank == 2)
    {
        var dims = new ulong[2];
        H5S.get_simple_extent_dims(dataSpace, dims, null);
        var data = new int[dims[0], dims[1]];
        // PinnedObject presumably pins `data` for the native call — TODO confirm.
        H5D.read(dataSet, H5T.NATIVE_INT, H5S.ALL, H5S.ALL, H5P.DEFAULT,
                 new PinnedObject(data));
        for (int i = 0; i < data.GetLength(0); ++i)
        {
            for (int j = 0; j < data.GetLength(1); ++j)
            {
                Write($"{data[i,j],3}");
            }
            WriteLine();
        }
    }
    H5S.close(dataSpace);

    var doubleAttribute = H5A.open(dataSet, "double");
#if true
    //double pi = 0.0; // Won't work
    // Boxed double: pinning the box lets H5A.read write the value in place.
    object pi = 0.0;
    H5A.read(doubleAttribute, H5T.NATIVE_DOUBLE, new PinnedObject(pi));
    WriteLine($"PI = {pi}");
#else
    var values = new double[1];
    H5A.read(doubleAttribute, H5T.NATIVE_DOUBLE, new PinnedObject(values));
    WriteLine($"PI = {values[0]}");
#endif
    H5A.close(doubleAttribute);

    WriteLine($"string: {ReadStringAttribute(dataSet, "string")}");
    WriteLine($"string-ascii: {ReadStringAttribute(dataSet, "string-ascii")}");
    WriteLine($"string-vlen: {ReadStringAttribute(dataSet, "string-vlen")}");
    WriteLine($"boolean-8-bit-enum: {ReadEnumAttribute<Boolean>(dataSet, "boolean-8-bit-enum")}");

    H5D.close(dataSet);
    H5F.close(file);
}
/// <summary>
/// Reads the whole dataset (full selection) into a CLR Array of ClrType elements;
/// string datasets are decoded from fixed-length ASCII rows into string[].
/// </summary>
public Array Get()
{
    Array result = null;
    WithDataSpace((h5Ref, dsRef) =>
    {
        // Reset any previous selection, then select everything.
        var success = H5S.select_none(dsRef);
        if (success < 0)
        {
            throw new H5SSException("Error with dataspace: select_none");
        }
        success = H5S.select_all(dsRef);
        if (success < 0)
        {
            throw new H5SSException("Error with dataspace: select_all");
        }
        int selectElemNpoints = (int)H5S.get_select_npoints(dsRef);
        var effectiveSize = ElementSize * selectElemNpoints;
        if (DataType == HDF5DataType.String)
        {
            // Strings are read as fixed-length byte runs of _stringLength each.
            effectiveSize *= _stringLength;
        }
        IntPtr iPtr = Marshal.AllocHGlobal(effectiveSize); // TODO Deallocate
        try
        {
            var dtype = H5D.get_type(h5Ref); // Return?
            success = H5D.read(h5Ref, dtype, H5S.ALL, dsRef, H5P.DEFAULT, iPtr);
            H5T.close(dtype);
            if (success < 0)
            {
                throw new H5SSException("Error reading dataset");
            }
            var tmp = CreateClrArray(iPtr, selectElemNpoints);
            var shape = Shape.Select(ul => (long)ul).ToArray();
            // Raw string data arrives as bytes: widen the shape by the string length.
            if (ClrType == typeof(byte))
            {
                shape = shape.Concat(new[] { (long)_stringLength }).ToArray();
            }
            result = Array.CreateInstance(ClrType, shape);
            Buffer.BlockCopy(tmp, 0, result, 0, effectiveSize);
        }
        finally
        {
            Marshal.FreeHGlobal(iPtr);
        }
        // Convert bytes to characters...
        if (DataType == HDF5DataType.String)
        {
            byte[,] byteArray = (byte[, ])result;
            result = Enumerable.Range(0, byteArray.GetLength(0)).Select(i =>
            {
                var slice = Enumerable.Range(0, byteArray.GetLength(1)).Select(j => byteArray[i, j]).ToArray();
                //return System.Text.Encoding.Default.GetString(slice);
                return (Encoding.ASCII.GetString(slice).TrimEnd((Char)0));
            }).ToArray();
        }
        // NOTE(review): refreshes the cached _shape/_maxDims from the dataspace;
        // intent unclear here (original comment read "WTF?") — confirm necessity.
        H5S.get_simple_extent_dims(dsRef, _shape, _maxDims);
    });
    return (result);
}
/// <summary>
/// Collects metadata for a dataset (name, type class, element size, dimensions,
/// storage size) plus the string value of every attribute attached to it.
/// </summary>
/// <param name="originalDatasetName">Dataset name to resolve within the open file.</param>
/// <returns>Key/value map of metadata and attribute values, or null when the
/// file is not open or the dataset cannot be found.</returns>
public Dictionary <string, string> GetDatasetAttributes(string originalDatasetName)
{
    H5DataSetId datasetId = 0;
    H5GroupId groupId = 0;
    H5DataTypeId typeId = 0;
    H5DataSpaceId spaceId = 0;
    try
    {
        if (_h5FileId < 0)
        {
            return (null);
        }
        string datasetName = GetDatasetFullNames(originalDatasetName, _h5FileId);
        if (string.IsNullOrEmpty(datasetName))
        {
            return (null);
        }
        int groupIndex = datasetName.LastIndexOf('/');
        if (groupIndex == -1)
        {
            datasetId = H5D.open(_h5FileId, datasetName);
        }
        else
        {
            // Open the containing group first, then the dataset relative to it.
            string groupName = datasetName.Substring(0, groupIndex + 1);
            string dsName = datasetName.Substring(groupIndex + 1);
            groupId = H5G.open(_h5FileId, groupName);
            datasetId = H5D.open(groupId, dsName);
        }
        if (datasetId == 0)
        {
            return (null);
        }
        Dictionary <string, string> attValues = new Dictionary <string, string>();
        typeId = H5D.get_type(datasetId);
        H5T.class_t type = H5T.get_class(typeId);
        IntPtr tSize = H5T.get_size(typeId);
        spaceId = H5D.get_space(datasetId);
        int length = H5S.get_simple_extent_ndims(spaceId);
        ulong[] dims = new ulong[length];
        H5S.get_simple_extent_dims(spaceId, dims, null);
        ulong storageSize = H5D.get_storage_size(datasetId);
        attValues.Add("DataSetName", datasetName);
        attValues.Add("DataType", type.ToString());
        attValues.Add("DataTypeSize", tSize.ToString() + "Byte");
        attValues.Add("Dims", String.Join("*", dims));
        attValues.Add("StorageSize", storageSize.ToString() + "Byte");
        // Collect the names of all attributes via H5A.iterate.
        ArrayList arrayList = new ArrayList();
        GCHandle handle = GCHandle.Alloc(arrayList);
        ulong n = 0;
        // the callback is defined in H5ATest.cs
        H5A.operator_t cb = (int location_id, IntPtr attr_name, ref H5A.info_t ainfo, IntPtr op_data) =>
        {
            GCHandle hnd = (GCHandle)op_data;
            ArrayList al = (hnd.Target as ArrayList);
            int len = 0;
            // attr_name is a null-terminated UTF-8 buffer; measure, copy, decode.
            while (Marshal.ReadByte(attr_name, len) != 0)
            {
                ++len;
            }
            byte[] buf = new byte[len];
            Marshal.Copy(attr_name, buf, 0, len);
            al.Add(Encoding.UTF8.GetString(buf));
            return (0);
        };
        H5A.iterate(datasetId, H5.index_t.NAME, H5.iter_order_t.NATIVE, ref n, cb, (IntPtr)handle);
        handle.Free();
        foreach (string attName in arrayList)
        {
            attValues.Add(attName, ReadAttributeValue(datasetId, attName));
        }
        return (attValues);
    }
    finally
    {
        // Close in reverse dependency order; 0 means "never opened".
        if (spaceId != 0)
        {
            H5S.close(spaceId);
        }
        if (typeId != 0)
        {
            H5T.close(typeId);
        }
        if (datasetId != 0)
        {
            H5D.close(datasetId);
        }
        if (groupId != 0)
        {
            H5G.close(groupId);
        }
    }
}
/// <summary>
/// Reads an attribute and returns its value as an array (or ASCII string) boxed
/// in an object, dispatching on the attribute's type class, size and sign.
/// Returns null for unsupported type classes.
/// </summary>
private object GetAttributeValue(int _h5FileId, string attributeName)
{
    H5AttributeId attId = H5A.open(_h5FileId, attributeName);
    if (attId == 0)
    {
        return (null);
    }
    H5DataTypeId typeId = 0;
    H5DataTypeId dtId = 0;
    H5A.info_t attInfo = new H5A.info_t();
    H5DataSpaceId spaceId = 0;
    H5DataTypeId oldTypeId = 0;
    object retObject = null;
    try
    {
        typeId = H5A.get_type(attId);
        H5A.get_info(attId, ref attInfo);
        dtId = H5A.get_type(attId);
        spaceId = H5A.get_space(attId);
        IntPtr dataSize = H5T.get_size(dtId);
        //
        oldTypeId = typeId;
        typeId = H5T.get_native_type(typeId, H5T.direction_t.DEFAULT);
        H5T.class_t typeClass = H5T.get_class(typeId);
        int ndims = H5S.get_simple_extent_ndims(spaceId);
        ulong[] dims = new ulong[ndims];
        H5S.get_simple_extent_dims(spaceId, dims, null);
        // Total element count across all dimensions; scalar (rank 0) counts as 1.
        ulong dimSize = 1;
        if (dims.Length == 0)
        {
            dimSize = 1;
        }
        else
        {
            foreach (ulong dim in dims)
            {
                dimSize *= dim;
            }
        }
        switch (typeClass)
        {
        case H5T.class_t.NO_CLASS:
            break;

        case H5T.class_t.INTEGER:
            // H5T.Sign.TWOS_COMPLEMENT;
            // NOTE(review): `oldTypeId = typeId;` above is commented out in the
            // original source, so get_sign would be querying handle 0 here —
            // confirm whether the assignment was meant to be active.
            H5T.sign_t sign = H5T.get_sign(oldTypeId);
            // Dispatch on element width and signedness.
            switch (dataSize.ToInt32())
            {
            case 1:
                retObject = ReadArray <byte>(dimSize, attId, typeId);
                break;

            case 2:
                switch (sign)
                {
                case H5T.sign_t.SGN_2:
                    retObject = ReadArray <Int16>(dimSize, attId, typeId);
                    break;

                case H5T.sign_t.NONE:
                    retObject = ReadArray <UInt16>(dimSize, attId, typeId);
                    break;
                }
                break;

            case 4:
                switch (sign)
                {
                case H5T.sign_t.SGN_2:
                    retObject = ReadArray <Int32>(dimSize, attId, typeId);
                    break;

                case H5T.sign_t.NONE:
                    retObject = ReadArray <UInt32>(dimSize, attId, typeId);
                    break;
                }
                break;

            case 8:
                switch (sign)
                {
                case H5T.sign_t.SGN_2:
                    retObject = ReadArray <Int64>(dimSize, attId, typeId);
                    break;

                case H5T.sign_t.NONE:
                    retObject = ReadArray <UInt64>(dimSize, attId, typeId);
                    break;
                }
                break;
            }
            break;

        case H5T.class_t.FLOAT:
            switch (dataSize.ToInt32())
            {
            case 4:
                retObject = ReadArray <float>(dimSize, attId, typeId);
                break;

            case 8:
                retObject = ReadArray <double>(dimSize, attId, typeId);
                break;
            }
            break;

        case H5T.class_t.STRING:
            // data_size is the total byte length of the stored string.
            ulong size = attInfo.data_size;
            byte[] chars = ReadArray <byte>(size, attId, typeId);
            retObject = Encoding.ASCII.GetString(chars);
            break;

        default:
            break;
        }
        return (retObject);
    }
    finally
    {
        if (spaceId != 0)
        {
            H5S.close(spaceId);
        }
        if (attId != 0)
        {
            H5A.close(attId);
        }
        if (oldTypeId != 0)
        {
            H5T.close(oldTypeId);
        }
        if (typeId != 0)
        {
            H5T.close(typeId);
        }
        if (dtId != 0)
        {
            H5T.close(dtId);
        }
    }
}
/// <summary>
/// Appends a dataset to a hdf5 file. If called the first time a dataset is created
/// </summary>
/// <typeparam name="T">Generic parameter only primitive types are allowed</typeparam>
/// <param name="groupId">id of the group. Can also be a file Id</param>
/// <param name="name">name of the dataset</param>
/// <param name="dset">The dataset</param>
/// <param name="chunkX">Chunk length along the first (append) dimension.</param>
/// <returns>status of the write method</returns>
public static hid_t AppendDataset <T>(hid_t groupId, string name, Array dset, ulong chunkX = 200) where T : struct
{
    var rank = dset.Rank;
    // Extent of the incoming data, per dimension.
    ulong[] dimsExtend = Enumerable.Range(0, rank).Select(i =>
    {
        return ((ulong)dset.GetLength(i));
    }).ToArray();
    ulong[] maxDimsExtend = null;
    // Chunk shape: chunkX rows, full extent in the remaining dimensions.
    ulong[] dimsChunk = new ulong[] { chunkX }.Concat(dimsExtend.Skip(1)).ToArray();
    ulong[] zeros = Enumerable.Range(0, rank).Select(z => (ulong)0).ToArray();
    hid_t status, spaceId, datasetId;

    // name = ToHdf5Name(name);
    var datatype = GetDatatype(typeof(T));
    var typeId = H5T.copy(datatype);
    var datasetExists = H5L.exists(groupId, name) > 0;

    /* Create a new dataset within the file using chunk
     * creation properties.
     */
    if (!datasetExists)
    {
        // First call: create a chunked dataset and write the whole array.
        spaceId = H5S.create_simple(dset.Rank, dimsExtend, maxDimsExtend);
        var propId = H5P.create(H5P.DATASET_CREATE);
        status = H5P.set_chunk(propId, rank, dimsChunk);
        datasetId = H5D.create(groupId, name, datatype, spaceId,
                               H5P.DEFAULT, propId, H5P.DEFAULT);
        /* Write data to dataset */
        GCHandle hnd = GCHandle.Alloc(dset, GCHandleType.Pinned);
        status = H5D.write(datasetId, datatype, H5S.ALL, H5S.ALL, H5P.DEFAULT,
                           hnd.AddrOfPinnedObject());
        hnd.Free();
        H5P.close(propId);
    }
    else
    {
        // Subsequent calls: grow the first dimension and write into the new tail.
        datasetId = H5D.open(groupId, name);
        spaceId = H5D.get_space(datasetId);
        var rank_old = H5S.get_simple_extent_ndims(spaceId);
        ulong[] maxDims = new ulong[rank_old];
        ulong[] dims = new ulong[rank_old];
        var memId1 = H5S.get_simple_extent_dims(spaceId, dims, maxDims);

        ulong[] oldChunk = null;
        int chunkDims = 0;
        // NOTE(review): propId is a DATASET_ACCESS list and oldChunk is null —
        // H5P.get_chunk expects the dataset's *creation* plist and a buffer;
        // this call cannot return chunk info. Confirm intent.
        var propId = H5P.create(H5P.DATASET_ACCESS);
        status = H5P.get_chunk(propId, chunkDims, oldChunk);

        /* Extend the dataset. */
        var size = new ulong[] { dims[0] + dimsExtend[0] }.Concat(dims.Skip(1)).ToArray();
        status = H5D.set_extent(datasetId, size);

        /* Select a hyperslab in extended portion of dataset */
        var filespaceId = H5D.get_space(datasetId);
        var offset = new ulong[] { dims[0] }.Concat(zeros.Skip(1)).ToArray();
        status = H5S.select_hyperslab(filespaceId, H5S.seloper_t.SET, offset, null,
                                      dimsExtend, null);

        /* Define memory space */
        var memId2 = H5S.create_simple(rank, dimsExtend, null);

        /* Write the data to the extended portion of dataset */
        // NOTE(review): the write passes `spaceId` (the pre-extend dataspace)
        // while the hyperslab was selected on `filespaceId` — the selection is
        // never used. Confirm whether filespaceId was intended here.
        GCHandle hnd = GCHandle.Alloc(dset, GCHandleType.Pinned);
        status = H5D.write(datasetId, datatype, memId2, spaceId,
                           H5P.DEFAULT, hnd.AddrOfPinnedObject());
        hnd.Free();
        H5S.close(memId1);
        H5S.close(memId2);
        // NOTE(review): filespaceId is a dataspace — H5D.close looks like the
        // wrong API here (should presumably be H5S.close). Also memId1 holds the
        // get_simple_extent_dims return value, not a dataspace id.
        H5D.close(filespaceId);
    }
    H5D.close(datasetId);
    H5S.close(spaceId);
    return (status);
}
/// <summary>
/// Reads the FY-4 calibration datasets ("CALChannelNN") for the requested bands
/// as float vectors, byte-swapping any buffer whose values look like the wrong
/// endianness.
/// </summary>
/// <param name="srcbandpro">Dataset descriptor providing the file name and band table.</param>
/// <param name="bands">1-based band indices to read.</param>
/// <returns>One float[] per requested band.</returns>
private List <float[]> ReadDataSetToSingle(AbstractWarpDataset srcbandpro, int[] bands)
{
    List <float[]> datas = new List <float[]>();
    var prjBands = PrjBandTable.GetPrjBands(srcbandpro);
    H5ID h5FileId = H5F.open(srcbandpro.fileName, H5F.ACC_RDONLY);
    foreach (int index in bands)
    {
        //Single[] data = new Single[srcSize.Width * srcSize.Height];
        // Map the caller's band index to the file's dataset index.
        var bandIndex = prjBands[index - 1].DataSetIndex;
        string dsName = "CALChannel" + bandIndex.ToString("00");
        H5ID datasetId = H5D.open(h5FileId, dsName);
        if (datasetId <= 0)
        {
            throw new ArgumentNullException(string.Format("FY4辐射定标,未找到名称为{0}的数据.",
                                                          "CALChannel" + index.ToString("00")));
        }
        H5ID typeId = H5D.get_type(datasetId);
        H5ID spaceId = H5D.get_space(datasetId);
        if (H5T.get_class(typeId) == H5T.class_t.FLOAT)
        {
            int rank = H5S.get_simple_extent_ndims(spaceId);
            ulong[] dims = new ulong[rank];
            ulong[] maxDims = new ulong[rank];
            H5S.get_simple_extent_dims(spaceId, dims, maxDims);
            float[] buffer = new float[dims[0]];
            GCHandle hnd = GCHandle.Alloc(buffer, GCHandleType.Pinned);
            H5D.read(datasetId, typeId, H5S.ALL, H5S.ALL, H5P.DEFAULT,
                     hnd.AddrOfPinnedObject());
            hnd.Free();
            // Heuristic endianness fix: absurdly large magnitudes (>1e10) mean the
            // bytes were read in the wrong order, so reverse every element.
            if (buffer.Any(t => t > Math.Pow(10, 10) || t < -Math.Pow(10, 10)))
            {
                for (int i = 0; i < buffer.Length; i++)
                {
                    var t = BitConverter.GetBytes(buffer[i]);
                    Array.Reverse(t);
                    buffer[i] = BitConverter.ToSingle(t, 0);
                }
            }
            datas.Add(buffer);
        }
        if (spaceId != 0)
        {
            H5S.close(spaceId);
        }
        if (typeId != 0)
        {
            H5T.close(typeId);
        }
        if (datasetId != 0)
        {
            H5D.close(datasetId);
        }
    }
    if (h5FileId != 0)
    {
        H5F.close(h5FileId);
    }
    return (datas);
}