public void H5Dget_typeTest1()
{
    // 2-D dataspace: the rank passed to create_simple must match the
    // number of extents in 'dims'.  (The original passed rank 3 with a
    // 2-element dims array, which makes H5S.create_simple fail.)
    hsize_t[] dims = { 1024, 2048 };
    hid_t space = H5S.create_simple(2, dims, null);

    // Create a dataset in the v0 test file and verify the retrieved
    // datatype is the one it was created with.
    hid_t dset = H5D.create(m_v0_test_file, "dset", H5T.STD_I16LE, space);
    Assert.IsTrue(dset >= 0);
    hid_t type = H5D.get_type(dset);
    Assert.IsTrue(type >= 0);
    Assert.IsTrue(H5T.equal(type, H5T.STD_I16LE) > 0);
    Assert.IsTrue(H5T.close(type) >= 0);
    Assert.IsTrue(H5D.close(dset) >= 0);

    // Repeat for the v2 test file.
    dset = H5D.create(m_v2_test_file, "dset", H5T.STD_I16LE, space);
    Assert.IsTrue(dset >= 0);
    type = H5D.get_type(dset);
    Assert.IsTrue(type >= 0);
    Assert.IsTrue(H5T.equal(type, H5T.STD_I16LE) > 0);
    Assert.IsTrue(H5T.close(type) >= 0);
    Assert.IsTrue(H5D.close(dset) >= 0);

    Assert.IsTrue(H5S.close(space) >= 0);
}
/// <summary>
/// Writes <paramref name="data"/> into the dataset: over the full extent
/// when <paramref name="location"/> is null, otherwise into a contiguous
/// hyperslab of the data's shape starting at that offset.
/// </summary>
/// <param name="data">The managed array to write.</param>
/// <param name="location">Optional start coordinates of the target hyperslab.</param>
/// <exception cref="H5SSException">On selection or write failure.</exception>
public void Put(Array data, ulong[] location = null)
{
    ulong[] shape = data.Shape();

    WithDataSpace((h5Ref, dsRef) =>
    {
        long memDataSpace = H5S.ALL;

        if (location != null)
        {
            int selection = H5S.select_none(dsRef);
            if (selection < 0)
            {
                throw new H5SSException("Couldn't clear dataspace selection");
            }

            // Unit stride and count with block == shape selects one
            // contiguous region of 'shape' elements at 'location'.
            ulong[] stride = Ones(shape.Length);
            selection = H5S.select_hyperslab(dsRef, H5S.seloper_t.SET, location, stride, stride, shape);
            if (selection < 0)
            {
                throw new H5SSException("Couldn't select hyperslab");
            }

            memDataSpace = H5S.create_simple(shape.Length, shape, shape);
        }

        var dtype = H5D.get_type(h5Ref);
        IntPtr iPtr = IntPtr.Zero;
        long success = -1;

        try
        {
            // Copy the managed array into unmanaged memory for the write.
            iPtr = CreateNativeArray(data, dtype);
            success = H5D.write(h5Ref, dtype, memDataSpace, dsRef, H5P.DEFAULT, iPtr);
        }
        finally
        {
            // Release native resources even if CreateNativeArray or the
            // write throws (the original leaked them on that path).
            H5T.close(dtype);

            if (location != null)
            {
                H5S.close(memDataSpace);
            }

            if (iPtr != IntPtr.Zero)
            {
                Marshal.FreeHGlobal(iPtr);
            }
        }

        if (success < 0)
        {
            throw new H5SSException(string.Format("Couldn't write to dataset: {0}", this.Path));
        }
    });
}
/// <summary>
/// Reads a 2-D double dataset into a managed array, optionally transposed.
/// </summary>
/// <param name="fileLoc">File or group id containing the dataset.</param>
/// <param name="name">Dataset name.</param>
/// <param name="transpose">When true, returns the [cols, rows] transpose.</param>
private static double[,] ReadDataArray(hid_t fileLoc, string name, bool transpose = false)
{
    hid_t dset = H5D.open(fileLoc, name);
    hid_t fspace = H5D.get_space(dset);
    hid_t count = H5S.get_simple_extent_ndims(fspace);
    hsize_t[] dims = new hsize_t[count];
    hsize_t[] maxdims = new hsize_t[count];
    H5S.get_simple_extent_dims(fspace, dims, maxdims);
    H5S.close(fspace);

    // Raw buffer for dims[0] x dims[1] little-endian doubles (8 bytes each).
    byte[] rdata = new byte[dims[0] * dims[1] * 8];
    hid_t mem_type = H5T.copy(H5T.NATIVE_DOUBLE);
    H5T.set_size(mem_type, new IntPtr(8));

    GCHandle hnd = GCHandle.Alloc(rdata, GCHandleType.Pinned);
    H5D.read(dset, mem_type, H5S.ALL, H5S.ALL, H5P.DEFAULT, hnd.AddrOfPinnedObject());
    hnd.Free();

    // Close all ids (the original leaked the dataset handle).
    H5T.close(mem_type);
    H5D.close(dset);

    if (transpose)
    {
        double[,] val = new double[dims[1], dims[0]];
        int cnt = 0;
        for (int i = 0; i < (int)dims[0]; i++)
        {
            for (int j = 0; j < (int)dims[1]; j++)
            {
                val[j, i] = BitConverter.ToDouble(rdata, cnt * 8);
                cnt++;
            }
        }
        return val;
    }
    else
    {
        double[,] val = new double[dims[0], dims[1]];
        int cnt = 0;
        for (int i = 0; i < (int)dims[0]; i++)
        {
            for (int j = 0; j < (int)dims[1]; j++)
            {
                val[i, j] = BitConverter.ToDouble(rdata, cnt * 8);
                cnt++;
            }
        }
        return val;
    }
}
// Reads a 1-D COMPOUND dataset named 'datasetName' and returns the first
// record as field-name -> stringified-value pairs.
// Returns null when there are no sub-datasets, or the dataset is not a
// compound type, or it is not one-dimensional.
public Dictionary <string, string> TryReadDataTable(string datasetName)
{
    Dictionary <string, string> result = null;
    var subDsDic = ds.GetSubDatasets();
    if (subDsDic.Count > 0)
    {
        H5ID h5FileId = H5F.open(fileName, H5F.ACC_RDONLY);
        H5ID datasetId = H5D.open(h5FileId, datasetName);
        H5ID typeId = H5D.get_type(datasetId);
        H5ID spaceId = H5D.get_space(datasetId);
        if (H5T.get_class(typeId) == H5T.class_t.COMPOUND)
        {
            // Buffer sized for a single compound record.
            int numCount = H5T.get_nmembers(typeId);
            var size = H5T.get_size(typeId);
            byte[] buffer = new byte[size.ToInt32()];
            GCHandle hnd = GCHandle.Alloc(buffer, GCHandleType.Pinned);
            int ndims = H5S.get_simple_extent_ndims(spaceId);
            if (ndims == 1)
            {
                result = new Dictionary <string, string>();
                // Reads only the first record's worth of bytes into 'buffer'.
                H5D.read(datasetId, typeId, H5S.ALL, H5S.ALL, H5P.DEFAULT, hnd.AddrOfPinnedObject());
                // Decode each member of the compound type at its byte offset
                // within the record.
                for (uint i = 0; i < numCount; i++)
                {
                    string name = Marshal.PtrToStringAnsi(H5T.get_member_name(typeId, i));
                    int offset = H5T.get_member_offset(typeId, i).ToInt32();
                    H5ID subTypeId = H5T.get_member_type(typeId, i);
                    H5T.class_t typeClass = H5T.get_member_class(typeId, i);
                    string value = ReadBuffer(buffer, offset, typeClass, subTypeId);
                    result.Add(name, value);
                    H5T.close(subTypeId);
                }
            }
            hnd.Free();
        }
        // NOTE(review): ids are compared against 0 before closing; HDF5 ids
        // are usually validated with H5I.is_valid or '> 0' — confirm the
        // 0-as-invalid convention used by this wrapper.
        if (spaceId != 0)
        {
            H5S.close(spaceId);
        }
        if (typeId != 0)
        {
            H5T.close(typeId);
        }
        if (datasetId != 0)
        {
            H5D.close(datasetId);
        }
        if (h5FileId != 0)
        {
            H5F.close(h5FileId);
        }
    }
    return(result);
}
/// <summary>
/// WARNING: ADVANCED USE ONLY!! Loads a 2D generic dataset from an H5 file.
/// The generic loaders only loads data in non-Unity friendly types, such as bytes, uints, longs etc...
/// You'll have to know the correct cast to retrieve usable data.
///
/// Created With help from https://github.com/LiorBanai/HDF5-CSharp/blob/master/HDF5-CSharp/Hdf5Dataset.cs
/// </summary>
/// <param name="filePath">Path of the HDF5 file on disk.</param>
/// <param name="datasetName">Name of the 2-D dataset to read.</param>
/// <typeparam name="T">Element type matching the dataset's native type.</typeparam>
/// <returns>The dataset as a 2-D managed array.</returns>
/// <exception cref="FileNotFoundException">When <paramref name="filePath"/> does not exist.</exception>
static T[,] Load2DDataset <T>(string filePath, string datasetName)
{
    if (!File.Exists(filePath))
    {
        throw new FileNotFoundException($"Loading dataset {datasetName} from file that doesn't exist {filePath}");
    }

    long fileId = H5F.open(filePath, H5F.ACC_RDONLY);
    T[,] resultArray = new T[2, 2];
    try
    {
        ulong[] start = { 0, 0 };
        ulong[] count = { 0, 0 };
        long datasetId = H5D.open(fileId, datasetName);
        var datatype = H5D.get_type(datasetId);
        var spaceId = H5D.get_space(datasetId);
        int rank = H5S.get_simple_extent_ndims(spaceId);
        ulong[] maxDims = new ulong[rank];
        ulong[] dims = new ulong[rank];
        H5S.get_simple_extent_dims(spaceId, dims, maxDims);
        count[0] = dims[0];
        count[1] = dims[1];

        // Define file hyperslab (whole extent).
        long status = H5S.select_hyperslab(spaceId, H5S.seloper_t.SET, start, null, count, null);

        // Define the memory dataspace sized to the file extent.
        resultArray = new T[dims[0], dims[1]];
        var memId = H5S.create_simple(rank, dims, null);

        // Define memory hyperslab.
        status = H5S.select_hyperslab(memId, H5S.seloper_t.SET, start, null, count, null);

        // Read from the file hyperslab into the pinned managed array.
        GCHandle handle = GCHandle.Alloc(resultArray, GCHandleType.Pinned);
        try
        {
            H5D.read(datasetId, datatype, memId, spaceId, H5P.DEFAULT, handle.AddrOfPinnedObject());
        }
        finally
        {
            handle.Free();
            // Fixes from review: the original called H5S.close(status) —
            // 'status' is a return code, not a dataspace id — and closed the
            // datatype with H5D.close instead of H5T.close.
            H5S.close(memId);
            H5S.close(spaceId);
            H5T.close(datatype);
            H5D.close(datasetId);
        }
    }
    finally
    {
        H5F.close(fileId);
    }

    return resultArray;
}
// Refreshes this variable's attribute lists from the HDF5 group and then
// walks every link in the group, creating/updating a DatasetInfo per dataset.
private void UpdateVariableInfo(FileContext fileContext, long variableGroupId)
{
    ulong idx;
    idx = 0;

    _variableGroupSet = IOHelper.UpdateAttributeList(variableGroupId, "group_set", _variableGroupSet.ToArray()).ToList();

    // Format version 1 files do not carry unit / transfer-function attributes.
    if (fileContext.FormatVersion != 1)
    {
        _unitSet = IOHelper.UpdateAttributeList(variableGroupId, "unit_set", _unitSet.ToArray()).ToList();
        _transferFunctionSet = IOHelper.UpdateAttributeList(variableGroupId, "transfer_function_set", _transferFunctionSet.ToArray()).ToList();
    }

    // Visit each child link in name order; Callback is invoked once per link.
    H5L.iterate(variableGroupId, H5.index_t.NAME, H5.iter_order_t.INC, ref idx, Callback, IntPtr.Zero);

    int Callback(long variableGroupId2, IntPtr intPtrName, ref H5L.info_t info, IntPtr userDataPtr)
    {
        long datasetId = -1;
        // Deliberately left open: the type id is handed to DatasetInfo,
        // which takes ownership (hence the name).
        long typeId_do_not_close = -1;
        string name;
        DatasetInfo currentDatasetInfo;

        try
        {
            name = Marshal.PtrToStringAnsi(intPtrName);

            if (H5L.exists(variableGroupId2, name) > 0)
            {
                datasetId = H5D.open(variableGroupId2, name);
                currentDatasetInfo = _datasetInfoSet.FirstOrDefault(datasetInfo => datasetInfo.Name == name);

                // First sighting of this dataset: register a DatasetInfo for it.
                if (currentDatasetInfo == null)
                {
                    typeId_do_not_close = H5D.get_type(datasetId);
                    currentDatasetInfo = new DatasetInfo(name, typeId_do_not_close, this, this.IsLazyLoading);
                    _datasetInfoSet.Add(currentDatasetInfo);
                }

                currentDatasetInfo.Update(fileContext);
            }
        }
        finally
        {
            if (H5I.is_valid(datasetId) > 0)
            {
                H5D.close(datasetId);
            }
        }

        // 0 tells H5L.iterate to continue with the next link.
        return(0);
    }
}
/// <summary>
/// Returns true when the named dataset's datatype class is STRING.
/// </summary>
/// <param name="fileLoc">File or group id containing the dataset.</param>
/// <param name="name">Dataset name.</param>
private static bool IsHDF5String(hid_t fileLoc, string name)
{
    hid_t dset = H5D.open(fileLoc, name);
    hid_t type = H5D.get_type(dset);
    H5T.class_t cl = H5T.get_class(type);
    // Close both ids (the original leaked them on every call).
    H5T.close(type);
    H5D.close(dset);
    return(cl == H5T.class_t.STRING);
}
/// <summary>
/// Returns the datatype of an already-open dataset, or null when the
/// type id could not be obtained.
/// </summary>
/// <param name="_objectId">Identifier of the open dataset.</param>
/// <returns>The wrapped datatype, or null on failure.</returns>
public static Hdf5DataType GetDataTypeFromDataset(Hdf5Identifier _objectId)
{
    var typeId = H5D.get_type(_objectId.Value).ToId();

    // A non-positive id means H5D.get_type failed.
    return typeId.Value > 0 ? GetDataTypeByType(typeId) : null;
}
// Loads a hyperslab of 'datasetPath' and, when a sibling "<path>_status"
// dataset exists, combines data and status bytes into an extended storage
// that is then flattened to a simple storage. Otherwise the raw data is
// returned directly as doubles.
private ISimpleDataStorage LoadDataset(long sourceFileId, string datasetPath, ulong start, ulong stride, ulong block, ulong count)
{
    long datasetId = -1;
    long typeId = -1;

    Array dataset;
    Array dataset_status;
    Type genericType;
    ExtendedDataStorageBase extendedDataStorage;
    ISimpleDataStorage simpleDataStorage;

    dataset = IOHelper.ReadDataset(sourceFileId, datasetPath, start, stride, block, count);

    // apply status (only if native dataset)
    if (H5L.exists(sourceFileId, datasetPath + "_status") > 0)
    {
        try
        {
            // The dataset is opened only to learn its HDF type, which picks
            // the generic argument for ExtendedDataStorage<T>.
            datasetId = H5D.open(sourceFileId, datasetPath);
            typeId = H5D.get_type(datasetId);

            dataset_status = IOHelper.ReadDataset(sourceFileId, datasetPath + "_status", start, stride, block, count).Cast <byte>().ToArray();
            genericType = typeof(ExtendedDataStorage <>).MakeGenericType(TypeConversionHelper.GetTypeFromHdfTypeId(typeId));
            extendedDataStorage = (ExtendedDataStorageBase)Activator.CreateInstance(genericType, dataset, dataset_status);

            dataset_status = null;
        }
        finally
        {
            if (H5I.is_valid(datasetId) > 0) { H5D.close(datasetId); }
            if (H5I.is_valid(typeId) > 0) { H5T.close(typeId); }
        }

        // Flatten to the simple representation and release the extended one.
        simpleDataStorage = extendedDataStorage.ToSimpleDataStorage();
        extendedDataStorage.Dispose();

        return(simpleDataStorage);
    }
    else
    {
        return(new SimpleDataStorage(dataset.Cast <double>().ToArray()));
    }
}
/// <summary>
/// Reads an n-dimensional dataset.
/// </summary>
/// <typeparam name="T">Generic parameter strings or primitive type</typeparam>
/// <param name="groupId">id of the group. Can also be a file Id</param>
/// <param name="name">name of the dataset</param>
/// <returns>The n-dimensional dataset; an empty array when the dataspace is invalid.</returns>
public static Array ReadDatasetToArray <T>(hid_t groupId, string name) //where T : struct
{
    var datatype = GetDatatype(typeof(T));
    var datasetId = H5D.open(groupId, name);
    var spaceId = H5D.get_space(datasetId);
    int rank = H5S.get_simple_extent_ndims(spaceId);
    long count = H5S.get_simple_extent_npoints(spaceId);
    Array dset;
    Type type = typeof(T);

    if (rank >= 0 && count >= 0)
    {
        ulong[] maxDims = new ulong[rank];
        ulong[] dims = new ulong[rank];
        ulong[] chunkDims = new ulong[rank];
        // Return value is a status code, not an id (the original stored it
        // in a variable named 'memId', which was misleading).
        H5S.get_simple_extent_dims(spaceId, dims, maxDims);
        long[] lengths = dims.Select(d => Convert.ToInt64(d)).ToArray();
        dset = Array.CreateInstance(type, lengths);

        var typeId = H5D.get_type(datasetId);
        var mem_type = H5T.copy(datatype);
        if (datatype == H5T.C_S1)
        {
            // NOTE(review): this mutates the shared C_S1 type rather than
            // the copy — presumably intentional for 2-char strings; confirm.
            H5T.set_size(datatype, new IntPtr(2));
        }

        var propId = H5D.get_create_plist(datasetId);
        if (H5D.layout_t.CHUNKED == H5P.get_layout(propId))
        {
            H5P.get_chunk(propId, rank, chunkDims);
        }
        H5P.close(propId);

        var memId = H5S.create_simple(rank, dims, maxDims);
        GCHandle hnd = GCHandle.Alloc(dset, GCHandleType.Pinned);
        H5D.read(datasetId, datatype, memId, spaceId, H5P.DEFAULT, hnd.AddrOfPinnedObject());
        hnd.Free();

        // Close ids the original leaked.
        H5S.close(memId);
        H5T.close(mem_type);
        H5T.close(typeId);
    }
    else
    {
        dset = Array.CreateInstance(type, new long[1] { 0 });
    }

    H5D.close(datasetId);
    H5S.close(spaceId);
    return(dset);
}
// Reads a dataset of variable-length strings. Tries 'name' first, then
// 'alternativeName'; returns (false, empty) when neither can be opened.
public static (bool success, IEnumerable <string> result) ReadStrings(long groupId, string name, string alternativeName)
{
    var datasetId = H5D.open(groupId, Hdf5Utils.NormalizedName(name));
    if (datasetId < 0) //does not exist?
    {
        datasetId = H5D.open(groupId, Hdf5Utils.NormalizedName(alternativeName));
    }
    if (datasetId <= 0)
    {
        Hdf5Utils.LogError?.Invoke($"Error reading {groupId}. Name:{name}. AlternativeName:{alternativeName}");
        return(false, Array.Empty <string>());
    }

    long typeId = H5D.get_type(datasetId);
    long spaceId = H5D.get_space(datasetId);
    long count = H5S.get_simple_extent_npoints(spaceId);
    H5S.close(spaceId);

    var strs = new List <string>();
    if (count >= 0)
    {
        // Each element is a pointer to a NUL-terminated byte string
        // allocated by the HDF5 library.
        IntPtr[] rdata = new IntPtr[count];
        GCHandle hnd = GCHandle.Alloc(rdata, GCHandleType.Pinned);
        H5D.read(datasetId, typeId, H5S.ALL, H5S.ALL, H5P.DEFAULT, hnd.AddrOfPinnedObject());

        for (int i = 0; i < rdata.Length; ++i)
        {
            // Scan for the terminating NUL to find the string length.
            int len = 0;
            while (Marshal.ReadByte(rdata[i], len) != 0)
            {
                ++len;
            }
            byte[] buffer = new byte[len];
            Marshal.Copy(rdata[i], buffer, 0, buffer.Length);
            string s = Hdf5Utils.ReadStringBuffer(buffer);
            strs.Add(s);
            // NOTE(review): the library-allocated buffers are never
            // reclaimed (H5.free_memory is commented out) — presumably
            // disabled deliberately; confirm whether this leaks per read.
            // H5.free_memory(rdata[i]);
        }
        hnd.Free();
    }

    H5T.close(typeId);
    H5D.close(datasetId);
    return(true, strs);
}
/// <summary>
/// Opens the dataset at <paramref name="datasetPath"/> when the link exists
/// (verifying its datatype matches <paramref name="datasetTypeId"/>),
/// otherwise creates it via <paramref name="createDatasetCallback"/>.
/// </summary>
/// <returns>The dataset id and whether it was newly created.</returns>
public static (long DatasetId, bool IsNew) OpenOrCreateDataset(long locationId, string datasetPath, long datasetTypeId, Func <long> createDatasetCallback)
{
    Contract.Requires(createDatasetCallback != null);

    long resultId = -1;
    long actualTypeId = -1;
    bool created;

    try
    {
        if (IOHelper.CheckLinkExists(locationId, datasetPath))
        {
            // Existing dataset: open it and make sure the stored type is
            // the one the caller expects.
            resultId = H5D.open(locationId, datasetPath);
            actualTypeId = H5D.get_type(resultId);

            if (H5T.equal(actualTypeId, datasetTypeId) <= 0)
            {
                throw new Exception($"{ ErrorMessage.IOHelper_DataTypeMismatch } Dataset: '{ datasetPath }'.");
            }

            created = false;
        }
        else
        {
            resultId = createDatasetCallback.Invoke();
            created = true;
        }

        if (H5I.is_valid(resultId) <= 0)
        {
            throw new Exception($"{ ErrorMessage.IOHelper_CouldNotOpenOrCreateDataset } Dataset: '{ datasetPath }'.");
        }
    }
    finally
    {
        // The comparison type id is only needed for the equality check.
        if (H5I.is_valid(actualTypeId) > 0)
        {
            H5T.close(actualTypeId);
        }
    }

    return (resultId, created);
}
// Reads a 1-D float dataset whose name contains 'datasetName' and returns
// its values as a scale-factor array; returns null when the dataset's
// type class is not FLOAT.
public float[] TryReadFactor(AbstractWarpDataset ds, string datasetName)
{
    // Resolve the full dataset path by substring match against the
    // file's dataset names.
    string dsPath = (ds.hdfOperator as Hdf5Operator).GetDatasetNames.Where(t => t.Contains(datasetName))
                    .FirstOrDefault();
    float[] factor = null;
    int h5FileId = H5F.open(ds.fileName, H5F.ACC_RDONLY);
    int datasetId = H5D.open(h5FileId, dsPath);
    int typeId = H5D.get_type(datasetId);
    int spaceId = H5D.get_space(datasetId);
    if (H5T.get_class(typeId) == H5T.class_t.FLOAT)
    {
        var size = H5T.get_size(typeId);
        int rank = H5S.get_simple_extent_ndims(spaceId);
        ulong[] dims = new ulong[rank];
        int err = H5S.get_simple_extent_dims(spaceId, dims, null);
        // Only the first dimension is used — the dataset is assumed to be
        // one-dimensional (TODO confirm for rank > 1 sources).
        factor = new float[dims[0]];
        GCHandle hnd = GCHandle.Alloc(factor, GCHandleType.Pinned);
        H5D.read(datasetId, typeId, H5S.ALL, H5S.ALL, H5P.DEFAULT, hnd.AddrOfPinnedObject());
        hnd.Free();
    }
    // NOTE(review): ids compared against 0 before close; confirm the
    // 0-as-invalid convention used by this wrapper.
    if (spaceId != 0)
    {
        H5S.close(spaceId);
    }
    if (typeId != 0)
    {
        H5T.close(typeId);
    }
    if (datasetId != 0)
    {
        H5D.close(datasetId);
    }
    if (h5FileId != 0)
    {
        H5F.close(h5FileId);
    }
    return(factor);
}
/// <summary>
/// Reads a 2-D dataset of primitive type <typeparamref name="T"/>.
/// </summary>
/// <param name="groupId">Group or file id containing the dataset.</param>
/// <param name="name">Dataset name (converted via ToHdf5Name).</param>
/// <returns>The dataset as a [dims0, dims1] array.</returns>
public static T[,] ReadDataset <T>(int groupId, string name) where T : struct
{
    var datatype = GetDatatype(typeof(T));

    name = ToHdf5Name(name);

    var datasetId = H5D.open(groupId, name);
    var spaceId = H5D.get_space(datasetId);
    int rank = H5S.get_simple_extent_ndims(spaceId);
    long count = H5S.get_simple_extent_npoints(spaceId);
    ulong[] maxDims = new ulong[rank];
    ulong[] dims = new ulong[rank];
    ulong[] chunkDims = new ulong[rank];
    H5S.get_simple_extent_dims(spaceId, dims, maxDims);
    T[,] dset = new T[dims[0], dims[1]];
    var typeId = H5D.get_type(datasetId);
    var mem_type = H5T.copy(datatype);
    if (datatype == H5T.C_S1)
    {
        // NOTE(review): this resizes the shared C_S1 type, not the copy —
        // presumably intentional; confirm.
        H5T.set_size(datatype, new IntPtr(2));
    }

    var propId = H5D.get_create_plist(datasetId);
    if (H5D.layout_t.CHUNKED == H5P.get_layout(propId))
    {
        H5P.get_chunk(propId, rank, chunkDims);
    }
    H5P.close(propId);

    var memId = H5S.create_simple(rank, dims, maxDims);
    GCHandle hnd = GCHandle.Alloc(dset, GCHandleType.Pinned);
    H5D.read(datasetId, datatype, memId, spaceId, H5P.DEFAULT, hnd.AddrOfPinnedObject());
    hnd.Free();

    // Fix: the original closed the datatype id with H5D.close and leaked
    // mem_type, propId and memId.
    H5S.close(memId);
    H5T.close(mem_type);
    H5T.close(typeId);
    H5D.close(datasetId);
    H5S.close(spaceId);
    return(dset);
}
/// <summary>
/// Reads a byte dataset and decodes it as a string of 2-byte chars.
/// </summary>
/// <param name="fileLoc">File or group id containing the dataset.</param>
/// <param name="name">Dataset name.</param>
private static string ReadDataCharArray(hid_t fileLoc, string name)
{
    hid_t dset = H5D.open(fileLoc, name);
    hid_t fspace = H5D.get_space(dset);
    hid_t count = H5S.get_simple_extent_ndims(fspace);
    hid_t type = H5D.get_type(dset);
    hsize_t[] dims = new hsize_t[count];
    hsize_t[] maxdims = new hsize_t[count];
    H5S.get_simple_extent_dims(fspace, dims, maxdims);
    H5S.close(fspace);

    byte[] rdata = new byte[dims[0]];
    hid_t mem_type = H5T.copy(type);
    H5T.set_size(mem_type, new IntPtr(1));
    GCHandle hnd = GCHandle.Alloc(rdata, GCHandleType.Pinned);
    H5D.read(dset, mem_type, H5S.ALL, H5S.ALL, H5P.DEFAULT, hnd.AddrOfPinnedObject());
    hnd.Free();

    // Close all ids (the original leaked 'type' and 'dset').
    H5T.close(mem_type);
    H5T.close(type);
    H5D.close(dset);

    // NOTE(review): only even indices of 'val' are filled, each from a
    // 2-byte little-endian pair — assumes dims[0] is even; confirm.
    char[] val = new char[dims[0]];
    for (int i = 0; i < (int)dims[0]; i += 2)
    {
        val[i] = BitConverter.ToChar(rdata, i);
    }

    // Fix: char[].ToString() returns "System.Char[]", not the contents.
    return new string(val);
}
/// <summary>
/// Reads a dataset of fixed-length strings and returns them split on NUL.
/// </summary>
/// <param name="filename">Path of the HDF5 file.</param>
/// <param name="dataset">Name of the string dataset.</param>
/// <exception cref="Exception">When the file or dataset cannot be opened.</exception>
public static IEnumerable ReadStrings(string filename, string dataset)
{
    var f = H5F.open(filename, H5F.ACC_RDONLY);
    if (f < 0)
    {
        throw new Exception("Could not open file: " + filename);
    }

    try
    {
        var dset = H5D.open(f, Encoding.ASCII.GetBytes(dataset), H5P.DEFAULT);
        if (dset < 0)
        {
            throw new Exception("Could not open dataset: " + dataset);
        }

        var filetype = H5D.get_type(dset);
        // +1 for the terminating NUL of each fixed-length string.
        var sdim = H5T.get_size(filetype) + 1;
        var space = H5D.get_space(dset);
        var ndims = H5S.get_simple_extent_ndims(space);
        ulong[] dims = new ulong[ndims];
        H5S.get_simple_extent_dims(space, dims, null);

        var memtype = H5T.copy(H5T.C_S1);
        var status = H5T.set_size(memtype, sdim);

        int len = (int)(dims[0] * (ulong)sdim * SIZEOF_CHAR);
        byte[] buffer = new byte[len];
        IntPtr ptr = Marshal.AllocHGlobal(len);
        try
        {
            status = H5D.read(dset, memtype, H5S.ALL, H5S.ALL, H5P.DEFAULT, ptr);
            Marshal.Copy(ptr, buffer, 0, len);
        }
        finally
        {
            // Free the unmanaged buffer even when the read fails.
            Marshal.FreeHGlobal(ptr);
        }

        // Close all ids (the original leaked every one of them, including
        // the file handle).
        H5T.close(memtype);
        H5S.close(space);
        H5T.close(filetype);
        H5D.close(dset);

        string s = Encoding.ASCII.GetString(buffer);
        return(s.Split(new char[] { '\0' }, StringSplitOptions.RemoveEmptyEntries));
    }
    finally
    {
        H5F.close(f);
    }
}
/// <summary>
/// Loads a 1-D dataset of primitive type <typeparamref name="T"/>.
/// </summary>
/// <param name="filePath">Path of the HDF5 file on disk.</param>
/// <param name="datasetName">Name of the dataset to read.</param>
/// <exception cref="FileNotFoundException">When the file does not exist.</exception>
/// <exception cref="ArgumentException">When the dataset cannot be opened.</exception>
public static T[] LoadDataset <T>(string filePath, string datasetName)
{
    if (!File.Exists(filePath))
    {
        throw new FileNotFoundException($"Loading dataset {datasetName} from file that doesn't exist {filePath}");
    }

    long fileId = H5F.open(filePath, H5F.ACC_RDONLY);
    T[] resultArray;
    try
    {
        long datasetId = H5D.open(fileId, datasetName);
        if (datasetId == -1)
        {
            throw new ArgumentException($"Dataset could not be opened. Check filepath exists and is correct. FilePath = {filePath}");
        }

        long typeId = H5D.get_type(datasetId);
        long spaceID = H5D.get_space(datasetId);
        int[] dimensions = GetDatasetDimensions(spaceID);
        resultArray = new T[dimensions[0]];

        GCHandle gch = GCHandle.Alloc(resultArray, GCHandleType.Pinned);
        try
        {
            H5D.read(datasetId, typeId, H5S.ALL, H5S.ALL, H5P.DEFAULT, gch.AddrOfPinnedObject());
        }
        finally
        {
            gch.Free();
            // Fix: the original closed the type and dataspace ids with
            // H5D.close; each id kind has its own close function.
            H5T.close(typeId);
            H5S.close(spaceID);
            H5D.close(datasetId);
        }
    }
    finally
    {
        H5F.close(fileId);
    }

    return(resultArray);
}
/// <summary>
/// Reads the first element of a variable-length UTF-8 string dataset.
/// </summary>
/// <param name="groupId">Group or file id containing the dataset.</param>
/// <param name="name">Dataset name.</param>
public static string ReadUnicodeString(hid_t groupId, string name)
{
    // Memory type: variable-length, NUL-terminated UTF-8 string.
    hid_t datatype = H5T.create(H5T.class_t.STRING, H5T.VARIABLE);
    H5T.set_cset(datatype, H5T.cset_t.UTF8);
    H5T.set_strpad(datatype, H5T.str_t.NULLTERM);

    var datasetId = H5D.open(groupId, name);
    var typeId = H5D.get_type(datasetId);
    // NOTE(review): for a variable-length file type this size is the
    // pointer size, not the string length — preserved from the original;
    // confirm against the files actually read here.
    IntPtr size = H5T.get_size(typeId);
    int strLen = (int)size;

    var spaceId = H5D.get_space(datasetId);
    hid_t count = H5S.get_simple_extent_npoints(spaceId);

    // Each element is a pointer to a library-allocated string.
    IntPtr[] rdata = new IntPtr[count];
    byte[] wdata = new byte[strLen];
    GCHandle hnd = GCHandle.Alloc(rdata, GCHandleType.Pinned);
    H5D.read(datasetId, datatype, H5S.ALL, H5S.ALL, H5P.DEFAULT, hnd.AddrOfPinnedObject());
    Marshal.Copy(rdata[0], wdata, 0, strLen);
    string s = Encoding.UTF8.GetString(wdata);
    hnd.Free();

    // Fix: the original leaked the dataset's type id.
    H5T.close(typeId);
    H5S.close(spaceId);
    H5T.close(datatype);
    H5D.close(datasetId);
    return(s);
}
/// <summary>
/// Loads a dataset of fixed-length strings.
/// With much Help from: https://stackoverflow.com/questions/23295545/reading-string-array-from-a-hdf5-dataset
/// </summary>
/// <param name="filePath">Path of the HDF5 file on disk.</param>
/// <param name="dataSetName">Name of the string dataset.</param>
/// <returns>The strings, split out of one contiguous ASCII buffer.</returns>
public static string[] LoadStringDataset(string filePath, string dataSetName)
{
    long fileId = H5F.open(filePath, H5F.ACC_RDONLY);
    string longJoinedString;
    int stringLength;
    try
    {
        long datasetId = H5D.open(fileId, dataSetName);
        long spaceID = H5D.get_space(datasetId);
        long dataType = H5D.get_type(datasetId);
        int[] dimensions = GetDatasetDimensions(spaceID);
        stringLength = (int)H5T.get_size(dataType);

        // All strings come back in one flat buffer of
        // dimensions[0] fixed-width slots.
        byte[] buffer = new byte[dimensions[0] * stringLength];
        GCHandle gch = GCHandle.Alloc(buffer, GCHandleType.Pinned);
        try
        {
            H5D.read(datasetId, dataType, H5S.ALL, H5S.ALL, H5P.DEFAULT, gch.AddrOfPinnedObject());
            longJoinedString = Encoding.ASCII.GetString(buffer);
        }
        finally
        {
            gch.Free();
            // Fix: the original closed the type and dataspace ids with
            // H5D.close; each id kind has its own close function.
            H5T.close(dataType);
            H5S.close(spaceID);
            H5D.close(datasetId);
        }
    }
    finally
    {
        H5F.close(fileId);
    }

    return(longJoinedString.SplitInParts(stringLength).Select(ss => (string)(object)ss).ToArray());
}
/// <summary>
/// Reads a UTF-8 string dataset into a fixed-size buffer and decodes it.
/// </summary>
/// <param name="groupId">Group or file id containing the dataset.</param>
/// <param name="name">Dataset name.</param>
public static string ReadUnicodeString(hid_t groupId, string name)
{
    // Memory type: variable-length, space-padded UTF-8 string.
    hid_t datatype = H5T.create(H5T.class_t.STRING, H5T.VARIABLE);
    H5T.set_cset(datatype, H5T.cset_t.UTF8);
    H5T.set_strpad(datatype, H5T.str_t.SPACEPAD);

    var datasetId = H5D.open(groupId, name);
    var typeId = H5D.get_type(datasetId);
    // NOTE(review): for a variable-length file type this size is the
    // pointer size, not the string length — preserved from the original;
    // confirm against the files actually read here.
    IntPtr size = H5T.get_size(typeId);
    int strLen = (int)size;

    var spaceId = H5D.get_space(datasetId);
    byte[] wdata = new byte[strLen];

    GCHandle hnd = GCHandle.Alloc(wdata, GCHandleType.Pinned);
    H5D.read(datasetId, datatype, H5S.ALL, H5S.ALL, H5P.DEFAULT, hnd.AddrOfPinnedObject());
    hnd.Free();

    string s = Encoding.UTF8.GetString(wdata);

    // Fix: the original leaked the dataset's type id.
    H5T.close(typeId);
    H5S.close(spaceId);
    H5T.close(datatype);
    H5D.close(datasetId);
    return(s);
}
/// <summary>
/// Reads a 1-D double dataset into a managed vector.
/// </summary>
/// <param name="fileLoc">File or group id containing the dataset.</param>
/// <param name="name">Dataset name.</param>
private static double[] ReadDataVector(hid_t fileLoc, string name)
{
    hid_t dset = H5D.open(fileLoc, name);
    hid_t fspace = H5D.get_space(dset);
    hid_t count = H5S.get_simple_extent_ndims(fspace);
    hsize_t[] dims = new hsize_t[count];
    hsize_t[] maxdims = new hsize_t[count];
    H5S.get_simple_extent_dims(fspace, dims, maxdims);
    H5S.close(fspace);

    // Raw buffer for dims[0] doubles (8 bytes each).
    byte[] rdata = new byte[dims[0] * 8];
    hid_t mem_type = H5T.copy(H5T.NATIVE_DOUBLE);
    H5T.set_size(mem_type, new IntPtr(8));
    GCHandle hnd = GCHandle.Alloc(rdata, GCHandleType.Pinned);
    H5D.read(dset, mem_type, H5S.ALL, H5S.ALL, H5P.DEFAULT, hnd.AddrOfPinnedObject());
    hnd.Free();

    // Close all ids (the original leaked the dataset handle).
    H5T.close(mem_type);
    H5D.close(dset);

    double[] val = new double[dims[0]];
    for (int i = 0; i < (int)dims[0]; i++)
    {
        val[i] = BitConverter.ToDouble(rdata, i * 8);
    }
    return(val);
}
/// <summary>
/// Reads a fixed-length string dataset as ASCII, dropping one trailing NUL.
/// </summary>
/// <param name="fileLoc">File or group id containing the dataset.</param>
/// <param name="name">Dataset name.</param>
private static string ReadDataString(hid_t fileLoc, string name)
{
    hid_t dset = H5D.open(fileLoc, name);
    hid_t type = H5D.get_type(dset);
    IntPtr size = H5T.get_size(type);
    hid_t fspace = H5D.get_space(dset);
    hid_t mem_type = H5T.copy(type);
    H5T.set_size(mem_type, size);

    byte[] buffer = new byte[size.ToInt32()];
    GCHandle hnd = GCHandle.Alloc(buffer, GCHandleType.Pinned);
    H5D.read(dset, mem_type, H5S.ALL, H5S.ALL, H5P.DEFAULT, hnd.AddrOfPinnedObject());
    hnd.Free();

    // Close all ids (the original leaked 'fspace', 'type' and 'dset').
    H5T.close(mem_type);
    H5S.close(fspace);
    H5T.close(type);
    H5D.close(dset);

    // Remove a single trailing "\0", guarding against an empty buffer
    // (the original indexed buffer[Length - 1] unconditionally).
    int length = buffer.Length;
    if (length > 0 && buffer[length - 1] == 0)
    {
        length--;
    }
    return ASCIIEncoding.ASCII.GetString(buffer, 0, length);
}
// Reads the entire dataset into a managed array of ClrType; string
// datasets are read as fixed-width byte rows and decoded to a string[].
public Array Get()
{
    Array result = null;
    WithDataSpace((h5Ref, dsRef) =>
    {
        // Reset the selection, then select the full extent.
        var success = H5S.select_none(dsRef);
        if (success < 0)
        {
            throw new H5SSException("Error with dataspace: select_none");
        }
        success = H5S.select_all(dsRef);
        if (success < 0)
        {
            throw new H5SSException("Error with dataspace: select_all");
        }
        int selectElemNpoints = (int)H5S.get_select_npoints(dsRef);
        // Byte size of the whole selection; string elements occupy
        // _stringLength bytes each.
        var effectiveSize = ElementSize * selectElemNpoints;
        if (DataType == HDF5DataType.String)
        {
            effectiveSize *= _stringLength;
        }
        IntPtr iPtr = Marshal.AllocHGlobal(effectiveSize); // TODO Deallocate
        try
        {
            var dtype = H5D.get_type(h5Ref); // Return?
            success = H5D.read(h5Ref, dtype, H5S.ALL, dsRef, H5P.DEFAULT, iPtr);
            H5T.close(dtype);
            if (success < 0)
            {
                throw new H5SSException("Error reading dataset");
            }
            // Flat CLR copy of the native buffer, then reshape to Shape
            // (plus a trailing string-width axis for byte-backed strings).
            var tmp = CreateClrArray(iPtr, selectElemNpoints);
            var shape = Shape.Select(ul => (long)ul).ToArray();
            if (ClrType == typeof(byte))
            {
                shape = shape.Concat(new[] { (long)_stringLength }).ToArray();
            }
            result = Array.CreateInstance(ClrType, shape);
            Buffer.BlockCopy(tmp, 0, result, 0, effectiveSize);
        }
        finally
        {
            Marshal.FreeHGlobal(iPtr);
        }
        // Convert bytes to characters...
        if (DataType == HDF5DataType.String)
        {
            // Each row of the [n, _stringLength] byte matrix becomes one
            // NUL-trimmed ASCII string.
            byte[,] byteArray = (byte[, ])result;
            result = Enumerable.Range(0, byteArray.GetLength(0)).Select(i =>
            {
                var slice = Enumerable.Range(0, byteArray.GetLength(1)).Select(j => byteArray[i, j]).ToArray();
                //return System.Text.Encoding.Default.GetString(slice);
                return(Encoding.ASCII.GetString(slice).TrimEnd((Char)0));
            }).ToArray();
        }
        // Refresh cached extents from the dataspace.
        H5S.get_simple_extent_dims(dsRef, _shape, _maxDims); // WTF?
    });
    return(result);
}
// Collects metadata (name, type, size, dims, storage size) plus every HDF5
// attribute of the named dataset into a string dictionary; returns null
// when the file is not open or the dataset cannot be resolved.
public Dictionary <string, string> GetDatasetAttributes(string originalDatasetName)
{
    H5DataSetId datasetId = 0;
    H5GroupId groupId = 0;
    H5DataTypeId typeId = 0;
    H5DataSpaceId spaceId = 0;
    try
    {
        if (_h5FileId < 0)
        {
            return(null);
        }
        // Resolve the dataset's full path inside the file.
        string datasetName = GetDatasetFullNames(originalDatasetName, _h5FileId);
        if (string.IsNullOrEmpty(datasetName))
        {
            return(null);
        }
        // Open via the parent group when the path contains one.
        int groupIndex = datasetName.LastIndexOf('/');
        if (groupIndex == -1)
        {
            datasetId = H5D.open(_h5FileId, datasetName);
        }
        else
        {
            string groupName = datasetName.Substring(0, groupIndex + 1);
            string dsName = datasetName.Substring(groupIndex + 1);
            groupId = H5G.open(_h5FileId, groupName);
            datasetId = H5D.open(groupId, dsName);
        }
        if (datasetId == 0)
        {
            return(null);
        }
        Dictionary <string, string> attValues = new Dictionary <string, string>();
        // Basic dataset metadata.
        typeId = H5D.get_type(datasetId);
        H5T.class_t type = H5T.get_class(typeId);
        IntPtr tSize = H5T.get_size(typeId);
        spaceId = H5D.get_space(datasetId);
        int length = H5S.get_simple_extent_ndims(spaceId);
        ulong[] dims = new ulong[length];
        H5S.get_simple_extent_dims(spaceId, dims, null);
        ulong storageSize = H5D.get_storage_size(datasetId);
        attValues.Add("DataSetName", datasetName);
        attValues.Add("DataType", type.ToString());
        attValues.Add("DataTypeSize", tSize.ToString() + "Byte");
        attValues.Add("Dims", String.Join("*", dims));
        attValues.Add("StorageSize", storageSize.ToString() + "Byte");
        // Collect all attribute names via H5A.iterate (names arrive as
        // native UTF-8 pointers and are copied into the ArrayList).
        ArrayList arrayList = new ArrayList();
        GCHandle handle = GCHandle.Alloc(arrayList);
        ulong n = 0;
        // the callback is defined in H5ATest.cs
        H5A.operator_t cb = (int location_id, IntPtr attr_name, ref H5A.info_t ainfo, IntPtr op_data) =>
        {
            GCHandle hnd = (GCHandle)op_data;
            ArrayList al = (hnd.Target as ArrayList);
            int len = 0;
            while (Marshal.ReadByte(attr_name, len) != 0)
            {
                ++len;
            }
            byte[] buf = new byte[len];
            Marshal.Copy(attr_name, buf, 0, len);
            al.Add(Encoding.UTF8.GetString(buf));
            return(0);
        };
        H5A.iterate(datasetId, H5.index_t.NAME, H5.iter_order_t.NATIVE, ref n, cb, (IntPtr)handle);
        handle.Free();
        // Read each attribute's value by name.
        foreach (string attName in arrayList)
        {
            attValues.Add(attName, ReadAttributeValue(datasetId, attName));
        }
        return(attValues);
    }
    finally
    {
        // NOTE(review): ids compared against 0 before close; confirm the
        // 0-as-invalid convention used by these wrapper typedefs.
        if (spaceId != 0)
        {
            H5S.close(spaceId);
        }
        if (typeId != 0)
        {
            H5T.close(typeId);
        }
        if (datasetId != 0)
        {
            H5D.close(datasetId);
        }
        if (groupId != 0)
        {
            H5G.close(groupId);
        }
    }
}
// Exports the requested time range of every dataset in 'campaignInfoSet'
// from the VDS file into a zip archive; returns the relative download URL
// (or an empty string when there is nothing to export / export aborted).
public Task <string> GetData(DateTime dateTimeBegin, DateTime dateTimeEnd, string sampleRateDescription, FileFormat fileFormat, FileGranularity fileGranularity, Dictionary <string, Dictionary <string, List <string> > > campaignInfoSet)
{
    long fileId = -1;
    long datasetId = -1;
    ulong start;
    ulong stride;
    ulong block;
    ulong count;
    ulong segmentLength;
    ulong segmentSize;
    ulong bytesPerRow;
    double sampleRate;
    DateTime epochStart;
    DateTime epochEnd;
    string zipFilePath;
    // task
    return(Task.Run(() =>
    {
        this.CheckState();
        if (!campaignInfoSet.Any())
        {
            return string.Empty;
        }
        // zip file
        zipFilePath = Path.Combine(_options.SupportDirectoryPath, "EXPORT", $"OneDAS_{ dateTimeBegin.ToString("yyyy-MM-ddTHH-mm") }_{ sampleRateDescription }_{ Guid.NewGuid().ToString() }.zip");
        // sampleRate
        sampleRate = sampleRateDescription.ToSampleRate();
        // epoch & hyperslab
        // The requested window must lie within the fixed 2000..2030 epoch;
        // start/block are expressed in samples relative to the epoch start.
        epochStart = new DateTime(2000, 01, 01);
        epochEnd = new DateTime(2030, 01, 01);
        if (!(epochStart <= dateTimeBegin && dateTimeBegin <= dateTimeEnd && dateTimeEnd <= epochEnd))
        {
            throw new Exception("requirement >> epochStart <= dateTimeBegin && dateTimeBegin <= dateTimeEnd && dateTimeBegin <= epochEnd << is not matched");
        }
        start = (ulong)(Math.Floor((dateTimeBegin - epochStart).TotalSeconds * sampleRate));
        stride = 1;
        block = (ulong)(Math.Ceiling((dateTimeEnd - dateTimeBegin).TotalSeconds * sampleRate));
        count = 1;
        try
        {
            // open file
            fileId = H5F.open(_options.VdsFilePath, H5F.ACC_RDONLY);
            // byte count
            // Sum the per-sample byte width across all requested datasets.
            // NOTE(review): the id returned by H5D.get_type here is never
            // closed — looks like a per-iteration handle leak; confirm.
            bytesPerRow = 0;
            foreach (var campaignInfo in campaignInfoSet)
            {
                foreach (var variableInfo in campaignInfo.Value)
                {
                    foreach (string datasetInfo in variableInfo.Value)
                    {
                        try
                        {
                            datasetId = H5D.open(fileId, $"{ campaignInfo.Key }/{ variableInfo.Key }/{ datasetInfo }");
                            bytesPerRow += (ulong)OneDasUtilities.SizeOf(TypeConversionHelper.GetTypeFromHdfTypeId(H5D.get_type(datasetId)));
                        }
                        finally
                        {
                            if (H5I.is_valid(datasetId) > 0)
                            {
                                H5D.close(datasetId);
                            }
                        }
                    }
                }
            }
            this.GetClient().SendByteCount(bytesPerRow * block);
            // Segment ~50 MiB of row data, rounded down to whole rows, then
            // trimmed to a whole number of minutes.
            segmentSize = (50 * 1024 * 1024) / bytesPerRow * bytesPerRow;
            segmentLength = segmentSize / bytesPerRow;
            // ensure that dataset length is multiple of 1 minute
            if ((segmentLength / sampleRate) % 60 != 0)
            {
                segmentLength = (ulong)((ulong)(segmentLength / sampleRate / 60) * 60 * sampleRate);
            }
            // start
            _stateManager.SetState(this.Context.ConnectionId, HdfExplorerState.Loading);
            using (ZipArchive zipArchive = ZipFile.Open(zipFilePath, ZipArchiveMode.Create))
            {
                foreach (var campaignInfo in campaignInfoSet)
                {
                    HdfDataLoader hdfDataLoader;
                    hdfDataLoader = new HdfDataLoader(_stateManager.GetToken(this.Context.ConnectionId));
                    hdfDataLoader.ProgressUpdated += this.OnProgressUpdated;
                    // Abort the whole export if any campaign entry fails.
                    if (!hdfDataLoader.WriteZipFileCampaignEntry(zipArchive, fileGranularity, fileFormat, new ZipSettings(dateTimeBegin, campaignInfo, fileId, sampleRate, start, stride, block, count, segmentLength)))
                    {
                        return string.Empty;
                    }
                }
            }
        }
        catch (Exception ex)
        {
            this.WriteLogEntry(ex.Message, true);
            throw;
        }
        finally
        {
            _stateManager.SetState(this.Context.ConnectionId, HdfExplorerState.Idle);
            if (H5I.is_valid(fileId) > 0)
            {
                H5F.close(fileId);
            }
        }
        this.WriteLogEntry($"{ this.Context.GetHttpContext().Connection.RemoteIpAddress } requested data: { dateTimeBegin.ToString("yyyy-MM-dd HH:mm:ss") } to { dateTimeEnd.ToString("yyyy-MM-dd HH:mm:ss") }", false);
        return $"download/{ Path.GetFileName(zipFilePath) }";
    }, _stateManager.GetToken(this.Context.ConnectionId)));
}
/// <summary>
/// Writes one campaign of <paramref name="zipSettings"/> into <paramref name="zipArchive"/>:
/// builds variable descriptions from the source HDF5 file, renders the data into temporary
/// files via a format-specific data writer, then zips those files.
/// </summary>
/// <param name="zipArchive">Open ZIP archive to receive the generated files.</param>
/// <param name="fileGranularity">Granularity of the generated files.</param>
/// <param name="fileFormat">Requested output format (no writer is currently wired up; see switch below).</param>
/// <param name="zipSettings">Campaign, source file id and hyperslab parameters.</param>
/// <returns>true on success; false when file creation was aborted (temp directory is cleaned up).</returns>
public bool WriteZipFileCampaignEntry(ZipArchive zipArchive, FileGranularity fileGranularity, FileFormat fileFormat, ZipSettings zipSettings)
{
    IList <VariableDescription> variableDescriptionSet;
    IList <CustomMetadataEntry> customMetadataEntrySet;
    ZipArchiveEntry zipArchiveEntry;
    DataWriterExtensionLogicBase dataWriter;
    DataWriterContext dataWriterContext;

    string directoryPath;
    string[] campaignName_splitted;
    string[] filePathSet;
    int currentFile;
    int fileCount;

    // build variable descriptions
    variableDescriptionSet = new List <VariableDescription>();

    zipSettings.CampaignInfo.Value.ToList().ForEach(variableInfo =>
    {
        variableInfo.Value.ForEach(datasetName =>
        {
            long groupId = -1;
            long typeId = -1;
            long datasetId = -1;

            string displayName;
            string groupName;
            string unit;

            ulong samplesPerDay;

            OneDasDataType oneDasDataType;

            hdf_transfer_function_t[] hdf_transfer_function_t_set;
            List <TransferFunction> transferFunctionSet;

            try
            {
                groupId = H5G.open(zipSettings.SourceFileId, $"{ zipSettings.CampaignInfo.Key }/{ variableInfo.Key }");
                datasetId = H5D.open(groupId, datasetName);
                typeId = H5D.get_type(datasetId);

                // the *_set attributes are histories; the last entry is the current value
                displayName = IOHelper.ReadAttribute <string>(groupId, "name_set").Last();
                groupName = IOHelper.ReadAttribute <string>(groupId, "group_set").Last();
                unit = IOHelper.ReadAttribute <string>(groupId, "unit_set").LastOrDefault();
                hdf_transfer_function_t_set = IOHelper.ReadAttribute <hdf_transfer_function_t>(groupId, "transfer_function_set");
                transferFunctionSet = hdf_transfer_function_t_set.Select(tf => new TransferFunction(DateTime.ParseExact(tf.date_time, "yyyy-MM-ddTHH-mm-ssZ", CultureInfo.InvariantCulture), tf.type, tf.option, tf.argument)).ToList();

                oneDasDataType = OneDasUtilities.GetOneDasDataTypeFromType(TypeConversionHelper.GetTypeFromHdfTypeId(typeId));
                samplesPerDay = OneDasUtilities.GetSamplesPerDayFromString(datasetName);

                variableDescriptionSet.Add(new VariableDescription(new Guid(variableInfo.Key), displayName, datasetName, groupName, oneDasDataType, samplesPerDay, unit, transferFunctionSet, typeof(ISimpleDataStorage)));
            }
            finally
            {
                if (H5I.is_valid(datasetId) > 0) { H5D.close(datasetId); }
                if (H5I.is_valid(groupId) > 0) { H5G.close(groupId); }
                if (H5I.is_valid(typeId) > 0) { H5T.close(typeId); }
            }
        });
    });

    dataWriter = null;

    // REMOVE ONE IMPLEMENTED PROPERLY
    switch (fileFormat)
    {
        case FileFormat.CSV:
            //settings = new CsvSettings() { FileGranularity = fileGranularity };
            //dataWriter = new CsvWriter((CsvSettings)settings, new LoggerFactory());
            break;

        case FileFormat.GAM:
            //settings = new GamSettings() { FileGranularity = fileGranularity };
            //dataWriter = new GamWriter((GamSettings)settings, new LoggerFactory());
            break;

        case FileFormat.MAT73:
            //settings = new Mat73Settings() { FileGranularity = fileGranularity };
            //dataWriter = new Mat73Writer((Mat73Settings)settings, new LoggerFactory());
            break;

        default:
            throw new NotImplementedException();
    }

    // FIX: all writer assignments above are commented out, so dataWriter is still null here
    // and the code below would fail with a NullReferenceException. Fail explicitly instead.
    if (dataWriter == null)
    {
        throw new NotImplementedException($"No data writer is implemented for file format '{ fileFormat }'.");
    }

    // create temp directory
    directoryPath = Path.Combine(Path.GetTempPath(), "OneDas.Hdf.Explorer", Guid.NewGuid().ToString());
    Directory.CreateDirectory(directoryPath);

    // create custom meta data
    customMetadataEntrySet = new List <CustomMetadataEntry>();
    //customMetadataEntrySet.Add(new CustomMetadataEntry("system_name", "HDF Explorer", CustomMetadataEntryLevel.File));

    // initialize data writer
    campaignName_splitted = zipSettings.CampaignInfo.Key.Split('/');
    dataWriterContext = new DataWriterContext("HDF Explorer", directoryPath, new OneDasCampaignDescription(Guid.Empty, 0, campaignName_splitted[1], campaignName_splitted[2], campaignName_splitted[3]), customMetadataEntrySet);
    dataWriter.Configure(dataWriterContext, variableDescriptionSet);

    // create temp files
    try
    {
        if (!this.CreateFiles(dataWriter, zipSettings))
        {
            this.CleanUp(directoryPath);

            return false;
        }
    }
    finally
    {
        dataWriter.Dispose();
    }

    // write zip archive entries
    filePathSet = Directory.GetFiles(directoryPath, "*", SearchOption.AllDirectories);
    currentFile = 0;
    fileCount = filePathSet.Length;

    foreach (string filePath in filePathSet)
    {
        zipArchiveEntry = zipArchive.CreateEntry(Path.GetFileName(filePath), CompressionLevel.Optimal);

        this.OnProgressUpdated(new ProgressUpdatedEventArgs(currentFile / (double)fileCount * 100, $"Writing file { currentFile + 1 } / { fileCount } to ZIP archive ..."));

        using (FileStream fileStream = File.Open(filePath, FileMode.Open, FileAccess.Read))
        {
            using (Stream zipArchiveEntryStream = zipArchiveEntry.Open())
            {
                fileStream.CopyTo(zipArchiveEntryStream);
            }
        }

        currentFile++;
    }

    this.CleanUp(directoryPath);

    return true;
}
static void Main_Read(string[] args) { int DATA_ARRAY_LENGTH = 5; //var h5 = H5F.open(@"E:\HDF5\HDF5DotNet-src\examples\CSharpExample\CSharpExample1\table.h5", H5F.ACC_RDONLY); //var h5 = H5F.open(@"D:\test.h5", H5F.ACC_RDONLY); //var h5 = H5F.open(@"E:\HDF5\Hdf5DotnetTools-master\ToolTest\bin\Debug\table.h5", H5F.ACC_RDONLY); var h5 = H5F.open(@"E:\HDF5\test_gzip.h5", H5F.ACC_RDONLY); var dataset = H5D.open(h5, "trans_detail/20160929"); var spaceid = H5D.get_space(dataset); var npoints = H5S.get_simple_extent_npoints(spaceid); //var dims = H5S.get_simple_extent_dims(spaceid); int rank = H5S.get_simple_extent_ndims(spaceid); // 是不是不能用自己的type var dtype = H5D.get_type(dataset); var dtcls = H5T.get_class(dtype); var size = H5T.get_size(dtype); var sz = Marshal.SizeOf(typeof(ComType)); var dtype_n = H5T.get_nmembers(dtype); for (uint i = 0; i < dtype_n; ++i) { var x = H5T.get_member_name(dtype, i); var x4 = Marshal.PtrToStringAnsi(x); var y = H5T.get_member_type(dtype, i); var z = H5T.get_class(y); var x1 = H5T.get_member_offset(dtype, i); var x3 = H5T.get_size(y); Console.WriteLine(x4); Console.WriteLine(z); Console.WriteLine(x1); //var x2 = Marshal.OffsetOf(typeof(ComType), x4).ToInt32(); //Console.WriteLine(x2); Console.WriteLine(x3); } int ss1 = Marshal.SizeOf(typeof(ComType)); IntPtr p = Marshal.AllocHGlobal(ss1 * 11); H5D.read(dataset, dtype, H5S.ALL, H5S.ALL, H5P.DEFAULT, p); var s = Marshal.PtrToStructure(p, typeof(ComType)); Console.WriteLine(s); var s2 = Marshal.PtrToStructure(p + ss1, typeof(ComType)); Console.WriteLine(s2); var s3 = Marshal.PtrToStructure(p + ss1 * 4, typeof(ComType)); Console.WriteLine(s3); var s4 = Marshal.PtrToStructure(p + ss1 * 5, typeof(ComType)); Console.WriteLine(s4); var s6 = Marshal.PtrToStructure(p + ss1 * 10, typeof(ComType)); Console.WriteLine(s6); }
/// <summary> /// Read the appropriate H5Type from a *dataset* with ID `dset_id`. /// </summary> public static H5Type FromDataset(hid_t dset_id) { return(new H5Type(H5D.get_type(dset_id))); }
public static string ReadUnicodeString(hid_t groupId, string name) { var datasetId = H5D.open(groupId, name); var typeId = H5D.get_type(datasetId); if (H5T.is_variable_str(typeId) > 0) { var spaceId = H5D.get_space(datasetId); hid_t count = H5S.get_simple_extent_npoints(spaceId); IntPtr[] rdata = new IntPtr[count]; GCHandle hnd = GCHandle.Alloc(rdata, GCHandleType.Pinned); H5D.read(datasetId, typeId, H5S.ALL, H5S.ALL, H5P.DEFAULT, hnd.AddrOfPinnedObject()); var attrStrings = new List <string>(); for (int i = 0; i < rdata.Length; ++i) { int attrLength = 0; while (Marshal.ReadByte(rdata[i], attrLength) != 0) { ++attrLength; } byte[] buffer = new byte[attrLength]; Marshal.Copy(rdata[i], buffer, 0, buffer.Length); string stringPart = Encoding.UTF8.GetString(buffer); attrStrings.Add(stringPart); H5.free_memory(rdata[i]); } hnd.Free(); H5S.close(spaceId); H5D.close(datasetId); return(attrStrings[0]); } // Must be a non-variable length string. int size = H5T.get_size(typeId).ToInt32(); IntPtr iPtr = Marshal.AllocHGlobal(size); int result = H5D.read(datasetId, typeId, H5S.ALL, H5S.ALL, H5P.DEFAULT, iPtr); if (result < 0) { throw new IOException("Failed to read dataset"); } var strDest = new byte[size]; Marshal.Copy(iPtr, strDest, 0, size); Marshal.FreeHGlobal(iPtr); H5D.close(datasetId); return(Encoding.UTF8.GetString(strDest).TrimEnd((Char)0)); }
private List <float[]> ReadDataSetToSingle(AbstractWarpDataset srcbandpro, int[] bands) { List <float[]> datas = new List <float[]>(); var prjBands = PrjBandTable.GetPrjBands(srcbandpro); H5ID h5FileId = H5F.open(srcbandpro.fileName, H5F.ACC_RDONLY); foreach (int index in bands) { //Single[] data = new Single[srcSize.Width * srcSize.Height]; var bandIndex = prjBands[index - 1].DataSetIndex; string dsName = "CALChannel" + bandIndex.ToString("00"); H5ID datasetId = H5D.open(h5FileId, dsName); if (datasetId <= 0) { throw new ArgumentNullException(string.Format("FY4辐射定标,未找到名称为{0}的数据.", "CALChannel" + index.ToString("00"))); } H5ID typeId = H5D.get_type(datasetId); H5ID spaceId = H5D.get_space(datasetId); if (H5T.get_class(typeId) == H5T.class_t.FLOAT) { int rank = H5S.get_simple_extent_ndims(spaceId); ulong[] dims = new ulong[rank]; ulong[] maxDims = new ulong[rank]; H5S.get_simple_extent_dims(spaceId, dims, maxDims); float[] buffer = new float[dims[0]]; GCHandle hnd = GCHandle.Alloc(buffer, GCHandleType.Pinned); H5D.read(datasetId, typeId, H5S.ALL, H5S.ALL, H5P.DEFAULT, hnd.AddrOfPinnedObject()); hnd.Free(); if (buffer.Any(t => t > Math.Pow(10, 10) || t < -Math.Pow(10, 10))) { for (int i = 0; i < buffer.Length; i++) { var t = BitConverter.GetBytes(buffer[i]); Array.Reverse(t); buffer[i] = BitConverter.ToSingle(t, 0); } } datas.Add(buffer); } if (spaceId != 0) { H5S.close(spaceId); } if (typeId != 0) { H5T.close(typeId); } if (datasetId != 0) { H5D.close(datasetId); } } if (h5FileId != 0) { H5F.close(h5FileId); } return(datas); }