public static double[,] ReadFieldData2D(string file, string dataSet) {
    H5FileId fileId = H5F.open(file, H5F.OpenMode.ACC_RDONLY);
    H5DataSetId fDataSetId = H5D.open(fileId, dataSet);
    H5DataTypeId fDataTypeId = H5D.getType(fDataSetId);
    long[] dims = H5S.getSimpleExtentDims(H5D.getSpace(fDataSetId)).ToArray();
    double[,] data = new double[dims[0], dims[1]];
    H5D.read(fDataSetId, fDataTypeId, new H5Array<double>(data));
    double[,] fieldValues = new double[dims[1], dims[0]];
    for (int i = 0; i < dims[1]; i++) {
        for (int j = 0; j < dims[0]; j++) {
            fieldValues[i, j] = data[j, i];
        }
    }
    H5T.close(fDataTypeId);
    H5D.close(fDataSetId);
    H5F.close(fileId);
    return fieldValues;
}
public static double[, ,] ReadFieldData3D(string fileName) {
    H5FileId fileId = H5F.open(fileName, H5F.OpenMode.ACC_RDONLY);
    H5DataSetId fDataSetId = H5D.open(fileId, "/FieldData/FD/f0");
    H5DataTypeId fDataTypeId = H5D.getType(fDataSetId);
    if (!H5T.equal(fDataTypeId, H5T.copy(H5T.H5Type.NATIVE_FLOAT))) {
        Console.WriteLine("Error: Invalid dataset type, expected {0}", H5T.H5Type.NATIVE_FLOAT);
    }
    long[] dims = H5S.getSimpleExtentDims(H5D.getSpace(fDataSetId)).ToArray();
    if (dims.Length != 3) {
        Console.WriteLine("Error: Invalid field data dimensions");
    }
    float[, ,] data = new float[dims[0], dims[1], dims[2]];
    H5D.read(fDataSetId, fDataTypeId, new H5Array<float>(data));
    // Reorder
    double[, ,] fieldValues = new double[dims[2], dims[1], dims[0]];
    for (int i = 0; i < dims[0]; i++) {
        for (int j = 0; j < dims[1]; j++) {
            for (int k = 0; k < dims[2]; k++) {
                fieldValues[k, j, i] = data[i, j, k];
            }
        }
    }
    // Release HDF5 handles before returning
    H5T.close(fDataTypeId);
    H5D.close(fDataSetId);
    H5F.close(fileId);
    return fieldValues;
}
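// Usage sketch (illustrative only): calls the two field readers above. The file name is a
// hypothetical placeholder; the 2-D dataset path reuses the "/FieldData/FD/f0" layout assumed above.
static void FieldDataExample() {
    double[,] field2d = ReadFieldData2D("results.h5", "/FieldData/FD/f0");
    double[, ,] field3d = ReadFieldData3D("results.h5");
    Console.WriteLine("2D: {0}x{1}, 3D: {2}x{3}x{4}", field2d.GetLength(0), field2d.GetLength(1), field3d.GetLength(0), field3d.GetLength(1), field3d.GetLength(2));
}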
public static int WritePrimitiveAttribute<T>(hid_t groupId, string name, Array attributes, string datasetName = null) //where T : struct
{
    var tmpId = groupId;
    if (!string.IsNullOrWhiteSpace(datasetName)) {
        var datasetId = H5D.open(groupId, datasetName);
        if (datasetId > 0) {
            groupId = datasetId;
        }
    }
    int rank = attributes.Rank;
    ulong[] dims = Enumerable.Range(0, rank).Select(i => (ulong)attributes.GetLength(i)).ToArray();
    ulong[] maxDims = null;
    var spaceId = H5S.create_simple(rank, dims, maxDims);
    var datatype = GetDatatype(typeof(T));
    var typeId = H5T.copy(datatype);
    var attributeId = H5A.create(groupId, name, datatype, spaceId);
    GCHandle hnd = GCHandle.Alloc(attributes, GCHandleType.Pinned);
    var result = H5A.write(attributeId, datatype, hnd.AddrOfPinnedObject());
    hnd.Free();
    H5A.close(attributeId);
    H5S.close(spaceId);
    H5T.close(typeId);
    if (tmpId != groupId) {
        H5D.close(groupId);
    }
    return result;
}
public static void WriteDataSet(hid_t hid, string dataSetName, hsize_t[] offset, hsize_t[] count, hid_t typeId, Array data) {
    if (string.IsNullOrEmpty(dataSetName)) {
        throw new ArgumentException("dataSetName");
    }
    if (!DataSetExists(hid, dataSetName)) {
        throw new HDF5Exception("Data set \"{0}\" doesn't exist.", dataSetName);
    }
    var dataSet = H5D.open(hid, dataSetName);
    if (dataSet < 0) {
        throw new HDF5Exception("Failed to open data set \"{0}\".", dataSetName);
    }
    try {
        WriteDataSet(dataSet, offset, count, typeId, data);
    } finally {
        H5D.close(dataSet);
    }
}
public static double[][] ReadMesh(string fileName) {
    double[][] meshes = new double[3][];
    string[] meshNames = { "x", "y", "z" };
    H5FileId fileId = H5F.open(fileName, H5F.OpenMode.ACC_RDONLY);
    for (int i = 0; i < meshNames.Length; i++) {
        H5DataSetId dsId = H5D.open(fileId, "/Mesh/" + meshNames[i]);
        H5DataTypeId dtId = H5D.getType(dsId);
        if (!H5T.equal(dtId, H5T.copy(H5T.H5Type.NATIVE_FLOAT))) {
            Console.WriteLine("Error: Invalid dataset type, expected {0}", H5T.H5Type.NATIVE_FLOAT);
        }
        float[] mesh = new float[H5D.getStorageSize(dsId) / H5T.getSize(dtId)];
        H5D.read(dsId, dtId, new H5Array<float>(mesh));
        meshes[i] = mesh.Select(x => (double)x * 1000.0).ToArray(); // m -> mm
        H5D.close(dsId);
        H5T.close(dtId);
    }
    H5F.close(fileId);
    return meshes;
}
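// Usage sketch (illustrative only): "results.h5" is a hypothetical file laid out with /Mesh/x, /Mesh/y, /Mesh/z.
static void MeshExample() {
    double[][] mesh = ReadMesh("results.h5");
    Console.WriteLine("Mesh nodes: x={0}, y={1}, z={2} (values in mm)", mesh[0].Length, mesh[1].Length, mesh[2].Length);
}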
// Load weights from hdf5 file. Weights must be saved as a vector per layer
public static float[] loadH5(string path, string dsname) {
    // Get file id
    var h5fid = H5F.open(path, H5F.OpenMode.ACC_RDONLY);
    // Get dataset id
    var h5did = H5D.open(h5fid, dsname);
    // Dataset size
    var h5space = H5D.getSpace(h5did);
    var h5size = H5S.getSimpleExtentDims(h5space);
    // Dataset size to array
    var S = h5size.ToArray();
    // Empty double array for the data
    double[] data = new double[S[0]];
    // Read the dataset
    var h5array = new H5Array<double>(data);
    var h5dtype = H5D.getType(h5did);
    H5D.read(h5did, h5dtype, h5array);
    // Release HDF5 handles
    H5T.close(h5dtype);
    H5S.close(h5space);
    H5D.close(h5did);
    H5F.close(h5fid);
    // Convert to float
    float[] newarray = new float[data.Length];
    Parallel.For(0, data.Length, (k) => {
        newarray[k] = (float)data[k];
    });
    return newarray;
}
private unsafe void WriteData(ulong fileOffset, ulong bufferOffset, ulong length, ChannelContext channelContext) {
    Contract.Requires(channelContext != null, nameof(channelContext));
    long groupId = -1;
    long datasetId = -1;
    long dataspaceId = -1;
    long dataspaceId_Buffer = -1;
    try {
        groupId = H5G.open(_fileId, $"/{ channelContext.ChannelDescription.ChannelName }");
        var datasetName = $"dataset_{ channelContext.ChannelDescription.DatasetName.Replace(" ", "_") }";
        datasetId = H5D.open(groupId, datasetName);
        dataspaceId = H5D.get_space(datasetId);
        dataspaceId_Buffer = H5S.create_simple(1, new ulong[] { length }, null);
        var simpleBuffers = channelContext.Buffer.ToSimpleBuffer();
        // dataset
        H5S.select_hyperslab(dataspaceId, H5S.seloper_t.SET, new ulong[] { fileOffset }, new ulong[] { 1 }, new ulong[] { 1 }, new ulong[] { length });
        var offset = (int)bufferOffset * simpleBuffers.ElementSize;
        var buffer = simpleBuffers.RawBuffer[offset..];
/// <summary>
/// Write a string attribute on a dataset.
/// </summary>
public void WriteDatasetAttribute(string datasetName, string attrName, string value) {
    H5DataSetId datasetId = H5D.open(_fileId, datasetName);
    H5DataTypeId typeId = H5T.copy(H5T.H5Type.C_S1);
    H5DataSpaceId spaceId = H5S.create(H5S.H5SClass.SCALAR);
    byte[] bytes = Encoding.Default.GetBytes(value);
    // fixed-length string type; HDF5 requires a size of at least 1
    H5T.setSize(typeId, Math.Max(bytes.Length, 1));
    H5AttributeId attrId = H5A.create(datasetId, attrName, typeId, spaceId);
    if (bytes.Length > 0) {
        H5A.write(attrId, typeId, new H5Array<byte>(bytes));
    }
    if (typeId != null) { H5T.close(typeId); }
    if (spaceId != null) { H5S.close(spaceId); }
    if (attrId != null) { H5A.close(attrId); }
    if (datasetId != null) { H5D.close(datasetId); }
}
public void H5Pget_virtual_vspaceTestVDS1() {
    hid_t vds = H5D.open(m_vds_class_file, "VDS");
    Assert.IsTrue(vds >= 0);
    hid_t dcpl = H5D.get_create_plist(vds);
    Assert.IsTrue(dcpl >= 0);
    IntPtr count = IntPtr.Zero;
    Assert.IsTrue(H5P.get_virtual_count(dcpl, ref count) >= 0);
    Assert.IsTrue(3 == count.ToInt32());
    for (int i = 0; i < count.ToInt32(); ++i) {
        size_t index = new size_t(i);
        hid_t vspace = H5P.get_virtual_vspace(dcpl, index);
        Assert.IsTrue(vspace >= 0);
        Assert.IsTrue(H5S.is_regular_hyperslab(vspace) > 0);
        Assert.IsTrue(H5S.close(vspace) >= 0);
    }
    Assert.IsTrue(H5P.close(dcpl) >= 0);
    Assert.IsTrue(H5D.close(vds) >= 0);
}
private static Hdf5Container_LidarDaimler ReadContainer(string sFilePath_inp) {
    int status = 0;
    long file_id = H5F.open(sFilePath_inp, H5F.ACC_RDWR);
    long testDataset_id = H5D.open(file_id, "distance");
    long testDataspace_id = H5D.get_space(testDataset_id);
    ulong[] dims = new ulong[2];
    status = H5S.get_simple_extent_dims(testDataspace_id, dims, null);
    int rows = Convert.ToInt32(dims[0]);
    int cols = Convert.ToInt32(dims[1]);
    // release the probe handles used only to determine the matrix size
    H5S.close(testDataspace_id);
    H5D.close(testDataset_id);
    Hdf5Container_LidarDaimler outContainer = new Hdf5Container_LidarDaimler(rows, cols) {
        _distances = Hdf5IO.GetFloatDataset(H5D.open(file_id, "distance"), rows, cols),
        _intensity = Hdf5IO.GetFloatDataset(H5D.open(file_id, "intensity"), rows, cols),
        _labelProbabilities = Hdf5IO.GetFloatDataset(H5D.open(file_id, "labelProbabilities"), rows, cols),
        _labelWorkingSet = Hdf5IO.GetLabelWorkingSet(H5G.open(file_id, "labelWorkingSet")),
        _labels = Hdf5IO.GetUintDataset(H5D.open(file_id, "labels"), rows, cols),
        _pointValid = Hdf5IO.GetIntDataset(H5D.open(file_id, "pointValid"), rows, cols),
        _sensorX = Hdf5IO.GetFloatDataset(H5D.open(file_id, "sensorX"), rows, cols),
        _sensorY = Hdf5IO.GetFloatDataset(H5D.open(file_id, "sensorY"), rows, cols),
        _sensorZ = Hdf5IO.GetFloatDataset(H5D.open(file_id, "sensorZ"), rows, cols),
        _vehicleX = Hdf5IO.GetFloatDataset(H5D.open(file_id, "vehicleX"), rows, cols),
        _vehicleY = Hdf5IO.GetFloatDataset(H5D.open(file_id, "vehicleY"), rows, cols),
        _vehicleZ = Hdf5IO.GetFloatDataset(H5D.open(file_id, "vehicleZ"), rows, cols)
    };
    status = H5F.close(file_id);
    return outContainer;
}
public static double ReadAttribute(string file, string dataSetOrGroup, string attribute) {
    double attr = Double.NaN;
    try {
        H5FileId fileId = H5F.open(file, H5F.OpenMode.ACC_RDONLY);
        H5ObjectInfo objectInfo = H5O.getInfoByName(fileId, dataSetOrGroup);
        H5GroupId groupId = null;
        H5DataSetId dataSetId = null;
        H5AttributeId attrId;
        if (objectInfo.objectType == H5ObjectType.GROUP) {
            groupId = H5G.open(fileId, dataSetOrGroup);
            attrId = H5A.open(groupId, attribute);
        } else {
            dataSetId = H5D.open(fileId, dataSetOrGroup);
            attrId = H5A.open(dataSetId, attribute);
        }
        H5DataTypeId attrTypeId = H5A.getType(attrId);
        double[] dAttrs = new double[] { };
        if (H5T.equal(attrTypeId, H5T.copy(H5T.H5Type.NATIVE_FLOAT))) {
            float[] fAttrs = new float[H5S.getSimpleExtentNPoints(H5A.getSpace(attrId))];
            H5A.read(attrId, attrTypeId, new H5Array<float>(fAttrs));
            dAttrs = (from f in fAttrs select (double)f).ToArray();
        } else if (H5T.equal(attrTypeId, H5T.copy(H5T.H5Type.NATIVE_DOUBLE))) {
            dAttrs = new double[H5S.getSimpleExtentNPoints(H5A.getSpace(attrId))];
            H5A.read(attrId, attrTypeId, new H5Array<double>(dAttrs));
        }
        H5T.close(attrTypeId);
        H5A.close(attrId);
        if (groupId != null) {
            H5G.close(groupId);
        }
        if (dataSetId != null) {
            H5D.close(dataSetId);
        }
        H5F.close(fileId);
        // fall back to NaN if the attribute was neither float nor double
        return dAttrs.Length > 0 ? dAttrs[0] : attr;
    } catch (HDFException e) {
        Console.WriteLine("Error: Unhandled HDF5 exception");
        Console.WriteLine(e.Message);
    }
    return attr;
}
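// Usage sketch (illustrative only): file, object path and attribute name are hypothetical placeholders.
static void AttributeExample() {
    double dx = ReadAttribute("results.h5", "/Mesh", "spacing");
    Console.WriteLine(Double.IsNaN(dx) ? "Attribute not read" : $"spacing = {dx}");
}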
public void H5Pget_virtual_dsetnameTestVDS1() {
    hid_t vds = H5D.open(m_vds_class_file, "VDS");
    Assert.IsTrue(vds >= 0);
    hid_t dcpl = H5D.get_create_plist(vds);
    Assert.IsTrue(dcpl >= 0);
    IntPtr count = IntPtr.Zero;
    Assert.IsTrue(H5P.get_virtual_count(dcpl, ref count) >= 0);
    Assert.IsTrue(3 == count.ToInt32());
    string[] names = { "A", "B", "C" };
    for (int i = 0; i < count.ToInt32(); ++i) {
        size_t index = new size_t(i);
        // probe the length of the virtual dataset name, then fetch it
        ssize_t len = H5P.get_virtual_dsetname(dcpl, index, null, IntPtr.Zero);
        Assert.IsTrue(len.ToInt32() > 0);
        StringBuilder name = new StringBuilder(len.ToInt32() + 1);
        len = H5P.get_virtual_dsetname(dcpl, index, name, len + 1);
        Assert.IsTrue(len.ToInt32() > 0);
        Assert.IsTrue(name.ToString() == names[i]);
    }
    Assert.IsTrue(H5P.close(dcpl) >= 0);
    Assert.IsTrue(H5D.close(vds) >= 0);
}
public static (int success, hid_t CreatedgroupId) WriteStringAttributes(hid_t groupId, string name, IEnumerable<string> strs, string datasetName = null) {
    hid_t tmpId = groupId;
    if (!string.IsNullOrWhiteSpace(datasetName)) {
        hid_t datasetId = H5D.open(groupId, datasetName);
        if (datasetId > 0) {
            groupId = datasetId;
        }
    }
    // create UTF-8 encoded attributes
    hid_t datatype = H5T.create(H5T.class_t.STRING, H5T.VARIABLE);
    H5T.set_cset(datatype, H5T.cset_t.UTF8);
    H5T.set_strpad(datatype, H5T.str_t.SPACEPAD);
    int strSz = strs.Count();
    hid_t spaceId = H5S.create_simple(1, new ulong[] { (ulong)strSz }, null);
    var attributeId = H5A.create(groupId, name, datatype, spaceId);
    GCHandle[] hnds = new GCHandle[strSz];
    IntPtr[] wdata = new IntPtr[strSz];
    int cntr = 0;
    foreach (string str in strs) {
        hnds[cntr] = GCHandle.Alloc(Encoding.UTF8.GetBytes(str), GCHandleType.Pinned);
        wdata[cntr] = hnds[cntr].AddrOfPinnedObject();
        cntr++;
    }
    var hnd = GCHandle.Alloc(wdata, GCHandleType.Pinned);
    var result = H5A.write(attributeId, datatype, hnd.AddrOfPinnedObject());
    hnd.Free();
    for (int i = 0; i < strSz; ++i) {
        hnds[i].Free();
    }
    H5A.close(attributeId);
    H5S.close(spaceId);
    H5T.close(datatype);
    if (tmpId != groupId) {
        H5D.close(groupId);
    }
    return (result, attributeId);
}
private static double[,] ReadDataArray(hid_t fileLoc, string name, bool transpose = false) {
    hid_t dset = H5D.open(fileLoc, name);
    hid_t fspace = H5D.get_space(dset);
    hid_t count = H5S.get_simple_extent_ndims(fspace);
    hid_t type = H5D.get_type(dset);
    hsize_t[] dims = new hsize_t[count];
    hsize_t[] maxdims = new hsize_t[count];
    H5S.get_simple_extent_dims(fspace, dims, maxdims);
    H5S.close(fspace);
    byte[] rdata = new byte[dims[0] * dims[1] * 8];
    hid_t mem_type = H5T.copy(H5T.NATIVE_DOUBLE);
    H5T.set_size(mem_type, new IntPtr(8));
    GCHandle hnd = GCHandle.Alloc(rdata, GCHandleType.Pinned);
    H5D.read(dset, mem_type, H5S.ALL, H5S.ALL, H5P.DEFAULT, hnd.AddrOfPinnedObject());
    hnd.Free();
    H5T.close(mem_type);
    H5T.close(type);
    H5D.close(dset);
    if (transpose) {
        double[,] val = new double[dims[1], dims[0]];
        int cnt = 0;
        for (int i = 0; i < (int)dims[0]; i++) {
            for (int j = 0; j < (int)dims[1]; j++) {
                val[j, i] = BitConverter.ToDouble(rdata, cnt * 8);
                cnt++;
            }
        }
        return val;
    } else {
        double[,] val = new double[dims[0], dims[1]];
        int cnt = 0;
        for (int i = 0; i < (int)dims[0]; i++) {
            for (int j = 0; j < (int)dims[1]; j++) {
                val[i, j] = BitConverter.ToDouble(rdata, cnt * 8);
                cnt++;
            }
        }
        return val;
    }
}
public Dictionary<string, string> TryReadDataTable(string datasetName) {
    Dictionary<string, string> result = null;
    var subDsDic = ds.GetSubDatasets();
    if (subDsDic.Count > 0) {
        H5ID h5FileId = H5F.open(fileName, H5F.ACC_RDONLY);
        H5ID datasetId = H5D.open(h5FileId, datasetName);
        H5ID typeId = H5D.get_type(datasetId);
        H5ID spaceId = H5D.get_space(datasetId);
        if (H5T.get_class(typeId) == H5T.class_t.COMPOUND) {
            int numCount = H5T.get_nmembers(typeId);
            var size = H5T.get_size(typeId);
            // size the read buffer for every record in the table, not just a single compound element
            long npoints = H5S.get_simple_extent_npoints(spaceId);
            byte[] buffer = new byte[size.ToInt32() * Math.Max(npoints, 1)];
            GCHandle hnd = GCHandle.Alloc(buffer, GCHandleType.Pinned);
            int ndims = H5S.get_simple_extent_ndims(spaceId);
            if (ndims == 1) {
                result = new Dictionary<string, string>();
                H5D.read(datasetId, typeId, H5S.ALL, H5S.ALL, H5P.DEFAULT, hnd.AddrOfPinnedObject());
                for (uint i = 0; i < numCount; i++) {
                    string name = Marshal.PtrToStringAnsi(H5T.get_member_name(typeId, i));
                    int offset = H5T.get_member_offset(typeId, i).ToInt32();
                    H5ID subTypeId = H5T.get_member_type(typeId, i);
                    H5T.class_t typeClass = H5T.get_member_class(typeId, i);
                    string value = ReadBuffer(buffer, offset, typeClass, subTypeId);
                    result.Add(name, value);
                    H5T.close(subTypeId);
                }
            }
            hnd.Free();
        }
        if (spaceId != 0) { H5S.close(spaceId); }
        if (typeId != 0) { H5T.close(typeId); }
        if (datasetId != 0) { H5D.close(datasetId); }
        if (h5FileId != 0) { H5F.close(h5FileId); }
    }
    return result;
}
public void H5DrefreshTestSWMR1() {
    hid_t dst = H5D.open(m_v3_class_file, "int6x6");
    Assert.IsTrue(dst >= 0);
    Assert.IsTrue(H5D.refresh(dst) >= 0);
    Assert.IsTrue(H5D.close(dst) >= 0);
}
/// <summary>
/// Get the type class of a dataset.
/// </summary>
public string GetDatasetType(string datasetName) {
    H5DataSetId datasetId = H5D.open(_fileId, datasetName);
    H5DataTypeId typeId = H5D.getType(datasetId);
    H5T.H5TClass typeClass = H5T.getClass(typeId);
    H5T.close(typeId);
    H5D.close(datasetId);
    return typeClass.ToString();
}
public Dataset(Container container, string datasetname) {
    _datasetname = datasetname;
    _dataset_id = H5D.open(container.Id, datasetname);
    _rows = new List<Row>();
    LoadData();
}
/// <summary>
/// WARNING: ADVANCED USE ONLY!! Loads a 2D generic dataset from an H5 file.
/// The generic loaders only load data in non-Unity friendly types, such as bytes, uints, longs etc...
/// You'll have to know the correct cast to retrieve usable data.
///
/// Created with help from https://github.com/LiorBanai/HDF5-CSharp/blob/master/HDF5-CSharp/Hdf5Dataset.cs
/// </summary>
/// <param name="filePath"></param>
/// <param name="datasetName"></param>
/// <typeparam name="T"></typeparam>
/// <returns></returns>
/// <exception cref="FileNotFoundException"></exception>
static T[,] Load2DDataset<T>(string filePath, string datasetName) {
    if (!File.Exists(filePath)) {
        throw new FileNotFoundException($"Loading dataset {datasetName} from file that doesn't exist {filePath}");
    }
    long fileId = H5F.open(filePath, H5F.ACC_RDONLY);
    T[,] resultArray = new T[2, 2];
    try {
        ulong[] start = { 0, 0 };
        ulong[] count = { 0, 0 };
        long datasetId = H5D.open(fileId, datasetName);
        var datatype = H5D.get_type(datasetId);
        var spaceId = H5D.get_space(datasetId);
        int rank = H5S.get_simple_extent_ndims(spaceId);
        ulong[] maxDims = new ulong[rank];
        ulong[] dims = new ulong[rank];
        H5S.get_simple_extent_dims(spaceId, dims, maxDims);
        count[0] = dims[0];
        count[1] = dims[1];
        // Define file hyperslab.
        long status = H5S.select_hyperslab(spaceId, H5S.seloper_t.SET, start, null, count, null);
        // Define the memory dataspace.
        resultArray = new T[dims[0], dims[1]];
        var memId = H5S.create_simple(rank, dims, null);
        // Define memory hyperslab.
        status = H5S.select_hyperslab(memId, H5S.seloper_t.SET, start, null, count, null);
        // Read data from hyperslab in the file into the hyperslab in memory.
        GCHandle handle = GCHandle.Alloc(resultArray, GCHandleType.Pinned);
        try {
            H5D.read(datasetId, datatype, memId, spaceId, H5P.DEFAULT, handle.AddrOfPinnedObject());
        } finally {
            handle.Free();
            H5S.close(memId);
            H5S.close(spaceId);
            H5T.close(datatype);
            H5D.close(datasetId);
        }
    } finally {
        H5F.close(fileId);
    }
    return resultArray;
}
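// Usage sketch (illustrative only): the file name, dataset path and stored element type are hypothetical.
// As the summary above warns, the caller must know the stored element type and cast it explicitly,
// here assuming a 2-D dataset of 16-bit unsigned integers that is widened to float for further use.
static void Load2DExample() {
    ushort[,] raw = Load2DDataset<ushort>("scan.h5", "/image/frame0");
    float[,] converted = new float[raw.GetLength(0), raw.GetLength(1)];
    for (int r = 0; r < raw.GetLength(0); r++) {
        for (int c = 0; c < raw.GetLength(1); c++) {
            converted[r, c] = raw[r, c]; // widen to a Unity-friendly type
        }
    }
}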
public static (bool success, IEnumerable<string> result) ReadStrings(long groupId, string name, string alternativeName) {
    long datatype = H5T.create(H5T.class_t.STRING, H5T.VARIABLE);
    H5T.set_cset(datatype, H5T.cset_t.UTF8);
    H5T.set_strpad(datatype, H5T.str_t.NULLTERM);
    //name = ToHdf5Name(name);
    var datasetId = H5D.open(groupId, Hdf5Utils.NormalizedName(name));
    if (datasetId < 0) //does not exist?
    {
        datasetId = H5D.open(groupId, Hdf5Utils.NormalizedName(alternativeName));
    }
    if (datasetId <= 0) {
        Hdf5Utils.LogError?.Invoke($"Error reading {groupId}. Name:{name}. AlternativeName:{alternativeName}");
        return (false, Array.Empty<string>());
    }
    long spaceId = H5D.get_space(datasetId);
    long count = H5S.get_simple_extent_npoints(spaceId);
    H5S.close(spaceId);
    var strs = new List<string>();
    if (count >= 0) {
        IntPtr[] rdata = new IntPtr[count];
        GCHandle hnd = GCHandle.Alloc(rdata, GCHandleType.Pinned);
        H5D.read(datasetId, datatype, H5S.ALL, H5S.ALL, H5P.DEFAULT, hnd.AddrOfPinnedObject());
        for (int i = 0; i < rdata.Length; ++i) {
            int len = 0;
            while (Marshal.ReadByte(rdata[i], len) != 0) {
                ++len;
            }
            byte[] buffer = new byte[len];
            Marshal.Copy(rdata[i], buffer, 0, buffer.Length);
            string s = Encoding.UTF8.GetString(buffer);
            strs.Add(s);
            // H5.free_memory(rdata[i]);
        }
        hnd.Free();
    }
    H5T.close(datatype);
    H5D.close(datasetId);
    return (true, strs);
}
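// Usage sketch (illustrative only): the group id and dataset names are hypothetical placeholders.
static void ReadStringsExample(long groupId) {
    var (ok, labels) = ReadStrings(groupId, "channel_names", "channelNames");
    if (ok) {
        foreach (string label in labels) { Console.WriteLine(label); }
    }
}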
private void UpdateVariableInfo(FileContext fileContext, long variableGroupId) {
    ulong idx = 0;
    _variableGroupSet = IOHelper.UpdateAttributeList(variableGroupId, "group_set", _variableGroupSet.ToArray()).ToList();
    if (fileContext.FormatVersion != 1) {
        _unitSet = IOHelper.UpdateAttributeList(variableGroupId, "unit_set", _unitSet.ToArray()).ToList();
        _transferFunctionSet = IOHelper.UpdateAttributeList(variableGroupId, "transfer_function_set", _transferFunctionSet.ToArray()).ToList();
    }
    H5L.iterate(variableGroupId, H5.index_t.NAME, H5.iter_order_t.INC, ref idx, Callback, IntPtr.Zero);

    int Callback(long variableGroupId2, IntPtr intPtrName, ref H5L.info_t info, IntPtr userDataPtr) {
        long datasetId = -1;
        long typeId_do_not_close = -1;
        string name;
        DatasetInfo currentDatasetInfo;
        try {
            name = Marshal.PtrToStringAnsi(intPtrName);
            if (H5L.exists(variableGroupId2, name) > 0) {
                datasetId = H5D.open(variableGroupId2, name);
                currentDatasetInfo = _datasetInfoSet.FirstOrDefault(datasetInfo => datasetInfo.Name == name);
                if (currentDatasetInfo == null) {
                    typeId_do_not_close = H5D.get_type(datasetId);
                    currentDatasetInfo = new DatasetInfo(name, typeId_do_not_close, this, this.IsLazyLoading);
                    _datasetInfoSet.Add(currentDatasetInfo);
                }
                currentDatasetInfo.Update(fileContext);
            }
        } finally {
            if (H5I.is_valid(datasetId) > 0) {
                H5D.close(datasetId);
            }
        }
        return 0;
    }
}
private static bool IsHDF5String(hid_t fileLoc, string name) {
    hid_t dset = H5D.open(fileLoc, name);
    hid_t type = H5D.get_type(dset);
    H5T.class_t cl = H5T.get_class(type);
    H5T.close(type);
    H5D.close(dset);
    return cl == H5T.class_t.STRING;
}
private mData[] ReadData() {
    Console.WriteLine("Reading H5 file {0}...", filename);
    H5FileId fileId = H5F.open(filename, H5F.OpenMode.ACC_RDONLY);
    H5DataSetId dataSetId = H5D.open(fileId, dataSetName);
    mData[] readDataBack = new mData[count];
    H5D.read(dataSetId, new H5DataTypeId(H5T.H5Type.STD_REF_OBJ), new H5Array<mData>(readDataBack));
    H5D.close(dataSetId);
    H5F.close(fileId);
    return readDataBack;
}
/// <summary>
/// Rewrite the values of a dataset (destriped data).
/// </summary>
/// <typeparam name="T">data type</typeparam>
/// <param name="dataSetName">name of the dataset</param>
/// <param name="dataTypeId">type ID of the dataset</param>
/// <param name="values">data after destriping</param>
/// <param name="BrandNo">band index in the dataset, starting from 0</param>
private void ReWriteDataSet<T>(string dataSetName, H5DataTypeId dataTypeId, T[] values, int BrandNo) {
    H5FileId _h5FileId = null;
    H5DataSetId dataSetId = null;
    H5DataSpaceId spaceid = null;
    try {
        _h5FileId = H5F.open(fileName, H5F.OpenMode.ACC_RDWR);
        // first open the dataset that contains the specified band
        dataSetId = H5D.open(_h5FileId, dataSetName);
        spaceid = H5D.getSpace(dataSetId);
        long[] dims = H5S.getSimpleExtentDims(spaceid); // size of the data array, e.g. [3,1800,2048]
        int rank = H5S.getSimpleExtentNDims(spaceid);   // rank of the data array, e.g. 3
        H5S.close(spaceid);
        // compute the total number of elements from the dataset dimensions
        int size = 0;
        if (rank == 0) {
            size = 1;
        } else if (rank == 1) {
            size = Convert.ToInt32(dims[0]);
        } else if (rank == 2) {
            size = Convert.ToInt32(dims[0] * dims[1]);
        } else if (rank == 3) {
            size = Convert.ToInt32(dims[0] * dims[1] * dims[2]);
        }
        T[] v = new T[size];
        // read the original data from the dataset
        H5D.read<T>(dataSetId, dataTypeId, new H5Array<T>(v));
        // assign the corrected band data to the corresponding band
        for (int i = BrandNo; i < values.Length; i++) {
            v[i] = values[i];
        }
        H5D.write<T>(dataSetId, dataTypeId, new H5Array<T>(v));
    } catch (Exception e) {
        throw new Exception(e.Message);
    } finally {
        H5D.close(dataSetId);
        H5F.close(_h5FileId);
    }
}
static void ReadFile(string filePath) {
    var file = H5F.open(filePath, H5F.ACC_RDONLY);
    var dataSet = H5D.open(file, "/group/dataset");
    var dataSpace = H5D.get_space(dataSet);
    var rank = H5S.get_simple_extent_ndims(dataSpace);
    if (rank == 2) {
        var dims = new ulong[2];
        H5S.get_simple_extent_dims(dataSpace, dims, null);
        var data = new int[dims[0], dims[1]];
        H5D.read(dataSet, H5T.NATIVE_INT, H5S.ALL, H5S.ALL, H5P.DEFAULT, new PinnedObject(data));
        for (int i = 0; i < data.GetLength(0); ++i) {
            for (int j = 0; j < data.GetLength(1); ++j) {
                Write($"{data[i,j],3}");
            }
            WriteLine();
        }
    }
    H5S.close(dataSpace);
    var doubleAttribute = H5A.open(dataSet, "double");
#if false
    // Disabled: pinning the local double pins a boxed copy, so the local value never changes.
    double pi = 0.0;
    var handle = GCHandle.Alloc(pi, GCHandleType.Pinned);
    H5A.read(doubleAttribute, H5T.NATIVE_DOUBLE, handle.AddrOfPinnedObject());
    handle.Free();
    WriteLine($"PI = {pi}");
#else
    var values = new double[1];
    H5A.read(doubleAttribute, H5T.NATIVE_DOUBLE, new PinnedObject(values));
    WriteLine($"PI = {values[0]}");
#endif
    H5A.close(doubleAttribute);
    WriteLine($"string: {ReadStringAttribute(dataSet, "string")}");
    WriteLine($"string-ascii: {ReadStringAttribute(dataSet, "string-ascii")}");
    WriteLine($"string-vlen: {ReadStringAttribute(dataSet, "string-vlen")}");
    H5D.close(dataSet);
    H5F.close(file);
}
private ISimpleDataStorage LoadDataset(long sourceFileId, string datasetPath, ulong start, ulong stride, ulong block, ulong count) {
    long datasetId = -1;
    long typeId = -1;
    Array dataset;
    Array dataset_status;
    Type genericType;
    ExtendedDataStorageBase extendedDataStorage;
    ISimpleDataStorage simpleDataStorage;
    dataset = IOHelper.ReadDataset(sourceFileId, datasetPath, start, stride, block, count);
    // apply status (only if native dataset)
    if (H5L.exists(sourceFileId, datasetPath + "_status") > 0) {
        try {
            datasetId = H5D.open(sourceFileId, datasetPath);
            typeId = H5D.get_type(datasetId);
            dataset_status = IOHelper.ReadDataset(sourceFileId, datasetPath + "_status", start, stride, block, count).Cast<byte>().ToArray();
            genericType = typeof(ExtendedDataStorage<>).MakeGenericType(TypeConversionHelper.GetTypeFromHdfTypeId(typeId));
            extendedDataStorage = (ExtendedDataStorageBase)Activator.CreateInstance(genericType, dataset, dataset_status);
            dataset_status = null;
        } finally {
            if (H5I.is_valid(datasetId) > 0) {
                H5D.close(datasetId);
            }
            if (H5I.is_valid(typeId) > 0) {
                H5T.close(typeId);
            }
        }
        simpleDataStorage = extendedDataStorage.ToSimpleDataStorage();
        extendedDataStorage.Dispose();
        return simpleDataStorage;
    } else {
        return new SimpleDataStorage(dataset.Cast<double>().ToArray());
    }
}
/// <summary>
/// Reads an n-dimensional dataset.
/// </summary>
/// <typeparam name="T">Generic parameter strings or primitive type</typeparam>
/// <param name="groupId">id of the group. Can also be a file Id</param>
/// <param name="name">name of the dataset</param>
/// <returns>The n-dimensional dataset</returns>
public static Array ReadDatasetToArray<T>(hid_t groupId, string name) //where T : struct
{
    var datatype = GetDatatype(typeof(T));
    var datasetId = H5D.open(groupId, name);
    var spaceId = H5D.get_space(datasetId);
    int rank = H5S.get_simple_extent_ndims(spaceId);
    long count = H5S.get_simple_extent_npoints(spaceId);
    Array dset;
    Type type = typeof(T);
    if (rank >= 0 && count >= 0) {
        int rankChunk;
        ulong[] maxDims = new ulong[rank];
        ulong[] dims = new ulong[rank];
        ulong[] chunkDims = new ulong[rank];
        hid_t memId = H5S.get_simple_extent_dims(spaceId, dims, maxDims);
        long[] lengths = dims.Select(d => Convert.ToInt64(d)).ToArray();
        dset = Array.CreateInstance(type, lengths);
        var typeId = H5D.get_type(datasetId);
        var mem_type = H5T.copy(datatype);
        if (datatype == H5T.C_S1) {
            H5T.set_size(datatype, new IntPtr(2));
        }
        var propId = H5D.get_create_plist(datasetId);
        if (H5D.layout_t.CHUNKED == H5P.get_layout(propId)) {
            rankChunk = H5P.get_chunk(propId, rank, chunkDims);
        }
        memId = H5S.create_simple(rank, dims, maxDims);
        GCHandle hnd = GCHandle.Alloc(dset, GCHandleType.Pinned);
        H5D.read(datasetId, datatype, memId, spaceId, H5P.DEFAULT, hnd.AddrOfPinnedObject());
        hnd.Free();
        // release the handles created for this read
        H5S.close(memId);
        H5P.close(propId);
        H5T.close(mem_type);
        H5T.close(typeId);
    } else {
        dset = Array.CreateInstance(type, new long[1] { 0 });
    }
    H5D.close(datasetId);
    H5S.close(spaceId);
    return dset;
}
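// Usage sketch (illustrative only): the group id and dataset name are hypothetical placeholders,
// and the cast assumes the named dataset is a 2-D double dataset.
static void ReadDatasetToArrayExample(hid_t groupId) {
    var values = (double[,])ReadDatasetToArray<double>(groupId, "temperature");
    Console.WriteLine("Read {0} x {1} values", values.GetLength(0), values.GetLength(1));
}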
private void createHD5DataObject(H5GroupId h5GroupId, string pathName, ref HD5DataSetObject dataObject) {
    H5DataSetId datasetid = null;
    H5DataSpaceId spaceid = null;
    H5DataTypeId dataTypeid = null;
    try {
        dataObject.GroupId = h5GroupId;
        datasetid = H5D.open(h5GroupId, pathName);
        dataObject.DatasetID = datasetid;
        dataObject.DatasetName = pathName;
        spaceid = H5D.getSpace(datasetid);
        var dims = H5S.getSimpleExtentDims(spaceid);
        dataTypeid = H5D.getType(datasetid);
        dataObject.Dim = dims.Length;
        HDF5DotNet.H5T.H5TClass classType = H5T.getClass(dataTypeid);
        int size = H5T.getSize(dataTypeid);
        H5T.Sign sign = H5T.Sign.TWOS_COMPLEMENT;
        if (classType == H5T.H5TClass.INTEGER) {
            sign = H5T.getSign(dataTypeid);
        }
        //var rank = H5S.getSimpleExtentNDims(space);
        //var statu = H5S.getSimpleExtentDims(space);
        Boolean bString = H5T.isVariableString(dataTypeid);
        //String name = H5T.getMemberName(dataType, 0);
        //var type2 = H5T.getNativeType(dataType, H5T.Direction.DEFAULT);
        Type type = getTypeof(classType, size, sign);
        dataObject.DataType = type;
        dataObject.Data = readData(dataObject);
    } catch (Exception e) {
        Console.WriteLine(e.Message);
    } finally {
        if (datasetid != null) {
            H5D.close(datasetid);
        }
        if (spaceid != null) {
            H5S.close(spaceid);
        }
        if (dataTypeid != null) {
            H5T.close(dataTypeid);
        }
    }
}
private static bool groupexists(hid_t fileLoc, string name) {
    hid_t dId = 0;
    try {
        dId = H5D.open(fileLoc, name);
    } catch {
        dId = 0;
    } finally {
        if (dId > 0) {
            H5D.close(dId);
        }
    }
    return dId > 0;
}
/// <summary>
/// Read the specified dataset; exceptions are not handled here.
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="datasetName"></param>
/// <param name="bandN"></param>
/// <param name="bandH"></param>
/// <param name="bandW"></param>
/// <returns></returns>
public T[] ReadDataArray<T>(String datasetName, ref int bandN, ref int bandH, ref int bandW) {
    H5DataSetId datasetId = null;
    H5DataSpaceId spaceId = null;
    H5DataTypeId typeId = null;
    long[] dims = null;
    if (!String.IsNullOrEmpty(datasetName) && _datasetNames.Contains(datasetName)) {
        datasetId = H5D.open(_fileId, datasetName);
        spaceId = H5D.getSpace(datasetId);
        dims = H5S.getSimpleExtentDims(spaceId);
        if (dims.Length == 2) {
            bandN = 1;
            bandH = (int)dims[0];
            bandW = (int)dims[1];
        } else if (dims.Length == 3) {
            bandN = (int)dims[0];
            bandH = (int)dims[1];
            bandW = (int)dims[2];
        }
        H5DataTypeId rawTypeId = H5D.getType(datasetId);
        typeId = H5T.getNativeType(rawTypeId, H5T.Direction.DEFAULT);
        H5T.close(rawTypeId);
        T[] dv = new T[bandN * bandH * bandW];
        H5D.read<T>(datasetId, typeId, new H5Array<T>(dv));
        if (typeId != null) {
            H5T.close(typeId);
        }
        if (spaceId != null) {
            H5S.close(spaceId);
        }
        if (datasetId != null) {
            H5D.close(datasetId);
        }
        return dv;
    } else {
        throw new Exception("The specified dataset was not found!");
    }
}
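// Usage sketch (illustrative only): the dataset name is a hypothetical placeholder and the
// element type passed for T must match the type stored in the file.
void ReadDataArrayExample() {
    int bands = 0, height = 0, width = 0;
    ushort[] pixels = ReadDataArray<ushort>("EV_250_RefSB", ref bands, ref height, ref width);
    Console.WriteLine("Read {0} band(s) of {1} x {2}", bands, height, width);
}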