/// <summary>
/// Closes the underlying HDF5 file handle if one was ever opened.
/// Safe to call when the file was never opened (handle is null).
/// </summary>
public void Close()
{
    if (m_fileId == null)
    {
        return;
    }

    H5F.close(m_fileId);
}
/// <summary>
/// Rebuilds <c>Program.CampaignInfoSet</c> from the VDS file and
/// <c>Program.CampaignDescriptionSet</c> (campaign name -> description) from the
/// VDS meta file. When the VDS file does not exist the campaign set is emptied.
/// Thread-safe via <c>_lock</c>; all HDF5 ids are released in finally blocks.
/// </summary>
public static void UpdateCampaignInfoSet()
{
    // Ids default to -1 so H5I.is_valid() in the finally blocks is false
    // when open() was never reached or failed.
    long vdsFileId = -1;
    long vdsMetaFileId = -1;
    long groupId = -1;

    lock (_lock)
    {
        try
        {
            if (File.Exists(_options.VdsFilePath))
            {
                vdsFileId = H5F.open(_options.VdsFilePath, H5F.ACC_RDONLY);
                vdsMetaFileId = H5F.open(_options.VdsMetaFilePath, H5F.ACC_RDONLY);
                Program.CampaignInfoSet = GeneralHelper.GetCampaignInfoSet(vdsFileId, false);
            }
            else
            {
                // No VDS file yet: publish an empty campaign set.
                Program.CampaignInfoSet = new List <CampaignInfo>();
            }

            // Map each campaign to the "description" attribute stored under the
            // same-named group of the meta file, or a fallback text.
            // NOTE(review): groupId is an outer capture shared across lambda
            // invocations; safe here because ToDictionary evaluates sequentially.
            Program.CampaignDescriptionSet = Program.CampaignInfoSet.ToDictionary(campaignInfo => campaignInfo.Name, campaignInfo =>
            {
                if (IOHelper.CheckLinkExists(vdsMetaFileId, campaignInfo.Name))
                {
                    try
                    {
                        groupId = H5G.open(vdsMetaFileId, campaignInfo.Name);

                        if (H5A.exists(groupId, "description") > 0)
                        {
                            return(IOHelper.ReadAttribute <string>(groupId, "description").First());
                        }
                    }
                    finally
                    {
                        // Close the group even when ReadAttribute throws.
                        if (H5I.is_valid(groupId) > 0)
                        {
                            H5G.close(groupId);
                        }
                    }
                }

                return("no description available");
            });
        }
        finally
        {
            // Release both file handles regardless of how we got here.
            if (H5I.is_valid(vdsFileId) > 0)
            {
                H5F.close(vdsFileId);
            }

            if (H5I.is_valid(vdsMetaFileId) > 0)
            {
                H5F.close(vdsMetaFileId);
            }
        }
    }
}
public void H5Freset_mdc_hit_rate_statsTest1()
{
    // Resetting the metadata-cache hit-rate statistics must succeed
    // for both library-version-bounded test files.
    foreach (var fileHandle in new[] { m_v0_class_file, m_v2_class_file })
    {
        Assert.IsTrue(H5F.reset_mdc_hit_rate_stats(fileHandle) >= 0);
    }
}
public void H5FunmountTest2()
{
    // Unmounting via an invalid handle must be rejected.
    Assert.IsFalse(H5F.unmount(Utilities.RandomInvalidHandle(), "AA") >= 0);

    // An empty mount-point name must be rejected as well.
    Assert.IsFalse(H5F.unmount(m_v0_class_file, "") >= 0);
}
public void H5Fget_mdc_configTest2()
{
    // Consistency fix: construct the config with CURR_CACHE_CONFIG_VERSION,
    // as H5Fget_mdc_configTest1 and H5Fset_mdc_configTest2 do — HDF5 requires
    // the version field to be initialized before the struct is passed in.
    H5AC.cache_config_t conf = new H5AC.cache_config_t(
        H5AC.CURR_CACHE_CONFIG_VERSION);

    // The call must still fail for an invalid file handle.
    Assert.IsFalse(
        H5F.get_mdc_config(Utilities.RandomInvalidHandle(), ref conf) >= 0);
}
/// <summary>
/// Releases the HDF5 handles held by this fixture.
/// NOTE(review): file_.Close() runs before the dataset and file ids are
/// closed; if file_ wraps the same handle as h5file_ this double-closes —
/// confirm ownership of the three members.
/// </summary>
public void Cleanup()
{
    file_.Close();
    H5D.close(dataSet_);
    H5F.close(h5file_);
}
/// <summary>
/// Reads the "/Mesh/x", "/Mesh/y" and "/Mesh/z" float datasets and returns
/// them as doubles scaled from metres to millimetres.
/// </summary>
/// <param name="fileName">Path of the HDF5 file to read.</param>
/// <returns>Three arrays (x, y, z) of mesh coordinates in mm.</returns>
public static double[][] ReadMesh(string fileName)
{
    double[][] meshes = new double[3][];
    string[] meshNames = { "x", "y", "z" };

    H5FileId fileId = H5F.open(fileName, H5F.OpenMode.ACC_RDONLY);

    // Fix: create the NATIVE_FLOAT comparison type once and close it at the
    // end — the original called H5T.copy inside the loop and never closed
    // the copies, leaking one type id per dataset.
    H5DataTypeId nativeFloatId = H5T.copy(H5T.H5Type.NATIVE_FLOAT);

    for (int i = 0; i < meshNames.Length; i++)
    {
        H5DataSetId dsId = H5D.open(fileId, "/Mesh/" + meshNames[i]);
        H5DataTypeId dtId = H5D.getType(dsId);

        if (!H5T.equal(dtId, nativeFloatId))
        {
            Console.WriteLine("Error: Invalid dataset type, expected {0}", H5T.H5Type.NATIVE_FLOAT);
        }

        // Element count = total storage size / element size.
        float[] mesh = new float[H5D.getStorageSize(dsId) / H5T.getSize(dtId)];
        H5D.read(dsId, dtId, new H5Array <float>(mesh));
        meshes[i] = mesh.Select(x => (double)x * 1000.0).ToArray(); // m -> mm

        H5D.close(dsId);
        H5T.close(dtId);
    }

    H5T.close(nativeFloatId);
    H5F.close(fileId);
    return(meshes);
}
/// <summary>
/// Reads a scalar value of type <typeparamref name="T"/> from the given
/// HDF5 path inside <paramref name="filename"/>.
/// </summary>
/// <exception cref="FileNotFoundException">The file does not exist.</exception>
/// <exception cref="Exception">The path could not be read.</exception>
public static T GetMetadata <T>(string filename, string path)
{
    H5FileId zFile = null;

    if (!File.Exists(filename))
    {
        // Fix: FileNotFoundException is more specific than the bare Exception
        // the original threw, and still derives from Exception for callers.
        throw new FileNotFoundException("File not found.", filename);
    }

    try
    {
        zFile = H5F.open(filename, H5F.OpenMode.ACC_RDONLY);
        return((T)HDFExtensions.ReadScalar <T>(zFile, path));
    }
    catch (Exception ex)
    {
        // Fix: the original bare catch discarded the real failure; keep it
        // as the inner exception so the cause is diagnosable.
        throw new Exception("Path not found", ex);
    }
    finally
    {
        if (zFile != null)
        {
            H5F.close(zFile);
        }
    }
}
/// <summary>
/// Opens an existing file and loads up the group and dataset headers in the object.
/// </summary>
/// <param name="_filename">Path of the HDF5 file to open read/write.</param>
/// <exception cref="FileNotFoundException">The file does not exist on disk.</exception>
/// <exception cref="Hdf5UnknownException">H5F.open did not return a positive id.</exception>
public Hdf5File(string _filename)
{
    if (!File.Exists(_filename))
    {
        throw new FileNotFoundException();
    }

    // Open read/write and wrap the raw handle in the project's id type.
    Id = H5F.open(_filename, H5F.ACC_RDWR).ToId();

    if (Id.Value > 0)
    {
        // Wire up the child collections and eagerly populate attributes
        // and the group/dataset hierarchy.
        FileId = Id;
        Path = new Hdf5Path(".");
        Groups = new Hdf5Groups(this);
        Datasets = new Hdf5Datasets(this);
        Attributes = new Hdf5Attributes(this);
        AttributeHelper.LoadAttributes(Attributes);
        GroupHelper.PopulateChildrenObjects(Id, this);
    }
    else
    {
        throw new Hdf5UnknownException();
    }
}
// Load weights from hdf5 file. Weights must be saved as a vector per layer.
// Fix: the original leaked every HDF5 id it opened (file, dataset, dataspace,
// datatype); intermediate ids are now closed and the dataset/file handles are
// released in a finally block.
public static float[] loadH5(string path, string dsname)
{
    // Get file and dataset ids.
    var h5fid = H5F.open(path, H5F.OpenMode.ACC_RDONLY);
    var h5did = H5D.open(h5fid, dsname);

    try
    {
        // Dataset extent -> element count (1-D vector expected).
        var h5space = H5D.getSpace(h5did);
        var h5size = H5S.getSimpleExtentDims(h5space);
        var S = h5size.ToArray();

        // Read the dataset as doubles.
        double[] data = new double[S[0]];
        var h5array = new H5Array <double>(data);
        var h5dtype = H5D.getType(h5did);
        H5D.read(h5did, h5dtype, h5array);

        // Release the intermediate ids now that the read is done.
        H5S.close(h5space);
        H5T.close(h5dtype);

        // Convert to float.
        float[] newarray = new float[data.Length];
        Parallel.For(0, data.Length, (k) => { newarray[k] = (float)data[k]; });
        return(newarray);
    }
    finally
    {
        // Always close dataset and file handles, even if the read throws.
        H5D.close(h5did);
        H5F.close(h5fid);
    }
}
public void H5Fget_filesizeTest2()
{
    // Querying the file size through an invalid handle must fail.
    hsize_t fileSize = 0;
    Assert.IsFalse(H5F.get_filesize(Utilities.RandomInvalidHandle(), ref fileSize) >= 0);
}
/// <summary>
/// Closes every epoch-group id and then the file id, tolerating handles that
/// were already closed. Idempotent via the _fileClosed flag.
/// </summary>
public override void CloseDocument()
{
    // Only act on a live, not-yet-closed file handle.
    if (fileId != null && fileId.Id > 0 && !_fileClosed)
    {
        // Close the three group ids of every epoch group; a group that was
        // already closed raises H5GcloseException, which is only logged.
        foreach (var group in EpochGroupsIDs)
        {
            try
            {
                H5G.close(group.GroupId);
                H5G.close(group.SubGroupsId);
                H5G.close(group.EpochsId);
            }
            catch (H5GcloseException ex)
            {
                log.DebugFormat("HDF5 group already closed: {0}", ex);
            }
        }

        // Close the file itself, again tolerating an already-closed handle.
        try
        {
            H5F.close(fileId);
        }
        catch (H5FcloseException ex)
        {
            log.DebugFormat("HDF5 file already closed: {0}", ex);
        }

        // Maintain the process-wide open-file counter atomically, then mark
        // this document closed so repeated calls are no-ops.
        Interlocked.Decrement(ref _openHdf5FileCount);
        _fileClosed = true;
    }
}
/// <summary>
/// Opens the given HDF5 file read-only and eagerly caches its file-level
/// attributes and dataset names.
/// </summary>
/// <param name="filename">Path of the HDF5 file to open.</param>
public Hdf5Operator(string filename)
{
    _fname = filename;
    _h5FileId = H5F.open(filename, H5F.ACC_RDONLY);
    // Populate the attribute and dataset-name caches up front.
    GetAllFileAttributes();
    GetAllDatasetNames();
}
/// <summary>
/// Reads the 3-D float dataset "/FieldData/FD/f0" and returns it as doubles
/// with the index order reversed (result[k, j, i] = data[i, j, k]).
/// </summary>
/// <param name="fileName">Path of the HDF5 file to read.</param>
public static double[, ,] ReadFieldData3D(string fileName)
{
    H5FileId fileId = H5F.open(fileName, H5F.OpenMode.ACC_RDONLY);
    H5DataSetId fDataSetId = H5D.open(fileId, "/FieldData/FD/f0");
    H5DataTypeId fDataTypeId = H5D.getType(fDataSetId);

    // Fix: keep the copied comparison type so it can be closed (leaked before).
    H5DataTypeId nativeFloatId = H5T.copy(H5T.H5Type.NATIVE_FLOAT);

    if (!H5T.equal(fDataTypeId, nativeFloatId))
    {
        Console.WriteLine("Error: Invalid dataset type, expected {0}", H5T.H5Type.NATIVE_FLOAT);
    }

    // Fix: keep the dataspace id so it can be closed (leaked before).
    H5DataSpaceId spaceId = H5D.getSpace(fDataSetId);
    long[] dims = H5S.getSimpleExtentDims(spaceId).ToArray();

    if (dims.Length != 3)
    {
        Console.WriteLine("Error: Invalid field data dimensions");
    }

    float[, ,] data = new float[dims[0], dims[1], dims[2]];
    H5D.read(fDataSetId, fDataTypeId, new H5Array <float>(data));

    // Fix: release every id — the original leaked file, dataset, type and
    // space handles (compare ReadFieldData2D, which closes its ids).
    H5S.close(spaceId);
    H5T.close(nativeFloatId);
    H5T.close(fDataTypeId);
    H5D.close(fDataSetId);
    H5F.close(fileId);

    // Reorder so the fastest-varying HDF5 axis becomes the first index.
    double[, ,] fieldValues = new double[dims[2], dims[1], dims[0]];

    for (int i = 0; i < dims[0]; i++)
    {
        for (int j = 0; j < dims[1]; j++)
        {
            for (int k = 0; k < dims[2]; k++)
            {
                fieldValues[k, j, i] = data[i, j, k];
            }
        }
    }

    return(fieldValues);
}
public void H5Fget_mdc_configTest1()
{
    // A correctly versioned config struct must be accepted for valid files.
    H5AC.cache_config_t conf = new H5AC.cache_config_t(
        H5AC.CURR_CACHE_CONFIG_VERSION);

    foreach (var fileHandle in new[] { m_v0_class_file, m_v2_class_file })
    {
        Assert.IsTrue(H5F.get_mdc_config(fileHandle, ref conf) >= 0);
    }
}
/// <summary>
/// Reads the first element of a float or double attribute attached to a
/// dataset or a group. Returns NaN when the attribute cannot be read or has
/// a non-floating-point type.
/// </summary>
/// <param name="file">Path of the HDF5 file.</param>
/// <param name="dataSetOrGroup">Path of the dataset or group holding the attribute.</param>
/// <param name="attribute">Name of the attribute to read.</param>
public static double ReadAttribute(string file, string dataSetOrGroup, string attribute)
{
    double attr = Double.NaN;

    try
    {
        H5FileId fileId = H5F.open(file, H5F.OpenMode.ACC_RDONLY);
        H5ObjectInfo objectInfo = H5O.getInfoByName(fileId, dataSetOrGroup);
        H5GroupId groupId = null;
        H5DataSetId dataSetId = null;
        H5AttributeId attrId;

        // The attribute may hang off either a group or a dataset.
        if (objectInfo.objectType == H5ObjectType.GROUP)
        {
            groupId = H5G.open(fileId, dataSetOrGroup);
            attrId = H5A.open(groupId, attribute);
        }
        else
        {
            dataSetId = H5D.open(fileId, dataSetOrGroup);
            attrId = H5A.open(dataSetId, attribute);
        }

        H5DataTypeId attrTypeId = H5A.getType(attrId);

        // Fix: create the comparison types once and close them afterwards;
        // the original leaked a fresh H5T.copy per comparison.
        H5DataTypeId nativeFloatId = H5T.copy(H5T.H5Type.NATIVE_FLOAT);
        H5DataTypeId nativeDoubleId = H5T.copy(H5T.H5Type.NATIVE_DOUBLE);

        double[] dAttrs = new double[] { };

        if (H5T.equal(attrTypeId, nativeFloatId))
        {
            float[] fAttrs = new float[H5S.getSimpleExtentNPoints(H5A.getSpace(attrId))];
            H5A.read(attrId, attrTypeId, new H5Array <float>(fAttrs));
            dAttrs = (from f in fAttrs select(double) f).ToArray();
        }
        else if (H5T.equal(attrTypeId, nativeDoubleId))
        {
            dAttrs = new double[H5S.getSimpleExtentNPoints(H5A.getSpace(attrId))];
            H5A.read(attrId, attrTypeId, new H5Array <double>(dAttrs));
        }

        H5T.close(nativeFloatId);
        H5T.close(nativeDoubleId);
        H5T.close(attrTypeId);
        H5A.close(attrId);

        if (groupId != null)
        {
            H5G.close(groupId);
        }

        if (dataSetId != null)
        {
            H5D.close(dataSetId);
        }

        H5F.close(fileId);

        // Fix: the original returned dAttrs[0] unconditionally; for attributes
        // of any other type dAttrs is empty and the indexer threw
        // IndexOutOfRangeException — which is NOT an HDFException and escaped
        // the catch below. Fall through to the NaN default instead.
        if (dAttrs.Length > 0)
        {
            attr = dAttrs[0];
        }
    }
    catch (HDFException e)
    {
        Console.WriteLine("Error: Unhandled HDF5 exception");
        Console.WriteLine(e.Message);
    }

    return(attr);
}
/// <summary>
/// Reads a 2-D double dataset and returns it transposed
/// (result[i, j] = data[j, i]).
/// </summary>
/// <param name="file">Path of the HDF5 file.</param>
/// <param name="dataSet">Path of the dataset to read.</param>
public static double[,] ReadFieldData2D(string file, string dataSet)
{
    H5FileId fileId = H5F.open(file, H5F.OpenMode.ACC_RDONLY);
    H5DataSetId fDataSetId = H5D.open(fileId, dataSet);
    H5DataTypeId fDataTypeId = H5D.getType(fDataSetId);

    // Fix: keep the dataspace id so it can be closed — the original passed
    // H5D.getSpace(...) inline and leaked the handle.
    H5DataSpaceId fDataSpaceId = H5D.getSpace(fDataSetId);
    long[] dims = H5S.getSimpleExtentDims(fDataSpaceId).ToArray();

    double[,] data = new double[dims[0], dims[1]];
    H5D.read(fDataSetId, fDataTypeId, new H5Array <double>(data));

    // Transpose into [dims[1], dims[0]] order.
    double[,] fieldValues = new double[dims[1], dims[0]];

    for (int i = 0; i < dims[1]; i++)
    {
        for (int j = 0; j < dims[0]; j++)
        {
            fieldValues[i, j] = (double)data[j, i];
        }
    }

    H5S.close(fDataSpaceId);
    H5T.close(fDataTypeId);
    H5D.close(fDataSetId);
    H5F.close(fileId);

    return(fieldValues);
}
public void H5Fclear_elink_cacheTest1()
{
    // Clearing the external-link file cache must succeed on every open test file.
    var fileHandles = new[]
    {
        m_v0_class_file,
        m_v2_class_file,
        m_v0_test_file,
        m_v2_test_file,
    };

    foreach (var fileHandle in fileHandles)
    {
        Assert.IsTrue(H5F.clear_elink_file_cache(fileHandle) >= 0);
    }
}
} // make_table

/// <summary>
/// Verifies that H5TB.getTableInfo / getFieldInfo report the expected number
/// of fields and records and the expected field names. Increments nerrors on
/// every mismatch or exception.
/// </summary>
static void test_getting_info()
{
    try
    {
        Console.Write("Testing getting table/field information");

        // Open the file to check on the table.
        H5FileId fileId = H5F.open(FILE_NAME, H5F.OpenMode.ACC_RDWR);

        string[] field_names = { "c", "i", "l" };

        // Get the table info.
        TableInfo table = H5TB.getTableInfo(fileId, TABLE_NAME);

        if (table.nFields != N_FIELDS)
        {
            // Fix: report the value actually read (table.nFields); the original
            // printed the never-assigned local 'nfields' (always 0).
            Console.WriteLine("\ntest_getting_info: incorrect number of fields: read {0} - should be {1}",
                              table.nFields, N_FIELDS);
            nerrors++;
        }

        if (table.nRecords != N_RECORDS)
        {
            // Fix: this message said "fields" and printed the unassigned
            // 'nrecords'; report records and the value actually read.
            Console.WriteLine("\ntest_getting_info: incorrect number of records: read {0} - should be {1}",
                              table.nRecords, N_RECORDS);
            nerrors++;
        }

        // Get field info and compare each field name.
        TableFieldInfo tablefield = H5TB.getFieldInfo(fileId, TABLE_NAME);

        int ii;
        for (ii = 0; ii < N_FIELDS; ii++)
        {
            if (tablefield.fieldName[ii] != field_names[ii])
            {
                // Fix: the original reused {0} for both the index and the read
                // name and swapped read/expected; use {0}/{1}/{2} correctly.
                Console.WriteLine("\ntest_getting_info: field #{0} has incorrect name: read {1} - should be {2}",
                                  ii, tablefield.fieldName[ii], field_names[ii]);
                nerrors++;
            }
        }

        H5F.close(fileId);
        Console.WriteLine("\t\t\tPASSED");
    }
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_getting_info
public void H5Fget_intentTest1()
{
    // Seed with a sentinel so a successful overwrite would be observable.
    uint intent = 4711;

    foreach (var fileHandle in new[] { m_v0_class_file, m_v2_class_file })
    {
        Assert.IsTrue(H5F.get_intent(fileHandle, ref intent) >= 0);
    }
}
/// <summary>
/// Converts an HDF4 file into a new HDF5 file: one dataset per HDF4 science
/// dataset plus the global file attributes.
/// </summary>
/// <param name="hdf4FileAttrs">Parsed HDF4 source (dimensions, datasets, attributes).</param>
/// <param name="f5name">Path of the HDF5 file to create (truncated if present).</param>
/// <param name="messageAction">Progress callback forwarded to the per-dataset conversion.</param>
private static void ConvertHdf4To5(Hdf4FileAttrs hdf4FileAttrs, string f5name, Action <string, int, int> messageAction)
{
    H5FileId fileId = null;

    try
    {
        // Create a new file using H5F_ACC_TRUNC access,
        // default file creation properties, and default file
        // access properties.
        fileId = H5F.create(f5name, H5F.CreateMode.ACC_TRUNC);

        int nxf5 = hdf4FileAttrs.Hdf4FileAttr.XDim;
        int nyf5 = hdf4FileAttrs.Hdf4FileAttr.YDim;
        int rank = 2;

        // Convert each science dataset (SDS) and its attributes.
        int sdscount = hdf4FileAttrs.Hdf4FileAttr.DataFields.Count;
        for (int k = 0; k < sdscount; k++)
        {
            ConvertHdf4To5BySds(hdf4FileAttrs, messageAction, k, nyf5, nxf5, rank, fileId);
        }

        // Copy the global file attributes.
        HDFAttributeDef[] attributeDef5s = hdf4FileAttrs.GetHDFAttributeDefs();
        foreach (HDFAttributeDef attributeDef5 in attributeDef5s)
        {
            WriteHdfAttributes.WriteHdfAttribute(fileId, attributeDef5);
        }
    }
    catch (Exception ex)
    {
        throw new Exception("拼接Hdf4时出错,具体信息:" + ex.Message, ex);
    }
    finally
    {
        // Fix: the original closed the file only on the success path, leaking
        // the handle whenever a conversion step threw.
        if (fileId != null)
        {
            H5F.close(fileId);
        }
    }
}
/// <summary>
/// Opens the configured HDF5 output file read/write, or creates it
/// (exclusively) when it does not exist yet.
/// </summary>
private static H5FileId GetFileId()
{
    // Fix: compute the output path once — the original evaluated
    // Global.GetOutputPath(...) three times for the same value.
    string path = Global.GetOutputPath(Global.Configuration.HDF5Path);

    return File.Exists(path)
        ? H5F.open(path, H5F.OpenMode.ACC_RDWR)
        : H5F.create(path, H5F.CreateMode.ACC_EXCL);
}
/// <summary>
/// Opens a Hdf-5 file
/// </summary>
/// <param name="filename">Path of the file to open.</param>
/// <param name="readOnly">True to open read-only; false (default) for read/write.</param>
/// <returns>The raw HDF5 file id returned by H5F.open.</returns>
public static long OpenFile(string filename, bool readOnly = false)
{
    if (readOnly)
    {
        return H5F.open(filename, H5F.ACC_RDONLY);
    }

    return H5F.open(filename, H5F.ACC_RDWR);
}
/// <summary>
/// Reads one record of a 1-D compound dataset and returns it as a
/// member-name -> formatted-value dictionary. Returns null when the source has
/// no sub-datasets, the dataset is not compound, or it is not one-dimensional.
/// </summary>
/// <param name="datasetName">Name of the dataset to read from the file.</param>
public Dictionary <string, string> TryReadDataTable(string datasetName)
{
    Dictionary <string, string> result = null;
    var subDsDic = ds.GetSubDatasets();

    if (subDsDic.Count > 0)
    {
        H5ID h5FileId = H5F.open(fileName, H5F.ACC_RDONLY);
        H5ID datasetId = H5D.open(h5FileId, datasetName);
        H5ID typeId = H5D.get_type(datasetId);
        H5ID spaceId = H5D.get_space(datasetId);

        // Only compound-typed datasets are supported.
        if (H5T.get_class(typeId) == H5T.class_t.COMPOUND)
        {
            int numCount = H5T.get_nmembers(typeId);
            var size = H5T.get_size(typeId);

            // One record's worth of raw bytes, pinned so the native read can
            // write straight into it.
            byte[] buffer = new byte[size.ToInt32()];
            GCHandle hnd = GCHandle.Alloc(buffer, GCHandleType.Pinned);
            int ndims = H5S.get_simple_extent_ndims(spaceId);

            if (ndims == 1)
            {
                result = new Dictionary <string, string>();
                H5D.read(datasetId, typeId, H5S.ALL, H5S.ALL, H5P.DEFAULT, hnd.AddrOfPinnedObject());

                // Walk the compound members and decode each at its byte offset.
                for (uint i = 0; i < numCount; i++)
                {
                    string name = Marshal.PtrToStringAnsi(H5T.get_member_name(typeId, i));
                    int offset = H5T.get_member_offset(typeId, i).ToInt32();
                    H5ID subTypeId = H5T.get_member_type(typeId, i);
                    H5T.class_t typeClass = H5T.get_member_class(typeId, i);
                    string value = ReadBuffer(buffer, offset, typeClass, subTypeId);
                    result.Add(name, value);
                    H5T.close(subTypeId);
                }
            }

            // NOTE(review): hnd.Free() is not in a finally — a throwing read
            // leaves the buffer pinned; confirm whether that is acceptable here.
            hnd.Free();
        }

        // Release ids in reverse order of acquisition.
        if (spaceId != 0)
        {
            H5S.close(spaceId);
        }

        if (typeId != 0)
        {
            H5T.close(typeId);
        }

        if (datasetId != 0)
        {
            H5D.close(datasetId);
        }

        if (h5FileId != 0)
        {
            H5F.close(h5FileId);
        }
    }

    return(result);
}
/// <summary>
/// Wraps an HDF5 file: stores the path and mode, ensures the native HDF5
/// library is loaded, then opens the file immediately (read-only by default).
/// </summary>
/// <param name="filename">Path of the HDF5 file.</param>
/// <param name="mode">Open mode; defaults to read-only.</param>
public File(string filename, H5F.OpenMode mode = H5F.OpenMode.ACC_RDONLY)
{
    _filename = filename;
    _mode = mode;
    EnsureH5Lib();   // native library must be available before OpenFile()
    OpenFile();
}
public void H5Fset_mdc_configTest2()
{
    // Even a correctly versioned config must be rejected for an invalid handle.
    H5AC.cache_config_t conf = new H5AC.cache_config_t(
        H5AC.CURR_CACHE_CONFIG_VERSION);

    var invalidHandle = Utilities.RandomInvalidHandle();
    Assert.IsFalse(H5F.set_mdc_config(invalidHandle, ref conf) >= 0);
}
public static void ClassCleanup()
{
    // Close the global test files, then remove them from disk.
    foreach (var fileHandle in new[] { m_v0_class_file, m_v2_class_file })
    {
        Assert.IsTrue(H5F.close(fileHandle) >= 0);
    }

    foreach (var fileName in new[] { m_v0_class_file_name, m_v2_class_file_name })
    {
        File.Delete(fileName);
    }
}
/// <summary>
/// Opens the given HDF5 file, recording any failure in last_exception.
/// Returns true when a valid handle was obtained.
/// </summary>
public bool OpenHDF(FileName file, H5F.OpenMode mode = H5F.OpenMode.ACC_RDONLY)
{
    try
    {
        file_id = H5F.open(file.FullPath, mode);
        last_exception = null;
        return file_id.Id >= 0;
    }
    catch (Exception ex)
    {
        last_exception = ex;
        return false;
    }
}
/// <summary>
/// Creates an HDF5 file, recording any failure in last_exception.
/// Returns true when a valid handle was obtained.
/// </summary>
public bool CreateHDF(FileName file, H5F.CreateMode mode = H5F.CreateMode.ACC_TRUNC)
{
    try
    {
        file_id = H5F.create(file.FullPath, mode);
        last_exception = null;
        return file_id.Id >= 0;
    }
    catch (Exception ex)
    {
        last_exception = ex;
        return false;
    }
}