public void H5GcreateTest2()
{
    // Creating a group against an invalid file handle must fail
    // and report failure through a negative group id.
    hid_t invalidFile = Utilities.RandomInvalidHandle();
    hid_t groupId = H5G.create(invalidFile, "A");
    Assert.IsTrue(groupId < 0);
}
public void H5Oget_info_by_idxTest1()
{
    // For each test file: create four groups, then verify that the
    // object at NAME-index 2 is reported as a group.
    string[] groupNames = { "A", "AA", "AAA", "AAAA" };

    foreach (hid_t file in new[] { m_v0_test_file, m_v2_test_file })
    {
        foreach (string groupName in groupNames)
        {
            Assert.IsTrue(H5G.close(H5G.create(file, groupName)) >= 0);
        }

        H5O.info_t info = new H5O.info_t();
        Assert.IsTrue(H5O.get_info_by_idx(file, ".", H5.index_t.NAME,
            H5.iter_order_t.NATIVE, 2, ref info) >= 0);
        Assert.IsTrue(info.type == H5O.type_t.GROUP);
    }
}
/// <summary>
/// Enumerates every attribute of the given group. Each attribute stores a
/// single uint32 label id; the result maps that id to the attribute name.
/// The group handle is closed before returning.
/// </summary>
/// <param name="group_id">Open HDF5 group handle (consumed/closed here).</param>
/// <returns>Dictionary of label-id → attribute-name.</returns>
public static Dictionary<uint, string> GetLabelWorkingSet(long group_id)
{
    Dictionary<uint, string> labelWorkingSet = new Dictionary<uint, string>();
    H5A.operator_t callBackMethod = DelegateMethod;
    ArrayList attrNameArray = new ArrayList();

    // The callback receives the ArrayList through a GCHandle-backed IntPtr.
    GCHandle nameArrayAlloc = GCHandle.Alloc(attrNameArray);
    int status;
    try
    {
        IntPtr ptrOnAllocArray = (IntPtr)nameArrayAlloc;
        ulong beginAt = 0;
        status = H5A.iterate(group_id, H5.index_t.CRT_ORDER, H5.iter_order_t.INC,
            ref beginAt, callBackMethod, ptrOnAllocArray);

        for (int i = 0; i < attrNameArray.Count; i++)
        {
            string attr_name = Convert.ToString(attrNameArray[i]);
            long attr_id = H5A.open(group_id, attr_name);
            uint[] attr_value = { 0 };

            // Pin the destination so native H5A.read can write into it.
            GCHandle valueAlloc = GCHandle.Alloc(attr_value, GCHandleType.Pinned);
            try
            {
                status = H5A.read(attr_id, H5T.NATIVE_UINT32, valueAlloc.AddrOfPinnedObject());
            }
            finally
            {
                // BUG FIX: the pinned handle was never freed, leaking a pin
                // per attribute on every call.
                valueAlloc.Free();
            }
            status = H5A.close(attr_id);
            labelWorkingSet.Add(attr_value[0], attr_name);
        }
    }
    finally
    {
        // BUG FIX: the ArrayList GCHandle was never freed either.
        nameArrayAlloc.Free();
    }

    status = H5G.close(group_id);
    return labelWorkingSet;
}
/// <summary>
/// Populates a test file with a small hierarchy: a group "simple" with a
/// sub-group "sub", a root dataset "D" (one int8, value 1), plus empty
/// one-element datasets "D1" (in simple) and "D1.1" (in simple/sub).
/// </summary>
public static unsafe void AddSomeLinks(long fileId)
{
    long status;

    var simpleGroup = H5G.create(fileId, "simple");
    var subGroup = H5G.create(simpleGroup, "sub");

    // root-level dataset "D": a single int8 element written from a pinned buffer
    var rootSpace = H5S.create_simple(1, new ulong[] { 1 }, new ulong[] { 1 });
    var rootDataset = H5D.create(fileId, "D", H5T.NATIVE_INT8, rootSpace);
    var payload = new byte[] { 1 };
    fixed(void *payloadPtr = payload)
    {
        status = H5D.write(rootDataset, H5T.NATIVE_INT8, rootSpace, rootSpace, 0, new IntPtr(payloadPtr));
    }
    status = H5D.close(rootDataset);
    status = H5S.close(rootSpace);

    // "D1" inside "simple" — created but never written
    var groupSpace = H5S.create_simple(1, new ulong[] { 1 }, new ulong[] { 1 });
    var groupDataset = H5D.create(simpleGroup, "D1", H5T.NATIVE_INT8, groupSpace);
    status = H5D.close(groupDataset);
    status = H5S.close(groupSpace);

    // "D1.1" inside "simple/sub" — created but never written
    var subSpace = H5S.create_simple(1, new ulong[] { 1 }, new ulong[] { 1 });
    var subDataset = H5D.create(subGroup, "D1.1", H5T.NATIVE_INT8, subSpace);
    status = H5D.close(subDataset);
    status = H5S.close(subSpace);

    status = H5G.close(simpleGroup);
    status = H5G.close(subGroup);
}
protected override void OnInitialize()
{
    // If the link is missing there is no metadata to read: fall back to an
    // empty transfer-function set.
    if (!IOHelper.CheckLinkExists(_vdsMetaFileId, _currentPath))
    {
        _vdsMetaTransferFunctionSet = new List<hdf_transfer_function_t>();
        return;
    }

    _groupId = H5G.open(_vdsMetaFileId, _currentPath);

    // Optional "unit" attribute.
    if (H5A.exists(_groupId, "unit") > 0)
    {
        _unit = IOHelper.ReadAttribute<string>(_groupId, "unit").FirstOrDefault();
    }

    // Optional "transfer_function_set" attribute; empty list when absent.
    if (H5A.exists(_groupId, "transfer_function_set") > 0)
    {
        _vdsMetaTransferFunctionSet = IOHelper.ReadAttribute<hdf_transfer_function_t>(_groupId, "transfer_function_set").ToList();
    }
    else
    {
        _vdsMetaTransferFunctionSet = new List<hdf_transfer_function_t>();
    }

    H5G.close(_groupId);
}
/// <summary>
/// Populates <paramref name="readValue"/> from the HDF5 group identified by
/// <paramref name="groupId"/>, reading its fields and properties via
/// reflection. When <paramref name="groupName"/> is non-blank, the named
/// sub-group is opened first and closed again afterwards.
/// </summary>
/// <param name="groupId">Open group (or file) handle to read from.</param>
/// <param name="readValue">Instance to populate; must not be null.</param>
/// <param name="groupName">Optional sub-group to descend into.</param>
/// <returns>The same <paramref name="readValue"/> instance.</returns>
/// <exception cref="ArgumentNullException">When readValue is null.</exception>
public static T ReadObject<T>(long groupId, T readValue, string groupName)
{
    if (readValue == null)
    {
        throw new ArgumentNullException(nameof(readValue));
    }

    Type tyObject = readValue.GetType();

    // NOTE: a large commented-out block handling Hdf5GroupName/Hdf5SaveAttribute
    // was removed here — it was dead code.

    bool isGroupName = !string.IsNullOrWhiteSpace(groupName);
    if (isGroupName)
    {
        groupId = H5G.open(groupId, Hdf5Utils.NormalizedName(groupName));
    }

    ReadFields(tyObject, readValue, groupId);
    ReadProperties(tyObject, readValue, groupId);

    if (isGroupName)
    {
        CloseGroup(groupId);
    }

    return readValue;
}
public void H5OvisitTest1()
{
    // FIX: the group handles returned by H5G.create were previously leaked;
    // close them immediately, as the sibling tests in this file do.
    Assert.IsTrue(H5G.close(H5G.create(m_v0_test_file, "A/B/C/D", m_lcpl)) >= 0);
    Assert.IsTrue(
        H5L.create_hard(m_v0_test_file, "A/B/C/D",
                        m_v0_test_file, "shortcut") >= 0);

    Assert.IsTrue(H5G.close(H5G.create(m_v2_test_file, "A/B/C/D", m_lcpl)) >= 0);
    Assert.IsTrue(
        H5L.create_hard(m_v2_test_file, "A/B/C/D",
                        m_v2_test_file, "shortcut") >= 0);

    ArrayList al = new ArrayList();
    GCHandle hnd = GCHandle.Alloc(al);
    IntPtr op_data = (IntPtr)hnd;

    // the callback is defined in H5LTest.cs
    H5O.iterate_t cb = DelegateMethod;

    Assert.IsTrue(H5O.visit(m_v0_test_file, H5.index_t.NAME,
        H5.iter_order_t.NATIVE, cb, op_data) >= 0);
    // we should have 5 elements in the array list
    Assert.IsTrue(al.Count == 5);

    Assert.IsTrue(H5O.visit(m_v2_test_file, H5.index_t.NAME,
        H5.iter_order_t.NATIVE, cb, op_data) >= 0);
    // we should have 10 (5 + 5) elements in the array list
    Assert.IsTrue(al.Count == 10);

    hnd.Free();
}
/// <summary>
/// Releases a group handle unless it is actually the file handle itself,
/// which must stay open.
/// </summary>
/// <param name="groupId">Group (or file) handle to release.</param>
private void CloseGroup(long groupId)
{
    bool isFileHandle = groupId == this.h5FileId;
    if (!isFileHandle)
    {
        H5G.close(groupId);
    }
}
public void H5LdeleteTest1()
{
    // v0 file: create A/B/C/D, hard-link it as "shortcut", delete D through
    // its parent group, then verify the shortcut survives while the original
    // path is gone.
    Assert.IsTrue(
        H5G.close(H5G.create(m_v0_test_file, "A/B/C/D", m_lcpl)) >= 0);
    Assert.IsTrue(
        H5L.create_hard(m_v0_test_file, Encoding.ASCII.GetBytes("A/B/C/D"),
                        m_v0_test_file, Encoding.ASCII.GetBytes("shortcut")) >= 0);
    hid_t group = H5G.open(m_v0_test_file, "A/B/C");
    Assert.IsTrue(group >= 0);
    Assert.IsTrue(H5L.delete(group, "D") >= 0);
    Assert.IsTrue(H5G.close(group) >= 0);
    Assert.IsTrue(H5L.exists(m_v0_test_file, "shortcut") > 0);
    Assert.IsTrue(H5L.exists(m_v0_test_file, "A/B/C/D") == 0);

    // v2 file: same scenario.
    Assert.IsTrue(
        H5G.close(H5G.create(m_v2_test_file, "A/B/C/D", m_lcpl)) >= 0);
    Assert.IsTrue(
        H5L.create_hard(m_v2_test_file, Encoding.ASCII.GetBytes("A/B/C/D"),
                        m_v2_test_file, Encoding.ASCII.GetBytes("shortcut")) >= 0);
    group = H5G.open(m_v2_test_file, "A/B/C");
    Assert.IsTrue(group >= 0);
    Assert.IsTrue(H5L.delete(group, "D") >= 0);
    Assert.IsTrue(H5G.close(group) >= 0);
    // BUG FIX: these two asserts checked m_v0_test_file (copy-paste error);
    // the v2 half of the test must verify the v2 file.
    Assert.IsTrue(H5L.exists(m_v2_test_file, "shortcut") > 0);
    Assert.IsTrue(H5L.exists(m_v2_test_file, "A/B/C/D") == 0);
}
/// <summary>
/// Creates the matrix-table data group and resets the matrix bookkeeping.
/// Always reports success.
/// </summary>
private bool setOMXFileMatrixTables()
{
    dataGroup = H5G.create(fileId, dataGroupName);
    NumMatrix = 0;
    MatrixNames = new List<string>();
    return true;
}
/// <summary>
/// Creates the index-map lookup group and resets the index-map bookkeeping.
/// Always reports success.
/// </summary>
private bool setOMXFileIndexMaps()
{
    luGroup = H5G.create(fileId, luGroupName);
    NumIndexMap = 0;
    IndexMapNames = new List<string>();
    return true;
}
public void H5Oexists_by_nameTest1()
{
    // Same scenario against both library-version test files: a dangling soft
    // link resolves to "no object" (0), an existing path to > 0, and a
    // missing final component to an error (< 0).
    foreach (hid_t file in new[] { m_v0_test_file, m_v2_test_file })
    {
        Assert.IsTrue(H5L.create_soft("/oh my", file, "AA") >= 0);

        hid_t groupId = H5G.create(file, "A/B/C", m_lcpl);
        Assert.IsTrue(groupId >= 0);

        Assert.IsTrue(H5O.exists_by_name(file, "A/B") > 0);
        Assert.IsTrue(H5O.exists_by_name(file, "AA") == 0);
        Assert.IsTrue(H5O.exists_by_name(file, "A/B/Caesar") < 0);

        Assert.IsTrue(H5G.close(groupId) >= 0);
    }
}
/// <summary>
/// Iterates the immediate children of the root object, bucketing them into
/// dataset names and group names, and returns the requested bucket.
/// The stored value is the id passed to the iteration callback.
/// </summary>
/// <param name="dataSets">true → dataset names, false → group names.</param>
protected Dictionary<string, long> FindChildren(bool dataSets)
{
    var datasets = new Dictionary<string, long>();
    var groups = new Dictionary<string, long>();

    var rootID = Open();

    ulong iterationIndex = 0;
    H5L.iterate(rootID, H5.index_t.NAME, H5.iter_order_t.INC, ref iterationIndex,
        new H5L.iterate_t(
            delegate(long objectId, IntPtr namePtr, ref H5L.info_t info, IntPtr op_data)
            {
                string childName = Marshal.PtrToStringAnsi(namePtr);

                H5O.info_t childInfo = new H5O.info_t();
                H5O.get_info_by_name(objectId, childName, ref childInfo);

                if (childInfo.type == H5O.type_t.DATASET)
                {
                    datasets[childName] = objectId;
                }
                else if (childInfo.type == H5O.type_t.GROUP)
                {
                    groups[childName] = objectId;
                }
                return 0;
            }), new IntPtr());

    H5G.close(rootID);

    return dataSets ? datasets : groups;
}
/// <summary>
/// Loads layer weights from a Keras-style HDF5 file. When the weights live
/// under a "model_weights" group (no top-level "layer_names"), that group is
/// opened and read instead of the file root.
/// </summary>
/// <returns>The loaded (variable, value) pairs.</returns>
/// <exception cref="NotImplementedException">by_name loading is not implemented.</exception>
public List<(IVariableV1, NDArray)> load_weights(string filepath, bool by_name = false, bool skip_mismatch = false, object options = null)
{
    long fileId = Hdf5.OpenFile(filepath, true);
    bool msuccess = Hdf5.GroupExists(fileId, "model_weights");
    bool lsuccess = Hdf5.GroupExists(fileId, "layer_names");

    // BUG FIX: previously fileId itself was reassigned to the group id, so
    // Hdf5.CloseFile closed the group and the file handle leaked. Keep the
    // two handles separate and close both.
    long weightsId = fileId;
    bool openedGroup = false;
    if (!lsuccess && msuccess)
    {
        weightsId = H5G.open(fileId, "model_weights");
        openedGroup = true;
    }

    if (by_name)
    {
        //fdf5_format.load_weights_from_hdf5_group_by_name();
        throw new NotImplementedException("");
    }

    var weights = hdf5_format.load_weights_from_hdf5_group(weightsId, Layers);
    if (openedGroup)
    {
        H5G.close(weightsId);
    }
    Hdf5.CloseFile(fileId);
    // return a reference to prevent GC collect Variable.
    return weights;
}
/// <summary>
/// Closes the given group handle, but only when HDF5 still considers the
/// identifier valid.
/// </summary>
protected override void CloseId(long id)
{
    bool stillValid = H5I.is_valid(id) > 0;
    if (stillValid)
    {
        H5G.close(id);
    }
}
/// <summary>
/// Wraps an HDF5 group located under <paramref name="parent"/>. The group is
/// opened once and the handle released again immediately, so h5ID holds an
/// already-closed id after construction.
/// NOTE(review): presumably the open/close pair only verifies the group
/// exists — confirm callers re-open before using h5ID.
/// (A commented-out Name property stub that lived above this constructor was
/// removed as dead code.)
/// </summary>
public HDF5Group(string name, HDF5Container parent)
{
    this.name = name;
    this.parent = parent;
    h5ID = H5G.open(parent.h5ID, name);
    H5G.close(h5ID);
}
public void H5Rget_obj_typeTest2()
{
    // Build a UTF-8 multi-component path, create that group hierarchy, take
    // an object reference to it, and verify the reference resolves to GROUP.
    byte[] path = Encoding.UTF8.GetBytes(String.Join("/", m_utf8strings));

    // make room for the trailing \0
    byte[] terminatedName = new byte[path.Length + 1];
    Array.Copy(path, terminatedName, path.Length);

    Assert.IsTrue(
        H5G.close(H5G.create(m_v2_test_file, path, m_lcpl_utf8)) >= 0);

    byte[] referenceBuffer = new byte[H5R.OBJ_REF_BUF_SIZE];
    GCHandle pin = GCHandle.Alloc(referenceBuffer, GCHandleType.Pinned);
    Assert.IsTrue(
        H5R.create(pin.AddrOfPinnedObject(), m_v2_test_file, terminatedName,
                   H5R.type_t.OBJECT, -1) >= 0);

    H5O.type_t obj_type = H5O.type_t.UNKNOWN;
    Assert.IsTrue(
        H5R.get_obj_type(m_v2_test_file, H5R.type_t.OBJECT,
                         pin.AddrOfPinnedObject(), ref obj_type) >= 0);
    pin.Free();

    Assert.IsTrue(obj_type == H5O.type_t.GROUP);
}
public void H5LmoveTest1()
{
    // v0 file: create A/B/C/D, hard-link it as "shortcut", then move the
    // shortcut to A/B/C/D/E and verify old/new locations.
    Assert.IsTrue(
        H5G.close(H5G.create(m_v0_test_file, "A/B/C/D", m_lcpl)) >= 0);
    Assert.IsTrue(
        H5L.create_hard(m_v0_test_file, "A/B/C/D",
                        m_v0_test_file, "shortcut") >= 0);
    Assert.IsTrue(
        H5L.move(m_v0_test_file, "shortcut",
                 m_v0_test_file, "A/B/C/D/E") >= 0);
    Assert.IsTrue(H5L.exists(m_v0_test_file, "A/B/C/D/E") > 0);
    Assert.IsTrue(H5L.exists(m_v0_test_file, "A/B/C/D/shortcut") == 0);

    // v2 file: same scenario.
    Assert.IsTrue(
        H5G.close(H5G.create(m_v2_test_file, "A/B/C/D", m_lcpl)) >= 0);
    Assert.IsTrue(
        H5L.create_hard(m_v2_test_file, "A/B/C/D",
                        m_v2_test_file, "shortcut") >= 0);
    Assert.IsTrue(
        H5L.move(m_v2_test_file, "shortcut",
                 m_v2_test_file, "A/B/C/D/E") >= 0);
    // BUG FIX: these two asserts checked m_v0_test_file (copy-paste error);
    // the v2 half of the test must verify the v2 file.
    Assert.IsTrue(H5L.exists(m_v2_test_file, "A/B/C/D/E") > 0);
    Assert.IsTrue(H5L.exists(m_v2_test_file, "A/B/C/D/shortcut") == 0);
}
/// <summary>
/// Fetches the H5G.info_t record for a group. The status code returned by
/// H5G.get_info is discarded, matching the original behavior.
/// </summary>
public static H5G.info_t GroupInfo(long groupId)
{
    var info = new H5G.info_t();
    _ = H5G.get_info(groupId, ref info);
    return info;
}
/// <summary>
/// Reads the first element of a float/double attribute attached to a group
/// or dataset. Returns NaN when the attribute type is neither native float
/// nor native double, or when an HDF exception occurs.
/// </summary>
/// <param name="file">Path of the HDF5 file.</param>
/// <param name="dataSetOrGroup">Path of the object holding the attribute.</param>
/// <param name="attribute">Attribute name.</param>
public static double ReadAttribute(string file, string dataSetOrGroup, string attribute)
{
    double attr = Double.NaN;
    try
    {
        H5FileId fileId = H5F.open(file, H5F.OpenMode.ACC_RDONLY);
        H5ObjectInfo objectInfo = H5O.getInfoByName(fileId, dataSetOrGroup);
        H5GroupId groupId = null;
        H5DataSetId dataSetId = null;
        H5AttributeId attrId;
        if (objectInfo.objectType == H5ObjectType.GROUP)
        {
            groupId = H5G.open(fileId, dataSetOrGroup);
            attrId = H5A.open(groupId, attribute);
        }
        else
        {
            dataSetId = H5D.open(fileId, dataSetOrGroup);
            attrId = H5A.open(dataSetId, attribute);
        }
        H5DataTypeId attrTypeId = H5A.getType(attrId);
        double[] dAttrs = new double[] { };
        if (H5T.equal(attrTypeId, H5T.copy(H5T.H5Type.NATIVE_FLOAT)))
        {
            // Read as float, then widen to double.
            float[] fAttrs = new float[H5S.getSimpleExtentNPoints(H5A.getSpace(attrId))];
            H5A.read(attrId, attrTypeId, new H5Array<float>(fAttrs));
            dAttrs = (from f in fAttrs select(double) f).ToArray();
        }
        else if (H5T.equal(attrTypeId, H5T.copy(H5T.H5Type.NATIVE_DOUBLE)))
        {
            dAttrs = new double[H5S.getSimpleExtentNPoints(H5A.getSpace(attrId))];
            H5A.read(attrId, attrTypeId, new H5Array<double>(dAttrs));
        }
        H5T.close(attrTypeId);
        H5A.close(attrId);
        if (groupId != null)
        {
            H5G.close(groupId);
        }
        if (dataSetId != null)
        {
            H5D.close(dataSetId);
        }
        H5F.close(fileId);
        // BUG FIX: dAttrs stays empty for any other attribute type, so the
        // unconditional dAttrs[0] threw IndexOutOfRangeException. Fall back
        // to NaN instead.
        return dAttrs.Length > 0 ? dAttrs[0] : attr;
    }
    catch (HDFException e)
    {
        Console.WriteLine("Error: Unhandled HDF5 exception");
        Console.WriteLine(e.Message);
    }
    return attr;
}
public void H5OvisitTest2()
{
    string path = String.Join("/", m_utf8strings);

    // FIX: the group handles returned by H5G.create were previously leaked;
    // close them immediately, as the sibling tests in this file do.
    Assert.IsTrue(H5G.close(H5G.create(m_v0_test_file,
        Encoding.UTF8.GetBytes(path), m_lcpl_utf8)) >= 0);
    Assert.IsTrue(H5G.close(H5G.create(m_v2_test_file,
        Encoding.UTF8.GetBytes(path), m_lcpl_utf8)) >= 0);

    ArrayList al = new ArrayList();
    GCHandle hnd = GCHandle.Alloc(al);
    IntPtr op_data = (IntPtr)hnd;

    // the callback is defined in H5LTest.cs
    H5O.iterate_t cb = DelegateMethod;

    Assert.IsTrue(H5O.visit(m_v0_test_file, H5.index_t.NAME,
        H5.iter_order_t.NATIVE, cb, op_data) >= 0);
    // we should have 6 elements in the array list
    Assert.IsTrue(al.Count == 6);

    Assert.IsTrue(H5O.visit(m_v2_test_file, H5.index_t.NAME,
        H5.iter_order_t.NATIVE, cb, op_data) >= 0);
    // we should have 12 (6 + 6) elements in the array list
    Assert.IsTrue(al.Count == 12);

    hnd.Free();
}
/// <summary>
/// Loads layer weights from a Keras-style HDF5 file into the model's Layers.
/// When the weights live under a "model_weights" group (no top-level
/// "layer_names"), that group is opened and read instead of the file root.
/// </summary>
/// <exception cref="NotImplementedException">by_name loading is not implemented.</exception>
public void load_weights(string filepath, bool by_name = false, bool skip_mismatch = false, object options = null)
{
    long fileId = Hdf5.OpenFile(filepath, true);
    if (fileId < 0)
    {
        tf_output_redirect.WriteLine($"Can't find weights file {filepath}");
        return;
    }
    bool msuccess = Hdf5.GroupExists(fileId, "model_weights");
    bool lsuccess = Hdf5.GroupExists(fileId, "layer_names");

    // BUG FIX: previously fileId itself was reassigned to the group id, so
    // Hdf5.CloseFile closed the group and the file handle leaked. Keep the
    // two handles separate and close both.
    long weightsId = fileId;
    bool openedGroup = false;
    if (!lsuccess && msuccess)
    {
        weightsId = H5G.open(fileId, "model_weights");
        openedGroup = true;
    }

    if (by_name)
    {
        //fdf5_format.load_weights_from_hdf5_group_by_name();
        throw new NotImplementedException("");
    }

    hdf5_format.load_weights_from_hdf5_group(weightsId, Layers);
    if (openedGroup)
    {
        H5G.close(weightsId);
    }
    Hdf5.CloseFile(fileId);
}
public void WriteAndReadStringAttribute()
{
    // Round-trips a string attribute: writing "time", reading a missing name
    // (expected empty + logged errors), then reading the real value back.
    string filename = Path.Combine(folder, "testAttributeString.H5");
    try
    {
        var fileId = Hdf5.CreateFile(filename);
        Assert.IsTrue(fileId > 0);
        var groupId = Hdf5.CreateOrOpenGroup(fileId, "test");

        string attrStr = "this is an attribute";
        Hdf5.WriteAttribute(groupId, "time", attrStr);

        // Non-existent attribute comes back empty.
        string readStr = Hdf5.ReadAttribute<string>(groupId, "time_Non_Exist");
        Assert.IsTrue(string.IsNullOrEmpty(readStr));

        // Existing attribute round-trips exactly.
        readStr = Hdf5.ReadAttribute<string>(groupId, "time");
        Assert.IsTrue(readStr == attrStr);

        Assert.IsTrue(H5G.close(groupId) == 0);
        Assert.IsTrue(Hdf5.CloseFile(fileId) == 0);
        ErrorCountExpected = 2;
    }
    catch (Exception ex)
    {
        CreateExceptionAssert(ex);
    }
}
// Writes 'length' elements from the channel's buffer (starting at
// bufferOffset) into the channel's dataset at fileOffset via a hyperslab
// selection on the file dataspace.
// NOTE(review): this snippet is truncated — the tail of the try block (the
// actual H5D write and the cleanup of the four handles) is not visible here.
private unsafe void WriteData(ulong fileOffset, ulong bufferOffset, ulong length, ChannelContext channelContext)
{
    Contract.Requires(channelContext != null, nameof(channelContext));

    // Handles start at -1 (invalid) so later cleanup can test them.
    long groupId = -1;
    long datasetId = -1;
    long dataspaceId = -1;
    long dataspaceId_Buffer = -1;

    try
    {
        groupId = H5G.open(_fileId, $"/{ channelContext.ChannelDescription.ChannelName }");
        var datasetName = $"dataset_{ channelContext.ChannelDescription.DatasetName.Replace(" ", "_") }";
        datasetId = H5D.open(groupId, datasetName);
        dataspaceId = H5D.get_space(datasetId);
        // In-memory dataspace describing the 'length'-element source buffer.
        dataspaceId_Buffer = H5S.create_simple(1, new ulong[] { length }, null);

        var simpleBuffers = channelContext.Buffer.ToSimpleBuffer();

        // dataset: select the contiguous target region [fileOffset, fileOffset + length)
        H5S.select_hyperslab(dataspaceId, H5S.seloper_t.SET, new ulong[] { fileOffset }, new ulong[] { 1 }, new ulong[] { 1 }, new ulong[] { length });

        // Byte offset of the first element to copy out of the raw buffer.
        var offset = (int)bufferOffset * simpleBuffers.ElementSize;
        var buffer = simpleBuffers.RawBuffer[offset..];
/// <summary>
/// Loads an ANN from a Keras HDF5 weights file plus a JSON structure file.
/// Sub-networks discovered during extraction are attached to the result.
/// </summary>
/// <param name="fnH5Weights">Path of the HDF5 weights file.</param>
/// <param name="fnJSONStructure">Path of the JSON model-structure file.</param>
/// <exception cref="Exception">When either file cannot be found/opened.</exception>
public static ANNDef ReadANNFromKeras(string fnH5Weights, string fnJSONStructure)
{
    ANNDef ann = new ANNDef();
    Console.WriteLine("** LOAD file " + fnH5Weights);
    if (!File.Exists(fnH5Weights))
    {
        throw new Exception("*** File not found");
    }

    const bool READ_ONLY = true;
    long file = Hdf5.OpenFile(fnH5Weights, READ_ONLY);
    if (file < 0)
    {
        throw new Exception("unable to find/open file " + fnH5Weights);
    }

    bool inputLayerIsSparse = false;
    // FIX: JsonDocument is IDisposable — dispose it when done.
    using JsonDocument o = JsonDocument.Parse(File.ReadAllText(fnJSONStructure));
    List<(string, ANNDef)> subnets = new();
    inputLayerIsSparse = ExtractNetwork(ann, subnets, file, inputLayerIsSparse, o.RootElement, null);
    if (subnets.Count > 0)
    {
        ann.InputSubnetworks = subnets;
    }

    // BUG FIX: 'file' is a file handle, not a group handle; closing it with
    // H5G.close is the wrong API and leaks the open file. Close through the
    // same facade that opened it.
    Hdf5.CloseFile(file);
    return ann;
}
public override void CloseDocument()
{
    // Idempotent: do nothing when there is no live file handle or the
    // document was already closed.
    if (fileId == null || fileId.Id <= 0 || _fileClosed)
    {
        return;
    }

    // Release the three handles held per epoch group; each may already be
    // closed, in which case the library throws and we just log it.
    foreach (var group in EpochGroupsIDs)
    {
        try
        {
            H5G.close(group.GroupId);
            H5G.close(group.SubGroupsId);
            H5G.close(group.EpochsId);
        }
        catch (H5GcloseException ex)
        {
            log.DebugFormat("HDF5 group already closed: {0}", ex);
        }
    }

    try
    {
        H5F.close(fileId);
    }
    catch (H5FcloseException ex)
    {
        log.DebugFormat("HDF5 file already closed: {0}", ex);
    }

    Interlocked.Decrement(ref _openHdf5FileCount);
    _fileClosed = true;
}
/// <summary>
/// Reads a complete lidar container from an HDF5 file. The row/column count
/// is taken from the "distance" dataset's dataspace; every per-point field
/// is then loaded with that shape.
/// </summary>
/// <param name="sFilePath_inp">Path of the HDF5 file to read.</param>
private static Hdf5Container_LidarDaimler ReadContainer(string sFilePath_inp)
{
    int status = 0;

    long file_id = H5F.open(sFilePath_inp, H5F.ACC_RDWR);

    // Probe "distance" to learn the 2-D extent used by all datasets.
    long testDataset_id = H5D.open(file_id, "distance");
    long testDataspace_id = H5D.get_space(testDataset_id);
    ulong[] dims = new ulong[2];
    status = H5S.get_simple_extent_dims(testDataspace_id, dims, null);
    int rows = Convert.ToInt32(dims[0]);
    int cols = Convert.ToInt32(dims[1]);

    Hdf5Container_LidarDaimler outContainer = new Hdf5Container_LidarDaimler(rows, cols)
    {
        _distances = Hdf5IO.GetFloatDataset(H5D.open(file_id, "distance"), rows, cols),
        _intensity = Hdf5IO.GetFloatDataset(H5D.open(file_id, "intensity"), rows, cols),
        _labelProbabilities = Hdf5IO.GetFloatDataset(H5D.open(file_id, "labelProbabilities"), rows, cols),
        _labelWorkingSet = Hdf5IO.GetLabelWorkingSet(H5G.open(file_id, "labelWorkingSet")),
        _labels = Hdf5IO.GetUintDataset(H5D.open(file_id, "labels"), rows, cols),
        _pointValid = Hdf5IO.GetIntDataset(H5D.open(file_id, "pointValid"), rows, cols),
        _sensorX = Hdf5IO.GetFloatDataset(H5D.open(file_id, "sensorX"), rows, cols),
        _sensorY = Hdf5IO.GetFloatDataset(H5D.open(file_id, "sensorY"), rows, cols),
        _sensorZ = Hdf5IO.GetFloatDataset(H5D.open(file_id, "sensorZ"), rows, cols),
        _vehicleX = Hdf5IO.GetFloatDataset(H5D.open(file_id, "vehicleX"), rows, cols),
        _vehicleY = Hdf5IO.GetFloatDataset(H5D.open(file_id, "vehicleY"), rows, cols),
        _vehicleZ = Hdf5IO.GetFloatDataset(H5D.open(file_id, "vehicleZ"), rows, cols)
    };

    // BUG FIX: the probe dataspace and dataset handles were never closed,
    // leaking two handles per call. Close them before closing the file.
    status = H5S.close(testDataspace_id);
    status = H5D.close(testDataset_id);
    status = H5F.close(file_id);

    return outContainer;
}
/// <summary>
/// Opens a new epoch group (as a child of the current epoch group, or of the
/// file when none is open), writes its metadata attributes, and pushes its
/// handle triple onto the epoch-group stack.
/// </summary>
protected override void WriteEpochGroupStart(string label, string source, string[] keywords, IDictionary<string, object> properties, Guid identifier, DateTimeOffset startTime, double timeZoneOffset)
{
    // Nest under the current group's "epochGroups" container when one exists.
    H5FileOrGroupId parent = CurrentEpochGroupID == null
        ? (H5FileOrGroupId)fileId
        : CurrentEpochGroupID.SubGroupsId;

    var epochGroup = H5G.create((H5LocId)parent, label + "-" + identifier);
    var subGroups = H5G.create((H5LocId)epochGroup, "epochGroups");
    var epochs = H5G.create((H5LocId)epochGroup, "epochs");

    WriteAttribute(epochGroup, "label", label);
    WriteAttribute(epochGroup, "source", string.IsNullOrEmpty(source) ? "<none>" : source);
    WriteDictionary(epochGroup, "properties", properties);
    WriteAttribute(epochGroup, "symphony.uuid", identifier.ToString());
    WriteKeywords(epochGroup, new HashSet<string>(keywords));
    WriteAttribute(epochGroup, startTimeUtcName, startTime.Ticks);
    WriteAttribute(epochGroup, startTimeOffsetName, timeZoneOffset);

    // subGroups/epochs handles are intentionally left open here; they are
    // released later when the document is closed.
    EpochGroupsIDs.Push(new EpochGroupIDs(epochGroup, subGroups, epochs));
}
public void H5Gget_infoTest2()
{
    // Querying group info through a bogus handle must report failure.
    hid_t badHandle = Utilities.RandomInvalidHandle();
    H5G.info_t info = new H5G.info_t();
    Assert.IsTrue(H5G.get_info(badHandle, ref info) < 0);
}
/// <summary>
/// Opens this node's HDF5 group — the root separator for the root node,
/// otherwise path + name — and returns the handle, or -1 on failure.
/// Entry and exit are traced through the optional log delegates.
/// </summary>
public long open()
{
    Hdf5Utils.LogInfo?.Invoke("open(): start");
    long groupId = -1;
    try
    {
        groupId = isRoot()
            ? H5G.open(getFID(), SEPARATOR, HDF5Constants.H5P_DEFAULT)
            : H5G.open(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT);
    }
    catch (Exception ex)
    {
        groupId = -1;
        Hdf5Utils.LogError?.Invoke("open(): Error:" + ex);
    }
    Hdf5Utils.LogInfo?.Invoke("open(): finish");
    return groupId;
}