static void ReadFile(string filePath)
{
    var file = Hdf5.OpenFile(filePath, true);
    var group = Hdf5.CreateOrOpenGroup(file, "group");
    var (ok, rawData) = Hdf5.ReadDataset<int>(group, "dataset");
    if (ok)
    {
        var data = (int[,])rawData;
        for (int i = 0; i < data.GetLength(0); ++i)
        {
            for (int j = 0; j < data.GetLength(1); ++j)
            {
                Write($"{data[i, j],3}");
            }
            WriteLine();
        }
    }

    //var dataSet = H5D.open(group, "dataset");
    //var hello = Hdf5.ReadUnicodeString(dataSet, "string");
    //H5D.close(dataSet);
    //WriteLine($"string: {hello}");

    Hdf5.CloseGroup(group);
    Hdf5.CloseFile(file);
}
public void WriteAndReadGroupsWithDataset()
{
    string filename = Path.Combine(folder, "testGroups.H5");
    try
    {
        var fileId = Hdf5.CreateFile(filename);
        Assert.IsTrue(fileId > 0);
        var dset = dsets.First();

        var groupId = H5G.create(fileId, Hdf5Utils.NormalizedName("/A")); ///B/C/D/E/F/G/H
        Hdf5.WriteDataset(groupId, Hdf5Utils.NormalizedName("test"), dset);
        var subGroupId = Hdf5.CreateOrOpenGroup(groupId, Hdf5Utils.NormalizedName("C"));
        var subGroupId2 = Hdf5.CreateOrOpenGroup(groupId, Hdf5Utils.NormalizedName("/D")); // will be saved at the root location
        dset = dsets.Skip(1).First();
        Hdf5.WriteDataset(subGroupId, Hdf5Utils.NormalizedName("test2"), dset);
        Hdf5.CloseGroup(subGroupId);
        Hdf5.CloseGroup(subGroupId2);
        Hdf5.CloseGroup(groupId);

        groupId = H5G.create(fileId, Hdf5Utils.NormalizedName("/A/B")); ///B/C/D/E/F/G/H
        dset = dsets.Skip(1).First();
        Hdf5.WriteDataset(groupId, Hdf5Utils.NormalizedName("test"), dset);
        Hdf5.CloseGroup(groupId);

        groupId = Hdf5.CreateGroupRecursively(fileId, Hdf5Utils.NormalizedName("A/B/C/D/E/F/I"));
        Hdf5.CloseGroup(groupId);
        Hdf5.CloseFile(fileId);

        fileId = Hdf5.OpenFile(filename);
        Assert.IsTrue(fileId > 0);

        groupId = H5G.open(fileId, Hdf5Utils.NormalizedName("/A/B"));
        double[,] dset2 = (double[,])Hdf5.ReadDataset<double>(groupId, Hdf5Utils.NormalizedName("test")).result;
        CompareDatasets(dset, dset2);
        Assert.IsTrue(Hdf5.CloseGroup(groupId) >= 0);

        groupId = H5G.open(fileId, Hdf5Utils.NormalizedName("/A/C"));
        dset2 = (double[,])Hdf5.ReadDataset<double>(groupId, Hdf5Utils.NormalizedName("test2")).result;
        CompareDatasets(dset, dset2);
        Assert.IsTrue(Hdf5.CloseGroup(groupId) >= 0);

        dset = dsets.First();
        dset2 = (double[,])Hdf5.ReadDataset<double>(fileId, Hdf5Utils.NormalizedName("/A/test")).result;
        CompareDatasets(dset, dset2);

        // The path was created with CreateGroupRecursively above, so check for a group rather than a dataset.
        Assert.IsTrue(Hdf5Utils.ItemExists(fileId, Hdf5Utils.NormalizedName("A/B/C/D/E/F/I"), DataTypes.Hdf5ElementType.Group));
        Assert.IsTrue(Hdf5.CloseFile(fileId) == 0);
    }
    catch (Exception ex)
    {
        CreateExceptionAssert(ex);
    }
}
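The test above relies on a CompareDatasets helper that is not shown. A minimal sketch of what such a helper might look like (the body and the MSTest assertions are assumptions, not the actual test-suite implementation):

// Hypothetical helper: asserts two 2-D double arrays have the same shape and elements.
private static void CompareDatasets(double[,] expected, double[,] actual)
{
    Assert.AreEqual(expected.GetLength(0), actual.GetLength(0));
    Assert.AreEqual(expected.GetLength(1), actual.GetLength(1));
    for (int i = 0; i < expected.GetLength(0); i++)
    {
        for (int j = 0; j < expected.GetLength(1); j++)
        {
            Assert.AreEqual(expected[i, j], actual[i, j], 1e-10);
        }
    }
}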
public static void save_weights_to_hdf5_group(long f, List<ILayer> layers)
{
    List<string> layerName = new List<string>();
    foreach (var layer in layers)
    {
        layerName.Add(layer.Name);
    }
    save_attributes_to_hdf5_group(f, "layer_names", layerName.ToArray());
    Hdf5.WriteAttribute(f, "backend", "tensorflow");
    Hdf5.WriteAttribute(f, "keras_version", "2.5.0");

    long g = 0, crDataGroup = 0;
    List<IVariableV1> weights = new List<IVariableV1>();
    //List<IVariableV1> weight_values = new List<IVariableV1>();
    List<string> weight_names = new List<string>();
    foreach (var layer in layers)
    {
        weight_names = new List<string>();
        g = Hdf5.CreateOrOpenGroup(f, Hdf5Utils.NormalizedName(layer.Name));
        weights = _legacy_weights(layer);
        //weight_values = keras.backend.batch_get_value(weights);
        foreach (var weight in weights)
        {
            weight_names.Add(weight.Name);
        }
        save_attributes_to_hdf5_group(g, "weight_names", weight_names.ToArray());

        Tensor tensor = null;
        foreach (var (name, val) in zip(weight_names, weights))
        {
            tensor = val.AsTensor();
            if (name.IndexOf("/") > 1)
            {
                // Scoped names ("scope/weight") get their own sub-group.
                crDataGroup = Hdf5.CreateOrOpenGroup(g, Hdf5Utils.NormalizedName(name.Split('/')[0]));
                WriteDataset(crDataGroup, name.Split('/')[1], tensor);
                Hdf5.CloseGroup(crDataGroup);
            }
            else
            {
                // Unscoped names are written directly into the layer group
                // (the original wrote to crDataGroup here, which may be unset or already closed).
                WriteDataset(g, name, tensor);
            }
            tensor = null;
        }
        Hdf5.CloseGroup(g);
        weight_names = null;
    }
    weights = null;
    // weight_values = null;
}
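A minimal read-back sketch for the layout produced above (one HDF5 group per layer, datasets keyed by weight name). The method name, the float element type, and the flat layer/weight naming are assumptions for illustration only:

// Hypothetical reader: fetches a single saved weight as a 2-D float array.
static float[,] ReadLayerWeight(string filePath, string layerName, string weightName)
{
    var file = Hdf5.OpenFile(filePath, true); // open read-only, matching the ReadFile example above
    var group = Hdf5.CreateOrOpenGroup(file, Hdf5Utils.NormalizedName(layerName));
    var (ok, raw) = Hdf5.ReadDataset<float>(group, Hdf5Utils.NormalizedName(weightName));
    Hdf5.CloseGroup(group);
    Hdf5.CloseFile(file);
    return ok ? (float[,])raw : null;
}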
public void WriteAndReadPrimitives()
{
    string filename = Path.Combine(folder, "testPrimitives.H5");
    int intValue = 2;
    double dblValue = 1.1;
    string strValue = "test";
    bool boolValue = true;
    var groupStr = "/test";
    string concatFunc(string x) => string.Concat(groupStr, "/", x);
    Dictionary<string, List<string>> attributes = new Dictionary<string, List<string>>();

    try
    {
        var fileId = Hdf5.CreateFile(filename);
        Assert.IsTrue(fileId > 0);
        var groupId = Hdf5.CreateGroup(fileId, groupStr);
        Hdf5.WriteOneValue(groupId, concatFunc(nameof(intValue)), intValue, attributes);
        Hdf5.WriteOneValue(groupId, concatFunc(nameof(dblValue)), dblValue, attributes);
        Hdf5.WriteOneValue(groupId, concatFunc(nameof(strValue)), strValue, attributes);
        Hdf5.WriteOneValue(groupId, concatFunc(nameof(boolValue)), boolValue, attributes);
        Hdf5.CloseGroup(groupId);
        Hdf5.CloseFile(fileId);
    }
    catch (Exception ex)
    {
        CreateExceptionAssert(ex);
    }

    try
    {
        var fileId = Hdf5.OpenFile(filename);
        Assert.IsTrue(fileId > 0);
        var groupId = H5G.open(fileId, groupStr);
        int readInt = Hdf5.ReadOneValue<int>(groupId, concatFunc(nameof(intValue)));
        Assert.IsTrue(intValue == readInt);
        double readDbl = Hdf5.ReadOneValue<double>(groupId, concatFunc(nameof(dblValue)));
        Assert.IsTrue(dblValue == readDbl);
        string readStr = Hdf5.ReadOneValue<string>(groupId, concatFunc(nameof(strValue)));
        Assert.IsTrue(strValue == readStr);
        bool readBool = Hdf5.ReadOneValue<bool>(groupId, concatFunc(nameof(boolValue)));
        Assert.IsTrue(boolValue == readBool);
        H5G.close(groupId);
        Hdf5.CloseFile(fileId);
    }
    catch (Exception ex)
    {
        CreateExceptionAssert(ex);
    }
}
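Both tests funnel unexpected exceptions into a CreateExceptionAssert helper that is not shown here. One plausible minimal version (only the name is taken from the calls above; the body is an assumption) simply fails the test with the exception details:

// Hypothetical helper: turns an unexpected exception into a test failure.
private static void CreateExceptionAssert(Exception ex)
{
    Assert.Fail($"Unexpected exception: {ex}");
}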
public void Dispose()
{
    try
    {
        if (!Disposed)
        {
            Hdf5.CloseGroup(GroupId);
        }
    }
    catch (Exception)
    {
        // nothing
    }
}
public void Dispose()
{
    try
    {
        if (!Disposed)
        {
            Hdf5.CloseGroup(GroupId);
        }
    }
    catch (Exception e)
    {
        Logger.LogError(e, $"Error closing RPosition group: {e.Message}");
    }
}
public void Dispose()
{
    try
    {
        if (!Disposed)
        {
            Hdf5.CloseGroup(GroupId);
            Disposed = true;
        }
    }
    catch (Exception e)
    {
        Logger?.LogError($"Error closing file: {e}");
    }
}
public void Dispose()
{
    try
    {
        if (!Disposed)
        {
            UnFiltered.Dispose();
            Filtered.Dispose();
            Timestamps.Dispose();
            PacketIds?.Dispose();
            EcgSamplesData.Dispose();
            EcgTaskWriter.Dispose();
            Hdf5.CloseGroup(GroupId);
            Disposed = true;
        }
    }
    catch (Exception e)
    {
        Logger.LogError($"Error during dispose of ECG: {e.Message}");
    }
}
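The Dispose implementations above all follow the same pattern: close the group exactly once, guard with a Disposed flag, and log or swallow errors. A small wrapper along these lines (a hypothetical type, not part of the library) lets callers rely on using blocks instead of repeating that boilerplate:

// Hypothetical convenience wrapper around an HDF5 group handle.
public sealed class Hdf5GroupScope : IDisposable
{
    public long GroupId { get; }
    private bool _disposed;

    public Hdf5GroupScope(long parentId, string name)
    {
        // Opens the group if it exists, otherwise creates it.
        GroupId = Hdf5.CreateOrOpenGroup(parentId, name);
    }

    public void Dispose()
    {
        if (_disposed)
        {
            return;
        }
        _disposed = true;
        Hdf5.CloseGroup(GroupId);
    }
}

With such a wrapper, using (var scope = new Hdf5GroupScope(fileId, "group")) { ... } closes the group even when the body throws.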
static void WriteFile(string filePath)
{
    var file = Hdf5.CreateFile(filePath);
    var group = Hdf5.CreateOrOpenGroup(file, "group");
    Hdf5.WriteDataset(group, "dataset", new int[,] { { 1, 2, 3, 4 }, { 5, 6, 7, 8 }, { 9, 10, 11, 12 } });
    var hello = "早上好!"; // "Good morning!" in Chinese; deliberately non-ASCII to exercise Unicode attribute support
    Hdf5.WriteStringAttribute(group, "string", hello, "dataset");
    Hdf5.CloseGroup(group);
    Hdf5.CloseFile(file);
}
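Taken together with the ReadFile example above, a small driver might look like this (the temp-file path and the Main entry point are only an example):

// Writes a 3x4 dataset plus a Unicode attribute, then reads the dataset back.
static void Main()
{
    string path = Path.Combine(Path.GetTempPath(), "example.h5");
    WriteFile(path);
    ReadFile(path);
}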
public async Task<(bool, string)> StopProcedure()
{
    if (!File.Exists(FileName))
    {
        string msg = $"File {FileName} does not exist";
        Logger?.LogWarning(msg);
        return (false, msg);
    }

    if (FileClosed)
    {
        return GeneralUtils.CheckFileSize(FileName);
    }

    FileClosed = true;
    Logger?.LogInformation("Stop Procedure called");
    ProcedureInformation.EndDateTime = DateTime.Now;
    if (RecordingInProgress)
    {
        StopRecording();
    }

    await CloseHandles();
    Hdf5.Flush(groupRoot, H5F.scope_t.GLOBAL);
    Hdf5.CloseGroup(groupEIT);
    Hdf5.CloseGroup(groupRoot);
    long result = Hdf5.CloseFile(fileId);
    if (result >= 0)
    {
        Logger?.LogInformation("Stop Procedure H5 File closed");
    }
    else
    {
        Logger?.LogError("Cannot close H5 File: " + result);
    }

    return GeneralUtils.CheckFileSize(FileName);
}
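StopProcedure delegates its return value to GeneralUtils.CheckFileSize, which is not shown. One plausible shape for it (only the (bool, string) signature is taken from the call sites; the body below is an assumption):

// Hypothetical check: succeeds when the file exists and is non-empty.
public static (bool, string) CheckFileSize(string fileName)
{
    var info = new FileInfo(fileName);
    if (!info.Exists)
    {
        return (false, $"File {fileName} does not exist");
    }
    return info.Length > 0
        ? (true, $"File {fileName} size: {info.Length} bytes")
        : (false, $"File {fileName} is empty");
}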
public void Dispose()
{
    try
    {
        if (!Disposed)
        {
            VoltagesReal.Dispose();
            VoltagesIm.Dispose();
            CurrentsReal?.Dispose();
            CurrentsIm.Dispose();
            Saturation.Dispose();
            Timestamps.Dispose();
            PacketIds?.Dispose();
            ElectrodeTaskWriter.Dispose();
            Hdf5.CloseGroup(GroupId);
            Disposed = true;
        }
    }
    catch (Exception e)
    {
        Logger.LogError($"Error during dispose of EIT: {e.Message}");
    }
}