protected virtual void Dispose(bool disposing)
{
    if (disposing)
    {
        SaveHeader();
        dset?.Dispose();
        var info = Hdf5.GroupInfo(_groupId);
        _groupId = Hdf5.CloseGroup(_groupId);
        fileId = Hdf5.CloseFile(fileId);
    }
}
public void Dispose()
{
    _header.Recording.EndTime = _header.Recording.StartTime + TimeSpan.FromSeconds(_sampleCount / _header.Recording.SampleRate);
    Header.Recording.NrOfSamples = _sampleCount;
    Header.EventListToEvents();
    for (int i = 0; i < Header.Channels.Count(); i++)
    {
        Header.Channels[i].NrOfSamples = _sampleCount;
    }
    Hdf5.WriteObject(_groupId, _header);
    fileId = Hdf5.CloseFile(fileId);
}
//private readonly ReaderWriterLockSlim lock_ = new ReaderWriterLockSlim();

public Hdf5AcquisitionFileWriter(string filename, string groupName = "ROOT")
{
    H5E.set_auto(H5E.DEFAULT, null, IntPtr.Zero);
    //lock_.EnterWriteLock();
    _filename = filename;
    fileId = Hdf5.CreateFile(filename);
    _groupName = groupName;
    _groupId = Hdf5.CreateGroup(fileId, _groupName);
    Header = new Hdf5AcquisitionFile();
    _nrOfRecords = 0;
    _sampleCount = 0;
    //lock_.ExitWriteLock();
}
public void SaveHeader()
{
    //lock_.EnterWriteLock();
    Trace.WriteLine($"saving file {Header.Patient.Name} samples: {_sampleCount}; fileId: {fileId}");
    Header.Recording.EndTime = Header.Recording.StartTime + TimeSpan.FromSeconds(_sampleCount / Header.Recording.SampleRate);
    Header.Recording.NrOfSamples = _sampleCount;
    Header.EventListToEvents();
    for (int i = 0; i < Header.Channels.Count(); i++)
    {
        Header.Channels[i].NrOfSamples = _sampleCount;
    }
    Trace.WriteLine($"writing file {Header.Patient.Name} groupId: {_groupId}; fileId: {fileId}");
    Hdf5.WriteObject(_groupId, Header);
    //lock_.ExitWriteLock();
}
private static long create_type(Type t)
{
    var size = Marshal.SizeOf(t);
    var float_size = Marshal.SizeOf(typeof(float));
    var int_size = Marshal.SizeOf(typeof(int));
    var typeId = H5T.create(H5T.class_t.COMPOUND, new IntPtr(size));
    var compoundInfo = Hdf5.GetCompoundInfo(t);
    foreach (var cmp in compoundInfo)
    {
        //Console.WriteLine(string.Format("{0} {1}", cmp.name, cmp.datatype));
        H5T.insert(typeId, cmp.name, Marshal.OffsetOf(t, cmp.name), cmp.datatype);
    }
    return typeId;
}
private static int calcCompoundSize(Type type, bool useIEEE, ref hid_t id)
{
    // Create the compound datatype for the file. Because the standard types we are using
    // for the file may have different sizes than the corresponding native types, the member
    // offsets and the total size are taken from the compound info.
    var compoundInfo = Hdf5.GetCompoundInfo(type, useIEEE);
    var curCompound = compoundInfo.Last();
    var compoundSize = curCompound.offset + curCompound.size;
    // Create the compound datatype for memory.
    id = H5T.create(H5T.class_t.COMPOUND, new IntPtr(compoundSize));
    foreach (var cmp in compoundInfo)
    {
        H5T.insert(id, cmp.name, new IntPtr(cmp.offset), cmp.datatype);
    }
    return compoundSize;
}
private static long CreateType(Type t)
{
    var size = Marshal.SizeOf(t);
    var float_size = Marshal.SizeOf(typeof(float));
    var int_size = Marshal.SizeOf(typeof(int));
    var typeId = H5T.create(H5T.class_t.COMPOUND, new IntPtr(size));
    var compoundInfo = Hdf5.GetCompoundInfo(t);
    foreach (var cmp in compoundInfo)
    {
        //Console.WriteLine(string.Format("{0} {1}", cmp.name, cmp.datatype));
        // The lines below don't produce an error message, but hdfview can't read the compounds properly:
        //var typeLong = GetDatatype(cmp.type);
        //H5T.insert(typeId, cmp.name, Marshal.OffsetOf(t, cmp.name), typeLong);
        H5T.insert(typeId, cmp.name, Marshal.OffsetOf(t, cmp.name), cmp.datatype);
    }
    return typeId;
}
public static T ReadObject<T>(hid_t groupId, T readValue, string groupName)
{
    if (readValue == null)
    {
        throw new ArgumentNullException(nameof(readValue));
    }
    bool isGroupName = !string.IsNullOrWhiteSpace(groupName);
    if (isGroupName)
    {
        groupId = H5G.open(groupId, groupName);
    }
    Type tyObject = readValue.GetType();
    foreach (Attribute attr in Attribute.GetCustomAttributes(tyObject))
    {
        if (attr is Hdf5GroupName)
        {
            groupName = (attr as Hdf5GroupName).Name;
        }
        if (attr is Hdf5SaveAttribute)
        {
            Hdf5SaveAttribute atLeg = attr as Hdf5SaveAttribute;
            if (atLeg.SaveKind == Hdf5Save.DoNotSave)
            {
                return readValue;
            }
        }
    }
    ReadFields(tyObject, readValue, groupId);
    ReadProperties(tyObject, readValue, groupId);
    if (isGroupName)
    {
        Hdf5.CloseGroup(groupId);
    }
    return readValue;
}
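// Usage sketch (illustration, not part of the sources above): ReadObject fills the public
// fields and properties of an existing instance from the data found under the given group.
// `RecordingInfo` is a hypothetical POCO, and `fileId` is assumed to come from Hdf5.OpenFile.
var settings = Hdf5.ReadObject(fileId, new RecordingInfo(), "ROOT");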
/// <summary>
/// Writes data to the hdf5 file.
/// </summary>
public void Write(IList<double[]> signals)
{
    int cols = signals.Count();
    if (cols == 0)
    {
        return;
    }
    int rows = signals[0].Length;
    if (rows == 0)
    {
        return;
    }
    double sr = _header.Recording.SampleRate;
    var data = new short[rows, cols];
    //var byteLength = rows * sizeof(short);
    for (int i = 0; i < cols; i++)
    {
        var sig = signals[i];
        for (int j = 0; j < rows; j++)
        {
            data[j, i] = convert2Short(sig[j], i);
        }
    }
    var dataName = string.Concat("/", _groupName, "/Data");
    if (_nrOfRecords == 0)
    {
        _header.Recording.StartTime = DateTime.Now;
        Hdf5.CreateGroup(fileId, _groupName);
        dset = new ChunkedDataset<short>(dataName, fileId, data);
    }
    else
    {
        dset.AppendDataset(data);
    }
    _nrOfRecords++;
}
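// Usage sketch (illustration only) for the writer shown in this excerpt: create the file,
// push blocks of samples, and let Dispose/SaveHeader finalize the recording metadata.
// The file name and signal content are made up, header/channel configuration is omitted, and
// the using statement assumes the writer implements IDisposable via the Dispose methods shown here.
using (var writer = new Hdf5AcquisitionFileWriter("acquisition.h5"))
{
    var block = new List<double[]> { new double[256], new double[256] }; // two channels, one block
    writer.Write(block);   // the first call creates /ROOT/Data; later calls append to it
}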
/// <summary>
/// Initializes a new instance of the <see cref="Hdf5AcquisitionFileReader"/> class.
/// </summary>
/// <param name="filename">A filename.</param>
/// <param name="labels">The channel labels to read; if null, all channels are read.</param>
/// <param name="groupName">The root group name. If not specified, "ROOT" is used.</param>
public Hdf5AcquisitionFileReader(string filename, string[] labels = null, string groupName = "ROOT")
{
    fileId = Hdf5.OpenFile(filename, readOnly: true);
    _header = Hdf5.ReadObject<Hdf5AcquisitionFile>(fileId, groupName);
    _groupName = groupName;
    _usedChannels = new Dictionary<string, short>();
    for (short i = 0; i < _header.Recording.NrOfChannels; i++)
    {
        _usedChannels.Add(_header.Channels[i].Label, i);
    }
    if (labels == null)
    {
        _labels = _header.Channels.Select(c => c.Label).ToList();
    }
    else
    {
        _labels = labels;
    }
    _readChannelCnt = _labels.Count;
    _signals = new List<short[]>(_readChannelCnt);
}
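// Usage sketch for the reader constructor above: open a file read-only and restrict the
// channels of interest by label. The file name and labels are hypothetical; passing no
// labels selects every channel listed in the header.
var reader = new Hdf5AcquisitionFileReader("acquisition.h5", new[] { "Fp1", "Fp2" });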
public static object WriteObject(hid_t groupId, object writeValue, string groupName = null)
{
    if (writeValue == null)
    {
        throw new ArgumentNullException(nameof(writeValue));
    }
    bool createGroupName = !string.IsNullOrWhiteSpace(groupName);
    if (createGroupName)
    {
        groupId = Hdf5.CreateGroup(groupId, groupName);
    }
    Type tyObject = writeValue.GetType();
    foreach (Attribute attr in Attribute.GetCustomAttributes(tyObject))
    {
        Hdf5SaveAttribute legAt = attr as Hdf5SaveAttribute;
        if (legAt != null)
        {
            Hdf5Save kind = legAt.SaveKind;
            if (kind == Hdf5Save.DoNotSave)
            {
                return writeValue;
            }
        }
    }
    WriteProperties(tyObject, writeValue, groupId);
    WriteFields(tyObject, writeValue, groupId);
    WriteHdf5Attributes(tyObject, groupId, groupName);
    if (createGroupName)
    {
        Hdf5.CloseGroup(groupId);
    }
    return writeValue;
}
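// Usage sketch for WriteObject: any object whose type is not marked Hdf5Save.DoNotSave has its
// properties and fields written below the (optionally created) group. Hdf5AcquisitionFile is the
// header type used elsewhere in this excerpt; "ROOT" mirrors the writer's default group name.
var header = new Hdf5AcquisitionFile();
Hdf5.WriteObject(fileId, header, "ROOT");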
protected virtual void Dispose(bool disposing)
{
    Trace.WriteLine($"saving file {Header.Patient.Name} samples: {_sampleCount}; fileId: {fileId}");
    Header.Recording.EndTime = Header.Recording.StartTime + TimeSpan.FromSeconds(_sampleCount / Header.Recording.SampleRate);
    Header.Recording.NrOfSamples = _sampleCount;
    Header.EventListToEvents();
    for (int i = 0; i < Header.Channels.Count(); i++)
    {
        Header.Channels[i].NrOfSamples = _sampleCount;
    }
    Trace.WriteLine($"writing file {Header.Patient.Name} groupId: {_groupId}; fileId: {fileId}");
    Hdf5.WriteObject(_groupId, Header);
    if (disposing)
    {
        if (dset != null)
        {
            dset.Dispose();
        }
        var info = Hdf5.GroupInfo(_groupId);
        _groupId = Hdf5.CloseGroup(_groupId);
        fileId = Hdf5.CloseFile(fileId);
    }
}
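// For reference only: a caller-facing Dispose matching the protected virtual Dispose(bool)
// above would conventionally look like this. This is the standard IDisposable-pattern sketch,
// not code taken from these sources.
public void Dispose()
{
    Dispose(true);
    GC.SuppressFinalize(this);
}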
public void WriteFromArray<T>(hid_t groupId, string name, Array dset, string datasetName = null)
{
    Hdf5.WriteDatasetFromArray<T>(groupId, name, dset, datasetName);
}

public Array ReadStucts<T>(hid_t groupId, string name) where T : struct
{
    return Hdf5.ReadCompounds<T>(groupId, name).ToArray();
}

public Array ReadToArray<T>(hid_t groupId, string name)
{
    return Hdf5.ReadPrimitiveAttributes<T>(groupId, name);
}
public void Dispose()
{
    _header.Recording.EndTime = _header.Recording.StartTime + TimeSpan.FromSeconds(_sampleCount / _header.Recording.SampleRate);
    Hdf5.WriteObject(fileId, _header, _groupName);
    fileId = Hdf5.CloseFile(fileId);
}

public void WriteFromArray<T>(hid_t groupId, string name, Array dset, string datasetName = null)
{
    Hdf5.WritePrimitiveAttribute<T>(groupId, name, dset, datasetName);
}

public void WriteStrings(hid_t groupId, string name, IEnumerable<string> collection, string datasetName = null)
{
    Hdf5.WriteStringAttributes(groupId, name, (string[])collection, datasetName);
}

public IEnumerable<string> ReadStrings(hid_t groupId, string name)
{
    return Hdf5.ReadStringAttributes(groupId, name);
}

public void WriteStucts<T>(hid_t groupId, string name, IEnumerable<T> dset, string datasetName = null)
{
    Hdf5.WriteCompounds<T>(groupId, name, dset);
}
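// Usage sketch for WriteStucts/WriteCompounds: a flat value type is mapped to an HDF5 COMPOUND
// whose member layout comes from GetCompoundInfo (see CreateType above). The Sample struct,
// the dataset name "samples" and the groupId handle are hypothetical illustration values.
struct Sample
{
    public int Index;
    public double Value;
}

var samples = new[]
{
    new Sample { Index = 0, Value = 1.5 },
    new Sample { Index = 1, Value = 2.5 }
};
Hdf5.WriteCompounds(groupId, "samples", samples);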
public void WriteArray(hid_t groupId, string name, Array collection, string datasetName = null)
{
    Type type = collection.GetType();
    Type elementType = type.GetElementType();
    TypeCode typeCode = Type.GetTypeCode(elementType);
    //Boolean isStruct = type.IsValueType && !type.IsEnum;
    switch (typeCode)
    {
        case TypeCode.Boolean:
            var bls = collection.ConvertArray<Boolean, UInt16>(bl => Convert.ToUInt16(bl));
            rw.WriteFromArray<UInt16>(groupId, name, bls, datasetName);
            Hdf5.WriteStringAttribute(groupId, name, "Boolean", name);
            break;
        case TypeCode.Byte:
            rw.WriteFromArray<Byte>(groupId, name, collection, datasetName);
            Hdf5.WriteStringAttribute(groupId, name, "Byte", name);
            break;
        case TypeCode.Char:
            var chrs = collection.ConvertArray<Char, UInt16>(c => Convert.ToUInt16(c));
            rw.WriteFromArray<UInt16>(groupId, name, chrs, datasetName);
            Hdf5.WriteStringAttribute(groupId, name, "Char", name);
            break;
        case TypeCode.DateTime:
            var dts = collection.ConvertArray<DateTime, long>(dt => dt.Ticks);
            rw.WriteFromArray<long>(groupId, name, dts, datasetName);
            Hdf5.WriteStringAttribute(groupId, name, "DateTime", name);
            break;
        case TypeCode.Decimal:
            var decs = collection.ConvertArray<decimal, double>(dc => Convert.ToDouble(dc));
            rw.WriteFromArray<double>(groupId, name, decs, datasetName);
            Hdf5.WriteStringAttribute(groupId, name, "Decimal", name);
            break;
        case TypeCode.Double:
            rw.WriteFromArray<double>(groupId, name, collection, datasetName);
            break;
        case TypeCode.Int16:
            rw.WriteFromArray<short>(groupId, name, collection, datasetName);
            break;
        case TypeCode.Int32:
            rw.WriteFromArray<Int32>(groupId, name, collection, datasetName);
            break;
        case TypeCode.Int64:
            rw.WriteFromArray<Int64>(groupId, name, collection, datasetName);
            break;
        case TypeCode.SByte:
            rw.WriteFromArray<SByte>(groupId, name, collection, datasetName);
            Hdf5.WriteStringAttribute(groupId, name, "SByte", name);
            break;
        case TypeCode.Single:
            rw.WriteFromArray<Single>(groupId, name, collection, datasetName);
            break;
        case TypeCode.UInt16:
            rw.WriteFromArray<UInt16>(groupId, name, collection, datasetName);
            break;
        case TypeCode.UInt32:
            rw.WriteFromArray<UInt32>(groupId, name, collection, datasetName);
            break;
        case TypeCode.UInt64:
            rw.WriteFromArray<UInt64>(groupId, name, collection, datasetName);
            break;
        case TypeCode.String:
            if (collection.Rank > 1 && collection.GetLength(1) > 1)
            {
                throw new Exception("Only 1 dimensional string arrays allowed: " + name);
            }
            rw.WriteStrings(groupId, name, (string[])collection, datasetName);
            break;
        default:
            if (elementType == typeof(TimeSpan))
            {
                var tss = collection.ConvertArray<TimeSpan, long>(dt => dt.Ticks);
                rw.WriteFromArray<Int64>(groupId, name, tss, datasetName);
                Hdf5.WriteStringAttribute(groupId, name, "TimeSpan", name);
            }
            //else if (isStruct) {
            //    rw.WriteStucts(groupId, name, collection);
            //}
            else
            {
                string str = "type is not supported: ";
                throw new NotSupportedException(str + elementType.FullName);
            }
            break;
    }
    //WriteHdf5Attributes(type, groupId, name, name);
}
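// Illustration only (not library code): the DateTime branch above persists ticks alongside a
// "DateTime" string attribute, so values read back as long[] can be restored with the
// DateTime(long ticks) constructor. `ticksFromFile` is a hypothetical array read from the dataset.
long[] ticksFromFile = { 636000000000000000L };
DateTime[] restored = ticksFromFile.Select(t => new DateTime(t)).ToArray();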
public Array ReadToArray<T>(hid_t groupId, string name)
{
    return Hdf5.ReadDatasetToArray<T>(groupId, name);
}

public void Dispose()
{
    fileId = Hdf5.CloseFile(fileId);
}