/// <summary>
/// Writes the entire matrix table in one hyperslab-selected write.
/// </summary>
/// <typeparam name="T">Element type; must match the dataset's stored type.</typeparam>
/// <param name="matName">Name of the matrix table to write.</param>
/// <param name="data">2-D array shaped [Shape[0], Shape[1]] holding the values to write.</param>
public void SetMatrix <T>(string matName, T[,] data)
{
    // check that matrix exists
    if (tables.ContainsKey(matName))
    {
        H5DataSetId matId;
        tables.TryGetValue(matName, out matId);
        H5DataTypeId matDataId = H5D.getType(matId);
        H5DataSpaceId spaceId = H5S.create_simple(2, Shape);
        long[] start = { 0, 0 };
        long[] count = { Shape[0], Shape[1] };
        var h5matrix = new H5Array <T>(data);
        H5S.selectHyperslab(spaceId, H5S.SelectOperator.SET, start, count);
        H5DataSpaceId readSpaceId = H5S.create_simple(2, count);
        // FIX: the transfer property list was created inline and leaked;
        // keep the handle so it can be closed after the write.
        H5PropertyListId xferId = H5P.create(H5P.PropertyListClass.DATASET_XFER);
        H5D.write(matId, matDataId, readSpaceId, spaceId, xferId, h5matrix);
        H5P.close(xferId);
        H5T.close(matDataId); // FIX: type handle from H5D.getType was leaked
        H5S.close(spaceId);
        H5S.close(readSpaceId);
    }
    else
    {
        Console.WriteLine("table {0} not found in matrix file", matName);
    }
}
/// <summary>
/// Reads the 3-D float dataset "/FieldData/FD/f0" and returns it as doubles with
/// the dimension order reversed: result[k, j, i] == data[i, j, k].
/// </summary>
/// <param name="fileName">Path to the HDF5 file.</param>
/// <returns>Field values indexed [dims2, dims1, dims0].</returns>
public static double[, ,] ReadFieldData3D(string fileName)
{
    H5FileId fileId = H5F.open(fileName, H5F.OpenMode.ACC_RDONLY);
    H5DataSetId fDataSetId = H5D.open(fileId, "/FieldData/FD/f0");
    H5DataTypeId fDataTypeId = H5D.getType(fDataSetId);

    if (!H5T.equal(fDataTypeId, H5T.copy(H5T.H5Type.NATIVE_FLOAT)))
    {
        // warn and fall through; the read below may still fail or misconvert
        Console.WriteLine("Error: Invalid dataset type, expected {0}", H5T.H5Type.NATIVE_FLOAT);
    }

    // FIX: capture the dataspace so it can be closed (was created inline and leaked).
    H5DataSpaceId spaceId = H5D.getSpace(fDataSetId);
    long[] dims = H5S.getSimpleExtentDims(spaceId).ToArray();
    H5S.close(spaceId);
    if (dims.Length != 3)
    {
        Console.WriteLine("Error: Invalid field data dimensions");
    }

    float[, ,] data = new float[dims[0], dims[1], dims[2]];
    H5D.read(fDataSetId, fDataTypeId, new H5Array <float>(data));

    // Reorder: transpose (i,j,k) -> (k,j,i) while widening float -> double.
    double[, ,] fieldValues = new double[dims[2], dims[1], dims[0]];
    for (int i = 0; i < dims[0]; i++)
    {
        for (int j = 0; j < dims[1]; j++)
        {
            for (int k = 0; k < dims[2]; k++)
            {
                fieldValues[k, j, i] = data[i, j, k];
            }
        }
    }

    // FIX: close type, dataset and file handles (previously all leaked).
    H5T.close(fDataTypeId);
    H5D.close(fDataSetId);
    H5F.close(fileId);
    return(fieldValues);
}
/// <summary>
/// Reads a named attribute from an HDF5 object and returns its value as a string.
/// Handles both variable-length and fixed-length string attribute types.
/// </summary>
/// <param name="objectWithAttributes">Object carrying the attribute.</param>
/// <param name="name">Attribute name.</param>
/// <returns>The attribute value as a string.</returns>
public static string GetAttributeValue(H5ObjectWithAttributes objectWithAttributes, string name)
{
    if (objectWithAttributes is null)
    {
        throw new ArgumentNullException(nameof(objectWithAttributes));
    }
    if (name is null)
    {
        throw new ArgumentNullException(nameof(name));
    }

    H5AttributeId attributeId = H5A.open(objectWithAttributes, name);
    H5DataTypeId typeId = H5A.getType(attributeId);

    if (H5T.isVariableString(typeId))
    {
        // Variable-length string: read a single VariableLengthString element.
        var varStrings = new VariableLengthString[1];
        H5A.read(attributeId, typeId, new H5Array <VariableLengthString> (varStrings));
        H5T.close(typeId);
        H5A.close(attributeId);
        return varStrings[0].ToString();
    }

    // Fixed-length string: read raw bytes sized by the type and decode as ASCII.
    var buffer = new byte[H5T.getSize(typeId)];
    H5A.read(attributeId, typeId, new H5Array <byte> (buffer));
    H5T.close(typeId);
    H5A.close(attributeId);
    return Encoding.ASCII.GetString(buffer);
}
/// <summary>
/// Reads the x/y/z mesh vectors from "/Mesh" and converts them from metres to millimetres.
/// </summary>
/// <param name="fileName">Path to the HDF5 file.</param>
/// <returns>Three arrays (x, y, z) of mesh coordinates in mm.</returns>
public static double[][] ReadMesh(string fileName)
{
    string[] axisNames = { "x", "y", "z" };
    double[][] result = new double[3][];
    H5FileId fileId = H5F.open(fileName, H5F.OpenMode.ACC_RDONLY);
    for (int axis = 0; axis < axisNames.Length; axis++)
    {
        H5DataSetId dataSetId = H5D.open(fileId, "/Mesh/" + axisNames[axis]);
        H5DataTypeId typeId = H5D.getType(dataSetId);
        if (!H5T.equal(typeId, H5T.copy(H5T.H5Type.NATIVE_FLOAT)))
        {
            Console.WriteLine("Error: Invalid dataset type, expected {0}", H5T.H5Type.NATIVE_FLOAT);
        }
        // element count = stored bytes / bytes per element
        long elementCount = H5D.getStorageSize(dataSetId) / H5T.getSize(typeId);
        float[] raw = new float[elementCount];
        H5D.read(dataSetId, typeId, new H5Array <float>(raw));
        result[axis] = raw.Select(x => (double)x * 1000.0).ToArray(); // m -> mm
        H5D.close(dataSetId);
        H5T.close(typeId);
    }
    H5F.close(fileId);
    return result;
}
// test that we can create an empty file and add matrices
public static void CreateMatrixTest(string file)
{
    const int zones = 3;
    string[] matrixNames = { "mat1", "mat2" };

    OmxWriteStream ws = OmxFile.Create(file, zones, true);
    // NOTE: cannot create data type until after file stream is created
    H5DataTypeId matrixDataTypes = H5T.copy(H5T.H5Type.NATIVE_DOUBLE);
    foreach (string matrixName in matrixNames)
    {
        ws.AddMatrix(matrixName, matrixDataTypes);
    }
    ws.Close();

    // reopen read-only and pull a 1x1 block from each matrix
    OmxReadStream rs = OmxFile.OpenReadOnly(file);
    double[,] testblock;
    foreach (string matrixName in matrixNames)
    {
        testblock = rs.GetMatrixBlock <double>(matrixName, 0, 0, 1, 1);
    }
    Console.WriteLine("mat shape is {0},{1}", rs.Shape[0], rs.Shape[1]);
    Console.WriteLine("mat names are {0},{1}", rs.MatrixNames[0], rs.MatrixNames[1]);
    Console.WriteLine("mat data type: {0},{1}",
        H5T.getClass(rs.GetMatrixDataType(matrixNames[0])),
        H5T.getClass(rs.GetMatrixDataType(matrixNames[1])));
    rs.Close();
}
// test read/write row function: reads row 2, bumps one cell, writes it back,
// re-reads and compares checksums.
public static void ReadWriteRowTest(string file, string matName)
{
    OmxWriteStream ws = OmxFile.OpenReadWrite(file);
    Console.WriteLine("mat shape is {0},{1}", ws.Shape[0], ws.Shape[1]);
    // blocks we are reading need to match the data type of the matrix
    H5DataTypeId dt = ws.GetMatrixDataType(matName);
    // FIX: removed two dead `new double[ws.Shape[0]]` allocations that were
    // immediately overwritten by the GetMatrixRow results below.
    double[] rowData = ws.GetMatrixRow <double>(matName, 2);
    rowData[2] = rowData[2] + 1.0;
    ws.SetMatrixRow <double>(matName, 2, rowData);
    double[] rowData2 = ws.GetMatrixRow <double>(matName, 2);
    if (rowData.Sum() == rowData2.Sum())
    {
        Console.WriteLine("Row Read/Write successful");
    }
    else
    {
        Console.WriteLine("Read/Write mismatch");
    }
    ws.Close();
}
// Matrix Specific Methods
// TODO:
// 1. add handling for matrix title
// 2. add specification of NA values
// 3. other attributes: pa-format flag, year int, source string

/// <summary>
/// Returns a row of the matrix.
/// </summary>
/// <typeparam name="T">Element type; must match the dataset's stored type.</typeparam>
/// <param name="matName">Name of the matrix table to read.</param>
/// <param name="rowIndex">Zero-based row to fetch.</param>
/// <returns>Array of Shape[1] elements; zero-initialized if the table is missing.</returns>
public T[] GetMatrixRow <T>(string matName, int rowIndex)
{
    var rowData = new T[Shape[1]];
    // check that matrix exists
    if (tables.ContainsKey(matName))
    {
        H5DataSetId matId;
        tables.TryGetValue(matName, out matId);
        H5DataTypeId matDataId = H5D.getType(matId);
        H5DataSpaceId spaceId = H5S.create_simple(2, Shape);
        var h5matrix = new H5Array <T>(rowData);
        long[] start = { rowIndex, 0 };
        long[] count = { 1, Shape[1] };
        H5S.selectHyperslab(spaceId, H5S.SelectOperator.SET, start, count);
        H5DataSpaceId readSpaceId = H5S.create_simple(2, count);
        // FIX: the transfer property list was created inline and leaked;
        // keep the handle so it can be closed after the read.
        H5PropertyListId xferId = H5P.create(H5P.PropertyListClass.DATASET_XFER);
        H5D.read(matId, matDataId, readSpaceId, spaceId, xferId, h5matrix);
        H5P.close(xferId);
        H5T.close(matDataId); // FIX: type handle from H5D.getType was leaked
        H5S.close(spaceId);
        H5S.close(readSpaceId);
    }
    else
    {
        Console.WriteLine("table {0} not found in matrix file", matName);
    }
    return(rowData);
}
/// <summary>
/// Writes a string attribute onto an existing dataset.
/// (Original doc: 写数据集属性 — "write dataset attribute".)
/// </summary>
/// <param name="datasetName">Name/path of the dataset receiving the attribute.</param>
/// <param name="attrName">Attribute name.</param>
/// <param name="value">Attribute value, stored as a fixed-length C string.</param>
public void WriteDatasetAttribute(string datasetName, string attrName, string value)
{
    H5DataSetId datasetId = H5D.open(_fileId, datasetName);
    // Fixed-length C-string type sized to the value's character count.
    H5DataTypeId typeId = H5T.copy(H5T.H5Type.C_S1);
    H5DataSpaceId spaceId = H5S.create(H5S.H5SClass.SCALAR);
    // NOTE(review): when value is "" this sets a type size of 0, which HDF5 may
    // reject — confirm callers never pass an empty string.
    H5T.setSize(typeId, value.Length);
    H5AttributeId attrId = H5A.create(datasetId, attrName, typeId, spaceId);
    if (value != "")
    {
        // Encoding.Default is machine/locale dependent; bytes are written as-is.
        H5Array <byte> buffer = new H5Array <byte>(Encoding.Default.GetBytes(value));
        H5A.write(attrId, typeId, buffer);
    }
    // Release all handles in reverse-ish order of creation.
    if (typeId != null)
    {
        H5T.close(typeId);
    }
    if (spaceId != null)
    {
        H5S.close(spaceId);
    }
    if (attrId != null)
    {
        H5A.close(attrId);
    }
    if (datasetId != null)
    {
        H5D.close(datasetId);
    }
}
/// <summary>
/// Returns the value of a string attribute, handling both variable-length and
/// fixed-length (null-terminated, ASCII) string types.
/// </summary>
/// <param name="_attributeId">Open attribute handle.</param>
/// <returns>The attribute value as a string.</returns>
public static string AttributeAsString(H5AttributeId _attributeId)
{
    H5DataTypeId dataTypeId = H5A.getType(_attributeId);
    bool isVariableLength = H5T.isVariableString(dataTypeId);
    if (isVariableLength)
    {
        // Variable length string attribute
        // NOTE: This section only works if the array length is 1
        VariableLengthString[] value = new VariableLengthString[1];
        H5A.read <VariableLengthString>(_attributeId, dataTypeId, new H5Array <VariableLengthString>(value));
        H5T.close(dataTypeId); // FIX: type handle was leaked on this path
        return(value[0].ToString());
    }
    else
    {
        // Make length smaller so null termination character is not read
        int length = (int)H5T.getSize(dataTypeId) - 1;
        // Fixed length string attribute
        byte[] valueBytes = new byte[length];
        H5A.read <byte>(_attributeId, dataTypeId, new H5Array <byte>(valueBytes));
        H5T.close(dataTypeId); // FIX: type handle was leaked on this path
        string value = System.Text.ASCIIEncoding.ASCII.GetString(valueBytes);
        return(value);
    }
}
/// <summary>
/// Reads a numeric (float or double) attribute from a dataset or group and
/// returns its first element as a double; returns NaN when the attribute type
/// is unsupported or an HDF exception occurs.
/// </summary>
/// <param name="file">Path to the HDF5 file.</param>
/// <param name="dataSetOrGroup">Path of the dataset or group holding the attribute.</param>
/// <param name="attribute">Attribute name.</param>
/// <returns>First attribute value as double, or NaN on failure.</returns>
public static double ReadAttribute(string file, string dataSetOrGroup, string attribute)
{
    double attr = Double.NaN;
    try
    {
        H5FileId fileId = H5F.open(file, H5F.OpenMode.ACC_RDONLY);
        H5ObjectInfo objectInfo = H5O.getInfoByName(fileId, dataSetOrGroup);
        H5GroupId groupId = null;
        H5DataSetId dataSetId = null;
        H5AttributeId attrId;
        // the attribute may hang off either a group or a dataset
        if (objectInfo.objectType == H5ObjectType.GROUP)
        {
            groupId = H5G.open(fileId, dataSetOrGroup);
            attrId = H5A.open(groupId, attribute);
        }
        else
        {
            dataSetId = H5D.open(fileId, dataSetOrGroup);
            attrId = H5A.open(dataSetId, attribute);
        }
        H5DataTypeId attrTypeId = H5A.getType(attrId);
        double[] dAttrs = new double[] { };
        if (H5T.equal(attrTypeId, H5T.copy(H5T.H5Type.NATIVE_FLOAT)))
        {
            float[] fAttrs = new float[H5S.getSimpleExtentNPoints(H5A.getSpace(attrId))];
            H5A.read(attrId, attrTypeId, new H5Array <float>(fAttrs));
            dAttrs = (from f in fAttrs select(double) f).ToArray();
        }
        else if (H5T.equal(attrTypeId, H5T.copy(H5T.H5Type.NATIVE_DOUBLE)))
        {
            dAttrs = new double[H5S.getSimpleExtentNPoints(H5A.getSpace(attrId))];
            H5A.read(attrId, attrTypeId, new H5Array <double>(dAttrs));
        }
        H5T.close(attrTypeId);
        H5A.close(attrId);
        if (groupId != null)
        {
            H5G.close(groupId);
        }
        if (dataSetId != null)
        {
            H5D.close(dataSetId);
        }
        H5F.close(fileId);
        // FIX: previously indexed dAttrs[0] unconditionally, which threw an
        // IndexOutOfRangeException (not an HDFException, so uncaught) whenever the
        // attribute was neither NATIVE_FLOAT nor NATIVE_DOUBLE. Return NaN instead.
        if (dAttrs.Length > 0)
        {
            attr = dAttrs[0];
        }
        return(attr);
    }
    catch (HDFException e)
    {
        Console.WriteLine("Error: Unhandled HDF5 exception");
        Console.WriteLine(e.Message);
    }
    return(attr);
}
/// <summary>
/// Reads a 2-D dataset of doubles and returns its transpose.
/// </summary>
/// <param name="file">Path to the HDF5 file.</param>
/// <param name="dataSet">Dataset path inside the file.</param>
/// <returns>Transposed values indexed [dims1, dims0].</returns>
public static double[,] ReadFieldData2D(string file, string dataSet)
{
    H5FileId fileId = H5F.open(file, H5F.OpenMode.ACC_RDONLY);
    H5DataSetId dataSetId = H5D.open(fileId, dataSet);
    H5DataTypeId typeId = H5D.getType(dataSetId);
    long[] dims = H5S.getSimpleExtentDims(H5D.getSpace(dataSetId)).ToArray();

    double[,] raw = new double[dims[0], dims[1]];
    H5D.read(dataSetId, typeId, new H5Array <double>(raw));

    // Transpose [rows, cols] -> [cols, rows].
    double[,] transposed = new double[dims[1], dims[0]];
    for (int col = 0; col < dims[1]; col++)
    {
        for (int row = 0; row < dims[0]; row++)
        {
            transposed[col, row] = raw[row, col];
        }
    }

    H5T.close(typeId);
    H5D.close(dataSetId);
    H5F.close(fileId);
    return(transposed);
}
/// <summary>
/// Creates an HDF5 file containing /group/dataset (a 3x4 int matrix) plus a scalar
/// integer attribute "int" and a fixed-length ASCII string attribute "string" on the dataset.
/// </summary>
/// <param name="filePath">Destination path; an existing file is truncated.</param>
private static void WriteFile(string filePath)
{
    var file = H5F.create(filePath, H5F.CreateMode.ACC_TRUNC);
    var group = H5G.create(file, "/group");
    H5G.close(group);

    // Rank/extents of the integer matrix dataset.
    const int RANK = 2;
    const int DIM0 = 3;
    const int DIM1 = 4;
    var dims = new long[RANK] { DIM0, DIM1 };
    var dataSpace = H5S.create_simple(RANK, dims);
    var dataSet = H5D.create(file, "/group/dataset", H5T.H5Type.NATIVE_INT, dataSpace);
    H5S.close(dataSpace);
    var data = new int[DIM0, DIM1]
    {
        { 1, 2, 3, 4 },
        { 5, 6, 7, 8 },
        { 9, 10, 11, 12 }
    };
    H5D.write(dataSet, new H5DataTypeId(H5T.H5Type.NATIVE_INT), new H5Array <int>(data));

    // Scalar integer attribute "int" = 42 on the dataset.
    var dataType = new H5DataTypeId(H5T.H5Type.NATIVE_INT);
    dataSpace = H5S.create(H5S.H5SClass.SCALAR);
    var integerAttribute = H5A.create(dataSet, "int", dataType, dataSpace);
    H5A.write(integerAttribute, dataType, new H5Array <int>(new int[1] { 42 }));
    H5A.close(integerAttribute);
    H5S.close(dataSpace);
    //H5T.close(dataType); // Read-only.

    // Fixed-length ASCII string attribute "string" on the dataset.
    var str = "Hello, world!";
    var strBytes = Encoding.ASCII.GetBytes(str);
    // There is a H5T.get_cset, but there does not seem to be a way of setting the character encoding, i.e. set_cset.
    dataType = H5T.copy(H5T.H5Type.C_S1);
    H5T.setSize(dataType, strBytes.Length);
    dataSpace = H5S.create(H5S.H5SClass.SCALAR);
    var stringAttribute = H5A.create(dataSet, "string", dataType, dataSpace);
    H5A.write(stringAttribute, dataType, new H5Array <byte>(strBytes));
    H5A.close(stringAttribute);
    H5S.close(dataSpace);
    H5T.close(dataType);

    H5D.close(dataSet);
    H5F.close(file);
}
/// <summary>
/// Writes the value carried by an attribute definition into an already-created
/// HDF5 attribute. No-op when the definition holds no value (null, or a value
/// whose string form is empty).
/// </summary>
/// <param name="hDFAttributeDef">Definition whose Value is cast to T[] and written.</param>
/// <param name="dataTypeId">HDF5 type used for the write.</param>
/// <param name="attributeId">Target attribute handle.</param>
// NOTE(review): T is not declared on this method, so it presumably comes from an
// enclosing generic type — confirm. Also, `Value as T[]` yields null when Value is
// not actually a T[]; verify callers always supply the matching array type.
public static void WriteAttribute(HDFAttributeDef hDFAttributeDef, H5DataTypeId dataTypeId, H5AttributeId attributeId)
{
    if (hDFAttributeDef.Value == null || hDFAttributeDef.Value.ToString() == "")
    {
        return;
    }
    H5A.write <T>(attributeId, dataTypeId, new H5Array <T>(hDFAttributeDef.Value as T[]));
}
/// <summary>
/// Returns the HDF5 type class name of the named dataset.
/// (Original doc: 获得数据集的类型 — "get the dataset's type".)
/// </summary>
/// <param name="datasetName">Dataset name/path.</param>
/// <returns>The H5TClass value as a string (e.g. "INTEGER", "FLOAT").</returns>
public string GetDatasetType(string datasetName)
{
    H5DataSetId datasetId = H5D.open(_fileId, datasetName);
    H5DataTypeId typeId = H5D.getType(datasetId);
    H5T.H5TClass typeClass = H5T.getClass(typeId);
    // FIX: close the type and dataset handles (previously both leaked).
    H5T.close(typeId);
    H5D.close(datasetId);
    return(typeClass.ToString());
}
/// <summary>
/// Reads a scalar attribute as a double. Assumes the attribute holds a single
/// value convertible to NATIVE_DOUBLE.
/// </summary>
/// <param name="_attributeId">Open attribute handle.</param>
/// <returns>The attribute value.</returns>
public static Double AttributeAsDouble(H5AttributeId _attributeId)
{
    H5DataTypeId attributeType = H5T.copy(H5T.H5Type.NATIVE_DOUBLE);
    double[] value = new double[1];
    H5A.read <double>(_attributeId, attributeType, new H5Array <double>(value));
    H5T.close(attributeType); // FIX: copied type handle was leaked
    return(value[0]);
}
/// <summary>
/// Reads a scalar attribute as a 32-bit integer. Assumes the attribute holds a
/// single value convertible to NATIVE_INT.
/// </summary>
/// <param name="_attributeId">Open attribute handle.</param>
/// <returns>The attribute value.</returns>
public static int AttributeAsInt32(H5AttributeId _attributeId)
{
    H5DataTypeId attributeType = H5T.copy(H5T.H5Type.NATIVE_INT);
    int[] value = new int[1];
    H5A.read <int>(_attributeId, attributeType, new H5Array <int>(value));
    H5T.close(attributeType); // FIX: copied type handle was leaked
    return(value[0]);
}
/// <summary>
/// Reads an attribute's contents into a newly allocated array of T.
/// </summary>
/// <typeparam name="T">Element type to read.</typeparam>
/// <param name="aid">Open attribute handle.</param>
/// <returns>The attribute values.</returns>
protected T[] getAttribute <T>(H5AttributeId aid)
{
    H5DataTypeId typeId = H5A.getType(aid);
    // NOTE(review): H5T.getSize returns the type size in BYTES, used here as the
    // element count; correct only when T is one byte wide (or this is the intended
    // convention for these attributes) — confirm against callers.
    int size = H5T.getSize(typeId);
    var attValue = new T[size];
    H5A.read <T>(aid, typeId, new H5Array <T>(attValue));
    H5T.close(typeId); // FIX: type handle was leaked
    return(attValue);
}
/// <summary>
/// Reads an attribute's contents into a new array of <paramref name="size"/> elements.
/// Returns an empty array without touching the attribute when size is 0.
/// </summary>
/// <typeparam name="T">Element type to read.</typeparam>
/// <param name="size">Number of elements to read.</param>
/// <param name="attId">Open attribute handle.</param>
/// <param name="typeId">Type to read the attribute as.</param>
private T[] ReadArray <T>(long size, H5AttributeId attId, H5DataTypeId typeId)
{
    var buffer = new T[size];
    if (size != 0)
    {
        H5A.read <T>(attId, typeId, new H5Array <T>(buffer));
    }
    return(buffer);
}
/// <summary>
/// UTF-8 encodes a string and produces a matching fixed-size C-string HDF5 type.
/// </summary>
/// <param name="str">String to encode.</param>
/// <param name="dtype">Out: C_S1 type sized to the encoded byte length.</param>
/// <returns>UTF-8 bytes of <paramref name="str"/>.</returns>
private static byte[] EncodeStringData(string str, out H5DataTypeId dtype)
{
    byte[] encoded = System.Text.Encoding.UTF8.GetBytes(str);
    dtype = H5T.copy(H5T.H5Type.C_S1);
    H5T.setSize(dtype, encoded.Length);
    return(encoded);
}
/// <summary>
/// Rewrites part of a dataset's values with de-striped data.
/// (Original doc: 重写数据集的值(去条带的数据) — "rewrite dataset values (de-striped data)".)
/// </summary>
/// <typeparam name="T">Element type of the dataset.</typeparam>
/// <param name="dataSetName">Dataset name.</param>
/// <param name="dataTypeId">Dataset type id.</param>
/// <param name="values">De-striped replacement values.</param>
/// <param name="BrandNo">Start index (0-based) within the flattened dataset.</param>
private void ReWriteDataSet <T>(string dataSetName, H5DataTypeId dataTypeId, T[] values, int BrandNo)
{
    H5FileId _h5FileId = null;
    H5DataSetId dataSetId = null;
    H5DataSpaceId spaceid = null;
    try
    {
        _h5FileId = H5F.open(fileName, H5F.OpenMode.ACC_RDWR);
        // locate the dataset that holds the requested band
        dataSetId = H5D.open(_h5FileId, dataSetName);
        spaceid = H5D.getSpace(dataSetId);
        long[] dims = H5S.getSimpleExtentDims(spaceid); // extents, e.g. [3,1800,2048]
        int rank = H5S.getSimpleExtentNDims(spaceid);   // dimension count, e.g. 3
        H5S.close(spaceid);
        spaceid = null;
        // total element count from the extents
        int size = 0;
        if (rank == 0)
        {
            size = 1;
        }
        else if (rank == 1)
        {
            size = Convert.ToInt32(dims[0]);
        }
        else if (rank == 2)
        {
            size = Convert.ToInt32(dims[0] * dims[1]);
        }
        else if (rank == 3)
        {
            size = Convert.ToInt32(dims[0] * dims[1] * dims[2]);
        }
        T[] v = new T[size];
        // read the original data
        H5D.read <T>(dataSetId, dataTypeId, new H5Array <T>(v));
        // overwrite the corrected range and write the whole buffer back
        for (int i = BrandNo; i < values.Length; i++)
        {
            v[i] = values[i];
        }
        H5D.write <T>(dataSetId, dataTypeId, new H5Array <T>(v));
    }
    // FIX: removed `catch (Exception e) { throw new Exception(e.Message); }`, which
    // destroyed the original exception type and stack trace; exceptions now
    // propagate unchanged (still Exception-derived, so existing catches work).
    finally
    {
        // FIX: guard against null handles when open failed part-way through.
        if (dataSetId != null)
        {
            H5D.close(dataSetId);
        }
        if (_h5FileId != null)
        {
            H5F.close(_h5FileId);
        }
    }
}
/// <summary>
/// Writes a scalar 64-bit integer attribute onto the target object.
/// </summary>
/// <param name="target">Object receiving the attribute.</param>
/// <param name="name">Attribute name.</param>
/// <param name="value">Attribute value.</param>
private static void WriteAttribute(H5ObjectWithAttributes target, string name, long value)
{
    H5DataTypeId typeId = H5T.copy(H5T.H5Type.NATIVE_LLONG);
    H5DataSpaceId scalarSpace = H5S.create(H5S.H5SClass.SCALAR);
    H5AttributeId attrId = H5A.create(target, name, typeId, scalarSpace);
    H5A.write(attrId, typeId, new H5Array <long>(new[] { value }));
    H5A.close(attrId);
    H5T.close(typeId);
    H5S.close(scalarSpace);
}
/// <summary>
/// Reads the whole dataset and returns the slice belonging to one band.
/// </summary>
/// <typeparam name="T">Element type of the dataset.</typeparam>
/// <param name="size">Total element count of the dataset.</param>
/// <param name="dsId">Dataset handle.</param>
/// <param name="typeId">Element type handle.</param>
/// <param name="bandIndex">Zero-based band to extract.</param>
/// <param name="outSize">Elements per band.</param>
private T[] ReadArray <T>(int size, H5DataSetId dsId, H5DataTypeId typeId, int bandIndex, int outSize)
{
    var full = new T[size];
    H5D.read <T>(dsId, typeId, new H5Array <T>(full));
    var band = new T[outSize];
    Array.Copy(full, bandIndex * outSize, band, 0, outSize);
    return(band);
}
/// <summary>
/// Create mapping index - assumes that matrices are square and uses first dimension as the map size
/// </summary>
/// <typeparam name="T">Element type of the equivalence array.</typeparam>
/// <param name="tazEquiv">array the size of the matrix table dimension</param>
/// <param name="mapDataType">Data type of array</param>
/// <param name="mapName">Name of index map</param>
public void CreateMapping <T>(T[] tazEquiv, H5DataTypeId mapDataType, string mapName)
{
    long[] oneDShape = { Shape[0] };
    H5DataSpaceId mapSpaceId = H5S.create_simple(1, oneDShape);
    H5DataSetId newMappingID = H5D.create(luGroup, mapName, mapDataType, mapSpaceId);
    H5D.write(newMappingID, mapDataType, new H5Array <T>(tazEquiv));
    H5S.close(mapSpaceId); // FIX: dataspace handle was leaked
    IndexMapNames.Add(mapName);
    NumIndexMap++;
    // the dataset handle stays open deliberately: it is cached for later lookups
    this.indexMaps.Add(mapName, newMappingID);
}
/// <summary>
/// Opens the dataset <paramref name="pathName"/> under the given group and fills
/// <paramref name="dataObject"/> with its handle, name, rank, CLR element type and data.
/// Exceptions are logged to the console and swallowed.
/// </summary>
/// <param name="h5GroupId">Group containing the dataset.</param>
/// <param name="pathName">Dataset name/path relative to the group.</param>
/// <param name="dataObject">Receives dataset id, name, rank, element type and data.</param>
private void createHD5DataObject(H5GroupId h5GroupId, string pathName, ref HD5DataSetObject dataObject)
{
    H5DataSetId datasetid = null;
    H5DataSpaceId spaceid = null;
    H5DataTypeId dataTypeid = null;
    try
    {
        dataObject.GroupId = h5GroupId;
        datasetid = H5D.open(h5GroupId, pathName);
        dataObject.DatasetID = datasetid;
        dataObject.DatasetName = pathName;
        spaceid = H5D.getSpace(datasetid);
        var dims = H5S.getSimpleExtentDims(spaceid);
        dataTypeid = H5D.getType(datasetid);
        dataObject.Dim = dims.Length;
        HDF5DotNet.H5T.H5TClass classType = H5T.getClass(dataTypeid);
        int size = H5T.getSize(dataTypeid);
        // sign is only meaningful for integer classes; default to two's complement
        H5T.Sign sign = H5T.Sign.TWOS_COMPLEMENT;
        if (classType == H5T.H5TClass.INTEGER)
        {
            sign = H5T.getSign(dataTypeid);
        }
        //var rank = H5S.getSimpleExtentNDims(space);
        //var statu = H5S.getSimpleExtentDims(space);
        Boolean bString = H5T.isVariableString(dataTypeid);
        //String name = H5T.getMemberName(dataType, 0);
        // var type2 = H5T.getNativeType(dataType, H5T.Direction.DEFAULT);
        // map (class, byte size, sign) to the corresponding CLR type, then read
        Type type = getTypeof(classType, size, sign);
        dataObject.DataType = type;
        dataObject.Data = readData(dataObject);
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
    }
    finally{
        // NOTE(review): DatasetID was stored in dataObject above, but the handle is
        // closed here — the stored id is stale after this method returns; confirm
        // no caller uses dataObject.DatasetID afterwards.
        if (datasetid != null)
        {
            H5D.close(datasetid);
        }
        if (spaceid != null)
        {
            H5S.close(spaceid);
        }
        if (dataTypeid != null)
        {
            H5T.close(dataTypeid);
        }
    }
}
/// <summary>
/// Writes a 1-D double-array attribute onto the target object.
/// </summary>
/// <param name="target">Object receiving the attribute.</param>
/// <param name="name">Attribute name.</param>
/// <param name="value">Values to store; the dataspace extent equals its length.</param>
private static void WriteAttribute(H5ObjectWithAttributes target, string name, double[] value)
{
    H5DataTypeId typeId = H5T.copy(H5T.H5Type.NATIVE_DOUBLE);
    H5DataSpaceId arraySpace = H5S.create_simple(1, new[] { value.LongCount() });
    H5AttributeId attrId = H5A.create(target, name, typeId, arraySpace);
    H5A.write(attrId, typeId, new H5Array <double>(value));
    H5A.close(attrId);
    H5T.close(typeId);
    H5S.close(arraySpace);
}
/// <summary>
/// Reads the named dataset as a flat array; exceptions are not handled here.
/// (Original doc: 读取指定数据集，未对异常进行处理.)
/// </summary>
/// <typeparam name="T">Element type to read.</typeparam>
/// <param name="datasetName">Dataset name.</param>
/// <param name="bandN">Out: band count (1 for 2-D data).</param>
/// <param name="bandH">Out: band height.</param>
/// <param name="bandW">Out: band width.</param>
/// <returns>Flattened array of bandN*bandH*bandW elements.</returns>
public T[] ReadDataArray <T>(String datasetName, ref int bandN, ref int bandH, ref int bandW)
{
    H5DataSetId datasetId = null;
    H5DataSpaceId spaceId = null;
    H5DataTypeId typeId = null;
    long[] dims = null;
    if (!String.IsNullOrEmpty(datasetName) && _datasetNames.Contains(datasetName))
    {
        datasetId = H5D.open(_fileId, datasetName);
        spaceId = H5D.getSpace(datasetId);
        dims = H5S.getSimpleExtentDims(spaceId);
        if (dims.Length == 2)
        {
            bandN = 1;
            bandH = (int)dims[0];
            bandW = (int)dims[1];
        }
        else if (dims.Length == 3)
        {
            bandN = (int)dims[0];
            bandH = (int)dims[1];
            bandW = (int)dims[2];
        }
        // FIX: the raw type handle from H5D.getType was overwritten by the
        // native-type handle and leaked; close it explicitly.
        H5DataTypeId rawTypeId = H5D.getType(datasetId);
        typeId = H5T.getNativeType(rawTypeId, H5T.Direction.DEFAULT);
        H5T.close(rawTypeId);
        T[] dv = new T[bandN * bandH * bandW];
        H5D.read <T>(datasetId, typeId, new H5Array <T>(dv));
        if (typeId != null)
        {
            H5T.close(typeId);
        }
        if (spaceId != null)
        {
            H5S.close(spaceId);
        }
        if (datasetId != null)
        {
            H5D.close(datasetId);
        }
        return(dv);
    }
    else
    {
        throw new Exception("未查到指定数据集!");
    }
}
/// <summary>
/// Returns the data type of the named index map, or null when the map does not exist.
/// </summary>
/// <param name="mapName">Name of the index map.</param>
/// <returns>The HDF5 data type id, or null if the map is unknown.</returns>
public H5DataTypeId GetMappingDataType(string mapName)
{
    H5DataSetId mapId;
    if (!indexMaps.TryGetValue(mapName, out mapId))
    {
        Console.WriteLine("index map {0} not found in file", mapName);
        return null;
    }
    return H5D.getType(mapId);
}
/// <summary>
/// Returns the data type of the named matrix table, or null when it does not exist.
/// </summary>
/// <param name="matName">Name of the matrix table.</param>
/// <returns>The HDF5 data type id, or null if the table is unknown.</returns>
public H5DataTypeId GetMatrixDataType(string matName)
{
    H5DataSetId matId;
    if (!tables.TryGetValue(matName, out matId))
    {
        Console.WriteLine("table {0} not found in file", matName);
        return null;
    }
    return H5D.getType(matId);
}
/// <summary>
/// Creates a new HDF5 file and writes `count` mData records to a 1-D dataset.
/// </summary>
private void WriteData()
{
    Console.WriteLine("Creating H5 file {0}...", filename);
    // Rank is the number of dimensions of the data array.
    const int RANK = 1;
    // Create an HDF5 file.
    // The enumeration type H5F.CreateMode provides only the legal
    // creation modes. Missing H5Fcreate parameters are provided
    // with default values.
    H5FileId fileId = H5F.create(filename, H5F.CreateMode.ACC_TRUNC);
    // Prepare to create a data space for writing a 1-dimensional
    // signed integer array.
    long[] dims = new long[RANK];
    dims[0] = count;
    // Put descending ramp data in an array so that we can
    // write it to the file.
    mData[] dset_data = new mData[count];
    for (int i = 0; i < count; i++)
    {
        dset_data[i] = new mData(i + 80, i + 40, i + 1);
    }
    // Create a data space to accommodate our 1-dimensional array.
    // The resulting H5DataSpaceId will be used to create the
    // data set.
    H5DataSpaceId spaceId = H5S.create_simple(RANK, dims);
    // Create a copy of a standard data type. We will use the
    // resulting H5DataTypeId to create the data set. We could
    // have used the HST.H5Type data directly in the call to
    // H5D.create, but this demonstrates the use of H5T.copy
    // and the use of a H5DataTypeId in H5D.create.
    // NOTE(review): STD_REF_OBJ is an object-reference type, yet the buffer
    // holds mData compound records — confirm this type choice is intentional.
    H5DataTypeId typeId = H5T.copy(H5T.H5Type.STD_REF_OBJ);
    // Find the size of the type
    int typeSize = H5T.getSize(typeId);
    // Create the data set.
    H5DataSetId dataSetId = H5D.create(fileId, dataSetName, typeId, spaceId);
    // Write the integer data to the data set.
    H5D.write(dataSetId, new H5DataTypeId(H5T.H5Type.STD_REF_OBJ), new H5Array <mData>(dset_data));
    H5D.close(dataSetId);
    H5F.close(fileId);
    Console.WriteLine("H5 file {0} created successfully!", filename);
}
/// <summary>
/// Writes a 3-D UInt16 data cube to /data/DEsoftware/data, tags the group with
/// the required emd_group_type attribute, and writes a placeholder dim1 vector.
/// </summary>
/// <param name="fileId">Open, writable HDF5 file handle.</param>
/// <param name="datacube">Cube to store; extents become the dataset dimensions.</param>
public static void WriteDataCube(H5FileId fileId, UInt16[,,] datacube)
{
    H5GroupId dataGroup = H5G.create(fileId, "/data");
    H5GroupId dataSubGroup = H5G.create(dataGroup, "DEsoftware");
    long[] dims = new long[3] { datacube.GetLength(0), datacube.GetLength(1), datacube.GetLength(2) };
    H5DataSpaceId spaceId = H5S.create_simple(3, dims);
    H5DataTypeId typeId = H5T.copy(H5T.H5Type.NATIVE_USHORT);
    H5DataSetId dataSetId = H5D.create(dataSubGroup, "data", typeId, spaceId);

    // create attribute emd_group_type for dataSubGroup, which is required to have value 1
    // NOTE(review): the attribute is created as NATIVE_UCHAR but written with a
    // NATIVE_INT memory type (HDF5 converts on write) — confirm this is intended.
    int par = 1;
    int[] AttArray = new int[1] { par };
    long[] attdims = new long[1] { AttArray.Length };
    H5AttributeId attributeId = H5A.create(dataSubGroup, "emd_group_type",
        H5T.copy(H5T.H5Type.NATIVE_UCHAR), H5S.create_simple(1, attdims));
    H5A.write(attributeId, H5T.copy(H5T.H5Type.NATIVE_INT), new H5Array <int>(AttArray));
    H5A.close(attributeId);

    // write datacube to "data", which contains whole 3D datacube
    H5D.write <ushort>(dataSetId, typeId, new H5Array <ushort>(datacube));
    // FIX: close the cube's handles before reusing the variables below
    // (previously leaked when spaceId/typeId/dataSetId were reassigned).
    H5S.close(spaceId);
    H5T.close(typeId);
    H5D.close(dataSetId);

    // dimension vector for the first axis (zero-filled placeholder)
    long[] dim1 = new long[1] { datacube.GetLength(0) };
    double[] dimarray = new double [datacube.GetLength(0)];
    // FIX: rank must be 1 — dim1 holds a single extent, but the original called
    // H5S.create_simple(3, dim1), declaring a rank-3 space over a 1-entry array.
    spaceId = H5S.create_simple(1, dim1);
    typeId = H5T.copy(H5T.H5Type.NATIVE_DOUBLE);
    dataSetId = H5D.create(dataSubGroup, "dim1", typeId, spaceId);
    H5D.write <double>(dataSetId, typeId, new H5Array <double>(dimarray));
    H5S.close(spaceId);
    H5T.close(typeId);
    H5D.close(dataSetId);
    H5G.close(dataSubGroup);
    H5G.close(dataGroup);
}
/// <summary>
/// Constructs a new EpochHDF5Persistor with an HDF5 file at the given path.
/// Opens an existing file (re-opening the committed compound types) or creates a
/// fresh one and commits the STRING40 / KEY40VAR40 / MEASUREMENT /
/// EXTDEV_MEASUREMENT types into it.
/// </summary>
/// <param name="filename">Desired HDF5 path</param>
/// <param name="assocFilePrefix">Prefix for auxiliary (e.g. image) file associated with this HDF5 file</param>
/// <param name="guidGenerator">Function for generating new UUIDs (e.g. Guid.NewGuid)</param>
/// <param name="compression">Automatically numeric data compression (0 = none, 9 = maximum)</param>
public EpochHDF5Persistor(string filename, string assocFilePrefix, Func<Guid> guidGenerator, uint compression = 9)
    : base(guidGenerator)
{
    if (filename == null)
        throw new ArgumentException("File name must not be null", "filename");
    if(compression > 9)
        throw new ArgumentException("Compression must be 0-9", "compression");
    if (assocFilePrefix == null)
        assocFilePrefix = "";

    this.AssociatedFilePrefix = assocFilePrefix;
    NumericDataCompression = compression;
    EpochGroupsIDs = new Stack<EpochGroupIDs>();

    // The actual on-disk file carries the associated-file prefix.
    var fInfo = new FileInfo(filename);
    string prefixedFilePath = fInfo.DirectoryName + Path.DirectorySeparatorChar + this.AssociatedFilePrefix + fInfo.Name;
    var currentFile = new FileInfo(prefixedFilePath);
    if (currentFile.Exists)
    {
        // Existing file: open read-write and re-open the committed types by name.
        fileId = H5F.open(prefixedFilePath, H5F.OpenMode.ACC_RDWR);
        string_t = H5T.open(fileId, "STRING40");
        keyval_t = H5T.open(fileId, "KEY40VAR40");
        measurement_t = H5T.open(fileId, "MEASUREMENT");
        extdevmeasurement_t = H5T.open(fileId, "EXTDEV_MEASUREMENT");
        //TODO Check persistence version
    }
    else
    {
        // New file: create exclusively and commit the shared compound types.
        fileId = H5F.create(prefixedFilePath, H5F.CreateMode.ACC_EXCL);
        WriteAttribute(fileId, "version", Version);
        // Create our standard String type (string of length FIXED_STRING_LENGTH characters)
        // NOTE(review): setSize uses the literal 40 while the insert below uses
        // FIXED_STRING_LENGTH — presumably equal to 40; confirm.
        string_t = H5T.copy(H5T.H5Type.C_S1);
        H5T.setSize(string_t, 40);
        H5T.commit(fileId, "STRING40", string_t);
        // Create our key/value compound type (two strings of length 40 characters)
        keyval_t = H5T.create(H5T.CreateClass.COMPOUND, 80);
        H5T.insert(keyval_t, "key", 0, string_t);
        H5T.insert(keyval_t, "value", FIXED_STRING_LENGTH, string_t);
        H5T.commit(fileId, "KEY40VAR40", keyval_t);
        // Create the Measurement compound type
        measurement_t = H5T.create(H5T.CreateClass.COMPOUND, 48); // confirm 48 is enough/too much/whatever
        H5T.insert(measurement_t, "quantity", 0, H5T.H5Type.NATIVE_DOUBLE);
        H5T.insert(measurement_t, "unit", H5T.getSize(H5T.H5Type.NATIVE_DOUBLE), string_t);
        H5T.commit(fileId, "MEASUREMENT", measurement_t);
        // Create the ExtDev/Measurement compound type
        extdevmeasurement_t = H5T.create(H5T.CreateClass.COMPOUND,
            H5T.getSize(string_t) + 2 * H5T.getSize(measurement_t));
        H5T.insert(extdevmeasurement_t, "externalDevice", 0, string_t);
        H5T.insert(extdevmeasurement_t, "measurement", H5T.getSize(string_t), measurement_t);
        H5T.commit(fileId, "EXTDEV_MEASUREMENT", extdevmeasurement_t);
    }

    Interlocked.Increment(ref _openHdf5FileCount);
}
/// <summary>
/// UTF-8 encodes a string and produces a matching fixed-size C-string HDF5 type.
/// </summary>
/// <param name="str">String to encode.</param>
/// <param name="dtype">Out: C_S1 type sized to the encoded byte length.</param>
/// <returns>UTF-8 bytes of <paramref name="str"/>.</returns>
private static byte[] EncodeStringData(string str, out H5DataTypeId dtype)
{
    byte[] encoded = System.Text.Encoding.UTF8.GetBytes(str);
    dtype = H5T.copy(H5T.H5Type.C_S1);
    H5T.setSize(dtype, encoded.Length);
    return encoded;
}
/// <summary>
/// Maps an HDF5 native data type to the corresponding .NET type.
/// </summary>
/// <param name="nativeType">HDF5 native type id.</param>
/// <returns>The equivalent System.Type.</returns>
/// <exception cref="NotSupportedException">The native type has no mapping.</exception>
public static Type getSystemType(H5DataTypeId nativeType)
{
    if (H5T.equal(nativeType, new H5DataTypeId(H5T.H5Type.NATIVE_UCHAR)))
        return typeof(byte);
    if (H5T.equal(nativeType, new H5DataTypeId(H5T.H5Type.NATIVE_SCHAR)))
        return typeof(sbyte);
    if (H5T.equal(nativeType, new H5DataTypeId(H5T.H5Type.NATIVE_SHORT)))
        return typeof(short);
    if (H5T.equal(nativeType, new H5DataTypeId(H5T.H5Type.NATIVE_USHORT)))
        return typeof(ushort);
    if (H5T.equal(nativeType, new H5DataTypeId(H5T.H5Type.NATIVE_INT)))
        return typeof(int);
    if (H5T.equal(nativeType, new H5DataTypeId(H5T.H5Type.NATIVE_UINT)))
        return typeof(uint);
    if (H5T.equal(nativeType, new H5DataTypeId(H5T.H5Type.NATIVE_LLONG)))
        return typeof(long);
    if (H5T.equal(nativeType, new H5DataTypeId(H5T.H5Type.NATIVE_ULLONG)))
        return typeof(ulong);
    if (H5T.equal(nativeType, new H5DataTypeId(H5T.H5Type.NATIVE_FLOAT)))
        return typeof(float);
    if (H5T.equal(nativeType, new H5DataTypeId(H5T.H5Type.NATIVE_DOUBLE)))
        return typeof(double);
    // FIX: removed two unreachable branches from the original — NATIVE_USHORT -> char
    // and NATIVE_UCHAR -> bool duplicated earlier identical checks, so the earlier
    // branch always returned first and these could never execute.
    throw new NotSupportedException("Unsupported native type");
}