/// <summary>
/// Reads a 2-D double dataset from an HDF5 file and returns it transposed
/// (HDF5 row-major [rows, cols] becomes [cols, rows]).
/// </summary>
/// <param name="file">Path of the HDF5 file.</param>
/// <param name="dataSet">Full path of the dataset inside the file.</param>
/// <returns>The transposed field values.</returns>
public static double[,] ReadFieldData2D(string file, string dataSet)
{
    H5FileId fileId = null;
    H5DataSetId fDataSetId = null;
    H5DataTypeId fDataTypeId = null;
    H5DataSpaceId spaceId = null;
    try
    {
        fileId = H5F.open(file, H5F.OpenMode.ACC_RDONLY);
        fDataSetId = H5D.open(fileId, dataSet);
        fDataTypeId = H5D.getType(fDataSetId);
        spaceId = H5D.getSpace(fDataSetId); // keep the id so it can be closed (was leaked before)
        long[] dims = H5S.getSimpleExtentDims(spaceId).ToArray();
        double[,] data = new double[dims[0], dims[1]];
        H5D.read(fDataSetId, fDataTypeId, new H5Array<double>(data));
        // Transpose into [dims[1], dims[0]] order expected by callers.
        double[,] fieldValues = new double[dims[1], dims[0]];
        for (int i = 0; i < dims[1]; i++)
        {
            for (int j = 0; j < dims[0]; j++)
            {
                fieldValues[i, j] = data[j, i];
            }
        }
        return fieldValues;
    }
    finally
    {
        // Release native handles even if a call above throws.
        if (spaceId != null) { H5S.close(spaceId); }
        if (fDataTypeId != null) { H5T.close(fDataTypeId); }
        if (fDataSetId != null) { H5D.close(fDataSetId); }
        if (fileId != null) { H5F.close(fileId); }
    }
}
/// <summary>
/// Loads weights from an HDF5 file. Weights must be saved as a 1-D double
/// vector per layer; the values are converted to float.
/// </summary>
/// <param name="path">Path of the HDF5 file.</param>
/// <param name="dsname">Dataset (layer) name within the file.</param>
/// <returns>The weight vector as floats.</returns>
public static float[] loadH5(string path, string dsname)
{
    // Get file, dataset, space and type ids.
    var h5fid = H5F.open(path, H5F.OpenMode.ACC_RDONLY);
    var h5did = H5D.open(h5fid, dsname);
    var h5space = H5D.getSpace(h5did);
    var h5dtype = H5D.getType(h5did);
    try
    {
        // Dataset size to array.
        var h5size = H5S.getSimpleExtentDims(h5space);
        var S = h5size.ToArray();
        // Read the dataset into a double buffer.
        double[] data = new double[S[0]];
        H5D.read(h5did, h5dtype, new H5Array<double>(data));
        // Convert to float.
        float[] newarray = new float[data.Length];
        Parallel.For(0, data.Length, (k) => { newarray[k] = (float)data[k]; });
        return newarray;
    }
    finally
    {
        // Previously none of these handles were released (native leak).
        H5T.close(h5dtype);
        H5S.close(h5space);
        H5D.close(h5did);
        H5F.close(h5fid);
    }
}
/// <summary>
/// Reads the 3-D float dataset "/FieldData/FD/f0", widens it to double and
/// reverses the axis order ([i,j,k] becomes [k,j,i]).
/// </summary>
/// <param name="fileName">Path of the HDF5 file.</param>
/// <returns>The reordered field values.</returns>
public static double[, ,] ReadFieldData3D(string fileName)
{
    H5FileId fileId = null;
    H5DataSetId fDataSetId = null;
    H5DataTypeId fDataTypeId = null;
    H5DataTypeId floatTypeId = null;
    H5DataSpaceId spaceId = null;
    try
    {
        fileId = H5F.open(fileName, H5F.OpenMode.ACC_RDONLY);
        fDataSetId = H5D.open(fileId, "/FieldData/FD/f0");
        fDataTypeId = H5D.getType(fDataSetId);
        // Keep the comparison type id so it can be closed (was leaked before).
        floatTypeId = H5T.copy(H5T.H5Type.NATIVE_FLOAT);
        if (!H5T.equal(fDataTypeId, floatTypeId))
        {
            Console.WriteLine("Error: Invalid dataset type, expected {0}", H5T.H5Type.NATIVE_FLOAT);
        }
        spaceId = H5D.getSpace(fDataSetId);
        long[] dims = H5S.getSimpleExtentDims(spaceId).ToArray();
        if (dims.Length != 3)
        {
            Console.WriteLine("Error: Invalid field data dimensions");
        }
        float[, ,] data = new float[dims[0], dims[1], dims[2]];
        H5D.read(fDataSetId, fDataTypeId, new H5Array<float>(data));
        // Reorder: reverse the axis order while widening to double.
        double[, ,] fieldValues = new double[dims[2], dims[1], dims[0]];
        for (int i = 0; i < dims[0]; i++)
        {
            for (int j = 0; j < dims[1]; j++)
            {
                for (int k = 0; k < dims[2]; k++)
                {
                    fieldValues[k, j, i] = data[i, j, k];
                }
            }
        }
        return fieldValues;
    }
    finally
    {
        // Previously no handle was closed at all (native leak).
        if (spaceId != null) { H5S.close(spaceId); }
        if (floatTypeId != null) { H5T.close(floatTypeId); }
        if (fDataTypeId != null) { H5T.close(fDataTypeId); }
        if (fDataSetId != null) { H5D.close(fDataSetId); }
        if (fileId != null) { H5F.close(fileId); }
    }
}
/// <summary>
/// Rewrites the values of a dataset (destriped data).
/// </summary>
/// <typeparam name="T">Element type of the dataset.</typeparam>
/// <param name="dataSetName">Name of the dataset.</param>
/// <param name="dataTypeId">Type id of the dataset.</param>
/// <param name="values">Data after destriping.</param>
/// <param name="BrandNo">Band offset into the dataset, starting from 0.</param>
private void ReWriteDataSet<T>(string dataSetName, H5DataTypeId dataTypeId, T[] values, int BrandNo)
{
    H5FileId _h5FileId = null;
    H5DataSetId dataSetId = null;
    H5DataSpaceId spaceid = null;
    try
    {
        _h5FileId = H5F.open(fileName, H5F.OpenMode.ACC_RDWR);
        // Open the dataset that contains the requested band.
        dataSetId = H5D.open(_h5FileId, dataSetName);
        spaceid = H5D.getSpace(dataSetId);
        long[] dims = H5S.getSimpleExtentDims(spaceid); // extent, e.g. [3,1800,2048]
        int rank = H5S.getSimpleExtentNDims(spaceid);   // rank, e.g. 3
        H5S.close(spaceid);
        spaceid = null;
        // Total element count for the dataset.
        int size = 0;
        if (rank == 0)
        {
            size = 1;
        }
        else if (rank == 1)
        {
            size = Convert.ToInt32(dims[0]);
        }
        else if (rank == 2)
        {
            size = Convert.ToInt32(dims[0] * dims[1]);
        }
        else if (rank == 3)
        {
            size = Convert.ToInt32(dims[0] * dims[1] * dims[2]);
        }
        T[] v = new T[size];
        // Read the original data, overlay the corrected band values, write back.
        H5D.read<T>(dataSetId, dataTypeId, new H5Array<T>(v));
        for (int i = BrandNo; i < values.Length; i++)
        {
            v[i] = values[i];
        }
        H5D.write<T>(dataSetId, dataTypeId, new H5Array<T>(v));
    }
    // BUG FIX: the old 'catch (Exception e) { throw new Exception(e.Message); }'
    // destroyed the original exception type and stack trace; let it propagate.
    finally
    {
        // Null guards: if H5F.open failed the ids are still null and close would throw.
        if (spaceid != null) { H5S.close(spaceid); }
        if (dataSetId != null) { H5D.close(dataSetId); }
        if (_h5FileId != null) { H5F.close(_h5FileId); }
    }
}
/// <summary>
/// Populates a HD5DataSetObject from the dataset at <paramref name="pathName"/>
/// inside the given group: stores ids, name, rank, the mapped managed type and
/// the data itself. Errors are reported to the console, not rethrown.
/// </summary>
private void createHD5DataObject(H5GroupId h5GroupId, string pathName, ref HD5DataSetObject dataObject)
{
    H5DataSetId dsId = null;
    H5DataSpaceId spId = null;
    H5DataTypeId tyId = null;
    try
    {
        dataObject.GroupId = h5GroupId;
        dsId = H5D.open(h5GroupId, pathName);
        dataObject.DatasetID = dsId;
        dataObject.DatasetName = pathName;
        spId = H5D.getSpace(dsId);
        var extent = H5S.getSimpleExtentDims(spId);
        tyId = H5D.getType(dsId);
        dataObject.Dim = extent.Length;
        HDF5DotNet.H5T.H5TClass typeClass = H5T.getClass(tyId);
        int byteSize = H5T.getSize(tyId);
        // Signedness only matters for integer types.
        H5T.Sign signedness = H5T.Sign.TWOS_COMPLEMENT;
        if (typeClass == H5T.H5TClass.INTEGER)
        {
            signedness = H5T.getSign(tyId);
        }
        Boolean isVariableStr = H5T.isVariableString(tyId);
        // Map the HDF5 type description onto a managed Type, then read the data.
        Type managedType = getTypeof(typeClass, byteSize, signedness);
        dataObject.DataType = managedType;
        dataObject.Data = readData(dataObject);
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
    }
    finally
    {
        if (dsId != null)
        {
            H5D.close(dsId);
        }
        if (spId != null)
        {
            H5S.close(spId);
        }
        if (tyId != null)
        {
            H5T.close(tyId);
        }
    }
}
/// <summary>
/// Reads the named dataset as a flat array; exceptions are not handled here.
/// </summary>
/// <typeparam name="T">Element type of the dataset.</typeparam>
/// <param name="datasetName">Name of the dataset to read.</param>
/// <param name="bandN">Out via ref: number of bands (1 for a 2-D dataset).</param>
/// <param name="bandH">Out via ref: band height.</param>
/// <param name="bandW">Out via ref: band width.</param>
/// <returns>The dataset values, row-major, bands first.</returns>
public T[] ReadDataArray<T>(String datasetName, ref int bandN, ref int bandH, ref int bandW)
{
    if (String.IsNullOrEmpty(datasetName) || !_datasetNames.Contains(datasetName))
    {
        throw new Exception("未查到指定数据集!");
    }
    H5DataSetId datasetId = null;
    H5DataSpaceId spaceId = null;
    H5DataTypeId typeId = null;
    H5DataTypeId nativeTypeId = null;
    try
    {
        datasetId = H5D.open(_fileId, datasetName);
        spaceId = H5D.getSpace(datasetId);
        long[] dims = H5S.getSimpleExtentDims(spaceId);
        if (dims.Length == 2)
        {
            bandN = 1;
            bandH = (int)dims[0];
            bandW = (int)dims[1];
        }
        else if (dims.Length == 3)
        {
            bandN = (int)dims[0];
            bandH = (int)dims[1];
            bandW = (int)dims[2];
        }
        typeId = H5D.getType(datasetId);
        // BUG FIX: the original overwrote typeId with the native id and leaked
        // the file type id; keep both so both can be closed.
        nativeTypeId = H5T.getNativeType(typeId, H5T.Direction.DEFAULT);
        T[] dv = new T[bandN * bandH * bandW];
        H5D.read<T>(datasetId, nativeTypeId, new H5Array<T>(dv));
        return dv;
    }
    finally
    {
        if (nativeTypeId != null) { H5T.close(nativeTypeId); }
        if (typeId != null) { H5T.close(typeId); }
        if (spaceId != null) { H5S.close(spaceId); }
        if (datasetId != null) { H5D.close(datasetId); }
    }
}
/// <summary>
/// Reads a 2-D dataset into a rectangular array.
/// </summary>
/// <typeparam name="T">Element type (string is not yet supported).</typeparam>
/// <param name="fileId">Open HDF5 file id.</param>
/// <param name="dataSetName">Full dataset path.</param>
/// <returns>The dataset values, [dims0, dims1].</returns>
public static T[,] Read2DArray<T>(this H5FileId fileId, string dataSetName)
{
    var dataset = H5D.open(fileId, dataSetName);
    var space = H5D.getSpace(dataset);
    var dataType = H5D.getType(dataset);
    try
    {
        var dims = H5S.getSimpleExtentDims(space);
        if (typeof(T) == typeof(string))
        {
            // this will also need a string hack...
        }
        T[,] dataArray = new T[dims[0], dims[1]];
        var wrapArray = new H5Array<T>(dataArray);
        H5D.read(dataset, dataType, wrapArray);
        return dataArray;
    }
    finally
    {
        // Previously none of these handles were released (native leak).
        H5T.close(dataType);
        H5S.close(space);
        H5D.close(dataset);
    }
}
//Reading and Printing Methods
/// <summary>
/// Reads a 1-D double dataset; returns null when the link does not exist.
/// </summary>
/// <param name="dataFile">Open HDF5 file id.</param>
/// <param name="path">Dataset path inside the file.</param>
/// <returns>The values, or null if the path is absent.</returns>
private static double[] GetDoubleDataSet(H5FileId dataFile, string path)
{
    if (!H5L.Exists(dataFile, path))
    {
        return null;
    }
    H5DataSetId dataSet = H5D.open(dataFile, path);
    H5DataSpaceId space = H5D.getSpace(dataSet);
    H5DataTypeId tid1 = H5D.getType(dataSet);
    try
    {
        long[] size2 = H5S.getSimpleExtentDims(space);
        long count = size2[0];
        double[] dataArray = new double[count];
        H5D.read(dataSet, tid1, new H5Array<double>(dataArray));
        return dataArray;
    }
    finally
    {
        // Previously none of these handles were released (native leak).
        H5T.close(tid1);
        H5S.close(space);
        H5D.close(dataSet);
    }
}
/// <summary>
/// Reads a 1-D Int32 dataset; returns null when the link does not exist.
/// </summary>
/// <param name="dataFile">Open HDF5 file id.</param>
/// <param name="path">Dataset path inside the file.</param>
/// <returns>The values, or null if the path is absent.</returns>
private static int[] GetInt32DataSet(H5FileId dataFile, string path)
{
    if (!H5L.Exists(dataFile, path))
    {
        return null;
    }
    var dataSet = H5D.open(dataFile, path);
    var space = H5D.getSpace(dataSet);
    H5DataTypeId tid1 = H5D.getType(dataSet);
    try
    {
        var size2 = H5S.getSimpleExtentDims(space);
        long count = size2[0];
        var dataArray = new Int32[count];
        H5D.read(dataSet, tid1, new H5Array<Int32>(dataArray));
        return dataArray;
    }
    finally
    {
        // Previously none of these handles were released (native leak).
        H5T.close(tid1);
        H5S.close(space);
        H5D.close(dataSet);
    }
}
/// <summary>
/// Reads a scalar value from the named dataset. For T == string the raw bytes
/// are read and decoded as ASCII. Any failure yields default(T).
/// </summary>
/// <typeparam name="T">Requested element type.</typeparam>
/// <param name="fileId">Open HDF5 file id.</param>
/// <param name="datasetName">Dataset path inside the file.</param>
/// <returns>The scalar value, or default(T) on error.</returns>
public static T ReadScalar<T>(H5FileId fileId, string datasetName)
{
    H5DataSetId dataset = null;
    H5DataSpaceId space = null;
    H5DataTypeId dataType = null;
    T data = default(T);
    try
    {
        dataset = H5D.open(fileId, datasetName);
        space = H5D.getSpace(dataset);
        dataType = H5D.getType(dataset);
        if (typeof(T) == typeof(string))
        {
            // Fixed-length string: read the raw bytes and decode.
            int stringLength = H5T.getSize(dataType);
            byte[] buffer = new byte[2 * stringLength];
            H5D.read(dataset, dataType, new H5Array<byte>(buffer));
            string stuff = System.Text.ASCIIEncoding.ASCII.GetString(buffer);
            // BUG FIX: the old code cast the whole IEnumerable<T> to T, which
            // always threw InvalidCastException and made the catch return
            // default(T); return the first split part instead.
            return stuff.SplitInParts(stringLength).Select(ss => (T)(object)ss).First();
        }
        H5D.readScalar<T>(dataset, dataType, ref data);
        return data;
    }
    catch
    {
        return default(T);
    }
    finally
    {
        if (dataType != null) { H5T.close(dataType); } // was leaked before
        if (space != null) { H5S.close(space); }
        if (dataset != null) { H5D.close(dataset); }
    }
}
/// <summary>
/// Reads a 1-D dataset. For T == string, fixed-length strings are read as raw
/// bytes, ASCII-decoded and split into equal parts. Returns null on error.
/// </summary>
/// <typeparam name="T">Requested element type.</typeparam>
/// <param name="fileId">Open HDF5 file id.</param>
/// <param name="dataSetName">Dataset path inside the file.</param>
/// <returns>The values, or null on failure.</returns>
public static T[] Read1DArray<T>(H5FileId fileId, string dataSetName)
{
    H5DataSetId dataset = null;
    H5DataSpaceId space = null;
    H5DataTypeId dataType = null;
    try
    {
        dataset = H5D.open(fileId, dataSetName);
        space = H5D.getSpace(dataset);
        long[] dims = H5S.getSimpleExtentDims(space);
        dataType = H5D.getType(dataset);
        if (typeof(T) == typeof(string))
        {
            int stringLength = H5T.getSize(dataType);
            byte[] buffer = new byte[(int)(dims[0]) * stringLength];
            H5D.read(dataset, dataType, new H5Array<byte>(buffer));
            string stuff = System.Text.ASCIIEncoding.ASCII.GetString(buffer);
            return stuff.SplitInParts(stringLength).Select(ss => (T)(object)ss).ToArray();
        }
        T[] dataArray = new T[dims[0]];
        var wrapArray = new H5Array<T>(dataArray);
        H5D.read(dataset, dataType, wrapArray);
        return dataArray;
    }
    catch
    {
        return null;
    }
    finally
    {
        if (dataType != null) { H5T.close(dataType); } // was leaked before
        if (space != null) { H5S.close(space); }
        if (dataset != null) { H5D.close(dataset); }
    }
}
/// <summary>
/// Reads a 1-D dataset. For T == string, fixed-length strings are read as raw
/// bytes, ASCII-decoded and split into equal parts.
/// </summary>
/// <typeparam name="T">Requested element type.</typeparam>
/// <param name="fileId">Open HDF5 file id.</param>
/// <param name="dataSetName">Dataset path inside the file.</param>
/// <returns>The values.</returns>
public static T[] Read1DArray<T>(this H5FileId fileId, string dataSetName)
{
    var dataset = H5D.open(fileId, dataSetName);
    var space = H5D.getSpace(dataset);
    var dataType = H5D.getType(dataset);
    try
    {
        var dims = H5S.getSimpleExtentDims(space);
        if (typeof(T) == typeof(string))
        {
            int stringLength = H5T.getSize(dataType);
            byte[] buffer = new byte[dims[0] * stringLength];
            H5D.read(dataset, dataType, new H5Array<byte>(buffer));
            string stuff = System.Text.ASCIIEncoding.ASCII.GetString(buffer);
            return stuff.SplitInParts(stringLength).Select(ss => (T)(object)ss).ToArray();
        }
        T[] dataArray = new T[dims[0]];
        var wrapArray = new H5Array<T>(dataArray);
        H5D.read(dataset, dataType, wrapArray);
        return dataArray;
    }
    finally
    {
        // Previously none of these handles were released (native leak).
        H5T.close(dataType);
        H5S.close(space);
        H5D.close(dataset);
    }
}
/// <summary>
/// Reads a 3-D dataset from a group. String element types are not supported
/// and yield a [dims0, 0, 0] placeholder, as before.
/// </summary>
/// <typeparam name="T">Requested element type.</typeparam>
/// <param name="groupID">Open HDF5 group id.</param>
/// <param name="name">Dataset name within the group.</param>
/// <returns>The values, [dims0, dims1, dims2].</returns>
private static T[, ,] Read3DArray<T>(H5GroupId groupID, string name)
{
    var dataset = H5D.open(groupID, name);
    var space = H5D.getSpace(dataset);
    var dataType = H5D.getType(dataset);
    try
    {
        var dims = H5S.getSimpleExtentDims(space);
        if (typeof(T) == typeof(string))
        {
            // this will also need a string hack...
            T[, ,] stringPlaceholder = new T[dims[0], 0, 0];
            return stringPlaceholder;
        }
        T[, ,] dataArray = new T[dims[0], dims[1], dims[2]];
        var wrapArray = new H5Array<T>(dataArray);
        H5D.read(dataset, dataType, wrapArray);
        return dataArray;
    }
    finally
    {
        // Previously none of these handles were released (native leak).
        H5T.close(dataType);
        H5S.close(space);
        H5D.close(dataset);
    }
}
/// <summary>
/// Reads a 1-D dataset from a group. String element types are not supported
/// and yield an unread array of dims[0] defaults, as before.
/// </summary>
/// <typeparam name="T">Requested element type.</typeparam>
/// <param name="fileId">Open HDF5 group id.</param>
/// <param name="dataSetName">Dataset name within the group.</param>
/// <returns>The values (or an empty-default array for strings).</returns>
private static T[] Read1DArray<T>(H5GroupId fileId, string dataSetName)
{
    var dataset = H5D.open(fileId, dataSetName);
    var space = H5D.getSpace(dataset);
    var dataType = H5D.getType(dataset);
    try
    {
        var dims = H5S.getSimpleExtentDims(space);
        T[] dataArray = new T[dims[0]];
        if (typeof(T) != typeof(string))
        {
            var wrapArray = new H5Array<T>(dataArray);
            H5D.read(dataset, dataType, wrapArray);
        }
        // typeof(T) == string: this will also need a string hack...
        return dataArray;
    }
    finally
    {
        H5T.close(dataType); // was leaked before
        H5S.close(space);    // was leaked before
        H5D.close(dataset);
    }
}
/// <summary>
/// Diagnostic reader: prints rank/dims of "/group/dataset", its "int" and
/// "string" attributes, and (for rank 2) the full int matrix.
/// </summary>
/// <param name="filePath">Path of the HDF5 file.</param>
private static void ReadFile(string filePath)
{
    var file = H5F.open(filePath, H5F.OpenMode.ACC_RDONLY);
    var dataSet = H5D.open(file, "/group/dataset");
    var fileSpace = H5D.getSpace(dataSet);
    var rank = H5S.getSimpleExtentNDims(fileSpace);
    WriteLine("Rank: {0}", rank);
    var dims = H5S.getSimpleExtentDims(fileSpace);
    Write("Dims:");
    foreach (var d in dims)
    {
        Write(" {0}", d);
    }
    WriteLine();
    H5S.close(fileSpace);

    // Scalar int attribute.
    var ints = new int[1];
    var intAttribute = H5A.openName(dataSet, "int");
    var intType = H5A.getType(intAttribute); // keep the id so it can be closed (was leaked)
    H5A.read(intAttribute, intType, new H5Array<int>(ints));
    WriteLine("int: {0}", ints[0]);
    H5T.close(intType);
    H5A.close(intAttribute);

    // Fixed-length string attribute: read raw bytes and decode as ASCII.
    var stringAttribute = H5A.openName(dataSet, "string");
    var stringType = H5A.getType(stringAttribute);
    var stringSize = H5T.getSize(stringType);
    WriteLine("string length: {0}", stringSize);
    var buffer = new byte[stringSize];
    H5A.read(stringAttribute, stringType, new H5Array<byte>(buffer));
    WriteLine("string: {0}", Encoding.ASCII.GetString(buffer));
    H5T.close(stringType);
    H5A.close(stringAttribute);

    if (rank == 2)
    {
        var data = new int[dims[0], dims[1]];
        var dataType = H5D.getType(dataSet); // keep the id so it can be closed (was leaked)
        H5D.read(dataSet, dataType, new H5Array<int>(data));
        H5T.close(dataType);
        for (int i = 0; i < data.GetLength(0); ++i)
        {
            for (int j = 0; j < data.GetLength(1); ++j)
            {
                Write(" {0}", data[i, j]);
            }
            WriteLine();
        }
    }
    H5D.close(dataSet);
    H5F.close(file);
}
/// <summary>
/// Returns the number of dark frames in an HDF5 tomography file, probing the
/// "/dark" layout first and the DataExchange "/exchange/data_dark" layout
/// second. Returns 0 when neither dataset can be read.
/// </summary>
public static int GetNumberOfDarks(string filename)
{
    H5FileId h5File = null;
    H5DataSetId darkSet = null;
    H5DataSpaceId darkSpace = null;
    long[] extent;

    if (!File.Exists(filename))
    {
        throw new Exception("File not found.");
    }

    // First layout: darks stored under "/dark".
    try
    {
        h5File = H5F.open(filename, H5F.OpenMode.ACC_RDONLY);
        darkSet = H5D.open(h5File, "/dark");
        darkSpace = H5D.getSpace(darkSet);
        extent = H5S.getSimpleExtentDims(darkSpace);
        return (int)extent[1];
    }
    catch { }
    finally
    {
        if (darkSpace != null) { H5S.close(darkSpace); darkSpace = null; }
        if (darkSet != null) { H5D.close(darkSet); darkSet = null; }
        if (h5File != null) { H5F.close(h5File); h5File = null; }
    }

    // Second layout: DataExchange files keep darks at "/exchange/data_dark".
    try
    {
        h5File = H5F.open(filename, H5F.OpenMode.ACC_RDONLY);
        darkSet = H5D.open(h5File, "/exchange/data_dark");
        darkSpace = H5D.getSpace(darkSet);
        extent = H5S.getSimpleExtentDims(darkSpace);
        return (int)extent[1];
    }
    catch
    {
        return 0;
    }
    finally
    {
        if (darkSpace != null) { H5S.close(darkSpace); }
        if (darkSet != null) { H5D.close(darkSet); }
        if (h5File != null) { H5F.close(h5File); }
    }
}
/// <summary>
/// Returns one extent of the projection dataset, probing the "/tomo" layout
/// first and the DataExchange "/exchange/data" layout second. Throws when
/// neither dataset can be read.
/// </summary>
private static int GetDim(string filename, int dim)
{
    H5FileId h5File = null;
    H5DataSetId projSet = null;
    H5DataSpaceId projSpace = null;
    long[] extent;

    if (!File.Exists(filename))
    {
        throw new Exception("File not found.");
    }

    // First layout: projections stored under "/tomo".
    try
    {
        h5File = H5F.open(filename, H5F.OpenMode.ACC_RDONLY);
        projSet = H5D.open(h5File, "/tomo");
        projSpace = H5D.getSpace(projSet);
        extent = H5S.getSimpleExtentDims(projSpace);
        return (int)extent[dim];
    }
    catch { }
    finally
    {
        if (projSpace != null) { H5S.close(projSpace); projSpace = null; }
        if (projSet != null) { H5D.close(projSet); projSet = null; }
        if (h5File != null) { H5F.close(h5File); h5File = null; }
    }

    // Second layout: DataExchange files keep projections at "/exchange/data".
    try
    {
        h5File = H5F.open(filename, H5F.OpenMode.ACC_RDONLY);
        projSet = H5D.open(h5File, "/exchange/data");
        projSpace = H5D.getSpace(projSet);
        extent = H5S.getSimpleExtentDims(projSpace);
        return (int)extent[dim];
    }
    catch
    {
        throw new Exception("Not a valid HDF5 file.");
    }
    finally
    {
        if (projSpace != null) { H5S.close(projSpace); }
        if (projSet != null) { H5D.close(projSet); }
        if (h5File != null) { H5F.close(h5File); }
    }
}
/// <summary>
/// Reads the value of the named attribute from an HDF5 object and returns it
/// rendered as a string (via arrayToString). Exceptions are NOT handled here.
/// </summary>
/// <param name="obj">Object (file/group/dataset) carrying the attribute.</param>
/// <param name="attributeName">Name of the attribute to read.</param>
/// <returns>String rendering of the value, or null when the attribute cannot be opened.</returns>
private String getAttributeValue(H5ObjectWithAttributes obj, String attributeName)
{
    H5AttributeId attId = null;
    attId = H5A.open(obj, attributeName);
    if (attId == null)
    {
        return (null);
    }
    H5DataTypeId typeId = null;
    H5DataTypeId dtId = null;
    H5AttributeInfo attInfo = null;
    H5DataSpaceId spaceId = null;
    object attributeVal = null;
    typeId = H5A.getType(attId);
    attInfo = H5A.getInfo(attId);
    dtId = H5A.getType(attId); // second type handle, used below for the stored element size
    spaceId = H5A.getSpace(attId);
    int dataSize = H5T.getSize(dtId);
    // NOTE(review): the id from the first H5A.getType is replaced here and never
    // closed — only the native id is released at the bottom (handle leak).
    typeId = H5T.getNativeType(typeId, H5T.Direction.DEFAULT);
    H5T.H5TClass typeClass = H5T.getClass(typeId);
    long[] dims = H5S.getSimpleExtentDims(spaceId);
    // A scalar attribute reports no dims; treat it as a single element.
    if (dims.Length == 0)
    {
        dims = new long[1];
        dims[0] = 1;
    }
    // Dispatch on HDF5 type class; integers branch further on byte width and sign.
    // NOTE(review): only dims[0] elements are read — presumably attributes here
    // are 1-D; a multi-dimensional attribute would be read short.
    switch (typeClass)
    {
        case H5T.H5TClass.STRING:
            long size = attInfo.dataSize;
            byte[] chars = readAttribute <byte>(size, attId, typeId);
            attributeVal = Encoding.ASCII.GetString(chars);
            break;
        case H5T.H5TClass.INTEGER:
            H5T.Sign sign = H5T.Sign.TWOS_COMPLEMENT;
            sign = H5T.getSign(typeId);
            switch (dataSize)
            {
                case 1:
                    attributeVal = readAttribute <byte>(dims[0], attId, typeId);
                    break;
                case 2:
                    switch (sign)
                    {
                        case H5T.Sign.TWOS_COMPLEMENT:
                            attributeVal = readAttribute <Int16>(dims[0], attId, typeId);
                            break;
                        case H5T.Sign.UNSIGNED:
                            attributeVal = readAttribute <UInt16>(dims[0], attId, typeId);
                            break;
                    }
                    break;
                case 4:
                    switch (sign)
                    {
                        case H5T.Sign.TWOS_COMPLEMENT:
                            attributeVal = readAttribute <Int32>(dims[0], attId, typeId);
                            break;
                        case H5T.Sign.UNSIGNED:
                            attributeVal = readAttribute <UInt32>(dims[0], attId, typeId);
                            break;
                    }
                    break;
                case 8:
                    switch (sign)
                    {
                        case H5T.Sign.TWOS_COMPLEMENT:
                            attributeVal = readAttribute <Int64>(dims[0], attId, typeId);
                            break;
                        case H5T.Sign.UNSIGNED:
                            attributeVal = readAttribute <UInt64>(dims[0], attId, typeId);
                            break;
                    }
                    break;
            }
            break;
        case H5T.H5TClass.FLOAT:
            switch (dataSize)
            {
                case 4:
                    attributeVal = readAttribute <float>(dims[0], attId, typeId);
                    break;
                case 8:
                    attributeVal = readAttribute <double>(dims[0], attId, typeId);
                    break;
            }
            break;
    }
    // Release native handles. (Unmatched type classes leave attributeVal null.)
    if (spaceId != null)
    {
        H5S.close(spaceId);
    }
    if (attId != null)
    {
        H5A.close(attId);
    }
    if (typeId != null)
    {
        H5T.close(typeId);
    }
    if (dtId != null)
    {
        H5T.close(dtId);
    }
    return (arrayToString(attributeVal));
}
/// <summary>
/// Reads one skim matrix from an OMX/HDF5 file into a scaled ushort matrix.
/// The roster name is "omxFile/datasetPath": the part before the first '/' is
/// the HDF5 file, the remainder (keeping the leading '/') is the dataset path.
/// </summary>
/// <param name="filename">Combined "omxFile/datasetPath" name from the roster.</param>
/// <param name="field">Not used by this reader; kept for interface compatibility.</param>
/// <param name="scale">Multiplier applied to each double before conversion to ushort.</param>
/// <returns>The populated SkimMatrix.</returns>
public SkimMatrix Read(string filename, int field, float scale)
{
    Console.WriteLine("Reading {0}", filename);
    int hdf5NameEnd = filename.IndexOf("/"); // the first part of the name in the roster file is the omx/hdf5 file
    string HDFName = filename.Substring(0, hdf5NameEnd);
    // Rename filename to be only the name of the skim matrix inside of the skim
    // file; skims are stored in the "data" folder within the omx/hdf5 file.
    filename = filename.Substring(hdf5NameEnd);
    string hdfFile = _path + "\\" + HDFName;
    var dataFile = H5F.open(hdfFile, H5F.OpenMode.ACC_RDONLY);
    var dataSet = H5D.open(dataFile, filename);
    var space = H5D.getSpace(dataSet);
    H5DataTypeId tid1 = H5D.getType(dataSet);
    try
    {
        var size2 = H5S.getSimpleExtentDims(space);
        long nRows = size2[0];
        long nCols = size2[1];
        // If the count in the hdf5 file is larger than the number of tazs in the
        // mapping, ignore the values over the total number of tazs in the mapping
        // because these are not valid zones.
        long numZones = _mapping.Count();
        _matrix = new ushort[numZones][];
        for (var i = 0; i < numZones; i++)
        {
            _matrix[i] = new ushort[numZones];
        }
        // OMX is a square matrix of doubles. The OMX lookup folder's zone-mapping
        // vector is ignored since DaySim has its own; it is assumed the matrix
        // does not skip rows/cols and each index corresponds to an actual zone.
        // Scaling should be set to TRUE since OMX stores doubles (not scaled integers).
        var dataArray = new double[nRows, nCols];
        var wrapArray = new H5Array<double>(dataArray);
        H5D.read(dataSet, tid1, wrapArray);
        for (var row = 0; row < nRows; row++)
        {
            if (_mapping.ContainsKey(row + 1))
            {
                for (var col = 0; col < nCols; col++)
                {
                    if (_mapping.ContainsKey(col + 1))
                    {
                        var value = dataArray[row, col] * scale;
                        if (value > 0)
                        {
                            // Clamp into the ushort range.
                            if (value > ushort.MaxValue - 1)
                            {
                                value = ushort.MaxValue - 1;
                            }
                            _matrix[_mapping[row + 1]][_mapping[col + 1]] = (ushort)value;
                        }
                    }
                }
            }
        }
        var skimMatrix = new SkimMatrix(_matrix);
        return skimMatrix;
    }
    finally
    {
        // Previously none of these handles were released (native leak).
        H5T.close(tid1);
        H5S.close(space);
        H5D.close(dataSet);
        H5F.close(dataFile);
    }
}
// Tests basic dataspace manipulation: creates simple dataspaces, verifies
// rank/dims/maxdims, then checks that datasets with extent-less dataspaces
// cannot be created, written, or read. Failures increment 'nerrors'.
static void test_h5s_basic()
{
    try
    {
        int rank; // Logical rank of dataspace
        hssize_t[] dims1 = { SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3 };
        hssize_t[] dims2 = { SPACE2_DIM1, SPACE2_DIM2, SPACE2_DIM3, SPACE2_DIM4 };
        hssize_t[] max2 = { SPACE2_MAX1, SPACE2_MAX2, SPACE2_MAX3, SPACE2_MAX4 };
        hssize_t[] tmax = new hssize_t[4];

        // Output message about test being performed.
        Console.Write("Testing dataspace manipulation");

        // Create a simple dataspace and check its rank.
        H5DataSpaceId sid1 = H5S.create_simple(SPACE1_RANK, dims1);
        rank = H5S.getSimpleExtentNDims(sid1);
        if (rank != SPACE1_RANK)
        {
            Console.WriteLine("\ntest_h5s_basic: Incorrect rank {0}, should be SPACE1_RANK({1})", rank, SPACE1_RANK);
            nerrors++;
        }

        // Check its dims.
        hssize_t[] tdims1 = H5S.getSimpleExtentDims(sid1);
        int i;
        for (i = 0; i < rank; i++)
        {
            if (tdims1[i] != dims1[i])
            {
                Console.WriteLine("\ntest_h5s_basic: read tdims1[{0}] = {1} differs from dims1[{0}] = {2}", i, tdims1[i], dims1[i]);
                nerrors++;
            }
        }

        // Create another simple dataspace and check its rank, dims, and maxdims.
        H5DataSpaceId sid2 = H5S.create_simple(SPACE2_RANK, dims2, max2);
        rank = H5S.getSimpleExtentNDims(sid2);
        if (rank != SPACE2_RANK)
        {
            // BUG FIX: this message previously reported SPACE1_RANK (copy/paste).
            Console.WriteLine("\ntest_h5s_basic: Incorrect rank {0}, should be SPACE2_RANK({1})", rank, SPACE2_RANK);
            nerrors++;
        }
        hssize_t[] tdims2 = H5S.getSimpleExtentDims(sid2);
        tmax = H5S.getSimpleExtentMaxDims(sid2);
        for (i = 0; i < rank; i++)
        {
            if (tdims2[i] != dims2[i])
            {
                Console.WriteLine("\ntest_h5s_basic: read tdims2[{0}] = {1} differs from dims2[{0}] = {2}", i, tdims2[i], dims2[i]);
                nerrors++;
            }
        }
        for (i = 0; i < rank; i++)
        {
            if (tmax[i] != max2[i])
            {
                Console.WriteLine("\ntest_h5s_basic: read tmax[{0}] = {1} differs from max2[{0}] = {2}", i, tmax[i], max2[i]);
                nerrors++;
            }
        }

        // Close all dataspaces.
        H5S.close(sid1);
        H5S.close(sid2);

        /*
         * Try writing simple dataspaces without setting their extents.
         */
        // Create the file.
        H5FileId fid1 = H5F.create(BASICFILE, H5F.CreateMode.ACC_TRUNC);

        // Create dataspaces for testing.
        dims1[0] = SPACE1_DIM1;
        sid1 = H5S.create(H5S.H5SClass.SIMPLE);
        sid2 = H5S.create_simple(1, dims1, dims1);

        // This dataset's space has no extent; it should not be created.
        try
        {
            H5DataSetId dset1 = H5D.create(fid1, BASICDATASET, H5T.H5Type.NATIVE_INT, sid1);
            // should fail, but didn't, print an error message.
            Console.WriteLine("\ntest_h5s_basic: Attempting to create a dataset whose space has no extent.");
            nerrors++;
        }
        catch (H5DcreateException) { } // does nothing, it should fail

        // Create dataset with good dataspace.
        H5DataSetId dataset = H5D.create(fid1, BASICDATASET2, H5T.H5Type.NATIVE_INT, sid2);

        // Try some writes with the bad dataspace (sid1).
        try
        {
            hssize_t nelems = 10; // Number of dataspace elements
            H5D.writeScalar(dataset, new H5DataTypeId(H5T.H5Type.NATIVE_INT), sid1, sid2, new H5PropertyListId(H5P.Template.DEFAULT), ref nelems);
            // should fail, but didn't, print an error message.
            Console.WriteLine("\ntest_h5s_basic: Attempting to write to a dataset with space that has no extent.");
            nerrors++;
        }
        catch (H5DwriteException) { } // does nothing, it should fail

        // Make sure that dataspace reads using the bad dataspace fail.
        try
        {
            hssize_t n = 10; // Number of dataspace elements
            H5D.readScalar(dataset, new H5DataTypeId(H5T.H5Type.NATIVE_INT), sid1, sid2, new H5PropertyListId(H5P.Template.DEFAULT), ref n);
            // should fail, but didn't, print an error message.
            Console.WriteLine("\ntest_h5s_basic: Attempting to read a dataset with space that has no extent.");
            nerrors++;
        }
        catch (H5DreadException) { } // does nothing, it should fail

        // Close objects and file.
        H5D.close(dataset);
        H5S.close(sid1);
        H5S.close(sid2);
        H5F.close(fid1);
        Console.WriteLine("\t\t\t\tPASSED");
    } // end of try
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_h5s_basic
// Tests scalar dataspaces: writes a scalar uint dataset, reopens the file and
// verifies rank, extent, and the stored value. Failures increment 'nerrors'.
static void test_h5s_scalar()
{
    try
    {
        hssize_t[] tdims = new hssize_t[3];

        // Output message about test being performed.
        Console.Write("Testing dataspace during writing");

        // Create the file.
        H5FileId fid1 = H5F.create(DATAFILE, H5F.CreateMode.ACC_TRUNC);

        // Create scalar dataspace (null dims — SPACE3_RANK presumably 0; confirm constant).
        H5DataSpaceId sid1 = H5S.create_simple(SPACE3_RANK, null);

        // Get the logical rank of dataspace and verify it.
        int rank = H5S.getSimpleExtentNDims(sid1);
        if (rank != SPACE3_RANK)
        {
            Console.WriteLine("\ntest_h5s_scalar: incorrect rank {0}, should be SPACE3_RANK({1})", rank, SPACE3_RANK);
            nerrors++;
        }

        // Create and write the dataset.
        uint space3_data = 65;
        H5DataSetId dataset = H5D.create(fid1, "Dataset1", H5T.H5Type.NATIVE_UINT, sid1);
        H5D.writeScalar(dataset, new H5DataTypeId(H5T.H5Type.NATIVE_UINT), ref space3_data);

        // Close objects and file.
        H5D.close(dataset);
        H5S.close(sid1);
        H5F.close(fid1);

        /* Open the file and verify the dataspace. */
        // Open the file.
        fid1 = H5F.open(DATAFILE, H5F.OpenMode.ACC_RDWR);

        // Create a dataset.
        dataset = H5D.open(fid1, "Dataset1");

        // Get dataset's dataspace.
        sid1 = H5D.getSpace(dataset);
        rank = H5S.getSimpleExtentNDims(sid1);
        if (rank != SPACE3_RANK)
        {
            Console.WriteLine("\ntest_h5s_scalar: incorrect rank {0}", rank);
        }
        tdims = H5S.getSimpleExtentDims(sid1);
        //Console.WriteLine("tdims[0] = {0}, tdims[1] = {1}", tdims[0], tdims[1]);
        // A scalar dataspace must report rank 0.
        if (rank != 0)
        {
            Console.WriteLine("\ntest_h5s_scalar: incorrect rank {0}", rank);
        }

        // Read the dataset back and compare against what was written.
        uint rdata = 0;
        H5D.readScalar(dataset, new H5DataTypeId(H5T.H5Type.NATIVE_UINT), ref rdata);
        if (rdata != space3_data)
        {
            Console.WriteLine("\ntest_h5s_scalar: incorrect data {0}, should be {1}", rdata, space3_data);
        }

        // Close objects.
        H5D.close(dataset);
        H5S.close(sid1);
        H5F.close(fid1);
        Console.WriteLine("\t\t\tPASSED");
    } // end of try
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_h5s_scalar_write
/// <summary>
/// Reads the value of the named attribute from an HDF5 object and returns it
/// as a managed array (or a string for string-typed attributes).
/// </summary>
/// <param name="obj">Object (file/group/dataset) carrying the attribute.</param>
/// <param name="attributeName">Name of the attribute to read.</param>
/// <returns>The attribute value, or null if the attribute cannot be opened or
/// its type class is not handled.</returns>
private object GetAttributeValue(H5ObjectWithAttributes obj, string attributeName)
{
    H5AttributeId attId = null;
    attId = H5A.open(obj, attributeName);
    if (attId == null)
    {
        return (null);
    }
    H5DataTypeId typeId = null;
    H5DataTypeId dtId = null;
    H5AttributeInfo attInfo = null;
    H5DataSpaceId spaceId = null;
    H5DataTypeId oldTypeId = null;
    object retObject = null;
    try
    {
        typeId = H5A.getType(attId);
        attInfo = H5A.getInfo(attId);
        dtId = H5A.getType(attId); // second type handle, used for the stored element size
        spaceId = H5A.getSpace(attId);
        int dataSize = H5T.getSize(dtId);
        // BUG FIX: this assignment was commented out, so oldTypeId stayed null and
        // H5T.getSign(oldTypeId) crashed for every INTEGER attribute. Keeping the
        // pre-native id also lets the finally block close it (was leaked).
        oldTypeId = typeId;
        typeId = H5T.getNativeType(typeId, H5T.Direction.DEFAULT);
        H5T.H5TClass typeClass = H5T.getClass(typeId);
        long[] dims = H5S.getSimpleExtentDims(spaceId);
        // Scalar attributes report no dims; total element count otherwise.
        long dimSize = 1;
        if (dims.Length == 0)
        {
            dimSize = 1;
        }
        else
        {
            foreach (long dim in dims)
            {
                dimSize *= dim;
            }
        }
        // Dispatch on HDF5 type class; integers branch on byte width and sign.
        switch (typeClass)
        {
            case H5T.H5TClass.STRING:
                long size = attInfo.dataSize;
                byte[] chars = ReadArray<byte>(size, attId, typeId);
                retObject = Encoding.ASCII.GetString(chars);
                break;
            case H5T.H5TClass.INTEGER:
                H5T.Sign sign = H5T.Sign.TWOS_COMPLEMENT;
                sign = H5T.getSign(oldTypeId);
                switch (dataSize)
                {
                    case 1:
                        retObject = ReadArray<byte>(dimSize, attId, typeId);
                        break;
                    case 2:
                        switch (sign)
                        {
                            case H5T.Sign.TWOS_COMPLEMENT:
                                retObject = ReadArray<Int16>(dimSize, attId, typeId);
                                break;
                            case H5T.Sign.UNSIGNED:
                                retObject = ReadArray<UInt16>(dimSize, attId, typeId);
                                break;
                        }
                        break;
                    case 4:
                        switch (sign)
                        {
                            case H5T.Sign.TWOS_COMPLEMENT:
                                retObject = ReadArray<Int32>(dimSize, attId, typeId);
                                break;
                            case H5T.Sign.UNSIGNED:
                                retObject = ReadArray<UInt32>(dimSize, attId, typeId);
                                break;
                        }
                        break;
                    case 8:
                        switch (sign)
                        {
                            case H5T.Sign.TWOS_COMPLEMENT:
                                retObject = ReadArray<Int64>(dimSize, attId, typeId);
                                break;
                            case H5T.Sign.UNSIGNED:
                                retObject = ReadArray<UInt64>(dimSize, attId, typeId);
                                break;
                        }
                        break;
                }
                break;
            case H5T.H5TClass.FLOAT:
                switch (dataSize)
                {
                    case 4:
                        retObject = ReadArray<float>(dimSize, attId, typeId);
                        break;
                    case 8:
                        retObject = ReadArray<double>(dimSize, attId, typeId);
                        break;
                }
                break;
        }
        return (retObject);
    }
    finally
    {
        if (spaceId != null)
        {
            H5S.close(spaceId);
        }
        if (attId != null)
        {
            H5A.close(attId);
        }
        if (oldTypeId != null)
        {
            H5T.close(oldTypeId);
        }
        if (typeId != null)
        {
            H5T.close(typeId);
        }
        if (dtId != null)
        {
            H5T.close(dtId);
        }
    }
}
/// <summary>
/// Reads one band of raw data from an "old format" dataset, reporting the band
/// geometry and element type. (Original Chinese comments translated to English.)
/// </summary>
/// <param name="dataSetName">Name of the dataset that contains the band.</param>
/// <param name="bandIndex">Index of the band to extract.</param>
/// <param name="bandWidth">Out: band width.</param>
/// <param name="bandHeight">Out: band height.</param>
/// <param name="dataType">Out: element type of the returned array.</param>
/// <param name="retObject">Out: the band data as a typed array, or null.</param>
private void ReadOldDataSetData(string dataSetName, int bandIndex, out int bandWidth, out int bandHeight, out enumDataType dataType, out object retObject)
{
    bandHeight = bandWidth = 0;
    dataType = enumDataType.UInt16;
    retObject = null;
    H5FileId _h5FileId = null;
    H5DataSpaceId spaceid = null;
    H5DataSetId dataSetId = null;
    try
    {
        _h5FileId = H5F.open(fileName, H5F.OpenMode.ACC_RDONLY);
        // Open the dataset that contains the requested band.
        dataSetId = H5D.open(_h5FileId, dataSetName);
        spaceid = H5D.getSpace(dataSetId);
        long[] dims = H5S.getSimpleExtentDims(spaceid); // extent of the data array, e.g. [3,1800,2048]
        int rank = H5S.getSimpleExtentNDims(spaceid);   // rank of the data array, e.g. 3
        int size = 0;
        if (rank == 1)
        {
            // NOTE(review): a 1-D dataset is sized as a single 1x1 element here
            // (size = 1 regardless of dims[0]) — confirm this is intended.
            bandHeight = bandWidth = 1;
            size = bandWidth * bandHeight * rank;
        }
        else if (rank == 2)
        {
            bandWidth = Convert.ToInt32(dims[0]);
            bandHeight = Convert.ToInt32(dims[1]);
            size = bandWidth * bandHeight;
        }
        else if (rank == 3)
        {
            // Sort the three extents so the smallest becomes the band count and
            // the two largest become width/height.
            List <long> r = dims.ToList <long>();
            r.Sort();
            long[] temp = r.ToArray();
            bandWidth = Convert.ToInt32(temp[1]);
            bandHeight = Convert.ToInt32(temp[2]);
            size = bandWidth * bandHeight * Convert.ToInt32(temp[0]);
        }
        int outSize = bandWidth * bandHeight; // elements per band in the output
        H5DataTypeId typeId = H5D.getType(dataSetId);
        H5T.H5TClass typeClass = H5T.getClass(typeId); // type class of the dataset
        int dataSize = H5T.getSize(typeId);
        // NOTE(review): typeId and each newTypeId created below are never
        // closed — native handle leak.
        H5DataTypeId newTypeId = null;
        // Dispatch on type class, byte width and (for integers) signedness;
        // each arm reads with a matching native memory type.
        switch (typeClass)
        {
            case H5T.H5TClass.INTEGER:
                H5T.Sign sign = H5T.Sign.TWOS_COMPLEMENT;
                sign = H5T.getSign(typeId);
                switch (dataSize)
                {
                    case 1:
                        newTypeId = H5T.copy(H5T.H5Type.NATIVE_B8);
                        retObject = ReadArray <byte>(size, dataSetId, newTypeId, bandIndex, outSize);
                        dataType = enumDataType.Byte;
                        break;
                    case 2:
                        switch (sign)
                        {
                            case H5T.Sign.TWOS_COMPLEMENT:
                                newTypeId = H5T.copy(H5T.H5Type.NATIVE_SHORT);
                                retObject = ReadArray <Int16>(size, dataSetId, newTypeId, bandIndex, outSize);
                                dataType = enumDataType.Int16;
                                break;
                            case H5T.Sign.UNSIGNED:
                                newTypeId = H5T.copy(H5T.H5Type.NATIVE_USHORT);
                                retObject = ReadArray <UInt16>(size, dataSetId, newTypeId, bandIndex, outSize);
                                dataType = enumDataType.UInt16;
                                break;
                        }
                        break;
                    case 4:
                        switch (sign)
                        {
                            case H5T.Sign.TWOS_COMPLEMENT:
                                newTypeId = H5T.copy(H5T.H5Type.NATIVE_INT);
                                retObject = ReadArray <Int32>(size, dataSetId, newTypeId, bandIndex, outSize);
                                dataType = enumDataType.Int32;
                                break;
                            case H5T.Sign.UNSIGNED:
                                newTypeId = H5T.copy(H5T.H5Type.NATIVE_UINT);
                                retObject = ReadArray <UInt32>(size, dataSetId, newTypeId, bandIndex, outSize);
                                dataType = enumDataType.UInt32;
                                break;
                        }
                        break;
                    case 8:
                        switch (sign)
                        {
                            case H5T.Sign.TWOS_COMPLEMENT:
                                newTypeId = H5T.copy(H5T.H5Type.NATIVE_LONG);
                                retObject = ReadArray <Int64>(size, dataSetId, newTypeId, bandIndex, outSize);
                                dataType = enumDataType.Int64;
                                break;
                            case H5T.Sign.UNSIGNED:
                                newTypeId = H5T.copy(H5T.H5Type.NATIVE_ULONG);
                                retObject = ReadArray <UInt64>(size, dataSetId, newTypeId, bandIndex, outSize);
                                dataType = enumDataType.UInt64;
                                break;
                        }
                        break;
                }
                break;
            case H5T.H5TClass.FLOAT:
                switch (dataSize)
                {
                    case 4:
                        newTypeId = H5T.copy(H5T.H5Type.NATIVE_FLOAT);
                        retObject = ReadArray <float>(size, dataSetId, newTypeId, bandIndex, outSize);
                        dataType = enumDataType.Float;
                        break;
                    case 8:
                        newTypeId = H5T.copy(H5T.H5Type.NATIVE_DOUBLE);
                        retObject = ReadArray <double>(size, dataSetId, newTypeId, bandIndex, outSize);
                        dataType = enumDataType.Double;
                        break;
                }
                break;
        }
    }
    finally
    {
        // NOTE(review): if H5F.open fails these ids are still null and close
        // will throw from the finally block — consider null guards.
        H5S.close(spaceid);
        H5D.close(dataSetId);
        H5F.close(_h5FileId);
    }
}
/// <summary>
/// Returns the first extent (row count) of this object's dataset (Id).
/// </summary>
/// <returns>Number of rows in the dataset.</returns>
protected long FindNumberOfRows()
{
    // Keep the dataspace id so it can be closed (was leaked before).
    H5DataSpaceId space = H5D.getSpace(Id);
    try
    {
        var num_rows_data = H5S.getSimpleExtentDims(space);
        return num_rows_data[0];
    }
    finally
    {
        H5S.close(space);
    }
}
/// <summary>
/// Collects the basic properties (type, size, dims, storage size) and every
/// attribute of the named dataset into a name-to-value dictionary. Returns
/// null when the dataset is unknown or cannot be opened.
/// </summary>
public Dictionary<string, string> GetAttributes(string datasetName)
{
    if (string.IsNullOrEmpty(datasetName) || !_datasetNames.Contains(datasetName))
    {
        return null;
    }
    H5DataSetId dsId = null;
    H5GroupId grpId = null;
    H5DataTypeId dtId = null;
    H5DataSpaceId dspId = null;
    try
    {
        // A '/' in the name means the dataset lives inside a group.
        int slashPos = datasetName.LastIndexOf('/');
        if (slashPos == -1)
        {
            dsId = H5D.open(_h5FileId, datasetName);
        }
        else
        {
            string grpName = datasetName.Substring(0, slashPos + 1);
            string shortName = datasetName.Substring(slashPos + 1);
            grpId = H5G.open(_h5FileId, grpName);
            dsId = H5D.open(grpId, shortName);
        }
        if (dsId == null)
        {
            return null;
        }
        Dictionary<string, string> attValues = new Dictionary<string, string>();
        dtId = H5D.getType(dsId);
        H5T.H5TClass typeClass = H5T.getClass(dtId);
        int typeSize = H5T.getSize(dtId);
        dspId = H5D.getSpace(dsId);
        long[] extent = H5S.getSimpleExtentDims(dspId);
        long storage = H5D.getStorageSize(dsId);
        attValues.Add("DataSetName", datasetName);
        attValues.Add("DataType", typeClass.ToString());
        attValues.Add("DataTypeSize", typeSize.ToString() + "Byte");
        attValues.Add("Dims", String.Join("*", extent));
        attValues.Add("StorageSize", storage.ToString() + "Byte");
        // Append every attribute attached to the dataset.
        int attrTotal = H5A.getNumberOfAttributes(dsId);
        for (int idx = 0; idx < attrTotal; idx++)
        {
            string attName = H5A.getNameByIndex(dsId, "/" + datasetName, H5IndexType.NAME, H5IterationOrder.NATIVE, (ulong)idx);
            attValues.Add(attName, ReadAttributeValue(dsId, attName));
        }
        return attValues;
    }
    finally
    {
        if (dspId != null)
        {
            H5S.close(dspId);
        }
        if (dtId != null)
        {
            H5T.close(dtId);
        }
        if (dsId != null)
        {
            H5D.close(dsId);
        }
        if (grpId != null)
        {
            H5G.close(grpId);
        }
    }
}
/// <summary>
/// Opens dataset <paramref name="strDatasetName"/> (from group <paramref name="id"/>
/// when given, otherwise from m_file), validates its rank and datatype class, and
/// either reshapes <paramref name="blob"/> to the dataset shape or verifies the
/// shapes already match.
/// </summary>
/// <param name="blob">Target blob; reshaped when bReshape is true.</param>
/// <param name="strDatasetName">Dataset name within the file/group.</param>
/// <param name="bReshape">When true, reshape the blob to the dataset dimensions.</param>
/// <param name="nMinDim">Minimum accepted rank (inclusive).</param>
/// <param name="nMaxDim">Maximum accepted rank (inclusive).</param>
/// <param name="id">Optional group to open the dataset from.</param>
/// <param name="bAllowSingleItems">Allow a 1-D single-item dataset on shape mismatch.</param>
/// <returns>Tuple of the open dataset id (caller closes it) and the single-item size (0 when unused).</returns>
private Tuple <H5DataSetId, int> load_nd_datasetEx(Blob <T> blob, string strDatasetName, bool bReshape, int nMinDim = 1, int nMaxDim = int.MaxValue, H5GroupId id = null, bool bAllowSingleItems = false)
{
    H5DataSetId ds = null;
    int nSingleItemSize = 0;

    try
    {
        // Open relative to the supplied group when given, otherwise the file.
        if (id != null)
            ds = H5D.open(id, strDatasetName);
        else
            ds = H5D.open(m_file, strDatasetName);

        if (ds == null)
            m_log.FAIL("Failed to find the dataset '" + strDatasetName + "'!");

        // Read the rank and extents of the dataset.
        H5DataSpaceId dsSpace = H5D.getSpace(ds);
        if (dsSpace == null)
            m_log.FAIL("Failed to get the dataset space!");

        int nDims = H5S.getSimpleExtentNDims(dsSpace);
        long[] rgDims = H5S.getSimpleExtentDims(dsSpace);
        // FIX: the dataspace handle was previously leaked; close it as soon as
        // the extent information has been read.
        H5S.close(dsSpace);

        // Verify that the number of dimensions is in the accepted range.
        m_log.CHECK_GE(nDims, nMinDim, "The dataset dim is out of range!");
        m_log.CHECK_LE(nDims, nMaxDim, "The dataset dim is out of range!");

        // Verify that the data format is what we expect: float or double.
        H5DataTypeId dsType = H5D.getType(ds);
        if (dsType == null)
            m_log.FAIL("Failed to get the dataset type!");

        H5T.H5TClass dataClass = H5T.getClass(dsType);
        // FIX: the datatype handle was previously leaked; close it once its
        // class has been determined.
        H5T.close(dsType);

        switch (dataClass)
        {
            case H5T.H5TClass.FLOAT:
                m_log.WriteLine("Datatype class: H5T_FLOAT");
                break;

            case H5T.H5TClass.INTEGER:
                m_log.WriteLine("Datatype class: H5T_INTEGER");
                break;

            default:
                m_log.FAIL("Unsupported datatype class: " + dataClass.ToString());
                break;
        }

        List <int> rgBlobDims = new List <int>();
        for (int i = 0; i < nDims; i++)
        {
            rgBlobDims.Add((int)rgDims[i]);
        }

        if (bReshape)
        {
            blob.Reshape(rgBlobDims);
        }
        else
        {
            if (!Utility.Compare <int>(rgBlobDims, blob.shape()))
            {
                // Shape mismatch: fail unless single items are allowed and this
                // is a size-1, 1-D dataset.
                if (!bAllowSingleItems || (rgBlobDims.Count == 1 && rgBlobDims[0] != 1))
                {
                    string strSrcShape = Utility.ToString <int>(rgBlobDims);
                    // FIX: this message literal was previously split across a raw
                    // newline inside the string, which does not compile.
                    m_log.FAIL("Cannot load blob from hdf5; shape mismatch. Source shape = " + strSrcShape + ", target shape = " + blob.shape_string);
                }

                if (rgBlobDims.Count == 1)
                    nSingleItemSize = rgBlobDims[0];
            }
        }
    }
    catch (Exception)
    {
        // Release the dataset handle on failure before propagating.
        if (ds != null)
        {
            H5D.close(ds);
            ds = null;
        }

        // FIX: rethrow with 'throw;' instead of 'throw excpt;' so the original
        // stack trace is preserved.
        throw;
    }

    return(new Tuple <H5DataSetId, int>(ds, nSingleItemSize));
}
/// <summary>
/// Reads the basic geometry (rank, band/row/col) and value type of a dataset
/// located under <paramref name="groupName"/> in the given file.
/// </summary>
/// <param name="fileId">Open HDF5 file.</param>
/// <param name="datasetName">Dataset name within the group.</param>
/// <param name="groupName">Group containing the dataset.</param>
/// <returns>A populated DatasetInfo; unknown type classes map to DataValueType.EMPTY.</returns>
public DatasetInfo GetDatasetInfo(H5FileId fileId, string datasetName, string groupName)
{
    DatasetInfo datasetInfo = new DatasetInfo();
    datasetInfo.band = 1;
    datasetInfo.col = 1;
    datasetInfo.rank = 1;
    datasetInfo.row = 1;

    H5GroupId groupId = H5G.open(fileId, groupName);
    H5DataSetId dataSetId = H5D.open(groupId, datasetName);
    // ulong storeSize = H5D.getStorageSize(dataSetId); // storage size of the data array
    H5DataSpaceId spaceid = H5D.getSpace(dataSetId);
    long[] dims = H5S.getSimpleExtentDims(spaceid);       // extents, e.g. [3, 1800, 2048]
    datasetInfo.rank = H5S.getSimpleExtentNDims(spaceid); // rank, e.g. 3

    // Interpret the extents: 3-D is [band, row, col], 2-D is [row, col],
    // 1-D is just [row].
    int dimCount = dims.Length;
    if (dimCount == 2)
    {
        datasetInfo.col = Convert.ToInt32(dims[1]); // width
        datasetInfo.row = Convert.ToInt32(dims[0]); // height
    }
    else if (dimCount == 3)
    {
        datasetInfo.band = Convert.ToInt32(dims[0]); // band count
        datasetInfo.col = Convert.ToInt32(dims[2]);  // width
        datasetInfo.row = Convert.ToInt32(dims[1]);  // height
    }
    else if (dimCount == 1)
    {
        datasetInfo.row = Convert.ToInt32(dims[0]);
    }

    // Map the HDF5 datatype class onto the DataValueType enum.
    H5DataTypeId typeId = H5D.getType(dataSetId);
    H5T.H5TClass dataClass = H5T.getClass(typeId);
    string typeName = dataClass.ToString();
    switch (typeName)
    {
        case "FLOAT":
            datasetInfo.type = DataValueType.FLOAT;
            break;
        case "INTEGER":
            datasetInfo.type = DataValueType.INT;
            break;
        case "COMPOUND":
            datasetInfo.type = DataValueType.COMPOUND;
            // FIX: previously a second type handle (tid0) was opened here via
            // H5D.getType and never closed, leaking it on every COMPOUND
            // dataset; the already-open typeId describes the same type.
            int nMember = H5T.getNMembers(typeId);
            // For compound data, the column count is the member count.
            datasetInfo.col = nMember;
            break;
        default:
            datasetInfo.type = DataValueType.EMPTY;
            break;
    }

    // Close all handles in reverse order of acquisition.
    H5T.close(typeId);
    H5S.close(spaceid);
    H5D.close(dataSetId);
    H5G.close(groupId);
    return(datasetInfo);
}
/// <summary>
/// Reads a dataset from <paramref name="groupName"/> into <paramref name="datasetOut"/>.
/// FLOAT/INT datasets are read directly; COMPOUND datasets are read member-by-member
/// (currently only INTEGER members are supported) into datasetOut[0, row, member].
/// </summary>
/// <param name="fileId">Open HDF5 file.</param>
/// <param name="datasetName">Dataset name within the group.</param>
/// <param name="groupName">Group containing the dataset.</param>
/// <param name="datasetOut">Pre-allocated 3-D destination array.</param>
/// <param name="type">Value type that selects the read strategy.</param>
public void GetDataset <T>(H5FileId fileId, string datasetName, string groupName, T[, ,] datasetOut, DataValueType type)
{
    H5GroupId groupId = H5G.open(fileId, groupName);
    H5DataSetId dataSetId = H5D.open(groupId, datasetName /*"EV_Emissive"*/);
    switch (type)
    {
        case DataValueType.FLOAT:
            H5DataTypeId tidfloat = new H5DataTypeId(H5T.H5Type.NATIVE_FLOAT);
            // Read the whole array back in one call.
            H5D.read(dataSetId, tidfloat, new H5Array <T>(datasetOut));
            // H5T.close(tidfloat);
            break;

        case DataValueType.INT:
            H5DataTypeId tidint = new H5DataTypeId(H5T.H5Type.NATIVE_INT);
            // Read the whole array back in one call.
            H5D.read(dataSetId, tidint, new H5Array <T>(datasetOut));
            // H5T.close(tidint);
            break;

        case DataValueType.COMPOUND:
            H5DataTypeId tid0 = H5D.getType(dataSetId);
            int nMember = H5T.getNMembers(tid0);
            H5DataSpaceId spaceid = H5D.getSpace(dataSetId);
            long[] dims = H5S.getSimpleExtentDims(spaceid); // extents, e.g. [3, 1800, 2048]

            // Total element count across all dimensions.
            int length = 1;
            for (int i = 0; i < dims.Length; i++)
            {
                length *= (int)dims[i];
            }

            for (int i = 0; i < nMember; i++)
            {
                string memberName = H5T.getMemberName(tid0, i);
                H5DataTypeId memberTypeId = H5T.getMemberType(tid0, i);
                H5T.H5TClass dataClass = H5T.getClass(memberTypeId); // member's type class
                string typeName = dataClass.ToString();
                if (typeName == "INTEGER") // only integer members supported for now
                {
                    // Build a one-member compound type to extract just this field.
                    H5DataTypeId tidtmp = H5T.create(H5T.CreateClass.COMPOUND, sizeof(int));
                    H5T.insert(tidtmp, memberName, 0, H5T.H5Type.NATIVE_INT);
                    int[] dataTmp = new int[length];
                    H5D.read(dataSetId, tidtmp, new H5Array <int>(dataTmp));
                    // FIX: the per-member extraction type was previously leaked.
                    H5T.close(tidtmp);
                    for (int j = 0; j < length; j++)
                    {
                        // FIX: previously used datasetOut[0, j, i].GetType(), which
                        // throws NullReferenceException for reference-type T;
                        // typeof(T) is equivalent for the value types used here.
                        datasetOut[0, j, i] = (T)Convert.ChangeType(dataTmp[j], typeof(T));
                    }
                }
                // FIX: the member type handle was previously leaked each iteration.
                H5T.close(memberTypeId);
            }
            // FIX: the dataset's compound type handle was previously leaked.
            H5T.close(tid0);
            H5S.close(spaceid);
            break;

        default:
            break;
    }
    H5D.close(dataSetId);
    H5G.close(groupId);
    //H5F.close(fileId);
}
/// <summary>
/// Reads one skim matrix from an HDF5 time-period file. The roster name is
/// "hdf5File/path/to/skim": the part before the first '/' is the HDF5 file under
/// _path, the remainder is the dataset path inside it. Values are scaled,
/// clamped to short.MaxValue, and condensed via _mapping into a zone-indexed
/// ushort matrix (PSRC files are copied as-is, already scaled and condensed).
/// </summary>
/// <param name="filename">Roster name: HDF5 file + dataset path.</param>
/// <param name="field">Unused here; part of the reader interface.</param>
/// <param name="scale">Multiplier applied to each raw value (non-PSRC only).</param>
/// <returns>The populated SkimMatrix.</returns>
public SkimMatrix Read(string filename, int field, float scale)
{
    Console.WriteLine("Reading {0}", filename);

    // Split "hdf5File/datasetPath" at the first '/'.
    int hdf5NameEnd = filename.IndexOf("/");
    // the first part of the name in the roster file is the hdf5 file:
    string HDFName = filename.Substring(0, hdf5NameEnd);
    //rename filename to be only the name of the skim inside of the time period file
    filename = filename.Substring(hdf5NameEnd);

    string hdfFile = _path + "\\" + HDFName;
    var dataFile = H5F.open(hdfFile, H5F.OpenMode.ACC_RDONLY);
    var dataSet = H5D.open(dataFile, filename);
    var space = H5D.getSpace(dataSet);
    var size2 = H5S.getSimpleExtentDims(space);
    long nRows = size2[0];
    long nCols = size2[1];
    long numZones = _mapping.Count();

    // Read the full matrix as doubles.
    var dataArray = new double[nRows, nCols];
    var wrapArray = new H5Array <double>(dataArray);
    H5DataTypeId tid1 = H5D.getType(dataSet);
    H5D.read(dataSet, tid1, wrapArray);

    // FIX: all four HDF5 handles were previously leaked; every call to Read
    // left the file open. Close them as soon as the data is in memory.
    H5T.close(tid1);
    H5S.close(space);
    H5D.close(dataSet);
    H5F.close(dataFile);

    // if the count in the hdf5 file is larger than the number of
    // tazs in the mapping, ignore the values over the total number
    // of tazs in the mapping because these are not valid zones.
    _matrix = new ushort[numZones][];
    for (var i = 0; i < numZones; i++)
    {
        _matrix[i] = new ushort[numZones];
    }

    // leave as is for PSRC. Values are already scaled integers and matrices already condensed
    if (Global.Configuration.PSRC)
    {
        for (var i = 0; i < numZones; i++)
        {
            for (var j = 0; j < numZones; j++)
            {
                _matrix[i][j] = (ushort)dataArray[i, j];
            }
        }
    }
    else
    {
        // Condense: only rows/cols present in _mapping (1-based) are kept.
        for (var row = 0; row < nRows; row++)
        {
            if (_mapping.ContainsKey(row + 1))
            {
                for (var col = 0; col < nCols; col++)
                {
                    if (_mapping.ContainsKey(col + 1))
                    {
                        var value = dataArray[row, col] * scale;
                        if (value > 0)
                        {
                            // Clamp to short.MaxValue before the ushort store.
                            if (value > short.MaxValue)
                            {
                                value = short.MaxValue;
                            }
                            _matrix[_mapping[row + 1]][_mapping[col + 1]] = (ushort)value;
                        }
                    }
                }
            }
        }
    }

    var skimMatrix = new SkimMatrix(_matrix);
    return(skimMatrix);
}
/// <summary>
/// Prompts for an HDF5 radar file, loads "/radarData/data" (a 5-D float dataset),
/// collapses it into the 3-D scan/channel/frequency grid used by the rest of the
/// form, resets the view state fields, and triggers the image scanner.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Event args (unused).</param>
private void button1_Click(object sender, EventArgs e)
{
    fileNameTextBox.Text = "";
    string filename = "";
    if (openFileDialog1.ShowDialog() == DialogResult.OK)
    {
        // FIX: the Stream returned by OpenFile() was previously leaked; it is
        // only used to confirm the file can be opened.
        using (var probe = openFileDialog1.OpenFile())
        {
            if (probe != null)
            {
                filename = openFileDialog1.FileName;
                fileNameTextBox.Text = openFileDialog1.FileName;
                Debug.WriteLine(filename);
            }
        }

        H5.Open();
        var h5 = H5F.open(filename, H5F.OpenMode.ACC_RDONLY);
        var dataset = H5D.open(h5, "/radarData/data");
        var Space = H5D.getSpace(dataset);
        var size = H5S.getSimpleExtentDims(Space);

        // Last dimension doubled: each frequency sample is stored as a complex pair.
        float[, , , ,] dataarray = new float[size[0], size[1], size[2], size[3], size[4] * 2];
        var wrapArray = new H5Array <float>(dataarray);
        NOS = (int)size[2]; // number of scans (X)
        NOC = (int)size[3]; // number of channels (antenna) (Y)
        NOF = (int)size[4]; // number of frequencies (Z)

        // NOTE(review): both branches are identical; presumably one was meant to
        // differ — confirm intent. Behavior preserved.
        if (NOS < NOF)
        {
            m = NOS; last = n = 50;
        }
        else
        {
            m = NOS; last = n = 50;
        }

        // (Re)allocate and zero the m x n working buffers.
        x = new float[m][];
        xb = new float[m][];
        c1 = new float[m][];
        y1 = new float[m][];
        for (int i = 0; i < m; i++)
        {
            x[i] = new float[n];
            xb[i] = new float[n];
            c1[i] = new float[n];
            y1[i] = new float[n];
            for (int j = 0; j < n; j++)
            {
                y1[i][j] = 0;
                x[i][j] = 0;
            }
        }

        textBox1.Text = size[2].ToString();
        textBox2.Text = size[4].ToString();
        textBox3.Text = size[3].ToString();

        var dataType = H5D.getType(dataset);
        H5D.read <float>(dataset, dataType, wrapArray);

        // FIX: the type, dataspace, dataset, and file handles were previously
        // never closed (only the global H5.Close() was called).
        H5T.close(dataType);
        H5S.close(Space);
        H5D.close(dataset);
        H5F.close(h5);

        // Drop the two leading singleton dimensions.
        data = new float[size[2], size[3], size[4] * 2];
        var xd = data.Length;
        Debug.WriteLine(xd);
        for (int k = 0; k < size[2]; k++)
        {
            for (int i = 0; i < size[3]; i++)
            {
                for (int j = 0; j < size[4] * 2; j++)
                {
                    data[k, i, j] = dataarray[0, 0, k, i, j];
                }
            }
        }

        // res = 10; //10mm
        res = 1;              // 100mm
        n_o_s = NOS;          // 640;// 510;//number of files
        n_o_c = NOC * res;    // 100;// NOC* res; //* res; //for outdoor =NOC * res for indoor=100; 150 for outdoor as after 15th channel readings were not proper
        n_o_f = NOF;
        grid = new float[NOS, NOC *res, NOF];
        // Keep only the real component (every other value) of each frequency pair.
        for (int k = 0; k < NOS; k++) //100mm
        {
            for (int i = 0; i < NOC; i++)
            {
                for (int j = 0; j < NOF; j++)
                {
                    grid[k, i, j] = data[k, i, j * 2];
                }
            }
        }

        H5.Close();
    }

    // Reset view state and render. NOTE(review): this runs even when the dialog
    // was cancelled, matching the original behavior.
    hscn = 0;
    depth = 0;
    dtscn = 0;
    dtdep = 0;
    chnl = 0;
    dtchnl = 0;
    imagescanner(0, 0, 0);
}