/// <summary>
/// Reads the 3-D float dataset "/FieldData/FD/f0" from an HDF5 file and returns it
/// as a double array with the axis order reversed: result[k, j, i] = file[i, j, k].
/// </summary>
/// <param name="fileName">Path to the HDF5 file.</param>
/// <returns>Field values with dimensions [dims[2], dims[1], dims[0]].</returns>
public static double[, ,] ReadFieldData3D(string fileName)
{
    H5FileId fileId = H5F.open(fileName, H5F.OpenMode.ACC_RDONLY);
    H5DataSetId fDataSetId = null;
    H5DataTypeId fDataTypeId = null;
    try
    {
        fDataSetId = H5D.open(fileId, "/FieldData/FD/f0");
        fDataTypeId = H5D.getType(fDataSetId);
        // Compare against a temporary native-float type id and release it afterwards
        // (the original leaked the copy made by H5T.copy).
        H5DataTypeId nativeFloatId = H5T.copy(H5T.H5Type.NATIVE_FLOAT);
        if (!H5T.equal(fDataTypeId, nativeFloatId))
        {
            Console.WriteLine("Error: Invalid dataset type, expected {0}", H5T.H5Type.NATIVE_FLOAT);
        }
        H5T.close(nativeFloatId);

        H5DataSpaceId spaceId = H5D.getSpace(fDataSetId);
        long[] dims = H5S.getSimpleExtentDims(spaceId).ToArray();
        H5S.close(spaceId);
        if (dims.Length != 3)
        {
            Console.WriteLine("Error: Invalid field data dimensions");
        }

        float[, ,] data = new float[dims[0], dims[1], dims[2]];
        H5D.read(fDataSetId, fDataTypeId, new H5Array<float>(data));

        // Reorder: the file stores [i, j, k]; callers expect [k, j, i].
        double[, ,] fieldValues = new double[dims[2], dims[1], dims[0]];
        for (int i = 0; i < dims[0]; i++)
        {
            for (int j = 0; j < dims[1]; j++)
            {
                for (int k = 0; k < dims[2]; k++)
                {
                    fieldValues[k, j, i] = data[i, j, k];
                }
            }
        }
        return fieldValues;
    }
    finally
    {
        // Release handles in reverse-open order; the original leaked all of them.
        if (fDataTypeId != null) { H5T.close(fDataTypeId); }
        if (fDataSetId != null) { H5D.close(fDataSetId); }
        H5F.close(fileId);
    }
}
/// <summary>
/// Loads weights from an HDF5 file. Weights must be saved as a 1-D vector per layer.
/// The dataset is read as doubles and converted to floats.
/// </summary>
/// <param name="path">Path to the HDF5 file.</param>
/// <param name="dsname">Name of the dataset holding the weight vector.</param>
/// <returns>The weights as a float array.</returns>
public static float[] loadH5(string path, string dsname)
{
    // Get file id
    var h5fid = H5F.open(path, H5F.OpenMode.ACC_RDONLY);
    H5DataSetId h5did = null;
    H5DataSpaceId h5space = null;
    H5DataTypeId h5dtype = null;
    try
    {
        // Get dataset id and its extent
        h5did = H5D.open(h5fid, dsname);
        h5space = H5D.getSpace(h5did);
        var h5size = H5S.getSimpleExtentDims(h5space);
        var S = h5size.ToArray();
        // Read the dataset into a double buffer of S[0] elements
        double[] data = new double[S[0]];
        var h5array = new H5Array<double>(data);
        h5dtype = H5D.getType(h5did);
        H5D.read(h5did, h5dtype, h5array);
        // Convert to float in parallel
        float[] newarray = new float[data.Length];
        Parallel.For(0, data.Length, (k) => { newarray[k] = (float)data[k]; });
        return newarray;
    }
    finally
    {
        // Release HDF5 handles; the original leaked every one of them.
        if (h5dtype != null) { H5T.close(h5dtype); }
        if (h5space != null) { H5S.close(h5space); }
        if (h5did != null) { H5D.close(h5did); }
        H5F.close(h5fid);
    }
}
/// <summary>
/// Reads a 2-D double dataset and returns its transpose:
/// result[i, j] = file[j, i].
/// </summary>
/// <param name="file">Path to the HDF5 file.</param>
/// <param name="dataSet">Dataset path inside the file.</param>
/// <returns>Transposed field values with dimensions [dims[1], dims[0]].</returns>
public static double[,] ReadFieldData2D(string file, string dataSet)
{
    H5FileId fileId = H5F.open(file, H5F.OpenMode.ACC_RDONLY);
    H5DataSetId fDataSetId = H5D.open(fileId, dataSet);
    H5DataTypeId fDataTypeId = H5D.getType(fDataSetId);
    H5DataSpaceId spaceId = H5D.getSpace(fDataSetId);
    long[] dims = H5S.getSimpleExtentDims(spaceId).ToArray();
    double[,] data = new double[dims[0], dims[1]];
    H5D.read(fDataSetId, fDataTypeId, new H5Array<double>(data));
    // Transpose into caller-expected orientation.
    double[,] fieldValues = new double[dims[1], dims[0]];
    for (int i = 0; i < dims[1]; i++)
    {
        for (int j = 0; j < dims[0]; j++)
        {
            fieldValues[i, j] = data[j, i];
        }
    }
    // Close the dataspace too — the original closed type/dataset/file but leaked it.
    H5S.close(spaceId);
    H5T.close(fDataTypeId);
    H5D.close(fDataSetId);
    H5F.close(fileId);
    return fieldValues;
}
/// <summary>
/// Rewrites a dataset's values with destriped data.
/// </summary>
/// <typeparam name="T">Element type of the dataset.</typeparam>
/// <param name="dataSetName">Name of the dataset.</param>
/// <param name="dataTypeId">Type id of the dataset.</param>
/// <param name="values">Data after destriping.</param>
/// <param name="BrandNo">Band index within the dataset, starting at 0.</param>
private void ReWriteDataSet<T>(string dataSetName, H5DataTypeId dataTypeId, T[] values, int BrandNo)
{
    H5FileId _h5FileId = null;
    H5DataSetId dataSetId = null;
    H5DataSpaceId spaceid = null;
    try
    {
        _h5FileId = H5F.open(fileName, H5F.OpenMode.ACC_RDWR);
        // Open the dataset that contains the requested band.
        dataSetId = H5D.open(_h5FileId, dataSetName);
        spaceid = H5D.getSpace(dataSetId);
        long[] dims = H5S.getSimpleExtentDims(spaceid); // e.g. [3, 1800, 2048]
        int rank = H5S.getSimpleExtentNDims(spaceid);   // e.g. 3
        // Total element count for the dataset's rank (0..3 supported).
        int size = 0;
        if (rank == 0) { size = 1; }
        else if (rank == 1) { size = Convert.ToInt32(dims[0]); }
        else if (rank == 2) { size = Convert.ToInt32(dims[0] * dims[1]); }
        else if (rank == 3) { size = Convert.ToInt32(dims[0] * dims[1] * dims[2]); }
        T[] v = new T[size];
        // Read the original data, overlay the corrected values, write back.
        H5D.read<T>(dataSetId, dataTypeId, new H5Array<T>(v));
        // NOTE(review): BrandNo is used as a flat element offset here, not a
        // band offset (band * dims[1] * dims[2]) — confirm this is intentional.
        for (int i = BrandNo; i < values.Length; i++)
        {
            v[i] = values[i];
        }
        H5D.write<T>(dataSetId, dataTypeId, new H5Array<T>(v));
    }
    finally
    {
        // Null-guard the closes: the original dereferenced possibly-null ids in
        // finally (NRE when H5F.open/H5D.open failed) and rethrew a new
        // Exception(e.Message), losing the original stack trace. Letting the
        // original exception propagate preserves it for callers.
        if (spaceid != null) { H5S.close(spaceid); }
        if (dataSetId != null) { H5D.close(dataSetId); }
        if (_h5FileId != null) { H5F.close(_h5FileId); }
    }
}
/// <summary>
/// Opens the dataset at <paramref name="pathName"/> under the given group and fills
/// <paramref name="dataObject"/> with its ids, name, rank, mapped CLR element type
/// and the data itself (via readData). Exceptions are logged to the console and
/// swallowed; HDF5 handles are always released in the finally block.
/// NOTE(review): dataObject.DatasetID is assigned the dataset handle that this
/// method closes on exit — confirm readData consumes it before the close.
/// </summary>
private void createHD5DataObject(H5GroupId h5GroupId, string pathName, ref HD5DataSetObject dataObject)
{
    H5DataSetId datasetid = null;
    H5DataSpaceId spaceid = null;
    H5DataTypeId dataTypeid = null;
    try
    {
        dataObject.GroupId = h5GroupId;
        datasetid = H5D.open(h5GroupId, pathName);
        dataObject.DatasetID = datasetid;
        dataObject.DatasetName = pathName;
        spaceid = H5D.getSpace(datasetid);
        var dims = H5S.getSimpleExtentDims(spaceid);
        dataTypeid = H5D.getType(datasetid);
        dataObject.Dim = dims.Length;
        HDF5DotNet.H5T.H5TClass classType = H5T.getClass(dataTypeid);
        int size = H5T.getSize(dataTypeid);
        // Sign is only meaningful for integer types; default to two's complement.
        H5T.Sign sign = H5T.Sign.TWOS_COMPLEMENT;
        if (classType == H5T.H5TClass.INTEGER)
        {
            sign = H5T.getSign(dataTypeid);
        }
        //var rank = H5S.getSimpleExtentNDims(space);
        //var statu = H5S.getSimpleExtentDims(space);
        Boolean bString = H5T.isVariableString(dataTypeid);
        //String name = H5T.getMemberName(dataType, 0);
        // var type2 = H5T.getNativeType(dataType, H5T.Direction.DEFAULT);
        // Map the (class, size, sign) triple to the corresponding CLR type.
        Type type = getTypeof(classType, size, sign);
        dataObject.DataType = type;
        dataObject.Data = readData(dataObject);
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
    }
    finally
    {
        if (datasetid != null) { H5D.close(datasetid); }
        if (spaceid != null) { H5S.close(spaceid); }
        if (dataTypeid != null) { H5T.close(dataTypeid); }
    }
}
/// <summary>
/// Reads an entire 2-D or 3-D dataset into a flat array and reports its
/// band/height/width extents. (Original note: callers handle exceptions.)
/// </summary>
/// <typeparam name="T">Element type to read.</typeparam>
/// <param name="datasetName">Dataset name; must be in _datasetNames.</param>
/// <param name="bandN">Out: number of bands (1 for 2-D data).</param>
/// <param name="bandH">Out: height (rows).</param>
/// <param name="bandW">Out: width (columns).</param>
/// <returns>Flat array of bandN * bandH * bandW elements.</returns>
public T[] ReadDataArray<T>(String datasetName, ref int bandN, ref int bandH, ref int bandW)
{
    // Guard clause instead of the original if/else wrapping the whole body.
    if (String.IsNullOrEmpty(datasetName) || !_datasetNames.Contains(datasetName))
    {
        throw new Exception("未查到指定数据集!");
    }
    H5DataSetId datasetId = null;
    H5DataSpaceId spaceId = null;
    H5DataTypeId rawTypeId = null;
    H5DataTypeId typeId = null;
    try
    {
        datasetId = H5D.open(_fileId, datasetName);
        spaceId = H5D.getSpace(datasetId);
        long[] dims = H5S.getSimpleExtentDims(spaceId);
        if (dims.Length == 2)
        {
            bandN = 1;
            bandH = (int)dims[0];
            bandW = (int)dims[1];
        }
        else if (dims.Length == 3)
        {
            bandN = (int)dims[0];
            bandH = (int)dims[1];
            bandW = (int)dims[2];
        }
        // Read using the dataset's native in-memory type.
        rawTypeId = H5D.getType(datasetId);
        typeId = H5T.getNativeType(rawTypeId, H5T.Direction.DEFAULT);
        T[] dv = new T[bandN * bandH * bandW];
        H5D.read<T>(datasetId, typeId, new H5Array<T>(dv));
        return dv;
    }
    finally
    {
        // The original leaked every handle when an exception occurred, and
        // unconditionally leaked the first (pre-native) type handle by
        // overwriting the typeId variable.
        if (typeId != null) { H5T.close(typeId); }
        if (rawTypeId != null) { H5T.close(rawTypeId); }
        if (spaceId != null) { H5S.close(spaceId); }
        if (datasetId != null) { H5D.close(datasetId); }
    }
}
/// <summary>
/// Reads a whole 2-D dataset from an open HDF5 file.
/// String element types are not handled yet (see inline note).
/// </summary>
/// <typeparam name="T">Element type of the dataset.</typeparam>
/// <param name="fileId">Open HDF5 file id.</param>
/// <param name="dataSetName">Dataset path inside the file.</param>
/// <returns>A [dims[0], dims[1]] array of the dataset contents.</returns>
public static T[,] Read2DArray<T>(this H5FileId fileId, string dataSetName)
{
    var dataset = H5D.open(fileId, dataSetName);
    try
    {
        var space = H5D.getSpace(dataset);
        var dims = H5S.getSimpleExtentDims(space);
        H5S.close(space);
        var dataType = H5D.getType(dataset);
        try
        {
            if (typeof(T) == typeof(string))
            {
                // this will also need a string hack...
            }
            T[,] dataArray = new T[dims[0], dims[1]];
            var wrapArray = new H5Array<T>(dataArray);
            H5D.read(dataset, dataType, wrapArray);
            return dataArray;
        }
        finally
        {
            // The original leaked the dataset, dataspace and datatype handles.
            H5T.close(dataType);
        }
    }
    finally
    {
        H5D.close(dataset);
    }
}
//Reading and Printing Methods
/// <summary>
/// Reads a 1-D double dataset at <paramref name="path"/>, or returns null when the
/// link does not exist.
/// </summary>
private static double[] GetDoubleDataSet(H5FileId dataFile, string path)
{
    if (!H5L.Exists(dataFile, path))
    {
        return null;
    }
    H5DataSetId dataSet = H5D.open(dataFile, path);
    try
    {
        H5DataSpaceId space = H5D.getSpace(dataSet);
        long[] size2 = H5S.getSimpleExtentDims(space);
        H5S.close(space);
        long count = size2[0];
        double[] dataArray = new double[count];
        H5Array<double> wrapArray = new H5Array<double>(dataArray);
        H5DataTypeId tid1 = H5D.getType(dataSet);
        H5D.read(dataSet, tid1, wrapArray);
        // Close handles — the original leaked dataset, dataspace and datatype.
        H5T.close(tid1);
        return dataArray;
    }
    finally
    {
        H5D.close(dataSet);
    }
}
/// <summary>
/// Reads a 1-D Int32 dataset at <paramref name="path"/>, or returns null when the
/// link does not exist.
/// </summary>
private static int[] GetInt32DataSet(H5FileId dataFile, string path)
{
    if (!H5L.Exists(dataFile, path))
    {
        return null;
    }
    var dataSet = H5D.open(dataFile, path);
    try
    {
        var space = H5D.getSpace(dataSet);
        var size2 = H5S.getSimpleExtentDims(space);
        H5S.close(space);
        long count = size2[0];
        var dataArray = new Int32[count];
        var wrapArray = new H5Array<Int32>(dataArray);
        H5DataTypeId tid1 = H5D.getType(dataSet);
        H5D.read(dataSet, tid1, wrapArray);
        // Close handles — the original leaked dataset, dataspace and datatype.
        H5T.close(tid1);
        return dataArray;
    }
    finally
    {
        H5D.close(dataSet);
    }
}
/// <summary>
/// Reads a scalar value from the named dataset. Fixed-length string datasets are
/// read as raw ASCII bytes and decoded. Returns default(T) on any error
/// (original swallow-all behavior preserved).
/// </summary>
/// <typeparam name="T">Scalar type to read (or string).</typeparam>
/// <param name="fileId">Open HDF5 file id.</param>
/// <param name="datasetName">Dataset path inside the file.</param>
public static T ReadScalar<T>(H5FileId fileId, string datasetName)
{
    H5DataSetId dataset = null;
    H5DataSpaceId space = null;
    H5DataTypeId dataType = null;
    T data = default(T);
    try
    {
        dataset = H5D.open(fileId, datasetName);
        space = H5D.getSpace(dataset);
        dataType = H5D.getType(dataset);
        if (typeof(T) == typeof(string))
        {
            // Fixed-length string: read the raw buffer and decode as ASCII.
            // BUG FIX: the original cast the whole IEnumerable<T> produced by
            // Select to T, which always threw InvalidCastException and made every
            // string read return default(T). Return the first fixed-length record.
            int stringLength = H5T.getSize(dataType);
            byte[] buffer = new byte[2 * stringLength];
            H5D.read(dataset, dataType, new H5Array<byte>(buffer));
            string stuff = System.Text.ASCIIEncoding.ASCII.GetString(buffer);
            return (T)(object)stuff.SplitInParts(stringLength).First();
        }
        H5D.readScalar<T>(dataset, dataType, ref data);
        return data;
    }
    catch
    {
        return default(T);
    }
    finally
    {
        // Also close the datatype handle, which the original leaked.
        if (dataType != null) { H5T.close(dataType); }
        if (space != null) { H5S.close(space); }
        if (dataset != null) { H5D.close(dataset); }
    }
}
/// <summary>
/// Reads a 1-D dataset from an open file. Fixed-length string datasets are read
/// as one byte buffer and sliced into dims[0] strings of the stored length.
/// Returns null on any error (original swallow-all behavior preserved).
/// </summary>
public static T[] Read1DArray<T>(H5FileId fileId, string dataSetName)
{
    H5DataSetId dataset = null;
    H5DataSpaceId space = null;
    H5DataTypeId dataType = null;
    try
    {
        dataset = H5D.open(fileId, dataSetName);
        space = H5D.getSpace(dataset);
        long[] dims = H5S.getSimpleExtentDims(space);
        dataType = H5D.getType(dataset);
        if (typeof(T) == typeof(string))
        {
            // Fixed-length strings: read all bytes, then slice into records.
            int stringLength = H5T.getSize(dataType);
            byte[] buffer = new byte[(int)(dims[0]) * stringLength];
            H5D.read(dataset, dataType, new H5Array<byte>(buffer));
            string stuff = System.Text.ASCIIEncoding.ASCII.GetString(buffer);
            return stuff.SplitInParts(stringLength).Select(ss => (T)(object)ss).ToArray();
        }
        T[] dataArray = new T[dims[0]];
        var wrapArray = new H5Array<T>(dataArray);
        H5D.read(dataset, dataType, wrapArray);
        return dataArray;
    }
    catch
    {
        return null;
    }
    finally
    {
        // Also close the datatype handle, which the original leaked.
        if (dataType != null) { H5T.close(dataType); }
        if (space != null) { H5S.close(space); }
        if (dataset != null) { H5D.close(dataset); }
    }
}
/// <summary>
/// Extension form of Read1DArray: reads a 1-D dataset from an open file, handling
/// fixed-length string datasets via a raw byte read sliced into records.
/// </summary>
public static T[] Read1DArray<T>(this H5FileId fileId, string dataSetName)
{
    var dataset = H5D.open(fileId, dataSetName);
    try
    {
        var space = H5D.getSpace(dataset);
        var dims = H5S.getSimpleExtentDims(space);
        H5S.close(space);
        var dataType = H5D.getType(dataset);
        try
        {
            if (typeof(T) == typeof(string))
            {
                // Fixed-length strings: read all bytes, then slice into records.
                int stringLength = H5T.getSize(dataType);
                byte[] buffer = new byte[dims[0] * stringLength];
                H5D.read(dataset, dataType, new H5Array<byte>(buffer));
                string stuff = System.Text.ASCIIEncoding.ASCII.GetString(buffer);
                return stuff.SplitInParts(stringLength).Select(ss => (T)(object)ss).ToArray();
            }
            T[] dataArray = new T[dims[0]];
            var wrapArray = new H5Array<T>(dataArray);
            H5D.read(dataset, dataType, wrapArray);
            return dataArray;
        }
        finally
        {
            // The original never closed any handle.
            H5T.close(dataType);
        }
    }
    finally
    {
        H5D.close(dataset);
    }
}
/// <summary>
/// Currently unused. Reads a hyperslab of <paramref name="rowcount"/> rows by
/// <paramref name="colcount"/> columns, starting at row <paramref name="rowIndex"/>,
/// from a dataset inside the named group into <paramref name="datasetOut"/>.
/// NOTE(review): the memory datatype tid1 is hard-coded to NATIVE_INT regardless
/// of T — confirm T is int (or layout-compatible) before reusing this method.
/// </summary>
/// <typeparam name="T">Element type of the destination array.</typeparam>
/// <param name="fileId">Open HDF5 file id.</param>
/// <param name="datasetName">Dataset name within the group.</param>
/// <param name="groupName">Group containing the dataset.</param>
/// <param name="datasetOut">Destination buffer for the hyperslab.</param>
/// <param name="rowIndex">First row of the selection (0-based).</param>
/// <param name="rowcount">Number of rows to read.</param>
/// <param name="colcount">Number of columns to read.</param>
public void GetDataset<T>(H5FileId fileId, string datasetName, string groupName, T[,] datasetOut, int rowIndex, int rowcount, int colcount)
{
    H5GroupId groupId = H5G.open(fileId, groupName);
    H5DataSetId dataSetId = H5D.open(groupId, datasetName /*"EV_Emissive"*/);
    H5DataTypeId tid0 = H5D.getType(dataSetId);
    H5DataSpaceId spaceid = H5D.getSpace(dataSetId);
    // File-side selection: rows [rowIndex, rowIndex+rowcount), cols [0, colcount).
    long[] start = new long[2];
    start[0] = rowIndex;
    start[1] = 0;
    long[] count = new long[2];
    count[0] = rowcount;
    count[1] = colcount;
    H5S.selectHyperslab(spaceid, H5S.SelectOperator.SET, start, count);
    //long[] dimes = new long[2];
    //dimes[0] = 1;
    //dimes[1] = 8192;
    // Memory-side dataspace matching the selection size.
    H5DataSpaceId simpleSpaceid = H5S.create_simple(2, count);
    H5PropertyListId listid = new H5PropertyListId(H5P.Template.DEFAULT);
    H5DataTypeId tid1 = new H5DataTypeId(H5T.H5Type.NATIVE_INT); // memory data type
    // Read the array back
    //int[,] dataSet = new int[cout[0], cout[1]];
    H5D.read(dataSetId, tid1, simpleSpaceid, spaceid, listid, new H5Array<T>(datasetOut));
    H5S.close(simpleSpaceid);
    H5S.close(spaceid);
    H5T.close(tid0);
    H5D.close(dataSetId);
    H5G.close(groupId);
}
/// <summary>
/// Reads a whole 3-D dataset from the group. String element types are not
/// supported yet; a [dims[0], 0, 0] placeholder is returned for them
/// (original behavior preserved).
/// </summary>
private static T[, ,] Read3DArray<T>(H5GroupId groupID, string name)
{
    var dataset = H5D.open(groupID, name);
    try
    {
        var space = H5D.getSpace(dataset);
        var dims = H5S.getSimpleExtentDims(space);
        H5S.close(space);
        var dataType = H5D.getType(dataset);
        try
        {
            if (typeof(T) == typeof(string))
            {
                // this will also need a string hack...
                T[, ,] dataArray = new T[dims[0], 0, 0];
                return dataArray;
            }
            else
            {
                T[, ,] dataArray = new T[dims[0], dims[1], dims[2]];
                var wrapArray = new H5Array<T>(dataArray);
                H5D.read(dataset, dataType, wrapArray);
                return dataArray;
            }
        }
        finally
        {
            // The original never closed any handle.
            H5T.close(dataType);
        }
    }
    finally
    {
        H5D.close(dataset);
    }
}
/// <summary>
/// Creates a chunked, row-extendable [count x parametersNumber] dataset in the
/// file and writes barsData into it in one shot.
/// NOTE(review): the hyperslab selection on newSpace is never passed to
/// H5D.write (the overload used writes the full dataset), and setExtent is
/// called with the same dims the dataspace was created with — both look like
/// leftovers from an incremental-append variant; confirm before relying on
/// partial writes here.
/// </summary>
void WriteDataToNewFile<T>(H5FileId fileId, string datasetName, T[,] barsData, long count, long parametersNumber, H5DataTypeId datatypeId)
{
    // Extendable dataspace: unlimited rows, fixed column count.
    H5DataSpaceId dataspaceId = H5S.create_simple(2, new long[] { count, parametersNumber }, new long[] { (long)H5S.H5SType.UNLIMITED, parametersNumber });
    H5PropertyListId createChunked = H5P.create(H5P.PropertyListClass.DATASET_CREATE);
    H5PropertyListId linkCreationDefaults = H5P.create(H5P.PropertyListClass.LINK_CREATE);
    H5PropertyListId accessCreationDefaults = H5P.create(H5P.PropertyListClass.DATASET_ACCESS);
    // One row per chunk.
    H5P.setChunk(createChunked, new long[] { 1, parametersNumber });
    H5DataSetId datasetId = H5D.create(fileId, datasetName, datatypeId, dataspaceId, linkCreationDefaults, createChunked, accessCreationDefaults);
    H5D.setExtent(datasetId, new long[] { count, parametersNumber });
    H5DataSpaceId newSpace = H5D.getSpace(datasetId);
    H5S.selectHyperslab(newSpace, H5S.SelectOperator.SET, new long[] { 0, 0 }, new long[] { count, parametersNumber });
    H5D.write(datasetId, datatypeId, new H5Array<T>(barsData));
    // Release all property lists, dataspaces and the dataset.
    H5P.close(createChunked);
    H5P.close(linkCreationDefaults);
    H5P.close(accessCreationDefaults);
    H5S.close(newSpace);
    H5S.close(dataspaceId);
    H5D.close(datasetId);
}
/// <summary>
/// Reads a 1-D dataset from a group. For string element types no read is
/// performed yet; an array of dims[0] default entries is returned
/// (original behavior preserved).
/// </summary>
private static T[] Read1DArray<T>(H5GroupId fileId, string dataSetName)
{
    var dataset = H5D.open(fileId, dataSetName);
    try
    {
        var space = H5D.getSpace(dataset);
        var dims = H5S.getSimpleExtentDims(space);
        H5S.close(space);
        var dataType = H5D.getType(dataset);
        try
        {
            T[] dataArray = new T[dims[0]];
            if (typeof(T) != typeof(string))
            {
                var wrapArray = new H5Array<T>(dataArray);
                H5D.read(dataset, dataType, wrapArray);
            }
            // else: this will also need a string hack...
            return dataArray;
        }
        finally
        {
            // The original closed only the dataset; space and type leaked.
            H5T.close(dataType);
        }
    }
    finally
    {
        H5D.close(dataset);
    }
}
/// <summary>
/// Downloads quotes for the configured symbol/time range and writes them either to
/// a CSV file or to an HDF5 file with extendable "Quotes" ([n, bid/ask, price/vol])
/// and "DateQuotes" (epoch milliseconds) datasets, flushed one chunk at a time.
/// </summary>
void DownloadQuotes()
{
    QuoteDepth marketDepth = includeLevel2 ? QuoteDepth.Level2 : QuoteDepth.Top;
    DownloadQuotesEnumerator enumerator = quoteClient.DownloadQuotes(symbol, marketDepth, from, to, -1);
    if (outputType == "csv")
    {
        string path = Path.Combine(this.location, string.Format("{0}{1}{2}{3}.csv", symbol, includeLevel2 ? " level2" : "", from.ToString(" yyyyMMdd"), to.ToString(" yyyyMMdd")));
        using (StreamWriter file = File.CreateText(path))
        {
            file.WriteLine("date_time,bid_price,bid_volume,ask_price,ask_volume");
            for (Quote quote = enumerator.Next(-1); quote != null; quote = enumerator.Next(-1))
            {
                StringBuilder builder = new StringBuilder();
                builder.Append(quote.CreatingTime.ToString("yyyy-MM-dd HH:mm:ss.fff,", CultureInfo.InvariantCulture));
                foreach (QuoteEntry entry in quote.Bids)
                {
                    builder.AppendFormat("{0},{1},", entry.Price, entry.Volume);
                }
                // NOTE(review): ask entries use "{0},{1}" with no trailing comma, so
                // successive level2 asks run together — confirm before using this
                // CSV output with level2 depth.
                foreach (QuoteEntry entry in quote.Asks)
                {
                    builder.AppendFormat("{0},{1}", entry.Price, entry.Volume);
                }
                file.WriteLine(builder);
            }
        }
        this.Log("Quotes are downloaded successfully");
    }
    else if (outputType == "hdf5")
    {
        string path = Path.Combine(this.location, string.Format("{0}{1}{2}{3}.h5", symbol, includeLevel2 ? " level2" : "", from.ToString(" yyyyMMdd"), to.ToString(" yyyyMMdd")));
        H5FileId fileId = H5F.create(path, H5F.CreateMode.ACC_TRUNC);
        // "Quotes": [n, 2, 2] = (tick, bid/ask, price/volume), extendable along n.
        H5DataTypeId quotesTypeId = new H5DataTypeId(H5T.H5Type.NATIVE_DOUBLE);
        H5DataSpaceId quotesSpaceId = H5S.create_simple(3, new long[] { 1, 2, 2 }, new long[] { (long)H5S.H5SType.UNLIMITED, 2, 2 });
        H5PropertyListId createChunkedQuotes = H5P.create(H5P.PropertyListClass.DATASET_CREATE);
        H5PropertyListId linkCreationDefaultsQuotes = H5P.create(H5P.PropertyListClass.LINK_CREATE);
        H5PropertyListId accessCreationDefaultsQuotes = H5P.create(H5P.PropertyListClass.DATASET_ACCESS);
        H5P.setChunk(createChunkedQuotes, new long[] { 1, 2, 2 });
        H5DataSetId quotesSetId = H5D.create(fileId, "Quotes", quotesTypeId, quotesSpaceId, linkCreationDefaultsQuotes, createChunkedQuotes, accessCreationDefaultsQuotes);
        // "DateQuotes": one epoch-millisecond timestamp per tick, extendable.
        H5DataTypeId dateQuotesTypeId = new H5DataTypeId(H5T.H5Type.NATIVE_LLONG);
        H5DataSpaceId dateQuotesSpaceId = H5S.create_simple(1, new long[] { 1 }, new long[] { (long)H5S.H5SType.UNLIMITED });
        H5PropertyListId createChunkedDate = H5P.create(H5P.PropertyListClass.DATASET_CREATE);
        H5PropertyListId linkCreationDefaultsDate = H5P.create(H5P.PropertyListClass.LINK_CREATE);
        H5PropertyListId accessCreationDefaultsDate = H5P.create(H5P.PropertyListClass.DATASET_ACCESS);
        H5P.setChunk(createChunkedDate, new long[] { 1 });
        H5DataSetId dateQuotesSetId = H5D.create(fileId, "DateQuotes", dateQuotesTypeId, dateQuotesSpaceId, linkCreationDefaultsDate, createChunkedDate, accessCreationDefaultsDate);
        int count = 0;
        int chunkCount = 0;
        double[,,] quotesArr = new double[chunkSize, 2, 2];
        long[] datesArr = new long[chunkSize];
        H5DataSpaceId memSpace;
        for (Quote quote = enumerator.Next(-1); quote != null; quote = enumerator.Next(-1))
        {
            int j = 0;
            foreach (QuoteEntry entry in quote.Bids)
            {
                quotesArr[chunkCount, 0, j] = entry.Price;
                quotesArr[chunkCount, 0, j + 1] = entry.Volume;
                j += 2;
            }
            j = 0;
            foreach (QuoteEntry entry in quote.Asks)
            {
                quotesArr[chunkCount, 1, j] = entry.Price;
                quotesArr[chunkCount, 1, j + 1] = entry.Volume;
                j += 2;
            }
            datesArr[chunkCount] = (long)quote.CreatingTime.Subtract(new DateTime(1970, 1, 1)).TotalMilliseconds;
            chunkCount++;
            count++;
            if (chunkCount == chunkSize)
            {
                // Flush one full chunk: extend the dataset, select the new slab, write.
                H5D.setExtent(quotesSetId, new long[] { count, 2, 2 });
                H5S.close(quotesSpaceId);
                quotesSpaceId = H5D.getSpace(quotesSetId);
                H5S.selectHyperslab(quotesSpaceId, H5S.SelectOperator.SET, new long[] { count - chunkSize, 0, 0 }, new long[] { chunkSize, 2, 2 });
                memSpace = H5S.create_simple(3, new long[] { chunkSize, 2, 2 });
                H5D.write(quotesSetId, quotesTypeId, memSpace, quotesSpaceId, new H5PropertyListId(H5P.Template.DEFAULT), new H5Array<double>(quotesArr));
                // FIX: close the per-chunk memory dataspace — the original leaked
                // two dataspace handles per chunk (the sibling overload closes them).
                H5S.close(memSpace);
                H5D.setExtent(dateQuotesSetId, new long[] { count });
                H5S.close(dateQuotesSpaceId);
                dateQuotesSpaceId = H5D.getSpace(dateQuotesSetId);
                H5S.selectHyperslab(dateQuotesSpaceId, H5S.SelectOperator.SET, new long[] { count - chunkSize }, new long[] { chunkSize });
                memSpace = H5S.create_simple(1, new long[] { chunkSize });
                H5D.write(dateQuotesSetId, dateQuotesTypeId, memSpace, dateQuotesSpaceId, new H5PropertyListId(H5P.Template.DEFAULT), new H5Array<long>(datesArr));
                H5S.close(memSpace);
                chunkCount = 0;
            }
        }
        if (count % chunkSize != 0)
        {
            // Flush the trailing partial chunk.
            int delta = count % chunkSize;
            H5D.setExtent(quotesSetId, new long[] { count, 2, 2 });
            H5S.close(quotesSpaceId);
            quotesSpaceId = H5D.getSpace(quotesSetId);
            H5S.selectHyperslab(quotesSpaceId, H5S.SelectOperator.SET, new long[] { count - delta, 0, 0 }, new long[] { delta, 2, 2 });
            memSpace = H5S.create_simple(3, new long[] { delta, 2, 2 });
            H5D.write(quotesSetId, quotesTypeId, memSpace, quotesSpaceId, new H5PropertyListId(H5P.Template.DEFAULT), new H5Array<double>(quotesArr));
            H5S.close(memSpace);
            H5D.setExtent(dateQuotesSetId, new long[] { count });
            H5S.close(dateQuotesSpaceId);
            dateQuotesSpaceId = H5D.getSpace(dateQuotesSetId);
            H5S.selectHyperslab(dateQuotesSpaceId, H5S.SelectOperator.SET, new long[] { count - delta }, new long[] { delta });
            memSpace = H5S.create_simple(1, new long[] { delta });
            H5D.write(dateQuotesSetId, dateQuotesTypeId, memSpace, dateQuotesSpaceId, new H5PropertyListId(H5P.Template.DEFAULT), new H5Array<long>(datesArr));
            H5S.close(memSpace);
        }
        H5P.close(createChunkedQuotes);
        H5P.close(linkCreationDefaultsQuotes);
        H5P.close(accessCreationDefaultsQuotes);
        H5P.close(createChunkedDate);
        H5P.close(linkCreationDefaultsDate);
        H5P.close(accessCreationDefaultsDate);
        H5S.close(quotesSpaceId);
        H5D.close(quotesSetId);
        H5S.close(dateQuotesSpaceId);
        H5D.close(dateQuotesSetId);
        H5F.close(fileId);
        this.Log("Quotes are downloaded successfully");
    }
}
/// <summary>
/// Downloads quotes for the configured symbol/time range and writes them as CSV,
/// HDF5 (extendable "Quotes"/"DateQuotes" datasets, chunk-buffered), or zipped CSV.
/// Indicative bids/asks are encoded with negative volume in the CSV output.
/// VWAP is only supported by the csv_zip path.
/// </summary>
void DownloadQuotes()
{
    QuoteDepth marketDepth = includeLevel2 ? QuoteDepth.Level2 : QuoteDepth.Top;
    DownloadQuotesEnumerator enumerator = quoteClient.DownloadQuotes(symbol, marketDepth, from, to, -1);
    enumeratorTicks = enumerator;
    if (outputType == "csv")
    {
        if (includeVWAP)
        {
            throw new ArgumentException("VWAP is not supported for hdf5 and csv format.");
        }
        string path = Path.Combine(this.location, string.Format("{0}{1}{2}{3}.csv", symbol.Replace("/", "%2F"), includeLevel2 ? " level2" : "", from.ToString(" yyyyMMdd-HH-mm-ss"), to.ToString(" yyyyMMdd-HH-mm-ss")));
        using (StreamWriter file = File.CreateText(path))
        {
            file.WriteLine("date_time;bid_price;bid_volume;ask_price;ask_volume");
            for (Quote quote = enumerator.Next(-1); quote != null; quote = enumerator.Next(-1))
            {
                StringBuilder builder = new StringBuilder();
                builder.Append(quote.CreatingTime.ToString("yyyy-MM-dd HH:mm:ss.fff;", CultureInfo.InvariantCulture));
                foreach (QuoteEntry entry in quote.Bids)
                {
                    // Indicative quotes are flagged by negating the volume.
                    if (quote.TickType.HasFlag(TickTypes.IndicativeBid) || quote.TickType.HasFlag(TickTypes.IndicativeBidAsk))
                    {
                        builder.AppendFormat("{0};{1};", (decimal)entry.Price, (decimal)-entry.Volume);
                    }
                    else
                    {
                        builder.AppendFormat("{0};{1};", (decimal)entry.Price, (decimal)entry.Volume);
                    }
                }
                foreach (QuoteEntry entry in quote.Asks)
                {
                    if (quote.TickType.HasFlag(TickTypes.IndicativeAsk) || quote.TickType.HasFlag(TickTypes.IndicativeBidAsk))
                    {
                        builder.AppendFormat("{0};{1};", (decimal)entry.Price, (decimal)-entry.Volume);
                    }
                    else
                    {
                        builder.AppendFormat("{0};{1};", (decimal)entry.Price, (decimal)entry.Volume);
                    }
                }
                // Drop the trailing field separator.
                builder.Remove(builder.Length - 1, 1);
                file.WriteLine(builder);
            }
        }
        if (includeLevel2)
        {
            this.Log($"level2 {symbol} are downloaded successfully");
        }
        else if (includeVWAP)
        {
            this.Log($"VWAP {symbol} are downloaded successfully");
        }
        else
        {
            this.Log($"Ticks {symbol} are downloaded successfully");
        }
    }
    else if (outputType == "hdf5")
    {
        if (includeVWAP)
        {
            throw new ArgumentException("VWAP is not supported for hdf5 and csv format.");
        }
        string path = Path.Combine(this.location, string.Format("{0}{1}{2}{3}.h5", symbol.Replace("/", "%2F"), includeLevel2 ? " level2" : "", from.ToString(" yyyyMMdd-HH-mm-ss"), to.ToString(" yyyyMMdd-HH-mm-ss")));
        H5FileId fileId = H5F.create(path, H5F.CreateMode.ACC_TRUNC);
        // "Quotes": [n, 2, 2] = (tick, bid/ask, price/volume), extendable along n.
        H5DataTypeId quotesTypeId = new H5DataTypeId(H5T.H5Type.NATIVE_DOUBLE);
        H5DataSpaceId quotesSpaceId = H5S.create_simple(3, new long[] { 1, 2, 2 }, new long[] { (long)H5S.H5SType.UNLIMITED, 2, 2 });
        H5PropertyListId createChunkedQuotes = H5P.create(H5P.PropertyListClass.DATASET_CREATE);
        H5PropertyListId linkCreationDefaultsQuotes = H5P.create(H5P.PropertyListClass.LINK_CREATE);
        H5PropertyListId accessCreationDefaultsQuotes = H5P.create(H5P.PropertyListClass.DATASET_ACCESS);
        H5P.setChunk(createChunkedQuotes, new long[] { 1, 2, 2 });
        H5DataSetId quotesSetId = H5D.create(fileId, "Quotes", quotesTypeId, quotesSpaceId, linkCreationDefaultsQuotes, createChunkedQuotes, accessCreationDefaultsQuotes);
        // "DateQuotes": one epoch-millisecond timestamp per tick, extendable.
        H5DataTypeId dateQuotesTypeId = new H5DataTypeId(H5T.H5Type.NATIVE_LLONG);
        H5DataSpaceId dateQuotesSpaceId = H5S.create_simple(1, new long[] { 1 }, new long[] { (long)H5S.H5SType.UNLIMITED });
        H5PropertyListId createChunkedDate = H5P.create(H5P.PropertyListClass.DATASET_CREATE);
        H5PropertyListId linkCreationDefaultsDate = H5P.create(H5P.PropertyListClass.LINK_CREATE);
        H5PropertyListId accessCreationDefaultsDate = H5P.create(H5P.PropertyListClass.DATASET_ACCESS);
        H5P.setChunk(createChunkedDate, new long[] { 1 });
        H5DataSetId dateQuotesSetId = H5D.create(fileId, "DateQuotes", dateQuotesTypeId, dateQuotesSpaceId, linkCreationDefaultsDate, createChunkedDate, accessCreationDefaultsDate);
        int count = 0;
        int chunkCount = 0;
        double[,,] quotesArr = new double[chunkSize, 2, 2];
        long[] datesArr = new long[chunkSize];
        H5DataSpaceId memSpace = null;
        for (Quote quote = enumerator.Next(-1); quote != null; quote = enumerator.Next(-1))
        {
            int j = 0;
            foreach (QuoteEntry entry in quote.Bids)
            {
                quotesArr[chunkCount, 0, j] = entry.Price;
                quotesArr[chunkCount, 0, j + 1] = entry.Volume;
                j += 2;
            }
            j = 0;
            foreach (QuoteEntry entry in quote.Asks)
            {
                quotesArr[chunkCount, 1, j] = entry.Price;
                quotesArr[chunkCount, 1, j + 1] = entry.Volume;
                j += 2;
            }
            datesArr[chunkCount] = (long)quote.CreatingTime.Subtract(new DateTime(1970, 1, 1)).TotalMilliseconds;
            chunkCount++;
            count++;
            if (chunkCount == chunkSize)
            {
                // Flush one full chunk: extend, reselect the new slab, write, close
                // the temporary memory dataspace.
                H5D.setExtent(quotesSetId, new long[] { count, 2, 2 });
                H5S.close(quotesSpaceId);
                quotesSpaceId = H5D.getSpace(quotesSetId);
                H5S.selectHyperslab(quotesSpaceId, H5S.SelectOperator.SET, new long[] { count - chunkSize, 0, 0 }, new long[] { chunkSize, 2, 2 });
                memSpace = H5S.create_simple(3, new long[] { chunkSize, 2, 2 });
                H5D.write(quotesSetId, quotesTypeId, memSpace, quotesSpaceId, new H5PropertyListId(H5P.Template.DEFAULT), new H5Array<double>(quotesArr));
                H5S.close(memSpace);
                H5D.setExtent(dateQuotesSetId, new long[] { count });
                H5S.close(dateQuotesSpaceId);
                dateQuotesSpaceId = H5D.getSpace(dateQuotesSetId);
                H5S.selectHyperslab(dateQuotesSpaceId, H5S.SelectOperator.SET, new long[] { count - chunkSize }, new long[] { chunkSize });
                memSpace = H5S.create_simple(1, new long[] { chunkSize });
                H5D.write(dateQuotesSetId, dateQuotesTypeId, memSpace, dateQuotesSpaceId, new H5PropertyListId(H5P.Template.DEFAULT), new H5Array<long>(datesArr));
                H5S.close(memSpace);
                chunkCount = 0;
            }
        }
        if (count % chunkSize != 0)
        {
            // Flush the trailing partial chunk of `delta` ticks.
            int delta = count % chunkSize;
            H5D.setExtent(quotesSetId, new long[] { count, 2, 2 });
            H5S.close(quotesSpaceId);
            quotesSpaceId = H5D.getSpace(quotesSetId);
            H5S.selectHyperslab(quotesSpaceId, H5S.SelectOperator.SET, new long[] { count - delta, 0, 0 }, new long[] { delta, 2, 2 });
            memSpace = H5S.create_simple(3, new long[] { delta, 2, 2 });
            H5D.write(quotesSetId, quotesTypeId, memSpace, quotesSpaceId, new H5PropertyListId(H5P.Template.DEFAULT), new H5Array<double>(quotesArr));
            H5S.close(memSpace);
            H5D.setExtent(dateQuotesSetId, new long[] { count });
            H5S.close(dateQuotesSpaceId);
            dateQuotesSpaceId = H5D.getSpace(dateQuotesSetId);
            H5S.selectHyperslab(dateQuotesSpaceId, H5S.SelectOperator.SET, new long[] { count - delta }, new long[] { delta });
            memSpace = H5S.create_simple(1, new long[] { delta });
            H5D.write(dateQuotesSetId, dateQuotesTypeId, memSpace, dateQuotesSpaceId, new H5PropertyListId(H5P.Template.DEFAULT), new H5Array<long>(datesArr));
            H5S.close(memSpace);
        }
        H5P.close(createChunkedQuotes);
        H5P.close(linkCreationDefaultsQuotes);
        H5P.close(accessCreationDefaultsQuotes);
        H5P.close(createChunkedDate);
        H5P.close(linkCreationDefaultsDate);
        H5P.close(accessCreationDefaultsDate);
        H5S.close(quotesSpaceId);
        H5D.close(quotesSetId);
        H5S.close(dateQuotesSpaceId);
        H5D.close(dateQuotesSetId);
        //H5S.close(memSpace);
        H5F.close(fileId);
        if (includeLevel2)
        {
            this.Log($"level2 {symbol} are downloaded successfully");
        }
        else if (includeVWAP)
        {
            this.Log($"VWAP {symbol} are downloaded successfully");
        }
        else
        {
            this.Log($"Ticks {symbol} are downloaded successfully");
        }
        GC.Collect();
        GC.WaitForPendingFinalizers();
    }
    else if (outputType == "csv_zip")
    {
        // Zipped CSV path; the only output that supports VWAP.
        string quoteType = "Ticks";
        if (includeVWAP) { quoteType = "VWAP"; }
        if (includeLevel2) { quoteType = "TicksLevel2"; }
        string path = Path.Combine(location, $"{symbol.Replace("/", "%2F")}_{quoteType}_{from.ToString("yyyy-MM-dd-HH-mm-ss")}_{to.ToString("yyyy-MM-dd-HH-mm-ss")}.zip");
        Console.WriteLine(path);
        using (ZipOutputStream zs = new ZipOutputStream(File.Create(path)))
        {
            if (includeVWAP) { DownloadVWAPCSVNew(zs); }
            else if (includeLevel2) { DownloadLevel2CSVNew(enumerator, zs); }
            else { DownloadTicksCSVNew(enumerator, zs); }
        }
        if (includeLevel2)
        {
            this.Log($"level2 {symbol} are downloaded successfully");
        }
        else if (includeVWAP)
        {
            this.Log($"VWAP {symbol} are downloaded successfully");
        }
        else
        {
            this.Log($"Ticks {symbol} are downloaded successfully");
        }
    }
}
/// <summary>
/// Returns descriptive metadata for a dataset as name/value pairs: element type,
/// type size, dims, storage size, plus every HDF5 attribute attached to the
/// dataset. Returns null when the dataset name is unknown. Handles are released
/// in the finally block.
/// </summary>
/// <param name="datasetName">Dataset path, optionally including a group prefix.</param>
public Dictionary<string, string> GetAttributes(string datasetName)
{
    if (string.IsNullOrEmpty(datasetName) || !_datasetNames.Contains(datasetName))
    {
        return null;
    }
    H5DataSetId datasetId = null;
    H5GroupId groupId = null;
    H5DataTypeId typeId = null;
    H5DataSpaceId spaceId = null;
    //H5PropertyListId psId = null;
    try
    {
        // Split "group/dataset" paths: open the group first, then the dataset.
        int groupIndex = datasetName.LastIndexOf('/');
        if (groupIndex == -1)
        {
            datasetId = H5D.open(_h5FileId, datasetName);
        }
        else
        {
            string groupName = datasetName.Substring(0, groupIndex + 1);
            string dsName = datasetName.Substring(groupIndex + 1);
            groupId = H5G.open(_h5FileId, groupName);
            datasetId = H5D.open(groupId, dsName);
        }
        if (datasetId == null)
        {
            return null;
        }
        Dictionary<string, string> attValues = new Dictionary<string, string>();
        typeId = H5D.getType(datasetId);
        H5T.H5TClass type = H5T.getClass(typeId);
        int tSize = H5T.getSize(typeId);
        spaceId = H5D.getSpace(datasetId);
        long[] dims = H5S.getSimpleExtentDims(spaceId);
        long storageSize = H5D.getStorageSize(datasetId);
        // Synthetic entries describing the dataset itself.
        attValues.Add("DataSetName", datasetName);
        attValues.Add("DataType", type.ToString());
        attValues.Add("DataTypeSize", tSize.ToString() + "Byte");
        attValues.Add("Dims", String.Join("*", dims));
        attValues.Add("StorageSize", storageSize.ToString() + "Byte");
        // Real HDF5 attributes attached to the dataset.
        // NOTE(review): the "/" + datasetName lookup path assumes the dataset sits
        // at the file root — confirm for datasets opened via a group id above.
        int attrCount = H5A.getNumberOfAttributes(datasetId);
        for (int i = 0; i < attrCount; i++)
        {
            string attName = H5A.getNameByIndex(datasetId, "/" + datasetName, H5IndexType.NAME, H5IterationOrder.NATIVE, (ulong)i);
            attValues.Add(attName, ReadAttributeValue(datasetId, attName));
        }
        return attValues;
    }
    finally
    {
        if (spaceId != null) { H5S.close(spaceId); }
        if (typeId != null) { H5T.close(typeId); }
        if (datasetId != null) { H5D.close(datasetId); }
        if (groupId != null) { H5G.close(groupId); }
    }
}
/// <summary>
/// Reads a skim matrix from an HDF5 time-period file and returns it condensed to
/// the zone mapping as a ushort matrix. The roster name has the form
/// "hdf5File/datasetPath".
/// </summary>
/// <param name="filename">"hdf5File/datasetPath" combined name from the roster.</param>
/// <param name="field">Part of the reader interface; not used by this reader.</param>
/// <param name="scale">Scale factor applied to raw values (non-PSRC path only).</param>
public SkimMatrix Read(string filename, int field, float scale)
{
    Console.WriteLine("Reading {0}", filename);
    int hdf5NameEnd = filename.IndexOf("/");
    // the first part of the name in the roster file is the hdf5 file:
    string HDFName = filename.Substring(0, hdf5NameEnd);
    // rename filename to be only the name of the skim inside of the time period file
    filename = filename.Substring(hdf5NameEnd);
    string hdfFile = _path + "\\" + HDFName;
    long nRows, nCols;
    double[,] dataArray;
    // Read the raw matrix, releasing every HDF5 handle (the original leaked
    // the file, dataset, dataspace and datatype handles).
    var dataFile = H5F.open(hdfFile, H5F.OpenMode.ACC_RDONLY);
    try
    {
        var dataSet = H5D.open(dataFile, filename);
        try
        {
            var space = H5D.getSpace(dataSet);
            var size2 = H5S.getSimpleExtentDims(space);
            H5S.close(space);
            nRows = size2[0];
            nCols = size2[1];
            dataArray = new double[nRows, nCols];
            var wrapArray = new H5Array<double>(dataArray);
            H5DataTypeId tid1 = H5D.getType(dataSet);
            H5D.read(dataSet, tid1, wrapArray);
            H5T.close(tid1);
        }
        finally
        {
            H5D.close(dataSet);
        }
    }
    finally
    {
        H5F.close(dataFile);
    }
    long numZones = _mapping.Count();
    // if the count in the hdf5 file is larger than the number of
    // tazs in the mapping, ignore the values over the total number
    // of tazs in the mapping because these are not valid zones.
    _matrix = new ushort[numZones][];
    for (var i = 0; i < numZones; i++)
    {
        _matrix[i] = new ushort[numZones];
    }
    // leave as is for PSRC. Values are already scaled integers and matrices already condensed
    if (Global.Configuration.PSRC)
    {
        for (var i = 0; i < numZones; i++)
        {
            for (var j = 0; j < numZones; j++)
            {
                _matrix[i][j] = (ushort)dataArray[i, j];
            }
        }
    }
    else
    {
        for (var row = 0; row < nRows; row++)
        {
            if (_mapping.ContainsKey(row + 1))
            {
                for (var col = 0; col < nCols; col++)
                {
                    if (_mapping.ContainsKey(col + 1))
                    {
                        var value = dataArray[row, col] * scale;
                        if (value > 0)
                        {
                            // NOTE(review): values are clamped to short.MaxValue but
                            // stored as ushort — confirm the intended ceiling.
                            if (value > short.MaxValue)
                            {
                                value = short.MaxValue;
                            }
                            _matrix[_mapping[row + 1]][_mapping[col + 1]] = (ushort)value;
                        }
                    }
                }
            }
        }
    }
    var skimMatrix = new SkimMatrix(_matrix);
    return skimMatrix;
}
/// <summary>
/// "Open file" button handler: lets the user pick an HDF5 radar file, reads the
/// 5-D dataset "/radarData/data", collapses it into the 3-D instance arrays
/// (data, grid) used for imaging, resets scan state, and kicks off the scanner.
/// </summary>
private void button1_Click(object sender, EventArgs e)
{
    fileNameTextBox.Text = "";
    string filename = "";
    if (openFileDialog1.ShowDialog() == DialogResult.OK)
    {
        if ((openFileDialog1.OpenFile()) != null)
        {
            filename = openFileDialog1.FileName;
            fileNameTextBox.Text = openFileDialog1.FileName;
            Debug.WriteLine(filename);
        }
        // NOTE(review): if OpenFile() returned null, filename stays "" and the
        // H5F.open below would fail — confirm that path cannot occur here.
        H5.Open();
        var h5 = H5F.open(filename, H5F.OpenMode.ACC_RDONLY);
        var dataset = H5D.open(h5, "/radarData/data");
        var Space = H5D.getSpace(dataset);
        var size = H5S.getSimpleExtentDims(Space);
        // Last dimension is doubled: presumably interleaved real/imaginary pairs — TODO confirm.
        float[, , , ,] dataarray = new float[size[0], size[1], size[2], size[3], size[4] * 2];
        var wrapArray = new H5Array <float>(dataarray);
        NOS = (int)size[2]; //number_of_scan (X)
        NOC = (int)size[3]; //Number of channel (antenna) (Y)
        NOF = (int)size[4]; // Number of frequency (Z)
        // NOTE(review): both branches are identical — the NOS < NOF test is dead code.
        if (NOS < NOF) { m = NOS; last = n = 50; }
        else { m = NOS; last = n = 50; }
        // Allocate and zero the m-by-n working buffers.
        x = new float[m][];
        xb = new float[m][];
        c1 = new float[m][];
        y1 = new float[m][];
        for (int i = 0; i < m; i++)
        {
            x[i] = new float[n];
            xb[i] = new float[n];
            c1[i] = new float[n];
            y1[i] = new float[n];
            for (int j = 0; j < n; j++)
            {
                y1[i][j] = 0;
                x[i][j] = 0;
            }
        }
        textBox1.Text = size[2].ToString();
        textBox2.Text = size[4].ToString();
        textBox3.Text = size[3].ToString();
        var dataType = H5D.getType(dataset);
        H5D.read <float>(dataset, dataType, wrapArray);
        // NOTE(review): dataType, Space, dataset and h5 handles are never closed
        // before H5.Close() — consider releasing them explicitly.
        // Collapse the leading two singleton dimensions: keep [scan, channel, freq*2].
        data = new float[size[2], size[3], size[4] * 2];
        var xd = data.Length;
        Debug.WriteLine(xd);
        for (int k = 0; k < size[2]; k++)
        {
            for (int i = 0; i < size[3]; i++)
            {
                for (int j = 0; j < size[4] * 2; j++)
                {
                    data[k, i, j] = dataarray[0, 0, k, i, j];
                }
            }
        }
        // res = 10; //10mm
        res = 1; //100mm
        n_o_s = NOS; //640;// 510;//number of files
        n_o_c = NOC * res; //100;// NOC* res; //* res; //for outdoor =NOC * res for indoor=100; 150 for outdoor as after 15th channel readings were not proper
        n_o_f = NOF;
        // Build the imaging grid from every other frequency sample (j * 2 —
        // presumably the real component of each complex pair; TODO confirm).
        grid = new float[NOS, NOC *res, NOF];
        for (int k = 0; k < NOS; k++) //100mm
        {
            for (int i = 0; i < NOC; i++)
            {
                for (int j = 0; j < NOF; j++)
                {
                    grid[k, i, j] = data[k, i, j * 2];
                }
            }
        }
        H5.Close();
    }
    // Reset all scan/depth/channel cursors, then start imaging from the origin.
    hscn = 0;
    depth = 0;
    dtscn = 0;
    dtdep = 0;
    chnl = 0;
    dtchnl = 0;
    imagescanner(0, 0, 0);
}
/// <summary>
/// Returns the number of dark-field images in an HDF5 tomography file.
/// Tries the legacy "/dark" dataset first, then falls back to
/// "/exchange/data_dark"; returns 0 when neither layout is present.
/// </summary>
/// <param name="filename">Path to the HDF5 file on disk.</param>
/// <exception cref="Exception">Thrown when the file does not exist.</exception>
public static int GetNumberOfDarks(string filename)
{
    H5FileId zFile = null;
    H5DataSetId dataset = null;
    H5DataSpaceId space = null;
    long[] dims;
    if (!File.Exists(filename)) { throw new Exception("File not found."); }
    try
    {
        // First layout: a root-level "/dark" dataset.
        zFile = H5F.open(filename, H5F.OpenMode.ACC_RDONLY);
        dataset = H5D.open(zFile, "/dark");
        space = H5D.getSpace(dataset);
        dims = H5S.getSimpleExtentDims(space);
        // Close and null each handle as it is released so the finally block
        // does not double-close on the success path.
        H5S.close(space);
        space = null;
        H5D.close(dataset);
        dataset = null;
        H5F.close(zFile);
        zFile = null;
        // NOTE(review): assumes the dark count is dims[1] — confirm dataset layout.
        return((int)dims[1]);
    }
    catch { } // "/dark" absent or unreadable: fall through to the alternate layout.
    finally
    {
        // Only handles still non-null (i.e. not closed above) are released here.
        if (space != null) { H5S.close(space); }
        if (dataset != null) { H5D.close(dataset); }
        if (zFile != null) { H5F.close(zFile); }
    }
    zFile = null;
    try
    {
        // Second layout: the Data Exchange "/exchange/data_dark" dataset.
        zFile = H5F.open(filename, H5F.OpenMode.ACC_RDONLY);
        dataset = H5D.open(zFile, "/exchange/data_dark");
        space = H5D.getSpace(dataset);
        dims = H5S.getSimpleExtentDims(space);
        H5S.close(space);
        space = null;
        H5D.close(dataset);
        dataset = null;
        H5F.close(zFile);
        zFile = null;
        return((int)dims[1]);
    }
    catch { return(0); } // Neither layout present: report zero darks.
    finally
    {
        if (space != null) { H5S.close(space); }
        if (dataset != null) { H5D.close(dataset); }
        if (zFile != null) { H5F.close(zFile); }
    }
}
/// <summary>
/// Returns the extent of the first dimension of this object's dataset
/// (identified by <c>Id</c>), i.e. its number of rows.
/// </summary>
/// <returns>The size of dimension 0 of the dataset's dataspace.</returns>
protected long FindNumberOfRows()
{
    // FIX: hold the dataspace id in a local so it can be closed; the original
    // passed H5D.getSpace(Id) inline and leaked the handle on every call.
    H5DataSpaceId spaceId = H5D.getSpace(Id);
    try
    {
        var num_rows_data = H5S.getSimpleExtentDims(spaceId);
        return(num_rows_data[0]);
    }
    finally
    {
        H5S.close(spaceId);
    }
}
/// <summary>
/// Demo reader: opens "/group/dataset" in the given HDF5 file, prints its rank
/// and dims, reads the "int" and "string" attributes, and — when the dataset is
/// 2-D — dumps its integer contents to the console.
/// </summary>
/// <param name="filePath">Path to the HDF5 file on disk.</param>
private static void ReadFile(string filePath)
{
    var file = H5F.open(filePath, H5F.OpenMode.ACC_RDONLY);
    var dataSet = H5D.open(file, "/group/dataset");
    var fileSpace = H5D.getSpace(dataSet);
    var rank = H5S.getSimpleExtentNDims(fileSpace);
    WriteLine("Rank: {0}", rank);
    var dims = H5S.getSimpleExtentDims(fileSpace);
    Write("Dims:");
    foreach (var d in dims)
    {
        Write(" {0}", d);
    }
    WriteLine();
    H5S.close(fileSpace);
    // Scalar int attribute.
    var ints = new int[1];
    var intAttribute = H5A.openName(dataSet, "int");
    // FIX: keep the attribute's type id in a local so it can be closed;
    // the original passed H5A.getType(...) inline and leaked the handle.
    var intType = H5A.getType(intAttribute);
    H5A.read(intAttribute, intType, new H5Array <int>(ints));
    WriteLine("int: {0}", ints[0]);
    H5T.close(intType);
    H5A.close(intAttribute);
    // Fixed-length string attribute: read raw bytes sized from the type.
    var stringAttribute = H5A.openName(dataSet, "string");
    var stringType = H5A.getType(stringAttribute);
    var stringSize = H5T.getSize(stringType);
    WriteLine("string length: {0}", stringSize);
    var buffer = new byte[stringSize];
    H5A.read(stringAttribute, stringType, new H5Array <byte>(buffer));
    WriteLine("string: {0}", Encoding.ASCII.GetString(buffer));
    H5T.close(stringType);
    H5A.close(stringAttribute);
    if (rank == 2)
    {
        var data = new int[dims[0], dims[1]];
        // FIX: hold the dataset type id so it can be closed (was leaked).
        var dataType = H5D.getType(dataSet);
        H5D.read(dataSet, dataType, new H5Array <int>(data));
        H5T.close(dataType);
        for (int i = 0; i < data.GetLength(0); ++i)
        {
            for (int j = 0; j < data.GetLength(1); ++j)
            {
                Write(" {0}", data[i, j]);
            }
            WriteLine();
        }
    }
    H5D.close(dataSet);
    H5F.close(file);
}
/// <summary>
/// Reads an OMX-format skim matrix (doubles stored under the "data" folder of
/// the omx/hdf5 file) and converts it into a condensed ushort zone-by-zone
/// matrix via the DaySim zone mapping.
/// </summary>
/// <param name="filename">"omxFile/datasetPath" — file name up to the first '/', dataset path after it.</param>
/// <param name="field">Unused by this reader; kept for interface compatibility.</param>
/// <param name="scale">Multiplier applied to raw values before storing; should scale doubles into integer range.</param>
/// <returns>The populated <c>SkimMatrix</c>.</returns>
public SkimMatrix Read(string filename, int field, float scale)
{
    Console.WriteLine("Reading {0}", filename);
    int hdf5NameEnd = filename.IndexOf("/");
    // the first part of the name in the roster file is the omx/hdf5 file
    string HDFName = filename.Substring(0, hdf5NameEnd);
    //rename filename to be only the name of the skim matrix inside of the skim file
    //skims are stored in the "data" folder within the omx/hdf5 file
    filename = filename.Substring(hdf5NameEnd);
    string hdfFile = _path + "\\" + HDFName;
    var dataFile = H5F.open(hdfFile, H5F.OpenMode.ACC_RDONLY);
    var dataSet = H5D.open(dataFile, filename);
    var space = H5D.getSpace(dataSet);
    var size2 = H5S.getSimpleExtentDims(space);
    long nRows = size2[0];
    long nCols = size2[1];
    long numZones = _mapping.Count();
    // if the count in the hdf5 file is larger than the number of
    // tazs in the mapping, ignore the values over the total number
    // of tazs in the mapping because these are not valid zones.
    _matrix = new ushort[numZones][];
    for (var i = 0; i < numZones; i++)
    {
        _matrix[i] = new ushort[numZones];
    }
    //OMX is a square matrix of doubles
    //In addition to the data folder for matrices, an OMX file has a lookup folder
    //with a zone mapping vector. However, this is ignored since DaySim also has one.
    //Therefore, it is assumed the OMX matrix does not skip rows/cols and every row/col
    //corresponds to an actual zone in the DaySim zone mapping file by index
    //Scaling should be set to TRUE since OMX stores doubles (not scaled integers)
    var dataArray = new double[nRows, nCols];
    var wrapArray = new H5Array <double>(dataArray);
    H5DataTypeId tid1 = H5D.getType(dataSet);
    H5D.read(dataSet, tid1, wrapArray);
    // FIX: release all HDF5 handles once the matrix is in memory; the original
    // leaked the type, space, dataset and file ids on every call.
    H5T.close(tid1);
    H5S.close(space);
    H5D.close(dataSet);
    H5F.close(dataFile);
    for (var row = 0; row < nRows; row++)
    {
        if (_mapping.ContainsKey(row + 1))
        {
            for (var col = 0; col < nCols; col++)
            {
                if (_mapping.ContainsKey(col + 1))
                {
                    var value = dataArray[row, col] * scale;
                    if (value > 0)
                    {
                        // Clamp to the largest storable value before the ushort cast.
                        if (value > ushort.MaxValue - 1)
                        {
                            value = ushort.MaxValue - 1;
                        }
                        _matrix[_mapping[row + 1]][_mapping[col + 1]] = (ushort)value;
                    }
                }
            }
        }
    }
    var skimMatrix = new SkimMatrix(_matrix);
    return(skimMatrix);
}
/// <summary>
/// Reads the dataset <paramref name="groupName"/>/<paramref name="datasetName"/>
/// into <paramref name="datasetOut"/>. FLOAT and INT datasets are read directly;
/// COMPOUND datasets are read one integer member at a time, each member landing
/// in column i of the output.
/// </summary>
/// <param name="fileId">Open HDF5 file handle (not closed by this method).</param>
/// <param name="datasetName">Dataset name inside the group.</param>
/// <param name="groupName">Group containing the dataset.</param>
/// <param name="datasetOut">Pre-allocated 3-D destination array.</param>
/// <param name="type">Declared value type of the dataset, selecting the read strategy.</param>
public void GetDataset <T>(H5FileId fileId, string datasetName, string groupName, T[, ,] datasetOut, DataValueType type)
{
    H5GroupId groupId = H5G.open(fileId, groupName);
    H5DataSetId dataSetId = H5D.open(groupId, datasetName /*"EV_Emissive"*/);
    switch (type)
    {
    case DataValueType.FLOAT:
        // Wraps the predefined NATIVE_FLOAT type; predefined type ids must not
        // be closed (the author's original H5T.close call was commented out).
        H5DataTypeId tidfloat = new H5DataTypeId(H5T.H5Type.NATIVE_FLOAT);
        // Read the array back
        H5D.read(dataSetId, tidfloat, new H5Array <T>(datasetOut));
        break;
    case DataValueType.INT:
        H5DataTypeId tidint = new H5DataTypeId(H5T.H5Type.NATIVE_INT);
        // Read the array back
        H5D.read(dataSetId, tidint, new H5Array <T>(datasetOut));
        break;
    case DataValueType.COMPOUND:
        H5DataTypeId tid0 = H5D.getType(dataSetId);
        int nMember = H5T.getNMembers(tid0);
        H5DataSpaceId spaceid = H5D.getSpace(dataSetId);
        long[] dims = H5S.getSimpleExtentDims(spaceid); // dataset extent, e.g. [3,1800,2048]
        // Total element count across all dimensions.
        int length = 1;
        for (int i = 0; i < dims.Length; i++)
        {
            length *= (int)dims[i];
        }
        for (int i = 0; i < nMember; i++)
        {
            string memberName = H5T.getMemberName(tid0, i);
            H5DataTypeId memberTypeId = H5T.getMemberType(tid0, i);
            H5T.H5TClass dataClass = H5T.getClass(memberTypeId); // element class of this member
            string typeName = dataClass.ToString();
            if (typeName == "INTEGER") // only integer members are supported for now
            {
                // Build a one-member compound type so H5D.read extracts just this field.
                H5DataTypeId tidtmp = H5T.create(H5T.CreateClass.COMPOUND, sizeof(int));
                H5T.insert(tidtmp, memberName, 0, H5T.H5Type.NATIVE_INT);
                int[] dataTmp = new int[length];
                H5D.read(dataSetId, tidtmp, new H5Array <int>(dataTmp));
                H5T.close(tidtmp); // FIX: temporary compound type was leaked per member
                for (int j = 0; j < length; j++)
                {
                    datasetOut[0, j, i] = (T)Convert.ChangeType(dataTmp[j], datasetOut[0, j, i].GetType());
                }
            }
            H5T.close(memberTypeId); // FIX: member type id was leaked per member
        }
        H5T.close(tid0); // FIX: dataset type id was leaked
        H5S.close(spaceid);
        break;
    default:
        break;
    }
    H5D.close(dataSetId);
    H5G.close(groupId);
    //H5F.close(fileId);
}
/// <summary>
/// Returns the size of dimension <paramref name="dim"/> of the projection data.
/// Tries the legacy "/tomo" dataset first, then falls back to the Data Exchange
/// "/exchange/data" dataset.
/// </summary>
/// <param name="filename">Path to the HDF5 file on disk.</param>
/// <param name="dim">Zero-based index of the dimension to query.</param>
/// <exception cref="Exception">"File not found." when the file is missing; "Not a valid HDF5 file." when neither dataset can be read.</exception>
private static int GetDim(string filename, int dim)
{
    H5FileId zFile = null;
    H5DataSetId dataset = null;
    H5DataSpaceId space = null;
    long[] dims;
    if (!File.Exists(filename)) { throw new Exception("File not found."); }
    try
    {
        // First layout: a root-level "/tomo" dataset.
        zFile = H5F.open(filename, H5F.OpenMode.ACC_RDONLY);
        dataset = H5D.open(zFile, "/tomo");
        space = H5D.getSpace(dataset);
        dims = H5S.getSimpleExtentDims(space);
        // Close and null each handle as it is released so the finally block
        // does not double-close on the success path.
        H5S.close(space);
        space = null;
        H5D.close(dataset);
        dataset = null;
        H5F.close(zFile);
        zFile = null;
        return((int)dims[dim]);
    }
    catch { } // "/tomo" absent or unreadable: fall through to the alternate layout.
    finally
    {
        // Only handles still non-null (i.e. not closed above) are released here.
        if (space != null) { H5S.close(space); }
        if (dataset != null) { H5D.close(dataset); }
        if (zFile != null) { H5F.close(zFile); }
    }
    zFile = null;
    try
    {
        // Second layout: the Data Exchange "/exchange/data" dataset.
        zFile = H5F.open(filename, H5F.OpenMode.ACC_RDONLY);
        dataset = H5D.open(zFile, "/exchange/data");
        space = H5D.getSpace(dataset);
        dims = H5S.getSimpleExtentDims(space);
        H5S.close(space);
        space = null;
        H5D.close(dataset);
        dataset = null;
        H5F.close(zFile);
        zFile = null;
        return((int)dims[dim]);
    }
    catch { throw new Exception("Not a valid HDF5 file."); }
    finally
    {
        if (space != null) { H5S.close(space); }
        if (dataset != null) { H5D.close(dataset); }
        if (zFile != null) { H5F.close(zFile); }
    }
}
/// <summary>
/// Inspects a dataset and returns its layout (rank, rows, cols, bands) and
/// element type without reading any data. For 3-D datasets the dimension order
/// is assumed to be [band, row, col]; for compound datasets the member count is
/// reported as the column count.
/// </summary>
/// <param name="fileId">Open HDF5 file handle (not closed by this method).</param>
/// <param name="datasetName">Dataset name inside the group.</param>
/// <param name="groupName">Group containing the dataset.</param>
/// <returns>A populated <c>DatasetInfo</c>; unset fields default to 1.</returns>
public DatasetInfo GetDatasetInfo(H5FileId fileId, string datasetName, string groupName)
{
    DatasetInfo datasetInfo = new DatasetInfo();
    datasetInfo.band = 1;
    datasetInfo.col = 1;
    datasetInfo.rank = 1;
    datasetInfo.row = 1;
    H5GroupId groupId = H5G.open(fileId, groupName);
    H5DataSetId dataSetId = H5D.open(groupId, datasetName);
    // ulong storeSize = H5D.getStorageSize(dataSetId); // on-disk storage size
    H5DataSpaceId spaceid = H5D.getSpace(dataSetId);
    long[] dims = H5S.getSimpleExtentDims(spaceid);     // dataset extent, e.g. [3,1800,2048]
    datasetInfo.rank = H5S.getSimpleExtentNDims(spaceid); // number of dimensions, e.g. 3
    int dimCount = dims.Length;
    if (dimCount == 2)
    {
        datasetInfo.col = Convert.ToInt32(dims[1]); // width
        datasetInfo.row = Convert.ToInt32(dims[0]); // height
    }
    else if (dimCount == 3)
    {
        datasetInfo.band = Convert.ToInt32(dims[0]); // band count
        datasetInfo.col = Convert.ToInt32(dims[2]);  // width
        datasetInfo.row = Convert.ToInt32(dims[1]);  // height
    }
    else if (dimCount == 1)
    {
        datasetInfo.row = Convert.ToInt32(dims[0]); // height
    }
    H5DataTypeId typeId = H5D.getType(dataSetId);
    H5T.H5TClass dataClass = H5T.getClass(typeId); // element class of the dataset
    string typeName = dataClass.ToString();
    switch (typeName)
    {
    case "FLOAT":
        datasetInfo.type = DataValueType.FLOAT;
        break;
    case "INTEGER":
        datasetInfo.type = DataValueType.INT;
        break;
    case "COMPOUND":
        datasetInfo.type = DataValueType.COMPOUND;
        // FIX: query the member count through the already-open typeId; the
        // original opened a second type id (tid0) here and never closed it.
        int nMember = H5T.getNMembers(typeId);
        datasetInfo.col = nMember;
        break;
    default:
        datasetInfo.type = DataValueType.EMPTY;
        break;
    }
    H5T.close(typeId);
    H5S.close(spaceid);
    H5D.close(dataSetId);
    H5G.close(groupId);
    return(datasetInfo);
}
/*
 * test_group_iterate -- tests that group iterating works properly.
 *   - opens the file created in the test_group_basics
 *   - creates more groups and datasets
 *   - iterates through root group and each sub group printing out name of each object
 */
static void test_group_iterate()
{
    try
    {
        Console.Write("Testing group iteration");
        // Open the file.
        H5FileId fileId = H5F.open(FILE_NAME, H5F.OpenMode.ACC_RDWR);
        // Create a group in the file.
        H5GroupId groupId = H5G.create(fileId, "/Data");
        // Open first dataset.
        H5DataSetId dset1Id = H5D.open(fileId, "/fromRoot/intArray");
        // Get dataspace of this dataset.
        H5DataSpaceId dspace = H5D.getSpace(dset1Id);
        // Create a dataset in the group using absolute name.
        H5DataSetId dataset = H5D.create(groupId, "/Data/IntData", H5T.H5Type.NATIVE_INT, dspace);
        // Close the first dataset.
        // NOTE(review): this closes the borrowed dataspace and the new dataset;
        // dset1Id itself is never closed in this test.
        H5S.close(dspace);
        H5D.close(dataset);
        // Create the second dataset.
        hssize_t[] dims = { 500, 20 };
        dspace = H5S.create_simple(RANK, dims);
        dataset = H5D.create(groupId, "/Data/FloatData", H5T.H5Type.NATIVE_FLOAT, dspace);
        // Close objects and file.
        H5D.close(dataset);
        H5G.close(groupId);
        H5F.close(fileId);
        // Now reopen the file and group in the file.
        fileId = H5F.open(FILE_NAME, H5F.OpenMode.ACC_RDWR);
        groupId = H5G.open(fileId, "/Data");
        // Access "IntData" dataset in the group.
        dataset = H5D.open(groupId, "IntData");
        // Create a dataset in the root group.
        // NOTE(review): this reassignment drops the open "IntData" handle
        // without closing it.
        dataset = H5D.create(fileId, "/singleDataset", H5T.H5Type.NATIVE_INT, dspace);
        // Various checks on number of objects
        groupId = H5G.open(fileId, "/");
        hssize_t num_objs = H5G.getNumObjects(groupId);
        if (num_objs != 3)
        {
            Console.WriteLine("\ntest_group_iterate: / should have 3 objects: /fromRoot, /Data, and /singleDataset, but is {0}", num_objs);
        }
        H5G.close(groupId);
        groupId = H5G.open(fileId, "/fromRoot");
        num_objs = H5G.getNumObjects(groupId);
        if (num_objs != 3)
        {
            Console.WriteLine("\ntest_group_iterate: /fromRoot should have 3 objects: intArray, shortArray, notfromRoot, but is {0}", num_objs);
        }
        // Use iterator to see the names of the objects in the root group and sub groups.
        H5GIterateCallback myCallback;
        myCallback = file_info;
        int x = 0;
        Console.WriteLine();
        Console.WriteLine("\tRoot Group iterating:");
        int index = H5G.iterate(fileId, "/", myCallback, "Object name:", ref x);
        Console.WriteLine();
        Console.WriteLine("\t/fromRoot iterating:");
        x = 0;
        index = H5G.iterate(fileId, "/fromRoot", myCallback, "Object name:", ref x);
        // Close objects and file.
        H5D.close(dataset);
        H5G.close(groupId);
        H5S.close(dspace);
        H5F.close(fileId);
        Console.WriteLine("\t\t\t\t\t\t\tPASSED");
    }
    catch (HDFException anyHDF5E)
    {
        // HDF5-specific failure: report and count, but keep the test run going.
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        // Any other failure: report the failing site and message.
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_group_iterate
/// <summary>
/// Opens a dataset (inside <paramref name="id"/> when given, otherwise from
/// m_file) and validates it against <paramref name="blob"/>: rank must be in
/// [nMinDim, nMaxDim] and the element class must be float or integer. When
/// <paramref name="bReshape"/> is true the blob is reshaped to the dataset's
/// dims; otherwise the shapes must match, with an optional single-item escape
/// hatch controlled by <paramref name="bAllowSingleItems"/>.
/// </summary>
/// <returns>The open dataset id (caller is responsible for closing it) and the
/// single-item size (0 when not a single-item dataset).</returns>
private Tuple <H5DataSetId, int> load_nd_datasetEx(Blob <T> blob, string strDatasetName, bool bReshape, int nMinDim = 1, int nMaxDim = int.MaxValue, H5GroupId id = null, bool bAllowSingleItems = false)
{
    H5DataSetId ds = null;
    int nSingleItemSize = 0;
    try
    {
        if (id != null)
        {
            ds = H5D.open(id, strDatasetName);
        }
        else
        {
            ds = H5D.open(m_file, strDatasetName);
        }
        if (ds == null)
        {
            m_log.FAIL("Failed to find the dataset '" + strDatasetName + "'!");
        }
        // Verify that the number of dimensions are in the accepted range.
        H5DataSpaceId dsSpace = H5D.getSpace(ds);
        if (dsSpace == null)
        {
            m_log.FAIL("Failed to get the dataset space!");
        }
        int nDims = H5S.getSimpleExtentNDims(dsSpace);
        m_log.CHECK_GE(nDims, nMinDim, "The dataset dim is out of range!");
        m_log.CHECK_LE(nDims, nMaxDim, "The dataset dim is out of range!");
        long[] rgDims = H5S.getSimpleExtentDims(dsSpace);
        H5S.close(dsSpace); // FIX: dataspace id was leaked by the original
        // Verify that the data format is what we expect: float or double
        H5DataTypeId dsType = H5D.getType(ds);
        if (dsType == null)
        {
            m_log.FAIL("Failed to get the dataset type!");
        }
        H5T.H5TClass dataClass = H5T.getClass(dsType);
        H5T.close(dsType); // FIX: type id was leaked by the original
        switch (dataClass)
        {
        case H5T.H5TClass.FLOAT:
            m_log.WriteLine("Datatype class: H5T_FLOAT");
            break;
        case H5T.H5TClass.INTEGER:
            m_log.WriteLine("Datatype class: H5T_INTEGER");
            break;
        default:
            m_log.FAIL("Unsupported datatype class: " + dataClass.ToString());
            break;
        }
        List <int> rgBlobDims = new List <int>();
        for (int i = 0; i < nDims; i++)
        {
            rgBlobDims.Add((int)rgDims[i]);
        }
        if (bReshape)
        {
            blob.Reshape(rgBlobDims);
        }
        else
        {
            if (!Utility.Compare <int>(rgBlobDims, blob.shape()))
            {
                // NOTE(review): this FAILs whenever single items are disallowed, or
                // when a 1-D source has more than one element — confirm the intended
                // single-item semantics.
                if (!bAllowSingleItems || (rgBlobDims.Count == 1 && rgBlobDims[0] != 1))
                {
                    string strSrcShape = Utility.ToString <int>(rgBlobDims);
                    m_log.FAIL("Cannot load blob from hdf5; shape mismatch. Source shape = " + strSrcShape + ", target shape = " + blob.shape_string);
                }
                if (rgBlobDims.Count == 1)
                {
                    nSingleItemSize = rgBlobDims[0];
                }
            }
        }
    }
    catch (Exception)
    {
        // Release the dataset handle on any failure before propagating.
        if (ds != null)
        {
            H5D.close(ds);
            ds = null;
        }
        throw; // FIX: 'throw excpt;' reset the original stack trace
    }
    return(new Tuple <H5DataSetId, int>(ds, nSingleItemSize));
}