/// <summary>
/// Writes the given measurements as a 1-D deflate-compressed, chunked dataset
/// named <paramref name="name"/> under <paramref name="parent"/>.
/// </summary>
/// <param name="parent">Group that will contain the new dataset.</param>
/// <param name="name">Name of the dataset to create.</param>
/// <param name="measurements">Measurements to convert and write; enumerated exactly once.</param>
private void Write(H5GroupId parent, string name, IEnumerable<IMeasurement> measurements)
{
    // Materialize and convert once. The original enumerated the sequence four
    // times (three Count() calls plus the foreach), which is wasteful and
    // incorrect for one-shot enumerables.
    MeasurementT[] ms = measurements.Select(m => Convert(m)).ToArray();
    long[] dims = new long[] { ms.LongLength };

    H5DataSpaceId spaceId = H5S.create_simple(1, dims);

    // Set compression options for dataset (deflate requires chunked layout).
    H5PropertyListId dataSetPropertyList = H5P.create(H5P.PropertyListClass.DATASET_CREATE);
    H5P.setDeflate(dataSetPropertyList, NumericDataCompression);
    H5P.setChunk(dataSetPropertyList, dims);

    H5DataSetId dataSetId = H5D.create(parent, name, measurement_t, spaceId,
                                       new H5PropertyListId(H5P.Template.DEFAULT),
                                       dataSetPropertyList,
                                       new H5PropertyListId(H5P.Template.DEFAULT));

    H5D.write<MeasurementT>(dataSetId, measurement_t, new H5Array<MeasurementT>(ms));

    // Release every id we created; the original leaked the property list.
    H5D.close(dataSetId);
    H5S.close(spaceId);
    H5P.close(dataSetPropertyList);
}
/// <summary>
/// Verifies that an attribute can be created with an explicit
/// ATTRIBUTE_CREATE property list. Prints PASSED on success and increments
/// <c>nerrors</c> on any failure.
/// </summary>
static void test_attr_plist()
{
    try
    {
        Console.Write("Testing attribute property lists");

        // Removed: an unused local `hssize_t[] dims = { 256, 512 };` that was
        // never referenced anywhere in the test.
        const string PLST_FILE_NAME = ("tattr_plist.h5");
        hssize_t[] dims1 = { SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3 };
        hssize_t[] dims2 = { ATTR1_DIM };

        // Create file.
        H5FileId fileId = H5F.create(PLST_FILE_NAME, H5F.CreateMode.ACC_TRUNC);

        // Create dataspace for dataset.
        H5DataSpaceId space1_Id = H5S.create_simple(SPACE1_RANK, dims1);

        // Create a dataset.
        H5DataSetId dsetId = H5D.create(fileId, DSET1_NAME, H5T.H5Type.NATIVE_UCHAR, space1_Id);

        // Create dataspace for attribute.
        H5DataSpaceId space2_Id = H5S.create_simple(ATTR1_RANK, dims2);

        // Create default property list for attribute.
        H5PropertyListId plist = H5P.create(H5P.PropertyListClass.ATTRIBUTE_CREATE);

        // Create an attribute for the dataset using the property list.
        H5AttributeId attrId = H5A.create(dsetId, ATTR1_NAME, new H5DataTypeId(H5T.H5Type.NATIVE_INT), space2_Id, plist);

        // Close all objects.
        H5S.close(space1_Id);
        H5S.close(space2_Id);
        H5P.close(plist);
        H5A.close(attrId);
        H5D.close(dsetId);
        H5F.close(fileId);

        Console.WriteLine("\t\t\tPASSED");
    }
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_attr_plist
/// <summary>
/// Opens the dataset at <paramref name="setName"/> if it already exists
/// (resizing it to <paramref name="dim1"/> elements), otherwise creates any
/// missing intermediate groups plus a new chunked, deflate-compressed,
/// unlimited-extent 1-D dataset of the given type.
/// </summary>
/// <returns>An open dataset id; the caller is responsible for closing it.</returns>
private static H5DataSetId CreateDatasetIfNoneExists(H5FileId fileId, string setName, int dim1, H5DataTypeId dataType)
{
    // Ensure every intermediate group in the path exists.
    char[] sep = { '/' };
    string[] strings = setName.Split(sep);
    string path = "/";
    for (int x = 0; x < strings.Length - 1; x++)
    {
        path += strings[x] + "/";
        // Called for its side effect only; the returned group id was never used.
        // NOTE(review): the returned id is not closed here — confirm that
        // CreateGroupIfNoneExists manages the group id's lifetime itself.
        CreateGroupIfNoneExists(fileId, path);
    }

    long[] dims = new long[] { dim1 };
    long[] maxDims = new long[] { -1 }; // -1 maps to H5S_UNLIMITED

    H5DataSetId dataSetId;
    if (H5L.Exists(fileId, setName))
    {
        dataSetId = H5D.open(fileId, setName);
        H5D.setExtent(dataSetId, dims);
    }
    else
    {
        H5PropertyListId linkp = H5P.create(H5P.PropertyListClass.LINK_CREATE);
        H5PropertyListId accessp = H5P.create(H5P.PropertyListClass.DATASET_ACCESS);
        H5PropertyListId createp = H5P.create(H5P.PropertyListClass.DATASET_CREATE);
        H5P.setChunk(createp, dims);
        H5P.setDeflate(createp, 1);
        H5DataSpaceId sId = H5S.create_simple(1, dims, maxDims);
        dataSetId = H5D.create(fileId, setName, dataType, sId, linkp, createp, accessp);
        // The original leaked these four ids; the dataset keeps its own
        // internal references, so they are safe to close now.
        H5S.close(sId);
        H5P.close(linkp);
        H5P.close(accessp);
        H5P.close(createp);
    }
    return (dataSetId);
}
/// <summary>
/// Currently unused. Reads a rectangular sub-block of a 2-D dataset into
/// <paramref name="datasetOut"/>: <paramref name="rowcount"/> rows starting at
/// <paramref name="rowIndex"/>, and the first <paramref name="colcount"/> columns.
/// </summary>
/// <typeparam name="T">Element type of the caller-allocated output buffer.</typeparam>
/// <param name="fileId">Open HDF5 file.</param>
/// <param name="datasetName">Dataset name within the group.</param>
/// <param name="groupName">Group containing the dataset.</param>
/// <param name="datasetOut">Caller-allocated [rowcount, colcount] output buffer.</param>
/// <param name="rowIndex">First row of the hyperslab selection.</param>
/// <param name="rowcount">Number of rows to read.</param>
/// <param name="colcount">Number of columns to read.</param>
public void GetDataset<T>(H5FileId fileId, string datasetName, string groupName, T[,] datasetOut, int rowIndex, int rowcount, int colcount)
{
    H5GroupId groupId = H5G.open(fileId, groupName);
    H5DataSetId dataSetId = H5D.open(groupId, datasetName /* e.g. "EV_Emissive" */);
    // File datatype — opened only to be closed below; the read ignores it.
    H5DataTypeId tid0 = H5D.getType(dataSetId);
    H5DataSpaceId spaceid = H5D.getSpace(dataSetId);
    // Select the [rowIndex .. rowIndex+rowcount) x [0 .. colcount) hyperslab
    // in the file dataspace.
    long[] start = new long[2];
    start[0] = rowIndex;
    start[1] = 0;
    long[] count = new long[2];
    count[0] = rowcount;
    count[1] = colcount;
    H5S.selectHyperslab(spaceid, H5S.SelectOperator.SET, start, count);
    // Memory dataspace matching the selection.
    H5DataSpaceId simpleSpaceid = H5S.create_simple(2, count);
    H5PropertyListId listid = new H5PropertyListId(H5P.Template.DEFAULT);
    // Memory data type.
    // NOTE(review): hard-coded NATIVE_INT regardless of T and of the file
    // type (tid0) — confirm callers only ever pass int buffers before reuse.
    H5DataTypeId tid1 = new H5DataTypeId(H5T.H5Type.NATIVE_INT);
    // Read the selected block back into the caller's buffer.
    H5D.read(dataSetId, tid1, simpleSpaceid, spaceid, listid, new H5Array<T>(datasetOut));
    H5S.close(simpleSpaceid);
    H5S.close(spaceid);
    H5T.close(tid0);
    H5D.close(dataSetId);
    H5G.close(groupId);
}
/// <summary>
/// Creates a chunked, unlimited-extent 2-D dataset named
/// <paramref name="datasetName"/> and writes the full
/// (<paramref name="count"/> x <paramref name="parametersNumber"/>) contents
/// of <paramref name="barsData"/> into it.
/// </summary>
/// <typeparam name="T">Element type of the bar data.</typeparam>
/// <param name="fileId">Open, writable HDF5 file.</param>
/// <param name="datasetName">Name for the new dataset.</param>
/// <param name="barsData">Data to write; shape must be [count, parametersNumber].</param>
/// <param name="count">Number of rows (bars).</param>
/// <param name="parametersNumber">Number of columns per bar; also the chunk width.</param>
/// <param name="datatypeId">HDF5 element type of the dataset.</param>
void WriteDataToNewFile<T>(H5FileId fileId, string datasetName, T[,] barsData, long count, long parametersNumber, H5DataTypeId datatypeId)
{
    H5DataSpaceId dataspaceId = H5S.create_simple(2,
        new long[] { count, parametersNumber },
        new long[] { (long)H5S.H5SType.UNLIMITED, parametersNumber });
    H5PropertyListId createChunked = H5P.create(H5P.PropertyListClass.DATASET_CREATE);
    H5PropertyListId linkCreationDefaults = H5P.create(H5P.PropertyListClass.LINK_CREATE);
    H5PropertyListId accessCreationDefaults = H5P.create(H5P.PropertyListClass.DATASET_ACCESS);
    // One row per chunk.
    H5P.setChunk(createChunked, new long[] { 1, parametersNumber });
    H5DataSetId datasetId = H5D.create(fileId, datasetName, datatypeId, dataspaceId,
        linkCreationDefaults, createChunked, accessCreationDefaults);
    H5D.setExtent(datasetId, new long[] { count, parametersNumber });
    H5DataSpaceId newSpace = H5D.getSpace(datasetId);
    H5S.selectHyperslab(newSpace, H5S.SelectOperator.SET,
        new long[] { 0, 0 }, new long[] { count, parametersNumber });
    // Fix: the original selected the hyperslab on newSpace but then called the
    // write overload that uses the default (ALL) spaces, leaving the selection
    // unused. Pass the memory and (selected) file dataspaces explicitly; the
    // selection covers the whole extent, so the written data is identical.
    H5D.write(datasetId, datatypeId, dataspaceId, newSpace,
        new H5PropertyListId(H5P.Template.DEFAULT), new H5Array<T>(barsData));
    H5P.close(createChunked);
    H5P.close(linkCreationDefaults);
    H5P.close(accessCreationDefaults);
    H5S.close(newSpace);
    H5S.close(dataspaceId);
    H5D.close(datasetId);
}
/// <summary>
/// Writes <paramref name="values"/> into the dataset named by the file-level
/// naming convention (prefix + className + p), creating the dataset if needed
/// and resizing it to exactly values.Length elements first.
/// </summary>
/// <param name="values">Integer data to write (defines the new extent).</param>
/// <param name="className">Class-name component of the dataset path.</param>
/// <param name="p">Final component of the dataset path.</param>
private static void WriteHdf5Data(int[] values, string className, string p)
{
    H5FileId fileId = GetFileId();
    string setName = GetNamePrefix() + className + p;
    int dim1 = GetChunkSize();
    int writeSize = values.Length;
    H5DataTypeId dataType = new H5DataTypeId(H5T.H5Type.NATIVE_INT);
    H5DataSetId dataSetId = CreateDatasetIfNoneExists(fileId, setName, dim1, dataType);
    H5Array<int> wrapArray = new H5Array<int>(values);
    // Both spaces are ALL: write the entire buffer over the entire dataset.
    H5DataSpaceId fileSpaceId = new H5DataSpaceId(H5S.H5SType.ALL);
    H5DataSpaceId memSpaceId = new H5DataSpaceId(H5S.H5SType.ALL);
    H5PropertyListId xferProp = H5P.create(H5P.PropertyListClass.DATASET_XFER);
    long[] newSize = new long[] { writeSize };
    H5D.setExtent(dataSetId, newSize);
    H5D.write(dataSetId, dataType, memSpaceId, fileSpaceId, xferProp, wrapArray);
    // Fix: the original leaked both the transfer property list and the
    // dataset id (it only closed the file).
    H5P.close(xferProp);
    H5D.close(dataSetId);
    H5F.close(fileId);
}
/// <summary>
/// Downloads quotes for <c>symbol</c> over [<c>from</c>, <c>to</c>] at top-of-book
/// or level-2 depth (per <c>includeLevel2</c>) and stores them either as a CSV
/// file or as an HDF5 file with "Quotes" and "DateQuotes" datasets, depending
/// on <c>outputType</c>.
/// </summary>
void DownloadQuotes()
{
    QuoteDepth marketDepth = includeLevel2 ? QuoteDepth.Level2 : QuoteDepth.Top;
    DownloadQuotesEnumerator enumerator = quoteClient.DownloadQuotes(symbol, marketDepth, from, to, -1);
    if (outputType == "csv")
    {
        string path = Path.Combine(this.location, string.Format("{0}{1}{2}{3}.csv", symbol, includeLevel2 ? " level2" : "", from.ToString(" yyyyMMdd"), to.ToString(" yyyyMMdd")));
        using (StreamWriter file = File.CreateText(path))
        {
            file.WriteLine("date_time,bid_price,bid_volume,ask_price,ask_volume");
            for (Quote quote = enumerator.Next(-1); quote != null; quote = enumerator.Next(-1))
            {
                StringBuilder builder = new StringBuilder();
                builder.Append(quote.CreatingTime.ToString("yyyy-MM-dd HH:mm:ss.fff,", CultureInfo.InvariantCulture));
                // NOTE(review): bid entries get a trailing comma but ask entries
                // do not, so with level-2 depth (multiple asks) adjacent ask
                // pairs run together — confirm whether only Top depth was
                // intended for CSV output.
                foreach (QuoteEntry entry in quote.Bids)
                {
                    builder.AppendFormat("{0},{1},", entry.Price, entry.Volume);
                }
                foreach (QuoteEntry entry in quote.Asks)
                {
                    builder.AppendFormat("{0},{1}", entry.Price, entry.Volume);
                }
                file.WriteLine(builder);
            }
        }
        this.Log("Quotes are downloaded successfully");
    }
    else if (outputType == "hdf5")
    {
        string path = Path.Combine(this.location, string.Format("{0}{1}{2}{3}.h5", symbol, includeLevel2 ? " level2" : "", from.ToString(" yyyyMMdd"), to.ToString(" yyyyMMdd")));
        H5FileId fileId = H5F.create(path, H5F.CreateMode.ACC_TRUNC);
        // "Quotes": unlimited x 2 x 2 doubles — [tick, bid|ask, price|volume].
        H5DataTypeId quotesTypeId = new H5DataTypeId(H5T.H5Type.NATIVE_DOUBLE);
        H5DataSpaceId quotesSpaceId = H5S.create_simple(3, new long[] { 1, 2, 2 }, new long[] { (long)H5S.H5SType.UNLIMITED, 2, 2 });
        H5PropertyListId createChunkedQuotes = H5P.create(H5P.PropertyListClass.DATASET_CREATE);
        H5PropertyListId linkCreationDefaultsQuotes = H5P.create(H5P.PropertyListClass.LINK_CREATE);
        H5PropertyListId accessCreationDefaultsQuotes = H5P.create(H5P.PropertyListClass.DATASET_ACCESS);
        H5P.setChunk(createChunkedQuotes, new long[] { 1, 2, 2 });
        H5DataSetId quotesSetId = H5D.create(fileId, "Quotes", quotesTypeId, quotesSpaceId, linkCreationDefaultsQuotes, createChunkedQuotes, accessCreationDefaultsQuotes);
        // "DateQuotes": unlimited 1-D array of epoch-milliseconds timestamps.
        H5DataTypeId dateQuotesTypeId = new H5DataTypeId(H5T.H5Type.NATIVE_LLONG);
        H5DataSpaceId dateQuotesSpaceId = H5S.create_simple(1, new long[] { 1 }, new long[] { (long)H5S.H5SType.UNLIMITED });
        H5PropertyListId createChunkedDate = H5P.create(H5P.PropertyListClass.DATASET_CREATE);
        H5PropertyListId linkCreationDefaultsDate = H5P.create(H5P.PropertyListClass.LINK_CREATE);
        H5PropertyListId accessCreationDefaultsDate = H5P.create(H5P.PropertyListClass.DATASET_ACCESS);
        H5P.setChunk(createChunkedDate, new long[] { 1 });
        H5DataSetId dateQuotesSetId = H5D.create(fileId, "DateQuotes", dateQuotesTypeId, dateQuotesSpaceId, linkCreationDefaultsDate, createChunkedDate, accessCreationDefaultsDate);
        // Buffer chunkSize quotes in memory, flushing a full chunk at a time.
        int count = 0;
        int chunkCount = 0;
        double[,,] quotesArr = new double[chunkSize, 2, 2];
        long[] datesArr = new long[chunkSize];
        H5DataSpaceId memSpace;
        for (Quote quote = enumerator.Next(-1); quote != null; quote = enumerator.Next(-1))
        {
            // NOTE(review): the last dimension has size 2, so a second
            // bid/ask entry (level-2 depth) drives j to 2 and throws
            // IndexOutOfRangeException — confirm hdf5 output is Top-depth only.
            int j = 0;
            foreach (QuoteEntry entry in quote.Bids)
            {
                quotesArr[chunkCount, 0, j] = entry.Price;
                quotesArr[chunkCount, 0, j + 1] = entry.Volume;
                j += 2;
            }
            j = 0;
            foreach (QuoteEntry entry in quote.Asks)
            {
                quotesArr[chunkCount, 1, j] = entry.Price;
                quotesArr[chunkCount, 1, j + 1] = entry.Volume;
                j += 2;
            }
            datesArr[chunkCount] = (long)quote.CreatingTime.Subtract(new DateTime(1970, 1, 1)).TotalMilliseconds;
            chunkCount++;
            count++;
            if (chunkCount == chunkSize)
            {
                // Flush a full chunk: grow the dataset, reselect the new tail
                // region of the file space, and write the buffered block.
                // NOTE(review): the memory dataspaces created below are never
                // closed, leaking one id per flushed chunk (the later variant
                // of this method adds H5S.close(memSpace) calls).
                H5D.setExtent(quotesSetId, new long[] { count, 2, 2 });
                H5S.close(quotesSpaceId);
                quotesSpaceId = H5D.getSpace(quotesSetId);
                H5S.selectHyperslab(quotesSpaceId, H5S.SelectOperator.SET, new long[] { count - chunkSize, 0, 0 }, new long[] { chunkSize, 2, 2 });
                memSpace = H5S.create_simple(3, new long[] { chunkSize, 2, 2 });
                H5D.write(quotesSetId, quotesTypeId, memSpace, quotesSpaceId, new H5PropertyListId(H5P.Template.DEFAULT), new H5Array<double>(quotesArr));
                H5D.setExtent(dateQuotesSetId, new long[] { count });
                H5S.close(dateQuotesSpaceId);
                dateQuotesSpaceId = H5D.getSpace(dateQuotesSetId);
                H5S.selectHyperslab(dateQuotesSpaceId, H5S.SelectOperator.SET, new long[] { count - chunkSize }, new long[] { chunkSize });
                memSpace = H5S.create_simple(1, new long[] { chunkSize });
                H5D.write(dateQuotesSetId, dateQuotesTypeId, memSpace, dateQuotesSpaceId, new H5PropertyListId(H5P.Template.DEFAULT), new H5Array<long>(datesArr));
                chunkCount = 0;
            }
        }
        if (count % chunkSize != 0)
        {
            // Flush the final partial chunk (first `delta` buffered entries).
            int delta = count % chunkSize;
            H5D.setExtent(quotesSetId, new long[] { count, 2, 2 });
            H5S.close(quotesSpaceId);
            quotesSpaceId = H5D.getSpace(quotesSetId);
            H5S.selectHyperslab(quotesSpaceId, H5S.SelectOperator.SET, new long[] { count - delta, 0, 0 }, new long[] { delta, 2, 2 });
            memSpace = H5S.create_simple(3, new long[] { delta, 2, 2 });
            H5D.write(quotesSetId, quotesTypeId, memSpace, quotesSpaceId, new H5PropertyListId(H5P.Template.DEFAULT), new H5Array<double>(quotesArr));
            H5D.setExtent(dateQuotesSetId, new long[] { count });
            H5S.close(dateQuotesSpaceId);
            dateQuotesSpaceId = H5D.getSpace(dateQuotesSetId);
            H5S.selectHyperslab(dateQuotesSpaceId, H5S.SelectOperator.SET, new long[] { count - delta }, new long[] { delta });
            memSpace = H5S.create_simple(1, new long[] { delta });
            H5D.write(dateQuotesSetId, dateQuotesTypeId, memSpace, dateQuotesSpaceId, new H5PropertyListId(H5P.Template.DEFAULT), new H5Array<long>(datesArr));
        }
        // Release all HDF5 ids (except the leaked memory dataspaces noted above).
        H5P.close(createChunkedQuotes);
        H5P.close(linkCreationDefaultsQuotes);
        H5P.close(accessCreationDefaultsQuotes);
        H5P.close(createChunkedDate);
        H5P.close(linkCreationDefaultsDate);
        H5P.close(accessCreationDefaultsDate);
        H5S.close(quotesSpaceId);
        H5D.close(quotesSetId);
        H5S.close(dateQuotesSpaceId);
        H5D.close(dateQuotesSetId);
        H5F.close(fileId);
        this.Log("Quotes are downloaded successfully");
    }
}
/// <summary>
/// Downloads quotes for <c>symbol</c> over [<c>from</c>, <c>to</c>] at top-of-book
/// or level-2 depth and stores them as CSV, HDF5 ("Quotes" + "DateQuotes"
/// datasets), or a zipped CSV, depending on <c>outputType</c>. VWAP is only
/// supported by the csv_zip output path.
/// </summary>
void DownloadQuotes()
{
    QuoteDepth marketDepth = includeLevel2 ? QuoteDepth.Level2 : QuoteDepth.Top;
    DownloadQuotesEnumerator enumerator = quoteClient.DownloadQuotes(symbol, marketDepth, from, to, -1);
    // Keep a reference so the enumerator can be reached elsewhere
    // (presumably for cancellation — confirm against the field's other users).
    enumeratorTicks = enumerator;
    if (outputType == "csv")
    {
        if (includeVWAP)
        {
            throw new ArgumentException("VWAP is not supported for hdf5 and csv format.");
        }
        string path = Path.Combine(this.location, string.Format("{0}{1}{2}{3}.csv", symbol.Replace("/", "%2F"), includeLevel2 ? " level2" : "", from.ToString(" yyyyMMdd-HH-mm-ss"), to.ToString(" yyyyMMdd-HH-mm-ss")));
        using (StreamWriter file = File.CreateText(path))
        {
            file.WriteLine("date_time;bid_price;bid_volume;ask_price;ask_volume");
            for (Quote quote = enumerator.Next(-1); quote != null; quote = enumerator.Next(-1))
            {
                StringBuilder builder = new StringBuilder();
                builder.Append(quote.CreatingTime.ToString("yyyy-MM-dd HH:mm:ss.fff;", CultureInfo.InvariantCulture));
                // Indicative quotes are flagged by writing a negated volume.
                foreach (QuoteEntry entry in quote.Bids)
                {
                    if (quote.TickType.HasFlag(TickTypes.IndicativeBid) || quote.TickType.HasFlag(TickTypes.IndicativeBidAsk))
                    {
                        builder.AppendFormat("{0};{1};", (decimal)entry.Price, (decimal) - entry.Volume);
                    }
                    else
                    {
                        builder.AppendFormat("{0};{1};", (decimal)entry.Price, (decimal)entry.Volume);
                    }
                }
                foreach (QuoteEntry entry in quote.Asks)
                {
                    if (quote.TickType.HasFlag(TickTypes.IndicativeAsk) || quote.TickType.HasFlag(TickTypes.IndicativeBidAsk))
                    {
                        builder.AppendFormat("{0};{1};", (decimal)entry.Price, (decimal) - entry.Volume);
                    }
                    else
                    {
                        builder.AppendFormat("{0};{1};", (decimal)entry.Price, (decimal)entry.Volume);
                    }
                }
                // Drop the trailing field separator.
                builder.Remove(builder.Length - 1, 1);
                file.WriteLine(builder);
            }
        }
        // NOTE(review): the VWAP branch below is unreachable here — includeVWAP
        // already threw above.
        if (includeLevel2)
        {
            this.Log($"level2 {symbol} are downloaded successfully");
        }
        else if (includeVWAP)
        {
            this.Log($"VWAP {symbol} are downloaded successfully");
        }
        else
        {
            this.Log($"Ticks {symbol} are downloaded successfully");
        }
    }
    else if (outputType == "hdf5")
    {
        if (includeVWAP)
        {
            throw new ArgumentException("VWAP is not supported for hdf5 and csv format.");
        }
        string path = Path.Combine(this.location, string.Format("{0}{1}{2}{3}.h5", symbol.Replace("/", "%2F"), includeLevel2 ? " level2" : "", from.ToString(" yyyyMMdd-HH-mm-ss"), to.ToString(" yyyyMMdd-HH-mm-ss")));
        H5FileId fileId = H5F.create(path, H5F.CreateMode.ACC_TRUNC);
        // "Quotes": unlimited x 2 x 2 doubles — [tick, bid|ask, price|volume].
        H5DataTypeId quotesTypeId = new H5DataTypeId(H5T.H5Type.NATIVE_DOUBLE);
        H5DataSpaceId quotesSpaceId = H5S.create_simple(3, new long[] { 1, 2, 2 }, new long[] { (long)H5S.H5SType.UNLIMITED, 2, 2 });
        H5PropertyListId createChunkedQuotes = H5P.create(H5P.PropertyListClass.DATASET_CREATE);
        H5PropertyListId linkCreationDefaultsQuotes = H5P.create(H5P.PropertyListClass.LINK_CREATE);
        H5PropertyListId accessCreationDefaultsQuotes = H5P.create(H5P.PropertyListClass.DATASET_ACCESS);
        H5P.setChunk(createChunkedQuotes, new long[] { 1, 2, 2 });
        H5DataSetId quotesSetId = H5D.create(fileId, "Quotes", quotesTypeId, quotesSpaceId, linkCreationDefaultsQuotes, createChunkedQuotes, accessCreationDefaultsQuotes);
        // "DateQuotes": unlimited 1-D array of epoch-milliseconds timestamps.
        H5DataTypeId dateQuotesTypeId = new H5DataTypeId(H5T.H5Type.NATIVE_LLONG);
        H5DataSpaceId dateQuotesSpaceId = H5S.create_simple(1, new long[] { 1 }, new long[] { (long)H5S.H5SType.UNLIMITED });
        H5PropertyListId createChunkedDate = H5P.create(H5P.PropertyListClass.DATASET_CREATE);
        H5PropertyListId linkCreationDefaultsDate = H5P.create(H5P.PropertyListClass.LINK_CREATE);
        H5PropertyListId accessCreationDefaultsDate = H5P.create(H5P.PropertyListClass.DATASET_ACCESS);
        H5P.setChunk(createChunkedDate, new long[] { 1 });
        H5DataSetId dateQuotesSetId = H5D.create(fileId, "DateQuotes", dateQuotesTypeId, dateQuotesSpaceId, linkCreationDefaultsDate, createChunkedDate, accessCreationDefaultsDate);
        // Buffer chunkSize quotes in memory, flushing a full chunk at a time.
        int count = 0;
        int chunkCount = 0;
        double[,,] quotesArr = new double[chunkSize, 2, 2];
        long[] datesArr = new long[chunkSize];
        H5DataSpaceId memSpace = null;
        for (Quote quote = enumerator.Next(-1); quote != null; quote = enumerator.Next(-1))
        {
            // NOTE(review): the last dimension has size 2, so a second
            // bid/ask entry (level-2 depth) drives j to 2 and throws
            // IndexOutOfRangeException — confirm hdf5 output is Top-depth only.
            int j = 0;
            foreach (QuoteEntry entry in quote.Bids)
            {
                quotesArr[chunkCount, 0, j] = entry.Price;
                quotesArr[chunkCount, 0, j + 1] = entry.Volume;
                j += 2;
            }
            j = 0;
            foreach (QuoteEntry entry in quote.Asks)
            {
                quotesArr[chunkCount, 1, j] = entry.Price;
                quotesArr[chunkCount, 1, j + 1] = entry.Volume;
                j += 2;
            }
            datesArr[chunkCount] = (long)quote.CreatingTime.Subtract(new DateTime(1970, 1, 1)).TotalMilliseconds;
            chunkCount++;
            count++;
            if (chunkCount == chunkSize)
            {
                // Flush a full chunk: grow the dataset, reselect the new tail
                // region of the file space, and write the buffered block.
                H5D.setExtent(quotesSetId, new long[] { count, 2, 2 });
                H5S.close(quotesSpaceId);
                quotesSpaceId = H5D.getSpace(quotesSetId);
                H5S.selectHyperslab(quotesSpaceId, H5S.SelectOperator.SET, new long[] { count - chunkSize, 0, 0 }, new long[] { chunkSize, 2, 2 });
                memSpace = H5S.create_simple(3, new long[] { chunkSize, 2, 2 });
                H5D.write(quotesSetId, quotesTypeId, memSpace, quotesSpaceId, new H5PropertyListId(H5P.Template.DEFAULT), new H5Array<double>(quotesArr));
                H5S.close(memSpace);
                H5D.setExtent(dateQuotesSetId, new long[] { count });
                H5S.close(dateQuotesSpaceId);
                dateQuotesSpaceId = H5D.getSpace(dateQuotesSetId);
                H5S.selectHyperslab(dateQuotesSpaceId, H5S.SelectOperator.SET, new long[] { count - chunkSize }, new long[] { chunkSize });
                memSpace = H5S.create_simple(1, new long[] { chunkSize });
                H5D.write(dateQuotesSetId, dateQuotesTypeId, memSpace, dateQuotesSpaceId, new H5PropertyListId(H5P.Template.DEFAULT), new H5Array<long>(datesArr));
                H5S.close(memSpace);
                chunkCount = 0;
            }
        }
        if (count % chunkSize != 0)
        {
            // Flush the final partial chunk (first `delta` buffered entries).
            int delta = count % chunkSize;
            H5D.setExtent(quotesSetId, new long[] { count, 2, 2 });
            H5S.close(quotesSpaceId);
            quotesSpaceId = H5D.getSpace(quotesSetId);
            H5S.selectHyperslab(quotesSpaceId, H5S.SelectOperator.SET, new long[] { count - delta, 0, 0 }, new long[] { delta, 2, 2 });
            memSpace = H5S.create_simple(3, new long[] { delta, 2, 2 });
            H5D.write(quotesSetId, quotesTypeId, memSpace, quotesSpaceId, new H5PropertyListId(H5P.Template.DEFAULT), new H5Array<double>(quotesArr));
            H5S.close(memSpace);
            H5D.setExtent(dateQuotesSetId, new long[] { count });
            H5S.close(dateQuotesSpaceId);
            dateQuotesSpaceId = H5D.getSpace(dateQuotesSetId);
            H5S.selectHyperslab(dateQuotesSpaceId, H5S.SelectOperator.SET, new long[] { count - delta }, new long[] { delta });
            memSpace = H5S.create_simple(1, new long[] { delta });
            H5D.write(dateQuotesSetId, dateQuotesTypeId, memSpace, dateQuotesSpaceId, new H5PropertyListId(H5P.Template.DEFAULT), new H5Array<long>(datesArr));
            H5S.close(memSpace);
        }
        // Release all remaining HDF5 ids.
        H5P.close(createChunkedQuotes);
        H5P.close(linkCreationDefaultsQuotes);
        H5P.close(accessCreationDefaultsQuotes);
        H5P.close(createChunkedDate);
        H5P.close(linkCreationDefaultsDate);
        H5P.close(accessCreationDefaultsDate);
        H5S.close(quotesSpaceId);
        H5D.close(quotesSetId);
        H5S.close(dateQuotesSpaceId);
        H5D.close(dateQuotesSetId);
        //H5S.close(memSpace);
        H5F.close(fileId);
        // NOTE(review): the VWAP branch below is unreachable here — includeVWAP
        // already threw above.
        if (includeLevel2)
        {
            this.Log($"level2 {symbol} are downloaded successfully");
        }
        else if (includeVWAP)
        {
            this.Log($"VWAP {symbol} are downloaded successfully");
        }
        else
        {
            this.Log($"Ticks {symbol} are downloaded successfully");
        }
        GC.Collect();
        GC.WaitForPendingFinalizers();
    }
    else if (outputType == "csv_zip")
    {
        // csv_zip supports all three quote flavors; the writers stream straight
        // into the zip entry.
        string quoteType = "Ticks";
        if (includeVWAP)
        {
            quoteType = "VWAP";
        }
        if (includeLevel2)
        {
            quoteType = "TicksLevel2";
        }
        string path = Path.Combine(location, $"{symbol.Replace("/", "%2F")}_{quoteType}_{from.ToString("yyyy-MM-dd-HH-mm-ss")}_{to.ToString("yyyy-MM-dd-HH-mm-ss")}.zip");
        Console.WriteLine(path);
        using (ZipOutputStream zs = new ZipOutputStream(File.Create(path)))
        {
            if (includeVWAP)
            {
                DownloadVWAPCSVNew(zs);
            }
            else if (includeLevel2)
            {
                DownloadLevel2CSVNew(enumerator, zs);
            }
            else
            {
                DownloadTicksCSVNew(enumerator, zs);
            }
        }
        if (includeLevel2)
        {
            this.Log($"level2 {symbol} are downloaded successfully");
        }
        else if (includeVWAP)
        {
            this.Log($"VWAP {symbol} are downloaded successfully");
        }
        else
        {
            this.Log($"Ticks {symbol} are downloaded successfully");
        }
    }
}
/// <summary>
/// Walk-through of the HDF5DotNet API: creates a file with groups, a fixed-size
/// integer dataset, a variable-length dataset, and several attributes; then
/// demonstrates reading them back, link/attribute iteration, metadata queries,
/// and error-handler installation. Intended as an example program, so ids are
/// not exhaustively closed (see NOTE(review) comments).
/// </summary>
static void Main(string[] args)
{
    try
    {
        // We will write and read an int array of this length.
        const int DATA_ARRAY_LENGTH = 12;
        // Rank is the number of dimensions of the data array.
        const int RANK = 1;
        // Create an HDF5 file.
        // The enumeration type H5F.CreateMode provides only the legal
        // creation modes. Missing H5Fcreate parameters are provided
        // with default values.
        H5FileId fileId = H5F.create("myCSharp.h5", H5F.CreateMode.ACC_TRUNC);
        // Create a HDF5 group.
        H5GroupId groupId = H5G.create(fileId, "/cSharpGroup");
        H5GroupId subGroup = H5G.create(groupId, "mySubGroup");
        // Close the subgroup.
        H5G.close(subGroup);
        // Prepare to create a data space for writing a 1-dimensional
        // signed integer array.
        long[] dims = new long[RANK];
        dims[0] = DATA_ARRAY_LENGTH;
        // Put descending ramp data in an array so that we can
        // write it to the file.
        int[] dset_data = new int[DATA_ARRAY_LENGTH];
        for (int i = 0; i < DATA_ARRAY_LENGTH; i++)
        {
            dset_data[i] = DATA_ARRAY_LENGTH - i;
        }
        // Create a data space to accommodate our 1-dimensional array.
        // The resulting H5DataSpaceId will be used to create the
        // data set.
        H5DataSpaceId spaceId = H5S.create_simple(RANK, dims);
        // Create a copy of a standard data type. We will use the
        // resulting H5DataTypeId to create the data set. We could
        // have used the HST.H5Type data directly in the call to
        // H5D.create, but this demonstrates the use of H5T.copy
        // and the use of a H5DataTypeId in H5D.create.
        H5DataTypeId typeId = H5T.copy(H5T.H5Type.NATIVE_INT);
        // Find the size of the type
        int typeSize = H5T.getSize(typeId);
        Console.WriteLine("typeSize is {0}", typeSize);
        // Set the order to big endian
        H5T.setOrder(typeId, H5T.Order.BE);
        // Set the order to little endian
        H5T.setOrder(typeId, H5T.Order.LE);
        // Create the data set.
        H5DataSetId dataSetId = H5D.create(fileId, "/csharpExample", typeId, spaceId);
        // Write the integer data to the data set.
        H5D.write(dataSetId, new H5DataTypeId(H5T.H5Type.NATIVE_INT), new H5Array<int>(dset_data));
        // If we were writing a single value it might look like this.
        //  int singleValue = 100;
        //  H5D.writeScalar(dataSetId,
        //     new H5DataTypeId(H5T.H5Type.NATIVE_INT),
        //     ref singleValue);
        // Create an integer array to receive the read data.
        int[] readDataBack = new int[DATA_ARRAY_LENGTH];
        // Read the integer data back from the data set
        H5D.read(dataSetId, new H5DataTypeId(H5T.H5Type.NATIVE_INT), new H5Array<int>(readDataBack));
        // Echo the data
        for (int i = 0; i < DATA_ARRAY_LENGTH; i++)
        {
            Console.WriteLine(readDataBack[i]);
        }
        // Close all the open resources.
        H5D.close(dataSetId);
        // Reopen and close the data sets to show that we can.
        dataSetId = H5D.open(fileId, "/csharpExample");
        H5D.close(dataSetId);
        // An absolute path also resolves when opened via a group id.
        dataSetId = H5D.open(groupId, "/csharpExample");
        H5D.close(dataSetId);
        H5T.close(typeId);
        H5G.close(groupId);
        // Get H5O info
        H5ObjectInfo objectInfo = H5O.getInfoByName(fileId, "/csharpExample");
        Console.WriteLine("header.space.message is {0}", objectInfo.header.space.message);
        Console.WriteLine("fileNumber is {0}", objectInfo.fileNumber);
        Console.WriteLine("address is {0}", objectInfo.address);
        Console.WriteLine("type is {0}", objectInfo.objectType.ToString());
        Console.WriteLine("reference count is {0}", objectInfo.referenceCount);
        Console.WriteLine("modification time is {0}", objectInfo.modificationTime);
        Console.WriteLine("birth time is {0}", objectInfo.birthTime);
        Console.WriteLine("access time is {0}", objectInfo.accessTime);
        Console.WriteLine("change time is {0}", objectInfo.changeTime);
        Console.WriteLine("number of attributes is {0}", objectInfo.nAttributes);
        Console.WriteLine("header version is {0}", objectInfo.header.version);
        Console.WriteLine("header nMessages is {0}", objectInfo.header.nMessages);
        Console.WriteLine("header nChunks is {0}", objectInfo.header.nChunks);
        Console.WriteLine("header flags is {0}", objectInfo.header.flags);
        H5LinkInfo linkInfo = H5L.getInfo(fileId, "/cSharpGroup");
        Console.WriteLine("address: {0:x}, charSet: {1}, creationOrder: {2}", linkInfo.address, linkInfo.charSet, linkInfo.creationOrder);
        Console.WriteLine("linkType: {0}, softLinkSizeOrUD: {1}", linkInfo.linkType, linkInfo.softLinkSizeOrUD);
        // Reopen the group id to show that we can.
        groupId = H5G.open(fileId, "/cSharpGroup");
        // Use H5L.iterate to visit links
        H5LIterateCallback myDelegate;
        myDelegate = MyH5LFunction;
        ulong linkNumber = 0;
        H5IterationResult result = H5L.iterate(groupId, H5IndexType.NAME, H5IterationOrder.INCREASING, ref linkNumber, myDelegate, 0);
        // Create some attributes
        // NOTE(review): attributeType and attributeSpace (created below) are
        // never closed in this example.
        H5DataTypeId attributeType = H5T.copy(H5T.H5Type.NATIVE_INT);
        long[] attributeDims = new long[1];
        const int RAMP_LENGTH = 5;
        attributeDims[0] = RAMP_LENGTH;
        int[] ascendingRamp = new int[RAMP_LENGTH] { 1, 2, 3, 4, 5 };
        int[] descendingRamp = new int[RAMP_LENGTH] { 5, 4, 3, 2, 1 };
        int[] randomData = new int[RAMP_LENGTH] { 3, 123, 27, 6, 1 };
        int[] readBackRamp = new int[RAMP_LENGTH];
        // Call set buffer using H5Memory
        // Allocate memory from "C" runtime heap (not garbage collected)
        H5Memory typeConversionBuffer = new H5Memory(new IntPtr(DATA_ARRAY_LENGTH));
        H5Memory backgroundBuffer = new H5Memory(new IntPtr(DATA_ARRAY_LENGTH));
        // Set the property list type conversion and background buffers.
        // NOTE(review): myPropertyListId is created but never used in a
        // transfer and never closed.
        H5PropertyListId myPropertyListId = H5P.create(H5P.PropertyListClass.DATASET_XFER);
        H5P.setBuffer(myPropertyListId, typeConversionBuffer, backgroundBuffer);
        // Test use of vlen
        // Create a vlen data type
        // NOTE(review): tid1 and vDataSetId are never closed.
        H5DataTypeId tid1 = H5T.vlenCreate(H5T.H5Type.NATIVE_UINT);
        H5DataSetId vDataSetId = H5D.create(fileId, "/vlenTest", tid1, spaceId);
        // Create a jagged array of integers.
        hvl_t[] vlArray = new hvl_t[DATA_ARRAY_LENGTH];
        // HDF5 variable length data types require the use of void
        // pointers. C# requires that sections of code that deal
        // directly with pointer be marked
        // as unsafe.
        unsafe
        {
            for (int i = 0; i < DATA_ARRAY_LENGTH; i++)
            {
                // NOTE(review): ptr is computed but never used.
                IntPtr ptr = new IntPtr((i + 1) * sizeof(int));
                // Allocate memory that is not garbage collected.
                vlArray[i].p = H5CrtHeap.Allocate(new IntPtr((i + 1) * sizeof(int))).ToPointer();
                // Fill the array with integers = the row number
                int *intPointer = (int *)vlArray[i].p;
                for (int j = 0; j < i + 1; j++)
                {
                    intPointer[j] = (int)i;
                }
                // hvl_t.len is natively sized; pick the cast by pointer width.
                if (IntPtr.Size == 8)
                {
                    vlArray[i].len = (ulong)i + 1;
                }
                else
                {
                    vlArray[i].len = (uint)i + 1;
                }
            }
            // Write the variable length data
            H5D.write(vDataSetId, tid1, new H5Array<hvl_t>(vlArray));
            // Create an array to read back the array.
            hvl_t[] vlReadBackArray = new hvl_t[DATA_ARRAY_LENGTH];
            // Read the array back
            H5D.read(vDataSetId, tid1, new H5Array<hvl_t>(vlReadBackArray));
            // Write the data to the console
            for (int i = 0; i < DATA_ARRAY_LENGTH; i++)
            {
                int *iPointer = (int *)vlReadBackArray[i].p;
                for (int j = 0; j < i + 1; j++)
                {
                    Console.WriteLine(iPointer[j]);
                }
            }
            // Reclaim the memory that read allocated
            H5D.vlenReclaim(tid1, spaceId, new H5PropertyListId(H5P.Template.DEFAULT), new H5Array<hvl_t>(vlReadBackArray));
            // Now read it back again using our own memory manager
            //H5AllocateCallback allocDelegate = new H5AllocCallback(userAlloc);
            H5FreeCallback freeDelegate = new H5FreeCallback(userFree);
            // NOTE(review): memManagerPlist is never closed.
            H5PropertyListId memManagerPlist = H5P.create(H5P.PropertyListClass.DATASET_XFER);
            unsafe
            {
                H5P.setVlenMemManager(memManagerPlist, userAlloc, IntPtr.Zero, freeDelegate, IntPtr.Zero);
            }
            // Read the array back
            H5D.read(vDataSetId, tid1, new H5DataSpaceId(H5S.H5SType.ALL), new H5DataSpaceId(H5S.H5SType.ALL), memManagerPlist, new H5Array<hvl_t>(vlReadBackArray));
            // Write the data to the console
            for (int i = 0; i < DATA_ARRAY_LENGTH; i++)
            {
                int *iPointer = (int *)vlReadBackArray[i].p;
                for (int j = 0; j < i + 1; j++)
                {
                    Console.WriteLine(iPointer[j]);
                }
            }
            // Reclaim the memory that read allocated using our free routines
            H5D.vlenReclaim(tid1, spaceId, memManagerPlist, new H5Array<hvl_t>(vlReadBackArray));
        }
        H5S.close(spaceId);
        H5DataSpaceId attributeSpace = H5S.create_simple(1, attributeDims);
        H5AttributeId attributeId = H5A.create(groupId, "ascendingRamp", attributeType, attributeSpace);
        int offset = H5T.getOffset(attributeType);
        Console.WriteLine("Offset is {0}", offset);
        // Inspect the bit layout of a big-endian IEEE single-precision float.
        H5DataTypeId float32BE = H5T.copy(H5T.H5Type.IEEE_F32BE);
        H5T.Norm norm = H5T.getNorm(float32BE);
        Console.WriteLine("Norm is {0}", norm);
        int precision = H5T.getPrecision(float32BE);
        Console.WriteLine("Precision is {0}", precision);
        H5FloatingBitFields bitFields = H5T.getFields(float32BE);
        Console.WriteLine("getFields: sign bit position: {0}", bitFields.signBitPosition);
        Console.WriteLine("getFields: exponent bit position: {0}", bitFields.exponentBitPosition);
        Console.WriteLine("getFields: number of exponent bits: {0}", bitFields.nExponentBits);
        Console.WriteLine("getFields: mantissa bit position: {0} ", bitFields.mantissaBitPosition);
        Console.WriteLine("getFields: number of mantissa bits: {0}", bitFields.nMantissaBits);
        Console.Write("{0}", bitFields);
        // Write to an attribute
        H5A.write<int>(attributeId, attributeType, new H5Array<int>(ascendingRamp));
        // Read from an attribute
        H5A.read<int>(attributeId, attributeType, new H5Array<int>(readBackRamp));
        // Echo results
        Console.WriteLine("ramp elements are: ");
        foreach (int rampElement in readBackRamp)
        {
            Console.WriteLine(" {0}", rampElement);
        }
        H5A.close(attributeId);
        // Create and write two more attributes.
        attributeId = H5A.createByName(groupId, ".", "descendingRamp", attributeType, attributeSpace);
        H5A.write<int>(attributeId, attributeType, new H5Array<int>(descendingRamp));
        H5A.close(attributeId);
        attributeId = H5A.createByName(groupId, ".", "randomData", attributeType, attributeSpace);
        H5A.write<int>(attributeId, attributeType, new H5Array<int>(randomData));
        // Read back the attribute data
        H5A.read<int>(attributeId, attributeType, new H5Array<int>(readBackRamp));
        Console.WriteLine("ramp elements are: ");
        foreach (int rampElement in readBackRamp)
        {
            Console.WriteLine(" {0}", rampElement);
        }
        H5A.close(attributeId);
        // Iterate through the attributes.
        long position = 0;
        H5AIterateCallback attributeDelegate;
        attributeDelegate = MyH5AFunction;
        H5ObjectInfo groupInfo = H5O.getInfo(groupId);
        Console.WriteLine("fileNumber: {0}, total space: {1}, referceCount: {2}, modification time: {3}", groupInfo.fileNumber, groupInfo.header.space.total, groupInfo.referenceCount, groupInfo.modificationTime);
        // While iterating, collect the names of all the attributes.
        ArrayList attributeNames = new ArrayList();
        H5A.iterate(groupId, H5IndexType.CRT_ORDER, H5IterationOrder.INCREASING, ref position, attributeDelegate, (object)attributeNames);
        // Write out the names of the attributes
        foreach (string attributeName in attributeNames)
        {
            Console.WriteLine("attribute name is {0}", attributeName);
        }
        // Demonstrate H5A.openName
        attributeId = H5A.openName(groupId, "descendingRamp");
        Console.WriteLine("got {0} by name", H5A.getName(attributeId));
        H5A.close(attributeId);
        // Demonstrate H5A.getNameByIndex
        string secondAttribute = H5A.getNameByIndex(groupId, ".", H5IndexType.CRT_ORDER, H5IterationOrder.INCREASING, 1);
        Console.WriteLine("second attribute is named {0}", secondAttribute);
        // Demonstrate H5G.getInfo
        H5GInfo gInfo = H5G.getInfo(groupId);
        Console.WriteLine("link storage: {0}, max creation order: {1}, nLinks: {2}", gInfo.linkStorageType, gInfo.maxCreationOrder, gInfo.nLinks);
        // Demonstrate H5A.getSpace
        //attributeId = H5A.openByName(groupId, ".", "descendingRamp");
        attributeId = H5A.open(groupId, "descendingRamp");
        H5DataSpaceId rampSpaceId = H5A.getSpace(attributeId);
        H5S.close(rampSpaceId);
        // Demonstrate H5A.getType
        H5DataTypeId rampTypeId = H5A.getType(attributeId);
        Console.WriteLine("size of ramp data type is {0} bytes.", H5T.getSize(rampTypeId));
        H5T.close(rampTypeId);
        // Demonstrate H5A.getInfo
        H5AttributeInfo rampInfo = H5A.getInfo(attributeId);
        Console.WriteLine("characterset: {0}, creationOrder: {1}, creationOrderValid: {2}, dataSize: {3}", rampInfo.characterSet, rampInfo.creationOrder, rampInfo.creationOrderValid, rampInfo.dataSize);
        // Demonstrate H5A.Delete
        // NOTE(review): the attribute id opened above is never closed before
        // the attribute is deleted — confirm whether that handle should be
        // closed first.
        H5A.Delete(groupId, "descendingRamp");
        //H5A.DeleteByName(groupId, ".", "descendingRamp");
        // Iterate through the attributes to show that the deletion
        // was successful.
        position = 0;
        ArrayList namesAfterDeletion = new ArrayList();
        H5A.iterate(groupId, H5IndexType.CRT_ORDER, H5IterationOrder.DECREASING, ref position, attributeDelegate, (object)namesAfterDeletion);
        H5G.close(groupId);
        H5F.close(fileId);
        // Reopen and reclose the file.
        H5FileId openId = H5F.open("myCSharp.h5", H5F.OpenMode.ACC_RDONLY);
        H5F.close(openId);
        // Set the function to be called on error.
        unsafe
        {
            H5AutoCallback myErrorDelegate = new H5AutoCallback(myErrorFunction);
            H5E.setAuto(0, myErrorDelegate, IntPtr.Zero);
        }
        // Uncomment the next line if you want to generate an error to
        // test H5E.setAuto
        // H5G.open(openId, "noGroup");
    }
    // This catches all the HDF exception classes. Because each call
    // generates a unique exception, different exception can be handled
    // separately. For example, to catch open errors we could have used
    // catch (H5FopenException openException).
    catch (HDFException e)
    {
        Console.WriteLine(e.Message);
    }
    Console.WriteLine("Processing complete!");
    Console.ReadLine();
}
// Verifies that a dataset transfer property list with an explicit type-conversion
// buffer (H5P.setBuffer) round-trips a 2-D int array correctly.
// Side effects: creates "SDStwodim.h5" in the working directory; increments the
// file-level `nerrors` counter on any mismatch or exception.
static void test_buffer()
{
    try
    {
        Console.Write("Testing setting buffer");

        const string FILE_NAME = ("SDStwodim.h5");
        const string DSET_NAME = ("Two-dim IntArray");
        const int NX = 5;   // dataset dimension
        const int NY = 2;
        const int RANK = 2; // two-dimension

        // Data and input buffer initialization.
        int i, j;
        int[,] data = new int[NX, NY];
        for (i = 0; i < NX; i++)
        {
            for (j = 0; j < NY; j++)
            {
                data[i, j] = i + j;
            }
        }

        // Create a new file using H5F_ACC_TRUNC access, default file
        // creation properties, and default file access properties.
        H5FileId fileId = H5F.create(FILE_NAME, H5F.CreateMode.ACC_TRUNC);

        // Describe the size of the array and create the data space for
        // the fixed-size dataset.
        long[] dims = new long[RANK];
        dims[0] = NX;
        dims[1] = NY;
        H5DataSpaceId dspaceId = H5S.create_simple(RANK, dims);

        // Define datatype for the data in the file.
        H5DataTypeId dtypeId = H5T.copy(H5T.H5Type.NATIVE_INT);

        // Create a dataset transfer property list.
        H5PropertyListId xferId = H5P.create(H5P.PropertyListClass.DATASET_XFER);

        unsafe
        {
            // Small (1000-byte) type-conversion buffer on the CRT heap.
            IntPtr tconv_buf = H5CrtHeap.Allocate(new IntPtr(1000));
            try
            {
                // Set buffer size on the transfer property list.
                H5P.setBuffer(xferId, 1000, tconv_buf, IntPtr.Zero);

                // Create the data set DSET_NAME.
                H5DataSetId dsetId = H5D.create(fileId, DSET_NAME, dtypeId, dspaceId);

                // Write the two-dimensional data set array using the
                // transfer property list that carries the small buffer.
                H5DataSpaceId H5S_ALL = new H5DataSpaceId(H5S.H5SType.ALL);
                H5D.write(dsetId, new H5DataTypeId(H5T.H5Type.NATIVE_INT),
                          H5S_ALL, H5S_ALL, xferId, new H5Array <int>(data));

                // Read data back.
                int[,] outdata = new int[NX, NY];
                H5D.read(dsetId, new H5DataTypeId(H5T.H5Type.NATIVE_INT),
                         H5S_ALL, H5S_ALL, xferId, new H5Array <int>(outdata));

                // Compare against the input buffer to verify.
                for (i = 0; i < NX; i++)
                {
                    for (j = 0; j < NY; j++)
                    {
                        if (outdata[i, j] != data[i, j])
                        {
                            Console.WriteLine("\ntest_twodim_array: read value differs from input: read {0} - input {1}",
                                outdata[i, j], data[i, j]);
                            nerrors++;
                        }
                    }
                }

                // Close all objects and file.
                H5D.close(dsetId);
                H5T.close(dtypeId);
                H5S.close(dspaceId);
                H5P.close(xferId);
                H5F.close(fileId);
            }
            finally
            {
                // FIX: free the unmanaged buffer even when an HDF5 call above
                // throws; previously it was only freed on the success path,
                // leaking CRT-heap memory on error.
                H5CrtHeap.Free(tconv_buf);
            }
        } // end unsafe block

        Console.WriteLine("\t\t\t\t\tPASSED");
    }
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_buffer
} // test_buffer

// Create a new dataset that uses chunked storage instead of the default layout.
// Writes a deflate-compressed, chunked 2-D int dataset, reopens the file
// read-only, reads it back, and verifies the round trip.
// Side effects: creates "SDSchunked.h5"; increments `nerrors` on mismatch/exception.
static void test_chunked_dset()
{
    try
    {
        Console.Write("Testing dataset with chunked storage layout");

        const string FILE_NAME = ("SDSchunked.h5");
        const string DSET_CHUNKED_NAME = ("Chunked IntArray");
        const int NX = 50; // data set dimension
        const int NY = 20;
        const int RANK = 2; // two-dimension

        H5PropertyListId P_DEFAULT = new H5PropertyListId(H5P.Template.DEFAULT);

        // Data and input buffer initialization.
        int i, j;
        int[,] data = new int[NX, NY];
        for (i = 0; i < NX; i++)
        {
            for (j = 0; j < NY; j++)
            {
                data[i, j] = i + j;
            }
        }

        // Create a new file using H5F_ACC_TRUNC access, default file
        // creation properties, and default file access properties.
        H5FileId fileId = H5F.create(FILE_NAME, H5F.CreateMode.ACC_TRUNC);

        // Describe the size of the array and create the data space for
        // the fixed-size dataset.
        long[] dims = new long[RANK];
        dims[0] = NX;
        dims[1] = NY;
        H5DataSpaceId dspaceId = H5S.create_simple(RANK, dims);

        // Define datatype for the data in the file.
        H5DataTypeId dtypeId = H5T.copy(H5T.H5Type.NATIVE_INT);

        // Create dataset creation property list.
        H5PropertyListId crtpltId = H5P.create(H5P.PropertyListClass.DATASET_CREATE);

        // Set chunk sizes for the dataset.
        long[] csize = new long[2] { 5, 2 };
        H5P.setChunk(crtpltId, csize);

        // Set deflate (maximum compression level) for the dataset.
        H5P.setDeflate(crtpltId, 9);

        // Create the data set DSET_CHUNKED_NAME.
        H5DataSetId dsetId = H5D.create(fileId, DSET_CHUNKED_NAME, dtypeId,
            dspaceId, P_DEFAULT, crtpltId, P_DEFAULT);

        // FIX: release the creation property list once the dataset exists;
        // previously it was never closed (identifier leak), unlike the
        // transfer list that test_buffer closes.
        H5P.close(crtpltId);

        // Write the data set array.
        H5D.write(dsetId, dtypeId, new H5Array <int>(data));

        // Close dataset and file.
        H5D.close(dsetId);
        H5F.close(fileId);

        // Open the file again in read only mode.
        fileId = H5F.open(FILE_NAME, H5F.OpenMode.ACC_RDONLY);

        // Open the dataset using its name.
        dsetId = H5D.open(fileId, DSET_CHUNKED_NAME);

        // Clear the output buffer before reading.
        int[,] outdata = new int[NX, NY];
        for (i = 0; i < NX; i++)
        {
            for (j = 0; j < NY; j++)
            {
                outdata[i, j] = 0;
            }
        }

        // Read data back.
        H5D.read(dsetId, new H5DataTypeId(H5T.H5Type.NATIVE_INT),
                 new H5Array <int>(outdata));

        // Compare against input buffer to verify.
        for (i = 0; i < NX; i++)
        {
            for (j = 0; j < NY; j++)
            {
                if (outdata[i, j] != data[i, j])
                {
                    Console.WriteLine("\ntest_chunked_dset: read value differs from input: read {0} - input {1}",
                        outdata[i, j], data[i, j]);
                    nerrors++;
                }
            }
        }

        // Close all objects and file.
        H5D.close(dsetId);
        H5T.close(dtypeId);
        H5S.close(dspaceId);
        H5F.close(fileId);

        Console.WriteLine("\t\tPASSED");
    }
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_chunked_dset
} // test_compound_dtype

// Exercises variable-length (VL) datatypes: creates a VL-of-uint type, writes
// real data, "nil" data, and real data again to a dataset, then reopens the
// dataset and reads it back through custom VL memory-manager callbacks,
// finally reclaiming the library-allocated read buffers with vlenReclaim.
// Side effects: creates dataset "Vlen Dataset" in `fileId`; increments the
// file-level `nerrors` counter on class mismatch or exception.
static void test_vlen_dtype(H5FileId fileId)
{
    try
    {
        Console.Write("Testing variable-length datatypes");

        // Create a VL datatype of int.
        H5DataTypeId vltId = H5T.vlenCreate(H5T.H5Type.NATIVE_UINT);

        // Make certain that the correct class can be detected.
        H5T.H5TClass tcls = H5T.getClass(vltId);
        if (tcls != H5T.H5TClass.VLEN)
        {
            Console.WriteLine("Test class should have been H5T_VLEN");
            nerrors++;
        }

        // Create a dataset with a simple 1-D dataspace of DIM1 elements.
        hssize_t[] dims = { DIM1 };
        H5DataSpaceId spaceId = H5S.create_simple(1, dims);
        H5DataSetId dsetId = H5D.create(fileId, "Vlen Dataset", vltId, spaceId);

        // Transfer property list; later carries the custom VL memory
        // allocation routines used for reading VL data.
        H5PropertyListId xferpId = H5P.create(H5P.PropertyListClass.DATASET_XFER);

        // Writing
        unsafe
        {
            hvl_t[] wdata = new hvl_t[DIM1];  /* Information to write */
            hvl_t[] wdata2 = new hvl_t[DIM1]; /* Information to write */
            hvl_t[] rdata = new hvl_t[DIM1];  /* Information read in */

            /* Allocate and initialize VL data to write */
            for (uint ii = 0; ii < DIM1; ii++)
            {
                // Element ii claims length ii+1 uints of CRT-heap storage.
                IntPtr iPtr = new IntPtr((ii + 1) * sizeof(uint));
                wdata[ii].p = H5CrtHeap.Allocate(iPtr).ToPointer();
                wdata[ii].len = ii + 1;
                // Only the first uint of each element is initialized here;
                // NOTE(review): remaining (len-1) uints are left uninitialized
                // heap memory — confirm this is intentional for the test.
                ((uint *)wdata[ii].p)[0] = ii * 10;
                // wdata2 is the "nil" data: null pointer, zero length.
                wdata2[ii].p = (void *)0;
                wdata2[ii].len = 0;
            } /* end for */

            H5D.write(dsetId, vltId, new H5Array <hvl_t>(wdata));

            // Read from dataset before writing data.
            H5D.read(dsetId, vltId, new H5Array <hvl_t>(rdata));

            // Write "nil" data to disk.
            H5D.write(dsetId, vltId, new H5Array <hvl_t>(wdata2));

            // Read from dataset with "nil" data.
            H5D.read(dsetId, vltId, new H5Array <hvl_t>(rdata));

            // Check data read in.
            // Write data to dataset.
            H5D.write(dsetId, vltId, new H5Array <hvl_t>(wdata));
            // NOTE(review): the CRT-heap buffers in wdata[ii].p are never
            // freed in this method — apparent leak; verify against the
            // original C test before changing.

            // Close resources (reopened below to test reading back).
            H5D.close(dsetId);
            H5T.close(vltId);
            H5S.close(spaceId);

            // Open the dataset.
            dsetId = H5D.open(fileId, "Vlen Dataset");

            // Get dataspace and datatype for the dataset.
            spaceId = H5D.getSpace(dsetId);
            vltId = H5D.getType(dsetId);

            // Change to the custom memory allocation routines for reading
            // VL data, so vlenReclaim below frees via the matching callback.
            H5AllocCallback allocCallback = new H5AllocCallback(Program.crtHeapAllocate);
            H5FreeCallback freeCallback = new H5FreeCallback(Program.crtHeapFree);
            H5P.setVlenMemManager(xferpId, allocCallback, IntPtr.Zero,
                freeCallback, IntPtr.Zero);

            // Read dataset from disk.
            H5D.read(dsetId, vltId, new H5DataSpaceId(H5S.H5SType.ALL),
                new H5DataSpaceId(H5S.H5SType.ALL), xferpId,
                new H5Array <hvl_t>(rdata));

            // Reclaim the read VL data (frees per-element buffers the
            // library allocated through the callbacks above).
            H5D.vlenReclaim(vltId, spaceId, xferpId, new H5Array <hvl_t>(rdata));
        } // end of unsafe

        // Close resources.
        H5D.close(dsetId);
        H5T.close(vltId);
        H5S.close(spaceId);
        H5P.close(xferpId);

        Console.WriteLine("\t\t\tPASSED");
    }
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_vlen_dtype