private void Write(H5GroupId parent, string name, IEnumerable<IMeasurement> measurements)
{
    int count = measurements.Count();
    H5DataSpaceId spaceId = H5S.create_simple(1, new long[1] { count });

    // Set compression options for dataset
    H5PropertyListId dataSetPropertyList = H5P.create(H5P.PropertyListClass.DATASET_CREATE);
    H5P.setDeflate(dataSetPropertyList, NumericDataCompression);
    H5P.setChunk(dataSetPropertyList, new long[] { count });

    H5DataSetId dataSetId = H5D.create(parent,
                                       name,
                                       measurement_t,
                                       spaceId,
                                       new H5PropertyListId(H5P.Template.DEFAULT),
                                       dataSetPropertyList,
                                       new H5PropertyListId(H5P.Template.DEFAULT));

    // Convert the measurements to the HDF5 compound type and write them in one call.
    MeasurementT[] ms = new MeasurementT[count];
    int ilmCount = 0;
    foreach (IMeasurement m in measurements)
    {
        ms[ilmCount++] = Convert(m);
    }
    H5D.write<MeasurementT>(dataSetId, measurement_t, new H5Array<MeasurementT>(ms));

    H5P.close(dataSetPropertyList);
    H5D.close(dataSetId);
    H5S.close(spaceId);
}
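// Reading such a compound dataset back follows the same pattern in reverse. The sketch below is
// an assumption-laden companion to the example above: it reuses the same measurement_t compound
// type and MeasurementT struct, and assumes this HDF5DotNet build exposes H5S.getSimpleExtentDims
// to recover the element count. Treat it as a minimal sketch, not the original author's code.
private MeasurementT[] Read(H5GroupId parent, string name)
{
    H5DataSetId dataSetId = H5D.open(parent, name);
    H5DataSpaceId spaceId = H5D.getSpace(dataSetId);

    // Assumption: getSimpleExtentDims returns the current dimensions as long[].
    long[] dims = H5S.getSimpleExtentDims(spaceId);
    MeasurementT[] buffer = new MeasurementT[dims[0]];

    H5D.read(dataSetId, measurement_t, new H5Array<MeasurementT>(buffer));

    H5S.close(spaceId);
    H5D.close(dataSetId);
    return buffer;
}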
private static H5DataSetId CreateDatasetIfNoneExists(H5FileId fileId, string setName, int dim1, H5DataTypeId dataType)
{
    H5DataSetId dataSetId = null;

    // Make sure every intermediate group in the dataset path exists.
    char[] sep = { '/' };
    var strings = setName.Split(sep);
    var path = "/";
    for (var x = 0; x < strings.Length - 1; x++)
    {
        path += strings[x] + "/";
        CreateGroupIfNoneExists(fileId, path);
    }

    var dims = new long[] { dim1 };
    var maxDims = new long[] { -1 };   // -1 marks the dimension as unlimited

    if (H5L.Exists(fileId, setName))
    {
        // Reuse the existing dataset and grow it to the requested size.
        dataSetId = H5D.open(fileId, setName);
        H5D.setExtent(dataSetId, dims);
    }
    else
    {
        // Create a chunked, deflate-compressed, extendible dataset.
        var linkp = H5P.create(H5P.PropertyListClass.LINK_CREATE);
        var accessp = H5P.create(H5P.PropertyListClass.DATASET_ACCESS);
        var createp = H5P.create(H5P.PropertyListClass.DATASET_CREATE);
        H5P.setChunk(createp, dims);
        H5P.setDeflate(createp, 1);

        var sId = H5S.create_simple(1, dims, maxDims);
        dataSetId = H5D.create(fileId, setName, dataType, sId, linkp, createp, accessp);
    }

    return dataSetId;
}
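// CreateGroupIfNoneExists is not shown in this snippet. A plausible counterpart, assuming the
// same H5L.Exists check used above and the H5G.create/H5G.open calls of HDF5DotNet, might look
// like the following. The name and behaviour are an assumption, not the original helper.
private static H5GroupId CreateGroupIfNoneExists(H5FileId fileId, string path)
{
    // Open the group if the link already exists, otherwise create it at the given path.
    return H5L.Exists(fileId, path)
        ? H5G.open(fileId, path)
        : H5G.create(fileId, path);
}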
void WriteDataToNewFile<T>(H5FileId fileId, string datasetName, T[,] barsData, long count, long parametersNumber, H5DataTypeId datatypeId)
{
    H5DataSpaceId dataspaceId = H5S.create_simple(2,
        new long[] { count, parametersNumber },
        new long[] { (long)H5S.H5SType.UNLIMITED, parametersNumber });

    H5PropertyListId createChunked = H5P.create(H5P.PropertyListClass.DATASET_CREATE);
    H5PropertyListId linkCreationDefaults = H5P.create(H5P.PropertyListClass.LINK_CREATE);
    H5PropertyListId accessCreationDefaults = H5P.create(H5P.PropertyListClass.DATASET_ACCESS);
    H5P.setChunk(createChunked, new long[] { 1, parametersNumber });

    H5DataSetId datasetId = H5D.create(fileId, datasetName, datatypeId, dataspaceId,
        linkCreationDefaults, createChunked, accessCreationDefaults);

    H5D.setExtent(datasetId, new long[] { count, parametersNumber });
    H5DataSpaceId newSpace = H5D.getSpace(datasetId);
    H5S.selectHyperslab(newSpace, H5S.SelectOperator.SET,
        new long[] { 0, 0 }, new long[] { count, parametersNumber });
    H5D.write(datasetId, datatypeId, new H5Array<T>(barsData));

    H5P.close(createChunked);
    H5P.close(linkCreationDefaults);
    H5P.close(accessCreationDefaults);
    H5S.close(newSpace);
    H5S.close(dataspaceId);
    H5D.close(datasetId);
}
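// Because the dataspace above has an unlimited first dimension and row-sized chunks, more rows
// can be appended later with the same setExtent-plus-hyperslab pattern used in the DownloadQuotes
// examples below. A minimal sketch, assuming a hypothetical AppendRows helper and a caller that
// tracks the current row count; the dataset keeps the element type it was created with.
void AppendRows<T>(H5DataSetId datasetId, H5DataTypeId datatypeId,
                   T[,] newData, long currentRows, long newRows, long parametersNumber)
{
    // Grow the dataset, then select the freshly added rows in the file dataspace.
    H5D.setExtent(datasetId, new long[] { currentRows + newRows, parametersNumber });
    H5DataSpaceId fileSpace = H5D.getSpace(datasetId);
    H5S.selectHyperslab(fileSpace, H5S.SelectOperator.SET,
        new long[] { currentRows, 0 }, new long[] { newRows, parametersNumber });

    // Memory dataspace matches the shape of the buffer being written.
    H5DataSpaceId memSpace = H5S.create_simple(2, new long[] { newRows, parametersNumber });
    H5D.write(datasetId, datatypeId, memSpace, fileSpace,
        new H5PropertyListId(H5P.Template.DEFAULT), new H5Array<T>(newData));

    H5S.close(memSpace);
    H5S.close(fileSpace);
}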
void DownloadQuotes()
{
    QuoteDepth marketDepth = includeLevel2 ? QuoteDepth.Level2 : QuoteDepth.Top;
    DownloadQuotesEnumerator enumerator = quoteClient.DownloadQuotes(symbol, marketDepth, from, to, -1);

    if (outputType == "csv")
    {
        string path = Path.Combine(this.location, string.Format("{0}{1}{2}{3}.csv",
            symbol, includeLevel2 ? " level2" : "", from.ToString(" yyyyMMdd"), to.ToString(" yyyyMMdd")));
        using (StreamWriter file = File.CreateText(path))
        {
            // Header matches top-of-book data; with level2 enabled each row simply carries
            // one price/volume pair per book level.
            file.WriteLine("date_time,bid_price,bid_volume,ask_price,ask_volume");
            for (Quote quote = enumerator.Next(-1); quote != null; quote = enumerator.Next(-1))
            {
                StringBuilder builder = new StringBuilder();
                builder.Append(quote.CreatingTime.ToString("yyyy-MM-dd HH:mm:ss.fff,", CultureInfo.InvariantCulture));
                foreach (QuoteEntry entry in quote.Bids)
                {
                    builder.AppendFormat("{0},{1},", entry.Price, entry.Volume);
                }
                foreach (QuoteEntry entry in quote.Asks)
                {
                    builder.AppendFormat("{0},{1}", entry.Price, entry.Volume);
                }
                file.WriteLine(builder);
            }
        }
        this.Log("Quotes are downloaded successfully");
    }
    else if (outputType == "hdf5")
    {
        string path = Path.Combine(this.location, string.Format("{0}{1}{2}{3}.h5",
            symbol, includeLevel2 ? " level2" : "", from.ToString(" yyyyMMdd"), to.ToString(" yyyyMMdd")));
        H5FileId fileId = H5F.create(path, H5F.CreateMode.ACC_TRUNC);

        // "Quotes": extendible count x 2 x 2 dataset of doubles
        // (bid/ask on the second axis, price/volume on the third).
        H5DataTypeId quotesTypeId = new H5DataTypeId(H5T.H5Type.NATIVE_DOUBLE);
        H5DataSpaceId quotesSpaceId = H5S.create_simple(3, new long[] { 1, 2, 2 },
            new long[] { (long)H5S.H5SType.UNLIMITED, 2, 2 });
        H5PropertyListId createChunkedQuotes = H5P.create(H5P.PropertyListClass.DATASET_CREATE);
        H5PropertyListId linkCreationDefaultsQuotes = H5P.create(H5P.PropertyListClass.LINK_CREATE);
        H5PropertyListId accessCreationDefaultsQuotes = H5P.create(H5P.PropertyListClass.DATASET_ACCESS);
        H5P.setChunk(createChunkedQuotes, new long[] { 1, 2, 2 });
        H5DataSetId quotesSetId = H5D.create(fileId, "Quotes", quotesTypeId, quotesSpaceId,
            linkCreationDefaultsQuotes, createChunkedQuotes, accessCreationDefaultsQuotes);

        // "DateQuotes": extendible 1-D dataset of Unix-epoch milliseconds, parallel to "Quotes".
        H5DataTypeId dateQuotesTypeId = new H5DataTypeId(H5T.H5Type.NATIVE_LLONG);
        H5DataSpaceId dateQuotesSpaceId = H5S.create_simple(1, new long[] { 1 },
            new long[] { (long)H5S.H5SType.UNLIMITED });
        H5PropertyListId createChunkedDate = H5P.create(H5P.PropertyListClass.DATASET_CREATE);
        H5PropertyListId linkCreationDefaultsDate = H5P.create(H5P.PropertyListClass.LINK_CREATE);
        H5PropertyListId accessCreationDefaultsDate = H5P.create(H5P.PropertyListClass.DATASET_ACCESS);
        H5P.setChunk(createChunkedDate, new long[] { 1 });
        H5DataSetId dateQuotesSetId = H5D.create(fileId, "DateQuotes", dateQuotesTypeId, dateQuotesSpaceId,
            linkCreationDefaultsDate, createChunkedDate, accessCreationDefaultsDate);

        int count = 0;
        int chunkCount = 0;
        double[,,] quotesArr = new double[chunkSize, 2, 2];
        long[] datesArr = new long[chunkSize];
        H5DataSpaceId memSpace;

        for (Quote quote = enumerator.Next(-1); quote != null; quote = enumerator.Next(-1))
        {
            int j = 0;
            foreach (QuoteEntry entry in quote.Bids)
            {
                quotesArr[chunkCount, 0, j] = entry.Price;
                quotesArr[chunkCount, 0, j + 1] = entry.Volume;
                j += 2;
            }
            j = 0;
            foreach (QuoteEntry entry in quote.Asks)
            {
                quotesArr[chunkCount, 1, j] = entry.Price;
                quotesArr[chunkCount, 1, j + 1] = entry.Volume;
                j += 2;
            }
            datesArr[chunkCount] = (long)quote.CreatingTime.Subtract(new DateTime(1970, 1, 1)).TotalMilliseconds;
            chunkCount++;
            count++;

            if (chunkCount == chunkSize)
            {
                // A full buffer: extend both datasets and write the buffer into the new rows.
                H5D.setExtent(quotesSetId, new long[] { count, 2, 2 });
                H5S.close(quotesSpaceId);
                quotesSpaceId = H5D.getSpace(quotesSetId);
                H5S.selectHyperslab(quotesSpaceId, H5S.SelectOperator.SET,
                    new long[] { count - chunkSize, 0, 0 }, new long[] { chunkSize, 2, 2 });
                memSpace = H5S.create_simple(3, new long[] { chunkSize, 2, 2 });
                H5D.write(quotesSetId, quotesTypeId, memSpace, quotesSpaceId,
                    new H5PropertyListId(H5P.Template.DEFAULT), new H5Array<double>(quotesArr));

                H5D.setExtent(dateQuotesSetId, new long[] { count });
                H5S.close(dateQuotesSpaceId);
                dateQuotesSpaceId = H5D.getSpace(dateQuotesSetId);
                H5S.selectHyperslab(dateQuotesSpaceId, H5S.SelectOperator.SET,
                    new long[] { count - chunkSize }, new long[] { chunkSize });
                memSpace = H5S.create_simple(1, new long[] { chunkSize });
                H5D.write(dateQuotesSetId, dateQuotesTypeId, memSpace, dateQuotesSpaceId,
                    new H5PropertyListId(H5P.Template.DEFAULT), new H5Array<long>(datesArr));

                chunkCount = 0;
            }
        }

        if (count % chunkSize != 0)
        {
            // Flush the partially filled buffer.
            int delta = count % chunkSize;
            H5D.setExtent(quotesSetId, new long[] { count, 2, 2 });
            H5S.close(quotesSpaceId);
            quotesSpaceId = H5D.getSpace(quotesSetId);
            H5S.selectHyperslab(quotesSpaceId, H5S.SelectOperator.SET,
                new long[] { count - delta, 0, 0 }, new long[] { delta, 2, 2 });
            memSpace = H5S.create_simple(3, new long[] { delta, 2, 2 });
            H5D.write(quotesSetId, quotesTypeId, memSpace, quotesSpaceId,
                new H5PropertyListId(H5P.Template.DEFAULT), new H5Array<double>(quotesArr));

            H5D.setExtent(dateQuotesSetId, new long[] { count });
            H5S.close(dateQuotesSpaceId);
            dateQuotesSpaceId = H5D.getSpace(dateQuotesSetId);
            H5S.selectHyperslab(dateQuotesSpaceId, H5S.SelectOperator.SET,
                new long[] { count - delta }, new long[] { delta });
            memSpace = H5S.create_simple(1, new long[] { delta });
            H5D.write(dateQuotesSetId, dateQuotesTypeId, memSpace, dateQuotesSpaceId,
                new H5PropertyListId(H5P.Template.DEFAULT), new H5Array<long>(datesArr));
        }

        H5P.close(createChunkedQuotes);
        H5P.close(linkCreationDefaultsQuotes);
        H5P.close(accessCreationDefaultsQuotes);
        H5P.close(createChunkedDate);
        H5P.close(linkCreationDefaultsDate);
        H5P.close(accessCreationDefaultsDate);
        H5S.close(quotesSpaceId);
        H5D.close(quotesSetId);
        H5S.close(dateQuotesSpaceId);
        H5D.close(dateQuotesSetId);
        H5F.close(fileId);

        this.Log("Quotes are downloaded successfully");
    }
}
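// The DateQuotes values written above are plain Unix-epoch milliseconds, so a consumer can
// restore the original timestamps with ordinary DateTime arithmetic. A one-line sketch;
// storedMillis is an illustrative variable, and the UTC kind assumes the source timestamps are UTC.
DateTime creatingTime = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc).AddMilliseconds(storedMillis);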
void DownloadQuotes()
{
    QuoteDepth marketDepth = includeLevel2 ? QuoteDepth.Level2 : QuoteDepth.Top;
    DownloadQuotesEnumerator enumerator = quoteClient.DownloadQuotes(symbol, marketDepth, from, to, -1);
    enumeratorTicks = enumerator;

    if (outputType == "csv")
    {
        if (includeVWAP)
        {
            throw new ArgumentException("VWAP is not supported for hdf5 and csv format.");
        }
        string path = Path.Combine(this.location, string.Format("{0}{1}{2}{3}.csv",
            symbol.Replace("/", "%2F"), includeLevel2 ? " level2" : "",
            from.ToString(" yyyyMMdd-HH-mm-ss"), to.ToString(" yyyyMMdd-HH-mm-ss")));
        using (StreamWriter file = File.CreateText(path))
        {
            file.WriteLine("date_time;bid_price;bid_volume;ask_price;ask_volume");
            for (Quote quote = enumerator.Next(-1); quote != null; quote = enumerator.Next(-1))
            {
                StringBuilder builder = new StringBuilder();
                builder.Append(quote.CreatingTime.ToString("yyyy-MM-dd HH:mm:ss.fff;", CultureInfo.InvariantCulture));
                // Indicative quotes are written with a negated volume so they can be told apart downstream.
                foreach (QuoteEntry entry in quote.Bids)
                {
                    if (quote.TickType.HasFlag(TickTypes.IndicativeBid) || quote.TickType.HasFlag(TickTypes.IndicativeBidAsk))
                    {
                        builder.AppendFormat("{0};{1};", (decimal)entry.Price, (decimal)(-entry.Volume));
                    }
                    else
                    {
                        builder.AppendFormat("{0};{1};", (decimal)entry.Price, (decimal)entry.Volume);
                    }
                }
                foreach (QuoteEntry entry in quote.Asks)
                {
                    if (quote.TickType.HasFlag(TickTypes.IndicativeAsk) || quote.TickType.HasFlag(TickTypes.IndicativeBidAsk))
                    {
                        builder.AppendFormat("{0};{1};", (decimal)entry.Price, (decimal)(-entry.Volume));
                    }
                    else
                    {
                        builder.AppendFormat("{0};{1};", (decimal)entry.Price, (decimal)entry.Volume);
                    }
                }
                builder.Remove(builder.Length - 1, 1);  // drop the trailing separator
                file.WriteLine(builder);
            }
        }
        if (includeLevel2)
        {
            this.Log($"level2 {symbol} are downloaded successfully");
        }
        else if (includeVWAP)
        {
            this.Log($"VWAP {symbol} are downloaded successfully");
        }
        else
        {
            this.Log($"Ticks {symbol} are downloaded successfully");
        }
    }
    else if (outputType == "hdf5")
    {
        if (includeVWAP)
        {
            throw new ArgumentException("VWAP is not supported for hdf5 and csv format.");
        }
        string path = Path.Combine(this.location, string.Format("{0}{1}{2}{3}.h5",
            symbol.Replace("/", "%2F"), includeLevel2 ? " level2" : "",
            from.ToString(" yyyyMMdd-HH-mm-ss"), to.ToString(" yyyyMMdd-HH-mm-ss")));
        H5FileId fileId = H5F.create(path, H5F.CreateMode.ACC_TRUNC);

        H5DataTypeId quotesTypeId = new H5DataTypeId(H5T.H5Type.NATIVE_DOUBLE);
        H5DataSpaceId quotesSpaceId = H5S.create_simple(3, new long[] { 1, 2, 2 },
            new long[] { (long)H5S.H5SType.UNLIMITED, 2, 2 });
        H5PropertyListId createChunkedQuotes = H5P.create(H5P.PropertyListClass.DATASET_CREATE);
        H5PropertyListId linkCreationDefaultsQuotes = H5P.create(H5P.PropertyListClass.LINK_CREATE);
        H5PropertyListId accessCreationDefaultsQuotes = H5P.create(H5P.PropertyListClass.DATASET_ACCESS);
        H5P.setChunk(createChunkedQuotes, new long[] { 1, 2, 2 });
        H5DataSetId quotesSetId = H5D.create(fileId, "Quotes", quotesTypeId, quotesSpaceId,
            linkCreationDefaultsQuotes, createChunkedQuotes, accessCreationDefaultsQuotes);

        H5DataTypeId dateQuotesTypeId = new H5DataTypeId(H5T.H5Type.NATIVE_LLONG);
        H5DataSpaceId dateQuotesSpaceId = H5S.create_simple(1, new long[] { 1 },
            new long[] { (long)H5S.H5SType.UNLIMITED });
        H5PropertyListId createChunkedDate = H5P.create(H5P.PropertyListClass.DATASET_CREATE);
        H5PropertyListId linkCreationDefaultsDate = H5P.create(H5P.PropertyListClass.LINK_CREATE);
        H5PropertyListId accessCreationDefaultsDate = H5P.create(H5P.PropertyListClass.DATASET_ACCESS);
        H5P.setChunk(createChunkedDate, new long[] { 1 });
        H5DataSetId dateQuotesSetId = H5D.create(fileId, "DateQuotes", dateQuotesTypeId, dateQuotesSpaceId,
            linkCreationDefaultsDate, createChunkedDate, accessCreationDefaultsDate);

        int count = 0;
        int chunkCount = 0;
        double[,,] quotesArr = new double[chunkSize, 2, 2];
        long[] datesArr = new long[chunkSize];
        H5DataSpaceId memSpace = null;

        for (Quote quote = enumerator.Next(-1); quote != null; quote = enumerator.Next(-1))
        {
            int j = 0;
            foreach (QuoteEntry entry in quote.Bids)
            {
                quotesArr[chunkCount, 0, j] = entry.Price;
                quotesArr[chunkCount, 0, j + 1] = entry.Volume;
                j += 2;
            }
            j = 0;
            foreach (QuoteEntry entry in quote.Asks)
            {
                quotesArr[chunkCount, 1, j] = entry.Price;
                quotesArr[chunkCount, 1, j + 1] = entry.Volume;
                j += 2;
            }
            datesArr[chunkCount] = (long)quote.CreatingTime.Subtract(new DateTime(1970, 1, 1)).TotalMilliseconds;
            chunkCount++;
            count++;

            if (chunkCount == chunkSize)
            {
                H5D.setExtent(quotesSetId, new long[] { count, 2, 2 });
                H5S.close(quotesSpaceId);
                quotesSpaceId = H5D.getSpace(quotesSetId);
                H5S.selectHyperslab(quotesSpaceId, H5S.SelectOperator.SET,
                    new long[] { count - chunkSize, 0, 0 }, new long[] { chunkSize, 2, 2 });
                memSpace = H5S.create_simple(3, new long[] { chunkSize, 2, 2 });
                H5D.write(quotesSetId, quotesTypeId, memSpace, quotesSpaceId,
                    new H5PropertyListId(H5P.Template.DEFAULT), new H5Array<double>(quotesArr));
                H5S.close(memSpace);

                H5D.setExtent(dateQuotesSetId, new long[] { count });
                H5S.close(dateQuotesSpaceId);
                dateQuotesSpaceId = H5D.getSpace(dateQuotesSetId);
                H5S.selectHyperslab(dateQuotesSpaceId, H5S.SelectOperator.SET,
                    new long[] { count - chunkSize }, new long[] { chunkSize });
                memSpace = H5S.create_simple(1, new long[] { chunkSize });
                H5D.write(dateQuotesSetId, dateQuotesTypeId, memSpace, dateQuotesSpaceId,
                    new H5PropertyListId(H5P.Template.DEFAULT), new H5Array<long>(datesArr));
                H5S.close(memSpace);

                chunkCount = 0;
            }
        }

        if (count % chunkSize != 0)
        {
            int delta = count % chunkSize;
            H5D.setExtent(quotesSetId, new long[] { count, 2, 2 });
            H5S.close(quotesSpaceId);
            quotesSpaceId = H5D.getSpace(quotesSetId);
            H5S.selectHyperslab(quotesSpaceId, H5S.SelectOperator.SET,
                new long[] { count - delta, 0, 0 }, new long[] { delta, 2, 2 });
            memSpace = H5S.create_simple(3, new long[] { delta, 2, 2 });
            H5D.write(quotesSetId, quotesTypeId, memSpace, quotesSpaceId,
                new H5PropertyListId(H5P.Template.DEFAULT), new H5Array<double>(quotesArr));
            H5S.close(memSpace);

            H5D.setExtent(dateQuotesSetId, new long[] { count });
            H5S.close(dateQuotesSpaceId);
            dateQuotesSpaceId = H5D.getSpace(dateQuotesSetId);
            H5S.selectHyperslab(dateQuotesSpaceId, H5S.SelectOperator.SET,
                new long[] { count - delta }, new long[] { delta });
            memSpace = H5S.create_simple(1, new long[] { delta });
            H5D.write(dateQuotesSetId, dateQuotesTypeId, memSpace, dateQuotesSpaceId,
                new H5PropertyListId(H5P.Template.DEFAULT), new H5Array<long>(datesArr));
            H5S.close(memSpace);
        }

        H5P.close(createChunkedQuotes);
        H5P.close(linkCreationDefaultsQuotes);
        H5P.close(accessCreationDefaultsQuotes);
        H5P.close(createChunkedDate);
        H5P.close(linkCreationDefaultsDate);
        H5P.close(accessCreationDefaultsDate);
        H5S.close(quotesSpaceId);
        H5D.close(quotesSetId);
        H5S.close(dateQuotesSpaceId);
        H5D.close(dateQuotesSetId);
        H5F.close(fileId);

        if (includeLevel2)
        {
            this.Log($"level2 {symbol} are downloaded successfully");
        }
        else if (includeVWAP)
        {
            this.Log($"VWAP {symbol} are downloaded successfully");
        }
        else
        {
            this.Log($"Ticks {symbol} are downloaded successfully");
        }
        GC.Collect();
        GC.WaitForPendingFinalizers();
    }
    else if (outputType == "csv_zip")
    {
        string quoteType = "Ticks";
        if (includeVWAP)
        {
            quoteType = "VWAP";
        }
        if (includeLevel2)
        {
            quoteType = "TicksLevel2";
        }
        string path = Path.Combine(location,
            $"{symbol.Replace("/", "%2F")}_{quoteType}_{from.ToString("yyyy-MM-dd-HH-mm-ss")}_{to.ToString("yyyy-MM-dd-HH-mm-ss")}.zip");
        Console.WriteLine(path);
        using (ZipOutputStream zs = new ZipOutputStream(File.Create(path)))
        {
            if (includeVWAP)
            {
                DownloadVWAPCSVNew(zs);
            }
            else if (includeLevel2)
            {
                DownloadLevel2CSVNew(enumerator, zs);
            }
            else
            {
                DownloadTicksCSVNew(enumerator, zs);
            }
        }
        if (includeLevel2)
        {
            this.Log($"level2 {symbol} are downloaded successfully");
        }
        else if (includeVWAP)
        {
            this.Log($"VWAP {symbol} are downloaded successfully");
        }
        else
        {
            this.Log($"Ticks {symbol} are downloaded successfully");
        }
    }
}
// Create a new dataset that uses chunked storage instead of the default layout.
static void test_chunked_dset()
{
    try
    {
        Console.Write("Testing dataset with chunked storage layout");

        const string FILE_NAME = "SDSchunked.h5";
        const string DSET_CHUNKED_NAME = "Chunked IntArray";
        const int NX = 50;      // data set dimension
        const int NY = 20;
        const int RANK = 2;     // two-dimension
        H5PropertyListId P_DEFAULT = new H5PropertyListId(H5P.Template.DEFAULT);

        // Data and input buffer initialization.
        int i, j;
        int[,] data = new int[NX, NY];
        for (i = 0; i < NX; i++)
        {
            for (j = 0; j < NY; j++)
            {
                data[i, j] = i + j;
            }
        }

        // Create a new file using H5F_ACC_TRUNC access,
        // default file creation properties, and default file access properties.
        H5FileId fileId = H5F.create(FILE_NAME, H5F.CreateMode.ACC_TRUNC);

        // Describe the size of the array and create the data space for the fixed-size dataset.
        long[] dims = new long[RANK];
        dims[0] = NX;
        dims[1] = NY;
        H5DataSpaceId dspaceId = H5S.create_simple(RANK, dims);

        // Define datatype for the data in the file.
        H5DataTypeId dtypeId = H5T.copy(H5T.H5Type.NATIVE_INT);

        // Create dataset creation property list.
        H5PropertyListId crtpltId = H5P.create(H5P.PropertyListClass.DATASET_CREATE);

        // Set chunk sizes for the dataset.
        long[] csize = new long[2] { 5, 2 };
        H5P.setChunk(crtpltId, csize);

        // Set deflate for the dataset.
        H5P.setDeflate(crtpltId, 9);

        // Create the data set DSET_CHUNKED_NAME.
        H5DataSetId dsetId = H5D.create(fileId, DSET_CHUNKED_NAME, dtypeId, dspaceId, P_DEFAULT, crtpltId, P_DEFAULT);

        // Write the two-dimensional data set array.
        H5D.write(dsetId, dtypeId, new H5Array<int>(data));

        // Close dataset and file.
        H5D.close(dsetId);
        H5F.close(fileId);

        // Open the file again in read only mode.
        fileId = H5F.open(FILE_NAME, H5F.OpenMode.ACC_RDONLY);

        // Open the dataset using its name.
        dsetId = H5D.open(fileId, DSET_CHUNKED_NAME);

        int[,] outdata = new int[NX, NY];
        for (i = 0; i < NX; i++)
        {
            for (j = 0; j < NY; j++)
            {
                outdata[i, j] = 0;
            }
        }

        // Read data back.
        H5D.read(dsetId, new H5DataTypeId(H5T.H5Type.NATIVE_INT), new H5Array<int>(outdata));

        // Compare against input buffer to verify.
        for (i = 0; i < NX; i++)
        {
            for (j = 0; j < NY; j++)
            {
                if (outdata[i, j] != data[i, j])
                {
                    Console.WriteLine("\ntest_chunked_dset: read value differs from input: read {0} - input {1}",
                        outdata[i, j], data[i, j]);
                    nerrors++;
                }
            }
        }

        // Close all objects and file.
        H5D.close(dsetId);
        H5T.close(dtypeId);
        H5S.close(dspaceId);
        H5F.close(fileId);

        Console.WriteLine("\t\tPASSED");
    }
    catch (HDFException anyHDF5E)
    {
        Console.WriteLine(anyHDF5E.Message);
        nerrors++;
    }
    catch (System.Exception sysE)
    {
        Console.WriteLine(sysE.TargetSite);
        Console.WriteLine(sysE.Message);
        nerrors++;
    }
} // test_chunked_dset
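// Assuming HDF5DotNet exposes an H5D.read overload that mirrors the six-argument H5D.write used
// in the DownloadQuotes examples above (dataset, memory type, memory space, file space, transfer
// property list, buffer), a sub-block of the chunked dataset could be read back as sketched below.
// Treat the overload and the chosen offsets as assumptions, not verified API or original code.
//
// Hedged sketch: read rows 10..19 (all NY columns) of "Chunked IntArray" into a 10 x 20 buffer.
int[,] block = new int[10, 20];
H5FileId readFileId = H5F.open("SDSchunked.h5", H5F.OpenMode.ACC_RDONLY);
H5DataSetId readSetId = H5D.open(readFileId, "Chunked IntArray");

H5DataSpaceId fileSpace = H5D.getSpace(readSetId);
H5S.selectHyperslab(fileSpace, H5S.SelectOperator.SET, new long[] { 10, 0 }, new long[] { 10, 20 });
H5DataSpaceId memSpace = H5S.create_simple(2, new long[] { 10, 20 });

H5D.read(readSetId, new H5DataTypeId(H5T.H5Type.NATIVE_INT), memSpace, fileSpace,
    new H5PropertyListId(H5P.Template.DEFAULT), new H5Array<int>(block));

H5S.close(memSpace);
H5S.close(fileSpace);
H5D.close(readSetId);
H5F.close(readFileId);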